/* $NetBSD: rk_cru_composite.c,v 1.8 2022/08/23 05:39:06 ryo Exp $ */

/*-
 * Copyright (c) 2018 Jared McNeill <jmcneill@invisible.ca>
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
 * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
 * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
 * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 */

#include <sys/cdefs.h>
__KERNEL_RCSID(0, "$NetBSD: rk_cru_composite.c,v 1.8 2022/08/23 05:39:06 ryo Exp $");

#include <sys/param.h>
#include <sys/bus.h>

#include <dev/clk/clk_backend.h>

#include <arm/rockchip/rk_cru.h>

#include <dev/fdt/fdtvar.h>
int
rk_cru_composite_enable(struct rk_cru_softc *sc, struct rk_cru_clk *clk,
    int enable)
{
	struct rk_cru_composite *composite = &clk->u.composite;

	KASSERT(clk->type == RK_CRU_COMPOSITE);

	if (composite->gate_mask == 0)
		return enable ? 0 : ENXIO;

	const uint32_t write_mask = composite->gate_mask << 16;
	const uint32_t write_val = enable ? 0 : composite->gate_mask;

	CRU_WRITE(sc, composite->gate_reg, write_mask | write_val);

	return 0;
}

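/*
 * Return the current rate of a composite clock, derived from the parent
 * rate and the divider programmed in the mux/divider (or fractional
 * divider) register. Returns 0 if the parent or its rate is unknown.
 */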
u_int
rk_cru_composite_get_rate(struct rk_cru_softc *sc,
    struct rk_cru_clk *clk)
{
	struct rk_cru_composite *composite = &clk->u.composite;
	struct clk *clkp, *clkp_parent;

	KASSERT(clk->type == RK_CRU_COMPOSITE);

	clkp = &clk->base;
	clkp_parent = clk_get_parent(clkp);
	if (clkp_parent == NULL)
		return 0;

	const u_int prate = clk_get_rate(clkp_parent);
	if (prate == 0)
		return 0;

	if (composite->flags & RK_COMPOSITE_FRACDIV) {
		const uint32_t val = CRU_READ(sc, composite->frac_reg);
		const u_int num = (val >> 16) & 0xffff;
		const u_int den = val & 0xffff;

		return (u_int)((uint64_t)prate * num / den);
	} else {
		const uint32_t val = CRU_READ(sc, composite->muxdiv_reg);
		u_int div;

		if (composite->flags & RK_COMPOSITE_POW2) {
			div = 1U << __SHIFTOUT(val, composite->div_mask);
		} else if (composite->flags & RK_COMPOSITE_HALFDIV) {
			div = __SHIFTOUT(val, composite->div_mask) * 2 + 3;
			return ((uint64_t)prate * 2 + div - 1) / div;
		} else {
			div = (composite->div_mask != 0)
			    ? __SHIFTOUT(val, composite->div_mask) + 1 : 1;
		}
		return prate / div;
	}
}

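/*
 * Greatest common divisor (Euclid's algorithm), used to reduce the
 * parent rate / target rate pair to the smallest equivalent fraction
 * for the fractional divider.
 */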
static u_int
rk_cru_composite_get_frac_div(u_int n, u_int d)
{
	u_int tmp;

	while (d > 0) {
		tmp = d;
		d = n % d;
		n = tmp;
	}

	return n;
}

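/*
 * Program the fractional divider so that rate = prate * numerator /
 * denominator. The reduced numerator (rate / gcd) goes in the upper
 * 16 bits and the reduced denominator (prate / gcd) in the lower 16,
 * matching the layout read back in rk_cru_composite_get_rate. Returns
 * EINVAL if the reduced fraction does not fit the 16-bit fields.
 */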
static int
rk_cru_composite_set_rate_frac(struct rk_cru_softc *sc,
    struct rk_cru_clk *clk, u_int rate)
{
	struct rk_cru_composite *composite = &clk->u.composite;
	struct clk *clk_parent;

	clk_parent = clk_get_parent(&clk->base);
	if (clk_parent == NULL)
		return ENXIO;

	const u_int prate = clk_get_rate(clk_parent);
	const u_int v = rk_cru_composite_get_frac_div(prate, rate);
	const u_int num = (prate / v) & 0xffff;
	const u_int den = (rate / v) & 0xffff;
	if (prate / num * den != rate)
		return EINVAL;

	CRU_WRITE(sc, composite->frac_reg, (den << 16) | num);

	return 0;
}

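/*
 * Set the rate of a composite clock. Depending on the clock's flags this
 * either delegates to the parent, programs the fractional divider, or
 * scans every parent/integer-divider combination for the closest
 * achievable rate (never exceeding the requested rate when
 * RK_COMPOSITE_ROUND_DOWN is set) and then programs the mux and divider
 * fields in a single masked write.
 */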
int
rk_cru_composite_set_rate(struct rk_cru_softc *sc,
    struct rk_cru_clk *clk, u_int rate)
{
	struct rk_cru_composite *composite = &clk->u.composite;
	u_int best_div, best_mux, best_diff;
	struct rk_cru_clk *rclk_parent;
	struct clk *clk_parent;

	KASSERT(clk->type == RK_CRU_COMPOSITE);

	if (composite->flags & RK_COMPOSITE_SET_RATE_PARENT) {
		clk_parent = clk_get_parent(&clk->base);
		if (clk_parent == NULL)
			return ENXIO;
		return clk_set_rate(clk_parent, rate);
	}

	if (composite->flags & RK_COMPOSITE_FRACDIV) {
		return rk_cru_composite_set_rate_frac(sc, clk, rate);
	}

	if (composite->flags & RK_COMPOSITE_POW2) {
		return ENXIO;	/* TODO */
	}
	if (composite->flags & RK_COMPOSITE_HALFDIV) {
		return ENXIO;	/* TODO */
	}

	best_div = 0;
	best_mux = 0;
	best_diff = INT_MAX;
	for (u_int mux = 0; mux < composite->nparents; mux++) {
		rclk_parent = rk_cru_clock_find(sc, composite->parents[mux]);
		if (rclk_parent != NULL)
			clk_parent = &rclk_parent->base;
		else
			clk_parent = fdtbus_clock_byname(composite->parents[mux]);
		if (clk_parent == NULL)
			continue;

		const u_int prate = clk_get_rate(clk_parent);
		if (prate == 0)
			continue;

		for (u_int div = 1; div <= __SHIFTOUT_MASK(composite->div_mask) + 1; div++) {
			const u_int cur_rate = prate / div;
			const int diff = (int)rate - (int)cur_rate;
			if (composite->flags & RK_COMPOSITE_ROUND_DOWN) {
				if (diff >= 0 && diff < best_diff) {
					best_diff = diff;
					best_mux = mux;
					best_div = div;
				}
			} else {
				if (abs(diff) < best_diff) {
					best_diff = abs(diff);
					best_mux = mux;
					best_div = div;
				}
			}
		}
	}
	if (best_diff == INT_MAX)
		return ERANGE;

	uint32_t write_mask = composite->div_mask << 16;
	uint32_t write_val = __SHIFTIN(best_div - 1, composite->div_mask);
	if (composite->mux_mask) {
		write_mask |= composite->mux_mask << 16;
		write_val |= __SHIFTIN(best_mux, composite->mux_mask);
	}

	CRU_WRITE(sc, composite->muxdiv_reg, write_mask | write_val);

	return 0;
}

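/*
 * Return the name of the currently selected parent, as indexed by the
 * mux field of the mux/divider register. Clocks without a mux always
 * report their first parent.
 */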
const char *
rk_cru_composite_get_parent(struct rk_cru_softc *sc,
    struct rk_cru_clk *clk)
{
	struct rk_cru_composite *composite = &clk->u.composite;
	uint32_t val;
	u_int mux;

	KASSERT(clk->type == RK_CRU_COMPOSITE);

	if (composite->mux_mask) {
		val = CRU_READ(sc, composite->muxdiv_reg);
		mux = __SHIFTOUT(val, composite->mux_mask);
	} else {
		mux = 0;
	}

	return composite->parents[mux];
}

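/*
 * Select a parent by name. Looks the name up in the clock's parent list
 * and programs the mux field, again using the upper 16 bits of the
 * register as a write-enable mask. Returns EINVAL if the clock has no
 * mux or the name is not a known parent.
 */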
int
rk_cru_composite_set_parent(struct rk_cru_softc *sc,
    struct rk_cru_clk *clk, const char *parent)
{
	struct rk_cru_composite *composite = &clk->u.composite;

	KASSERT(clk->type == RK_CRU_COMPOSITE);

	if (!composite->mux_mask)
		return EINVAL;

	for (u_int mux = 0; mux < composite->nparents; mux++) {
		if (strcmp(composite->parents[mux], parent) == 0) {
			const uint32_t write_mask = composite->mux_mask << 16;
			const uint32_t write_val = __SHIFTIN(mux, composite->mux_mask);

			CRU_WRITE(sc, composite->muxdiv_reg, write_mask | write_val);
			return 0;
		}
	}

	return EINVAL;
}