1 /*
2 * Copyright (c) 2013, The Linux Foundation. All rights reserved.
3 *
4 * This software is licensed under the terms of the GNU General Public
5 * License version 2, as published by the Free Software Foundation, and
6 * may be copied, distributed, and modified under those terms.
7 *
8 * This program is distributed in the hope that it will be useful,
9 * but WITHOUT ANY WARRANTY; without even the implied warranty of
10 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 * GNU General Public License for more details.
12 */
13
14 #include <linux/kernel.h>
15 #include <linux/bitops.h>
16 #include <linux/err.h>
17 #include <linux/bug.h>
18 #include <linux/export.h>
19 #include <linux/clk-provider.h>
20 #include <linux/delay.h>
21 #include <linux/regmap.h>
22 #include <linux/math64.h>
23
24 #include <asm/div64.h>
25
26 #include "clk-rcg.h"
27 #include "common.h"
28
/* Register offsets below are relative to rcg->cmd_rcgr */
#define CMD_REG			0x0
#define CMD_UPDATE		BIT(0)	/* latch a new configuration; HW clears when done */
#define CMD_ROOT_EN		BIT(1)	/* force the root clock on */
#define CMD_DIRTY_CFG		BIT(4)
#define CMD_DIRTY_N		BIT(5)
#define CMD_DIRTY_M		BIT(6)
#define CMD_DIRTY_D		BIT(7)
#define CMD_ROOT_OFF		BIT(31)	/* root clock is off (see clk_rcg2_is_enabled) */

#define CFG_REG			0x4
#define CFG_SRC_DIV_SHIFT	0	/* half-integer divider field */
#define CFG_SRC_SEL_SHIFT	8	/* source (parent) select field */
#define CFG_SRC_SEL_MASK	(0x7 << CFG_SRC_SEL_SHIFT)
#define CFG_MODE_SHIFT		12
#define CFG_MODE_MASK		(0x3 << CFG_MODE_SHIFT)
#define CFG_MODE_DUAL_EDGE	(0x2 << CFG_MODE_SHIFT)

/* M/N counter registers */
#define M_REG			0x8
#define N_REG			0xc
#define D_REG			0x10
49
clk_rcg2_is_enabled(struct clk_hw * hw)50 static int clk_rcg2_is_enabled(struct clk_hw *hw)
51 {
52 struct clk_rcg2 *rcg = to_clk_rcg2(hw);
53 u32 cmd;
54 int ret;
55
56 ret = regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CMD_REG, &cmd);
57 if (ret)
58 return ret;
59
60 return (cmd & CMD_ROOT_OFF) == 0;
61 }
62
clk_rcg2_get_parent(struct clk_hw * hw)63 static u8 clk_rcg2_get_parent(struct clk_hw *hw)
64 {
65 struct clk_rcg2 *rcg = to_clk_rcg2(hw);
66 int num_parents = clk_hw_get_num_parents(hw);
67 u32 cfg;
68 int i, ret;
69
70 ret = regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG, &cfg);
71 if (ret)
72 goto err;
73
74 cfg &= CFG_SRC_SEL_MASK;
75 cfg >>= CFG_SRC_SEL_SHIFT;
76
77 for (i = 0; i < num_parents; i++)
78 if (cfg == rcg->parent_map[i].cfg)
79 return i;
80
81 err:
82 pr_debug("%s: Clock %s has invalid parent, using default.\n",
83 __func__, clk_hw_get_name(hw));
84 return 0;
85 }
86
update_config(struct clk_rcg2 * rcg)87 static int update_config(struct clk_rcg2 *rcg)
88 {
89 int count, ret;
90 u32 cmd;
91 struct clk_hw *hw = &rcg->clkr.hw;
92 const char *name = clk_hw_get_name(hw);
93
94 ret = regmap_update_bits(rcg->clkr.regmap, rcg->cmd_rcgr + CMD_REG,
95 CMD_UPDATE, CMD_UPDATE);
96 if (ret)
97 return ret;
98
99 /* Wait for update to take effect */
100 for (count = 500; count > 0; count--) {
101 ret = regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CMD_REG, &cmd);
102 if (ret)
103 return ret;
104 if (!(cmd & CMD_UPDATE))
105 return 0;
106 udelay(1);
107 }
108
109 WARN(1, "%s: rcg didn't update its configuration.", name);
110 return -EBUSY;
111 }
112
/*
 * Select the parent at @index by programming the CFG source-select
 * field, then latch the change via update_config().
 */
static int clk_rcg2_set_parent(struct clk_hw *hw, u8 index)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	u32 sel = rcg->parent_map[index].cfg << CFG_SRC_SEL_SHIFT;
	int ret;

	ret = regmap_update_bits(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG,
				 CFG_SRC_SEL_MASK, sel);
	if (ret)
		return ret;

	return update_config(rcg);
}
126
127 /*
128 * Calculate m/n:d rate
129 *
130 * parent_rate m
131 * rate = ----------- x ---
132 * hid_div n
133 */
134 static unsigned long
calc_rate(unsigned long rate,u32 m,u32 n,u32 mode,u32 hid_div)135 calc_rate(unsigned long rate, u32 m, u32 n, u32 mode, u32 hid_div)
136 {
137 if (hid_div) {
138 rate *= 2;
139 rate /= hid_div + 1;
140 }
141
142 if (mode) {
143 u64 tmp = rate;
144 tmp *= m;
145 do_div(tmp, n);
146 rate = tmp;
147 }
148
149 return rate;
150 }
151
/*
 * Reconstruct the current output rate from the hardware registers.
 *
 * Reads CFG (divider, mode) and, when the RCG has an M/N counter, the
 * M and N registers. N is stored as ~(n - m) — the same encoding that
 * clk_rcg2_configure() writes — so it is inverted, masked and re-offset
 * by m before use.
 */
static unsigned long
clk_rcg2_recalc_rate(struct clk_hw *hw, unsigned long parent_rate)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	u32 cfg, hid_div, m = 0, n = 0, mode = 0, mask;

	regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG, &cfg);

	if (rcg->mnd_width) {
		mask = BIT(rcg->mnd_width) - 1;
		regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + M_REG, &m);
		m &= mask;
		regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + N_REG, &n);
		/* N_REG holds ~(n - m): undo the encoding to recover n */
		n = ~n;
		n &= mask;
		n += m;
		mode = cfg & CFG_MODE_MASK;
		mode >>= CFG_MODE_SHIFT;
	}

	mask = BIT(rcg->hid_width) - 1;
	hid_div = cfg >> CFG_SRC_DIV_SHIFT;
	hid_div &= mask;

	return calc_rate(parent_rate, m, n, mode, hid_div);
}
178
_freq_tbl_determine_rate(struct clk_hw * hw,const struct freq_tbl * f,struct clk_rate_request * req)179 static int _freq_tbl_determine_rate(struct clk_hw *hw,
180 const struct freq_tbl *f, struct clk_rate_request *req)
181 {
182 unsigned long clk_flags, rate = req->rate;
183 struct clk_hw *p;
184 struct clk_rcg2 *rcg = to_clk_rcg2(hw);
185 int index;
186
187 f = qcom_find_freq(f, rate);
188 if (!f)
189 return -EINVAL;
190
191 index = qcom_find_src_index(hw, rcg->parent_map, f->src);
192 if (index < 0)
193 return index;
194
195 clk_flags = clk_hw_get_flags(hw);
196 p = clk_hw_get_parent_by_index(hw, index);
197 if (!p)
198 return -EINVAL;
199
200 if (clk_flags & CLK_SET_RATE_PARENT) {
201 if (f->pre_div) {
202 if (!rate)
203 rate = req->rate;
204 rate /= 2;
205 rate *= f->pre_div + 1;
206 }
207
208 if (f->n) {
209 u64 tmp = rate;
210 tmp = tmp * f->n;
211 do_div(tmp, f->m);
212 rate = tmp;
213 }
214 } else {
215 rate = clk_hw_get_rate(p);
216 }
217 req->best_parent_hw = p;
218 req->best_parent_rate = rate;
219 req->rate = f->freq;
220
221 return 0;
222 }
223
clk_rcg2_determine_rate(struct clk_hw * hw,struct clk_rate_request * req)224 static int clk_rcg2_determine_rate(struct clk_hw *hw,
225 struct clk_rate_request *req)
226 {
227 struct clk_rcg2 *rcg = to_clk_rcg2(hw);
228
229 return _freq_tbl_determine_rate(hw, rcg->freq_tbl, req);
230 }
231
/*
 * Program the RCG's M, N, D and CFG registers from a frequency table
 * entry, then latch the new configuration with update_config().
 *
 * N is written as ~(n - m) and D as ~n, matching the decoding done in
 * clk_rcg2_recalc_rate(). Dual-edge mode is selected whenever the M/N
 * counter is in use with m != n.
 *
 * Returns 0 on success or a negative errno.
 */
static int clk_rcg2_configure(struct clk_rcg2 *rcg, const struct freq_tbl *f)
{
	u32 cfg, mask;
	struct clk_hw *hw = &rcg->clkr.hw;
	int ret, index = qcom_find_src_index(hw, rcg->parent_map, f->src);

	if (index < 0)
		return index;

	if (rcg->mnd_width && f->n) {
		mask = BIT(rcg->mnd_width) - 1;
		ret = regmap_update_bits(rcg->clkr.regmap,
				rcg->cmd_rcgr + M_REG, mask, f->m);
		if (ret)
			return ret;

		/* N is stored inverted, offset by m: ~(n - m) */
		ret = regmap_update_bits(rcg->clkr.regmap,
				rcg->cmd_rcgr + N_REG, mask, ~(f->n - f->m));
		if (ret)
			return ret;

		/* D is stored inverted: ~n */
		ret = regmap_update_bits(rcg->clkr.regmap,
				rcg->cmd_rcgr + D_REG, mask, ~f->n);
		if (ret)
			return ret;
	}

	/* Write divider, source select and mode in one masked update. */
	mask = BIT(rcg->hid_width) - 1;
	mask |= CFG_SRC_SEL_MASK | CFG_MODE_MASK;
	cfg = f->pre_div << CFG_SRC_DIV_SHIFT;
	cfg |= rcg->parent_map[index].cfg << CFG_SRC_SEL_SHIFT;
	if (rcg->mnd_width && f->n && (f->m != f->n))
		cfg |= CFG_MODE_DUAL_EDGE;
	ret = regmap_update_bits(rcg->clkr.regmap,
			rcg->cmd_rcgr + CFG_REG, mask, cfg);
	if (ret)
		return ret;

	return update_config(rcg);
}
272
__clk_rcg2_set_rate(struct clk_hw * hw,unsigned long rate)273 static int __clk_rcg2_set_rate(struct clk_hw *hw, unsigned long rate)
274 {
275 struct clk_rcg2 *rcg = to_clk_rcg2(hw);
276 const struct freq_tbl *f;
277
278 f = qcom_find_freq(rcg->freq_tbl, rate);
279 if (!f)
280 return -EINVAL;
281
282 return clk_rcg2_configure(rcg, f);
283 }
284
/*
 * clk_ops.set_rate: the parent rate is implied by the frequency table
 * entry, so @parent_rate is unused.
 */
static int clk_rcg2_set_rate(struct clk_hw *hw, unsigned long rate,
			     unsigned long parent_rate)
{
	return __clk_rcg2_set_rate(hw, rate);
}
290
/*
 * clk_ops.set_rate_and_parent: configuring from the frequency table
 * already selects the source, so @index is not needed here.
 */
static int clk_rcg2_set_rate_and_parent(struct clk_hw *hw,
		unsigned long rate, unsigned long parent_rate, u8 index)
{
	return __clk_rcg2_set_rate(hw, rate);
}
296
/* Standard RCG2 operations: rate driven entirely by the freq table. */
const struct clk_ops clk_rcg2_ops = {
	.is_enabled = clk_rcg2_is_enabled,
	.get_parent = clk_rcg2_get_parent,
	.set_parent = clk_rcg2_set_parent,
	.recalc_rate = clk_rcg2_recalc_rate,
	.determine_rate = clk_rcg2_determine_rate,
	.set_rate = clk_rcg2_set_rate,
	.set_rate_and_parent = clk_rcg2_set_rate_and_parent,
};
EXPORT_SYMBOL_GPL(clk_rcg2_ops);
307
/*
 * Force the RCG root on, apply @rate from the frequency table, then
 * clear the force-enable again. Used by the "shared" ops so that a
 * configuration update can complete even while the clock is otherwise
 * considered disabled.
 *
 * Returns 0 on success or a negative errno from the register writes or
 * the rate programming.
 */
static int clk_rcg2_shared_force_enable(struct clk_hw *hw, unsigned long rate)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	const char *name = clk_hw_get_name(hw);
	int ret, count;

	/* force enable RCG */
	ret = regmap_update_bits(rcg->clkr.regmap, rcg->cmd_rcgr + CMD_REG,
				 CMD_ROOT_EN, CMD_ROOT_EN);
	if (ret)
		return ret;

	/* wait for RCG to turn ON */
	for (count = 500; count > 0; count--) {
		/*
		 * NOTE(review): a negative errno from clk_rcg2_is_enabled()
		 * is truthy, so a read failure also breaks out of this loop
		 * and is then silently discarded — confirm this is intended.
		 */
		ret = clk_rcg2_is_enabled(hw);
		if (ret)
			break;
		udelay(1);
	}
	if (!count)
		pr_err("%s: RCG did not turn on\n", name);

	/* set clock rate */
	ret = __clk_rcg2_set_rate(hw, rate);
	if (ret)
		return ret;

	/* clear force enable RCG */
	return regmap_update_bits(rcg->clkr.regmap, rcg->cmd_rcgr + CMD_REG,
				  CMD_ROOT_EN, 0);
}
339
clk_rcg2_shared_set_rate(struct clk_hw * hw,unsigned long rate,unsigned long parent_rate)340 static int clk_rcg2_shared_set_rate(struct clk_hw *hw, unsigned long rate,
341 unsigned long parent_rate)
342 {
343 struct clk_rcg2 *rcg = to_clk_rcg2(hw);
344
345 /* cache the rate */
346 rcg->current_freq = rate;
347
348 if (!__clk_is_enabled(hw->clk))
349 return 0;
350
351 return clk_rcg2_shared_force_enable(hw, rcg->current_freq);
352 }
353
354 static unsigned long
clk_rcg2_shared_recalc_rate(struct clk_hw * hw,unsigned long parent_rate)355 clk_rcg2_shared_recalc_rate(struct clk_hw *hw, unsigned long parent_rate)
356 {
357 struct clk_rcg2 *rcg = to_clk_rcg2(hw);
358
359 return rcg->current_freq = clk_rcg2_recalc_rate(hw, parent_rate);
360 }
361
clk_rcg2_shared_enable(struct clk_hw * hw)362 static int clk_rcg2_shared_enable(struct clk_hw *hw)
363 {
364 struct clk_rcg2 *rcg = to_clk_rcg2(hw);
365
366 return clk_rcg2_shared_force_enable(hw, rcg->current_freq);
367 }
368
clk_rcg2_shared_disable(struct clk_hw * hw)369 static void clk_rcg2_shared_disable(struct clk_hw *hw)
370 {
371 struct clk_rcg2 *rcg = to_clk_rcg2(hw);
372
373 /* switch to XO, which is the lowest entry in the freq table */
374 clk_rcg2_shared_set_rate(hw, rcg->freq_tbl[0].freq, 0);
375 }
376
/*
 * Operations for RCGs shared between processors: the rate is cached in
 * software and the root is force-enabled around hardware updates.
 */
const struct clk_ops clk_rcg2_shared_ops = {
	.enable = clk_rcg2_shared_enable,
	.disable = clk_rcg2_shared_disable,
	.get_parent = clk_rcg2_get_parent,
	.recalc_rate = clk_rcg2_shared_recalc_rate,
	.determine_rate = clk_rcg2_determine_rate,
	.set_rate = clk_rcg2_shared_set_rate,
};
EXPORT_SYMBOL_GPL(clk_rcg2_shared_ops);
386
/* A rational ratio: output = source * num / den. */
struct frac_entry {
	int num;	/* numerator */
	int den;	/* denominator */
};
391
/* m/n fractions of a 675 MHz source for common eDP pixel rates */
static const struct frac_entry frac_table_675m[] = {	/* link rate of 270M */
	{ 52, 295 },	/* 119 M */
	{ 11, 57 },	/* 130.25 M */
	{ 63, 307 },	/* 138.50 M */
	{ 11, 50 },	/* 148.50 M */
	{ 47, 206 },	/* 154 M */
	{ 31, 100 },	/* 205.25 M */
	{ 107, 269 },	/* 268.50 M */
	{ },
};
402
403 static struct frac_entry frac_table_810m[] = { /* Link rate of 162M */
404 { 31, 211 }, /* 119 M */
405 { 32, 199 }, /* 130.25 M */
406 { 63, 307 }, /* 138.50 M */
407 { 11, 60 }, /* 148.50 M */
408 { 50, 263 }, /* 154 M */
409 { 31, 120 }, /* 205.25 M */
410 { 119, 359 }, /* 268.50 M */
411 { },
412 };
413
/*
 * Program the eDP pixel clock to @rate by searching the fraction table
 * matching the current source rate (810 MHz or the 675 MHz table as the
 * default) for an entry whose implied source rate is within 100 kHz of
 * @parent_rate. The current hardware divider is preserved; only m/n
 * are taken from the table.
 *
 * Returns 0 on success or -EINVAL when no fraction fits.
 */
static int clk_edp_pixel_set_rate(struct clk_hw *hw, unsigned long rate,
				  unsigned long parent_rate)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	struct freq_tbl f = *rcg->freq_tbl;
	const struct frac_entry *frac;
	int delta = 100000;	/* acceptable rate mismatch, in Hz */
	s64 src_rate = parent_rate;
	s64 request;
	u32 mask = BIT(rcg->hid_width) - 1;
	u32 hid_div;

	if (src_rate == 810000000)
		frac = frac_table_810m;
	else
		frac = frac_table_675m;

	for (; frac->num; frac++) {
		/* source rate this fraction would require for @rate */
		request = rate;
		request *= frac->den;
		request = div_s64(request, frac->num);
		if ((src_rate < (request - delta)) ||
		    (src_rate > (request + delta)))
			continue;

		/* keep the divider currently programmed in CFG */
		regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG,
				&hid_div);
		f.pre_div = hid_div;
		f.pre_div >>= CFG_SRC_DIV_SHIFT;
		f.pre_div &= mask;
		f.m = frac->num;
		f.n = frac->den;

		return clk_rcg2_configure(rcg, &f);
	}

	return -EINVAL;
}
452
/* The eDP pixel clock's parent is fixed by the frequency table. */
static int clk_edp_pixel_set_rate_and_parent(struct clk_hw *hw,
		unsigned long rate, unsigned long parent_rate, u8 index)
{
	return clk_edp_pixel_set_rate(hw, rate, parent_rate);
}
459
/*
 * Round an eDP pixel rate request: force the parent specified by the
 * frequency table, pick the fraction table for the parent's rate, and
 * find an m/n entry whose implied source rate is within 100 kHz of the
 * actual parent rate. The achievable rate is computed with the divider
 * currently programmed in hardware.
 *
 * Returns 0 with @req filled in, or -EINVAL when no fraction fits.
 */
static int clk_edp_pixel_determine_rate(struct clk_hw *hw,
					struct clk_rate_request *req)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	const struct freq_tbl *f = rcg->freq_tbl;
	const struct frac_entry *frac;
	int delta = 100000;	/* acceptable rate mismatch, in Hz */
	s64 request;
	u32 mask = BIT(rcg->hid_width) - 1;
	u32 hid_div;
	int index = qcom_find_src_index(hw, rcg->parent_map, f->src);

	/* Force the correct parent */
	req->best_parent_hw = clk_hw_get_parent_by_index(hw, index);
	req->best_parent_rate = clk_hw_get_rate(req->best_parent_hw);

	if (req->best_parent_rate == 810000000)
		frac = frac_table_810m;
	else
		frac = frac_table_675m;

	for (; frac->num; frac++) {
		/* source rate this fraction would require for req->rate */
		request = req->rate;
		request *= frac->den;
		request = div_s64(request, frac->num);
		if ((req->best_parent_rate < (request - delta)) ||
		    (req->best_parent_rate > (request + delta)))
			continue;

		/* use the divider currently programmed in CFG */
		regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG,
				&hid_div);
		hid_div >>= CFG_SRC_DIV_SHIFT;
		hid_div &= mask;

		req->rate = calc_rate(req->best_parent_rate,
				      frac->num, frac->den,
				      !!frac->den, hid_div);
		return 0;
	}

	return -EINVAL;
}
502
/* Operations for the eDP pixel clock (m/n chosen from fraction tables). */
const struct clk_ops clk_edp_pixel_ops = {
	.is_enabled = clk_rcg2_is_enabled,
	.get_parent = clk_rcg2_get_parent,
	.set_parent = clk_rcg2_set_parent,
	.recalc_rate = clk_rcg2_recalc_rate,
	.set_rate = clk_edp_pixel_set_rate,
	.set_rate_and_parent = clk_edp_pixel_set_rate_and_parent,
	.determine_rate = clk_edp_pixel_determine_rate,
};
EXPORT_SYMBOL_GPL(clk_edp_pixel_ops);
513
clk_byte_determine_rate(struct clk_hw * hw,struct clk_rate_request * req)514 static int clk_byte_determine_rate(struct clk_hw *hw,
515 struct clk_rate_request *req)
516 {
517 struct clk_rcg2 *rcg = to_clk_rcg2(hw);
518 const struct freq_tbl *f = rcg->freq_tbl;
519 int index = qcom_find_src_index(hw, rcg->parent_map, f->src);
520 unsigned long parent_rate, div;
521 u32 mask = BIT(rcg->hid_width) - 1;
522 struct clk_hw *p;
523
524 if (req->rate == 0)
525 return -EINVAL;
526
527 req->best_parent_hw = p = clk_hw_get_parent_by_index(hw, index);
528 req->best_parent_rate = parent_rate = clk_hw_round_rate(p, req->rate);
529
530 div = DIV_ROUND_UP((2 * parent_rate), req->rate) - 1;
531 div = min_t(u32, div, mask);
532
533 req->rate = calc_rate(parent_rate, 0, 0, 0, div);
534
535 return 0;
536 }
537
clk_byte_set_rate(struct clk_hw * hw,unsigned long rate,unsigned long parent_rate)538 static int clk_byte_set_rate(struct clk_hw *hw, unsigned long rate,
539 unsigned long parent_rate)
540 {
541 struct clk_rcg2 *rcg = to_clk_rcg2(hw);
542 struct freq_tbl f = *rcg->freq_tbl;
543 unsigned long div;
544 u32 mask = BIT(rcg->hid_width) - 1;
545
546 div = DIV_ROUND_UP((2 * parent_rate), rate) - 1;
547 div = min_t(u32, div, mask);
548
549 f.pre_div = div;
550
551 return clk_rcg2_configure(rcg, &f);
552 }
553
/* The byte clock's parent is fixed by the frequency table. */
static int clk_byte_set_rate_and_parent(struct clk_hw *hw,
		unsigned long rate, unsigned long parent_rate, u8 index)
{
	return clk_byte_set_rate(hw, rate, parent_rate);
}
560
/* Operations for the byte clock (divider-only, fixed table parent). */
const struct clk_ops clk_byte_ops = {
	.is_enabled = clk_rcg2_is_enabled,
	.get_parent = clk_rcg2_get_parent,
	.set_parent = clk_rcg2_set_parent,
	.recalc_rate = clk_rcg2_recalc_rate,
	.set_rate = clk_byte_set_rate,
	.set_rate_and_parent = clk_byte_set_rate_and_parent,
	.determine_rate = clk_byte_determine_rate,
};
EXPORT_SYMBOL_GPL(clk_byte_ops);
571
clk_byte2_determine_rate(struct clk_hw * hw,struct clk_rate_request * req)572 static int clk_byte2_determine_rate(struct clk_hw *hw,
573 struct clk_rate_request *req)
574 {
575 struct clk_rcg2 *rcg = to_clk_rcg2(hw);
576 unsigned long parent_rate, div;
577 u32 mask = BIT(rcg->hid_width) - 1;
578 struct clk_hw *p;
579 unsigned long rate = req->rate;
580
581 if (rate == 0)
582 return -EINVAL;
583
584 p = req->best_parent_hw;
585 req->best_parent_rate = parent_rate = clk_hw_round_rate(p, rate);
586
587 div = DIV_ROUND_UP((2 * parent_rate), rate) - 1;
588 div = min_t(u32, div, mask);
589
590 req->rate = calc_rate(parent_rate, 0, 0, 0, div);
591
592 return 0;
593 }
594
clk_byte2_set_rate(struct clk_hw * hw,unsigned long rate,unsigned long parent_rate)595 static int clk_byte2_set_rate(struct clk_hw *hw, unsigned long rate,
596 unsigned long parent_rate)
597 {
598 struct clk_rcg2 *rcg = to_clk_rcg2(hw);
599 struct freq_tbl f = { 0 };
600 unsigned long div;
601 int i, num_parents = clk_hw_get_num_parents(hw);
602 u32 mask = BIT(rcg->hid_width) - 1;
603 u32 cfg;
604
605 div = DIV_ROUND_UP((2 * parent_rate), rate) - 1;
606 div = min_t(u32, div, mask);
607
608 f.pre_div = div;
609
610 regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG, &cfg);
611 cfg &= CFG_SRC_SEL_MASK;
612 cfg >>= CFG_SRC_SEL_SHIFT;
613
614 for (i = 0; i < num_parents; i++) {
615 if (cfg == rcg->parent_map[i].cfg) {
616 f.src = rcg->parent_map[i].src;
617 return clk_rcg2_configure(rcg, &f);
618 }
619 }
620
621 return -EINVAL;
622 }
623
/* Read the hardware to determine parent during set_rate */
static int clk_byte2_set_rate_and_parent(struct clk_hw *hw,
		unsigned long rate, unsigned long parent_rate, u8 index)
{
	return clk_byte2_set_rate(hw, rate, parent_rate);
}
630
/* Operations for the byte2 clock (divider-only, parent read from HW). */
const struct clk_ops clk_byte2_ops = {
	.is_enabled = clk_rcg2_is_enabled,
	.get_parent = clk_rcg2_get_parent,
	.set_parent = clk_rcg2_set_parent,
	.recalc_rate = clk_rcg2_recalc_rate,
	.set_rate = clk_byte2_set_rate,
	.set_rate_and_parent = clk_byte2_set_rate_and_parent,
	.determine_rate = clk_byte2_determine_rate,
};
EXPORT_SYMBOL_GPL(clk_byte2_ops);
641
/* m/n fractions tried for the general pixel clock, in search order. */
static const struct frac_entry frac_table_pixel[] = {
	{ 3, 8 },
	{ 2, 9 },
	{ 4, 9 },
	{ 1, 1 },
	{ }
};
649
clk_pixel_determine_rate(struct clk_hw * hw,struct clk_rate_request * req)650 static int clk_pixel_determine_rate(struct clk_hw *hw,
651 struct clk_rate_request *req)
652 {
653 unsigned long request, src_rate;
654 int delta = 100000;
655 const struct frac_entry *frac = frac_table_pixel;
656
657 for (; frac->num; frac++) {
658 request = (req->rate * frac->den) / frac->num;
659
660 src_rate = clk_hw_round_rate(req->best_parent_hw, request);
661 if ((src_rate < (request - delta)) ||
662 (src_rate > (request + delta)))
663 continue;
664
665 req->best_parent_rate = src_rate;
666 req->rate = (src_rate * frac->num) / frac->den;
667 return 0;
668 }
669
670 return -EINVAL;
671 }
672
/*
 * Program the pixel clock to @rate: keep the source currently selected
 * in CFG, then search frac_table_pixel for an m/n entry whose implied
 * source rate is within 100 kHz of @parent_rate. The divider currently
 * programmed in hardware is preserved.
 *
 * Returns 0 on success or -EINVAL when no fraction fits.
 */
static int clk_pixel_set_rate(struct clk_hw *hw, unsigned long rate,
			      unsigned long parent_rate)
{
	struct clk_rcg2 *rcg = to_clk_rcg2(hw);
	struct freq_tbl f = { 0 };
	const struct frac_entry *frac = frac_table_pixel;
	unsigned long request;
	int delta = 100000;	/* acceptable rate mismatch, in Hz */
	u32 mask = BIT(rcg->hid_width) - 1;
	u32 hid_div, cfg;
	int i, num_parents = clk_hw_get_num_parents(hw);

	/* keep the source that is currently selected in hardware */
	regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG, &cfg);
	cfg &= CFG_SRC_SEL_MASK;
	cfg >>= CFG_SRC_SEL_SHIFT;

	for (i = 0; i < num_parents; i++)
		if (cfg == rcg->parent_map[i].cfg) {
			f.src = rcg->parent_map[i].src;
			break;
		}

	for (; frac->num; frac++) {
		/* source rate this fraction would require for @rate */
		request = (rate * frac->den) / frac->num;

		if ((parent_rate < (request - delta)) ||
		    (parent_rate > (request + delta)))
			continue;

		/* keep the divider currently programmed in CFG */
		regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG,
				&hid_div);
		f.pre_div = hid_div;
		f.pre_div >>= CFG_SRC_DIV_SHIFT;
		f.pre_div &= mask;
		f.m = frac->num;
		f.n = frac->den;

		return clk_rcg2_configure(rcg, &f);
	}
	return -EINVAL;
}
714
/* The pixel clock's source is read back from hardware in set_rate. */
static int clk_pixel_set_rate_and_parent(struct clk_hw *hw, unsigned long rate,
		unsigned long parent_rate, u8 index)
{
	return clk_pixel_set_rate(hw, rate, parent_rate);
}
720
/* Operations for the pixel clock (m/n from frac_table_pixel). */
const struct clk_ops clk_pixel_ops = {
	.is_enabled = clk_rcg2_is_enabled,
	.get_parent = clk_rcg2_get_parent,
	.set_parent = clk_rcg2_set_parent,
	.recalc_rate = clk_rcg2_recalc_rate,
	.set_rate = clk_pixel_set_rate,
	.set_rate_and_parent = clk_pixel_set_rate_and_parent,
	.determine_rate = clk_pixel_determine_rate,
};
EXPORT_SYMBOL_GPL(clk_pixel_ops);
731