1 // SPDX-License-Identifier: GPL-2.0
2 /*
3 * Copyright (c) 2013 NVIDIA CORPORATION. All rights reserved.
4 */
5
6 #include <linux/clk-provider.h>
7 #include <linux/err.h>
8 #include <linux/slab.h>
9
clk_composite_get_parent(struct clk_hw * hw)10 static u8 clk_composite_get_parent(struct clk_hw *hw)
11 {
12 struct clk_composite *composite = to_clk_composite(hw);
13 const struct clk_ops *mux_ops = composite->mux_ops;
14 struct clk_hw *mux_hw = composite->mux_hw;
15
16 __clk_hw_set_clk(mux_hw, hw);
17
18 return mux_ops->get_parent(mux_hw);
19 }
20
/* .set_parent for the composite: delegate to the mux component. */
static int clk_composite_set_parent(struct clk_hw *hw, u8 index)
{
	struct clk_composite *composite = to_clk_composite(hw);

	__clk_hw_set_clk(composite->mux_hw, hw);

	return composite->mux_ops->set_parent(composite->mux_hw, index);
}
31
clk_composite_recalc_rate(struct clk_hw * hw,unsigned long parent_rate)32 static unsigned long clk_composite_recalc_rate(struct clk_hw *hw,
33 unsigned long parent_rate)
34 {
35 struct clk_composite *composite = to_clk_composite(hw);
36 const struct clk_ops *rate_ops = composite->rate_ops;
37 struct clk_hw *rate_hw = composite->rate_hw;
38
39 __clk_hw_set_clk(rate_hw, hw);
40
41 return rate_ops->recalc_rate(rate_hw, parent_rate);
42 }
43
/*
 * .determine_rate for the composite clock.
 *
 * Delegation order:
 *  1. rate component's own .determine_rate, when it has one;
 *  2. rate component's .round_rate combined with a reparentable mux:
 *     every candidate parent is tried and the one whose rounded rate
 *     lies closest to the request wins;
 *  3. mux component's .determine_rate;
 *  4. otherwise the call is a driver wiring error (-EINVAL).
 *
 * On success req->rate (and, in case 2, req->best_parent_hw /
 * req->best_parent_rate) is filled in and 0 is returned; a negative
 * error code is returned on failure.
 */
static int clk_composite_determine_rate(struct clk_hw *hw,
					struct clk_rate_request *req)
{
	struct clk_composite *composite = to_clk_composite(hw);
	const struct clk_ops *rate_ops = composite->rate_ops;
	const struct clk_ops *mux_ops = composite->mux_ops;
	struct clk_hw *rate_hw = composite->rate_hw;
	struct clk_hw *mux_hw = composite->mux_hw;
	struct clk_hw *parent;
	unsigned long parent_rate;
	/*
	 * NOTE(review): rates are carried in signed longs here so that
	 * negative .round_rate error returns can be detected; very high
	 * rates (> LONG_MAX) would be misinterpreted — same trade-off
	 * as .round_rate itself.
	 */
	long tmp_rate, best_rate = 0;
	unsigned long rate_diff;
	unsigned long best_rate_diff = ULONG_MAX;
	long rate;
	int i;

	if (rate_hw && rate_ops && rate_ops->determine_rate) {
		__clk_hw_set_clk(rate_hw, hw);
		return rate_ops->determine_rate(rate_hw, req);
	} else if (rate_hw && rate_ops && rate_ops->round_rate &&
		   mux_hw && mux_ops && mux_ops->set_parent) {
		req->best_parent_hw = NULL;

		/*
		 * Reparenting is forbidden: round only against the
		 * current parent's rate.
		 */
		if (clk_hw_get_flags(hw) & CLK_SET_RATE_NO_REPARENT) {
			parent = clk_hw_get_parent(mux_hw);
			req->best_parent_hw = parent;
			req->best_parent_rate = clk_hw_get_rate(parent);

			rate = rate_ops->round_rate(rate_hw, req->rate,
						    &req->best_parent_rate);
			if (rate < 0)
				return rate;

			req->rate = rate;
			return 0;
		}

		/* Try each possible parent and keep the closest match. */
		for (i = 0; i < clk_hw_get_num_parents(mux_hw); i++) {
			parent = clk_hw_get_parent_by_index(mux_hw, i);
			if (!parent)
				continue;

			parent_rate = clk_hw_get_rate(parent);

			/* A negative rounded rate disqualifies this parent. */
			tmp_rate = rate_ops->round_rate(rate_hw, req->rate,
							&parent_rate);
			if (tmp_rate < 0)
				continue;

			rate_diff = abs(req->rate - tmp_rate);

			/* First usable candidate, or a closer one, wins. */
			if (!rate_diff || !req->best_parent_hw
				|| best_rate_diff > rate_diff) {
				req->best_parent_hw = parent;
				req->best_parent_rate = parent_rate;
				best_rate_diff = rate_diff;
				best_rate = tmp_rate;
			}

			/* Exact match — no point searching further. */
			if (!rate_diff)
				return 0;
		}

		req->rate = best_rate;
		return 0;
	} else if (mux_hw && mux_ops && mux_ops->determine_rate) {
		__clk_hw_set_clk(mux_hw, hw);
		return mux_ops->determine_rate(mux_hw, req);
	} else {
		pr_err("clk: clk_composite_determine_rate function called, but no mux or rate callback set!\n");
		return -EINVAL;
	}
}
117
clk_composite_round_rate(struct clk_hw * hw,unsigned long rate,unsigned long * prate)118 static long clk_composite_round_rate(struct clk_hw *hw, unsigned long rate,
119 unsigned long *prate)
120 {
121 struct clk_composite *composite = to_clk_composite(hw);
122 const struct clk_ops *rate_ops = composite->rate_ops;
123 struct clk_hw *rate_hw = composite->rate_hw;
124
125 __clk_hw_set_clk(rate_hw, hw);
126
127 return rate_ops->round_rate(rate_hw, rate, prate);
128 }
129
clk_composite_set_rate(struct clk_hw * hw,unsigned long rate,unsigned long parent_rate)130 static int clk_composite_set_rate(struct clk_hw *hw, unsigned long rate,
131 unsigned long parent_rate)
132 {
133 struct clk_composite *composite = to_clk_composite(hw);
134 const struct clk_ops *rate_ops = composite->rate_ops;
135 struct clk_hw *rate_hw = composite->rate_hw;
136
137 __clk_hw_set_clk(rate_hw, hw);
138
139 return rate_ops->set_rate(rate_hw, rate, parent_rate);
140 }
141
/*
 * .set_rate_and_parent for the composite clock.
 *
 * The two operations are ordered so that the rate is lowered before
 * reparenting whenever the current configuration would run faster than
 * the requested @rate, and raised only after the switch otherwise.
 */
static int clk_composite_set_rate_and_parent(struct clk_hw *hw,
					     unsigned long rate,
					     unsigned long parent_rate,
					     u8 index)
{
	struct clk_composite *composite = to_clk_composite(hw);
	const struct clk_ops *rate_ops = composite->rate_ops;
	const struct clk_ops *mux_ops = composite->mux_ops;
	struct clk_hw *rate_hw = composite->rate_hw;
	struct clk_hw *mux_hw = composite->mux_hw;
	bool rate_first;

	__clk_hw_set_clk(rate_hw, hw);
	__clk_hw_set_clk(mux_hw, hw);

	/* Would the rate component currently produce more than @rate? */
	rate_first = rate_ops->recalc_rate(rate_hw, parent_rate) > rate;

	if (!rate_first)
		mux_ops->set_parent(mux_hw, index);

	rate_ops->set_rate(rate_hw, rate, parent_rate);

	if (rate_first)
		mux_ops->set_parent(mux_hw, index);

	return 0;
}
168
clk_composite_is_enabled(struct clk_hw * hw)169 static int clk_composite_is_enabled(struct clk_hw *hw)
170 {
171 struct clk_composite *composite = to_clk_composite(hw);
172 const struct clk_ops *gate_ops = composite->gate_ops;
173 struct clk_hw *gate_hw = composite->gate_hw;
174
175 __clk_hw_set_clk(gate_hw, hw);
176
177 return gate_ops->is_enabled(gate_hw);
178 }
179
clk_composite_enable(struct clk_hw * hw)180 static int clk_composite_enable(struct clk_hw *hw)
181 {
182 struct clk_composite *composite = to_clk_composite(hw);
183 const struct clk_ops *gate_ops = composite->gate_ops;
184 struct clk_hw *gate_hw = composite->gate_hw;
185
186 __clk_hw_set_clk(gate_hw, hw);
187
188 return gate_ops->enable(gate_hw);
189 }
190
clk_composite_disable(struct clk_hw * hw)191 static void clk_composite_disable(struct clk_hw *hw)
192 {
193 struct clk_composite *composite = to_clk_composite(hw);
194 const struct clk_ops *gate_ops = composite->gate_ops;
195 struct clk_hw *gate_hw = composite->gate_hw;
196
197 __clk_hw_set_clk(gate_hw, hw);
198
199 gate_ops->disable(gate_hw);
200 }
201
/*
 * Allocate and register a composite clock assembled from optional mux,
 * rate and gate components.
 *
 * Exactly one of @parent_names / @pdata describes the parents (the
 * public wrappers pass NULL for the unused one).  Each supplied
 * component ops table is validated for its mandatory callback, and a
 * composite-level callback is wired up only for the operations the
 * component actually provides.
 *
 * Returns the registered clk_hw on success or an ERR_PTR on failure;
 * the composite allocation is freed on every error path.
 */
static struct clk_hw *__clk_hw_register_composite(struct device *dev,
			const char *name, const char * const *parent_names,
			const struct clk_parent_data *pdata, int num_parents,
			struct clk_hw *mux_hw, const struct clk_ops *mux_ops,
			struct clk_hw *rate_hw, const struct clk_ops *rate_ops,
			struct clk_hw *gate_hw, const struct clk_ops *gate_ops,
			unsigned long flags)
{
	struct clk_hw *hw;
	/*
	 * NOTE(review): init lives on the stack; this relies on the clk
	 * core consuming it during clk_hw_register() — confirm against
	 * the clk core's registration contract.
	 */
	struct clk_init_data init = {};
	struct clk_composite *composite;
	struct clk_ops *clk_composite_ops;
	int ret;

	composite = kzalloc(sizeof(*composite), GFP_KERNEL);
	if (!composite)
		return ERR_PTR(-ENOMEM);

	init.name = name;
	init.flags = flags;
	/* Callers provide either name-based or clk_parent_data parents. */
	if (parent_names)
		init.parent_names = parent_names;
	else
		init.parent_data = pdata;
	init.num_parents = num_parents;
	hw = &composite->hw;

	/* The per-instance ops table is filled in piecewise below. */
	clk_composite_ops = &composite->ops;

	if (mux_hw && mux_ops) {
		/* A mux without .get_parent is unusable. */
		if (!mux_ops->get_parent) {
			hw = ERR_PTR(-EINVAL);
			goto err;
		}

		composite->mux_hw = mux_hw;
		composite->mux_ops = mux_ops;
		clk_composite_ops->get_parent = clk_composite_get_parent;
		if (mux_ops->set_parent)
			clk_composite_ops->set_parent = clk_composite_set_parent;
		if (mux_ops->determine_rate)
			clk_composite_ops->determine_rate = clk_composite_determine_rate;
	}

	if (rate_hw && rate_ops) {
		/* A rate component without .recalc_rate is unusable. */
		if (!rate_ops->recalc_rate) {
			hw = ERR_PTR(-EINVAL);
			goto err;
		}
		clk_composite_ops->recalc_rate = clk_composite_recalc_rate;

		/*
		 * May override the mux-derived .determine_rate set
		 * above; the rate component takes precedence.
		 */
		if (rate_ops->determine_rate)
			clk_composite_ops->determine_rate =
				clk_composite_determine_rate;
		else if (rate_ops->round_rate)
			clk_composite_ops->round_rate =
				clk_composite_round_rate;

		/* .set_rate requires either .round_rate or .determine_rate */
		if (rate_ops->set_rate) {
			if (rate_ops->determine_rate || rate_ops->round_rate)
				clk_composite_ops->set_rate =
						clk_composite_set_rate;
			else
				WARN(1, "%s: missing round_rate op is required\n",
						__func__);
		}

		composite->rate_hw = rate_hw;
		composite->rate_ops = rate_ops;
	}

	/* Atomic rate+parent switching needs both component callbacks. */
	if (mux_hw && mux_ops && rate_hw && rate_ops) {
		if (mux_ops->set_parent && rate_ops->set_rate)
			clk_composite_ops->set_rate_and_parent =
			clk_composite_set_rate_and_parent;
	}

	if (gate_hw && gate_ops) {
		/* A gate must support all three gate operations. */
		if (!gate_ops->is_enabled || !gate_ops->enable ||
		    !gate_ops->disable) {
			hw = ERR_PTR(-EINVAL);
			goto err;
		}

		composite->gate_hw = gate_hw;
		composite->gate_ops = gate_ops;
		clk_composite_ops->is_enabled = clk_composite_is_enabled;
		clk_composite_ops->enable = clk_composite_enable;
		clk_composite_ops->disable = clk_composite_disable;
	}

	init.ops = clk_composite_ops;
	composite->hw.init = &init;

	ret = clk_hw_register(dev, hw);
	if (ret) {
		hw = ERR_PTR(ret);
		goto err;
	}

	/* Let each component see the composite's struct clk. */
	if (composite->mux_hw)
		composite->mux_hw->clk = hw->clk;

	if (composite->rate_hw)
		composite->rate_hw->clk = hw->clk;

	if (composite->gate_hw)
		composite->gate_hw->clk = hw->clk;

	return hw;

err:
	kfree(composite);
	return hw;
}
318
clk_hw_register_composite(struct device * dev,const char * name,const char * const * parent_names,int num_parents,struct clk_hw * mux_hw,const struct clk_ops * mux_ops,struct clk_hw * rate_hw,const struct clk_ops * rate_ops,struct clk_hw * gate_hw,const struct clk_ops * gate_ops,unsigned long flags)319 struct clk_hw *clk_hw_register_composite(struct device *dev, const char *name,
320 const char * const *parent_names, int num_parents,
321 struct clk_hw *mux_hw, const struct clk_ops *mux_ops,
322 struct clk_hw *rate_hw, const struct clk_ops *rate_ops,
323 struct clk_hw *gate_hw, const struct clk_ops *gate_ops,
324 unsigned long flags)
325 {
326 return __clk_hw_register_composite(dev, name, parent_names, NULL,
327 num_parents, mux_hw, mux_ops,
328 rate_hw, rate_ops, gate_hw,
329 gate_ops, flags);
330 }
331 EXPORT_SYMBOL_GPL(clk_hw_register_composite);
332
clk_hw_register_composite_pdata(struct device * dev,const char * name,const struct clk_parent_data * parent_data,int num_parents,struct clk_hw * mux_hw,const struct clk_ops * mux_ops,struct clk_hw * rate_hw,const struct clk_ops * rate_ops,struct clk_hw * gate_hw,const struct clk_ops * gate_ops,unsigned long flags)333 struct clk_hw *clk_hw_register_composite_pdata(struct device *dev,
334 const char *name,
335 const struct clk_parent_data *parent_data,
336 int num_parents,
337 struct clk_hw *mux_hw, const struct clk_ops *mux_ops,
338 struct clk_hw *rate_hw, const struct clk_ops *rate_ops,
339 struct clk_hw *gate_hw, const struct clk_ops *gate_ops,
340 unsigned long flags)
341 {
342 return __clk_hw_register_composite(dev, name, NULL, parent_data,
343 num_parents, mux_hw, mux_ops,
344 rate_hw, rate_ops, gate_hw,
345 gate_ops, flags);
346 }
347
clk_register_composite(struct device * dev,const char * name,const char * const * parent_names,int num_parents,struct clk_hw * mux_hw,const struct clk_ops * mux_ops,struct clk_hw * rate_hw,const struct clk_ops * rate_ops,struct clk_hw * gate_hw,const struct clk_ops * gate_ops,unsigned long flags)348 struct clk *clk_register_composite(struct device *dev, const char *name,
349 const char * const *parent_names, int num_parents,
350 struct clk_hw *mux_hw, const struct clk_ops *mux_ops,
351 struct clk_hw *rate_hw, const struct clk_ops *rate_ops,
352 struct clk_hw *gate_hw, const struct clk_ops *gate_ops,
353 unsigned long flags)
354 {
355 struct clk_hw *hw;
356
357 hw = clk_hw_register_composite(dev, name, parent_names, num_parents,
358 mux_hw, mux_ops, rate_hw, rate_ops, gate_hw, gate_ops,
359 flags);
360 if (IS_ERR(hw))
361 return ERR_CAST(hw);
362 return hw->clk;
363 }
364 EXPORT_SYMBOL_GPL(clk_register_composite);
365
clk_register_composite_pdata(struct device * dev,const char * name,const struct clk_parent_data * parent_data,int num_parents,struct clk_hw * mux_hw,const struct clk_ops * mux_ops,struct clk_hw * rate_hw,const struct clk_ops * rate_ops,struct clk_hw * gate_hw,const struct clk_ops * gate_ops,unsigned long flags)366 struct clk *clk_register_composite_pdata(struct device *dev, const char *name,
367 const struct clk_parent_data *parent_data,
368 int num_parents,
369 struct clk_hw *mux_hw, const struct clk_ops *mux_ops,
370 struct clk_hw *rate_hw, const struct clk_ops *rate_ops,
371 struct clk_hw *gate_hw, const struct clk_ops *gate_ops,
372 unsigned long flags)
373 {
374 struct clk_hw *hw;
375
376 hw = clk_hw_register_composite_pdata(dev, name, parent_data,
377 num_parents, mux_hw, mux_ops, rate_hw, rate_ops,
378 gate_hw, gate_ops, flags);
379 if (IS_ERR(hw))
380 return ERR_CAST(hw);
381 return hw->clk;
382 }
383
/*
 * Unregister a composite clock previously created with
 * clk_register_composite() and free its backing allocation.
 */
void clk_unregister_composite(struct clk *clk)
{
	struct clk_hw *hw = __clk_get_hw(clk);

	if (!hw)
		return;

	clk_unregister(clk);
	/* to_clk_composite() is pure pointer arithmetic (container_of). */
	kfree(to_clk_composite(hw));
}
398
/*
 * Unregister a composite clock previously created with
 * clk_hw_register_composite() and free its backing allocation.
 */
void clk_hw_unregister_composite(struct clk_hw *hw)
{
	clk_hw_unregister(hw);
	/* to_clk_composite() is pure pointer arithmetic (container_of). */
	kfree(to_clk_composite(hw));
}
EXPORT_SYMBOL_GPL(clk_hw_unregister_composite);
409