/*
 *  Copyright (C) 2015 Atmel Corporation,
 *                     Nicolas Ferre <nicolas.ferre@atmel.com>
 *
 * Based on clk-programmable & clk-peripheral drivers by Boris BREZILLON.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 */

#include <linux/clk-provider.h>
#include <linux/clkdev.h>
#include <linux/clk/at91_pmc.h>
#include <linux/of.h>
#include <linux/mfd/syscon.h>
#include <linux/regmap.h>

#include "pmc.h"

#define PERIPHERAL_MAX		64
#define PERIPHERAL_ID_MIN	2

#define GENERATED_SOURCE_MAX	6
#define GENERATED_MAX_DIV	255

#define GCK_ID_SSC0		43
#define GCK_ID_SSC1		44
#define GCK_ID_I2S0		54
#define GCK_ID_I2S1		55
#define GCK_ID_CLASSD		59
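/* Index of the audio PLL among the parent clocks listed in the device tree */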
#define GCK_INDEX_DT_AUDIO_PLL	5

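/*
 * struct clk_generated - a single generated clock (GCK) driven by the PMC
 * @hw:			handle between the hardware and the common clock framework
 * @regmap:		PMC regmap used to access the PCR register
 * @range:		allowed output rate range (max == 0 means unlimited)
 * @lock:		lock serializing PCR accesses
 * @id:			peripheral ID of the clock consumer
 * @gckdiv:		programmed divisor minus one (GCKDIV field)
 * @parent_id:		index of the selected clock source (GCKCSS field)
 * @audio_pll_allowed:	true if the audio PLL may be picked (and re-rated)
 *			as parent for this clock
 */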
struct clk_generated {
	struct clk_hw hw;
	struct regmap *regmap;
	struct clk_range range;
	spinlock_t *lock;
	u32 id;
	u32 gckdiv;
	u8 parent_id;
	bool audio_pll_allowed;
};

#define to_clk_generated(hw) \
	container_of(hw, struct clk_generated, hw)

static int clk_generated_enable(struct clk_hw *hw)
{
	struct clk_generated *gck = to_clk_generated(hw);
	unsigned long flags;

	pr_debug("GCLK: %s, gckdiv = %d, parent id = %d\n",
		 __func__, gck->gckdiv, gck->parent_id);

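	/*
	 * The PCR register is indexed by peripheral ID: write the PID first
	 * to select this clock's view of the register, then program source,
	 * divisor and enable bit in a single commanded read-modify-write.
	 */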
	spin_lock_irqsave(gck->lock, flags);
	regmap_write(gck->regmap, AT91_PMC_PCR,
		     (gck->id & AT91_PMC_PCR_PID_MASK));
	regmap_update_bits(gck->regmap, AT91_PMC_PCR,
			   AT91_PMC_PCR_GCKDIV_MASK | AT91_PMC_PCR_GCKCSS_MASK |
			   AT91_PMC_PCR_CMD | AT91_PMC_PCR_GCKEN,
			   AT91_PMC_PCR_GCKCSS(gck->parent_id) |
			   AT91_PMC_PCR_CMD |
			   AT91_PMC_PCR_GCKDIV(gck->gckdiv) |
			   AT91_PMC_PCR_GCKEN);
	spin_unlock_irqrestore(gck->lock, flags);
	return 0;
}

static void clk_generated_disable(struct clk_hw *hw)
{
	struct clk_generated *gck = to_clk_generated(hw);
	unsigned long flags;

	spin_lock_irqsave(gck->lock, flags);
	regmap_write(gck->regmap, AT91_PMC_PCR,
		     (gck->id & AT91_PMC_PCR_PID_MASK));
	regmap_update_bits(gck->regmap, AT91_PMC_PCR,
			   AT91_PMC_PCR_CMD | AT91_PMC_PCR_GCKEN,
			   AT91_PMC_PCR_CMD);
	spin_unlock_irqrestore(gck->lock, flags);
}

static int clk_generated_is_enabled(struct clk_hw *hw)
{
	struct clk_generated *gck = to_clk_generated(hw);
	unsigned long flags;
	unsigned int status;

	spin_lock_irqsave(gck->lock, flags);
	regmap_write(gck->regmap, AT91_PMC_PCR,
		     (gck->id & AT91_PMC_PCR_PID_MASK));
	regmap_read(gck->regmap, AT91_PMC_PCR, &status);
	spin_unlock_irqrestore(gck->lock, flags);

	return status & AT91_PMC_PCR_GCKEN ? 1 : 0;
}

static unsigned long
clk_generated_recalc_rate(struct clk_hw *hw,
			  unsigned long parent_rate)
{
	struct clk_generated *gck = to_clk_generated(hw);

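	/*
	 * gckdiv stores the divisor minus one, e.g. gckdiv = 2 yields
	 * parent_rate / 3.
	 */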
	return DIV_ROUND_CLOSEST(parent_rate, gck->gckdiv + 1);
}

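/*
 * Remember the rate obtained with @parent and divisor @div if it is closer
 * to the requested rate than the best candidate found so far.
 */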
static void clk_generated_best_diff(struct clk_rate_request *req,
				    struct clk_hw *parent,
				    unsigned long parent_rate, u32 div,
				    int *best_diff, long *best_rate)
{
	unsigned long tmp_rate;
	int tmp_diff;

	if (!div)
		tmp_rate = parent_rate;
	else
		tmp_rate = parent_rate / div;
	tmp_diff = abs(req->rate - tmp_rate);

	if (*best_diff < 0 || *best_diff > tmp_diff) {
		*best_rate = tmp_rate;
		*best_diff = tmp_diff;
		req->best_parent_rate = parent_rate;
		req->best_parent_hw = parent;
	}
}

static int clk_generated_determine_rate(struct clk_hw *hw,
					struct clk_rate_request *req)
{
	struct clk_generated *gck = to_clk_generated(hw);
	struct clk_hw *parent = NULL;
	struct clk_rate_request req_parent = *req;
	long best_rate = -EINVAL;
	unsigned long min_rate, parent_rate;
	int best_diff = -1;
	int i;
	u32 div;

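	/*
	 * First try the fixed-rate parents (every source except the audio
	 * PLL, handled below) with the divisor closest to the requested rate.
	 */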
	for (i = 0; i < clk_hw_get_num_parents(hw) - 1; i++) {
		parent = clk_hw_get_parent_by_index(hw, i);
		if (!parent)
			continue;

		parent_rate = clk_hw_get_rate(parent);
		min_rate = DIV_ROUND_CLOSEST(parent_rate, GENERATED_MAX_DIV + 1);
		if (!parent_rate ||
		    (gck->range.max && min_rate > gck->range.max))
			continue;

		div = DIV_ROUND_CLOSEST(parent_rate, req->rate);
		if (div > GENERATED_MAX_DIV + 1)
			div = GENERATED_MAX_DIV + 1;

		clk_generated_best_diff(req, parent, parent_rate, div,
					&best_diff, &best_rate);

		if (!best_diff)
			break;
	}

	/*
	 * The audio_pll rate can be modified, unlike the five other clocks
	 * that should never be altered.
	 * The audio_pll can technically be used by multiple consumers. However,
	 * with the rate locking, the first consumer to enable the clock will be
	 * the one definitely setting the rate of the clock.
	 * Since audio IPs are most likely to request the same rate, we enforce
	 * that the only clks able to modify gck rate are those of audio IPs.
	 */

	if (!gck->audio_pll_allowed)
		goto end;

	parent = clk_hw_get_parent_by_index(hw, GCK_INDEX_DT_AUDIO_PLL);
	if (!parent)
		goto end;

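	/*
	 * Walk the whole divisor range and ask the audio PLL whether it can
	 * produce req->rate * div for each step, e.g. a 12288000 Hz request
	 * with div = 4 asks the PLL for 49152000 Hz, which is then divided
	 * back down to the requested rate.
	 */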
	for (div = 1; div < GENERATED_MAX_DIV + 2; div++) {
		req_parent.rate = req->rate * div;
		__clk_determine_rate(parent, &req_parent);
		clk_generated_best_diff(req, parent, req_parent.rate, div,
					&best_diff, &best_rate);

		if (!best_diff)
			break;
	}

end:
	pr_debug("GCLK: %s, best_rate = %ld, parent clk: %s @ %ld\n",
		 __func__, best_rate,
		 __clk_get_name((req->best_parent_hw)->clk),
		 req->best_parent_rate);

	if (best_rate < 0)
		return best_rate;

	req->rate = best_rate;
	return 0;
}

/* No modification of hardware as we have the flag CLK_SET_PARENT_GATE set */
static int clk_generated_set_parent(struct clk_hw *hw, u8 index)
{
	struct clk_generated *gck = to_clk_generated(hw);

	if (index >= clk_hw_get_num_parents(hw))
		return -EINVAL;

	gck->parent_id = index;
	return 0;
}

static u8 clk_generated_get_parent(struct clk_hw *hw)
{
	struct clk_generated *gck = to_clk_generated(hw);

	return gck->parent_id;
}

/* No modification of hardware as we have the flag CLK_SET_RATE_GATE set */
static int clk_generated_set_rate(struct clk_hw *hw,
				  unsigned long rate,
				  unsigned long parent_rate)
{
	struct clk_generated *gck = to_clk_generated(hw);
	u32 div;

	if (!rate)
		return -EINVAL;

	if (gck->range.max && rate > gck->range.max)
		return -EINVAL;

	div = DIV_ROUND_CLOSEST(parent_rate, rate);
	if (div > GENERATED_MAX_DIV + 1 || !div)
		return -EINVAL;

	gck->gckdiv = div - 1;
	return 0;
}

static const struct clk_ops generated_ops = {
	.enable = clk_generated_enable,
	.disable = clk_generated_disable,
	.is_enabled = clk_generated_is_enabled,
	.recalc_rate = clk_generated_recalc_rate,
	.determine_rate = clk_generated_determine_rate,
	.get_parent = clk_generated_get_parent,
	.set_parent = clk_generated_set_parent,
	.set_rate = clk_generated_set_rate,
};

/**
 * clk_generated_startup - Initialize a given clock to its default parent and
 * divisor parameter.
 *
 * @gck:	Generated clock to set the startup parameters for.
 *
 * Take parameters from the hardware and update local clock configuration
 * accordingly.
 */
static void clk_generated_startup(struct clk_generated *gck)
{
	u32 tmp;
	unsigned long flags;

	spin_lock_irqsave(gck->lock, flags);
	regmap_write(gck->regmap, AT91_PMC_PCR,
		     (gck->id & AT91_PMC_PCR_PID_MASK));
	regmap_read(gck->regmap, AT91_PMC_PCR, &tmp);
	spin_unlock_irqrestore(gck->lock, flags);

	gck->parent_id = (tmp & AT91_PMC_PCR_GCKCSS_MASK)
					>> AT91_PMC_PCR_GCKCSS_OFFSET;
	gck->gckdiv = (tmp & AT91_PMC_PCR_GCKDIV_MASK)
					>> AT91_PMC_PCR_GCKDIV_OFFSET;
}

static struct clk_hw * __init
at91_clk_register_generated(struct regmap *regmap, spinlock_t *lock,
			    const char *name, const char **parent_names,
			    u8 num_parents, u8 id, bool pll_audio,
			    const struct clk_range *range)
{
	struct clk_generated *gck;
	struct clk_init_data init;
	struct clk_hw *hw;
	int ret;

	gck = kzalloc(sizeof(*gck), GFP_KERNEL);
	if (!gck)
		return ERR_PTR(-ENOMEM);

	init.name = name;
	init.ops = &generated_ops;
	init.parent_names = parent_names;
	init.num_parents = num_parents;
	init.flags = CLK_SET_RATE_GATE | CLK_SET_PARENT_GATE |
		CLK_SET_RATE_PARENT;

	gck->id = id;
	gck->hw.init = &init;
	gck->regmap = regmap;
	gck->lock = lock;
	gck->range = *range;
	gck->audio_pll_allowed = pll_audio;

	clk_generated_startup(gck);
	hw = &gck->hw;
	ret = clk_hw_register(NULL, &gck->hw);
	if (ret) {
		kfree(gck);
		hw = ERR_PTR(ret);
	} else {
		pmc_register_id(id);
	}

	return hw;
}

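/*
 * Illustrative device-tree layout this parser expects. Node names, the
 * parent clock list and the range value below are examples only, not taken
 * from a real sama5d2 DTS; the audio PLL is expected as the last entry of
 * "clocks" (index GCK_INDEX_DT_AUDIO_PLL):
 *
 *	gck {
 *		compatible = "atmel,sama5d2-clk-generated";
 *		#address-cells = <1>;
 *		#size-cells = <0>;
 *		clocks = <&clk32k>, <&main>, <&plla>, <&upll>,
 *			 <&mck>, <&audio_pll_pmc>;
 *
 *		i2s0_gclk: i2s0_gclk {
 *			#clock-cells = <0>;
 *			reg = <54>;		// GCK_ID_I2S0
 *			atmel,clk-output-range = <0 100000000>;
 *		};
 *	};
 *
 * Each child node describes one generated clock: "reg" holds the peripheral
 * ID (2..63) and the optional "atmel,clk-output-range" bounds its rate.
 */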
static void __init of_sama5d2_clk_generated_setup(struct device_node *np)
{
	int num;
	u32 id;
	const char *name;
	struct clk_hw *hw;
	unsigned int num_parents;
	const char *parent_names[GENERATED_SOURCE_MAX];
	struct device_node *gcknp;
	struct clk_range range = CLK_RANGE(0, 0);
	struct regmap *regmap;

	num_parents = of_clk_get_parent_count(np);
	if (num_parents == 0 || num_parents > GENERATED_SOURCE_MAX)
		return;

	of_clk_parent_fill(np, parent_names, num_parents);

	num = of_get_child_count(np);
	if (!num || num > PERIPHERAL_MAX)
		return;

	regmap = syscon_node_to_regmap(of_get_parent(np));
	if (IS_ERR(regmap))
		return;

	for_each_child_of_node(np, gcknp) {
		bool pll_audio = false;

		if (of_property_read_u32(gcknp, "reg", &id))
			continue;

		if (id < PERIPHERAL_ID_MIN || id >= PERIPHERAL_MAX)
			continue;

		if (of_property_read_string(np, "clock-output-names", &name))
			name = gcknp->name;

		of_at91_get_clk_range(gcknp, "atmel,clk-output-range",
				      &range);

		if (of_device_is_compatible(np, "atmel,sama5d2-clk-generated") &&
		    (id == GCK_ID_I2S0 || id == GCK_ID_I2S1 ||
		     id == GCK_ID_CLASSD))
			pll_audio = true;

		hw = at91_clk_register_generated(regmap, &pmc_pcr_lock, name,
						 parent_names, num_parents,
						 id, pll_audio, &range);
		if (IS_ERR(hw))
			continue;

		of_clk_add_hw_provider(gcknp, of_clk_hw_simple_get, hw);
	}
}
CLK_OF_DECLARE(of_sama5d2_clk_generated_setup, "atmel,sama5d2-clk-generated",
	       of_sama5d2_clk_generated_setup);