Lines Matching refs:div

58 static inline u64 scaled_div_value(struct bcm_clk_div *div, u32 reg_div)  in scaled_div_value()  argument
60 return (u64)reg_div + ((u64)1 << div->u.s.frac_width); in scaled_div_value()
68 u64 scaled_div_build(struct bcm_clk_div *div, u32 div_value, u32 billionths) in scaled_div_build() argument
76 combined <<= div->u.s.frac_width; in scaled_div_build()
83 scaled_div_min(struct bcm_clk_div *div) in scaled_div_min() argument
85 if (divider_is_fixed(div)) in scaled_div_min()
86 return (u64)div->u.fixed; in scaled_div_min()
88 return scaled_div_value(div, 0); in scaled_div_min()
92 u64 scaled_div_max(struct bcm_clk_div *div) in scaled_div_max() argument
96 if (divider_is_fixed(div)) in scaled_div_max()
97 return (u64)div->u.fixed; in scaled_div_max()
99 reg_div = ((u32)1 << div->u.s.width) - 1; in scaled_div_max()
101 return scaled_div_value(div, reg_div); in scaled_div_max()
109 divider(struct bcm_clk_div *div, u64 scaled_div) in divider() argument
111 BUG_ON(scaled_div < scaled_div_min(div)); in divider()
112 BUG_ON(scaled_div > scaled_div_max(div)); in divider()
114 return (u32)(scaled_div - ((u64)1 << div->u.s.frac_width)); in divider()
119 scale_rate(struct bcm_clk_div *div, u32 rate) in scale_rate() argument
121 if (divider_is_fixed(div)) in scale_rate()
124 return (u64)rate << div->u.s.frac_width; in scale_rate()
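
Taken together, scaled_div_value(), divider(), and scale_rate() implement a fixed-point encoding for divisors: a divisor is held as an integer with frac_width fractional bits, and a register field value of 0 corresponds to a divisor of 1.0. A minimal standalone sketch of that arithmetic, assuming a simplified stand-in for the fractional part of struct bcm_clk_div (the frac_div name and layout below are illustrative, not the driver's):

#include <stdint.h>

/* Illustrative stand-in for the fractional divider fields; not the driver's struct. */
struct frac_div {
	uint32_t width;       /* width of the register divider field, in bits */
	uint32_t frac_width;  /* number of fractional bits */
};

/* Register value -> scaled divisor: reg_div 0 encodes a divisor of 1.0. */
static inline uint64_t scaled_value(const struct frac_div *d, uint32_t reg_div)
{
	return (uint64_t)reg_div + ((uint64_t)1 << d->frac_width);
}

/* Scaled divisor -> register value (inverse of scaled_value()). */
static inline uint32_t reg_value(const struct frac_div *d, uint64_t scaled)
{
	return (uint32_t)(scaled - ((uint64_t)1 << d->frac_width));
}

/* Scale a rate into the same fixed-point domain so rate / divisor works. */
static inline uint64_t scale_rate_fp(const struct frac_div *d, uint32_t rate)
{
	return (uint64_t)rate << d->frac_width;
}

With frac_width = 3, for example, a register value of 4 yields a scaled divisor of 12, i.e. a divisor of 12 / 2^3 = 1.5.
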
564 static u64 divider_read_scaled(struct ccu_data *ccu, struct bcm_clk_div *div) in divider_read_scaled() argument
570 if (divider_is_fixed(div)) in divider_read_scaled()
571 return (u64)div->u.fixed; in divider_read_scaled()
574 reg_val = __ccu_read(ccu, div->u.s.offset); in divider_read_scaled()
578 reg_div = bitfield_extract(reg_val, div->u.s.shift, div->u.s.width); in divider_read_scaled()
581 return scaled_div_value(div, reg_div); in divider_read_scaled()
592 struct bcm_clk_div *div, struct bcm_clk_trig *trig) in __div_commit() argument
599 BUG_ON(divider_is_fixed(div)); in __div_commit()
606 if (div->u.s.scaled_div == BAD_SCALED_DIV_VALUE) { in __div_commit()
607 reg_val = __ccu_read(ccu, div->u.s.offset); in __div_commit()
608 reg_div = bitfield_extract(reg_val, div->u.s.shift, in __div_commit()
609 div->u.s.width); in __div_commit()
610 div->u.s.scaled_div = scaled_div_value(div, reg_div); in __div_commit()
616 reg_div = divider(div, div->u.s.scaled_div); in __div_commit()
626 reg_val = __ccu_read(ccu, div->u.s.offset); in __div_commit()
627 reg_val = bitfield_replace(reg_val, div->u.s.shift, div->u.s.width, in __div_commit()
629 __ccu_write(ccu, div->u.s.offset, reg_val); in __div_commit()
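
__div_commit() updates the divider with a read-modify-write of the register: it reads the current value via __ccu_read(), substitutes only the divider bitfield, and writes the result back with __ccu_write(). The bodies of bitfield_extract() and bitfield_replace() are not part of this listing, so the sketch below shows the conventional shift-and-mask semantics their call sites suggest; treat it as an assumption rather than the file's actual definitions:

#include <stdint.h>

/* Mask covering a 'width'-bit field starting at bit position 'shift'. */
static inline uint32_t field_mask(uint32_t shift, uint32_t width)
{
	return ((1u << width) - 1) << shift;
}

/* Pull a bitfield out of a register value (cf. bitfield_extract()). */
static inline uint32_t field_extract(uint32_t reg_val, uint32_t shift,
				     uint32_t width)
{
	return (reg_val & field_mask(shift, width)) >> shift;
}

/* Replace a bitfield within a register value (cf. bitfield_replace()). */
static inline uint32_t field_replace(uint32_t reg_val, uint32_t shift,
				     uint32_t width, uint32_t val)
{
	return (reg_val & ~field_mask(shift, width)) | (val << shift);
}
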
648 struct bcm_clk_div *div, struct bcm_clk_trig *trig) in div_init() argument
650 if (!divider_exists(div) || divider_is_fixed(div)) in div_init()
652 return !__div_commit(ccu, gate, div, trig); in div_init()
656 struct bcm_clk_div *div, struct bcm_clk_trig *trig, in divider_write() argument
663 BUG_ON(divider_is_fixed(div)); in divider_write()
665 previous = div->u.s.scaled_div; in divider_write()
669 div->u.s.scaled_div = scaled_div; in divider_write()
674 ret = __div_commit(ccu, gate, div, trig); in divider_write()
680 div->u.s.scaled_div = previous; /* Revert the change */ in divider_write()
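
divider_write() updates the cached scaled divisor with a snapshot-and-revert pattern: it saves the previous value, installs the new one, attempts the commit, and restores the old value if the commit fails. A reduced sketch of that control flow under assumed names (div_state and write_cached_div are placeholders, and the commit callback stands in for __div_commit()):

#include <stdint.h>

/* Minimal stand-in for the cached divider state; names are placeholders. */
struct div_state {
	uint64_t scaled_div;	/* last scaled divisor requested (cached) */
};

/*
 * Snapshot-and-revert update: remember the cached value, install the new
 * one, try to commit it to hardware, and roll back the cache on failure.
 */
static int write_cached_div(struct div_state *s, uint64_t scaled_div,
			    int (*commit)(struct div_state *))
{
	uint64_t previous = s->scaled_div;
	int ret;

	s->scaled_div = scaled_div;
	ret = commit(s);
	if (ret)
		s->scaled_div = previous;	/* revert the change */

	return ret;
}
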
694 struct bcm_clk_div *div, struct bcm_clk_div *pre_div, in clk_recalc_rate() argument
701 if (!divider_exists(div)) in clk_recalc_rate()
720 scaled_rate = scale_rate(div, scaled_rate); in clk_recalc_rate()
725 scaled_parent_rate = scale_rate(div, parent_rate); in clk_recalc_rate()
733 scaled_div = divider_read_scaled(ccu, div); in clk_recalc_rate()
748 static long round_rate(struct ccu_data *ccu, struct bcm_clk_div *div, in round_rate() argument
759 BUG_ON(!divider_exists(div)); in round_rate()
779 scaled_rate = scale_rate(div, scaled_rate); in round_rate()
784 scaled_parent_rate = scale_rate(div, parent_rate); in round_rate()
792 if (!divider_is_fixed(div)) { in round_rate()
795 min_scaled_div = scaled_div_min(div); in round_rate()
796 max_scaled_div = scaled_div_max(div); in round_rate()
802 best_scaled_div = divider_read_scaled(ccu, div); in round_rate()
1004 return clk_recalc_rate(bcm_clk->ccu, &data->div, &data->pre_div, in kona_peri_clk_recalc_rate()
1012 struct bcm_clk_div *div = &bcm_clk->u.peri->div; in kona_peri_clk_round_rate() local
1014 if (!divider_exists(div)) in kona_peri_clk_round_rate()
1018 return round_rate(bcm_clk->ccu, div, &bcm_clk->u.peri->pre_div, in kona_peri_clk_round_rate()
1135 struct bcm_clk_div *div = &data->div; in kona_peri_clk_set_rate() local
1145 if (!divider_exists(div)) in kona_peri_clk_set_rate()
1153 if (divider_is_fixed(&data->div)) in kona_peri_clk_set_rate()
1161 (void)round_rate(bcm_clk->ccu, div, &data->pre_div, in kona_peri_clk_set_rate()
1168 ret = divider_write(bcm_clk->ccu, &data->gate, &data->div, in kona_peri_clk_set_rate()
1216 if (!div_init(ccu, &peri->gate, &peri->div, &peri->trig)) { in __peri_clk_init()