
Searched refs:hw (Results 1 – 25 of 326) sorted by relevance

/arch/arm/mach-sa1100/
clock.c
23 static int clk_gpio27_enable(struct clk_hw *hw) in clk_gpio27_enable() argument
39 static void clk_gpio27_disable(struct clk_hw *hw) in clk_gpio27_disable() argument
71 static unsigned long clk_mpll_recalc_rate(struct clk_hw *hw, in clk_mpll_recalc_rate() argument
95 struct clk_hw *hw; in sa11xx_clk_init() local
98 hw = clk_hw_register_fixed_rate(NULL, "clk32768", NULL, 0, 32768); in sa11xx_clk_init()
99 if (IS_ERR(hw)) in sa11xx_clk_init()
100 return PTR_ERR(hw); in sa11xx_clk_init()
102 clk_hw_register_clkdev(hw, NULL, "sa1100-rtc"); in sa11xx_clk_init()
104 hw = clk_hw_register_fixed_rate(NULL, "clk3686400", NULL, 0, 3686400); in sa11xx_clk_init()
105 if (IS_ERR(hw)) in sa11xx_clk_init()
[all …]
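
The sa1100 matches above follow the basic clk_hw provider pattern: register a fixed-rate clock, check the returned struct clk_hw * with IS_ERR(), then publish a clkdev lookup so a consumer can find the clock by device name. A minimal sketch of that pattern, assuming a made-up init function name (my_fixed_clk_init is illustrative, not from the tree):

/*
 * Sketch of the register-then-clkdev pattern shown above. The
 * "clk32768"/"sa1100-rtc" strings mirror the hits; treat this as an
 * illustration, not the actual platform code.
 */
#include <linux/clk-provider.h>
#include <linux/clkdev.h>
#include <linux/err.h>
#include <linux/init.h>

static int __init my_fixed_clk_init(void)
{
	struct clk_hw *hw;

	/* A 32.768 kHz root clock with no parent. */
	hw = clk_hw_register_fixed_rate(NULL, "clk32768", NULL, 0, 32768);
	if (IS_ERR(hw))
		return PTR_ERR(hw);

	/* Let the RTC device look the clock up by device name. */
	return clk_hw_register_clkdev(hw, NULL, "sa1100-rtc");
}

Note that clk_hw_register_fixed_rate() returns an ERR_PTR() on failure rather than NULL, which is why the hits test the result with IS_ERR().
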
/arch/arm/boot/dts/
tegra124-peripherals-opp.dtsi
10 opp-supported-hw = <0x0003>;
16 opp-supported-hw = <0x0008>;
22 opp-supported-hw = <0x0010>;
28 opp-supported-hw = <0x0004>;
34 opp-supported-hw = <0x0003>;
40 opp-supported-hw = <0x0008>;
46 opp-supported-hw = <0x0010>;
52 opp-supported-hw = <0x0004>;
58 opp-supported-hw = <0x0003>;
64 opp-supported-hw = <0x0008>;
[all …]
tegra30-peripherals-opp.dtsi
10 opp-supported-hw = <0x0006>;
16 opp-supported-hw = <0x0001>;
22 opp-supported-hw = <0x0008>;
28 opp-supported-hw = <0x0006>;
34 opp-supported-hw = <0x0001>;
40 opp-supported-hw = <0x0008>;
46 opp-supported-hw = <0x0006>;
52 opp-supported-hw = <0x0001>;
58 opp-supported-hw = <0x0008>;
64 opp-supported-hw = <0x0006>;
[all …]
tegra20-cpu-opp.dtsi
10 opp-supported-hw = <0x0F 0x0003>;
17 opp-supported-hw = <0x0F 0x0004>;
24 opp-supported-hw = <0x0F 0x0003>;
30 opp-supported-hw = <0x0F 0x0004>;
36 opp-supported-hw = <0x0C 0x0003>;
42 opp-supported-hw = <0x03 0x0006>, <0x04 0x0004>,
49 opp-supported-hw = <0x03 0x0001>;
55 opp-supported-hw = <0x08 0x0003>;
61 opp-supported-hw = <0x04 0x0006>, <0x08 0x0004>;
67 opp-supported-hw = <0x04 0x0001>;
[all …]
tegra30-cpu-opp.dtsi
10 opp-supported-hw = <0x1F 0x31FE>;
16 opp-supported-hw = <0x1F 0x0C01>;
22 opp-supported-hw = <0x1F 0x0200>;
28 opp-supported-hw = <0x1F 0x31FE>;
34 opp-supported-hw = <0x1F 0x0C01>;
40 opp-supported-hw = <0x1F 0x0200>;
46 opp-supported-hw = <0x1F 0x31FE>;
53 opp-supported-hw = <0x1F 0x0C01>;
60 opp-supported-hw = <0x1F 0x0200>;
67 opp-supported-hw = <0x1F 0x0C00>;
[all …]
tegra20-peripherals-opp.dtsi
10 opp-supported-hw = <0x000F>;
16 opp-supported-hw = <0x000F>;
22 opp-supported-hw = <0x000F>;
28 opp-supported-hw = <0x000F>;
34 opp-supported-hw = <0x000F>;
40 opp-supported-hw = <0x000F>;
46 opp-supported-hw = <0x000F>;
52 opp-supported-hw = <0x000F>;
58 opp-supported-hw = <0x000F>;
64 opp-supported-hw = <0x000F>;
[all …]
/arch/arm64/boot/dts/qcom/
msm8996pro.dtsi
14 * nibble of supported hw, so speed bin 0 becomes 0x10, speed bin 1
25 opp-supported-hw = <0x70>;
30 opp-supported-hw = <0x70>;
35 opp-supported-hw = <0x70>;
40 opp-supported-hw = <0x70>;
45 opp-supported-hw = <0x70>;
50 opp-supported-hw = <0x70>;
55 opp-supported-hw = <0x70>;
60 opp-supported-hw = <0x70>;
65 opp-supported-hw = <0x70>;
[all …]
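
The opp-supported-hw cells in the device-tree matches above are bitmasks: the OPP core only keeps an OPP enabled if its mask matches the version values the platform driver supplies for the detected silicon. A sketch of the driver side, assuming the dev_pm_opp_set_supported_hw() helper (newer kernels route this through dev_pm_opp_set_config()) and made-up version values:

/*
 * Sketch of how a driver ties the opp-supported-hw masks above to the
 * hardware it detected. The speed-bin/process values are invented;
 * only OPPs whose cells have the matching bits set stay enabled.
 */
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/pm_opp.h>

static int example_enable_opps(struct device *dev)
{
	/* One u32 per cell in opp-supported-hw, e.g. <speedo process>. */
	const u32 versions[] = { 0x04, 0x0003 };
	struct opp_table *table;

	table = dev_pm_opp_set_supported_hw(dev, versions,
					    ARRAY_SIZE(versions));
	if (IS_ERR(table))
		return PTR_ERR(table);

	/* Real code keeps "table" around to release the reference later. */
	return 0;
}
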
/arch/powerpc/perf/
core-fsl-emb.c
166 if (event->hw.state & PERF_HES_STOPPED) in fsl_emb_pmu_read()
175 prev = local64_read(&event->hw.prev_count); in fsl_emb_pmu_read()
177 val = read_pmc(event->hw.idx); in fsl_emb_pmu_read()
178 } while (local64_cmpxchg(&event->hw.prev_count, prev, val) != prev); in fsl_emb_pmu_read()
183 local64_sub(delta, &event->hw.period_left); in fsl_emb_pmu_read()
287 if (event->hw.config & FSL_EMB_EVENT_RESTRICTED) in fsl_emb_pmu_add()
304 event->hw.idx = i; in fsl_emb_pmu_add()
309 if (event->hw.sample_period) { in fsl_emb_pmu_add()
310 s64 left = local64_read(&event->hw.period_left); in fsl_emb_pmu_add()
314 local64_set(&event->hw.prev_count, val); in fsl_emb_pmu_add()
[all …]
core-book3s.c
865 idx = cpuhw->event[i]->hw.idx; in any_pmc_overflown()
1132 if (event->hw.state & PERF_HES_STOPPED) in power_pmu_read()
1135 if (!event->hw.idx) in power_pmu_read()
1139 val = read_pmc(event->hw.idx); in power_pmu_read()
1140 local64_set(&event->hw.prev_count, val); in power_pmu_read()
1150 prev = local64_read(&event->hw.prev_count); in power_pmu_read()
1152 val = read_pmc(event->hw.idx); in power_pmu_read()
1156 } while (local64_cmpxchg(&event->hw.prev_count, prev, val) != prev); in power_pmu_read()
1170 prev = local64_read(&event->hw.period_left); in power_pmu_read()
1174 } while (local64_cmpxchg(&event->hw.period_left, prev, val) != prev); in power_pmu_read()
[all …]
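
Both powerpc PMU read paths above use the same lock-free update: snapshot hw.prev_count, read the hardware counter, retry with local64_cmpxchg() until nothing updated prev_count in between, then fold the delta into the event count and period_left. A condensed sketch of that loop (read_hw_counter() stands in for the arch-specific read_pmc(); counter-width masking is omitted for brevity):

/*
 * Sketch of the lock-free counter update the hits above implement.
 * The perf core types come from <linux/perf_event.h>; only the retry
 * loop and delta accounting are the point here.
 */
#include <linux/perf_event.h>

static void example_event_update(struct perf_event *event,
				 u64 (*read_hw_counter)(int idx))
{
	s64 prev, val, delta;

	do {
		prev = local64_read(&event->hw.prev_count);
		val = read_hw_counter(event->hw.idx);
	} while (local64_cmpxchg(&event->hw.prev_count, prev, val) != prev);

	/* Hardware counters are narrower than 64 bits; real code masks
	 * the delta to the counter width before accumulating it. */
	delta = val - prev;
	local64_add(delta, &event->count);
	local64_sub(delta, &event->hw.period_left);
}
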
/arch/arm/mm/
cache-l2x0-pmu.c
119 struct hw_perf_event *hw = &event->hw; in l2x0_pmu_event_read() local
123 prev_count = local64_read(&hw->prev_count); in l2x0_pmu_event_read()
124 new_count = l2x0_pmu_counter_read(hw->idx); in l2x0_pmu_event_read()
125 } while (local64_xchg(&hw->prev_count, new_count) != prev_count); in l2x0_pmu_event_read()
135 struct hw_perf_event *hw = &event->hw; in l2x0_pmu_event_configure() local
147 local64_set(&hw->prev_count, 0); in l2x0_pmu_event_configure()
148 l2x0_pmu_counter_write(hw->idx, 0); in l2x0_pmu_event_configure()
188 struct hw_perf_event *hw = &event->hw; in l2x0_pmu_event_start() local
190 if (WARN_ON_ONCE(!(event->hw.state & PERF_HES_STOPPED))) in l2x0_pmu_event_start()
194 WARN_ON_ONCE(!(hw->state & PERF_HES_UPTODATE)); in l2x0_pmu_event_start()
[all …]
/arch/mips/alchemy/common/
clock.c
118 static unsigned long alchemy_clk_cpu_recalc(struct clk_hw *hw, in alchemy_clk_cpu_recalc() argument
180 struct clk_hw hw; member
184 #define to_auxpll_clk(x) container_of(x, struct alchemy_auxpll_clk, hw)
186 static unsigned long alchemy_clk_aux_recalc(struct clk_hw *hw, in alchemy_clk_aux_recalc() argument
189 struct alchemy_auxpll_clk *a = to_auxpll_clk(hw); in alchemy_clk_aux_recalc()
194 static int alchemy_clk_aux_setr(struct clk_hw *hw, in alchemy_clk_aux_setr() argument
198 struct alchemy_auxpll_clk *a = to_auxpll_clk(hw); in alchemy_clk_aux_setr()
214 static long alchemy_clk_aux_roundr(struct clk_hw *hw, in alchemy_clk_aux_roundr() argument
218 struct alchemy_auxpll_clk *a = to_auxpll_clk(hw); in alchemy_clk_aux_roundr()
260 a->hw.init = &id; in alchemy_clk_setup_aux()
[all …]
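
The alchemy clock driver above embeds struct clk_hw in its own clock structure and recovers the containing structure in each clk_ops callback with container_of(); this is the standard way to attach driver-private state to a clk_hw. A sketch with hypothetical names (example_pll_clk and its register layout are illustrative only):

/*
 * Sketch of the embed-and-container_of pattern visible above. Only the
 * technique (a clk_hw member plus container_of() in the callbacks) is
 * the point; the divider decoding is made up.
 */
#include <linux/clk-provider.h>
#include <linux/io.h>
#include <linux/kernel.h>

struct example_pll_clk {
	struct clk_hw hw;	/* handed to the clk framework */
	void __iomem *reg;	/* driver-private state */
};

#define to_example_pll(_hw) container_of(_hw, struct example_pll_clk, hw)

static unsigned long example_pll_recalc_rate(struct clk_hw *hw,
					     unsigned long parent_rate)
{
	struct example_pll_clk *pll = to_example_pll(hw);
	u32 div = readl_relaxed(pll->reg) & 0xff;	/* hypothetical layout */

	return div ? parent_rate / div : parent_rate;
}

static const struct clk_ops example_pll_ops = {
	.recalc_rate = example_pll_recalc_rate,
};
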
/arch/x86/kernel/
hw_breakpoint.c
234 int arch_check_bp_in_kernelspace(struct arch_hw_breakpoint *hw) in arch_check_bp_in_kernelspace() argument
239 va = hw->address; in arch_check_bp_in_kernelspace()
240 len = arch_bp_generic_len(hw->len); in arch_check_bp_in_kernelspace()
325 struct arch_hw_breakpoint *hw) in arch_build_bp_info() argument
342 hw->address = attr->bp_addr; in arch_build_bp_info()
343 hw->mask = 0; in arch_build_bp_info()
348 hw->type = X86_BREAKPOINT_WRITE; in arch_build_bp_info()
351 hw->type = X86_BREAKPOINT_RW; in arch_build_bp_info()
364 hw->type = X86_BREAKPOINT_EXECUTE; in arch_build_bp_info()
371 hw->len = X86_BREAKPOINT_LEN_X; in arch_build_bp_info()
[all …]
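
The x86 arch_check_bp_in_kernelspace() above, like the sh/arm64/xtensa/arm variants further down, reduces to one range test: the breakpoint counts as a kernel-space one if any byte of [address, address + len) lies above the user/kernel boundary. A sketch of that check, assuming the x86 boundary macro (other architectures use TASK_SIZE or an arch-specific limit):

/*
 * Sketch of the overlap test the per-arch hits implement: either the
 * start address is already in kernel space, or the range crosses the
 * boundary before it ends.
 */
#include <linux/types.h>
#include <linux/uaccess.h>

static bool example_bp_in_kernelspace(unsigned long va, unsigned long len)
{
	return (va >= TASK_SIZE_MAX) || ((va + len - 1) >= TASK_SIZE_MAX);
}
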
/arch/sh/kernel/
hw_breakpoint.c
124 int arch_check_bp_in_kernelspace(struct arch_hw_breakpoint *hw) in arch_check_bp_in_kernelspace() argument
129 va = hw->address; in arch_check_bp_in_kernelspace()
130 len = get_hbp_len(hw->len); in arch_check_bp_in_kernelspace()
176 struct arch_hw_breakpoint *hw) in arch_build_bp_info() argument
178 hw->address = attr->bp_addr; in arch_build_bp_info()
183 hw->len = SH_BREAKPOINT_LEN_1; in arch_build_bp_info()
186 hw->len = SH_BREAKPOINT_LEN_2; in arch_build_bp_info()
189 hw->len = SH_BREAKPOINT_LEN_4; in arch_build_bp_info()
192 hw->len = SH_BREAKPOINT_LEN_8; in arch_build_bp_info()
201 hw->type = SH_BREAKPOINT_READ; in arch_build_bp_info()
[all …]
perf_event.c
103 struct hw_perf_event *hwc = &event->hw; in __hw_perf_event_init()
202 struct hw_perf_event *hwc = &event->hw; in sh_pmu_stop()
205 if (!(event->hw.state & PERF_HES_STOPPED)) { in sh_pmu_stop()
208 event->hw.state |= PERF_HES_STOPPED; in sh_pmu_stop()
211 if ((flags & PERF_EF_UPDATE) && !(event->hw.state & PERF_HES_UPTODATE)) { in sh_pmu_stop()
212 sh_perf_event_update(event, &event->hw, idx); in sh_pmu_stop()
213 event->hw.state |= PERF_HES_UPTODATE; in sh_pmu_stop()
220 struct hw_perf_event *hwc = &event->hw; in sh_pmu_start()
227 WARN_ON_ONCE(!(event->hw.state & PERF_HES_UPTODATE)); in sh_pmu_start()
230 event->hw.state = 0; in sh_pmu_start()
[all …]
/arch/arm/mach-omap2/
clkt2xxx_virt_prcm_set.c
71 long omap2_round_to_table_rate(struct clk_hw *hw, unsigned long rate, in omap2_round_to_table_rate() argument
95 int omap2_select_table_rate(struct clk_hw *hw, unsigned long rate, in omap2_select_table_rate() argument
229 struct clk_hw_omap *hw = NULL; in omap2xxx_clkt_vps_init() local
236 hw = kzalloc(sizeof(*hw), GFP_KERNEL); in omap2xxx_clkt_vps_init()
237 if (!hw) in omap2xxx_clkt_vps_init()
244 hw->hw.init = &init; in omap2xxx_clkt_vps_init()
246 clk = clk_register(NULL, &hw->hw); in omap2xxx_clkt_vps_init()
249 kfree(hw); in omap2xxx_clkt_vps_init()
clock2xxx.h
17 int omap2_select_table_rate(struct clk_hw *hw, unsigned long rate,
19 long omap2_round_to_table_rate(struct clk_hw *hw, unsigned long rate,
25 void omap2xxx_clkt_dpllcore_init(struct clk_hw *hw);
clkt2xxx_dpllcore.c
102 unsigned long omap2_dpllcore_recalc(struct clk_hw *hw, in omap2_dpllcore_recalc() argument
108 int omap2_reprogram_dpllcore(struct clk_hw *hw, unsigned long rate, in omap2_reprogram_dpllcore() argument
111 struct clk_hw_omap *clk = to_clk_hw_omap(hw); in omap2_reprogram_dpllcore()
189 void omap2xxx_clkt_dpllcore_init(struct clk_hw *hw) in omap2xxx_clkt_dpllcore_init() argument
192 dpll_core_ck = to_clk_hw_omap(hw); in omap2xxx_clkt_dpllcore_init()
/arch/arm64/kernel/
hw_breakpoint.c
162 struct task_struct *tsk = bp->hw.target; in is_compat_bp()
335 int arch_check_bp_in_kernelspace(struct arch_hw_breakpoint *hw) in arch_check_bp_in_kernelspace() argument
340 va = hw->address; in arch_check_bp_in_kernelspace()
341 len = get_hbp_len(hw->ctrl.len); in arch_check_bp_in_kernelspace()
414 struct arch_hw_breakpoint *hw) in arch_build_bp_info() argument
419 hw->ctrl.type = ARM_BREAKPOINT_EXECUTE; in arch_build_bp_info()
422 hw->ctrl.type = ARM_BREAKPOINT_LOAD; in arch_build_bp_info()
425 hw->ctrl.type = ARM_BREAKPOINT_STORE; in arch_build_bp_info()
428 hw->ctrl.type = ARM_BREAKPOINT_LOAD | ARM_BREAKPOINT_STORE; in arch_build_bp_info()
437 hw->ctrl.len = ARM_BREAKPOINT_LEN_1; in arch_build_bp_info()
[all …]
/arch/xtensa/kernel/
hw_breakpoint.c
36 int arch_check_bp_in_kernelspace(struct arch_hw_breakpoint *hw) in arch_check_bp_in_kernelspace() argument
41 va = hw->address; in arch_check_bp_in_kernelspace()
42 len = hw->len; in arch_check_bp_in_kernelspace()
52 struct arch_hw_breakpoint *hw) in hw_breakpoint_arch_parse() argument
57 hw->type = XTENSA_BREAKPOINT_EXECUTE; in hw_breakpoint_arch_parse()
60 hw->type = XTENSA_BREAKPOINT_LOAD; in hw_breakpoint_arch_parse()
63 hw->type = XTENSA_BREAKPOINT_STORE; in hw_breakpoint_arch_parse()
66 hw->type = XTENSA_BREAKPOINT_LOAD | XTENSA_BREAKPOINT_STORE; in hw_breakpoint_arch_parse()
73 hw->len = attr->bp_len; in hw_breakpoint_arch_parse()
74 if (hw->len < 1 || hw->len > 64 || !is_power_of_2(hw->len)) in hw_breakpoint_arch_parse()
[all …]
perf_event.c
153 new_raw_count = xtensa_pmu_read_counter(event->hw.idx); in xtensa_perf_event_update()
216 event->hw.config = xtensa_hw_ctl[event->attr.config]; in xtensa_pmu_event_init()
223 event->hw.config = ret; in xtensa_pmu_event_init()
231 event->hw.config = (event->attr.config & in xtensa_pmu_event_init()
251 struct hw_perf_event *hwc = &event->hw; in xtensa_pmu_start()
258 WARN_ON_ONCE(!(event->hw.state & PERF_HES_UPTODATE)); in xtensa_pmu_start()
269 struct hw_perf_event *hwc = &event->hw; in xtensa_pmu_stop()
280 !(event->hw.state & PERF_HES_UPTODATE)) { in xtensa_pmu_stop()
281 xtensa_perf_event_update(event, &event->hw, idx); in xtensa_pmu_stop()
282 event->hw.state |= PERF_HES_UPTODATE; in xtensa_pmu_stop()
[all …]
/arch/arm/kernel/
hw_breakpoint.c
451 int arch_check_bp_in_kernelspace(struct arch_hw_breakpoint *hw) in arch_check_bp_in_kernelspace() argument
456 va = hw->address; in arch_check_bp_in_kernelspace()
457 len = get_hbp_len(hw->ctrl.len); in arch_check_bp_in_kernelspace()
514 struct arch_hw_breakpoint *hw) in arch_build_bp_info() argument
519 hw->ctrl.type = ARM_BREAKPOINT_EXECUTE; in arch_build_bp_info()
522 hw->ctrl.type = ARM_BREAKPOINT_LOAD; in arch_build_bp_info()
525 hw->ctrl.type = ARM_BREAKPOINT_STORE; in arch_build_bp_info()
528 hw->ctrl.type = ARM_BREAKPOINT_LOAD | ARM_BREAKPOINT_STORE; in arch_build_bp_info()
537 hw->ctrl.len = ARM_BREAKPOINT_LEN_1; in arch_build_bp_info()
540 hw->ctrl.len = ARM_BREAKPOINT_LEN_2; in arch_build_bp_info()
[all …]
bios32.c
428 static void pcibios_init_hw(struct device *parent, struct hw_pci *hw, in pcibios_init_hw() argument
435 for (nr = busnr = 0; nr < hw->nr_controllers; nr++) { in pcibios_init_hw()
445 sys->swizzle = hw->swizzle; in pcibios_init_hw()
446 sys->map_irq = hw->map_irq; in pcibios_init_hw()
449 if (hw->private_data) in pcibios_init_hw()
450 sys->private_data = hw->private_data[nr]; in pcibios_init_hw()
452 ret = hw->setup(nr, sys); in pcibios_init_hw()
465 if (hw->scan) in pcibios_init_hw()
466 ret = hw->scan(nr, bridge); in pcibios_init_hw()
473 bridge->ops = hw->ops; in pcibios_init_hw()
[all …]
/arch/powerpc/kernel/
hw_breakpoint.c
220 if (tmp->bp->hw.target == bp->hw.target && in same_task_bps_check()
312 if (bp->hw.target && bp->cpu == -1) { in arch_reserve_bp_slot()
317 } else if (!bp->hw.target && bp->cpu != -1) { in arch_reserve_bp_slot()
340 if (bp->hw.target) in arch_release_bp_slot()
372 int arch_check_bp_in_kernelspace(struct arch_hw_breakpoint *hw) in arch_check_bp_in_kernelspace() argument
374 return is_kernel_addr(hw->address); in arch_check_bp_in_kernelspace()
408 static int hw_breakpoint_validate_len(struct arch_hw_breakpoint *hw) in hw_breakpoint_validate_len() argument
414 start_addr = ALIGN_DOWN(hw->address, HW_BREAKPOINT_SIZE); in hw_breakpoint_validate_len()
415 end_addr = ALIGN(hw->address + hw->len, HW_BREAKPOINT_SIZE); in hw_breakpoint_validate_len()
432 hw->hw_len = hw_len; in hw_breakpoint_validate_len()
[all …]
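
The powerpc hw_breakpoint_validate_len() above rounds the requested watch range out to HW_BREAKPOINT_SIZE boundaries before checking it against what the hardware can cover. A simplified sketch of that arithmetic (the max_hw_len parameter stands in for the per-hardware limit; the real code distinguishes DAWR from DABR lengths):

/*
 * Sketch of the range rounding shown above: align the start down and
 * the end up so the hardware range covers every requested byte, then
 * reject ranges the hardware cannot watch.
 */
#include <linux/errno.h>
#include <linux/kernel.h>
#include <asm/hw_breakpoint.h>

static int example_validate_len(struct arch_hw_breakpoint *hw,
				unsigned long max_hw_len)
{
	unsigned long start_addr, end_addr, hw_len;

	start_addr = ALIGN_DOWN(hw->address, HW_BREAKPOINT_SIZE);
	end_addr = ALIGN(hw->address + hw->len, HW_BREAKPOINT_SIZE);
	hw_len = end_addr - start_addr;

	if (hw_len > max_hw_len)
		return -EINVAL;

	hw->hw_len = hw_len;
	return 0;
}
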
/arch/x86/events/
msr.c
214 event->hw.idx = -1; in msr_event_init()
215 event->hw.event_base = msr[cfg].msr; in msr_event_init()
216 event->hw.config = cfg; in msr_event_init()
225 if (event->hw.event_base) in msr_read_counter()
226 rdmsrl(event->hw.event_base, now); in msr_read_counter()
240 prev = local64_read(&event->hw.prev_count); in msr_event_update()
243 if (local64_cmpxchg(&event->hw.prev_count, prev, now) != prev) in msr_event_update()
247 if (unlikely(event->hw.event_base == MSR_SMI_COUNT)) { in msr_event_update()
250 } else if (unlikely(event->hw.event_base == MSR_IA32_THERM_STATUS)) { in msr_event_update()
263 local64_set(&event->hw.prev_count, now); in msr_event_start()
/arch/alpha/kernel/
perf_event.c
199 event[0]->hw.idx = idx0; in ev67_check_constraints()
200 event[0]->hw.config_base = config; in ev67_check_constraints()
202 event[1]->hw.idx = idx0 ^ 1; in ev67_check_constraints()
203 event[1]->hw.config_base = config; in ev67_check_constraints()
351 evtype[n] = group->hw.event_base; in collect_events()
359 evtype[n] = pe->hw.event_base; in collect_events()
403 cpuc->current_idx[j] != pe->hw.idx) { in maybe_change_configuration()
404 alpha_perf_event_update(pe, &pe->hw, cpuc->current_idx[j], 0); in maybe_change_configuration()
413 struct hw_perf_event *hwc = &pe->hw; in maybe_change_configuration()
424 cpuc->config = cpuc->event[0]->hw.config_base; in maybe_change_configuration()
[all …]
