Searched for refs:hw_data (results 1 – 25 of 55, sorted by relevance)


/drivers/crypto/qat/qat_c62xvf/
adf_c62xvf_hw_data.c
64 void adf_init_hw_data_c62xiov(struct adf_hw_device_data *hw_data) in adf_init_hw_data_c62xiov() argument
66 hw_data->dev_class = &c62xiov_class; in adf_init_hw_data_c62xiov()
67 hw_data->num_banks = ADF_C62XIOV_ETR_MAX_BANKS; in adf_init_hw_data_c62xiov()
68 hw_data->num_rings_per_bank = ADF_ETR_MAX_RINGS_PER_BANK; in adf_init_hw_data_c62xiov()
69 hw_data->num_accel = ADF_C62XIOV_MAX_ACCELERATORS; in adf_init_hw_data_c62xiov()
70 hw_data->num_logical_accel = 1; in adf_init_hw_data_c62xiov()
71 hw_data->num_engines = ADF_C62XIOV_MAX_ACCELENGINES; in adf_init_hw_data_c62xiov()
72 hw_data->tx_rx_gap = ADF_C62XIOV_RX_RINGS_OFFSET; in adf_init_hw_data_c62xiov()
73 hw_data->tx_rings_mask = ADF_C62XIOV_TX_RINGS_MASK; in adf_init_hw_data_c62xiov()
74 hw_data->alloc_irq = adf_vf_isr_resource_alloc; in adf_init_hw_data_c62xiov()
[all …]
adf_drv.c
78 struct adf_hw_device_data *hw_data; in adf_probe() local
112 hw_data = kzalloc_node(sizeof(*hw_data), GFP_KERNEL, in adf_probe()
114 if (!hw_data) { in adf_probe()
118 accel_dev->hw_device = hw_data; in adf_probe()
122 hw_data->accel_mask = hw_data->get_accel_mask(hw_data); in adf_probe()
123 hw_data->ae_mask = hw_data->get_ae_mask(hw_data); in adf_probe()
124 accel_pci_dev->sku = hw_data->get_sku(hw_data); in adf_probe()
128 hw_data->dev_class->name, pci_name(pdev)); in adf_probe()
/drivers/crypto/qat/qat_dh895xccvf/
adf_dh895xccvf_hw_data.c
64 void adf_init_hw_data_dh895xcciov(struct adf_hw_device_data *hw_data) in adf_init_hw_data_dh895xcciov() argument
66 hw_data->dev_class = &dh895xcciov_class; in adf_init_hw_data_dh895xcciov()
67 hw_data->num_banks = ADF_DH895XCCIOV_ETR_MAX_BANKS; in adf_init_hw_data_dh895xcciov()
68 hw_data->num_rings_per_bank = ADF_ETR_MAX_RINGS_PER_BANK; in adf_init_hw_data_dh895xcciov()
69 hw_data->num_accel = ADF_DH895XCCIOV_MAX_ACCELERATORS; in adf_init_hw_data_dh895xcciov()
70 hw_data->num_logical_accel = 1; in adf_init_hw_data_dh895xcciov()
71 hw_data->num_engines = ADF_DH895XCCIOV_MAX_ACCELENGINES; in adf_init_hw_data_dh895xcciov()
72 hw_data->tx_rx_gap = ADF_DH895XCCIOV_RX_RINGS_OFFSET; in adf_init_hw_data_dh895xcciov()
73 hw_data->tx_rings_mask = ADF_DH895XCCIOV_TX_RINGS_MASK; in adf_init_hw_data_dh895xcciov()
74 hw_data->alloc_irq = adf_vf_isr_resource_alloc; in adf_init_hw_data_dh895xcciov()
[all …]
adf_drv.c
78 struct adf_hw_device_data *hw_data; in adf_probe() local
112 hw_data = kzalloc_node(sizeof(*hw_data), GFP_KERNEL, in adf_probe()
114 if (!hw_data) { in adf_probe()
118 accel_dev->hw_device = hw_data; in adf_probe()
122 hw_data->accel_mask = hw_data->get_accel_mask(hw_data); in adf_probe()
123 hw_data->ae_mask = hw_data->get_ae_mask(hw_data); in adf_probe()
124 accel_pci_dev->sku = hw_data->get_sku(hw_data); in adf_probe()
128 hw_data->dev_class->name, pci_name(pdev)); in adf_probe()
/drivers/crypto/qat/qat_c3xxxvf/
adf_c3xxxvf_hw_data.c
64 void adf_init_hw_data_c3xxxiov(struct adf_hw_device_data *hw_data) in adf_init_hw_data_c3xxxiov() argument
66 hw_data->dev_class = &c3xxxiov_class; in adf_init_hw_data_c3xxxiov()
67 hw_data->num_banks = ADF_C3XXXIOV_ETR_MAX_BANKS; in adf_init_hw_data_c3xxxiov()
68 hw_data->num_rings_per_bank = ADF_ETR_MAX_RINGS_PER_BANK; in adf_init_hw_data_c3xxxiov()
69 hw_data->num_accel = ADF_C3XXXIOV_MAX_ACCELERATORS; in adf_init_hw_data_c3xxxiov()
70 hw_data->num_logical_accel = 1; in adf_init_hw_data_c3xxxiov()
71 hw_data->num_engines = ADF_C3XXXIOV_MAX_ACCELENGINES; in adf_init_hw_data_c3xxxiov()
72 hw_data->tx_rx_gap = ADF_C3XXXIOV_RX_RINGS_OFFSET; in adf_init_hw_data_c3xxxiov()
73 hw_data->tx_rings_mask = ADF_C3XXXIOV_TX_RINGS_MASK; in adf_init_hw_data_c3xxxiov()
74 hw_data->alloc_irq = adf_vf_isr_resource_alloc; in adf_init_hw_data_c3xxxiov()
[all …]
adf_drv.c
78 struct adf_hw_device_data *hw_data; in adf_probe() local
112 hw_data = kzalloc_node(sizeof(*hw_data), GFP_KERNEL, in adf_probe()
114 if (!hw_data) { in adf_probe()
118 accel_dev->hw_device = hw_data; in adf_probe()
122 hw_data->accel_mask = hw_data->get_accel_mask(hw_data); in adf_probe()
123 hw_data->ae_mask = hw_data->get_ae_mask(hw_data); in adf_probe()
124 accel_pci_dev->sku = hw_data->get_sku(hw_data); in adf_probe()
128 hw_data->dev_class->name, pci_name(pdev)); in adf_probe()
/drivers/crypto/qat/qat_4xxx/
adf_4xxx_hw_data.c
214 void adf_init_hw_data_4xxx(struct adf_hw_device_data *hw_data) in adf_init_hw_data_4xxx() argument
216 hw_data->dev_class = &adf_4xxx_class; in adf_init_hw_data_4xxx()
217 hw_data->instance_id = adf_4xxx_class.instances++; in adf_init_hw_data_4xxx()
218 hw_data->num_banks = ADF_4XXX_ETR_MAX_BANKS; in adf_init_hw_data_4xxx()
219 hw_data->num_rings_per_bank = ADF_4XXX_NUM_RINGS_PER_BANK; in adf_init_hw_data_4xxx()
220 hw_data->num_accel = ADF_4XXX_MAX_ACCELERATORS; in adf_init_hw_data_4xxx()
221 hw_data->num_engines = ADF_4XXX_MAX_ACCELENGINES; in adf_init_hw_data_4xxx()
222 hw_data->num_logical_accel = 1; in adf_init_hw_data_4xxx()
223 hw_data->tx_rx_gap = ADF_4XXX_RX_RINGS_OFFSET; in adf_init_hw_data_4xxx()
224 hw_data->tx_rings_mask = ADF_4XXX_TX_RINGS_MASK; in adf_init_hw_data_4xxx()
[all …]
adf_drv.c
145 struct adf_hw_device_data *hw_data; in adf_probe() local
181 hw_data = devm_kzalloc(&pdev->dev, sizeof(*hw_data), GFP_KERNEL); in adf_probe()
182 if (!hw_data) { in adf_probe()
187 accel_dev->hw_device = hw_data; in adf_probe()
191 pci_read_config_dword(pdev, ADF_4XXX_FUSECTL4_OFFSET, &hw_data->fuses); in adf_probe()
194 hw_data->accel_mask = hw_data->get_accel_mask(hw_data); in adf_probe()
195 hw_data->ae_mask = hw_data->get_ae_mask(hw_data); in adf_probe()
196 accel_pci_dev->sku = hw_data->get_sku(hw_data); in adf_probe()
198 if (!hw_data->accel_mask || !hw_data->ae_mask || in adf_probe()
199 (~hw_data->ae_mask & 0x01)) { in adf_probe()
[all …]
/drivers/crypto/qat/qat_c62x/
adf_c62x_hw_data.c
173 void adf_init_hw_data_c62x(struct adf_hw_device_data *hw_data) in adf_init_hw_data_c62x() argument
175 hw_data->dev_class = &c62x_class; in adf_init_hw_data_c62x()
176 hw_data->instance_id = c62x_class.instances++; in adf_init_hw_data_c62x()
177 hw_data->num_banks = ADF_C62X_ETR_MAX_BANKS; in adf_init_hw_data_c62x()
178 hw_data->num_rings_per_bank = ADF_ETR_MAX_RINGS_PER_BANK; in adf_init_hw_data_c62x()
179 hw_data->num_accel = ADF_C62X_MAX_ACCELERATORS; in adf_init_hw_data_c62x()
180 hw_data->num_logical_accel = 1; in adf_init_hw_data_c62x()
181 hw_data->num_engines = ADF_C62X_MAX_ACCELENGINES; in adf_init_hw_data_c62x()
182 hw_data->tx_rx_gap = ADF_C62X_RX_RINGS_OFFSET; in adf_init_hw_data_c62x()
183 hw_data->tx_rings_mask = ADF_C62X_TX_RINGS_MASK; in adf_init_hw_data_c62x()
[all …]
adf_drv.c
76 struct adf_hw_device_data *hw_data; in adf_probe() local
117 hw_data = kzalloc_node(sizeof(*hw_data), GFP_KERNEL, in adf_probe()
119 if (!hw_data) { in adf_probe()
124 accel_dev->hw_device = hw_data; in adf_probe()
128 &hw_data->fuses); in adf_probe()
130 &hw_data->straps); in adf_probe()
133 hw_data->accel_mask = hw_data->get_accel_mask(hw_data); in adf_probe()
134 hw_data->ae_mask = hw_data->get_ae_mask(hw_data); in adf_probe()
135 accel_pci_dev->sku = hw_data->get_sku(hw_data); in adf_probe()
137 if (!hw_data->accel_mask || !hw_data->ae_mask || in adf_probe()
[all …]
/drivers/crypto/qat/qat_c3xxx/
adf_c3xxx_hw_data.c
171 void adf_init_hw_data_c3xxx(struct adf_hw_device_data *hw_data) in adf_init_hw_data_c3xxx() argument
173 hw_data->dev_class = &c3xxx_class; in adf_init_hw_data_c3xxx()
174 hw_data->instance_id = c3xxx_class.instances++; in adf_init_hw_data_c3xxx()
175 hw_data->num_banks = ADF_C3XXX_ETR_MAX_BANKS; in adf_init_hw_data_c3xxx()
176 hw_data->num_rings_per_bank = ADF_ETR_MAX_RINGS_PER_BANK; in adf_init_hw_data_c3xxx()
177 hw_data->num_accel = ADF_C3XXX_MAX_ACCELERATORS; in adf_init_hw_data_c3xxx()
178 hw_data->num_logical_accel = 1; in adf_init_hw_data_c3xxx()
179 hw_data->num_engines = ADF_C3XXX_MAX_ACCELENGINES; in adf_init_hw_data_c3xxx()
180 hw_data->tx_rx_gap = ADF_C3XXX_RX_RINGS_OFFSET; in adf_init_hw_data_c3xxx()
181 hw_data->tx_rings_mask = ADF_C3XXX_TX_RINGS_MASK; in adf_init_hw_data_c3xxx()
[all …]
adf_drv.c
76 struct adf_hw_device_data *hw_data; in adf_probe() local
117 hw_data = kzalloc_node(sizeof(*hw_data), GFP_KERNEL, in adf_probe()
119 if (!hw_data) { in adf_probe()
124 accel_dev->hw_device = hw_data; in adf_probe()
128 &hw_data->fuses); in adf_probe()
130 &hw_data->straps); in adf_probe()
133 hw_data->accel_mask = hw_data->get_accel_mask(hw_data); in adf_probe()
134 hw_data->ae_mask = hw_data->get_ae_mask(hw_data); in adf_probe()
135 accel_pci_dev->sku = hw_data->get_sku(hw_data); in adf_probe()
137 if (!hw_data->accel_mask || !hw_data->ae_mask || in adf_probe()
[all …]
/drivers/crypto/qat/qat_dh895xcc/
adf_dh895xcc_hw_data.c
201 void adf_init_hw_data_dh895xcc(struct adf_hw_device_data *hw_data) in adf_init_hw_data_dh895xcc() argument
203 hw_data->dev_class = &dh895xcc_class; in adf_init_hw_data_dh895xcc()
204 hw_data->instance_id = dh895xcc_class.instances++; in adf_init_hw_data_dh895xcc()
205 hw_data->num_banks = ADF_DH895XCC_ETR_MAX_BANKS; in adf_init_hw_data_dh895xcc()
206 hw_data->num_rings_per_bank = ADF_ETR_MAX_RINGS_PER_BANK; in adf_init_hw_data_dh895xcc()
207 hw_data->num_accel = ADF_DH895XCC_MAX_ACCELERATORS; in adf_init_hw_data_dh895xcc()
208 hw_data->num_logical_accel = 1; in adf_init_hw_data_dh895xcc()
209 hw_data->num_engines = ADF_DH895XCC_MAX_ACCELENGINES; in adf_init_hw_data_dh895xcc()
210 hw_data->tx_rx_gap = ADF_DH895XCC_RX_RINGS_OFFSET; in adf_init_hw_data_dh895xcc()
211 hw_data->tx_rings_mask = ADF_DH895XCC_TX_RINGS_MASK; in adf_init_hw_data_dh895xcc()
[all …]
adf_drv.c
76 struct adf_hw_device_data *hw_data; in adf_probe() local
117 hw_data = kzalloc_node(sizeof(*hw_data), GFP_KERNEL, in adf_probe()
119 if (!hw_data) { in adf_probe()
124 accel_dev->hw_device = hw_data; in adf_probe()
128 &hw_data->fuses); in adf_probe()
131 hw_data->accel_mask = hw_data->get_accel_mask(hw_data); in adf_probe()
132 hw_data->ae_mask = hw_data->get_ae_mask(hw_data); in adf_probe()
133 accel_pci_dev->sku = hw_data->get_sku(hw_data); in adf_probe()
135 if (!hw_data->accel_mask || !hw_data->ae_mask || in adf_probe()
136 ((~hw_data->ae_mask) & 0x01)) { in adf_probe()
[all …]
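
The QAT hits above all share one shape: each device module's adf_init_hw_data_*() fills a struct adf_hw_device_data with bank/ring/engine constants and function-pointer hooks (get_accel_mask, get_ae_mask, get_sku, alloc_irq, ...), and each adf_drv.c probe allocates that descriptor, attaches it as accel_dev->hw_device, and derives the masks through the hooks before rejecting devices whose engine 0 is absent. The following is only a condensed, hypothetical sketch of that pattern; names such as hw_device_data, init_hw_data_foo and probe_foo are invented for illustration and are not the kernel's actual code.

#include <stdio.h>
#include <stdlib.h>

/* Condensed stand-in for struct adf_hw_device_data: per-device constants
 * plus hooks that the probe path calls back through. */
struct hw_device_data {
    unsigned int num_banks;
    unsigned int num_engines;
    unsigned long accel_mask;
    unsigned long ae_mask;
    unsigned long (*get_accel_mask)(struct hw_device_data *hw_data);
    unsigned long (*get_ae_mask)(struct hw_device_data *hw_data);
};

/* Hypothetical per-device hooks; the real driver derives these values from
 * the fuse/strap registers read into hw_data in the probe excerpts above. */
static unsigned long foo_get_accel_mask(struct hw_device_data *hw_data)
{
    (void)hw_data;
    return 0x3;
}

static unsigned long foo_get_ae_mask(struct hw_device_data *hw_data)
{
    (void)hw_data;
    return 0xf;
}

/* Plays the role of adf_init_hw_data_*(): fill in constants and hooks. */
static void init_hw_data_foo(struct hw_device_data *hw_data)
{
    hw_data->num_banks = 8;
    hw_data->num_engines = 4;
    hw_data->get_accel_mask = foo_get_accel_mask;
    hw_data->get_ae_mask = foo_get_ae_mask;
}

/* Plays the role of the adf_probe() excerpts: allocate the descriptor,
 * derive the masks through the hooks, and bail out if either mask is empty
 * or engine 0 is missing (the ~ae_mask & 0x01 test seen in the hits). */
static int probe_foo(void)
{
    struct hw_device_data *hw_data = calloc(1, sizeof(*hw_data));

    if (!hw_data)
        return -1;

    init_hw_data_foo(hw_data);
    hw_data->accel_mask = hw_data->get_accel_mask(hw_data);
    hw_data->ae_mask = hw_data->get_ae_mask(hw_data);

    if (!hw_data->accel_mask || !hw_data->ae_mask ||
        (~hw_data->ae_mask & 0x01)) {
        free(hw_data);
        return -1;
    }

    printf("banks=%u engines=%u accel_mask=%#lx ae_mask=%#lx\n",
           hw_data->num_banks, hw_data->num_engines,
           hw_data->accel_mask, hw_data->ae_mask);
    free(hw_data);
    return 0;
}

int main(void)
{
    return probe_foo() ? 1 : 0;
}
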
/drivers/video/backlight/
apple_bl.c
31 struct hw_data { struct
40 static const struct hw_data *hw_data; argument
81 static const struct hw_data intel_chipset_data = {
126 static const struct hw_data nvidia_chipset_data = {
151 hw_data = &intel_chipset_data; in apple_bl_add()
153 hw_data = &nvidia_chipset_data; in apple_bl_add()
157 if (!hw_data) { in apple_bl_add()
164 intensity = hw_data->backlight_ops.get_brightness(NULL); in apple_bl_add()
167 hw_data->set_brightness(1); in apple_bl_add()
168 if (!hw_data->backlight_ops.get_brightness(NULL)) in apple_bl_add()
[all …]
/drivers/crypto/qat/qat_common/
adf_init.c
63 struct adf_hw_device_data *hw_data = accel_dev->hw_device; in adf_dev_init() local
66 if (!hw_data) { in adf_dev_init()
82 if (hw_data->init_device && hw_data->init_device(accel_dev)) { in adf_dev_init()
87 if (hw_data->init_admin_comms && hw_data->init_admin_comms(accel_dev)) { in adf_dev_init()
92 if (hw_data->init_arb && hw_data->init_arb(accel_dev)) { in adf_dev_init()
111 if (hw_data->alloc_irq(accel_dev)) { in adf_dev_init()
117 hw_data->enable_ints(accel_dev); in adf_dev_init()
118 hw_data->enable_error_correction(accel_dev); in adf_dev_init()
120 ret = hw_data->enable_pfvf_comms(accel_dev); in adf_dev_init()
156 struct adf_hw_device_data *hw_data = accel_dev->hw_device; in adf_dev_start() local
[all …]
adf_hw_arbiter.c
20 struct adf_hw_device_data *hw_data = accel_dev->hw_device; in adf_init_arb() local
22 unsigned long ae_mask = hw_data->ae_mask; in adf_init_arb()
28 hw_data->get_arb_info(&info); in adf_init_arb()
39 thd_2_arb_cfg = hw_data->get_arb_mapping(); in adf_init_arb()
41 for_each_set_bit(i, &ae_mask, hw_data->num_engines) in adf_init_arb()
51 struct adf_hw_device_data *hw_data = accel_dev->hw_device; in adf_update_ring_arb() local
53 u32 tx_ring_mask = hw_data->tx_rings_mask; in adf_update_ring_arb()
54 u32 shift = hw_data->tx_rx_gap; in adf_update_ring_arb()
75 struct adf_hw_device_data *hw_data = accel_dev->hw_device; in adf_exit_arb() local
82 hw_data->get_arb_info(&info); in adf_exit_arb()
[all …]
adf_pf2vf_msg.c
17 struct adf_hw_device_data *hw_data = accel_dev->hw_device; in __adf_enable_vf2pf_interrupts() local
19 &GET_BARS(accel_dev)[hw_data->get_misc_bar_id(hw_data)]; in __adf_enable_vf2pf_interrupts()
50 struct adf_hw_device_data *hw_data = accel_dev->hw_device; in __adf_disable_vf2pf_interrupts() local
52 &GET_BARS(accel_dev)[hw_data->get_misc_bar_id(hw_data)]; in __adf_disable_vf2pf_interrupts()
90 struct adf_hw_device_data *hw_data = accel_dev->hw_device; in __adf_iov_putmsg() local
92 pci_info->pci_bars[hw_data->get_misc_bar_id(hw_data)].virt_addr; in __adf_iov_putmsg()
101 pf2vf_offset = hw_data->get_pf2vf_offset(0); in __adf_iov_putmsg()
109 pf2vf_offset = hw_data->get_pf2vf_offset(vf_nr); in __adf_iov_putmsg()
189 struct adf_hw_device_data *hw_data = accel_dev->hw_device; in adf_vf2pf_req_hndl() local
190 int bar_id = hw_data->get_misc_bar_id(hw_data); in adf_vf2pf_req_hndl()
[all …]
adf_isr.c
29 struct adf_hw_device_data *hw_data = accel_dev->hw_device; in adf_enable_msix() local
32 if (hw_data->set_msix_rttable) in adf_enable_msix()
33 hw_data->set_msix_rttable(accel_dev); in adf_enable_msix()
39 msix_num_entries += hw_data->num_banks; in adf_enable_msix()
44 hw_data->num_banks; in adf_enable_msix()
79 struct adf_hw_device_data *hw_data = accel_dev->hw_device; in adf_msix_isr_ae() local
81 &GET_BARS(accel_dev)[hw_data->get_misc_bar_id(hw_data)]; in adf_msix_isr_ae()
141 struct adf_hw_device_data *hw_data = accel_dev->hw_device; in adf_request_irqs() local
149 for (i = 0; i < hw_data->num_banks; i++) { in adf_request_irqs()
165 cpu = ((accel_dev->accel_id * hw_data->num_banks) + in adf_request_irqs()
[all …]
adf_vf_isr.c
35 struct adf_hw_device_data *hw_data = accel_dev->hw_device; in adf_enable_pf2vf_interrupts() local
37 pci_info->pci_bars[hw_data->get_misc_bar_id(hw_data)].virt_addr; in adf_enable_pf2vf_interrupts()
45 struct adf_hw_device_data *hw_data = accel_dev->hw_device; in adf_disable_pf2vf_interrupts() local
47 pci_info->pci_bars[hw_data->get_misc_bar_id(hw_data)].virt_addr; in adf_disable_pf2vf_interrupts()
96 struct adf_hw_device_data *hw_data = accel_dev->hw_device; in adf_pf2vf_bh_handler() local
98 &GET_BARS(accel_dev)[hw_data->get_misc_bar_id(hw_data)]; in adf_pf2vf_bh_handler()
103 msg = ADF_CSR_RD(pmisc_bar_addr, hw_data->get_pf2vf_offset(0)); in adf_pf2vf_bh_handler()
135 ADF_CSR_WR(pmisc_bar_addr, hw_data->get_pf2vf_offset(0), msg); in adf_pf2vf_bh_handler()
155 ADF_CSR_WR(pmisc_bar_addr, hw_data->get_pf2vf_offset(0), msg); in adf_pf2vf_bh_handler()
186 struct adf_hw_device_data *hw_data = accel_dev->hw_device; in adf_isr() local
[all …]
adf_sriov.c
44 struct adf_hw_device_data *hw_data = accel_dev->hw_device; in adf_enable_sriov() local
61 if (hw_data->configure_iov_threads) in adf_enable_sriov()
62 hw_data->configure_iov_threads(accel_dev, true); in adf_enable_sriov()
65 if (hw_data->get_pf2vf_offset) in adf_enable_sriov()
87 struct adf_hw_device_data *hw_data = accel_dev->hw_device; in adf_disable_sriov() local
95 if (hw_data->get_pf2vf_offset) in adf_disable_sriov()
101 if (hw_data->get_pf2vf_offset) in adf_disable_sriov()
105 if (hw_data->configure_iov_threads) in adf_disable_sriov()
106 hw_data->configure_iov_threads(accel_dev, false); in adf_disable_sriov()
adf_gen2_hw_data.c
10 struct adf_hw_device_data *hw_data = accel_dev->hw_device; in adf_gen2_cfg_iov_thds() local
16 pmisc_id = hw_data->get_misc_bar_id(hw_data); in adf_gen2_cfg_iov_thds()
157 struct adf_hw_device_data *hw_data = accel_dev->hw_device; in adf_gen2_get_accel_cap() local
159 u32 straps = hw_data->straps; in adf_gen2_get_accel_cap()
160 u32 fuses = hw_data->fuses; in adf_gen2_get_accel_cap()
197 struct adf_hw_device_data *hw_data = accel_dev->hw_device; in adf_gen2_set_ssm_wdtimer() local
200 unsigned long accel_mask = hw_data->accel_mask; in adf_gen2_set_ssm_wdtimer()
206 pmisc_id = hw_data->get_misc_bar_id(hw_data); in adf_gen2_set_ssm_wdtimer()
211 for_each_set_bit(i, &accel_mask, hw_data->num_accel) { in adf_gen2_set_ssm_wdtimer()
adf_ctl_drv.c
345 struct adf_hw_device_data *hw_data; in adf_ctl_ioctl_get_status() local
359 hw_data = accel_dev->hw_device; in adf_ctl_ioctl_get_status()
361 dev_info.num_ae = hw_data->get_num_aes(hw_data); in adf_ctl_ioctl_get_status()
362 dev_info.num_accel = hw_data->get_num_accels(hw_data); in adf_ctl_ioctl_get_status()
363 dev_info.num_logical_accel = hw_data->num_logical_accel; in adf_ctl_ioctl_get_status()
364 dev_info.banks_per_accel = hw_data->num_banks in adf_ctl_ioctl_get_status()
365 / hw_data->num_logical_accel; in adf_ctl_ioctl_get_status()
366 strlcpy(dev_info.name, hw_data->dev_class->name, sizeof(dev_info.name)); in adf_ctl_ioctl_get_status()
367 dev_info.instance_id = hw_data->instance_id; in adf_ctl_ioctl_get_status()
368 dev_info.type = hw_data->dev_class->type; in adf_ctl_ioctl_get_status()
/drivers/clk/uniphier/
clk-uniphier-core.c
45 struct clk_hw_onecell_data *hw_data; in uniphier_clk_probe() local
67 hw_data = devm_kzalloc(dev, struct_size(hw_data, hws, clk_num), in uniphier_clk_probe()
69 if (!hw_data) in uniphier_clk_probe()
72 hw_data->num = clk_num; in uniphier_clk_probe()
76 hw_data->hws[clk_num] = ERR_PTR(-EINVAL); in uniphier_clk_probe()
87 hw_data->hws[p->idx] = hw; in uniphier_clk_probe()
91 hw_data); in uniphier_clk_probe()
/drivers/clk/keystone/
syscon-clk.c
95 struct clk_hw_onecell_data *hw_data; in ti_syscon_gate_clk_probe() local
116 hw_data = devm_kzalloc(dev, struct_size(hw_data, hws, num_clks), in ti_syscon_gate_clk_probe()
118 if (!hw_data) in ti_syscon_gate_clk_probe()
121 hw_data->num = num_clks; in ti_syscon_gate_clk_probe()
124 hw_data->hws[i] = ti_syscon_gate_clk_register(dev, regmap, in ti_syscon_gate_clk_probe()
126 if (IS_ERR(hw_data->hws[i])) in ti_syscon_gate_clk_probe()
132 hw_data); in ti_syscon_gate_clk_probe()
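
Both clock-driver hits follow the same clk_hw_onecell_data recipe: allocate the header plus a flexible array of clk_hw slots in one devm_kzalloc(struct_size(...)) call, set hw_data->num, fill hw_data->hws[] with the registered clocks (or ERR_PTR sentinels, as the uniphier hit shows), and finally pass hw_data to the provider-registration call that the truncated lines above end with. Below is a minimal, userspace-only sketch of that allocation pattern; the names are invented and no kernel APIs are used.

#include <stdio.h>
#include <stdlib.h>

/* Stand-in for struct clk_hw_onecell_data: a count plus a flexible array
 * of per-clock handles, handed to the clock provider as one blob. */
struct hw_onecell_data {
    unsigned int num;
    void *hws[];        /* the real structure holds struct clk_hw pointers */
};

int main(void)
{
    unsigned int clk_num = 4, i;
    /* Equivalent of devm_kzalloc(dev, struct_size(hw_data, hws, clk_num), GFP_KERNEL) */
    struct hw_onecell_data *hw_data =
        calloc(1, sizeof(*hw_data) + clk_num * sizeof(hw_data->hws[0]));

    if (!hw_data)
        return 1;

    hw_data->num = clk_num;
    for (i = 0; i < clk_num; i++)
        hw_data->hws[i] = NULL;  /* each slot would hold a registered clock handle */

    /* hw_data would now be handed to the clock framework; the exact
     * registration call is truncated in the search hits above. */
    printf("prepared %u clock slots\n", hw_data->num);
    free(hw_data);
    return 0;
}
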
