
Searched refs:hw_data (Results 1 – 25 of 41) sorted by relevance


/drivers/crypto/qat/qat_c62xvf/
adf_c62xvf_hw_data.c
112 void adf_init_hw_data_c62xiov(struct adf_hw_device_data *hw_data) in adf_init_hw_data_c62xiov() argument
114 hw_data->dev_class = &c62xiov_class; in adf_init_hw_data_c62xiov()
115 hw_data->num_banks = ADF_C62XIOV_ETR_MAX_BANKS; in adf_init_hw_data_c62xiov()
116 hw_data->num_accel = ADF_C62XIOV_MAX_ACCELERATORS; in adf_init_hw_data_c62xiov()
117 hw_data->num_logical_accel = 1; in adf_init_hw_data_c62xiov()
118 hw_data->num_engines = ADF_C62XIOV_MAX_ACCELENGINES; in adf_init_hw_data_c62xiov()
119 hw_data->tx_rx_gap = ADF_C62XIOV_RX_RINGS_OFFSET; in adf_init_hw_data_c62xiov()
120 hw_data->tx_rings_mask = ADF_C62XIOV_TX_RINGS_MASK; in adf_init_hw_data_c62xiov()
121 hw_data->alloc_irq = adf_vf_isr_resource_alloc; in adf_init_hw_data_c62xiov()
122 hw_data->free_irq = adf_vf_isr_resource_free; in adf_init_hw_data_c62xiov()
[all …]
adf_drv.c
125 struct adf_hw_device_data *hw_data; in adf_probe() local
159 hw_data = kzalloc_node(sizeof(*hw_data), GFP_KERNEL, in adf_probe()
161 if (!hw_data) { in adf_probe()
165 accel_dev->hw_device = hw_data; in adf_probe()
169 hw_data->accel_mask = hw_data->get_accel_mask(hw_data->fuses); in adf_probe()
170 hw_data->ae_mask = hw_data->get_ae_mask(hw_data->fuses); in adf_probe()
171 accel_pci_dev->sku = hw_data->get_sku(hw_data); in adf_probe()
175 ADF_DEVICE_NAME_PREFIX, hw_data->dev_class->name, in adf_probe()
/drivers/crypto/qat/qat_dh895xccvf/
adf_dh895xccvf_hw_data.c
112 void adf_init_hw_data_dh895xcciov(struct adf_hw_device_data *hw_data) in adf_init_hw_data_dh895xcciov() argument
114 hw_data->dev_class = &dh895xcciov_class; in adf_init_hw_data_dh895xcciov()
115 hw_data->num_banks = ADF_DH895XCCIOV_ETR_MAX_BANKS; in adf_init_hw_data_dh895xcciov()
116 hw_data->num_accel = ADF_DH895XCCIOV_MAX_ACCELERATORS; in adf_init_hw_data_dh895xcciov()
117 hw_data->num_logical_accel = 1; in adf_init_hw_data_dh895xcciov()
118 hw_data->num_engines = ADF_DH895XCCIOV_MAX_ACCELENGINES; in adf_init_hw_data_dh895xcciov()
119 hw_data->tx_rx_gap = ADF_DH895XCCIOV_RX_RINGS_OFFSET; in adf_init_hw_data_dh895xcciov()
120 hw_data->tx_rings_mask = ADF_DH895XCCIOV_TX_RINGS_MASK; in adf_init_hw_data_dh895xcciov()
121 hw_data->alloc_irq = adf_vf_isr_resource_alloc; in adf_init_hw_data_dh895xcciov()
122 hw_data->free_irq = adf_vf_isr_resource_free; in adf_init_hw_data_dh895xcciov()
[all …]
adf_drv.c
125 struct adf_hw_device_data *hw_data; in adf_probe() local
159 hw_data = kzalloc_node(sizeof(*hw_data), GFP_KERNEL, in adf_probe()
161 if (!hw_data) { in adf_probe()
165 accel_dev->hw_device = hw_data; in adf_probe()
169 hw_data->accel_mask = hw_data->get_accel_mask(hw_data->fuses); in adf_probe()
170 hw_data->ae_mask = hw_data->get_ae_mask(hw_data->fuses); in adf_probe()
171 accel_pci_dev->sku = hw_data->get_sku(hw_data); in adf_probe()
175 ADF_DEVICE_NAME_PREFIX, hw_data->dev_class->name, in adf_probe()
/drivers/crypto/qat/qat_c3xxxvf/
adf_c3xxxvf_hw_data.c
112 void adf_init_hw_data_c3xxxiov(struct adf_hw_device_data *hw_data) in adf_init_hw_data_c3xxxiov() argument
114 hw_data->dev_class = &c3xxxiov_class; in adf_init_hw_data_c3xxxiov()
115 hw_data->num_banks = ADF_C3XXXIOV_ETR_MAX_BANKS; in adf_init_hw_data_c3xxxiov()
116 hw_data->num_accel = ADF_C3XXXIOV_MAX_ACCELERATORS; in adf_init_hw_data_c3xxxiov()
117 hw_data->num_logical_accel = 1; in adf_init_hw_data_c3xxxiov()
118 hw_data->num_engines = ADF_C3XXXIOV_MAX_ACCELENGINES; in adf_init_hw_data_c3xxxiov()
119 hw_data->tx_rx_gap = ADF_C3XXXIOV_RX_RINGS_OFFSET; in adf_init_hw_data_c3xxxiov()
120 hw_data->tx_rings_mask = ADF_C3XXXIOV_TX_RINGS_MASK; in adf_init_hw_data_c3xxxiov()
121 hw_data->alloc_irq = adf_vf_isr_resource_alloc; in adf_init_hw_data_c3xxxiov()
122 hw_data->free_irq = adf_vf_isr_resource_free; in adf_init_hw_data_c3xxxiov()
[all …]
adf_drv.c
125 struct adf_hw_device_data *hw_data; in adf_probe() local
159 hw_data = kzalloc_node(sizeof(*hw_data), GFP_KERNEL, in adf_probe()
161 if (!hw_data) { in adf_probe()
165 accel_dev->hw_device = hw_data; in adf_probe()
169 hw_data->accel_mask = hw_data->get_accel_mask(hw_data->fuses); in adf_probe()
170 hw_data->ae_mask = hw_data->get_ae_mask(hw_data->fuses); in adf_probe()
171 accel_pci_dev->sku = hw_data->get_sku(hw_data); in adf_probe()
175 ADF_DEVICE_NAME_PREFIX, hw_data->dev_class->name, in adf_probe()
/drivers/crypto/qat/qat_c62x/
adf_c62x_hw_data.c
208 void adf_init_hw_data_c62x(struct adf_hw_device_data *hw_data) in adf_init_hw_data_c62x() argument
210 hw_data->dev_class = &c62x_class; in adf_init_hw_data_c62x()
211 hw_data->instance_id = c62x_class.instances++; in adf_init_hw_data_c62x()
212 hw_data->num_banks = ADF_C62X_ETR_MAX_BANKS; in adf_init_hw_data_c62x()
213 hw_data->num_accel = ADF_C62X_MAX_ACCELERATORS; in adf_init_hw_data_c62x()
214 hw_data->num_logical_accel = 1; in adf_init_hw_data_c62x()
215 hw_data->num_engines = ADF_C62X_MAX_ACCELENGINES; in adf_init_hw_data_c62x()
216 hw_data->tx_rx_gap = ADF_C62X_RX_RINGS_OFFSET; in adf_init_hw_data_c62x()
217 hw_data->tx_rings_mask = ADF_C62X_TX_RINGS_MASK; in adf_init_hw_data_c62x()
218 hw_data->alloc_irq = adf_isr_resource_alloc; in adf_init_hw_data_c62x()
[all …]
adf_drv.c
123 struct adf_hw_device_data *hw_data; in adf_probe() local
164 hw_data = kzalloc_node(sizeof(*hw_data), GFP_KERNEL, in adf_probe()
166 if (!hw_data) { in adf_probe()
171 accel_dev->hw_device = hw_data; in adf_probe()
175 &hw_data->fuses); in adf_probe()
178 hw_data->accel_mask = hw_data->get_accel_mask(hw_data->fuses); in adf_probe()
179 hw_data->ae_mask = hw_data->get_ae_mask(hw_data->fuses); in adf_probe()
180 accel_pci_dev->sku = hw_data->get_sku(hw_data); in adf_probe()
182 if (!hw_data->accel_mask || !hw_data->ae_mask || in adf_probe()
183 ((~hw_data->ae_mask) & 0x01)) { in adf_probe()
[all …]
/drivers/crypto/qat/qat_c3xxx/
adf_c3xxx_hw_data.c
198 void adf_init_hw_data_c3xxx(struct adf_hw_device_data *hw_data) in adf_init_hw_data_c3xxx() argument
200 hw_data->dev_class = &c3xxx_class; in adf_init_hw_data_c3xxx()
201 hw_data->instance_id = c3xxx_class.instances++; in adf_init_hw_data_c3xxx()
202 hw_data->num_banks = ADF_C3XXX_ETR_MAX_BANKS; in adf_init_hw_data_c3xxx()
203 hw_data->num_accel = ADF_C3XXX_MAX_ACCELERATORS; in adf_init_hw_data_c3xxx()
204 hw_data->num_logical_accel = 1; in adf_init_hw_data_c3xxx()
205 hw_data->num_engines = ADF_C3XXX_MAX_ACCELENGINES; in adf_init_hw_data_c3xxx()
206 hw_data->tx_rx_gap = ADF_C3XXX_RX_RINGS_OFFSET; in adf_init_hw_data_c3xxx()
207 hw_data->tx_rings_mask = ADF_C3XXX_TX_RINGS_MASK; in adf_init_hw_data_c3xxx()
208 hw_data->alloc_irq = adf_isr_resource_alloc; in adf_init_hw_data_c3xxx()
[all …]
adf_drv.c
123 struct adf_hw_device_data *hw_data; in adf_probe() local
164 hw_data = kzalloc_node(sizeof(*hw_data), GFP_KERNEL, in adf_probe()
166 if (!hw_data) { in adf_probe()
171 accel_dev->hw_device = hw_data; in adf_probe()
175 &hw_data->fuses); in adf_probe()
178 hw_data->accel_mask = hw_data->get_accel_mask(hw_data->fuses); in adf_probe()
179 hw_data->ae_mask = hw_data->get_ae_mask(hw_data->fuses); in adf_probe()
180 accel_pci_dev->sku = hw_data->get_sku(hw_data); in adf_probe()
182 if (!hw_data->accel_mask || !hw_data->ae_mask || in adf_probe()
183 ((~hw_data->ae_mask) & 0x01)) { in adf_probe()
[all …]
/drivers/crypto/qat/qat_dh895xcc/
adf_dh895xcc_hw_data.c
221 void adf_init_hw_data_dh895xcc(struct adf_hw_device_data *hw_data) in adf_init_hw_data_dh895xcc() argument
223 hw_data->dev_class = &dh895xcc_class; in adf_init_hw_data_dh895xcc()
224 hw_data->instance_id = dh895xcc_class.instances++; in adf_init_hw_data_dh895xcc()
225 hw_data->num_banks = ADF_DH895XCC_ETR_MAX_BANKS; in adf_init_hw_data_dh895xcc()
226 hw_data->num_accel = ADF_DH895XCC_MAX_ACCELERATORS; in adf_init_hw_data_dh895xcc()
227 hw_data->num_logical_accel = 1; in adf_init_hw_data_dh895xcc()
228 hw_data->num_engines = ADF_DH895XCC_MAX_ACCELENGINES; in adf_init_hw_data_dh895xcc()
229 hw_data->tx_rx_gap = ADF_DH895XCC_RX_RINGS_OFFSET; in adf_init_hw_data_dh895xcc()
230 hw_data->tx_rings_mask = ADF_DH895XCC_TX_RINGS_MASK; in adf_init_hw_data_dh895xcc()
231 hw_data->alloc_irq = adf_isr_resource_alloc; in adf_init_hw_data_dh895xcc()
[all …]
adf_drv.c
123 struct adf_hw_device_data *hw_data; in adf_probe() local
164 hw_data = kzalloc_node(sizeof(*hw_data), GFP_KERNEL, in adf_probe()
166 if (!hw_data) { in adf_probe()
171 accel_dev->hw_device = hw_data; in adf_probe()
175 &hw_data->fuses); in adf_probe()
178 hw_data->accel_mask = hw_data->get_accel_mask(hw_data->fuses); in adf_probe()
179 hw_data->ae_mask = hw_data->get_ae_mask(hw_data->fuses); in adf_probe()
180 accel_pci_dev->sku = hw_data->get_sku(hw_data); in adf_probe()
182 if (!hw_data->accel_mask || !hw_data->ae_mask || in adf_probe()
183 ((~hw_data->ae_mask) & 0x01)) { in adf_probe()
[all …]
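
Every QAT result above follows the same split: each device directory (c62x, c3xxx, dh895xcc and their VF counterparts) supplies an adf_init_hw_data_*() routine that fills a struct adf_hw_device_data with that variant's limits and callbacks, and the shared adf_probe() allocates the table and then reads everything back through it. The sketch below is a minimal user-space rendering of that pattern, not the kernel code itself: the field set, the mask arithmetic and the calloc()-based probe are simplified stand-ins for the real adf_hw_device_data and adf_probe().

#include <stdio.h>
#include <stdlib.h>

/* Simplified stand-in for struct adf_hw_device_data: per-variant
 * limits plus callbacks that the common probe code invokes. */
struct hw_device_data {
    const char *dev_class_name;
    unsigned int num_banks;
    unsigned int num_engines;
    unsigned long accel_mask;
    unsigned long ae_mask;
    unsigned long (*get_accel_mask)(unsigned long fuses);
    unsigned long (*get_ae_mask)(unsigned long fuses);
};

/* Hypothetical callbacks; the real driver derives the masks from
 * fuse registers read off the PCI device. */
static unsigned long c62xvf_get_accel_mask(unsigned long fuses)
{
    return ~fuses & 0x3;
}

static unsigned long c62xvf_get_ae_mask(unsigned long fuses)
{
    return ~fuses & 0x1;
}

/* Mirrors the shape of adf_init_hw_data_c62xiov(): the variant
 * fills in its own table. */
static void init_hw_data_c62xvf(struct hw_device_data *hw_data)
{
    hw_data->dev_class_name = "c62xvf";
    hw_data->num_banks = 1;
    hw_data->num_engines = 1;
    hw_data->get_accel_mask = c62xvf_get_accel_mask;
    hw_data->get_ae_mask = c62xvf_get_ae_mask;
}

/* Mirrors the shape of adf_probe(): allocate the table, let the
 * variant fill it, then consult only the table afterwards. */
static int probe(unsigned long fuses)
{
    struct hw_device_data *hw_data = calloc(1, sizeof(*hw_data));

    if (!hw_data)
        return -1;

    init_hw_data_c62xvf(hw_data);
    hw_data->accel_mask = hw_data->get_accel_mask(fuses);
    hw_data->ae_mask = hw_data->get_ae_mask(fuses);

    printf("%s: accel_mask=%#lx ae_mask=%#lx\n", hw_data->dev_class_name,
           hw_data->accel_mask, hw_data->ae_mask);
    free(hw_data);
    return 0;
}

int main(void)
{
    return probe(0x0);
}

The payoff visible in the adf_drv.c matches is that the common probe never branches on device type; it only dereferences the table it was handed.
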
/drivers/video/backlight/
apple_bl.c
34 struct hw_data { struct
43 static const struct hw_data *hw_data; argument
84 static const struct hw_data intel_chipset_data = {
129 static const struct hw_data nvidia_chipset_data = {
154 hw_data = &intel_chipset_data; in apple_bl_add()
156 hw_data = &nvidia_chipset_data; in apple_bl_add()
160 if (!hw_data) { in apple_bl_add()
167 intensity = hw_data->backlight_ops.get_brightness(NULL); in apple_bl_add()
170 hw_data->set_brightness(1); in apple_bl_add()
171 if (!hw_data->backlight_ops.get_brightness(NULL)) in apple_bl_add()
[all …]
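
apple_bl.c attaches the same name to a different idiom: hw_data is a file-scope pointer to one of two static const tables (intel_chipset_data or nvidia_chipset_data), chosen once in apple_bl_add(), and the rest of the driver works only through that pointer. A hedged sketch of the selection step follows; the brightness callbacks below are invented stand-ins for the real chipset register accesses.

#include <stdio.h>

/* Stand-in for apple_bl's struct hw_data: a small table of ops per
 * backlight controller. */
struct hw_data {
    const char *name;
    int (*get_brightness)(void);
    void (*set_brightness)(int level);
};

/* Invented callbacks; the real driver pokes chipset registers. */
static int fake_level;
static int fake_get(void) { return fake_level; }
static void fake_set(int level) { fake_level = level; }

static const struct hw_data intel_chipset_data = {
    .name = "intel",
    .get_brightness = fake_get,
    .set_brightness = fake_set,
};

static const struct hw_data nvidia_chipset_data = {
    .name = "nvidia",
    .get_brightness = fake_get,  /* same fakes, different table */
    .set_brightness = fake_set,
};

/* Only this pointer is used after probing, as in apple_bl_add(). */
static const struct hw_data *hw_data;

static int apple_bl_add(int is_intel)
{
    if (is_intel)
        hw_data = &intel_chipset_data;
    else
        hw_data = &nvidia_chipset_data;

    if (!hw_data)
        return -1;

    /* If the panel reports 0, poke it once, echoing the
     * set_brightness(1) step in the original probe. */
    if (!hw_data->get_brightness())
        hw_data->set_brightness(1);

    printf("%s backlight at level %d\n", hw_data->name,
           hw_data->get_brightness());
    return 0;
}

int main(void)
{
    return apple_bl_add(1);
}

Keeping the tables const and selecting between them once at probe time is what lets the rest of the module stay free of per-GPU branches.
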
/drivers/crypto/qat/qat_common/
adf_pf2vf_msg.c
62 struct adf_hw_device_data *hw_data = accel_dev->hw_device; in adf_enable_pf2vf_interrupts() local
64 pci_info->pci_bars[hw_data->get_misc_bar_id(hw_data)].virt_addr; in adf_enable_pf2vf_interrupts()
66 ADF_CSR_WR(pmisc_bar_addr, hw_data->get_vintmsk_offset(0), 0x0); in adf_enable_pf2vf_interrupts()
72 struct adf_hw_device_data *hw_data = accel_dev->hw_device; in adf_disable_pf2vf_interrupts() local
74 pci_info->pci_bars[hw_data->get_misc_bar_id(hw_data)].virt_addr; in adf_disable_pf2vf_interrupts()
76 ADF_CSR_WR(pmisc_bar_addr, hw_data->get_vintmsk_offset(0), 0x2); in adf_disable_pf2vf_interrupts()
82 struct adf_hw_device_data *hw_data = accel_dev->hw_device; in adf_enable_vf2pf_interrupts() local
84 &GET_BARS(accel_dev)[hw_data->get_misc_bar_id(hw_data)]; in adf_enable_vf2pf_interrupts()
105 struct adf_hw_device_data *hw_data = accel_dev->hw_device; in adf_disable_vf2pf_interrupts() local
107 &GET_BARS(accel_dev)[hw_data->get_misc_bar_id(hw_data)]; in adf_disable_vf2pf_interrupts()
[all …]
adf_init.c
107 struct adf_hw_device_data *hw_data = accel_dev->hw_device; in adf_dev_init() local
109 if (!hw_data) { in adf_dev_init()
125 if (hw_data->init_admin_comms && hw_data->init_admin_comms(accel_dev)) { in adf_dev_init()
130 if (hw_data->init_arb && hw_data->init_arb(accel_dev)) { in adf_dev_init()
135 hw_data->enable_ints(accel_dev); in adf_dev_init()
151 if (hw_data->alloc_irq(accel_dev)) { in adf_dev_init()
173 hw_data->enable_error_correction(accel_dev); in adf_dev_init()
174 hw_data->enable_vf2pf_comms(accel_dev); in adf_dev_init()
192 struct adf_hw_device_data *hw_data = accel_dev->hw_device; in adf_dev_start() local
204 if (hw_data->send_admin_init(accel_dev)) { in adf_dev_start()
[all …]
adf_isr.c
65 struct adf_hw_device_data *hw_data = accel_dev->hw_device; in adf_enable_msix() local
72 msix_num_entries += hw_data->num_banks; in adf_enable_msix()
77 hw_data->num_banks; in adf_enable_msix()
110 struct adf_hw_device_data *hw_data = accel_dev->hw_device; in adf_msix_isr_ae() local
112 &GET_BARS(accel_dev)[hw_data->get_misc_bar_id(hw_data)]; in adf_msix_isr_ae()
166 struct adf_hw_device_data *hw_data = accel_dev->hw_device; in adf_request_irqs() local
174 for (i = 0; i < hw_data->num_banks; i++) { in adf_request_irqs()
190 cpu = ((accel_dev->accel_id * hw_data->num_banks) + in adf_request_irqs()
214 struct adf_hw_device_data *hw_data = accel_dev->hw_device; in adf_free_irqs() local
220 for (i = 0; i < hw_data->num_banks; i++) { in adf_free_irqs()
[all …]
adf_hw_arbiter.c
81 struct adf_hw_device_data *hw_data = accel_dev->hw_device; in adf_init_arb() local
93 for (i = 0; i < hw_data->num_engines; i++) in adf_init_arb()
97 hw_data->get_arb_mapping(accel_dev, &thd_2_arb_cfg); in adf_init_arb()
102 for (i = 0; i < hw_data->num_engines; i++) in adf_init_arb()
118 struct adf_hw_device_data *hw_data = accel_dev->hw_device; in adf_exit_arb() local
132 for (i = 0; i < hw_data->num_engines; i++) in adf_exit_arb()
136 for (i = 0; i < hw_data->num_engines; i++) in adf_exit_arb()
adf_vf_isr.c
118 struct adf_hw_device_data *hw_data = accel_dev->hw_device; in adf_pf2vf_bh_handler() local
120 &GET_BARS(accel_dev)[hw_data->get_misc_bar_id(hw_data)]; in adf_pf2vf_bh_handler()
125 msg = ADF_CSR_RD(pmisc_bar_addr, hw_data->get_pf2vf_offset(0)); in adf_pf2vf_bh_handler()
152 ADF_CSR_WR(pmisc_bar_addr, hw_data->get_pf2vf_offset(0), msg); in adf_pf2vf_bh_handler()
172 ADF_CSR_WR(pmisc_bar_addr, hw_data->get_pf2vf_offset(0), msg); in adf_pf2vf_bh_handler()
202 struct adf_hw_device_data *hw_data = accel_dev->hw_device; in adf_isr() local
204 &GET_BARS(accel_dev)[hw_data->get_misc_bar_id(hw_data)]; in adf_isr()
adf_ctl_drv.c
389 struct adf_hw_device_data *hw_data; in adf_ctl_ioctl_get_status() local
403 hw_data = accel_dev->hw_device; in adf_ctl_ioctl_get_status()
405 dev_info.num_ae = hw_data->get_num_aes(hw_data); in adf_ctl_ioctl_get_status()
406 dev_info.num_accel = hw_data->get_num_accels(hw_data); in adf_ctl_ioctl_get_status()
407 dev_info.num_logical_accel = hw_data->num_logical_accel; in adf_ctl_ioctl_get_status()
408 dev_info.banks_per_accel = hw_data->num_banks in adf_ctl_ioctl_get_status()
409 / hw_data->num_logical_accel; in adf_ctl_ioctl_get_status()
410 strlcpy(dev_info.name, hw_data->dev_class->name, sizeof(dev_info.name)); in adf_ctl_ioctl_get_status()
411 dev_info.instance_id = hw_data->instance_id; in adf_ctl_ioctl_get_status()
412 dev_info.type = hw_data->dev_class->type; in adf_ctl_ioctl_get_status()
adf_accel_engine.c
120 struct adf_hw_device_data *hw_data = accel_dev->hw_device; in adf_ae_start() local
123 if (!hw_data->fw_name) in adf_ae_start()
127 if (hw_data->ae_mask & (1 << ae)) { in adf_ae_start()
141 struct adf_hw_device_data *hw_data = accel_dev->hw_device; in adf_ae_stop() local
144 if (!hw_data->fw_name) in adf_ae_stop()
148 if (hw_data->ae_mask & (1 << ae)) { in adf_ae_stop()
adf_transport.c
182 struct adf_hw_device_data *hw_data = accel_dev->hw_device; in adf_init_ring() local
203 if (hw_data->tx_rings_mask & (1 << ring->ring_number)) in adf_init_ring()
389 struct adf_hw_device_data *hw_data = accel_dev->hw_device; in adf_init_bank() local
414 if (hw_data->tx_rings_mask & (1 << i)) { in adf_init_bank()
422 if (i < hw_data->tx_rx_gap) { in adf_init_bank()
427 tx_ring = &bank->rings[i - hw_data->tx_rx_gap]; in adf_init_bank()
443 if (hw_data->tx_rings_mask & (1 << i)) in adf_init_bank()
462 struct adf_hw_device_data *hw_data = accel_dev->hw_device; in adf_init_etr_data() local
483 i = hw_data->get_etr_bar_id(hw_data); in adf_init_etr_data()
522 struct adf_hw_device_data *hw_data = accel_dev->hw_device; in cleanup_bank() local
[all …]
adf_sriov.c
114 struct adf_hw_device_data *hw_data = accel_dev->hw_device; in adf_enable_sriov() local
116 &GET_BARS(accel_dev)[hw_data->get_misc_bar_id(hw_data)]; in adf_enable_sriov()
173 struct adf_hw_device_data *hw_data = accel_dev->hw_device; in adf_disable_sriov() local
175 &GET_BARS(accel_dev)[hw_data->get_misc_bar_id(hw_data)]; in adf_disable_sriov()
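
The qat_common matches show the consumer side of the table: generic code never hard-codes a BAR index or an engine count, it asks the table and passes the table itself back into the accessor (hw_data->get_misc_bar_id(hw_data), hw_data->get_num_aes(hw_data), and so on). A minimal sketch of that accessor idiom follows; the field names and numeric values here are illustrative, not taken from the driver.

#include <stdio.h>

/* Accessors take the table itself, so one callback can serve several
 * variants that store the answer differently. */
struct hw_device_data {
    unsigned int num_engines;
    unsigned int misc_bar_id;
    unsigned int (*get_misc_bar_id)(struct hw_device_data *self);
    unsigned int (*get_num_aes)(struct hw_device_data *self);
};

static unsigned int get_misc_bar_id(struct hw_device_data *self)
{
    return self->misc_bar_id;
}

static unsigned int get_num_aes(struct hw_device_data *self)
{
    return self->num_engines;
}

/* Generic code in the spirit of adf_enable_pf2vf_interrupts() or
 * adf_ctl_ioctl_get_status(): every answer comes from the table. */
static void report(struct hw_device_data *hw_data)
{
    printf("misc BAR %u, %u acceleration engines\n",
           hw_data->get_misc_bar_id(hw_data),
           hw_data->get_num_aes(hw_data));
}

int main(void)
{
    struct hw_device_data dev = {
        .num_engines = 12,   /* hypothetical values */
        .misc_bar_id = 1,
        .get_misc_bar_id = get_misc_bar_id,
        .get_num_aes = get_num_aes,
    };

    report(&dev);
    return 0;
}
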
/drivers/clk/uniphier/
clk-uniphier-core.c
54 struct clk_hw_onecell_data *hw_data; in uniphier_clk_probe() local
76 hw_data = devm_kzalloc(dev, in uniphier_clk_probe()
77 sizeof(*hw_data) + clk_num * sizeof(struct clk_hw *), in uniphier_clk_probe()
79 if (!hw_data) in uniphier_clk_probe()
82 hw_data->num = clk_num; in uniphier_clk_probe()
86 hw_data->hws[clk_num] = ERR_PTR(-EINVAL); in uniphier_clk_probe()
97 hw_data->hws[p->idx] = hw; in uniphier_clk_probe()
101 hw_data); in uniphier_clk_probe()
/drivers/clk/mvebu/
armada-37xx-periph.c
37 struct clk_hw_onecell_data *hw_data; member
394 driver_data->hw_data = devm_kzalloc(dev, sizeof(*driver_data->hw_data) + in armada_3700_periph_clock_probe()
395 sizeof(*driver_data->hw_data->hws) * num_periph, in armada_3700_periph_clock_probe()
397 if (!driver_data->hw_data) in armada_3700_periph_clock_probe()
399 driver_data->hw_data->num = num_periph; in armada_3700_periph_clock_probe()
404 struct clk_hw **hw = &driver_data->hw_data->hws[i]; in armada_3700_periph_clock_probe()
414 driver_data->hw_data); in armada_3700_periph_clock_probe()
417 clk_hw_unregister(driver_data->hw_data->hws[i]); in armada_3700_periph_clock_probe()
428 struct clk_hw_onecell_data *hw_data = data->hw_data; in armada_3700_periph_clock_remove() local
433 for (i = 0; i < hw_data->num; i++) in armada_3700_periph_clock_remove()
[all …]
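
In both clock drivers hw_data is a struct clk_hw_onecell_data, which ends in a flexible array of clk_hw pointers; uniphier_clk_probe() and armada_3700_periph_clock_probe() both size the allocation as sizeof(*hw_data) plus one pointer per clock and then fill hws[] by index. Below is a simplified user-space sketch of that allocation pattern, with calloc() standing in for devm_kzalloc() and plain stand-in structs in place of the clk API.

#include <stdio.h>
#include <stdlib.h>

/* Stand-in for struct clk_hw. */
struct clk_hw {
    const char *name;
};

/* Stand-in for struct clk_hw_onecell_data: a count followed by a
 * flexible array of per-clock pointers. */
struct clk_hw_onecell_data {
    unsigned int num;
    struct clk_hw *hws[];
};

int main(void)
{
    static struct clk_hw clocks[] = {
        { "cpu" }, { "ddr" }, { "uart" }, { "spi" },  /* hypothetical */
    };
    unsigned int clk_num = sizeof(clocks) / sizeof(clocks[0]);
    unsigned int i;

    /* Size the allocation as both probes do: header plus clk_num
     * pointers, then record the count. */
    struct clk_hw_onecell_data *hw_data =
        calloc(1, sizeof(*hw_data) + clk_num * sizeof(struct clk_hw *));
    if (!hw_data)
        return 1;

    hw_data->num = clk_num;
    for (i = 0; i < clk_num; i++)
        hw_data->hws[i] = &clocks[i];

    for (i = 0; i < hw_data->num; i++)
        printf("clk %u: %s\n", i, hw_data->hws[i]->name);

    free(hw_data);
    return 0;
}

Recording the count in hw_data->num is what lets the armada-37xx remove path simply loop over hw_data->num entries, as the match at line 433 above shows.
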
/drivers/gpu/drm/amd/powerplay/hwmgr/
cz_hwmgr.c
844 struct cz_hwmgr *hw_data = (struct cz_hwmgr *)(hwmgr->backend); in cz_nbdpm_pstate_enable_disable() local
846 if (hw_data->is_nb_dpm_enabled) { in cz_nbdpm_pstate_enable_disable()
871 struct cz_hwmgr *hw_data = (struct cz_hwmgr *)(hwmgr->backend); in cz_tf_update_low_mem_pstate() local
875 if (hw_data->sys_info.nb_dpm_enable) { in cz_tf_update_low_mem_pstate()
876 disable_switch = hw_data->cc6_settings.nb_pstate_switch_disable ? true : false; in cz_tf_update_low_mem_pstate()
877 enable_low_mem_state = hw_data->cc6_settings.nb_pstate_switch_disable ? false : true; in cz_tf_update_low_mem_pstate()
927 struct cz_hwmgr *hw_data = (struct cz_hwmgr *)(hwmgr->backend); in cz_tf_power_up_display_clock_sys_pll() local
928 hw_data->disp_clk_bypass_pending = false; in cz_tf_power_up_display_clock_sys_pll()
929 hw_data->disp_clk_bypass = false; in cz_tf_power_up_display_clock_sys_pll()
938 struct cz_hwmgr *hw_data = (struct cz_hwmgr *)(hwmgr->backend); in cz_tf_clear_nb_dpm_flag() local
[all …]
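
In the powerplay matches, hw_data is simply the driver-private backend: cz_hwmgr.c keeps its state behind the generic hwmgr's void *backend pointer and casts it back at each call site. A small sketch of that opaque-backend idiom, reduced to fields visible in the matches; everything else here is hypothetical.

#include <stdbool.h>
#include <stdio.h>

/* Generic manager: the backend is opaque to common code. */
struct hwmgr {
    void *backend;
};

/* Driver-private state, recovered by a cast as in cz_hwmgr.c. */
struct cz_hwmgr {
    bool is_nb_dpm_enabled;
    bool disp_clk_bypass_pending;
};

static void cz_nbdpm_pstate_enable_disable(struct hwmgr *hwmgr, bool enable)
{
    struct cz_hwmgr *hw_data = (struct cz_hwmgr *)hwmgr->backend;

    if (hw_data->is_nb_dpm_enabled)
        printf("NB DPM pstate %s\n", enable ? "enable" : "disable");
}

int main(void)
{
    struct cz_hwmgr data = { .is_nb_dpm_enabled = true };
    struct hwmgr mgr = { .backend = &data };

    cz_nbdpm_pstate_enable_disable(&mgr, true);
    return 0;
}
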
