/drivers/gpu/drm/radeon/ |
D | radeon_asic.h |
    34  uint32_t radeon_legacy_get_engine_clock(struct radeon_device *rdev);
    35  void radeon_legacy_set_engine_clock(struct radeon_device *rdev, uint32_t eng_clock);
    36  uint32_t radeon_legacy_get_memory_clock(struct radeon_device *rdev);
    37  void radeon_legacy_set_clock_gating(struct radeon_device *rdev, int enable);
    39  uint32_t radeon_atom_get_engine_clock(struct radeon_device *rdev);
    40  void radeon_atom_set_engine_clock(struct radeon_device *rdev, uint32_t eng_clock);
    41  uint32_t radeon_atom_get_memory_clock(struct radeon_device *rdev);
    42  void radeon_atom_set_memory_clock(struct radeon_device *rdev, uint32_t mem_clock);
    43  void radeon_atom_set_clock_gating(struct radeon_device *rdev, int enable);
    61  int r100_init(struct radeon_device *rdev);
    [all …]
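The header above declares two parallel families of clock accessors: radeon_legacy_* for older boards programmed through COMBIOS tables and radeon_atom_* for boards with an AtomBIOS. Below is a minimal sketch of choosing between them; the has_atom_bios flag and the wrapper name are assumptions for illustration only, since the upstream driver actually routes these calls through per-ASIC function-pointer tables filled in at init time.

/*
 * Hedged sketch: pick the AtomBIOS or legacy (COMBIOS) engine-clock accessor.
 * `has_atom_bios` is a stand-in flag; the real driver dispatches through a
 * per-ASIC table of function pointers instead of branching like this.
 */
#include <stdint.h>
#include <stdbool.h>

struct radeon_device;                        /* opaque here; defined in radeon.h */

uint32_t radeon_legacy_get_engine_clock(struct radeon_device *rdev);
uint32_t radeon_atom_get_engine_clock(struct radeon_device *rdev);

static uint32_t get_engine_clock(struct radeon_device *rdev, bool has_atom_bios)
{
	return has_atom_bios ? radeon_atom_get_engine_clock(rdev)
			     : radeon_legacy_get_engine_clock(rdev);
}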
|
D | radeon_pm.c |
    45  static int radeon_debugfs_pm_init(struct radeon_device *rdev);
    46  static bool radeon_pm_in_vbl(struct radeon_device *rdev);
    47  static bool radeon_pm_debug_check_in_vbl(struct radeon_device *rdev, bool finish);
    48  static void radeon_pm_update_profile(struct radeon_device *rdev);
    49  static void radeon_pm_set_clocks(struct radeon_device *rdev);
    51  int radeon_pm_get_type_index(struct radeon_device *rdev, in radeon_pm_get_type_index() argument
    58  for (i = 0; i < rdev->pm.num_power_states; i++) { in radeon_pm_get_type_index()
    59  if (rdev->pm.power_state[i].type == ps_type) { in radeon_pm_get_type_index()
    66  return rdev->pm.default_power_state_index; in radeon_pm_get_type_index()
    69  void radeon_pm_acpi_event_handler(struct radeon_device *rdev) in radeon_pm_acpi_event_handler() argument
    [all …]
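radeon_pm_get_type_index(), partially visible above, scans the array of parsed power states for one of the requested type and falls back to the default index when nothing matches. A simplified, standalone sketch of that lookup follows; the enum and struct are illustrative stand-ins rather than the driver's real definitions, and the real function additionally takes an instance argument to pick the n-th match.

/* Return the index of the first state of the wanted type, else a default. */
enum ps_type { PS_TYPE_DEFAULT, PS_TYPE_BATTERY, PS_TYPE_PERFORMANCE };

struct power_state {
	enum ps_type type;
	/* clocks, voltages, flags ... elided */
};

static int get_type_index(const struct power_state *states, int num_states,
			  enum ps_type wanted, int default_index)
{
	int i;

	for (i = 0; i < num_states; i++)
		if (states[i].type == wanted)
			return i;

	return default_index;
}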
|
D | r420.c |
    39  void r420_pm_init_profile(struct radeon_device *rdev) in r420_pm_init_profile() argument
    42  rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index; in r420_pm_init_profile()
    43  rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index; in r420_pm_init_profile()
    44  rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0; in r420_pm_init_profile()
    45  rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 0; in r420_pm_init_profile()
    47  rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = 0; in r420_pm_init_profile()
    48  rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = 0; in r420_pm_init_profile()
    49  rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0; in r420_pm_init_profile()
    50  rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0; in r420_pm_init_profile()
    52  rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = 0; in r420_pm_init_profile()
    [all …]
|
D | radeon_device.c |
    151  struct radeon_device *rdev = dev->dev_private; in radeon_is_px() local
    153  if (rdev->flags & RADEON_IS_PX) in radeon_is_px()
    158  static void radeon_device_handle_px_quirks(struct radeon_device *rdev) in radeon_device_handle_px_quirks() argument
    164  if (rdev->pdev->vendor == p->chip_vendor && in radeon_device_handle_px_quirks()
    165  rdev->pdev->device == p->chip_device && in radeon_device_handle_px_quirks()
    166  rdev->pdev->subsystem_vendor == p->subsys_vendor && in radeon_device_handle_px_quirks()
    167  rdev->pdev->subsystem_device == p->subsys_device) { in radeon_device_handle_px_quirks()
    168  rdev->px_quirk_flags = p->px_quirk_flags; in radeon_device_handle_px_quirks()
    174  if (rdev->px_quirk_flags & RADEON_PX_QUIRK_DISABLE_PX) in radeon_device_handle_px_quirks()
    175  rdev->flags &= ~RADEON_IS_PX; in radeon_device_handle_px_quirks()
    [all …]
|
D | r520.c |
    36  int r520_mc_wait_for_idle(struct radeon_device *rdev) in r520_mc_wait_for_idle() argument
    41  for (i = 0; i < rdev->usec_timeout; i++) { in r520_mc_wait_for_idle()
    52  static void r520_gpu_init(struct radeon_device *rdev) in r520_gpu_init() argument
    56  rv515_vga_render_disable(rdev); in r520_gpu_init()
    78  if (rdev->family == CHIP_RV530) { in r520_gpu_init()
    81  r420_pipes_init(rdev); in r520_gpu_init()
    88  if (r520_mc_wait_for_idle(rdev)) { in r520_gpu_init()
    93  static void r520_vram_get_type(struct radeon_device *rdev) in r520_vram_get_type() argument
    97  rdev->mc.vram_width = 128; in r520_vram_get_type()
    98  rdev->mc.vram_is_ddr = true; in r520_vram_get_type()
    [all …]
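r520_mc_wait_for_idle() above is one instance of a pattern that recurs throughout the driver (see also rs690_mc_wait_for_idle() further down): poll a status register roughly once per microsecond and give up after rdev->usec_timeout tries. A generic sketch follows; read_mc_status() and MC_IDLE are hypothetical stand-ins for the driver's real register-access macro and idle bit.

#include <linux/delay.h>
#include <linux/errno.h>

static int wait_for_mc_idle(struct radeon_device *rdev)
{
	unsigned int i;

	for (i = 0; i < rdev->usec_timeout; i++) {
		if (read_mc_status(rdev) & MC_IDLE)	/* hypothetical helpers */
			return 0;
		udelay(1);				/* poll once per microsecond */
	}

	return -ETIMEDOUT;				/* upstream helpers return -1 on timeout */
}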
|
D | radeon_irq_kms.c |
    51  struct radeon_device *rdev = dev->dev_private; in radeon_driver_irq_handler_kms() local
    54  ret = radeon_irq_process(rdev); in radeon_driver_irq_handler_kms()
    76  struct radeon_device *rdev = container_of(work, struct radeon_device, in radeon_hotplug_work_func() local
    78  struct drm_device *dev = rdev->ddev; in radeon_hotplug_work_func()
    84  if (!rdev->mode_info.mode_config_initialized) in radeon_hotplug_work_func()
    97  struct radeon_device *rdev = container_of(work, struct radeon_device, in radeon_dp_work_func() local
    99  struct drm_device *dev = rdev->ddev; in radeon_dp_work_func()
    117  struct radeon_device *rdev = dev->dev_private; in radeon_driver_irq_preinstall_kms() local
    121  spin_lock_irqsave(&rdev->irq.lock, irqflags); in radeon_driver_irq_preinstall_kms()
    124  atomic_set(&rdev->irq.ring_int[i], 0); in radeon_driver_irq_preinstall_kms()
    [all …]
|
D | r600.c |
    100  int r600_debugfs_mc_info_init(struct radeon_device *rdev);
    103  int r600_mc_wait_for_idle(struct radeon_device *rdev);
    104  static void r600_gpu_init(struct radeon_device *rdev);
    105  void r600_fini(struct radeon_device *rdev);
    106  void r600_irq_disable(struct radeon_device *rdev);
    107  static void r600_pcie_gen2_enable(struct radeon_device *rdev);
    108  extern int evergreen_rlc_resume(struct radeon_device *rdev);
    109  extern void rv770_set_clk_bypass_mode(struct radeon_device *rdev);
    114  u32 r600_rcu_rreg(struct radeon_device *rdev, u32 reg) in r600_rcu_rreg() argument
    119  spin_lock_irqsave(&rdev->rcu_idx_lock, flags); in r600_rcu_rreg()
    [all …]
|
D | rs400.c |
    36  static int rs400_debugfs_pcie_gart_info_init(struct radeon_device *rdev);
    38  void rs400_gart_adjust_size(struct radeon_device *rdev) in rs400_gart_adjust_size() argument
    41  switch (rdev->mc.gtt_size/(1024*1024)) { in rs400_gart_adjust_size()
    52  (unsigned)(rdev->mc.gtt_size >> 20)); in rs400_gart_adjust_size()
    55  rdev->mc.gtt_size = 32 * 1024 * 1024; in rs400_gart_adjust_size()
    60  void rs400_gart_tlb_flush(struct radeon_device *rdev) in rs400_gart_tlb_flush() argument
    63  unsigned int timeout = rdev->usec_timeout; in rs400_gart_tlb_flush()
    76  int rs400_gart_init(struct radeon_device *rdev) in rs400_gart_init() argument
    80  if (rdev->gart.ptr) { in rs400_gart_init()
    85  switch(rdev->mc.gtt_size / (1024 * 1024)) { in rs400_gart_init()
    [all …]
|
D | rv770.c |
    42  static void rv770_gpu_init(struct radeon_device *rdev);
    43  void rv770_fini(struct radeon_device *rdev);
    44  static void rv770_pcie_gen2_enable(struct radeon_device *rdev);
    45  int evergreen_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk);
    47  int rv770_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk) in rv770_set_uvd_clocks() argument
    53  if (rdev->family == CHIP_RV740) in rv770_set_uvd_clocks()
    54  return evergreen_set_uvd_clocks(rdev, vclk, dclk); in rv770_set_uvd_clocks()
    67  r = radeon_uvd_calc_upll_dividers(rdev, vclk, dclk, 50000, 160000, in rv770_set_uvd_clocks()
    87  r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL); in rv770_set_uvd_clocks()
    118  r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL); in rv770_set_uvd_clocks()
    [all …]
|
D | ni.c |
    42  u32 tn_smc_rreg(struct radeon_device *rdev, u32 reg) in tn_smc_rreg() argument
    47  spin_lock_irqsave(&rdev->smc_idx_lock, flags); in tn_smc_rreg()
    50  spin_unlock_irqrestore(&rdev->smc_idx_lock, flags); in tn_smc_rreg()
    54  void tn_smc_wreg(struct radeon_device *rdev, u32 reg, u32 v) in tn_smc_wreg() argument
    58  spin_lock_irqsave(&rdev->smc_idx_lock, flags); in tn_smc_wreg()
    61  spin_unlock_irqrestore(&rdev->smc_idx_lock, flags); in tn_smc_wreg()
    190  extern bool evergreen_is_display_hung(struct radeon_device *rdev);
    191  extern void evergreen_print_gpu_status_regs(struct radeon_device *rdev);
    192  extern void evergreen_mc_stop(struct radeon_device *rdev, struct evergreen_mc_save *save);
    193  extern void evergreen_mc_resume(struct radeon_device *rdev, struct evergreen_mc_save *save);
    [all …]
|
D | radeon_gart.c |
    68  int radeon_gart_table_ram_alloc(struct radeon_device *rdev) in radeon_gart_table_ram_alloc() argument
    72  ptr = pci_alloc_consistent(rdev->pdev, rdev->gart.table_size, in radeon_gart_table_ram_alloc()
    73  &rdev->gart.table_addr); in radeon_gart_table_ram_alloc()
    78  if (rdev->family == CHIP_RS400 || rdev->family == CHIP_RS480 || in radeon_gart_table_ram_alloc()
    79  rdev->family == CHIP_RS690 || rdev->family == CHIP_RS740) { in radeon_gart_table_ram_alloc()
    81  rdev->gart.table_size >> PAGE_SHIFT); in radeon_gart_table_ram_alloc()
    84  rdev->gart.ptr = ptr; in radeon_gart_table_ram_alloc()
    85  memset((void *)rdev->gart.ptr, 0, rdev->gart.table_size); in radeon_gart_table_ram_alloc()
    98  void radeon_gart_table_ram_free(struct radeon_device *rdev) in radeon_gart_table_ram_free() argument
    100  if (rdev->gart.ptr == NULL) { in radeon_gart_table_ram_free()
    [all …]
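radeon_gart_table_ram_alloc() above keeps the GART page table in system RAM obtained from the (legacy) coherent PCI DMA allocator, so the CPU and the GPU share one coherent copy, and the table is zeroed so it starts out empty. A reduced sketch of that allocation step is below: field names mirror the listing, error handling is trimmed, and newer kernels would use dma_alloc_coherent() instead of the pci_alloc_consistent() wrapper.

#include <linux/pci.h>
#include <linux/string.h>

static int gart_table_ram_alloc(struct pci_dev *pdev, size_t table_size,
				void **table_ptr, dma_addr_t *table_addr)
{
	/* CPU virtual address; *table_addr receives the bus address the GPU uses. */
	void *ptr = pci_alloc_consistent(pdev, table_size, table_addr);

	if (ptr == NULL)
		return -ENOMEM;

	memset(ptr, 0, table_size);	/* empty table: no pages mapped yet */
	*table_ptr = ptr;
	return 0;
}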
|
D | radeon.h |
    246  bool radeon_get_bios(struct radeon_device *rdev);
    256  int radeon_dummy_page_init(struct radeon_device *rdev);
    257  void radeon_dummy_page_fini(struct radeon_device *rdev);
    282  int radeon_pm_init(struct radeon_device *rdev);
    283  int radeon_pm_late_init(struct radeon_device *rdev);
    284  void radeon_pm_fini(struct radeon_device *rdev);
    285  void radeon_pm_compute_clocks(struct radeon_device *rdev);
    286  void radeon_pm_suspend(struct radeon_device *rdev);
    287  void radeon_pm_resume(struct radeon_device *rdev);
    288  void radeon_combios_get_power_modes(struct radeon_device *rdev);
    [all …]
|
D | rs600.c |
    47  static void rs600_gpu_init(struct radeon_device *rdev);
    48  int rs600_mc_wait_for_idle(struct radeon_device *rdev);
    56  static bool avivo_is_in_vblank(struct radeon_device *rdev, int crtc) in avivo_is_in_vblank() argument
    64  static bool avivo_is_counter_moving(struct radeon_device *rdev, int crtc) in avivo_is_counter_moving() argument
    85  void avivo_wait_for_vblank(struct radeon_device *rdev, int crtc) in avivo_wait_for_vblank() argument
    89  if (crtc >= rdev->num_crtc) in avivo_wait_for_vblank()
    98  while (avivo_is_in_vblank(rdev, crtc)) { in avivo_wait_for_vblank()
    100  if (!avivo_is_counter_moving(rdev, crtc)) in avivo_wait_for_vblank()
    105  while (!avivo_is_in_vblank(rdev, crtc)) { in avivo_wait_for_vblank()
    107  if (!avivo_is_counter_moving(rdev, crtc)) in avivo_wait_for_vblank()
    [all …]
|
D | evergreen.c |
    45  u32 eg_cg_rreg(struct radeon_device *rdev, u32 reg) in eg_cg_rreg() argument
    50  spin_lock_irqsave(&rdev->cg_idx_lock, flags); in eg_cg_rreg()
    53  spin_unlock_irqrestore(&rdev->cg_idx_lock, flags); in eg_cg_rreg()
    57  void eg_cg_wreg(struct radeon_device *rdev, u32 reg, u32 v) in eg_cg_wreg() argument
    61  spin_lock_irqsave(&rdev->cg_idx_lock, flags); in eg_cg_wreg()
    64  spin_unlock_irqrestore(&rdev->cg_idx_lock, flags); in eg_cg_wreg()
    67  u32 eg_pif_phy0_rreg(struct radeon_device *rdev, u32 reg) in eg_pif_phy0_rreg() argument
    72  spin_lock_irqsave(&rdev->pif_idx_lock, flags); in eg_pif_phy0_rreg()
    75  spin_unlock_irqrestore(&rdev->pif_idx_lock, flags); in eg_pif_phy0_rreg()
    79  void eg_pif_phy0_wreg(struct radeon_device *rdev, u32 reg, u32 v) in eg_pif_phy0_wreg() argument
    [all …]
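The eg_cg_*() and eg_pif_phy0_*() accessors above all follow the same indexed-register pattern: the hardware exposes one index/data register pair per block, so the index write and the data access must be serialized against other users of that pair, which is what the irqsave spinlocks (cg_idx_lock, pif_idx_lock, and the smc_idx_lock seen in ni.c) are for. A hedged sketch of the read side follows; write_reg()/read_reg() and the index/data register parameters stand in for the driver's WREG32/RREG32 macros and real register offsets.

#include <linux/spinlock.h>
#include <linux/types.h>

static u32 indexed_rreg(struct radeon_device *rdev, spinlock_t *idx_lock,
			u32 index_reg, u32 data_reg, u32 reg)
{
	unsigned long flags;
	u32 val;

	spin_lock_irqsave(idx_lock, flags);
	write_reg(rdev, index_reg, reg);	/* select the indirect register */
	val = read_reg(rdev, data_reg);		/* ... then fetch its value     */
	spin_unlock_irqrestore(idx_lock, flags);

	return val;
}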
|
D | rv6xx_dpm.c |
    34  static u32 rv6xx_scale_count_given_unit(struct radeon_device *rdev,
    44  static struct rv6xx_power_info *rv6xx_get_pi(struct radeon_device *rdev) in rv6xx_get_pi() argument
    46  struct rv6xx_power_info *pi = rdev->pm.dpm.priv; in rv6xx_get_pi()
    51  static void rv6xx_force_pcie_gen1(struct radeon_device *rdev) in rv6xx_force_pcie_gen1() argument
    64  for (i = 0; i < rdev->usec_timeout; i++) { in rv6xx_force_pcie_gen1()
    75  static void rv6xx_enable_pcie_gen2_support(struct radeon_device *rdev) in rv6xx_enable_pcie_gen2_support() argument
    88  static void rv6xx_enable_bif_dynamic_pcie_gen2(struct radeon_device *rdev, in rv6xx_enable_bif_dynamic_pcie_gen2() argument
    101  static void rv6xx_enable_l0s(struct radeon_device *rdev) in rv6xx_enable_l0s() argument
    110  static void rv6xx_enable_l1(struct radeon_device *rdev) in rv6xx_enable_l1() argument
    122  static void rv6xx_enable_pll_sleep_in_l1(struct radeon_device *rdev) in rv6xx_enable_pll_sleep_in_l1() argument
    [all …]
|
D | radeon_kms.c |
    58  struct radeon_device *rdev = dev->dev_private; in radeon_driver_unload_kms() local
    60  if (rdev == NULL) in radeon_driver_unload_kms()
    63  if (rdev->rmmio == NULL) in radeon_driver_unload_kms()
    71  radeon_kfd_device_fini(rdev); in radeon_driver_unload_kms()
    73  radeon_acpi_fini(rdev); in radeon_driver_unload_kms()
    75  radeon_modeset_fini(rdev); in radeon_driver_unload_kms()
    76  radeon_device_fini(rdev); in radeon_driver_unload_kms()
    79  kfree(rdev); in radeon_driver_unload_kms()
    98  struct radeon_device *rdev; in radeon_driver_load_kms() local
    126  rdev = kzalloc(sizeof(struct radeon_device), GFP_KERNEL); in radeon_driver_load_kms()
    [all …]
|
D | radeon_fence.c |
    62  static void radeon_fence_write(struct radeon_device *rdev, u32 seq, int ring) in radeon_fence_write() argument
    64  struct radeon_fence_driver *drv = &rdev->fence_drv[ring]; in radeon_fence_write()
    65  if (likely(rdev->wb.enabled || !drv->scratch_reg)) { in radeon_fence_write()
    83  static u32 radeon_fence_read(struct radeon_device *rdev, int ring) in radeon_fence_read() argument
    85  struct radeon_fence_driver *drv = &rdev->fence_drv[ring]; in radeon_fence_read()
    88  if (likely(rdev->wb.enabled || !drv->scratch_reg)) { in radeon_fence_read()
    108  static void radeon_fence_schedule_check(struct radeon_device *rdev, int ring) in radeon_fence_schedule_check() argument
    115  &rdev->fence_drv[ring].lockup_work, in radeon_fence_schedule_check()
    129  int radeon_fence_emit(struct radeon_device *rdev, in radeon_fence_emit() argument
    140  (*fence)->rdev = rdev; in radeon_fence_emit()
    [all …]
|
D | rs690.c |
    35  int rs690_mc_wait_for_idle(struct radeon_device *rdev) in rs690_mc_wait_for_idle() argument
    40  for (i = 0; i < rdev->usec_timeout; i++) { in rs690_mc_wait_for_idle()
    50  static void rs690_gpu_init(struct radeon_device *rdev) in rs690_gpu_init() argument
    53  r420_pipes_init(rdev); in rs690_gpu_init()
    54  if (rs690_mc_wait_for_idle(rdev)) { in rs690_gpu_init()
    64  void rs690_pm_info(struct radeon_device *rdev) in rs690_pm_info() argument
    72  if (atom_parse_data_header(rdev->mode_info.atom_context, index, NULL, in rs690_pm_info()
    74  info = (union igp_info *)(rdev->mode_info.atom_context->bios + data_offset); in rs690_pm_info()
    80  rdev->pm.igp_sideport_mclk.full = dfixed_const(le32_to_cpu(info->info.ulBootUpMemoryClock)); in rs690_pm_info()
    81  rdev->pm.igp_sideport_mclk.full = dfixed_div(rdev->pm.igp_sideport_mclk, tmp); in rs690_pm_info()
    [all …]
|
D | cik.c |
    124  extern int r600_ih_ring_alloc(struct radeon_device *rdev);
    125  extern void r600_ih_ring_fini(struct radeon_device *rdev);
    126  extern void evergreen_mc_stop(struct radeon_device *rdev, struct evergreen_mc_save *save);
    127  extern void evergreen_mc_resume(struct radeon_device *rdev, struct evergreen_mc_save *save);
    128  extern bool evergreen_is_display_hung(struct radeon_device *rdev);
    129  extern void sumo_rlc_fini(struct radeon_device *rdev);
    130  extern int sumo_rlc_init(struct radeon_device *rdev);
    131  extern void si_vram_gtt_location(struct radeon_device *rdev, struct radeon_mc *mc);
    132  extern void si_rlc_reset(struct radeon_device *rdev);
    133  extern void si_init_uvd_internal_cg(struct radeon_device *rdev);
    [all …]
|
D | kv_dpm.c |
    36  static int kv_enable_nb_dpm(struct radeon_device *rdev,
    38  static void kv_init_graphics_levels(struct radeon_device *rdev);
    39  static int kv_calculate_ds_divider(struct radeon_device *rdev);
    40  static int kv_calculate_nbps_level_settings(struct radeon_device *rdev);
    41  static int kv_calculate_dpm_settings(struct radeon_device *rdev);
    42  static void kv_enable_new_levels(struct radeon_device *rdev);
    43  static void kv_program_nbps_index_settings(struct radeon_device *rdev,
    45  static int kv_set_enabled_level(struct radeon_device *rdev, u32 level);
    46  static int kv_set_enabled_levels(struct radeon_device *rdev);
    47  static int kv_force_dpm_highest(struct radeon_device *rdev);
    [all …]
|
D | rs780_dpm.c |
    41  static struct igp_power_info *rs780_get_pi(struct radeon_device *rdev) in rs780_get_pi() argument
    43  struct igp_power_info *pi = rdev->pm.dpm.priv; in rs780_get_pi()
    48  static void rs780_get_pm_mode_parameters(struct radeon_device *rdev) in rs780_get_pm_mode_parameters() argument
    50  struct igp_power_info *pi = rs780_get_pi(rdev); in rs780_get_pm_mode_parameters()
    51  struct radeon_mode_info *minfo = &rdev->mode_info; in rs780_get_pm_mode_parameters()
    60  for (i = 0; i < rdev->num_crtc; i++) { in rs780_get_pm_mode_parameters()
    72  static void rs780_voltage_scaling_enable(struct radeon_device *rdev, bool enable);
    74  static int rs780_initialize_dpm_power_state(struct radeon_device *rdev, in rs780_initialize_dpm_power_state() argument
    81  ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM, in rs780_initialize_dpm_power_state()
    86  r600_engine_clock_entry_set_reference_divider(rdev, 0, dividers.ref_div); in rs780_initialize_dpm_power_state()
    [all …]
|
/drivers/regulator/ |
D | core.c |
    42  #define rdev_crit(rdev, fmt, ...) \ argument
    43  pr_crit("%s: " fmt, rdev_get_name(rdev), ##__VA_ARGS__)
    44  #define rdev_err(rdev, fmt, ...) \ argument
    45  pr_err("%s: " fmt, rdev_get_name(rdev), ##__VA_ARGS__)
    46  #define rdev_warn(rdev, fmt, ...) \ argument
    47  pr_warn("%s: " fmt, rdev_get_name(rdev), ##__VA_ARGS__)
    48  #define rdev_info(rdev, fmt, ...) \ argument
    49  pr_info("%s: " fmt, rdev_get_name(rdev), ##__VA_ARGS__)
    50  #define rdev_dbg(rdev, fmt, ...) \ argument
    51  pr_debug("%s: " fmt, rdev_get_name(rdev), ##__VA_ARGS__)
    [all …]
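The rdev_*() macros above simply prefix an ordinary printk with the regulator's name obtained via rdev_get_name(), so call sites pass only the message and its arguments. A small usage sketch follows; the enable helper and its error path are illustrative, not taken from core.c.

static int example_set_enabled(struct regulator_dev *rdev)
{
	int ret = regulator_enable_regmap(rdev);

	if (ret != 0)
		rdev_err(rdev, "failed to enable: %d\n", ret);
	else
		rdev_dbg(rdev, "enabled\n");

	return ret;
}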
|
D | helpers.c |
    31  int regulator_is_enabled_regmap(struct regulator_dev *rdev) in regulator_is_enabled_regmap() argument
    36  ret = regmap_read(rdev->regmap, rdev->desc->enable_reg, &val); in regulator_is_enabled_regmap()
    40  val &= rdev->desc->enable_mask; in regulator_is_enabled_regmap()
    42  if (rdev->desc->enable_is_inverted) { in regulator_is_enabled_regmap()
    43  if (rdev->desc->enable_val) in regulator_is_enabled_regmap()
    44  return val != rdev->desc->enable_val; in regulator_is_enabled_regmap()
    47  if (rdev->desc->enable_val) in regulator_is_enabled_regmap()
    48  return val == rdev->desc->enable_val; in regulator_is_enabled_regmap()
    63  int regulator_enable_regmap(struct regulator_dev *rdev) in regulator_enable_regmap() argument
    67  if (rdev->desc->enable_is_inverted) { in regulator_enable_regmap()
    [all …]
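regulator_is_enabled_regmap() and regulator_enable_regmap() above read and write desc->enable_reg/enable_mask through the device's regmap, honouring enable_is_inverted and enable_val, so a driver whose enable bit lives in a single register does not have to open-code any of it. The usual wiring looks roughly like the sketch below; the register offset and bit are made up for illustration.

#include <linux/bits.h>
#include <linux/module.h>
#include <linux/regulator/driver.h>

static const struct regulator_ops example_ops = {
	.enable		= regulator_enable_regmap,
	.disable	= regulator_disable_regmap,
	.is_enabled	= regulator_is_enabled_regmap,
};

static const struct regulator_desc example_desc = {
	.name		= "example-ldo",
	.ops		= &example_ops,
	.type		= REGULATOR_VOLTAGE,
	.owner		= THIS_MODULE,
	.enable_reg	= 0x10,		/* hypothetical control register */
	.enable_mask	= BIT(0),	/* hypothetical enable bit       */
};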
|
/drivers/infiniband/hw/bnxt_re/ |
D | main.c |
    107  static int bnxt_re_unregister_netdev(struct bnxt_re_dev *rdev, bool lock_wait) in bnxt_re_unregister_netdev() argument
    112  if (!rdev) in bnxt_re_unregister_netdev()
    115  en_dev = rdev->en_dev; in bnxt_re_unregister_netdev()
    120  rc = en_dev->en_ops->bnxt_unregister_device(rdev->en_dev, in bnxt_re_unregister_netdev()
    127  static int bnxt_re_register_netdev(struct bnxt_re_dev *rdev) in bnxt_re_register_netdev() argument
    132  if (!rdev) in bnxt_re_register_netdev()
    135  en_dev = rdev->en_dev; in bnxt_re_register_netdev()
    139  &bnxt_re_ulp_ops, rdev); in bnxt_re_register_netdev()
    144  static int bnxt_re_free_msix(struct bnxt_re_dev *rdev, bool lock_wait) in bnxt_re_free_msix() argument
    149  if (!rdev) in bnxt_re_free_msix()
    [all …]
|
/drivers/infiniband/hw/cxgb4/ |
D | resource.c |
    38  static int c4iw_init_qid_table(struct c4iw_rdev *rdev) in c4iw_init_qid_table() argument
    42  if (c4iw_id_table_alloc(&rdev->resource.qid_table, in c4iw_init_qid_table()
    43  rdev->lldi.vr->qp.start, in c4iw_init_qid_table()
    44  rdev->lldi.vr->qp.size, in c4iw_init_qid_table()
    45  rdev->lldi.vr->qp.size, 0)) in c4iw_init_qid_table()
    48  for (i = rdev->lldi.vr->qp.start; in c4iw_init_qid_table()
    49  i < rdev->lldi.vr->qp.start + rdev->lldi.vr->qp.size; i++) in c4iw_init_qid_table()
    50  if (!(i & rdev->qpmask)) in c4iw_init_qid_table()
    51  c4iw_id_free(&rdev->resource.qid_table, i); in c4iw_init_qid_table()
    56  int c4iw_init_resource(struct c4iw_rdev *rdev, u32 nr_tpt, u32 nr_pdid) in c4iw_init_resource() argument
    [all …]
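c4iw_init_qid_table() above sizes an ID table to the adapter's QP range and then pre-frees only the IDs whose low bits clear against rdev->qpmask, i.e. one usable entry per hardware-aligned group. Below is a standalone sketch of that seeding step using a plain bitmap in place of c4iw_id_table; the grouping rationale is inferred from the visible code, and the struct and function names are stand-ins, not the driver's.

#include <stdbool.h>
#include <stdlib.h>

struct id_table {
	unsigned int start, size;
	bool *avail;			/* avail[i] => id (start + i) may be handed out */
};

static int init_qid_table(struct id_table *t, unsigned int start,
			  unsigned int size, unsigned int qpmask)
{
	unsigned int id;

	t->start = start;
	t->size  = size;
	t->avail = calloc(size, sizeof(*t->avail));
	if (t->avail == NULL)
		return -1;

	for (id = start; id < start + size; id++)
		if (!(id & qpmask))	/* only group-aligned QIDs go into the table */
			t->avail[id - start] = true;

	return 0;
}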
|