
Searched refs:rdev (Results 1 – 25 of 329) sorted by relevance


/drivers/gpu/drm/radeon/
radeon_asic.h
34 uint32_t radeon_legacy_get_engine_clock(struct radeon_device *rdev);
35 void radeon_legacy_set_engine_clock(struct radeon_device *rdev, uint32_t eng_clock);
36 uint32_t radeon_legacy_get_memory_clock(struct radeon_device *rdev);
37 void radeon_legacy_set_clock_gating(struct radeon_device *rdev, int enable);
39 uint32_t radeon_atom_get_engine_clock(struct radeon_device *rdev);
40 void radeon_atom_set_engine_clock(struct radeon_device *rdev, uint32_t eng_clock);
41 uint32_t radeon_atom_get_memory_clock(struct radeon_device *rdev);
42 void radeon_atom_set_memory_clock(struct radeon_device *rdev, uint32_t mem_clock);
43 void radeon_atom_set_clock_gating(struct radeon_device *rdev, int enable);
61 int r100_init(struct radeon_device *rdev);
[all …]
radeon_pm.c
45 static int radeon_debugfs_pm_init(struct radeon_device *rdev);
46 static bool radeon_pm_in_vbl(struct radeon_device *rdev);
47 static bool radeon_pm_debug_check_in_vbl(struct radeon_device *rdev, bool finish);
48 static void radeon_pm_update_profile(struct radeon_device *rdev);
49 static void radeon_pm_set_clocks(struct radeon_device *rdev);
51 int radeon_pm_get_type_index(struct radeon_device *rdev, in radeon_pm_get_type_index() argument
58 for (i = 0; i < rdev->pm.num_power_states; i++) { in radeon_pm_get_type_index()
59 if (rdev->pm.power_state[i].type == ps_type) { in radeon_pm_get_type_index()
66 return rdev->pm.default_power_state_index; in radeon_pm_get_type_index()
69 void radeon_pm_acpi_event_handler(struct radeon_device *rdev) in radeon_pm_acpi_event_handler() argument
[all …]
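
The radeon_pm_get_type_index() hit above shows a simple lookup: scan the power-state array for the first entry of the requested type and fall back to the default index when nothing matches. Below is a minimal user-space sketch of that pattern; the struct layout, enum values and pm_get_type_index() name are illustrative stand-ins, not the driver's real definitions.

#include <stdio.h>

/* Illustrative stand-ins; the real definitions live elsewhere in the driver. */
enum ps_type { PS_TYPE_DEFAULT, PS_TYPE_BATTERY, PS_TYPE_PERFORMANCE };

struct power_state { enum ps_type type; };

struct pm_info {
	struct power_state power_state[4];
	int num_power_states;
	int default_power_state_index;
};

/* Return the index of the first state of the requested type,
 * or the default index if no state of that type exists. */
static int pm_get_type_index(const struct pm_info *pm, enum ps_type ps_type)
{
	for (int i = 0; i < pm->num_power_states; i++)
		if (pm->power_state[i].type == ps_type)
			return i;
	return pm->default_power_state_index;
}

int main(void)
{
	struct pm_info pm = {
		.power_state = { { PS_TYPE_DEFAULT }, { PS_TYPE_BATTERY },
				 { PS_TYPE_PERFORMANCE } },
		.num_power_states = 3,
		.default_power_state_index = 0,
	};

	printf("performance index: %d\n",
	       pm_get_type_index(&pm, PS_TYPE_PERFORMANCE));	/* prints 2 */
	printf("fallback index: %d\n",
	       pm_get_type_index(&pm, (enum ps_type)42));	/* prints 0 */
	return 0;
}
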
r420.c
39 void r420_pm_init_profile(struct radeon_device *rdev) in r420_pm_init_profile() argument
42 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index; in r420_pm_init_profile()
43 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index; in r420_pm_init_profile()
44 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0; in r420_pm_init_profile()
45 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 0; in r420_pm_init_profile()
47 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = 0; in r420_pm_init_profile()
48 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = 0; in r420_pm_init_profile()
49 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0; in r420_pm_init_profile()
50 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0; in r420_pm_init_profile()
52 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = 0; in r420_pm_init_profile()
[all …]
radeon_device.c
141 struct radeon_device *rdev = dev->dev_private; in radeon_is_px() local
143 if (rdev->flags & RADEON_IS_PX) in radeon_is_px()
148 static void radeon_device_handle_px_quirks(struct radeon_device *rdev) in radeon_device_handle_px_quirks() argument
154 if (rdev->pdev->vendor == p->chip_vendor && in radeon_device_handle_px_quirks()
155 rdev->pdev->device == p->chip_device && in radeon_device_handle_px_quirks()
156 rdev->pdev->subsystem_vendor == p->subsys_vendor && in radeon_device_handle_px_quirks()
157 rdev->pdev->subsystem_device == p->subsys_device) { in radeon_device_handle_px_quirks()
158 rdev->px_quirk_flags = p->px_quirk_flags; in radeon_device_handle_px_quirks()
164 if (rdev->px_quirk_flags & RADEON_PX_QUIRK_DISABLE_PX) in radeon_device_handle_px_quirks()
165 rdev->flags &= ~RADEON_IS_PX; in radeon_device_handle_px_quirks()
[all …]
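
radeon_device_handle_px_quirks() above walks a static quirk table, matching the PCI vendor/device and subsystem vendor/device IDs before copying the entry's flags into the device. A compact user-space sketch of that table-matching pattern follows; the table contents, flag value and function names are invented for illustration.

#include <stdint.h>
#include <stdio.h>

#define QUIRK_DISABLE_PX  (1u << 0)	/* illustrative flag */

struct px_quirk {
	uint16_t chip_vendor, chip_device;
	uint16_t subsys_vendor, subsys_device;
	uint32_t quirk_flags;
};

/* Made-up table; the real one lives in radeon_device.c. */
static const struct px_quirk quirks[] = {
	{ 0x1002, 0x6840, 0x1043, 0x2122, QUIRK_DISABLE_PX },
	{ 0, 0, 0, 0, 0 },	/* terminator */
};

/* Return the quirk flags for a matching device, or 0 if none match. */
static uint32_t lookup_px_quirks(uint16_t vendor, uint16_t device,
				 uint16_t sub_vendor, uint16_t sub_device)
{
	for (const struct px_quirk *p = quirks; p->chip_device != 0; p++) {
		if (vendor == p->chip_vendor &&
		    device == p->chip_device &&
		    sub_vendor == p->subsys_vendor &&
		    sub_device == p->subsys_device)
			return p->quirk_flags;
	}
	return 0;
}

int main(void)
{
	uint32_t flags = lookup_px_quirks(0x1002, 0x6840, 0x1043, 0x2122);

	if (flags & QUIRK_DISABLE_PX)
		printf("PX disabled by quirk\n");
	return 0;
}
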
r520.c
36 int r520_mc_wait_for_idle(struct radeon_device *rdev) in r520_mc_wait_for_idle() argument
41 for (i = 0; i < rdev->usec_timeout; i++) { in r520_mc_wait_for_idle()
52 static void r520_gpu_init(struct radeon_device *rdev) in r520_gpu_init() argument
56 rv515_vga_render_disable(rdev); in r520_gpu_init()
78 if (rdev->family == CHIP_RV530) { in r520_gpu_init()
81 r420_pipes_init(rdev); in r520_gpu_init()
88 if (r520_mc_wait_for_idle(rdev)) { in r520_gpu_init()
94 static void r520_vram_get_type(struct radeon_device *rdev) in r520_vram_get_type() argument
98 rdev->mc.vram_width = 128; in r520_vram_get_type()
99 rdev->mc.vram_is_ddr = true; in r520_vram_get_type()
[all …]
radeon_irq_kms.c
51 struct radeon_device *rdev = dev->dev_private; in radeon_driver_irq_handler_kms() local
54 ret = radeon_irq_process(rdev); in radeon_driver_irq_handler_kms()
76 struct radeon_device *rdev = container_of(work, struct radeon_device, in radeon_hotplug_work_func() local
78 struct drm_device *dev = rdev->ddev; in radeon_hotplug_work_func()
84 if (!rdev->mode_info.mode_config_initialized) in radeon_hotplug_work_func()
99 struct radeon_device *rdev = container_of(work, struct radeon_device, in radeon_dp_work_func() local
101 struct drm_device *dev = rdev->ddev; in radeon_dp_work_func()
121 struct radeon_device *rdev = dev->dev_private; in radeon_driver_irq_preinstall_kms() local
125 spin_lock_irqsave(&rdev->irq.lock, irqflags); in radeon_driver_irq_preinstall_kms()
128 atomic_set(&rdev->irq.ring_int[i], 0); in radeon_driver_irq_preinstall_kms()
[all …]
r600.c
100 int r600_debugfs_mc_info_init(struct radeon_device *rdev);
103 int r600_mc_wait_for_idle(struct radeon_device *rdev);
104 static void r600_gpu_init(struct radeon_device *rdev);
105 void r600_fini(struct radeon_device *rdev);
106 void r600_irq_disable(struct radeon_device *rdev);
107 static void r600_pcie_gen2_enable(struct radeon_device *rdev);
108 extern int evergreen_rlc_resume(struct radeon_device *rdev);
109 extern void rv770_set_clk_bypass_mode(struct radeon_device *rdev);
114 u32 r600_rcu_rreg(struct radeon_device *rdev, u32 reg) in r600_rcu_rreg() argument
119 spin_lock_irqsave(&rdev->rcu_idx_lock, flags); in r600_rcu_rreg()
[all …]
rs400.c
36 static int rs400_debugfs_pcie_gart_info_init(struct radeon_device *rdev);
38 void rs400_gart_adjust_size(struct radeon_device *rdev) in rs400_gart_adjust_size() argument
41 switch (rdev->mc.gtt_size/(1024*1024)) { in rs400_gart_adjust_size()
52 (unsigned)(rdev->mc.gtt_size >> 20)); in rs400_gart_adjust_size()
55 rdev->mc.gtt_size = 32 * 1024 * 1024; in rs400_gart_adjust_size()
60 void rs400_gart_tlb_flush(struct radeon_device *rdev) in rs400_gart_tlb_flush() argument
63 unsigned int timeout = rdev->usec_timeout; in rs400_gart_tlb_flush()
76 int rs400_gart_init(struct radeon_device *rdev) in rs400_gart_init() argument
80 if (rdev->gart.ptr) { in rs400_gart_init()
85 switch(rdev->mc.gtt_size / (1024 * 1024)) { in rs400_gart_init()
[all …]
ni.c
42 u32 tn_smc_rreg(struct radeon_device *rdev, u32 reg) in tn_smc_rreg() argument
47 spin_lock_irqsave(&rdev->smc_idx_lock, flags); in tn_smc_rreg()
50 spin_unlock_irqrestore(&rdev->smc_idx_lock, flags); in tn_smc_rreg()
54 void tn_smc_wreg(struct radeon_device *rdev, u32 reg, u32 v) in tn_smc_wreg() argument
58 spin_lock_irqsave(&rdev->smc_idx_lock, flags); in tn_smc_wreg()
61 spin_unlock_irqrestore(&rdev->smc_idx_lock, flags); in tn_smc_wreg()
190 extern bool evergreen_is_display_hung(struct radeon_device *rdev);
191 extern void evergreen_print_gpu_status_regs(struct radeon_device *rdev);
192 extern void evergreen_mc_stop(struct radeon_device *rdev, struct evergreen_mc_save *save);
193 extern void evergreen_mc_resume(struct radeon_device *rdev, struct evergreen_mc_save *save);
[all …]
rv770.c
42 static void rv770_gpu_init(struct radeon_device *rdev);
43 void rv770_fini(struct radeon_device *rdev);
44 static void rv770_pcie_gen2_enable(struct radeon_device *rdev);
45 int evergreen_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk);
47 int rv770_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk) in rv770_set_uvd_clocks() argument
53 if (rdev->family == CHIP_RV740) in rv770_set_uvd_clocks()
54 return evergreen_set_uvd_clocks(rdev, vclk, dclk); in rv770_set_uvd_clocks()
67 r = radeon_uvd_calc_upll_dividers(rdev, vclk, dclk, 50000, 160000, in rv770_set_uvd_clocks()
87 r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL); in rv770_set_uvd_clocks()
118 r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL); in rv770_set_uvd_clocks()
[all …]
radeon_gart.c
65 int radeon_gart_table_ram_alloc(struct radeon_device *rdev) in radeon_gart_table_ram_alloc() argument
69 ptr = pci_alloc_consistent(rdev->pdev, rdev->gart.table_size, in radeon_gart_table_ram_alloc()
70 &rdev->gart.table_addr); in radeon_gart_table_ram_alloc()
75 if (rdev->family == CHIP_RS400 || rdev->family == CHIP_RS480 || in radeon_gart_table_ram_alloc()
76 rdev->family == CHIP_RS690 || rdev->family == CHIP_RS740) { in radeon_gart_table_ram_alloc()
78 rdev->gart.table_size >> PAGE_SHIFT); in radeon_gart_table_ram_alloc()
81 rdev->gart.ptr = ptr; in radeon_gart_table_ram_alloc()
82 memset((void *)rdev->gart.ptr, 0, rdev->gart.table_size); in radeon_gart_table_ram_alloc()
95 void radeon_gart_table_ram_free(struct radeon_device *rdev) in radeon_gart_table_ram_free() argument
97 if (rdev->gart.ptr == NULL) { in radeon_gart_table_ram_free()
[all …]
radeon.h
241 bool radeon_get_bios(struct radeon_device *rdev);
251 int radeon_dummy_page_init(struct radeon_device *rdev);
252 void radeon_dummy_page_fini(struct radeon_device *rdev);
277 int radeon_pm_init(struct radeon_device *rdev);
278 int radeon_pm_late_init(struct radeon_device *rdev);
279 void radeon_pm_fini(struct radeon_device *rdev);
280 void radeon_pm_compute_clocks(struct radeon_device *rdev);
281 void radeon_pm_suspend(struct radeon_device *rdev);
282 void radeon_pm_resume(struct radeon_device *rdev);
283 void radeon_combios_get_power_modes(struct radeon_device *rdev);
[all …]
radeon_fence.c
62 static void radeon_fence_write(struct radeon_device *rdev, u32 seq, int ring) in radeon_fence_write() argument
64 struct radeon_fence_driver *drv = &rdev->fence_drv[ring]; in radeon_fence_write()
65 if (likely(rdev->wb.enabled || !drv->scratch_reg)) { in radeon_fence_write()
83 static u32 radeon_fence_read(struct radeon_device *rdev, int ring) in radeon_fence_read() argument
85 struct radeon_fence_driver *drv = &rdev->fence_drv[ring]; in radeon_fence_read()
88 if (likely(rdev->wb.enabled || !drv->scratch_reg)) { in radeon_fence_read()
108 static void radeon_fence_schedule_check(struct radeon_device *rdev, int ring) in radeon_fence_schedule_check() argument
115 &rdev->fence_drv[ring].lockup_work, in radeon_fence_schedule_check()
129 int radeon_fence_emit(struct radeon_device *rdev, in radeon_fence_emit() argument
133 u64 seq = ++rdev->fence_drv[ring].sync_seq[ring]; in radeon_fence_emit()
[all …]
rs600.c
47 static void rs600_gpu_init(struct radeon_device *rdev);
48 int rs600_mc_wait_for_idle(struct radeon_device *rdev);
56 static bool avivo_is_in_vblank(struct radeon_device *rdev, int crtc) in avivo_is_in_vblank() argument
64 static bool avivo_is_counter_moving(struct radeon_device *rdev, int crtc) in avivo_is_counter_moving() argument
85 void avivo_wait_for_vblank(struct radeon_device *rdev, int crtc) in avivo_wait_for_vblank() argument
89 if (crtc >= rdev->num_crtc) in avivo_wait_for_vblank()
98 while (avivo_is_in_vblank(rdev, crtc)) { in avivo_wait_for_vblank()
100 if (!avivo_is_counter_moving(rdev, crtc)) in avivo_wait_for_vblank()
105 while (!avivo_is_in_vblank(rdev, crtc)) { in avivo_wait_for_vblank()
107 if (!avivo_is_counter_moving(rdev, crtc)) in avivo_wait_for_vblank()
[all …]
evergreen.c
41 u32 eg_cg_rreg(struct radeon_device *rdev, u32 reg) in eg_cg_rreg() argument
46 spin_lock_irqsave(&rdev->cg_idx_lock, flags); in eg_cg_rreg()
49 spin_unlock_irqrestore(&rdev->cg_idx_lock, flags); in eg_cg_rreg()
53 void eg_cg_wreg(struct radeon_device *rdev, u32 reg, u32 v) in eg_cg_wreg() argument
57 spin_lock_irqsave(&rdev->cg_idx_lock, flags); in eg_cg_wreg()
60 spin_unlock_irqrestore(&rdev->cg_idx_lock, flags); in eg_cg_wreg()
63 u32 eg_pif_phy0_rreg(struct radeon_device *rdev, u32 reg) in eg_pif_phy0_rreg() argument
68 spin_lock_irqsave(&rdev->pif_idx_lock, flags); in eg_pif_phy0_rreg()
71 spin_unlock_irqrestore(&rdev->pif_idx_lock, flags); in eg_pif_phy0_rreg()
75 void eg_pif_phy0_wreg(struct radeon_device *rdev, u32 reg, u32 v) in eg_pif_phy0_wreg() argument
[all …]
rv6xx_dpm.c
34 static u32 rv6xx_scale_count_given_unit(struct radeon_device *rdev,
44 static struct rv6xx_power_info *rv6xx_get_pi(struct radeon_device *rdev) in rv6xx_get_pi() argument
46 struct rv6xx_power_info *pi = rdev->pm.dpm.priv; in rv6xx_get_pi()
51 static void rv6xx_force_pcie_gen1(struct radeon_device *rdev) in rv6xx_force_pcie_gen1() argument
64 for (i = 0; i < rdev->usec_timeout; i++) { in rv6xx_force_pcie_gen1()
75 static void rv6xx_enable_pcie_gen2_support(struct radeon_device *rdev) in rv6xx_enable_pcie_gen2_support() argument
88 static void rv6xx_enable_bif_dynamic_pcie_gen2(struct radeon_device *rdev, in rv6xx_enable_bif_dynamic_pcie_gen2() argument
101 static void rv6xx_enable_l0s(struct radeon_device *rdev) in rv6xx_enable_l0s() argument
110 static void rv6xx_enable_l1(struct radeon_device *rdev) in rv6xx_enable_l1() argument
122 static void rv6xx_enable_pll_sleep_in_l1(struct radeon_device *rdev) in rv6xx_enable_pll_sleep_in_l1() argument
[all …]
radeon_kms.c
58 struct radeon_device *rdev = dev->dev_private; in radeon_driver_unload_kms() local
60 if (rdev == NULL) in radeon_driver_unload_kms()
63 if (rdev->rmmio == NULL) in radeon_driver_unload_kms()
68 radeon_kfd_device_fini(rdev); in radeon_driver_unload_kms()
70 radeon_acpi_fini(rdev); in radeon_driver_unload_kms()
72 radeon_modeset_fini(rdev); in radeon_driver_unload_kms()
73 radeon_device_fini(rdev); in radeon_driver_unload_kms()
76 kfree(rdev); in radeon_driver_unload_kms()
96 struct radeon_device *rdev; in radeon_driver_load_kms() local
99 rdev = kzalloc(sizeof(struct radeon_device), GFP_KERNEL); in radeon_driver_load_kms()
[all …]
rs690.c
35 int rs690_mc_wait_for_idle(struct radeon_device *rdev) in rs690_mc_wait_for_idle() argument
40 for (i = 0; i < rdev->usec_timeout; i++) { in rs690_mc_wait_for_idle()
50 static void rs690_gpu_init(struct radeon_device *rdev) in rs690_gpu_init() argument
53 r420_pipes_init(rdev); in rs690_gpu_init()
54 if (rs690_mc_wait_for_idle(rdev)) { in rs690_gpu_init()
65 void rs690_pm_info(struct radeon_device *rdev) in rs690_pm_info() argument
73 if (atom_parse_data_header(rdev->mode_info.atom_context, index, NULL, in rs690_pm_info()
75 info = (union igp_info *)(rdev->mode_info.atom_context->bios + data_offset); in rs690_pm_info()
81 rdev->pm.igp_sideport_mclk.full = dfixed_const(le32_to_cpu(info->info.ulBootUpMemoryClock)); in rs690_pm_info()
82 rdev->pm.igp_sideport_mclk.full = dfixed_div(rdev->pm.igp_sideport_mclk, tmp); in rs690_pm_info()
[all …]
si.c
114 static u32 si_get_cu_active_bitmap(struct radeon_device *rdev, u32 se, u32 sh);
115 static void si_pcie_gen3_enable(struct radeon_device *rdev);
116 static void si_program_aspm(struct radeon_device *rdev);
117 extern void sumo_rlc_fini(struct radeon_device *rdev);
118 extern int sumo_rlc_init(struct radeon_device *rdev);
119 extern int r600_ih_ring_alloc(struct radeon_device *rdev);
120 extern void r600_ih_ring_fini(struct radeon_device *rdev);
121 extern void evergreen_fix_pci_max_read_req_size(struct radeon_device *rdev);
122 extern void evergreen_mc_stop(struct radeon_device *rdev, struct evergreen_mc_save *save);
123 extern void evergreen_mc_resume(struct radeon_device *rdev, struct evergreen_mc_save *save);
[all …]
cik.c
119 extern int r600_ih_ring_alloc(struct radeon_device *rdev);
120 extern void r600_ih_ring_fini(struct radeon_device *rdev);
121 extern void evergreen_mc_stop(struct radeon_device *rdev, struct evergreen_mc_save *save);
122 extern void evergreen_mc_resume(struct radeon_device *rdev, struct evergreen_mc_save *save);
123 extern bool evergreen_is_display_hung(struct radeon_device *rdev);
124 extern void sumo_rlc_fini(struct radeon_device *rdev);
125 extern int sumo_rlc_init(struct radeon_device *rdev);
126 extern void si_vram_gtt_location(struct radeon_device *rdev, struct radeon_mc *mc);
127 extern void si_rlc_reset(struct radeon_device *rdev);
128 extern void si_init_uvd_internal_cg(struct radeon_device *rdev);
[all …]
kv_dpm.c
36 static int kv_enable_nb_dpm(struct radeon_device *rdev,
38 static void kv_init_graphics_levels(struct radeon_device *rdev);
39 static int kv_calculate_ds_divider(struct radeon_device *rdev);
40 static int kv_calculate_nbps_level_settings(struct radeon_device *rdev);
41 static int kv_calculate_dpm_settings(struct radeon_device *rdev);
42 static void kv_enable_new_levels(struct radeon_device *rdev);
43 static void kv_program_nbps_index_settings(struct radeon_device *rdev,
45 static int kv_set_enabled_level(struct radeon_device *rdev, u32 level);
46 static int kv_set_enabled_levels(struct radeon_device *rdev);
47 static int kv_force_dpm_highest(struct radeon_device *rdev);
[all …]
r300.c
55 uint32_t rv370_pcie_rreg(struct radeon_device *rdev, uint32_t reg) in rv370_pcie_rreg() argument
60 spin_lock_irqsave(&rdev->pcie_idx_lock, flags); in rv370_pcie_rreg()
61 WREG32(RADEON_PCIE_INDEX, ((reg) & rdev->pcie_reg_mask)); in rv370_pcie_rreg()
63 spin_unlock_irqrestore(&rdev->pcie_idx_lock, flags); in rv370_pcie_rreg()
67 void rv370_pcie_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v) in rv370_pcie_wreg() argument
71 spin_lock_irqsave(&rdev->pcie_idx_lock, flags); in rv370_pcie_wreg()
72 WREG32(RADEON_PCIE_INDEX, ((reg) & rdev->pcie_reg_mask)); in rv370_pcie_wreg()
74 spin_unlock_irqrestore(&rdev->pcie_idx_lock, flags); in rv370_pcie_wreg()
80 static int rv370_debugfs_pcie_gart_info_init(struct radeon_device *rdev);
82 void rv370_pcie_gart_tlb_flush(struct radeon_device *rdev) in rv370_pcie_gart_tlb_flush() argument
[all …]
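
Several of the radeon hits (rv370_pcie_rreg()/rv370_pcie_wreg() here, tn_smc_rreg()/tn_smc_wreg() in ni.c, eg_cg_rreg()/eg_cg_wreg() in evergreen.c) follow the same indexed-register idiom: take a spinlock, write the offset into an INDEX register, then access the paired DATA register. The sketch below models that idiom in user space, with the MMIO window replaced by a small array and the spinlock by a pthread mutex; all names and the fake register layout are assumptions for illustration only.

#include <pthread.h>
#include <stdint.h>
#include <stdio.h>

/* Fake two-register window: [0] acts as INDEX, [1] as DATA backed by a table. */
#define IDX_REG  0
#define DATA_REG 1

static uint32_t backing[256];      /* pretend indirect register file */
static uint32_t mmio[2];           /* pretend INDEX/DATA pair */
static pthread_mutex_t idx_lock = PTHREAD_MUTEX_INITIALIZER;

static void wreg32(unsigned int reg, uint32_t v)
{
	mmio[reg] = v;
	if (reg == DATA_REG)
		backing[mmio[IDX_REG] & 0xff] = v;
}

static uint32_t rreg32(unsigned int reg)
{
	if (reg == DATA_REG)
		return backing[mmio[IDX_REG] & 0xff];
	return mmio[reg];
}

/* The idiom: the lock keeps the INDEX write and the DATA access atomic
 * with respect to other users of the same INDEX/DATA pair. */
static uint32_t indexed_rreg(uint32_t reg)
{
	uint32_t v;

	pthread_mutex_lock(&idx_lock);
	wreg32(IDX_REG, reg);
	v = rreg32(DATA_REG);
	pthread_mutex_unlock(&idx_lock);
	return v;
}

static void indexed_wreg(uint32_t reg, uint32_t v)
{
	pthread_mutex_lock(&idx_lock);
	wreg32(IDX_REG, reg);
	wreg32(DATA_REG, v);
	pthread_mutex_unlock(&idx_lock);
}

int main(void)
{
	indexed_wreg(0x42, 0xdeadbeef);
	printf("0x%08x\n", indexed_rreg(0x42));
	return 0;
}
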
/drivers/regulator/
core.c
42 #define rdev_crit(rdev, fmt, ...) \ argument
43 pr_crit("%s: " fmt, rdev_get_name(rdev), ##__VA_ARGS__)
44 #define rdev_err(rdev, fmt, ...) \ argument
45 pr_err("%s: " fmt, rdev_get_name(rdev), ##__VA_ARGS__)
46 #define rdev_warn(rdev, fmt, ...) \ argument
47 pr_warn("%s: " fmt, rdev_get_name(rdev), ##__VA_ARGS__)
48 #define rdev_info(rdev, fmt, ...) \ argument
49 pr_info("%s: " fmt, rdev_get_name(rdev), ##__VA_ARGS__)
50 #define rdev_dbg(rdev, fmt, ...) \ argument
51 pr_debug("%s: " fmt, rdev_get_name(rdev), ##__VA_ARGS__)
[all …]
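
The core.c matches are the rdev_crit()/rdev_err()/rdev_warn()/rdev_info()/rdev_dbg() logging wrappers, which prefix the regulator's name onto the corresponding printk helper. The snippet below is a user-space analogue of that wrapper pattern built on fprintf(); the struct regulator_dev stub and the rdev_get_name() body are stand-ins, and only the macro shape mirrors the hits above.

#include <stdio.h>

/* Stand-in for struct regulator_dev; only the name matters here. */
struct regulator_dev {
	const char *name;
};

static const char *rdev_get_name(const struct regulator_dev *rdev)
{
	return rdev->name ? rdev->name : "regulator";
}

/* Same shape as the kernel macros: prepend "<name>: " to every message.
 * ##__VA_ARGS__ is the GNU extension the kernel macros also rely on. */
#define rdev_err(rdev, fmt, ...) \
	fprintf(stderr, "%s: " fmt, rdev_get_name(rdev), ##__VA_ARGS__)
#define rdev_info(rdev, fmt, ...) \
	fprintf(stdout, "%s: " fmt, rdev_get_name(rdev), ##__VA_ARGS__)

int main(void)
{
	struct regulator_dev vdd = { .name = "vdd_core" };

	rdev_info(&vdd, "enabled at %d uV\n", 900000);
	rdev_err(&vdd, "failed to set voltage: %d\n", -22);
	return 0;
}
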
helpers.c
31 int regulator_is_enabled_regmap(struct regulator_dev *rdev) in regulator_is_enabled_regmap() argument
36 ret = regmap_read(rdev->regmap, rdev->desc->enable_reg, &val); in regulator_is_enabled_regmap()
40 val &= rdev->desc->enable_mask; in regulator_is_enabled_regmap()
42 if (rdev->desc->enable_is_inverted) { in regulator_is_enabled_regmap()
43 if (rdev->desc->enable_val) in regulator_is_enabled_regmap()
44 return val != rdev->desc->enable_val; in regulator_is_enabled_regmap()
47 if (rdev->desc->enable_val) in regulator_is_enabled_regmap()
48 return val == rdev->desc->enable_val; in regulator_is_enabled_regmap()
63 int regulator_enable_regmap(struct regulator_dev *rdev) in regulator_enable_regmap() argument
67 if (rdev->desc->enable_is_inverted) { in regulator_enable_regmap()
[all …]
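
regulator_is_enabled_regmap() above reads enable_reg through regmap, masks the value with enable_mask, and then interprets it according to enable_val and enable_is_inverted. That interpretation step is easy to get backwards, so here is a small user-space sketch of just the decision logic; the field names mirror the snippet, while the surrounding struct, the test values and the final non-inverted fallback (treat any non-zero masked value as enabled) are assumptions for illustration.

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

struct enable_desc {
	uint32_t enable_mask;      /* bits that encode the enable state */
	uint32_t enable_val;       /* value meaning "enabled" (0 => any non-zero) */
	bool enable_is_inverted;   /* set when the enabled state reads as the inverse */
};

/* Sketch of the masking/compare logic shown in regulator_is_enabled_regmap(). */
static bool is_enabled(const struct enable_desc *d, uint32_t reg_val)
{
	uint32_t val = reg_val & d->enable_mask;

	if (d->enable_is_inverted) {
		if (d->enable_val)
			return val != d->enable_val;
		return val == 0;
	}

	if (d->enable_val)
		return val == d->enable_val;
	return val != 0;
}

int main(void)
{
	struct enable_desc plain = { .enable_mask = 0x1 };
	struct enable_desc inverted = { .enable_mask = 0x1, .enable_is_inverted = true };

	printf("plain, bit set:    %d\n", is_enabled(&plain, 0x1));     /* 1 */
	printf("inverted, bit set: %d\n", is_enabled(&inverted, 0x1));  /* 0 */
	return 0;
}
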
/drivers/infiniband/hw/cxgb4/
resource.c
38 static int c4iw_init_qid_table(struct c4iw_rdev *rdev) in c4iw_init_qid_table() argument
42 if (c4iw_id_table_alloc(&rdev->resource.qid_table, in c4iw_init_qid_table()
43 rdev->lldi.vr->qp.start, in c4iw_init_qid_table()
44 rdev->lldi.vr->qp.size, in c4iw_init_qid_table()
45 rdev->lldi.vr->qp.size, 0)) in c4iw_init_qid_table()
48 for (i = rdev->lldi.vr->qp.start; in c4iw_init_qid_table()
49 i < rdev->lldi.vr->qp.start + rdev->lldi.vr->qp.size; i++) in c4iw_init_qid_table()
50 if (!(i & rdev->qpmask)) in c4iw_init_qid_table()
51 c4iw_id_free(&rdev->resource.qid_table, i); in c4iw_init_qid_table()
56 int c4iw_init_resource(struct c4iw_rdev *rdev, u32 nr_tpt, u32 nr_pdid) in c4iw_init_resource() argument
[all …]
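
c4iw_init_qid_table() above seeds a QID table spanning the device's QP range but releases only the IDs whose low qpmask bits are clear, so that queue IDs stay aligned to the hardware's QP stride. A user-space sketch of that seeding step follows, using a plain byte map in place of c4iw_id_table_alloc()/c4iw_id_free(); the struct, sizes and mask value are illustrative.

#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

struct qid_table {
	uint32_t start;
	uint32_t size;
	unsigned char *free_map;	/* one byte per ID: 1 = allocatable */
};

/* Seed the table: only IDs with the low qpmask bits clear become allocatable,
 * mirroring the !(i & rdev->qpmask) test in the snippet above. */
static int qid_table_init(struct qid_table *t, uint32_t start, uint32_t size,
			  uint32_t qpmask)
{
	t->start = start;
	t->size = size;
	t->free_map = calloc(size, 1);
	if (!t->free_map)
		return -1;

	for (uint32_t i = start; i < start + size; i++)
		if (!(i & qpmask))
			t->free_map[i - start] = 1;
	return 0;
}

int main(void)
{
	struct qid_table t;
	uint32_t usable = 0;

	if (qid_table_init(&t, 1024, 256, 0x3))
		return 1;
	for (uint32_t i = 0; i < t.size; i++)
		usable += t.free_map[i];
	printf("%u of %u QIDs allocatable\n", usable, t.size);	/* 64 of 256 */
	free(t.free_map);
	return 0;
}
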
