
Searched refs: dev_priv (Results 1 – 25 of 303) sorted by relevance
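The matches below all touch a driver-private structure reached through a dev_priv pointer. Two idioms recur in the excerpts: the older drivers (via, savage, r128, mga, gma500) cast the opaque dev->dev_private pointer back to their own type, while vmwgfx and i915 wrap the lookup in a small helper (vmw_priv(), to_i915()); in i915 the private structure embeds struct drm_device and the helper is a container_of(). A minimal sketch of both idioms follows; struct foo_private and the foo_priv*() helpers are hypothetical names, not taken from any of the drivers listed.

#include <linux/kernel.h>
#include <drm/drm_device.h>

/* Hypothetical per-device state; the drivers below use their own types
 * such as drm_i915_private, vmw_private or drm_via_private_t. */
struct foo_private {
	struct drm_device drm;		/* embedded base device */
};

/* container_of() idiom (the style of i915's to_i915()): the private
 * struct embeds struct drm_device, so the lookup is pointer arithmetic. */
static inline struct foo_private *foo_priv(struct drm_device *dev)
{
	return container_of(dev, struct foo_private, drm);
}

/* Legacy idiom (via, savage, r128, mga, gma500): the DRM core stores an
 * opaque pointer in dev->dev_private and the driver casts it back. */
static inline struct foo_private *foo_priv_legacy(struct drm_device *dev)
{
	return dev->dev_private;
}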

/drivers/gpu/drm/vmwgfx/
vmwgfx_drv.c
348 static int vmw_dummy_query_bo_create(struct vmw_private *dev_priv) in vmw_dummy_query_bo_create() argument
365 ret = vmw_bo_init(dev_priv, vbo, PAGE_SIZE, in vmw_dummy_query_bo_create()
390 dev_priv->dummy_query_bo = vbo; in vmw_dummy_query_bo_create()
405 static int vmw_request_device_late(struct vmw_private *dev_priv) in vmw_request_device_late() argument
409 if (dev_priv->has_mob) { in vmw_request_device_late()
410 ret = vmw_otables_setup(dev_priv); in vmw_request_device_late()
418 if (dev_priv->cman) { in vmw_request_device_late()
419 ret = vmw_cmdbuf_set_pool_size(dev_priv->cman, in vmw_request_device_late()
422 struct vmw_cmdbuf_man *man = dev_priv->cman; in vmw_request_device_late()
424 dev_priv->cman = NULL; in vmw_request_device_late()
[all …]
vmwgfx_irq.c
48 struct vmw_private *dev_priv = vmw_priv(dev); in vmw_thread_fn() local
52 dev_priv->irqthread_pending)) { in vmw_thread_fn()
53 vmw_fences_update(dev_priv->fman); in vmw_thread_fn()
54 wake_up_all(&dev_priv->fence_queue); in vmw_thread_fn()
59 dev_priv->irqthread_pending)) { in vmw_thread_fn()
60 vmw_cmdbuf_irqthread(dev_priv->cman); in vmw_thread_fn()
81 struct vmw_private *dev_priv = vmw_priv(dev); in vmw_irq_handler() local
85 status = inl(dev_priv->io_start + VMWGFX_IRQSTATUS_PORT); in vmw_irq_handler()
86 masked_status = status & READ_ONCE(dev_priv->irq_mask); in vmw_irq_handler()
89 outl(status, dev_priv->io_start + VMWGFX_IRQSTATUS_PORT); in vmw_irq_handler()
[all …]
vmwgfx_fifo.c
39 bool vmw_fifo_have_3d(struct vmw_private *dev_priv) in vmw_fifo_have_3d() argument
41 u32 *fifo_mem = dev_priv->mmio_virt; in vmw_fifo_have_3d()
43 const struct vmw_fifo_state *fifo = &dev_priv->fifo; in vmw_fifo_have_3d()
45 if (!(dev_priv->capabilities & SVGA_CAP_3D)) in vmw_fifo_have_3d()
48 if (dev_priv->capabilities & SVGA_CAP_GBOBJECTS) { in vmw_fifo_have_3d()
51 if (!dev_priv->has_mob) in vmw_fifo_have_3d()
54 spin_lock(&dev_priv->cap_lock); in vmw_fifo_have_3d()
55 vmw_write(dev_priv, SVGA_REG_DEV_CAP, SVGA3D_DEVCAP_3D); in vmw_fifo_have_3d()
56 result = vmw_read(dev_priv, SVGA_REG_DEV_CAP); in vmw_fifo_have_3d()
57 spin_unlock(&dev_priv->cap_lock); in vmw_fifo_have_3d()
[all …]
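The vmw_fifo_have_3d() excerpt reads a capability through a paired index/value register: SVGA_REG_DEV_CAP is first written with the capability index and then read back for the value, and cap_lock keeps the write/read pair from interleaving with another query. A hedged sketch of that pattern; demo_reg_write()/demo_reg_read(), DEMO_REG_DEV_CAP and struct demo_priv are placeholders, not the vmwgfx accessors.

#include <linux/spinlock.h>
#include <linux/types.h>

/* Stand-ins for the driver's register accessors (vmw_write()/vmw_read()
 * in the excerpt); declared here only so the sketch is self-contained. */
void demo_reg_write(u32 reg, u32 val);
u32 demo_reg_read(u32 reg);

#define DEMO_REG_DEV_CAP	0x10	/* hypothetical index/value register */

struct demo_priv {
	spinlock_t cap_lock;
};

/* The written index selects which capability the next read returns, so
 * the write/read pair must not interleave with another query. */
static u32 demo_query_cap(struct demo_priv *dev_priv, u32 cap_index)
{
	u32 result;

	spin_lock(&dev_priv->cap_lock);
	demo_reg_write(DEMO_REG_DEV_CAP, cap_index);
	result = demo_reg_read(DEMO_REG_DEV_CAP);
	spin_unlock(&dev_priv->cap_lock);

	return result;
}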
/drivers/gpu/drm/via/
via_dma.c
67 dev_priv->dma_low += 8; \
75 dev_priv->dma_low += 8; \
78 static void via_cmdbuf_start(drm_via_private_t *dev_priv);
79 static void via_cmdbuf_pause(drm_via_private_t *dev_priv);
80 static void via_cmdbuf_reset(drm_via_private_t *dev_priv);
81 static void via_cmdbuf_rewind(drm_via_private_t *dev_priv);
82 static int via_wait_idle(drm_via_private_t *dev_priv);
83 static void via_pad_cache(drm_via_private_t *dev_priv, int qwords);
89 static uint32_t via_cmdbuf_space(drm_via_private_t *dev_priv) in via_cmdbuf_space() argument
91 uint32_t agp_base = dev_priv->dma_offset + (uint32_t) dev_priv->agpAddr; in via_cmdbuf_space()
[all …]
via_irq.c
95 drm_via_private_t *dev_priv = dev->dev_private; in via_get_vblank_counter() local
100 return atomic_read(&dev_priv->vbl_received); in via_get_vblank_counter()
106 drm_via_private_t *dev_priv = (drm_via_private_t *) dev->dev_private; in via_driver_irq_handler() local
110 drm_via_irq_t *cur_irq = dev_priv->via_irqs; in via_driver_irq_handler()
113 status = via_read(dev_priv, VIA_REG_INTERRUPT); in via_driver_irq_handler()
115 atomic_inc(&dev_priv->vbl_received); in via_driver_irq_handler()
116 if (!(atomic_read(&dev_priv->vbl_received) & 0x0F)) { in via_driver_irq_handler()
118 if (dev_priv->last_vblank_valid) { in via_driver_irq_handler()
119 dev_priv->nsec_per_vblank = in via_driver_irq_handler()
121 dev_priv->last_vblank) >> 4; in via_driver_irq_handler()
[all …]
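In the via_irq.c excerpt the interrupt handler and the vblank-counter callback share only an atomic_t: the handler increments vbl_received, the callback reads it, and every sixteenth vblank (the & 0x0F test) the handler refreshes a nanoseconds-per-vblank estimate from a timestamp delta shifted right by four. A stripped-down sketch of the counter handshake; the demo_* names are illustrative, not the driver's.

#include <linux/atomic.h>
#include <linux/types.h>

struct demo_priv {
	atomic_t vbl_received;
};

/* Interrupt context: only increments the counter, no locking needed. */
static void demo_vblank_irq(struct demo_priv *dev_priv)
{
	atomic_inc(&dev_priv->vbl_received);
}

/* Process context (get_vblank_counter style): a plain atomic read. */
static u32 demo_get_vblank_counter(struct demo_priv *dev_priv)
{
	return atomic_read(&dev_priv->vbl_received);
}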
/drivers/gpu/drm/savage/
savage_bci.c
47 savage_bci_wait_fifo_shadow(drm_savage_private_t * dev_priv, unsigned int n) in savage_bci_wait_fifo_shadow() argument
49 uint32_t mask = dev_priv->status_used_mask; in savage_bci_wait_fifo_shadow()
50 uint32_t threshold = dev_priv->bci_threshold_hi; in savage_bci_wait_fifo_shadow()
55 if (n > dev_priv->cob_size + SAVAGE_BCI_FIFO_SIZE - threshold) in savage_bci_wait_fifo_shadow()
62 status = dev_priv->status_ptr[0]; in savage_bci_wait_fifo_shadow()
76 savage_bci_wait_fifo_s3d(drm_savage_private_t * dev_priv, unsigned int n) in savage_bci_wait_fifo_s3d() argument
78 uint32_t maxUsed = dev_priv->cob_size + SAVAGE_BCI_FIFO_SIZE - n; in savage_bci_wait_fifo_s3d()
97 savage_bci_wait_fifo_s4(drm_savage_private_t * dev_priv, unsigned int n) in savage_bci_wait_fifo_s4() argument
99 uint32_t maxUsed = dev_priv->cob_size + SAVAGE_BCI_FIFO_SIZE - n; in savage_bci_wait_fifo_s4()
129 savage_bci_wait_event_shadow(drm_savage_private_t * dev_priv, uint16_t e) in savage_bci_wait_event_shadow() argument
[all …]
/drivers/gpu/drm/i915/
i915_irq.c
264 i915_hotplug_interrupt_update_locked(struct drm_i915_private *dev_priv, in i915_hotplug_interrupt_update_locked() argument
270 lockdep_assert_held(&dev_priv->irq_lock); in i915_hotplug_interrupt_update_locked()
291 void i915_hotplug_interrupt_update(struct drm_i915_private *dev_priv, in i915_hotplug_interrupt_update() argument
295 spin_lock_irq(&dev_priv->irq_lock); in i915_hotplug_interrupt_update()
296 i915_hotplug_interrupt_update_locked(dev_priv, mask, bits); in i915_hotplug_interrupt_update()
297 spin_unlock_irq(&dev_priv->irq_lock); in i915_hotplug_interrupt_update()
306 void ilk_update_display_irq(struct drm_i915_private *dev_priv, in ilk_update_display_irq() argument
312 lockdep_assert_held(&dev_priv->irq_lock); in ilk_update_display_irq()
316 if (WARN_ON(!intel_irqs_enabled(dev_priv))) in ilk_update_display_irq()
319 new_val = dev_priv->irq_mask; in ilk_update_display_irq()
[all …]
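i915_irq.c pairs an unlocked entry point with a _locked worker: i915_hotplug_interrupt_update() takes irq_lock around a call to i915_hotplug_interrupt_update_locked(), and the worker opens with lockdep_assert_held() so call paths that already hold the lock are checked rather than deadlocked. A minimal sketch of that pairing with placeholder names; the mask/bits arithmetic here is illustrative, not the register update i915 performs.

#include <linux/lockdep.h>
#include <linux/spinlock.h>
#include <linux/types.h>

struct demo_priv {
	spinlock_t irq_lock;
	u32 irq_mask;
};

/* Worker: assumes the caller holds irq_lock and says so via lockdep. */
static void demo_irq_mask_update_locked(struct demo_priv *dev_priv,
					u32 mask, u32 bits)
{
	lockdep_assert_held(&dev_priv->irq_lock);
	dev_priv->irq_mask = (dev_priv->irq_mask & ~mask) | (bits & mask);
}

/* Public entry point: takes the lock, delegates, drops the lock. */
static void demo_irq_mask_update(struct demo_priv *dev_priv,
				 u32 mask, u32 bits)
{
	spin_lock_irq(&dev_priv->irq_lock);
	demo_irq_mask_update_locked(dev_priv, mask, bits);
	spin_unlock_irq(&dev_priv->irq_lock);
}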
i915_drv.c
145 static int i915_get_bridge_dev(struct drm_i915_private *dev_priv) in i915_get_bridge_dev() argument
147 int domain = pci_domain_nr(dev_priv->drm.pdev->bus); in i915_get_bridge_dev()
149 dev_priv->bridge_dev = in i915_get_bridge_dev()
151 if (!dev_priv->bridge_dev) { in i915_get_bridge_dev()
160 intel_alloc_mchbar_resource(struct drm_i915_private *dev_priv) in intel_alloc_mchbar_resource() argument
162 int reg = INTEL_GEN(dev_priv) >= 4 ? MCHBAR_I965 : MCHBAR_I915; in intel_alloc_mchbar_resource()
167 if (INTEL_GEN(dev_priv) >= 4) in intel_alloc_mchbar_resource()
168 pci_read_config_dword(dev_priv->bridge_dev, reg + 4, &temp_hi); in intel_alloc_mchbar_resource()
169 pci_read_config_dword(dev_priv->bridge_dev, reg, &temp_lo); in intel_alloc_mchbar_resource()
180 dev_priv->mch_res.name = "i915 MCHBAR"; in intel_alloc_mchbar_resource()
[all …]
intel_pch.c
11 intel_pch_type(const struct drm_i915_private *dev_priv, unsigned short id) in intel_pch_type() argument
16 WARN_ON(!IS_GEN(dev_priv, 5)); in intel_pch_type()
20 WARN_ON(!IS_GEN(dev_priv, 6) && !IS_IVYBRIDGE(dev_priv)); in intel_pch_type()
24 WARN_ON(!IS_GEN(dev_priv, 6) && !IS_IVYBRIDGE(dev_priv)); in intel_pch_type()
29 WARN_ON(!IS_HASWELL(dev_priv) && !IS_BROADWELL(dev_priv)); in intel_pch_type()
30 WARN_ON(IS_HSW_ULT(dev_priv) || IS_BDW_ULT(dev_priv)); in intel_pch_type()
34 WARN_ON(!IS_HASWELL(dev_priv) && !IS_BROADWELL(dev_priv)); in intel_pch_type()
35 WARN_ON(!IS_HSW_ULT(dev_priv) && !IS_BDW_ULT(dev_priv)); in intel_pch_type()
39 WARN_ON(!IS_HASWELL(dev_priv) && !IS_BROADWELL(dev_priv)); in intel_pch_type()
40 WARN_ON(IS_HSW_ULT(dev_priv) || IS_BDW_ULT(dev_priv)); in intel_pch_type()
[all …]
i915_suspend.c
36 static void i915_save_display(struct drm_i915_private *dev_priv) in i915_save_display() argument
39 if (INTEL_GEN(dev_priv) <= 4) in i915_save_display()
40 dev_priv->regfile.saveDSPARB = I915_READ(DSPARB); in i915_save_display()
43 if (HAS_FBC(dev_priv) && INTEL_GEN(dev_priv) <= 4 && !IS_G4X(dev_priv)) in i915_save_display()
44 dev_priv->regfile.saveFBC_CONTROL = I915_READ(FBC_CONTROL); in i915_save_display()
47 static void i915_restore_display(struct drm_i915_private *dev_priv) in i915_restore_display() argument
50 if (INTEL_GEN(dev_priv) <= 4) in i915_restore_display()
51 I915_WRITE(DSPARB, dev_priv->regfile.saveDSPARB); in i915_restore_display()
54 intel_fbc_global_disable(dev_priv); in i915_restore_display()
57 if (HAS_FBC(dev_priv) && INTEL_GEN(dev_priv) <= 4 && !IS_G4X(dev_priv)) in i915_restore_display()
[all …]
i915_drv.h
187 struct drm_i915_private *dev_priv; member
257 void (*get_cdclk)(struct drm_i915_private *dev_priv,
259 void (*set_cdclk)(struct drm_i915_private *dev_priv,
262 int (*get_fifo_size)(struct drm_i915_private *dev_priv,
296 void (*init_clock_gating)(struct drm_i915_private *dev_priv);
297 void (*hpd_irq_setup)(struct drm_i915_private *dev_priv);
514 struct drm_i915_private *dev_priv; member
1079 struct drm_i915_private *dev_priv; member
1219 bool (*is_valid_b_counter_reg)(struct drm_i915_private *dev_priv,
1226 bool (*is_valid_mux_reg)(struct drm_i915_private *dev_priv, u32 addr);
[all …]
intel_pm.c
68 static void gen9_init_clock_gating(struct drm_i915_private *dev_priv) in gen9_init_clock_gating() argument
70 if (HAS_LLC(dev_priv)) { in gen9_init_clock_gating()
101 if (IS_SKYLAKE(dev_priv)) { in gen9_init_clock_gating()
108 static void bxt_init_clock_gating(struct drm_i915_private *dev_priv) in bxt_init_clock_gating() argument
110 gen9_init_clock_gating(dev_priv); in bxt_init_clock_gating()
139 static void glk_init_clock_gating(struct drm_i915_private *dev_priv) in glk_init_clock_gating() argument
141 gen9_init_clock_gating(dev_priv); in glk_init_clock_gating()
152 if (IS_GLK_REVID(dev_priv, 0, GLK_REVID_A1)) { in glk_init_clock_gating()
162 static void i915_pineview_get_mem_freq(struct drm_i915_private *dev_priv) in i915_pineview_get_mem_freq() argument
170 dev_priv->fsb_freq = 533; /* 133*4 */ in i915_pineview_get_mem_freq()
[all …]
/drivers/gpu/drm/i915/display/
intel_cdclk.c
56 static void fixed_133mhz_get_cdclk(struct drm_i915_private *dev_priv, in fixed_133mhz_get_cdclk() argument
62 static void fixed_200mhz_get_cdclk(struct drm_i915_private *dev_priv, in fixed_200mhz_get_cdclk() argument
68 static void fixed_266mhz_get_cdclk(struct drm_i915_private *dev_priv, in fixed_266mhz_get_cdclk() argument
74 static void fixed_333mhz_get_cdclk(struct drm_i915_private *dev_priv, in fixed_333mhz_get_cdclk() argument
80 static void fixed_400mhz_get_cdclk(struct drm_i915_private *dev_priv, in fixed_400mhz_get_cdclk() argument
86 static void fixed_450mhz_get_cdclk(struct drm_i915_private *dev_priv, in fixed_450mhz_get_cdclk() argument
92 static void i85x_get_cdclk(struct drm_i915_private *dev_priv, in i85x_get_cdclk() argument
95 struct pci_dev *pdev = dev_priv->drm.pdev; in i85x_get_cdclk()
134 static void i915gm_get_cdclk(struct drm_i915_private *dev_priv, in i915gm_get_cdclk() argument
137 struct pci_dev *pdev = dev_priv->drm.pdev; in i915gm_get_cdclk()
[all …]
intel_fbc.c
48 static inline bool fbc_supported(struct drm_i915_private *dev_priv) in fbc_supported() argument
50 return HAS_FBC(dev_priv); in fbc_supported()
53 static inline bool no_fbc_on_multiple_pipes(struct drm_i915_private *dev_priv) in no_fbc_on_multiple_pipes() argument
55 return INTEL_GEN(dev_priv) <= 3; in no_fbc_on_multiple_pipes()
85 static int intel_fbc_calculate_cfb_size(struct drm_i915_private *dev_priv, in intel_fbc_calculate_cfb_size() argument
91 if (IS_GEN(dev_priv, 7)) in intel_fbc_calculate_cfb_size()
93 else if (INTEL_GEN(dev_priv) >= 8) in intel_fbc_calculate_cfb_size()
100 static void i8xx_fbc_deactivate(struct drm_i915_private *dev_priv) in i8xx_fbc_deactivate() argument
113 if (intel_de_wait_for_clear(dev_priv, FBC_STATUS, in i8xx_fbc_deactivate()
120 static void i8xx_fbc_activate(struct drm_i915_private *dev_priv) in i8xx_fbc_activate() argument
[all …]
intel_psr.c
75 static bool intel_psr2_enabled(struct drm_i915_private *dev_priv, in intel_psr2_enabled() argument
82 switch (dev_priv->psr.debug & I915_PSR_DEBUG_MODE_MASK) { in intel_psr2_enabled()
108 void intel_psr_irq_control(struct drm_i915_private *dev_priv, u32 debug) in intel_psr_irq_control() argument
114 if (INTEL_GEN(dev_priv) >= 8) in intel_psr_irq_control()
121 for_each_cpu_transcoder_masked(dev_priv, cpu_transcoder, transcoders) { in intel_psr_irq_control()
172 void intel_psr_irq_handler(struct drm_i915_private *dev_priv, u32 psr_iir) in intel_psr_irq_handler() argument
179 if (INTEL_GEN(dev_priv) >= 8) in intel_psr_irq_handler()
184 for_each_cpu_transcoder_masked(dev_priv, cpu_transcoder, transcoders) { in intel_psr_irq_handler()
191 dev_priv->psr.irq_aux_error = true; in intel_psr_irq_handler()
205 dev_priv->psr.last_entry_attempt = time_ns; in intel_psr_irq_handler()
[all …]
intel_fifo_underrun.c
56 struct drm_i915_private *dev_priv = to_i915(dev); in ivb_can_enable_err_int() local
60 lockdep_assert_held(&dev_priv->irq_lock); in ivb_can_enable_err_int()
62 for_each_pipe(dev_priv, pipe) { in ivb_can_enable_err_int()
63 crtc = intel_get_crtc_for_pipe(dev_priv, pipe); in ivb_can_enable_err_int()
74 struct drm_i915_private *dev_priv = to_i915(dev); in cpt_can_enable_serr_int() local
78 lockdep_assert_held(&dev_priv->irq_lock); in cpt_can_enable_serr_int()
80 for_each_pipe(dev_priv, pipe) { in cpt_can_enable_serr_int()
81 crtc = intel_get_crtc_for_pipe(dev_priv, pipe); in cpt_can_enable_serr_int()
92 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev); in i9xx_check_fifo_underruns() local
96 lockdep_assert_held(&dev_priv->irq_lock); in i9xx_check_fifo_underruns()
[all …]
intel_hotplug.c
89 enum hpd_pin intel_hpd_pin_default(struct drm_i915_private *dev_priv, in intel_hpd_pin_default() argument
104 if (IS_CNL_WITH_PORT_F(dev_priv)) in intel_hpd_pin_default()
152 static bool intel_hpd_irq_storm_detect(struct drm_i915_private *dev_priv, in intel_hpd_irq_storm_detect() argument
155 struct i915_hotplug *hpd = &dev_priv->hotplug; in intel_hpd_irq_storm_detect()
163 (!long_hpd && !dev_priv->hotplug.hpd_short_storm_enabled)) in intel_hpd_irq_storm_detect()
185 intel_hpd_irq_storm_switch_to_polling(struct drm_i915_private *dev_priv) in intel_hpd_irq_storm_switch_to_polling() argument
187 struct drm_device *dev = &dev_priv->drm; in intel_hpd_irq_storm_switch_to_polling()
195 lockdep_assert_held(&dev_priv->irq_lock); in intel_hpd_irq_storm_switch_to_polling()
209 dev_priv->hotplug.stats[pin].state != HPD_MARK_DISABLED) in intel_hpd_irq_storm_switch_to_polling()
216 dev_priv->hotplug.stats[pin].state = HPD_DISABLED; in intel_hpd_irq_storm_switch_to_polling()
[all …]
intel_bios.c
207 parse_lfp_panel_data(struct drm_i915_private *dev_priv, in parse_lfp_panel_data() argument
224 dev_priv->vbt.lvds_dither = lvds_options->pixel_dither; in parse_lfp_panel_data()
226 ret = intel_opregion_get_panel_type(dev_priv); in parse_lfp_panel_data()
241 dev_priv->vbt.panel_type = panel_type; in parse_lfp_panel_data()
252 dev_priv->vbt.drrs_type = STATIC_DRRS_SUPPORT; in parse_lfp_panel_data()
256 dev_priv->vbt.drrs_type = SEAMLESS_DRRS_SUPPORT; in parse_lfp_panel_data()
260 dev_priv->vbt.drrs_type = DRRS_NOT_SUPPORTED; in parse_lfp_panel_data()
283 dev_priv->vbt.lfp_lvds_vbt_mode = panel_fixed_mode; in parse_lfp_panel_data()
295 dev_priv->vbt.bios_lvds_val = fp_timing->lvds_reg_val; in parse_lfp_panel_data()
297 dev_priv->vbt.bios_lvds_val); in parse_lfp_panel_data()
[all …]
intel_gmbus.c
95 static const struct gmbus_pin *get_gmbus_pin(struct drm_i915_private *dev_priv, in get_gmbus_pin() argument
98 if (INTEL_PCH_TYPE(dev_priv) >= PCH_ICP) in get_gmbus_pin()
100 else if (HAS_PCH_CNP(dev_priv)) in get_gmbus_pin()
102 else if (IS_GEN9_LP(dev_priv)) in get_gmbus_pin()
104 else if (IS_GEN9_BC(dev_priv)) in get_gmbus_pin()
106 else if (IS_BROADWELL(dev_priv)) in get_gmbus_pin()
112 bool intel_gmbus_is_valid_pin(struct drm_i915_private *dev_priv, in intel_gmbus_is_valid_pin() argument
117 if (INTEL_PCH_TYPE(dev_priv) >= PCH_ICP) in intel_gmbus_is_valid_pin()
119 else if (HAS_PCH_CNP(dev_priv)) in intel_gmbus_is_valid_pin()
121 else if (IS_GEN9_LP(dev_priv)) in intel_gmbus_is_valid_pin()
[all …]
/drivers/gpu/drm/r128/
r128_cce.c
57 drm_r128_private_t *dev_priv = dev->dev_private; in R128_READ_PLL() local
64 static void r128_status(drm_r128_private_t *dev_priv) in r128_status() argument
85 static int r128_do_pixcache_flush(drm_r128_private_t *dev_priv) in r128_do_pixcache_flush() argument
93 for (i = 0; i < dev_priv->usec_timeout; i++) { in r128_do_pixcache_flush()
105 static int r128_do_wait_for_fifo(drm_r128_private_t *dev_priv, int entries) in r128_do_wait_for_fifo() argument
109 for (i = 0; i < dev_priv->usec_timeout; i++) { in r128_do_wait_for_fifo()
122 static int r128_do_wait_for_idle(drm_r128_private_t *dev_priv) in r128_do_wait_for_idle() argument
126 ret = r128_do_wait_for_fifo(dev_priv, 64); in r128_do_wait_for_idle()
130 for (i = 0; i < dev_priv->usec_timeout; i++) { in r128_do_wait_for_idle()
132 r128_do_pixcache_flush(dev_priv); in r128_do_wait_for_idle()
[all …]
/drivers/gpu/drm/mga/
mga_dma.c
53 int mga_do_wait_for_idle(drm_mga_private_t *dev_priv) in mga_do_wait_for_idle() argument
59 for (i = 0; i < dev_priv->usec_timeout; i++) { in mga_do_wait_for_idle()
75 static int mga_do_dma_reset(drm_mga_private_t *dev_priv) in mga_do_dma_reset() argument
77 drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv; in mga_do_dma_reset()
78 drm_mga_primary_buffer_t *primary = &dev_priv->prim; in mga_do_dma_reset()
103 void mga_do_dma_flush(drm_mga_private_t *dev_priv) in mga_do_dma_flush() argument
105 drm_mga_primary_buffer_t *primary = &dev_priv->prim; in mga_do_dma_flush()
113 for (i = 0; i < dev_priv->usec_timeout; i++) { in mga_do_dma_flush()
125 tail = primary->tail + dev_priv->primary->offset; in mga_do_dma_flush()
148 DRM_DEBUG(" head = 0x%06lx\n", (unsigned long)(head - dev_priv->primary->offset)); in mga_do_dma_flush()
[all …]
/drivers/gpu/drm/gma500/
psb_drv.c
109 struct drm_psb_private *dev_priv = dev->dev_private; in psb_do_init() local
110 struct psb_gtt *pg = &dev_priv->gtt; in psb_do_init()
123 dev_priv->gatt_free_offset = pg->mmu_gatt_start + in psb_do_init()
126 spin_lock_init(&dev_priv->irqmask_lock); in psb_do_init()
127 spin_lock_init(&dev_priv->lock_2d); in psb_do_init()
138 psb_spank(dev_priv); in psb_do_init()
149 struct drm_psb_private *dev_priv = dev->dev_private; in psb_driver_unload() local
153 if (dev_priv) { in psb_driver_unload()
154 if (dev_priv->backlight_device) in psb_driver_unload()
158 if (dev_priv->ops->chip_teardown) in psb_driver_unload()
[all …]
psb_irq.c
73 psb_enable_pipestat(struct drm_psb_private *dev_priv, int pipe, u32 mask) in psb_enable_pipestat() argument
75 if ((dev_priv->pipestat[pipe] & mask) != mask) { in psb_enable_pipestat()
77 dev_priv->pipestat[pipe] |= mask; in psb_enable_pipestat()
79 if (gma_power_begin(dev_priv->dev, false)) { in psb_enable_pipestat()
84 gma_power_end(dev_priv->dev); in psb_enable_pipestat()
90 psb_disable_pipestat(struct drm_psb_private *dev_priv, int pipe, u32 mask) in psb_disable_pipestat() argument
92 if ((dev_priv->pipestat[pipe] & mask) != 0) { in psb_disable_pipestat()
94 dev_priv->pipestat[pipe] &= ~mask; in psb_disable_pipestat()
95 if (gma_power_begin(dev_priv->dev, false)) { in psb_disable_pipestat()
100 gma_power_end(dev_priv->dev); in psb_disable_pipestat()
[all …]
intel_bios.c
45 parse_edp(struct drm_psb_private *dev_priv, struct bdb_header *bdb) in parse_edp() argument
54 dev_priv->edp.bpp = 18; in parse_edp()
56 if (dev_priv->edp.support) { in parse_edp()
58 dev_priv->edp.bpp); in parse_edp()
63 panel_type = dev_priv->panel_type; in parse_edp()
66 dev_priv->edp.bpp = 18; in parse_edp()
69 dev_priv->edp.bpp = 24; in parse_edp()
72 dev_priv->edp.bpp = 30; in parse_edp()
80 dev_priv->edp.pps = *edp_pps; in parse_edp()
83 dev_priv->edp.pps.t1_t3, dev_priv->edp.pps.t8, in parse_edp()
[all …]
/drivers/gpu/drm/i915/gem/
i915_gem_stolen.c
28 int i915_gem_stolen_insert_node_in_range(struct drm_i915_private *dev_priv, in i915_gem_stolen_insert_node_in_range() argument
34 if (!drm_mm_initialized(&dev_priv->mm.stolen)) in i915_gem_stolen_insert_node_in_range()
38 if (INTEL_GEN(dev_priv) >= 8 && start < 4096) in i915_gem_stolen_insert_node_in_range()
41 mutex_lock(&dev_priv->mm.stolen_lock); in i915_gem_stolen_insert_node_in_range()
42 ret = drm_mm_insert_node_in_range(&dev_priv->mm.stolen, node, in i915_gem_stolen_insert_node_in_range()
45 mutex_unlock(&dev_priv->mm.stolen_lock); in i915_gem_stolen_insert_node_in_range()
50 int i915_gem_stolen_insert_node(struct drm_i915_private *dev_priv, in i915_gem_stolen_insert_node() argument
54 return i915_gem_stolen_insert_node_in_range(dev_priv, node, size, in i915_gem_stolen_insert_node()
58 void i915_gem_stolen_remove_node(struct drm_i915_private *dev_priv, in i915_gem_stolen_remove_node() argument
61 mutex_lock(&dev_priv->mm.stolen_lock); in i915_gem_stolen_remove_node()
[all …]
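i915_gem_stolen.c shows the usual wrapping of a drm_mm range manager: drm_mm has no internal locking, so every insert (and, symmetrically, every remove) goes through the driver's stolen_lock mutex, with a drm_mm_initialized() check up front. A hedged sketch of the same shape; the demo_* names, the error code and the insert-mode choice are assumptions, not copied from i915.

#include <linux/errno.h>
#include <linux/kernel.h>
#include <linux/mutex.h>
#include <drm/drm_mm.h>

struct demo_priv {
	struct mutex stolen_lock;
	struct drm_mm stolen;
};

static int demo_stolen_insert(struct demo_priv *dev_priv,
			      struct drm_mm_node *node,
			      u64 size, u64 alignment)
{
	int ret;

	if (!drm_mm_initialized(&dev_priv->stolen))
		return -ENODEV;

	/* drm_mm does no locking of its own; all insert/remove calls
	 * must be serialized by the same driver-owned mutex. */
	mutex_lock(&dev_priv->stolen_lock);
	ret = drm_mm_insert_node_in_range(&dev_priv->stolen, node,
					  size, alignment, 0,
					  0, U64_MAX, DRM_MM_INSERT_BEST);
	mutex_unlock(&dev_priv->stolen_lock);

	return ret;
}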
