
Searched refs: intel_get_drm_devid (Results 1 – 25 of 152), sorted by relevance


/external/igt-gpu-tools/tests/i915/
gem_exec_blt.c:69 if (intel_gen(intel_get_drm_devid(fd)) >= 8) in gem_linear_blt()
82 if (intel_gen(intel_get_drm_devid(fd)) >= 8) in gem_linear_blt()
89 if (intel_gen(intel_get_drm_devid(fd)) >= 8) in gem_linear_blt()
97 if (intel_gen(intel_get_drm_devid(fd)) >= 8) in gem_linear_blt()
107 if (intel_gen(intel_get_drm_devid(fd)) >= 8) in gem_linear_blt()
120 if (intel_gen(intel_get_drm_devid(fd)) >= 8) in gem_linear_blt()
127 if (intel_gen(intel_get_drm_devid(fd)) >= 8) in gem_linear_blt()
135 if (intel_gen(intel_get_drm_devid(fd)) >= 8) in gem_linear_blt()
211 if (intel_gen(intel_get_drm_devid(fd)) >= 8) in run()
218 if (HAS_BLT_RING(intel_get_drm_devid(fd))) in run()
gem_request_retire.c:83 if (intel_gen(intel_get_drm_devid(fd)) >= 8) in blit()
94 if (intel_gen(intel_get_drm_devid(fd)) >= 8) in blit()
105 if (intel_gen(intel_get_drm_devid(fd)) >= 8) in blit()
110 if (intel_gen(intel_get_drm_devid(fd)) >= 8) in blit()
192 igt_require(HAS_BLT_RING(intel_get_drm_devid(fd))); in test_retire_vma_not_inactive()
gem_evict_everything.c:68 if (intel_gen(intel_get_drm_devid(fd)) >= 8) in copy()
76 if (intel_gen(intel_get_drm_devid(fd)) >= 8) in copy()
81 if (intel_gen(intel_get_drm_devid(fd)) >= 8) in copy()
99 if (intel_gen(intel_get_drm_devid(fd)) >= 8) in copy()
115 if (HAS_BLT_RING(intel_get_drm_devid(fd))) in copy()
gem_linear_blits.c:69 if (intel_gen(intel_get_drm_devid(fd)) >= 8) in copy()
80 if (intel_gen(intel_get_drm_devid(fd)) >= 8) in copy()
85 if (intel_gen(intel_get_drm_devid(fd)) >= 8) in copy()
101 if (intel_gen(intel_get_drm_devid(fd)) >= 8) in copy()
gem_evict_alignment.c:69 if (intel_gen(intel_get_drm_devid(fd)) >= 8) in copy()
77 if (intel_gen(intel_get_drm_devid(fd)) >= 8) in copy()
82 if (intel_gen(intel_get_drm_devid(fd)) >= 8) in copy()
120 exec.flags = HAS_BLT_RING(intel_get_drm_devid(fd)) ? I915_EXEC_BLT : 0; in copy()
gem_write_read_ring_switch.c:159 igt_require(intel_gen(intel_get_drm_devid(fd)) >= 6);
166 batch = intel_batchbuffer_alloc(bufmgr, intel_get_drm_devid(fd));
gem_exec_store.c:42 const int gen = intel_gen(intel_get_drm_devid(fd)); in store_dword()
102 const int gen = intel_gen(intel_get_drm_devid(fd)); in store_cachelines()
180 const int gen = intel_gen(intel_get_drm_devid(fd)); in store_all()
294 uint16_t devid = intel_get_drm_devid(fd); in print_welcome()
gem_ctx_isolation.c:229 const unsigned int gen_bit = 1 << intel_gen(intel_get_drm_devid(fd)); in tmpl_regs()
264 const unsigned int gen = intel_gen(intel_get_drm_devid(fd)); in read_regs()
338 const unsigned int gen_bit = 1 << intel_gen(intel_get_drm_devid(fd)); in write_regs()
390 const unsigned int gen = intel_gen(intel_get_drm_devid(fd)); in restore_regs()
459 const int gen = intel_gen(intel_get_drm_devid(fd)); in dump_regs()
562 igt_skip_on(intel_gen(intel_get_drm_devid(fd)) < 8); in nonpriv()
823 gen = intel_gen(intel_get_drm_devid(fd));
gem_bad_batch.c:63 batch = intel_batchbuffer_alloc(bufmgr, intel_get_drm_devid(fd));
gem_bad_address.c:67 batch = intel_batchbuffer_alloc(bufmgr, intel_get_drm_devid(fd));
gem_mocs_settings.c:171 return intel_gen(intel_get_drm_devid(fd)) >= 12; in has_global_mocs()
176 uint32_t devid = intel_get_drm_devid(fd); in get_mocs_settings()
478 if (intel_gen(intel_get_drm_devid(fd)) >= 11) in write_dirty_mocs()
504 igt_skip_on(intel_gen(intel_get_drm_devid(fd)) >= 11); in run_test()
gem_exec_async.c:34 const int gen = intel_gen(intel_get_drm_devid(fd)); in store_dword()
85 const int gen = intel_gen(intel_get_drm_devid(fd)); in one()
gem_bad_reloc.c:64 igt_require(intel_gen(intel_get_drm_devid(fd)) >= 7); in negative_reloc()
115 const int gen = intel_gen(intel_get_drm_devid(fd)); in negative_reloc_blt()
gem_unref_active_buffers.c:67 batch = intel_batchbuffer_alloc(bufmgr, intel_get_drm_devid(fd));
gem_non_secure_batch.c:82 devid = intel_get_drm_devid(fd);
gem_hang.c:90 batch = intel_batchbuffer_alloc(bufmgr, intel_get_drm_devid(fd));
gem_tiled_fence_blits.c:90 const int gen = intel_gen(intel_get_drm_devid(fd)); in create_batch()
155 if (intel_gen(intel_get_drm_devid(fd)) >= 6) in run_test()
gem_render_linear_blits.c:85 render_copy = igt_get_render_copyfunc(intel_get_drm_devid(fd)); in run_test()
89 batch = intel_batchbuffer_alloc(bufmgr, intel_get_drm_devid(fd)); in run_test()
gem_bad_blit.c:104 batch = intel_batchbuffer_alloc(bufmgr, intel_get_drm_devid(fd));
gem_double_irq_loop.c:100 devid = intel_get_drm_devid(fd);
gem_ringfill.c:101 const int gen = intel_gen(intel_get_drm_devid(fd)); in setup_execbuf()
269 gen = intel_gen(intel_get_drm_devid(fd));
/external/igt-gpu-tools/lib/
igt_gt.c:69 once = intel_gen(intel_get_drm_devid(fd)) >= 5; in has_gpu_reset()
412 gen = intel_gen(intel_get_drm_devid(fd)); in igt_fork_hang_helper()
562 uint16_t devid = intel_get_drm_devid(fd); in gem_class_can_store_dword()
igt_draw.c:345 igt_require(intel_gen(intel_get_drm_devid(fd)) >= 5); in draw_rect_mmap_cpu()
396 igt_require(intel_gen(intel_get_drm_devid(fd)) >= 5); in draw_rect_mmap_wc()
447 igt_require(intel_gen(intel_get_drm_devid(fd)) >= 5); in draw_rect_pwrite_tiled()
523 uint32_t devid = intel_get_drm_devid(fd); in draw_rect_blt()
578 uint32_t devid = intel_get_drm_devid(fd); in draw_rect_render()
intel_chipset.h:80 static inline uint32_t intel_get_drm_devid(int __attribute__((unused)) fd) { return 0U; } in intel_get_drm_devid() function
83 uint32_t intel_get_drm_devid(int fd);
/external/igt-gpu-tools/tests/
kms_panel_fitting.c:92 uint32_t devid = intel_get_drm_devid(display->drm_fd); in test_panel_fitting()
223 igt_require(intel_gen(intel_get_drm_devid(display->drm_fd)) >= 5); in test_atomic_fastset()
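The pattern that recurs throughout these results is to read the PCI device id of the open DRM fd with intel_get_drm_devid() and then branch on intel_gen() or a capability macro such as HAS_BLT_RING(). A minimal sketch of that pattern follows, assuming the igt-gpu-tools headers are on the include path; error handling is omitted, and the helper names uses_64bit_relocs() and require_blt() are hypothetical, not functions from the tree.

#include <stdint.h>
#include <stdbool.h>

#include "igt.h"            /* pulls in intel_chipset.h: intel_get_drm_devid(),
                             * intel_gen(), HAS_BLT_RING(), igt_require() */

/* Hypothetical helper: gen8+ relocations carry wider addresses, which is
 * why the blit helpers above emit an extra address dword when gen >= 8. */
static bool uses_64bit_relocs(int fd)
{
	uint32_t devid = intel_get_drm_devid(fd);

	return intel_gen(devid) >= 8;
}

/* Hypothetical helper: skip the test when the hardware has no separate
 * blitter ring, as gem_request_retire.c does at line 192 above. */
static void require_blt(int fd)
{
	igt_require(HAS_BLT_RING(intel_get_drm_devid(fd)));
}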
