/*
 * This header was generated from the Linux kernel headers by update_headers.py,
 * to provide necessary information from kernel to userspace, such as constants,
 * structures, and macros, and thus, contains no copyrightable information.
 */
#ifndef KFD_IOCTL_H_INCLUDED
#define KFD_IOCTL_H_INCLUDED

#include <drm/drm.h>
#include <linux/ioctl.h>

#define KFD_IOCTL_MAJOR_VERSION 1
#define KFD_IOCTL_MINOR_VERSION 1
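
/*
 * Interface version reported by AMDKFD_IOC_GET_VERSION; both fields are
 * filled in by the driver.
 *
 * Illustrative usage sketch (not part of the header; needs <fcntl.h>,
 * <stdio.h>, <sys/ioctl.h>): query the version through the /dev/kfd
 * device node exposed by amdkfd.
 *
 *	int fd = open("/dev/kfd", O_RDWR | O_CLOEXEC);
 *	struct kfd_ioctl_get_version_args args = {0};
 *
 *	if (fd >= 0 && ioctl(fd, AMDKFD_IOC_GET_VERSION, &args) == 0)
 *		printf("KFD ioctl interface %u.%u\n",
 *		       args.major_version, args.minor_version);
 */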
struct kfd_ioctl_get_version_args {
	__u32 major_version;
	__u32 minor_version;
};
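
/*
 * Queue types accepted by AMDKFD_IOC_CREATE_QUEUE.
 * KFD_IOC_QUEUE_TYPE_COMPUTE_AQL creates an HSA AQL
 * (Architected Queuing Language) queue.
 */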
#define KFD_IOC_QUEUE_TYPE_COMPUTE	0
#define KFD_IOC_QUEUE_TYPE_SDMA		1
#define KFD_IOC_QUEUE_TYPE_COMPUTE_AQL	2

#define KFD_MAX_QUEUE_PERCENTAGE	100
#define KFD_MAX_QUEUE_PRIORITY		15
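
/*
 * Arguments for AMDKFD_IOC_CREATE_QUEUE. Userspace supplies the ring buffer
 * and, where applicable, the EOP and context save/restore buffers; the
 * driver returns queue_id, the read/write pointer addresses and a
 * doorbell_offset that can be mmap()ed on /dev/kfd to ring the queue's
 * doorbell.
 */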
struct kfd_ioctl_create_queue_args {
	__u64 ring_base_address;
	__u64 write_pointer_address;
	__u64 read_pointer_address;
	__u64 doorbell_offset;
	__u32 ring_size;
	__u32 gpu_id;
	__u32 queue_type;
	__u32 queue_percentage;
	__u32 queue_priority;
	__u32 queue_id;
	__u64 eop_buffer_address;
	__u64 eop_buffer_size;
	__u64 ctx_save_restore_address;
	__u32 ctx_save_restore_size;
	__u32 ctl_stack_size;
};

struct kfd_ioctl_destroy_queue_args {
	__u32 queue_id;
	__u32 pad;
};

struct kfd_ioctl_update_queue_args {
	__u64 ring_base_address;
	__u32 queue_id;
	__u32 ring_size;
	__u32 queue_percentage;
	__u32 queue_priority;
};
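
/*
 * Restricts a queue to a subset of the GPU's compute units: cu_mask_ptr
 * points to a bitmask of num_cu_mask bits, one bit per CU.
 */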
struct kfd_ioctl_set_cu_mask_args {
	__u32 queue_id;
	__u32 num_cu_mask;
	__u64 cu_mask_ptr;
};
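
/*
 * Cache coherency policy for the default aperture and for an optional
 * alternate aperture, set per GPU with AMDKFD_IOC_SET_MEMORY_POLICY.
 */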
#define KFD_IOC_CACHE_POLICY_COHERENT 0
#define KFD_IOC_CACHE_POLICY_NONCOHERENT 1

struct kfd_ioctl_set_memory_policy_args {
	__u64 alternate_aperture_base;
	__u64 alternate_aperture_size;
	__u32 gpu_id;
	__u32 default_policy;
	__u32 alternate_policy;
	__u32 pad;
};
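
/*
 * Correlated GPU, CPU and system clock counter snapshot for a given GPU,
 * used to translate between GPU timestamps and system time.
 */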
struct kfd_ioctl_get_clock_counters_args {
	__u64 gpu_clock_counter;
	__u64 cpu_clock_counter;
	__u64 system_clock_counter;
	__u64 system_clock_freq;
	__u32 gpu_id;
	__u32 pad;
};
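
/*
 * Per-GPU virtual address apertures (LDS, scratch and GPUVM) of the calling
 * process. The legacy ioctl returns at most NUM_OF_SUPPORTED_GPUS entries
 * inline; the _new variant writes into a user-supplied array instead.
 */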
struct kfd_process_device_apertures {
	__u64 lds_base;
	__u64 lds_limit;
	__u64 scratch_base;
	__u64 scratch_limit;
	__u64 gpuvm_base;
	__u64 gpuvm_limit;
	__u32 gpu_id;
	__u32 pad;
};

#define NUM_OF_SUPPORTED_GPUS 7

struct kfd_ioctl_get_process_apertures_args {
	struct kfd_process_device_apertures
			process_apertures[NUM_OF_SUPPORTED_GPUS];
	__u32 num_of_nodes;
	__u32 pad;
};

struct kfd_ioctl_get_process_apertures_new_args {
	__u64 kfd_process_device_apertures_ptr;
	__u32 num_of_nodes;
	__u32 pad;
};
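
/*
 * Debugger interface: register/unregister a debugger on a GPU, program
 * address watchpoints and issue wave control commands. The MAX_ALLOWED_*
 * limits bound the variable-size buffers passed through content_ptr.
 */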
#define MAX_ALLOWED_NUM_POINTS    100
#define MAX_ALLOWED_AW_BUFF_SIZE 4096
#define MAX_ALLOWED_WAC_BUFF_SIZE  128

struct kfd_ioctl_dbg_register_args {
	__u32 gpu_id;
	__u32 pad;
};

struct kfd_ioctl_dbg_unregister_args {
	__u32 gpu_id;
	__u32 pad;
};

struct kfd_ioctl_dbg_address_watch_args {
	__u64 content_ptr;
	__u32 gpu_id;
	__u32 buf_size_in_bytes;
};

struct kfd_ioctl_dbg_wave_control_args {
	__u64 content_ptr;
	__u32 gpu_id;
	__u32 buf_size_in_bytes;
};
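
/*
 * Event interface: event types, results returned by AMDKFD_IOC_WAIT_EVENTS,
 * the per-process limit on signal events, and the reset type/cause codes
 * reported with HW exception events.
 */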
#define KFD_IOC_EVENT_SIGNAL			0
#define KFD_IOC_EVENT_NODECHANGE		1
#define KFD_IOC_EVENT_DEVICESTATECHANGE		2
#define KFD_IOC_EVENT_HW_EXCEPTION		3
#define KFD_IOC_EVENT_SYSTEM_EVENT		4
#define KFD_IOC_EVENT_DEBUG_EVENT		5
#define KFD_IOC_EVENT_PROFILE_EVENT		6
#define KFD_IOC_EVENT_QUEUE_EVENT		7
#define KFD_IOC_EVENT_MEMORY			8

#define KFD_IOC_WAIT_RESULT_COMPLETE		0
#define KFD_IOC_WAIT_RESULT_TIMEOUT		1
#define KFD_IOC_WAIT_RESULT_FAIL		2

#define KFD_SIGNAL_EVENT_LIMIT			4096

#define KFD_HW_EXCEPTION_WHOLE_GPU_RESET	0
#define KFD_HW_EXCEPTION_PER_ENGINE_RESET	1
#define KFD_HW_EXCEPTION_GPU_HANG	0
#define KFD_HW_EXCEPTION_ECC		1
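
/*
 * Arguments for AMDKFD_IOC_CREATE_EVENT. For signal events the driver
 * returns event_slot_index and an event_page_offset that userspace can
 * mmap() to reach the signal slot page.
 */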
struct kfd_ioctl_create_event_args {
	__u64 event_page_offset;
	__u32 event_trigger_data;
	__u32 event_type;
	__u32 auto_reset;
	__u32 node_id;
	__u32 event_id;
	__u32 event_slot_index;
};

struct kfd_ioctl_destroy_event_args {
	__u32 event_id;
	__u32 pad;
};

struct kfd_ioctl_set_event_args {
	__u32 event_id;
	__u32 pad;
};

struct kfd_ioctl_reset_event_args {
	__u32 event_id;
	__u32 pad;
};
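
/*
 * Per-event payload delivered through kfd_event_data when waiting on
 * events: details of a GPU memory fault or of a hardware exception.
 */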
struct kfd_memory_exception_failure {
	__u32 NotPresent;
	__u32 ReadOnly;
	__u32 NoExecute;
	__u32 imprecise;
};

struct kfd_hsa_memory_exception_data {
	struct kfd_memory_exception_failure failure;
	__u64 va;
	__u32 gpu_id;
	__u32 pad;
};

struct kfd_hsa_hw_exception_data {
	__u32 reset_type;
	__u32 reset_cause;
	__u32 memory_lost;
	__u32 gpu_id;
};

struct kfd_event_data {
	union {
		struct kfd_hsa_memory_exception_data memory_exception_data;
		struct kfd_hsa_hw_exception_data hw_exception_data;
	};
	__u64 kfd_event_data_ext;
	__u32 event_id;
	__u32 pad;
};

struct kfd_ioctl_wait_events_args {
	__u64 events_ptr;
	__u32 num_events;
	__u32 wait_for_all;
	__u32 timeout;
	__u32 wait_result;
};
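
/* Virtual address used to back scratch (private) memory on the given GPU. */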
struct kfd_ioctl_set_scratch_backing_va_args {
	__u64 va_addr;
	__u32 gpu_id;
	__u32 pad;
};
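
/*
 * Returns the GPU's tiling and macro-tiling configuration registers plus
 * gb_addr_config; userspace provides the two output arrays and their sizes.
 */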
struct kfd_ioctl_get_tile_config_args {
	__u64 tile_config_ptr;
	__u64 macro_tile_config_ptr;
	__u32 num_tile_configs;
	__u32 num_macro_tile_configs;
	__u32 gpu_id;
	__u32 gb_addr_config;
	__u32 num_banks;
	__u32 num_ranks;
};
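
/*
 * Installs the shader trap handler: tba_addr is the trap handler base
 * address, tma_addr the trap memory address.
 */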
struct kfd_ioctl_set_trap_handler_args {
	__u64 tba_addr;
	__u64 tma_addr;
	__u32 gpu_id;
	__u32 pad;
};
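
/*
 * Hands the GPU VM of an open DRM render node (drm_fd) over to KFD so the
 * two drivers share a single address space for the process on that GPU.
 */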
struct kfd_ioctl_acquire_vm_args {
	__u32 drm_fd;
	__u32 gpu_id;
};
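
/*
 * Allocation flags for AMDKFD_IOC_ALLOC_MEMORY_OF_GPU: the low bits select
 * the memory type (VRAM, GTT, userptr, doorbell), the high bits are
 * attribute flags.
 */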
#define KFD_IOC_ALLOC_MEM_FLAGS_VRAM		(1 << 0)
#define KFD_IOC_ALLOC_MEM_FLAGS_GTT		(1 << 1)
#define KFD_IOC_ALLOC_MEM_FLAGS_USERPTR		(1 << 2)
#define KFD_IOC_ALLOC_MEM_FLAGS_DOORBELL	(1 << 3)
#define KFD_IOC_ALLOC_MEM_FLAGS_WRITABLE	(1 << 31)
#define KFD_IOC_ALLOC_MEM_FLAGS_EXECUTABLE	(1 << 30)
#define KFD_IOC_ALLOC_MEM_FLAGS_PUBLIC		(1 << 29)
#define KFD_IOC_ALLOC_MEM_FLAGS_NO_SUBSTITUTE	(1 << 28)
#define KFD_IOC_ALLOC_MEM_FLAGS_AQL_QUEUE_MEM	(1 << 27)
#define KFD_IOC_ALLOC_MEM_FLAGS_COHERENT	(1 << 26)

struct kfd_ioctl_alloc_memory_of_gpu_args {
	__u64 va_addr;
	__u64 size;
	__u64 handle;
	__u64 mmap_offset;
	__u32 gpu_id;
	__u32 flags;
};

struct kfd_ioctl_free_memory_of_gpu_args {
	__u64 handle;
};
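
/*
 * Map or unmap an allocation (identified by handle) on the GPUs listed in
 * device_ids_array_ptr; n_success reports how many devices were processed,
 * so a partially failed call can be resumed.
 */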
struct kfd_ioctl_map_memory_to_gpu_args {
	__u64 handle;
	__u64 device_ids_array_ptr;
	__u32 n_devices;
	__u32 n_success;
};

struct kfd_ioctl_unmap_memory_from_gpu_args {
	__u64 handle;
	__u64 device_ids_array_ptr;
	__u32 n_devices;
	__u32 n_success;
};
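
/*
 * Ioctl command definitions, built on ioctl type 'K' with command numbers
 * in the range [AMDKFD_COMMAND_START, AMDKFD_COMMAND_END).
 */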
#define AMDKFD_IOCTL_BASE 'K'
#define AMDKFD_IO(nr)			_IO(AMDKFD_IOCTL_BASE, nr)
#define AMDKFD_IOR(nr, type)		_IOR(AMDKFD_IOCTL_BASE, nr, type)
#define AMDKFD_IOW(nr, type)		_IOW(AMDKFD_IOCTL_BASE, nr, type)
#define AMDKFD_IOWR(nr, type)		_IOWR(AMDKFD_IOCTL_BASE, nr, type)

#define AMDKFD_IOC_GET_VERSION			\
		AMDKFD_IOR(0x01, struct kfd_ioctl_get_version_args)
#define AMDKFD_IOC_CREATE_QUEUE			\
		AMDKFD_IOWR(0x02, struct kfd_ioctl_create_queue_args)
#define AMDKFD_IOC_DESTROY_QUEUE		\
		AMDKFD_IOWR(0x03, struct kfd_ioctl_destroy_queue_args)
#define AMDKFD_IOC_SET_MEMORY_POLICY		\
		AMDKFD_IOW(0x04, struct kfd_ioctl_set_memory_policy_args)
#define AMDKFD_IOC_GET_CLOCK_COUNTERS		\
		AMDKFD_IOWR(0x05, struct kfd_ioctl_get_clock_counters_args)
#define AMDKFD_IOC_GET_PROCESS_APERTURES	\
		AMDKFD_IOR(0x06, struct kfd_ioctl_get_process_apertures_args)
#define AMDKFD_IOC_UPDATE_QUEUE			\
		AMDKFD_IOW(0x07, struct kfd_ioctl_update_queue_args)
#define AMDKFD_IOC_CREATE_EVENT			\
		AMDKFD_IOWR(0x08, struct kfd_ioctl_create_event_args)
#define AMDKFD_IOC_DESTROY_EVENT		\
		AMDKFD_IOW(0x09, struct kfd_ioctl_destroy_event_args)
#define AMDKFD_IOC_SET_EVENT			\
		AMDKFD_IOW(0x0A, struct kfd_ioctl_set_event_args)
#define AMDKFD_IOC_RESET_EVENT			\
		AMDKFD_IOW(0x0B, struct kfd_ioctl_reset_event_args)
#define AMDKFD_IOC_WAIT_EVENTS			\
		AMDKFD_IOWR(0x0C, struct kfd_ioctl_wait_events_args)
#define AMDKFD_IOC_DBG_REGISTER			\
		AMDKFD_IOW(0x0D, struct kfd_ioctl_dbg_register_args)
#define AMDKFD_IOC_DBG_UNREGISTER		\
		AMDKFD_IOW(0x0E, struct kfd_ioctl_dbg_unregister_args)
#define AMDKFD_IOC_DBG_ADDRESS_WATCH		\
		AMDKFD_IOW(0x0F, struct kfd_ioctl_dbg_address_watch_args)
#define AMDKFD_IOC_DBG_WAVE_CONTROL		\
		AMDKFD_IOW(0x10, struct kfd_ioctl_dbg_wave_control_args)
#define AMDKFD_IOC_SET_SCRATCH_BACKING_VA	\
		AMDKFD_IOWR(0x11, struct kfd_ioctl_set_scratch_backing_va_args)
#define AMDKFD_IOC_GET_TILE_CONFIG		\
		AMDKFD_IOWR(0x12, struct kfd_ioctl_get_tile_config_args)
#define AMDKFD_IOC_SET_TRAP_HANDLER		\
		AMDKFD_IOW(0x13, struct kfd_ioctl_set_trap_handler_args)
#define AMDKFD_IOC_GET_PROCESS_APERTURES_NEW	\
		AMDKFD_IOWR(0x14,		\
			struct kfd_ioctl_get_process_apertures_new_args)
#define AMDKFD_IOC_ACQUIRE_VM			\
		AMDKFD_IOW(0x15, struct kfd_ioctl_acquire_vm_args)
#define AMDKFD_IOC_ALLOC_MEMORY_OF_GPU		\
		AMDKFD_IOWR(0x16, struct kfd_ioctl_alloc_memory_of_gpu_args)
#define AMDKFD_IOC_FREE_MEMORY_OF_GPU		\
		AMDKFD_IOW(0x17, struct kfd_ioctl_free_memory_of_gpu_args)
#define AMDKFD_IOC_MAP_MEMORY_TO_GPU		\
		AMDKFD_IOWR(0x18, struct kfd_ioctl_map_memory_to_gpu_args)
#define AMDKFD_IOC_UNMAP_MEMORY_FROM_GPU	\
		AMDKFD_IOWR(0x19, struct kfd_ioctl_unmap_memory_from_gpu_args)
#define AMDKFD_IOC_SET_CU_MASK		\
		AMDKFD_IOW(0x1A, struct kfd_ioctl_set_cu_mask_args)

#define AMDKFD_COMMAND_START		0x01
#define AMDKFD_COMMAND_END		0x1B

#endif