1 //
2 // Copyright 2021 The ANGLE Project Authors. All rights reserved.
3 // Use of this source code is governed by a BSD-style license that can be
4 // found in the LICENSE file.
5 //
6 // validationCL.cpp: Validation functions for generic CL entry point parameters
7 // based on the OpenCL Specification V3.0.7, see https://www.khronos.org/registry/OpenCL/
8 // Each used CL error code is preceded by a citation of the relevant rule in the spec.
9
10 #include "libANGLE/validationCL_autogen.h"
11
12 #include "libANGLE/cl_utils.h"
13
14 #define ANGLE_VALIDATE_VERSION(version, major, minor) \
15 do \
16 { \
17 if (version < CL_MAKE_VERSION(major##u, minor##u, 0u)) \
18 { \
19 return CL_INVALID_VALUE; \
20 } \
21 } while (0)
22
23 #define ANGLE_VALIDATE_EXTENSION(extension) \
24 do \
25 { \
26 if (!extension) \
27 { \
28 return CL_INVALID_VALUE; \
29 } \
30 } while (0)
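
// Both macros are only meaningful inside the info-query switch statements below, which map an
// unsupported query to CL_INVALID_VALUE. A minimal illustrative sketch (the case labels mirror
// real uses further down in this file):
//
//     switch (param_name)
//     {
//         case DeviceInfo::ILsWithVersion:
//             ANGLE_VALIDATE_VERSION(version, 3, 0);   // rejected on pre-3.0 platforms
//             break;
//         case DeviceInfo::DoubleFpConfig:
//             ANGLE_VALIDATE_EXTENSION(info.khrFP64);  // rejected without cl_khr_fp64
//             break;
//         default:
//             break;
//     }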
31
32 namespace cl
33 {
34
35 namespace
36 {
37
38 cl_int ValidateContextProperties(const cl_context_properties *properties, const Platform *&platform)
39 {
40 platform = nullptr;
41 bool hasUserSync = false;
42 if (properties != nullptr)
43 {
44 while (*properties != 0)
45 {
46 switch (*properties++)
47 {
48 case CL_CONTEXT_PLATFORM:
49 {
50 // CL_INVALID_PROPERTY if the same property name is specified more than once.
51 if (platform != nullptr)
52 {
53 return CL_INVALID_PROPERTY;
54 }
55 cl_platform_id nativePlatform = reinterpret_cast<cl_platform_id>(*properties++);
56 // CL_INVALID_PLATFORM if platform value specified in properties
57 // is not a valid platform.
58 if (!Platform::IsValid(nativePlatform))
59 {
60 return CL_INVALID_PLATFORM;
61 }
62 platform = &nativePlatform->cast<Platform>();
63 break;
64 }
65 case CL_CONTEXT_INTEROP_USER_SYNC:
66 {
67 // CL_INVALID_PROPERTY if the value specified for a supported property name
68 // is not valid, or if the same property name is specified more than once.
69 if ((*properties != CL_FALSE && *properties != CL_TRUE) || hasUserSync)
70 {
71 return CL_INVALID_PROPERTY;
72 }
73 ++properties;
74 hasUserSync = true;
75 break;
76 }
77 default:
78 {
79 // CL_INVALID_PROPERTY if context property name in properties
80 // is not a supported property name.
81 return CL_INVALID_PROPERTY;
82 }
83 }
84 }
85 }
86 if (platform == nullptr)
87 {
88 platform = Platform::GetDefault();
89 // CL_INVALID_PLATFORM if properties is NULL and no platform could be selected.
90 if (platform == nullptr)
91 {
92 return CL_INVALID_PLATFORM;
93 }
94 }
95 return CL_SUCCESS;
96 }
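
// For reference, a minimal sketch of the zero-terminated name/value list that the loop above
// parses (platformId is a hypothetical valid cl_platform_id):
//
//     const cl_context_properties properties[] = {
//         CL_CONTEXT_PLATFORM, reinterpret_cast<cl_context_properties>(platformId),
//         CL_CONTEXT_INTEROP_USER_SYNC, CL_TRUE,
//         0};  // a zero entry terminates the list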
97
98 bool ValidateMemoryFlags(MemFlags flags, const Platform &platform)
99 {
100 // CL_MEM_READ_WRITE, CL_MEM_WRITE_ONLY, and CL_MEM_READ_ONLY are mutually exclusive.
101 MemFlags allowedFlags(CL_MEM_READ_WRITE | CL_MEM_WRITE_ONLY | CL_MEM_READ_ONLY);
102 if (!flags.areMutuallyExclusive(CL_MEM_READ_WRITE, CL_MEM_WRITE_ONLY, CL_MEM_READ_ONLY))
103 {
104 return false;
105 }
106 // CL_MEM_USE_HOST_PTR is mutually exclusive with either of the other two flags.
107 allowedFlags.set(CL_MEM_USE_HOST_PTR | CL_MEM_ALLOC_HOST_PTR | CL_MEM_COPY_HOST_PTR);
108 if (!flags.areMutuallyExclusive(CL_MEM_USE_HOST_PTR,
109 CL_MEM_ALLOC_HOST_PTR | CL_MEM_COPY_HOST_PTR))
110 {
111 return false;
112 }
113 if (platform.isVersionOrNewer(1u, 2u))
114 {
115 // CL_MEM_HOST_WRITE_ONLY, CL_MEM_HOST_READ_ONLY,
116 // and CL_MEM_HOST_NO_ACCESS are mutually exclusive.
117 allowedFlags.set(CL_MEM_HOST_WRITE_ONLY | CL_MEM_HOST_READ_ONLY | CL_MEM_HOST_NO_ACCESS);
118 if (!flags.areMutuallyExclusive(CL_MEM_HOST_WRITE_ONLY, CL_MEM_HOST_READ_ONLY,
119 CL_MEM_HOST_NO_ACCESS))
120 {
121 return false;
122 }
123 }
124 if (platform.isVersionOrNewer(2u, 0u))
125 {
126 allowedFlags.set(CL_MEM_KERNEL_READ_AND_WRITE);
127 }
128 if (flags.hasOtherBitsThan(allowedFlags))
129 {
130 return false;
131 }
132 return true;
133 }
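
// Illustrative failures for the checks above: CL_MEM_READ_WRITE | CL_MEM_WRITE_ONLY trips the
// first exclusivity check, CL_MEM_USE_HOST_PTR | CL_MEM_ALLOC_HOST_PTR trips the second, and
// any bit outside allowedFlags is caught by hasOtherBitsThan() at the end.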
134
135 bool ValidateMapFlags(MapFlags flags, const Platform &platform)
136 {
137 MemFlags allowedFlags(CL_MAP_READ | CL_MAP_WRITE);
138 if (platform.isVersionOrNewer(1u, 2u))
139 {
140 // CL_MAP_READ or CL_MAP_WRITE and CL_MAP_WRITE_INVALIDATE_REGION are mutually exclusive.
141 allowedFlags.set(CL_MAP_WRITE_INVALIDATE_REGION);
142 if (!flags.areMutuallyExclusive(CL_MAP_WRITE_INVALIDATE_REGION, CL_MAP_READ | CL_MAP_WRITE))
143 {
144 return false;
145 }
146 }
147 if (flags.hasOtherBitsThan(allowedFlags))
148 {
149 return false;
150 }
151 return true;
152 }
153
154 bool ValidateMemoryProperties(const cl_mem_properties *properties)
155 {
156 if (properties != nullptr)
157 {
158 // OpenCL 3.0 does not define any optional properties.
159 // This function is reserved for extensions and future use.
160 if (*properties != 0)
161 {
162 return false;
163 }
164 }
165 return true;
166 }
167
168 cl_int ValidateCommandQueueAndEventWaitList(cl_command_queue commandQueue,
169 bool validateImageSupport,
170 cl_uint numEvents,
171 const cl_event *events)
172 {
173 // CL_INVALID_COMMAND_QUEUE if command_queue is not a valid host command-queue.
174 if (!CommandQueue::IsValid(commandQueue))
175 {
176 return CL_INVALID_COMMAND_QUEUE;
177 }
178 const CommandQueue &queue = commandQueue->cast<CommandQueue>();
179 if (!queue.isOnHost())
180 {
181 return CL_INVALID_COMMAND_QUEUE;
182 }
183
184 if (validateImageSupport)
185 {
186 // CL_INVALID_OPERATION if the device associated with command_queue does not support images.
187 if (queue.getDevice().getInfo().imageSupport == CL_FALSE)
188 {
189 return CL_INVALID_OPERATION;
190 }
191 }
192
193 // CL_INVALID_EVENT_WAIT_LIST if event_wait_list is NULL and num_events_in_wait_list > 0,
194 // or event_wait_list is not NULL and num_events_in_wait_list is 0, ...
195 if ((events == nullptr) != (numEvents == 0u))
196 {
197 return CL_INVALID_EVENT_WAIT_LIST;
198 }
199 while (numEvents-- != 0u)
200 {
201 // or if event objects in event_wait_list are not valid events.
202 if (!Event::IsValid(*events))
203 {
204 return CL_INVALID_EVENT_WAIT_LIST;
205 }
206
207 // CL_INVALID_CONTEXT if the context associated with command_queue
208 // and events in event_wait_list are not the same.
209 if (&queue.getContext() != &(*events++)->cast<Event>().getContext())
210 {
211 return CL_INVALID_CONTEXT;
212 }
213 }
214
215 return CL_SUCCESS;
216 }
217
218 cl_int ValidateEnqueueBuffer(const CommandQueue &queue,
219 cl_mem buffer,
220 bool hostRead,
221 bool hostWrite)
222 {
223 // CL_INVALID_MEM_OBJECT if buffer is not a valid buffer object.
224 if (!Buffer::IsValid(buffer))
225 {
226 return CL_INVALID_MEM_OBJECT;
227 }
228 const Buffer &buf = buffer->cast<Buffer>();
229
230 // CL_INVALID_CONTEXT if the context associated with command_queue and buffer are not the same.
231 if (&queue.getContext() != &buf.getContext())
232 {
233 return CL_INVALID_CONTEXT;
234 }
235
236 // CL_MISALIGNED_SUB_BUFFER_OFFSET if buffer is a sub-buffer object and offset specified
237 // when the sub-buffer object is created is not aligned to CL_DEVICE_MEM_BASE_ADDR_ALIGN
238 // value (which is in bits!) for device associated with queue.
239 if (buf.isSubBuffer() &&
240 (buf.getOffset() % (queue.getDevice().getInfo().memBaseAddrAlign / 8u)) != 0u)
241 {
242 return CL_MISALIGNED_SUB_BUFFER_OFFSET;
243 }
244
245 // CL_INVALID_OPERATION if a read function is called on buffer which
246 // has been created with CL_MEM_HOST_WRITE_ONLY or CL_MEM_HOST_NO_ACCESS.
247 if (hostRead && buf.getFlags().isSet(CL_MEM_HOST_WRITE_ONLY | CL_MEM_HOST_NO_ACCESS))
248 {
249 return CL_INVALID_OPERATION;
250 }
251
252 // CL_INVALID_OPERATION if a write function is called on buffer which
253 // has been created with CL_MEM_HOST_READ_ONLY or CL_MEM_HOST_NO_ACCESS.
254 if (hostWrite && buf.getFlags().isSet(CL_MEM_HOST_READ_ONLY | CL_MEM_HOST_NO_ACCESS))
255 {
256 return CL_INVALID_OPERATION;
257 }
258
259 return CL_SUCCESS;
260 }
261
262 cl_int ValidateBufferRect(const Buffer &buffer,
263 const size_t *origin,
264 const size_t *region,
265 size_t rowPitch,
266 size_t slicePitch)
267 {
268 // CL_INVALID_VALUE if origin or region is NULL.
269 if (origin == nullptr || region == nullptr)
270 {
271 return CL_INVALID_VALUE;
272 }
273
274 // CL_INVALID_VALUE if any region array element is 0.
275 if (region[0] == 0u || region[1] == 0u || region[2] == 0u)
276 {
277 return CL_INVALID_VALUE;
278 }
279
280 // CL_INVALID_VALUE if row_pitch is not 0 and is less than region[0].
281 if (rowPitch == 0u)
282 {
283 rowPitch = region[0];
284 }
285 else if (rowPitch < region[0])
286 {
287 return CL_INVALID_VALUE;
288 }
289
290 // CL_INVALID_VALUE if slice_pitch is not 0 and is less than
291 // region[1] x row_pitch and not a multiple of row_pitch.
292 if (slicePitch == 0u)
293 {
294 slicePitch = region[1] * rowPitch;
295 }
296 else if (slicePitch < region[1] * rowPitch || (slicePitch % rowPitch) != 0u)
297 {
298 return CL_INVALID_VALUE;
299 }
300
301 // CL_INVALID_VALUE if the region being read or written specified
302 // by (origin, region, row_pitch, slice_pitch) is out of bounds.
303 if (!buffer.isRegionValid(
304 origin[2] * slicePitch + origin[1] * rowPitch + origin[0],
305 (region[2] - 1u) * slicePitch + (region[1] - 1u) * rowPitch + region[0]))
306 {
307 return CL_INVALID_VALUE;
308 }
309
310 return CL_SUCCESS;
311 }
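
// Worked example for the bounds check above (illustrative numbers): with origin = {1, 2, 0},
// region = {4, 3, 1}, row_pitch = 16 and slice_pitch = 64, the accessed range starts at
// byte 0 * 64 + 2 * 16 + 1 = 33 and spans (1 - 1) * 64 + (3 - 1) * 16 + 4 = 36 bytes,
// so the buffer (or sub-buffer) must be at least 33 + 36 = 69 bytes large.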
312
313 cl_int ValidateHostRect(const size_t *hostOrigin,
314 const size_t *region,
315 size_t hostRowPitch,
316 size_t hostSlicePitch,
317 const void *ptr)
318 {
319 // CL_INVALID_VALUE if host_origin or region is NULL.
320 if (hostOrigin == nullptr || region == nullptr)
321 {
322 return CL_INVALID_VALUE;
323 }
324
325 // CL_INVALID_VALUE if any region array element is 0.
326 if (region[0] == 0u || region[1] == 0u || region[2] == 0u)
327 {
328 return CL_INVALID_VALUE;
329 }
330
331 // CL_INVALID_VALUE if host_row_pitch is not 0 and is less than region[0].
332 if (hostRowPitch == 0u)
333 {
334 hostRowPitch = region[0];
335 }
336 else if (hostRowPitch < region[0])
337 {
338 return CL_INVALID_VALUE;
339 }
340
341 // CL_INVALID_VALUE if host_slice_pitch is not 0 and is less than
342 // region[1] x host_row_pitch and not a multiple of host_row_pitch.
343 if (hostSlicePitch != 0u &&
344 (hostSlicePitch < region[1] * hostRowPitch || (hostSlicePitch % hostRowPitch) != 0u))
345 {
346 return CL_INVALID_VALUE;
347 }
348
349 // CL_INVALID_VALUE if ptr is NULL.
350 if (ptr == nullptr)
351 {
352 return CL_INVALID_VALUE;
353 }
354
355 return CL_SUCCESS;
356 }
357
358 cl_int ValidateEnqueueImage(const CommandQueue &queue, cl_mem image, bool hostRead, bool hostWrite)
359 {
360 // CL_INVALID_MEM_OBJECT if image is not a valid image object.
361 if (!Image::IsValid(image))
362 {
363 return CL_INVALID_MEM_OBJECT;
364 }
365 const Image &img = image->cast<Image>();
366
367 // CL_INVALID_CONTEXT if the context associated with command_queue and image are not the same.
368 if (&queue.getContext() != &img.getContext())
369 {
370 return CL_INVALID_CONTEXT;
371 }
372
373 // CL_INVALID_OPERATION if a read function is called on image which
374 // has been created with CL_MEM_HOST_WRITE_ONLY or CL_MEM_HOST_NO_ACCESS.
375 if (hostRead && img.getFlags().isSet(CL_MEM_HOST_WRITE_ONLY | CL_MEM_HOST_NO_ACCESS))
376 {
377 return CL_INVALID_OPERATION;
378 }
379
380 // CL_INVALID_OPERATION if a write function is called on image which
381 // has been created with CL_MEM_HOST_READ_ONLY or CL_MEM_HOST_NO_ACCESS.
382 if (hostWrite && img.getFlags().isSet(CL_MEM_HOST_READ_ONLY | CL_MEM_HOST_NO_ACCESS))
383 {
384 return CL_INVALID_OPERATION;
385 }
386
387 return CL_SUCCESS;
388 }
389
390 cl_int ValidateImageForDevice(const Image &image,
391 const Device &device,
392 const size_t *origin,
393 const size_t *region)
394 {
395 // CL_INVALID_VALUE if origin or region is NULL.
396 if (origin == nullptr || region == nullptr)
397 {
398 return CL_INVALID_VALUE;
399 }
400
401 // CL_INVALID_VALUE if values in origin and region do not follow rules
402 // described in the argument description for origin and region.
403 // The values in region cannot be 0.
404 if (region[0] == 0u || region[1] == 0u || region[2] == 0u)
405 {
406 return CL_INVALID_VALUE;
407 }
408 switch (image.getType())
409 {
410 // If image is a 1D image or 1D image buffer object,
411 // origin[1] and origin[2] must be 0 and region[1] and region[2] must be 1.
412 case MemObjectType::Image1D:
413 case MemObjectType::Image1D_Buffer:
414 if (origin[1] != 0u || origin[2] != 0u || region[1] != 1u || region[2] != 1u)
415 {
416 return CL_INVALID_VALUE;
417 }
418 break;
419 // If image is a 2D image object or a 1D image array object,
420 // origin[2] must be 0 and region[2] must be 1.
421 case MemObjectType::Image2D:
422 case MemObjectType::Image1D_Array:
423 if (origin[2] != 0u || region[2] != 1u)
424 {
425 return CL_INVALID_VALUE;
426 }
427 break;
428 case MemObjectType::Image3D:
429 case MemObjectType::Image2D_Array:
430 break;
431 default:
432 ASSERT(false);
433 return CL_INVALID_IMAGE_DESCRIPTOR;
434 }
435
436 // CL_INVALID_VALUE if the region being read or written
437 // specified by origin and region is out of bounds.
438 if (!image.isRegionValid(origin, region))
439 {
440 return CL_INVALID_VALUE;
441 }
442
443 // CL_INVALID_IMAGE_SIZE if image dimensions (image width, height, specified or compute
444 // row and/or slice pitch) for image are not supported by device associated with queue.
445 if (!device.supportsImageDimensions(image.getDescriptor()))
446 {
447 return CL_INVALID_IMAGE_SIZE;
448 }
449
450 return CL_SUCCESS;
451 }
452
453 cl_int ValidateHostRegionForImage(const Image &image,
454 const size_t region[3],
455 size_t rowPitch,
456 size_t slicePitch,
457 const void *ptr)
458 {
459 // CL_INVALID_VALUE if row_pitch is not 0 and is less than the element size in bytes x width.
460 if (rowPitch == 0u)
461 {
462 rowPitch = image.getElementSize() * region[0];
463 }
464 else if (rowPitch < image.getElementSize() * region[0])
465 {
466 return CL_INVALID_VALUE;
467 }
468 if (slicePitch != 0u)
469 {
470 // TODO(jplate) Follow up with https://github.com/KhronosGroup/OpenCL-Docs/issues/624
471 // This error is missing in the OpenCL spec.
472 // slice_pitch must be 0 if image is a 1D or 2D image.
473 if (image.getType() == MemObjectType::Image1D ||
474 image.getType() == MemObjectType::Image1D_Buffer ||
475 image.getType() == MemObjectType::Image2D)
476 {
477 return CL_INVALID_VALUE;
478 }
479 // CL_INVALID_VALUE if slice_pitch is not 0 and is less than row_pitch x height.
480 if (slicePitch < rowPitch * region[1])
481 {
482 return CL_INVALID_VALUE;
483 }
484 }
485
486 // CL_INVALID_VALUE if ptr is NULL.
487 if (ptr == nullptr)
488 {
489 return CL_INVALID_VALUE;
490 }
491
492 return CL_SUCCESS;
493 }
494
495 } // namespace
496
497 // CL 1.0
498 cl_int ValidateGetPlatformIDs(cl_uint num_entries,
499 const cl_platform_id *platforms,
500 const cl_uint *num_platforms)
501 {
502 // CL_INVALID_VALUE if num_entries is equal to zero and platforms is not NULL
503 // or if both num_platforms and platforms are NULL.
504 if ((num_entries == 0u && platforms != nullptr) ||
505 (platforms == nullptr && num_platforms == nullptr))
506 {
507 return CL_INVALID_VALUE;
508 }
509
510 return CL_SUCCESS;
511 }
512
513 cl_int ValidateGetPlatformInfo(cl_platform_id platform,
514 PlatformInfo param_name,
515 size_t param_value_size,
516 const void *param_value,
517 const size_t *param_value_size_ret)
518 {
519 // CL_INVALID_PLATFORM if platform is not a valid platform.
520 if (!Platform::IsValidOrDefault(platform))
521 {
522 return CL_INVALID_PLATFORM;
523 }
524
525 // CL_INVALID_VALUE if param_name is not one of the supported values.
526 const cl_version version = platform->cast<Platform>().getVersion();
527 switch (param_name)
528 {
529 case PlatformInfo::HostTimerResolution:
530 ANGLE_VALIDATE_VERSION(version, 2, 1);
531 break;
532 case PlatformInfo::NumericVersion:
533 case PlatformInfo::ExtensionsWithVersion:
534 ANGLE_VALIDATE_VERSION(version, 3, 0);
535 break;
536 case PlatformInfo::InvalidEnum:
537 return CL_INVALID_VALUE;
538 default:
539 // All remaining possible values for param_name are valid for all versions.
540 break;
541 }
542
543 return CL_SUCCESS;
544 }
545
546 cl_int ValidateGetDeviceIDs(cl_platform_id platform,
547 DeviceType device_type,
548 cl_uint num_entries,
549 const cl_device_id *devices,
550 const cl_uint *num_devices)
551 {
552 // CL_INVALID_PLATFORM if platform is not a valid platform.
553 if (!Platform::IsValidOrDefault(platform))
554 {
555 return CL_INVALID_PLATFORM;
556 }
557
558 // CL_INVALID_DEVICE_TYPE if device_type is not a valid value.
559 if (!Device::IsValidType(device_type))
560 {
561 return CL_INVALID_DEVICE_TYPE;
562 }
563
564 // CL_INVALID_VALUE if num_entries is equal to zero and devices is not NULL
565 // or if both num_devices and devices are NULL.
566 if ((num_entries == 0u && devices != nullptr) || (num_devices == nullptr && devices == nullptr))
567 {
568 return CL_INVALID_VALUE;
569 }
570
571 return CL_SUCCESS;
572 }
573
574 cl_int ValidateGetDeviceInfo(cl_device_id device,
575 DeviceInfo param_name,
576 size_t param_value_size,
577 const void *param_value,
578 const size_t *param_value_size_ret)
579 {
580 // CL_INVALID_DEVICE if device is not a valid device.
581 if (!Device::IsValid(device))
582 {
583 return CL_INVALID_DEVICE;
584 }
585 const Device &dev = device->cast<Device>();
586
587 // CL_INVALID_VALUE if param_name is not one of the supported values
588 // or if param_name is a value that is available as an extension
589 // and the corresponding extension is not supported by the device.
590 const cl_version version = dev.getVersion();
591 const rx::CLDeviceImpl::Info &info = dev.getInfo();
592 // Enums ordered within their version block as they appear in the OpenCL spec V3.0.7, table 5
593 switch (param_name)
594 {
595 case DeviceInfo::PreferredVectorWidthHalf:
596 case DeviceInfo::NativeVectorWidthChar:
597 case DeviceInfo::NativeVectorWidthShort:
598 case DeviceInfo::NativeVectorWidthInt:
599 case DeviceInfo::NativeVectorWidthLong:
600 case DeviceInfo::NativeVectorWidthFloat:
601 case DeviceInfo::NativeVectorWidthDouble:
602 case DeviceInfo::NativeVectorWidthHalf:
603 case DeviceInfo::HostUnifiedMemory:
604 case DeviceInfo::OpenCL_C_Version:
605 ANGLE_VALIDATE_VERSION(version, 1, 1);
606 break;
607
608 case DeviceInfo::ImageMaxBufferSize:
609 case DeviceInfo::ImageMaxArraySize:
610 case DeviceInfo::LinkerAvailable:
611 case DeviceInfo::BuiltInKernels:
612 case DeviceInfo::PrintfBufferSize:
613 case DeviceInfo::PreferredInteropUserSync:
614 case DeviceInfo::ParentDevice:
615 case DeviceInfo::PartitionMaxSubDevices:
616 case DeviceInfo::PartitionProperties:
617 case DeviceInfo::PartitionAffinityDomain:
618 case DeviceInfo::PartitionType:
619 case DeviceInfo::ReferenceCount:
620 ANGLE_VALIDATE_VERSION(version, 1, 2);
621 break;
622
623 case DeviceInfo::MaxReadWriteImageArgs:
624 case DeviceInfo::ImagePitchAlignment:
625 case DeviceInfo::ImageBaseAddressAlignment:
626 case DeviceInfo::MaxPipeArgs:
627 case DeviceInfo::PipeMaxActiveReservations:
628 case DeviceInfo::PipeMaxPacketSize:
629 case DeviceInfo::MaxGlobalVariableSize:
630 case DeviceInfo::GlobalVariablePreferredTotalSize:
631 case DeviceInfo::QueueOnDeviceProperties:
632 case DeviceInfo::QueueOnDevicePreferredSize:
633 case DeviceInfo::QueueOnDeviceMaxSize:
634 case DeviceInfo::MaxOnDeviceQueues:
635 case DeviceInfo::MaxOnDeviceEvents:
636 case DeviceInfo::SVM_Capabilities:
637 case DeviceInfo::PreferredPlatformAtomicAlignment:
638 case DeviceInfo::PreferredGlobalAtomicAlignment:
639 case DeviceInfo::PreferredLocalAtomicAlignment:
640 ANGLE_VALIDATE_VERSION(version, 2, 0);
641 break;
642
643 case DeviceInfo::IL_Version:
644 case DeviceInfo::MaxNumSubGroups:
645 case DeviceInfo::SubGroupIndependentForwardProgress:
646 ANGLE_VALIDATE_VERSION(version, 2, 1);
647 break;
648
649 case DeviceInfo::ILsWithVersion:
650 case DeviceInfo::BuiltInKernelsWithVersion:
651 case DeviceInfo::NumericVersion:
652 case DeviceInfo::OpenCL_C_AllVersions:
653 case DeviceInfo::OpenCL_C_Features:
654 case DeviceInfo::ExtensionsWithVersion:
655 case DeviceInfo::AtomicMemoryCapabilities:
656 case DeviceInfo::AtomicFenceCapabilities:
657 case DeviceInfo::NonUniformWorkGroupSupport:
658 case DeviceInfo::WorkGroupCollectiveFunctionsSupport:
659 case DeviceInfo::GenericAddressSpaceSupport:
660 case DeviceInfo::DeviceEnqueueCapabilities:
661 case DeviceInfo::PipeSupport:
662 case DeviceInfo::PreferredWorkGroupSizeMultiple:
663 case DeviceInfo::LatestConformanceVersionPassed:
664 ANGLE_VALIDATE_VERSION(version, 3, 0);
665 break;
666
667 case DeviceInfo::DoubleFpConfig:
668 ANGLE_VALIDATE_EXTENSION(info.khrFP64);
669 break;
670
671 case DeviceInfo::InvalidEnum:
672 return CL_INVALID_VALUE;
673 default:
674 // All remaining possible values for param_name are valid for all versions.
675 break;
676 }
677
678 return CL_SUCCESS;
679 }
680
681 cl_int ValidateCreateContext(const cl_context_properties *properties,
682 cl_uint num_devices,
683 const cl_device_id *devices,
684 void(CL_CALLBACK *pfn_notify)(const char *errinfo,
685 const void *private_info,
686 size_t cb,
687 void *user_data),
688 const void *user_data)
689 {
690 const Platform *platform = nullptr;
691 ANGLE_CL_TRY(ValidateContextProperties(properties, platform));
692
693 // CL_INVALID_VALUE if devices is NULL or if num_devices is equal to zero
694 // or if pfn_notify is NULL but user_data is not NULL.
695 if (devices == nullptr || num_devices == 0u || (pfn_notify == nullptr && user_data != nullptr))
696 {
697 return CL_INVALID_VALUE;
698 }
699
700 // CL_INVALID_DEVICE if any device in devices is not a valid device.
701 while (num_devices-- > 0u)
702 {
703 if (!Device::IsValid(*devices) || &(*devices)->cast<Device>().getPlatform() != platform)
704 {
705 return CL_INVALID_DEVICE;
706 }
707 ++devices;
708 }
709
710 return CL_SUCCESS;
711 }
712
713 cl_int ValidateCreateContextFromType(const cl_context_properties *properties,
714 DeviceType device_type,
715 void(CL_CALLBACK *pfn_notify)(const char *errinfo,
716 const void *private_info,
717 size_t cb,
718 void *user_data),
719 const void *user_data)
720 {
721 const Platform *platform = nullptr;
722 ANGLE_CL_TRY(ValidateContextProperties(properties, platform));
723
724 // CL_INVALID_DEVICE_TYPE if device_type is not a valid value.
725 if (!Device::IsValidType(device_type))
726 {
727 return CL_INVALID_DEVICE_TYPE;
728 }
729
730 // CL_INVALID_VALUE if pfn_notify is NULL but user_data is not NULL.
731 if (pfn_notify == nullptr && user_data != nullptr)
732 {
733 return CL_INVALID_VALUE;
734 }
735
736 return CL_SUCCESS;
737 }
738
739 cl_int ValidateRetainContext(cl_context context)
740 {
741 // CL_INVALID_CONTEXT if context is not a valid OpenCL context.
742 return Context::IsValid(context) ? CL_SUCCESS : CL_INVALID_CONTEXT;
743 }
744
745 cl_int ValidateReleaseContext(cl_context context)
746 {
747 // CL_INVALID_CONTEXT if context is not a valid OpenCL context.
748 return Context::IsValid(context) ? CL_SUCCESS : CL_INVALID_CONTEXT;
749 }
750
751 cl_int ValidateGetContextInfo(cl_context context,
752 ContextInfo param_name,
753 size_t param_value_size,
754 const void *param_value,
755 const size_t *param_value_size_ret)
756 {
757 // CL_INVALID_CONTEXT if context is not a valid context.
758 if (!Context::IsValid(context))
759 {
760 return CL_INVALID_CONTEXT;
761 }
762
763 // CL_INVALID_VALUE if param_name is not one of the supported values.
764 if (param_name == ContextInfo::InvalidEnum)
765 {
766 return CL_INVALID_VALUE;
767 }
768
769 return CL_SUCCESS;
770 }
771
772 cl_int ValidateRetainCommandQueue(cl_command_queue command_queue)
773 {
774 // CL_INVALID_COMMAND_QUEUE if command_queue is not a valid command-queue.
775 return CommandQueue::IsValid(command_queue) ? CL_SUCCESS : CL_INVALID_COMMAND_QUEUE;
776 }
777
778 cl_int ValidateReleaseCommandQueue(cl_command_queue command_queue)
779 {
780 // CL_INVALID_COMMAND_QUEUE if command_queue is not a valid command-queue.
781 return CommandQueue::IsValid(command_queue) ? CL_SUCCESS : CL_INVALID_COMMAND_QUEUE;
782 }
783
784 cl_int ValidateGetCommandQueueInfo(cl_command_queue command_queue,
785 CommandQueueInfo param_name,
786 size_t param_value_size,
787 const void *param_value,
788 const size_t *param_value_size_ret)
789 {
790 // CL_INVALID_COMMAND_QUEUE if command_queue is not a valid command-queue ...
791 if (!CommandQueue::IsValid(command_queue))
792 {
793 return CL_INVALID_COMMAND_QUEUE;
794 }
795 const CommandQueue &queue = command_queue->cast<CommandQueue>();
796 // or if command_queue is not a valid command-queue for param_name.
797 if (param_name == CommandQueueInfo::Size && queue.isOnDevice())
798 {
799 return CL_INVALID_COMMAND_QUEUE;
800 }
801
802 // CL_INVALID_VALUE if param_name is not one of the supported values.
803 const cl_version version = queue.getDevice().getVersion();
804 switch (param_name)
805 {
806 case CommandQueueInfo::Size:
807 ANGLE_VALIDATE_VERSION(version, 2, 0);
808 break;
809 case CommandQueueInfo::DeviceDefault:
810 ANGLE_VALIDATE_VERSION(version, 2, 1);
811 break;
812 case CommandQueueInfo::PropertiesArray:
813 ANGLE_VALIDATE_VERSION(version, 3, 0);
814 break;
815 case CommandQueueInfo::InvalidEnum:
816 return CL_INVALID_VALUE;
817 default:
818 // All remaining possible values for param_name are valid for all versions.
819 break;
820 }
821
822 return CL_SUCCESS;
823 }
824
825 cl_int ValidateCreateBuffer(cl_context context, MemFlags flags, size_t size, const void *host_ptr)
826 {
827 // CL_INVALID_CONTEXT if context is not a valid context.
828 if (!Context::IsValid(context))
829 {
830 return CL_INVALID_CONTEXT;
831 }
832 const Context &ctx = context->cast<Context>();
833
834 // CL_INVALID_VALUE if values specified in flags are not valid
835 // as defined in the Memory Flags table.
836 if (!ValidateMemoryFlags(flags, ctx.getPlatform()))
837 {
838 return CL_INVALID_VALUE;
839 }
840
841 // CL_INVALID_BUFFER_SIZE if size is 0 ...
842 if (size == 0u)
843 {
844         return CL_INVALID_BUFFER_SIZE;
845 }
846 for (const DevicePtr &device : ctx.getDevices())
847 {
848 // or if size is greater than CL_DEVICE_MAX_MEM_ALLOC_SIZE for all devices in context.
849 if (size > device->getInfo().maxMemAllocSize)
850 {
851 return CL_INVALID_BUFFER_SIZE;
852 }
853 }
854
855 // CL_INVALID_HOST_PTR
856 // if host_ptr is NULL and CL_MEM_USE_HOST_PTR or CL_MEM_COPY_HOST_PTR are set in flags or
857 // if host_ptr is not NULL but CL_MEM_COPY_HOST_PTR or CL_MEM_USE_HOST_PTR are not set in flags.
858 if ((host_ptr != nullptr) != flags.isSet(CL_MEM_USE_HOST_PTR | CL_MEM_COPY_HOST_PTR))
859 {
860 return CL_INVALID_HOST_PTR;
861 }
862
863 return CL_SUCCESS;
864 }
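
// Sketch of the host_ptr consistency rule above (illustrative; hostData is a hypothetical
// non-NULL pointer to at least 64 bytes, ctx and err as usual):
//
//     clCreateBuffer(ctx, CL_MEM_COPY_HOST_PTR, 64, hostData, &err);  // valid
//     clCreateBuffer(ctx, CL_MEM_READ_WRITE, 64, hostData, &err);     // CL_INVALID_HOST_PTR
//     clCreateBuffer(ctx, CL_MEM_USE_HOST_PTR, 64, nullptr, &err);    // CL_INVALID_HOST_PTR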
865
866 cl_int ValidateRetainMemObject(cl_mem memobj)
867 {
868 // CL_INVALID_MEM_OBJECT if memobj is not a valid memory object.
869 return Memory::IsValid(memobj) ? CL_SUCCESS : CL_INVALID_MEM_OBJECT;
870 }
871
872 cl_int ValidateReleaseMemObject(cl_mem memobj)
873 {
874 // CL_INVALID_MEM_OBJECT if memobj is not a valid memory object.
875 return Memory::IsValid(memobj) ? CL_SUCCESS : CL_INVALID_MEM_OBJECT;
876 }
877
878 cl_int ValidateGetSupportedImageFormats(cl_context context,
879 MemFlags flags,
880 MemObjectType image_type,
881 cl_uint num_entries,
882 const cl_image_format *image_formats,
883 const cl_uint *num_image_formats)
884 {
885 // CL_INVALID_CONTEXT if context is not a valid context.
886 if (!Context::IsValid(context))
887 {
888 return CL_INVALID_CONTEXT;
889 }
890 const Context &ctx = context->cast<Context>();
891
892 // CL_INVALID_VALUE if flags or image_type are not valid,
893 if (!ValidateMemoryFlags(flags, ctx.getPlatform()) || !Image::IsTypeValid(image_type))
894 {
895 return CL_INVALID_VALUE;
896 }
897 // or if num_entries is 0 and image_formats is not NULL.
898 if (num_entries == 0u && image_formats != nullptr)
899 {
900 return CL_INVALID_VALUE;
901 }
902
903 return CL_SUCCESS;
904 }
905
906 cl_int ValidateGetMemObjectInfo(cl_mem memobj,
907 MemInfo param_name,
908 size_t param_value_size,
909 const void *param_value,
910 const size_t *param_value_size_ret)
911 {
912     // CL_INVALID_MEM_OBJECT if memobj is not a valid memory object.
913 if (!Memory::IsValid(memobj))
914 {
915 return CL_INVALID_MEM_OBJECT;
916 }
917
918 // CL_INVALID_VALUE if param_name is not valid.
919 const cl_version version = memobj->cast<Memory>().getContext().getPlatform().getVersion();
920 switch (param_name)
921 {
922 case MemInfo::AssociatedMemObject:
923 case MemInfo::Offset:
924 ANGLE_VALIDATE_VERSION(version, 1, 1);
925 break;
926 case MemInfo::UsesSVM_Pointer:
927 ANGLE_VALIDATE_VERSION(version, 2, 0);
928 break;
929 case MemInfo::Properties:
930 ANGLE_VALIDATE_VERSION(version, 3, 0);
931 break;
932 case MemInfo::InvalidEnum:
933 return CL_INVALID_VALUE;
934 default:
935 // All remaining possible values for param_name are valid for all versions.
936 break;
937 }
938
939 return CL_SUCCESS;
940 }
941
942 cl_int ValidateGetImageInfo(cl_mem image,
943 ImageInfo param_name,
944 size_t param_value_size,
945 const void *param_value,
946 const size_t *param_value_size_ret)
947 {
948     // CL_INVALID_MEM_OBJECT if image is not a valid image object.
949 if (!Image::IsValid(image))
950 {
951 return CL_INVALID_MEM_OBJECT;
952 }
953
954 // CL_INVALID_VALUE if param_name is not valid.
955 const cl_version version = image->cast<Image>().getContext().getPlatform().getVersion();
956 switch (param_name)
957 {
958 case ImageInfo::ArraySize:
959 case ImageInfo::Buffer:
960 case ImageInfo::NumMipLevels:
961 case ImageInfo::NumSamples:
962 ANGLE_VALIDATE_VERSION(version, 1, 2);
963 break;
964 case ImageInfo::InvalidEnum:
965 return CL_INVALID_VALUE;
966 default:
967 // All remaining possible values for param_name are valid for all versions.
968 break;
969 }
970
971 return CL_SUCCESS;
972 }
973
974 cl_int ValidateRetainSampler(cl_sampler sampler)
975 {
976 // CL_INVALID_SAMPLER if sampler is not a valid sampler object.
977 return Sampler::IsValid(sampler) ? CL_SUCCESS : CL_INVALID_SAMPLER;
978 }
979
980 cl_int ValidateReleaseSampler(cl_sampler sampler)
981 {
982 // CL_INVALID_SAMPLER if sampler is not a valid sampler object.
983 return Sampler::IsValid(sampler) ? CL_SUCCESS : CL_INVALID_SAMPLER;
984 }
985
986 cl_int ValidateGetSamplerInfo(cl_sampler sampler,
987 SamplerInfo param_name,
988 size_t param_value_size,
989 const void *param_value,
990 const size_t *param_value_size_ret)
991 {
992     // CL_INVALID_SAMPLER if sampler is not a valid sampler object.
993 if (!Sampler::IsValid(sampler))
994 {
995 return CL_INVALID_SAMPLER;
996 }
997
998 // CL_INVALID_VALUE if param_name is not valid.
999 const cl_version version = sampler->cast<Sampler>().getContext().getPlatform().getVersion();
1000 switch (param_name)
1001 {
1002 case SamplerInfo::Properties:
1003 ANGLE_VALIDATE_VERSION(version, 3, 0);
1004 break;
1005 case SamplerInfo::InvalidEnum:
1006 return CL_INVALID_VALUE;
1007 default:
1008 // All remaining possible values for param_name are valid for all versions.
1009 break;
1010 }
1011
1012 return CL_SUCCESS;
1013 }
1014
1015 cl_int ValidateCreateProgramWithSource(cl_context context,
1016 cl_uint count,
1017 const char **strings,
1018 const size_t *lengths)
1019 {
1020 // CL_INVALID_CONTEXT if context is not a valid context.
1021 if (!Context::IsValid(context))
1022 {
1023 return CL_INVALID_CONTEXT;
1024 }
1025
1026 // CL_INVALID_VALUE if count is zero or if strings or any entry in strings is NULL.
1027 if (count == 0u || strings == nullptr)
1028 {
1029 return CL_INVALID_VALUE;
1030 }
1031 while (count-- != 0u)
1032 {
1033 if (*strings++ == nullptr)
1034 {
1035 return CL_INVALID_VALUE;
1036 }
1037 }
1038
1039 return CL_SUCCESS;
1040 }
1041
1042 cl_int ValidateCreateProgramWithBinary(cl_context context,
1043 cl_uint num_devices,
1044 const cl_device_id *device_list,
1045 const size_t *lengths,
1046 const unsigned char **binaries,
1047 const cl_int *binary_status)
1048 {
1049 // CL_INVALID_CONTEXT if context is not a valid context.
1050 if (!Context::IsValid(context))
1051 {
1052 return CL_INVALID_CONTEXT;
1053 }
1054 const Context &ctx = context->cast<Context>();
1055
1056 // CL_INVALID_VALUE if device_list is NULL or num_devices is zero.
1057 // CL_INVALID_VALUE if lengths or binaries is NULL.
1058 if (device_list == nullptr || num_devices == 0u || lengths == nullptr || binaries == nullptr)
1059 {
1060 return CL_INVALID_VALUE;
1061 }
1062 while (num_devices-- != 0u)
1063 {
1064 // CL_INVALID_DEVICE if any device in device_list
1065 // is not in the list of devices associated with context.
1066 if (!ctx.hasDevice(*device_list++))
1067 {
1068 return CL_INVALID_DEVICE;
1069 }
1070
1071 // CL_INVALID_VALUE if any entry in lengths[i] is zero or binaries[i] is NULL.
1072 if (*lengths++ == 0u || *binaries++ == nullptr)
1073 {
1074 return CL_INVALID_VALUE;
1075 }
1076 }
1077
1078 return CL_SUCCESS;
1079 }
1080
1081 cl_int ValidateRetainProgram(cl_program program)
1082 {
1083 // CL_INVALID_PROGRAM if program is not a valid program object.
1084 return Program::IsValid(program) ? CL_SUCCESS : CL_INVALID_PROGRAM;
1085 }
1086
1087 cl_int ValidateReleaseProgram(cl_program program)
1088 {
1089 // CL_INVALID_PROGRAM if program is not a valid program object.
1090 return Program::IsValid(program) ? CL_SUCCESS : CL_INVALID_PROGRAM;
1091 }
1092
1093 cl_int ValidateBuildProgram(cl_program program,
1094 cl_uint num_devices,
1095 const cl_device_id *device_list,
1096 const char *options,
1097 void(CL_CALLBACK *pfn_notify)(cl_program program, void *user_data),
1098 const void *user_data)
1099 {
1100 // CL_INVALID_PROGRAM if program is not a valid program object.
1101 if (!Program::IsValid(program))
1102 {
1103 return CL_INVALID_PROGRAM;
1104 }
1105 const Program &prog = program->cast<Program>();
1106
1107 // CL_INVALID_VALUE if device_list is NULL and num_devices is greater than zero,
1108 // or if device_list is not NULL and num_devices is zero.
1109 if ((device_list != nullptr) != (num_devices != 0u))
1110 {
1111 return CL_INVALID_VALUE;
1112 }
1113
1114 // CL_INVALID_DEVICE if any device in device_list
1115 // is not in the list of devices associated with program.
1116 while (num_devices-- != 0u)
1117 {
1118 if (!prog.hasDevice(*device_list++))
1119 {
1120 return CL_INVALID_DEVICE;
1121 }
1122 }
1123
1124 // CL_INVALID_VALUE if pfn_notify is NULL but user_data is not NULL.
1125 if (pfn_notify == nullptr && user_data != nullptr)
1126 {
1127 return CL_INVALID_VALUE;
1128 }
1129
1130 // CL_INVALID_OPERATION if the build of a program executable for any of the devices listed
1131 // in device_list by a previous call to clBuildProgram for program has not completed.
1132 if (prog.isBuilding())
1133 {
1134 return CL_INVALID_OPERATION;
1135 }
1136
1137 // CL_INVALID_OPERATION if there are kernel objects attached to program.
1138 if (prog.hasAttachedKernels())
1139 {
1140 return CL_INVALID_OPERATION;
1141 }
1142
1143 return CL_SUCCESS;
1144 }
1145
1146 cl_int ValidateGetProgramInfo(cl_program program,
1147 ProgramInfo param_name,
1148 size_t param_value_size,
1149 const void *param_value,
1150 const size_t *param_value_size_ret)
1151 {
1152 // CL_INVALID_PROGRAM if program is not a valid program object.
1153 if (!Program::IsValid(program))
1154 {
1155 return CL_INVALID_PROGRAM;
1156 }
1157 const Program &prog = program->cast<Program>();
1158
1159 // CL_INVALID_VALUE if param_name is not valid.
1160 const cl_version version = prog.getContext().getPlatform().getVersion();
1161 switch (param_name)
1162 {
1163 case ProgramInfo::NumKernels:
1164 case ProgramInfo::KernelNames:
1165 ANGLE_VALIDATE_VERSION(version, 1, 2);
1166 break;
1167 case ProgramInfo::IL:
1168 ANGLE_VALIDATE_VERSION(version, 2, 1);
1169 break;
1170 case ProgramInfo::ScopeGlobalCtorsPresent:
1171 case ProgramInfo::ScopeGlobalDtorsPresent:
1172 ANGLE_VALIDATE_VERSION(version, 2, 2);
1173 break;
1174 case ProgramInfo::InvalidEnum:
1175 return CL_INVALID_VALUE;
1176 default:
1177 // All remaining possible values for param_name are valid for all versions.
1178 break;
1179 }
1180
1181 return CL_SUCCESS;
1182 }
1183
1184 cl_int ValidateGetProgramBuildInfo(cl_program program,
1185 cl_device_id device,
1186 ProgramBuildInfo param_name,
1187 size_t param_value_size,
1188 const void *param_value,
1189 const size_t *param_value_size_ret)
1190 {
1191 // CL_INVALID_PROGRAM if program is not a valid program object.
1192 if (!Program::IsValid(program))
1193 {
1194 return CL_INVALID_PROGRAM;
1195 }
1196 const Program &prog = program->cast<Program>();
1197
1198 // CL_INVALID_DEVICE if device is not in the list of devices associated with program.
1199 if (!prog.hasDevice(device))
1200 {
1201 return CL_INVALID_DEVICE;
1202 }
1203
1204 // CL_INVALID_VALUE if param_name is not valid.
1205 const cl_version version = prog.getContext().getPlatform().getVersion();
1206 switch (param_name)
1207 {
1208 case ProgramBuildInfo::BinaryType:
1209 ANGLE_VALIDATE_VERSION(version, 1, 2);
1210 break;
1211 case ProgramBuildInfo::GlobalVariableTotalSize:
1212 ANGLE_VALIDATE_VERSION(version, 2, 0);
1213 break;
1214 case ProgramBuildInfo::InvalidEnum:
1215 return CL_INVALID_VALUE;
1216 default:
1217 // All remaining possible values for param_name are valid for all versions.
1218 break;
1219 }
1220
1221 return CL_SUCCESS;
1222 }
1223
1224 cl_int ValidateCreateKernel(cl_program program, const char *kernel_name)
1225 {
1226 // CL_INVALID_PROGRAM if program is not a valid program object.
1227 if (!Program::IsValid(program))
1228 {
1229 return CL_INVALID_PROGRAM;
1230 }
1231
1232 // CL_INVALID_VALUE if kernel_name is NULL.
1233 if (kernel_name == nullptr)
1234 {
1235 return CL_INVALID_VALUE;
1236 }
1237
1238 return CL_SUCCESS;
1239 }
1240
1241 cl_int ValidateCreateKernelsInProgram(cl_program program,
1242 cl_uint num_kernels,
1243 const cl_kernel *kernels,
1244 const cl_uint *num_kernels_ret)
1245 {
1246 // CL_INVALID_PROGRAM if program is not a valid program object.
1247 if (!Program::IsValid(program))
1248 {
1249 return CL_INVALID_PROGRAM;
1250 }
1251
1252 return CL_SUCCESS;
1253 }
1254
1255 cl_int ValidateRetainKernel(cl_kernel kernel)
1256 {
1257 // CL_INVALID_KERNEL if kernel is not a valid kernel object.
1258 return Kernel::IsValid(kernel) ? CL_SUCCESS : CL_INVALID_KERNEL;
1259 }
1260
1261 cl_int ValidateReleaseKernel(cl_kernel kernel)
1262 {
1263 // CL_INVALID_KERNEL if kernel is not a valid kernel object.
1264 return Kernel::IsValid(kernel) ? CL_SUCCESS : CL_INVALID_KERNEL;
1265 }
1266
1267 cl_int ValidateSetKernelArg(cl_kernel kernel,
1268 cl_uint arg_index,
1269 size_t arg_size,
1270 const void *arg_value)
1271 {
1272 // CL_INVALID_KERNEL if kernel is not a valid kernel object.
1273 if (!Kernel::IsValid(kernel))
1274 {
1275 return CL_INVALID_KERNEL;
1276 }
1277 const Kernel &krnl = kernel->cast<Kernel>();
1278
1279 // CL_INVALID_ARG_INDEX if arg_index is not a valid argument index.
1280 if (arg_index >= krnl.getInfo().args.size())
1281 {
1282 return CL_INVALID_ARG_INDEX;
1283 }
1284
1285 if (arg_size == sizeof(cl_mem) && arg_value != nullptr)
1286 {
1287 const std::string &typeName = krnl.getInfo().args[arg_index].typeName;
1288
1289 // CL_INVALID_MEM_OBJECT for an argument declared to be a memory object
1290 // when the specified arg_value is not a valid memory object.
1291 if (typeName == "image1d_t")
1292 {
1293 const cl_mem image = *static_cast<const cl_mem *>(arg_value);
1294 if (!Image::IsValid(image) || image->cast<Image>().getType() != MemObjectType::Image1D)
1295 {
1296 return CL_INVALID_MEM_OBJECT;
1297 }
1298 }
1299 else if (typeName == "image2d_t")
1300 {
1301 const cl_mem image = *static_cast<const cl_mem *>(arg_value);
1302 if (!Image::IsValid(image) || image->cast<Image>().getType() != MemObjectType::Image2D)
1303 {
1304 return CL_INVALID_MEM_OBJECT;
1305 }
1306 }
1307 else if (typeName == "image3d_t")
1308 {
1309 const cl_mem image = *static_cast<const cl_mem *>(arg_value);
1310 if (!Image::IsValid(image) || image->cast<Image>().getType() != MemObjectType::Image3D)
1311 {
1312 return CL_INVALID_MEM_OBJECT;
1313 }
1314 }
1315 else if (typeName == "image1d_array_t")
1316 {
1317 const cl_mem image = *static_cast<const cl_mem *>(arg_value);
1318 if (!Image::IsValid(image) ||
1319 image->cast<Image>().getType() != MemObjectType::Image1D_Array)
1320 {
1321 return CL_INVALID_MEM_OBJECT;
1322 }
1323 }
1324 else if (typeName == "image2d_array_t")
1325 {
1326 const cl_mem image = *static_cast<const cl_mem *>(arg_value);
1327 if (!Image::IsValid(image) ||
1328 image->cast<Image>().getType() != MemObjectType::Image2D_Array)
1329 {
1330 return CL_INVALID_MEM_OBJECT;
1331 }
1332 }
1333 else if (typeName == "image1d_buffer_t")
1334 {
1335 const cl_mem image = *static_cast<const cl_mem *>(arg_value);
1336 if (!Image::IsValid(image) ||
1337 image->cast<Image>().getType() != MemObjectType::Image1D_Buffer)
1338 {
1339 return CL_INVALID_MEM_OBJECT;
1340 }
1341 }
1342 // CL_INVALID_SAMPLER for an argument declared to be of type sampler_t
1343 // when the specified arg_value is not a valid sampler object.
1344 else if (typeName == "sampler_t")
1345 {
1346 if (!Sampler::IsValid(*static_cast<const cl_sampler *>(arg_value)))
1347 {
1348 return CL_INVALID_SAMPLER;
1349 }
1350 }
1351 // CL_INVALID_DEVICE_QUEUE for an argument declared to be of type queue_t
1352 // when the specified arg_value is not a valid device queue object.
1353 else if (typeName == "queue_t")
1354 {
1355 const cl_command_queue queue = *static_cast<const cl_command_queue *>(arg_value);
1356 if (!CommandQueue::IsValid(queue) || !queue->cast<CommandQueue>().isOnDevice())
1357 {
1358 return CL_INVALID_DEVICE_QUEUE;
1359 }
1360 }
1361 }
1362
1363 return CL_SUCCESS;
1364 }
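
// Typical call shape exercised by the image/sampler/queue checks above (illustrative; kernel
// and img are hypothetical valid handles, argument 0 declared as image2d_t in the kernel):
//
//     cl_mem img = /* image created with clCreateImage */;
//     clSetKernelArg(kernel, 0u, sizeof(cl_mem), &img);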
1365
1366 cl_int ValidateGetKernelInfo(cl_kernel kernel,
1367 KernelInfo param_name,
1368 size_t param_value_size,
1369 const void *param_value,
1370 const size_t *param_value_size_ret)
1371 {
1372     // CL_INVALID_KERNEL if kernel is not a valid kernel object.
1373 if (!Kernel::IsValid(kernel))
1374 {
1375 return CL_INVALID_KERNEL;
1376 }
1377
1378 // CL_INVALID_VALUE if param_name is not valid.
1379 const cl_version version =
1380 kernel->cast<Kernel>().getProgram().getContext().getPlatform().getVersion();
1381 switch (param_name)
1382 {
1383 case KernelInfo::Attributes:
1384 ANGLE_VALIDATE_VERSION(version, 1, 2);
1385 break;
1386 case KernelInfo::InvalidEnum:
1387 return CL_INVALID_VALUE;
1388 default:
1389 // All remaining possible values for param_name are valid for all versions.
1390 break;
1391 }
1392
1393 return CL_SUCCESS;
1394 }
1395
1396 cl_int ValidateGetKernelWorkGroupInfo(cl_kernel kernel,
1397 cl_device_id device,
1398 KernelWorkGroupInfo param_name,
1399 size_t param_value_size,
1400 const void *param_value,
1401 const size_t *param_value_size_ret)
1402 {
1403     // CL_INVALID_KERNEL if kernel is not a valid kernel object.
1404 if (!Kernel::IsValid(kernel))
1405 {
1406 return CL_INVALID_KERNEL;
1407 }
1408 const Kernel &krnl = kernel->cast<Kernel>();
1409
1410 const Device *dev = nullptr;
1411 if (device != nullptr)
1412 {
1413 // CL_INVALID_DEVICE if device is not in the list of devices associated with kernel ...
1414 if (krnl.getProgram().getContext().hasDevice(device))
1415 {
1416 dev = &device->cast<Device>();
1417 }
1418 else
1419 {
1420 return CL_INVALID_DEVICE;
1421 }
1422 }
1423 else
1424 {
1425 // or if device is NULL but there is more than one device associated with kernel.
1426 if (krnl.getProgram().getContext().getDevices().size() == 1u)
1427 {
1428 dev = krnl.getProgram().getContext().getDevices().front().get();
1429 }
1430 else
1431 {
1432 return CL_INVALID_DEVICE;
1433 }
1434 }
1435
1436 // CL_INVALID_VALUE if param_name is not valid.
1437 const cl_version version = krnl.getProgram().getContext().getPlatform().getInfo().version;
1438 switch (param_name)
1439 {
1440 case KernelWorkGroupInfo::GlobalWorkSize:
1441 ANGLE_VALIDATE_VERSION(version, 1, 2);
1442 // CL_INVALID_VALUE if param_name is CL_KERNEL_GLOBAL_WORK_SIZE and
1443 // device is not a custom device and kernel is not a built-in kernel.
1444 if (!dev->supportsBuiltInKernel(krnl.getInfo().functionName))
1445 {
1446 return CL_INVALID_VALUE;
1447 }
1448 break;
1449 case KernelWorkGroupInfo::InvalidEnum:
1450 return CL_INVALID_VALUE;
1451 default:
1452 // All remaining possible values for param_name are valid for all versions.
1453 break;
1454 }
1455
1456 return CL_SUCCESS;
1457 }
1458
1459 cl_int ValidateWaitForEvents(cl_uint num_events, const cl_event *event_list)
1460 {
1461 // CL_INVALID_VALUE if num_events is zero or event_list is NULL.
1462 if (num_events == 0u || event_list == nullptr)
1463 {
1464 return CL_INVALID_VALUE;
1465 }
1466
1467 const Context *context = nullptr;
1468 while (num_events-- != 0u)
1469 {
1470 // CL_INVALID_EVENT if event objects specified in event_list are not valid event objects.
1471 if (!Event::IsValid(*event_list))
1472 {
1473 return CL_INVALID_EVENT;
1474 }
1475
1476 // CL_INVALID_CONTEXT if events specified in event_list do not belong to the same context.
1477 const Context *eventContext = &(*event_list++)->cast<Event>().getContext();
1478 if (context == nullptr)
1479 {
1480 context = eventContext;
1481 }
1482 else if (context != eventContext)
1483 {
1484 return CL_INVALID_CONTEXT;
1485 }
1486 }
1487
1488 return CL_SUCCESS;
1489 }
1490
1491 cl_int ValidateGetEventInfo(cl_event event,
1492 EventInfo param_name,
1493 size_t param_value_size,
1494 const void *param_value,
1495 const size_t *param_value_size_ret)
1496 {
1497     // CL_INVALID_EVENT if event is not a valid event object.
1498 if (!Event::IsValid(event))
1499 {
1500 return CL_INVALID_EVENT;
1501 }
1502
1503 // CL_INVALID_VALUE if param_name is not valid.
1504 const cl_version version = event->cast<Event>().getContext().getPlatform().getVersion();
1505 switch (param_name)
1506 {
1507 case EventInfo::Context:
1508 ANGLE_VALIDATE_VERSION(version, 1, 1);
1509 break;
1510 case EventInfo::InvalidEnum:
1511 return CL_INVALID_VALUE;
1512 default:
1513 // All remaining possible values for param_name are valid for all versions.
1514 break;
1515 }
1516
1517 return CL_SUCCESS;
1518 }
1519
1520 cl_int ValidateRetainEvent(cl_event event)
1521 {
1522 // CL_INVALID_EVENT if event is not a valid event object.
1523 return Event::IsValid(event) ? CL_SUCCESS : CL_INVALID_EVENT;
1524 }
1525
1526 cl_int ValidateReleaseEvent(cl_event event)
1527 {
1528 // CL_INVALID_EVENT if event is not a valid event object.
1529 return Event::IsValid(event) ? CL_SUCCESS : CL_INVALID_EVENT;
1530 }
1531
1532 cl_int ValidateGetEventProfilingInfo(cl_event event,
1533 ProfilingInfo param_name,
1534 size_t param_value_size,
1535 const void *param_value,
1536 const size_t *param_value_size_ret)
1537 {
1538     // CL_INVALID_EVENT if event is not a valid event object.
1539 if (!Event::IsValid(event))
1540 {
1541 return CL_INVALID_EVENT;
1542 }
1543 const Event &evt = event->cast<Event>();
1544
1545 // CL_PROFILING_INFO_NOT_AVAILABLE if event is a user event object,
1546 if (evt.getCommandType() == CL_COMMAND_USER)
1547 {
1548 return CL_PROFILING_INFO_NOT_AVAILABLE;
1549 }
1550 // or if the CL_QUEUE_PROFILING_ENABLE flag is not set for the command-queue.
1551 if (evt.getCommandQueue()->getProperties().isNotSet(CL_QUEUE_PROFILING_ENABLE))
1552 {
1553 return CL_PROFILING_INFO_NOT_AVAILABLE;
1554 }
1555
1556 // CL_INVALID_VALUE if param_name is not valid.
1557 const cl_version version = evt.getContext().getPlatform().getVersion();
1558 switch (param_name)
1559 {
1560 case ProfilingInfo::CommandComplete:
1561 ANGLE_VALIDATE_VERSION(version, 2, 0);
1562 break;
1563 case ProfilingInfo::InvalidEnum:
1564 return CL_INVALID_VALUE;
1565 default:
1566 // All remaining possible values for param_name are valid for all versions.
1567 break;
1568 }
1569
1570 return CL_SUCCESS;
1571 }
1572
1573 cl_int ValidateFlush(cl_command_queue command_queue)
1574 {
1575 // CL_INVALID_COMMAND_QUEUE if command_queue is not a valid host command-queue.
1576 if (!CommandQueue::IsValid(command_queue) || !command_queue->cast<CommandQueue>().isOnHost())
1577 {
1578 return CL_INVALID_COMMAND_QUEUE;
1579 }
1580 return CL_SUCCESS;
1581 }
1582
1583 cl_int ValidateFinish(cl_command_queue command_queue)
1584 {
1585 // CL_INVALID_COMMAND_QUEUE if command_queue is not a valid host command-queue.
1586 if (!CommandQueue::IsValid(command_queue) || !command_queue->cast<CommandQueue>().isOnHost())
1587 {
1588 return CL_INVALID_COMMAND_QUEUE;
1589 }
1590 return CL_SUCCESS;
1591 }
1592
1593 cl_int ValidateEnqueueReadBuffer(cl_command_queue command_queue,
1594 cl_mem buffer,
1595 cl_bool blocking_read,
1596 size_t offset,
1597 size_t size,
1598 const void *ptr,
1599 cl_uint num_events_in_wait_list,
1600 const cl_event *event_wait_list,
1601 const cl_event *event)
1602 {
1603 ANGLE_CL_TRY(ValidateCommandQueueAndEventWaitList(command_queue, false, num_events_in_wait_list,
1604 event_wait_list));
1605 ANGLE_CL_TRY(ValidateEnqueueBuffer(command_queue->cast<CommandQueue>(), buffer, true, false));
1606
1607 // CL_INVALID_VALUE if the region being read or written specified
1608 // by (offset, size) is out of bounds or if ptr is a NULL value.
1609 if (!buffer->cast<Buffer>().isRegionValid(offset, size) || ptr == nullptr)
1610 {
1611 return CL_INVALID_VALUE;
1612 }
1613
1614 return CL_SUCCESS;
1615 }
1616
1617 cl_int ValidateEnqueueWriteBuffer(cl_command_queue command_queue,
1618 cl_mem buffer,
1619 cl_bool blocking_write,
1620 size_t offset,
1621 size_t size,
1622 const void *ptr,
1623 cl_uint num_events_in_wait_list,
1624 const cl_event *event_wait_list,
1625 const cl_event *event)
1626 {
1627 ANGLE_CL_TRY(ValidateCommandQueueAndEventWaitList(command_queue, false, num_events_in_wait_list,
1628 event_wait_list));
1629 ANGLE_CL_TRY(ValidateEnqueueBuffer(command_queue->cast<CommandQueue>(), buffer, false, true));
1630
1631 // CL_INVALID_VALUE if the region being read or written specified
1632 // by (offset, size) is out of bounds or if ptr is a NULL value.
1633 if (!buffer->cast<Buffer>().isRegionValid(offset, size) || ptr == nullptr)
1634 {
1635 return CL_INVALID_VALUE;
1636 }
1637
1638 return CL_SUCCESS;
1639 }
1640
1641 cl_int ValidateEnqueueCopyBuffer(cl_command_queue command_queue,
1642 cl_mem src_buffer,
1643 cl_mem dst_buffer,
1644 size_t src_offset,
1645 size_t dst_offset,
1646 size_t size,
1647 cl_uint num_events_in_wait_list,
1648 const cl_event *event_wait_list,
1649 const cl_event *event)
1650 {
1651 ANGLE_CL_TRY(ValidateCommandQueueAndEventWaitList(command_queue, false, num_events_in_wait_list,
1652 event_wait_list));
1653 const CommandQueue &queue = command_queue->cast<CommandQueue>();
1654
1655 ANGLE_CL_TRY(ValidateEnqueueBuffer(queue, src_buffer, false, false));
1656 const Buffer &src = src_buffer->cast<Buffer>();
1657
1658 ANGLE_CL_TRY(ValidateEnqueueBuffer(queue, dst_buffer, false, false));
1659 const Buffer &dst = dst_buffer->cast<Buffer>();
1660
1661 // CL_INVALID_VALUE if src_offset, dst_offset, size, src_offset + size or dst_offset + size
1662 // require accessing elements outside the src_buffer and dst_buffer buffer objects respectively.
1663 if (!src.isRegionValid(src_offset, size) || !dst.isRegionValid(dst_offset, size))
1664 {
1665 return CL_INVALID_VALUE;
1666 }
1667
1668 // CL_MEM_COPY_OVERLAP if src_buffer and dst_buffer are the same buffer or sub-buffer object
1669 // and the source and destination regions overlap or if src_buffer and dst_buffer are
1670 // different sub-buffers of the same associated buffer object and they overlap.
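    // Compare the top-level parent buffers, so that two sub-buffers of the same
    // associated buffer are also detected as potentially overlapping.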
1671 if ((src.isSubBuffer() ? src.getParent().get() : &src) ==
1672 (dst.isSubBuffer() ? dst.getParent().get() : &dst))
1673 {
1674 // Only sub-buffers have offsets larger than zero
1675 src_offset += src.getOffset();
1676 dst_offset += dst.getOffset();
1677
1678 if (OverlapRegions(src_offset, dst_offset, size))
1679 {
1680 return CL_MEM_COPY_OVERLAP;
1681 }
1682 }
1683
1684 return CL_SUCCESS;
1685 }
1686
1687 cl_int ValidateEnqueueReadImage(cl_command_queue command_queue,
1688 cl_mem image,
1689 cl_bool blocking_read,
1690 const size_t *origin,
1691 const size_t *region,
1692 size_t row_pitch,
1693 size_t slice_pitch,
1694 const void *ptr,
1695 cl_uint num_events_in_wait_list,
1696 const cl_event *event_wait_list,
1697 const cl_event *event)
1698 {
1699 ANGLE_CL_TRY(ValidateCommandQueueAndEventWaitList(command_queue, true, num_events_in_wait_list,
1700 event_wait_list));
1701 const CommandQueue &queue = command_queue->cast<CommandQueue>();
1702
1703 ANGLE_CL_TRY(ValidateEnqueueImage(queue, image, true, false));
1704 const Image &img = image->cast<Image>();
1705
1706 ANGLE_CL_TRY(ValidateImageForDevice(img, queue.getDevice(), origin, region));
1707 ANGLE_CL_TRY(ValidateHostRegionForImage(img, region, row_pitch, slice_pitch, ptr));
1708
1709 return CL_SUCCESS;
1710 }
1711
1712 cl_int ValidateEnqueueWriteImage(cl_command_queue command_queue,
1713 cl_mem image,
1714 cl_bool blocking_write,
1715 const size_t *origin,
1716 const size_t *region,
1717 size_t input_row_pitch,
1718 size_t input_slice_pitch,
1719 const void *ptr,
1720 cl_uint num_events_in_wait_list,
1721 const cl_event *event_wait_list,
1722 const cl_event *event)
1723 {
1724 ANGLE_CL_TRY(ValidateCommandQueueAndEventWaitList(command_queue, true, num_events_in_wait_list,
1725 event_wait_list));
1726 const CommandQueue &queue = command_queue->cast<CommandQueue>();
1727
1728 ANGLE_CL_TRY(ValidateEnqueueImage(queue, image, false, true));
1729 const Image &img = image->cast<Image>();
1730
1731 ANGLE_CL_TRY(ValidateImageForDevice(img, queue.getDevice(), origin, region));
1732 ANGLE_CL_TRY(ValidateHostRegionForImage(img, region, input_row_pitch, input_slice_pitch, ptr));
1733
1734 return CL_SUCCESS;
1735 }
1736
1737 cl_int ValidateEnqueueCopyImage(cl_command_queue command_queue,
1738 cl_mem src_image,
1739 cl_mem dst_image,
1740 const size_t *src_origin,
1741 const size_t *dst_origin,
1742 const size_t *region,
1743 cl_uint num_events_in_wait_list,
1744 const cl_event *event_wait_list,
1745 const cl_event *event)
1746 {
1747 ANGLE_CL_TRY(ValidateCommandQueueAndEventWaitList(command_queue, true, num_events_in_wait_list,
1748 event_wait_list));
1749 const CommandQueue &queue = command_queue->cast<CommandQueue>();
1750
1751 ANGLE_CL_TRY(ValidateEnqueueImage(queue, src_image, false, false));
1752 const Image &src = src_image->cast<Image>();
1753
1754 ANGLE_CL_TRY(ValidateEnqueueImage(queue, dst_image, false, false));
1755 const Image &dst = dst_image->cast<Image>();
1756
1757 // CL_IMAGE_FORMAT_MISMATCH if src_image and dst_image do not use the same image format.
1758 if (src.getFormat().image_channel_order != dst.getFormat().image_channel_order ||
1759 src.getFormat().image_channel_data_type != dst.getFormat().image_channel_data_type)
1760 {
1761 return CL_IMAGE_FORMAT_MISMATCH;
1762 }
1763
1764 ANGLE_CL_TRY(ValidateImageForDevice(src, queue.getDevice(), src_origin, region));
1765 ANGLE_CL_TRY(ValidateImageForDevice(dst, queue.getDevice(), dst_origin, region));
1766
1767 // CL_MEM_COPY_OVERLAP if src_image and dst_image are the same image object
1768 // and the source and destination regions overlap.
1769 if (&src == &dst)
1770 {
1771 const MemObjectType type = src.getType();
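        // The regions only collide if they overlap in every dimension the image
        // actually has, so the checks below nest by dimensionality.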
1772 // Check overlap in first dimension
1773 if (OverlapRegions(src_origin[0], dst_origin[0], region[0]))
1774 {
1775 if (type == MemObjectType::Image1D || type == MemObjectType::Image1D_Buffer)
1776 {
1777 return CL_MEM_COPY_OVERLAP;
1778 }
1779
1780 // Check overlap in second dimension
1781 if (OverlapRegions(src_origin[1], dst_origin[1], region[1]))
1782 {
1783 if (type == MemObjectType::Image2D || type == MemObjectType::Image1D_Array)
1784 {
1785 return CL_MEM_COPY_OVERLAP;
1786 }
1787
1788 // Check overlap in third dimension
1789 if (OverlapRegions(src_origin[2], dst_origin[2], region[2]))
1790 {
1791 return CL_MEM_COPY_OVERLAP;
1792 }
1793 }
1794 }
1795 }
1796
1797 return CL_SUCCESS;
1798 }
1799
1800 cl_int ValidateEnqueueCopyImageToBuffer(cl_command_queue command_queue,
1801 cl_mem src_image,
1802 cl_mem dst_buffer,
1803 const size_t *src_origin,
1804 const size_t *region,
1805 size_t dst_offset,
1806 cl_uint num_events_in_wait_list,
1807 const cl_event *event_wait_list,
1808 const cl_event *event)
1809 {
1810 ANGLE_CL_TRY(ValidateCommandQueueAndEventWaitList(command_queue, true, num_events_in_wait_list,
1811 event_wait_list));
1812 const CommandQueue &queue = command_queue->cast<CommandQueue>();
1813
1814 ANGLE_CL_TRY(ValidateEnqueueImage(queue, src_image, false, false));
1815 const Image &src = src_image->cast<Image>();
1816
1817 ANGLE_CL_TRY(ValidateEnqueueBuffer(queue, dst_buffer, false, false));
1818 const Buffer &dst = dst_buffer->cast<Buffer>();
1819
1820 // CL_INVALID_MEM_OBJECT if src_image is a 1D image buffer object created from dst_buffer.
1821 if (src.getType() == MemObjectType::Image1D_Buffer && src.getParent() == &dst)
1822 {
1823 return CL_INVALID_MEM_OBJECT;
1824 }
1825
1826 ANGLE_CL_TRY(ValidateImageForDevice(src, queue.getDevice(), src_origin, region));
1827
1828 // CL_INVALID_VALUE if the region specified by dst_offset and dst_offset + dst_cb
1829 // refer to a region outside dst_buffer.
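    // dst_cb is the byte size of the copied image region: element size times width,
    // further scaled by height and depth for image types that have those dimensions.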
1830 const MemObjectType type = src.getType();
1831 size_t dst_cb = src.getElementSize() * region[0];
1832 if (type != MemObjectType::Image1D && type != MemObjectType::Image1D_Buffer)
1833 {
1834 dst_cb *= region[1];
1835 if (type != MemObjectType::Image2D && type != MemObjectType::Image1D_Array)
1836 {
1837 dst_cb *= region[2];
1838 }
1839 }
1840 if (!dst.isRegionValid(dst_offset, dst_cb))
1841 {
1842 return CL_INVALID_VALUE;
1843 }
1844
1845 return CL_SUCCESS;
1846 }
1847
1848 cl_int ValidateEnqueueCopyBufferToImage(cl_command_queue command_queue,
1849 cl_mem src_buffer,
1850 cl_mem dst_image,
1851 size_t src_offset,
1852 const size_t *dst_origin,
1853 const size_t *region,
1854 cl_uint num_events_in_wait_list,
1855 const cl_event *event_wait_list,
1856 const cl_event *event)
1857 {
1858 ANGLE_CL_TRY(ValidateCommandQueueAndEventWaitList(command_queue, true, num_events_in_wait_list,
1859 event_wait_list));
1860 const CommandQueue &queue = command_queue->cast<CommandQueue>();
1861
1862 ANGLE_CL_TRY(ValidateEnqueueBuffer(queue, src_buffer, false, false));
1863 const Buffer &src = src_buffer->cast<Buffer>();
1864
1865 ANGLE_CL_TRY(ValidateEnqueueImage(queue, dst_image, false, false));
1866 const Image &dst = dst_image->cast<Image>();
1867
1868 // CL_INVALID_MEM_OBJECT if dst_image is a 1D image buffer object created from src_buffer.
1869 if (dst.getType() == MemObjectType::Image1D_Buffer && dst.getParent() == &src)
1870 {
1871 return CL_INVALID_MEM_OBJECT;
1872 }
1873
1874 ANGLE_CL_TRY(ValidateImageForDevice(dst, queue.getDevice(), dst_origin, region));
1875
1876 // CL_INVALID_VALUE if the region specified by src_offset and src_offset + src_cb
1877 // refer to a region outside src_buffer.
1878 const MemObjectType type = dst.getType();
1879 size_t src_cb = dst.getElementSize() * region[0];
1880 if (type != MemObjectType::Image1D && type != MemObjectType::Image1D_Buffer)
1881 {
1882 src_cb *= region[1];
1883 if (type != MemObjectType::Image2D && type != MemObjectType::Image1D_Array)
1884 {
1885 src_cb *= region[2];
1886 }
1887 }
1888 if (!src.isRegionValid(src_offset, src_cb))
1889 {
1890 return CL_INVALID_VALUE;
1891 }
1892
1893 return CL_SUCCESS;
1894 }
1895
1896 cl_int ValidateEnqueueMapBuffer(cl_command_queue command_queue,
1897 cl_mem buffer,
1898 cl_bool blocking_map,
1899 MapFlags map_flags,
1900 size_t offset,
1901 size_t size,
1902 cl_uint num_events_in_wait_list,
1903 const cl_event *event_wait_list,
1904 const cl_event *event)
1905 {
1906 ANGLE_CL_TRY(ValidateCommandQueueAndEventWaitList(command_queue, false, num_events_in_wait_list,
1907 event_wait_list));
1908 const CommandQueue &queue = command_queue->cast<CommandQueue>();
1909
1910 // CL_INVALID_OPERATION if buffer has been created with CL_MEM_HOST_WRITE_ONLY or
1911 // CL_MEM_HOST_NO_ACCESS and CL_MAP_READ is set in map_flags
1912 // or if buffer has been created with CL_MEM_HOST_READ_ONLY or CL_MEM_HOST_NO_ACCESS
1913 // and CL_MAP_WRITE or CL_MAP_WRITE_INVALIDATE_REGION is set in map_flags.
1914 ANGLE_CL_TRY(
1915 ValidateEnqueueBuffer(queue, buffer, map_flags.isSet(CL_MAP_READ),
1916 map_flags.isSet(CL_MAP_WRITE | CL_MAP_WRITE_INVALIDATE_REGION)));
1917
1918 // CL_INVALID_VALUE if region being mapped given by (offset, size) is out of bounds
1919 // or if size is 0 or if values specified in map_flags are not valid.
1920 if (!buffer->cast<Buffer>().isRegionValid(offset, size) || size == 0u ||
1921 !ValidateMapFlags(map_flags, queue.getContext().getPlatform()))
1922 {
1923 return CL_INVALID_VALUE;
1924 }
1925
1926 return CL_SUCCESS;
1927 }
1928
1929 cl_int ValidateEnqueueMapImage(cl_command_queue command_queue,
1930 cl_mem image,
1931 cl_bool blocking_map,
1932 MapFlags map_flags,
1933 const size_t *origin,
1934 const size_t *region,
1935 const size_t *image_row_pitch,
1936 const size_t *image_slice_pitch,
1937 cl_uint num_events_in_wait_list,
1938 const cl_event *event_wait_list,
1939 const cl_event *event)
1940 {
1941 ANGLE_CL_TRY(ValidateCommandQueueAndEventWaitList(command_queue, true, num_events_in_wait_list,
1942 event_wait_list));
1943 const CommandQueue &queue = command_queue->cast<CommandQueue>();
1944
1945 // CL_INVALID_OPERATION if image has been created with CL_MEM_HOST_WRITE_ONLY or
1946 // CL_MEM_HOST_NO_ACCESS and CL_MAP_READ is set in map_flags
1947 // or if image has been created with CL_MEM_HOST_READ_ONLY or CL_MEM_HOST_NO_ACCESS
1948 // and CL_MAP_WRITE or CL_MAP_WRITE_INVALIDATE_REGION is set in map_flags.
1949 ANGLE_CL_TRY(
1950 ValidateEnqueueImage(queue, image, map_flags.isSet(CL_MAP_READ),
1951 map_flags.isSet(CL_MAP_WRITE | CL_MAP_WRITE_INVALIDATE_REGION)));
1952 const Image &img = image->cast<Image>();
1953
1954 ANGLE_CL_TRY(ValidateImageForDevice(img, queue.getDevice(), origin, region));
1955
1956 // CL_INVALID_VALUE if values specified in map_flags are not valid.
1957 if (!ValidateMapFlags(map_flags, queue.getContext().getPlatform()))
1958 {
1959 return CL_INVALID_VALUE;
1960 }
1961
1962 // CL_INVALID_VALUE if image_row_pitch is NULL.
1963 if (image_row_pitch == nullptr)
1964 {
1965 return CL_INVALID_VALUE;
1966 }
1967
1968 // CL_INVALID_VALUE if image is a 3D image, 1D or 2D image array object
1969 // and image_slice_pitch is NULL.
1970 if ((img.getType() == MemObjectType::Image3D || img.getType() == MemObjectType::Image1D_Array ||
1971 img.getType() == MemObjectType::Image2D_Array) &&
1972 image_slice_pitch == nullptr)
1973 {
1974 return CL_INVALID_VALUE;
1975 }
1976
1977 return CL_SUCCESS;
1978 }
1979
1980 cl_int ValidateEnqueueUnmapMemObject(cl_command_queue command_queue,
1981 cl_mem memobj,
1982 const void *mapped_ptr,
1983 cl_uint num_events_in_wait_list,
1984 const cl_event *event_wait_list,
1985 const cl_event *event)
1986 {
1987 ANGLE_CL_TRY(ValidateCommandQueueAndEventWaitList(command_queue, false, num_events_in_wait_list,
1988 event_wait_list));
1989 const CommandQueue &queue = command_queue->cast<CommandQueue>();
1990
1991 // CL_INVALID_MEM_OBJECT if memobj is not a valid memory object or is a pipe object.
1992 if (!Memory::IsValid(memobj))
1993 {
1994 return CL_INVALID_MEM_OBJECT;
1995 }
1996 const Memory &memory = memobj->cast<Memory>();
1997 if (memory.getType() == MemObjectType::Pipe)
1998 {
1999 return CL_INVALID_MEM_OBJECT;
2000 }
2001
2002 // CL_INVALID_CONTEXT if context associated with command_queue and memobj are not the same.
2003 if (&queue.getContext() != &memory.getContext())
2004 {
2005 return CL_INVALID_CONTEXT;
2006 }
2007
2008 return CL_SUCCESS;
2009 }
2010
2011 cl_int ValidateEnqueueNDRangeKernel(cl_command_queue command_queue,
2012 cl_kernel kernel,
2013 cl_uint work_dim,
2014 const size_t *global_work_offset,
2015 const size_t *global_work_size,
2016 const size_t *local_work_size,
2017 cl_uint num_events_in_wait_list,
2018 const cl_event *event_wait_list,
2019 const cl_event *event)
2020 {
2021 ANGLE_CL_TRY(ValidateCommandQueueAndEventWaitList(command_queue, false, num_events_in_wait_list,
2022 event_wait_list));
2023 const CommandQueue &queue = command_queue->cast<CommandQueue>();
2024 const Device &device = queue.getDevice();
2025
2026 // CL_INVALID_KERNEL if kernel is not a valid kernel object.
2027 if (!Kernel::IsValid(kernel))
2028 {
2029 return CL_INVALID_KERNEL;
2030 }
2031 const Kernel &krnl = kernel->cast<Kernel>();
2032
2033 // CL_INVALID_CONTEXT if context associated with command_queue and kernel are not the same.
2034 if (&queue.getContext() != &krnl.getProgram().getContext())
2035 {
2036 return CL_INVALID_CONTEXT;
2037 }
2038
2039 // CL_INVALID_WORK_DIMENSION if work_dim is not a valid value.
2040 if (work_dim == 0u || work_dim > device.getInfo().maxWorkItemSizes.size())
2041 {
2042 return CL_INVALID_WORK_DIMENSION;
2043 }
2044
2045 // CL_INVALID_GLOBAL_OFFSET if global_work_offset is non-NULL before version 1.1.
2046 if (!queue.getContext().getPlatform().isVersionOrNewer(1u, 1u) && global_work_offset != nullptr)
2047 {
2048 return CL_INVALID_GLOBAL_OFFSET;
2049 }
2050
2051 // CL_INVALID_GLOBAL_WORK_SIZE if global_work_size is NULL or if any of the values
2052 // specified in global_work_size[0] ... global_work_size[work_dim - 1] are 0.
2053 // Returning this error code under these circumstances is deprecated by version 2.1.
2054 if (!queue.getContext().getPlatform().isVersionOrNewer(2u, 1u))
2055 {
2056 if (global_work_size == nullptr)
2057 {
2058 return CL_INVALID_GLOBAL_WORK_SIZE;
2059 }
2060 for (cl_uint dim = 0u; dim < work_dim; ++dim)
2061 {
2062 if (global_work_size[dim] == 0u)
2063 {
2064 return CL_INVALID_GLOBAL_WORK_SIZE;
2065 }
2066 }
2067 }
2068
2069 if (local_work_size != nullptr)
2070 {
2071 size_t numWorkItems = 1u; // Initialize with neutral element for multiplication
2072
2073 // CL_INVALID_WORK_ITEM_SIZE if the number of work-items specified
2074 // in any of local_work_size[0] ... local_work_size[work_dim - 1]
2075 // is greater than the corresponding values specified by
2076 // CL_DEVICE_MAX_WORK_ITEM_SIZES[0] ... CL_DEVICE_MAX_WORK_ITEM_SIZES[work_dim - 1].
2077 for (cl_uint dim = 0u; dim < work_dim; ++dim)
2078 {
2079 if (local_work_size[dim] > device.getInfo().maxWorkItemSizes[dim])
2080 {
2081 return CL_INVALID_WORK_ITEM_SIZE;
2082 }
2083 numWorkItems *= local_work_size[dim];
2084 }
2085
2086 // CL_INVALID_WORK_GROUP_SIZE if local_work_size is specified
2087 // and the total number of work-items in the work-group computed as
2088 // local_work_size[0] x ... local_work_size[work_dim - 1] is greater than the value
2089 // specified by CL_KERNEL_WORK_GROUP_SIZE in the Kernel Object Device Queries table.
2090 if (numWorkItems > krnl.getInfo().workGroups[queue.getDeviceIndex()].workGroupSize)
2091 {
2092 return CL_INVALID_WORK_GROUP_SIZE;
2093 }
2094 }
2095
2096 return CL_SUCCESS;
2097 }
2098
2099 cl_int ValidateEnqueueNativeKernel(cl_command_queue command_queue,
2100 void(CL_CALLBACK *user_func)(void *),
2101 const void *args,
2102 size_t cb_args,
2103 cl_uint num_mem_objects,
2104 const cl_mem *mem_list,
2105 const void **args_mem_loc,
2106 cl_uint num_events_in_wait_list,
2107 const cl_event *event_wait_list,
2108 const cl_event *event)
2109 {
2110 ANGLE_CL_TRY(ValidateCommandQueueAndEventWaitList(command_queue, false, num_events_in_wait_list,
2111 event_wait_list));
2112 const CommandQueue &queue = command_queue->cast<CommandQueue>();
2113
2114 // CL_INVALID_OPERATION if the device associated with command_queue
2115 // cannot execute the native kernel.
2116 if (queue.getDevice().getInfo().execCapabilities.isNotSet(CL_EXEC_NATIVE_KERNEL))
2117 {
2118 return CL_INVALID_OPERATION;
2119 }
2120
2121 // CL_INVALID_VALUE if user_func is NULL.
2122 if (user_func == nullptr)
2123 {
2124 return CL_INVALID_VALUE;
2125 }
2126
2127 if (args == nullptr)
2128 {
2129 // CL_INVALID_VALUE if args is a NULL value and cb_args > 0 or num_mem_objects > 0.
2130 if (cb_args > 0u || num_mem_objects > 0u)
2131 {
2132 return CL_INVALID_VALUE;
2133 }
2134 }
2135 else
2136 {
2137 // CL_INVALID_VALUE if args is not NULL and cb_args is 0.
2138 if (cb_args == 0u)
2139 {
2140 return CL_INVALID_VALUE;
2141 }
2142 }
2143
2144 if (num_mem_objects == 0u)
2145 {
2146 // CL_INVALID_VALUE if num_mem_objects = 0 and mem_list or args_mem_loc are not NULL.
2147 if (mem_list != nullptr || args_mem_loc != nullptr)
2148 {
2149 return CL_INVALID_VALUE;
2150 }
2151 }
2152 else
2153 {
2154 // CL_INVALID_VALUE if num_mem_objects > 0 and mem_list or args_mem_loc are NULL.
2155 if (mem_list == nullptr || args_mem_loc == nullptr)
2156 {
2157 return CL_INVALID_VALUE;
2158 }
2159
2160 // CL_INVALID_MEM_OBJECT if one or more memory objects
2161 // specified in mem_list are not valid or are not buffer objects.
2162 while (num_mem_objects-- != 0u)
2163 {
2164 if (!Buffer::IsValid(*mem_list++))
2165 {
2166 return CL_INVALID_MEM_OBJECT;
2167 }
2168 }
2169 }
2170
2171 return CL_SUCCESS;
2172 }
2173
2174 cl_int ValidateSetCommandQueueProperty(cl_command_queue command_queue,
2175 CommandQueueProperties properties,
2176 cl_bool enable,
2177 const cl_command_queue_properties *old_properties)
2178 {
2179 // CL_INVALID_COMMAND_QUEUE if command_queue is not a valid command-queue.
2180 if (!CommandQueue::IsValid(command_queue))
2181 {
2182 return CL_INVALID_COMMAND_QUEUE;
2183 }
2184
2185 // CL_INVALID_VALUE if values specified in properties are not valid.
2186 if (properties.hasOtherBitsThan(CL_QUEUE_OUT_OF_ORDER_EXEC_MODE_ENABLE |
2187 CL_QUEUE_PROFILING_ENABLE))
2188 {
2189 return CL_INVALID_VALUE;
2190 }
2191
2192 return CL_SUCCESS;
2193 }
2194
2195 cl_int ValidateCreateImage2D(cl_context context,
2196 MemFlags flags,
2197 const cl_image_format *image_format,
2198 size_t image_width,
2199 size_t image_height,
2200 size_t image_row_pitch,
2201 const void *host_ptr)
2202 {
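    // cl_image_desc fields in order: image_type, image_width, image_height, image_depth,
    // image_array_size, image_row_pitch, image_slice_pitch, num_mip_levels, num_samples,
    // and the buffer/mem_object union.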
2203 const cl_image_desc desc = {CL_MEM_OBJECT_IMAGE2D, image_width, image_height, 0u, 0u,
2204 image_row_pitch, 0u, 0u, 0u, {nullptr}};
2205 return ValidateCreateImage(context, flags, image_format, &desc, host_ptr);
2206 }
2207
2208 cl_int ValidateCreateImage3D(cl_context context,
2209 MemFlags flags,
2210 const cl_image_format *image_format,
2211 size_t image_width,
2212 size_t image_height,
2213 size_t image_depth,
2214 size_t image_row_pitch,
2215 size_t image_slice_pitch,
2216 const void *host_ptr)
2217 {
2218 const cl_image_desc desc = {
2219 CL_MEM_OBJECT_IMAGE3D, image_width, image_height, image_depth, 0u,
2220 image_row_pitch, image_slice_pitch, 0u, 0u, {nullptr}};
2221 return ValidateCreateImage(context, flags, image_format, &desc, host_ptr);
2222 }
2223
2224 cl_int ValidateEnqueueMarker(cl_command_queue command_queue, const cl_event *event)
2225 {
2226 // CL_INVALID_COMMAND_QUEUE if command_queue is not a valid host command-queue.
2227 if (!CommandQueue::IsValid(command_queue) || !command_queue->cast<CommandQueue>().isOnHost())
2228 {
2229 return CL_INVALID_COMMAND_QUEUE;
2230 }
2231
2232 // CL_INVALID_VALUE if event is NULL.
2233 if (event == nullptr)
2234 {
2235 return CL_INVALID_VALUE;
2236 }
2237
2238 return CL_SUCCESS;
2239 }
2240
2241 cl_int ValidateEnqueueWaitForEvents(cl_command_queue command_queue,
2242 cl_uint num_events,
2243 const cl_event *event_list)
2244 {
2245 // CL_INVALID_COMMAND_QUEUE if command_queue is not a valid host command-queue.
2246 if (!CommandQueue::IsValid(command_queue))
2247 {
2248 return CL_INVALID_COMMAND_QUEUE;
2249 }
2250 const CommandQueue &queue = command_queue->cast<CommandQueue>();
2251 if (!queue.isOnHost())
2252 {
2253 return CL_INVALID_COMMAND_QUEUE;
2254 }
2255
2256 // CL_INVALID_VALUE if num_events is 0 or event_list is NULL.
2257 if (num_events == 0u || event_list == nullptr)
2258 {
2259 return CL_INVALID_VALUE;
2260 }
2261
2262 while (num_events-- != 0u)
2263 {
2264 // The documentation for invalid events is missing.
2265 if (!Event::IsValid(*event_list))
2266 {
2267 return CL_INVALID_VALUE;
2268 }
2269
2270 // CL_INVALID_CONTEXT if context associated with command_queue
2271 // and events in event_list are not the same.
2272 if (&queue.getContext() != &(*event_list++)->cast<Event>().getContext())
2273 {
2274 return CL_INVALID_CONTEXT;
2275 }
2276 }
2277
2278 return CL_SUCCESS;
2279 }
2280
2281 cl_int ValidateEnqueueBarrier(cl_command_queue command_queue)
2282 {
2283 // CL_INVALID_COMMAND_QUEUE if command_queue is not a valid host command-queue.
2284 if (!CommandQueue::IsValid(command_queue) || !command_queue->cast<CommandQueue>().isOnHost())
2285 {
2286 return CL_INVALID_COMMAND_QUEUE;
2287 }
2288 return CL_SUCCESS;
2289 }
2290
2291 cl_int ValidateUnloadCompiler()
2292 {
2293 return CL_SUCCESS;
2294 }
2295
2296 cl_int ValidateGetExtensionFunctionAddress(const char *func_name)
2297 {
2298 return func_name != nullptr && *func_name != '\0' ? CL_SUCCESS : CL_INVALID_VALUE;
2299 }
2300
2301 cl_int ValidateCreateCommandQueue(cl_context context,
2302 cl_device_id device,
2303 CommandQueueProperties properties)
2304 {
2305 // CL_INVALID_CONTEXT if context is not a valid context.
2306 if (!Context::IsValid(context))
2307 {
2308 return CL_INVALID_CONTEXT;
2309 }
2310
2311 // CL_INVALID_DEVICE if device is not a valid device or is not associated with context.
2312 if (!context->cast<Context>().hasDevice(device))
2313 {
2314 return CL_INVALID_DEVICE;
2315 }
2316
2317 // CL_INVALID_VALUE if values specified in properties are not valid.
2318 if (properties.hasOtherBitsThan(CL_QUEUE_OUT_OF_ORDER_EXEC_MODE_ENABLE |
2319 CL_QUEUE_PROFILING_ENABLE))
2320 {
2321 return CL_INVALID_VALUE;
2322 }
2323
2324 return CL_SUCCESS;
2325 }
2326
2327 cl_int ValidateCreateSampler(cl_context context,
2328 cl_bool normalized_coords,
2329 AddressingMode addressing_mode,
2330 FilterMode filter_mode)
2331 {
2332 // CL_INVALID_CONTEXT if context is not a valid context.
2333 if (!Context::IsValid(context))
2334 {
2335 return CL_INVALID_CONTEXT;
2336 }
2337
2338 // CL_INVALID_VALUE if addressing_mode, filter_mode, normalized_coords
2339 // or a combination of these arguments are not valid.
2340 if ((normalized_coords != CL_FALSE && normalized_coords != CL_TRUE) ||
2341 addressing_mode == AddressingMode::InvalidEnum || filter_mode == FilterMode::InvalidEnum)
2342 {
2343 return CL_INVALID_VALUE;
2344 }
2345
2346 // CL_INVALID_OPERATION if images are not supported by any device associated with context.
2347 if (!context->cast<Context>().supportsImages())
2348 {
2349 return CL_INVALID_OPERATION;
2350 }
2351
2352 return CL_SUCCESS;
2353 }
2354
2355 cl_int ValidateEnqueueTask(cl_command_queue command_queue,
2356 cl_kernel kernel,
2357 cl_uint num_events_in_wait_list,
2358 const cl_event *event_wait_list,
2359 const cl_event *event)
2360 {
2361 ANGLE_CL_TRY(ValidateCommandQueueAndEventWaitList(command_queue, false, num_events_in_wait_list,
2362 event_wait_list));
2363
2364 // CL_INVALID_KERNEL if kernel is not a valid kernel object.
2365 if (!Kernel::IsValid(kernel))
2366 {
2367 return CL_INVALID_KERNEL;
2368 }
2369
2370 // CL_INVALID_CONTEXT if context associated with command_queue and kernel are not the same.
2371 if (&command_queue->cast<CommandQueue>().getContext() !=
2372 &kernel->cast<Kernel>().getProgram().getContext())
2373 {
2374 return CL_INVALID_CONTEXT;
2375 }
2376
2377 return CL_SUCCESS;
2378 }
2379
2380 // CL 1.1
2381 cl_int ValidateCreateSubBuffer(cl_mem buffer,
2382 MemFlags flags,
2383 cl_buffer_create_type buffer_create_type,
2384 const void *buffer_create_info)
2385 {
2386 // CL_INVALID_MEM_OBJECT if buffer is not a valid buffer object or is a sub-buffer object.
2387 if (!Buffer::IsValid(buffer))
2388 {
2389 return CL_INVALID_MEM_OBJECT;
2390 }
2391 const Buffer &buf = buffer->cast<Buffer>();
2392 if (buf.isSubBuffer() || !buf.getContext().getPlatform().isVersionOrNewer(1u, 1u))
2393 {
2394 return CL_INVALID_MEM_OBJECT;
2395 }
2396
2397 if (!ValidateMemoryFlags(flags, buf.getContext().getPlatform()))
2398 {
2399 return CL_INVALID_VALUE;
2400 }
2401
2402 const MemFlags bufFlags = buf.getFlags();
2403 // CL_INVALID_VALUE if buffer was created with CL_MEM_WRITE_ONLY
2404 // and flags specifies CL_MEM_READ_WRITE or CL_MEM_READ_ONLY,
2405 if ((bufFlags.isSet(CL_MEM_WRITE_ONLY) && flags.isSet(CL_MEM_READ_WRITE | CL_MEM_READ_ONLY)) ||
2406 // or if buffer was created with CL_MEM_READ_ONLY
2407 // and flags specifies CL_MEM_READ_WRITE or CL_MEM_WRITE_ONLY,
2408 (bufFlags.isSet(CL_MEM_READ_ONLY) && flags.isSet(CL_MEM_READ_WRITE | CL_MEM_WRITE_ONLY)) ||
2409 // or if flags specifies CL_MEM_USE_HOST_PTR, CL_MEM_ALLOC_HOST_PTR or CL_MEM_COPY_HOST_PTR.
2410 flags.isSet(CL_MEM_USE_HOST_PTR | CL_MEM_ALLOC_HOST_PTR | CL_MEM_COPY_HOST_PTR))
2411 {
2412 return CL_INVALID_VALUE;
2413 }
2414
2415 // CL_INVALID_VALUE if buffer was created with CL_MEM_HOST_WRITE_ONLY
2416 // and flags specify CL_MEM_HOST_READ_ONLY,
2417 if ((bufFlags.isSet(CL_MEM_HOST_WRITE_ONLY) && flags.isSet(CL_MEM_HOST_READ_ONLY)) ||
2418 // or if buffer was created with CL_MEM_HOST_READ_ONLY
2419 // and flags specify CL_MEM_HOST_WRITE_ONLY,
2420 (bufFlags.isSet(CL_MEM_HOST_READ_ONLY) && flags.isSet(CL_MEM_HOST_WRITE_ONLY)) ||
2421 // or if buffer was created with CL_MEM_HOST_NO_ACCESS
2422 // and flags specify CL_MEM_HOST_READ_ONLY or CL_MEM_HOST_WRITE_ONLY.
2423 (bufFlags.isSet(CL_MEM_HOST_NO_ACCESS) &&
2424 flags.isSet(CL_MEM_HOST_READ_ONLY | CL_MEM_HOST_WRITE_ONLY)))
2425 {
2426 return CL_INVALID_VALUE;
2427 }
2428
2429 // CL_INVALID_VALUE if the value specified in buffer_create_type is not valid.
2430 if (buffer_create_type != CL_BUFFER_CREATE_TYPE_REGION)
2431 {
2432 return CL_INVALID_VALUE;
2433 }
2434
2435 // CL_INVALID_VALUE if value(s) specified in buffer_create_info
2436 // (for a given buffer_create_type) is not valid or if buffer_create_info is NULL.
2437 // CL_INVALID_VALUE if the region specified by the cl_buffer_region structure
2438 // passed in buffer_create_info is out of bounds in buffer.
2439 const cl_buffer_region *region = static_cast<const cl_buffer_region *>(buffer_create_info);
2440 if (region == nullptr || !buf.isRegionValid(*region))
2441 {
2442 return CL_INVALID_VALUE;
2443 }
2444
2445 // CL_INVALID_BUFFER_SIZE if the size field of the cl_buffer_region structure
2446 // passed in buffer_create_info is 0.
2447 if (region->size == 0u)
2448 {
2449 return CL_INVALID_BUFFER_SIZE;
2450 }
2451
2452 return CL_SUCCESS;
2453 }
2454
2455 cl_int ValidateSetMemObjectDestructorCallback(cl_mem memobj,
2456 void(CL_CALLBACK *pfn_notify)(cl_mem memobj,
2457 void *user_data),
2458 const void *user_data)
2459 {
2460 // CL_INVALID_MEM_OBJECT if memobj is not a valid memory object.
2461 if (!Memory::IsValid(memobj))
2462 {
2463 return CL_INVALID_MEM_OBJECT;
2464 }
2465
2466 // CL_INVALID_VALUE if pfn_notify is NULL.
2467 if (pfn_notify == nullptr)
2468 {
2469 return CL_INVALID_VALUE;
2470 }
2471
2472 return CL_SUCCESS;
2473 }
2474
2475 cl_int ValidateCreateUserEvent(cl_context context)
2476 {
2477 // CL_INVALID_CONTEXT if context is not a valid context.
2478 return Context::IsValidAndVersionOrNewer(context, 1u, 1u) ? CL_SUCCESS : CL_INVALID_CONTEXT;
2479 }
2480
2481 cl_int ValidateSetUserEventStatus(cl_event event, cl_int execution_status)
2482 {
2483 // CL_INVALID_EVENT if event is not a valid user event object.
2484 if (!Event::IsValid(event))
2485 {
2486 return CL_INVALID_EVENT;
2487 }
2488 const Event &evt = event->cast<Event>();
2489 if (!evt.getContext().getPlatform().isVersionOrNewer(1u, 1u) ||
2490 evt.getCommandType() != CL_COMMAND_USER)
2491 {
2492 return CL_INVALID_EVENT;
2493 }
2494
2495 // CL_INVALID_VALUE if the execution_status is not CL_COMPLETE or a negative integer value.
2496 if (execution_status != CL_COMPLETE && execution_status >= 0)
2497 {
2498 return CL_INVALID_VALUE;
2499 }
2500
2501 // CL_INVALID_OPERATION if the execution_status for event has already been changed
2502 // by a previous call to clSetUserEventStatus.
2503 if (evt.wasStatusChanged())
2504 {
2505 return CL_INVALID_OPERATION;
2506 }
2507
2508 return CL_SUCCESS;
2509 }
2510
2511 cl_int ValidateSetEventCallback(cl_event event,
2512 cl_int command_exec_callback_type,
2513 void(CL_CALLBACK *pfn_notify)(cl_event event,
2514 cl_int event_command_status,
2515 void *user_data),
2516 const void *user_data)
2517 {
2518 // CL_INVALID_EVENT if event is not a valid event object.
2519 if (!Event::IsValid(event) ||
2520 !event->cast<Event>().getContext().getPlatform().isVersionOrNewer(1u, 1u))
2521 {
2522 return CL_INVALID_EVENT;
2523 }
2524
2525 // CL_INVALID_VALUE if pfn_event_notify is NULL
2526 // or if command_exec_callback_type is not CL_SUBMITTED, CL_RUNNING, or CL_COMPLETE.
2527 if (pfn_notify == nullptr ||
2528 (command_exec_callback_type != CL_SUBMITTED && command_exec_callback_type != CL_RUNNING &&
2529 command_exec_callback_type != CL_COMPLETE))
2530 {
2531 return CL_INVALID_VALUE;
2532 }
2533
2534 return CL_SUCCESS;
2535 }
2536
2537 cl_int ValidateEnqueueReadBufferRect(cl_command_queue command_queue,
2538 cl_mem buffer,
2539 cl_bool blocking_read,
2540 const size_t *buffer_origin,
2541 const size_t *host_origin,
2542 const size_t *region,
2543 size_t buffer_row_pitch,
2544 size_t buffer_slice_pitch,
2545 size_t host_row_pitch,
2546 size_t host_slice_pitch,
2547 const void *ptr,
2548 cl_uint num_events_in_wait_list,
2549 const cl_event *event_wait_list,
2550 const cl_event *event)
2551 {
2552 ANGLE_CL_TRY(ValidateCommandQueueAndEventWaitList(command_queue, false, num_events_in_wait_list,
2553 event_wait_list));
2554 const CommandQueue &queue = command_queue->cast<CommandQueue>();
2555 if (!queue.getContext().getPlatform().isVersionOrNewer(1u, 1u))
2556 {
2557 return CL_INVALID_COMMAND_QUEUE;
2558 }
2559
2560 ANGLE_CL_TRY(ValidateEnqueueBuffer(queue, buffer, true, false));
2561 ANGLE_CL_TRY(ValidateBufferRect(buffer->cast<Buffer>(), buffer_origin, region, buffer_row_pitch,
2562 buffer_slice_pitch));
2563 ANGLE_CL_TRY(ValidateHostRect(host_origin, region, host_row_pitch, host_slice_pitch, ptr));
2564
2565 return CL_SUCCESS;
2566 }
2567
2568 cl_int ValidateEnqueueWriteBufferRect(cl_command_queue command_queue,
2569 cl_mem buffer,
2570 cl_bool blocking_write,
2571 const size_t *buffer_origin,
2572 const size_t *host_origin,
2573 const size_t *region,
2574 size_t buffer_row_pitch,
2575 size_t buffer_slice_pitch,
2576 size_t host_row_pitch,
2577 size_t host_slice_pitch,
2578 const void *ptr,
2579 cl_uint num_events_in_wait_list,
2580 const cl_event *event_wait_list,
2581 const cl_event *event)
2582 {
2583 ANGLE_CL_TRY(ValidateCommandQueueAndEventWaitList(command_queue, false, num_events_in_wait_list,
2584 event_wait_list));
2585 const CommandQueue &queue = command_queue->cast<CommandQueue>();
2586 if (!queue.getContext().getPlatform().isVersionOrNewer(1u, 1u))
2587 {
2588 return CL_INVALID_COMMAND_QUEUE;
2589 }
2590
2591 ANGLE_CL_TRY(ValidateEnqueueBuffer(queue, buffer, false, true));
2592 ANGLE_CL_TRY(ValidateBufferRect(buffer->cast<Buffer>(), buffer_origin, region, buffer_row_pitch,
2593 buffer_slice_pitch));
2594 ANGLE_CL_TRY(ValidateHostRect(host_origin, region, host_row_pitch, host_slice_pitch, ptr));
2595
2596 return CL_SUCCESS;
2597 }
2598
2599 cl_int ValidateEnqueueCopyBufferRect(cl_command_queue command_queue,
2600 cl_mem src_buffer,
2601 cl_mem dst_buffer,
2602 const size_t *src_origin,
2603 const size_t *dst_origin,
2604 const size_t *region,
2605 size_t src_row_pitch,
2606 size_t src_slice_pitch,
2607 size_t dst_row_pitch,
2608 size_t dst_slice_pitch,
2609 cl_uint num_events_in_wait_list,
2610 const cl_event *event_wait_list,
2611 const cl_event *event)
2612 {
2613 ANGLE_CL_TRY(ValidateCommandQueueAndEventWaitList(command_queue, false, num_events_in_wait_list,
2614 event_wait_list));
2615 const CommandQueue &queue = command_queue->cast<CommandQueue>();
2616 if (!queue.getContext().getPlatform().isVersionOrNewer(1u, 1u))
2617 {
2618 return CL_INVALID_COMMAND_QUEUE;
2619 }
2620
2621 ANGLE_CL_TRY(ValidateEnqueueBuffer(queue, src_buffer, false, false));
2622 const Buffer &src = src_buffer->cast<Buffer>();
2623
2624 ANGLE_CL_TRY(ValidateEnqueueBuffer(queue, dst_buffer, false, false));
2625 const Buffer &dst = dst_buffer->cast<Buffer>();
2626
2627 ANGLE_CL_TRY(ValidateBufferRect(src, src_origin, region, src_row_pitch, src_slice_pitch));
2628 ANGLE_CL_TRY(ValidateBufferRect(dst, dst_origin, region, dst_row_pitch, dst_slice_pitch));
2629
2630 // CL_INVALID_VALUE if src_buffer and dst_buffer are the same buffer object and src_slice_pitch
2631 // is not equal to dst_slice_pitch or src_row_pitch is not equal to dst_row_pitch.
2632 if (&src == &dst && (src_slice_pitch != dst_slice_pitch || src_row_pitch != dst_row_pitch))
2633 {
2634 return CL_INVALID_VALUE;
2635 }
2636
2637 return CL_SUCCESS;
2638 }
2639
2640 // CL 1.2
2641 cl_int ValidateCreateSubDevices(cl_device_id in_device,
2642 const cl_device_partition_property *properties,
2643 cl_uint num_devices,
2644 const cl_device_id *out_devices,
2645 const cl_uint *num_devices_ret)
2646 {
2647 // CL_INVALID_DEVICE if in_device is not a valid device.
2648 if (!Device::IsValid(in_device))
2649 {
2650 return CL_INVALID_DEVICE;
2651 }
2652 const Device &device = in_device->cast<Device>();
2653 if (!device.isVersionOrNewer(1u, 2u))
2654 {
2655 return CL_INVALID_DEVICE;
2656 }
2657
2658 // CL_INVALID_VALUE if values specified in properties are not valid
2659 // or if values specified in properties are valid but not supported by the device
2660 const std::vector<cl_device_partition_property> &devProps =
2661 device.getInfo().partitionProperties;
2662 if (properties == nullptr ||
2663 std::find(devProps.cbegin(), devProps.cend(), *properties) == devProps.cend())
2664 {
2665 return CL_INVALID_VALUE;
2666 }
2667
2668 return CL_SUCCESS;
2669 }
2670
2671 cl_int ValidateRetainDevice(cl_device_id device)
2672 {
2673 // CL_INVALID_DEVICE if device is not a valid device.
2674 if (!Device::IsValid(device) || !device->cast<Device>().isVersionOrNewer(1u, 2u))
2675 {
2676 return CL_INVALID_DEVICE;
2677 }
2678 return CL_SUCCESS;
2679 }
2680
2681 cl_int ValidateReleaseDevice(cl_device_id device)
2682 {
2683 // CL_INVALID_DEVICE if device is not a valid device.
2684 if (!Device::IsValid(device) || !device->cast<Device>().isVersionOrNewer(1u, 2u))
2685 {
2686 return CL_INVALID_DEVICE;
2687 }
2688 return CL_SUCCESS;
2689 }
2690
2691 cl_int ValidateCreateImage(cl_context context,
2692 MemFlags flags,
2693 const cl_image_format *image_format,
2694 const cl_image_desc *image_desc,
2695 const void *host_ptr)
2696 {
2697 // CL_INVALID_CONTEXT if context is not a valid context.
2698 if (!Context::IsValidAndVersionOrNewer(context, 1u, 2u))
2699 {
2700 return CL_INVALID_CONTEXT;
2701 }
2702 const Context &ctx = context->cast<Context>();
2703
2704 // CL_INVALID_VALUE if values specified in flags are not valid.
2705 if (!ValidateMemoryFlags(flags, ctx.getPlatform()))
2706 {
2707 return CL_INVALID_VALUE;
2708 }
2709
2710 // CL_INVALID_IMAGE_FORMAT_DESCRIPTOR if values specified in image_format are not valid
2711 // or if image_format is NULL.
2712 if (!IsValidImageFormat(image_format, ctx.getPlatform().getInfo()))
2713 {
2714 return CL_INVALID_IMAGE_FORMAT_DESCRIPTOR;
2715 }
2716
2717 // CL_INVALID_IMAGE_DESCRIPTOR if image_desc is NULL.
2718 if (image_desc == nullptr)
2719 {
2720 return CL_INVALID_IMAGE_DESCRIPTOR;
2721 }
2722
2723 const size_t elemSize = GetElementSize(*image_format);
2724 if (elemSize == 0u)
2725 {
2726 ASSERT(false);
2727 ERR() << "Failed to calculate image element size";
2728 return CL_INVALID_IMAGE_FORMAT_DESCRIPTOR;
2729 }
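    // Derive the effective row pitch and slice size: a zero image_row_pitch means tightly
    // packed rows (width * element size), and 1D image arrays use an implicit height of 1.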
2730 const size_t rowPitch = image_desc->image_row_pitch != 0u ? image_desc->image_row_pitch
2731 : image_desc->image_width * elemSize;
2732 const size_t imageHeight =
2733 image_desc->image_type == CL_MEM_OBJECT_IMAGE1D_ARRAY ? 1u : image_desc->image_height;
2734 const size_t sliceSize = imageHeight * rowPitch;
2735
2736 // CL_INVALID_IMAGE_DESCRIPTOR if values specified in image_desc are not valid.
2737 switch (FromCLenum<MemObjectType>(image_desc->image_type))
2738 {
2739 case MemObjectType::Image1D:
2740 if (image_desc->image_width == 0u)
2741 {
2742 return CL_INVALID_IMAGE_DESCRIPTOR;
2743 }
2744 break;
2745 case MemObjectType::Image2D:
2746 if (image_desc->image_width == 0u || image_desc->image_height == 0u)
2747 {
2748 return CL_INVALID_IMAGE_DESCRIPTOR;
2749 }
2750 break;
2751 case MemObjectType::Image3D:
2752 if (image_desc->image_width == 0u || image_desc->image_height == 0u ||
2753 image_desc->image_depth == 0u)
2754 {
2755 return CL_INVALID_IMAGE_DESCRIPTOR;
2756 }
2757 break;
2758 case MemObjectType::Image1D_Array:
2759 if (image_desc->image_width == 0u || image_desc->image_array_size == 0u)
2760 {
2761 return CL_INVALID_IMAGE_DESCRIPTOR;
2762 }
2763 break;
2764 case MemObjectType::Image2D_Array:
2765 if (image_desc->image_width == 0u || image_desc->image_height == 0u ||
2766 image_desc->image_array_size == 0u)
2767 {
2768 return CL_INVALID_IMAGE_DESCRIPTOR;
2769 }
2770 break;
2771 case MemObjectType::Image1D_Buffer:
2772 if (image_desc->image_width == 0u)
2773 {
2774 return CL_INVALID_IMAGE_DESCRIPTOR;
2775 }
2776 break;
2777 default:
2778 return CL_INVALID_IMAGE_DESCRIPTOR;
2779 }
2780 if (image_desc->image_row_pitch != 0u)
2781 {
2782 // image_row_pitch must be 0 if host_ptr is NULL.
2783 if (host_ptr == nullptr)
2784 {
2785 return CL_INVALID_IMAGE_DESCRIPTOR;
2786 }
2787 // image_row_pitch can be either 0
2788 // or >= image_width * size of element in bytes if host_ptr is not NULL.
2789 if (image_desc->image_row_pitch < image_desc->image_width * elemSize)
2790 {
2791 return CL_INVALID_IMAGE_DESCRIPTOR;
2792 }
2793 // If image_row_pitch is not 0, it must be a multiple of the image element size in bytes.
2794 if ((image_desc->image_row_pitch % elemSize) != 0u)
2795 {
2796 return CL_INVALID_IMAGE_DESCRIPTOR;
2797 }
2798 }
2799 if (image_desc->image_slice_pitch != 0u)
2800 {
2801 // image_slice_pitch must be 0 if host_ptr is NULL.
2802 if (host_ptr == nullptr)
2803 {
2804 return CL_INVALID_IMAGE_DESCRIPTOR;
2805 }
2806 // If host_ptr is not NULL, image_slice_pitch can be either 0
2807 // or >= image_row_pitch * image_height for a 2D image array or 3D image
2808 // and can be either 0 or >= image_row_pitch for a 1D image array.
2809 if (image_desc->image_slice_pitch < sliceSize)
2810 {
2811 return CL_INVALID_IMAGE_DESCRIPTOR;
2812 }
2813 // If image_slice_pitch is not 0, it must be a multiple of the image_row_pitch.
2814 if ((image_desc->image_slice_pitch % rowPitch) != 0u)
2815 {
2816 return CL_INVALID_IMAGE_DESCRIPTOR;
2817 }
2818 }
2819 // num_mip_levels and num_samples must be 0.
2820 if (image_desc->num_mip_levels != 0u || image_desc->num_samples != 0u)
2821 {
2822 return CL_INVALID_IMAGE_DESCRIPTOR;
2823 }
2824 // buffer can be a buffer memory object if image_type is CL_MEM_OBJECT_IMAGE1D_BUFFER or
2825 // CL_MEM_OBJECT_IMAGE2D. buffer can be an image object if image_type is CL_MEM_OBJECT_IMAGE2D.
2826 // Otherwise it must be NULL.
2827 if (image_desc->buffer != nullptr &&
2828 (!Buffer::IsValid(image_desc->buffer) ||
2829 (image_desc->image_type != CL_MEM_OBJECT_IMAGE1D_BUFFER &&
2830 image_desc->image_type != CL_MEM_OBJECT_IMAGE2D)) &&
2831 (!Image::IsValid(image_desc->buffer) || image_desc->image_type != CL_MEM_OBJECT_IMAGE2D))
2832 {
2833 return CL_INVALID_IMAGE_DESCRIPTOR;
2834 }
2835
2836 // CL_INVALID_OPERATION if there are no devices in context that support images.
2837 if (!ctx.supportsImages())
2838 {
2839 return CL_INVALID_OPERATION;
2840 }
2841
2842 // CL_INVALID_IMAGE_SIZE if image dimensions specified in image_desc exceed the maximum
2843 // image dimensions described in the Device Queries table for all devices in context.
2844 const DevicePtrs &devices = ctx.getDevices();
2845 if (std::find_if(devices.cbegin(), devices.cend(), [&](const DevicePtr &ptr) {
2846 return ptr->supportsNativeImageDimensions(*image_desc);
2847 }) == devices.cend())
2848 {
2849 return CL_INVALID_IMAGE_SIZE;
2850 }
2851
2852 // CL_INVALID_HOST_PTR
2853 // if host_ptr is NULL and CL_MEM_USE_HOST_PTR or CL_MEM_COPY_HOST_PTR are set in flags or
2854 // if host_ptr is not NULL but CL_MEM_COPY_HOST_PTR or CL_MEM_USE_HOST_PTR are not set in flags.
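    // Both spec clauses reduce to "host_ptr presence must match the host-pointer flags",
    // hence the single inequality test below.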
2855 if ((host_ptr != nullptr) != flags.isSet(CL_MEM_USE_HOST_PTR | CL_MEM_COPY_HOST_PTR))
2856 {
2857 return CL_INVALID_HOST_PTR;
2858 }
2859
2860 return CL_SUCCESS;
2861 }
2862
2863 cl_int ValidateCreateProgramWithBuiltInKernels(cl_context context,
2864 cl_uint num_devices,
2865 const cl_device_id *device_list,
2866 const char *kernel_names)
2867 {
2868 // CL_INVALID_CONTEXT if context is not a valid context.
2869 if (!Context::IsValidAndVersionOrNewer(context, 1u, 2u))
2870 {
2871 return CL_INVALID_CONTEXT;
2872 }
2873 const Context &ctx = context->cast<Context>();
2874
2875 // CL_INVALID_VALUE if device_list is NULL or num_devices is zero or if kernel_names is NULL.
2876 if (device_list == nullptr || num_devices == 0u || kernel_names == nullptr)
2877 {
2878 return CL_INVALID_VALUE;
2879 }
2880
2881 // CL_INVALID_DEVICE if any device in device_list
2882 // is not in the list of devices associated with context.
2883 for (size_t index = 0u; index < num_devices; ++index)
2884 {
2885 if (!ctx.hasDevice(device_list[index]))
2886 {
2887 return CL_INVALID_DEVICE;
2888 }
2889 }
2890
2891 // CL_INVALID_VALUE if kernel_names contains a kernel name
2892 // that is not supported by any of the devices in device_list.
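    // kernel_names is a semicolon-separated list; walk it and validate each non-empty name.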
2893 const char *start = kernel_names;
2894 do
2895 {
2896 const char *end = start;
2897 while (*end != '\0' && *end != ';')
2898 {
2899 ++end;
2900 }
2901 const size_t length = end - start;
2902 if (length != 0u && !ctx.supportsBuiltInKernel(std::string(start, length)))
2903 {
2904 return CL_INVALID_VALUE;
2905 }
2906 start = end;
2907 } while (*start++ != '\0');
2908
2909 return CL_SUCCESS;
2910 }
2911
2912 cl_int ValidateCompileProgram(cl_program program,
2913 cl_uint num_devices,
2914 const cl_device_id *device_list,
2915 const char *options,
2916 cl_uint num_input_headers,
2917 const cl_program *input_headers,
2918 const char **header_include_names,
2919 void(CL_CALLBACK *pfn_notify)(cl_program program, void *user_data),
2920 const void *user_data)
2921 {
2922 // CL_INVALID_PROGRAM if program is not a valid program object.
2923 if (!Program::IsValid(program))
2924 {
2925 return CL_INVALID_PROGRAM;
2926 }
2927 const Program &prog = program->cast<Program>();
2928 if (!prog.getContext().getPlatform().isVersionOrNewer(1u, 2u))
2929 {
2930 return CL_INVALID_PROGRAM;
2931 }
2932
2933 // CL_INVALID_VALUE if device_list is NULL and num_devices is greater than zero,
2934 // or if device_list is not NULL and num_devices is zero.
2935 if ((device_list != nullptr) != (num_devices != 0u))
2936 {
2937 return CL_INVALID_VALUE;
2938 }
2939
2940 // CL_INVALID_DEVICE if any device in device_list
2941 // is not in the list of devices associated with program.
2942 while (num_devices-- != 0u)
2943 {
2944 if (!prog.hasDevice(*device_list++))
2945 {
2946 return CL_INVALID_DEVICE;
2947 }
2948 }
2949
2950 // CL_INVALID_VALUE if num_input_headers is zero and header_include_names
2951 // or input_headers are not NULL
2952 // or if num_input_headers is not zero and header_include_names or input_headers are NULL.
2953 if ((num_input_headers != 0u) != (header_include_names != nullptr) ||
2954 (num_input_headers != 0u) != (input_headers != nullptr))
2955 {
2956 return CL_INVALID_VALUE;
2957 }
2958
2959 // CL_INVALID_VALUE if pfn_notify is NULL but user_data is not NULL.
2960 if (pfn_notify == nullptr && user_data != nullptr)
2961 {
2962 return CL_INVALID_VALUE;
2963 }
2964
2965 // CL_INVALID_OPERATION if the build of a program executable for any of the devices listed
2966 // in device_list by a previous call to clBuildProgram for program has not completed.
2967 if (prog.isBuilding())
2968 {
2969 return CL_INVALID_OPERATION;
2970 }
2971
2972 // CL_INVALID_OPERATION if there are kernel objects attached to program.
2973 if (prog.hasAttachedKernels())
2974 {
2975 return CL_INVALID_OPERATION;
2976 }
2977
2978 return CL_SUCCESS;
2979 }
2980
2981 cl_int ValidateLinkProgram(cl_context context,
2982 cl_uint num_devices,
2983 const cl_device_id *device_list,
2984 const char *options,
2985 cl_uint num_input_programs,
2986 const cl_program *input_programs,
2987 void(CL_CALLBACK *pfn_notify)(cl_program program, void *user_data),
2988 const void *user_data)
2989 {
2990 // CL_INVALID_CONTEXT if context is not a valid context.
2991 if (!Context::IsValidAndVersionOrNewer(context, 1u, 2u))
2992 {
2993 return CL_INVALID_CONTEXT;
2994 }
2995 const Context &ctx = context->cast<Context>();
2996
2997 // CL_INVALID_VALUE if device_list is NULL and num_devices is greater than zero,
2998 // or if device_list is not NULL and num_devices is zero.
2999 if ((device_list != nullptr) != (num_devices != 0u))
3000 {
3001 return CL_INVALID_VALUE;
3002 }
3003
3004 // CL_INVALID_DEVICE if any device in device_list
3005 // is not in the list of devices associated with context.
3006 while (num_devices-- != 0u)
3007 {
3008 if (!ctx.hasDevice(*device_list++))
3009 {
3010 return CL_INVALID_DEVICE;
3011 }
3012 }
3013
3014 // CL_INVALID_VALUE if num_input_programs is zero or input_programs is NULL.
3015 if (num_input_programs == 0u || input_programs == nullptr)
3016 {
3017 return CL_INVALID_VALUE;
3018 }
3019
3020 // CL_INVALID_PROGRAM if programs specified in input_programs are not valid program objects.
3021 while (num_input_programs-- != 0u)
3022 {
3023 if (!Program::IsValid(*input_programs++))
3024 {
3025 return CL_INVALID_PROGRAM;
3026 }
3027 }
3028
3029 // CL_INVALID_VALUE if pfn_notify is NULL but user_data is not NULL.
3030 if (pfn_notify == nullptr && user_data != nullptr)
3031 {
3032 return CL_INVALID_VALUE;
3033 }
3034
3035 return CL_SUCCESS;
3036 }
3037
3038 cl_int ValidateUnloadPlatformCompiler(cl_platform_id platform)
3039 {
3040 // CL_INVALID_PLATFORM if platform is not a valid platform.
3041 if (!Platform::IsValid(platform) || !platform->cast<Platform>().isVersionOrNewer(1u, 2u))
3042 {
3043 return CL_INVALID_PLATFORM;
3044 }
3045 return CL_SUCCESS;
3046 }
3047
3048 cl_int ValidateGetKernelArgInfo(cl_kernel kernel,
3049 cl_uint arg_index,
3050 KernelArgInfo param_name,
3051 size_t param_value_size,
3052 const void *param_value,
3053 const size_t *param_value_size_ret)
3054 {
3055 // CL_INVALID_KERNEL if kernel is not a valid kernel object.
3056 if (!Kernel::IsValid(kernel))
3057 {
3058 return CL_INVALID_KERNEL;
3059 }
3060 const Kernel &krnl = kernel->cast<Kernel>();
3061 if (!krnl.getProgram().getContext().getPlatform().isVersionOrNewer(1u, 2u))
3062 {
3063 return CL_INVALID_KERNEL;
3064 }
3065
3066 // CL_INVALID_ARG_INDEX if arg_index is not a valid argument index.
3067 if (arg_index >= krnl.getInfo().args.size())
3068 {
3069 return CL_INVALID_ARG_INDEX;
3070 }
3071
3072 // CL_KERNEL_ARG_INFO_NOT_AVAILABLE if the argument information is not available for kernel.
3073 if (!krnl.getInfo().args[arg_index].isAvailable())
3074 {
3075 return CL_KERNEL_ARG_INFO_NOT_AVAILABLE;
3076 }
3077
3078 // CL_INVALID_VALUE if param_name is not valid.
3079 if (param_name == KernelArgInfo::InvalidEnum)
3080 {
3081 return CL_INVALID_VALUE;
3082 }
3083
3084 return CL_SUCCESS;
3085 }
3086
3087 cl_int ValidateEnqueueFillBuffer(cl_command_queue command_queue,
3088 cl_mem buffer,
3089 const void *pattern,
3090 size_t pattern_size,
3091 size_t offset,
3092 size_t size,
3093 cl_uint num_events_in_wait_list,
3094 const cl_event *event_wait_list,
3095 const cl_event *event)
3096 {
3097 ANGLE_CL_TRY(ValidateCommandQueueAndEventWaitList(command_queue, false, num_events_in_wait_list,
3098 event_wait_list));
3099 const CommandQueue &queue = command_queue->cast<CommandQueue>();
3100 if (!queue.getContext().getPlatform().isVersionOrNewer(1u, 2u))
3101 {
3102 return CL_INVALID_COMMAND_QUEUE;
3103 }
3104
3105 ANGLE_CL_TRY(ValidateEnqueueBuffer(queue, buffer, false, false));
3106
3107 // CL_INVALID_VALUE if offset or offset + size require accessing
3108 // elements outside the buffer object respectively.
3109 if (!buffer->cast<Buffer>().isRegionValid(offset, size))
3110 {
3111 return CL_INVALID_VALUE;
3112 }
3113
3114 // CL_INVALID_VALUE if pattern is NULL or if pattern_size is 0 or
3115 // if pattern_size is not one of { 1, 2, 4, 8, 16, 32, 64, 128 }.
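    // pattern_size & (pattern_size - 1) is zero only for powers of two, which together
    // with the range check enforces the allowed set {1, 2, 4, ..., 128}.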
3116 if (pattern == nullptr || pattern_size == 0u || pattern_size > 128u ||
3117 (pattern_size & (pattern_size - 1u)) != 0u)
3118 {
3119 return CL_INVALID_VALUE;
3120 }
3121
3122 // CL_INVALID_VALUE if offset and size are not a multiple of pattern_size.
3123 if ((offset % pattern_size) != 0u || (size % pattern_size) != 0u)
3124 {
3125 return CL_INVALID_VALUE;
3126 }
3127
3128 return CL_SUCCESS;
3129 }
3130
3131 cl_int ValidateEnqueueFillImage(cl_command_queue command_queue,
3132 cl_mem image,
3133 const void *fill_color,
3134 const size_t *origin,
3135 const size_t *region,
3136 cl_uint num_events_in_wait_list,
3137 const cl_event *event_wait_list,
3138 const cl_event *event)
3139 {
3140 ANGLE_CL_TRY(ValidateCommandQueueAndEventWaitList(command_queue, true, num_events_in_wait_list,
3141 event_wait_list));
3142 const CommandQueue &queue = command_queue->cast<CommandQueue>();
3143 if (!queue.getContext().getPlatform().isVersionOrNewer(1u, 2u))
3144 {
3145 return CL_INVALID_COMMAND_QUEUE;
3146 }
3147
3148 ANGLE_CL_TRY(ValidateEnqueueImage(queue, image, false, false));
3149 const Image &img = image->cast<Image>();
3150
3151 ANGLE_CL_TRY(ValidateImageForDevice(img, queue.getDevice(), origin, region));
3152
3153 // CL_INVALID_VALUE if fill_color is NULL.
3154 if (fill_color == nullptr)
3155 {
3156 return CL_INVALID_VALUE;
3157 }
3158
3159 return CL_SUCCESS;
3160 }
3161
3162 cl_int ValidateEnqueueMigrateMemObjects(cl_command_queue command_queue,
3163 cl_uint num_mem_objects,
3164 const cl_mem *mem_objects,
3165 MemMigrationFlags flags,
3166 cl_uint num_events_in_wait_list,
3167 const cl_event *event_wait_list,
3168 const cl_event *event)
3169 {
3170 ANGLE_CL_TRY(ValidateCommandQueueAndEventWaitList(command_queue, false, num_events_in_wait_list,
3171 event_wait_list));
3172 const CommandQueue &queue = command_queue->cast<CommandQueue>();
3173 if (!queue.getContext().getPlatform().isVersionOrNewer(1u, 2u))
3174 {
3175 return CL_INVALID_COMMAND_QUEUE;
3176 }
3177
3178 // CL_INVALID_VALUE if num_mem_objects is zero or if mem_objects is NULL.
3179 if (num_mem_objects == 0u || mem_objects == nullptr)
3180 {
3181 return CL_INVALID_VALUE;
3182 }
3183
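    // Walk the list of memory objects, using num_mem_objects as the loop counter.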
3184 while (num_mem_objects-- != 0u)
3185 {
3186 // CL_INVALID_MEM_OBJECT if any of the memory objects
3187 // in mem_objects is not a valid memory object.
3188 if (!Memory::IsValid(*mem_objects))
3189 {
3190 return CL_INVALID_MEM_OBJECT;
3191 }
3192
3193 // CL_INVALID_CONTEXT if the context associated with command_queue
3194 // and memory objects in mem_objects are not the same.
3195 if (&queue.getContext() != &(*mem_objects++)->cast<Memory>().getContext())
3196 {
3197 return CL_INVALID_CONTEXT;
3198 }
3199 }
3200
3201     // CL_INVALID_VALUE if flags is not 0 or is not any of the supported migration flag values.
3202 const MemMigrationFlags allowedFlags(CL_MIGRATE_MEM_OBJECT_HOST |
3203 CL_MIGRATE_MEM_OBJECT_CONTENT_UNDEFINED);
3204 if (flags.hasOtherBitsThan(allowedFlags))
3205 {
3206 return CL_INVALID_VALUE;
3207 }
3208
3209 return CL_SUCCESS;
3210 }
3211
3212 cl_int ValidateEnqueueMarkerWithWaitList(cl_command_queue command_queue,
3213 cl_uint num_events_in_wait_list,
3214 const cl_event *event_wait_list,
3215 const cl_event *event)
3216 {
3217 ANGLE_CL_TRY(ValidateCommandQueueAndEventWaitList(command_queue, false, num_events_in_wait_list,
3218 event_wait_list));
3219 if (!command_queue->cast<CommandQueue>().getContext().getPlatform().isVersionOrNewer(1u, 2u))
3220 {
3221 return CL_INVALID_COMMAND_QUEUE;
3222 }
3223 return CL_SUCCESS;
3224 }
3225
3226 cl_int ValidateEnqueueBarrierWithWaitList(cl_command_queue command_queue,
3227 cl_uint num_events_in_wait_list,
3228 const cl_event *event_wait_list,
3229 const cl_event *event)
3230 {
3231 ANGLE_CL_TRY(ValidateCommandQueueAndEventWaitList(command_queue, false, num_events_in_wait_list,
3232 event_wait_list));
3233 if (!command_queue->cast<CommandQueue>().getContext().getPlatform().isVersionOrNewer(1u, 2u))
3234 {
3235 return CL_INVALID_COMMAND_QUEUE;
3236 }
3237 return CL_SUCCESS;
3238 }
3239
3240 cl_int ValidateGetExtensionFunctionAddressForPlatform(cl_platform_id platform,
3241 const char *func_name)
3242 {
3243 if (!Platform::IsValid(platform) || func_name == nullptr || *func_name == '\0')
3244 {
3245 return CL_INVALID_VALUE;
3246 }
3247 return CL_SUCCESS;
3248 }
3249
3250 // CL 2.0
3251 cl_int ValidateCreateCommandQueueWithProperties(cl_context context,
3252 cl_device_id device,
3253 const cl_queue_properties *properties)
3254 {
3255 // CL_INVALID_CONTEXT if context is not a valid context.
3256 if (!Context::IsValidAndVersionOrNewer(context, 2u, 0u))
3257 {
3258 return CL_INVALID_CONTEXT;
3259 }
3260
3261 // CL_INVALID_DEVICE if device is not a valid device or is not associated with context.
3262 if (!context->cast<Context>().hasDevice(device) ||
3263 !device->cast<Device>().isVersionOrNewer(2u, 0u))
3264 {
3265 return CL_INVALID_DEVICE;
3266 }
3267
3268 // CL_INVALID_VALUE if values specified in properties are not valid.
3269 if (properties != nullptr)
3270 {
3271 bool isQueueOnDevice = false;
3272 bool hasQueueSize = false;
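        // properties is a zero-terminated list of (property name, value) pairs.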
3273 while (*properties != 0)
3274 {
3275 switch (*properties++)
3276 {
3277 case CL_QUEUE_PROPERTIES:
3278 {
3279 const CommandQueueProperties props(*properties++);
3280 const CommandQueueProperties validProps(
3281 CL_QUEUE_OUT_OF_ORDER_EXEC_MODE_ENABLE | CL_QUEUE_PROFILING_ENABLE |
3282 CL_QUEUE_ON_DEVICE | CL_QUEUE_ON_DEVICE_DEFAULT);
3283 if (props.hasOtherBitsThan(validProps) ||
3284 // If CL_QUEUE_ON_DEVICE is set, CL_QUEUE_OUT_OF_ORDER_EXEC_MODE_ENABLE
3285 // must also be set.
3286 (props.isSet(CL_QUEUE_ON_DEVICE) &&
3287 !props.isSet(CL_QUEUE_OUT_OF_ORDER_EXEC_MODE_ENABLE)) ||
3288 // CL_QUEUE_ON_DEVICE_DEFAULT can only be used with CL_QUEUE_ON_DEVICE.
3289 (props.isSet(CL_QUEUE_ON_DEVICE_DEFAULT) &&
3290 !props.isSet(CL_QUEUE_ON_DEVICE)))
3291 {
3292 return CL_INVALID_VALUE;
3293 }
3294 isQueueOnDevice = props.isSet(CL_QUEUE_ON_DEVICE);
3295 break;
3296 }
3297 case CL_QUEUE_SIZE:
3298 {
3299 // CL_QUEUE_SIZE must be a value <= CL_DEVICE_QUEUE_ON_DEVICE_MAX_SIZE.
3300 if (*properties++ > device->cast<Device>().getInfo().queueOnDeviceMaxSize)
3301 {
3302 return CL_INVALID_VALUE;
3303 }
3304 hasQueueSize = true;
3305 break;
3306 }
3307 default:
3308 return CL_INVALID_VALUE;
3309 }
3310 }
3311
3312 // CL_QUEUE_SIZE can only be specified if CL_QUEUE_ON_DEVICE is set in CL_QUEUE_PROPERTIES.
3313 if (hasQueueSize && !isQueueOnDevice)
3314 {
3315 return CL_INVALID_VALUE;
3316 }
3317 }
3318
3319 return CL_SUCCESS;
3320 }
3321
3322 cl_int ValidateCreatePipe(cl_context context,
3323 MemFlags flags,
3324 cl_uint pipe_packet_size,
3325 cl_uint pipe_max_packets,
3326 const cl_pipe_properties *properties)
3327 {
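    // Pipe arguments are not validated here; this entry point currently accepts all arguments.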
3328 return CL_SUCCESS;
3329 }
3330
3331 cl_int ValidateGetPipeInfo(cl_mem pipe,
3332 PipeInfo param_name,
3333 size_t param_value_size,
3334 const void *param_value,
3335 const size_t *param_value_size_ret)
3336 {
3337 return CL_SUCCESS;
3338 }
3339
3340 cl_int ValidateSVMAlloc(cl_context context, SVM_MemFlags flags, size_t size, cl_uint alignment)
3341 {
3342 return CL_SUCCESS;
3343 }
3344
3345 cl_int ValidateSVMFree(cl_context context, const void *svm_pointer)
3346 {
3347 return CL_SUCCESS;
3348 }
3349
3350 cl_int ValidateCreateSamplerWithProperties(cl_context context,
3351 const cl_sampler_properties *sampler_properties)
3352 {
3353 // CL_INVALID_CONTEXT if context is not a valid context.
3354 if (!Context::IsValidAndVersionOrNewer(context, 2u, 0u))
3355 {
3356 return CL_INVALID_CONTEXT;
3357 }
3358
3359 // CL_INVALID_VALUE if the property name in sampler_properties is not a supported property name,
3360 // if the value specified for a supported property name is not valid,
3361 // or if the same property name is specified more than once.
3362 if (sampler_properties != nullptr)
3363 {
3364 bool hasNormalizedCoords = false;
3365 bool hasAddressingMode = false;
3366 bool hasFilterMode = false;
3367 const cl_sampler_properties *propIt = sampler_properties;
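        // sampler_properties is likewise a zero-terminated list of (property name, value) pairs.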
3368 while (*propIt != 0)
3369 {
3370 switch (*propIt++)
3371 {
3372 case CL_SAMPLER_NORMALIZED_COORDS:
3373 if (hasNormalizedCoords || (*propIt != CL_FALSE && *propIt != CL_TRUE))
3374 {
3375 return CL_INVALID_VALUE;
3376 }
3377 hasNormalizedCoords = true;
3378 ++propIt;
3379 break;
3380 case CL_SAMPLER_ADDRESSING_MODE:
3381 if (hasAddressingMode || FromCLenum<AddressingMode>(static_cast<CLenum>(
3382 *propIt++)) == AddressingMode::InvalidEnum)
3383 {
3384 return CL_INVALID_VALUE;
3385 }
3386 hasAddressingMode = true;
3387 break;
3388 case CL_SAMPLER_FILTER_MODE:
3389 if (hasFilterMode || FromCLenum<FilterMode>(static_cast<CLenum>(*propIt++)) ==
3390 FilterMode::InvalidEnum)
3391 {
3392 return CL_INVALID_VALUE;
3393 }
3394 hasFilterMode = true;
3395 break;
3396 default:
3397 return CL_INVALID_VALUE;
3398 }
3399 }
3400 }
3401
3402 // CL_INVALID_OPERATION if images are not supported by any device associated with context.
3403 if (!context->cast<Context>().supportsImages())
3404 {
3405 return CL_INVALID_OPERATION;
3406 }
3407
3408 return CL_SUCCESS;
3409 }
3410
3411 cl_int ValidateSetKernelArgSVMPointer(cl_kernel kernel, cl_uint arg_index, const void *arg_value)
3412 {
3413 return CL_SUCCESS;
3414 }
3415
3416 cl_int ValidateSetKernelExecInfo(cl_kernel kernel,
3417 KernelExecInfo param_name,
3418 size_t param_value_size,
3419 const void *param_value)
3420 {
3421 return CL_SUCCESS;
3422 }
3423
3424 cl_int ValidateEnqueueSVMFree(cl_command_queue command_queue,
3425 cl_uint num_svm_pointers,
3426 void *const svm_pointers[],
3427 void(CL_CALLBACK *pfn_free_func)(cl_command_queue queue,
3428 cl_uint num_svm_pointers,
3429 void *svm_pointers[],
3430 void *user_data),
3431 const void *user_data,
3432 cl_uint num_events_in_wait_list,
3433 const cl_event *event_wait_list,
3434 const cl_event *event)
3435 {
3436 return CL_SUCCESS;
3437 }
3438
3439 cl_int ValidateEnqueueSVMMemcpy(cl_command_queue command_queue,
3440 cl_bool blocking_copy,
3441 const void *dst_ptr,
3442 const void *src_ptr,
3443 size_t size,
3444 cl_uint num_events_in_wait_list,
3445 const cl_event *event_wait_list,
3446 const cl_event *event)
3447 {
3448 return CL_SUCCESS;
3449 }
3450
3451 cl_int ValidateEnqueueSVMMemFill(cl_command_queue command_queue,
3452 const void *svm_ptr,
3453 const void *pattern,
3454 size_t pattern_size,
3455 size_t size,
3456 cl_uint num_events_in_wait_list,
3457 const cl_event *event_wait_list,
3458 const cl_event *event)
3459 {
3460 return CL_SUCCESS;
3461 }
3462
3463 cl_int ValidateEnqueueSVMMap(cl_command_queue command_queue,
3464 cl_bool blocking_map,
3465 MapFlags flags,
3466 const void *svm_ptr,
3467 size_t size,
3468 cl_uint num_events_in_wait_list,
3469 const cl_event *event_wait_list,
3470 const cl_event *event)
3471 {
3472 return CL_SUCCESS;
3473 }
3474
3475 cl_int ValidateEnqueueSVMUnmap(cl_command_queue command_queue,
3476 const void *svm_ptr,
3477 cl_uint num_events_in_wait_list,
3478 const cl_event *event_wait_list,
3479 const cl_event *event)
3480 {
3481 return CL_SUCCESS;
3482 }
3483
3484 // CL 2.1
3485 cl_int ValidateSetDefaultDeviceCommandQueue(cl_context context,
3486 cl_device_id device,
3487 cl_command_queue command_queue)
3488 {
3489 return CL_SUCCESS;
3490 }
3491
3492 cl_int ValidateGetDeviceAndHostTimer(cl_device_id device,
3493 const cl_ulong *device_timestamp,
3494 const cl_ulong *host_timestamp)
3495 {
3496 return CL_SUCCESS;
3497 }
3498
3499 cl_int ValidateGetHostTimer(cl_device_id device, const cl_ulong *host_timestamp)
3500 {
3501 return CL_SUCCESS;
3502 }
3503
3504 cl_int ValidateCreateProgramWithIL(cl_context context, const void *il, size_t length)
3505 {
3506 // CL_INVALID_CONTEXT if context is not a valid context.
3507 if (!Context::IsValidAndVersionOrNewer(context, 2u, 1u))
3508 {
3509 return CL_INVALID_CONTEXT;
3510 }
3511 const Context &ctx = context->cast<Context>();
3512
3513 // CL_INVALID_OPERATION if no devices in context support intermediate language programs.
3514 if (!ctx.supportsIL())
3515 {
3516 return CL_INVALID_OPERATION;
3517 }
3518
3519 // CL_INVALID_VALUE if il is NULL or if length is zero.
3520 if (il == nullptr || length == 0u)
3521 {
3522 return CL_INVALID_VALUE;
3523 }
3524
3525 return CL_SUCCESS;
3526 }
3527
3528 cl_int ValidateCloneKernel(cl_kernel source_kernel)
3529 {
3530 return CL_SUCCESS;
3531 }
3532
3533 cl_int ValidateGetKernelSubGroupInfo(cl_kernel kernel,
3534 cl_device_id device,
3535 KernelSubGroupInfo param_name,
3536 size_t input_value_size,
3537 const void *input_value,
3538 size_t param_value_size,
3539 const void *param_value,
3540 const size_t *param_value_size_ret)
3541 {
3542 return CL_SUCCESS;
3543 }
3544
3545 cl_int ValidateEnqueueSVMMigrateMem(cl_command_queue command_queue,
3546 cl_uint num_svm_pointers,
3547 const void **svm_pointers,
3548 const size_t *sizes,
3549 MemMigrationFlags flags,
3550 cl_uint num_events_in_wait_list,
3551 const cl_event *event_wait_list,
3552 const cl_event *event)
3553 {
3554 return CL_SUCCESS;
3555 }
3556
3557 // CL 2.2
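// The CL 2.2 entry points below are not validated yet and return CL_SUCCESS unconditionally.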
3558 cl_int ValidateSetProgramReleaseCallback(cl_program program,
3559 void(CL_CALLBACK *pfn_notify)(cl_program program,
3560 void *user_data),
3561 const void *user_data)
3562 {
3563 return CL_SUCCESS;
3564 }
3565
3566 cl_int ValidateSetProgramSpecializationConstant(cl_program program,
3567 cl_uint spec_id,
3568 size_t spec_size,
3569 const void *spec_value)
3570 {
3571 return CL_SUCCESS;
3572 }
3573
3574 // CL 3.0
3575 cl_int ValidateSetContextDestructorCallback(cl_context context,
3576 void(CL_CALLBACK *pfn_notify)(cl_context context,
3577 void *user_data),
3578 const void *user_data)
3579 {
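    // The context, callback, and user_data arguments are not validated here.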
3580 return CL_SUCCESS;
3581 }
3582
3583 cl_int ValidateCreateBufferWithProperties(cl_context context,
3584 const cl_mem_properties *properties,
3585 MemFlags flags,
3586 size_t size,
3587 const void *host_ptr)
3588 {
3589 ANGLE_CL_TRY(ValidateCreateBuffer(context, flags, size, host_ptr));
3590
3591 // CL_INVALID_CONTEXT if context is not a valid context.
3592 if (!context->cast<Context>().getPlatform().isVersionOrNewer(3u, 0u))
3593 {
3594 return CL_INVALID_CONTEXT;
3595 }
3596
3597 // CL_INVALID_PROPERTY if a property name in properties is not a supported property name,
3598 // if the value specified for a supported property name is not valid,
3599 // or if the same property name is specified more than once.
3600 if (!ValidateMemoryProperties(properties))
3601 {
3602 return CL_INVALID_PROPERTY;
3603 }
3604
3605 return CL_SUCCESS;
3606 }
3607
3608 cl_int ValidateCreateImageWithProperties(cl_context context,
3609 const cl_mem_properties *properties,
3610 MemFlags flags,
3611 const cl_image_format *image_format,
3612 const cl_image_desc *image_desc,
3613 const void *host_ptr)
3614 {
3615 ANGLE_CL_TRY(ValidateCreateImage(context, flags, image_format, image_desc, host_ptr));
3616
3617 // CL_INVALID_CONTEXT if context is not a valid context.
3618 if (!context->cast<Context>().getPlatform().isVersionOrNewer(3u, 0u))
3619 {
3620 return CL_INVALID_CONTEXT;
3621 }
3622
3623 // CL_INVALID_PROPERTY if a property name in properties is not a supported property name,
3624 // if the value specified for a supported property name is not valid,
3625 // or if the same property name is specified more than once.
3626 if (!ValidateMemoryProperties(properties))
3627 {
3628 return CL_INVALID_PROPERTY;
3629 }
3630
3631 return CL_SUCCESS;
3632 }
3633
3634 // cl_khr_icd
3635 cl_int ValidateIcdGetPlatformIDsKHR(cl_uint num_entries,
3636 const cl_platform_id *platforms,
3637 const cl_uint *num_platforms)
3638 {
3639 if ((num_entries == 0u && platforms != nullptr) ||
3640 (platforms == nullptr && num_platforms == nullptr))
3641 {
3642 return CL_INVALID_VALUE;
3643 }
3644 return CL_SUCCESS;
3645 }
3646
3647 } // namespace cl
3648