1 /*------------------------------------------------------------------------
2 * Vulkan Conformance Tests
3 * ------------------------
4 *
5 * Copyright (c) 2017 The Khronos Group Inc.
6 *
7 * Licensed under the Apache License, Version 2.0 (the "License");
8 * you may not use this file except in compliance with the License.
9 * You may obtain a copy of the License at
10 *
11 * http://www.apache.org/licenses/LICENSE-2.0
12 *
13 * Unless required by applicable law or agreed to in writing, software
14 * distributed under the License is distributed on an "AS IS" BASIS,
15 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 * See the License for the specific language governing permissions and
17 * limitations under the License.
18 *
19 *//*!
20 * \file vktImageCompressionTranscodingSupport.cpp
21 * \brief Compression transcoding support
22 *//*--------------------------------------------------------------------*/
23
24 #include "vktImageCompressionTranscodingSupport.hpp"
25 #include "vktImageLoadStoreUtil.hpp"
26
27 #include "deUniquePtr.hpp"
28 #include "deStringUtil.hpp"
29 #include "deSharedPtr.hpp"
30 #include "deRandom.hpp"
31
32 #include "vktTestCaseUtil.hpp"
33 #include "vkPrograms.hpp"
34 #include "vkImageUtil.hpp"
35 #include "vkBarrierUtil.hpp"
36 #include "vktImageTestsUtil.hpp"
37 #include "vkBuilderUtil.hpp"
38 #include "vkRef.hpp"
39 #include "vkRefUtil.hpp"
40 #include "vkTypeUtil.hpp"
41 #include "vkQueryUtil.hpp"
42 #include "vkCmdUtil.hpp"
43 #include "vkObjUtil.hpp"
44 #include "vkBufferWithMemory.hpp"
45
46 #include "tcuTextureUtil.hpp"
47 #include "tcuTexture.hpp"
48 #include "tcuCompressedTexture.hpp"
49 #include "tcuVectorType.hpp"
50 #include "tcuResource.hpp"
51 #include "tcuImageIO.hpp"
52 #include "tcuImageCompare.hpp"
53 #include "tcuTestLog.hpp"
54 #include "tcuRGBA.hpp"
55 #include "tcuSurface.hpp"
56
57 #include <vector>
58
59 using namespace vk;
60 namespace vkt
61 {
62 namespace image
63 {
64 namespace
65 {
66 using std::string;
67 using std::vector;
68 using tcu::TestContext;
69 using tcu::TestStatus;
70 using tcu::UVec3;
71 using tcu::IVec3;
72 using tcu::CompressedTexFormat;
73 using tcu::CompressedTexture;
74 using tcu::Resource;
75 using tcu::Archive;
76 using tcu::ConstPixelBufferAccess;
77 using de::MovePtr;
78 using de::SharedPtr;
79 using de::Random;
80
// Shared-ownership wrappers: Move/MovePtr are move-only and cannot be copied
// into std:: containers, so they are boxed in ref-counted SharedPtrs instead.
typedef SharedPtr<MovePtr<Image> >			ImageSp;
typedef SharedPtr<Move<VkImageView> >		ImageViewSp;
typedef SharedPtr<Move<VkDescriptorSet> >	SharedVkDescriptorSet;
84
// Pipeline stage used to perform the transcoding operation under test.
enum ShaderType
{
	SHADER_TYPE_COMPUTE,	// Operation executed in a compute shader
	SHADER_TYPE_FRAGMENT,	// Operation executed in a fragment shader
	SHADER_TYPE_LAST		// Number of shader types (not a valid value)
};
91
// Shader/image operation exercised on the transcoded (uncompressed view of a
// compressed) image.
enum Operation
{
	OPERATION_IMAGE_LOAD,		// imageLoad() from a storage image view
	OPERATION_TEXEL_FETCH,		// texelFetch() through a sampled image view
	OPERATION_TEXTURE,			// texture() through a sampled image view
	OPERATION_IMAGE_STORE,		// imageStore() into a storage image view
	OPERATION_ATTACHMENT_READ,	// Read as a framebuffer input attachment
	OPERATION_ATTACHMENT_WRITE,	// Write as a framebuffer colour attachment
	OPERATION_TEXTURE_READ,		// Sample in the graphics pipeline
	OPERATION_TEXTURE_WRITE,	// Render into a sampled-compatible image
	OPERATION_LAST				// Number of operations (not a valid value)
};
104
// Full description of one transcoding test case.
struct TestParameters
{
	Operation			operation;					// Shader/image operation under test
	ShaderType			shader;						// Stage the operation runs in
	UVec3				size;						// Base image size in texels
	deUint32			layers;						// Number of array layers
	ImageType			imageType;					// 1D/2D/... image dimensionality
	VkFormat			formatCompressed;			// Block-compressed image format
	VkFormat			formatUncompressed;			// Uncompressed view format used for transcoding
	deUint32			imagesCount;				// Number of images in the operation chain
	VkImageUsageFlags	compressedImageUsage;		// Usage flags for the compressed image
	VkImageUsageFlags	compressedImageViewUsage;	// Usage flags for views of the compressed image
	VkImageUsageFlags	uncompressedImageUsage;		// Usage flags for the uncompressed images
	bool				useMipmaps;					// Whether the full mip chain is exercised
	VkFormat			formatForVerify;			// Format used when rendering/verifying results
	bool				formatIsASTC;				// True if formatCompressed is an ASTC format
};
122
123 template<typename T>
makeVkSharedPtr(Move<T> move)124 inline SharedPtr<Move<T> > makeVkSharedPtr (Move<T> move)
125 {
126 return SharedPtr<Move<T> >(new Move<T>(move));
127 }
128
129 template<typename T>
makeVkSharedPtr(MovePtr<T> movePtr)130 inline SharedPtr<MovePtr<T> > makeVkSharedPtr (MovePtr<T> movePtr)
131 {
132 return SharedPtr<MovePtr<T> >(new MovePtr<T>(movePtr));
133 }
134
// Used when an image or view spans exactly one mip level / array layer.
const deUint32 SINGLE_LEVEL = 1u;
const deUint32 SINGLE_LAYER = 1u;
137
// Controls how strictly BinaryCompare() treats pixel mismatches.
enum BinaryCompareMode
{
	COMPARE_MODE_NORMAL,							// Any byte mismatch is a failure
	COMPARE_MODE_ALLOW_ASTC_ERROR_COLOUR_WARNING,	// LDR-vs-HDR ASTC error-colour swaps only warn
};
143
// Outcome of BinaryCompare().
enum BinaryCompareResult
{
	COMPARE_RESULT_OK,						// Exact match
	COMPARE_RESULT_ASTC_QUALITY_WARNING,	// Only ASTC error-colour swaps found
	COMPARE_RESULT_FAILED,					// At least one real mismatch
};
150
// Packed RGBA8 error colours an ASTC decoder may emit for invalid blocks:
// LDR decoders produce opaque magenta, HDR decoders produce all-zero.
const deUint32 ASTC_LDR_ERROR_COLOUR = 0xFFFF00FF;
const deUint32 ASTC_HDR_ERROR_COLOUR = 0x00000000;
153
BinaryCompare(const void * reference,const void * result,VkDeviceSize sizeInBytes,VkFormat formatForVerify,BinaryCompareMode mode)154 static BinaryCompareResult BinaryCompare(const void *reference,
155 const void *result,
156 VkDeviceSize sizeInBytes,
157 VkFormat formatForVerify,
158 BinaryCompareMode mode)
159 {
160 DE_UNREF(formatForVerify);
161
162 // Compare quickly using deMemCmp
163 if (deMemCmp(reference, result, (size_t)sizeInBytes) == 0)
164 {
165 return COMPARE_RESULT_OK;
166 }
167 // If deMemCmp indicated a mismatch, we can re-check with a manual comparison of
168 // the ref and res images that allows for ASTC error colour mismatches if the ASTC
169 // comparison mode was selected. This slows down the affected ASTC tests if you
170 // didn't pass in the first comparison, but means in the general case the
171 // comparion is still fast.
172 else if (mode == COMPARE_MODE_ALLOW_ASTC_ERROR_COLOUR_WARNING)
173 {
174 bool bWarn = false;
175 bool bFail = false;
176 const deUint32 *pui32RefVal = (deUint32*)reference;
177 const deUint32 *pui32ResVal = (deUint32*)result;
178
179 DE_ASSERT(formatForVerify == VK_FORMAT_R8G8B8A8_UNORM);
180 size_t numPixels = (size_t)(sizeInBytes / 4) /* bytes */;
181 for (size_t i = 0; i < numPixels; i++)
182 {
183 const deUint32 ref = *pui32RefVal++;
184 const deUint32 res = *pui32ResVal++;
185
186 if (ref != res)
187 {
188 // QualityWarning !1231: If the astc pixel was the ASTC LDR error colour
189 // and the result image has the HDR error colour (or vice versa as the test
190 // cases below sometimes reverse the operands) then issue a quality warning
191 // instead of a failure.
192 if ((ref == ASTC_LDR_ERROR_COLOUR && res == ASTC_HDR_ERROR_COLOUR) ||
193 (ref == ASTC_HDR_ERROR_COLOUR && res == ASTC_LDR_ERROR_COLOUR))
194 {
195 bWarn = true;
196 }
197 else
198 {
199 bFail = true;
200 }
201 }
202 }
203
204 if (!bFail)
205 {
206 return (bWarn)
207 ? (COMPARE_RESULT_ASTC_QUALITY_WARNING)
208 : (COMPARE_RESULT_OK);
209 }
210 }
211
212 return COMPARE_RESULT_FAILED;
213 }
214
FormatIsASTC(VkFormat format)215 static bool FormatIsASTC(VkFormat format)
216 {
217 return deInRange32(format, VK_FORMAT_ASTC_4x4_UNORM_BLOCK, VK_FORMAT_ASTC_12x12_SRGB_BLOCK);
218 }
219
TestStatusASTCQualityWarning()220 static TestStatus TestStatusASTCQualityWarning()
221 {
222 return TestStatus(QP_TEST_RESULT_QUALITY_WARNING, "ASTC HDR error colour output instead of LDR error colour");
223 }
224
// Common base for all transcoding test instances: stores the parameters,
// derives the compressed block dimensions and usable mip level count, and
// provides the deterministic test-data generator shared by subclasses.
class BasicTranscodingTestInstance : public TestInstance
{
public:
							BasicTranscodingTestInstance	(Context&				context,
															 const TestParameters&	parameters);
	virtual TestStatus		iterate							(void) = 0;
protected:
	// Fill 'toFill' with a fixed pattern (layer 0 / level 0 only) followed by
	// seeded random bytes, then scrub values the uncompressed view format
	// would not preserve (SNORM -0, INF/NaN/denorm floats).
	void					generateData					(deUint8*				toFill,
															 const size_t			size,
															 const VkFormat			format,
															 const deUint32			layer = 0u,
															 const deUint32			level = 0u);
	deUint32				getLevelCount					();
	deUint32				getLayerCount					();
	UVec3					getLayerDims					();
	// Per-mip-level sizes in texels / in compressed blocks respectively.
	vector<UVec3>			getMipLevelSizes				(UVec3 baseSize);
	vector<UVec3>			getCompressedMipLevelSizes		(const VkFormat			compressedFormat,
															 const vector<UVec3>&	uncompressedSizes);

	const TestParameters	m_parameters;
	const deUint32			m_blockWidth;	// Compressed block width in texels
	const deUint32			m_blockHeight;	// Compressed block height in texels
	const deUint32			m_levelCount;	// Usable mip levels (see findMipMapLevelCount)
	const UVec3				m_layerSize;

	// Detected error colour mismatch while verifying image. Output
	// the ASTC quality warning instead of a pass
	bool					m_bASTCErrorColourMismatch;

private:
	deUint32				findMipMapLevelCount			();
};
257
// Determine how many mip levels can be used for this test. Returns 1 unless
// mipmaps were requested; otherwise counts halvings of the base size while
// both dimensions still strictly exceed the compressed block size, so no used
// level falls below one block.
deUint32 BasicTranscodingTestInstance::findMipMapLevelCount ()
{
	deUint32 levelCount = 1;

	// We cannot use mipmap levels which have resolution below block size.
	// Reduce number of mipmap levels
	if (m_parameters.useMipmaps)
	{
		deUint32 w = m_parameters.size.x();
		deUint32 h = m_parameters.size.y();

		DE_ASSERT(m_blockWidth > 0u && m_blockHeight > 0u);

		// Note the condition is re-checked after halving: a level is only
		// counted if it itself is still larger than a block in both axes.
		while (w > m_blockWidth && h > m_blockHeight)
		{
			w >>= 1;
			h >>= 1;

			if (w > m_blockWidth && h > m_blockHeight)
				levelCount++;
		}

		// The smallest counted level must still hold at least one full block.
		DE_ASSERT((m_parameters.size.x() >> (levelCount - 1u)) >= m_blockWidth);
		DE_ASSERT((m_parameters.size.y() >> (levelCount - 1u)) >= m_blockHeight);
	}

	return levelCount;
}
286
// Derives block dimensions and mip level count from the compressed format.
// Note: findMipMapLevelCount() relies on m_blockWidth/m_blockHeight, which are
// initialized first in declaration order.
BasicTranscodingTestInstance::BasicTranscodingTestInstance (Context& context, const TestParameters& parameters)
	: TestInstance	(context)
	, m_parameters	(parameters)
	, m_blockWidth	(getBlockWidth(m_parameters.formatCompressed))
	, m_blockHeight	(getBlockHeight(m_parameters.formatCompressed))
	, m_levelCount	(findMipMapLevelCount())
	, m_layerSize	(getLayerSize(m_parameters.imageType, m_parameters.size))
	, m_bASTCErrorColourMismatch(false)
{
	// The test assumes square-ish power-of-two behaviour: width and height
	// must have the same log2 so both shrink in lockstep across mip levels.
	DE_ASSERT(deLog2Floor32(m_parameters.size.x()) == deLog2Floor32(m_parameters.size.y()));
}
298
// Number of usable mip levels (1 when mipmaps are disabled).
deUint32 BasicTranscodingTestInstance::getLevelCount()
{
	return m_levelCount;
}
303
// Number of array layers requested by the test parameters.
deUint32 BasicTranscodingTestInstance::getLayerCount()
{
	return m_parameters.layers;
}
308
// Size of a single layer in texels, as derived from image type and base size.
UVec3 BasicTranscodingTestInstance::getLayerDims()
{
	return m_layerSize;
}
313
getMipLevelSizes(UVec3 baseSize)314 vector<UVec3> BasicTranscodingTestInstance::getMipLevelSizes (UVec3 baseSize)
315 {
316 vector<UVec3> levelSizes;
317 const deUint32 levelCount = getLevelCount();
318
319 baseSize.z() = 1u;
320
321 levelSizes.push_back(baseSize);
322
323 if (m_parameters.imageType == IMAGE_TYPE_1D)
324 {
325 baseSize.y() = 1u;
326
327 while (levelSizes.size() < levelCount && (baseSize.x() != 1))
328 {
329 baseSize.x() = deMax32(baseSize.x() >> 1, 1);
330 levelSizes.push_back(baseSize);
331 }
332 }
333 else
334 {
335 while (levelSizes.size() < levelCount && (baseSize.x() != 1 || baseSize.y() != 1))
336 {
337 baseSize.x() = deMax32(baseSize.x() >> 1, 1);
338 baseSize.y() = deMax32(baseSize.y() >> 1, 1);
339 levelSizes.push_back(baseSize);
340 }
341 }
342
343 DE_ASSERT(levelSizes.size() == getLevelCount());
344
345 return levelSizes;
346 }
347
// Translate per-level texel sizes into per-level sizes measured in compressed
// blocks for the given compressed format.
vector<UVec3> BasicTranscodingTestInstance::getCompressedMipLevelSizes (const VkFormat compressedFormat, const vector<UVec3>& uncompressedSizes)
{
	vector<UVec3> blockSizes;

	blockSizes.reserve(uncompressedSizes.size());
	for (size_t levelNdx = 0; levelNdx < uncompressedSizes.size(); ++levelNdx)
		blockSizes.push_back(getCompressedImageResolutionInBlocks(compressedFormat, uncompressedSizes[levelNdx]));

	return blockSizes;
}
358
// Fill 'toFill' with deterministic test data: for layer 0 / level 0 (and if
// the buffer is large enough) the first bytes are a fixed pattern of special
// IEEE-754 bit patterns (infinities, NaN range endpoints) written reversed and
// then forwards; the remainder is pseudo-random, seeded from layer/level/
// format so every subresource gets distinct but reproducible content. Finally,
// byte values that the uncompressed view format cannot round-trip (SNORM -0,
// half/float INF, NaN and denormals) are patched out.
void BasicTranscodingTestInstance::generateData (deUint8*		toFill,
												 const size_t	size,
												 const VkFormat	format,
												 const deUint32	layer,
												 const deUint32	level)
{
	const deUint8 pattern[] =
	{
		// 64-bit values
		0x11, 0x11, 0x11, 0x11, 0x22, 0x22, 0x22, 0x22,
		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01,
		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00,
		0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00,
		0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,
		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF,
		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0x00,
		0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0x00, 0x00,
		0x00, 0x00, 0x00, 0x00, 0xFF, 0x00, 0x00, 0x00,
		0x7F, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // Positive infinity
		0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // Negative infinity
		0x7F, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, // Start of a signalling NaN (NANS)
		0x7F, 0xF7, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, // End of a signalling NaN (NANS)
		0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, // Start of a signalling NaN (NANS)
		0xFF, 0xF7, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, // End of a signalling NaN (NANS)
		0x7F, 0xF8, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // Start of a quiet NaN (NANQ)
		0x7F, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, // End of a quiet NaN (NANQ)
		0xFF, 0xF8, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // Start of a quiet NaN (NANQ)
		0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, // End of a quiet NaN (NANQ)
		// 32-bit values
		0x7F, 0x80, 0x00, 0x00, // Positive infinity
		0xFF, 0x80, 0x00, 0x00, // Negative infinity
		0x7F, 0x80, 0x00, 0x01, // Start of a signalling NaN (NANS)
		0x7F, 0xBF, 0xFF, 0xFF, // End of a signalling NaN (NANS)
		0xFF, 0x80, 0x00, 0x01, // Start of a signalling NaN (NANS)
		0xFF, 0xBF, 0xFF, 0xFF, // End of a signalling NaN (NANS)
		0x7F, 0xC0, 0x00, 0x00, // Start of a quiet NaN (NANQ)
		0x7F, 0xFF, 0xFF, 0xFF, // End of a quiet NaN (NANQ)
		0xFF, 0xC0, 0x00, 0x00, // Start of a quiet NaN (NANQ)
		0xFF, 0xFF, 0xFF, 0xFF, // End of a quiet NaN (NANQ)
		0xAA, 0xAA, 0xAA, 0xAA,
		0x55, 0x55, 0x55, 0x55,
	};

	deUint8*	start		= toFill;
	size_t		sizeToRnd	= size;

	// Pattern part: only the base subresource and only if there is room for
	// both a reversed and a direct copy of the pattern.
	if (layer == 0 && level == 0 && size >= 2 * sizeof(pattern))
	{
		// Rotated pattern
		for (size_t i = 0; i < sizeof(pattern); i++)
			start[sizeof(pattern) - i - 1] = pattern[i];

		start += sizeof(pattern);
		sizeToRnd -= sizeof(pattern);

		// Direct pattern
		deMemcpy(start, pattern, sizeof(pattern));

		start += sizeof(pattern);
		sizeToRnd -= sizeof(pattern);
	}

	// Random part: 32 bits at a time, reproducibly seeded per subresource so
	// distinct layers/levels/formats get distinct data.
	{
		DE_ASSERT(sizeToRnd % sizeof(deUint32) == 0);

		deUint32*	start32		= reinterpret_cast<deUint32*>(start);
		size_t		sizeToRnd32	= sizeToRnd / sizeof(deUint32);
		deUint32	seed		= (layer << 24) ^ (level << 16) ^ static_cast<deUint32>(format);
		Random		rnd			(seed);

		for (size_t i = 0; i < sizeToRnd32; i++)
			start32[i] = rnd.getUint32();
	}

	{
		// Remove certain values that may not be preserved based on the uncompressed view format
		if (isSnormFormat(m_parameters.formatUncompressed))
		{
			for (size_t i = 0; i < size; i += 2)
			{
				// SNORM fix: due to write operation in SNORM format
				// replaces 0x00 0x80 to 0x01 0x80
				if (toFill[i] == 0x00 && toFill[i+1] == 0x80)
					toFill[i+1] = 0x81;
			}
		}
		else if (isFloatFormat(m_parameters.formatUncompressed))
		{
			tcu::TextureFormat textureFormat = mapVkFormat(m_parameters.formatUncompressed);

			if (textureFormat.type == tcu::TextureFormat::HALF_FLOAT)
			{
				for (size_t i = 0; i < size; i += 2)
				{
					// HALF_FLOAT fix: clear the exponent so the value is neither INF nor NaN
					if ((toFill[i+1] & 0x7C) == 0x7C)
						toFill[i+1] = 0x00;
				}
			}
			else if (textureFormat.type == tcu::TextureFormat::FLOAT)
			{
				// NOTE(review): this first pass applies the half-float INF/NaN
				// mask at a 4-byte stride even though the view format is FLOAT —
				// presumably to also sanitise 16-bit reinterpretations of the
				// same bytes; confirm against the shader side before changing.
				for (size_t i = 0; i < size; i += 4)
				{
					if ((toFill[i+1] & 0x7C) == 0x7C)
						toFill[i+1] = 0x00;
				}

				for (size_t i = 0; i < size; i += 4)
				{
					// FLOAT fix: remove INF, NaN, and denorm
					// Little endian fix
					if (((toFill[i+3] & 0x7F) == 0x7F && (toFill[i+2] & 0x80) == 0x80) || ((toFill[i+3] & 0x7F) == 0x00 && (toFill[i+2] & 0x80) == 0x00))
						toFill[i+3] = 0x01;
					// Big endian fix
					if (((toFill[i+0] & 0x7F) == 0x7F && (toFill[i+1] & 0x80) == 0x80) || ((toFill[i+0] & 0x7F) == 0x00 && (toFill[i+1] & 0x80) == 0x00))
						toFill[i+0] = 0x01;
				}
			}
		}
	}
}
484
// Compute-shader variant of the transcoding test: image 0 holds the
// compressed data, intermediate images hold uncompressed copies, and the last
// image receives the shader's output for verification.
class BasicComputeTestInstance : public BasicTranscodingTestInstance
{
public:
					BasicComputeTestInstance	(Context&				context,
												 const TestParameters&	parameters);
	TestStatus		iterate						(void);
protected:
	// Bundle of images, their views and their create infos for one slot of
	// the operation chain; indices into the three vectors are kept parallel
	// by the add* helpers.
	struct ImageData
	{
		deUint32			getImagesCount		(void)							{ return static_cast<deUint32>(images.size()); }
		deUint32			getImageViewCount	(void)							{ return static_cast<deUint32>(imagesViews.size()); }
		deUint32			getImageInfoCount	(void)							{ return static_cast<deUint32>(imagesInfos.size()); }
		VkImage				getImage			(const deUint32 ndx)			{ return **images[ndx]->get(); }
		VkImageView			getImageView		(const deUint32 ndx)			{ return **imagesViews[ndx]; }
		VkImageCreateInfo	getImageInfo		(const deUint32 ndx)			{ return imagesInfos[ndx]; }
		void				addImage			(MovePtr<Image> image)			{ images.push_back(makeVkSharedPtr(image)); }
		void				addImageView		(Move<VkImageView> imageView)	{ imagesViews.push_back(makeVkSharedPtr(imageView)); }
		void				addImageInfo		(const VkImageCreateInfo imageInfo)	{ imagesInfos.push_back(imageInfo); }
		void				resetViews			()								{ imagesViews.clear(); }
	private:
		vector<ImageSp>				images;
		vector<ImageViewSp>			imagesViews;
		vector<VkImageCreateInfo>	imagesInfos;
	};
	// Upload m_data into all images of 'imageData' via a staging buffer.
	void			copyDataToImage				(const VkCommandPool&			cmdPool,
												 const VkCommandBuffer&			cmdBuffer,
												 ImageData&						imageData,
												 const vector<UVec3>&			mipMapSizes,
												 const bool						isCompressed);
	// Bind views/descriptors and dispatch the compute operation under test.
	virtual void	executeShader				(const VkCommandPool&			cmdPool,
												 const VkCommandBuffer&			cmdBuffer,
												 const VkDescriptorSetLayout&	descriptorSetLayout,
												 const VkDescriptorPool&		descriptorPool,
												 vector<ImageData>&				imageData);
	// Read back 'uncompressed' and compare against m_data at 'offset'.
	bool			copyResultAndCompare		(const VkCommandPool&			cmdPool,
												 const VkCommandBuffer&			cmdBuffer,
												 const VkImage&					uncompressed,
												 const VkDeviceSize				offset,
												 const UVec3&					size);
	void			descriptorSetUpdate			(VkDescriptorSet				descriptorSet,
												 const VkDescriptorImageInfo*	descriptorImageInfos);
	// Populate imageData with VkImageCreateInfos for each mip/layer.
	void			createImageInfos			(ImageData&						imageData,
												 const vector<UVec3>&			mipMapSizes,
												 const bool						isCompressed);
	// Decode the compressed image on the CPU and compare with the GPU result.
	bool			decompressImage				(const VkCommandPool&			cmdPool,
												 const VkCommandBuffer&			cmdBuffer,
												 vector<ImageData>&				imageData,
												 const vector<UVec3>&			mipMapSizes);
	// Reference data uploaded to the compressed image (all mips and layers).
	vector<deUint8>	m_data;
};
535
536
// Trivial constructor: all state is derived in the base class.
BasicComputeTestInstance::BasicComputeTestInstance (Context& context, const TestParameters& parameters)
	:BasicTranscodingTestInstance	(context, parameters)
{
}
541
// Main compute test flow: create the image chain (compressed image 0 plus
// uncompressed images, with per-mip/per-layer views on the compressed one),
// generate and upload the reference data, run the transcoding shader, and
// verify the output matches the reference both directly and after CPU-side
// decompression.
TestStatus BasicComputeTestInstance::iterate (void)
{
	const DeviceInterface&			vk					= m_context.getDeviceInterface();
	const VkDevice					device				= m_context.getDevice();
	const deUint32					queueFamilyIndex	= m_context.getUniversalQueueFamilyIndex();
	Allocator&						allocator			= m_context.getDefaultAllocator();
	const Unique<VkCommandPool>		cmdPool				(createCommandPool(vk, device, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT, queueFamilyIndex));
	const Unique<VkCommandBuffer>	cmdBuffer			(allocateCommandBuffer(vk, device, *cmdPool, VK_COMMAND_BUFFER_LEVEL_PRIMARY));
	const UVec3						fullSize			(m_parameters.size.x(), m_parameters.imageType == IMAGE_TYPE_1D ? 1 : m_parameters.size.y(), 1);
	const vector<UVec3>				mipMapSizes			= m_parameters.useMipmaps ? getMipLevelSizes (getLayerDims()) : vector<UVec3>(1, fullSize);
	vector<ImageData>				imageData			(m_parameters.imagesCount);
	const deUint32					compressedNdx		= 0u;								// image 0 holds the compressed data
	const deUint32					resultImageNdx		= m_parameters.imagesCount -1u;		// last image receives the result

	// Create all images. The compressed image additionally gets one
	// uncompressed-format view per (mip, layer), each restricted via
	// VkImageViewUsageCreateInfo to the compressed image's usage flags.
	for (deUint32 imageNdx = 0u; imageNdx < m_parameters.imagesCount; ++imageNdx)
	{
		const bool isCompressed = compressedNdx == imageNdx ? true : false;
		createImageInfos(imageData[imageNdx], mipMapSizes, isCompressed);
		for (deUint32 infoNdx = 0u; infoNdx < imageData[imageNdx].getImageInfoCount(); ++infoNdx)
		{
			imageData[imageNdx].addImage(MovePtr<Image>(new Image(vk, device, allocator, imageData[imageNdx].getImageInfo(infoNdx), MemoryRequirement::Any)));
			if (isCompressed)
			{
				const VkImageViewUsageCreateInfo	imageViewUsageKHR	=
				{
					VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO,	//VkStructureType		sType;
					DE_NULL,										//const void*			pNext;
					m_parameters.compressedImageUsage,				//VkImageUsageFlags		usage;
				};
				for (deUint32 mipNdx = 0u; mipNdx < mipMapSizes.size(); ++mipNdx)
				for (deUint32 layerNdx = 0u; layerNdx < getLayerCount(); ++layerNdx)
				{
					imageData[imageNdx].addImageView(makeImageView(vk, device, imageData[imageNdx].getImage(infoNdx),
											mapImageViewType(m_parameters.imageType), m_parameters.formatUncompressed,
											makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, mipNdx, 1u, layerNdx, 1u),
											&imageViewUsageKHR));
				}
			}
			else
			{
				imageData[imageNdx].addImageView(makeImageView(vk, device, imageData[imageNdx].getImage(infoNdx),
										mapImageViewType(m_parameters.imageType), m_parameters.formatUncompressed,
										makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 1u, 0u, 1u)));
			}
		}
	}

	// Generate reference data covering every mip level and layer of the
	// compressed image.
	{
		size_t size = 0ull;
		for(deUint32 mipNdx = 0u; mipNdx < mipMapSizes.size(); ++mipNdx)
		{
			size += static_cast<size_t>(getCompressedImageSizeInBytes(m_parameters.formatCompressed, mipMapSizes[mipNdx]) * getLayerCount());
		}
		m_data.resize(size);
		generateData (&m_data[0], m_data.size(), m_parameters.formatCompressed);
	}

	// Upload the data into the image the shader reads from: the compressed
	// image for read operations, the first uncompressed image for stores.
	switch(m_parameters.operation)
	{
		case OPERATION_IMAGE_LOAD:
		case OPERATION_TEXEL_FETCH:
		case OPERATION_TEXTURE:
			copyDataToImage(*cmdPool, *cmdBuffer, imageData[compressedNdx], mipMapSizes, true);
			break;
		case OPERATION_IMAGE_STORE:
			copyDataToImage(*cmdPool, *cmdBuffer, imageData[1], mipMapSizes, false);
			break;
		default:
			DE_ASSERT(false);
			break;
	}

	// Build descriptor layout/pool matching the operation (combined image
	// sampler for sampled reads of the compressed image, storage image
	// otherwise), run the shader, then compare each result subresource
	// against the reference bytes.
	{
		Move<VkDescriptorSetLayout>	descriptorSetLayout;
		Move<VkDescriptorPool>		descriptorPool;

		DescriptorSetLayoutBuilder	descriptorSetLayoutBuilder;
		DescriptorPoolBuilder		descriptorPoolBuilder;
		for (deUint32 imageNdx = 0u; imageNdx < m_parameters.imagesCount; ++imageNdx)
		{
			switch(m_parameters.operation)
			{
				case OPERATION_IMAGE_LOAD:
				case OPERATION_IMAGE_STORE:
					descriptorSetLayoutBuilder.addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, VK_SHADER_STAGE_COMPUTE_BIT);
					descriptorPoolBuilder.addType(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, imageData[0].getImageViewCount());
					break;
				case OPERATION_TEXEL_FETCH:
				case OPERATION_TEXTURE:
					descriptorSetLayoutBuilder.addSingleBinding((compressedNdx == imageNdx) ? VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER : VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, VK_SHADER_STAGE_COMPUTE_BIT);
					descriptorPoolBuilder.addType((compressedNdx == imageNdx) ? VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER : VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, imageData[0].getImageViewCount());
					break;
				default:
					DE_ASSERT(false);
					break;
			}
		}
		descriptorSetLayout	= descriptorSetLayoutBuilder.build(vk, device);
		descriptorPool		= descriptorPoolBuilder.build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, imageData[0].getImageViewCount());
		executeShader(*cmdPool, *cmdBuffer, *descriptorSetLayout, *descriptorPool, imageData);

		// Compare each (mip, layer) of the result image against the
		// corresponding slice of the reference data; 'offset' walks m_data
		// in the same order the data was generated and uploaded.
		{
			VkDeviceSize offset = 0ull;
			for (deUint32 mipNdx = 0u; mipNdx < mipMapSizes.size(); ++mipNdx)
			for (deUint32 layerNdx = 0u; layerNdx < getLayerCount(); ++layerNdx)
			{
				const deUint32	imageNdx	= layerNdx + mipNdx * getLayerCount();
				const UVec3		size		= UVec3(imageData[resultImageNdx].getImageInfo(imageNdx).extent.width,
													imageData[resultImageNdx].getImageInfo(imageNdx).extent.height,
													imageData[resultImageNdx].getImageInfo(imageNdx).extent.depth);
				if (!copyResultAndCompare(*cmdPool, *cmdBuffer, imageData[resultImageNdx].getImage(imageNdx), offset, size))
					return TestStatus::fail("Uncompressed output mismatch at offset " + de::toString(offset) + " even before executing decompression");
				offset += getCompressedImageSizeInBytes(m_parameters.formatCompressed, mipMapSizes[mipNdx]);
			}
		}
	}
	if (!decompressImage(*cmdPool, *cmdBuffer, imageData, mipMapSizes))
		return TestStatus::fail("Decompression failed");

	// An LDR-vs-HDR ASTC error-colour mismatch was detected during
	// verification: report a quality warning instead of a plain pass.
	if (m_bASTCErrorColourMismatch)
	{
		DE_ASSERT(m_parameters.formatIsASTC);
		return TestStatusASTCQualityWarning();
	}

	return TestStatus::pass("Pass");
}
669
// Upload m_data into every image of 'imageData' through a host-visible
// staging buffer: one buffer-to-image copy per mip level, covering all array
// layers at once. 'isCompressed' selects whether the copy extents come from
// the compressed mip sizes or from the image create infos.
void BasicComputeTestInstance::copyDataToImage (const VkCommandPool&	cmdPool,
												const VkCommandBuffer&	cmdBuffer,
												ImageData&				imageData,
												const vector<UVec3>&	mipMapSizes,
												const bool				isCompressed)
{
	const DeviceInterface&	vk			= m_context.getDeviceInterface();
	const VkDevice			device		= m_context.getDevice();
	const VkQueue			queue		= m_context.getUniversalQueue();
	Allocator&				allocator	= m_context.getDefaultAllocator();

	BufferWithMemory		imageBuffer	(vk, device, allocator,
											makeBufferCreateInfo(m_data.size(), VK_BUFFER_USAGE_TRANSFER_SRC_BIT),
											MemoryRequirement::HostVisible);
	VkDeviceSize			offset		= 0ull;
	// Stage the reference bytes and make the host write visible.
	{
		const Allocation& alloc = imageBuffer.getAllocation();
		deMemcpy(alloc.getHostPtr(), &m_data[0], m_data.size());
		flushAlloc(vk, device, alloc);
	}

	beginCommandBuffer(vk, cmdBuffer);
	// Range covering every mip and layer of the images (all images here are
	// created from the same create info shape — see getImageInfo(0u)).
	const VkImageSubresourceRange	subresourceRange	=
	{
		VK_IMAGE_ASPECT_COLOR_BIT,					//VkImageAspectFlags	aspectMask
		0u,											//deUint32				baseMipLevel
		imageData.getImageInfo(0u).mipLevels,		//deUint32				levelCount
		0u,											//deUint32				baseArrayLayer
		imageData.getImageInfo(0u).arrayLayers		//deUint32				layerCount
	};

	for (deUint32 imageNdx = 0u; imageNdx < imageData.getImagesCount(); ++imageNdx)
	{
		// Transition the whole image to TRANSFER_DST before copying into it.
		const VkImageMemoryBarrier		preCopyImageBarrier		= makeImageMemoryBarrier(
																	0u, VK_ACCESS_TRANSFER_WRITE_BIT,
																	VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
																	imageData.getImage(imageNdx), subresourceRange);

		// Make the host write to the staging buffer visible to the transfer.
		const VkBufferMemoryBarrier		FlushHostCopyBarrier	= makeBufferMemoryBarrier(
																	VK_ACCESS_HOST_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
																	imageBuffer.get(), 0ull, m_data.size());

		vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT,
			(VkDependencyFlags)0, 0u, (const VkMemoryBarrier*)DE_NULL, 1u, &FlushHostCopyBarrier, 1u, &preCopyImageBarrier);

		for (deUint32 mipNdx = 0u; mipNdx < imageData.getImageInfo(imageNdx).mipLevels; ++mipNdx)
		{
			const VkExtent3D		imageExtent	= isCompressed ?
													makeExtent3D(mipMapSizes[mipNdx]) :
													imageData.getImageInfo(imageNdx).extent;
			const VkBufferImageCopy	copyRegion	=
			{
				offset,																		//VkDeviceSize				bufferOffset;
				0u,																			//deUint32					bufferRowLength;
				0u,																			//deUint32					bufferImageHeight;
				makeImageSubresourceLayers(VK_IMAGE_ASPECT_COLOR_BIT, mipNdx, 0u, imageData.getImageInfo(imageNdx).arrayLayers),	//VkImageSubresourceLayers	imageSubresource;
				makeOffset3D(0, 0, 0),														//VkOffset3D				imageOffset;
				imageExtent,																//VkExtent3D				imageExtent;
			};

			vk.cmdCopyBufferToImage(cmdBuffer, imageBuffer.get(), imageData.getImage(imageNdx), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &copyRegion);
			// Advance by the compressed byte size of this level across all
			// layers; for uncompressed images the extent is first scaled back
			// to texel-block units so the same reference layout applies.
			offset += getCompressedImageSizeInBytes(m_parameters.formatCompressed,
						UVec3(isCompressed ? imageExtent.width : imageExtent.width * m_blockWidth, isCompressed? imageExtent.height :imageExtent.height * m_blockHeight,imageExtent.depth)) *
						imageData.getImageInfo(imageNdx).arrayLayers;
		}
	}
	endCommandBuffer(vk, cmdBuffer);
	submitCommandsAndWait(vk, device, queue, cmdBuffer);
	m_context.resetCommandPoolForVKSC(device, cmdPool);
}
740
// Record and submit the compute pass: create a nearest-filter sampler, build
// one descriptor set per view of image 0 (each set binds the matching view of
// every image in the chain), transition images to GENERAL, then dispatch once
// per set with the extent of the corresponding uncompressed image.
void BasicComputeTestInstance::executeShader (const VkCommandPool&			cmdPool,
											  const VkCommandBuffer&		cmdBuffer,
											  const VkDescriptorSetLayout&	descriptorSetLayout,
											  const VkDescriptorPool&		descriptorPool,
											  vector<ImageData>&			imageData)
{
	const DeviceInterface&			vk					= m_context.getDeviceInterface();
	const VkDevice					device				= m_context.getDevice();
	const VkQueue					queue				= m_context.getUniversalQueue();
	const Unique<VkShaderModule>	shaderModule		(createShaderModule(vk, device, m_context.getBinaryCollection().get("comp"), 0));
	// One descriptor set per view of the compressed image (i.e. per mip/layer).
	vector<SharedVkDescriptorSet>	descriptorSets		(imageData[0].getImageViewCount());
	const Unique<VkPipelineLayout>	pipelineLayout		(makePipelineLayout(vk, device, descriptorSetLayout));
	const Unique<VkPipeline>		pipeline			(makeComputePipeline(vk, device, *pipelineLayout, *shaderModule));
	Move<VkSampler>					sampler;
	// Nearest/clamp sampler for the sampled-image operations.
	{
		const VkSamplerCreateInfo createInfo =
		{
			VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO,		//VkStructureType		sType;
			DE_NULL,									//const void*			pNext;
			0u,											//VkSamplerCreateFlags	flags;
			VK_FILTER_NEAREST,							//VkFilter				magFilter;
			VK_FILTER_NEAREST,							//VkFilter				minFilter;
			VK_SAMPLER_MIPMAP_MODE_NEAREST,				//VkSamplerMipmapMode	mipmapMode;
			VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,		//VkSamplerAddressMode	addressModeU;
			VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,		//VkSamplerAddressMode	addressModeV;
			VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,		//VkSamplerAddressMode	addressModeW;
			0.0f,										//float					mipLodBias;
			VK_FALSE,									//VkBool32				anisotropyEnable;
			1.0f,										//float					maxAnisotropy;
			VK_FALSE,									//VkBool32				compareEnable;
			VK_COMPARE_OP_EQUAL,						//VkCompareOp			compareOp;
			0.0f,										//float					minLod;
			0.0f,										//float					maxLod;
			VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK,	//VkBorderColor			borderColor;
			VK_FALSE,									//VkBool32				unnormalizedCoordinates;
		};
		sampler = createSampler(vk, device, &createInfo);
	}

	// For set 'viewNdx', entry 'imageNdx' describes image imageNdx's view of
	// that same (mip, layer) slot.
	vector<VkDescriptorImageInfo>	descriptorImageInfos	(descriptorSets.size() * m_parameters.imagesCount);
	for (deUint32 viewNdx = 0u; viewNdx < descriptorSets.size(); ++viewNdx)
	{
		const deUint32	descriptorNdx	= viewNdx * m_parameters.imagesCount;
		for (deUint32 imageNdx = 0; imageNdx < m_parameters.imagesCount; ++imageNdx)
		{
			descriptorImageInfos[descriptorNdx+imageNdx] = makeDescriptorImageInfo(*sampler,
												imageData[imageNdx].getImageView(viewNdx), VK_IMAGE_LAYOUT_GENERAL);
		}
	}

	for (deUint32 ndx = 0u; ndx < descriptorSets.size(); ++ndx)
		descriptorSets[ndx] = makeVkSharedPtr(makeDescriptorSet(vk, device, descriptorPool, descriptorSetLayout));

	beginCommandBuffer(vk, cmdBuffer);
	{
		const VkImageSubresourceRange	compressedRange				=
		{
			VK_IMAGE_ASPECT_COLOR_BIT,					//VkImageAspectFlags	aspectMask
			0u,											//deUint32				baseMipLevel
			imageData[0].getImageInfo(0u).mipLevels,	//deUint32				levelCount
			0u,											//deUint32				baseArrayLayer
			imageData[0].getImageInfo(0u).arrayLayers	//deUint32				layerCount
		};
		const VkImageSubresourceRange	uncompressedRange			=
		{
			VK_IMAGE_ASPECT_COLOR_BIT,					//VkImageAspectFlags	aspectMask
			0u,											//deUint32				baseMipLevel
			1u,											//deUint32				levelCount
			0u,											//deUint32				baseArrayLayer
			1u											//deUint32				layerCount
		};

		vk.cmdBindPipeline(cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *pipeline);

		// One barrier per uncompressed image plus a final one for the
		// compressed image. NOTE(review): the array is sized from
		// descriptorSets.size() but filled from imageData[1].getImagesCount();
		// this relies on the two counts being equal — confirm if either side
		// changes.
		vector<VkImageMemoryBarrier>	preShaderImageBarriers;
		preShaderImageBarriers.resize(descriptorSets.size() + 1u);
		for (deUint32 imageNdx = 0u; imageNdx < imageData[1].getImagesCount(); ++imageNdx)
		{
			preShaderImageBarriers[imageNdx]= makeImageMemoryBarrier(
												VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_SHADER_WRITE_BIT,
												VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL,
												imageData[1].getImage(imageNdx), uncompressedRange);
		}

		// Compressed image was filled by the transfer stage; make it readable
		// by the shader in GENERAL layout.
		preShaderImageBarriers[descriptorSets.size()] = makeImageMemoryBarrier(
															VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_SHADER_READ_BIT,
															VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL,
															imageData[0].getImage(0), compressedRange);

		vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT,
			(VkDependencyFlags)0, 0u, (const VkMemoryBarrier*)DE_NULL, 0u, (const VkBufferMemoryBarrier*)DE_NULL,
			static_cast<deUint32>(preShaderImageBarriers.size()), &preShaderImageBarriers[0]);

		// Dispatch once per descriptor set; the workgroup grid matches the
		// extent of the corresponding uncompressed image.
		for (deUint32 ndx = 0u; ndx <descriptorSets.size(); ++ndx)
		{
			descriptorSetUpdate (**descriptorSets[ndx], &descriptorImageInfos[ndx* m_parameters.imagesCount]);
			vk.cmdBindDescriptorSets(cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *pipelineLayout, 0u, 1u, &(**descriptorSets[ndx]), 0u, DE_NULL);
			vk.cmdDispatch(cmdBuffer,	imageData[1].getImageInfo(ndx).extent.width,
										imageData[1].getImageInfo(ndx).extent.height,
										imageData[1].getImageInfo(ndx).extent.depth);
		}
	}
	endCommandBuffer(vk, cmdBuffer);
	submitCommandsAndWait(vk, device, queue, cmdBuffer);
	m_context.resetCommandPoolForVKSC(device, cmdPool);
}
847
copyResultAndCompare(const VkCommandPool & cmdPool,const VkCommandBuffer & cmdBuffer,const VkImage & uncompressed,const VkDeviceSize offset,const UVec3 & size)848 bool BasicComputeTestInstance::copyResultAndCompare (const VkCommandPool& cmdPool,
849 const VkCommandBuffer& cmdBuffer,
850 const VkImage& uncompressed,
851 const VkDeviceSize offset,
852 const UVec3& size)
853 {
854 const DeviceInterface& vk = m_context.getDeviceInterface();
855 const VkQueue queue = m_context.getUniversalQueue();
856 const VkDevice device = m_context.getDevice();
857 Allocator& allocator = m_context.getDefaultAllocator();
858
859 VkDeviceSize imageResultSize = getImageSizeBytes (tcu::IVec3(size.x(), size.y(), size.z()), m_parameters.formatUncompressed);
860 BufferWithMemory imageBufferResult (vk, device, allocator,
861 makeBufferCreateInfo(imageResultSize, VK_BUFFER_USAGE_TRANSFER_DST_BIT),
862 MemoryRequirement::HostVisible);
863
864 beginCommandBuffer(vk, cmdBuffer);
865 {
866 const VkImageSubresourceRange subresourceRange =
867 {
868 VK_IMAGE_ASPECT_COLOR_BIT, //VkImageAspectFlags aspectMask
869 0u, //deUint32 baseMipLevel
870 1u, //deUint32 levelCount
871 0u, //deUint32 baseArrayLayer
872 1u //deUint32 layerCount
873 };
874
875 const VkBufferImageCopy copyRegion =
876 {
877 0ull, // VkDeviceSize bufferOffset;
878 0u, // deUint32 bufferRowLength;
879 0u, // deUint32 bufferImageHeight;
880 makeImageSubresourceLayers(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 0u, 1u), // VkImageSubresourceLayers imageSubresource;
881 makeOffset3D(0, 0, 0), // VkOffset3D imageOffset;
882 makeExtent3D(size), // VkExtent3D imageExtent;
883 };
884
885 const VkImageMemoryBarrier prepareForTransferBarrier = makeImageMemoryBarrier(
886 VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
887 VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
888 uncompressed, subresourceRange);
889
890 const VkBufferMemoryBarrier copyBarrier = makeBufferMemoryBarrier(
891 VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_HOST_READ_BIT,
892 imageBufferResult.get(), 0ull, imageResultSize);
893
894 vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0, (const VkBufferMemoryBarrier*)DE_NULL, 1u, &prepareForTransferBarrier);
895 vk.cmdCopyImageToBuffer(cmdBuffer, uncompressed, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, imageBufferResult.get(), 1u, ©Region);
896 vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_HOST_BIT, (VkDependencyFlags)0, 0u, (const VkMemoryBarrier*)DE_NULL, 1, ©Barrier, 0u, (const VkImageMemoryBarrier*)DE_NULL);
897 }
898 endCommandBuffer(vk, cmdBuffer);
899 submitCommandsAndWait(vk, device, queue, cmdBuffer);
900 m_context.resetCommandPoolForVKSC(device, cmdPool);
901
902 const Allocation& allocResult = imageBufferResult.getAllocation();
903 invalidateAlloc(vk, device, allocResult);
904 if (deMemCmp((const void *)allocResult.getHostPtr(), (const void *)&m_data[static_cast<size_t>(offset)], static_cast<size_t>(imageResultSize)) == 0ull)
905 return true;
906 return false;
907 }
908
descriptorSetUpdate(VkDescriptorSet descriptorSet,const VkDescriptorImageInfo * descriptorImageInfos)909 void BasicComputeTestInstance::descriptorSetUpdate (VkDescriptorSet descriptorSet, const VkDescriptorImageInfo* descriptorImageInfos)
910 {
911 const DeviceInterface& vk = m_context.getDeviceInterface();
912 const VkDevice device = m_context.getDevice();
913 DescriptorSetUpdateBuilder descriptorSetUpdateBuilder;
914
915 switch(m_parameters.operation)
916 {
917 case OPERATION_IMAGE_LOAD:
918 case OPERATION_IMAGE_STORE:
919 {
920 for (deUint32 bindingNdx = 0u; bindingNdx < m_parameters.imagesCount; ++bindingNdx)
921 descriptorSetUpdateBuilder.writeSingle(descriptorSet, DescriptorSetUpdateBuilder::Location::binding(bindingNdx), VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, &descriptorImageInfos[bindingNdx]);
922
923 break;
924 }
925
926 case OPERATION_TEXEL_FETCH:
927 case OPERATION_TEXTURE:
928 {
929 for (deUint32 bindingNdx = 0u; bindingNdx < m_parameters.imagesCount; ++bindingNdx)
930 {
931 descriptorSetUpdateBuilder.writeSingle(descriptorSet, DescriptorSetUpdateBuilder::Location::binding(bindingNdx),
932 bindingNdx == 0u ? VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER : VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, &descriptorImageInfos[bindingNdx]);
933 }
934
935 break;
936 }
937
938 default:
939 DE_ASSERT(false);
940 }
941 descriptorSetUpdateBuilder.update(vk, device);
942 }
943
createImageInfos(ImageData & imageData,const vector<UVec3> & mipMapSizes,const bool isCompressed)944 void BasicComputeTestInstance::createImageInfos (ImageData& imageData, const vector<UVec3>& mipMapSizes, const bool isCompressed)
945 {
946 const VkImageType imageType = mapImageType(m_parameters.imageType);
947
948 if (isCompressed)
949 {
950 const VkExtent3D extentCompressed = makeExtent3D(getLayerSize(m_parameters.imageType, m_parameters.size));
951 const VkImageCreateInfo compressedInfo =
952 {
953 VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO, // VkStructureType sType;
954 DE_NULL, // const void* pNext;
955 VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT |
956 VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT |
957 VK_IMAGE_CREATE_EXTENDED_USAGE_BIT, // VkImageCreateFlags flags;
958 imageType, // VkImageType imageType;
959 m_parameters.formatCompressed, // VkFormat format;
960 extentCompressed, // VkExtent3D extent;
961 static_cast<deUint32>(mipMapSizes.size()), // deUint32 mipLevels;
962 getLayerCount(), // deUint32 arrayLayers;
963 VK_SAMPLE_COUNT_1_BIT, // VkSampleCountFlagBits samples;
964 VK_IMAGE_TILING_OPTIMAL, // VkImageTiling tiling;
965 VK_IMAGE_USAGE_SAMPLED_BIT |
966 VK_IMAGE_USAGE_STORAGE_BIT |
967 VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
968 VK_IMAGE_USAGE_TRANSFER_DST_BIT, // VkImageUsageFlags usage;
969 VK_SHARING_MODE_EXCLUSIVE, // VkSharingMode sharingMode;
970 0u, // deUint32 queueFamilyIndexCount;
971 DE_NULL, // const deUint32* pQueueFamilyIndices;
972 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout initialLayout;
973 };
974
975 VkImageFormatProperties imageFormatProperties;
976 if (m_context.getInstanceInterface().getPhysicalDeviceImageFormatProperties(m_context.getPhysicalDevice(), compressedInfo.format, compressedInfo.imageType, compressedInfo.tiling, compressedInfo.usage, compressedInfo.flags, &imageFormatProperties) != VK_SUCCESS)
977 TCU_THROW(NotSupportedError, "Image parameters not supported");
978
979 imageData.addImageInfo(compressedInfo);
980 }
981 else
982 {
983 UVec3 size = m_parameters.size;
984 if (m_parameters.imageType == IMAGE_TYPE_1D) {
985 size.y() = 1;
986 }
987 size.z() = 1;
988 const VkExtent3D originalResolutionInBlocks = makeExtent3D(getCompressedImageResolutionInBlocks(m_parameters.formatCompressed, size));
989
990 for (size_t mipNdx = 0ull; mipNdx < mipMapSizes.size(); ++mipNdx)
991 for (size_t layerNdx = 0ull; layerNdx < getLayerCount(); ++layerNdx)
992 {
993 const VkExtent3D extentUncompressed = m_parameters.useMipmaps ?
994 makeExtent3D(getCompressedImageResolutionInBlocks(m_parameters.formatCompressed, mipMapSizes[mipNdx])) :
995 originalResolutionInBlocks;
996 const VkImageCreateInfo uncompressedInfo =
997 {
998 VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO, // VkStructureType sType;
999 DE_NULL, // const void* pNext;
1000 0u, // VkImageCreateFlags flags;
1001 imageType, // VkImageType imageType;
1002 m_parameters.formatUncompressed, // VkFormat format;
1003 extentUncompressed, // VkExtent3D extent;
1004 1u, // deUint32 mipLevels;
1005 1u, // deUint32 arrayLayers;
1006 VK_SAMPLE_COUNT_1_BIT, // VkSampleCountFlagBits samples;
1007 VK_IMAGE_TILING_OPTIMAL, // VkImageTiling tiling;
1008 m_parameters.uncompressedImageUsage |
1009 VK_IMAGE_USAGE_SAMPLED_BIT, // VkImageUsageFlags usage;
1010 VK_SHARING_MODE_EXCLUSIVE, // VkSharingMode sharingMode;
1011 0u, // deUint32 queueFamilyIndexCount;
1012 DE_NULL, // const deUint32* pQueueFamilyIndices;
1013 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout initialLayout;
1014 };
1015 imageData.addImageInfo(uncompressedInfo);
1016 }
1017 }
1018 }
1019
decompressImage(const VkCommandPool & cmdPool,const VkCommandBuffer & cmdBuffer,vector<ImageData> & imageData,const vector<UVec3> & mipMapSizes)1020 bool BasicComputeTestInstance::decompressImage (const VkCommandPool& cmdPool,
1021 const VkCommandBuffer& cmdBuffer,
1022 vector<ImageData>& imageData,
1023 const vector<UVec3>& mipMapSizes)
1024 {
1025 const DeviceInterface& vk = m_context.getDeviceInterface();
1026 const VkDevice device = m_context.getDevice();
1027 const VkQueue queue = m_context.getUniversalQueue();
1028 Allocator& allocator = m_context.getDefaultAllocator();
1029 const Unique<VkShaderModule> shaderModule (createShaderModule(vk, device, m_context.getBinaryCollection().get("decompress"), 0));
1030 const VkImage& compressed = imageData[0].getImage(0);
1031 const VkImageType imageType = mapImageType(m_parameters.imageType);
1032
1033 for (deUint32 ndx = 0u; ndx < imageData.size(); ndx++)
1034 imageData[ndx].resetViews();
1035
1036 for (deUint32 mipNdx = 0u; mipNdx < mipMapSizes.size(); ++mipNdx)
1037 for (deUint32 layerNdx = 0u; layerNdx < getLayerCount(); ++layerNdx)
1038 {
1039 const bool layoutShaderReadOnly = (layerNdx % 2u) == 1;
1040 const deUint32 imageNdx = layerNdx + mipNdx * getLayerCount();
1041 const VkExtent3D extentCompressed = makeExtent3D(mipMapSizes[mipNdx]);
1042 const VkImage& uncompressed = imageData[m_parameters.imagesCount -1].getImage(imageNdx);
1043 const VkExtent3D extentUncompressed = imageData[m_parameters.imagesCount -1].getImageInfo(imageNdx).extent;
1044 const VkDeviceSize bufferSizeComp = getCompressedImageSizeInBytes(m_parameters.formatCompressed, mipMapSizes[mipNdx]);
1045
1046 VkFormatProperties properties;
1047 m_context.getInstanceInterface().getPhysicalDeviceFormatProperties(m_context.getPhysicalDevice(), m_parameters.formatForVerify, &properties);
1048 if (!(properties.optimalTilingFeatures & VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT))
1049 TCU_THROW(NotSupportedError, "Format storage feature not supported");
1050
1051 const VkImageCreateInfo decompressedImageInfo =
1052 {
1053 VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO, // VkStructureType sType;
1054 DE_NULL, // const void* pNext;
1055 0u, // VkImageCreateFlags flags;
1056 imageType, // VkImageType imageType;
1057 m_parameters.formatForVerify, // VkFormat format;
1058 extentCompressed, // VkExtent3D extent;
1059 1u, // deUint32 mipLevels;
1060 1u, // deUint32 arrayLayers;
1061 VK_SAMPLE_COUNT_1_BIT, // VkSampleCountFlagBits samples;
1062 VK_IMAGE_TILING_OPTIMAL, // VkImageTiling tiling;
1063 VK_IMAGE_USAGE_SAMPLED_BIT |
1064 VK_IMAGE_USAGE_STORAGE_BIT |
1065 VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
1066 VK_IMAGE_USAGE_TRANSFER_DST_BIT, // VkImageUsageFlags usage;
1067 VK_SHARING_MODE_EXCLUSIVE, // VkSharingMode sharingMode;
1068 0u, // deUint32 queueFamilyIndexCount;
1069 DE_NULL, // const deUint32* pQueueFamilyIndices;
1070 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout initialLayout;
1071 };
1072
1073 const VkImageCreateInfo compressedImageInfo =
1074 {
1075 VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO, // VkStructureType sType;
1076 DE_NULL, // const void* pNext;
1077 0u, // VkImageCreateFlags flags;
1078 imageType, // VkImageType imageType;
1079 m_parameters.formatCompressed, // VkFormat format;
1080 extentCompressed, // VkExtent3D extent;
1081 1u, // deUint32 mipLevels;
1082 1u, // deUint32 arrayLayers;
1083 VK_SAMPLE_COUNT_1_BIT, // VkSampleCountFlagBits samples;
1084 VK_IMAGE_TILING_OPTIMAL, // VkImageTiling tiling;
1085 VK_IMAGE_USAGE_SAMPLED_BIT |
1086 VK_IMAGE_USAGE_TRANSFER_DST_BIT, // VkImageUsageFlags usage;
1087 VK_SHARING_MODE_EXCLUSIVE, // VkSharingMode sharingMode;
1088 0u, // deUint32 queueFamilyIndexCount;
1089 DE_NULL, // const deUint32* pQueueFamilyIndices;
1090 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout initialLayout;
1091 };
1092 const VkImageUsageFlags compressedViewUsageFlags = VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
1093 const VkImageViewUsageCreateInfo compressedViewUsageCI =
1094 {
1095 VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO, //VkStructureType sType;
1096 DE_NULL, //const void* pNext;
1097 compressedViewUsageFlags, //VkImageUsageFlags usage;
1098 };
1099 const VkImageViewType imageViewType (mapImageViewType(m_parameters.imageType));
1100 Image resultImage (vk, device, allocator, decompressedImageInfo, MemoryRequirement::Any);
1101 Image referenceImage (vk, device, allocator, decompressedImageInfo, MemoryRequirement::Any);
1102 Image uncompressedImage (vk, device, allocator, compressedImageInfo, MemoryRequirement::Any);
1103 Move<VkImageView> resultView = makeImageView(vk, device, resultImage.get(), imageViewType, decompressedImageInfo.format,
1104 makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, decompressedImageInfo.extent.depth, 0u, decompressedImageInfo.arrayLayers));
1105 Move<VkImageView> referenceView = makeImageView(vk, device, referenceImage.get(), imageViewType, decompressedImageInfo.format,
1106 makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, decompressedImageInfo.extent.depth, 0u, decompressedImageInfo.arrayLayers));
1107 Move<VkImageView> uncompressedView = makeImageView(vk, device, uncompressedImage.get(), imageViewType, m_parameters.formatCompressed,
1108 makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, compressedImageInfo.extent.depth, 0u, compressedImageInfo.arrayLayers));
1109 bool const useMultiLayer = m_parameters.imageType == IMAGE_TYPE_2D && m_parameters.layers > 1u;
1110 Move<VkImageView> compressedView = (useMultiLayer) ?
1111 makeImageView(vk, device, compressed, VK_IMAGE_VIEW_TYPE_2D_ARRAY, m_parameters.formatCompressed,
1112 makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, static_cast<uint32_t>(mipMapSizes.size()), 0u, m_parameters.layers), &compressedViewUsageCI) :
1113 makeImageView(vk, device, compressed, imageViewType, m_parameters.formatCompressed,
1114 makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, mipNdx, 1u, layerNdx, 1u), &compressedViewUsageCI);
1115 Move<VkDescriptorSetLayout> descriptorSetLayout = DescriptorSetLayoutBuilder()
1116 .addSingleBinding(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, VK_SHADER_STAGE_COMPUTE_BIT)
1117 .addSingleBinding(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, VK_SHADER_STAGE_COMPUTE_BIT)
1118 .addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, VK_SHADER_STAGE_COMPUTE_BIT)
1119 .addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, VK_SHADER_STAGE_COMPUTE_BIT)
1120 .build(vk, device);
1121 Move<VkDescriptorPool> descriptorPool = DescriptorPoolBuilder()
1122 .addType(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, decompressedImageInfo.arrayLayers)
1123 .addType(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, decompressedImageInfo.arrayLayers)
1124 .addType(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, decompressedImageInfo.arrayLayers)
1125 .addType(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, decompressedImageInfo.arrayLayers)
1126 .build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, decompressedImageInfo.arrayLayers);
1127
1128 Move<VkDescriptorSet> descriptorSet = makeDescriptorSet(vk, device, *descriptorPool, *descriptorSetLayout);
1129 const struct PushData { uint32_t layer; uint32_t level; } pushData
1130 = { layerNdx, mipNdx };
1131 const VkPushConstantRange pushConstantRange = { VK_SHADER_STAGE_COMPUTE_BIT, 0u, static_cast<uint32_t>(sizeof pushData) };
1132 const Unique<VkPipelineLayout> pipelineLayout (makePipelineLayout(vk, device, 1u, &descriptorSetLayout.get(), 1u, &pushConstantRange));
1133 const Unique<VkPipeline> pipeline (makeComputePipeline(vk, device, *pipelineLayout, *shaderModule));
1134 const VkDeviceSize bufferSize = getImageSizeBytes(IVec3((int)extentCompressed.width, (int)extentCompressed.height, (int)extentCompressed.depth), m_parameters.formatForVerify);
1135 BufferWithMemory resultBuffer (vk, device, allocator,
1136 makeBufferCreateInfo(bufferSize, VK_BUFFER_USAGE_TRANSFER_DST_BIT), MemoryRequirement::HostVisible);
1137 BufferWithMemory referenceBuffer (vk, device, allocator,
1138 makeBufferCreateInfo(bufferSize, VK_BUFFER_USAGE_TRANSFER_DST_BIT), MemoryRequirement::HostVisible);
1139 BufferWithMemory transferBuffer (vk, device, allocator,
1140 makeBufferCreateInfo(bufferSizeComp, VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT), MemoryRequirement::HostVisible);
1141 Move<VkSampler> sampler;
1142 {
1143 const VkSamplerCreateInfo createInfo =
1144 {
1145 VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO, //VkStructureType sType;
1146 DE_NULL, //const void* pNext;
1147 0u, //VkSamplerCreateFlags flags;
1148 VK_FILTER_NEAREST, //VkFilter magFilter;
1149 VK_FILTER_NEAREST, //VkFilter minFilter;
1150 VK_SAMPLER_MIPMAP_MODE_NEAREST, //VkSamplerMipmapMode mipmapMode;
1151 VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE, //VkSamplerAddressMode addressModeU;
1152 VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE, //VkSamplerAddressMode addressModeV;
1153 VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE, //VkSamplerAddressMode addressModeW;
1154 0.0f, //float mipLodBias;
1155 VK_FALSE, //VkBool32 anisotropyEnable;
1156 1.0f, //float maxAnisotropy;
1157 VK_FALSE, //VkBool32 compareEnable;
1158 VK_COMPARE_OP_EQUAL, //VkCompareOp compareOp;
1159 0.0f, //float minLod;
1160 (float)mipMapSizes.size(), //float maxLod;
1161 VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK, //VkBorderColor borderColor;
1162 VK_FALSE, //VkBool32 unnormalizedCoordinates;
1163 };
1164 sampler = createSampler(vk, device, &createInfo);
1165 }
1166
1167 VkDescriptorImageInfo descriptorImageInfos[] =
1168 {
1169 makeDescriptorImageInfo(*sampler, *uncompressedView, layoutShaderReadOnly ? VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL : VK_IMAGE_LAYOUT_GENERAL),
1170 makeDescriptorImageInfo(*sampler, *compressedView, layoutShaderReadOnly ? VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL : VK_IMAGE_LAYOUT_GENERAL),
1171 makeDescriptorImageInfo(DE_NULL, *resultView, VK_IMAGE_LAYOUT_GENERAL),
1172 makeDescriptorImageInfo(DE_NULL, *referenceView, VK_IMAGE_LAYOUT_GENERAL)
1173 };
1174 DescriptorSetUpdateBuilder()
1175 .writeSingle(descriptorSet.get(), DescriptorSetUpdateBuilder::Location::binding(0u), VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, &descriptorImageInfos[0])
1176 .writeSingle(descriptorSet.get(), DescriptorSetUpdateBuilder::Location::binding(1u), VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, &descriptorImageInfos[1])
1177 .writeSingle(descriptorSet.get(), DescriptorSetUpdateBuilder::Location::binding(2u), VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, &descriptorImageInfos[2])
1178 .writeSingle(descriptorSet.get(), DescriptorSetUpdateBuilder::Location::binding(3u), VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, &descriptorImageInfos[3])
1179 .update(vk, device);
1180
1181
1182 beginCommandBuffer(vk, cmdBuffer);
1183 {
1184 const VkImageSubresourceRange subresourceRange =
1185 {
1186 VK_IMAGE_ASPECT_COLOR_BIT, //VkImageAspectFlags aspectMask
1187 0u, //deUint32 baseMipLevel
1188 1u, //deUint32 levelCount
1189 0u, //deUint32 baseArrayLayer
1190 1u //deUint32 layerCount
1191 };
1192
1193 const VkImageSubresourceRange subresourceRangeComp =
1194 {
1195 VK_IMAGE_ASPECT_COLOR_BIT, //VkImageAspectFlags aspectMask
1196 (useMultiLayer) ? 0u : mipNdx, //deUint32 baseMipLevel
1197 (useMultiLayer) ? static_cast<uint32_t>(mipMapSizes.size()) : 1u, //deUint32 levelCount
1198 (useMultiLayer) ? 0u : layerNdx, //deUint32 baseArrayLayer
1199 (useMultiLayer) ? m_parameters.layers : 1u //deUint32 layerCount
1200 };
1201
1202 const VkBufferImageCopy copyRegion =
1203 {
1204 0ull, // VkDeviceSize bufferOffset;
1205 0u, // deUint32 bufferRowLength;
1206 0u, // deUint32 bufferImageHeight;
1207 makeImageSubresourceLayers(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 0u, 1u), // VkImageSubresourceLayers imageSubresource;
1208 makeOffset3D(0, 0, 0), // VkOffset3D imageOffset;
1209 decompressedImageInfo.extent, // VkExtent3D imageExtent;
1210 };
1211
1212 const VkBufferImageCopy compressedCopyRegion =
1213 {
1214 0ull, // VkDeviceSize bufferOffset;
1215 0u, // deUint32 bufferRowLength;
1216 0u, // deUint32 bufferImageHeight;
1217 makeImageSubresourceLayers(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 0u, 1u), // VkImageSubresourceLayers imageSubresource;
1218 makeOffset3D(0, 0, 0), // VkOffset3D imageOffset;
1219 extentUncompressed, // VkExtent3D imageExtent;
1220 };
1221
1222 {
1223
1224 const VkBufferMemoryBarrier preCopyBufferBarriers = makeBufferMemoryBarrier(0u, VK_ACCESS_TRANSFER_WRITE_BIT,
1225 transferBuffer.get(), 0ull, bufferSizeComp);
1226
1227 vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT,
1228 (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1u, &preCopyBufferBarriers, 0u, (const VkImageMemoryBarrier*)DE_NULL);
1229 }
1230
1231 vk.cmdCopyImageToBuffer(cmdBuffer, uncompressed, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, transferBuffer.get(), 1u, &compressedCopyRegion);
1232
1233 {
1234 const VkBufferMemoryBarrier postCopyBufferBarriers = makeBufferMemoryBarrier(VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
1235 transferBuffer.get(), 0ull, bufferSizeComp);
1236
1237 const VkImageMemoryBarrier preCopyImageBarriers = makeImageMemoryBarrier(0u, VK_ACCESS_TRANSFER_WRITE_BIT,
1238 VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, uncompressedImage.get(), subresourceRange);
1239
1240 vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT,
1241 (VkDependencyFlags)0, 0u, (const VkMemoryBarrier*)DE_NULL, 1u, &postCopyBufferBarriers, 1u, &preCopyImageBarriers);
1242 }
1243
1244 vk.cmdCopyBufferToImage(cmdBuffer, transferBuffer.get(), uncompressedImage.get(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, ©Region);
1245
1246 vk.cmdBindPipeline(cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *pipeline);
1247 vk.cmdBindDescriptorSets(cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *pipelineLayout, 0u, 1u, &descriptorSet.get(), 0u, DE_NULL);
1248
1249 {
1250 const VkImageMemoryBarrier preShaderImageBarriers[] =
1251 {
1252
1253 makeImageMemoryBarrier(VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_SHADER_READ_BIT,
1254 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, layoutShaderReadOnly ? VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL : VK_IMAGE_LAYOUT_GENERAL,
1255 uncompressedImage.get(), subresourceRange),
1256
1257 makeImageMemoryBarrier(0, VK_ACCESS_SHADER_READ_BIT,
1258 (useMultiLayer && !layoutShaderReadOnly && layerNdx) ? VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL : VK_IMAGE_LAYOUT_GENERAL, layoutShaderReadOnly ? VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL : VK_IMAGE_LAYOUT_GENERAL,
1259 compressed, subresourceRangeComp),
1260
1261 makeImageMemoryBarrier(0u, VK_ACCESS_SHADER_WRITE_BIT,
1262 VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL,
1263 resultImage.get(), subresourceRange),
1264
1265 makeImageMemoryBarrier(0u, VK_ACCESS_SHADER_WRITE_BIT,
1266 VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL,
1267 referenceImage.get(), subresourceRange)
1268 };
1269
1270 vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT,
1271 (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, (const VkBufferMemoryBarrier*)DE_NULL,
1272 DE_LENGTH_OF_ARRAY(preShaderImageBarriers), preShaderImageBarriers);
1273 }
1274
1275 vk.cmdPushConstants(cmdBuffer, *pipelineLayout, VK_SHADER_STAGE_COMPUTE_BIT, 0u, sizeof pushData, &pushData);
1276 vk.cmdDispatch(cmdBuffer, extentCompressed.width, extentCompressed.height, extentCompressed.depth);
1277
1278 {
1279 const VkImageMemoryBarrier postShaderImageBarriers[] =
1280 {
1281 makeImageMemoryBarrier(VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
1282 VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
1283 resultImage.get(), subresourceRange),
1284
1285 makeImageMemoryBarrier(VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
1286 VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
1287 referenceImage.get(), subresourceRange)
1288 };
1289
1290 vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT,
1291 (VkDependencyFlags)0, 0u, (const VkMemoryBarrier*)DE_NULL, 0u, (const VkBufferMemoryBarrier*)DE_NULL,
1292 DE_LENGTH_OF_ARRAY(postShaderImageBarriers), postShaderImageBarriers);
1293 }
1294
1295 vk.cmdCopyImageToBuffer(cmdBuffer, resultImage.get(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, resultBuffer.get(), 1u, ©Region);
1296 vk.cmdCopyImageToBuffer(cmdBuffer, referenceImage.get(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, referenceBuffer.get(), 1u, ©Region);
1297
1298 {
1299 const VkBufferMemoryBarrier postCopyBufferBarrier[] =
1300 {
1301 makeBufferMemoryBarrier(VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_HOST_READ_BIT,
1302 resultBuffer.get(), 0ull, bufferSize),
1303
1304 makeBufferMemoryBarrier(VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_HOST_READ_BIT,
1305 referenceBuffer.get(), 0ull, bufferSize),
1306 };
1307
1308 vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_HOST_BIT,
1309 (VkDependencyFlags)0, 0u, (const VkMemoryBarrier*)DE_NULL, DE_LENGTH_OF_ARRAY(postCopyBufferBarrier), postCopyBufferBarrier,
1310 0u, (const VkImageMemoryBarrier*)DE_NULL);
1311 }
1312 }
1313 endCommandBuffer(vk, cmdBuffer);
1314 submitCommandsAndWait(vk, device, queue, cmdBuffer);
1315 m_context.resetCommandPoolForVKSC(device, cmdPool);
1316
1317 const Allocation& resultAlloc = resultBuffer.getAllocation();
1318 const Allocation& referenceAlloc = referenceBuffer.getAllocation();
1319 invalidateAlloc(vk, device, resultAlloc);
1320 invalidateAlloc(vk, device, referenceAlloc);
1321
1322 BinaryCompareMode compareMode =
1323 (m_parameters.formatIsASTC)
1324 ?(COMPARE_MODE_ALLOW_ASTC_ERROR_COLOUR_WARNING)
1325 :(COMPARE_MODE_NORMAL);
1326
1327 BinaryCompareResult res = BinaryCompare(referenceAlloc.getHostPtr(),
1328 resultAlloc.getHostPtr(),
1329 (size_t)bufferSize,
1330 m_parameters.formatForVerify,
1331 compareMode);
1332
1333 if (res == COMPARE_RESULT_FAILED)
1334 {
1335 ConstPixelBufferAccess resultPixels (mapVkFormat(decompressedImageInfo.format), decompressedImageInfo.extent.width, decompressedImageInfo.extent.height, decompressedImageInfo.extent.depth, resultAlloc.getHostPtr());
1336 ConstPixelBufferAccess referencePixels (mapVkFormat(decompressedImageInfo.format), decompressedImageInfo.extent.width, decompressedImageInfo.extent.height, decompressedImageInfo.extent.depth, referenceAlloc.getHostPtr());
1337
1338 if(!fuzzyCompare(m_context.getTestContext().getLog(), "Image Comparison", "Image Comparison", resultPixels, referencePixels, 0.001f, tcu::COMPARE_LOG_EVERYTHING))
1339 return false;
1340 }
1341 else if (res == COMPARE_RESULT_ASTC_QUALITY_WARNING)
1342 {
1343 m_bASTCErrorColourMismatch = true;
1344 }
1345 }
1346
1347 return true;
1348 }
1349
// Test instance that re-implements only the shader-execution stage of
// BasicComputeTestInstance; every other stage of the test is inherited
// unchanged.
class ImageStoreComputeTestInstance : public BasicComputeTestInstance
{
public:
					ImageStoreComputeTestInstance	(Context&							context,
													 const TestParameters&				parameters);
protected:
	// Records and submits the compute dispatches for the image-store variant.
	virtual void	executeShader					(const VkCommandPool&				cmdPool,
													 const VkCommandBuffer&				cmdBuffer,
													 const VkDescriptorSetLayout&		descriptorSetLayout,
													 const VkDescriptorPool&			descriptorPool,
													 vector<ImageData>&					imageData);
private:
};
1363
ImageStoreComputeTestInstance(Context & context,const TestParameters & parameters)1364 ImageStoreComputeTestInstance::ImageStoreComputeTestInstance (Context& context, const TestParameters& parameters)
1365 :BasicComputeTestInstance (context, parameters)
1366 {
1367 }
1368
executeShader(const VkCommandPool & cmdPool,const VkCommandBuffer & cmdBuffer,const VkDescriptorSetLayout & descriptorSetLayout,const VkDescriptorPool & descriptorPool,vector<ImageData> & imageData)1369 void ImageStoreComputeTestInstance::executeShader (const VkCommandPool& cmdPool,
1370 const VkCommandBuffer& cmdBuffer,
1371 const VkDescriptorSetLayout& descriptorSetLayout,
1372 const VkDescriptorPool& descriptorPool,
1373 vector<ImageData>& imageData)
1374 {
1375 const DeviceInterface& vk = m_context.getDeviceInterface();
1376 const VkDevice device = m_context.getDevice();
1377 const VkQueue queue = m_context.getUniversalQueue();
1378 const Unique<VkShaderModule> shaderModule (createShaderModule(vk, device, m_context.getBinaryCollection().get("comp"), 0));
1379 vector<SharedVkDescriptorSet> descriptorSets (imageData[0].getImageViewCount());
1380 const Unique<VkPipelineLayout> pipelineLayout (makePipelineLayout(vk, device, descriptorSetLayout));
1381 const Unique<VkPipeline> pipeline (makeComputePipeline(vk, device, *pipelineLayout, *shaderModule));
1382 Move<VkSampler> sampler;
1383 {
1384 const VkSamplerCreateInfo createInfo =
1385 {
1386 VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO, //VkStructureType sType;
1387 DE_NULL, //const void* pNext;
1388 0u, //VkSamplerCreateFlags flags;
1389 VK_FILTER_NEAREST, //VkFilter magFilter;
1390 VK_FILTER_NEAREST, //VkFilter minFilter;
1391 VK_SAMPLER_MIPMAP_MODE_NEAREST, //VkSamplerMipmapMode mipmapMode;
1392 VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE, //VkSamplerAddressMode addressModeU;
1393 VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE, //VkSamplerAddressMode addressModeV;
1394 VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE, //VkSamplerAddressMode addressModeW;
1395 0.0f, //float mipLodBias;
1396 VK_FALSE, //VkBool32 anisotropyEnable;
1397 1.0f, //float maxAnisotropy;
1398 VK_FALSE, //VkBool32 compareEnable;
1399 VK_COMPARE_OP_EQUAL, //VkCompareOp compareOp;
1400 0.0f, //float minLod;
1401 0.0f, //float maxLod;
1402 VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK, //VkBorderColor borderColor;
1403 VK_TRUE, //VkBool32 unnormalizedCoordinates;
1404 };
1405 sampler = createSampler(vk, device, &createInfo);
1406 }
1407
1408 vector<VkDescriptorImageInfo> descriptorImageInfos (descriptorSets.size() * m_parameters.imagesCount);
1409 for (deUint32 viewNdx = 0u; viewNdx < descriptorSets.size(); ++viewNdx)
1410 {
1411 const deUint32 descriptorNdx = viewNdx * m_parameters.imagesCount;
1412 for (deUint32 imageNdx = 0u; imageNdx < m_parameters.imagesCount; ++imageNdx)
1413 {
1414 descriptorImageInfos[descriptorNdx+imageNdx] = makeDescriptorImageInfo(*sampler,
1415 imageData[imageNdx].getImageView(viewNdx), VK_IMAGE_LAYOUT_GENERAL);
1416 }
1417 }
1418
1419 for (deUint32 ndx = 0u; ndx < descriptorSets.size(); ++ndx)
1420 descriptorSets[ndx] = makeVkSharedPtr(makeDescriptorSet(vk, device, descriptorPool, descriptorSetLayout));
1421
1422 beginCommandBuffer(vk, cmdBuffer);
1423 {
1424 const VkImageSubresourceRange compressedRange =
1425 {
1426 VK_IMAGE_ASPECT_COLOR_BIT, //VkImageAspectFlags aspectMask
1427 0u, //deUint32 baseMipLevel
1428 imageData[0].getImageInfo(0).mipLevels, //deUint32 levelCount
1429 0u, //deUint32 baseArrayLayer
1430 imageData[0].getImageInfo(0).arrayLayers //deUint32 layerCount
1431 };
1432
1433 const VkImageSubresourceRange uncompressedRange =
1434 {
1435 VK_IMAGE_ASPECT_COLOR_BIT, //VkImageAspectFlags aspectMask
1436 0u, //deUint32 baseMipLevel
1437 1u, //deUint32 levelCount
1438 0u, //deUint32 baseArrayLayer
1439 1u //deUint32 layerCount
1440 };
1441
1442 vk.cmdBindPipeline(cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *pipeline);
1443
1444 vector<VkImageMemoryBarrier> preShaderImageBarriers (descriptorSets.size() * 2u + 1u);
1445 for (deUint32 imageNdx = 0u; imageNdx < imageData[1].getImagesCount(); ++imageNdx)
1446 {
1447 preShaderImageBarriers[imageNdx] = makeImageMemoryBarrier(
1448 VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_SHADER_WRITE_BIT,
1449 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL,
1450 imageData[1].getImage(imageNdx), uncompressedRange);
1451
1452 preShaderImageBarriers[imageNdx + imageData[1].getImagesCount()] = makeImageMemoryBarrier(
1453 VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_SHADER_WRITE_BIT,
1454 VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL,
1455 imageData[2].getImage(imageNdx), uncompressedRange);
1456 }
1457
1458 preShaderImageBarriers[preShaderImageBarriers.size()-1] = makeImageMemoryBarrier(
1459 VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_SHADER_READ_BIT,
1460 VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL,
1461 imageData[0].getImage(0u), compressedRange);
1462
1463 vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT,
1464 (VkDependencyFlags)0, 0u, (const VkMemoryBarrier*)DE_NULL, 0u, (const VkBufferMemoryBarrier*)DE_NULL,
1465 static_cast<deUint32>(preShaderImageBarriers.size()), &preShaderImageBarriers[0]);
1466
1467 for (deUint32 ndx = 0u; ndx <descriptorSets.size(); ++ndx)
1468 {
1469 descriptorSetUpdate (**descriptorSets[ndx], &descriptorImageInfos[ndx* m_parameters.imagesCount]);
1470 vk.cmdBindDescriptorSets(cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *pipelineLayout, 0u, 1u, &(**descriptorSets[ndx]), 0u, DE_NULL);
1471 vk.cmdDispatch(cmdBuffer, imageData[1].getImageInfo(ndx).extent.width,
1472 imageData[1].getImageInfo(ndx).extent.height,
1473 imageData[1].getImageInfo(ndx).extent.depth);
1474 }
1475 }
1476 endCommandBuffer(vk, cmdBuffer);
1477 submitCommandsAndWait(vk, device, queue, cmdBuffer);
1478 m_context.resetCommandPoolForVKSC(device, cmdPool);
1479 }
1480
1481 class GraphicsAttachmentsTestInstance : public BasicTranscodingTestInstance
1482 {
1483 public:
1484 GraphicsAttachmentsTestInstance (Context& context, const TestParameters& parameters);
1485 virtual TestStatus iterate (void);
1486
1487 protected:
1488 virtual bool isWriteToCompressedOperation ();
1489 VkImageCreateInfo makeCreateImageInfo (const VkFormat format,
1490 const ImageType type,
1491 const UVec3& size,
1492 const VkImageUsageFlags usageFlags,
1493 const VkImageCreateFlags* createFlags,
1494 const deUint32 levels,
1495 const deUint32 layers);
1496 VkDeviceSize getCompressedImageData (const VkFormat format,
1497 const UVec3& size,
1498 std::vector<deUint8>& data,
1499 const deUint32 layer,
1500 const deUint32 level);
1501 VkDeviceSize getUncompressedImageData (const VkFormat format,
1502 const UVec3& size,
1503 std::vector<deUint8>& data,
1504 const deUint32 layer,
1505 const deUint32 level);
1506 virtual void prepareData ();
1507 virtual void prepareVertexBuffer ();
1508 virtual void transcodeRead (const VkCommandPool& cmdPool);
1509 virtual void transcodeWrite (const VkCommandPool& cmdPool);
1510 bool verifyDecompression (const VkCommandPool& cmdPool,
1511 const std::vector<deUint8>& refCompressedData,
1512 const de::MovePtr<Image>& resCompressedImage,
1513 const deUint32 layer,
1514 const deUint32 level,
1515 const UVec3& mipmapDims);
1516
1517 typedef std::vector<deUint8> RawDataVector;
1518 typedef SharedPtr<RawDataVector> RawDataPtr;
1519 typedef std::vector<RawDataPtr> LevelData;
1520 typedef std::vector<LevelData> FullImageData;
1521
1522 FullImageData m_srcData;
1523 FullImageData m_dstData;
1524
1525 typedef SharedPtr<Image> ImagePtr;
1526 typedef std::vector<ImagePtr> LevelImages;
1527 typedef std::vector<LevelImages> ImagesArray;
1528
1529 ImagesArray m_uncompressedImages;
1530 MovePtr<Image> m_compressedImage;
1531
1532 VkImageViewUsageCreateInfo m_imageViewUsageKHR;
1533 VkImageViewUsageCreateInfo* m_srcImageViewUsageKHR;
1534 VkImageViewUsageCreateInfo* m_dstImageViewUsageKHR;
1535 std::vector<tcu::UVec3> m_compressedImageResVec;
1536 std::vector<tcu::UVec3> m_uncompressedImageResVec;
1537 VkFormat m_srcFormat;
1538 VkFormat m_dstFormat;
1539 VkImageUsageFlags m_srcImageUsageFlags;
1540 VkImageUsageFlags m_dstImageUsageFlags;
1541 std::vector<tcu::UVec3> m_srcImageResolutions;
1542 std::vector<tcu::UVec3> m_dstImageResolutions;
1543
1544 MovePtr<BufferWithMemory> m_vertexBuffer;
1545 deUint32 m_vertexCount;
1546 VkDeviceSize m_vertexBufferOffset;
1547 };
1548
// All bookkeeping members start empty / zero-initialized; the actual setup is
// deferred to prepareData() and prepareVertexBuffer(), called from iterate().
GraphicsAttachmentsTestInstance::GraphicsAttachmentsTestInstance (Context& context, const TestParameters& parameters)
	: BasicTranscodingTestInstance(context, parameters)
	, m_srcData()
	, m_dstData()
	, m_uncompressedImages()
	, m_compressedImage()
	, m_imageViewUsageKHR()
	, m_srcImageViewUsageKHR()
	, m_dstImageViewUsageKHR()
	, m_compressedImageResVec()
	, m_uncompressedImageResVec()
	, m_srcFormat()
	, m_dstFormat()
	, m_srcImageUsageFlags()
	, m_dstImageUsageFlags()
	, m_srcImageResolutions()
	, m_dstImageResolutions()
	, m_vertexBuffer()
	, m_vertexCount(0u)
	, m_vertexBufferOffset(0ull)
{
}
1571
iterate(void)1572 TestStatus GraphicsAttachmentsTestInstance::iterate (void)
1573 {
1574 const DeviceInterface& vk = m_context.getDeviceInterface();
1575 const VkDevice device = m_context.getDevice();
1576 const deUint32 queueFamilyIndex = m_context.getUniversalQueueFamilyIndex();
1577 const Unique<VkCommandPool> cmdPool (createCommandPool(vk, device, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT, queueFamilyIndex));
1578
1579 prepareData();
1580 prepareVertexBuffer();
1581
1582 for (deUint32 levelNdx = 0; levelNdx < getLevelCount(); ++levelNdx)
1583 for (deUint32 layerNdx = 0; layerNdx < getLayerCount(); ++layerNdx)
1584 DE_ASSERT(m_srcData[levelNdx][layerNdx]->size() == m_dstData[levelNdx][layerNdx]->size());
1585
1586 if (isWriteToCompressedOperation())
1587 transcodeWrite(*cmdPool);
1588 else
1589 transcodeRead(*cmdPool);
1590
1591 for (deUint32 levelNdx = 0; levelNdx < getLevelCount(); ++levelNdx)
1592 for (deUint32 layerNdx = 0; layerNdx < getLayerCount(); ++layerNdx)
1593 if (isWriteToCompressedOperation())
1594 {
1595 if (!verifyDecompression(*cmdPool, *m_srcData[levelNdx][layerNdx], m_compressedImage, levelNdx, layerNdx, m_compressedImageResVec[levelNdx]))
1596 return TestStatus::fail("Images difference detected");
1597 }
1598 else
1599 {
1600 if (!verifyDecompression(*cmdPool, *m_dstData[levelNdx][layerNdx], m_compressedImage, levelNdx, layerNdx, m_compressedImageResVec[levelNdx]))
1601 return TestStatus::fail("Images difference detected");
1602 }
1603
1604 if (m_bASTCErrorColourMismatch)
1605 {
1606 DE_ASSERT(m_parameters.formatIsASTC);
1607 return TestStatusASTCQualityWarning();
1608 }
1609
1610 return TestStatus::pass("Pass");
1611 }
1612
prepareData()1613 void GraphicsAttachmentsTestInstance::prepareData ()
1614 {
1615 VkImageViewUsageCreateInfo* imageViewUsageKHRNull = (VkImageViewUsageCreateInfo*)DE_NULL;
1616
1617 m_imageViewUsageKHR = makeImageViewUsageCreateInfo(m_parameters.compressedImageViewUsage);
1618
1619 m_srcImageViewUsageKHR = isWriteToCompressedOperation() ? imageViewUsageKHRNull : &m_imageViewUsageKHR;
1620 m_dstImageViewUsageKHR = isWriteToCompressedOperation() ? &m_imageViewUsageKHR : imageViewUsageKHRNull;
1621
1622 m_srcFormat = isWriteToCompressedOperation() ? m_parameters.formatUncompressed : m_parameters.formatCompressed;
1623 m_dstFormat = isWriteToCompressedOperation() ? m_parameters.formatCompressed : m_parameters.formatUncompressed;
1624
1625 m_srcImageUsageFlags = isWriteToCompressedOperation() ? m_parameters.uncompressedImageUsage : m_parameters.compressedImageUsage;
1626 m_dstImageUsageFlags = isWriteToCompressedOperation() ? m_parameters.compressedImageUsage : m_parameters.uncompressedImageUsage;
1627
1628 m_compressedImageResVec = getMipLevelSizes(getLayerDims());
1629 m_uncompressedImageResVec = getCompressedMipLevelSizes(m_parameters.formatCompressed, m_compressedImageResVec);
1630
1631 m_srcImageResolutions = isWriteToCompressedOperation() ? m_uncompressedImageResVec : m_compressedImageResVec;
1632 m_dstImageResolutions = isWriteToCompressedOperation() ? m_compressedImageResVec : m_uncompressedImageResVec;
1633
1634 m_srcData.resize(getLevelCount());
1635 m_dstData.resize(getLevelCount());
1636 m_uncompressedImages.resize(getLevelCount());
1637
1638 for (deUint32 levelNdx = 0; levelNdx < getLevelCount(); ++levelNdx)
1639 {
1640 m_srcData[levelNdx].resize(getLayerCount());
1641 m_dstData[levelNdx].resize(getLayerCount());
1642 m_uncompressedImages[levelNdx].resize(getLayerCount());
1643
1644 for (deUint32 layerNdx = 0; layerNdx < getLayerCount(); ++layerNdx)
1645 {
1646 m_srcData[levelNdx][layerNdx] = SharedPtr<RawDataVector>(new RawDataVector);
1647 m_dstData[levelNdx][layerNdx] = SharedPtr<RawDataVector>(new RawDataVector);
1648
1649 if (isWriteToCompressedOperation())
1650 {
1651 getUncompressedImageData(m_srcFormat, m_srcImageResolutions[levelNdx], *m_srcData[levelNdx][layerNdx], layerNdx, levelNdx);
1652
1653 m_dstData[levelNdx][layerNdx]->resize((size_t)getCompressedImageSizeInBytes(m_dstFormat, m_dstImageResolutions[levelNdx]));
1654 }
1655 else
1656 {
1657 getCompressedImageData(m_srcFormat, m_srcImageResolutions[levelNdx], *m_srcData[levelNdx][layerNdx], layerNdx, levelNdx);
1658
1659 m_dstData[levelNdx][layerNdx]->resize((size_t)getUncompressedImageSizeInBytes(m_dstFormat, m_dstImageResolutions[levelNdx]));
1660 }
1661
1662 DE_ASSERT(m_srcData[levelNdx][layerNdx]->size() == m_dstData[levelNdx][layerNdx]->size());
1663 }
1664 }
1665 }
1666
prepareVertexBuffer()1667 void GraphicsAttachmentsTestInstance::prepareVertexBuffer ()
1668 {
1669 const DeviceInterface& vk = m_context.getDeviceInterface();
1670 const VkDevice device = m_context.getDevice();
1671 Allocator& allocator = m_context.getDefaultAllocator();
1672
1673 const std::vector<tcu::Vec4> vertexArray = createFullscreenQuad();
1674 const size_t vertexBufferSizeInBytes = vertexArray.size() * sizeof(vertexArray[0]);
1675
1676 m_vertexCount = static_cast<deUint32>(vertexArray.size());
1677 m_vertexBuffer = MovePtr<BufferWithMemory>(new BufferWithMemory(vk, device, allocator, makeBufferCreateInfo(vertexBufferSizeInBytes, VK_BUFFER_USAGE_VERTEX_BUFFER_BIT), MemoryRequirement::HostVisible));
1678
1679 // Upload vertex data
1680 const Allocation& vertexBufferAlloc = m_vertexBuffer->getAllocation();
1681 deMemcpy(vertexBufferAlloc.getHostPtr(), &vertexArray[0], vertexBufferSizeInBytes);
1682 flushAlloc(vk, device, vertexBufferAlloc);
1683 }
1684
// Transcode by reading: the compressed image (viewed through an uncompressed
// format) is the source. For each (level, layer) subresource: upload reference
// data with a buffer-to-image copy, render a fullscreen quad with that view
// bound as an input attachment writing into a single-level uncompressed color
// attachment, then copy the result back into m_dstData. The barrier/layout
// sequence below is order-critical.
void GraphicsAttachmentsTestInstance::transcodeRead (const VkCommandPool& cmdPool)
{
	const DeviceInterface& vk = m_context.getDeviceInterface();
	const VkDevice device = m_context.getDevice();
	const VkQueue queue = m_context.getUniversalQueue();
	Allocator& allocator = m_context.getDefaultAllocator();

	// DE_NULL: let makeCreateImageInfo() choose the default mutable-format flags.
	const VkImageCreateFlags* imgCreateFlagsOverride = DE_NULL;

	// One source image holds the full mip chain and all array layers.
	const VkImageCreateInfo srcImageCreateInfo = makeCreateImageInfo(m_srcFormat, m_parameters.imageType, m_srcImageResolutions[0], m_srcImageUsageFlags, imgCreateFlagsOverride, getLevelCount(), getLayerCount());
	MovePtr<Image> srcImage (new Image(vk, device, allocator, srcImageCreateInfo, MemoryRequirement::Any));

	const Unique<VkShaderModule> vertShaderModule (createShaderModule(vk, device, m_context.getBinaryCollection().get("vert"), 0));
	const Unique<VkShaderModule> fragShaderModule (createShaderModule(vk, device, m_context.getBinaryCollection().get("frag"), 0));

	// Both attachments use the uncompressed view format.
	const Unique<VkRenderPass> renderPass (vkt::image::makeRenderPass(vk, device, m_parameters.formatUncompressed, m_parameters.formatUncompressed));

	const Move<VkDescriptorSetLayout> descriptorSetLayout (DescriptorSetLayoutBuilder()
		.addSingleBinding(VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, VK_SHADER_STAGE_FRAGMENT_BIT)
		.build(vk, device));
	const Move<VkDescriptorPool> descriptorPool (DescriptorPoolBuilder()
		.addType(VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT)
		.build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u));
	const Move<VkDescriptorSet> descriptorSet (makeDescriptorSet(vk, device, *descriptorPool, *descriptorSetLayout));

	// Render size is dynamic state; the pipeline is created with a dummy extent.
	const VkExtent2D renderSizeUnused (makeExtent2D(1u, 1u));
	const Unique<VkPipelineLayout> pipelineLayout (makePipelineLayout(vk, device, *descriptorSetLayout));
	const Unique<VkPipeline> pipeline (makeGraphicsPipeline(vk, device, *pipelineLayout, *renderPass, *vertShaderModule, *fragShaderModule, renderSizeUnused, 1u, true));

	const Unique<VkCommandBuffer> cmdBuffer (allocateCommandBuffer(vk, device, cmdPool, VK_COMMAND_BUFFER_LEVEL_PRIMARY));

	for (deUint32 levelNdx = 0; levelNdx < getLevelCount(); ++levelNdx)
	{
		const UVec3& uncompressedImageRes = m_uncompressedImageResVec[levelNdx];
		const UVec3& srcImageResolution = m_srcImageResolutions[levelNdx];
		const UVec3& dstImageResolution = m_dstImageResolutions[levelNdx];
		const size_t srcImageSizeInBytes = m_srcData[levelNdx][0]->size();
		const size_t dstImageSizeInBytes = m_dstData[levelNdx][0]->size();
		// Source extent rounded up to whole compressed blocks for the copy region.
		const UVec3 srcImageResBlocked = getCompressedImageResolutionBlockCeil(m_parameters.formatCompressed, srcImageResolution);

		const VkImageCreateInfo dstImageCreateInfo = makeCreateImageInfo(m_dstFormat, m_parameters.imageType, dstImageResolution, m_dstImageUsageFlags, imgCreateFlagsOverride, SINGLE_LEVEL, SINGLE_LAYER);

		const VkBufferCreateInfo srcImageBufferInfo = makeBufferCreateInfo(srcImageSizeInBytes, VK_BUFFER_USAGE_TRANSFER_SRC_BIT);
		const MovePtr<BufferWithMemory> srcImageBuffer = MovePtr<BufferWithMemory>(new BufferWithMemory(vk, device, allocator, srcImageBufferInfo, MemoryRequirement::HostVisible));

		const VkBufferCreateInfo dstImageBufferInfo = makeBufferCreateInfo(dstImageSizeInBytes, VK_BUFFER_USAGE_TRANSFER_DST_BIT);
		MovePtr<BufferWithMemory> dstImageBuffer = MovePtr<BufferWithMemory>(new BufferWithMemory(vk, device, allocator, dstImageBufferInfo, MemoryRequirement::HostVisible));

		const VkExtent2D renderSize (makeExtent2D(uncompressedImageRes.x(), uncompressedImageRes.y()));
		const VkViewport viewport = makeViewport(renderSize);
		const VkRect2D scissor = makeRect2D(renderSize);

		for (deUint32 layerNdx = 0; layerNdx < getLayerCount(); ++layerNdx)
		{
			// Source view selects one (level, layer) of the shared compressed image;
			// destination is a fresh single-subresource image each iteration.
			const VkImageSubresourceRange srcSubresourceRange = makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, levelNdx, SINGLE_LEVEL, layerNdx, SINGLE_LAYER);
			const VkImageSubresourceRange dstSubresourceRange = makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, SINGLE_LEVEL, 0u, SINGLE_LAYER);

			Move<VkImageView> srcImageView (makeImageView(vk, device, srcImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatUncompressed, srcSubresourceRange, m_srcImageViewUsageKHR));

			de::MovePtr<Image> dstImage (new Image(vk, device, allocator, dstImageCreateInfo, MemoryRequirement::Any));
			Move<VkImageView> dstImageView (makeImageView(vk, device, dstImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatUncompressed, dstSubresourceRange, m_dstImageViewUsageKHR));

			const VkBufferImageCopy srcCopyRegion = makeBufferImageCopy(srcImageResolution.x(), srcImageResolution.y(), levelNdx, layerNdx, srcImageResBlocked.x(), srcImageResBlocked.y());
			const VkBufferMemoryBarrier srcCopyBufferBarrierPre = makeBufferMemoryBarrier(VK_ACCESS_HOST_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT, srcImageBuffer->get(), 0ull, srcImageSizeInBytes);
			const VkImageMemoryBarrier srcCopyImageBarrierPre = makeImageMemoryBarrier(0u, VK_ACCESS_TRANSFER_WRITE_BIT, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, srcImage->get(), srcSubresourceRange);
			const VkImageMemoryBarrier srcCopyImageBarrierPost = makeImageMemoryBarrier(VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_SHADER_READ_BIT, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL, srcImage->get(), srcSubresourceRange);
			const VkBufferImageCopy dstCopyRegion = makeBufferImageCopy(dstImageResolution.x(), dstImageResolution.y());
			const VkImageMemoryBarrier dstInitImageBarrier = makeImageMemoryBarrier(0u, VK_ACCESS_SHADER_READ_BIT, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, dstImage->get(), dstSubresourceRange);

			const VkImageView attachmentBindInfos[] = { *srcImageView, *dstImageView };
			const VkExtent2D framebufferSize (makeExtent2D(dstImageResolution[0], dstImageResolution[1]));
			const Move<VkFramebuffer> framebuffer (makeFramebuffer(vk, device, *renderPass, DE_LENGTH_OF_ARRAY(attachmentBindInfos), attachmentBindInfos, framebufferSize.width, framebufferSize.height, SINGLE_LAYER));

			// Upload source image data
			const Allocation& alloc = srcImageBuffer->getAllocation();
			deMemcpy(alloc.getHostPtr(), &m_srcData[levelNdx][layerNdx]->at(0), srcImageSizeInBytes);
			flushAlloc(vk, device, alloc);

			beginCommandBuffer(vk, *cmdBuffer);
			vk.cmdBindPipeline(*cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *pipeline);

			// Copy buffer to image
			vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1u, &srcCopyBufferBarrierPre, 1u, &srcCopyImageBarrierPre);
			vk.cmdCopyBufferToImage(*cmdBuffer, srcImageBuffer->get(), srcImage->get(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &srcCopyRegion);
			vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, DE_NULL, 1u, &srcCopyImageBarrierPost);

			// Define destination image layout
			vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, DE_NULL, 1u, &dstInitImageBarrier);

			beginRenderPass(vk, *cmdBuffer, *renderPass, *framebuffer, renderSize);

			// Bind the current source subresource view as the input attachment.
			const VkDescriptorImageInfo descriptorSrcImageInfo(makeDescriptorImageInfo(DE_NULL, *srcImageView, VK_IMAGE_LAYOUT_GENERAL));
			DescriptorSetUpdateBuilder()
				.writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u), VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, &descriptorSrcImageInfo)
				.update(vk, device);

			vk.cmdBindDescriptorSets(*cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *pipelineLayout, 0u, 1u, &descriptorSet.get(), 0u, DE_NULL);
			vk.cmdBindVertexBuffers(*cmdBuffer, 0u, 1u, &m_vertexBuffer->get(), &m_vertexBufferOffset);

			vk.cmdSetViewport(*cmdBuffer, 0u, 1u, &viewport);
			vk.cmdSetScissor(*cmdBuffer, 0u, 1u, &scissor);

			vk.cmdDraw(*cmdBuffer, (deUint32)m_vertexCount, 1, 0, 0);

			endRenderPass(vk, *cmdBuffer);

			// Make the rendered result available to the transfer stage, copy it
			// to the host-visible buffer, then make the buffer host-readable.
			const VkImageMemoryBarrier prepareForTransferBarrier = makeImageMemoryBarrier(
				VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
				VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_GENERAL,
				dstImage->get(), dstSubresourceRange);

			const VkBufferMemoryBarrier copyBarrier = makeBufferMemoryBarrier(
				VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_HOST_READ_BIT,
				dstImageBuffer->get(), 0ull, dstImageSizeInBytes);

			vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0, (const VkBufferMemoryBarrier*)DE_NULL, 1, &prepareForTransferBarrier);
			vk.cmdCopyImageToBuffer(*cmdBuffer, dstImage->get(), VK_IMAGE_LAYOUT_GENERAL, dstImageBuffer->get(), 1u, &dstCopyRegion);
			vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_HOST_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1, &copyBarrier, 0, (const VkImageMemoryBarrier*)DE_NULL);

			endCommandBuffer(vk, *cmdBuffer);

			submitCommandsAndWait(vk, device, queue, *cmdBuffer);
			m_context.resetCommandPoolForVKSC(device, cmdPool);

			// Read back the transcoded texels for later verification.
			const Allocation& dstImageBufferAlloc = dstImageBuffer->getAllocation();
			invalidateAlloc(vk, device, dstImageBufferAlloc);
			deMemcpy(&m_dstData[levelNdx][layerNdx]->at(0), dstImageBufferAlloc.getHostPtr(), dstImageSizeInBytes);
		}
	}

	// Keep the compressed image alive; iterate() verifies against it.
	m_compressedImage = srcImage;
}
1817
// Transcode by writing: the compressed image (viewed through an uncompressed
// format) is the render target. For each (level, layer) subresource: upload
// reference data into a fresh single-subresource source image, render a
// fullscreen quad reading it as an input attachment into the selected
// subresource of the shared compressed image, then copy that subresource back
// into m_dstData. The barrier/layout sequence below is order-critical.
void GraphicsAttachmentsTestInstance::transcodeWrite (const VkCommandPool& cmdPool)
{
	const DeviceInterface& vk = m_context.getDeviceInterface();
	const VkDevice device = m_context.getDevice();
	const VkQueue queue = m_context.getUniversalQueue();
	Allocator& allocator = m_context.getDefaultAllocator();

	// DE_NULL: let makeCreateImageInfo() choose the default mutable-format flags.
	const VkImageCreateFlags* imgCreateFlagsOverride = DE_NULL;

	// One destination image holds the full mip chain and all array layers.
	const VkImageCreateInfo dstImageCreateInfo = makeCreateImageInfo(m_dstFormat, m_parameters.imageType, m_dstImageResolutions[0], m_dstImageUsageFlags, imgCreateFlagsOverride, getLevelCount(), getLayerCount());
	MovePtr<Image> dstImage (new Image(vk, device, allocator, dstImageCreateInfo, MemoryRequirement::Any));

	const Unique<VkShaderModule> vertShaderModule (createShaderModule(vk, device, m_context.getBinaryCollection().get("vert"), 0));
	const Unique<VkShaderModule> fragShaderModule (createShaderModule(vk, device, m_context.getBinaryCollection().get("frag"), 0));

	// Both attachments use the uncompressed view format.
	const Unique<VkRenderPass> renderPass (vkt::image::makeRenderPass(vk, device, m_parameters.formatUncompressed, m_parameters.formatUncompressed));

	const Move<VkDescriptorSetLayout> descriptorSetLayout (DescriptorSetLayoutBuilder()
		.addSingleBinding(VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, VK_SHADER_STAGE_FRAGMENT_BIT)
		.build(vk, device));
	const Move<VkDescriptorPool> descriptorPool (DescriptorPoolBuilder()
		.addType(VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT)
		.build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u));
	const Move<VkDescriptorSet> descriptorSet (makeDescriptorSet(vk, device, *descriptorPool, *descriptorSetLayout));

	// Render size is dynamic state; the pipeline is created with a dummy extent.
	const VkExtent2D renderSizeUnused (makeExtent2D(1u, 1u));
	const Unique<VkPipelineLayout> pipelineLayout (makePipelineLayout(vk, device, *descriptorSetLayout));
	const Unique<VkPipeline> pipeline (makeGraphicsPipeline(vk, device, *pipelineLayout, *renderPass, *vertShaderModule, *fragShaderModule, renderSizeUnused, 1u, true));

	const Unique<VkCommandBuffer> cmdBuffer (allocateCommandBuffer(vk, device, cmdPool, VK_COMMAND_BUFFER_LEVEL_PRIMARY));

	for (deUint32 levelNdx = 0; levelNdx < getLevelCount(); ++levelNdx)
	{
		const UVec3& uncompressedImageRes = m_uncompressedImageResVec[levelNdx];
		const UVec3& srcImageResolution = m_srcImageResolutions[levelNdx];
		const UVec3& dstImageResolution = m_dstImageResolutions[levelNdx];
		// Destination extent rounded up to whole compressed blocks for the copy region.
		const UVec3 dstImageResBlocked = getCompressedImageResolutionBlockCeil(m_parameters.formatCompressed, dstImageResolution);
		const size_t srcImageSizeInBytes = m_srcData[levelNdx][0]->size();
		const size_t dstImageSizeInBytes = m_dstData[levelNdx][0]->size();

		const VkImageCreateInfo srcImageCreateInfo = makeCreateImageInfo(m_srcFormat, m_parameters.imageType, srcImageResolution, m_srcImageUsageFlags, imgCreateFlagsOverride, SINGLE_LEVEL, SINGLE_LAYER);

		const VkExtent2D renderSize (makeExtent2D(uncompressedImageRes.x(), uncompressedImageRes.y()));
		const VkViewport viewport = makeViewport(renderSize);
		const VkRect2D scissor = makeRect2D(renderSize);

		for (deUint32 layerNdx = 0; layerNdx < getLayerCount(); ++layerNdx)
		{
			const VkBufferCreateInfo srcImageBufferInfo = makeBufferCreateInfo(srcImageSizeInBytes, VK_BUFFER_USAGE_TRANSFER_SRC_BIT);
			const MovePtr<BufferWithMemory> srcImageBuffer = MovePtr<BufferWithMemory>(new BufferWithMemory(vk, device, allocator, srcImageBufferInfo, MemoryRequirement::HostVisible));

			const VkBufferCreateInfo dstImageBufferInfo = makeBufferCreateInfo(dstImageSizeInBytes, VK_BUFFER_USAGE_TRANSFER_DST_BIT);
			MovePtr<BufferWithMemory> dstImageBuffer = MovePtr<BufferWithMemory>(new BufferWithMemory(vk, device, allocator, dstImageBufferInfo, MemoryRequirement::HostVisible));

			// Source is a fresh single-subresource image; destination view selects
			// one (level, layer) of the shared compressed image.
			const VkImageSubresourceRange srcSubresourceRange = makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, SINGLE_LEVEL, 0u, SINGLE_LAYER);
			const VkImageSubresourceRange dstSubresourceRange = makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, levelNdx, SINGLE_LEVEL, layerNdx, SINGLE_LAYER);

			Move<VkImageView> dstImageView (makeImageView(vk, device, dstImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatUncompressed, dstSubresourceRange, m_dstImageViewUsageKHR));

			de::MovePtr<Image> srcImage (new Image(vk, device, allocator, srcImageCreateInfo, MemoryRequirement::Any));
			Move<VkImageView> srcImageView (makeImageView(vk, device, srcImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatUncompressed, srcSubresourceRange, m_srcImageViewUsageKHR));

			const VkBufferImageCopy srcCopyRegion = makeBufferImageCopy(srcImageResolution.x(), srcImageResolution.y(), 0u, 0u);
			const VkBufferMemoryBarrier srcCopyBufferBarrierPre = makeBufferMemoryBarrier(VK_ACCESS_HOST_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT, srcImageBuffer->get(), 0ull, srcImageSizeInBytes);
			const VkImageMemoryBarrier srcCopyImageBarrierPre = makeImageMemoryBarrier(0u, VK_ACCESS_TRANSFER_WRITE_BIT, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, srcImage->get(), srcSubresourceRange);
			const VkImageMemoryBarrier srcCopyImageBarrierPost = makeImageMemoryBarrier(VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_INPUT_ATTACHMENT_READ_BIT, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL, srcImage->get(), srcSubresourceRange);
			const VkBufferImageCopy dstCopyRegion = makeBufferImageCopy(dstImageResolution.x(), dstImageResolution.y(), levelNdx, layerNdx, dstImageResBlocked.x(), dstImageResBlocked.y());
			const VkImageMemoryBarrier dstInitImageBarrier = makeImageMemoryBarrier(0u, VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL, dstImage->get(), dstSubresourceRange);

			const VkImageView attachmentBindInfos[] = { *srcImageView, *dstImageView };
			const VkExtent2D framebufferSize (renderSize);
			const Move<VkFramebuffer> framebuffer (makeFramebuffer(vk, device, *renderPass, DE_LENGTH_OF_ARRAY(attachmentBindInfos), attachmentBindInfos, framebufferSize.width, framebufferSize.height, SINGLE_LAYER));

			// Upload source image data
			const Allocation& alloc = srcImageBuffer->getAllocation();
			deMemcpy(alloc.getHostPtr(), &m_srcData[levelNdx][layerNdx]->at(0), srcImageSizeInBytes);
			flushAlloc(vk, device, alloc);

			beginCommandBuffer(vk, *cmdBuffer);
			vk.cmdBindPipeline(*cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *pipeline);

			// Copy buffer to image
			vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1u, &srcCopyBufferBarrierPre, 1u, &srcCopyImageBarrierPre);
			vk.cmdCopyBufferToImage(*cmdBuffer, srcImageBuffer->get(), srcImage->get(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &srcCopyRegion);
			vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, DE_NULL, 1u, &srcCopyImageBarrierPost);

			// Define destination image layout
			vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, DE_NULL, 1u, &dstInitImageBarrier);

			beginRenderPass(vk, *cmdBuffer, *renderPass, *framebuffer, renderSize);

			// Bind the uploaded source view as the input attachment.
			const VkDescriptorImageInfo descriptorSrcImageInfo(makeDescriptorImageInfo(DE_NULL, *srcImageView, VK_IMAGE_LAYOUT_GENERAL));
			DescriptorSetUpdateBuilder()
				.writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u), VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, &descriptorSrcImageInfo)
				.update(vk, device);

			vk.cmdBindDescriptorSets(*cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *pipelineLayout, 0u, 1u, &descriptorSet.get(), 0u, DE_NULL);
			vk.cmdBindVertexBuffers(*cmdBuffer, 0u, 1u, &m_vertexBuffer->get(), &m_vertexBufferOffset);

			vk.cmdSetViewport(*cmdBuffer, 0u, 1u, &viewport);
			vk.cmdSetScissor(*cmdBuffer, 0u, 1u, &scissor);

			vk.cmdDraw(*cmdBuffer, (deUint32)m_vertexCount, 1, 0, 0);

			endRenderPass(vk, *cmdBuffer);

			// Make the rendered subresource available to the transfer stage, copy
			// it to the host-visible buffer, then make the buffer host-readable.
			const VkImageMemoryBarrier prepareForTransferBarrier = makeImageMemoryBarrier(
				VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
				VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_GENERAL,
				dstImage->get(), dstSubresourceRange);

			const VkBufferMemoryBarrier copyBarrier = makeBufferMemoryBarrier(
				VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_HOST_READ_BIT,
				dstImageBuffer->get(), 0ull, dstImageSizeInBytes);

			vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0, (const VkBufferMemoryBarrier*)DE_NULL, 1, &prepareForTransferBarrier);
			vk.cmdCopyImageToBuffer(*cmdBuffer, dstImage->get(), VK_IMAGE_LAYOUT_GENERAL, dstImageBuffer->get(), 1u, &dstCopyRegion);
			vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_HOST_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1, &copyBarrier, 0, (const VkImageMemoryBarrier*)DE_NULL);

			endCommandBuffer(vk, *cmdBuffer);

			submitCommandsAndWait(vk, device, queue, *cmdBuffer);
			m_context.resetCommandPoolForVKSC(device, cmdPool);

			// Read back the written (compressed) subresource for verification.
			const Allocation& dstImageBufferAlloc = dstImageBuffer->getAllocation();
			invalidateAlloc(vk, device, dstImageBufferAlloc);
			deMemcpy(&m_dstData[levelNdx][layerNdx]->at(0), dstImageBufferAlloc.getHostPtr(), dstImageSizeInBytes);
		}
	}

	// Keep the compressed image alive; iterate() verifies against it.
	m_compressedImage = dstImage;
}
1950
isWriteToCompressedOperation()1951 bool GraphicsAttachmentsTestInstance::isWriteToCompressedOperation ()
1952 {
1953 return (m_parameters.operation == OPERATION_ATTACHMENT_WRITE);
1954 }
1955
makeCreateImageInfo(const VkFormat format,const ImageType type,const UVec3 & size,const VkImageUsageFlags usageFlags,const VkImageCreateFlags * createFlags,const deUint32 levels,const deUint32 layers)1956 VkImageCreateInfo GraphicsAttachmentsTestInstance::makeCreateImageInfo (const VkFormat format,
1957 const ImageType type,
1958 const UVec3& size,
1959 const VkImageUsageFlags usageFlags,
1960 const VkImageCreateFlags* createFlags,
1961 const deUint32 levels,
1962 const deUint32 layers)
1963 {
1964 const VkImageType imageType = mapImageType(type);
1965 const VkImageCreateFlags imageCreateFlagsBase = VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT;
1966 const VkImageCreateFlags imageCreateFlagsAddOn = isCompressedFormat(format) ? VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT | VK_IMAGE_CREATE_EXTENDED_USAGE_BIT : 0;
1967 const VkImageCreateFlags imageCreateFlags = (createFlags != DE_NULL) ? *createFlags : (imageCreateFlagsBase | imageCreateFlagsAddOn);
1968
1969 VkFormatProperties properties;
1970 m_context.getInstanceInterface().getPhysicalDeviceFormatProperties(m_context.getPhysicalDevice(), format, &properties);
1971 if ((usageFlags & VK_IMAGE_USAGE_STORAGE_BIT) && !(properties.optimalTilingFeatures & VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT))
1972 TCU_THROW(NotSupportedError, "Format storage feature not supported");
1973 if ((usageFlags & VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT) && !(properties.optimalTilingFeatures & VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT))
1974 TCU_THROW(NotSupportedError, "Format color attachment feature not supported");
1975 if ((usageFlags & VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT) && !(properties.optimalTilingFeatures & VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT) &&
1976 !(properties.optimalTilingFeatures & VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT))
1977 TCU_THROW(NotSupportedError, "Format color/depth/stencil attachment feature not supported for input attachment usage");
1978
1979 const VkImageCreateInfo createImageInfo =
1980 {
1981 VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO, // VkStructureType sType;
1982 DE_NULL, // const void* pNext;
1983 imageCreateFlags, // VkImageCreateFlags flags;
1984 imageType, // VkImageType imageType;
1985 format, // VkFormat format;
1986 makeExtent3D(getLayerSize(type, size)), // VkExtent3D extent;
1987 levels, // deUint32 mipLevels;
1988 layers, // deUint32 arrayLayers;
1989 VK_SAMPLE_COUNT_1_BIT, // VkSampleCountFlagBits samples;
1990 VK_IMAGE_TILING_OPTIMAL, // VkImageTiling tiling;
1991 usageFlags, // VkImageUsageFlags usage;
1992 VK_SHARING_MODE_EXCLUSIVE, // VkSharingMode sharingMode;
1993 0u, // deUint32 queueFamilyIndexCount;
1994 DE_NULL, // const deUint32* pQueueFamilyIndices;
1995 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout initialLayout;
1996 };
1997
1998 return createImageInfo;
1999 }
2000
getCompressedImageData(const VkFormat format,const UVec3 & size,std::vector<deUint8> & data,const deUint32 layer,const deUint32 level)2001 VkDeviceSize GraphicsAttachmentsTestInstance::getCompressedImageData (const VkFormat format,
2002 const UVec3& size,
2003 std::vector<deUint8>& data,
2004 const deUint32 layer,
2005 const deUint32 level)
2006 {
2007 VkDeviceSize sizeBytes = getCompressedImageSizeInBytes(format, size);
2008
2009 data.resize((size_t)sizeBytes);
2010 generateData(&data[0], data.size(), format, layer, level);
2011
2012 return sizeBytes;
2013 }
2014
getUncompressedImageData(const VkFormat format,const UVec3 & size,std::vector<deUint8> & data,const deUint32 layer,const deUint32 level)2015 VkDeviceSize GraphicsAttachmentsTestInstance::getUncompressedImageData (const VkFormat format,
2016 const UVec3& size,
2017 std::vector<deUint8>& data,
2018 const deUint32 layer,
2019 const deUint32 level)
2020 {
2021 tcu::IVec3 sizeAsIVec3 = tcu::IVec3(static_cast<int>(size[0]), static_cast<int>(size[1]), static_cast<int>(size[2]));
2022 VkDeviceSize sizeBytes = getImageSizeBytes(sizeAsIVec3, format);
2023
2024 data.resize((size_t)sizeBytes);
2025 generateData(&data[0], data.size(), format, layer, level);
2026
2027 return sizeBytes;
2028 }
2029
verifyDecompression(const VkCommandPool & cmdPool,const std::vector<deUint8> & refCompressedData,const de::MovePtr<Image> & resCompressedImage,const deUint32 level,const deUint32 layer,const UVec3 & mipmapDims)2030 bool GraphicsAttachmentsTestInstance::verifyDecompression (const VkCommandPool& cmdPool,
2031 const std::vector<deUint8>& refCompressedData,
2032 const de::MovePtr<Image>& resCompressedImage,
2033 const deUint32 level,
2034 const deUint32 layer,
2035 const UVec3& mipmapDims)
2036 {
2037 const DeviceInterface& vk = m_context.getDeviceInterface();
2038 const VkDevice device = m_context.getDevice();
2039 const VkQueue queue = m_context.getUniversalQueue();
2040 Allocator& allocator = m_context.getDefaultAllocator();
2041
2042 const bool layoutShaderReadOnly = (layer % 2u) == 1;
2043 const UVec3 mipmapDimsBlocked = getCompressedImageResolutionBlockCeil(m_parameters.formatCompressed, mipmapDims);
2044
2045 const VkImageSubresourceRange subresourceRange = makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, SINGLE_LEVEL, 0u, SINGLE_LAYER);
2046 const VkImageSubresourceRange resSubresourceRange = makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, level, SINGLE_LEVEL, layer, SINGLE_LAYER);
2047
2048 const VkDeviceSize dstBufferSize = getUncompressedImageSizeInBytes(m_parameters.formatForVerify, mipmapDims);
2049 const VkImageUsageFlags refSrcImageUsageFlags = VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
2050
2051 const VkBufferCreateInfo refSrcImageBufferInfo (makeBufferCreateInfo(refCompressedData.size(), VK_BUFFER_USAGE_TRANSFER_SRC_BIT));
2052 const MovePtr<BufferWithMemory> refSrcImageBuffer = MovePtr<BufferWithMemory>(new BufferWithMemory(vk, device, allocator, refSrcImageBufferInfo, MemoryRequirement::HostVisible));
2053
2054 const VkImageCreateFlags refSrcImageCreateFlags = 0;
2055 const VkImageCreateInfo refSrcImageCreateInfo = makeCreateImageInfo(m_parameters.formatCompressed, m_parameters.imageType, mipmapDimsBlocked, refSrcImageUsageFlags, &refSrcImageCreateFlags, SINGLE_LEVEL, SINGLE_LAYER);
2056 const MovePtr<Image> refSrcImage (new Image(vk, device, allocator, refSrcImageCreateInfo, MemoryRequirement::Any));
2057 Move<VkImageView> refSrcImageView (makeImageView(vk, device, refSrcImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatCompressed, subresourceRange));
2058
2059 const VkImageUsageFlags resSrcImageUsageFlags = VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
2060 const VkImageViewUsageCreateInfo resSrcImageViewUsageKHR = makeImageViewUsageCreateInfo(resSrcImageUsageFlags);
2061 Move<VkImageView> resSrcImageView (makeImageView(vk, device, resCompressedImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatCompressed, resSubresourceRange, &resSrcImageViewUsageKHR));
2062
2063 const VkImageCreateFlags refDstImageCreateFlags = 0;
2064 const VkImageUsageFlags refDstImageUsageFlags = VK_IMAGE_USAGE_STORAGE_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
2065 const VkImageCreateInfo refDstImageCreateInfo = makeCreateImageInfo(m_parameters.formatForVerify, m_parameters.imageType, mipmapDims, refDstImageUsageFlags, &refDstImageCreateFlags, SINGLE_LEVEL, SINGLE_LAYER);
2066 const MovePtr<Image> refDstImage (new Image(vk, device, allocator, refDstImageCreateInfo, MemoryRequirement::Any));
2067 const Move<VkImageView> refDstImageView (makeImageView(vk, device, refDstImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatForVerify, subresourceRange));
2068 const VkImageMemoryBarrier refDstInitImageBarrier = makeImageMemoryBarrier(0u, VK_ACCESS_SHADER_WRITE_BIT, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL, refDstImage->get(), subresourceRange);
2069 const VkBufferCreateInfo refDstBufferInfo (makeBufferCreateInfo(dstBufferSize, VK_BUFFER_USAGE_TRANSFER_DST_BIT));
2070 const MovePtr<BufferWithMemory> refDstBuffer = MovePtr<BufferWithMemory>(new BufferWithMemory(vk, device, allocator, refDstBufferInfo, MemoryRequirement::HostVisible));
2071
2072 const VkImageCreateFlags resDstImageCreateFlags = 0;
2073 const VkImageUsageFlags resDstImageUsageFlags = VK_IMAGE_USAGE_STORAGE_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
2074 const VkImageCreateInfo resDstImageCreateInfo = makeCreateImageInfo(m_parameters.formatForVerify, m_parameters.imageType, mipmapDims, resDstImageUsageFlags, &resDstImageCreateFlags, SINGLE_LEVEL, SINGLE_LAYER);
2075 const MovePtr<Image> resDstImage (new Image(vk, device, allocator, resDstImageCreateInfo, MemoryRequirement::Any));
2076 const Move<VkImageView> resDstImageView (makeImageView(vk, device, resDstImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatForVerify, subresourceRange));
2077 const VkImageMemoryBarrier resDstInitImageBarrier = makeImageMemoryBarrier(0u, VK_ACCESS_SHADER_WRITE_BIT, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL, resDstImage->get(), subresourceRange);
2078 const VkBufferCreateInfo resDstBufferInfo (makeBufferCreateInfo(dstBufferSize, VK_BUFFER_USAGE_TRANSFER_DST_BIT));
2079 const MovePtr<BufferWithMemory> resDstBuffer = MovePtr<BufferWithMemory>(new BufferWithMemory(vk, device, allocator, resDstBufferInfo, MemoryRequirement::HostVisible));
2080
2081 const Unique<VkShaderModule> vertShaderModule (createShaderModule(vk, device, m_context.getBinaryCollection().get("vert"), 0));
2082 const Unique<VkShaderModule> fragShaderModule (createShaderModule(vk, device, m_context.getBinaryCollection().get("frag_verify"), 0));
2083
2084 const Unique<VkRenderPass> renderPass (vk::makeRenderPass(vk, device));
2085
2086 const Move<VkDescriptorSetLayout> descriptorSetLayout (DescriptorSetLayoutBuilder()
2087 .addSingleBinding(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, VK_SHADER_STAGE_FRAGMENT_BIT)
2088 .addSingleBinding(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, VK_SHADER_STAGE_FRAGMENT_BIT)
2089 .addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, VK_SHADER_STAGE_FRAGMENT_BIT)
2090 .addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, VK_SHADER_STAGE_FRAGMENT_BIT)
2091 .build(vk, device));
2092 const Move<VkDescriptorPool> descriptorPool (DescriptorPoolBuilder()
2093 .addType(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER)
2094 .addType(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER)
2095 .addType(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE)
2096 .addType(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE)
2097 .build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u));
2098 const Move<VkDescriptorSet> descriptorSet (makeDescriptorSet(vk, device, *descriptorPool, *descriptorSetLayout));
2099 const VkSamplerCreateInfo refSrcSamplerInfo (makeSamplerCreateInfo());
2100 const Move<VkSampler> refSrcSampler = vk::createSampler(vk, device, &refSrcSamplerInfo);
2101 const VkSamplerCreateInfo resSrcSamplerInfo (makeSamplerCreateInfo());
2102 const Move<VkSampler> resSrcSampler = vk::createSampler(vk, device, &resSrcSamplerInfo);
2103 const VkDescriptorImageInfo descriptorRefSrcImage (makeDescriptorImageInfo(*refSrcSampler, *refSrcImageView, layoutShaderReadOnly ? VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL : VK_IMAGE_LAYOUT_GENERAL));
2104 const VkDescriptorImageInfo descriptorResSrcImage (makeDescriptorImageInfo(*resSrcSampler, *resSrcImageView, layoutShaderReadOnly ? VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL : VK_IMAGE_LAYOUT_GENERAL));
2105 const VkDescriptorImageInfo descriptorRefDstImage (makeDescriptorImageInfo(DE_NULL, *refDstImageView, VK_IMAGE_LAYOUT_GENERAL));
2106 const VkDescriptorImageInfo descriptorResDstImage (makeDescriptorImageInfo(DE_NULL, *resDstImageView, VK_IMAGE_LAYOUT_GENERAL));
2107
2108 const VkExtent2D renderSize (makeExtent2D(mipmapDims.x(), mipmapDims.y()));
2109 const Unique<VkPipelineLayout> pipelineLayout (makePipelineLayout(vk, device, *descriptorSetLayout));
2110 const Unique<VkPipeline> pipeline (makeGraphicsPipeline(vk, device, *pipelineLayout, *renderPass, *vertShaderModule, *fragShaderModule, renderSize, 0u));
2111
2112 const Unique<VkCommandBuffer> cmdBuffer (allocateCommandBuffer(vk, device, cmdPool, VK_COMMAND_BUFFER_LEVEL_PRIMARY));
2113
2114 const VkBufferImageCopy copyBufferToImageRegion = makeBufferImageCopy(mipmapDimsBlocked.x(), mipmapDimsBlocked.y(), 0u, 0u, mipmapDimsBlocked.x(), mipmapDimsBlocked.y());
2115 const VkBufferImageCopy copyRegion = makeBufferImageCopy(mipmapDims.x(), mipmapDims.y(), 0u, 0u);
2116 const VkBufferMemoryBarrier refSrcCopyBufferBarrierPre = makeBufferMemoryBarrier(VK_ACCESS_HOST_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT, refSrcImageBuffer->get(), 0ull, refCompressedData.size());
2117 const VkImageMemoryBarrier refSrcCopyImageBarrierPre = makeImageMemoryBarrier(0u, VK_ACCESS_TRANSFER_WRITE_BIT, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL, refSrcImage->get(), subresourceRange);
2118 const VkImageMemoryBarrier refSrcCopyImageBarrierPost = makeImageMemoryBarrier(VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_SHADER_READ_BIT, VK_IMAGE_LAYOUT_GENERAL, layoutShaderReadOnly ? VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL : VK_IMAGE_LAYOUT_GENERAL, refSrcImage->get(), subresourceRange);
2119 const VkImageMemoryBarrier resCompressedImageBarrier = makeImageMemoryBarrier(0, VK_ACCESS_SHADER_READ_BIT, VK_IMAGE_LAYOUT_GENERAL, layoutShaderReadOnly ? VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL : VK_IMAGE_LAYOUT_GENERAL, resCompressedImage->get(), resSubresourceRange);
2120
2121 const Move<VkFramebuffer> framebuffer (makeFramebuffer(vk, device, *renderPass, 0, DE_NULL, renderSize.width, renderSize.height, getLayerCount()));
2122
2123 // Upload source image data
2124 {
2125 const Allocation& refSrcImageBufferAlloc = refSrcImageBuffer->getAllocation();
2126 deMemcpy(refSrcImageBufferAlloc.getHostPtr(), &refCompressedData[0], refCompressedData.size());
2127 flushAlloc(vk, device, refSrcImageBufferAlloc);
2128 }
2129
2130 beginCommandBuffer(vk, *cmdBuffer);
2131 vk.cmdBindPipeline(*cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *pipeline);
2132
2133 // Copy buffer to image
2134 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1u, &refSrcCopyBufferBarrierPre, 1u, &refSrcCopyImageBarrierPre);
2135 vk.cmdCopyBufferToImage(*cmdBuffer, refSrcImageBuffer->get(), refSrcImage->get(), VK_IMAGE_LAYOUT_GENERAL, 1u, ©BufferToImageRegion);
2136 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0, DE_NULL, 1u, &refSrcCopyImageBarrierPost);
2137
2138 // Make reference and result images readable
2139 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, DE_NULL, 1u, &refDstInitImageBarrier);
2140 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, DE_NULL, 1u, &resDstInitImageBarrier);
2141 {
2142 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, DE_NULL, 1u, &resCompressedImageBarrier);
2143 }
2144
2145 beginRenderPass(vk, *cmdBuffer, *renderPass, *framebuffer, renderSize);
2146 {
2147 DescriptorSetUpdateBuilder()
2148 .writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u), VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, &descriptorRefSrcImage)
2149 .writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(1u), VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, &descriptorResSrcImage)
2150 .writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(2u), VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, &descriptorRefDstImage)
2151 .writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(3u), VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, &descriptorResDstImage)
2152 .update(vk, device);
2153
2154 vk.cmdBindDescriptorSets(*cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *pipelineLayout, 0u, 1u, &descriptorSet.get(), 0u, DE_NULL);
2155 vk.cmdBindVertexBuffers(*cmdBuffer, 0, 1, &m_vertexBuffer->get(), &m_vertexBufferOffset);
2156 vk.cmdDraw(*cmdBuffer, m_vertexCount, 1, 0, 0);
2157 }
2158 endRenderPass(vk, *cmdBuffer);
2159
2160 // Decompress reference image
2161 {
2162 const VkImageMemoryBarrier refDstImageBarrier = makeImageMemoryBarrier(
2163 VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
2164 VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_GENERAL,
2165 refDstImage->get(), subresourceRange);
2166
2167 const VkBufferMemoryBarrier refDstBufferBarrier = makeBufferMemoryBarrier(
2168 VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_HOST_READ_BIT,
2169 refDstBuffer->get(), 0ull, dstBufferSize);
2170
2171 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0, (const VkBufferMemoryBarrier*)DE_NULL, 1, &refDstImageBarrier);
2172 vk.cmdCopyImageToBuffer(*cmdBuffer, refDstImage->get(), VK_IMAGE_LAYOUT_GENERAL, refDstBuffer->get(), 1u, ©Region);
2173 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_HOST_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1, &refDstBufferBarrier, 0, (const VkImageMemoryBarrier*)DE_NULL);
2174 }
2175
2176 // Decompress result image
2177 {
2178 const VkImageMemoryBarrier resDstImageBarrier = makeImageMemoryBarrier(
2179 VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
2180 VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_GENERAL,
2181 resDstImage->get(), subresourceRange);
2182
2183 const VkBufferMemoryBarrier resDstBufferBarrier = makeBufferMemoryBarrier(
2184 VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_HOST_READ_BIT,
2185 resDstBuffer->get(), 0ull, dstBufferSize);
2186
2187 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0, (const VkBufferMemoryBarrier*)DE_NULL, 1, &resDstImageBarrier);
2188 vk.cmdCopyImageToBuffer(*cmdBuffer, resDstImage->get(), VK_IMAGE_LAYOUT_GENERAL, resDstBuffer->get(), 1u, ©Region);
2189 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_HOST_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1, &resDstBufferBarrier, 0, (const VkImageMemoryBarrier*)DE_NULL);
2190 }
2191
2192 endCommandBuffer(vk, *cmdBuffer);
2193
2194 submitCommandsAndWait(vk, device, queue, *cmdBuffer);
2195 m_context.resetCommandPoolForVKSC(device, cmdPool);
2196
2197 // Compare decompressed pixel data in reference and result images
2198 {
2199 const Allocation& refDstBufferAlloc = refDstBuffer->getAllocation();
2200 invalidateAlloc(vk, device, refDstBufferAlloc);
2201
2202 const Allocation& resDstBufferAlloc = resDstBuffer->getAllocation();
2203 invalidateAlloc(vk, device, resDstBufferAlloc);
2204
2205 BinaryCompareMode compareMode =
2206 (m_parameters.formatIsASTC)
2207 ?(COMPARE_MODE_ALLOW_ASTC_ERROR_COLOUR_WARNING)
2208 :(COMPARE_MODE_NORMAL);
2209
2210 BinaryCompareResult res = BinaryCompare(refDstBufferAlloc.getHostPtr(),
2211 resDstBufferAlloc.getHostPtr(),
2212 dstBufferSize,
2213 m_parameters.formatForVerify,
2214 compareMode);
2215
2216 if (res == COMPARE_RESULT_FAILED)
2217 {
2218 // Do fuzzy to log error mask
2219 invalidateAlloc(vk, device, resDstBufferAlloc);
2220 invalidateAlloc(vk, device, refDstBufferAlloc);
2221
2222 tcu::ConstPixelBufferAccess resPixels (mapVkFormat(m_parameters.formatForVerify), renderSize.width, renderSize.height, 1u, resDstBufferAlloc.getHostPtr());
2223 tcu::ConstPixelBufferAccess refPixels (mapVkFormat(m_parameters.formatForVerify), renderSize.width, renderSize.height, 1u, refDstBufferAlloc.getHostPtr());
2224
2225 string comment = string("Image Comparison (level=") + de::toString(level) + string(", layer=") + de::toString(layer) + string(")");
2226
2227 if (isWriteToCompressedOperation())
2228 tcu::fuzzyCompare(m_context.getTestContext().getLog(), "ImageComparison", comment.c_str(), refPixels, resPixels, 0.001f, tcu::COMPARE_LOG_EVERYTHING);
2229 else
2230 tcu::fuzzyCompare(m_context.getTestContext().getLog(), "ImageComparison", comment.c_str(), resPixels, refPixels, 0.001f, tcu::COMPARE_LOG_EVERYTHING);
2231
2232 return false;
2233 }
2234 else if (res == COMPARE_RESULT_ASTC_QUALITY_WARNING)
2235 {
2236 m_bASTCErrorColourMismatch = true;
2237 }
2238 }
2239
2240 return true;
2241 }
2242
2243
// Texture-path variant of the attachments test: the transcode passes sample
// the source image through a combined image sampler and write results via a
// storage image in the fragment shader (see transcodeRead/transcodeWrite).
class GraphicsTextureTestInstance : public GraphicsAttachmentsTestInstance
{
public:
					GraphicsTextureTestInstance		(Context&					context,
													 const TestParameters&		parameters);

protected:
	// True when the tested operation writes into the compressed image.
	virtual bool	isWriteToCompressedOperation	();
	virtual void	transcodeRead					(const VkCommandPool&		cmdPool);
	virtual void	transcodeWrite					(const VkCommandPool&		cmdPool);
};
2254
// All state setup is inherited from the attachments variant; this subclass
// only overrides the operation check and the transcode passes.
GraphicsTextureTestInstance::GraphicsTextureTestInstance (Context& context, const TestParameters& parameters)
	: GraphicsAttachmentsTestInstance(context, parameters)
{
}
2259
isWriteToCompressedOperation()2260 bool GraphicsTextureTestInstance::isWriteToCompressedOperation ()
2261 {
2262 return (m_parameters.operation == OPERATION_TEXTURE_WRITE);
2263 }
2264
transcodeRead(const VkCommandPool & cmdPool)2265 void GraphicsTextureTestInstance::transcodeRead (const VkCommandPool& cmdPool)
2266 {
2267 const DeviceInterface& vk = m_context.getDeviceInterface();
2268 const VkDevice device = m_context.getDevice();
2269 const VkQueue queue = m_context.getUniversalQueue();
2270 Allocator& allocator = m_context.getDefaultAllocator();
2271
2272 const VkImageCreateFlags* imgCreateFlagsOverride = DE_NULL;
2273
2274 const VkImageCreateInfo srcImageCreateInfo = makeCreateImageInfo(m_srcFormat, m_parameters.imageType, m_srcImageResolutions[0], m_srcImageUsageFlags, imgCreateFlagsOverride, getLevelCount(), getLayerCount());
2275 MovePtr<Image> srcImage (new Image(vk, device, allocator, srcImageCreateInfo, MemoryRequirement::Any));
2276
2277 const Unique<VkShaderModule> vertShaderModule (createShaderModule(vk, device, m_context.getBinaryCollection().get("vert"), 0));
2278 const Unique<VkShaderModule> fragShaderModule (createShaderModule(vk, device, m_context.getBinaryCollection().get("frag"), 0));
2279
2280 const Unique<VkRenderPass> renderPass (vk::makeRenderPass(vk, device));
2281
2282 const Move<VkDescriptorSetLayout> descriptorSetLayout (DescriptorSetLayoutBuilder()
2283 .addSingleBinding(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, VK_SHADER_STAGE_FRAGMENT_BIT)
2284 .addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, VK_SHADER_STAGE_FRAGMENT_BIT)
2285 .build(vk, device));
2286 const Move<VkDescriptorPool> descriptorPool (DescriptorPoolBuilder()
2287 .addType(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER)
2288 .addType(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE)
2289 .build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u));
2290 const Move<VkDescriptorSet> descriptorSet (makeDescriptorSet(vk, device, *descriptorPool, *descriptorSetLayout));
2291
2292 const VkExtent2D renderSizeUnused (makeExtent2D(1u, 1u));
2293 const Unique<VkPipelineLayout> pipelineLayout (makePipelineLayout(vk, device, *descriptorSetLayout));
2294 const Unique<VkPipeline> pipeline (makeGraphicsPipeline(vk, device, *pipelineLayout, *renderPass, *vertShaderModule, *fragShaderModule, renderSizeUnused, 0u, true));
2295
2296 const Unique<VkCommandBuffer> cmdBuffer (allocateCommandBuffer(vk, device, cmdPool, VK_COMMAND_BUFFER_LEVEL_PRIMARY));
2297
2298 for (deUint32 levelNdx = 0; levelNdx < getLevelCount(); ++levelNdx)
2299 {
2300 const UVec3& uncompressedImageRes = m_uncompressedImageResVec[levelNdx];
2301 const UVec3& srcImageResolution = m_srcImageResolutions[levelNdx];
2302 const UVec3& dstImageResolution = m_dstImageResolutions[levelNdx];
2303 const size_t srcImageSizeInBytes = m_srcData[levelNdx][0]->size();
2304 const size_t dstImageSizeInBytes = m_dstData[levelNdx][0]->size();
2305 const UVec3 srcImageResBlocked = getCompressedImageResolutionBlockCeil(m_parameters.formatCompressed, srcImageResolution);
2306
2307 const VkImageCreateInfo dstImageCreateInfo = makeCreateImageInfo(m_dstFormat, m_parameters.imageType, dstImageResolution, m_dstImageUsageFlags, imgCreateFlagsOverride, SINGLE_LEVEL, SINGLE_LAYER);
2308
2309 const VkBufferCreateInfo srcImageBufferInfo = makeBufferCreateInfo(srcImageSizeInBytes, VK_BUFFER_USAGE_TRANSFER_SRC_BIT);
2310 const MovePtr<BufferWithMemory> srcImageBuffer = MovePtr<BufferWithMemory>(new BufferWithMemory(vk, device, allocator, srcImageBufferInfo, MemoryRequirement::HostVisible));
2311
2312 const VkBufferCreateInfo dstImageBufferInfo = makeBufferCreateInfo(dstImageSizeInBytes, VK_BUFFER_USAGE_TRANSFER_DST_BIT);
2313 MovePtr<BufferWithMemory> dstImageBuffer = MovePtr<BufferWithMemory>(new BufferWithMemory(vk, device, allocator, dstImageBufferInfo, MemoryRequirement::HostVisible));
2314
2315 const VkExtent2D renderSize (makeExtent2D(uncompressedImageRes.x(), uncompressedImageRes.y()));
2316 const VkViewport viewport = makeViewport(renderSize);
2317 const VkRect2D scissor = makeRect2D(renderSize);
2318
2319 for (deUint32 layerNdx = 0; layerNdx < getLayerCount(); ++layerNdx)
2320 {
2321 const VkImageSubresourceRange srcSubresourceRange = makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, levelNdx, SINGLE_LEVEL, layerNdx, SINGLE_LAYER);
2322 const VkImageSubresourceRange dstSubresourceRange = makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, SINGLE_LEVEL, 0u, SINGLE_LAYER);
2323
2324 Move<VkImageView> srcImageView (makeImageView(vk, device, srcImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatUncompressed, srcSubresourceRange, m_srcImageViewUsageKHR));
2325
2326 de::MovePtr<Image> dstImage (new Image(vk, device, allocator, dstImageCreateInfo, MemoryRequirement::Any));
2327 Move<VkImageView> dstImageView (makeImageView(vk, device, dstImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatUncompressed, dstSubresourceRange, m_dstImageViewUsageKHR));
2328
2329 const VkSamplerCreateInfo srcSamplerInfo (makeSamplerCreateInfo());
2330 const Move<VkSampler> srcSampler = vk::createSampler(vk, device, &srcSamplerInfo);
2331 const VkDescriptorImageInfo descriptorSrcImage (makeDescriptorImageInfo(*srcSampler, *srcImageView, VK_IMAGE_LAYOUT_GENERAL));
2332 const VkDescriptorImageInfo descriptorDstImage (makeDescriptorImageInfo(DE_NULL, *dstImageView, VK_IMAGE_LAYOUT_GENERAL));
2333
2334 const VkBufferImageCopy srcCopyRegion = makeBufferImageCopy(srcImageResolution.x(), srcImageResolution.y(), levelNdx, layerNdx, srcImageResBlocked.x(), srcImageResBlocked.y());
2335 const VkBufferMemoryBarrier srcCopyBufferBarrierPre = makeBufferMemoryBarrier(VK_ACCESS_HOST_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT, srcImageBuffer->get(), 0ull, srcImageSizeInBytes);
2336 const VkImageMemoryBarrier srcCopyImageBarrierPre = makeImageMemoryBarrier(0u, VK_ACCESS_TRANSFER_WRITE_BIT, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, srcImage->get(), srcSubresourceRange);
2337 const VkImageMemoryBarrier srcCopyImageBarrierPost = makeImageMemoryBarrier(VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_SHADER_READ_BIT, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL, srcImage->get(), srcSubresourceRange);
2338 const VkBufferImageCopy dstCopyRegion = makeBufferImageCopy(dstImageResolution.x(), dstImageResolution.y());
2339 const VkImageMemoryBarrier dstInitImageBarrier = makeImageMemoryBarrier(0u, VK_ACCESS_SHADER_READ_BIT, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL, dstImage->get(), dstSubresourceRange);
2340
2341 const VkExtent2D framebufferSize (makeExtent2D(dstImageResolution[0], dstImageResolution[1]));
2342 const Move<VkFramebuffer> framebuffer (makeFramebuffer(vk, device, *renderPass, 0, DE_NULL, framebufferSize.width, framebufferSize.height, SINGLE_LAYER));
2343
2344 // Upload source image data
2345 const Allocation& alloc = srcImageBuffer->getAllocation();
2346 deMemcpy(alloc.getHostPtr(), &m_srcData[levelNdx][layerNdx]->at(0), srcImageSizeInBytes);
2347 flushAlloc(vk, device, alloc);
2348
2349 beginCommandBuffer(vk, *cmdBuffer);
2350 vk.cmdBindPipeline(*cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *pipeline);
2351
2352 // Copy buffer to image
2353 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1u, &srcCopyBufferBarrierPre, 1u, &srcCopyImageBarrierPre);
2354 vk.cmdCopyBufferToImage(*cmdBuffer, srcImageBuffer->get(), srcImage->get(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &srcCopyRegion);
2355 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, DE_NULL, 1u, &srcCopyImageBarrierPost);
2356
2357 // Define destination image layout
2358 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, DE_NULL, 1u, &dstInitImageBarrier);
2359
2360 beginRenderPass(vk, *cmdBuffer, *renderPass, *framebuffer, renderSize);
2361
2362 DescriptorSetUpdateBuilder()
2363 .writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u), VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, &descriptorSrcImage)
2364 .writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(1u), VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, &descriptorDstImage)
2365 .update(vk, device);
2366
2367 vk.cmdBindDescriptorSets(*cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *pipelineLayout, 0u, 1u, &descriptorSet.get(), 0u, DE_NULL);
2368 vk.cmdBindVertexBuffers(*cmdBuffer, 0u, 1u, &m_vertexBuffer->get(), &m_vertexBufferOffset);
2369
2370 vk.cmdSetViewport(*cmdBuffer, 0u, 1u, &viewport);
2371 vk.cmdSetScissor(*cmdBuffer, 0u, 1u, &scissor);
2372
2373 vk.cmdDraw(*cmdBuffer, (deUint32)m_vertexCount, 1, 0, 0);
2374
2375 endRenderPass(vk, *cmdBuffer);
2376
2377 const VkImageMemoryBarrier prepareForTransferBarrier = makeImageMemoryBarrier(
2378 VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
2379 VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_GENERAL,
2380 dstImage->get(), dstSubresourceRange);
2381
2382 const VkBufferMemoryBarrier copyBarrier = makeBufferMemoryBarrier(
2383 VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_HOST_READ_BIT,
2384 dstImageBuffer->get(), 0ull, dstImageSizeInBytes);
2385
2386 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0, (const VkBufferMemoryBarrier*)DE_NULL, 1, &prepareForTransferBarrier);
2387 vk.cmdCopyImageToBuffer(*cmdBuffer, dstImage->get(), VK_IMAGE_LAYOUT_GENERAL, dstImageBuffer->get(), 1u, &dstCopyRegion);
2388 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_HOST_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1, ©Barrier, 0, (const VkImageMemoryBarrier*)DE_NULL);
2389
2390 endCommandBuffer(vk, *cmdBuffer);
2391
2392 submitCommandsAndWait(vk, device, queue, *cmdBuffer);
2393 m_context.resetCommandPoolForVKSC(device, cmdPool);
2394
2395 const Allocation& dstImageBufferAlloc = dstImageBuffer->getAllocation();
2396 invalidateAlloc(vk, device, dstImageBufferAlloc);
2397 deMemcpy(&m_dstData[levelNdx][layerNdx]->at(0), dstImageBufferAlloc.getHostPtr(), dstImageSizeInBytes);
2398 }
2399 }
2400
2401 m_compressedImage = srcImage;
2402 }
2403
transcodeWrite(const VkCommandPool & cmdPool)2404 void GraphicsTextureTestInstance::transcodeWrite (const VkCommandPool& cmdPool)
2405 {
2406 const DeviceInterface& vk = m_context.getDeviceInterface();
2407 const VkDevice device = m_context.getDevice();
2408 const VkQueue queue = m_context.getUniversalQueue();
2409 Allocator& allocator = m_context.getDefaultAllocator();
2410
2411 const VkImageCreateFlags* imgCreateFlagsOverride = DE_NULL;
2412
2413 const VkImageCreateInfo dstImageCreateInfo = makeCreateImageInfo(m_dstFormat, m_parameters.imageType, m_dstImageResolutions[0], m_dstImageUsageFlags, imgCreateFlagsOverride, getLevelCount(), getLayerCount());
2414 MovePtr<Image> dstImage (new Image(vk, device, allocator, dstImageCreateInfo, MemoryRequirement::Any));
2415
2416 const Unique<VkShaderModule> vertShaderModule (createShaderModule(vk, device, m_context.getBinaryCollection().get("vert"), 0));
2417 const Unique<VkShaderModule> fragShaderModule (createShaderModule(vk, device, m_context.getBinaryCollection().get("frag"), 0));
2418
2419 const Unique<VkRenderPass> renderPass (vk::makeRenderPass(vk, device));
2420
2421 const Move<VkDescriptorSetLayout> descriptorSetLayout (DescriptorSetLayoutBuilder()
2422 .addSingleBinding(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, VK_SHADER_STAGE_FRAGMENT_BIT)
2423 .addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, VK_SHADER_STAGE_FRAGMENT_BIT)
2424 .build(vk, device));
2425 const Move<VkDescriptorPool> descriptorPool (DescriptorPoolBuilder()
2426 .addType(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER)
2427 .addType(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE)
2428 .build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u));
2429 const Move<VkDescriptorSet> descriptorSet (makeDescriptorSet(vk, device, *descriptorPool, *descriptorSetLayout));
2430
2431 const VkExtent2D renderSizeUnused (makeExtent2D(1u, 1u));
2432 const Unique<VkPipelineLayout> pipelineLayout (makePipelineLayout(vk, device, *descriptorSetLayout));
2433 const Unique<VkPipeline> pipeline (makeGraphicsPipeline(vk, device, *pipelineLayout, *renderPass, *vertShaderModule, *fragShaderModule, renderSizeUnused, 0u, true));
2434
2435 const Unique<VkCommandBuffer> cmdBuffer (allocateCommandBuffer(vk, device, cmdPool, VK_COMMAND_BUFFER_LEVEL_PRIMARY));
2436
2437 for (deUint32 levelNdx = 0; levelNdx < getLevelCount(); ++levelNdx)
2438 {
2439 const UVec3& uncompressedImageRes = m_uncompressedImageResVec[levelNdx];
2440 const UVec3& srcImageResolution = m_srcImageResolutions[levelNdx];
2441 const UVec3& dstImageResolution = m_dstImageResolutions[levelNdx];
2442 const size_t srcImageSizeInBytes = m_srcData[levelNdx][0]->size();
2443 const size_t dstImageSizeInBytes = m_dstData[levelNdx][0]->size();
2444 const UVec3 dstImageResBlocked = getCompressedImageResolutionBlockCeil(m_parameters.formatCompressed, dstImageResolution);
2445
2446 const VkImageCreateInfo srcImageCreateInfo = makeCreateImageInfo(m_srcFormat, m_parameters.imageType, srcImageResolution, m_srcImageUsageFlags, imgCreateFlagsOverride, SINGLE_LEVEL, SINGLE_LAYER);
2447
2448 const VkExtent2D renderSize (makeExtent2D(uncompressedImageRes.x(), uncompressedImageRes.y()));
2449 const VkViewport viewport = makeViewport(renderSize);
2450 const VkRect2D scissor = makeRect2D(renderSize);
2451
2452 for (deUint32 layerNdx = 0; layerNdx < getLayerCount(); ++layerNdx)
2453 {
2454 const VkBufferCreateInfo srcImageBufferInfo = makeBufferCreateInfo(srcImageSizeInBytes, VK_BUFFER_USAGE_TRANSFER_SRC_BIT);
2455 const MovePtr<BufferWithMemory> srcImageBuffer = MovePtr<BufferWithMemory>(new BufferWithMemory(vk, device, allocator, srcImageBufferInfo, MemoryRequirement::HostVisible));
2456
2457 const VkBufferCreateInfo dstImageBufferInfo = makeBufferCreateInfo(dstImageSizeInBytes, VK_BUFFER_USAGE_TRANSFER_DST_BIT);
2458 MovePtr<BufferWithMemory> dstImageBuffer = MovePtr<BufferWithMemory>(new BufferWithMemory(vk, device, allocator, dstImageBufferInfo, MemoryRequirement::HostVisible));
2459
2460 const VkImageSubresourceRange srcSubresourceRange = makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, SINGLE_LEVEL, 0u, SINGLE_LAYER);
2461 const VkImageSubresourceRange dstSubresourceRange = makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, levelNdx, SINGLE_LEVEL, layerNdx, SINGLE_LAYER);
2462
2463 Move<VkImageView> dstImageView (makeImageView(vk, device, dstImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatUncompressed, dstSubresourceRange, m_dstImageViewUsageKHR));
2464
2465 de::MovePtr<Image> srcImage (new Image(vk, device, allocator, srcImageCreateInfo, MemoryRequirement::Any));
2466 Move<VkImageView> srcImageView (makeImageView(vk, device, srcImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatUncompressed, srcSubresourceRange, m_srcImageViewUsageKHR));
2467
2468 const VkSamplerCreateInfo srcSamplerInfo (makeSamplerCreateInfo());
2469 const Move<VkSampler> srcSampler = vk::createSampler(vk, device, &srcSamplerInfo);
2470 const VkDescriptorImageInfo descriptorSrcImage (makeDescriptorImageInfo(*srcSampler, *srcImageView, VK_IMAGE_LAYOUT_GENERAL));
2471 const VkDescriptorImageInfo descriptorDstImage (makeDescriptorImageInfo(DE_NULL, *dstImageView, VK_IMAGE_LAYOUT_GENERAL));
2472
2473 const VkBufferImageCopy srcCopyRegion = makeBufferImageCopy(srcImageResolution.x(), srcImageResolution.y(), 0u, 0u);
2474 const VkBufferMemoryBarrier srcCopyBufferBarrierPre = makeBufferMemoryBarrier(VK_ACCESS_HOST_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT, srcImageBuffer->get(), 0ull, srcImageSizeInBytes);
2475 const VkImageMemoryBarrier srcCopyImageBarrierPre = makeImageMemoryBarrier(0u, VK_ACCESS_TRANSFER_WRITE_BIT, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, srcImage->get(), srcSubresourceRange);
2476 const VkImageMemoryBarrier srcCopyImageBarrierPost = makeImageMemoryBarrier(VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_SHADER_READ_BIT, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL, srcImage->get(), srcSubresourceRange);
2477 const VkBufferImageCopy dstCopyRegion = makeBufferImageCopy(dstImageResolution.x(), dstImageResolution.y(), levelNdx, layerNdx, dstImageResBlocked.x(), dstImageResBlocked.y());
2478 const VkImageMemoryBarrier dstInitImageBarrier = makeImageMemoryBarrier(0u, VK_ACCESS_SHADER_READ_BIT, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL, dstImage->get(), dstSubresourceRange);
2479
2480 const VkExtent2D framebufferSize (makeExtent2D(dstImageResolution[0], dstImageResolution[1]));
2481 const Move<VkFramebuffer> framebuffer (makeFramebuffer(vk, device, *renderPass, 0, DE_NULL, framebufferSize.width, framebufferSize.height, SINGLE_LAYER));
2482
2483 // Upload source image data
2484 const Allocation& alloc = srcImageBuffer->getAllocation();
2485 deMemcpy(alloc.getHostPtr(), &m_srcData[levelNdx][layerNdx]->at(0), srcImageSizeInBytes);
2486 flushAlloc(vk, device, alloc);
2487
2488 beginCommandBuffer(vk, *cmdBuffer);
2489 vk.cmdBindPipeline(*cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *pipeline);
2490
2491 // Copy buffer to image
2492 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1u, &srcCopyBufferBarrierPre, 1u, &srcCopyImageBarrierPre);
2493 vk.cmdCopyBufferToImage(*cmdBuffer, srcImageBuffer->get(), srcImage->get(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &srcCopyRegion);
2494 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, DE_NULL, 1u, &srcCopyImageBarrierPost);
2495
2496 // Define destination image layout
2497 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, DE_NULL, 1u, &dstInitImageBarrier);
2498
2499 beginRenderPass(vk, *cmdBuffer, *renderPass, *framebuffer, renderSize);
2500
2501 DescriptorSetUpdateBuilder()
2502 .writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u), VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, &descriptorSrcImage)
2503 .writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(1u), VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, &descriptorDstImage)
2504 .update(vk, device);
2505
2506 vk.cmdBindDescriptorSets(*cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *pipelineLayout, 0u, 1u, &descriptorSet.get(), 0u, DE_NULL);
2507 vk.cmdBindVertexBuffers(*cmdBuffer, 0u, 1u, &m_vertexBuffer->get(), &m_vertexBufferOffset);
2508
2509 vk.cmdSetViewport(*cmdBuffer, 0u, 1u, &viewport);
2510 vk.cmdSetScissor(*cmdBuffer, 0u, 1u, &scissor);
2511
2512 vk.cmdDraw(*cmdBuffer, (deUint32)m_vertexCount, 1, 0, 0);
2513
2514 endRenderPass(vk, *cmdBuffer);
2515
2516 const VkImageMemoryBarrier prepareForTransferBarrier = makeImageMemoryBarrier(
2517 VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
2518 VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_GENERAL,
2519 dstImage->get(), dstSubresourceRange);
2520
2521 const VkBufferMemoryBarrier copyBarrier = makeBufferMemoryBarrier(
2522 VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_HOST_READ_BIT,
2523 dstImageBuffer->get(), 0ull, dstImageSizeInBytes);
2524
2525 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0, (const VkBufferMemoryBarrier*)DE_NULL, 1, &prepareForTransferBarrier);
2526 vk.cmdCopyImageToBuffer(*cmdBuffer, dstImage->get(), VK_IMAGE_LAYOUT_GENERAL, dstImageBuffer->get(), 1u, &dstCopyRegion);
2527 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_HOST_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1, ©Barrier, 0, (const VkImageMemoryBarrier*)DE_NULL);
2528
2529 endCommandBuffer(vk, *cmdBuffer);
2530
2531 submitCommandsAndWait(vk, device, queue, *cmdBuffer);
2532 m_context.resetCommandPoolForVKSC(device, cmdPool);
2533
2534 const Allocation& dstImageBufferAlloc = dstImageBuffer->getAllocation();
2535 invalidateAlloc(vk, device, dstImageBufferAlloc);
2536 deMemcpy(&m_dstData[levelNdx][layerNdx]->at(0), dstImageBufferAlloc.getHostPtr(), dstImageSizeInBytes);
2537 }
2538 }
2539
2540 m_compressedImage = dstImage;
2541 }
2542
// Test case for VK_KHR_maintenance2 "block texel view compatible" image aliasing:
// a compressed image is accessed through an uncompressed view with a matching
// texel block size. The concrete instance type is selected in createInstance()
// based on the shader/operation in m_parameters.
class TexelViewCompatibleCase : public TestCase
{
public:
							TexelViewCompatibleCase		(TestContext&			testCtx,
														 const std::string&		name,
														 const std::string&		desc,
														 const TestParameters&	parameters);
	// Generates the GLSL sources ("comp"/"decompress" or "vert"/"frag"/"frag_verify").
	void					initPrograms				(SourceCollections&		programCollection) const;
	TestInstance*			createInstance				(Context&				context) const;
	// Throws NotSupportedError when formats/features required by m_parameters are missing.
	virtual void			checkSupport				(Context&				context) const;
protected:
	// Immutable configuration (formats, image type, operation, usage flags, ...).
	const TestParameters	m_parameters;
};
2556
// Stores the test configuration; all device interaction is deferred to the instance.
TexelViewCompatibleCase::TexelViewCompatibleCase (TestContext& testCtx, const std::string& name, const std::string& desc, const TestParameters& parameters)
	: TestCase		(testCtx, name, desc)
	, m_parameters	(parameters)
{
}
2562
// Builds all GLSL sources used by the test:
//  - SHADER_TYPE_COMPUTE: a "comp" transcode shader (variant chosen by m_parameters.operation)
//    plus a "decompress" shader that samples both the transcoded image and a reference image
//    and stores the decompressed texels for host-side comparison.
//  - SHADER_TYPE_FRAGMENT: a passthrough "vert" shader, a "frag" transcode shader
//    (attachment- or texture-based), and a "frag_verify" comparison shader.
void TexelViewCompatibleCase::initPrograms (vk::SourceCollections& programCollection) const
{
	DE_ASSERT(m_parameters.size.x() > 0);
	DE_ASSERT(m_parameters.size.y() > 0);

	// Index into the per-image-type snippet tables below: 0 = 1D, 1 = 2D, 2 = 3D.
	// NOTE(review): IMAGE_TYPE_2D_ARRAY would also yield index 0 here — presumably the
	// array case never reaches the table lookups; confirm against the instance types.
	const unsigned int imageTypeIndex =
		(m_parameters.imageType == IMAGE_TYPE_2D) +
		(m_parameters.imageType == IMAGE_TYPE_3D) * 2;

	switch (m_parameters.shader)
	{
		case SHADER_TYPE_COMPUTE:
		{
			const std::string imageTypeStr			= getShaderImageType(mapVkFormat(m_parameters.formatUncompressed), m_parameters.imageType);
			const std::string formatQualifierStr	= getShaderImageFormatQualifier(mapVkFormat(m_parameters.formatUncompressed));
			std::ostringstream src;
			std::ostringstream src_decompress;

			// Shared header: GLSL 450 + 1x1x1 workgroup (one invocation per texel, dispatch
			// dimensions carry the image size).
			src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_450) << "\n"
				<< "layout (local_size_x = 1, local_size_y = 1, local_size_z = 1) in;\n\n";
			src_decompress << src.str();

			switch(m_parameters.operation)
			{
				case OPERATION_IMAGE_LOAD:
				{
					// Coordinate declaration per image dimensionality.
					const char* posDefinitions[3] =
					{
						// IMAGE_TYPE_1D
						" highp int pos = int(gl_GlobalInvocationID.x);\n",
						// IMAGE_TYPE_2D
						" ivec2 pos = ivec2(gl_GlobalInvocationID.xy);\n",
						// IMAGE_TYPE_3D
						" ivec3 pos = ivec3(gl_GlobalInvocationID);\n",
					};

					// Plain imageLoad -> imageStore copy through uncompressed views.
					src << "layout (binding = 0, "<<formatQualifierStr<<") readonly uniform "<<imageTypeStr<<" u_image0;\n"
						<< "layout (binding = 1, "<<formatQualifierStr<<") writeonly uniform "<<imageTypeStr<<" u_image1;\n\n"
						<< "void main (void)\n"
						<< "{\n"
						<< posDefinitions[imageTypeIndex]
						<< " imageStore(u_image1, pos, imageLoad(u_image0, pos));\n"
						<< "}\n";

					break;
				}

				case OPERATION_TEXEL_FETCH:
				{
					// texelFetch uses pos.z as the LOD argument.
					const char* storeDefinitions[3] =
					{
						// IMAGE_TYPE_1D
						" imageStore(u_image1, pos.x, texelFetch(u_image0, pos.x, pos.z));\n",
						// IMAGE_TYPE_2D
						" imageStore(u_image1, pos.xy, texelFetch(u_image0, pos.xy, pos.z));\n",
						// IMAGE_TYPE_3D
						" imageStore(u_image1, pos, texelFetch(u_image0, pos, pos.z));\n",
					};

					src << "layout (binding = 0) uniform "<<getGlslSamplerType(mapVkFormat(m_parameters.formatUncompressed), mapImageViewType(m_parameters.imageType))<<" u_image0;\n"
						<< "layout (binding = 1, "<<formatQualifierStr<<") writeonly uniform "<<imageTypeStr<<" u_image1;\n\n"
						<< "void main (void)\n"
						<< "{\n"
						<< " ivec3 pos = ivec3(gl_GlobalInvocationID.xyz);\n"
						<< storeDefinitions[imageTypeIndex]
						<< "}\n";

					break;
				}

				case OPERATION_TEXTURE:
				{
					// Normalized coordinates sampled at texel centers (+0.5), resolution taken
					// from the dispatch size (gl_NumWorkGroups).
					const char* coordDefinitions[3] =
					{
						// IMAGE_TYPE_1D
						" const int pos = int(gl_GlobalInvocationID.x);\n"
						" const float coord = (float(gl_GlobalInvocationID.x) + 0.5) / pixels_resolution.x;\n",
						// IMAGE_TYPE_2D
						" const ivec2 pos = ivec2(gl_GlobalInvocationID.xy);\n"
						" const vec2 coord = (vec2(gl_GlobalInvocationID.xy) + 0.5) / vec2(pixels_resolution);\n",
						// IMAGE_TYPE_3D
						" const ivec3 pos = ivec3(gl_GlobalInvocationID.xy, 0);\n"
						" const vec2 v2 = (vec2(gl_GlobalInvocationID.xy) + 0.5) / vec2(pixels_resolution);\n"
						" const vec3 coord = vec3(v2, 0.0);\n",
					};

					src << "layout (binding = 0) uniform "<<getGlslSamplerType(mapVkFormat(m_parameters.formatUncompressed), mapImageViewType(m_parameters.imageType))<<" u_image0;\n"
						<< "layout (binding = 1, "<<formatQualifierStr<<") writeonly uniform "<<imageTypeStr<<" u_image1;\n\n"
						<< "void main (void)\n"
						<< "{\n"
						<< " const vec2 pixels_resolution = vec2(gl_NumWorkGroups.x, gl_NumWorkGroups.y);\n"
						<< coordDefinitions[imageTypeIndex]
						<< " imageStore(u_image1, pos, texture(u_image0, coord));\n"
						<< "}\n";

					break;
				}

				case OPERATION_IMAGE_STORE:
				{
					const char* posDefinitions[3] =
					{
						// IMAGE_TYPE_1D
						" highp int pos = int(gl_GlobalInvocationID.x);\n",
						// IMAGE_TYPE_2D
						" ivec2 pos = ivec2(gl_GlobalInvocationID.xy);\n",
						// IMAGE_TYPE_3D
						" ivec3 pos = ivec3(gl_GlobalInvocationID);\n",
					};

					// Three bindings: u_image1 (reference input) -> u_image0 (read-write,
					// the image under test) -> u_image2 (output for verification).
					src << "layout (binding = 0, "<<formatQualifierStr<<") uniform "<<imageTypeStr<<" u_image0;\n"
						<< "layout (binding = 1, "<<formatQualifierStr<<") readonly uniform "<<imageTypeStr<<" u_image1;\n"
						<< "layout (binding = 2, "<<formatQualifierStr<<") writeonly uniform "<<imageTypeStr<<" u_image2;\n\n"
						<< "void main (void)\n"
						<< "{\n"
						<< posDefinitions[imageTypeIndex]
						<< " imageStore(u_image0, pos, imageLoad(u_image1, pos));\n"
						<< " imageStore(u_image2, pos, imageLoad(u_image0, pos));\n"
						<< "}\n";

					break;
				}

				default:
					DE_ASSERT(false);
			}

			// The reference image carries all layers; for multi-layer 2D cases it is
			// sampled as a 2D array with layer/level selected via push constants.
			const ImageType compressedReferenceImageType = (m_parameters.imageType == IMAGE_TYPE_2D && m_parameters.layers > 1u) ? IMAGE_TYPE_2D_ARRAY : m_parameters.imageType;
			const char* cordDefinitions[3] =
			{
				// IMAGE_TYPE_1D
				" const highp float cord = float(gl_GlobalInvocationID.x) / pixels_resolution.x;\n"
				" const highp int pos = int(gl_GlobalInvocationID.x); \n",
				// IMAGE_TYPE_2D
				" const vec2 cord = vec2(gl_GlobalInvocationID.xy) / vec2(pixels_resolution);\n"
				" const ivec2 pos = ivec2(gl_GlobalInvocationID.xy); \n",
				// IMAGE_TYPE_3D
				" const vec2 v2 = vec2(gl_GlobalInvocationID.xy) / vec2(pixels_resolution);\n"
				" const vec3 cord = vec3(v2, 0.0);\n"
				" const ivec3 pos = ivec3(gl_GlobalInvocationID); \n",
			};
			// Decompression shader: sample result + reference (compressed) and store both
			// decompressed so the host can compare them texel by texel.
			src_decompress << "layout (binding = 0) uniform "<<getGlslSamplerType(mapVkFormat(m_parameters.formatForVerify), mapImageViewType(m_parameters.imageType))<<" compressed_result;\n"
						   << "layout (binding = 1) uniform "<<getGlslSamplerType(mapVkFormat(m_parameters.formatForVerify), mapImageViewType(compressedReferenceImageType))<<" compressed_reference;\n"
						   << "layout (binding = 2, "<<getShaderImageFormatQualifier(mapVkFormat(m_parameters.formatForVerify))<<") writeonly uniform "<<getShaderImageType(mapVkFormat(m_parameters.formatForVerify), m_parameters.imageType)<<" decompressed_result;\n"
						   << "layout (binding = 3, "<<getShaderImageFormatQualifier(mapVkFormat(m_parameters.formatForVerify))<<") writeonly uniform "<<getShaderImageType(mapVkFormat(m_parameters.formatForVerify), m_parameters.imageType)<<" decompressed_reference;\n"
						   << "layout (push_constant, std430) uniform PushConstants { uint layer; uint level; };\n\n"
						   << "void main (void)\n"
						   << "{\n"
						   << " const vec2 pixels_resolution = vec2(gl_NumWorkGroups.xy);\n"
						   << cordDefinitions[imageTypeIndex]
						   << " imageStore(decompressed_result, pos, texture(compressed_result, cord));\n";
			if (compressedReferenceImageType == IMAGE_TYPE_2D_ARRAY)
				src_decompress << " imageStore(decompressed_reference, pos, textureLod(compressed_reference, vec3(cord, layer), level));\n";
			else
				src_decompress << " imageStore(decompressed_reference, pos, texture(compressed_reference, cord));\n";
			src_decompress << "}\n";
			programCollection.glslSources.add("comp") << glu::ComputeSource(src.str());
			programCollection.glslSources.add("decompress") << glu::ComputeSource(src_decompress.str());

			break;
		}

		case SHADER_TYPE_FRAGMENT:
		{
			// Fragment shaders address a single layer at a time, so arrays collapse to 2D.
			ImageType imageTypeForFS = (m_parameters.imageType == IMAGE_TYPE_2D_ARRAY) ? IMAGE_TYPE_2D : m_parameters.imageType;

			// Vertex shader
			{
				// Passthrough position; all work happens in the fragment stage.
				std::ostringstream src;
				src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_450) << "\n\n"
					<< "layout(location = 0) in vec4 v_in_position;\n"
					<< "\n"
					<< "void main (void)\n"
					<< "{\n"
					<< " gl_Position = v_in_position;\n"
					<< "}\n";

				programCollection.glslSources.add("vert") << glu::VertexSource(src.str());
			}

			// Fragment shader
			{
				switch(m_parameters.operation)
				{
					case OPERATION_ATTACHMENT_READ:
					case OPERATION_ATTACHMENT_WRITE:
					{
						// Input-attachment load re-cast to the output format's component type.
						std::ostringstream src;

						const std::string dstTypeStr = getGlslFormatType(m_parameters.formatUncompressed);
						const std::string srcTypeStr = getGlslInputFormatType(m_parameters.formatUncompressed);

						src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_450) << "\n\n"
							<< "precision highp int;\n"
							<< "precision highp float;\n"
							<< "\n"
							<< "layout (location = 0) out highp " << dstTypeStr << " o_color;\n"
							<< "layout (input_attachment_index = 0, set = 0, binding = 0) uniform highp " << srcTypeStr << " inputImage1;\n"
							<< "\n"
							<< "void main (void)\n"
							<< "{\n"
							<< " o_color = " << dstTypeStr << "(subpassLoad(inputImage1));\n"
							<< "}\n";

						programCollection.glslSources.add("frag") << glu::FragmentSource(src.str());

						break;
					}

					case OPERATION_TEXTURE_READ:
					case OPERATION_TEXTURE_WRITE:
					{
						// Sample the input at the fragment's normalized position and store the
						// texel into the output storage image at the fragment's integer position.
						std::ostringstream src;

						const std::string srcSamplerTypeStr		= getGlslSamplerType(mapVkFormat(m_parameters.formatUncompressed), mapImageViewType(imageTypeForFS));
						const std::string dstImageTypeStr		= getShaderImageType(mapVkFormat(m_parameters.formatUncompressed), imageTypeForFS);
						const std::string dstFormatQualifierStr	= getShaderImageFormatQualifier(mapVkFormat(m_parameters.formatUncompressed));

						const char* inDefinitions[3] =
						{
							// IMAGE_TYPE_1D
							" const highp int out_pos = int(gl_FragCoord.x);\n"
							" const highp float pixels_resolution = textureSize(u_imageIn, 0);\n"
							" const highp float in_pos = gl_FragCoord.x / pixels_resolution;\n",
							// IMAGE_TYPE_2D
							" const ivec2 out_pos = ivec2(gl_FragCoord.xy);\n"
							" const vec2 pixels_resolution = vec2(textureSize(u_imageIn, 0));\n"
							" const vec2 in_pos = vec2(gl_FragCoord.xy) / vec2(pixels_resolution);\n",
							// IMAGE_TYPE_3D
							" const ivec3 out_pos = ivec3(gl_FragCoord.xy, 0);\n"
							" const vec3 pixels_resolution = vec3(textureSize(u_imageIn, 0));\n"
							" const vec3 in_pos = vec3(gl_FragCoord.xy, 0) / vec3(pixels_resolution.xy, 1.0);\n",
						};

						src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_450) << "\n\n"
							<< "layout (binding = 0) uniform " << srcSamplerTypeStr << " u_imageIn;\n"
							<< "layout (binding = 1, " << dstFormatQualifierStr << ") writeonly uniform " << dstImageTypeStr << " u_imageOut;\n"
							<< "\n"
							<< "void main (void)\n"
							<< "{\n"
							<< inDefinitions[imageTypeIndex]
							<< " imageStore(u_imageOut, out_pos, texture(u_imageIn, in_pos));\n"
							<< "}\n";

						programCollection.glslSources.add("frag") << glu::FragmentSource(src.str());

						break;
					}

					default:
						DE_ASSERT(false);
				}
			}

			// Verification fragment shader
			{
				// Decompresses both the result (u_imageIn0) and the reference (u_imageIn1)
				// into storage images for host-side comparison.
				std::ostringstream src;

				const std::string samplerType			= getGlslSamplerType(mapVkFormat(m_parameters.formatForVerify), mapImageViewType(imageTypeForFS));
				const std::string imageTypeStr			= getShaderImageType(mapVkFormat(m_parameters.formatForVerify), imageTypeForFS);
				const std::string formatQualifierStr	= getShaderImageFormatQualifier(mapVkFormat(m_parameters.formatForVerify));

				const char* pos0Definitions[3] =
				{
					// IMAGE_TYPE_1D
					" const highp int out_pos = int(gl_FragCoord.x);\n"
					" const highp float pixels_resolution0 = textureSize(u_imageIn0, 0);\n"
					" const highp float in_pos0 = gl_FragCoord.x / pixels_resolution0;\n",
					// IMAGE_TYPE_2D
					" const ivec2 out_pos = ivec2(gl_FragCoord.xy);\n"
					" const vec2 pixels_resolution0 = vec2(textureSize(u_imageIn0, 0));\n"
					" const vec2 in_pos0 = vec2(gl_FragCoord.xy) / vec2(pixels_resolution0);\n",
					// IMAGE_TYPE_3D
					" const ivec3 out_pos = ivec3(ivec2(gl_FragCoord.xy), 0);\n"
					" const vec3 pixels_resolution0 = vec3(textureSize(u_imageIn0, 0));\n"
					" const vec3 in_pos0 = vec3(gl_FragCoord.xy, 0) / vec3(pixels_resolution0.xy, 1.0);\n",
				};
				const char* pos1Definitions[3] =
				{
					// IMAGE_TYPE_1D
					" const highp float pixels_resolution1 = textureSize(u_imageIn1, 0);\n"
					" const highp float in_pos1 = gl_FragCoord.x / pixels_resolution1;\n",
					// IMAGE_TYPE_2D
					" const vec2 pixels_resolution1 = vec2(textureSize(u_imageIn1, 0));\n"
					" const vec2 in_pos1 = vec2(gl_FragCoord.xy) / vec2(pixels_resolution1);\n",
					// IMAGE_TYPE_3D
					" const vec3 pixels_resolution1 = vec3(textureSize(u_imageIn1, 0));\n"
					" const vec3 in_pos1 = vec3(gl_FragCoord.xy, 0) / vec3(pixels_resolution1.xy, 1.0);\n",
				};

				src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_450) << "\n\n"
					<< "layout (binding = 0) uniform " << getGlslSamplerType(mapVkFormat(m_parameters.formatForVerify), mapImageViewType(m_parameters.imageType)) << " u_imageIn0;\n"
					<< "layout (binding = 1) uniform " << getGlslSamplerType(mapVkFormat(m_parameters.formatForVerify), mapImageViewType(m_parameters.imageType)) << " u_imageIn1;\n"
					<< "layout (binding = 2, " << getShaderImageFormatQualifier(mapVkFormat(m_parameters.formatForVerify)) << ") writeonly uniform " << getShaderImageType(mapVkFormat(m_parameters.formatForVerify), m_parameters.imageType) << " u_imageOut0;\n"
					<< "layout (binding = 3, " << getShaderImageFormatQualifier(mapVkFormat(m_parameters.formatForVerify)) << ") writeonly uniform " << getShaderImageType(mapVkFormat(m_parameters.formatForVerify), m_parameters.imageType) << " u_imageOut1;\n"
					<< "\n"
					<< "void main (void)\n"
					<< "{\n"
					<< pos0Definitions[imageTypeIndex]
					<< " imageStore(u_imageOut0, out_pos, texture(u_imageIn0, in_pos0));\n"
					<< "\n"
					<< pos1Definitions[imageTypeIndex]
					<< " imageStore(u_imageOut1, out_pos, texture(u_imageIn1, in_pos1));\n"
					<< "}\n";

				programCollection.glslSources.add("frag_verify") << glu::FragmentSource(src.str());
			}

			break;
		}

		default:
			DE_ASSERT(false);
	}
}
2878
checkSupport(Context & context) const2879 void TexelViewCompatibleCase::checkSupport (Context& context) const
2880 {
2881 const VkPhysicalDevice physicalDevice = context.getPhysicalDevice();
2882 const InstanceInterface& vk = context.getInstanceInterface();
2883
2884 context.requireDeviceFunctionality("VK_KHR_maintenance2");
2885
2886 {
2887 VkImageFormatProperties imageFormatProperties;
2888
2889 if (vk.getPhysicalDeviceImageFormatProperties(physicalDevice, m_parameters.formatUncompressed,
2890 mapImageType(m_parameters.imageType), VK_IMAGE_TILING_OPTIMAL,
2891 m_parameters.uncompressedImageUsage, 0u, &imageFormatProperties) == VK_ERROR_FORMAT_NOT_SUPPORTED)
2892 TCU_THROW(NotSupportedError, "Operation not supported with this image format");
2893
2894 if (VK_ERROR_FORMAT_NOT_SUPPORTED == vk.getPhysicalDeviceImageFormatProperties(physicalDevice, m_parameters.formatCompressed,
2895 mapImageType(m_parameters.imageType), VK_IMAGE_TILING_OPTIMAL,
2896 m_parameters.compressedImageUsage,
2897 VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT | VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT | VK_IMAGE_CREATE_EXTENDED_USAGE_BIT,
2898 &imageFormatProperties))
2899 TCU_THROW(NotSupportedError, "Operation not supported with this image format");
2900 }
2901
2902 {
2903 const VkPhysicalDeviceFeatures physicalDeviceFeatures = getPhysicalDeviceFeatures(vk, physicalDevice);
2904
2905 if (deInRange32(m_parameters.formatCompressed, VK_FORMAT_BC1_RGB_UNORM_BLOCK, VK_FORMAT_BC7_SRGB_BLOCK) &&
2906 !physicalDeviceFeatures.textureCompressionBC)
2907 TCU_THROW(NotSupportedError, "textureCompressionBC not supported");
2908
2909 if (deInRange32(m_parameters.formatCompressed, VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK, VK_FORMAT_EAC_R11G11_SNORM_BLOCK) &&
2910 !physicalDeviceFeatures.textureCompressionETC2)
2911 TCU_THROW(NotSupportedError, "textureCompressionETC2 not supported");
2912
2913 if (m_parameters.formatIsASTC &&
2914 !physicalDeviceFeatures.textureCompressionASTC_LDR)
2915 TCU_THROW(NotSupportedError, "textureCompressionASTC_LDR not supported");
2916
2917 if (m_parameters.uncompressedImageUsage & VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT)
2918 {
2919 const VkFormatProperties p = getPhysicalDeviceFormatProperties(vk, physicalDevice, m_parameters.formatUncompressed);
2920 if ((p.optimalTilingFeatures & VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT) == 0)
2921 TCU_THROW(NotSupportedError, "Storage view format not supported");
2922 }
2923 }
2924 }
2925
createInstance(Context & context) const2926 TestInstance* TexelViewCompatibleCase::createInstance (Context& context) const
2927 {
2928 if (!m_parameters.useMipmaps)
2929 DE_ASSERT(getNumLayers(m_parameters.imageType, m_parameters.size) == 1u);
2930
2931 DE_ASSERT(getLayerSize(m_parameters.imageType, m_parameters.size).x() > 0u);
2932 DE_ASSERT(getLayerSize(m_parameters.imageType, m_parameters.size).y() > 0u);
2933
2934 switch (m_parameters.shader)
2935 {
2936 case SHADER_TYPE_COMPUTE:
2937 {
2938 switch (m_parameters.operation)
2939 {
2940 case OPERATION_IMAGE_LOAD:
2941 case OPERATION_TEXEL_FETCH:
2942 case OPERATION_TEXTURE:
2943 return new BasicComputeTestInstance(context, m_parameters);
2944 case OPERATION_IMAGE_STORE:
2945 return new ImageStoreComputeTestInstance(context, m_parameters);
2946 default:
2947 TCU_THROW(InternalError, "Impossible");
2948 }
2949 }
2950
2951 case SHADER_TYPE_FRAGMENT:
2952 {
2953 switch (m_parameters.operation)
2954 {
2955 case OPERATION_ATTACHMENT_READ:
2956 case OPERATION_ATTACHMENT_WRITE:
2957 return new GraphicsAttachmentsTestInstance(context, m_parameters);
2958
2959 case OPERATION_TEXTURE_READ:
2960 case OPERATION_TEXTURE_WRITE:
2961 return new GraphicsTextureTestInstance(context, m_parameters);
2962
2963 default:
2964 TCU_THROW(InternalError, "Impossible");
2965 }
2966 }
2967
2968 default:
2969 TCU_THROW(InternalError, "Impossible");
2970 }
2971 }
2972
2973 } // anonymous ns
2974
getUnniceResolution(const VkFormat format,const deUint32 layers)2975 static tcu::UVec3 getUnniceResolution (const VkFormat format, const deUint32 layers)
2976 {
2977 const deUint32 unniceMipmapTextureSize[] = { 1, 1, 1, 8, 22, 48, 117, 275, 604, 208, 611, 274, 1211 };
2978 const deUint32 baseTextureWidth = unniceMipmapTextureSize[getBlockWidth(format)];
2979 const deUint32 baseTextureHeight = unniceMipmapTextureSize[getBlockHeight(format)];
2980 const deUint32 baseTextureWidthLevels = deLog2Floor32(baseTextureWidth);
2981 const deUint32 baseTextureHeightLevels = deLog2Floor32(baseTextureHeight);
2982 const deUint32 widthMultiplier = (baseTextureHeightLevels > baseTextureWidthLevels) ? 1u << (baseTextureHeightLevels - baseTextureWidthLevels) : 1u;
2983 const deUint32 heightMultiplier = (baseTextureWidthLevels > baseTextureHeightLevels) ? 1u << (baseTextureWidthLevels - baseTextureHeightLevels) : 1u;
2984 const deUint32 width = baseTextureWidth * widthMultiplier;
2985 const deUint32 height = baseTextureHeight * heightMultiplier;
2986
2987 // Number of levels should be same on both axises
2988 DE_ASSERT(deLog2Floor32(width) == deLog2Floor32(height));
2989
2990 return tcu::UVec3(width, height, layers);
2991 }
2992
createImageCompressionTranscodingTests(tcu::TestContext & testCtx)2993 tcu::TestCaseGroup* createImageCompressionTranscodingTests (tcu::TestContext& testCtx)
2994 {
2995 struct FormatsArray
2996 {
2997 const VkFormat* formats;
2998 deUint32 count;
2999 };
3000
3001 const bool mipmapness[] =
3002 {
3003 false,
3004 true,
3005 };
3006
3007 const std::string pipelineName[SHADER_TYPE_LAST] =
3008 {
3009 "compute",
3010 "graphic",
3011 };
3012
3013 const std::string mipmanpnessName[DE_LENGTH_OF_ARRAY(mipmapness)] =
3014 {
3015 "basic",
3016 "extended",
3017 };
3018
3019 const std::string operationName[OPERATION_LAST] =
3020 {
3021 "image_load",
3022 "texel_fetch",
3023 "texture",
3024 "image_store",
3025 "attachment_read",
3026 "attachment_write",
3027 "texture_read",
3028 "texture_write",
3029 };
3030
3031 struct ImageTypeName
3032 {
3033 ImageType type;
3034 std::string name;
3035 };
// Image dimensionalities exercised by the texel_view_compatible tests.
ImageTypeName imageTypes[] =
{
	{ IMAGE_TYPE_1D, "1d_image" },
	{ IMAGE_TYPE_2D, "2d_image" },
	{ IMAGE_TYPE_3D, "3d_image" },
};

// Usage common to every test image: all images are copied to/from buffers
// for upload and verification.
const VkImageUsageFlags baseImageUsageFlagSet = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;

// Usage flags for the compressed image, indexed by Operation
// (the entry order must match the Operation enum / operationName table).
const VkImageUsageFlags compressedImageUsageFlags[OPERATION_LAST] =
{
	baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_STORAGE_BIT),													// "image_load"
	baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_STORAGE_BIT),						// "texel_fetch"
	baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_STORAGE_BIT),						// "texture"
	baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_STORAGE_BIT),						// "image_store"
	baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT | VK_IMAGE_USAGE_SAMPLED_BIT),			// "attachment_read"
	baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT),			// "attachment_write"
	baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_SAMPLED_BIT),													// "texture_read"
	baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_STORAGE_BIT),						// "texture_write"
};

// Usage flags for the uncompressed view of the compressed image, indexed by
// Operation. Identical to compressedImageUsageFlags except attachment_write,
// where the view is additionally used as a color attachment.
const VkImageUsageFlags compressedImageViewUsageFlags[OPERATION_LAST] =
{
	compressedImageUsageFlags[0],											//"image_load"
	compressedImageUsageFlags[1],											//"texel_fetch"
	compressedImageUsageFlags[2],											//"texture"
	compressedImageUsageFlags[3],											//"image_store"
	compressedImageUsageFlags[4],											//"attachment_read"
	compressedImageUsageFlags[5] | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,		//"attachment_write"
	compressedImageUsageFlags[6],											//"texture_read"
	compressedImageUsageFlags[7],											//"texture_write"
};

// Usage flags for the plain (non-view) uncompressed reference image, indexed
// by Operation.
const VkImageUsageFlags uncompressedImageUsageFlags[OPERATION_LAST] =
{
	baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_STORAGE_BIT),													//"image_load"
	baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_STORAGE_BIT),						//"texel_fetch"
	baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_STORAGE_BIT),						//"texture"
	baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_STORAGE_BIT),						//"image_store"
	baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_SAMPLED_BIT),			//"attachment_read"
	baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT),											//"attachment_write"
	baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_STORAGE_BIT | VK_IMAGE_USAGE_SAMPLED_BIT),						//"texture_read"
	baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_SAMPLED_BIT),													//"texture_write"
};

// Compressed formats whose block size is 64 bits; paired by index with
// uncompressedFormats64bit below.
const VkFormat compressedFormats64bit[] =
{
	VK_FORMAT_BC1_RGB_UNORM_BLOCK,
	VK_FORMAT_BC1_RGB_SRGB_BLOCK,
	VK_FORMAT_BC1_RGBA_UNORM_BLOCK,
	VK_FORMAT_BC1_RGBA_SRGB_BLOCK,
	VK_FORMAT_BC4_UNORM_BLOCK,
	VK_FORMAT_BC4_SNORM_BLOCK,
	VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK,
	VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK,
	VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK,
	VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK,
	VK_FORMAT_EAC_R11_UNORM_BLOCK,
	VK_FORMAT_EAC_R11_SNORM_BLOCK,
};

// Compressed formats whose block size is 128 bits; paired by index with
// uncompressedFormats128bit below.
const VkFormat compressedFormats128bit[] =
{
	VK_FORMAT_BC2_UNORM_BLOCK,
	VK_FORMAT_BC2_SRGB_BLOCK,
	VK_FORMAT_BC3_UNORM_BLOCK,
	VK_FORMAT_BC3_SRGB_BLOCK,
	VK_FORMAT_BC5_UNORM_BLOCK,
	VK_FORMAT_BC5_SNORM_BLOCK,
	VK_FORMAT_BC6H_UFLOAT_BLOCK,
	VK_FORMAT_BC6H_SFLOAT_BLOCK,
	VK_FORMAT_BC7_UNORM_BLOCK,
	VK_FORMAT_BC7_SRGB_BLOCK,
	VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK,
	VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK,
	VK_FORMAT_EAC_R11G11_UNORM_BLOCK,
	VK_FORMAT_EAC_R11G11_SNORM_BLOCK,
	VK_FORMAT_ASTC_4x4_UNORM_BLOCK,
	VK_FORMAT_ASTC_4x4_SRGB_BLOCK,
	VK_FORMAT_ASTC_5x4_UNORM_BLOCK,
	VK_FORMAT_ASTC_5x4_SRGB_BLOCK,
	VK_FORMAT_ASTC_5x5_UNORM_BLOCK,
	VK_FORMAT_ASTC_5x5_SRGB_BLOCK,
	VK_FORMAT_ASTC_6x5_UNORM_BLOCK,
	VK_FORMAT_ASTC_6x5_SRGB_BLOCK,
	VK_FORMAT_ASTC_6x6_UNORM_BLOCK,
	VK_FORMAT_ASTC_6x6_SRGB_BLOCK,
	VK_FORMAT_ASTC_8x5_UNORM_BLOCK,
	VK_FORMAT_ASTC_8x5_SRGB_BLOCK,
	VK_FORMAT_ASTC_8x6_UNORM_BLOCK,
	VK_FORMAT_ASTC_8x6_SRGB_BLOCK,
	VK_FORMAT_ASTC_8x8_UNORM_BLOCK,
	VK_FORMAT_ASTC_8x8_SRGB_BLOCK,
	VK_FORMAT_ASTC_10x5_UNORM_BLOCK,
	VK_FORMAT_ASTC_10x5_SRGB_BLOCK,
	VK_FORMAT_ASTC_10x6_UNORM_BLOCK,
	VK_FORMAT_ASTC_10x6_SRGB_BLOCK,
	VK_FORMAT_ASTC_10x8_UNORM_BLOCK,
	VK_FORMAT_ASTC_10x8_SRGB_BLOCK,
	VK_FORMAT_ASTC_10x10_UNORM_BLOCK,
	VK_FORMAT_ASTC_10x10_SRGB_BLOCK,
	VK_FORMAT_ASTC_12x10_UNORM_BLOCK,
	VK_FORMAT_ASTC_12x10_SRGB_BLOCK,
	VK_FORMAT_ASTC_12x12_UNORM_BLOCK,
	VK_FORMAT_ASTC_12x12_SRGB_BLOCK,
};

// Uncompressed formats with a 64-bit texel, view-compatible with the
// 64-bit-block compressed formats above.
const VkFormat uncompressedFormats64bit[] =
{
	VK_FORMAT_R16G16B16A16_UNORM,
	VK_FORMAT_R16G16B16A16_SNORM,
	VK_FORMAT_R16G16B16A16_USCALED,
	VK_FORMAT_R16G16B16A16_SSCALED,
	VK_FORMAT_R16G16B16A16_UINT,
	VK_FORMAT_R16G16B16A16_SINT,
	//VK_FORMAT_R16G16B16A16_SFLOAT,	removed as float views can't preserve NAN/INF/Denorm values
	VK_FORMAT_R32G32_UINT,
	VK_FORMAT_R32G32_SINT,
	//VK_FORMAT_R32G32_SFLOAT,			removed as float views can't preserve NAN/INF/Denorm values
	//VK_FORMAT_R64_UINT,				removed from the test as it cannot be used
	//VK_FORMAT_R64_SINT,				removed from the test as it cannot be used
	//VK_FORMAT_R64_SFLOAT,				removed from the test as it cannot be used
};

// Uncompressed formats with a 128-bit texel, view-compatible with the
// 128-bit-block compressed formats above.
const VkFormat uncompressedFormats128bit[] =
{
	VK_FORMAT_R32G32B32A32_UINT,
	VK_FORMAT_R32G32B32A32_SINT,
	//VK_FORMAT_R32G32B32A32_SFLOAT,	removed as float views can't preserve NAN/INF/Denorm values
	//VK_FORMAT_R64G64_UINT,			removed from the test as it cannot be used
	//VK_FORMAT_R64G64_SINT,			removed from the test as it cannot be used
	//VK_FORMAT_R64G64_SFLOAT,			removed from the test as it cannot be used
};

// Compressed format sets grouped by block bitness (index 0: 64 bit,
// index 1: 128 bit). Each set is paired by index with the matching entry of
// formatsUncompressedSets in the generation loop below.
const FormatsArray formatsCompressedSets[] =
{
	{
		compressedFormats64bit,
		DE_LENGTH_OF_ARRAY(compressedFormats64bit)
	},
	{
		compressedFormats128bit,
		DE_LENGTH_OF_ARRAY(compressedFormats128bit)
	},
};

// Uncompressed formats - floating point formats should not be used in these
// tests as they cannot be relied upon to preserve all possible values in the
// underlying texture data. Refer to the note under the 'VkImageViewCreateInfo'
// section of the specification.
const FormatsArray formatsUncompressedSets[] =
{
	{
		uncompressedFormats64bit,
		DE_LENGTH_OF_ARRAY(uncompressedFormats64bit)
	},
	{
		uncompressedFormats128bit,
		DE_LENGTH_OF_ARRAY(uncompressedFormats128bit)
	},
};

// The two set arrays are iterated in lockstep by formatBitnessGroup below,
// so they must have the same number of entries.
DE_ASSERT(DE_LENGTH_OF_ARRAY(formatsCompressedSets) == DE_LENGTH_OF_ARRAY(formatsUncompressedSets));
3198
MovePtr<tcu::TestCaseGroup> texelViewCompatibleTests (new tcu::TestCaseGroup(testCtx, "texel_view_compatible", "Texel view compatible cases"));

// Test-tree layout:
//   texel_view_compatible / <pipeline> / <mipmapness> / <image type> /
//   <operation> / <compressed format> / <uncompressed format>
for (int shaderType = SHADER_TYPE_COMPUTE; shaderType < SHADER_TYPE_LAST; ++shaderType)
{
	MovePtr<tcu::TestCaseGroup> pipelineTypeGroup (new tcu::TestCaseGroup(testCtx, pipelineName[shaderType].c_str(), ""));

	for (int mipmapTestNdx = 0; mipmapTestNdx < DE_LENGTH_OF_ARRAY(mipmapness); mipmapTestNdx++)
	{
		const bool mipmapTest = mipmapness[mipmapTestNdx];

		MovePtr<tcu::TestCaseGroup> mipmapTypeGroup (new tcu::TestCaseGroup(testCtx, mipmanpnessName[mipmapTestNdx].c_str(), ""));

		for (int imageTypeNdx = 0; imageTypeNdx < DE_LENGTH_OF_ARRAY(imageTypes); imageTypeNdx++)
		{
			MovePtr<tcu::TestCaseGroup> imageTypeGroup (new tcu::TestCaseGroup(testCtx, imageTypes[imageTypeNdx].name.c_str(), ""));
			ImageType imageType = imageTypes[imageTypeNdx].type;

			for (int operationNdx = OPERATION_IMAGE_LOAD; operationNdx < OPERATION_LAST; ++operationNdx)
			{
				// Attachment/texture operations require a graphics pipeline (fragment shader).
				if (shaderType != SHADER_TYPE_FRAGMENT && deInRange32(operationNdx, OPERATION_ATTACHMENT_READ, OPERATION_TEXTURE_WRITE))
					continue;

				// Image load/fetch/store operations are only generated for the compute pipeline.
				if (shaderType != SHADER_TYPE_COMPUTE && deInRange32(operationNdx, OPERATION_IMAGE_LOAD, OPERATION_IMAGE_STORE))
					continue;

				// 3D images cannot be used as framebuffer attachments here.
				if (imageType == IMAGE_TYPE_3D &&
					(operationNdx == OPERATION_ATTACHMENT_READ || operationNdx == OPERATION_ATTACHMENT_WRITE))
					continue;

				MovePtr<tcu::TestCaseGroup> imageOperationGroup (new tcu::TestCaseGroup(testCtx, operationName[operationNdx].c_str(), ""));

				deUint32 depth = 1u + 2 * (imageType == IMAGE_TYPE_3D);					// 3 slices for 3D images, otherwise 1
				deUint32 imageCount = 2u + (operationNdx == OPERATION_IMAGE_STORE);		// image_store needs one extra image

				// Iterate through bitness groups (64 bit, 128 bit, etc)
				for (deUint32 formatBitnessGroup = 0; formatBitnessGroup < DE_LENGTH_OF_ARRAY(formatsCompressedSets); ++formatBitnessGroup)
				{
					for (deUint32 formatCompressedNdx = 0; formatCompressedNdx < formatsCompressedSets[formatBitnessGroup].count; ++formatCompressedNdx)
					{
						const VkFormat formatCompressed = formatsCompressedSets[formatBitnessGroup].formats[formatCompressedNdx];
						const std::string compressedFormatGroupName = getFormatShortString(formatCompressed);
						MovePtr<tcu::TestCaseGroup> compressedFormatGroup (new tcu::TestCaseGroup(testCtx, compressedFormatGroupName.c_str(), ""));

						// Pair the compressed format with every uncompressed format of the same bitness.
						for (deUint32 formatUncompressedNdx = 0; formatUncompressedNdx < formatsUncompressedSets[formatBitnessGroup].count; ++formatUncompressedNdx)
						{
							const VkFormat formatUncompressed = formatsUncompressedSets[formatBitnessGroup].formats[formatUncompressedNdx];
							const std::string uncompressedFormatGroupName = getFormatShortString(formatUncompressed);

							const TestParameters parameters =
							{
								static_cast<Operation>(operationNdx),
								static_cast<ShaderType>(shaderType),
								// Mipmapped tests use a deliberately awkward resolution; otherwise 64x64(xdepth).
								mipmapTest ? getUnniceResolution(formatCompressed, 1u) : UVec3(64u, 64u, depth),
								1u + 2u * mipmapTest * (imageType != IMAGE_TYPE_3D),	// 1 or 3 if mipmapTest is true but image is not 3d
								imageType,
								formatCompressed,
								formatUncompressed,
								imageCount,
								compressedImageUsageFlags[operationNdx],
								compressedImageViewUsageFlags[operationNdx],
								uncompressedImageUsageFlags[operationNdx],
								mipmapTest,
								VK_FORMAT_R8G8B8A8_UNORM,
								FormatIsASTC(formatCompressed)
							};

							compressedFormatGroup->addChild(new TexelViewCompatibleCase(testCtx, uncompressedFormatGroupName, "", parameters));
						}

						imageOperationGroup->addChild(compressedFormatGroup.release());
					}
				}

				imageTypeGroup->addChild(imageOperationGroup.release());
			}

			mipmapTypeGroup->addChild(imageTypeGroup.release());
		}

		pipelineTypeGroup->addChild(mipmapTypeGroup.release());
	}

	texelViewCompatibleTests->addChild(pipelineTypeGroup.release());
}

return texelViewCompatibleTests.release();
3285 }
3286
3287 } // image
3288 } // vkt
3289