1 /*------------------------------------------------------------------------
2 * Vulkan Conformance Tests
3 * ------------------------
4 *
5 * Copyright (c) 2017 The Khronos Group Inc.
6 *
7 * Licensed under the Apache License, Version 2.0 (the "License");
8 * you may not use this file except in compliance with the License.
9 * You may obtain a copy of the License at
10 *
11 * http://www.apache.org/licenses/LICENSE-2.0
12 *
13 * Unless required by applicable law or agreed to in writing, software
14 * distributed under the License is distributed on an "AS IS" BASIS,
15 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 * See the License for the specific language governing permissions and
17 * limitations under the License.
18 *
19 *//*!
20 * \file vktImageCompressionTranscodingSupport.cpp
21 * \brief Compression transcoding support
22 *//*--------------------------------------------------------------------*/
23
24 #include "vktImageCompressionTranscodingSupport.hpp"
25 #include "vktImageLoadStoreUtil.hpp"
26
27 #include "deUniquePtr.hpp"
28 #include "deStringUtil.hpp"
29 #include "deSharedPtr.hpp"
30 #include "deRandom.hpp"
31
32 #include "vktTestCaseUtil.hpp"
33 #include "vkPrograms.hpp"
34 #include "vkImageUtil.hpp"
35 #include "vkBarrierUtil.hpp"
36 #include "vktImageTestsUtil.hpp"
37 #include "vkBuilderUtil.hpp"
38 #include "vkRef.hpp"
39 #include "vkRefUtil.hpp"
40 #include "vkTypeUtil.hpp"
41 #include "vkQueryUtil.hpp"
42 #include "vkCmdUtil.hpp"
43 #include "vkObjUtil.hpp"
44
45 #include "tcuTextureUtil.hpp"
46 #include "tcuTexture.hpp"
47 #include "tcuCompressedTexture.hpp"
48 #include "tcuVectorType.hpp"
49 #include "tcuResource.hpp"
50 #include "tcuImageIO.hpp"
51 #include "tcuImageCompare.hpp"
52 #include "tcuTestLog.hpp"
53 #include "tcuRGBA.hpp"
54 #include "tcuSurface.hpp"
55
56 #include <vector>
57
58 using namespace vk;
59 namespace vkt
60 {
61 namespace image
62 {
63 namespace
64 {
65 using std::string;
66 using std::vector;
67 using tcu::TestContext;
68 using tcu::TestStatus;
69 using tcu::UVec3;
70 using tcu::IVec3;
71 using tcu::CompressedTexFormat;
72 using tcu::CompressedTexture;
73 using tcu::Resource;
74 using tcu::Archive;
75 using tcu::ConstPixelBufferAccess;
76 using de::MovePtr;
77 using de::SharedPtr;
78 using de::Random;
79
80 typedef SharedPtr<MovePtr<Image> > ImageSp;
81 typedef SharedPtr<Move<VkImageView> > ImageViewSp;
82 typedef SharedPtr<Move<VkDescriptorSet> > SharedVkDescriptorSet;
83
// Shader stage in which the transcoding operation under test is performed.
enum ShaderType
{
	SHADER_TYPE_COMPUTE,	// Operation runs in a compute shader
	SHADER_TYPE_FRAGMENT,	// Operation runs in a fragment shader
	SHADER_TYPE_LAST
};
90
// Shader operation exercised on the (un)compressed image data.
enum Operation
{
	OPERATION_IMAGE_LOAD,		// Read via imageLoad()
	OPERATION_TEXEL_FETCH,		// Read via texelFetch()
	OPERATION_TEXTURE,			// Read via texture()
	OPERATION_IMAGE_STORE,		// Write via imageStore()
	OPERATION_ATTACHMENT_READ,	// Read as a framebuffer attachment
	OPERATION_ATTACHMENT_WRITE,	// Write as a framebuffer attachment
	OPERATION_TEXTURE_READ,		// Sample through a texture binding
	OPERATION_TEXTURE_WRITE,	// Write path paired with texture sampling
	OPERATION_LAST
};
103
// Static configuration describing a single transcoding test case.
struct TestParameters
{
	Operation			operation;					// What the shader does with the image(s)
	ShaderType			shader;						// Shader stage performing the operation
	UVec3				size;						// Base image size in texels
	deUint32			layers;						// Number of array layers
	ImageType			imageType;					// 1D/2D/3D/array/cube variant
	VkFormat			formatCompressed;			// Block-compressed format of the source image
	VkFormat			formatUncompressed;			// Uncompressed format used to view/transcode the data
	deUint32			imagesCount;				// Number of images participating in the operation
	VkImageUsageFlags	compressedImageUsage;		// Usage flags for the compressed image
	VkImageUsageFlags	compressedImageViewUsage;	// Usage flags for views created on the compressed image
	VkImageUsageFlags	uncompressedImageUsage;		// Usage flags for the uncompressed image(s)
	bool				useMipmaps;					// Whether a full mip chain is exercised
	VkFormat			formatForVerify;			// Format used when rendering results for verification
	bool				formatIsASTC;				// True when formatCompressed is an ASTC format
};
121
122 template<typename T>
makeVkSharedPtr(Move<T> move)123 inline SharedPtr<Move<T> > makeVkSharedPtr (Move<T> move)
124 {
125 return SharedPtr<Move<T> >(new Move<T>(move));
126 }
127
128 template<typename T>
makeVkSharedPtr(MovePtr<T> movePtr)129 inline SharedPtr<MovePtr<T> > makeVkSharedPtr (MovePtr<T> movePtr)
130 {
131 return SharedPtr<MovePtr<T> >(new MovePtr<T>(movePtr));
132 }
133
// Convenience constants for images with a single mip level / single array layer.
const deUint32 SINGLE_LEVEL = 1u;
const deUint32 SINGLE_LAYER = 1u;
136
// How BinaryCompare() treats mismatching pixels.
enum BinaryCompareMode
{
	COMPARE_MODE_NORMAL,							// Any byte difference is a failure
	COMPARE_MODE_ALLOW_ASTC_ERROR_COLOUR_WARNING,	// LDR-vs-HDR ASTC error colour differences become a quality warning
};
142
// Outcome of BinaryCompare().
enum BinaryCompareResult
{
	COMPARE_RESULT_OK,						// Buffers matched
	COMPARE_RESULT_ASTC_QUALITY_WARNING,	// Matched except for ASTC error colour LDR/HDR differences
	COMPARE_RESULT_FAILED,					// Genuine mismatch
};
149
// ASTC decode error colours as packed 32-bit RGBA8 values (little-endian
// byte order: R,G,B,A). LDR profile decoders emit opaque magenta on error;
// HDR-capable decoders emit all-zero.
const deUint32 ASTC_LDR_ERROR_COLOUR = 0xFFFF00FF;
const deUint32 ASTC_HDR_ERROR_COLOUR = 0x00000000;
152
BinaryCompare(const void * reference,const void * result,VkDeviceSize sizeInBytes,VkFormat formatForVerify,BinaryCompareMode mode)153 static BinaryCompareResult BinaryCompare(const void *reference,
154 const void *result,
155 VkDeviceSize sizeInBytes,
156 VkFormat formatForVerify,
157 BinaryCompareMode mode)
158 {
159 DE_UNREF(formatForVerify);
160
161 // Compare quickly using deMemCmp
162 if (deMemCmp(reference, result, (size_t)sizeInBytes) == 0)
163 {
164 return COMPARE_RESULT_OK;
165 }
166 // If deMemCmp indicated a mismatch, we can re-check with a manual comparison of
167 // the ref and res images that allows for ASTC error colour mismatches if the ASTC
168 // comparison mode was selected. This slows down the affected ASTC tests if you
169 // didn't pass in the first comparison, but means in the general case the
170 // comparion is still fast.
171 else if (mode == COMPARE_MODE_ALLOW_ASTC_ERROR_COLOUR_WARNING)
172 {
173 bool bWarn = false;
174 bool bFail = false;
175 const deUint32 *pui32RefVal = (deUint32*)reference;
176 const deUint32 *pui32ResVal = (deUint32*)result;
177
178 DE_ASSERT(formatForVerify == VK_FORMAT_R8G8B8A8_UNORM);
179 size_t numPixels = (size_t)(sizeInBytes / 4) /* bytes */;
180 for (size_t i = 0; i < numPixels; i++)
181 {
182 const deUint32 ref = *pui32RefVal++;
183 const deUint32 res = *pui32ResVal++;
184
185 if (ref != res)
186 {
187 // QualityWarning !1231: If the astc pixel was the ASTC LDR error colour
188 // and the result image has the HDR error colour (or vice versa as the test
189 // cases below sometimes reverse the operands) then issue a quality warning
190 // instead of a failure.
191 if ((ref == ASTC_LDR_ERROR_COLOUR && res == ASTC_HDR_ERROR_COLOUR) ||
192 (ref == ASTC_HDR_ERROR_COLOUR && res == ASTC_LDR_ERROR_COLOUR))
193 {
194 bWarn = true;
195 }
196 else
197 {
198 bFail = true;
199 }
200 }
201 }
202
203 if (!bFail)
204 {
205 return (bWarn)
206 ? (COMPARE_RESULT_ASTC_QUALITY_WARNING)
207 : (COMPARE_RESULT_OK);
208 }
209 }
210
211 return COMPARE_RESULT_FAILED;
212 }
213
FormatIsASTC(VkFormat format)214 static bool FormatIsASTC(VkFormat format)
215 {
216 return deInRange32(format, VK_FORMAT_ASTC_4x4_UNORM_BLOCK, VK_FORMAT_ASTC_12x12_SRGB_BLOCK);
217 }
218
TestStatusASTCQualityWarning()219 static TestStatus TestStatusASTCQualityWarning()
220 {
221 return TestStatus(QP_TEST_RESULT_QUALITY_WARNING, "ASTC HDR error colour output instead of LDR error colour");
222 }
223
// Common base class for all transcoding test instances. Owns the test
// parameters and geometry derived from them (compressed block size, usable
// mip level count, per-layer size) and provides helpers to generate
// deterministic reference data.
class BasicTranscodingTestInstance : public TestInstance
{
public:
							BasicTranscodingTestInstance	(Context&				context,
															 const TestParameters&	parameters);
	virtual TestStatus		iterate							(void) = 0;
protected:
	// Fills 'toFill' with deterministic data for the given format: a fixed
	// edge-case pattern (layer 0 / level 0 only) followed by seeded
	// pseudo-random bytes, with non-round-trippable values scrubbed.
	void					generateData					(deUint8*				toFill,
															 const size_t			size,
															 const VkFormat			format,
															 const deUint32			layer = 0u,
															 const deUint32			level = 0u);
	deUint32				getLevelCount					();	// Usable mip levels (1 without mipmaps)
	deUint32				getLayerCount					();	// Array layer count from the parameters
	UVec3					getLayerDims					();	// Size of a single layer in texels
	vector<UVec3>			getMipLevelSizes				(UVec3 baseSize);
	vector<UVec3>			getCompressedMipLevelSizes		(const VkFormat compressedFormat,
															 const vector<UVec3>& uncompressedSizes);

	const TestParameters	m_parameters;
	const deUint32			m_blockWidth;	// Compressed block width in texels
	const deUint32			m_blockHeight;	// Compressed block height in texels
	const deUint32			m_levelCount;	// See findMipMapLevelCount()
	const UVec3				m_layerSize;

	// Detected error colour mismatch while verifying image. Output
	// the ASTC quality warning instead of a pass
	bool					m_bASTCErrorColourMismatch;

private:
	deUint32				findMipMapLevelCount			();
};
256
findMipMapLevelCount()257 deUint32 BasicTranscodingTestInstance::findMipMapLevelCount ()
258 {
259 deUint32 levelCount = 1;
260
261 // We cannot use mipmap levels which have resolution below block size.
262 // Reduce number of mipmap levels
263 if (m_parameters.useMipmaps)
264 {
265 deUint32 w = m_parameters.size.x();
266 deUint32 h = m_parameters.size.y();
267
268 DE_ASSERT(m_blockWidth > 0u && m_blockHeight > 0u);
269
270 while (w > m_blockWidth && h > m_blockHeight)
271 {
272 w >>= 1;
273 h >>= 1;
274
275 if (w > m_blockWidth && h > m_blockHeight)
276 levelCount++;
277 }
278
279 DE_ASSERT((m_parameters.size.x() >> (levelCount - 1u)) >= m_blockWidth);
280 DE_ASSERT((m_parameters.size.y() >> (levelCount - 1u)) >= m_blockHeight);
281 }
282
283 return levelCount;
284 }
285
BasicTranscodingTestInstance(Context & context,const TestParameters & parameters)286 BasicTranscodingTestInstance::BasicTranscodingTestInstance (Context& context, const TestParameters& parameters)
287 : TestInstance (context)
288 , m_parameters (parameters)
289 , m_blockWidth (getBlockWidth(m_parameters.formatCompressed))
290 , m_blockHeight (getBlockHeight(m_parameters.formatCompressed))
291 , m_levelCount (findMipMapLevelCount())
292 , m_layerSize (getLayerSize(m_parameters.imageType, m_parameters.size))
293 , m_bASTCErrorColourMismatch(false)
294 {
295 DE_ASSERT(deLog2Floor32(m_parameters.size.x()) == deLog2Floor32(m_parameters.size.y()));
296 }
297
getLevelCount()298 deUint32 BasicTranscodingTestInstance::getLevelCount()
299 {
300 return m_levelCount;
301 }
302
getLayerCount()303 deUint32 BasicTranscodingTestInstance::getLayerCount()
304 {
305 return m_parameters.layers;
306 }
307
getLayerDims()308 UVec3 BasicTranscodingTestInstance::getLayerDims()
309 {
310 return m_layerSize;
311 }
312
getMipLevelSizes(UVec3 baseSize)313 vector<UVec3> BasicTranscodingTestInstance::getMipLevelSizes (UVec3 baseSize)
314 {
315 vector<UVec3> levelSizes;
316 const deUint32 levelCount = getLevelCount();
317
318 baseSize.z() = 1u;
319
320 levelSizes.push_back(baseSize);
321
322 if (m_parameters.imageType == IMAGE_TYPE_1D)
323 {
324 baseSize.y() = 1u;
325
326 while (levelSizes.size() < levelCount && (baseSize.x() != 1))
327 {
328 baseSize.x() = deMax32(baseSize.x() >> 1, 1);
329 levelSizes.push_back(baseSize);
330 }
331 }
332 else
333 {
334 while (levelSizes.size() < levelCount && (baseSize.x() != 1 || baseSize.y() != 1))
335 {
336 baseSize.x() = deMax32(baseSize.x() >> 1, 1);
337 baseSize.y() = deMax32(baseSize.y() >> 1, 1);
338 levelSizes.push_back(baseSize);
339 }
340 }
341
342 DE_ASSERT(levelSizes.size() == getLevelCount());
343
344 return levelSizes;
345 }
346
getCompressedMipLevelSizes(const VkFormat compressedFormat,const vector<UVec3> & uncompressedSizes)347 vector<UVec3> BasicTranscodingTestInstance::getCompressedMipLevelSizes (const VkFormat compressedFormat, const vector<UVec3>& uncompressedSizes)
348 {
349 vector<UVec3> levelSizes;
350 vector<UVec3>::const_iterator it;
351
352 for (it = uncompressedSizes.begin(); it != uncompressedSizes.end(); it++)
353 levelSizes.push_back(getCompressedImageResolutionInBlocks(compressedFormat, *it));
354
355 return levelSizes;
356 }
357
generateData(deUint8 * toFill,const size_t size,const VkFormat format,const deUint32 layer,const deUint32 level)358 void BasicTranscodingTestInstance::generateData (deUint8* toFill,
359 const size_t size,
360 const VkFormat format,
361 const deUint32 layer,
362 const deUint32 level)
363 {
364 const deUint8 pattern[] =
365 {
366 // 64-bit values
367 0x11, 0x11, 0x11, 0x11, 0x22, 0x22, 0x22, 0x22,
368 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
369 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01,
370 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00,
371 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00,
372 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,
373 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF,
374 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0x00,
375 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0x00, 0x00,
376 0x00, 0x00, 0x00, 0x00, 0xFF, 0x00, 0x00, 0x00,
377 0x7F, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // Positive infinity
378 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // Negative infinity
379 0x7F, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, // Start of a signalling NaN (NANS)
380 0x7F, 0xF7, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, // End of a signalling NaN (NANS)
381 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, // Start of a signalling NaN (NANS)
382 0xFF, 0xF7, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, // End of a signalling NaN (NANS)
383 0x7F, 0xF8, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // Start of a quiet NaN (NANQ)
384 0x7F, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, // End of of a quiet NaN (NANQ)
385 0xFF, 0xF8, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // Start of a quiet NaN (NANQ)
386 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, // End of a quiet NaN (NANQ)
387 // 32-bit values
388 0x7F, 0x80, 0x00, 0x00, // Positive infinity
389 0xFF, 0x80, 0x00, 0x00, // Negative infinity
390 0x7F, 0x80, 0x00, 0x01, // Start of a signalling NaN (NANS)
391 0x7F, 0xBF, 0xFF, 0xFF, // End of a signalling NaN (NANS)
392 0xFF, 0x80, 0x00, 0x01, // Start of a signalling NaN (NANS)
393 0xFF, 0xBF, 0xFF, 0xFF, // End of a signalling NaN (NANS)
394 0x7F, 0xC0, 0x00, 0x00, // Start of a quiet NaN (NANQ)
395 0x7F, 0xFF, 0xFF, 0xFF, // End of of a quiet NaN (NANQ)
396 0xFF, 0xC0, 0x00, 0x00, // Start of a quiet NaN (NANQ)
397 0xFF, 0xFF, 0xFF, 0xFF, // End of a quiet NaN (NANQ)
398 0xAA, 0xAA, 0xAA, 0xAA,
399 0x55, 0x55, 0x55, 0x55,
400 };
401
402 deUint8* start = toFill;
403 size_t sizeToRnd = size;
404
405 // Pattern part
406 if (layer == 0 && level == 0 && size >= 2 * sizeof(pattern))
407 {
408 // Rotated pattern
409 for (size_t i = 0; i < sizeof(pattern); i++)
410 start[sizeof(pattern) - i - 1] = pattern[i];
411
412 start += sizeof(pattern);
413 sizeToRnd -= sizeof(pattern);
414
415 // Direct pattern
416 deMemcpy(start, pattern, sizeof(pattern));
417
418 start += sizeof(pattern);
419 sizeToRnd -= sizeof(pattern);
420 }
421
422 // Random part
423 {
424 DE_ASSERT(sizeToRnd % sizeof(deUint32) == 0);
425
426 deUint32* start32 = reinterpret_cast<deUint32*>(start);
427 size_t sizeToRnd32 = sizeToRnd / sizeof(deUint32);
428 deUint32 seed = (layer << 24) ^ (level << 16) ^ static_cast<deUint32>(format);
429 Random rnd (seed);
430
431 for (size_t i = 0; i < sizeToRnd32; i++)
432 start32[i] = rnd.getUint32();
433 }
434
435 {
436 // Remove certain values that may not be preserved based on the uncompressed view format
437 if (isSnormFormat(m_parameters.formatUncompressed))
438 {
439 for (size_t i = 0; i < size; i += 2)
440 {
441 // SNORM fix: due to write operation in SNORM format
442 // replaces 0x00 0x80 to 0x01 0x80
443 if (toFill[i] == 0x00 && toFill[i+1] == 0x80)
444 toFill[i+1] = 0x81;
445 }
446 }
447 else if (isFloatFormat(m_parameters.formatUncompressed))
448 {
449 tcu::TextureFormat textureFormat = mapVkFormat(m_parameters.formatUncompressed);
450
451 if (textureFormat.type == tcu::TextureFormat::HALF_FLOAT)
452 {
453 for (size_t i = 0; i < size; i += 2)
454 {
455 // HALF_FLOAT fix: remove INF and NaN
456 if ((toFill[i+1] & 0x7C) == 0x7C)
457 toFill[i+1] = 0x00;
458 }
459 }
460 else if (textureFormat.type == tcu::TextureFormat::FLOAT)
461 {
462 for (size_t i = 0; i < size; i += 4)
463 {
464 // HALF_FLOAT fix: remove INF and NaN
465 if ((toFill[i+1] & 0x7C) == 0x7C)
466 toFill[i+1] = 0x00;
467 }
468
469 for (size_t i = 0; i < size; i += 4)
470 {
471 // FLOAT fix: remove INF, NaN, and denorm
472 // Little endian fix
473 if (((toFill[i+3] & 0x7F) == 0x7F && (toFill[i+2] & 0x80) == 0x80) || ((toFill[i+3] & 0x7F) == 0x00 && (toFill[i+2] & 0x80) == 0x00))
474 toFill[i+3] = 0x01;
475 // Big endian fix
476 if (((toFill[i+0] & 0x7F) == 0x7F && (toFill[i+1] & 0x80) == 0x80) || ((toFill[i+0] & 0x7F) == 0x00 && (toFill[i+1] & 0x80) == 0x00))
477 toFill[i+0] = 0x01;
478 }
479 }
480 }
481 }
482 }
483
// Compute-shader variant of the transcoding test. iterate() creates the
// compressed source image plus uncompressed images, uploads generated data,
// runs the "comp" shader once per mip/layer view, and verifies the result.
class BasicComputeTestInstance : public BasicTranscodingTestInstance
{
public:
					BasicComputeTestInstance	(Context&				context,
												 const TestParameters&	parameters);
	TestStatus		iterate						(void);
protected:
	// Book-keeping for one logical image slot: the VkImages, the views
	// created on them, and the VkImageCreateInfos they were created with.
	struct ImageData
	{
		deUint32			getImagesCount		(void)								{ return static_cast<deUint32>(images.size()); }
		deUint32			getImageViewCount	(void)								{ return static_cast<deUint32>(imagesViews.size()); }
		deUint32			getImageInfoCount	(void)								{ return static_cast<deUint32>(imagesInfos.size()); }
		VkImage				getImage			(const deUint32 ndx)				{ return **images[ndx]->get(); }
		VkImageView			getImageView		(const deUint32 ndx)				{ return **imagesViews[ndx]; }
		VkImageCreateInfo	getImageInfo		(const deUint32 ndx)				{ return imagesInfos[ndx]; }
		void				addImage			(MovePtr<Image> image)				{ images.push_back(makeVkSharedPtr(image)); }
		void				addImageView		(Move<VkImageView> imageView)		{ imagesViews.push_back(makeVkSharedPtr(imageView));}
		void				addImageInfo		(const VkImageCreateInfo imageInfo)	{ imagesInfos.push_back(imageInfo); }
		void				resetViews			()									{ imagesViews.clear(); }
	private:
		vector<ImageSp>				images;
		vector<ImageViewSp>			imagesViews;
		vector<VkImageCreateInfo>	imagesInfos;
	};
	// Uploads m_data into the given image set, one copy per mip level.
	void			copyDataToImage				(const VkCommandBuffer&			cmdBuffer,
												 ImageData&						imageData,
												 const vector<UVec3>&			mipMapSizes,
												 const bool						isCompressed);
	// Binds pipeline/descriptors and dispatches the compute shader per view.
	virtual void	executeShader				(const VkCommandBuffer&			cmdBuffer,
												 const VkDescriptorSetLayout&	descriptorSetLayout,
												 const VkDescriptorPool&		descriptorPool,
												 vector<ImageData>&				imageData);
	// Reads back one subresource and compares it against m_data at 'offset'.
	bool			copyResultAndCompare		(const VkCommandBuffer&			cmdBuffer,
												 const VkImage&					uncompressed,
												 const VkDeviceSize				offset,
												 const UVec3&					size);
	void			descriptorSetUpdate			(VkDescriptorSet				descriptorSet,
												 const VkDescriptorImageInfo*	descriptorImageInfos);
	void			createImageInfos			(ImageData&						imageData,
												 const vector<UVec3>&			mipMapSizes,
												 const bool						isCompressed);
	// Decompresses results for verification; may set m_bASTCErrorColourMismatch.
	bool			decompressImage				(const VkCommandBuffer&			cmdBuffer,
												 vector<ImageData>&				imageData,
												 const vector<UVec3>&			mipMapSizes);
	vector<deUint8>	m_data;		// Generated reference data for the full mip chain
};
530
531
BasicComputeTestInstance(Context & context,const TestParameters & parameters)532 BasicComputeTestInstance::BasicComputeTestInstance (Context& context, const TestParameters& parameters)
533 :BasicTranscodingTestInstance (context, parameters)
534 {
535 }
536
iterate(void)537 TestStatus BasicComputeTestInstance::iterate (void)
538 {
539 const DeviceInterface& vk = m_context.getDeviceInterface();
540 const VkDevice device = m_context.getDevice();
541 const deUint32 queueFamilyIndex = m_context.getUniversalQueueFamilyIndex();
542 Allocator& allocator = m_context.getDefaultAllocator();
543 const Unique<VkCommandPool> cmdPool (createCommandPool(vk, device, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT, queueFamilyIndex));
544 const Unique<VkCommandBuffer> cmdBuffer (allocateCommandBuffer(vk, device, *cmdPool, VK_COMMAND_BUFFER_LEVEL_PRIMARY));
545 const UVec3 fullSize (m_parameters.size.x(), m_parameters.size.y(), 1);
546 const vector<UVec3> mipMapSizes = m_parameters.useMipmaps ? getMipLevelSizes (getLayerDims()) : vector<UVec3>(1, fullSize);
547 vector<ImageData> imageData (m_parameters.imagesCount);
548 const deUint32 compressedNdx = 0u;
549 const deUint32 resultImageNdx = m_parameters.imagesCount -1u;
550
551 for (deUint32 imageNdx = 0u; imageNdx < m_parameters.imagesCount; ++imageNdx)
552 {
553 const bool isCompressed = compressedNdx == imageNdx ? true : false;
554 createImageInfos(imageData[imageNdx], mipMapSizes, isCompressed);
555 for (deUint32 infoNdx = 0u; infoNdx < imageData[imageNdx].getImageInfoCount(); ++infoNdx)
556 {
557 imageData[imageNdx].addImage(MovePtr<Image>(new Image(vk, device, allocator, imageData[imageNdx].getImageInfo(infoNdx), MemoryRequirement::Any)));
558 if (isCompressed)
559 {
560 const VkImageViewUsageCreateInfo imageViewUsageKHR =
561 {
562 VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO_KHR, //VkStructureType sType;
563 DE_NULL, //const void* pNext;
564 m_parameters.compressedImageUsage, //VkImageUsageFlags usage;
565 };
566 for (deUint32 mipNdx = 0u; mipNdx < mipMapSizes.size(); ++mipNdx)
567 for (deUint32 layerNdx = 0u; layerNdx < getLayerCount(); ++layerNdx)
568 {
569 imageData[imageNdx].addImageView(makeImageView(vk, device, imageData[imageNdx].getImage(infoNdx),
570 mapImageViewType(m_parameters.imageType), m_parameters.formatUncompressed,
571 makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, mipNdx, 1u, layerNdx, 1u),
572 &imageViewUsageKHR));
573 }
574 }
575 else
576 {
577 imageData[imageNdx].addImageView(makeImageView(vk, device, imageData[imageNdx].getImage(infoNdx),
578 mapImageViewType(m_parameters.imageType), m_parameters.formatUncompressed,
579 makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 1u, 0u, 1u)));
580 }
581 }
582 }
583
584 {
585 size_t size = 0ull;
586 for(deUint32 mipNdx = 0u; mipNdx < mipMapSizes.size(); ++mipNdx)
587 {
588 size += static_cast<size_t>(getCompressedImageSizeInBytes(m_parameters.formatCompressed, mipMapSizes[mipNdx]) * getLayerCount());
589 }
590 m_data.resize(size);
591 generateData (&m_data[0], m_data.size(), m_parameters.formatCompressed);
592 }
593
594 switch(m_parameters.operation)
595 {
596 case OPERATION_IMAGE_LOAD:
597 case OPERATION_TEXEL_FETCH:
598 case OPERATION_TEXTURE:
599 copyDataToImage(*cmdBuffer, imageData[compressedNdx], mipMapSizes, true);
600 break;
601 case OPERATION_IMAGE_STORE:
602 copyDataToImage(*cmdBuffer, imageData[1], mipMapSizes, false);
603 break;
604 default:
605 DE_ASSERT(false);
606 break;
607 }
608
609 {
610 Move<VkDescriptorSetLayout> descriptorSetLayout;
611 Move<VkDescriptorPool> descriptorPool;
612
613 DescriptorSetLayoutBuilder descriptorSetLayoutBuilder;
614 DescriptorPoolBuilder descriptorPoolBuilder;
615 for (deUint32 imageNdx = 0u; imageNdx < m_parameters.imagesCount; ++imageNdx)
616 {
617 switch(m_parameters.operation)
618 {
619 case OPERATION_IMAGE_LOAD:
620 case OPERATION_IMAGE_STORE:
621 descriptorSetLayoutBuilder.addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, VK_SHADER_STAGE_COMPUTE_BIT);
622 descriptorPoolBuilder.addType(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, imageData[0].getImageViewCount());
623 break;
624 case OPERATION_TEXEL_FETCH:
625 case OPERATION_TEXTURE:
626 descriptorSetLayoutBuilder.addSingleBinding((compressedNdx == imageNdx) ? VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER : VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, VK_SHADER_STAGE_COMPUTE_BIT);
627 descriptorPoolBuilder.addType((compressedNdx == imageNdx) ? VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER : VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, imageData[0].getImageViewCount());
628 break;
629 default:
630 DE_ASSERT(false);
631 break;
632 }
633 }
634 descriptorSetLayout = descriptorSetLayoutBuilder.build(vk, device);
635 descriptorPool = descriptorPoolBuilder.build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, imageData[0].getImageViewCount());
636 executeShader(*cmdBuffer, *descriptorSetLayout, *descriptorPool, imageData);
637
638 {
639 VkDeviceSize offset = 0ull;
640 for (deUint32 mipNdx = 0u; mipNdx < mipMapSizes.size(); ++mipNdx)
641 for (deUint32 layerNdx = 0u; layerNdx < getLayerCount(); ++layerNdx)
642 {
643 const deUint32 imageNdx = layerNdx + mipNdx * getLayerCount();
644 const UVec3 size = UVec3(imageData[resultImageNdx].getImageInfo(imageNdx).extent.width,
645 imageData[resultImageNdx].getImageInfo(imageNdx).extent.height,
646 imageData[resultImageNdx].getImageInfo(imageNdx).extent.depth);
647 if (!copyResultAndCompare(*cmdBuffer, imageData[resultImageNdx].getImage(imageNdx), offset, size))
648 return TestStatus::fail("Fail");
649 offset += getCompressedImageSizeInBytes(m_parameters.formatCompressed, mipMapSizes[mipNdx]);
650 }
651 }
652 }
653 if (!decompressImage(*cmdBuffer, imageData, mipMapSizes))
654 return TestStatus::fail("Fail");
655
656 if (m_bASTCErrorColourMismatch)
657 {
658 DE_ASSERT(m_parameters.formatIsASTC);
659 return TestStatusASTCQualityWarning();
660 }
661
662 return TestStatus::pass("Pass");
663 }
664
copyDataToImage(const VkCommandBuffer & cmdBuffer,ImageData & imageData,const vector<UVec3> & mipMapSizes,const bool isCompressed)665 void BasicComputeTestInstance::copyDataToImage (const VkCommandBuffer& cmdBuffer,
666 ImageData& imageData,
667 const vector<UVec3>& mipMapSizes,
668 const bool isCompressed)
669 {
670 const DeviceInterface& vk = m_context.getDeviceInterface();
671 const VkDevice device = m_context.getDevice();
672 const VkQueue queue = m_context.getUniversalQueue();
673 Allocator& allocator = m_context.getDefaultAllocator();
674
675 Buffer imageBuffer (vk, device, allocator,
676 makeBufferCreateInfo(m_data.size(), VK_BUFFER_USAGE_TRANSFER_SRC_BIT),
677 MemoryRequirement::HostVisible);
678 VkDeviceSize offset = 0ull;
679 {
680 const Allocation& alloc = imageBuffer.getAllocation();
681 deMemcpy(alloc.getHostPtr(), &m_data[0], m_data.size());
682 flushAlloc(vk, device, alloc);
683 }
684
685 beginCommandBuffer(vk, cmdBuffer);
686 const VkImageSubresourceRange subresourceRange =
687 {
688 VK_IMAGE_ASPECT_COLOR_BIT, //VkImageAspectFlags aspectMask
689 0u, //deUint32 baseMipLevel
690 imageData.getImageInfo(0u).mipLevels, //deUint32 levelCount
691 0u, //deUint32 baseArrayLayer
692 imageData.getImageInfo(0u).arrayLayers //deUint32 layerCount
693 };
694
695 for (deUint32 imageNdx = 0u; imageNdx < imageData.getImagesCount(); ++imageNdx)
696 {
697 const VkImageMemoryBarrier preCopyImageBarrier = makeImageMemoryBarrier(
698 0u, VK_ACCESS_TRANSFER_WRITE_BIT,
699 VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
700 imageData.getImage(imageNdx), subresourceRange);
701
702 const VkBufferMemoryBarrier FlushHostCopyBarrier = makeBufferMemoryBarrier(
703 VK_ACCESS_HOST_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
704 imageBuffer.get(), 0ull, m_data.size());
705
706 vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT,
707 (VkDependencyFlags)0, 0u, (const VkMemoryBarrier*)DE_NULL, 1u, &FlushHostCopyBarrier, 1u, &preCopyImageBarrier);
708
709 for (deUint32 mipNdx = 0u; mipNdx < imageData.getImageInfo(imageNdx).mipLevels; ++mipNdx)
710 {
711 const VkExtent3D imageExtent = isCompressed ?
712 makeExtent3D(mipMapSizes[mipNdx]) :
713 imageData.getImageInfo(imageNdx).extent;
714 const VkBufferImageCopy copyRegion =
715 {
716 offset, //VkDeviceSize bufferOffset;
717 0u, //deUint32 bufferRowLength;
718 0u, //deUint32 bufferImageHeight;
719 makeImageSubresourceLayers(VK_IMAGE_ASPECT_COLOR_BIT, mipNdx, 0u, imageData.getImageInfo(imageNdx).arrayLayers), //VkImageSubresourceLayers imageSubresource;
720 makeOffset3D(0, 0, 0), //VkOffset3D imageOffset;
721 imageExtent, //VkExtent3D imageExtent;
722 };
723
724 vk.cmdCopyBufferToImage(cmdBuffer, imageBuffer.get(), imageData.getImage(imageNdx), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, ©Region);
725 offset += getCompressedImageSizeInBytes(m_parameters.formatCompressed,
726 UVec3(isCompressed ? imageExtent.width : imageExtent.width * m_blockWidth, isCompressed? imageExtent.height :imageExtent.height * m_blockHeight,imageExtent.depth)) *
727 imageData.getImageInfo(imageNdx).arrayLayers;
728 }
729 }
730 endCommandBuffer(vk, cmdBuffer);
731 submitCommandsAndWait(vk, device, queue, cmdBuffer);
732 }
733
executeShader(const VkCommandBuffer & cmdBuffer,const VkDescriptorSetLayout & descriptorSetLayout,const VkDescriptorPool & descriptorPool,vector<ImageData> & imageData)734 void BasicComputeTestInstance::executeShader (const VkCommandBuffer& cmdBuffer,
735 const VkDescriptorSetLayout& descriptorSetLayout,
736 const VkDescriptorPool& descriptorPool,
737 vector<ImageData>& imageData)
738 {
739 const DeviceInterface& vk = m_context.getDeviceInterface();
740 const VkDevice device = m_context.getDevice();
741 const VkQueue queue = m_context.getUniversalQueue();
742 const Unique<VkShaderModule> shaderModule (createShaderModule(vk, device, m_context.getBinaryCollection().get("comp"), 0));
743 vector<SharedVkDescriptorSet> descriptorSets (imageData[0].getImageViewCount());
744 const Unique<VkPipelineLayout> pipelineLayout (makePipelineLayout(vk, device, descriptorSetLayout));
745 const Unique<VkPipeline> pipeline (makeComputePipeline(vk, device, *pipelineLayout, *shaderModule));
746 Move<VkSampler> sampler;
747 {
748 const VkSamplerCreateInfo createInfo =
749 {
750 VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO, //VkStructureType sType;
751 DE_NULL, //const void* pNext;
752 0u, //VkSamplerCreateFlags flags;
753 VK_FILTER_NEAREST, //VkFilter magFilter;
754 VK_FILTER_NEAREST, //VkFilter minFilter;
755 VK_SAMPLER_MIPMAP_MODE_NEAREST, //VkSamplerMipmapMode mipmapMode;
756 VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE, //VkSamplerAddressMode addressModeU;
757 VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE, //VkSamplerAddressMode addressModeV;
758 VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE, //VkSamplerAddressMode addressModeW;
759 0.0f, //float mipLodBias;
760 VK_FALSE, //VkBool32 anisotropyEnable;
761 1.0f, //float maxAnisotropy;
762 VK_FALSE, //VkBool32 compareEnable;
763 VK_COMPARE_OP_EQUAL, //VkCompareOp compareOp;
764 0.0f, //float minLod;
765 0.0f, //float maxLod;
766 VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK, //VkBorderColor borderColor;
767 VK_FALSE, //VkBool32 unnormalizedCoordinates;
768 };
769 sampler = createSampler(vk, device, &createInfo);
770 }
771
772 vector<VkDescriptorImageInfo> descriptorImageInfos (descriptorSets.size() * m_parameters.imagesCount);
773 for (deUint32 viewNdx = 0u; viewNdx < descriptorSets.size(); ++viewNdx)
774 {
775 const deUint32 descriptorNdx = viewNdx * m_parameters.imagesCount;
776 for (deUint32 imageNdx = 0; imageNdx < m_parameters.imagesCount; ++imageNdx)
777 {
778 descriptorImageInfos[descriptorNdx+imageNdx] = makeDescriptorImageInfo(*sampler,
779 imageData[imageNdx].getImageView(viewNdx), VK_IMAGE_LAYOUT_GENERAL);
780 }
781 }
782
783 for (deUint32 ndx = 0u; ndx < descriptorSets.size(); ++ndx)
784 descriptorSets[ndx] = makeVkSharedPtr(makeDescriptorSet(vk, device, descriptorPool, descriptorSetLayout));
785
786 beginCommandBuffer(vk, cmdBuffer);
787 {
788 const VkImageSubresourceRange compressedRange =
789 {
790 VK_IMAGE_ASPECT_COLOR_BIT, //VkImageAspectFlags aspectMask
791 0u, //deUint32 baseMipLevel
792 imageData[0].getImageInfo(0u).mipLevels, //deUint32 levelCount
793 0u, //deUint32 baseArrayLayer
794 imageData[0].getImageInfo(0u).arrayLayers //deUint32 layerCount
795 };
796 const VkImageSubresourceRange uncompressedRange =
797 {
798 VK_IMAGE_ASPECT_COLOR_BIT, //VkImageAspectFlags aspectMask
799 0u, //deUint32 baseMipLevel
800 1u, //deUint32 levelCount
801 0u, //deUint32 baseArrayLayer
802 1u //deUint32 layerCount
803 };
804
805 vk.cmdBindPipeline(cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *pipeline);
806
807 vector<VkImageMemoryBarrier> preShaderImageBarriers;
808 preShaderImageBarriers.resize(descriptorSets.size() + 1u);
809 for (deUint32 imageNdx = 0u; imageNdx < imageData[1].getImagesCount(); ++imageNdx)
810 {
811 preShaderImageBarriers[imageNdx]= makeImageMemoryBarrier(
812 VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_SHADER_WRITE_BIT,
813 VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL,
814 imageData[1].getImage(imageNdx), uncompressedRange);
815 }
816
817 preShaderImageBarriers[descriptorSets.size()] = makeImageMemoryBarrier(
818 VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_SHADER_READ_BIT,
819 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL,
820 imageData[0].getImage(0), compressedRange);
821
822 vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT,
823 (VkDependencyFlags)0, 0u, (const VkMemoryBarrier*)DE_NULL, 0u, (const VkBufferMemoryBarrier*)DE_NULL,
824 static_cast<deUint32>(preShaderImageBarriers.size()), &preShaderImageBarriers[0]);
825
826 for (deUint32 ndx = 0u; ndx <descriptorSets.size(); ++ndx)
827 {
828 descriptorSetUpdate (**descriptorSets[ndx], &descriptorImageInfos[ndx* m_parameters.imagesCount]);
829 vk.cmdBindDescriptorSets(cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *pipelineLayout, 0u, 1u, &(**descriptorSets[ndx]), 0u, DE_NULL);
830 vk.cmdDispatch(cmdBuffer, imageData[1].getImageInfo(ndx).extent.width,
831 imageData[1].getImageInfo(ndx).extent.height,
832 imageData[1].getImageInfo(ndx).extent.depth);
833 }
834 }
835 endCommandBuffer(vk, cmdBuffer);
836 submitCommandsAndWait(vk, device, queue, cmdBuffer);
837 }
838
copyResultAndCompare(const VkCommandBuffer & cmdBuffer,const VkImage & uncompressed,const VkDeviceSize offset,const UVec3 & size)839 bool BasicComputeTestInstance::copyResultAndCompare (const VkCommandBuffer& cmdBuffer,
840 const VkImage& uncompressed,
841 const VkDeviceSize offset,
842 const UVec3& size)
843 {
844 const DeviceInterface& vk = m_context.getDeviceInterface();
845 const VkQueue queue = m_context.getUniversalQueue();
846 const VkDevice device = m_context.getDevice();
847 Allocator& allocator = m_context.getDefaultAllocator();
848
849 VkDeviceSize imageResultSize = getImageSizeBytes (tcu::IVec3(size.x(), size.y(), size.z()), m_parameters.formatUncompressed);
850 Buffer imageBufferResult (vk, device, allocator,
851 makeBufferCreateInfo(imageResultSize, VK_BUFFER_USAGE_TRANSFER_DST_BIT),
852 MemoryRequirement::HostVisible);
853
854 beginCommandBuffer(vk, cmdBuffer);
855 {
856 const VkImageSubresourceRange subresourceRange =
857 {
858 VK_IMAGE_ASPECT_COLOR_BIT, //VkImageAspectFlags aspectMask
859 0u, //deUint32 baseMipLevel
860 1u, //deUint32 levelCount
861 0u, //deUint32 baseArrayLayer
862 1u //deUint32 layerCount
863 };
864
865 const VkBufferImageCopy copyRegion =
866 {
867 0ull, // VkDeviceSize bufferOffset;
868 0u, // deUint32 bufferRowLength;
869 0u, // deUint32 bufferImageHeight;
870 makeImageSubresourceLayers(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 0u, 1u), // VkImageSubresourceLayers imageSubresource;
871 makeOffset3D(0, 0, 0), // VkOffset3D imageOffset;
872 makeExtent3D(size), // VkExtent3D imageExtent;
873 };
874
875 const VkImageMemoryBarrier prepareForTransferBarrier = makeImageMemoryBarrier(
876 VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
877 VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
878 uncompressed, subresourceRange);
879
880 const VkBufferMemoryBarrier copyBarrier = makeBufferMemoryBarrier(
881 VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_HOST_READ_BIT,
882 imageBufferResult.get(), 0ull, imageResultSize);
883
884 vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0, (const VkBufferMemoryBarrier*)DE_NULL, 1u, &prepareForTransferBarrier);
885 vk.cmdCopyImageToBuffer(cmdBuffer, uncompressed, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, imageBufferResult.get(), 1u, ©Region);
886 vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_HOST_BIT, (VkDependencyFlags)0, 0u, (const VkMemoryBarrier*)DE_NULL, 1, ©Barrier, 0u, (const VkImageMemoryBarrier*)DE_NULL);
887 }
888 endCommandBuffer(vk, cmdBuffer);
889 submitCommandsAndWait(vk, device, queue, cmdBuffer);
890
891 const Allocation& allocResult = imageBufferResult.getAllocation();
892 invalidateAlloc(vk, device, allocResult);
893 if (deMemCmp((const void *)allocResult.getHostPtr(), (const void *)&m_data[static_cast<size_t>(offset)], static_cast<size_t>(imageResultSize)) == 0ull)
894 return true;
895 return false;
896 }
897
descriptorSetUpdate(VkDescriptorSet descriptorSet,const VkDescriptorImageInfo * descriptorImageInfos)898 void BasicComputeTestInstance::descriptorSetUpdate (VkDescriptorSet descriptorSet, const VkDescriptorImageInfo* descriptorImageInfos)
899 {
900 const DeviceInterface& vk = m_context.getDeviceInterface();
901 const VkDevice device = m_context.getDevice();
902 DescriptorSetUpdateBuilder descriptorSetUpdateBuilder;
903
904 switch(m_parameters.operation)
905 {
906 case OPERATION_IMAGE_LOAD:
907 case OPERATION_IMAGE_STORE:
908 {
909 for (deUint32 bindingNdx = 0u; bindingNdx < m_parameters.imagesCount; ++bindingNdx)
910 descriptorSetUpdateBuilder.writeSingle(descriptorSet, DescriptorSetUpdateBuilder::Location::binding(bindingNdx), VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, &descriptorImageInfos[bindingNdx]);
911
912 break;
913 }
914
915 case OPERATION_TEXEL_FETCH:
916 case OPERATION_TEXTURE:
917 {
918 for (deUint32 bindingNdx = 0u; bindingNdx < m_parameters.imagesCount; ++bindingNdx)
919 {
920 descriptorSetUpdateBuilder.writeSingle(descriptorSet, DescriptorSetUpdateBuilder::Location::binding(bindingNdx),
921 bindingNdx == 0u ? VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER : VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, &descriptorImageInfos[bindingNdx]);
922 }
923
924 break;
925 }
926
927 default:
928 DE_ASSERT(false);
929 }
930 descriptorSetUpdateBuilder.update(vk, device);
931 }
932
createImageInfos(ImageData & imageData,const vector<UVec3> & mipMapSizes,const bool isCompressed)933 void BasicComputeTestInstance::createImageInfos (ImageData& imageData, const vector<UVec3>& mipMapSizes, const bool isCompressed)
934 {
935 const VkImageType imageType = mapImageType(m_parameters.imageType);
936
937 if (isCompressed)
938 {
939 VkFormatProperties properties;
940 m_context.getInstanceInterface().getPhysicalDeviceFormatProperties(m_context.getPhysicalDevice(), m_parameters.formatCompressed, &properties);
941 if (!(properties.optimalTilingFeatures & VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT))
942 TCU_THROW(NotSupportedError, "Format storage feature not supported");
943
944 const VkExtent3D extentCompressed = makeExtent3D(getLayerSize(m_parameters.imageType, m_parameters.size));
945 const VkImageCreateInfo compressedInfo =
946 {
947 VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO, // VkStructureType sType;
948 DE_NULL, // const void* pNext;
949 VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT |
950 VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT_KHR |
951 VK_IMAGE_CREATE_EXTENDED_USAGE_BIT_KHR, // VkImageCreateFlags flags;
952 imageType, // VkImageType imageType;
953 m_parameters.formatCompressed, // VkFormat format;
954 extentCompressed, // VkExtent3D extent;
955 static_cast<deUint32>(mipMapSizes.size()), // deUint32 mipLevels;
956 getLayerCount(), // deUint32 arrayLayers;
957 VK_SAMPLE_COUNT_1_BIT, // VkSampleCountFlagBits samples;
958 VK_IMAGE_TILING_OPTIMAL, // VkImageTiling tiling;
959 VK_IMAGE_USAGE_SAMPLED_BIT |
960 VK_IMAGE_USAGE_STORAGE_BIT |
961 VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
962 VK_IMAGE_USAGE_TRANSFER_DST_BIT, // VkImageUsageFlags usage;
963 VK_SHARING_MODE_EXCLUSIVE, // VkSharingMode sharingMode;
964 0u, // deUint32 queueFamilyIndexCount;
965 DE_NULL, // const deUint32* pQueueFamilyIndices;
966 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout initialLayout;
967 };
968 imageData.addImageInfo(compressedInfo);
969 }
970 else
971 {
972 UVec3 size = m_parameters.size;
973 if (m_parameters.imageType == IMAGE_TYPE_1D) {
974 size.y() = 1;
975 }
976 size.z() = 1;
977 const VkExtent3D originalResolutionInBlocks = makeExtent3D(getCompressedImageResolutionInBlocks(m_parameters.formatCompressed, size));
978
979 for (size_t mipNdx = 0ull; mipNdx < mipMapSizes.size(); ++mipNdx)
980 for (size_t layerNdx = 0ull; layerNdx < getLayerCount(); ++layerNdx)
981 {
982 const VkExtent3D extentUncompressed = m_parameters.useMipmaps ?
983 makeExtent3D(getCompressedImageResolutionInBlocks(m_parameters.formatCompressed, mipMapSizes[mipNdx])) :
984 originalResolutionInBlocks;
985 const VkImageCreateInfo uncompressedInfo =
986 {
987 VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO, // VkStructureType sType;
988 DE_NULL, // const void* pNext;
989 0u, // VkImageCreateFlags flags;
990 imageType, // VkImageType imageType;
991 m_parameters.formatUncompressed, // VkFormat format;
992 extentUncompressed, // VkExtent3D extent;
993 1u, // deUint32 mipLevels;
994 1u, // deUint32 arrayLayers;
995 VK_SAMPLE_COUNT_1_BIT, // VkSampleCountFlagBits samples;
996 VK_IMAGE_TILING_OPTIMAL, // VkImageTiling tiling;
997 m_parameters.uncompressedImageUsage |
998 VK_IMAGE_USAGE_SAMPLED_BIT, // VkImageUsageFlags usage;
999 VK_SHARING_MODE_EXCLUSIVE, // VkSharingMode sharingMode;
1000 0u, // deUint32 queueFamilyIndexCount;
1001 DE_NULL, // const deUint32* pQueueFamilyIndices;
1002 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout initialLayout;
1003 };
1004 imageData.addImageInfo(uncompressedInfo);
1005 }
1006 }
1007 }
1008
decompressImage(const VkCommandBuffer & cmdBuffer,vector<ImageData> & imageData,const vector<UVec3> & mipMapSizes)1009 bool BasicComputeTestInstance::decompressImage (const VkCommandBuffer& cmdBuffer,
1010 vector<ImageData>& imageData,
1011 const vector<UVec3>& mipMapSizes)
1012 {
1013 const DeviceInterface& vk = m_context.getDeviceInterface();
1014 const VkDevice device = m_context.getDevice();
1015 const VkQueue queue = m_context.getUniversalQueue();
1016 Allocator& allocator = m_context.getDefaultAllocator();
1017 const Unique<VkShaderModule> shaderModule (createShaderModule(vk, device, m_context.getBinaryCollection().get("decompress"), 0));
1018 const VkImage& compressed = imageData[0].getImage(0);
1019 const VkImageType imageType = mapImageType(m_parameters.imageType);
1020
1021 for (deUint32 ndx = 0u; ndx < imageData.size(); ndx++)
1022 imageData[ndx].resetViews();
1023
1024 for (deUint32 mipNdx = 0u; mipNdx < mipMapSizes.size(); ++mipNdx)
1025 for (deUint32 layerNdx = 0u; layerNdx < getLayerCount(); ++layerNdx)
1026 {
1027 const bool layoutShaderReadOnly = (layerNdx % 2u) == 1;
1028 const deUint32 imageNdx = layerNdx + mipNdx * getLayerCount();
1029 const VkExtent3D extentCompressed = imageType == VK_IMAGE_TYPE_1D ? makeExtent3D(mipMapSizes[mipNdx].x(), 1, mipMapSizes[mipNdx].z()) : makeExtent3D(mipMapSizes[mipNdx]);
1030 const VkImage& uncompressed = imageData[m_parameters.imagesCount -1].getImage(imageNdx);
1031 const VkExtent3D extentUncompressed = imageData[m_parameters.imagesCount -1].getImageInfo(imageNdx).extent;
1032 const VkDeviceSize bufferSizeComp = getCompressedImageSizeInBytes(m_parameters.formatCompressed, mipMapSizes[mipNdx]);
1033
1034 VkFormatProperties properties;
1035 m_context.getInstanceInterface().getPhysicalDeviceFormatProperties(m_context.getPhysicalDevice(), m_parameters.formatForVerify, &properties);
1036 if (!(properties.optimalTilingFeatures & VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT))
1037 TCU_THROW(NotSupportedError, "Format storage feature not supported");
1038
1039 const VkImageCreateInfo decompressedImageInfo =
1040 {
1041 VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO, // VkStructureType sType;
1042 DE_NULL, // const void* pNext;
1043 0u, // VkImageCreateFlags flags;
1044 imageType, // VkImageType imageType;
1045 m_parameters.formatForVerify, // VkFormat format;
1046 extentCompressed, // VkExtent3D extent;
1047 1u, // deUint32 mipLevels;
1048 1u, // deUint32 arrayLayers;
1049 VK_SAMPLE_COUNT_1_BIT, // VkSampleCountFlagBits samples;
1050 VK_IMAGE_TILING_OPTIMAL, // VkImageTiling tiling;
1051 VK_IMAGE_USAGE_SAMPLED_BIT |
1052 VK_IMAGE_USAGE_STORAGE_BIT |
1053 VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
1054 VK_IMAGE_USAGE_TRANSFER_DST_BIT, // VkImageUsageFlags usage;
1055 VK_SHARING_MODE_EXCLUSIVE, // VkSharingMode sharingMode;
1056 0u, // deUint32 queueFamilyIndexCount;
1057 DE_NULL, // const deUint32* pQueueFamilyIndices;
1058 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout initialLayout;
1059 };
1060
1061 const VkImageCreateInfo compressedImageInfo =
1062 {
1063 VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO, // VkStructureType sType;
1064 DE_NULL, // const void* pNext;
1065 0u, // VkImageCreateFlags flags;
1066 imageType, // VkImageType imageType;
1067 m_parameters.formatCompressed, // VkFormat format;
1068 extentCompressed, // VkExtent3D extent;
1069 1u, // deUint32 mipLevels;
1070 1u, // deUint32 arrayLayers;
1071 VK_SAMPLE_COUNT_1_BIT, // VkSampleCountFlagBits samples;
1072 VK_IMAGE_TILING_OPTIMAL, // VkImageTiling tiling;
1073 VK_IMAGE_USAGE_SAMPLED_BIT |
1074 VK_IMAGE_USAGE_TRANSFER_DST_BIT, // VkImageUsageFlags usage;
1075 VK_SHARING_MODE_EXCLUSIVE, // VkSharingMode sharingMode;
1076 0u, // deUint32 queueFamilyIndexCount;
1077 DE_NULL, // const deUint32* pQueueFamilyIndices;
1078 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout initialLayout;
1079 };
1080 const VkImageUsageFlags compressedViewUsageFlags = VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
1081 const VkImageViewUsageCreateInfo compressedViewUsageCI =
1082 {
1083 VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO_KHR, //VkStructureType sType;
1084 DE_NULL, //const void* pNext;
1085 compressedViewUsageFlags, //VkImageUsageFlags usage;
1086 };
1087 const VkImageViewType imageViewType (mapImageViewType(m_parameters.imageType));
1088 Image resultImage (vk, device, allocator, decompressedImageInfo, MemoryRequirement::Any);
1089 Image referenceImage (vk, device, allocator, decompressedImageInfo, MemoryRequirement::Any);
1090 Image uncompressedImage (vk, device, allocator, compressedImageInfo, MemoryRequirement::Any);
1091 Move<VkImageView> resultView = makeImageView(vk, device, resultImage.get(), imageViewType, decompressedImageInfo.format,
1092 makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, decompressedImageInfo.extent.depth, 0u, decompressedImageInfo.arrayLayers));
1093 Move<VkImageView> referenceView = makeImageView(vk, device, referenceImage.get(), imageViewType, decompressedImageInfo.format,
1094 makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, decompressedImageInfo.extent.depth, 0u, decompressedImageInfo.arrayLayers));
1095 Move<VkImageView> uncompressedView = makeImageView(vk, device, uncompressedImage.get(), imageViewType, m_parameters.formatCompressed,
1096 makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, compressedImageInfo.extent.depth, 0u, compressedImageInfo.arrayLayers));
1097 Move<VkImageView> compressedView = makeImageView(vk, device, compressed, imageViewType, m_parameters.formatCompressed,
1098 makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, mipNdx, 1u, layerNdx, 1u), &compressedViewUsageCI);
1099 Move<VkDescriptorSetLayout> descriptorSetLayout = DescriptorSetLayoutBuilder()
1100 .addSingleBinding(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, VK_SHADER_STAGE_COMPUTE_BIT)
1101 .addSingleBinding(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, VK_SHADER_STAGE_COMPUTE_BIT)
1102 .addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, VK_SHADER_STAGE_COMPUTE_BIT)
1103 .addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, VK_SHADER_STAGE_COMPUTE_BIT)
1104 .build(vk, device);
1105 Move<VkDescriptorPool> descriptorPool = DescriptorPoolBuilder()
1106 .addType(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, decompressedImageInfo.arrayLayers)
1107 .addType(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, decompressedImageInfo.arrayLayers)
1108 .addType(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, decompressedImageInfo.arrayLayers)
1109 .addType(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, decompressedImageInfo.arrayLayers)
1110 .build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, decompressedImageInfo.arrayLayers);
1111
1112 Move<VkDescriptorSet> descriptorSet = makeDescriptorSet(vk, device, *descriptorPool, *descriptorSetLayout);
1113 const Unique<VkPipelineLayout> pipelineLayout (makePipelineLayout(vk, device, *descriptorSetLayout));
1114 const Unique<VkPipeline> pipeline (makeComputePipeline(vk, device, *pipelineLayout, *shaderModule));
1115 const VkDeviceSize bufferSize = getImageSizeBytes(IVec3((int)extentCompressed.width, (int)extentCompressed.height, (int)extentCompressed.depth), m_parameters.formatForVerify);
1116 Buffer resultBuffer (vk, device, allocator,
1117 makeBufferCreateInfo(bufferSize, VK_BUFFER_USAGE_TRANSFER_DST_BIT), MemoryRequirement::HostVisible);
1118 Buffer referenceBuffer (vk, device, allocator,
1119 makeBufferCreateInfo(bufferSize, VK_BUFFER_USAGE_TRANSFER_DST_BIT), MemoryRequirement::HostVisible);
1120 Buffer transferBuffer (vk, device, allocator,
1121 makeBufferCreateInfo(bufferSizeComp, VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT), MemoryRequirement::HostVisible);
1122 Move<VkSampler> sampler;
1123 {
1124 const VkSamplerCreateInfo createInfo =
1125 {
1126 VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO, //VkStructureType sType;
1127 DE_NULL, //const void* pNext;
1128 0u, //VkSamplerCreateFlags flags;
1129 VK_FILTER_NEAREST, //VkFilter magFilter;
1130 VK_FILTER_NEAREST, //VkFilter minFilter;
1131 VK_SAMPLER_MIPMAP_MODE_NEAREST, //VkSamplerMipmapMode mipmapMode;
1132 VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE, //VkSamplerAddressMode addressModeU;
1133 VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE, //VkSamplerAddressMode addressModeV;
1134 VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE, //VkSamplerAddressMode addressModeW;
1135 0.0f, //float mipLodBias;
1136 VK_FALSE, //VkBool32 anisotropyEnable;
1137 1.0f, //float maxAnisotropy;
1138 VK_FALSE, //VkBool32 compareEnable;
1139 VK_COMPARE_OP_EQUAL, //VkCompareOp compareOp;
1140 0.0f, //float minLod;
1141 1.0f, //float maxLod;
1142 VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK, //VkBorderColor borderColor;
1143 VK_FALSE, //VkBool32 unnormalizedCoordinates;
1144 };
1145 sampler = createSampler(vk, device, &createInfo);
1146 }
1147
1148 VkDescriptorImageInfo descriptorImageInfos[] =
1149 {
1150 makeDescriptorImageInfo(*sampler, *uncompressedView, layoutShaderReadOnly ? VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL : VK_IMAGE_LAYOUT_GENERAL),
1151 makeDescriptorImageInfo(*sampler, *compressedView, layoutShaderReadOnly ? VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL : VK_IMAGE_LAYOUT_GENERAL),
1152 makeDescriptorImageInfo(DE_NULL, *resultView, VK_IMAGE_LAYOUT_GENERAL),
1153 makeDescriptorImageInfo(DE_NULL, *referenceView, VK_IMAGE_LAYOUT_GENERAL)
1154 };
1155 DescriptorSetUpdateBuilder()
1156 .writeSingle(descriptorSet.get(), DescriptorSetUpdateBuilder::Location::binding(0u), VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, &descriptorImageInfos[0])
1157 .writeSingle(descriptorSet.get(), DescriptorSetUpdateBuilder::Location::binding(1u), VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, &descriptorImageInfos[1])
1158 .writeSingle(descriptorSet.get(), DescriptorSetUpdateBuilder::Location::binding(2u), VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, &descriptorImageInfos[2])
1159 .writeSingle(descriptorSet.get(), DescriptorSetUpdateBuilder::Location::binding(3u), VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, &descriptorImageInfos[3])
1160 .update(vk, device);
1161
1162
1163 beginCommandBuffer(vk, cmdBuffer);
1164 {
1165 const VkImageSubresourceRange subresourceRange =
1166 {
1167 VK_IMAGE_ASPECT_COLOR_BIT, //VkImageAspectFlags aspectMask
1168 0u, //deUint32 baseMipLevel
1169 1u, //deUint32 levelCount
1170 0u, //deUint32 baseArrayLayer
1171 1u //deUint32 layerCount
1172 };
1173
1174 const VkImageSubresourceRange subresourceRangeComp =
1175 {
1176 VK_IMAGE_ASPECT_COLOR_BIT, //VkImageAspectFlags aspectMask
1177 mipNdx, //deUint32 baseMipLevel
1178 1u, //deUint32 levelCount
1179 layerNdx, //deUint32 baseArrayLayer
1180 1u //deUint32 layerCount
1181 };
1182
1183 const VkBufferImageCopy copyRegion =
1184 {
1185 0ull, // VkDeviceSize bufferOffset;
1186 0u, // deUint32 bufferRowLength;
1187 0u, // deUint32 bufferImageHeight;
1188 makeImageSubresourceLayers(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 0u, 1u), // VkImageSubresourceLayers imageSubresource;
1189 makeOffset3D(0, 0, 0), // VkOffset3D imageOffset;
1190 decompressedImageInfo.extent, // VkExtent3D imageExtent;
1191 };
1192
1193 const VkBufferImageCopy compressedCopyRegion =
1194 {
1195 0ull, // VkDeviceSize bufferOffset;
1196 0u, // deUint32 bufferRowLength;
1197 0u, // deUint32 bufferImageHeight;
1198 makeImageSubresourceLayers(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 0u, 1u), // VkImageSubresourceLayers imageSubresource;
1199 makeOffset3D(0, 0, 0), // VkOffset3D imageOffset;
1200 extentUncompressed, // VkExtent3D imageExtent;
1201 };
1202
1203 {
1204
1205 const VkBufferMemoryBarrier preCopyBufferBarriers = makeBufferMemoryBarrier(0u, VK_ACCESS_TRANSFER_WRITE_BIT,
1206 transferBuffer.get(), 0ull, bufferSizeComp);
1207
1208 vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT,
1209 (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1u, &preCopyBufferBarriers, 0u, (const VkImageMemoryBarrier*)DE_NULL);
1210 }
1211
1212 vk.cmdCopyImageToBuffer(cmdBuffer, uncompressed, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, transferBuffer.get(), 1u, &compressedCopyRegion);
1213
1214 {
1215 const VkBufferMemoryBarrier postCopyBufferBarriers = makeBufferMemoryBarrier(VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
1216 transferBuffer.get(), 0ull, bufferSizeComp);
1217
1218 const VkImageMemoryBarrier preCopyImageBarriers = makeImageMemoryBarrier(0u, VK_ACCESS_TRANSFER_WRITE_BIT,
1219 VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, uncompressedImage.get(), subresourceRange);
1220
1221 vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT,
1222 (VkDependencyFlags)0, 0u, (const VkMemoryBarrier*)DE_NULL, 1u, &postCopyBufferBarriers, 1u, &preCopyImageBarriers);
1223 }
1224
1225 vk.cmdCopyBufferToImage(cmdBuffer, transferBuffer.get(), uncompressedImage.get(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, ©Region);
1226
1227 vk.cmdBindPipeline(cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *pipeline);
1228 vk.cmdBindDescriptorSets(cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *pipelineLayout, 0u, 1u, &descriptorSet.get(), 0u, DE_NULL);
1229
1230 {
1231 const VkImageMemoryBarrier preShaderImageBarriers[] =
1232 {
1233
1234 makeImageMemoryBarrier(VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_SHADER_READ_BIT,
1235 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, layoutShaderReadOnly ? VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL : VK_IMAGE_LAYOUT_GENERAL,
1236 uncompressedImage.get(), subresourceRange),
1237
1238 makeImageMemoryBarrier(0, VK_ACCESS_SHADER_READ_BIT,
1239 VK_IMAGE_LAYOUT_GENERAL, layoutShaderReadOnly ? VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL : VK_IMAGE_LAYOUT_GENERAL,
1240 compressed, subresourceRangeComp),
1241
1242 makeImageMemoryBarrier(0u, VK_ACCESS_SHADER_WRITE_BIT,
1243 VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL,
1244 resultImage.get(), subresourceRange),
1245
1246 makeImageMemoryBarrier(0u, VK_ACCESS_SHADER_WRITE_BIT,
1247 VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL,
1248 referenceImage.get(), subresourceRange)
1249 };
1250
1251 vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT,
1252 (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, (const VkBufferMemoryBarrier*)DE_NULL,
1253 DE_LENGTH_OF_ARRAY(preShaderImageBarriers), preShaderImageBarriers);
1254 }
1255
1256 vk.cmdDispatch(cmdBuffer, extentCompressed.width, extentCompressed.height, extentCompressed.depth);
1257
1258 {
1259 const VkImageMemoryBarrier postShaderImageBarriers[] =
1260 {
1261 makeImageMemoryBarrier(VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
1262 VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
1263 resultImage.get(), subresourceRange),
1264
1265 makeImageMemoryBarrier(VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
1266 VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
1267 referenceImage.get(), subresourceRange)
1268 };
1269
1270 vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT,
1271 (VkDependencyFlags)0, 0u, (const VkMemoryBarrier*)DE_NULL, 0u, (const VkBufferMemoryBarrier*)DE_NULL,
1272 DE_LENGTH_OF_ARRAY(postShaderImageBarriers), postShaderImageBarriers);
1273 }
1274
1275 vk.cmdCopyImageToBuffer(cmdBuffer, resultImage.get(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, resultBuffer.get(), 1u, ©Region);
1276 vk.cmdCopyImageToBuffer(cmdBuffer, referenceImage.get(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, referenceBuffer.get(), 1u, ©Region);
1277
1278 {
1279 const VkBufferMemoryBarrier postCopyBufferBarrier[] =
1280 {
1281 makeBufferMemoryBarrier(VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_HOST_READ_BIT,
1282 resultBuffer.get(), 0ull, bufferSize),
1283
1284 makeBufferMemoryBarrier(VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_HOST_READ_BIT,
1285 referenceBuffer.get(), 0ull, bufferSize),
1286 };
1287
1288 vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_HOST_BIT,
1289 (VkDependencyFlags)0, 0u, (const VkMemoryBarrier*)DE_NULL, DE_LENGTH_OF_ARRAY(postCopyBufferBarrier), postCopyBufferBarrier,
1290 0u, (const VkImageMemoryBarrier*)DE_NULL);
1291 }
1292 }
1293 endCommandBuffer(vk, cmdBuffer);
1294 submitCommandsAndWait(vk, device, queue, cmdBuffer);
1295
1296 const Allocation& resultAlloc = resultBuffer.getAllocation();
1297 const Allocation& referenceAlloc = referenceBuffer.getAllocation();
1298 invalidateAlloc(vk, device, resultAlloc);
1299 invalidateAlloc(vk, device, referenceAlloc);
1300
1301 BinaryCompareMode compareMode =
1302 (m_parameters.formatIsASTC)
1303 ?(COMPARE_MODE_ALLOW_ASTC_ERROR_COLOUR_WARNING)
1304 :(COMPARE_MODE_NORMAL);
1305
1306 BinaryCompareResult res = BinaryCompare(referenceAlloc.getHostPtr(),
1307 resultAlloc.getHostPtr(),
1308 (size_t)bufferSize,
1309 m_parameters.formatForVerify,
1310 compareMode);
1311
1312 if (res == COMPARE_RESULT_FAILED)
1313 {
1314 ConstPixelBufferAccess resultPixels (mapVkFormat(decompressedImageInfo.format), decompressedImageInfo.extent.width, decompressedImageInfo.extent.height, decompressedImageInfo.extent.depth, resultAlloc.getHostPtr());
1315 ConstPixelBufferAccess referencePixels (mapVkFormat(decompressedImageInfo.format), decompressedImageInfo.extent.width, decompressedImageInfo.extent.height, decompressedImageInfo.extent.depth, referenceAlloc.getHostPtr());
1316
1317 if(!fuzzyCompare(m_context.getTestContext().getLog(), "Image Comparison", "Image Comparison", resultPixels, referencePixels, 0.001f, tcu::COMPARE_LOG_EVERYTHING))
1318 return false;
1319 }
1320 else if (res == COMPARE_RESULT_ASTC_QUALITY_WARNING)
1321 {
1322 m_bASTCErrorColourMismatch = true;
1323 }
1324 }
1325
1326 return true;
1327 }
1328
// Variant of BasicComputeTestInstance that only overrides executeShader;
// everything else (setup, verification) is inherited from the base class.
class ImageStoreComputeTestInstance : public BasicComputeTestInstance
{
public:
					ImageStoreComputeTestInstance	(Context&						context,
													 const TestParameters&			parameters);
protected:
	// Records and submits the compute work for this operation mode.
	virtual void	executeShader					(const VkCommandBuffer&			cmdBuffer,
													 const VkDescriptorSetLayout&	descriptorSetLayout,
													 const VkDescriptorPool&		descriptorPool,
													 vector<ImageData>&				imageData);
private:
};
1341
// Forwards straight to the base class; this subclass adds no state of its own.
ImageStoreComputeTestInstance::ImageStoreComputeTestInstance (Context& context, const TestParameters& parameters)
	:BasicComputeTestInstance	(context, parameters)
{
}
1346
// Records and submits the compute pass for the image-store operation: builds a
// nearest-filter unnormalized sampler, fills one descriptor-image-info per
// (view, image) pair, transitions all involved images to the layouts the
// shader expects, then dispatches once per descriptor set using the
// corresponding uncompressed image extent as the workgroup-grid size.
void ImageStoreComputeTestInstance::executeShader (const VkCommandBuffer&		cmdBuffer,
												   const VkDescriptorSetLayout&	descriptorSetLayout,
												   const VkDescriptorPool&		descriptorPool,
												   vector<ImageData>&			imageData)
{
	const DeviceInterface&			vk				= m_context.getDeviceInterface();
	const VkDevice					device			= m_context.getDevice();
	const VkQueue					queue			= m_context.getUniversalQueue();
	const Unique<VkShaderModule>	shaderModule	(createShaderModule(vk, device, m_context.getBinaryCollection().get("comp"), 0));
	// One descriptor set per view of the compressed image.
	vector<SharedVkDescriptorSet>	descriptorSets	(imageData[0].getImageViewCount());
	const Unique<VkPipelineLayout>	pipelineLayout	(makePipelineLayout(vk, device, descriptorSetLayout));
	const Unique<VkPipeline>		pipeline		(makeComputePipeline(vk, device, *pipelineLayout, *shaderModule));
	Move<VkSampler>					sampler;
	{
		// Nearest filtering with unnormalized coordinates: the shader addresses texels directly.
		const VkSamplerCreateInfo createInfo =
		{
			VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO,		//VkStructureType		sType;
			DE_NULL,									//const void*			pNext;
			0u,											//VkSamplerCreateFlags	flags;
			VK_FILTER_NEAREST,							//VkFilter				magFilter;
			VK_FILTER_NEAREST,							//VkFilter				minFilter;
			VK_SAMPLER_MIPMAP_MODE_NEAREST,				//VkSamplerMipmapMode	mipmapMode;
			VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,		//VkSamplerAddressMode	addressModeU;
			VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,		//VkSamplerAddressMode	addressModeV;
			VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,		//VkSamplerAddressMode	addressModeW;
			0.0f,										//float					mipLodBias;
			VK_FALSE,									//VkBool32				anisotropyEnable;
			1.0f,										//float					maxAnisotropy;
			VK_FALSE,									//VkBool32				compareEnable;
			VK_COMPARE_OP_EQUAL,						//VkCompareOp			compareOp;
			0.0f,										//float					minLod;
			0.0f,										//float					maxLod;
			VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK,	//VkBorderColor			borderColor;
			VK_TRUE,									//VkBool32				unnormalizedCoordinates;
		};
		sampler = createSampler(vk, device, &createInfo);
	}

	// Descriptor infos laid out as [view][image]: m_parameters.imagesCount entries per view.
	vector<VkDescriptorImageInfo>	descriptorImageInfos	(descriptorSets.size() * m_parameters.imagesCount);
	for (deUint32 viewNdx = 0u; viewNdx < descriptorSets.size(); ++viewNdx)
	{
		const deUint32 descriptorNdx = viewNdx * m_parameters.imagesCount;
		for (deUint32 imageNdx = 0u; imageNdx < m_parameters.imagesCount; ++imageNdx)
		{
			descriptorImageInfos[descriptorNdx+imageNdx] = makeDescriptorImageInfo(*sampler,
														imageData[imageNdx].getImageView(viewNdx), VK_IMAGE_LAYOUT_GENERAL);
		}
	}

	for (deUint32 ndx = 0u; ndx < descriptorSets.size(); ++ndx)
		descriptorSets[ndx] = makeVkSharedPtr(makeDescriptorSet(vk, device, descriptorPool, descriptorSetLayout));

	beginCommandBuffer(vk, cmdBuffer);
	{
		// Full range (all mips and layers) of the compressed image.
		const VkImageSubresourceRange	compressedRange				=
		{
			VK_IMAGE_ASPECT_COLOR_BIT,					//VkImageAspectFlags	aspectMask
			0u,											//deUint32				baseMipLevel
			imageData[0].getImageInfo(0).mipLevels,		//deUint32				levelCount
			0u,											//deUint32				baseArrayLayer
			imageData[0].getImageInfo(0).arrayLayers	//deUint32				layerCount
		};

		// Uncompressed images are single-mip, single-layer.
		const VkImageSubresourceRange	uncompressedRange			=
		{
			VK_IMAGE_ASPECT_COLOR_BIT,					//VkImageAspectFlags	aspectMask
			0u,											//deUint32				baseMipLevel
			1u,											//deUint32				levelCount
			0u,											//deUint32				baseArrayLayer
			1u											//deUint32				layerCount
		};

		vk.cmdBindPipeline(cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *pipeline);

		// Two barriers per uncompressed image pair plus one for the compressed image.
		// NOTE(review): the sizing assumes imageData[1].getImagesCount() == descriptorSets.size();
		// a mismatch would leave zero-initialized (invalid) barrier entries - confirm the invariant.
		vector<VkImageMemoryBarrier> preShaderImageBarriers (descriptorSets.size() * 2u + 1u);
		for (deUint32 imageNdx = 0u; imageNdx < imageData[1].getImagesCount(); ++imageNdx)
		{
			// imageData[1] images hold transferred data: preserve contents via TRANSFER_DST -> GENERAL.
			preShaderImageBarriers[imageNdx] = makeImageMemoryBarrier(
				VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_SHADER_WRITE_BIT,
				VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL,
				imageData[1].getImage(imageNdx), uncompressedRange);

			// imageData[2] images are written from scratch: UNDEFINED -> GENERAL discards contents.
			preShaderImageBarriers[imageNdx + imageData[1].getImagesCount()] = makeImageMemoryBarrier(
				VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_SHADER_WRITE_BIT,
				VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL,
				imageData[2].getImage(imageNdx), uncompressedRange);
		}

		preShaderImageBarriers[preShaderImageBarriers.size()-1] = makeImageMemoryBarrier(
			VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_SHADER_READ_BIT,
			VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL,
			imageData[0].getImage(0u), compressedRange);

		vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT,
			(VkDependencyFlags)0, 0u, (const VkMemoryBarrier*)DE_NULL, 0u, (const VkBufferMemoryBarrier*)DE_NULL,
			static_cast<deUint32>(preShaderImageBarriers.size()), &preShaderImageBarriers[0]);

		// One dispatch per view; grid size comes from the matching uncompressed image extent.
		for (deUint32 ndx = 0u; ndx <descriptorSets.size(); ++ndx)
		{
			descriptorSetUpdate (**descriptorSets[ndx], &descriptorImageInfos[ndx* m_parameters.imagesCount]);
			vk.cmdBindDescriptorSets(cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *pipelineLayout, 0u, 1u, &(**descriptorSets[ndx]), 0u, DE_NULL);
			vk.cmdDispatch(cmdBuffer, imageData[1].getImageInfo(ndx).extent.width,
									  imageData[1].getImageInfo(ndx).extent.height,
									  imageData[1].getImageInfo(ndx).extent.depth);
		}
	}
	endCommandBuffer(vk, cmdBuffer);
	submitCommandsAndWait(vk, device, queue, cmdBuffer);
}
1456
1457 class GraphicsAttachmentsTestInstance : public BasicTranscodingTestInstance
1458 {
1459 public:
1460 GraphicsAttachmentsTestInstance (Context& context, const TestParameters& parameters);
1461 virtual TestStatus iterate (void);
1462
1463 protected:
1464 virtual bool isWriteToCompressedOperation ();
1465 VkImageCreateInfo makeCreateImageInfo (const VkFormat format,
1466 const ImageType type,
1467 const UVec3& size,
1468 const VkImageUsageFlags usageFlags,
1469 const VkImageCreateFlags* createFlags,
1470 const deUint32 levels,
1471 const deUint32 layers);
1472 VkDeviceSize getCompressedImageData (const VkFormat format,
1473 const UVec3& size,
1474 std::vector<deUint8>& data,
1475 const deUint32 layer,
1476 const deUint32 level);
1477 VkDeviceSize getUncompressedImageData (const VkFormat format,
1478 const UVec3& size,
1479 std::vector<deUint8>& data,
1480 const deUint32 layer,
1481 const deUint32 level);
1482 virtual void prepareData ();
1483 virtual void prepareVertexBuffer ();
1484 virtual void transcodeRead ();
1485 virtual void transcodeWrite ();
1486 bool verifyDecompression (const std::vector<deUint8>& refCompressedData,
1487 const de::MovePtr<Image>& resCompressedImage,
1488 const deUint32 layer,
1489 const deUint32 level,
1490 const UVec3& mipmapDims);
1491
1492 typedef std::vector<deUint8> RawDataVector;
1493 typedef SharedPtr<RawDataVector> RawDataPtr;
1494 typedef std::vector<RawDataPtr> LevelData;
1495 typedef std::vector<LevelData> FullImageData;
1496
1497 FullImageData m_srcData;
1498 FullImageData m_dstData;
1499
1500 typedef SharedPtr<Image> ImagePtr;
1501 typedef std::vector<ImagePtr> LevelImages;
1502 typedef std::vector<LevelImages> ImagesArray;
1503
1504 ImagesArray m_uncompressedImages;
1505 MovePtr<Image> m_compressedImage;
1506
1507 VkImageViewUsageCreateInfo m_imageViewUsageKHR;
1508 VkImageViewUsageCreateInfo* m_srcImageViewUsageKHR;
1509 VkImageViewUsageCreateInfo* m_dstImageViewUsageKHR;
1510 std::vector<tcu::UVec3> m_compressedImageResVec;
1511 std::vector<tcu::UVec3> m_uncompressedImageResVec;
1512 VkFormat m_srcFormat;
1513 VkFormat m_dstFormat;
1514 VkImageUsageFlags m_srcImageUsageFlags;
1515 VkImageUsageFlags m_dstImageUsageFlags;
1516 std::vector<tcu::UVec3> m_srcImageResolutions;
1517 std::vector<tcu::UVec3> m_dstImageResolutions;
1518
1519 MovePtr<Buffer> m_vertexBuffer;
1520 deUint32 m_vertexCount;
1521 VkDeviceSize m_vertexBufferOffset;
1522 };
1523
// Initializes all bookkeeping members to empty/zero; the actual resources are
// created later by prepareData()/prepareVertexBuffer() from iterate().
GraphicsAttachmentsTestInstance::GraphicsAttachmentsTestInstance (Context& context, const TestParameters& parameters)
	: BasicTranscodingTestInstance(context, parameters)
	, m_srcData()
	, m_dstData()
	, m_uncompressedImages()
	, m_compressedImage()
	, m_imageViewUsageKHR()
	, m_srcImageViewUsageKHR()
	, m_dstImageViewUsageKHR()
	, m_compressedImageResVec()
	, m_uncompressedImageResVec()
	, m_srcFormat()
	, m_dstFormat()
	, m_srcImageUsageFlags()
	, m_dstImageUsageFlags()
	, m_srcImageResolutions()
	, m_dstImageResolutions()
	, m_vertexBuffer()
	, m_vertexCount(0u)
	, m_vertexBufferOffset(0ull)
{
}
1546
iterate(void)1547 TestStatus GraphicsAttachmentsTestInstance::iterate (void)
1548 {
1549 prepareData();
1550 prepareVertexBuffer();
1551
1552 for (deUint32 levelNdx = 0; levelNdx < getLevelCount(); ++levelNdx)
1553 for (deUint32 layerNdx = 0; layerNdx < getLayerCount(); ++layerNdx)
1554 DE_ASSERT(m_srcData[levelNdx][layerNdx]->size() == m_dstData[levelNdx][layerNdx]->size());
1555
1556 if (isWriteToCompressedOperation())
1557 transcodeWrite();
1558 else
1559 transcodeRead();
1560
1561 for (deUint32 levelNdx = 0; levelNdx < getLevelCount(); ++levelNdx)
1562 for (deUint32 layerNdx = 0; layerNdx < getLayerCount(); ++layerNdx)
1563 if (isWriteToCompressedOperation())
1564 {
1565 if (!verifyDecompression(*m_srcData[levelNdx][layerNdx], m_compressedImage, levelNdx, layerNdx, m_compressedImageResVec[levelNdx]))
1566 return TestStatus::fail("Images difference detected");
1567 }
1568 else
1569 {
1570 if (!verifyDecompression(*m_dstData[levelNdx][layerNdx], m_compressedImage, levelNdx, layerNdx, m_compressedImageResVec[levelNdx]))
1571 return TestStatus::fail("Images difference detected");
1572 }
1573
1574 if (m_bASTCErrorColourMismatch)
1575 {
1576 DE_ASSERT(m_parameters.formatIsASTC);
1577 return TestStatusASTCQualityWarning();
1578 }
1579
1580 return TestStatus::pass("Pass");
1581 }
1582
prepareData()1583 void GraphicsAttachmentsTestInstance::prepareData ()
1584 {
1585 VkImageViewUsageCreateInfo* imageViewUsageKHRNull = (VkImageViewUsageCreateInfo*)DE_NULL;
1586
1587 m_imageViewUsageKHR = makeImageViewUsageCreateInfo(m_parameters.compressedImageViewUsage);
1588
1589 m_srcImageViewUsageKHR = isWriteToCompressedOperation() ? imageViewUsageKHRNull : &m_imageViewUsageKHR;
1590 m_dstImageViewUsageKHR = isWriteToCompressedOperation() ? &m_imageViewUsageKHR : imageViewUsageKHRNull;
1591
1592 m_srcFormat = isWriteToCompressedOperation() ? m_parameters.formatUncompressed : m_parameters.formatCompressed;
1593 m_dstFormat = isWriteToCompressedOperation() ? m_parameters.formatCompressed : m_parameters.formatUncompressed;
1594
1595 m_srcImageUsageFlags = isWriteToCompressedOperation() ? m_parameters.uncompressedImageUsage : m_parameters.compressedImageUsage;
1596 m_dstImageUsageFlags = isWriteToCompressedOperation() ? m_parameters.compressedImageUsage : m_parameters.uncompressedImageUsage;
1597
1598 m_compressedImageResVec = getMipLevelSizes(getLayerDims());
1599 m_uncompressedImageResVec = getCompressedMipLevelSizes(m_parameters.formatCompressed, m_compressedImageResVec);
1600
1601 m_srcImageResolutions = isWriteToCompressedOperation() ? m_uncompressedImageResVec : m_compressedImageResVec;
1602 m_dstImageResolutions = isWriteToCompressedOperation() ? m_compressedImageResVec : m_uncompressedImageResVec;
1603
1604 m_srcData.resize(getLevelCount());
1605 m_dstData.resize(getLevelCount());
1606 m_uncompressedImages.resize(getLevelCount());
1607
1608 for (deUint32 levelNdx = 0; levelNdx < getLevelCount(); ++levelNdx)
1609 {
1610 m_srcData[levelNdx].resize(getLayerCount());
1611 m_dstData[levelNdx].resize(getLayerCount());
1612 m_uncompressedImages[levelNdx].resize(getLayerCount());
1613
1614 for (deUint32 layerNdx = 0; layerNdx < getLayerCount(); ++layerNdx)
1615 {
1616 m_srcData[levelNdx][layerNdx] = SharedPtr<RawDataVector>(new RawDataVector);
1617 m_dstData[levelNdx][layerNdx] = SharedPtr<RawDataVector>(new RawDataVector);
1618
1619 if (isWriteToCompressedOperation())
1620 {
1621 getUncompressedImageData(m_srcFormat, m_srcImageResolutions[levelNdx], *m_srcData[levelNdx][layerNdx], layerNdx, levelNdx);
1622
1623 m_dstData[levelNdx][layerNdx]->resize((size_t)getCompressedImageSizeInBytes(m_dstFormat, m_dstImageResolutions[levelNdx]));
1624 }
1625 else
1626 {
1627 getCompressedImageData(m_srcFormat, m_srcImageResolutions[levelNdx], *m_srcData[levelNdx][layerNdx], layerNdx, levelNdx);
1628
1629 m_dstData[levelNdx][layerNdx]->resize((size_t)getUncompressedImageSizeInBytes(m_dstFormat, m_dstImageResolutions[levelNdx]));
1630 }
1631
1632 DE_ASSERT(m_srcData[levelNdx][layerNdx]->size() == m_dstData[levelNdx][layerNdx]->size());
1633 }
1634 }
1635 }
1636
prepareVertexBuffer()1637 void GraphicsAttachmentsTestInstance::prepareVertexBuffer ()
1638 {
1639 const DeviceInterface& vk = m_context.getDeviceInterface();
1640 const VkDevice device = m_context.getDevice();
1641 Allocator& allocator = m_context.getDefaultAllocator();
1642
1643 const std::vector<tcu::Vec4> vertexArray = createFullscreenQuad();
1644 const size_t vertexBufferSizeInBytes = vertexArray.size() * sizeof(vertexArray[0]);
1645
1646 m_vertexCount = static_cast<deUint32>(vertexArray.size());
1647 m_vertexBuffer = MovePtr<Buffer>(new Buffer(vk, device, allocator, makeBufferCreateInfo(vertexBufferSizeInBytes, VK_BUFFER_USAGE_VERTEX_BUFFER_BIT), MemoryRequirement::HostVisible));
1648
1649 // Upload vertex data
1650 const Allocation& vertexBufferAlloc = m_vertexBuffer->getAllocation();
1651 deMemcpy(vertexBufferAlloc.getHostPtr(), &vertexArray[0], vertexBufferSizeInBytes);
1652 flushAlloc(vk, device, vertexBufferAlloc);
1653 }
1654
// OPERATION_ATTACHMENT_READ path: for every (level, layer) of the single
// compressed image, upload the compressed reference data, render a fullscreen
// quad that reads it through an uncompressed-format input attachment view and
// writes it to a fresh uncompressed colour attachment, then read that
// attachment back into m_dstData. The compressed image itself is kept in
// m_compressedImage for later verification.
void GraphicsAttachmentsTestInstance::transcodeRead ()
{
	const DeviceInterface&	vk					= m_context.getDeviceInterface();
	const VkDevice			device				= m_context.getDevice();
	const deUint32			queueFamilyIndex	= m_context.getUniversalQueueFamilyIndex();
	const VkQueue			queue				= m_context.getUniversalQueue();
	Allocator&				allocator			= m_context.getDefaultAllocator();

	const VkImageCreateFlags*	imgCreateFlagsOverride	= DE_NULL;	// use the defaults computed in makeCreateImageInfo()

	// One compressed image holding all mip levels and layers.
	const VkImageCreateInfo	srcImageCreateInfo	= makeCreateImageInfo(m_srcFormat, m_parameters.imageType, m_srcImageResolutions[0], m_srcImageUsageFlags, imgCreateFlagsOverride, getLevelCount(), getLayerCount());
	MovePtr<Image>			srcImage			(new Image(vk, device, allocator, srcImageCreateInfo, MemoryRequirement::Any));

	const Unique<VkShaderModule>	vertShaderModule	(createShaderModule(vk, device, m_context.getBinaryCollection().get("vert"), 0));
	const Unique<VkShaderModule>	fragShaderModule	(createShaderModule(vk, device, m_context.getBinaryCollection().get("frag"), 0));

	// Both attachments use the uncompressed format; the compressed data is
	// reinterpreted through the image view.
	const Unique<VkRenderPass>		renderPass			(vkt::image::makeRenderPass(vk, device, m_parameters.formatUncompressed, m_parameters.formatUncompressed));

	const Move<VkDescriptorSetLayout>	descriptorSetLayout	(DescriptorSetLayoutBuilder()
															.addSingleBinding(VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, VK_SHADER_STAGE_FRAGMENT_BIT)
															.build(vk, device));
	const Move<VkDescriptorPool>		descriptorPool		(DescriptorPoolBuilder()
															.addType(VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT)
															.build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u));
	const Move<VkDescriptorSet>			descriptorSet		(makeDescriptorSet(vk, device, *descriptorPool, *descriptorSetLayout));

	// Pipeline uses dynamic viewport/scissor; the size given here is a placeholder.
	const VkExtent2D				renderSizeDummy		(makeExtent2D(1u, 1u));
	const Unique<VkPipelineLayout>	pipelineLayout		(makePipelineLayout(vk, device, *descriptorSetLayout));
	const Unique<VkPipeline>		pipeline			(makeGraphicsPipeline(vk, device, *pipelineLayout, *renderPass, *vertShaderModule, *fragShaderModule, renderSizeDummy, 1u, true));

	const Unique<VkCommandPool>		cmdPool				(createCommandPool(vk, device, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT, queueFamilyIndex));
	const Unique<VkCommandBuffer>	cmdBuffer			(allocateCommandBuffer(vk, device, *cmdPool, VK_COMMAND_BUFFER_LEVEL_PRIMARY));

	for (deUint32 levelNdx = 0; levelNdx < getLevelCount(); ++levelNdx)
	{
		const UVec3&	uncompressedImageRes	= m_uncompressedImageResVec[levelNdx];
		const UVec3&	srcImageResolution		= m_srcImageResolutions[levelNdx];
		const UVec3&	dstImageResolution		= m_dstImageResolutions[levelNdx];
		const size_t	srcImageSizeInBytes		= m_srcData[levelNdx][0]->size();
		const size_t	dstImageSizeInBytes		= m_dstData[levelNdx][0]->size();
		// Buffer rows for the compressed copy are addressed in whole blocks.
		const UVec3		srcImageResBlocked		= getCompressedImageResolutionBlockCeil(m_parameters.formatCompressed, srcImageResolution);

		// Destination colour attachment: one level / one layer per iteration.
		const VkImageCreateInfo	dstImageCreateInfo	= makeCreateImageInfo(m_dstFormat, m_parameters.imageType, dstImageResolution, m_dstImageUsageFlags, imgCreateFlagsOverride, SINGLE_LEVEL, SINGLE_LAYER);

		const VkBufferCreateInfo	srcImageBufferInfo	= makeBufferCreateInfo(srcImageSizeInBytes, VK_BUFFER_USAGE_TRANSFER_SRC_BIT);
		const MovePtr<Buffer>		srcImageBuffer		= MovePtr<Buffer>(new Buffer(vk, device, allocator, srcImageBufferInfo, MemoryRequirement::HostVisible));

		const VkBufferCreateInfo	dstImageBufferInfo	= makeBufferCreateInfo(dstImageSizeInBytes, VK_BUFFER_USAGE_TRANSFER_DST_BIT);
		MovePtr<Buffer>				dstImageBuffer		= MovePtr<Buffer>(new Buffer(vk, device, allocator, dstImageBufferInfo, MemoryRequirement::HostVisible));

		const VkExtent2D	renderSize	(makeExtent2D(uncompressedImageRes.x(), uncompressedImageRes.y()));
		const VkViewport	viewport	= makeViewport(renderSize);
		const VkRect2D		scissor		= makeRect2D(renderSize);

		for (deUint32 layerNdx = 0; layerNdx < getLayerCount(); ++layerNdx)
		{
			const VkImageSubresourceRange	srcSubresourceRange	= makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, levelNdx, SINGLE_LEVEL, layerNdx, SINGLE_LAYER);
			const VkImageSubresourceRange	dstSubresourceRange	= makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, SINGLE_LEVEL, 0u, SINGLE_LAYER);

			// View of the compressed image with the uncompressed format (plus
			// the explicit view-usage chain set up in prepareData()).
			Move<VkImageView>				srcImageView		(makeImageView(vk, device, srcImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatUncompressed, srcSubresourceRange, m_srcImageViewUsageKHR));

			de::MovePtr<Image>				dstImage			(new Image(vk, device, allocator, dstImageCreateInfo, MemoryRequirement::Any));
			Move<VkImageView>				dstImageView		(makeImageView(vk, device, dstImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatUncompressed, dstSubresourceRange, m_dstImageViewUsageKHR));

			const VkBufferImageCopy			srcCopyRegion		= makeBufferImageCopy(srcImageResolution.x(), srcImageResolution.y(), levelNdx, layerNdx, srcImageResBlocked.x(), srcImageResBlocked.y());
			const VkBufferMemoryBarrier		srcCopyBufferBarrierPre	= makeBufferMemoryBarrier(VK_ACCESS_HOST_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT, srcImageBuffer->get(), 0ull, srcImageSizeInBytes);
			const VkImageMemoryBarrier		srcCopyImageBarrierPre	= makeImageMemoryBarrier(0u, VK_ACCESS_TRANSFER_WRITE_BIT, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, srcImage->get(), srcSubresourceRange);
			const VkImageMemoryBarrier		srcCopyImageBarrierPost	= makeImageMemoryBarrier(VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_SHADER_READ_BIT, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL, srcImage->get(), srcSubresourceRange);
			const VkBufferImageCopy			dstCopyRegion		= makeBufferImageCopy(dstImageResolution.x(), dstImageResolution.y());
			const VkImageMemoryBarrier		dstInitImageBarrier	= makeImageMemoryBarrier(0u, VK_ACCESS_SHADER_READ_BIT, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, dstImage->get(), dstSubresourceRange);

			const VkImageView				attachmentBindInfos[]	= { *srcImageView, *dstImageView };
			const VkExtent2D				framebufferSize			(makeExtent2D(dstImageResolution[0], dstImageResolution[1]));
			const Move<VkFramebuffer>		framebuffer				(makeFramebuffer(vk, device, *renderPass, DE_LENGTH_OF_ARRAY(attachmentBindInfos), attachmentBindInfos, framebufferSize.width, framebufferSize.height, SINGLE_LAYER));

			// Upload source image data
			const Allocation& alloc = srcImageBuffer->getAllocation();
			deMemcpy(alloc.getHostPtr(), &m_srcData[levelNdx][layerNdx]->at(0), srcImageSizeInBytes);
			flushAlloc(vk, device, alloc);

			beginCommandBuffer(vk, *cmdBuffer);
			vk.cmdBindPipeline(*cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *pipeline);

			// Copy buffer to image
			vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1u, &srcCopyBufferBarrierPre, 1u, &srcCopyImageBarrierPre);
			vk.cmdCopyBufferToImage(*cmdBuffer, srcImageBuffer->get(), srcImage->get(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &srcCopyRegion);
			vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, DE_NULL, 1u, &srcCopyImageBarrierPost);

			// Define destination image layout
			vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, DE_NULL, 1u, &dstInitImageBarrier);

			beginRenderPass(vk, *cmdBuffer, *renderPass, *framebuffer, renderSize);

			// Bind the compressed-through-uncompressed view as the input attachment.
			const VkDescriptorImageInfo	descriptorSrcImageInfo(makeDescriptorImageInfo(DE_NULL, *srcImageView, VK_IMAGE_LAYOUT_GENERAL));
			DescriptorSetUpdateBuilder()
				.writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u), VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, &descriptorSrcImageInfo)
				.update(vk, device);

			vk.cmdBindDescriptorSets(*cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *pipelineLayout, 0u, 1u, &descriptorSet.get(), 0u, DE_NULL);
			vk.cmdBindVertexBuffers(*cmdBuffer, 0u, 1u, &m_vertexBuffer->get(), &m_vertexBufferOffset);

			vk.cmdSetViewport(*cmdBuffer, 0u, 1u, &viewport);
			vk.cmdSetScissor(*cmdBuffer, 0u, 1u, &scissor);

			vk.cmdDraw(*cmdBuffer, (deUint32)m_vertexCount, 1, 0, 0);

			endRenderPass(vk, *cmdBuffer);

			// NOTE(review): oldLayout GENERAL here assumes the render pass
			// transitions the colour attachment to GENERAL as its final
			// layout — determined by vkt::image::makeRenderPass, not visible
			// in this file.
			const VkImageMemoryBarrier prepareForTransferBarrier = makeImageMemoryBarrier(
				VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
				VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_GENERAL,
				dstImage->get(), dstSubresourceRange);

			const VkBufferMemoryBarrier copyBarrier = makeBufferMemoryBarrier(
				VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_HOST_READ_BIT,
				dstImageBuffer->get(), 0ull, dstImageSizeInBytes);

			// Read the rendered result back to the host-visible buffer.
			vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0, (const VkBufferMemoryBarrier*)DE_NULL, 1, &prepareForTransferBarrier);
			vk.cmdCopyImageToBuffer(*cmdBuffer, dstImage->get(), VK_IMAGE_LAYOUT_GENERAL, dstImageBuffer->get(), 1u, &dstCopyRegion);
			vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_HOST_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1, &copyBarrier, 0, (const VkImageMemoryBarrier*)DE_NULL);

			endCommandBuffer(vk, *cmdBuffer);

			submitCommandsAndWait(vk, device, queue, *cmdBuffer);

			const Allocation& dstImageBufferAlloc = dstImageBuffer->getAllocation();
			invalidateAlloc(vk, device, dstImageBufferAlloc);
			deMemcpy(&m_dstData[levelNdx][layerNdx]->at(0), dstImageBufferAlloc.getHostPtr(), dstImageSizeInBytes);
		}
	}

	m_compressedImage = srcImage;
}
1788
// OPERATION_ATTACHMENT_WRITE path: mirror image of transcodeRead(). For every
// (level, layer), upload the uncompressed reference data into a fresh
// single-level/-layer source image, render it through an input attachment into
// the corresponding subresource of the compressed image (viewed with the
// uncompressed format as the colour attachment), then copy that subresource
// back to a host buffer into m_dstData. The compressed image is kept in
// m_compressedImage for verification.
void GraphicsAttachmentsTestInstance::transcodeWrite ()
{
	const DeviceInterface&	vk					= m_context.getDeviceInterface();
	const VkDevice			device				= m_context.getDevice();
	const deUint32			queueFamilyIndex	= m_context.getUniversalQueueFamilyIndex();
	const VkQueue			queue				= m_context.getUniversalQueue();
	Allocator&				allocator			= m_context.getDefaultAllocator();

	const VkImageCreateFlags*	imgCreateFlagsOverride	= DE_NULL;	// use the defaults computed in makeCreateImageInfo()

	// One compressed destination image holding all mip levels and layers.
	const VkImageCreateInfo	dstImageCreateInfo	= makeCreateImageInfo(m_dstFormat, m_parameters.imageType, m_dstImageResolutions[0], m_dstImageUsageFlags, imgCreateFlagsOverride, getLevelCount(), getLayerCount());
	MovePtr<Image>			dstImage			(new Image(vk, device, allocator, dstImageCreateInfo, MemoryRequirement::Any));

	const Unique<VkShaderModule>	vertShaderModule	(createShaderModule(vk, device, m_context.getBinaryCollection().get("vert"), 0));
	const Unique<VkShaderModule>	fragShaderModule	(createShaderModule(vk, device, m_context.getBinaryCollection().get("frag"), 0));

	const Unique<VkRenderPass>		renderPass			(vkt::image::makeRenderPass(vk, device, m_parameters.formatUncompressed, m_parameters.formatUncompressed));

	const Move<VkDescriptorSetLayout>	descriptorSetLayout	(DescriptorSetLayoutBuilder()
															.addSingleBinding(VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, VK_SHADER_STAGE_FRAGMENT_BIT)
															.build(vk, device));
	const Move<VkDescriptorPool>		descriptorPool		(DescriptorPoolBuilder()
															.addType(VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT)
															.build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u));
	const Move<VkDescriptorSet>			descriptorSet		(makeDescriptorSet(vk, device, *descriptorPool, *descriptorSetLayout));

	// Pipeline uses dynamic viewport/scissor; the size given here is a placeholder.
	const VkExtent2D				renderSizeDummy		(makeExtent2D(1u, 1u));
	const Unique<VkPipelineLayout>	pipelineLayout		(makePipelineLayout(vk, device, *descriptorSetLayout));
	const Unique<VkPipeline>		pipeline			(makeGraphicsPipeline(vk, device, *pipelineLayout, *renderPass, *vertShaderModule, *fragShaderModule, renderSizeDummy, 1u, true));

	const Unique<VkCommandPool>		cmdPool				(createCommandPool(vk, device, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT, queueFamilyIndex));
	const Unique<VkCommandBuffer>	cmdBuffer			(allocateCommandBuffer(vk, device, *cmdPool, VK_COMMAND_BUFFER_LEVEL_PRIMARY));

	for (deUint32 levelNdx = 0; levelNdx < getLevelCount(); ++levelNdx)
	{
		const UVec3&	uncompressedImageRes	= m_uncompressedImageResVec[levelNdx];
		const UVec3&	srcImageResolution		= m_srcImageResolutions[levelNdx];
		const UVec3&	dstImageResolution		= m_dstImageResolutions[levelNdx];
		// Buffer rows for the compressed copy are addressed in whole blocks.
		const UVec3		dstImageResBlocked		= getCompressedImageResolutionBlockCeil(m_parameters.formatCompressed, dstImageResolution);
		const size_t	srcImageSizeInBytes		= m_srcData[levelNdx][0]->size();
		const size_t	dstImageSizeInBytes		= m_dstData[levelNdx][0]->size();

		// Source image: one level / one layer per iteration.
		const VkImageCreateInfo	srcImageCreateInfo	= makeCreateImageInfo(m_srcFormat, m_parameters.imageType, srcImageResolution, m_srcImageUsageFlags, imgCreateFlagsOverride, SINGLE_LEVEL, SINGLE_LAYER);

		const VkExtent2D	renderSize	(makeExtent2D(uncompressedImageRes.x(), uncompressedImageRes.y()));
		const VkViewport	viewport	= makeViewport(renderSize);
		const VkRect2D		scissor		= makeRect2D(renderSize);

		for (deUint32 layerNdx = 0; layerNdx < getLayerCount(); ++layerNdx)
		{
			const VkBufferCreateInfo	srcImageBufferInfo	= makeBufferCreateInfo(srcImageSizeInBytes, VK_BUFFER_USAGE_TRANSFER_SRC_BIT);
			const MovePtr<Buffer>		srcImageBuffer		= MovePtr<Buffer>(new Buffer(vk, device, allocator, srcImageBufferInfo, MemoryRequirement::HostVisible));

			const VkBufferCreateInfo	dstImageBufferInfo	= makeBufferCreateInfo(dstImageSizeInBytes, VK_BUFFER_USAGE_TRANSFER_DST_BIT);
			MovePtr<Buffer>				dstImageBuffer		= MovePtr<Buffer>(new Buffer(vk, device, allocator, dstImageBufferInfo, MemoryRequirement::HostVisible));

			const VkImageSubresourceRange	srcSubresourceRange	= makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, SINGLE_LEVEL, 0u, SINGLE_LAYER);
			const VkImageSubresourceRange	dstSubresourceRange	= makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, levelNdx, SINGLE_LEVEL, layerNdx, SINGLE_LAYER);

			// Colour-attachment view into the compressed image's current
			// subresource, using the uncompressed format (with the explicit
			// view-usage chain set up in prepareData()).
			Move<VkImageView>				dstImageView		(makeImageView(vk, device, dstImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatUncompressed, dstSubresourceRange, m_dstImageViewUsageKHR));

			de::MovePtr<Image>				srcImage			(new Image(vk, device, allocator, srcImageCreateInfo, MemoryRequirement::Any));
			Move<VkImageView>				srcImageView		(makeImageView(vk, device, srcImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatUncompressed, srcSubresourceRange, m_srcImageViewUsageKHR));

			const VkBufferImageCopy			srcCopyRegion		= makeBufferImageCopy(srcImageResolution.x(), srcImageResolution.y(), 0u, 0u);
			const VkBufferMemoryBarrier		srcCopyBufferBarrierPre	= makeBufferMemoryBarrier(VK_ACCESS_HOST_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT, srcImageBuffer->get(), 0ull, srcImageSizeInBytes);
			const VkImageMemoryBarrier		srcCopyImageBarrierPre	= makeImageMemoryBarrier(0u, VK_ACCESS_TRANSFER_WRITE_BIT, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, srcImage->get(), srcSubresourceRange);
			const VkImageMemoryBarrier		srcCopyImageBarrierPost	= makeImageMemoryBarrier(VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_INPUT_ATTACHMENT_READ_BIT, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL, srcImage->get(), srcSubresourceRange);
			const VkBufferImageCopy			dstCopyRegion		= makeBufferImageCopy(dstImageResolution.x(), dstImageResolution.y(), levelNdx, layerNdx, dstImageResBlocked.x(), dstImageResBlocked.y());
			const VkImageMemoryBarrier		dstInitImageBarrier	= makeImageMemoryBarrier(0u, VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL, dstImage->get(), dstSubresourceRange);

			const VkImageView				attachmentBindInfos[]	= { *srcImageView, *dstImageView };
			const VkExtent2D				framebufferSize			(renderSize);
			const Move<VkFramebuffer>		framebuffer				(makeFramebuffer(vk, device, *renderPass, DE_LENGTH_OF_ARRAY(attachmentBindInfos), attachmentBindInfos, framebufferSize.width, framebufferSize.height, SINGLE_LAYER));

			// Upload source image data
			const Allocation& alloc = srcImageBuffer->getAllocation();
			deMemcpy(alloc.getHostPtr(), &m_srcData[levelNdx][layerNdx]->at(0), srcImageSizeInBytes);
			flushAlloc(vk, device, alloc);

			beginCommandBuffer(vk, *cmdBuffer);
			vk.cmdBindPipeline(*cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *pipeline);

			// Copy buffer to image
			vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1u, &srcCopyBufferBarrierPre, 1u, &srcCopyImageBarrierPre);
			vk.cmdCopyBufferToImage(*cmdBuffer, srcImageBuffer->get(), srcImage->get(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &srcCopyRegion);
			vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, DE_NULL, 1u, &srcCopyImageBarrierPost);

			// Define destination image layout
			vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, DE_NULL, 1u, &dstInitImageBarrier);

			beginRenderPass(vk, *cmdBuffer, *renderPass, *framebuffer, renderSize);

			// Bind the uncompressed source view as the input attachment.
			const VkDescriptorImageInfo	descriptorSrcImageInfo(makeDescriptorImageInfo(DE_NULL, *srcImageView, VK_IMAGE_LAYOUT_GENERAL));
			DescriptorSetUpdateBuilder()
				.writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u), VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, &descriptorSrcImageInfo)
				.update(vk, device);

			vk.cmdBindDescriptorSets(*cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *pipelineLayout, 0u, 1u, &descriptorSet.get(), 0u, DE_NULL);
			vk.cmdBindVertexBuffers(*cmdBuffer, 0u, 1u, &m_vertexBuffer->get(), &m_vertexBufferOffset);

			vk.cmdSetViewport(*cmdBuffer, 0u, 1u, &viewport);
			vk.cmdSetScissor(*cmdBuffer, 0u, 1u, &scissor);

			vk.cmdDraw(*cmdBuffer, (deUint32)m_vertexCount, 1, 0, 0);

			endRenderPass(vk, *cmdBuffer);

			const VkImageMemoryBarrier prepareForTransferBarrier = makeImageMemoryBarrier(
				VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
				VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_GENERAL,
				dstImage->get(), dstSubresourceRange);

			const VkBufferMemoryBarrier copyBarrier = makeBufferMemoryBarrier(
				VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_HOST_READ_BIT,
				dstImageBuffer->get(), 0ull, dstImageSizeInBytes);

			// Read the freshly-written compressed subresource back to the host.
			vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0, (const VkBufferMemoryBarrier*)DE_NULL, 1, &prepareForTransferBarrier);
			vk.cmdCopyImageToBuffer(*cmdBuffer, dstImage->get(), VK_IMAGE_LAYOUT_GENERAL, dstImageBuffer->get(), 1u, &dstCopyRegion);
			vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_HOST_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1, &copyBarrier, 0, (const VkImageMemoryBarrier*)DE_NULL);

			endCommandBuffer(vk, *cmdBuffer);

			submitCommandsAndWait(vk, device, queue, *cmdBuffer);

			const Allocation& dstImageBufferAlloc = dstImageBuffer->getAllocation();
			invalidateAlloc(vk, device, dstImageBufferAlloc);
			deMemcpy(&m_dstData[levelNdx][layerNdx]->at(0), dstImageBufferAlloc.getHostPtr(), dstImageSizeInBytes);
		}
	}

	m_compressedImage = dstImage;
}
1922
isWriteToCompressedOperation()1923 bool GraphicsAttachmentsTestInstance::isWriteToCompressedOperation ()
1924 {
1925 return (m_parameters.operation == OPERATION_ATTACHMENT_WRITE);
1926 }
1927
makeCreateImageInfo(const VkFormat format,const ImageType type,const UVec3 & size,const VkImageUsageFlags usageFlags,const VkImageCreateFlags * createFlags,const deUint32 levels,const deUint32 layers)1928 VkImageCreateInfo GraphicsAttachmentsTestInstance::makeCreateImageInfo (const VkFormat format,
1929 const ImageType type,
1930 const UVec3& size,
1931 const VkImageUsageFlags usageFlags,
1932 const VkImageCreateFlags* createFlags,
1933 const deUint32 levels,
1934 const deUint32 layers)
1935 {
1936 const VkImageType imageType = mapImageType(type);
1937 const VkImageCreateFlags imageCreateFlagsBase = VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT;
1938 const VkImageCreateFlags imageCreateFlagsAddOn = isCompressedFormat(format) ? VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT_KHR | VK_IMAGE_CREATE_EXTENDED_USAGE_BIT_KHR : 0;
1939 const VkImageCreateFlags imageCreateFlags = (createFlags != DE_NULL) ? *createFlags : (imageCreateFlagsBase | imageCreateFlagsAddOn);
1940
1941 VkFormatProperties properties;
1942 m_context.getInstanceInterface().getPhysicalDeviceFormatProperties(m_context.getPhysicalDevice(), format, &properties);
1943 if ((usageFlags & VK_IMAGE_USAGE_STORAGE_BIT) && !(properties.optimalTilingFeatures & VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT))
1944 TCU_THROW(NotSupportedError, "Format storage feature not supported");
1945 if ((usageFlags & VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT) && !(properties.optimalTilingFeatures & VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT))
1946 TCU_THROW(NotSupportedError, "Format color attachment feature not supported");
1947 if ((usageFlags & VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT) && !(properties.optimalTilingFeatures & VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT) &&
1948 !(properties.optimalTilingFeatures & VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT))
1949 TCU_THROW(NotSupportedError, "Format color/depth/stencil attachment feature not supported for input attachment usage");
1950
1951 const VkImageCreateInfo createImageInfo =
1952 {
1953 VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO, // VkStructureType sType;
1954 DE_NULL, // const void* pNext;
1955 imageCreateFlags, // VkImageCreateFlags flags;
1956 imageType, // VkImageType imageType;
1957 format, // VkFormat format;
1958 makeExtent3D(getLayerSize(type, size)), // VkExtent3D extent;
1959 levels, // deUint32 mipLevels;
1960 layers, // deUint32 arrayLayers;
1961 VK_SAMPLE_COUNT_1_BIT, // VkSampleCountFlagBits samples;
1962 VK_IMAGE_TILING_OPTIMAL, // VkImageTiling tiling;
1963 usageFlags, // VkImageUsageFlags usage;
1964 VK_SHARING_MODE_EXCLUSIVE, // VkSharingMode sharingMode;
1965 0u, // deUint32 queueFamilyIndexCount;
1966 DE_NULL, // const deUint32* pQueueFamilyIndices;
1967 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout initialLayout;
1968 };
1969
1970 return createImageInfo;
1971 }
1972
getCompressedImageData(const VkFormat format,const UVec3 & size,std::vector<deUint8> & data,const deUint32 layer,const deUint32 level)1973 VkDeviceSize GraphicsAttachmentsTestInstance::getCompressedImageData (const VkFormat format,
1974 const UVec3& size,
1975 std::vector<deUint8>& data,
1976 const deUint32 layer,
1977 const deUint32 level)
1978 {
1979 VkDeviceSize sizeBytes = getCompressedImageSizeInBytes(format, size);
1980
1981 data.resize((size_t)sizeBytes);
1982 generateData(&data[0], data.size(), format, layer, level);
1983
1984 return sizeBytes;
1985 }
1986
getUncompressedImageData(const VkFormat format,const UVec3 & size,std::vector<deUint8> & data,const deUint32 layer,const deUint32 level)1987 VkDeviceSize GraphicsAttachmentsTestInstance::getUncompressedImageData (const VkFormat format,
1988 const UVec3& size,
1989 std::vector<deUint8>& data,
1990 const deUint32 layer,
1991 const deUint32 level)
1992 {
1993 tcu::IVec3 sizeAsIVec3 = tcu::IVec3(static_cast<int>(size[0]), static_cast<int>(size[1]), static_cast<int>(size[2]));
1994 VkDeviceSize sizeBytes = getImageSizeBytes(sizeAsIVec3, format);
1995
1996 data.resize((size_t)sizeBytes);
1997 generateData(&data[0], data.size(), format, layer, level);
1998
1999 return sizeBytes;
2000 }
2001
verifyDecompression(const std::vector<deUint8> & refCompressedData,const de::MovePtr<Image> & resCompressedImage,const deUint32 level,const deUint32 layer,const UVec3 & mipmapDims)2002 bool GraphicsAttachmentsTestInstance::verifyDecompression (const std::vector<deUint8>& refCompressedData,
2003 const de::MovePtr<Image>& resCompressedImage,
2004 const deUint32 level,
2005 const deUint32 layer,
2006 const UVec3& mipmapDims)
2007 {
2008 const DeviceInterface& vk = m_context.getDeviceInterface();
2009 const VkDevice device = m_context.getDevice();
2010 const deUint32 queueFamilyIndex = m_context.getUniversalQueueFamilyIndex();
2011 const VkQueue queue = m_context.getUniversalQueue();
2012 Allocator& allocator = m_context.getDefaultAllocator();
2013
2014 const bool layoutShaderReadOnly = (layer % 2u) == 1;
2015 const UVec3 mipmapDimsBlocked = getCompressedImageResolutionBlockCeil(m_parameters.formatCompressed, mipmapDims);
2016
2017 const VkImageSubresourceRange subresourceRange = makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, SINGLE_LEVEL, 0u, SINGLE_LAYER);
2018 const VkImageSubresourceRange resSubresourceRange = makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, level, SINGLE_LEVEL, layer, SINGLE_LAYER);
2019
2020 const VkDeviceSize dstBufferSize = getUncompressedImageSizeInBytes(m_parameters.formatForVerify, mipmapDims);
2021 const VkImageUsageFlags refSrcImageUsageFlags = VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
2022
2023 const VkBufferCreateInfo refSrcImageBufferInfo (makeBufferCreateInfo(refCompressedData.size(), VK_BUFFER_USAGE_TRANSFER_SRC_BIT));
2024 const MovePtr<Buffer> refSrcImageBuffer = MovePtr<Buffer>(new Buffer(vk, device, allocator, refSrcImageBufferInfo, MemoryRequirement::HostVisible));
2025
2026 const VkImageCreateFlags refSrcImageCreateFlags = 0;
2027 const VkImageCreateInfo refSrcImageCreateInfo = makeCreateImageInfo(m_parameters.formatCompressed, m_parameters.imageType, mipmapDimsBlocked, refSrcImageUsageFlags, &refSrcImageCreateFlags, SINGLE_LEVEL, SINGLE_LAYER);
2028 const MovePtr<Image> refSrcImage (new Image(vk, device, allocator, refSrcImageCreateInfo, MemoryRequirement::Any));
2029 Move<VkImageView> refSrcImageView (makeImageView(vk, device, refSrcImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatCompressed, subresourceRange));
2030
2031 const VkImageUsageFlags resSrcImageUsageFlags = VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
2032 const VkImageViewUsageCreateInfo resSrcImageViewUsageKHR = makeImageViewUsageCreateInfo(resSrcImageUsageFlags);
2033 Move<VkImageView> resSrcImageView (makeImageView(vk, device, resCompressedImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatCompressed, resSubresourceRange, &resSrcImageViewUsageKHR));
2034
2035 const VkImageCreateFlags refDstImageCreateFlags = 0;
2036 const VkImageUsageFlags refDstImageUsageFlags = VK_IMAGE_USAGE_STORAGE_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
2037 const VkImageCreateInfo refDstImageCreateInfo = makeCreateImageInfo(m_parameters.formatForVerify, m_parameters.imageType, mipmapDims, refDstImageUsageFlags, &refDstImageCreateFlags, SINGLE_LEVEL, SINGLE_LAYER);
2038 const MovePtr<Image> refDstImage (new Image(vk, device, allocator, refDstImageCreateInfo, MemoryRequirement::Any));
2039 const Move<VkImageView> refDstImageView (makeImageView(vk, device, refDstImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatForVerify, subresourceRange));
2040 const VkImageMemoryBarrier refDstInitImageBarrier = makeImageMemoryBarrier(0u, VK_ACCESS_SHADER_WRITE_BIT, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL, refDstImage->get(), subresourceRange);
2041 const VkBufferCreateInfo refDstBufferInfo (makeBufferCreateInfo(dstBufferSize, VK_BUFFER_USAGE_TRANSFER_DST_BIT));
2042 const MovePtr<Buffer> refDstBuffer = MovePtr<Buffer>(new Buffer(vk, device, allocator, refDstBufferInfo, MemoryRequirement::HostVisible));
2043
2044 const VkImageCreateFlags resDstImageCreateFlags = 0;
2045 const VkImageUsageFlags resDstImageUsageFlags = VK_IMAGE_USAGE_STORAGE_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
2046 const VkImageCreateInfo resDstImageCreateInfo = makeCreateImageInfo(m_parameters.formatForVerify, m_parameters.imageType, mipmapDims, resDstImageUsageFlags, &resDstImageCreateFlags, SINGLE_LEVEL, SINGLE_LAYER);
2047 const MovePtr<Image> resDstImage (new Image(vk, device, allocator, resDstImageCreateInfo, MemoryRequirement::Any));
2048 const Move<VkImageView> resDstImageView (makeImageView(vk, device, resDstImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatForVerify, subresourceRange));
2049 const VkImageMemoryBarrier resDstInitImageBarrier = makeImageMemoryBarrier(0u, VK_ACCESS_SHADER_WRITE_BIT, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL, resDstImage->get(), subresourceRange);
2050 const VkBufferCreateInfo resDstBufferInfo (makeBufferCreateInfo(dstBufferSize, VK_BUFFER_USAGE_TRANSFER_DST_BIT));
2051 const MovePtr<Buffer> resDstBuffer = MovePtr<Buffer>(new Buffer(vk, device, allocator, resDstBufferInfo, MemoryRequirement::HostVisible));
2052
2053 const Unique<VkShaderModule> vertShaderModule (createShaderModule(vk, device, m_context.getBinaryCollection().get("vert"), 0));
2054 const Unique<VkShaderModule> fragShaderModule (createShaderModule(vk, device, m_context.getBinaryCollection().get("frag_verify"), 0));
2055
2056 const Unique<VkRenderPass> renderPass (vk::makeRenderPass(vk, device));
2057
2058 const Move<VkDescriptorSetLayout> descriptorSetLayout (DescriptorSetLayoutBuilder()
2059 .addSingleBinding(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, VK_SHADER_STAGE_FRAGMENT_BIT)
2060 .addSingleBinding(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, VK_SHADER_STAGE_FRAGMENT_BIT)
2061 .addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, VK_SHADER_STAGE_FRAGMENT_BIT)
2062 .addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, VK_SHADER_STAGE_FRAGMENT_BIT)
2063 .build(vk, device));
2064 const Move<VkDescriptorPool> descriptorPool (DescriptorPoolBuilder()
2065 .addType(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER)
2066 .addType(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER)
2067 .addType(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE)
2068 .addType(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE)
2069 .build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u));
2070 const Move<VkDescriptorSet> descriptorSet (makeDescriptorSet(vk, device, *descriptorPool, *descriptorSetLayout));
2071 const VkSamplerCreateInfo refSrcSamplerInfo (makeSamplerCreateInfo());
2072 const Move<VkSampler> refSrcSampler = vk::createSampler(vk, device, &refSrcSamplerInfo);
2073 const VkSamplerCreateInfo resSrcSamplerInfo (makeSamplerCreateInfo());
2074 const Move<VkSampler> resSrcSampler = vk::createSampler(vk, device, &resSrcSamplerInfo);
2075 const VkDescriptorImageInfo descriptorRefSrcImage (makeDescriptorImageInfo(*refSrcSampler, *refSrcImageView, layoutShaderReadOnly ? VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL : VK_IMAGE_LAYOUT_GENERAL));
2076 const VkDescriptorImageInfo descriptorResSrcImage (makeDescriptorImageInfo(*resSrcSampler, *resSrcImageView, layoutShaderReadOnly ? VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL : VK_IMAGE_LAYOUT_GENERAL));
2077 const VkDescriptorImageInfo descriptorRefDstImage (makeDescriptorImageInfo(DE_NULL, *refDstImageView, VK_IMAGE_LAYOUT_GENERAL));
2078 const VkDescriptorImageInfo descriptorResDstImage (makeDescriptorImageInfo(DE_NULL, *resDstImageView, VK_IMAGE_LAYOUT_GENERAL));
2079
2080 const VkExtent2D renderSize (makeExtent2D(mipmapDims.x(), mipmapDims.y()));
2081 const Unique<VkPipelineLayout> pipelineLayout (makePipelineLayout(vk, device, *descriptorSetLayout));
2082 const Unique<VkPipeline> pipeline (makeGraphicsPipeline(vk, device, *pipelineLayout, *renderPass, *vertShaderModule, *fragShaderModule, renderSize, 0u));
2083 const Unique<VkCommandPool> cmdPool (createCommandPool(vk, device, VK_COMMAND_POOL_RESET_RELEASE_RESOURCES_BIT, queueFamilyIndex));
2084 const Unique<VkCommandBuffer> cmdBuffer (allocateCommandBuffer(vk, device, *cmdPool, VK_COMMAND_BUFFER_LEVEL_PRIMARY));
2085
2086 const VkBufferImageCopy copyBufferToImageRegion = makeBufferImageCopy(mipmapDimsBlocked.x(), mipmapDimsBlocked.y(), 0u, 0u, mipmapDimsBlocked.x(), mipmapDimsBlocked.y());
2087 const VkBufferImageCopy copyRegion = makeBufferImageCopy(mipmapDims.x(), mipmapDims.y(), 0u, 0u);
2088 const VkBufferMemoryBarrier refSrcCopyBufferBarrierPre = makeBufferMemoryBarrier(VK_ACCESS_HOST_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT, refSrcImageBuffer->get(), 0ull, refCompressedData.size());
2089 const VkImageMemoryBarrier refSrcCopyImageBarrierPre = makeImageMemoryBarrier(0u, VK_ACCESS_TRANSFER_WRITE_BIT, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL, refSrcImage->get(), subresourceRange);
2090 const VkImageMemoryBarrier refSrcCopyImageBarrierPost = makeImageMemoryBarrier(VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_SHADER_READ_BIT, VK_IMAGE_LAYOUT_GENERAL, layoutShaderReadOnly ? VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL : VK_IMAGE_LAYOUT_GENERAL, refSrcImage->get(), subresourceRange);
2091 const VkImageMemoryBarrier resCompressedImageBarrier = makeImageMemoryBarrier(0, VK_ACCESS_SHADER_READ_BIT, VK_IMAGE_LAYOUT_GENERAL, layoutShaderReadOnly ? VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL : VK_IMAGE_LAYOUT_GENERAL, resCompressedImage->get(), resSubresourceRange);
2092
2093 const Move<VkFramebuffer> framebuffer (makeFramebuffer(vk, device, *renderPass, 0, DE_NULL, renderSize.width, renderSize.height, getLayerCount()));
2094
2095 // Upload source image data
2096 {
2097 const Allocation& refSrcImageBufferAlloc = refSrcImageBuffer->getAllocation();
2098 deMemcpy(refSrcImageBufferAlloc.getHostPtr(), &refCompressedData[0], refCompressedData.size());
2099 flushAlloc(vk, device, refSrcImageBufferAlloc);
2100 }
2101
2102 beginCommandBuffer(vk, *cmdBuffer);
2103 vk.cmdBindPipeline(*cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *pipeline);
2104
2105 // Copy buffer to image
2106 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1u, &refSrcCopyBufferBarrierPre, 1u, &refSrcCopyImageBarrierPre);
2107 vk.cmdCopyBufferToImage(*cmdBuffer, refSrcImageBuffer->get(), refSrcImage->get(), VK_IMAGE_LAYOUT_GENERAL, 1u, ©BufferToImageRegion);
2108 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0, DE_NULL, 1u, &refSrcCopyImageBarrierPost);
2109
2110 // Make reference and result images readable
2111 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, DE_NULL, 1u, &refDstInitImageBarrier);
2112 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, DE_NULL, 1u, &resDstInitImageBarrier);
2113 {
2114 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, DE_NULL, 1u, &resCompressedImageBarrier);
2115 }
2116
2117 beginRenderPass(vk, *cmdBuffer, *renderPass, *framebuffer, renderSize);
2118 {
2119 DescriptorSetUpdateBuilder()
2120 .writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u), VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, &descriptorRefSrcImage)
2121 .writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(1u), VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, &descriptorResSrcImage)
2122 .writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(2u), VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, &descriptorRefDstImage)
2123 .writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(3u), VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, &descriptorResDstImage)
2124 .update(vk, device);
2125
2126 vk.cmdBindDescriptorSets(*cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *pipelineLayout, 0u, 1u, &descriptorSet.get(), 0u, DE_NULL);
2127 vk.cmdBindVertexBuffers(*cmdBuffer, 0, 1, &m_vertexBuffer->get(), &m_vertexBufferOffset);
2128 vk.cmdDraw(*cmdBuffer, m_vertexCount, 1, 0, 0);
2129 }
2130 endRenderPass(vk, *cmdBuffer);
2131
2132 // Decompress reference image
2133 {
2134 const VkImageMemoryBarrier refDstImageBarrier = makeImageMemoryBarrier(
2135 VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
2136 VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_GENERAL,
2137 refDstImage->get(), subresourceRange);
2138
2139 const VkBufferMemoryBarrier refDstBufferBarrier = makeBufferMemoryBarrier(
2140 VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_HOST_READ_BIT,
2141 refDstBuffer->get(), 0ull, dstBufferSize);
2142
2143 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0, (const VkBufferMemoryBarrier*)DE_NULL, 1, &refDstImageBarrier);
2144 vk.cmdCopyImageToBuffer(*cmdBuffer, refDstImage->get(), VK_IMAGE_LAYOUT_GENERAL, refDstBuffer->get(), 1u, ©Region);
2145 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_HOST_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1, &refDstBufferBarrier, 0, (const VkImageMemoryBarrier*)DE_NULL);
2146 }
2147
2148 // Decompress result image
2149 {
2150 const VkImageMemoryBarrier resDstImageBarrier = makeImageMemoryBarrier(
2151 VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
2152 VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_GENERAL,
2153 resDstImage->get(), subresourceRange);
2154
2155 const VkBufferMemoryBarrier resDstBufferBarrier = makeBufferMemoryBarrier(
2156 VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_HOST_READ_BIT,
2157 resDstBuffer->get(), 0ull, dstBufferSize);
2158
2159 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0, (const VkBufferMemoryBarrier*)DE_NULL, 1, &resDstImageBarrier);
2160 vk.cmdCopyImageToBuffer(*cmdBuffer, resDstImage->get(), VK_IMAGE_LAYOUT_GENERAL, resDstBuffer->get(), 1u, ©Region);
2161 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_HOST_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1, &resDstBufferBarrier, 0, (const VkImageMemoryBarrier*)DE_NULL);
2162 }
2163
2164 endCommandBuffer(vk, *cmdBuffer);
2165
2166 submitCommandsAndWait(vk, device, queue, *cmdBuffer);
2167
2168 // Compare decompressed pixel data in reference and result images
2169 {
2170 const Allocation& refDstBufferAlloc = refDstBuffer->getAllocation();
2171 invalidateAlloc(vk, device, refDstBufferAlloc);
2172
2173 const Allocation& resDstBufferAlloc = resDstBuffer->getAllocation();
2174 invalidateAlloc(vk, device, resDstBufferAlloc);
2175
2176 BinaryCompareMode compareMode =
2177 (m_parameters.formatIsASTC)
2178 ?(COMPARE_MODE_ALLOW_ASTC_ERROR_COLOUR_WARNING)
2179 :(COMPARE_MODE_NORMAL);
2180
2181 BinaryCompareResult res = BinaryCompare(refDstBufferAlloc.getHostPtr(),
2182 resDstBufferAlloc.getHostPtr(),
2183 dstBufferSize,
2184 m_parameters.formatForVerify,
2185 compareMode);
2186
2187 if (res == COMPARE_RESULT_FAILED)
2188 {
2189 // Do fuzzy to log error mask
2190 invalidateAlloc(vk, device, resDstBufferAlloc);
2191 invalidateAlloc(vk, device, refDstBufferAlloc);
2192
2193 tcu::ConstPixelBufferAccess resPixels (mapVkFormat(m_parameters.formatForVerify), renderSize.width, renderSize.height, 1u, resDstBufferAlloc.getHostPtr());
2194 tcu::ConstPixelBufferAccess refPixels (mapVkFormat(m_parameters.formatForVerify), renderSize.width, renderSize.height, 1u, refDstBufferAlloc.getHostPtr());
2195
2196 string comment = string("Image Comparison (level=") + de::toString(level) + string(", layer=") + de::toString(layer) + string(")");
2197
2198 if (isWriteToCompressedOperation())
2199 tcu::fuzzyCompare(m_context.getTestContext().getLog(), "ImageComparison", comment.c_str(), refPixels, resPixels, 0.001f, tcu::COMPARE_LOG_EVERYTHING);
2200 else
2201 tcu::fuzzyCompare(m_context.getTestContext().getLog(), "ImageComparison", comment.c_str(), resPixels, refPixels, 0.001f, tcu::COMPARE_LOG_EVERYTHING);
2202
2203 return false;
2204 }
2205 else if (res == COMPARE_RESULT_ASTC_QUALITY_WARNING)
2206 {
2207 m_bASTCErrorColourMismatch = true;
2208 }
2209 }
2210
2211 return true;
2212 }
2213
2214
// Graphics-pipeline test instance for texture-based transcode operations.
// Inherits all the shared setup/teardown from GraphicsAttachmentsTestInstance
// and overrides only the transcode direction hooks.
class GraphicsTextureTestInstance : public GraphicsAttachmentsTestInstance
{
public:
	GraphicsTextureTestInstance (Context& context, const TestParameters& parameters);

protected:
	// True iff the tested operation is OPERATION_TEXTURE_WRITE, i.e. the pass
	// writes into the compressed image rather than reading from it.
	virtual bool isWriteToCompressedOperation ();
	// Samples the (multi-level/layer) source image and writes each level/layer
	// into a per-iteration uncompressed destination image, reading results back.
	virtual void transcodeRead ();
	// Reverse direction: per-level/layer source images are sampled and written
	// into a single multi-level/layer destination image.
	virtual void transcodeWrite ();
};
2225
// All construction work is delegated to the attachments base instance.
GraphicsTextureTestInstance::GraphicsTextureTestInstance (Context& context, const TestParameters& parameters)
	: GraphicsAttachmentsTestInstance(context, parameters)
{
}
2230
isWriteToCompressedOperation()2231 bool GraphicsTextureTestInstance::isWriteToCompressedOperation ()
2232 {
2233 return (m_parameters.operation == OPERATION_TEXTURE_WRITE);
2234 }
2235
transcodeRead()2236 void GraphicsTextureTestInstance::transcodeRead ()
2237 {
2238 const DeviceInterface& vk = m_context.getDeviceInterface();
2239 const VkDevice device = m_context.getDevice();
2240 const deUint32 queueFamilyIndex = m_context.getUniversalQueueFamilyIndex();
2241 const VkQueue queue = m_context.getUniversalQueue();
2242 Allocator& allocator = m_context.getDefaultAllocator();
2243
2244 const VkImageCreateFlags* imgCreateFlagsOverride = DE_NULL;
2245
2246 const VkImageCreateInfo srcImageCreateInfo = makeCreateImageInfo(m_srcFormat, m_parameters.imageType, m_srcImageResolutions[0], m_srcImageUsageFlags, imgCreateFlagsOverride, getLevelCount(), getLayerCount());
2247 MovePtr<Image> srcImage (new Image(vk, device, allocator, srcImageCreateInfo, MemoryRequirement::Any));
2248
2249 const Unique<VkShaderModule> vertShaderModule (createShaderModule(vk, device, m_context.getBinaryCollection().get("vert"), 0));
2250 const Unique<VkShaderModule> fragShaderModule (createShaderModule(vk, device, m_context.getBinaryCollection().get("frag"), 0));
2251
2252 const Unique<VkRenderPass> renderPass (vk::makeRenderPass(vk, device));
2253
2254 const Move<VkDescriptorSetLayout> descriptorSetLayout (DescriptorSetLayoutBuilder()
2255 .addSingleBinding(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, VK_SHADER_STAGE_FRAGMENT_BIT)
2256 .addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, VK_SHADER_STAGE_FRAGMENT_BIT)
2257 .build(vk, device));
2258 const Move<VkDescriptorPool> descriptorPool (DescriptorPoolBuilder()
2259 .addType(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER)
2260 .addType(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE)
2261 .build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u));
2262 const Move<VkDescriptorSet> descriptorSet (makeDescriptorSet(vk, device, *descriptorPool, *descriptorSetLayout));
2263
2264 const VkExtent2D renderSizeDummy (makeExtent2D(1u, 1u));
2265 const Unique<VkPipelineLayout> pipelineLayout (makePipelineLayout(vk, device, *descriptorSetLayout));
2266 const Unique<VkPipeline> pipeline (makeGraphicsPipeline(vk, device, *pipelineLayout, *renderPass, *vertShaderModule, *fragShaderModule, renderSizeDummy, 0u, true));
2267
2268 const Unique<VkCommandPool> cmdPool (createCommandPool(vk, device, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT, queueFamilyIndex));
2269 const Unique<VkCommandBuffer> cmdBuffer (allocateCommandBuffer(vk, device, *cmdPool, VK_COMMAND_BUFFER_LEVEL_PRIMARY));
2270
2271 for (deUint32 levelNdx = 0; levelNdx < getLevelCount(); ++levelNdx)
2272 {
2273 const UVec3& uncompressedImageRes = m_uncompressedImageResVec[levelNdx];
2274 const UVec3& srcImageResolution = m_srcImageResolutions[levelNdx];
2275 const UVec3& dstImageResolution = m_dstImageResolutions[levelNdx];
2276 const size_t srcImageSizeInBytes = m_srcData[levelNdx][0]->size();
2277 const size_t dstImageSizeInBytes = m_dstData[levelNdx][0]->size();
2278 const UVec3 srcImageResBlocked = getCompressedImageResolutionBlockCeil(m_parameters.formatCompressed, srcImageResolution);
2279
2280 const VkImageCreateInfo dstImageCreateInfo = makeCreateImageInfo(m_dstFormat, m_parameters.imageType, dstImageResolution, m_dstImageUsageFlags, imgCreateFlagsOverride, SINGLE_LEVEL, SINGLE_LAYER);
2281
2282 const VkBufferCreateInfo srcImageBufferInfo = makeBufferCreateInfo(srcImageSizeInBytes, VK_BUFFER_USAGE_TRANSFER_SRC_BIT);
2283 const MovePtr<Buffer> srcImageBuffer = MovePtr<Buffer>(new Buffer(vk, device, allocator, srcImageBufferInfo, MemoryRequirement::HostVisible));
2284
2285 const VkBufferCreateInfo dstImageBufferInfo = makeBufferCreateInfo(dstImageSizeInBytes, VK_BUFFER_USAGE_TRANSFER_DST_BIT);
2286 MovePtr<Buffer> dstImageBuffer = MovePtr<Buffer>(new Buffer(vk, device, allocator, dstImageBufferInfo, MemoryRequirement::HostVisible));
2287
2288 const VkExtent2D renderSize (makeExtent2D(uncompressedImageRes.x(), uncompressedImageRes.y()));
2289 const VkViewport viewport = makeViewport(renderSize);
2290 const VkRect2D scissor = makeRect2D(renderSize);
2291
2292 for (deUint32 layerNdx = 0; layerNdx < getLayerCount(); ++layerNdx)
2293 {
2294 const VkImageSubresourceRange srcSubresourceRange = makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, levelNdx, SINGLE_LEVEL, layerNdx, SINGLE_LAYER);
2295 const VkImageSubresourceRange dstSubresourceRange = makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, SINGLE_LEVEL, 0u, SINGLE_LAYER);
2296
2297 Move<VkImageView> srcImageView (makeImageView(vk, device, srcImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatUncompressed, srcSubresourceRange, m_srcImageViewUsageKHR));
2298
2299 de::MovePtr<Image> dstImage (new Image(vk, device, allocator, dstImageCreateInfo, MemoryRequirement::Any));
2300 Move<VkImageView> dstImageView (makeImageView(vk, device, dstImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatUncompressed, dstSubresourceRange, m_dstImageViewUsageKHR));
2301
2302 const VkSamplerCreateInfo srcSamplerInfo (makeSamplerCreateInfo());
2303 const Move<VkSampler> srcSampler = vk::createSampler(vk, device, &srcSamplerInfo);
2304 const VkDescriptorImageInfo descriptorSrcImage (makeDescriptorImageInfo(*srcSampler, *srcImageView, VK_IMAGE_LAYOUT_GENERAL));
2305 const VkDescriptorImageInfo descriptorDstImage (makeDescriptorImageInfo(DE_NULL, *dstImageView, VK_IMAGE_LAYOUT_GENERAL));
2306
2307 const VkBufferImageCopy srcCopyRegion = makeBufferImageCopy(srcImageResolution.x(), srcImageResolution.y(), levelNdx, layerNdx, srcImageResBlocked.x(), srcImageResBlocked.y());
2308 const VkBufferMemoryBarrier srcCopyBufferBarrierPre = makeBufferMemoryBarrier(VK_ACCESS_HOST_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT, srcImageBuffer->get(), 0ull, srcImageSizeInBytes);
2309 const VkImageMemoryBarrier srcCopyImageBarrierPre = makeImageMemoryBarrier(0u, VK_ACCESS_TRANSFER_WRITE_BIT, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, srcImage->get(), srcSubresourceRange);
2310 const VkImageMemoryBarrier srcCopyImageBarrierPost = makeImageMemoryBarrier(VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_SHADER_READ_BIT, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL, srcImage->get(), srcSubresourceRange);
2311 const VkBufferImageCopy dstCopyRegion = makeBufferImageCopy(dstImageResolution.x(), dstImageResolution.y());
2312 const VkImageMemoryBarrier dstInitImageBarrier = makeImageMemoryBarrier(0u, VK_ACCESS_SHADER_READ_BIT, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL, dstImage->get(), dstSubresourceRange);
2313
2314 const VkExtent2D framebufferSize (makeExtent2D(dstImageResolution[0], dstImageResolution[1]));
2315 const Move<VkFramebuffer> framebuffer (makeFramebuffer(vk, device, *renderPass, 0, DE_NULL, framebufferSize.width, framebufferSize.height, SINGLE_LAYER));
2316
2317 // Upload source image data
2318 const Allocation& alloc = srcImageBuffer->getAllocation();
2319 deMemcpy(alloc.getHostPtr(), &m_srcData[levelNdx][layerNdx]->at(0), srcImageSizeInBytes);
2320 flushAlloc(vk, device, alloc);
2321
2322 beginCommandBuffer(vk, *cmdBuffer);
2323 vk.cmdBindPipeline(*cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *pipeline);
2324
2325 // Copy buffer to image
2326 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1u, &srcCopyBufferBarrierPre, 1u, &srcCopyImageBarrierPre);
2327 vk.cmdCopyBufferToImage(*cmdBuffer, srcImageBuffer->get(), srcImage->get(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &srcCopyRegion);
2328 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, DE_NULL, 1u, &srcCopyImageBarrierPost);
2329
2330 // Define destination image layout
2331 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, DE_NULL, 1u, &dstInitImageBarrier);
2332
2333 beginRenderPass(vk, *cmdBuffer, *renderPass, *framebuffer, renderSize);
2334
2335 DescriptorSetUpdateBuilder()
2336 .writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u), VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, &descriptorSrcImage)
2337 .writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(1u), VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, &descriptorDstImage)
2338 .update(vk, device);
2339
2340 vk.cmdBindDescriptorSets(*cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *pipelineLayout, 0u, 1u, &descriptorSet.get(), 0u, DE_NULL);
2341 vk.cmdBindVertexBuffers(*cmdBuffer, 0u, 1u, &m_vertexBuffer->get(), &m_vertexBufferOffset);
2342
2343 vk.cmdSetViewport(*cmdBuffer, 0u, 1u, &viewport);
2344 vk.cmdSetScissor(*cmdBuffer, 0u, 1u, &scissor);
2345
2346 vk.cmdDraw(*cmdBuffer, (deUint32)m_vertexCount, 1, 0, 0);
2347
2348 endRenderPass(vk, *cmdBuffer);
2349
2350 const VkImageMemoryBarrier prepareForTransferBarrier = makeImageMemoryBarrier(
2351 VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
2352 VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_GENERAL,
2353 dstImage->get(), dstSubresourceRange);
2354
2355 const VkBufferMemoryBarrier copyBarrier = makeBufferMemoryBarrier(
2356 VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_HOST_READ_BIT,
2357 dstImageBuffer->get(), 0ull, dstImageSizeInBytes);
2358
2359 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0, (const VkBufferMemoryBarrier*)DE_NULL, 1, &prepareForTransferBarrier);
2360 vk.cmdCopyImageToBuffer(*cmdBuffer, dstImage->get(), VK_IMAGE_LAYOUT_GENERAL, dstImageBuffer->get(), 1u, &dstCopyRegion);
2361 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_HOST_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1, ©Barrier, 0, (const VkImageMemoryBarrier*)DE_NULL);
2362
2363 endCommandBuffer(vk, *cmdBuffer);
2364
2365 submitCommandsAndWait(vk, device, queue, *cmdBuffer);
2366
2367 const Allocation& dstImageBufferAlloc = dstImageBuffer->getAllocation();
2368 invalidateAlloc(vk, device, dstImageBufferAlloc);
2369 deMemcpy(&m_dstData[levelNdx][layerNdx]->at(0), dstImageBufferAlloc.getHostPtr(), dstImageSizeInBytes);
2370 }
2371 }
2372
2373 m_compressedImage = srcImage;
2374 }
2375
transcodeWrite()2376 void GraphicsTextureTestInstance::transcodeWrite ()
2377 {
2378 const DeviceInterface& vk = m_context.getDeviceInterface();
2379 const VkDevice device = m_context.getDevice();
2380 const deUint32 queueFamilyIndex = m_context.getUniversalQueueFamilyIndex();
2381 const VkQueue queue = m_context.getUniversalQueue();
2382 Allocator& allocator = m_context.getDefaultAllocator();
2383
2384 const VkImageCreateFlags* imgCreateFlagsOverride = DE_NULL;
2385
2386 const VkImageCreateInfo dstImageCreateInfo = makeCreateImageInfo(m_dstFormat, m_parameters.imageType, m_dstImageResolutions[0], m_dstImageUsageFlags, imgCreateFlagsOverride, getLevelCount(), getLayerCount());
2387 MovePtr<Image> dstImage (new Image(vk, device, allocator, dstImageCreateInfo, MemoryRequirement::Any));
2388
2389 const Unique<VkShaderModule> vertShaderModule (createShaderModule(vk, device, m_context.getBinaryCollection().get("vert"), 0));
2390 const Unique<VkShaderModule> fragShaderModule (createShaderModule(vk, device, m_context.getBinaryCollection().get("frag"), 0));
2391
2392 const Unique<VkRenderPass> renderPass (vk::makeRenderPass(vk, device));
2393
2394 const Move<VkDescriptorSetLayout> descriptorSetLayout (DescriptorSetLayoutBuilder()
2395 .addSingleBinding(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, VK_SHADER_STAGE_FRAGMENT_BIT)
2396 .addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, VK_SHADER_STAGE_FRAGMENT_BIT)
2397 .build(vk, device));
2398 const Move<VkDescriptorPool> descriptorPool (DescriptorPoolBuilder()
2399 .addType(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER)
2400 .addType(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE)
2401 .build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u));
2402 const Move<VkDescriptorSet> descriptorSet (makeDescriptorSet(vk, device, *descriptorPool, *descriptorSetLayout));
2403
2404 const VkExtent2D renderSizeDummy (makeExtent2D(1u, 1u));
2405 const Unique<VkPipelineLayout> pipelineLayout (makePipelineLayout(vk, device, *descriptorSetLayout));
2406 const Unique<VkPipeline> pipeline (makeGraphicsPipeline(vk, device, *pipelineLayout, *renderPass, *vertShaderModule, *fragShaderModule, renderSizeDummy, 0u, true));
2407
2408 const Unique<VkCommandPool> cmdPool (createCommandPool(vk, device, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT, queueFamilyIndex));
2409 const Unique<VkCommandBuffer> cmdBuffer (allocateCommandBuffer(vk, device, *cmdPool, VK_COMMAND_BUFFER_LEVEL_PRIMARY));
2410
2411 for (deUint32 levelNdx = 0; levelNdx < getLevelCount(); ++levelNdx)
2412 {
2413 const UVec3& uncompressedImageRes = m_uncompressedImageResVec[levelNdx];
2414 const UVec3& srcImageResolution = m_srcImageResolutions[levelNdx];
2415 const UVec3& dstImageResolution = m_dstImageResolutions[levelNdx];
2416 const size_t srcImageSizeInBytes = m_srcData[levelNdx][0]->size();
2417 const size_t dstImageSizeInBytes = m_dstData[levelNdx][0]->size();
2418 const UVec3 dstImageResBlocked = getCompressedImageResolutionBlockCeil(m_parameters.formatCompressed, dstImageResolution);
2419
2420 const VkImageCreateInfo srcImageCreateInfo = makeCreateImageInfo(m_srcFormat, m_parameters.imageType, srcImageResolution, m_srcImageUsageFlags, imgCreateFlagsOverride, SINGLE_LEVEL, SINGLE_LAYER);
2421
2422 const VkExtent2D renderSize (makeExtent2D(uncompressedImageRes.x(), uncompressedImageRes.y()));
2423 const VkViewport viewport = makeViewport(renderSize);
2424 const VkRect2D scissor = makeRect2D(renderSize);
2425
2426 for (deUint32 layerNdx = 0; layerNdx < getLayerCount(); ++layerNdx)
2427 {
2428 const VkBufferCreateInfo srcImageBufferInfo = makeBufferCreateInfo(srcImageSizeInBytes, VK_BUFFER_USAGE_TRANSFER_SRC_BIT);
2429 const MovePtr<Buffer> srcImageBuffer = MovePtr<Buffer>(new Buffer(vk, device, allocator, srcImageBufferInfo, MemoryRequirement::HostVisible));
2430
2431 const VkBufferCreateInfo dstImageBufferInfo = makeBufferCreateInfo(dstImageSizeInBytes, VK_BUFFER_USAGE_TRANSFER_DST_BIT);
2432 MovePtr<Buffer> dstImageBuffer = MovePtr<Buffer>(new Buffer(vk, device, allocator, dstImageBufferInfo, MemoryRequirement::HostVisible));
2433
2434 const VkImageSubresourceRange srcSubresourceRange = makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, SINGLE_LEVEL, 0u, SINGLE_LAYER);
2435 const VkImageSubresourceRange dstSubresourceRange = makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, levelNdx, SINGLE_LEVEL, layerNdx, SINGLE_LAYER);
2436
2437 Move<VkImageView> dstImageView (makeImageView(vk, device, dstImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatUncompressed, dstSubresourceRange, m_dstImageViewUsageKHR));
2438
2439 de::MovePtr<Image> srcImage (new Image(vk, device, allocator, srcImageCreateInfo, MemoryRequirement::Any));
2440 Move<VkImageView> srcImageView (makeImageView(vk, device, srcImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatUncompressed, srcSubresourceRange, m_srcImageViewUsageKHR));
2441
2442 const VkSamplerCreateInfo srcSamplerInfo (makeSamplerCreateInfo());
2443 const Move<VkSampler> srcSampler = vk::createSampler(vk, device, &srcSamplerInfo);
2444 const VkDescriptorImageInfo descriptorSrcImage (makeDescriptorImageInfo(*srcSampler, *srcImageView, VK_IMAGE_LAYOUT_GENERAL));
2445 const VkDescriptorImageInfo descriptorDstImage (makeDescriptorImageInfo(DE_NULL, *dstImageView, VK_IMAGE_LAYOUT_GENERAL));
2446
2447 const VkBufferImageCopy srcCopyRegion = makeBufferImageCopy(srcImageResolution.x(), srcImageResolution.y(), 0u, 0u);
2448 const VkBufferMemoryBarrier srcCopyBufferBarrierPre = makeBufferMemoryBarrier(VK_ACCESS_HOST_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT, srcImageBuffer->get(), 0ull, srcImageSizeInBytes);
2449 const VkImageMemoryBarrier srcCopyImageBarrierPre = makeImageMemoryBarrier(0u, VK_ACCESS_TRANSFER_WRITE_BIT, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, srcImage->get(), srcSubresourceRange);
2450 const VkImageMemoryBarrier srcCopyImageBarrierPost = makeImageMemoryBarrier(VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_SHADER_READ_BIT, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL, srcImage->get(), srcSubresourceRange);
2451 const VkBufferImageCopy dstCopyRegion = makeBufferImageCopy(dstImageResolution.x(), dstImageResolution.y(), levelNdx, layerNdx, dstImageResBlocked.x(), dstImageResBlocked.y());
2452 const VkImageMemoryBarrier dstInitImageBarrier = makeImageMemoryBarrier(0u, VK_ACCESS_SHADER_READ_BIT, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL, dstImage->get(), dstSubresourceRange);
2453
2454 const VkExtent2D framebufferSize (makeExtent2D(dstImageResolution[0], dstImageResolution[1]));
2455 const Move<VkFramebuffer> framebuffer (makeFramebuffer(vk, device, *renderPass, 0, DE_NULL, framebufferSize.width, framebufferSize.height, SINGLE_LAYER));
2456
2457 // Upload source image data
2458 const Allocation& alloc = srcImageBuffer->getAllocation();
2459 deMemcpy(alloc.getHostPtr(), &m_srcData[levelNdx][layerNdx]->at(0), srcImageSizeInBytes);
2460 flushAlloc(vk, device, alloc);
2461
2462 beginCommandBuffer(vk, *cmdBuffer);
2463 vk.cmdBindPipeline(*cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *pipeline);
2464
2465 // Copy buffer to image
2466 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1u, &srcCopyBufferBarrierPre, 1u, &srcCopyImageBarrierPre);
2467 vk.cmdCopyBufferToImage(*cmdBuffer, srcImageBuffer->get(), srcImage->get(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &srcCopyRegion);
2468 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, DE_NULL, 1u, &srcCopyImageBarrierPost);
2469
2470 // Define destination image layout
2471 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, DE_NULL, 1u, &dstInitImageBarrier);
2472
2473 beginRenderPass(vk, *cmdBuffer, *renderPass, *framebuffer, renderSize);
2474
2475 DescriptorSetUpdateBuilder()
2476 .writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u), VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, &descriptorSrcImage)
2477 .writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(1u), VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, &descriptorDstImage)
2478 .update(vk, device);
2479
2480 vk.cmdBindDescriptorSets(*cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *pipelineLayout, 0u, 1u, &descriptorSet.get(), 0u, DE_NULL);
2481 vk.cmdBindVertexBuffers(*cmdBuffer, 0u, 1u, &m_vertexBuffer->get(), &m_vertexBufferOffset);
2482
2483 vk.cmdSetViewport(*cmdBuffer, 0u, 1u, &viewport);
2484 vk.cmdSetScissor(*cmdBuffer, 0u, 1u, &scissor);
2485
2486 vk.cmdDraw(*cmdBuffer, (deUint32)m_vertexCount, 1, 0, 0);
2487
2488 endRenderPass(vk, *cmdBuffer);
2489
2490 const VkImageMemoryBarrier prepareForTransferBarrier = makeImageMemoryBarrier(
2491 VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
2492 VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_GENERAL,
2493 dstImage->get(), dstSubresourceRange);
2494
2495 const VkBufferMemoryBarrier copyBarrier = makeBufferMemoryBarrier(
2496 VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_HOST_READ_BIT,
2497 dstImageBuffer->get(), 0ull, dstImageSizeInBytes);
2498
2499 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0, (const VkBufferMemoryBarrier*)DE_NULL, 1, &prepareForTransferBarrier);
2500 vk.cmdCopyImageToBuffer(*cmdBuffer, dstImage->get(), VK_IMAGE_LAYOUT_GENERAL, dstImageBuffer->get(), 1u, &dstCopyRegion);
2501 	vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_HOST_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1, &copyBarrier, 0, (const VkImageMemoryBarrier*)DE_NULL);
2502
2503 endCommandBuffer(vk, *cmdBuffer);
2504
2505 submitCommandsAndWait(vk, device, queue, *cmdBuffer);
2506
2507 const Allocation& dstImageBufferAlloc = dstImageBuffer->getAllocation();
2508 invalidateAlloc(vk, device, dstImageBufferAlloc);
2509 deMemcpy(&m_dstData[levelNdx][layerNdx]->at(0), dstImageBufferAlloc.getHostPtr(), dstImageSizeInBytes);
2510 }
2511 }
2512
2513 m_compressedImage = dstImage;
2514 }
2515
// Test case for VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT transcoding:
// builds the required shaders (initPrograms), checks device/format support
// (checkSupport) and dispatches to the appropriate compute or graphics
// TestInstance based on the configured operation (createInstance).
class TexelViewCompatibleCase : public TestCase
{
public:
					TexelViewCompatibleCase		(TestContext&			testCtx,
												 const std::string&		name,
												 const std::string&		desc,
												 const TestParameters&	parameters);
	void			initPrograms				(SourceCollections&		programCollection) const;
	TestInstance*	createInstance				(Context&				context) const;
	virtual void	checkSupport				(Context&				context) const;
protected:
	// Immutable test configuration (formats, image type, operation, usage flags)
	// shared by all member functions and forwarded to the created instance.
	const TestParameters	m_parameters;
};
2529
// Stores the parameter set; all real work happens in initPrograms/checkSupport/createInstance.
TexelViewCompatibleCase::TexelViewCompatibleCase (TestContext& testCtx, const std::string& name, const std::string& desc, const TestParameters& parameters)
	: TestCase		(testCtx, name, desc)
	, m_parameters	(parameters)
{
}
2535
// Generates the GLSL programs for this test variant.
//
// SHADER_TYPE_COMPUTE   -> "comp" (the operation under test: load/fetch/texture/store)
//                          and "decompress" (samples result+reference compressed views
//                          and writes both out for comparison).
// SHADER_TYPE_FRAGMENT  -> "vert" (pass-through), "frag" (attachment or texture
//                          read/write operation) and "frag_verify" (verification pass).
//
// Per-image-type GLSL snippets are stored in 3-element arrays indexed by
// imageTypeIndex below.
void TexelViewCompatibleCase::initPrograms (vk::SourceCollections& programCollection) const
{
	DE_ASSERT(m_parameters.size.x() > 0);
	DE_ASSERT(m_parameters.size.y() > 0);

	// Index into the snippet arrays: 0 = IMAGE_TYPE_1D, 1 = IMAGE_TYPE_2D, 2 = IMAGE_TYPE_3D.
	const unsigned int imageTypeIndex =
		(m_parameters.imageType == IMAGE_TYPE_2D) +
		(m_parameters.imageType == IMAGE_TYPE_3D) * 2;

	switch (m_parameters.shader)
	{
		case SHADER_TYPE_COMPUTE:
		{
			const std::string	imageTypeStr		= getShaderImageType(mapVkFormat(m_parameters.formatUncompressed), m_parameters.imageType);
			const std::string	formatQualifierStr	= getShaderImageFormatQualifier(mapVkFormat(m_parameters.formatUncompressed));
			std::ostringstream	src;
			std::ostringstream	src_decompress;

			// Both shaders share the version declaration and a 1x1x1 workgroup
			// (one invocation per texel; dispatch size supplies the resolution).
			src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_450) << "\n"
				<< "layout (local_size_x = 1, local_size_y = 1, local_size_z = 1) in;\n\n";
			src_decompress << src.str();

			switch(m_parameters.operation)
			{
				// imageLoad from the uncompressed view of image 0, imageStore to image 1.
				case OPERATION_IMAGE_LOAD:
				{
					const char* posDefinitions[3] =
					{
						// IMAGE_TYPE_1D
						"    highp int pos = int(gl_GlobalInvocationID.x);\n",
						// IMAGE_TYPE_2D
						"    ivec2 pos = ivec2(gl_GlobalInvocationID.xy);\n",
						// IMAGE_TYPE_3D
						"    ivec3 pos = ivec3(gl_GlobalInvocationID);\n",
					};

					src << "layout (binding = 0, "<<formatQualifierStr<<") readonly uniform "<<imageTypeStr<<" u_image0;\n"
						<< "layout (binding = 1, "<<formatQualifierStr<<") writeonly uniform "<<imageTypeStr<<" u_image1;\n\n"
						<< "void main (void)\n"
						<< "{\n"
						<< posDefinitions[imageTypeIndex]
						<< "    imageStore(u_image1, pos, imageLoad(u_image0, pos));\n"
						<< "}\n";

					break;
				}

				// texelFetch from a sampled uncompressed view; pos.z carries the LOD.
				case OPERATION_TEXEL_FETCH:
				{
					const char* storeDefinitions[3] =
					{
						// IMAGE_TYPE_1D
						"    imageStore(u_image1, pos.x, texelFetch(u_image0, pos.x, pos.z));\n",
						// IMAGE_TYPE_2D
						"    imageStore(u_image1, pos.xy, texelFetch(u_image0, pos.xy, pos.z));\n",
						// IMAGE_TYPE_3D
						"    imageStore(u_image1, pos, texelFetch(u_image0, pos, pos.z));\n",
					};

					src << "layout (binding = 0) uniform "<<getGlslSamplerType(mapVkFormat(m_parameters.formatUncompressed), mapImageViewType(m_parameters.imageType))<<" u_image0;\n"
						<< "layout (binding = 1, "<<formatQualifierStr<<") writeonly uniform "<<imageTypeStr<<" u_image1;\n\n"
						<< "void main (void)\n"
						<< "{\n"
						<< "    ivec3 pos = ivec3(gl_GlobalInvocationID.xyz);\n"
						<< storeDefinitions[imageTypeIndex]
						<< "}\n";

					break;
				}

				// Filtered texture() read at texel centers (+0.5 normalized by the
				// dispatch resolution).
				case OPERATION_TEXTURE:
				{
					const char* coordDefinitions[3] =
					{
						// IMAGE_TYPE_1D
						"    const int pos = int(gl_GlobalInvocationID.x);\n"
						"    const float coord = (float(gl_GlobalInvocationID.x) + 0.5) / pixels_resolution.x;\n",
						// IMAGE_TYPE_2D
						"    const ivec2 pos = ivec2(gl_GlobalInvocationID.xy);\n"
						"    const vec2 coord = (vec2(gl_GlobalInvocationID.xy) + 0.5) / vec2(pixels_resolution);\n",
						// IMAGE_TYPE_3D
						"    const ivec3 pos = ivec3(gl_GlobalInvocationID.xy, 0);\n"
						"    const vec2 v2 = (vec2(gl_GlobalInvocationID.xy) + 0.5) / vec2(pixels_resolution);\n"
						"    const vec3 coord = vec3(v2, 0.0);\n",
					};

					src << "layout (binding = 0) uniform "<<getGlslSamplerType(mapVkFormat(m_parameters.formatUncompressed), mapImageViewType(m_parameters.imageType))<<" u_image0;\n"
						<< "layout (binding = 1, "<<formatQualifierStr<<") writeonly uniform "<<imageTypeStr<<" u_image1;\n\n"
						<< "void main (void)\n"
						<< "{\n"
						<< "    const vec2 pixels_resolution = vec2(gl_NumWorkGroups.x, gl_NumWorkGroups.y);\n"
						<< coordDefinitions[imageTypeIndex]
						<< "    imageStore(u_image1, pos, texture(u_image0, coord));\n"
						<< "}\n";

					break;
				}

				// Store through the uncompressed view (u_image0) and read it back,
				// so the store path itself is exercised; u_image1 is the source,
				// u_image2 captures what was written.
				case OPERATION_IMAGE_STORE:
				{
					const char* posDefinitions[3] =
					{
						// IMAGE_TYPE_1D
						"    highp int pos = int(gl_GlobalInvocationID.x);\n",
						// IMAGE_TYPE_2D
						"    ivec2 pos = ivec2(gl_GlobalInvocationID.xy);\n",
						// IMAGE_TYPE_3D
						"    ivec3 pos = ivec3(gl_GlobalInvocationID);\n",
					};

					src << "layout (binding = 0, "<<formatQualifierStr<<") uniform "<<imageTypeStr<<" u_image0;\n"
						<< "layout (binding = 1, "<<formatQualifierStr<<") readonly uniform "<<imageTypeStr<<" u_image1;\n"
						<< "layout (binding = 2, "<<formatQualifierStr<<") writeonly uniform "<<imageTypeStr<<" u_image2;\n\n"
						<< "void main (void)\n"
						<< "{\n"
						<< posDefinitions[imageTypeIndex]
						<< "    imageStore(u_image0, pos, imageLoad(u_image1, pos));\n"
						<< "    imageStore(u_image2, pos, imageLoad(u_image0, pos));\n"
						<< "}\n";

					break;
				}

				default:
					DE_ASSERT(false);
			}

			// Decompress shader: samples the compressed result and reference
			// textures at the same coordinate and writes both decompressed images
			// out for later comparison.
			const char* cordDefinitions[3] =
			{
				// IMAGE_TYPE_1D
				"    const highp float cord = float(gl_GlobalInvocationID.x) / pixels_resolution.x;\n"
				"    const highp int pos = int(gl_GlobalInvocationID.x); \n",
				// IMAGE_TYPE_2D
				"    const vec2 cord = vec2(gl_GlobalInvocationID.xy) / vec2(pixels_resolution);\n"
				"    const ivec2 pos = ivec2(gl_GlobalInvocationID.xy); \n",
				// IMAGE_TYPE_3D
				"    const vec2 v2 = vec2(gl_GlobalInvocationID.xy) / vec2(pixels_resolution);\n"
				"    const vec3 cord = vec3(v2, 0.0);\n"
				"    const ivec3 pos = ivec3(gl_GlobalInvocationID); \n",
			};
			src_decompress	<< "layout (binding = 0) uniform "<<getGlslSamplerType(mapVkFormat(m_parameters.formatUncompressed), mapImageViewType(m_parameters.imageType))<<" compressed_result;\n"
							<< "layout (binding = 1) uniform "<<getGlslSamplerType(mapVkFormat(m_parameters.formatUncompressed), mapImageViewType(m_parameters.imageType))<<" compressed_reference;\n"
							<< "layout (binding = 2, "<<formatQualifierStr<<") writeonly uniform "<<imageTypeStr<<" decompressed_result;\n"
							<< "layout (binding = 3, "<<formatQualifierStr<<") writeonly uniform "<<imageTypeStr<<" decompressed_reference;\n\n"
							<< "void main (void)\n"
							<< "{\n"
							<< "    const vec2 pixels_resolution = vec2(gl_NumWorkGroups.xy);\n"
							<< cordDefinitions[imageTypeIndex]
							<< "    imageStore(decompressed_result, pos, texture(compressed_result, cord));\n"
							<< "    imageStore(decompressed_reference, pos, texture(compressed_reference, cord));\n"
							<< "}\n";
			programCollection.glslSources.add("comp") << glu::ComputeSource(src.str());
			programCollection.glslSources.add("decompress") << glu::ComputeSource(src_decompress.str());

			break;
		}

		case SHADER_TYPE_FRAGMENT:
		{
			// 2D array layers are rendered one at a time, so the fragment shader
			// always sees a plain 2D image for IMAGE_TYPE_2D_ARRAY.
			// NOTE(review): imageTypeIndex above is still derived from
			// m_parameters.imageType, so IMAGE_TYPE_2D_ARRAY would select the 1D
			// snippets while imageTypeForFS maps it to 2D — presumably 2D_ARRAY
			// never reaches the fragment path; verify against the test-creation code.
			ImageType imageTypeForFS = (m_parameters.imageType == IMAGE_TYPE_2D_ARRAY) ? IMAGE_TYPE_2D : m_parameters.imageType;

			// Vertex shader: pass-through of the full-screen quad positions.
			{
				std::ostringstream src;
				src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_450) << "\n\n"
					<< "layout(location = 0) in vec4 v_in_position;\n"
					<< "\n"
					<< "void main (void)\n"
					<< "{\n"
					<< "    gl_Position = v_in_position;\n"
					<< "}\n";

				programCollection.glslSources.add("vert") << glu::VertexSource(src.str());
			}

			// Fragment shader performing the operation under test.
			{
				switch(m_parameters.operation)
				{
					// Read via input attachment (subpassLoad), write to color output,
					// converting through the uncompressed view's GLSL type.
					case OPERATION_ATTACHMENT_READ:
					case OPERATION_ATTACHMENT_WRITE:
					{
						std::ostringstream	src;

						const std::string	dstTypeStr	= getGlslFormatType(m_parameters.formatUncompressed);
						const std::string	srcTypeStr	= getGlslInputFormatType(m_parameters.formatUncompressed);

						src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_450) << "\n\n"
							<< "precision highp int;\n"
							<< "precision highp float;\n"
							<< "\n"
							<< "layout (location = 0) out highp " << dstTypeStr << " o_color;\n"
							<< "layout (input_attachment_index = 0, set = 0, binding = 0) uniform highp " << srcTypeStr << " inputImage1;\n"
							<< "\n"
							<< "void main (void)\n"
							<< "{\n"
							<< "    o_color = " << dstTypeStr << "(subpassLoad(inputImage1));\n"
							<< "}\n";

						programCollection.glslSources.add("frag") << glu::FragmentSource(src.str());

						break;
					}

					// Sample the input texture at the fragment's normalized position
					// and store the texel into a storage image.
					case OPERATION_TEXTURE_READ:
					case OPERATION_TEXTURE_WRITE:
					{
						std::ostringstream	src;

						const std::string	srcSamplerTypeStr		= getGlslSamplerType(mapVkFormat(m_parameters.formatUncompressed), mapImageViewType(imageTypeForFS));
						const std::string	dstImageTypeStr			= getShaderImageType(mapVkFormat(m_parameters.formatUncompressed), imageTypeForFS);
						const std::string	dstFormatQualifierStr	= getShaderImageFormatQualifier(mapVkFormat(m_parameters.formatUncompressed));

						const char* inDefinitions[3] =
						{
							// IMAGE_TYPE_1D
							"    const highp int out_pos = int(gl_FragCoord.x);\n"
							"    const highp float pixels_resolution = textureSize(u_imageIn, 0);\n"
							"    const highp float in_pos = gl_FragCoord.x / pixels_resolution;\n",
							// IMAGE_TYPE_2D
							"    const ivec2 out_pos = ivec2(gl_FragCoord.xy);\n"
							"    const vec2 pixels_resolution = vec2(textureSize(u_imageIn, 0));\n"
							"    const vec2 in_pos = vec2(gl_FragCoord.xy) / vec2(pixels_resolution);\n",
							// IMAGE_TYPE_3D
							"    const ivec3 out_pos = ivec3(gl_FragCoord.xy, 0);\n"
							"    const vec3 pixels_resolution = vec3(textureSize(u_imageIn, 0));\n"
							"    const vec3 in_pos = vec3(gl_FragCoord.xy, 0) / vec3(pixels_resolution.xy, 1.0);\n",
						};

						src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_450) << "\n\n"
							<< "layout (binding = 0) uniform " << srcSamplerTypeStr << " u_imageIn;\n"
							<< "layout (binding = 1, " << dstFormatQualifierStr << ") writeonly uniform " << dstImageTypeStr << " u_imageOut;\n"
							<< "\n"
							<< "void main (void)\n"
							<< "{\n"
							<< inDefinitions[imageTypeIndex]
							<< "    imageStore(u_imageOut, out_pos, texture(u_imageIn, in_pos));\n"
							<< "}\n";

						programCollection.glslSources.add("frag") << glu::FragmentSource(src.str());

						break;
					}

					default:
						DE_ASSERT(false);
				}
			}

			// Verification fragment shader: samples two input textures (result and
			// reference, which may differ in size) and stores both into storage
			// images using the wider verification format.
			{
				std::ostringstream	src;

				const std::string	samplerType			= getGlslSamplerType(mapVkFormat(m_parameters.formatForVerify), mapImageViewType(imageTypeForFS));
				const std::string	imageTypeStr		= getShaderImageType(mapVkFormat(m_parameters.formatForVerify), imageTypeForFS);
				const std::string	formatQualifierStr	= getShaderImageFormatQualifier(mapVkFormat(m_parameters.formatForVerify));

				const char* pos0Definitions[3] =
				{
					// IMAGE_TYPE_1D
					"    const highp int out_pos = int(gl_FragCoord.x);\n"
					"    const highp float pixels_resolution0 = textureSize(u_imageIn0, 0);\n"
					"    const highp float in_pos0 = gl_FragCoord.x / pixels_resolution0;\n",
					// IMAGE_TYPE_2D
					"    const ivec2 out_pos = ivec2(gl_FragCoord.xy);\n"
					"    const vec2 pixels_resolution0 = vec2(textureSize(u_imageIn0, 0));\n"
					"    const vec2 in_pos0 = vec2(gl_FragCoord.xy) / vec2(pixels_resolution0);\n",
					// IMAGE_TYPE_3D
					"    const ivec3 out_pos = ivec3(ivec2(gl_FragCoord.xy), 0);\n"
					"    const vec3 pixels_resolution0 = vec3(textureSize(u_imageIn0, 0));\n"
					"    const vec3 in_pos0 = vec3(gl_FragCoord.xy, 0) / vec3(pixels_resolution0.xy, 1.0);\n",
				};
				const char* pos1Definitions[3] =
				{
					// IMAGE_TYPE_1D
					"    const highp float pixels_resolution1 = textureSize(u_imageIn1, 0);\n"
					"    const highp float in_pos1 = gl_FragCoord.x / pixels_resolution1;\n",
					// IMAGE_TYPE_2D
					"    const vec2 pixels_resolution1 = vec2(textureSize(u_imageIn1, 0));\n"
					"    const vec2 in_pos1 = vec2(gl_FragCoord.xy) / vec2(pixels_resolution1);\n",
					// IMAGE_TYPE_3D
					"    const vec3 pixels_resolution1 = vec3(textureSize(u_imageIn1, 0));\n"
					"    const vec3 in_pos1 = vec3(gl_FragCoord.xy, 0) / vec3(pixels_resolution1.xy, 1.0);\n",
				};

				src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_450) << "\n\n"
					<< "layout (binding = 0) uniform " << samplerType << " u_imageIn0;\n"
					<< "layout (binding = 1) uniform " << samplerType << " u_imageIn1;\n"
					<< "layout (binding = 2, " << formatQualifierStr << ") writeonly uniform " << imageTypeStr << " u_imageOut0;\n"
					<< "layout (binding = 3, " << formatQualifierStr << ") writeonly uniform " << imageTypeStr << " u_imageOut1;\n"
					<< "\n"
					<< "void main (void)\n"
					<< "{\n"
					<< pos0Definitions[imageTypeIndex]
					<< "    imageStore(u_imageOut0, out_pos, texture(u_imageIn0, in_pos0));\n"
					<< "\n"
					<< pos1Definitions[imageTypeIndex]
					<< "    imageStore(u_imageOut1, out_pos, texture(u_imageIn1, in_pos1));\n"
					<< "}\n";

				programCollection.glslSources.add("frag_verify") << glu::FragmentSource(src.str());
			}

			break;
		}

		default:
			DE_ASSERT(false);
	}
}
2846
checkSupport(Context & context) const2847 void TexelViewCompatibleCase::checkSupport (Context& context) const
2848 {
2849 const VkPhysicalDevice physicalDevice = context.getPhysicalDevice();
2850 const InstanceInterface& vk = context.getInstanceInterface();
2851
2852 context.requireDeviceFunctionality("VK_KHR_maintenance2");
2853
2854 {
2855 VkImageFormatProperties imageFormatProperties;
2856
2857 if (vk.getPhysicalDeviceImageFormatProperties(physicalDevice, m_parameters.formatUncompressed,
2858 mapImageType(m_parameters.imageType), VK_IMAGE_TILING_OPTIMAL,
2859 m_parameters.uncompressedImageUsage, 0u, &imageFormatProperties) == VK_ERROR_FORMAT_NOT_SUPPORTED)
2860 TCU_THROW(NotSupportedError, "Operation not supported with this image format");
2861
2862 if (VK_ERROR_FORMAT_NOT_SUPPORTED == vk.getPhysicalDeviceImageFormatProperties(physicalDevice, m_parameters.formatCompressed,
2863 mapImageType(m_parameters.imageType), VK_IMAGE_TILING_OPTIMAL,
2864 m_parameters.compressedImageUsage,
2865 VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT_KHR | VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT | VK_IMAGE_CREATE_EXTENDED_USAGE_BIT_KHR,
2866 &imageFormatProperties))
2867 TCU_THROW(NotSupportedError, "Operation not supported with this image format");
2868 }
2869
2870 {
2871 const VkPhysicalDeviceFeatures physicalDeviceFeatures = getPhysicalDeviceFeatures(vk, physicalDevice);
2872
2873 if (deInRange32(m_parameters.formatCompressed, VK_FORMAT_BC1_RGB_UNORM_BLOCK, VK_FORMAT_BC7_SRGB_BLOCK) &&
2874 !physicalDeviceFeatures.textureCompressionBC)
2875 TCU_THROW(NotSupportedError, "textureCompressionBC not supported");
2876
2877 if (deInRange32(m_parameters.formatCompressed, VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK, VK_FORMAT_EAC_R11G11_SNORM_BLOCK) &&
2878 !physicalDeviceFeatures.textureCompressionETC2)
2879 TCU_THROW(NotSupportedError, "textureCompressionETC2 not supported");
2880
2881 if (m_parameters.formatIsASTC &&
2882 !physicalDeviceFeatures.textureCompressionASTC_LDR)
2883 TCU_THROW(NotSupportedError, "textureCompressionASTC_LDR not supported");
2884
2885 if (m_parameters.uncompressedImageUsage & VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT)
2886 {
2887 const VkFormatProperties p = getPhysicalDeviceFormatProperties(vk, physicalDevice, m_parameters.formatUncompressed);
2888 if ((p.optimalTilingFeatures & VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT) == 0)
2889 TCU_THROW(NotSupportedError, "Storage view format not supported");
2890 }
2891 }
2892 }
2893
createInstance(Context & context) const2894 TestInstance* TexelViewCompatibleCase::createInstance (Context& context) const
2895 {
2896 if (!m_parameters.useMipmaps)
2897 DE_ASSERT(getNumLayers(m_parameters.imageType, m_parameters.size) == 1u);
2898
2899 DE_ASSERT(getLayerSize(m_parameters.imageType, m_parameters.size).x() > 0u);
2900 DE_ASSERT(getLayerSize(m_parameters.imageType, m_parameters.size).y() > 0u);
2901
2902 switch (m_parameters.shader)
2903 {
2904 case SHADER_TYPE_COMPUTE:
2905 {
2906 switch (m_parameters.operation)
2907 {
2908 case OPERATION_IMAGE_LOAD:
2909 case OPERATION_TEXEL_FETCH:
2910 case OPERATION_TEXTURE:
2911 return new BasicComputeTestInstance(context, m_parameters);
2912 case OPERATION_IMAGE_STORE:
2913 return new ImageStoreComputeTestInstance(context, m_parameters);
2914 default:
2915 TCU_THROW(InternalError, "Impossible");
2916 }
2917 }
2918
2919 case SHADER_TYPE_FRAGMENT:
2920 {
2921 switch (m_parameters.operation)
2922 {
2923 case OPERATION_ATTACHMENT_READ:
2924 case OPERATION_ATTACHMENT_WRITE:
2925 return new GraphicsAttachmentsTestInstance(context, m_parameters);
2926
2927 case OPERATION_TEXTURE_READ:
2928 case OPERATION_TEXTURE_WRITE:
2929 return new GraphicsTextureTestInstance(context, m_parameters);
2930
2931 default:
2932 TCU_THROW(InternalError, "Impossible");
2933 }
2934 }
2935
2936 default:
2937 TCU_THROW(InternalError, "Impossible");
2938 }
2939 }
2940
2941 } // anonymous ns
2942
getUnniceResolution(const VkFormat format,const deUint32 layers)2943 static tcu::UVec3 getUnniceResolution (const VkFormat format, const deUint32 layers)
2944 {
2945 const deUint32 unniceMipmapTextureSize[] = { 1, 1, 1, 8, 22, 48, 117, 275, 604, 208, 611, 274, 1211 };
2946 const deUint32 baseTextureWidth = unniceMipmapTextureSize[getBlockWidth(format)];
2947 const deUint32 baseTextureHeight = unniceMipmapTextureSize[getBlockHeight(format)];
2948 const deUint32 baseTextureWidthLevels = deLog2Floor32(baseTextureWidth);
2949 const deUint32 baseTextureHeightLevels = deLog2Floor32(baseTextureHeight);
2950 const deUint32 widthMultiplier = (baseTextureHeightLevels > baseTextureWidthLevels) ? 1u << (baseTextureHeightLevels - baseTextureWidthLevels) : 1u;
2951 const deUint32 heightMultiplier = (baseTextureWidthLevels > baseTextureHeightLevels) ? 1u << (baseTextureWidthLevels - baseTextureHeightLevels) : 1u;
2952 const deUint32 width = baseTextureWidth * widthMultiplier;
2953 const deUint32 height = baseTextureHeight * heightMultiplier;
2954
2955 // Number of levels should be same on both axises
2956 DE_ASSERT(deLog2Floor32(width) == deLog2Floor32(height));
2957
2958 return tcu::UVec3(width, height, layers);
2959 }
2960
createImageCompressionTranscodingTests(tcu::TestContext & testCtx)2961 tcu::TestCaseGroup* createImageCompressionTranscodingTests (tcu::TestContext& testCtx)
2962 {
2963 struct FormatsArray
2964 {
2965 const VkFormat* formats;
2966 deUint32 count;
2967 };
2968
2969 const bool mipmapness[] =
2970 {
2971 false,
2972 true,
2973 };
2974
2975 const std::string pipelineName[SHADER_TYPE_LAST] =
2976 {
2977 "compute",
2978 "graphic",
2979 };
2980
2981 const std::string mipmanpnessName[DE_LENGTH_OF_ARRAY(mipmapness)] =
2982 {
2983 "basic",
2984 "extended",
2985 };
2986
2987 const std::string operationName[OPERATION_LAST] =
2988 {
2989 "image_load",
2990 "texel_fetch",
2991 "texture",
2992 "image_store",
2993 "attachment_read",
2994 "attachment_write",
2995 "texture_read",
2996 "texture_write",
2997 };
2998
2999 struct ImageTypeName
3000 {
3001 ImageType type;
3002 std::string name;
3003 };
3004 ImageTypeName imageTypes[] =
3005 {
3006 { IMAGE_TYPE_1D, "1d_image" },
3007 { IMAGE_TYPE_2D, "2d_image" },
3008 { IMAGE_TYPE_3D, "3d_image" },
3009 };
3010
3011 const VkImageUsageFlags baseImageUsageFlagSet = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
3012 const VkImageUsageFlags compressedImageUsageFlags[OPERATION_LAST] =
3013 {
3014 baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_STORAGE_BIT), // "image_load"
3015 baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_STORAGE_BIT), // "texel_fetch"
3016 baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_STORAGE_BIT), // "texture"
3017 baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_STORAGE_BIT), // "image_store"
3018 baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT | VK_IMAGE_USAGE_SAMPLED_BIT), // "attachment_read"
3019 baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT), // "attachment_write"
3020 baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_SAMPLED_BIT), // "texture_read"
3021 baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_STORAGE_BIT), // "texture_write"
3022 };
3023
3024 const VkImageUsageFlags compressedImageViewUsageFlags[OPERATION_LAST] =
3025 {
3026 compressedImageUsageFlags[0], //"image_load"
3027 compressedImageUsageFlags[1], //"texel_fetch"
3028 compressedImageUsageFlags[2], //"texture"
3029 compressedImageUsageFlags[3], //"image_store"
3030 compressedImageUsageFlags[4], //"attachment_read"
3031 compressedImageUsageFlags[5] | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, //"attachment_write"
3032 compressedImageUsageFlags[6], //"texture_read"
3033 compressedImageUsageFlags[7], //"texture_write"
3034 };
3035
3036 const VkImageUsageFlags uncompressedImageUsageFlags[OPERATION_LAST] =
3037 {
3038 baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_STORAGE_BIT), //"image_load"
3039 baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_STORAGE_BIT), //"texel_fetch"
3040 baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_STORAGE_BIT), //"texture"
3041 baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_STORAGE_BIT), //"image_store"
3042 baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_SAMPLED_BIT), //"attachment_read"
3043 baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT), //"attachment_write"
3044 baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_STORAGE_BIT | VK_IMAGE_USAGE_SAMPLED_BIT), //"texture_read"
3045 baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_SAMPLED_BIT), //"texture_write"
3046 };
3047
3048 const VkFormat compressedFormats64bit[] =
3049 {
3050 VK_FORMAT_BC1_RGB_UNORM_BLOCK,
3051 VK_FORMAT_BC1_RGB_SRGB_BLOCK,
3052 VK_FORMAT_BC1_RGBA_UNORM_BLOCK,
3053 VK_FORMAT_BC1_RGBA_SRGB_BLOCK,
3054 VK_FORMAT_BC4_UNORM_BLOCK,
3055 VK_FORMAT_BC4_SNORM_BLOCK,
3056 VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK,
3057 VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK,
3058 VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK,
3059 VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK,
3060 VK_FORMAT_EAC_R11_UNORM_BLOCK,
3061 VK_FORMAT_EAC_R11_SNORM_BLOCK,
3062 };
3063
3064 const VkFormat compressedFormats128bit[] =
3065 {
3066 VK_FORMAT_BC2_UNORM_BLOCK,
3067 VK_FORMAT_BC2_SRGB_BLOCK,
3068 VK_FORMAT_BC3_UNORM_BLOCK,
3069 VK_FORMAT_BC3_SRGB_BLOCK,
3070 VK_FORMAT_BC5_UNORM_BLOCK,
3071 VK_FORMAT_BC5_SNORM_BLOCK,
3072 VK_FORMAT_BC6H_UFLOAT_BLOCK,
3073 VK_FORMAT_BC6H_SFLOAT_BLOCK,
3074 VK_FORMAT_BC7_UNORM_BLOCK,
3075 VK_FORMAT_BC7_SRGB_BLOCK,
3076 VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK,
3077 VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK,
3078 VK_FORMAT_EAC_R11G11_UNORM_BLOCK,
3079 VK_FORMAT_EAC_R11G11_SNORM_BLOCK,
3080 VK_FORMAT_ASTC_4x4_UNORM_BLOCK,
3081 VK_FORMAT_ASTC_4x4_SRGB_BLOCK,
3082 VK_FORMAT_ASTC_5x4_UNORM_BLOCK,
3083 VK_FORMAT_ASTC_5x4_SRGB_BLOCK,
3084 VK_FORMAT_ASTC_5x5_UNORM_BLOCK,
3085 VK_FORMAT_ASTC_5x5_SRGB_BLOCK,
3086 VK_FORMAT_ASTC_6x5_UNORM_BLOCK,
3087 VK_FORMAT_ASTC_6x5_SRGB_BLOCK,
3088 VK_FORMAT_ASTC_6x6_UNORM_BLOCK,
3089 VK_FORMAT_ASTC_6x6_SRGB_BLOCK,
3090 VK_FORMAT_ASTC_8x5_UNORM_BLOCK,
3091 VK_FORMAT_ASTC_8x5_SRGB_BLOCK,
3092 VK_FORMAT_ASTC_8x6_UNORM_BLOCK,
3093 VK_FORMAT_ASTC_8x6_SRGB_BLOCK,
3094 VK_FORMAT_ASTC_8x8_UNORM_BLOCK,
3095 VK_FORMAT_ASTC_8x8_SRGB_BLOCK,
3096 VK_FORMAT_ASTC_10x5_UNORM_BLOCK,
3097 VK_FORMAT_ASTC_10x5_SRGB_BLOCK,
3098 VK_FORMAT_ASTC_10x6_UNORM_BLOCK,
3099 VK_FORMAT_ASTC_10x6_SRGB_BLOCK,
3100 VK_FORMAT_ASTC_10x8_UNORM_BLOCK,
3101 VK_FORMAT_ASTC_10x8_SRGB_BLOCK,
3102 VK_FORMAT_ASTC_10x10_UNORM_BLOCK,
3103 VK_FORMAT_ASTC_10x10_SRGB_BLOCK,
3104 VK_FORMAT_ASTC_12x10_UNORM_BLOCK,
3105 VK_FORMAT_ASTC_12x10_SRGB_BLOCK,
3106 VK_FORMAT_ASTC_12x12_UNORM_BLOCK,
3107 VK_FORMAT_ASTC_12x12_SRGB_BLOCK,
3108 };
3109
3110 const VkFormat uncompressedFormats64bit[] =
3111 {
3112 VK_FORMAT_R16G16B16A16_UNORM,
3113 VK_FORMAT_R16G16B16A16_SNORM,
3114 VK_FORMAT_R16G16B16A16_USCALED,
3115 VK_FORMAT_R16G16B16A16_SSCALED,
3116 VK_FORMAT_R16G16B16A16_UINT,
3117 VK_FORMAT_R16G16B16A16_SINT,
3118 //VK_FORMAT_R16G16B16A16_SFLOAT, removed as float views can't preserve NAN/INF/Denorm values
3119 VK_FORMAT_R32G32_UINT,
3120 VK_FORMAT_R32G32_SINT,
3121 //VK_FORMAT_R32G32_SFLOAT, removed as float views can't preserve NAN/INF/Denorm values
3122 //VK_FORMAT_R64_UINT, removed from the test as it couldn't be used
3123 //VK_FORMAT_R64_SINT, removed from the test as it couldn't be used
3124 //VK_FORMAT_R64_SFLOAT, removed from the test as it couldn't be used
3125 };
3126
3127 const VkFormat uncompressedFormats128bit[] =
3128 {
3129 VK_FORMAT_R32G32B32A32_UINT,
3130 VK_FORMAT_R32G32B32A32_SINT,
3131 //VK_FORMAT_R32G32B32A32_SFLOAT, removed as float views can't preserve NAN/INF/Denorm values
3132 //VK_FORMAT_R64G64_UINT, removed from the test as it couldn't be used
3133 //VK_FORMAT_R64G64_SINT, removed from the test as it couldn't be used
3134 //VK_FORMAT_R64G64_SFLOAT, removed from the test as it couldn't be used
3135 };
3136
3137 const FormatsArray formatsCompressedSets[] =
3138 {
3139 {
3140 compressedFormats64bit,
3141 DE_LENGTH_OF_ARRAY(compressedFormats64bit)
3142 },
3143 {
3144 compressedFormats128bit,
3145 DE_LENGTH_OF_ARRAY(compressedFormats128bit)
3146 },
3147 };
3148
3149 // Uncompressed formats - floating point formats should not be used in these
3150 // tests as they cannot be relied upon to preserve all possible values in the
3151 // underlying texture data. Refer to the note under the 'VkImageViewCreateInfo'
3152 // section of the specification.
3153 const FormatsArray formatsUncompressedSets[] =
3154 {
3155 {
3156 uncompressedFormats64bit,
3157 DE_LENGTH_OF_ARRAY(uncompressedFormats64bit)
3158 },
3159 {
3160 uncompressedFormats128bit,
3161 DE_LENGTH_OF_ARRAY(uncompressedFormats128bit)
3162 },
3163 };
3164
3165 DE_ASSERT(DE_LENGTH_OF_ARRAY(formatsCompressedSets) == DE_LENGTH_OF_ARRAY(formatsUncompressedSets));
3166
3167 MovePtr<tcu::TestCaseGroup> texelViewCompatibleTests (new tcu::TestCaseGroup(testCtx, "texel_view_compatible", "Texel view compatible cases"));
3168
3169 for (int shaderType = SHADER_TYPE_COMPUTE; shaderType < SHADER_TYPE_LAST; ++shaderType)
3170 {
3171 MovePtr<tcu::TestCaseGroup> pipelineTypeGroup (new tcu::TestCaseGroup(testCtx, pipelineName[shaderType].c_str(), ""));
3172
3173 for (int mipmapTestNdx = 0; mipmapTestNdx < DE_LENGTH_OF_ARRAY(mipmapness); mipmapTestNdx++)
3174 {
3175 const bool mipmapTest = mipmapness[mipmapTestNdx];
3176
3177 MovePtr<tcu::TestCaseGroup> mipmapTypeGroup (new tcu::TestCaseGroup(testCtx, mipmanpnessName[mipmapTestNdx].c_str(), ""));
3178
3179 for (int imageTypeNdx = 0; imageTypeNdx < DE_LENGTH_OF_ARRAY(imageTypes); imageTypeNdx++)
3180 {
3181 MovePtr<tcu::TestCaseGroup> imageTypeGroup (new tcu::TestCaseGroup(testCtx, imageTypes[imageTypeNdx].name.c_str(), ""));
3182 ImageType imageType = imageTypes[imageTypeNdx].type;
3183
3184 for (int operationNdx = OPERATION_IMAGE_LOAD; operationNdx < OPERATION_LAST; ++operationNdx)
3185 {
3186 if (shaderType != SHADER_TYPE_FRAGMENT && deInRange32(operationNdx, OPERATION_ATTACHMENT_READ, OPERATION_TEXTURE_WRITE))
3187 continue;
3188
3189 if (shaderType != SHADER_TYPE_COMPUTE && deInRange32(operationNdx, OPERATION_IMAGE_LOAD, OPERATION_IMAGE_STORE))
3190 continue;
3191
3192 if (imageType == IMAGE_TYPE_3D && (operationNdx == OPERATION_ATTACHMENT_READ || operationNdx == OPERATION_ATTACHMENT_WRITE))
3193 continue;
3194
3195 MovePtr<tcu::TestCaseGroup> imageOperationGroup (new tcu::TestCaseGroup(testCtx, operationName[operationNdx].c_str(), ""));
3196
3197 deUint32 depth = 1u + 2 * (imageType == IMAGE_TYPE_3D);
3198 deUint32 imageCount = 2u + (operationNdx == OPERATION_IMAGE_STORE);
3199
3200 // Iterate through bitness groups (64 bit, 128 bit, etc)
3201 for (deUint32 formatBitnessGroup = 0; formatBitnessGroup < DE_LENGTH_OF_ARRAY(formatsCompressedSets); ++formatBitnessGroup)
3202 {
3203 for (deUint32 formatCompressedNdx = 0; formatCompressedNdx < formatsCompressedSets[formatBitnessGroup].count; ++formatCompressedNdx)
3204 {
3205 const VkFormat formatCompressed = formatsCompressedSets[formatBitnessGroup].formats[formatCompressedNdx];
3206 const std::string compressedFormatGroupName = getFormatShortString(formatCompressed);
3207 MovePtr<tcu::TestCaseGroup> compressedFormatGroup (new tcu::TestCaseGroup(testCtx, compressedFormatGroupName.c_str(), ""));
3208
3209 for (deUint32 formatUncompressedNdx = 0; formatUncompressedNdx < formatsUncompressedSets[formatBitnessGroup].count; ++formatUncompressedNdx)
3210 {
3211 const VkFormat formatUncompressed = formatsUncompressedSets[formatBitnessGroup].formats[formatUncompressedNdx];
3212 const std::string uncompressedFormatGroupName = getFormatShortString(formatUncompressed);
3213
3214 const TestParameters parameters =
3215 {
3216 static_cast<Operation>(operationNdx),
3217 static_cast<ShaderType>(shaderType),
3218 mipmapTest ? getUnniceResolution(formatCompressed, 1u) : UVec3(64u, 64u, depth),
3219 1u + 2u * mipmapTest * (imageType != IMAGE_TYPE_3D), // 1 or 3 if mipmapTest is true but image is not 3d
3220 imageType,
3221 formatCompressed,
3222 formatUncompressed,
3223 imageCount,
3224 compressedImageUsageFlags[operationNdx],
3225 compressedImageViewUsageFlags[operationNdx],
3226 uncompressedImageUsageFlags[operationNdx],
3227 mipmapTest,
3228 VK_FORMAT_R8G8B8A8_UNORM,
3229 FormatIsASTC(formatCompressed)
3230 };
3231
3232 compressedFormatGroup->addChild(new TexelViewCompatibleCase(testCtx, uncompressedFormatGroupName, "", parameters));
3233 }
3234
3235 imageOperationGroup->addChild(compressedFormatGroup.release());
3236 }
3237 }
3238
3239 imageTypeGroup->addChild(imageOperationGroup.release());
3240 }
3241
3242 mipmapTypeGroup->addChild(imageTypeGroup.release());
3243 }
3244
3245 pipelineTypeGroup->addChild(mipmapTypeGroup.release());
3246 }
3247
3248 texelViewCompatibleTests->addChild(pipelineTypeGroup.release());
3249 }
3250
3251 return texelViewCompatibleTests.release();
3252 }
3253
3254 } // image
3255 } // vkt
3256