1 /*------------------------------------------------------------------------
2  * Vulkan Conformance Tests
3  * ------------------------
4  *
5  * Copyright (c) 2017 The Khronos Group Inc.
6  *
7  * Licensed under the Apache License, Version 2.0 (the "License");
8  * you may not use this file except in compliance with the License.
9  * You may obtain a copy of the License at
10  *
11  *      http://www.apache.org/licenses/LICENSE-2.0
12  *
13  * Unless required by applicable law or agreed to in writing, software
14  * distributed under the License is distributed on an "AS IS" BASIS,
15  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16  * See the License for the specific language governing permissions and
17  * limitations under the License.
18  *
19  *//*!
20  * \file  vktImageCompressionTranscodingSupport.cpp
21  * \brief Compression transcoding support
22  *//*--------------------------------------------------------------------*/
23 
24 #include "vktImageCompressionTranscodingSupport.hpp"
25 #include "vktImageLoadStoreUtil.hpp"
26 
27 #include "deUniquePtr.hpp"
28 #include "deStringUtil.hpp"
29 #include "deSharedPtr.hpp"
30 #include "deRandom.hpp"
31 
32 #include "vktTestCaseUtil.hpp"
33 #include "vkPrograms.hpp"
34 #include "vkImageUtil.hpp"
35 #include "vkBarrierUtil.hpp"
36 #include "vktImageTestsUtil.hpp"
37 #include "vkBuilderUtil.hpp"
38 #include "vkRef.hpp"
39 #include "vkRefUtil.hpp"
40 #include "vkTypeUtil.hpp"
41 #include "vkQueryUtil.hpp"
42 #include "vkCmdUtil.hpp"
43 #include "vkObjUtil.hpp"
44 #include "vkBufferWithMemory.hpp"
45 
46 #include "tcuTextureUtil.hpp"
47 #include "tcuTexture.hpp"
48 #include "tcuCompressedTexture.hpp"
49 #include "tcuVectorType.hpp"
50 #include "tcuResource.hpp"
51 #include "tcuImageIO.hpp"
52 #include "tcuImageCompare.hpp"
53 #include "tcuTestLog.hpp"
54 #include "tcuRGBA.hpp"
55 #include "tcuSurface.hpp"
56 
57 #include <vector>
58 
59 using namespace vk;
60 namespace vkt
61 {
62 namespace image
63 {
64 namespace
65 {
66 using std::string;
67 using std::vector;
68 using tcu::TestContext;
69 using tcu::TestStatus;
70 using tcu::UVec3;
71 using tcu::IVec3;
72 using tcu::CompressedTexFormat;
73 using tcu::CompressedTexture;
74 using tcu::Resource;
75 using tcu::Archive;
76 using tcu::ConstPixelBufferAccess;
77 using de::MovePtr;
78 using de::SharedPtr;
79 using de::Random;
80 
81 typedef SharedPtr<MovePtr<Image> >			ImageSp;
82 typedef SharedPtr<Move<VkImageView> >		ImageViewSp;
83 typedef SharedPtr<Move<VkDescriptorSet> >	SharedVkDescriptorSet;
84 
85 enum ShaderType
86 {
87 	SHADER_TYPE_COMPUTE,
88 	SHADER_TYPE_FRAGMENT,
89 	SHADER_TYPE_LAST
90 };
91 
92 enum Operation
93 {
94 	OPERATION_IMAGE_LOAD,
95 	OPERATION_TEXEL_FETCH,
96 	OPERATION_TEXTURE,
97 	OPERATION_IMAGE_STORE,
98 	OPERATION_ATTACHMENT_READ,
99 	OPERATION_ATTACHMENT_WRITE,
100 	OPERATION_TEXTURE_READ,
101 	OPERATION_TEXTURE_WRITE,
102 	OPERATION_LAST
103 };
104 
105 struct TestParameters
106 {
107 	Operation			operation;
108 	ShaderType			shader;
109 	UVec3				size;
110 	deUint32			layers;
111 	ImageType			imageType;
112 	VkFormat			formatCompressed;
113 	VkFormat			formatUncompressed;
114 	deUint32			imagesCount;
115 	VkImageUsageFlags	compressedImageUsage;
116 	VkImageUsageFlags	compressedImageViewUsage;
117 	VkImageUsageFlags	uncompressedImageUsage;
118 	bool				useMipmaps;
119 	VkFormat			formatForVerify;
120 	bool				formatIsASTC;
121 };
122 
123 template<typename T>
124 inline SharedPtr<Move<T> > makeVkSharedPtr (Move<T> move)
125 {
126 	return SharedPtr<Move<T> >(new Move<T>(move));
127 }
128 
129 template<typename T>
130 inline SharedPtr<MovePtr<T> > makeVkSharedPtr (MovePtr<T> movePtr)
131 {
132 	return SharedPtr<MovePtr<T> >(new MovePtr<T>(movePtr));
133 }
134 
135 const deUint32 SINGLE_LEVEL = 1u;
136 const deUint32 SINGLE_LAYER = 1u;
137 
138 enum BinaryCompareMode
139 {
140 	COMPARE_MODE_NORMAL,
141 	COMPARE_MODE_ALLOW_ASTC_ERROR_COLOUR_WARNING,
142 };
143 
144 enum BinaryCompareResult
145 {
146 	COMPARE_RESULT_OK,
147 	COMPARE_RESULT_ASTC_QUALITY_WARNING,
148 	COMPARE_RESULT_FAILED,
149 };
150 
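// Error colours as packed R8G8B8A8_UNORM texels (the format BinaryCompare verifies against):
// the ASTC LDR error colour is opaque magenta, while HDR block errors decode to NaN and are
// expected to show up as zero here.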
151 const deUint32 ASTC_LDR_ERROR_COLOUR = 0xFFFF00FF;
152 const deUint32 ASTC_HDR_ERROR_COLOUR = 0x00000000;
153 
154 static BinaryCompareResult BinaryCompare(const void				*reference,
155 										 const void				*result,
156 										 VkDeviceSize			sizeInBytes,
157 										 VkFormat				formatForVerify,
158 										 BinaryCompareMode		mode)
159 {
160 	DE_UNREF(formatForVerify);
161 
162 	// Compare quickly using deMemCmp
163 	if (deMemCmp(reference, result, (size_t)sizeInBytes) == 0)
164 	{
165 		return COMPARE_RESULT_OK;
166 	}
167 	// If deMemCmp indicated a mismatch, we can re-check with a manual comparison of
168 	// the ref and res images that allows for ASTC error colour mismatches if the ASTC
169 	// comparison mode was selected. This slows down the affected ASTC tests when the
170 	// first comparison does not pass, but means that in the general case the
171 	// comparison is still fast.
172 	else if (mode == COMPARE_MODE_ALLOW_ASTC_ERROR_COLOUR_WARNING)
173 	{
174 		bool bWarn = false;
175 		bool bFail = false;
176 		const deUint32 *pui32RefVal = (deUint32*)reference;
177 		const deUint32 *pui32ResVal = (deUint32*)result;
178 
179 		DE_ASSERT(formatForVerify == VK_FORMAT_R8G8B8A8_UNORM);
180 		size_t numPixels = (size_t)(sizeInBytes / 4) /* bytes */;
181 		for (size_t i = 0; i < numPixels; i++)
182 		{
183 			const deUint32 ref = *pui32RefVal++;
184 			const deUint32 res = *pui32ResVal++;
185 
186 			if (ref != res)
187 			{
188 				// QualityWarning !1231: If the decoded ASTC texel was the ASTC LDR error colour
189 				// and the result image has the HDR error colour (or vice versa as the test
190 				// cases below sometimes reverse the operands) then issue a quality warning
191 				// instead of a failure.
192 				if ((ref == ASTC_LDR_ERROR_COLOUR && res == ASTC_HDR_ERROR_COLOUR) ||
193 					(ref == ASTC_HDR_ERROR_COLOUR && res == ASTC_LDR_ERROR_COLOUR))
194 				{
195 					bWarn = true;
196 				}
197 				else
198 				{
199 					bFail = true;
200 				}
201 			}
202 		}
203 
204 		if (!bFail)
205 		{
206 			return (bWarn)
207 				? (COMPARE_RESULT_ASTC_QUALITY_WARNING)
208 				: (COMPARE_RESULT_OK);
209 		}
210 	}
211 
212 	return COMPARE_RESULT_FAILED;
213 }
214 
215 static bool FormatIsASTC(VkFormat format)
216 {
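	// The core ASTC block formats (4x4 UNORM through 12x12 SRGB) are contiguous in the VkFormat enum, so a range check suffices.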
217 	return deInRange32(format, VK_FORMAT_ASTC_4x4_UNORM_BLOCK, VK_FORMAT_ASTC_12x12_SRGB_BLOCK);
218 }
219 
220 static TestStatus TestStatusASTCQualityWarning()
221 {
222 	return TestStatus(QP_TEST_RESULT_QUALITY_WARNING, "ASTC HDR error colour output instead of LDR error colour");
223 }
224 
225 class BasicTranscodingTestInstance : public TestInstance
226 {
227 public:
228 							BasicTranscodingTestInstance	(Context&						context,
229 															 const TestParameters&			parameters);
230 	virtual TestStatus		iterate							(void) = 0;
231 protected:
232 	void					generateData					(deUint8*						toFill,
233 															 const size_t					size,
234 															 const VkFormat					format,
235 															 const deUint32					layer = 0u,
236 															 const deUint32					level = 0u);
237 	deUint32				getLevelCount					();
238 	deUint32				getLayerCount					();
239 	UVec3					getLayerDims					();
240 	vector<UVec3>			getMipLevelSizes				(UVec3							baseSize);
241 	vector<UVec3>			getCompressedMipLevelSizes		(const VkFormat					compressedFormat,
242 															 const vector<UVec3>&			uncompressedSizes);
243 
244 	const TestParameters	m_parameters;
245 	const deUint32			m_blockWidth;
246 	const deUint32			m_blockHeight;
247 	const deUint32			m_levelCount;
248 	const UVec3				m_layerSize;
249 
250 	// Set when an error colour mismatch is detected while verifying the image;
251 	// the test then reports the ASTC quality warning instead of a plain pass.
252 	bool					m_bASTCErrorColourMismatch;
253 
254 private:
255 	deUint32				findMipMapLevelCount			();
256 };
257 
258 deUint32 BasicTranscodingTestInstance::findMipMapLevelCount ()
259 {
260 	deUint32 levelCount = 1;
261 
262 	// Mipmap levels whose resolution falls below the block size cannot be used,
263 	// so reduce the number of mipmap levels accordingly.
264 	if (m_parameters.useMipmaps)
265 	{
266 		deUint32 w = m_parameters.size.x();
267 		deUint32 h = m_parameters.size.y();
268 
269 		DE_ASSERT(m_blockWidth > 0u && m_blockHeight > 0u);
270 
271 		while (w > m_blockWidth && h > m_blockHeight)
272 		{
273 			w >>= 1;
274 			h >>= 1;
275 
276 			if (w > m_blockWidth && h > m_blockHeight)
277 				levelCount++;
278 		}
279 
280 		DE_ASSERT((m_parameters.size.x() >> (levelCount - 1u)) >= m_blockWidth);
281 		DE_ASSERT((m_parameters.size.y() >> (levelCount - 1u)) >= m_blockHeight);
282 	}
283 
284 	return levelCount;
285 }
286 
287 BasicTranscodingTestInstance::BasicTranscodingTestInstance (Context& context, const TestParameters& parameters)
288 	: TestInstance	(context)
289 	, m_parameters	(parameters)
290 	, m_blockWidth	(getBlockWidth(m_parameters.formatCompressed))
291 	, m_blockHeight	(getBlockHeight(m_parameters.formatCompressed))
292 	, m_levelCount	(findMipMapLevelCount())
293 	, m_layerSize	(getLayerSize(m_parameters.imageType, m_parameters.size))
294 	, m_bASTCErrorColourMismatch(false)
295 {
296 	DE_ASSERT(deLog2Floor32(m_parameters.size.x()) == deLog2Floor32(m_parameters.size.y()));
297 }
298 
299 deUint32 BasicTranscodingTestInstance::getLevelCount()
300 {
301 	return m_levelCount;
302 }
303 
304 deUint32 BasicTranscodingTestInstance::getLayerCount()
305 {
306 	return m_parameters.layers;
307 }
308 
309 UVec3 BasicTranscodingTestInstance::getLayerDims()
310 {
311 	return m_layerSize;
312 }
313 
314 vector<UVec3> BasicTranscodingTestInstance::getMipLevelSizes (UVec3 baseSize)
315 {
316 	vector<UVec3>	levelSizes;
317 	const deUint32	levelCount = getLevelCount();
318 
319 	baseSize.z() = 1u;
320 
321 	levelSizes.push_back(baseSize);
322 
323 	if (m_parameters.imageType == IMAGE_TYPE_1D)
324 	{
325 		baseSize.y() = 1u;
326 
327 		while (levelSizes.size() < levelCount && (baseSize.x() != 1))
328 		{
329 			baseSize.x() = deMax32(baseSize.x() >> 1, 1);
330 			levelSizes.push_back(baseSize);
331 		}
332 	}
333 	else
334 	{
335 		while (levelSizes.size() < levelCount && (baseSize.x() != 1 || baseSize.y() != 1))
336 		{
337 			baseSize.x() = deMax32(baseSize.x() >> 1, 1);
338 			baseSize.y() = deMax32(baseSize.y() >> 1, 1);
339 			levelSizes.push_back(baseSize);
340 		}
341 	}
342 
343 	DE_ASSERT(levelSizes.size() == getLevelCount());
344 
345 	return levelSizes;
346 }
347 
348 vector<UVec3> BasicTranscodingTestInstance::getCompressedMipLevelSizes (const VkFormat compressedFormat, const vector<UVec3>& uncompressedSizes)
349 {
350 	vector<UVec3> levelSizes;
351 	vector<UVec3>::const_iterator it;
352 
353 	for (it = uncompressedSizes.begin(); it != uncompressedSizes.end(); it++)
354 		levelSizes.push_back(getCompressedImageResolutionInBlocks(compressedFormat, *it));
355 
356 	return levelSizes;
357 }
358 
359 void BasicTranscodingTestInstance::generateData (deUint8*		toFill,
360 												 const size_t	size,
361 												 const VkFormat format,
362 												 const deUint32 layer,
363 												 const deUint32 level)
364 {
365 	const deUint8 pattern[] =
366 	{
367 		// 64-bit values
368 		0x11, 0x11, 0x11, 0x11, 0x22, 0x22, 0x22, 0x22,
369 		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
370 		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01,
371 		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00,
372 		0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00,
373 		0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,
374 		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF,
375 		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0x00,
376 		0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0x00, 0x00,
377 		0x00, 0x00, 0x00, 0x00, 0xFF, 0x00, 0x00, 0x00,
378 		0x7F, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,		// Positive infinity
379 		0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,		// Negative infinity
380 		0x7F, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01,		// Start of a signalling NaN (NANS)
381 		0x7F, 0xF7, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,		// End of a signalling NaN (NANS)
382 		0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01,		// Start of a signalling NaN (NANS)
383 		0xFF, 0xF7, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,		// End of a signalling NaN (NANS)
384 		0x7F, 0xF8, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,		// Start of a quiet NaN (NANQ)
385 		0x7F, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,		// End of a quiet NaN (NANQ)
386 		0xFF, 0xF8, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,		// Start of a quiet NaN (NANQ)
387 		0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,		// End of a quiet NaN (NANQ)
388 		// 32-bit values
389 		0x7F, 0x80, 0x00, 0x00,								// Positive infinity
390 		0xFF, 0x80, 0x00, 0x00,								// Negative infinity
391 		0x7F, 0x80, 0x00, 0x01,								// Start of a signalling NaN (NANS)
392 		0x7F, 0xBF, 0xFF, 0xFF,								// End of a signalling NaN (NANS)
393 		0xFF, 0x80, 0x00, 0x01,								// Start of a signalling NaN (NANS)
394 		0xFF, 0xBF, 0xFF, 0xFF,								// End of a signalling NaN (NANS)
395 		0x7F, 0xC0, 0x00, 0x00,								// Start of a quiet NaN (NANQ)
396 		0x7F, 0xFF, 0xFF, 0xFF,								// End of a quiet NaN (NANQ)
397 		0xFF, 0xC0, 0x00, 0x00,								// Start of a quiet NaN (NANQ)
398 		0xFF, 0xFF, 0xFF, 0xFF,								// End of a quiet NaN (NANQ)
399 		0xAA, 0xAA, 0xAA, 0xAA,
400 		0x55, 0x55, 0x55, 0x55,
401 	};
402 
403 	deUint8*	start		= toFill;
404 	size_t		sizeToRnd	= size;
405 
406 	// Pattern part
407 	if (layer == 0 && level == 0 && size >= 2 * sizeof(pattern))
408 	{
409 		// Reversed pattern (bytes copied back to front)
410 		for (size_t i = 0; i < sizeof(pattern); i++)
411 			start[sizeof(pattern) - i - 1] = pattern[i];
412 
413 		start		+= sizeof(pattern);
414 		sizeToRnd	-= sizeof(pattern);
415 
416 		// Direct pattern
417 		deMemcpy(start, pattern, sizeof(pattern));
418 
419 		start		+= sizeof(pattern);
420 		sizeToRnd	-= sizeof(pattern);
421 	}
422 
423 	// Random part
424 	{
425 		DE_ASSERT(sizeToRnd % sizeof(deUint32) == 0);
426 
427 		deUint32*	start32		= reinterpret_cast<deUint32*>(start);
428 		size_t		sizeToRnd32	= sizeToRnd / sizeof(deUint32);
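		// Derive a per-subresource seed from layer, level and format so each subresource
		// is filled with distinct but reproducible pseudo-random data.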
429 		deUint32	seed		= (layer << 24) ^ (level << 16) ^ static_cast<deUint32>(format);
430 		Random		rnd			(seed);
431 
432 		for (size_t i = 0; i < sizeToRnd32; i++)
433 			start32[i] = rnd.getUint32();
434 	}
435 
436 	{
437 		// Remove certain values that may not be preserved based on the uncompressed view format
438 		if (isSnormFormat(m_parameters.formatUncompressed))
439 		{
440 			for (size_t i = 0; i < size; i += 2)
441 			{
442 				// SNORM fix: a write operation in SNORM format
443 				// replaces 0x00 0x80 with 0x01 0x80
444 				if (toFill[i] == 0x00 && toFill[i+1] == 0x80)
445 					toFill[i+1] = 0x81;
446 			}
447 		}
448 		else if (isFloatFormat(m_parameters.formatUncompressed))
449 		{
450 			tcu::TextureFormat textureFormat = mapVkFormat(m_parameters.formatUncompressed);
451 
452 			if (textureFormat.type == tcu::TextureFormat::HALF_FLOAT)
453 			{
454 				for (size_t i = 0; i < size; i += 2)
455 				{
456 					// HALF_FLOAT fix: remove INF and NaN
457 					if ((toFill[i+1] & 0x7C) == 0x7C)
458 						toFill[i+1] = 0x00;
459 				}
460 			}
461 			else if (textureFormat.type == tcu::TextureFormat::FLOAT)
462 			{
463 				for (size_t i = 0; i < size; i += 4)
464 				{
465 					// HALF_FLOAT fix: remove INF and NaN
466 					if ((toFill[i+1] & 0x7C) == 0x7C)
467 						toFill[i+1] = 0x00;
468 				}
469 
470 				for (size_t i = 0; i < size; i += 4)
471 				{
472 					// FLOAT fix: remove INF, NaN, and denorm
473 					// Little endian fix
474 					if (((toFill[i+3] & 0x7F) == 0x7F && (toFill[i+2] & 0x80) == 0x80) || ((toFill[i+3] & 0x7F) == 0x00 && (toFill[i+2] & 0x80) == 0x00))
475 						toFill[i+3] = 0x01;
476 					// Big endian fix
477 					if (((toFill[i+0] & 0x7F) == 0x7F && (toFill[i+1] & 0x80) == 0x80) || ((toFill[i+0] & 0x7F) == 0x00 && (toFill[i+1] & 0x80) == 0x00))
478 						toFill[i+0] = 0x01;
479 				}
480 			}
481 		}
482 	}
483 }
484 
485 class BasicComputeTestInstance : public BasicTranscodingTestInstance
486 {
487 public:
488 					BasicComputeTestInstance	(Context&							context,
489 												const TestParameters&				parameters);
490 	TestStatus		iterate						(void);
491 protected:
492 	struct ImageData
493 	{
494 		deUint32			getImagesCount		(void)									{ return static_cast<deUint32>(images.size());		}
495 		deUint32			getImageViewCount	(void)									{ return static_cast<deUint32>(imagesViews.size());	}
496 		deUint32			getImageInfoCount	(void)									{ return static_cast<deUint32>(imagesInfos.size());	}
497 		VkImage				getImage			(const deUint32				ndx)		{ return **images[ndx]->get();						}
498 		VkImageView			getImageView		(const deUint32				ndx)		{ return **imagesViews[ndx];						}
499 		VkImageCreateInfo	getImageInfo		(const deUint32				ndx)		{ return imagesInfos[ndx];							}
500 		void				addImage			(MovePtr<Image>				image)		{ images.push_back(makeVkSharedPtr(image));			}
501 		void				addImageView		(Move<VkImageView>			imageView)	{ imagesViews.push_back(makeVkSharedPtr(imageView));}
502 		void				addImageInfo		(const VkImageCreateInfo	imageInfo)	{ imagesInfos.push_back(imageInfo);					}
503 		void				resetViews			()										{ imagesViews.clear();								}
504 	private:
505 		vector<ImageSp>				images;
506 		vector<ImageViewSp>			imagesViews;
507 		vector<VkImageCreateInfo>	imagesInfos;
508 	};
509 	void			copyDataToImage				(const VkCommandPool&				cmdPool,
510 												 const VkCommandBuffer&				cmdBuffer,
511 												 ImageData&							imageData,
512 												 const vector<UVec3>&				mipMapSizes,
513 												 const bool							isCompressed);
514 	virtual void	executeShader				(const VkCommandPool&				cmdPool,
515 												 const VkCommandBuffer&				cmdBuffer,
516 												 const VkDescriptorSetLayout&		descriptorSetLayout,
517 												 const VkDescriptorPool&			descriptorPool,
518 												vector<ImageData>&					imageData);
519 	bool			copyResultAndCompare		(const VkCommandPool&				cmdPool,
520 												 const VkCommandBuffer&				cmdBuffer,
521 												 const VkImage&						uncompressed,
522 												 const VkDeviceSize					offset,
523 												 const UVec3&						size);
524 	void			descriptorSetUpdate			(VkDescriptorSet					descriptorSet,
525 												 const VkDescriptorImageInfo*		descriptorImageInfos);
526 	void			createImageInfos			(ImageData&							imageData,
527 												 const vector<UVec3>&				mipMapSizes,
528 												 const bool							isCompressed);
529 	bool			decompressImage				(const VkCommandPool&				cmdPool,
530 												 const VkCommandBuffer&				cmdBuffer,
531 												 vector<ImageData>&					imageData,
532 												 const vector<UVec3>&				mipMapSizes);
533 	vector<deUint8>	m_data;
534 };
535 
536 
537 BasicComputeTestInstance::BasicComputeTestInstance (Context& context, const TestParameters& parameters)
538 	:BasicTranscodingTestInstance	(context, parameters)
539 {
540 }
541 
542 TestStatus BasicComputeTestInstance::iterate (void)
543 {
544 	const DeviceInterface&					vk					= m_context.getDeviceInterface();
545 	const VkDevice							device				= m_context.getDevice();
546 	const deUint32							queueFamilyIndex	= m_context.getUniversalQueueFamilyIndex();
547 	Allocator&								allocator			= m_context.getDefaultAllocator();
548 	const Unique<VkCommandPool>				cmdPool				(createCommandPool(vk, device, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT, queueFamilyIndex));
549 	const Unique<VkCommandBuffer>			cmdBuffer			(allocateCommandBuffer(vk, device, *cmdPool, VK_COMMAND_BUFFER_LEVEL_PRIMARY));
550 	const UVec3								fullSize			(m_parameters.size.x(), m_parameters.imageType == IMAGE_TYPE_1D ? 1 : m_parameters.size.y(), 1);
551 	const vector<UVec3>						mipMapSizes			= m_parameters.useMipmaps ? getMipLevelSizes (getLayerDims()) : vector<UVec3>(1, fullSize);
552 	vector<ImageData>						imageData			(m_parameters.imagesCount);
553 	const deUint32							compressedNdx		= 0u;
554 	const deUint32							resultImageNdx		= m_parameters.imagesCount -1u;
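	// Image 0 is the compressed image; the last image holds the uncompressed result that is
	// compared against the generated data before the separate decompression pass runs.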
555 
556 	for (deUint32 imageNdx = 0u; imageNdx < m_parameters.imagesCount; ++imageNdx)
557 	{
558 		const bool isCompressed = (compressedNdx == imageNdx);
559 		createImageInfos(imageData[imageNdx], mipMapSizes, isCompressed);
560 		for (deUint32 infoNdx = 0u; infoNdx < imageData[imageNdx].getImageInfoCount(); ++infoNdx)
561 		{
562 			imageData[imageNdx].addImage(MovePtr<Image>(new Image(vk, device, allocator, imageData[imageNdx].getImageInfo(infoNdx), MemoryRequirement::Any)));
563 			if (isCompressed)
564 			{
565 				const VkImageViewUsageCreateInfo	imageViewUsageKHR	=
566 				{
567 					VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO,					//VkStructureType		sType;
568 					DE_NULL,														//const void*			pNext;
569 					m_parameters.compressedImageUsage,								//VkImageUsageFlags		usage;
570 				};
571 				for (deUint32 mipNdx = 0u; mipNdx < mipMapSizes.size(); ++mipNdx)
572 				for (deUint32 layerNdx = 0u; layerNdx < getLayerCount(); ++layerNdx)
573 				{
574 					imageData[imageNdx].addImageView(makeImageView(vk, device, imageData[imageNdx].getImage(infoNdx),
575 														mapImageViewType(m_parameters.imageType), m_parameters.formatUncompressed,
576 														makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, mipNdx, 1u, layerNdx, 1u),
577 														&imageViewUsageKHR));
578 				}
579 			}
580 			else
581 			{
582 				imageData[imageNdx].addImageView(makeImageView(vk, device, imageData[imageNdx].getImage(infoNdx),
583 													mapImageViewType(m_parameters.imageType), m_parameters.formatUncompressed,
584 													makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 1u, 0u, 1u)));
585 			}
586 		}
587 	}
588 
589 	{
590 		size_t size = 0ull;
591 		for(deUint32 mipNdx = 0u; mipNdx < mipMapSizes.size(); ++mipNdx)
592 		{
593 			size += static_cast<size_t>(getCompressedImageSizeInBytes(m_parameters.formatCompressed, mipMapSizes[mipNdx]) * getLayerCount());
594 		}
595 		m_data.resize(size);
596 		generateData (&m_data[0], m_data.size(), m_parameters.formatCompressed);
597 	}
598 
599 	switch(m_parameters.operation)
600 	{
601 		case OPERATION_IMAGE_LOAD:
602 		case OPERATION_TEXEL_FETCH:
603 		case OPERATION_TEXTURE:
604 			copyDataToImage(*cmdPool, *cmdBuffer, imageData[compressedNdx], mipMapSizes, true);
605 			break;
606 		case OPERATION_IMAGE_STORE:
607 			copyDataToImage(*cmdPool, *cmdBuffer, imageData[1], mipMapSizes, false);
608 			break;
609 		default:
610 			DE_ASSERT(false);
611 			break;
612 	}
613 
614 	bool pass = true;
615 	std::string failString;
616 	{
617 		Move<VkDescriptorSetLayout>	descriptorSetLayout;
618 		Move<VkDescriptorPool>		descriptorPool;
619 
620 		DescriptorSetLayoutBuilder	descriptorSetLayoutBuilder;
621 		DescriptorPoolBuilder		descriptorPoolBuilder;
622 		for (deUint32 imageNdx = 0u; imageNdx < m_parameters.imagesCount; ++imageNdx)
623 		{
624 			switch(m_parameters.operation)
625 			{
626 				case OPERATION_IMAGE_LOAD:
627 				case OPERATION_IMAGE_STORE:
628 					descriptorSetLayoutBuilder.addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, VK_SHADER_STAGE_COMPUTE_BIT);
629 					descriptorPoolBuilder.addType(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, imageData[0].getImageViewCount());
630 					break;
631 				case OPERATION_TEXEL_FETCH:
632 				case OPERATION_TEXTURE:
633 					descriptorSetLayoutBuilder.addSingleBinding((compressedNdx == imageNdx) ? VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER : VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, VK_SHADER_STAGE_COMPUTE_BIT);
634 					descriptorPoolBuilder.addType((compressedNdx == imageNdx) ? VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER : VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, imageData[0].getImageViewCount());
635 					break;
636 				default:
637 					DE_ASSERT(false);
638 					break;
639 			}
640 		}
641 		descriptorSetLayout	= descriptorSetLayoutBuilder.build(vk, device);
642 		descriptorPool		= descriptorPoolBuilder.build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, imageData[0].getImageViewCount());
643 		executeShader(*cmdPool, *cmdBuffer, *descriptorSetLayout, *descriptorPool, imageData);
644 
645 		{
646 			VkDeviceSize offset = 0ull;
647 			for (deUint32 mipNdx = 0u; mipNdx < mipMapSizes.size(); ++mipNdx)
648 			for (deUint32 layerNdx = 0u; layerNdx < getLayerCount(); ++layerNdx)
649 			{
650 				const deUint32	imageNdx	= layerNdx + mipNdx * getLayerCount();
651 				const UVec3		size		= UVec3(imageData[resultImageNdx].getImageInfo(imageNdx).extent.width,
652 													imageData[resultImageNdx].getImageInfo(imageNdx).extent.height,
653 													imageData[resultImageNdx].getImageInfo(imageNdx).extent.depth);
654 				if (!copyResultAndCompare(*cmdPool, *cmdBuffer, imageData[resultImageNdx].getImage(imageNdx), offset, size))
655 				{
656 					pass = false;
657 					failString = std::string("Uncompressed output mismatch at offset ") + de::toString(offset) + " even before executing decompression";
658 				}
659 				offset += getCompressedImageSizeInBytes(m_parameters.formatCompressed, mipMapSizes[mipNdx]);
660 			}
661 		}
662 	}
663 	if (!decompressImage(*cmdPool, *cmdBuffer, imageData, mipMapSizes))
664 	{
665 		pass = false;
666 		failString = "Decompression failed";
667 	}
668 
669 	if (!pass)
670 		return TestStatus::fail(failString);
671 
672 	if (m_bASTCErrorColourMismatch)
673 	{
674 		DE_ASSERT(m_parameters.formatIsASTC);
675 		return TestStatusASTCQualityWarning();
676 	}
677 
678 	return TestStatus::pass("Pass");
679 }
680 
681 void BasicComputeTestInstance::copyDataToImage (const VkCommandPool&	cmdPool,
682 												const VkCommandBuffer&	cmdBuffer,
683 												ImageData&				imageData,
684 												const vector<UVec3>&	mipMapSizes,
685 												const bool				isCompressed)
686 {
687 	const DeviceInterface&		vk			= m_context.getDeviceInterface();
688 	const VkDevice				device		= m_context.getDevice();
689 	const VkQueue				queue		= m_context.getUniversalQueue();
690 	Allocator&					allocator	= m_context.getDefaultAllocator();
691 
692 	BufferWithMemory			imageBuffer	(vk, device, allocator,
693 												makeBufferCreateInfo(m_data.size(), VK_BUFFER_USAGE_TRANSFER_SRC_BIT),
694 												MemoryRequirement::HostVisible);
695 	VkDeviceSize				offset		= 0ull;
696 	{
697 		const Allocation& alloc = imageBuffer.getAllocation();
698 		deMemcpy(alloc.getHostPtr(), &m_data[0], m_data.size());
699 		flushAlloc(vk, device, alloc);
700 	}
701 
702 	beginCommandBuffer(vk, cmdBuffer);
703 	const VkImageSubresourceRange	subresourceRange		=
704 	{
705 		VK_IMAGE_ASPECT_COLOR_BIT,					//VkImageAspectFlags	aspectMask
706 		0u,											//deUint32				baseMipLevel
707 		imageData.getImageInfo(0u).mipLevels,		//deUint32				levelCount
708 		0u,											//deUint32				baseArrayLayer
709 		imageData.getImageInfo(0u).arrayLayers		//deUint32				layerCount
710 	};
711 
712 	for (deUint32 imageNdx = 0u; imageNdx < imageData.getImagesCount(); ++imageNdx)
713 	{
714 		const VkImageMemoryBarrier		preCopyImageBarrier		= makeImageMemoryBarrier(
715 																	0u, VK_ACCESS_TRANSFER_WRITE_BIT,
716 																	VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
717 																	imageData.getImage(imageNdx), subresourceRange);
718 
719 		const VkBufferMemoryBarrier		FlushHostCopyBarrier	= makeBufferMemoryBarrier(
720 																	VK_ACCESS_HOST_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
721 																	imageBuffer.get(), 0ull, m_data.size());
722 
723 		vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT,
724 				(VkDependencyFlags)0, 0u, (const VkMemoryBarrier*)DE_NULL, 1u, &FlushHostCopyBarrier, 1u, &preCopyImageBarrier);
725 
726 		for (deUint32 mipNdx = 0u; mipNdx < imageData.getImageInfo(imageNdx).mipLevels; ++mipNdx)
727 		{
728 			const VkExtent3D				imageExtent				= isCompressed ?
729 																		makeExtent3D(mipMapSizes[mipNdx]) :
730 																		imageData.getImageInfo(imageNdx).extent;
731 			const VkBufferImageCopy			copyRegion				=
732 			{
733 				offset,																												//VkDeviceSize				bufferOffset;
734 				0u,																													//deUint32					bufferRowLength;
735 				0u,																													//deUint32					bufferImageHeight;
736 				makeImageSubresourceLayers(VK_IMAGE_ASPECT_COLOR_BIT, mipNdx, 0u, imageData.getImageInfo(imageNdx).arrayLayers),	//VkImageSubresourceLayers	imageSubresource;
737 				makeOffset3D(0, 0, 0),																								//VkOffset3D				imageOffset;
738 				imageExtent,																										//VkExtent3D				imageExtent;
739 			};
740 
741 			vk.cmdCopyBufferToImage(cmdBuffer, imageBuffer.get(), imageData.getImage(imageNdx), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &copyRegion);
742 			offset += getCompressedImageSizeInBytes(m_parameters.formatCompressed,
743 						UVec3(isCompressed ? imageExtent.width : imageExtent.width * m_blockWidth, isCompressed? imageExtent.height :imageExtent.height * m_blockHeight,imageExtent.depth)) *
744 						imageData.getImageInfo(imageNdx).arrayLayers;
745 		}
746 	}
747 	endCommandBuffer(vk, cmdBuffer);
748 	submitCommandsAndWait(vk, device, queue, cmdBuffer);
749 	m_context.resetCommandPoolForVKSC(device, cmdPool);
750 }
751 
752 void BasicComputeTestInstance::executeShader (const VkCommandPool&			cmdPool,
753 											  const VkCommandBuffer&		cmdBuffer,
754 											  const VkDescriptorSetLayout&	descriptorSetLayout,
755 											  const VkDescriptorPool&		descriptorPool,
756 											  vector<ImageData>&			imageData)
757 {
758 	const DeviceInterface&			vk						= m_context.getDeviceInterface();
759 	const VkDevice					device					= m_context.getDevice();
760 	const VkQueue					queue					= m_context.getUniversalQueue();
761 	const Unique<VkShaderModule>	shaderModule			(createShaderModule(vk, device, m_context.getBinaryCollection().get("comp"), 0));
762 	vector<SharedVkDescriptorSet>	descriptorSets			(imageData[0].getImageViewCount());
763 	const Unique<VkPipelineLayout>	pipelineLayout			(makePipelineLayout(vk, device, descriptorSetLayout));
764 	const Unique<VkPipeline>		pipeline				(makeComputePipeline(vk, device, *pipelineLayout, *shaderModule));
765 	Move<VkSampler>					sampler;
766 	{
767 		const VkSamplerCreateInfo createInfo =
768 		{
769 			VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO,		//VkStructureType		sType;
770 			DE_NULL,									//const void*			pNext;
771 			0u,											//VkSamplerCreateFlags	flags;
772 			VK_FILTER_NEAREST,							//VkFilter				magFilter;
773 			VK_FILTER_NEAREST,							//VkFilter				minFilter;
774 			VK_SAMPLER_MIPMAP_MODE_NEAREST,				//VkSamplerMipmapMode	mipmapMode;
775 			VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,		//VkSamplerAddressMode	addressModeU;
776 			VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,		//VkSamplerAddressMode	addressModeV;
777 			VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,		//VkSamplerAddressMode	addressModeW;
778 			0.0f,										//float					mipLodBias;
779 			VK_FALSE,									//VkBool32				anisotropyEnable;
780 			1.0f,										//float					maxAnisotropy;
781 			VK_FALSE,									//VkBool32				compareEnable;
782 			VK_COMPARE_OP_EQUAL,						//VkCompareOp			compareOp;
783 			0.0f,										//float					minLod;
784 			0.0f,										//float					maxLod;
785 			VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK,	//VkBorderColor			borderColor;
786 			VK_FALSE,									//VkBool32				unnormalizedCoordinates;
787 		};
788 		sampler = createSampler(vk, device, &createInfo);
789 	}
790 
791 	vector<VkDescriptorImageInfo>	descriptorImageInfos	(descriptorSets.size() * m_parameters.imagesCount);
792 	for (deUint32 viewNdx = 0u; viewNdx < descriptorSets.size(); ++viewNdx)
793 	{
794 		const deUint32 descriptorNdx = viewNdx * m_parameters.imagesCount;
795 		for (deUint32 imageNdx = 0; imageNdx < m_parameters.imagesCount; ++imageNdx)
796 		{
797 			descriptorImageInfos[descriptorNdx+imageNdx] = makeDescriptorImageInfo(*sampler,
798 															imageData[imageNdx].getImageView(viewNdx), VK_IMAGE_LAYOUT_GENERAL);
799 		}
800 	}
801 
802 	for (deUint32 ndx = 0u; ndx < descriptorSets.size(); ++ndx)
803 		descriptorSets[ndx] = makeVkSharedPtr(makeDescriptorSet(vk, device, descriptorPool, descriptorSetLayout));
804 
805 	beginCommandBuffer(vk, cmdBuffer);
806 	{
807 		const VkImageSubresourceRange	compressedRange				=
808 		{
809 			VK_IMAGE_ASPECT_COLOR_BIT,					//VkImageAspectFlags	aspectMask
810 			0u,											//deUint32				baseMipLevel
811 			imageData[0].getImageInfo(0u).mipLevels,	//deUint32				levelCount
812 			0u,											//deUint32				baseArrayLayer
813 			imageData[0].getImageInfo(0u).arrayLayers	//deUint32				layerCount
814 		};
815 		const VkImageSubresourceRange	uncompressedRange			=
816 		{
817 			VK_IMAGE_ASPECT_COLOR_BIT,					//VkImageAspectFlags	aspectMask
818 			0u,											//deUint32				baseMipLevel
819 			1u,											//deUint32				levelCount
820 			0u,											//deUint32				baseArrayLayer
821 			1u											//deUint32				layerCount
822 		};
823 
824 		vk.cmdBindPipeline(cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *pipeline);
825 
826 		vector<VkImageMemoryBarrier>		preShaderImageBarriers;
827 		preShaderImageBarriers.resize(descriptorSets.size() + 1u);
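		// One barrier per uncompressed image, plus one (the last slot) for the compressed source image.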
828 		for (deUint32 imageNdx = 0u; imageNdx < imageData[1].getImagesCount(); ++imageNdx)
829 		{
830 			preShaderImageBarriers[imageNdx]= makeImageMemoryBarrier(
831 												VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_SHADER_WRITE_BIT,
832 												VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL,
833 												imageData[1].getImage(imageNdx), uncompressedRange);
834 		}
835 
836 		preShaderImageBarriers[descriptorSets.size()] = makeImageMemoryBarrier(
837 															VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_SHADER_READ_BIT,
838 															VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL,
839 															imageData[0].getImage(0), compressedRange);
840 
841 		vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT,
842 			(VkDependencyFlags)0, 0u, (const VkMemoryBarrier*)DE_NULL, 0u, (const VkBufferMemoryBarrier*)DE_NULL,
843 			static_cast<deUint32>(preShaderImageBarriers.size()), &preShaderImageBarriers[0]);
844 
845 		for (deUint32 ndx = 0u; ndx <descriptorSets.size(); ++ndx)
846 		{
847 			descriptorSetUpdate (**descriptorSets[ndx], &descriptorImageInfos[ndx* m_parameters.imagesCount]);
848 			vk.cmdBindDescriptorSets(cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *pipelineLayout, 0u, 1u, &(**descriptorSets[ndx]), 0u, DE_NULL);
849 			vk.cmdDispatch(cmdBuffer,	imageData[1].getImageInfo(ndx).extent.width,
850 										imageData[1].getImageInfo(ndx).extent.height,
851 										imageData[1].getImageInfo(ndx).extent.depth);
852 		}
853 	}
854 	endCommandBuffer(vk, cmdBuffer);
855 	submitCommandsAndWait(vk, device, queue, cmdBuffer);
856 	m_context.resetCommandPoolForVKSC(device, cmdPool);
857 }
858 
859 bool BasicComputeTestInstance::copyResultAndCompare (const VkCommandPool&	cmdPool,
860 													 const VkCommandBuffer&	cmdBuffer,
861 													 const VkImage&			uncompressed,
862 													 const VkDeviceSize		offset,
863 													 const UVec3&			size)
864 {
865 	const DeviceInterface&	vk					= m_context.getDeviceInterface();
866 	const VkQueue			queue				= m_context.getUniversalQueue();
867 	const VkDevice			device				= m_context.getDevice();
868 	Allocator&				allocator			= m_context.getDefaultAllocator();
869 
870 	VkDeviceSize			imageResultSize		= getImageSizeBytes (tcu::IVec3(size.x(), size.y(), size.z()), m_parameters.formatUncompressed);
871 	BufferWithMemory		imageBufferResult	(vk, device, allocator,
872 													makeBufferCreateInfo(imageResultSize, VK_BUFFER_USAGE_TRANSFER_DST_BIT),
873 													MemoryRequirement::HostVisible);
874 
875 	beginCommandBuffer(vk, cmdBuffer);
876 	{
877 		const VkImageSubresourceRange	subresourceRange	=
878 		{
879 			VK_IMAGE_ASPECT_COLOR_BIT,											//VkImageAspectFlags	aspectMask
880 			0u,																	//deUint32				baseMipLevel
881 			1u,																	//deUint32				levelCount
882 			0u,																	//deUint32				baseArrayLayer
883 			1u																	//deUint32				layerCount
884 		};
885 
886 		const VkBufferImageCopy			copyRegion			=
887 		{
888 			0ull,																//	VkDeviceSize				bufferOffset;
889 			0u,																	//	deUint32					bufferRowLength;
890 			0u,																	//	deUint32					bufferImageHeight;
891 			makeImageSubresourceLayers(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 0u, 1u),	//	VkImageSubresourceLayers	imageSubresource;
892 			makeOffset3D(0, 0, 0),												//	VkOffset3D					imageOffset;
893 			makeExtent3D(size),													//	VkExtent3D					imageExtent;
894 		};
895 
896 		const VkImageMemoryBarrier prepareForTransferBarrier = makeImageMemoryBarrier(
897 																VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
898 																VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
899 																uncompressed, subresourceRange);
900 
901 		const VkBufferMemoryBarrier copyBarrier = makeBufferMemoryBarrier(
902 													VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_HOST_READ_BIT,
903 													imageBufferResult.get(), 0ull, imageResultSize);
904 
905 		vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0, (const VkBufferMemoryBarrier*)DE_NULL, 1u, &prepareForTransferBarrier);
906 		vk.cmdCopyImageToBuffer(cmdBuffer, uncompressed, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, imageBufferResult.get(), 1u, &copyRegion);
907 		vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_HOST_BIT, (VkDependencyFlags)0, 0u, (const VkMemoryBarrier*)DE_NULL, 1, &copyBarrier, 0u, (const VkImageMemoryBarrier*)DE_NULL);
908 	}
909 	endCommandBuffer(vk, cmdBuffer);
910 	submitCommandsAndWait(vk, device, queue, cmdBuffer);
911 	m_context.resetCommandPoolForVKSC(device, cmdPool);
912 
913 	const Allocation& allocResult = imageBufferResult.getAllocation();
914 	invalidateAlloc(vk, device, allocResult);
915 	if (deMemCmp((const void *)allocResult.getHostPtr(), (const void *)&m_data[static_cast<size_t>(offset)], static_cast<size_t>(imageResultSize)) == 0ull)
916 		return true;
917 	return false;
918 }
919 
920 void BasicComputeTestInstance::descriptorSetUpdate (VkDescriptorSet descriptorSet, const VkDescriptorImageInfo* descriptorImageInfos)
921 {
922 	const DeviceInterface&		vk		= m_context.getDeviceInterface();
923 	const VkDevice				device	= m_context.getDevice();
924 	DescriptorSetUpdateBuilder	descriptorSetUpdateBuilder;
925 
926 	switch(m_parameters.operation)
927 	{
928 		case OPERATION_IMAGE_LOAD:
929 		case OPERATION_IMAGE_STORE:
930 		{
931 			for (deUint32 bindingNdx = 0u; bindingNdx < m_parameters.imagesCount; ++bindingNdx)
932 				descriptorSetUpdateBuilder.writeSingle(descriptorSet, DescriptorSetUpdateBuilder::Location::binding(bindingNdx), VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, &descriptorImageInfos[bindingNdx]);
933 
934 			break;
935 		}
936 
937 		case OPERATION_TEXEL_FETCH:
938 		case OPERATION_TEXTURE:
939 		{
940 			for (deUint32 bindingNdx = 0u; bindingNdx < m_parameters.imagesCount; ++bindingNdx)
941 			{
942 				descriptorSetUpdateBuilder.writeSingle(descriptorSet, DescriptorSetUpdateBuilder::Location::binding(bindingNdx),
943 					bindingNdx == 0u ? VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER : VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, &descriptorImageInfos[bindingNdx]);
944 			}
945 
946 			break;
947 		}
948 
949 		default:
950 			DE_ASSERT(false);
951 	}
952 	descriptorSetUpdateBuilder.update(vk, device);
953 }
954 
955 void BasicComputeTestInstance::createImageInfos (ImageData& imageData, const vector<UVec3>& mipMapSizes, const bool isCompressed)
956 {
957 	const VkImageType		imageType			= mapImageType(m_parameters.imageType);
958 
959 	if (isCompressed)
960 	{
961 		const VkExtent3D	extentCompressed	= makeExtent3D(getLayerSize(m_parameters.imageType, m_parameters.size));
962 		const VkImageCreateInfo compressedInfo =
963 		{
964 			VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,					// VkStructureType			sType;
965 			DE_NULL,												// const void*				pNext;
966 			VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT |
967 			VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT |
968 			VK_IMAGE_CREATE_EXTENDED_USAGE_BIT,						// VkImageCreateFlags		flags;
969 			imageType,												// VkImageType				imageType;
970 			m_parameters.formatCompressed,							// VkFormat					format;
971 			extentCompressed,										// VkExtent3D				extent;
972 			static_cast<deUint32>(mipMapSizes.size()),				// deUint32					mipLevels;
973 			getLayerCount(),										// deUint32					arrayLayers;
974 			VK_SAMPLE_COUNT_1_BIT,									// VkSampleCountFlagBits	samples;
975 			VK_IMAGE_TILING_OPTIMAL,								// VkImageTiling			tiling;
976 			VK_IMAGE_USAGE_SAMPLED_BIT |
977 			VK_IMAGE_USAGE_STORAGE_BIT |
978 			VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
979 			VK_IMAGE_USAGE_TRANSFER_DST_BIT,						// VkImageUsageFlags		usage;
980 			VK_SHARING_MODE_EXCLUSIVE,								// VkSharingMode			sharingMode;
981 			0u,														// deUint32					queueFamilyIndexCount;
982 			DE_NULL,												// const deUint32*			pQueueFamilyIndices;
983 			VK_IMAGE_LAYOUT_UNDEFINED,								// VkImageLayout			initialLayout;
984 		};
985 
986 		VkImageFormatProperties imageFormatProperties;
987 		if (m_context.getInstanceInterface().getPhysicalDeviceImageFormatProperties(m_context.getPhysicalDevice(), compressedInfo.format, compressedInfo.imageType, compressedInfo.tiling, compressedInfo.usage, compressedInfo.flags, &imageFormatProperties) != VK_SUCCESS)
988 			TCU_THROW(NotSupportedError, "Image parameters not supported");
989 
990 		imageData.addImageInfo(compressedInfo);
991 	}
992 	else
993 	{
994 		UVec3 size = m_parameters.size;
995 		if (m_parameters.imageType == IMAGE_TYPE_1D)
996 			size.y() = 1;
997 
998 		size.z() = 1;
999 		const VkExtent3D originalResolutionInBlocks = makeExtent3D(getCompressedImageResolutionInBlocks(m_parameters.formatCompressed, size));
1000 
1001 		for (size_t mipNdx = 0ull; mipNdx < mipMapSizes.size(); ++mipNdx)
1002 		for (size_t layerNdx = 0ull; layerNdx < getLayerCount(); ++layerNdx)
1003 		{
1004 			const VkExtent3D		extentUncompressed	= m_parameters.useMipmaps ?
1005 															makeExtent3D(getCompressedImageResolutionInBlocks(m_parameters.formatCompressed, mipMapSizes[mipNdx])) :
1006 															originalResolutionInBlocks;
1007 			const VkImageCreateInfo	uncompressedInfo	=
1008 			{
1009 				VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,				// VkStructureType			sType;
1010 				DE_NULL,											// const void*				pNext;
1011 				0u,													// VkImageCreateFlags		flags;
1012 				imageType,											// VkImageType				imageType;
1013 				m_parameters.formatUncompressed,					// VkFormat					format;
1014 				extentUncompressed,									// VkExtent3D				extent;
1015 				1u,													// deUint32					mipLevels;
1016 				1u,													// deUint32					arrayLayers;
1017 				VK_SAMPLE_COUNT_1_BIT,								// VkSampleCountFlagBits	samples;
1018 				VK_IMAGE_TILING_OPTIMAL,							// VkImageTiling			tiling;
1019 				m_parameters.uncompressedImageUsage |
1020 				VK_IMAGE_USAGE_SAMPLED_BIT,							// VkImageUsageFlags		usage;
1021 				VK_SHARING_MODE_EXCLUSIVE,							// VkSharingMode			sharingMode;
1022 				0u,													// deUint32					queueFamilyIndexCount;
1023 				DE_NULL,											// const deUint32*			pQueueFamilyIndices;
1024 				VK_IMAGE_LAYOUT_UNDEFINED,							// VkImageLayout			initialLayout;
1025 			};
1026 			imageData.addImageInfo(uncompressedInfo);
1027 		}
1028 	}
1029 }
1030 
1031 bool BasicComputeTestInstance::decompressImage (const VkCommandPool&	cmdPool,
1032 												 const VkCommandBuffer&	cmdBuffer,
1033 												 vector<ImageData>&		imageData,
1034 												 const vector<UVec3>&	mipMapSizes)
1035 {
1036 	const DeviceInterface&			vk						= m_context.getDeviceInterface();
1037 	const VkDevice					device					= m_context.getDevice();
1038 	const VkQueue					queue					= m_context.getUniversalQueue();
1039 	Allocator&						allocator				= m_context.getDefaultAllocator();
1040 	const Unique<VkShaderModule>	shaderModule			(createShaderModule(vk, device, m_context.getBinaryCollection().get("decompress"), 0));
1041 	const VkImage&					compressed				= imageData[0].getImage(0);
1042 	const VkImageType				imageType				= mapImageType(m_parameters.imageType);
1043 
1044 	for (deUint32 ndx = 0u; ndx < imageData.size(); ndx++)
1045 		imageData[ndx].resetViews();
1046 
1047 	for (deUint32 mipNdx = 0u; mipNdx < mipMapSizes.size(); ++mipNdx)
1048 	for (deUint32 layerNdx = 0u; layerNdx < getLayerCount(); ++layerNdx)
1049 	{
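		// Alternate the sampled-image layout between layers so both SHADER_READ_ONLY_OPTIMAL
		// and GENERAL are exercised when sampling the compressed and uncompressed views.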
1050 		const bool						layoutShaderReadOnly	= (layerNdx % 2u) == 1;
1051 		const deUint32					imageNdx				= layerNdx + mipNdx * getLayerCount();
1052 		const VkExtent3D				extentCompressed		= makeExtent3D(mipMapSizes[mipNdx]);
1053 		const VkImage&					uncompressed			= imageData[m_parameters.imagesCount -1].getImage(imageNdx);
1054 		const VkExtent3D				extentUncompressed		= imageData[m_parameters.imagesCount -1].getImageInfo(imageNdx).extent;
1055 		const VkDeviceSize				bufferSizeComp			= getCompressedImageSizeInBytes(m_parameters.formatCompressed, mipMapSizes[mipNdx]);
1056 
1057 		VkFormatProperties properties;
1058 		m_context.getInstanceInterface().getPhysicalDeviceFormatProperties(m_context.getPhysicalDevice(), m_parameters.formatForVerify, &properties);
1059 		if (!(properties.optimalTilingFeatures & VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT))
1060 			TCU_THROW(NotSupportedError, "Format storage feature not supported");
1061 
1062 		const VkImageCreateInfo			decompressedImageInfo	=
1063 		{
1064 			VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,								// VkStructureType			sType;
1065 			DE_NULL,															// const void*				pNext;
1066 			0u,																	// VkImageCreateFlags		flags;
1067 			imageType,															// VkImageType				imageType;
1068 			m_parameters.formatForVerify,										// VkFormat					format;
1069 			extentCompressed,													// VkExtent3D				extent;
1070 			1u,																	// deUint32					mipLevels;
1071 			1u,																	// deUint32					arrayLayers;
1072 			VK_SAMPLE_COUNT_1_BIT,												// VkSampleCountFlagBits	samples;
1073 			VK_IMAGE_TILING_OPTIMAL,											// VkImageTiling			tiling;
1074 			VK_IMAGE_USAGE_SAMPLED_BIT |
1075 			VK_IMAGE_USAGE_STORAGE_BIT |
1076 			VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
1077 			VK_IMAGE_USAGE_TRANSFER_DST_BIT,									// VkImageUsageFlags		usage;
1078 			VK_SHARING_MODE_EXCLUSIVE,											// VkSharingMode			sharingMode;
1079 			0u,																	// deUint32					queueFamilyIndexCount;
1080 			DE_NULL,															// const deUint32*			pQueueFamilyIndices;
1081 			VK_IMAGE_LAYOUT_UNDEFINED,											// VkImageLayout			initialLayout;
1082 		};
1083 
1084 		const VkImageCreateInfo			compressedImageInfo		=
1085 		{
1086 			VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,								// VkStructureType			sType;
1087 			DE_NULL,															// const void*				pNext;
1088 			0u,																	// VkImageCreateFlags		flags;
1089 			imageType,															// VkImageType				imageType;
1090 			m_parameters.formatCompressed,										// VkFormat					format;
1091 			extentCompressed,													// VkExtent3D				extent;
1092 			1u,																	// deUint32					mipLevels;
1093 			1u,																	// deUint32					arrayLayers;
1094 			VK_SAMPLE_COUNT_1_BIT,												// VkSampleCountFlagBits	samples;
1095 			VK_IMAGE_TILING_OPTIMAL,											// VkImageTiling			tiling;
1096 			VK_IMAGE_USAGE_SAMPLED_BIT |
1097 			VK_IMAGE_USAGE_TRANSFER_DST_BIT,									// VkImageUsageFlags		usage;
1098 			VK_SHARING_MODE_EXCLUSIVE,											// VkSharingMode			sharingMode;
1099 			0u,																	// deUint32					queueFamilyIndexCount;
1100 			DE_NULL,															// const deUint32*			pQueueFamilyIndices;
1101 			VK_IMAGE_LAYOUT_UNDEFINED,											// VkImageLayout			initialLayout;
1102 		};
1103 		const VkImageUsageFlags				compressedViewUsageFlags	= VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
1104 		const VkImageViewUsageCreateInfo	compressedViewUsageCI		=
1105 		{
1106 			VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO,						//VkStructureType		sType;
1107 			DE_NULL,															//const void*			pNext;
1108 			compressedViewUsageFlags,											//VkImageUsageFlags		usage;
1109 		};
1110 		const VkImageViewType			imageViewType			(mapImageViewType(m_parameters.imageType));
1111 		Image							resultImage				(vk, device, allocator, decompressedImageInfo, MemoryRequirement::Any);
1112 		Image							referenceImage			(vk, device, allocator, decompressedImageInfo, MemoryRequirement::Any);
1113 		Image							uncompressedImage		(vk, device, allocator, compressedImageInfo, MemoryRequirement::Any);
1114 		Move<VkImageView>				resultView				= makeImageView(vk, device, resultImage.get(), imageViewType, decompressedImageInfo.format,
1115 																	makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, decompressedImageInfo.extent.depth, 0u, decompressedImageInfo.arrayLayers));
1116 		Move<VkImageView>				referenceView			= makeImageView(vk, device, referenceImage.get(), imageViewType, decompressedImageInfo.format,
1117 																	makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, decompressedImageInfo.extent.depth, 0u, decompressedImageInfo.arrayLayers));
1118 		Move<VkImageView>				uncompressedView		= makeImageView(vk, device, uncompressedImage.get(), imageViewType, m_parameters.formatCompressed,
1119 																	makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, compressedImageInfo.extent.depth, 0u, compressedImageInfo.arrayLayers));
1120 		bool const						useMultiLayer			= m_parameters.imageType == IMAGE_TYPE_2D && m_parameters.layers > 1u;
1121 		Move<VkImageView>				compressedView			= (useMultiLayer) ?
1122 																	makeImageView(vk, device, compressed, VK_IMAGE_VIEW_TYPE_2D_ARRAY, m_parameters.formatCompressed,
1123 																		makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, static_cast<uint32_t>(mipMapSizes.size()), 0u, m_parameters.layers), &compressedViewUsageCI) :
1124 																	makeImageView(vk, device, compressed, imageViewType, m_parameters.formatCompressed,
1125 																		makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, mipNdx, 1u, layerNdx, 1u), &compressedViewUsageCI);
1126 		Move<VkDescriptorSetLayout>		descriptorSetLayout		= DescriptorSetLayoutBuilder()
1127 																	.addSingleBinding(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, VK_SHADER_STAGE_COMPUTE_BIT)
1128 																	.addSingleBinding(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, VK_SHADER_STAGE_COMPUTE_BIT)
1129 																	.addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, VK_SHADER_STAGE_COMPUTE_BIT)
1130 																	.addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, VK_SHADER_STAGE_COMPUTE_BIT)
1131 																	.build(vk, device);
1132 		Move<VkDescriptorPool>			descriptorPool			= DescriptorPoolBuilder()
1133 																	.addType(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, decompressedImageInfo.arrayLayers)
1134 																	.addType(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, decompressedImageInfo.arrayLayers)
1135 																	.addType(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, decompressedImageInfo.arrayLayers)
1136 																	.addType(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, decompressedImageInfo.arrayLayers)
1137 																	.build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, decompressedImageInfo.arrayLayers);
1138 
1139 		Move<VkDescriptorSet>			descriptorSet			= makeDescriptorSet(vk, device, *descriptorPool, *descriptorSetLayout);
1140 		const struct PushData { uint32_t layer; uint32_t level; }	pushData
1141 																= { layerNdx, mipNdx };
1142 		const VkPushConstantRange		pushConstantRange		= { VK_SHADER_STAGE_COMPUTE_BIT, 0u, static_cast<uint32_t>(sizeof pushData) };
1143 		const Unique<VkPipelineLayout>	pipelineLayout			(makePipelineLayout(vk, device, 1u, &descriptorSetLayout.get(), 1u, &pushConstantRange));
1144 		const Unique<VkPipeline>		pipeline				(makeComputePipeline(vk, device, *pipelineLayout, *shaderModule));
1145 		const VkDeviceSize				bufferSize				= getImageSizeBytes(IVec3((int)extentCompressed.width, (int)extentCompressed.height, (int)extentCompressed.depth), m_parameters.formatForVerify);
1146 		BufferWithMemory				resultBuffer			(vk, device, allocator,
1147 																	makeBufferCreateInfo(bufferSize, VK_BUFFER_USAGE_TRANSFER_DST_BIT), MemoryRequirement::HostVisible);
1148 		BufferWithMemory				referenceBuffer			(vk, device, allocator,
1149 																	makeBufferCreateInfo(bufferSize, VK_BUFFER_USAGE_TRANSFER_DST_BIT), MemoryRequirement::HostVisible);
1150 		BufferWithMemory				transferBuffer			(vk, device, allocator,
1151 																	makeBufferCreateInfo(bufferSizeComp, VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT), MemoryRequirement::HostVisible);
1152 		Move<VkSampler>					sampler;
1153 		{
1154 			const VkSamplerCreateInfo createInfo	=
1155 			{
1156 				VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO,							//VkStructureType		sType;
1157 				DE_NULL,														//const void*			pNext;
1158 				0u,																//VkSamplerCreateFlags	flags;
1159 				VK_FILTER_NEAREST,												//VkFilter				magFilter;
1160 				VK_FILTER_NEAREST,												//VkFilter				minFilter;
1161 				VK_SAMPLER_MIPMAP_MODE_NEAREST,									//VkSamplerMipmapMode	mipmapMode;
1162 				VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,							//VkSamplerAddressMode	addressModeU;
1163 				VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,							//VkSamplerAddressMode	addressModeV;
1164 				VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,							//VkSamplerAddressMode	addressModeW;
1165 				0.0f,															//float					mipLodBias;
1166 				VK_FALSE,														//VkBool32				anisotropyEnable;
1167 				1.0f,															//float					maxAnisotropy;
1168 				VK_FALSE,														//VkBool32				compareEnable;
1169 				VK_COMPARE_OP_EQUAL,											//VkCompareOp			compareOp;
1170 				0.0f,															//float					minLod;
1171 				(float)mipMapSizes.size(),										//float					maxLod;
1172 				VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK,						//VkBorderColor			borderColor;
1173 				VK_FALSE,														//VkBool32				unnormalizedCoordinates;
1174 			};
1175 			sampler = createSampler(vk, device, &createInfo);
1176 		}
1177 
1178 		VkDescriptorImageInfo			descriptorImageInfos[]	=
1179 		{
1180 			makeDescriptorImageInfo(*sampler,	*uncompressedView,	layoutShaderReadOnly ? VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL : VK_IMAGE_LAYOUT_GENERAL),
1181 			makeDescriptorImageInfo(*sampler,	*compressedView,	layoutShaderReadOnly ? VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL : VK_IMAGE_LAYOUT_GENERAL),
1182 			makeDescriptorImageInfo(DE_NULL,	*resultView,		VK_IMAGE_LAYOUT_GENERAL),
1183 			makeDescriptorImageInfo(DE_NULL,	*referenceView,		VK_IMAGE_LAYOUT_GENERAL)
1184 		};
1185 		DescriptorSetUpdateBuilder()
1186 			.writeSingle(descriptorSet.get(), DescriptorSetUpdateBuilder::Location::binding(0u), VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, &descriptorImageInfos[0])
1187 			.writeSingle(descriptorSet.get(), DescriptorSetUpdateBuilder::Location::binding(1u), VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, &descriptorImageInfos[1])
1188 			.writeSingle(descriptorSet.get(), DescriptorSetUpdateBuilder::Location::binding(2u), VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, &descriptorImageInfos[2])
1189 			.writeSingle(descriptorSet.get(), DescriptorSetUpdateBuilder::Location::binding(3u), VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, &descriptorImageInfos[3])
1190 			.update(vk, device);
1191 
1192 
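		// Record the verification pass: stage the uncompressed data through the transfer buffer into a
		// fresh uncompressed image, let the compute shader decode both it and the compressed image into
		// the result/reference images, and copy those out for host-side comparison.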
1193 		beginCommandBuffer(vk, cmdBuffer);
1194 		{
1195 			const VkImageSubresourceRange	subresourceRange		=
1196 			{
1197 				VK_IMAGE_ASPECT_COLOR_BIT,											//VkImageAspectFlags			aspectMask
1198 				0u,																	//deUint32						baseMipLevel
1199 				1u,																	//deUint32						levelCount
1200 				0u,																	//deUint32						baseArrayLayer
1201 				1u																	//deUint32						layerCount
1202 			};
1203 
1204 			const VkImageSubresourceRange	subresourceRangeComp	=
1205 			{
1206 				VK_IMAGE_ASPECT_COLOR_BIT,											//VkImageAspectFlags			aspectMask
1207 				(useMultiLayer) ? 0u : mipNdx,										//deUint32						baseMipLevel
1208 				(useMultiLayer) ? static_cast<uint32_t>(mipMapSizes.size()) : 1u,	//deUint32						levelCount
1209 				(useMultiLayer) ? 0u : layerNdx,									//deUint32						baseArrayLayer
1210 				(useMultiLayer) ? m_parameters.layers : 1u							//deUint32						layerCount
1211 			};
1212 
1213 			const VkBufferImageCopy			copyRegion				=
1214 			{
1215 				0ull,																//	VkDeviceSize				bufferOffset;
1216 				0u,																	//	deUint32					bufferRowLength;
1217 				0u,																	//	deUint32					bufferImageHeight;
1218 				makeImageSubresourceLayers(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 0u, 1u),	//	VkImageSubresourceLayers	imageSubresource;
1219 				makeOffset3D(0, 0, 0),												//	VkOffset3D					imageOffset;
1220 				decompressedImageInfo.extent,										//	VkExtent3D					imageExtent;
1221 			};
1222 
1223 			const VkBufferImageCopy			compressedCopyRegion	=
1224 			{
1225 				0ull,																//	VkDeviceSize				bufferOffset;
1226 				0u,																	//	deUint32					bufferRowLength;
1227 				0u,																	//	deUint32					bufferImageHeight;
1228 				makeImageSubresourceLayers(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 0u, 1u),	//	VkImageSubresourceLayers	imageSubresource;
1229 				makeOffset3D(0, 0, 0),												//	VkOffset3D					imageOffset;
1230 				extentUncompressed,													//	VkExtent3D					imageExtent;
1231 			};
1232 
1233 			{
1234 
1235 				const VkBufferMemoryBarrier		preCopyBufferBarriers	= makeBufferMemoryBarrier(0u, VK_ACCESS_TRANSFER_WRITE_BIT,
1236 																			transferBuffer.get(), 0ull, bufferSizeComp);
1237 
1238 				vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT,
1239 					(VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1u, &preCopyBufferBarriers, 0u, (const VkImageMemoryBarrier*)DE_NULL);
1240 			}
1241 
1242 			vk.cmdCopyImageToBuffer(cmdBuffer, uncompressed, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, transferBuffer.get(), 1u, &compressedCopyRegion);
1243 
1244 			{
1245 				const VkBufferMemoryBarrier		postCopyBufferBarriers	= makeBufferMemoryBarrier(VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
1246 																			transferBuffer.get(), 0ull, bufferSizeComp);
1247 
1248 				const VkImageMemoryBarrier		preCopyImageBarriers	= makeImageMemoryBarrier(0u, VK_ACCESS_TRANSFER_WRITE_BIT,
1249 																			VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, uncompressedImage.get(), subresourceRange);
1250 
1251 				vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT,
1252 					(VkDependencyFlags)0, 0u, (const VkMemoryBarrier*)DE_NULL, 1u, &postCopyBufferBarriers, 1u, &preCopyImageBarriers);
1253 			}
1254 
1255 			vk.cmdCopyBufferToImage(cmdBuffer, transferBuffer.get(), uncompressedImage.get(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &copyRegion);
1256 
1257 			vk.cmdBindPipeline(cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *pipeline);
1258 			vk.cmdBindDescriptorSets(cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *pipelineLayout, 0u, 1u, &descriptorSet.get(), 0u, DE_NULL);
1259 
1260 			{
1261 				const VkImageMemoryBarrier		preShaderImageBarriers[]	=
1262 				{
1263 
1264 					makeImageMemoryBarrier(VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_SHADER_READ_BIT,
1265 						VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, layoutShaderReadOnly ? VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL : VK_IMAGE_LAYOUT_GENERAL,
1266 						uncompressedImage.get(), subresourceRange),
1267 
1268 					makeImageMemoryBarrier(0, VK_ACCESS_SHADER_READ_BIT,
1269 						(useMultiLayer && !layoutShaderReadOnly && layerNdx) ? VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL : VK_IMAGE_LAYOUT_GENERAL, layoutShaderReadOnly ? VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL : VK_IMAGE_LAYOUT_GENERAL,
1270 						compressed, subresourceRangeComp),
1271 
1272 					makeImageMemoryBarrier(0u, VK_ACCESS_SHADER_WRITE_BIT,
1273 						VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL,
1274 						resultImage.get(), subresourceRange),
1275 
1276 					makeImageMemoryBarrier(0u, VK_ACCESS_SHADER_WRITE_BIT,
1277 						VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL,
1278 						referenceImage.get(), subresourceRange)
1279 				};
1280 
1281 				vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT,
1282 					(VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, (const VkBufferMemoryBarrier*)DE_NULL,
1283 					DE_LENGTH_OF_ARRAY(preShaderImageBarriers), preShaderImageBarriers);
1284 			}
1285 
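			// The shader selects the layer and mip level under test through these push constants.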
1286 			vk.cmdPushConstants(cmdBuffer, *pipelineLayout, VK_SHADER_STAGE_COMPUTE_BIT, 0u, static_cast<uint32_t>(sizeof pushData), &pushData);
1287 			vk.cmdDispatch(cmdBuffer, extentCompressed.width, extentCompressed.height, extentCompressed.depth);
1288 
1289 			{
1290 				const VkImageMemoryBarrier		postShaderImageBarriers[]	=
1291 				{
1292 					makeImageMemoryBarrier(VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
1293 						VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
1294 						resultImage.get(), subresourceRange),
1295 
1296 					makeImageMemoryBarrier(VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
1297 						VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
1298 						referenceImage.get(), subresourceRange)
1299 				};
1300 
1301 				vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT,
1302 					(VkDependencyFlags)0, 0u, (const VkMemoryBarrier*)DE_NULL, 0u, (const VkBufferMemoryBarrier*)DE_NULL,
1303 					DE_LENGTH_OF_ARRAY(postShaderImageBarriers), postShaderImageBarriers);
1304 			}
1305 
1306 			vk.cmdCopyImageToBuffer(cmdBuffer, resultImage.get(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, resultBuffer.get(), 1u, &copyRegion);
1307 			vk.cmdCopyImageToBuffer(cmdBuffer, referenceImage.get(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, referenceBuffer.get(), 1u, &copyRegion);
1308 
1309 			{
1310 				const VkBufferMemoryBarrier		postCopyBufferBarrier[]		=
1311 				{
1312 					makeBufferMemoryBarrier(VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_HOST_READ_BIT,
1313 						resultBuffer.get(), 0ull, bufferSize),
1314 
1315 					makeBufferMemoryBarrier(VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_HOST_READ_BIT,
1316 						referenceBuffer.get(), 0ull, bufferSize),
1317 				};
1318 
1319 				vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_HOST_BIT,
1320 					(VkDependencyFlags)0, 0u, (const VkMemoryBarrier*)DE_NULL, DE_LENGTH_OF_ARRAY(postCopyBufferBarrier), postCopyBufferBarrier,
1321 					0u, (const VkImageMemoryBarrier*)DE_NULL);
1322 			}
1323 		}
1324 		endCommandBuffer(vk, cmdBuffer);
1325 		submitCommandsAndWait(vk, device, queue, cmdBuffer);
1326 		m_context.resetCommandPoolForVKSC(device, cmdPool);
1327 
1328 		const Allocation&		resultAlloc		= resultBuffer.getAllocation();
1329 		const Allocation&		referenceAlloc	= referenceBuffer.getAllocation();
1330 		invalidateAlloc(vk, device, resultAlloc);
1331 		invalidateAlloc(vk, device, referenceAlloc);
1332 
1333 		BinaryCompareMode compareMode =
1334 			(m_parameters.formatIsASTC)
1335 				? (COMPARE_MODE_ALLOW_ASTC_ERROR_COLOUR_WARNING)
1336 				: (COMPARE_MODE_NORMAL);
1337 
1338 		BinaryCompareResult res = BinaryCompare(referenceAlloc.getHostPtr(),
1339 												resultAlloc.getHostPtr(),
1340 												(size_t)bufferSize,
1341 												m_parameters.formatForVerify,
1342 												compareMode);
1343 
1344 		if (res == COMPARE_RESULT_FAILED)
1345 		{
1346 			ConstPixelBufferAccess	resultPixels		(mapVkFormat(decompressedImageInfo.format), decompressedImageInfo.extent.width, decompressedImageInfo.extent.height, decompressedImageInfo.extent.depth, resultAlloc.getHostPtr());
1347 			ConstPixelBufferAccess	referencePixels		(mapVkFormat(decompressedImageInfo.format), decompressedImageInfo.extent.width, decompressedImageInfo.extent.height, decompressedImageInfo.extent.depth, referenceAlloc.getHostPtr());
1348 
1349 			if (!fuzzyCompare(m_context.getTestContext().getLog(), "Image Comparison", "Image Comparison", resultPixels, referencePixels, 0.001f, tcu::COMPARE_LOG_EVERYTHING))
1350 				return false;
1351 		}
1352 		else if (res == COMPARE_RESULT_ASTC_QUALITY_WARNING)
1353 		{
1354 			m_bASTCErrorColourMismatch = true;
1355 		}
1356 	}
1357 
1358 	return true;
1359 }
1360 
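// Compute test variant for the image store operation; it overrides executeShader() to update one
// descriptor set per uncompressed view and dispatch the store shader for each of them.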
1361 class ImageStoreComputeTestInstance : public BasicComputeTestInstance
1362 {
1363 public:
1364 					ImageStoreComputeTestInstance	(Context&							context,
1365 													 const TestParameters&				parameters);
1366 protected:
1367 	virtual void	executeShader					(const VkCommandPool&				cmdPool,
1368 													 const VkCommandBuffer&				cmdBuffer,
1369 													 const VkDescriptorSetLayout&		descriptorSetLayout,
1370 													 const VkDescriptorPool&			descriptorPool,
1371 													 vector<ImageData>&					imageData);
1372 private:
1373 };
1374 
1375 ImageStoreComputeTestInstance::ImageStoreComputeTestInstance (Context& context, const TestParameters& parameters)
1376 	:BasicComputeTestInstance	(context, parameters)
1377 {
1378 }
1379 
1380 void ImageStoreComputeTestInstance::executeShader (const VkCommandPool&			cmdPool,
1381 												   const VkCommandBuffer&		cmdBuffer,
1382 												   const VkDescriptorSetLayout&	descriptorSetLayout,
1383 												   const VkDescriptorPool&		descriptorPool,
1384 												   vector<ImageData>&			imageData)
1385 {
1386 	const DeviceInterface&			vk						= m_context.getDeviceInterface();
1387 	const VkDevice					device					= m_context.getDevice();
1388 	const VkQueue					queue					= m_context.getUniversalQueue();
1389 	const Unique<VkShaderModule>	shaderModule			(createShaderModule(vk, device, m_context.getBinaryCollection().get("comp"), 0));
1390 	vector<SharedVkDescriptorSet>	descriptorSets			(imageData[0].getImageViewCount());
1391 	const Unique<VkPipelineLayout>	pipelineLayout			(makePipelineLayout(vk, device, descriptorSetLayout));
1392 	const Unique<VkPipeline>		pipeline				(makeComputePipeline(vk, device, *pipelineLayout, *shaderModule));
1393 	Move<VkSampler>					sampler;
1394 	{
1395 		const VkSamplerCreateInfo createInfo =
1396 		{
1397 			VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO,		//VkStructureType		sType;
1398 			DE_NULL,									//const void*			pNext;
1399 			0u,											//VkSamplerCreateFlags	flags;
1400 			VK_FILTER_NEAREST,							//VkFilter				magFilter;
1401 			VK_FILTER_NEAREST,							//VkFilter				minFilter;
1402 			VK_SAMPLER_MIPMAP_MODE_NEAREST,				//VkSamplerMipmapMode	mipmapMode;
1403 			VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,		//VkSamplerAddressMode	addressModeU;
1404 			VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,		//VkSamplerAddressMode	addressModeV;
1405 			VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,		//VkSamplerAddressMode	addressModeW;
1406 			0.0f,										//float					mipLodBias;
1407 			VK_FALSE,									//VkBool32				anisotropyEnable;
1408 			1.0f,										//float					maxAnisotropy;
1409 			VK_FALSE,									//VkBool32				compareEnable;
1410 			VK_COMPARE_OP_EQUAL,						//VkCompareOp			compareOp;
1411 			0.0f,										//float					minLod;
1412 			0.0f,										//float					maxLod;
1413 			VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK,	//VkBorderColor			borderColor;
1414 			VK_TRUE,									//VkBool32				unnormalizedCoordinates;
1415 		};
1416 		sampler = createSampler(vk, device, &createInfo);
1417 	}
1418 
1419 	vector<VkDescriptorImageInfo>	descriptorImageInfos	(descriptorSets.size() * m_parameters.imagesCount);
1420 	for (deUint32 viewNdx = 0u; viewNdx < descriptorSets.size(); ++viewNdx)
1421 	{
1422 		const deUint32 descriptorNdx = viewNdx * m_parameters.imagesCount;
1423 		for (deUint32 imageNdx = 0u; imageNdx < m_parameters.imagesCount; ++imageNdx)
1424 		{
1425 			descriptorImageInfos[descriptorNdx+imageNdx] = makeDescriptorImageInfo(*sampler,
1426 															imageData[imageNdx].getImageView(viewNdx), VK_IMAGE_LAYOUT_GENERAL);
1427 		}
1428 	}
1429 
1430 	for (deUint32 ndx = 0u; ndx < descriptorSets.size(); ++ndx)
1431 		descriptorSets[ndx] = makeVkSharedPtr(makeDescriptorSet(vk, device, descriptorPool, descriptorSetLayout));
1432 
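	// Record the store pass: transition the images to VK_IMAGE_LAYOUT_GENERAL, then bind one
	// descriptor set per view and dispatch over the corresponding uncompressed extent.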
1433 	beginCommandBuffer(vk, cmdBuffer);
1434 	{
1435 		const VkImageSubresourceRange	compressedRange				=
1436 		{
1437 			VK_IMAGE_ASPECT_COLOR_BIT,					//VkImageAspectFlags	aspectMask
1438 			0u,											//deUint32				baseMipLevel
1439 			imageData[0].getImageInfo(0).mipLevels,		//deUint32				levelCount
1440 			0u,											//deUint32				baseArrayLayer
1441 			imageData[0].getImageInfo(0).arrayLayers	//deUint32				layerCount
1442 		};
1443 
1444 		const VkImageSubresourceRange	uncompressedRange			=
1445 		{
1446 			VK_IMAGE_ASPECT_COLOR_BIT,					//VkImageAspectFlags	aspectMask
1447 			0u,											//deUint32				baseMipLevel
1448 			1u,											//deUint32				levelCount
1449 			0u,											//deUint32				baseArrayLayer
1450 			1u											//deUint32				layerCount
1451 		};
1452 
1453 		vk.cmdBindPipeline(cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *pipeline);
1454 
1455 		vector<VkImageMemoryBarrier>		preShaderImageBarriers	(descriptorSets.size() * 2u + 1u);
1456 		for (deUint32 imageNdx = 0u; imageNdx < imageData[1].getImagesCount(); ++imageNdx)
1457 		{
1458 			preShaderImageBarriers[imageNdx]									= makeImageMemoryBarrier(
1459 																					VK_ACCESS_TRANSFER_WRITE_BIT, (VK_ACCESS_SHADER_READ_BIT|VK_ACCESS_SHADER_WRITE_BIT),
1460 																					VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL,
1461 																					imageData[1].getImage(imageNdx), uncompressedRange);
1462 
1463 			preShaderImageBarriers[imageNdx + imageData[1].getImagesCount()]	= makeImageMemoryBarrier(
1464 																					VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_SHADER_WRITE_BIT,
1465 																					VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL,
1466 																					imageData[2].getImage(imageNdx), uncompressedRange);
1467 		}
1468 
1469 		preShaderImageBarriers[preShaderImageBarriers.size()-1] = makeImageMemoryBarrier(
1470 																	VK_ACCESS_TRANSFER_WRITE_BIT, (VK_ACCESS_SHADER_READ_BIT|VK_ACCESS_SHADER_WRITE_BIT),
1471 																	VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL,
1472 																	imageData[0].getImage(0u), compressedRange);
1473 
1474 		vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT,
1475 			(VkDependencyFlags)0, 0u, (const VkMemoryBarrier*)DE_NULL, 0u, (const VkBufferMemoryBarrier*)DE_NULL,
1476 			static_cast<deUint32>(preShaderImageBarriers.size()), &preShaderImageBarriers[0]);
1477 
1478 		for (deUint32 ndx = 0u; ndx < descriptorSets.size(); ++ndx)
1479 		{
1480 			descriptorSetUpdate (**descriptorSets[ndx], &descriptorImageInfos[ndx * m_parameters.imagesCount]);
1481 			vk.cmdBindDescriptorSets(cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *pipelineLayout, 0u, 1u, &(**descriptorSets[ndx]), 0u, DE_NULL);
1482 			vk.cmdDispatch(cmdBuffer,	imageData[1].getImageInfo(ndx).extent.width,
1483 										imageData[1].getImageInfo(ndx).extent.height,
1484 										imageData[1].getImageInfo(ndx).extent.depth);
1485 		}
1486 	}
1487 	endCommandBuffer(vk, cmdBuffer);
1488 	submitCommandsAndWait(vk, device, queue, cmdBuffer);
1489 	m_context.resetCommandPoolForVKSC(device, cmdPool);
1490 }
1491 
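// Graphics path: transcodes between the compressed and uncompressed formats by rendering with the
// images bound as framebuffer attachments, then verifies the result by decompressing and comparing.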
1492 class GraphicsAttachmentsTestInstance : public BasicTranscodingTestInstance
1493 {
1494 public:
1495 										GraphicsAttachmentsTestInstance	(Context& context, const TestParameters& parameters);
1496 	virtual TestStatus					iterate							(void);
1497 
1498 protected:
1499 	virtual bool						isWriteToCompressedOperation	();
1500 	VkImageCreateInfo					makeCreateImageInfo				(const VkFormat					format,
1501 																		 const ImageType				type,
1502 																		 const UVec3&					size,
1503 																		 const VkImageUsageFlags		usageFlags,
1504 																		 const VkImageCreateFlags*		createFlags,
1505 																		 const deUint32					levels,
1506 																		 const deUint32					layers);
1507 	VkDeviceSize						getCompressedImageData			(const VkFormat					format,
1508 																		 const UVec3&					size,
1509 																		 std::vector<deUint8>&			data,
1510 																		 const deUint32					layer,
1511 																		 const deUint32					level);
1512 	VkDeviceSize						getUncompressedImageData		(const VkFormat					format,
1513 																		 const UVec3&					size,
1514 																		 std::vector<deUint8>&			data,
1515 																		 const deUint32					layer,
1516 																		 const deUint32					level);
1517 	virtual void						prepareData						();
1518 	virtual void						prepareVertexBuffer				();
1519 	virtual void						transcodeRead					(const VkCommandPool&			cmdPool);
1520 	virtual void						transcodeWrite					(const VkCommandPool&			cmdPool);
1521 	bool								verifyDecompression				(const VkCommandPool&			cmdPool,
1522 																		 const std::vector<deUint8>&	refCompressedData,
1523 																		 const de::MovePtr<Image>&		resCompressedImage,
1524 																		 const deUint32					level,
1525 																		 const deUint32					layer,
1526 																		 const UVec3&					mipmapDims);
1527 
1528 	typedef std::vector<deUint8>		RawDataVector;
1529 	typedef SharedPtr<RawDataVector>	RawDataPtr;
1530 	typedef std::vector<RawDataPtr>		LevelData;
1531 	typedef std::vector<LevelData>		FullImageData;
1532 
1533 	FullImageData						m_srcData;
1534 	FullImageData						m_dstData;
1535 
1536 	typedef SharedPtr<Image>			ImagePtr;
1537 	typedef std::vector<ImagePtr>		LevelImages;
1538 	typedef std::vector<LevelImages>	ImagesArray;
1539 
1540 	ImagesArray							m_uncompressedImages;
1541 	MovePtr<Image>						m_compressedImage;
1542 
1543 	VkImageViewUsageCreateInfo			m_imageViewUsageKHR;
1544 	VkImageViewUsageCreateInfo*			m_srcImageViewUsageKHR;
1545 	VkImageViewUsageCreateInfo*			m_dstImageViewUsageKHR;
1546 	std::vector<tcu::UVec3>				m_compressedImageResVec;
1547 	std::vector<tcu::UVec3>				m_uncompressedImageResVec;
1548 	VkFormat							m_srcFormat;
1549 	VkFormat							m_dstFormat;
1550 	VkImageUsageFlags					m_srcImageUsageFlags;
1551 	VkImageUsageFlags					m_dstImageUsageFlags;
1552 	std::vector<tcu::UVec3>				m_srcImageResolutions;
1553 	std::vector<tcu::UVec3>				m_dstImageResolutions;
1554 
1555 	MovePtr<BufferWithMemory>			m_vertexBuffer;
1556 	deUint32							m_vertexCount;
1557 	VkDeviceSize						m_vertexBufferOffset;
1558 };
1559 
1560 GraphicsAttachmentsTestInstance::GraphicsAttachmentsTestInstance (Context& context, const TestParameters& parameters)
1561 	: BasicTranscodingTestInstance(context, parameters)
1562 	, m_srcData()
1563 	, m_dstData()
1564 	, m_uncompressedImages()
1565 	, m_compressedImage()
1566 	, m_imageViewUsageKHR()
1567 	, m_srcImageViewUsageKHR()
1568 	, m_dstImageViewUsageKHR()
1569 	, m_compressedImageResVec()
1570 	, m_uncompressedImageResVec()
1571 	, m_srcFormat()
1572 	, m_dstFormat()
1573 	, m_srcImageUsageFlags()
1574 	, m_dstImageUsageFlags()
1575 	, m_srcImageResolutions()
1576 	, m_dstImageResolutions()
1577 	, m_vertexBuffer()
1578 	, m_vertexCount(0u)
1579 	, m_vertexBufferOffset(0ull)
1580 {
1581 }
1582 
1583 TestStatus GraphicsAttachmentsTestInstance::iterate (void)
1584 {
1585 	const DeviceInterface&				vk						= m_context.getDeviceInterface();
1586 	const VkDevice						device					= m_context.getDevice();
1587 	const deUint32						queueFamilyIndex		= m_context.getUniversalQueueFamilyIndex();
1588 	const Unique<VkCommandPool>			cmdPool					(createCommandPool(vk, device, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT, queueFamilyIndex));
1589 
1590 	prepareData();
1591 	prepareVertexBuffer();
1592 
1593 	for (deUint32 levelNdx = 0; levelNdx < getLevelCount(); ++levelNdx)
1594 		for (deUint32 layerNdx = 0; layerNdx < getLayerCount(); ++layerNdx)
1595 			DE_ASSERT(m_srcData[levelNdx][layerNdx]->size() == m_dstData[levelNdx][layerNdx]->size());
1596 
1597 	if (isWriteToCompressedOperation())
1598 		transcodeWrite(*cmdPool);
1599 	else
1600 		transcodeRead(*cmdPool);
1601 
1602 	bool pass = true;
1603 	for (deUint32 levelNdx = 0; levelNdx < getLevelCount(); ++levelNdx)
1604 		for (deUint32 layerNdx = 0; layerNdx < getLayerCount(); ++layerNdx)
1605 			if (isWriteToCompressedOperation())
1606 			{
1607 				if (!verifyDecompression(*cmdPool, *m_srcData[levelNdx][layerNdx], m_compressedImage, levelNdx, layerNdx, m_compressedImageResVec[levelNdx]))
1608 					pass = false;
1609 			}
1610 			else
1611 			{
1612 				if (!verifyDecompression(*cmdPool, *m_dstData[levelNdx][layerNdx], m_compressedImage, levelNdx, layerNdx, m_compressedImageResVec[levelNdx]))
1613 					pass = false;
1614 			}
1615 
1616 	if (!pass)
1617 		return TestStatus::fail("Image difference detected");
1618 
1619 	if (m_bASTCErrorColourMismatch)
1620 	{
1621 		DE_ASSERT(m_parameters.formatIsASTC);
1622 		return TestStatusASTCQualityWarning();
1623 	}
1624 
1625 	return TestStatus::pass("Pass");
1626 }
1627 
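// Generate per-level, per-layer source data and size the destination storage, with the source and
// destination roles swapped depending on whether the test writes to or reads from the compressed image.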
1628 void GraphicsAttachmentsTestInstance::prepareData ()
1629 {
1630 	VkImageViewUsageCreateInfo*	imageViewUsageKHRNull	= (VkImageViewUsageCreateInfo*)DE_NULL;
1631 
1632 	m_imageViewUsageKHR			= makeImageViewUsageCreateInfo(m_parameters.compressedImageViewUsage);
1633 
1634 	m_srcImageViewUsageKHR		= isWriteToCompressedOperation() ? imageViewUsageKHRNull : &m_imageViewUsageKHR;
1635 	m_dstImageViewUsageKHR		= isWriteToCompressedOperation() ? &m_imageViewUsageKHR : imageViewUsageKHRNull;
1636 
1637 	m_srcFormat					= isWriteToCompressedOperation() ? m_parameters.formatUncompressed : m_parameters.formatCompressed;
1638 	m_dstFormat					= isWriteToCompressedOperation() ? m_parameters.formatCompressed : m_parameters.formatUncompressed;
1639 
1640 	m_srcImageUsageFlags		= isWriteToCompressedOperation() ? m_parameters.uncompressedImageUsage : m_parameters.compressedImageUsage;
1641 	m_dstImageUsageFlags		= isWriteToCompressedOperation() ? m_parameters.compressedImageUsage : m_parameters.uncompressedImageUsage;
1642 
1643 	m_compressedImageResVec		= getMipLevelSizes(getLayerDims());
1644 	m_uncompressedImageResVec	= getCompressedMipLevelSizes(m_parameters.formatCompressed, m_compressedImageResVec);
1645 
1646 	m_srcImageResolutions		= isWriteToCompressedOperation() ? m_uncompressedImageResVec : m_compressedImageResVec;
1647 	m_dstImageResolutions		= isWriteToCompressedOperation() ? m_compressedImageResVec : m_uncompressedImageResVec;
1648 
1649 	m_srcData.resize(getLevelCount());
1650 	m_dstData.resize(getLevelCount());
1651 	m_uncompressedImages.resize(getLevelCount());
1652 
1653 	for (deUint32 levelNdx = 0; levelNdx < getLevelCount(); ++levelNdx)
1654 	{
1655 		m_srcData[levelNdx].resize(getLayerCount());
1656 		m_dstData[levelNdx].resize(getLayerCount());
1657 		m_uncompressedImages[levelNdx].resize(getLayerCount());
1658 
1659 		for (deUint32 layerNdx = 0; layerNdx < getLayerCount(); ++layerNdx)
1660 		{
1661 			m_srcData[levelNdx][layerNdx] = SharedPtr<RawDataVector>(new RawDataVector);
1662 			m_dstData[levelNdx][layerNdx] = SharedPtr<RawDataVector>(new RawDataVector);
1663 
1664 			if (isWriteToCompressedOperation())
1665 			{
1666 				getUncompressedImageData(m_srcFormat, m_srcImageResolutions[levelNdx], *m_srcData[levelNdx][layerNdx], layerNdx, levelNdx);
1667 
1668 				m_dstData[levelNdx][layerNdx]->resize((size_t)getCompressedImageSizeInBytes(m_dstFormat, m_dstImageResolutions[levelNdx]));
1669 			}
1670 			else
1671 			{
1672 				getCompressedImageData(m_srcFormat, m_srcImageResolutions[levelNdx], *m_srcData[levelNdx][layerNdx], layerNdx, levelNdx);
1673 
1674 				m_dstData[levelNdx][layerNdx]->resize((size_t)getUncompressedImageSizeInBytes(m_dstFormat, m_dstImageResolutions[levelNdx]));
1675 			}
1676 
1677 			DE_ASSERT(m_srcData[levelNdx][layerNdx]->size() == m_dstData[levelNdx][layerNdx]->size());
1678 		}
1679 	}
1680 }
1681 
1682 void GraphicsAttachmentsTestInstance::prepareVertexBuffer ()
1683 {
1684 	const DeviceInterface&			vk						= m_context.getDeviceInterface();
1685 	const VkDevice					device					= m_context.getDevice();
1686 	Allocator&						allocator				= m_context.getDefaultAllocator();
1687 
1688 	const std::vector<tcu::Vec4>	vertexArray				= createFullscreenQuad();
1689 	const size_t					vertexBufferSizeInBytes	= vertexArray.size() * sizeof(vertexArray[0]);
1690 
1691 	m_vertexCount	= static_cast<deUint32>(vertexArray.size());
1692 	m_vertexBuffer	= MovePtr<BufferWithMemory>(new BufferWithMemory(vk, device, allocator, makeBufferCreateInfo(vertexBufferSizeInBytes, VK_BUFFER_USAGE_VERTEX_BUFFER_BIT), MemoryRequirement::HostVisible));
1693 
1694 	// Upload vertex data
1695 	const Allocation&	vertexBufferAlloc	= m_vertexBuffer->getAllocation();
1696 	deMemcpy(vertexBufferAlloc.getHostPtr(), &vertexArray[0], vertexBufferSizeInBytes);
1697 	flushAlloc(vk, device, vertexBufferAlloc);
1698 }
1699 
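// Attachment-read path: upload the compressed data, view it through the uncompressed format and read
// it as an input attachment; the rendered output is copied back into m_dstData per level and layer.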
1700 void GraphicsAttachmentsTestInstance::transcodeRead (const VkCommandPool&				cmdPool)
1701 {
1702 	const DeviceInterface&				vk						= m_context.getDeviceInterface();
1703 	const VkDevice						device					= m_context.getDevice();
1704 	const VkQueue						queue					= m_context.getUniversalQueue();
1705 	Allocator&							allocator				= m_context.getDefaultAllocator();
1706 
1707 	const VkImageCreateFlags*			imgCreateFlagsOverride	= DE_NULL;
1708 
1709 	const VkImageCreateInfo				srcImageCreateInfo		= makeCreateImageInfo(m_srcFormat, m_parameters.imageType, m_srcImageResolutions[0], m_srcImageUsageFlags, imgCreateFlagsOverride, getLevelCount(), getLayerCount());
1710 	MovePtr<Image>						srcImage				(new Image(vk, device, allocator, srcImageCreateInfo, MemoryRequirement::Any));
1711 
1712 	const Unique<VkShaderModule>		vertShaderModule		(createShaderModule(vk, device, m_context.getBinaryCollection().get("vert"), 0));
1713 	const Unique<VkShaderModule>		fragShaderModule		(createShaderModule(vk, device, m_context.getBinaryCollection().get("frag"), 0));
1714 
1715 	const Unique<VkRenderPass>			renderPass				(vkt::image::makeRenderPass(vk, device, m_parameters.formatUncompressed, m_parameters.formatUncompressed));
1716 
1717 	const Move<VkDescriptorSetLayout>	descriptorSetLayout		(DescriptorSetLayoutBuilder()
1718 																	.addSingleBinding(VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, VK_SHADER_STAGE_FRAGMENT_BIT)
1719 																	.build(vk, device));
1720 	const Move<VkDescriptorPool>		descriptorPool			(DescriptorPoolBuilder()
1721 																	.addType(VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT)
1722 																	.build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u));
1723 	const Move<VkDescriptorSet>			descriptorSet			(makeDescriptorSet(vk, device, *descriptorPool, *descriptorSetLayout));
1724 
1725 	const VkExtent2D					renderSizeUnused		(makeExtent2D(1u, 1u));
1726 	const Unique<VkPipelineLayout>		pipelineLayout			(makePipelineLayout(vk, device, *descriptorSetLayout));
1727 	const Unique<VkPipeline>			pipeline				(makeGraphicsPipeline(vk, device, *pipelineLayout, *renderPass, *vertShaderModule, *fragShaderModule, renderSizeUnused, 1u, true));
1728 
1729 	const Unique<VkCommandBuffer>		cmdBuffer				(allocateCommandBuffer(vk, device, cmdPool, VK_COMMAND_BUFFER_LEVEL_PRIMARY));
1730 
1731 	for (deUint32 levelNdx = 0; levelNdx < getLevelCount(); ++levelNdx)
1732 	{
1733 		const UVec3&				uncompressedImageRes	= m_uncompressedImageResVec[levelNdx];
1734 		const UVec3&				srcImageResolution		= m_srcImageResolutions[levelNdx];
1735 		const UVec3&				dstImageResolution		= m_dstImageResolutions[levelNdx];
1736 		const size_t				srcImageSizeInBytes		= m_srcData[levelNdx][0]->size();
1737 		const size_t				dstImageSizeInBytes		= m_dstData[levelNdx][0]->size();
1738 		const UVec3					srcImageResBlocked		= getCompressedImageResolutionBlockCeil(m_parameters.formatCompressed, srcImageResolution);
1739 
1740 		const VkImageCreateInfo		dstImageCreateInfo		= makeCreateImageInfo(m_dstFormat, m_parameters.imageType, dstImageResolution, m_dstImageUsageFlags, imgCreateFlagsOverride, SINGLE_LEVEL, SINGLE_LAYER);
1741 
1742 		const VkBufferCreateInfo	srcImageBufferInfo		= makeBufferCreateInfo(srcImageSizeInBytes, VK_BUFFER_USAGE_TRANSFER_SRC_BIT);
1743 		const MovePtr<BufferWithMemory>	srcImageBuffer		= MovePtr<BufferWithMemory>(new BufferWithMemory(vk, device, allocator, srcImageBufferInfo, MemoryRequirement::HostVisible));
1744 
1745 		const VkBufferCreateInfo	dstImageBufferInfo		= makeBufferCreateInfo(dstImageSizeInBytes, VK_BUFFER_USAGE_TRANSFER_DST_BIT);
1746 		MovePtr<BufferWithMemory>	dstImageBuffer			= MovePtr<BufferWithMemory>(new BufferWithMemory(vk, device, allocator, dstImageBufferInfo, MemoryRequirement::HostVisible));
1747 
1748 		const VkExtent2D			renderSize				(makeExtent2D(uncompressedImageRes.x(), uncompressedImageRes.y()));
1749 		const VkViewport			viewport				= makeViewport(renderSize);
1750 		const VkRect2D				scissor					= makeRect2D(renderSize);
1751 
1752 		for (deUint32 layerNdx = 0; layerNdx < getLayerCount(); ++layerNdx)
1753 		{
1754 			const VkImageSubresourceRange	srcSubresourceRange		= makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, levelNdx, SINGLE_LEVEL, layerNdx, SINGLE_LAYER);
1755 			const VkImageSubresourceRange	dstSubresourceRange		= makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, SINGLE_LEVEL, 0u, SINGLE_LAYER);
1756 
1757 			Move<VkImageView>				srcImageView			(makeImageView(vk, device, srcImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatUncompressed, srcSubresourceRange, m_srcImageViewUsageKHR));
1758 
1759 			de::MovePtr<Image>				dstImage				(new Image(vk, device, allocator, dstImageCreateInfo, MemoryRequirement::Any));
1760 			Move<VkImageView>				dstImageView			(makeImageView(vk, device, dstImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatUncompressed, dstSubresourceRange, m_dstImageViewUsageKHR));
1761 
1762 			const VkBufferImageCopy			srcCopyRegion			= makeBufferImageCopy(srcImageResolution.x(), srcImageResolution.y(), levelNdx, layerNdx, srcImageResBlocked.x(), srcImageResBlocked.y());
1763 			const VkBufferMemoryBarrier		srcCopyBufferBarrierPre	= makeBufferMemoryBarrier(VK_ACCESS_HOST_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT, srcImageBuffer->get(), 0ull, srcImageSizeInBytes);
1764 			const VkImageMemoryBarrier		srcCopyImageBarrierPre	= makeImageMemoryBarrier(0u, VK_ACCESS_TRANSFER_WRITE_BIT, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, srcImage->get(), srcSubresourceRange);
1765 			const VkImageMemoryBarrier		srcCopyImageBarrierPost	= makeImageMemoryBarrier(VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_SHADER_READ_BIT, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL, srcImage->get(), srcSubresourceRange);
1766 			const VkBufferImageCopy			dstCopyRegion			= makeBufferImageCopy(dstImageResolution.x(), dstImageResolution.y());
1767 			const VkImageMemoryBarrier		dstInitImageBarrier		= makeImageMemoryBarrier(0u, VK_ACCESS_SHADER_READ_BIT, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, dstImage->get(), dstSubresourceRange);
1768 
1769 			const VkImageView				attachmentBindInfos[]	= { *srcImageView, *dstImageView };
1770 			const VkExtent2D				framebufferSize			(makeExtent2D(dstImageResolution[0], dstImageResolution[1]));
1771 			const Move<VkFramebuffer>		framebuffer				(makeFramebuffer(vk, device, *renderPass, DE_LENGTH_OF_ARRAY(attachmentBindInfos), attachmentBindInfos, framebufferSize.width, framebufferSize.height, SINGLE_LAYER));
1772 
1773 			// Upload source image data
1774 			const Allocation& alloc = srcImageBuffer->getAllocation();
1775 			deMemcpy(alloc.getHostPtr(), &m_srcData[levelNdx][layerNdx]->at(0), srcImageSizeInBytes);
1776 			flushAlloc(vk, device, alloc);
1777 
1778 			beginCommandBuffer(vk, *cmdBuffer);
1779 			vk.cmdBindPipeline(*cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *pipeline);
1780 
1781 			// Copy buffer to image
1782 			vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1u, &srcCopyBufferBarrierPre, 1u, &srcCopyImageBarrierPre);
1783 			vk.cmdCopyBufferToImage(*cmdBuffer, srcImageBuffer->get(), srcImage->get(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &srcCopyRegion);
1784 			vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, DE_NULL, 1u, &srcCopyImageBarrierPost);
1785 
1786 			// Define destination image layout
1787 			vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, DE_NULL, 1u, &dstInitImageBarrier);
1788 
1789 			beginRenderPass(vk, *cmdBuffer, *renderPass, *framebuffer, renderSize);
1790 
1791 			const VkDescriptorImageInfo	descriptorSrcImageInfo(makeDescriptorImageInfo(DE_NULL, *srcImageView, VK_IMAGE_LAYOUT_GENERAL));
1792 			DescriptorSetUpdateBuilder()
1793 				.writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u), VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, &descriptorSrcImageInfo)
1794 				.update(vk, device);
1795 
1796 			vk.cmdBindDescriptorSets(*cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *pipelineLayout, 0u, 1u, &descriptorSet.get(), 0u, DE_NULL);
1797 			vk.cmdBindVertexBuffers(*cmdBuffer, 0u, 1u, &m_vertexBuffer->get(), &m_vertexBufferOffset);
1798 
1799 			vk.cmdSetViewport(*cmdBuffer, 0u, 1u, &viewport);
1800 			vk.cmdSetScissor(*cmdBuffer, 0u, 1u, &scissor);
1801 
1802 			vk.cmdDraw(*cmdBuffer, (deUint32)m_vertexCount, 1, 0, 0);
1803 
1804 			endRenderPass(vk, *cmdBuffer);
1805 
1806 			const VkImageMemoryBarrier prepareForTransferBarrier = makeImageMemoryBarrier(
1807 				VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
1808 				VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_GENERAL,
1809 				dstImage->get(), dstSubresourceRange);
1810 
1811 			const VkBufferMemoryBarrier copyBarrier = makeBufferMemoryBarrier(
1812 				VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_HOST_READ_BIT,
1813 				dstImageBuffer->get(), 0ull, dstImageSizeInBytes);
1814 
1815 			vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0, (const VkBufferMemoryBarrier*)DE_NULL, 1, &prepareForTransferBarrier);
1816 			vk.cmdCopyImageToBuffer(*cmdBuffer, dstImage->get(), VK_IMAGE_LAYOUT_GENERAL, dstImageBuffer->get(), 1u, &dstCopyRegion);
1817 			vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_HOST_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1, &copyBarrier, 0, (const VkImageMemoryBarrier*)DE_NULL);
1818 
1819 			endCommandBuffer(vk, *cmdBuffer);
1820 
1821 			submitCommandsAndWait(vk, device, queue, *cmdBuffer);
1822 			m_context.resetCommandPoolForVKSC(device, cmdPool);
1823 
1824 			const Allocation& dstImageBufferAlloc = dstImageBuffer->getAllocation();
1825 			invalidateAlloc(vk, device, dstImageBufferAlloc);
1826 			deMemcpy(&m_dstData[levelNdx][layerNdx]->at(0), dstImageBufferAlloc.getHostPtr(), dstImageSizeInBytes);
1827 		}
1828 	}
1829 
1830 	m_compressedImage = srcImage;
1831 }
1832 
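// Attachment-write path: render the uncompressed source into an uncompressed-format view of the
// compressed destination image, one level and layer at a time, then read the raw blocks back.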
1833 void GraphicsAttachmentsTestInstance::transcodeWrite (const VkCommandPool&				cmdPool)
1834 {
1835 	const DeviceInterface&				vk						= m_context.getDeviceInterface();
1836 	const VkDevice						device					= m_context.getDevice();
1837 	const VkQueue						queue					= m_context.getUniversalQueue();
1838 	Allocator&							allocator				= m_context.getDefaultAllocator();
1839 
1840 	const VkImageCreateFlags*			imgCreateFlagsOverride	= DE_NULL;
1841 
1842 	const VkImageCreateInfo				dstImageCreateInfo		= makeCreateImageInfo(m_dstFormat, m_parameters.imageType, m_dstImageResolutions[0], m_dstImageUsageFlags, imgCreateFlagsOverride, getLevelCount(), getLayerCount());
1843 	MovePtr<Image>						dstImage				(new Image(vk, device, allocator, dstImageCreateInfo, MemoryRequirement::Any));
1844 
1845 	const Unique<VkShaderModule>		vertShaderModule		(createShaderModule(vk, device, m_context.getBinaryCollection().get("vert"), 0));
1846 	const Unique<VkShaderModule>		fragShaderModule		(createShaderModule(vk, device, m_context.getBinaryCollection().get("frag"), 0));
1847 
1848 	const Unique<VkRenderPass>			renderPass				(vkt::image::makeRenderPass(vk, device, m_parameters.formatUncompressed, m_parameters.formatUncompressed));
1849 
1850 	const Move<VkDescriptorSetLayout>	descriptorSetLayout		(DescriptorSetLayoutBuilder()
1851 																	.addSingleBinding(VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, VK_SHADER_STAGE_FRAGMENT_BIT)
1852 																	.build(vk, device));
1853 	const Move<VkDescriptorPool>		descriptorPool			(DescriptorPoolBuilder()
1854 																	.addType(VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT)
1855 																	.build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u));
1856 	const Move<VkDescriptorSet>			descriptorSet			(makeDescriptorSet(vk, device, *descriptorPool, *descriptorSetLayout));
1857 
1858 	const VkExtent2D					renderSizeUnused		(makeExtent2D(1u, 1u));
1859 	const Unique<VkPipelineLayout>		pipelineLayout			(makePipelineLayout(vk, device, *descriptorSetLayout));
1860 	const Unique<VkPipeline>			pipeline				(makeGraphicsPipeline(vk, device, *pipelineLayout, *renderPass, *vertShaderModule, *fragShaderModule, renderSizeUnused, 1u, true));
1861 
1862 	const Unique<VkCommandBuffer>		cmdBuffer				(allocateCommandBuffer(vk, device, cmdPool, VK_COMMAND_BUFFER_LEVEL_PRIMARY));
1863 
1864 	for (deUint32 levelNdx = 0; levelNdx < getLevelCount(); ++levelNdx)
1865 	{
1866 		const UVec3&				uncompressedImageRes	= m_uncompressedImageResVec[levelNdx];
1867 		const UVec3&				srcImageResolution		= m_srcImageResolutions[levelNdx];
1868 		const UVec3&				dstImageResolution		= m_dstImageResolutions[levelNdx];
1869 		const UVec3					dstImageResBlocked		= getCompressedImageResolutionBlockCeil(m_parameters.formatCompressed, dstImageResolution);
1870 		const size_t				srcImageSizeInBytes		= m_srcData[levelNdx][0]->size();
1871 		const size_t				dstImageSizeInBytes		= m_dstData[levelNdx][0]->size();
1872 
1873 		const VkImageCreateInfo		srcImageCreateInfo		= makeCreateImageInfo(m_srcFormat, m_parameters.imageType, srcImageResolution, m_srcImageUsageFlags, imgCreateFlagsOverride, SINGLE_LEVEL, SINGLE_LAYER);
1874 
1875 		const VkExtent2D			renderSize				(makeExtent2D(uncompressedImageRes.x(), uncompressedImageRes.y()));
1876 		const VkViewport			viewport				= makeViewport(renderSize);
1877 		const VkRect2D				scissor					= makeRect2D(renderSize);
1878 
1879 		for (deUint32 layerNdx = 0; layerNdx < getLayerCount(); ++layerNdx)
1880 		{
1881 			const VkBufferCreateInfo		srcImageBufferInfo		= makeBufferCreateInfo(srcImageSizeInBytes, VK_BUFFER_USAGE_TRANSFER_SRC_BIT);
1882 			const MovePtr<BufferWithMemory>	srcImageBuffer			= MovePtr<BufferWithMemory>(new BufferWithMemory(vk, device, allocator, srcImageBufferInfo, MemoryRequirement::HostVisible));
1883 
1884 			const VkBufferCreateInfo		dstImageBufferInfo		= makeBufferCreateInfo(dstImageSizeInBytes, VK_BUFFER_USAGE_TRANSFER_DST_BIT);
1885 			MovePtr<BufferWithMemory>		dstImageBuffer			= MovePtr<BufferWithMemory>(new BufferWithMemory(vk, device, allocator, dstImageBufferInfo, MemoryRequirement::HostVisible));
1886 
1887 			const VkImageSubresourceRange	srcSubresourceRange		= makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, SINGLE_LEVEL, 0u, SINGLE_LAYER);
1888 			const VkImageSubresourceRange	dstSubresourceRange		= makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, levelNdx, SINGLE_LEVEL, layerNdx, SINGLE_LAYER);
1889 
1890 			Move<VkImageView>				dstImageView			(makeImageView(vk, device, dstImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatUncompressed, dstSubresourceRange, m_dstImageViewUsageKHR));
1891 
1892 			de::MovePtr<Image>				srcImage				(new Image(vk, device, allocator, srcImageCreateInfo, MemoryRequirement::Any));
1893 			Move<VkImageView>				srcImageView			(makeImageView(vk, device, srcImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatUncompressed, srcSubresourceRange, m_srcImageViewUsageKHR));
1894 
1895 			const VkBufferImageCopy			srcCopyRegion			= makeBufferImageCopy(srcImageResolution.x(), srcImageResolution.y(), 0u, 0u);
1896 			const VkBufferMemoryBarrier		srcCopyBufferBarrierPre	= makeBufferMemoryBarrier(VK_ACCESS_HOST_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT, srcImageBuffer->get(), 0ull, srcImageSizeInBytes);
1897 			const VkImageMemoryBarrier		srcCopyImageBarrierPre	= makeImageMemoryBarrier(0u, VK_ACCESS_TRANSFER_WRITE_BIT, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, srcImage->get(), srcSubresourceRange);
1898 			const VkImageMemoryBarrier		srcCopyImageBarrierPost	= makeImageMemoryBarrier(VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_INPUT_ATTACHMENT_READ_BIT, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL, srcImage->get(), srcSubresourceRange);
1899 			const VkBufferImageCopy			dstCopyRegion			= makeBufferImageCopy(dstImageResolution.x(), dstImageResolution.y(), levelNdx, layerNdx, dstImageResBlocked.x(), dstImageResBlocked.y());
1900 			const VkImageMemoryBarrier		dstInitImageBarrier		= makeImageMemoryBarrier(0u, VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL, dstImage->get(), dstSubresourceRange);
1901 
1902 			const VkImageView				attachmentBindInfos[]	= { *srcImageView, *dstImageView };
1903 			const VkExtent2D				framebufferSize			(renderSize);
1904 			const Move<VkFramebuffer>		framebuffer				(makeFramebuffer(vk, device, *renderPass, DE_LENGTH_OF_ARRAY(attachmentBindInfos), attachmentBindInfos, framebufferSize.width, framebufferSize.height, SINGLE_LAYER));
1905 
1906 			// Upload source image data
1907 			const Allocation& alloc = srcImageBuffer->getAllocation();
1908 			deMemcpy(alloc.getHostPtr(), &m_srcData[levelNdx][layerNdx]->at(0), srcImageSizeInBytes);
1909 			flushAlloc(vk, device, alloc);
1910 
1911 			beginCommandBuffer(vk, *cmdBuffer);
1912 			vk.cmdBindPipeline(*cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *pipeline);
1913 
1914 			// Copy buffer to image
1915 			vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1u, &srcCopyBufferBarrierPre, 1u, &srcCopyImageBarrierPre);
1916 			vk.cmdCopyBufferToImage(*cmdBuffer, srcImageBuffer->get(), srcImage->get(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &srcCopyRegion);
1917 			vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, DE_NULL, 1u, &srcCopyImageBarrierPost);
1918 
1919 			// Define destination image layout
1920 			vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, DE_NULL, 1u, &dstInitImageBarrier);
1921 
1922 			beginRenderPass(vk, *cmdBuffer, *renderPass, *framebuffer, renderSize);
1923 
1924 			const VkDescriptorImageInfo	descriptorSrcImageInfo(makeDescriptorImageInfo(DE_NULL, *srcImageView, VK_IMAGE_LAYOUT_GENERAL));
1925 			DescriptorSetUpdateBuilder()
1926 				.writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u), VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, &descriptorSrcImageInfo)
1927 				.update(vk, device);
1928 
1929 			vk.cmdBindDescriptorSets(*cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *pipelineLayout, 0u, 1u, &descriptorSet.get(), 0u, DE_NULL);
1930 			vk.cmdBindVertexBuffers(*cmdBuffer, 0u, 1u, &m_vertexBuffer->get(), &m_vertexBufferOffset);
1931 
1932 			vk.cmdSetViewport(*cmdBuffer, 0u, 1u, &viewport);
1933 			vk.cmdSetScissor(*cmdBuffer, 0u, 1u, &scissor);
1934 
1935 			vk.cmdDraw(*cmdBuffer, (deUint32)m_vertexCount, 1, 0, 0);
1936 
1937 			endRenderPass(vk, *cmdBuffer);
1938 
1939 			const VkImageMemoryBarrier prepareForTransferBarrier = makeImageMemoryBarrier(
1940 				VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
1941 				VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_GENERAL,
1942 				dstImage->get(), dstSubresourceRange);
1943 
1944 			const VkBufferMemoryBarrier copyBarrier = makeBufferMemoryBarrier(
1945 				VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_HOST_READ_BIT,
1946 				dstImageBuffer->get(), 0ull, dstImageSizeInBytes);
1947 
1948 			vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0, (const VkBufferMemoryBarrier*)DE_NULL, 1, &prepareForTransferBarrier);
1949 			vk.cmdCopyImageToBuffer(*cmdBuffer, dstImage->get(), VK_IMAGE_LAYOUT_GENERAL, dstImageBuffer->get(), 1u, &dstCopyRegion);
1950 			vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_HOST_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1, &copyBarrier, 0, (const VkImageMemoryBarrier*)DE_NULL);
1951 
1952 			endCommandBuffer(vk, *cmdBuffer);
1953 
1954 			submitCommandsAndWait(vk, device, queue, *cmdBuffer);
1955 			m_context.resetCommandPoolForVKSC(device, cmdPool);
1956 
1957 			const Allocation& dstImageBufferAlloc = dstImageBuffer->getAllocation();
1958 			invalidateAlloc(vk, device, dstImageBufferAlloc);
1959 			deMemcpy(&m_dstData[levelNdx][layerNdx]->at(0), dstImageBufferAlloc.getHostPtr(), dstImageSizeInBytes);
1960 		}
1961 	}
1962 
1963 	m_compressedImage = dstImage;
1964 }
1965 
1966 bool GraphicsAttachmentsTestInstance::isWriteToCompressedOperation ()
1967 {
1968 	return (m_parameters.operation == OPERATION_ATTACHMENT_WRITE);
1969 }
1970 
1971 VkImageCreateInfo GraphicsAttachmentsTestInstance::makeCreateImageInfo (const VkFormat				format,
1972 																	    const ImageType				type,
1973 																	    const UVec3&				size,
1974 																	    const VkImageUsageFlags		usageFlags,
1975 																	    const VkImageCreateFlags*	createFlags,
1976 																	    const deUint32				levels,
1977 																	    const deUint32				layers)
1978 {
1979 	const VkImageType			imageType				= mapImageType(type);
1980 	const VkImageCreateFlags	imageCreateFlagsBase	= VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT;
1981 	const VkImageCreateFlags	imageCreateFlagsAddOn	= isCompressedFormat(format) ? VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT | VK_IMAGE_CREATE_EXTENDED_USAGE_BIT : 0;
1982 	const VkImageCreateFlags	imageCreateFlags		= (createFlags != DE_NULL) ? *createFlags : (imageCreateFlagsBase | imageCreateFlagsAddOn);
1983 
1984 	VkFormatProperties properties;
1985 	m_context.getInstanceInterface().getPhysicalDeviceFormatProperties(m_context.getPhysicalDevice(), format, &properties);
1986 	if ((usageFlags & VK_IMAGE_USAGE_STORAGE_BIT) && !(properties.optimalTilingFeatures & VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT))
1987 		TCU_THROW(NotSupportedError, "Format storage feature not supported");
1988 	if ((usageFlags & VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT) && !(properties.optimalTilingFeatures & VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT))
1989 		TCU_THROW(NotSupportedError, "Format color attachment feature not supported");
1990 	if ((usageFlags & VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT) && !(properties.optimalTilingFeatures & VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT) &&
1991 		!(properties.optimalTilingFeatures & VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT))
1992 		TCU_THROW(NotSupportedError, "Format color/depth/stencil attachment feature not supported for input attachment usage");
1993 
1994 	const VkImageCreateInfo createImageInfo =
1995 	{
1996 		VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,			// VkStructureType			sType;
1997 		DE_NULL,										// const void*				pNext;
1998 		imageCreateFlags,								// VkImageCreateFlags		flags;
1999 		imageType,										// VkImageType				imageType;
2000 		format,											// VkFormat					format;
2001 		makeExtent3D(getLayerSize(type, size)),			// VkExtent3D				extent;
2002 		levels,											// deUint32					mipLevels;
2003 		layers,											// deUint32					arrayLayers;
2004 		VK_SAMPLE_COUNT_1_BIT,							// VkSampleCountFlagBits	samples;
2005 		VK_IMAGE_TILING_OPTIMAL,						// VkImageTiling			tiling;
2006 		usageFlags,										// VkImageUsageFlags		usage;
2007 		VK_SHARING_MODE_EXCLUSIVE,						// VkSharingMode			sharingMode;
2008 		0u,												// deUint32					queueFamilyIndexCount;
2009 		DE_NULL,										// const deUint32*			pQueueFamilyIndices;
2010 		VK_IMAGE_LAYOUT_UNDEFINED,						// VkImageLayout			initialLayout;
2011 	};
2012 
2013 	return createImageInfo;
2014 }
2015 
2016 VkDeviceSize GraphicsAttachmentsTestInstance::getCompressedImageData (const VkFormat			format,
2017 																	  const UVec3&				size,
2018 																	  std::vector<deUint8>&		data,
2019 																	  const deUint32			layer,
2020 																	  const deUint32			level)
2021 {
2022 	VkDeviceSize	sizeBytes	= getCompressedImageSizeInBytes(format, size);
2023 
2024 	data.resize((size_t)sizeBytes);
2025 	generateData(&data[0], data.size(), format, layer, level);
2026 
2027 	return sizeBytes;
2028 }
2029 
2030 VkDeviceSize GraphicsAttachmentsTestInstance::getUncompressedImageData (const VkFormat			format,
2031 																		const UVec3&			size,
2032 																		std::vector<deUint8>&	data,
2033 																		const deUint32			layer,
2034 																		const deUint32			level)
2035 {
2036 	tcu::IVec3				sizeAsIVec3	= tcu::IVec3(static_cast<int>(size[0]), static_cast<int>(size[1]), static_cast<int>(size[2]));
2037 	VkDeviceSize			sizeBytes	= getImageSizeBytes(sizeAsIVec3, format);
2038 
2039 	data.resize((size_t)sizeBytes);
2040 	generateData(&data[0], data.size(), format, layer, level);
2041 
2042 	return sizeBytes;
2043 }
2044 
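// Decompression check: upload the reference compressed data into a fresh compressed image, sample both
// it and the tested level/layer of the transcoded image through the verification format, and compare
// the decoded outputs on the host.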
2045 bool GraphicsAttachmentsTestInstance::verifyDecompression (const VkCommandPool&			cmdPool,
2046 														   const std::vector<deUint8>&	refCompressedData,
2047 														   const de::MovePtr<Image>&	resCompressedImage,
2048 														   const deUint32				level,
2049 														   const deUint32				layer,
2050 														   const UVec3&					mipmapDims)
2051 {
2052 	const DeviceInterface&				vk							= m_context.getDeviceInterface();
2053 	const VkDevice						device						= m_context.getDevice();
2054 	const VkQueue						queue						= m_context.getUniversalQueue();
2055 	Allocator&							allocator					= m_context.getDefaultAllocator();
2056 
2057 	const bool							layoutShaderReadOnly		= (layer % 2u) == 1;
2058 	const UVec3							mipmapDimsBlocked			= getCompressedImageResolutionBlockCeil(m_parameters.formatCompressed, mipmapDims);
2059 
2060 	const VkImageSubresourceRange		subresourceRange			= makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, SINGLE_LEVEL, 0u, SINGLE_LAYER);
2061 	const VkImageSubresourceRange		resSubresourceRange			= makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, level, SINGLE_LEVEL, layer, SINGLE_LAYER);
2062 
2063 	const VkDeviceSize					dstBufferSize				= getUncompressedImageSizeInBytes(m_parameters.formatForVerify, mipmapDims);
2064 	const VkImageUsageFlags				refSrcImageUsageFlags		= VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
2065 
2066 	const VkBufferCreateInfo			refSrcImageBufferInfo		(makeBufferCreateInfo(refCompressedData.size(), VK_BUFFER_USAGE_TRANSFER_SRC_BIT));
2067 	const MovePtr<BufferWithMemory>		refSrcImageBuffer			= MovePtr<BufferWithMemory>(new BufferWithMemory(vk, device, allocator, refSrcImageBufferInfo, MemoryRequirement::HostVisible));
2068 
2069 	const VkImageCreateFlags			refSrcImageCreateFlags		= 0;
2070 	const VkImageCreateInfo				refSrcImageCreateInfo		= makeCreateImageInfo(m_parameters.formatCompressed, m_parameters.imageType, mipmapDimsBlocked, refSrcImageUsageFlags, &refSrcImageCreateFlags, SINGLE_LEVEL, SINGLE_LAYER);
2071 	const MovePtr<Image>				refSrcImage					(new Image(vk, device, allocator, refSrcImageCreateInfo, MemoryRequirement::Any));
2072 	Move<VkImageView>					refSrcImageView				(makeImageView(vk, device, refSrcImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatCompressed, subresourceRange));
2073 
2074 	const VkImageUsageFlags				resSrcImageUsageFlags		= VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
2075 	const VkImageViewUsageCreateInfo	resSrcImageViewUsageKHR		= makeImageViewUsageCreateInfo(resSrcImageUsageFlags);
2076 	Move<VkImageView>					resSrcImageView				(makeImageView(vk, device, resCompressedImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatCompressed, resSubresourceRange, &resSrcImageViewUsageKHR));
2077 
2078 	const VkImageCreateFlags			refDstImageCreateFlags		= 0;
2079 	const VkImageUsageFlags				refDstImageUsageFlags		= VK_IMAGE_USAGE_STORAGE_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
2080 	const VkImageCreateInfo				refDstImageCreateInfo		= makeCreateImageInfo(m_parameters.formatForVerify, m_parameters.imageType, mipmapDims, refDstImageUsageFlags, &refDstImageCreateFlags, SINGLE_LEVEL, SINGLE_LAYER);
2081 	const MovePtr<Image>				refDstImage					(new Image(vk, device, allocator, refDstImageCreateInfo, MemoryRequirement::Any));
2082 	const Move<VkImageView>				refDstImageView				(makeImageView(vk, device, refDstImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatForVerify, subresourceRange));
2083 	const VkImageMemoryBarrier			refDstInitImageBarrier		= makeImageMemoryBarrier(0u, VK_ACCESS_SHADER_WRITE_BIT, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL, refDstImage->get(), subresourceRange);
2084 	const VkBufferCreateInfo			refDstBufferInfo			(makeBufferCreateInfo(dstBufferSize, VK_BUFFER_USAGE_TRANSFER_DST_BIT));
2085 	const MovePtr<BufferWithMemory>		refDstBuffer				= MovePtr<BufferWithMemory>(new BufferWithMemory(vk, device, allocator, refDstBufferInfo, MemoryRequirement::HostVisible));
2086 
2087 	const VkImageCreateFlags			resDstImageCreateFlags		= 0;
2088 	const VkImageUsageFlags				resDstImageUsageFlags		= VK_IMAGE_USAGE_STORAGE_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
2089 	const VkImageCreateInfo				resDstImageCreateInfo		= makeCreateImageInfo(m_parameters.formatForVerify, m_parameters.imageType, mipmapDims, resDstImageUsageFlags, &resDstImageCreateFlags, SINGLE_LEVEL, SINGLE_LAYER);
2090 	const MovePtr<Image>				resDstImage					(new Image(vk, device, allocator, resDstImageCreateInfo, MemoryRequirement::Any));
2091 	const Move<VkImageView>				resDstImageView				(makeImageView(vk, device, resDstImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatForVerify, subresourceRange));
2092 	const VkImageMemoryBarrier			resDstInitImageBarrier		= makeImageMemoryBarrier(0u, VK_ACCESS_SHADER_WRITE_BIT, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL, resDstImage->get(), subresourceRange);
2093 	const VkBufferCreateInfo			resDstBufferInfo			(makeBufferCreateInfo(dstBufferSize, VK_BUFFER_USAGE_TRANSFER_DST_BIT));
2094 	const MovePtr<BufferWithMemory>		resDstBuffer				= MovePtr<BufferWithMemory>(new BufferWithMemory(vk, device, allocator, resDstBufferInfo, MemoryRequirement::HostVisible));
2095 
2096 	const Unique<VkShaderModule>		vertShaderModule			(createShaderModule(vk, device, m_context.getBinaryCollection().get("vert"), 0));
2097 	const Unique<VkShaderModule>		fragShaderModule			(createShaderModule(vk, device, m_context.getBinaryCollection().get("frag_verify"), 0));
2098 
2099 	const Unique<VkRenderPass>			renderPass					(vk::makeRenderPass(vk, device));
2100 
2101 	const Move<VkDescriptorSetLayout>	descriptorSetLayout			(DescriptorSetLayoutBuilder()
2102 																		.addSingleBinding(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, VK_SHADER_STAGE_FRAGMENT_BIT)
2103 																		.addSingleBinding(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, VK_SHADER_STAGE_FRAGMENT_BIT)
2104 																		.addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, VK_SHADER_STAGE_FRAGMENT_BIT)
2105 																		.addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, VK_SHADER_STAGE_FRAGMENT_BIT)
2106 																		.build(vk, device));
2107 	const Move<VkDescriptorPool>		descriptorPool				(DescriptorPoolBuilder()
2108 																		.addType(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER)
2109 																		.addType(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER)
2110 																		.addType(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE)
2111 																		.addType(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE)
2112 																		.build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u));
2113 	const Move<VkDescriptorSet>			descriptorSet				(makeDescriptorSet(vk, device, *descriptorPool, *descriptorSetLayout));
2114 	const VkSamplerCreateInfo			refSrcSamplerInfo			(makeSamplerCreateInfo());
2115 	const Move<VkSampler>				refSrcSampler				= vk::createSampler(vk, device, &refSrcSamplerInfo);
2116 	const VkSamplerCreateInfo			resSrcSamplerInfo			(makeSamplerCreateInfo());
2117 	const Move<VkSampler>				resSrcSampler				= vk::createSampler(vk, device, &resSrcSamplerInfo);
2118 	const VkDescriptorImageInfo			descriptorRefSrcImage		(makeDescriptorImageInfo(*refSrcSampler, *refSrcImageView, layoutShaderReadOnly ? VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL : VK_IMAGE_LAYOUT_GENERAL));
2119 	const VkDescriptorImageInfo			descriptorResSrcImage		(makeDescriptorImageInfo(*resSrcSampler, *resSrcImageView, layoutShaderReadOnly ? VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL : VK_IMAGE_LAYOUT_GENERAL));
2120 	const VkDescriptorImageInfo			descriptorRefDstImage		(makeDescriptorImageInfo(DE_NULL, *refDstImageView, VK_IMAGE_LAYOUT_GENERAL));
2121 	const VkDescriptorImageInfo			descriptorResDstImage		(makeDescriptorImageInfo(DE_NULL, *resDstImageView, VK_IMAGE_LAYOUT_GENERAL));
2122 
2123 	const VkExtent2D					renderSize					(makeExtent2D(mipmapDims.x(), mipmapDims.y()));
2124 	const Unique<VkPipelineLayout>		pipelineLayout				(makePipelineLayout(vk, device, *descriptorSetLayout));
2125 	const Unique<VkPipeline>			pipeline					(makeGraphicsPipeline(vk, device, *pipelineLayout, *renderPass, *vertShaderModule, *fragShaderModule, renderSize, 0u));
2126 
2127 	const Unique<VkCommandBuffer>		cmdBuffer					(allocateCommandBuffer(vk, device, cmdPool, VK_COMMAND_BUFFER_LEVEL_PRIMARY));
2128 
2129 	const VkBufferImageCopy				copyBufferToImageRegion		= makeBufferImageCopy(mipmapDimsBlocked.x(), mipmapDimsBlocked.y(), 0u, 0u, mipmapDimsBlocked.x(), mipmapDimsBlocked.y());
2130 	const VkBufferImageCopy				copyRegion					= makeBufferImageCopy(mipmapDims.x(), mipmapDims.y(), 0u, 0u);
2131 	const VkBufferMemoryBarrier			refSrcCopyBufferBarrierPre	= makeBufferMemoryBarrier(VK_ACCESS_HOST_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT, refSrcImageBuffer->get(), 0ull, refCompressedData.size());
2132 	const VkImageMemoryBarrier			refSrcCopyImageBarrierPre	= makeImageMemoryBarrier(0u, VK_ACCESS_TRANSFER_WRITE_BIT, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL, refSrcImage->get(), subresourceRange);
2133 	const VkImageMemoryBarrier			refSrcCopyImageBarrierPost	= makeImageMemoryBarrier(VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_SHADER_READ_BIT, VK_IMAGE_LAYOUT_GENERAL, layoutShaderReadOnly ? VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL : VK_IMAGE_LAYOUT_GENERAL, refSrcImage->get(), subresourceRange);
2134 	const VkImageMemoryBarrier			resCompressedImageBarrier	= makeImageMemoryBarrier(0, VK_ACCESS_SHADER_READ_BIT, VK_IMAGE_LAYOUT_GENERAL, layoutShaderReadOnly ? VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL : VK_IMAGE_LAYOUT_GENERAL, resCompressedImage->get(), resSubresourceRange);
2135 
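	// The framebuffer is created without attachments; all output is written through the bound storage images.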
2136 	const Move<VkFramebuffer>			framebuffer					(makeFramebuffer(vk, device, *renderPass, 0, DE_NULL, renderSize.width, renderSize.height, getLayerCount()));
2137 
2138 	// Upload source image data
2139 	{
2140 		const Allocation& refSrcImageBufferAlloc = refSrcImageBuffer->getAllocation();
2141 		deMemcpy(refSrcImageBufferAlloc.getHostPtr(), &refCompressedData[0], refCompressedData.size());
2142 		flushAlloc(vk, device, refSrcImageBufferAlloc);
2143 	}
2144 
2145 	beginCommandBuffer(vk, *cmdBuffer);
2146 	vk.cmdBindPipeline(*cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *pipeline);
2147 
2148 	// Copy buffer to image
2149 	vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1u, &refSrcCopyBufferBarrierPre, 1u, &refSrcCopyImageBarrierPre);
2150 	vk.cmdCopyBufferToImage(*cmdBuffer, refSrcImageBuffer->get(), refSrcImage->get(), VK_IMAGE_LAYOUT_GENERAL, 1u, &copyBufferToImageRegion);
2151 	vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0, DE_NULL, 1u, &refSrcCopyImageBarrierPost);
2152 
2153 	// Make reference and result images readable
2154 	vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, DE_NULL, 1u, &refDstInitImageBarrier);
2155 	vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, DE_NULL, 1u, &resDstInitImageBarrier);
2156 	{
2157 		vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, DE_NULL, 1u, &resCompressedImageBarrier);
2158 	}
2159 
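	// Draw the pre-built full-screen geometry; the verification fragment shader samples both compressed views and writes the decompressed texels into the two storage images.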
2160 	beginRenderPass(vk, *cmdBuffer, *renderPass, *framebuffer, renderSize);
2161 	{
2162 		DescriptorSetUpdateBuilder()
2163 			.writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u), VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, &descriptorRefSrcImage)
2164 			.writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(1u), VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, &descriptorResSrcImage)
2165 			.writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(2u), VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, &descriptorRefDstImage)
2166 			.writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(3u), VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, &descriptorResDstImage)
2167 			.update(vk, device);
2168 
2169 		vk.cmdBindDescriptorSets(*cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *pipelineLayout, 0u, 1u, &descriptorSet.get(), 0u, DE_NULL);
2170 		vk.cmdBindVertexBuffers(*cmdBuffer, 0, 1, &m_vertexBuffer->get(), &m_vertexBufferOffset);
2171 		vk.cmdDraw(*cmdBuffer, m_vertexCount, 1, 0, 0);
2172 	}
2173 	endRenderPass(vk, *cmdBuffer);
2174 
2175 	// Decompress reference image
2176 	{
2177 		const VkImageMemoryBarrier refDstImageBarrier = makeImageMemoryBarrier(
2178 			VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
2179 			VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_GENERAL,
2180 			refDstImage->get(), subresourceRange);
2181 
2182 		const VkBufferMemoryBarrier refDstBufferBarrier = makeBufferMemoryBarrier(
2183 			VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_HOST_READ_BIT,
2184 			refDstBuffer->get(), 0ull, dstBufferSize);
2185 
2186 		vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0, (const VkBufferMemoryBarrier*)DE_NULL, 1, &refDstImageBarrier);
2187 		vk.cmdCopyImageToBuffer(*cmdBuffer, refDstImage->get(), VK_IMAGE_LAYOUT_GENERAL, refDstBuffer->get(), 1u, &copyRegion);
2188 		vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_HOST_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1, &refDstBufferBarrier, 0, (const VkImageMemoryBarrier*)DE_NULL);
2189 	}
2190 
2191 	// Decompress result image
2192 	{
2193 		const VkImageMemoryBarrier resDstImageBarrier = makeImageMemoryBarrier(
2194 			VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
2195 			VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_GENERAL,
2196 			resDstImage->get(), subresourceRange);
2197 
2198 		const VkBufferMemoryBarrier resDstBufferBarrier = makeBufferMemoryBarrier(
2199 			VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_HOST_READ_BIT,
2200 			resDstBuffer->get(), 0ull, dstBufferSize);
2201 
2202 		vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0, (const VkBufferMemoryBarrier*)DE_NULL, 1, &resDstImageBarrier);
2203 		vk.cmdCopyImageToBuffer(*cmdBuffer, resDstImage->get(), VK_IMAGE_LAYOUT_GENERAL, resDstBuffer->get(), 1u, &copyRegion);
2204 		vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_HOST_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1, &resDstBufferBarrier, 0, (const VkImageMemoryBarrier*)DE_NULL);
2205 	}
2206 
2207 	endCommandBuffer(vk, *cmdBuffer);
2208 
2209 	submitCommandsAndWait(vk, device, queue, *cmdBuffer);
2210 	m_context.resetCommandPoolForVKSC(device, cmdPool);
2211 
2212 	// Compare decompressed pixel data in reference and result images
2213 	{
2214 		const Allocation&	refDstBufferAlloc	= refDstBuffer->getAllocation();
2215 		invalidateAlloc(vk, device, refDstBufferAlloc);
2216 
2217 		const Allocation&	resDstBufferAlloc	= resDstBuffer->getAllocation();
2218 		invalidateAlloc(vk, device, resDstBufferAlloc);
2219 
2220 		BinaryCompareMode compareMode =
2221 			(m_parameters.formatIsASTC)
2222 				?(COMPARE_MODE_ALLOW_ASTC_ERROR_COLOUR_WARNING)
2223 				:(COMPARE_MODE_NORMAL);
2224 
2225 		BinaryCompareResult res = BinaryCompare(refDstBufferAlloc.getHostPtr(),
2226 												resDstBufferAlloc.getHostPtr(),
2227 												dstBufferSize,
2228 												m_parameters.formatForVerify,
2229 												compareMode);
2230 
2231 		if (res == COMPARE_RESULT_FAILED)
2232 		{
2233 			// Run a fuzzy comparison so the error mask gets logged
2234 			invalidateAlloc(vk, device, resDstBufferAlloc);
2235 			invalidateAlloc(vk, device, refDstBufferAlloc);
2236 
2237 			tcu::ConstPixelBufferAccess	resPixels	(mapVkFormat(m_parameters.formatForVerify), renderSize.width, renderSize.height, 1u, resDstBufferAlloc.getHostPtr());
2238 			tcu::ConstPixelBufferAccess	refPixels	(mapVkFormat(m_parameters.formatForVerify), renderSize.width, renderSize.height, 1u, refDstBufferAlloc.getHostPtr());
2239 
2240 			string	comment	= string("Image Comparison (level=") + de::toString(level) + string(", layer=") + de::toString(layer) + string(")");
2241 
2242 			if (isWriteToCompressedOperation())
2243 				tcu::fuzzyCompare(m_context.getTestContext().getLog(), "ImageComparison", comment.c_str(), refPixels, resPixels, 0.001f, tcu::COMPARE_LOG_EVERYTHING);
2244 			else
2245 				tcu::fuzzyCompare(m_context.getTestContext().getLog(), "ImageComparison", comment.c_str(), resPixels, refPixels, 0.001f, tcu::COMPARE_LOG_EVERYTHING);
2246 
2247 			return false;
2248 		}
2249 		else if (res == COMPARE_RESULT_ASTC_QUALITY_WARNING)
2250 		{
2251 			m_bASTCErrorColourMismatch = true;
2252 		}
2253 	}
2254 
2255 	return true;
2256 }
2257 
2258 
2259 class GraphicsTextureTestInstance : public GraphicsAttachmentsTestInstance
2260 {
2261 public:
2262 						GraphicsTextureTestInstance		(Context& context, const TestParameters& parameters);
2263 
2264 protected:
2265 	virtual bool		isWriteToCompressedOperation	();
2266 	virtual void		transcodeRead					(const VkCommandPool&				cmdPool);
2267 	virtual void		transcodeWrite					(const VkCommandPool&				cmdPool);
2268 };
2269 
2270 GraphicsTextureTestInstance::GraphicsTextureTestInstance (Context& context, const TestParameters& parameters)
2271 	: GraphicsAttachmentsTestInstance(context, parameters)
2272 {
2273 }
2274 
2275 bool GraphicsTextureTestInstance::isWriteToCompressedOperation ()
2276 {
2277 	return (m_parameters.operation == OPERATION_TEXTURE_WRITE);
2278 }
2279 
2280 void GraphicsTextureTestInstance::transcodeRead (const VkCommandPool&				cmdPool)
2281 {
2282 	const DeviceInterface&				vk						= m_context.getDeviceInterface();
2283 	const VkDevice						device					= m_context.getDevice();
2284 	const VkQueue						queue					= m_context.getUniversalQueue();
2285 	Allocator&							allocator				= m_context.getDefaultAllocator();
2286 
2287 	const VkImageCreateFlags*			imgCreateFlagsOverride	= DE_NULL;
2288 
2289 	const VkImageCreateInfo				srcImageCreateInfo		= makeCreateImageInfo(m_srcFormat, m_parameters.imageType, m_srcImageResolutions[0], m_srcImageUsageFlags, imgCreateFlagsOverride, getLevelCount(), getLayerCount());
2290 	MovePtr<Image>						srcImage				(new Image(vk, device, allocator, srcImageCreateInfo, MemoryRequirement::Any));
2291 
2292 	const Unique<VkShaderModule>		vertShaderModule		(createShaderModule(vk, device, m_context.getBinaryCollection().get("vert"), 0));
2293 	const Unique<VkShaderModule>		fragShaderModule		(createShaderModule(vk, device, m_context.getBinaryCollection().get("frag"), 0));
2294 
2295 	const Unique<VkRenderPass>			renderPass				(vk::makeRenderPass(vk, device));
2296 
2297 	const Move<VkDescriptorSetLayout>	descriptorSetLayout		(DescriptorSetLayoutBuilder()
2298 																	.addSingleBinding(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, VK_SHADER_STAGE_FRAGMENT_BIT)
2299 																	.addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, VK_SHADER_STAGE_FRAGMENT_BIT)
2300 																	.build(vk, device));
2301 	const Move<VkDescriptorPool>		descriptorPool			(DescriptorPoolBuilder()
2302 																	.addType(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER)
2303 																	.addType(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE)
2304 																	.build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u));
2305 	const Move<VkDescriptorSet>			descriptorSet			(makeDescriptorSet(vk, device, *descriptorPool, *descriptorSetLayout));
2306 
2307 	const VkExtent2D					renderSizeUnused			(makeExtent2D(1u, 1u));
2308 	const Unique<VkPipelineLayout>		pipelineLayout			(makePipelineLayout(vk, device, *descriptorSetLayout));
2309 	const Unique<VkPipeline>			pipeline				(makeGraphicsPipeline(vk, device, *pipelineLayout, *renderPass, *vertShaderModule, *fragShaderModule, renderSizeUnused, 0u, true));
2310 
2311 	const Unique<VkCommandBuffer>		cmdBuffer				(allocateCommandBuffer(vk, device, cmdPool, VK_COMMAND_BUFFER_LEVEL_PRIMARY));
2312 
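	// For every mip level and array layer: upload the source data, sample it through the uncompressed view in a full-screen draw, and read the transcoded result back to host memory.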
2313 	for (deUint32 levelNdx = 0; levelNdx < getLevelCount(); ++levelNdx)
2314 	{
2315 		const UVec3&				uncompressedImageRes	= m_uncompressedImageResVec[levelNdx];
2316 		const UVec3&				srcImageResolution		= m_srcImageResolutions[levelNdx];
2317 		const UVec3&				dstImageResolution		= m_dstImageResolutions[levelNdx];
2318 		const size_t				srcImageSizeInBytes		= m_srcData[levelNdx][0]->size();
2319 		const size_t				dstImageSizeInBytes		= m_dstData[levelNdx][0]->size();
2320 		const UVec3					srcImageResBlocked		= getCompressedImageResolutionBlockCeil(m_parameters.formatCompressed, srcImageResolution);
2321 
2322 		const VkImageCreateInfo		dstImageCreateInfo		= makeCreateImageInfo(m_dstFormat, m_parameters.imageType, dstImageResolution, m_dstImageUsageFlags, imgCreateFlagsOverride, SINGLE_LEVEL, SINGLE_LAYER);
2323 
2324 		const VkBufferCreateInfo	srcImageBufferInfo		= makeBufferCreateInfo(srcImageSizeInBytes, VK_BUFFER_USAGE_TRANSFER_SRC_BIT);
2325 		const MovePtr<BufferWithMemory>	srcImageBuffer		= MovePtr<BufferWithMemory>(new BufferWithMemory(vk, device, allocator, srcImageBufferInfo, MemoryRequirement::HostVisible));
2326 
2327 		const VkBufferCreateInfo	dstImageBufferInfo		= makeBufferCreateInfo(dstImageSizeInBytes, VK_BUFFER_USAGE_TRANSFER_DST_BIT);
2328 		MovePtr<BufferWithMemory>	dstImageBuffer			= MovePtr<BufferWithMemory>(new BufferWithMemory(vk, device, allocator, dstImageBufferInfo, MemoryRequirement::HostVisible));
2329 
2330 		const VkExtent2D			renderSize				(makeExtent2D(uncompressedImageRes.x(), uncompressedImageRes.y()));
2331 		const VkViewport			viewport				= makeViewport(renderSize);
2332 		const VkRect2D				scissor					= makeRect2D(renderSize);
2333 
2334 		for (deUint32 layerNdx = 0; layerNdx < getLayerCount(); ++layerNdx)
2335 		{
2336 			const VkImageSubresourceRange	srcSubresourceRange		= makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, levelNdx, SINGLE_LEVEL, layerNdx, SINGLE_LAYER);
2337 			const VkImageSubresourceRange	dstSubresourceRange		= makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, SINGLE_LEVEL, 0u, SINGLE_LAYER);
2338 
2339 			Move<VkImageView>				srcImageView			(makeImageView(vk, device, srcImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatUncompressed, srcSubresourceRange, m_srcImageViewUsageKHR));
2340 
2341 			de::MovePtr<Image>				dstImage				(new Image(vk, device, allocator, dstImageCreateInfo, MemoryRequirement::Any));
2342 			Move<VkImageView>				dstImageView			(makeImageView(vk, device, dstImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatUncompressed, dstSubresourceRange, m_dstImageViewUsageKHR));
2343 
2344 			const VkSamplerCreateInfo		srcSamplerInfo			(makeSamplerCreateInfo());
2345 			const Move<VkSampler>			srcSampler				= vk::createSampler(vk, device, &srcSamplerInfo);
2346 			const VkDescriptorImageInfo		descriptorSrcImage		(makeDescriptorImageInfo(*srcSampler, *srcImageView, VK_IMAGE_LAYOUT_GENERAL));
2347 			const VkDescriptorImageInfo		descriptorDstImage		(makeDescriptorImageInfo(DE_NULL, *dstImageView, VK_IMAGE_LAYOUT_GENERAL));
2348 
2349 			const VkBufferImageCopy			srcCopyRegion			= makeBufferImageCopy(srcImageResolution.x(), srcImageResolution.y(), levelNdx, layerNdx, srcImageResBlocked.x(), srcImageResBlocked.y());
2350 			const VkBufferMemoryBarrier		srcCopyBufferBarrierPre	= makeBufferMemoryBarrier(VK_ACCESS_HOST_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT, srcImageBuffer->get(), 0ull, srcImageSizeInBytes);
2351 			const VkImageMemoryBarrier		srcCopyImageBarrierPre	= makeImageMemoryBarrier(0u, VK_ACCESS_TRANSFER_WRITE_BIT, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, srcImage->get(), srcSubresourceRange);
2352 			const VkImageMemoryBarrier		srcCopyImageBarrierPost	= makeImageMemoryBarrier(VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_SHADER_READ_BIT, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL, srcImage->get(), srcSubresourceRange);
2353 			const VkBufferImageCopy			dstCopyRegion			= makeBufferImageCopy(dstImageResolution.x(), dstImageResolution.y());
2354 			const VkImageMemoryBarrier		dstInitImageBarrier		= makeImageMemoryBarrier(0u, (VK_ACCESS_SHADER_READ_BIT|VK_ACCESS_SHADER_WRITE_BIT), VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL, dstImage->get(), dstSubresourceRange);
2355 
2356 			const VkExtent2D				framebufferSize			(makeExtent2D(dstImageResolution[0], dstImageResolution[1]));
2357 			const Move<VkFramebuffer>		framebuffer				(makeFramebuffer(vk, device, *renderPass, 0, DE_NULL, framebufferSize.width, framebufferSize.height, SINGLE_LAYER));
2358 
2359 			// Upload source image data
2360 			const Allocation& alloc = srcImageBuffer->getAllocation();
2361 			deMemcpy(alloc.getHostPtr(), &m_srcData[levelNdx][layerNdx]->at(0), srcImageSizeInBytes);
2362 			flushAlloc(vk, device, alloc);
2363 
2364 			beginCommandBuffer(vk, *cmdBuffer);
2365 			vk.cmdBindPipeline(*cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *pipeline);
2366 
2367 			// Copy buffer to image
2368 			vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1u, &srcCopyBufferBarrierPre, 1u, &srcCopyImageBarrierPre);
2369 			vk.cmdCopyBufferToImage(*cmdBuffer, srcImageBuffer->get(), srcImage->get(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &srcCopyRegion);
2370 			vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, DE_NULL, 1u, &srcCopyImageBarrierPost);
2371 
2372 			// Transition the destination image to the GENERAL layout
2373 			vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, DE_NULL, 1u, &dstInitImageBarrier);
2374 
2375 			beginRenderPass(vk, *cmdBuffer, *renderPass, *framebuffer, renderSize);
2376 
2377 			DescriptorSetUpdateBuilder()
2378 				.writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u), VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, &descriptorSrcImage)
2379 				.writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(1u), VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, &descriptorDstImage)
2380 				.update(vk, device);
2381 
2382 			vk.cmdBindDescriptorSets(*cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *pipelineLayout, 0u, 1u, &descriptorSet.get(), 0u, DE_NULL);
2383 			vk.cmdBindVertexBuffers(*cmdBuffer, 0u, 1u, &m_vertexBuffer->get(), &m_vertexBufferOffset);
2384 
2385 			vk.cmdSetViewport(*cmdBuffer, 0u, 1u, &viewport);
2386 			vk.cmdSetScissor(*cmdBuffer, 0u, 1u, &scissor);
2387 
2388 			vk.cmdDraw(*cmdBuffer, (deUint32)m_vertexCount, 1, 0, 0);
2389 
2390 			endRenderPass(vk, *cmdBuffer);
2391 
2392 			const VkImageMemoryBarrier prepareForTransferBarrier = makeImageMemoryBarrier(
2393 				VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
2394 				VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_GENERAL,
2395 				dstImage->get(), dstSubresourceRange);
2396 
2397 			const VkBufferMemoryBarrier copyBarrier = makeBufferMemoryBarrier(
2398 				VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_HOST_READ_BIT,
2399 				dstImageBuffer->get(), 0ull, dstImageSizeInBytes);
2400 
2401 			vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0, (const VkBufferMemoryBarrier*)DE_NULL, 1, &prepareForTransferBarrier);
2402 			vk.cmdCopyImageToBuffer(*cmdBuffer, dstImage->get(), VK_IMAGE_LAYOUT_GENERAL, dstImageBuffer->get(), 1u, &dstCopyRegion);
2403 			vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_HOST_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1, &copyBarrier, 0, (const VkImageMemoryBarrier*)DE_NULL);
2404 
2405 			endCommandBuffer(vk, *cmdBuffer);
2406 
2407 			submitCommandsAndWait(vk, device, queue, *cmdBuffer);
2408 			m_context.resetCommandPoolForVKSC(device, cmdPool);
2409 
2410 			const Allocation& dstImageBufferAlloc = dstImageBuffer->getAllocation();
2411 			invalidateAlloc(vk, device, dstImageBufferAlloc);
2412 			deMemcpy(&m_dstData[levelNdx][layerNdx]->at(0), dstImageBufferAlloc.getHostPtr(), dstImageSizeInBytes);
2413 		}
2414 	}
2415 
2416 	m_compressedImage = srcImage;
2417 }
2418 
2419 void GraphicsTextureTestInstance::transcodeWrite (const VkCommandPool&				cmdPool)
2420 {
2421 	const DeviceInterface&				vk						= m_context.getDeviceInterface();
2422 	const VkDevice						device					= m_context.getDevice();
2423 	const VkQueue						queue					= m_context.getUniversalQueue();
2424 	Allocator&							allocator				= m_context.getDefaultAllocator();
2425 
2426 	const VkImageCreateFlags*			imgCreateFlagsOverride	= DE_NULL;
2427 
2428 	const VkImageCreateInfo				dstImageCreateInfo		= makeCreateImageInfo(m_dstFormat, m_parameters.imageType, m_dstImageResolutions[0], m_dstImageUsageFlags, imgCreateFlagsOverride, getLevelCount(), getLayerCount());
2429 	MovePtr<Image>						dstImage				(new Image(vk, device, allocator, dstImageCreateInfo, MemoryRequirement::Any));
2430 
2431 	const Unique<VkShaderModule>		vertShaderModule		(createShaderModule(vk, device, m_context.getBinaryCollection().get("vert"), 0));
2432 	const Unique<VkShaderModule>		fragShaderModule		(createShaderModule(vk, device, m_context.getBinaryCollection().get("frag"), 0));
2433 
2434 	const Unique<VkRenderPass>			renderPass				(vk::makeRenderPass(vk, device));
2435 
2436 	const Move<VkDescriptorSetLayout>	descriptorSetLayout		(DescriptorSetLayoutBuilder()
2437 																	.addSingleBinding(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, VK_SHADER_STAGE_FRAGMENT_BIT)
2438 																	.addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, VK_SHADER_STAGE_FRAGMENT_BIT)
2439 																	.build(vk, device));
2440 	const Move<VkDescriptorPool>		descriptorPool			(DescriptorPoolBuilder()
2441 																	.addType(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER)
2442 																	.addType(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE)
2443 																	.build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u));
2444 	const Move<VkDescriptorSet>			descriptorSet			(makeDescriptorSet(vk, device, *descriptorPool, *descriptorSetLayout));
2445 
2446 	const VkExtent2D					renderSizeUnused		(makeExtent2D(1u, 1u));
2447 	const Unique<VkPipelineLayout>		pipelineLayout			(makePipelineLayout(vk, device, *descriptorSetLayout));
2448 	const Unique<VkPipeline>			pipeline				(makeGraphicsPipeline(vk, device, *pipelineLayout, *renderPass, *vertShaderModule, *fragShaderModule, renderSizeUnused, 0u, true));
2449 
2450 	const Unique<VkCommandBuffer>		cmdBuffer				(allocateCommandBuffer(vk, device, cmdPool, VK_COMMAND_BUFFER_LEVEL_PRIMARY));
2451 
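	// For every mip level and array layer: upload the uncompressed source data, write it into the compressed destination image through its uncompressed view, and read the destination back for verification.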
2452 	for (deUint32 levelNdx = 0; levelNdx < getLevelCount(); ++levelNdx)
2453 	{
2454 		const UVec3&				uncompressedImageRes	= m_uncompressedImageResVec[levelNdx];
2455 		const UVec3&				srcImageResolution		= m_srcImageResolutions[levelNdx];
2456 		const UVec3&				dstImageResolution		= m_dstImageResolutions[levelNdx];
2457 		const size_t				srcImageSizeInBytes		= m_srcData[levelNdx][0]->size();
2458 		const size_t				dstImageSizeInBytes		= m_dstData[levelNdx][0]->size();
2459 		const UVec3					dstImageResBlocked		= getCompressedImageResolutionBlockCeil(m_parameters.formatCompressed, dstImageResolution);
2460 
2461 		const VkImageCreateInfo		srcImageCreateInfo		= makeCreateImageInfo(m_srcFormat, m_parameters.imageType, srcImageResolution, m_srcImageUsageFlags, imgCreateFlagsOverride, SINGLE_LEVEL, SINGLE_LAYER);
2462 
2463 		const VkExtent2D			renderSize				(makeExtent2D(uncompressedImageRes.x(), uncompressedImageRes.y()));
2464 		const VkViewport			viewport				= makeViewport(renderSize);
2465 		const VkRect2D				scissor					= makeRect2D(renderSize);
2466 
2467 		for (deUint32 layerNdx = 0; layerNdx < getLayerCount(); ++layerNdx)
2468 		{
2469 			const VkBufferCreateInfo		srcImageBufferInfo		= makeBufferCreateInfo(srcImageSizeInBytes, VK_BUFFER_USAGE_TRANSFER_SRC_BIT);
2470 			const MovePtr<BufferWithMemory>	srcImageBuffer			= MovePtr<BufferWithMemory>(new BufferWithMemory(vk, device, allocator, srcImageBufferInfo, MemoryRequirement::HostVisible));
2471 
2472 			const VkBufferCreateInfo		dstImageBufferInfo		= makeBufferCreateInfo(dstImageSizeInBytes, VK_BUFFER_USAGE_TRANSFER_DST_BIT);
2473 			MovePtr<BufferWithMemory>		dstImageBuffer			= MovePtr<BufferWithMemory>(new BufferWithMemory(vk, device, allocator, dstImageBufferInfo, MemoryRequirement::HostVisible));
2474 
2475 			const VkImageSubresourceRange	srcSubresourceRange		= makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, SINGLE_LEVEL, 0u, SINGLE_LAYER);
2476 			const VkImageSubresourceRange	dstSubresourceRange		= makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, levelNdx, SINGLE_LEVEL, layerNdx, SINGLE_LAYER);
2477 
2478 			Move<VkImageView>				dstImageView			(makeImageView(vk, device, dstImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatUncompressed, dstSubresourceRange, m_dstImageViewUsageKHR));
2479 
2480 			de::MovePtr<Image>				srcImage				(new Image(vk, device, allocator, srcImageCreateInfo, MemoryRequirement::Any));
2481 			Move<VkImageView>				srcImageView			(makeImageView(vk, device, srcImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatUncompressed, srcSubresourceRange, m_srcImageViewUsageKHR));
2482 
2483 			const VkSamplerCreateInfo		srcSamplerInfo			(makeSamplerCreateInfo());
2484 			const Move<VkSampler>			srcSampler				= vk::createSampler(vk, device, &srcSamplerInfo);
2485 			const VkDescriptorImageInfo		descriptorSrcImage		(makeDescriptorImageInfo(*srcSampler, *srcImageView, VK_IMAGE_LAYOUT_GENERAL));
2486 			const VkDescriptorImageInfo		descriptorDstImage		(makeDescriptorImageInfo(DE_NULL, *dstImageView, VK_IMAGE_LAYOUT_GENERAL));
2487 
2488 			const VkBufferImageCopy			srcCopyRegion			= makeBufferImageCopy(srcImageResolution.x(), srcImageResolution.y(), 0u, 0u);
2489 			const VkBufferMemoryBarrier		srcCopyBufferBarrierPre	= makeBufferMemoryBarrier(VK_ACCESS_HOST_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT, srcImageBuffer->get(), 0ull, srcImageSizeInBytes);
2490 			const VkImageMemoryBarrier		srcCopyImageBarrierPre	= makeImageMemoryBarrier(0u, VK_ACCESS_TRANSFER_WRITE_BIT, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, srcImage->get(), srcSubresourceRange);
2491 			const VkImageMemoryBarrier		srcCopyImageBarrierPost	= makeImageMemoryBarrier(VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_SHADER_READ_BIT, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL, srcImage->get(), srcSubresourceRange);
2492 			const VkBufferImageCopy			dstCopyRegion			= makeBufferImageCopy(dstImageResolution.x(), dstImageResolution.y(), levelNdx, layerNdx, dstImageResBlocked.x(), dstImageResBlocked.y());
2493 			const VkImageMemoryBarrier		dstInitImageBarrier		= makeImageMemoryBarrier(0u, VK_ACCESS_SHADER_READ_BIT, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL, dstImage->get(), dstSubresourceRange);
2494 
2495 			const VkExtent2D				framebufferSize			(makeExtent2D(dstImageResolution[0], dstImageResolution[1]));
2496 			const Move<VkFramebuffer>		framebuffer				(makeFramebuffer(vk, device, *renderPass, 0, DE_NULL, framebufferSize.width, framebufferSize.height, SINGLE_LAYER));
2497 
2498 			// Upload source image data
2499 			const Allocation& alloc = srcImageBuffer->getAllocation();
2500 			deMemcpy(alloc.getHostPtr(), &m_srcData[levelNdx][layerNdx]->at(0), srcImageSizeInBytes);
2501 			flushAlloc(vk, device, alloc);
2502 
2503 			beginCommandBuffer(vk, *cmdBuffer);
2504 			vk.cmdBindPipeline(*cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *pipeline);
2505 
2506 			// Copy buffer to image
2507 			vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1u, &srcCopyBufferBarrierPre, 1u, &srcCopyImageBarrierPre);
2508 			vk.cmdCopyBufferToImage(*cmdBuffer, srcImageBuffer->get(), srcImage->get(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &srcCopyRegion);
2509 			vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, DE_NULL, 1u, &srcCopyImageBarrierPost);
2510 
2511 			// Transition the destination image to the GENERAL layout
2512 			vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, DE_NULL, 1u, &dstInitImageBarrier);
2513 
2514 			beginRenderPass(vk, *cmdBuffer, *renderPass, *framebuffer, renderSize);
2515 
2516 			DescriptorSetUpdateBuilder()
2517 				.writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u), VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, &descriptorSrcImage)
2518 				.writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(1u), VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, &descriptorDstImage)
2519 				.update(vk, device);
2520 
2521 			vk.cmdBindDescriptorSets(*cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *pipelineLayout, 0u, 1u, &descriptorSet.get(), 0u, DE_NULL);
2522 			vk.cmdBindVertexBuffers(*cmdBuffer, 0u, 1u, &m_vertexBuffer->get(), &m_vertexBufferOffset);
2523 
2524 			vk.cmdSetViewport(*cmdBuffer, 0u, 1u, &viewport);
2525 			vk.cmdSetScissor(*cmdBuffer, 0u, 1u, &scissor);
2526 
2527 			vk.cmdDraw(*cmdBuffer, (deUint32)m_vertexCount, 1, 0, 0);
2528 
2529 			endRenderPass(vk, *cmdBuffer);
2530 
2531 			const VkImageMemoryBarrier prepareForTransferBarrier = makeImageMemoryBarrier(
2532 				VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
2533 				VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_GENERAL,
2534 				dstImage->get(), dstSubresourceRange);
2535 
2536 			const VkBufferMemoryBarrier copyBarrier = makeBufferMemoryBarrier(
2537 				VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_HOST_READ_BIT,
2538 				dstImageBuffer->get(), 0ull, dstImageSizeInBytes);
2539 
2540 			vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0, (const VkBufferMemoryBarrier*)DE_NULL, 1, &prepareForTransferBarrier);
2541 			vk.cmdCopyImageToBuffer(*cmdBuffer, dstImage->get(), VK_IMAGE_LAYOUT_GENERAL, dstImageBuffer->get(), 1u, &dstCopyRegion);
2542 			vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_HOST_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1, &copyBarrier, 0, (const VkImageMemoryBarrier*)DE_NULL);
2543 
2544 			endCommandBuffer(vk, *cmdBuffer);
2545 
2546 			submitCommandsAndWait(vk, device, queue, *cmdBuffer);
2547 			m_context.resetCommandPoolForVKSC(device, cmdPool);
2548 
2549 			const Allocation& dstImageBufferAlloc = dstImageBuffer->getAllocation();
2550 			invalidateAlloc(vk, device, dstImageBufferAlloc);
2551 			deMemcpy(&m_dstData[levelNdx][layerNdx]->at(0), dstImageBufferAlloc.getHostPtr(), dstImageSizeInBytes);
2552 		}
2553 	}
2554 
2555 	m_compressedImage = dstImage;
2556 }
2557 
2558 class TexelViewCompatibleCase : public TestCase
2559 {
2560 public:
2561 							TexelViewCompatibleCase		(TestContext&				testCtx,
2562 														 const std::string&			name,
2563 														 const TestParameters&		parameters);
2564 	void					initPrograms				(SourceCollections&			programCollection) const;
2565 	TestInstance*			createInstance				(Context&					context) const;
2566 	virtual void			checkSupport				(Context&					context) const;
2567 protected:
2568 	const TestParameters	m_parameters;
2569 };
2570 
2571 TexelViewCompatibleCase::TexelViewCompatibleCase (TestContext& testCtx, const std::string& name, const TestParameters& parameters)
2572 	: TestCase				(testCtx, name)
2573 	, m_parameters			(parameters)
2574 {
2575 }
2576 
2577 void TexelViewCompatibleCase::initPrograms (vk::SourceCollections&	programCollection) const
2578 {
2579 	DE_ASSERT(m_parameters.size.x() > 0);
2580 	DE_ASSERT(m_parameters.size.y() > 0);
2581 
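	// Index used to select the per-image-type GLSL snippet below: 0 = 1D, 1 = 2D, 2 = 3D.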
2582 	const unsigned int imageTypeIndex =
2583 		(m_parameters.imageType == IMAGE_TYPE_2D) +
2584 		(m_parameters.imageType == IMAGE_TYPE_3D) * 2;
2585 
2586 	switch (m_parameters.shader)
2587 	{
2588 		case SHADER_TYPE_COMPUTE:
2589 		{
2590 			const std::string	imageTypeStr		= getShaderImageType(mapVkFormat(m_parameters.formatUncompressed), m_parameters.imageType);
2591 			const std::string	formatQualifierStr	= getShaderImageFormatQualifier(mapVkFormat(m_parameters.formatUncompressed));
2592 			std::ostringstream	src;
2593 			std::ostringstream	src_decompress;
2594 
2595 			src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_450) << "\n"
2596 				<< "layout (local_size_x = 1, local_size_y = 1, local_size_z = 1) in;\n\n";
2597 			src_decompress << src.str();
2598 
2599 			switch(m_parameters.operation)
2600 			{
2601 				case OPERATION_IMAGE_LOAD:
2602 				{
2603 					const char* posDefinitions[3] =
2604 					{
2605 						// IMAGE_TYPE_1D
2606 						"    highp int pos = int(gl_GlobalInvocationID.x);\n",
2607 						// IMAGE_TYPE_2D
2608 						"    ivec2 pos = ivec2(gl_GlobalInvocationID.xy);\n",
2609 						// IMAGE_TYPE_3D
2610 						"    ivec3 pos = ivec3(gl_GlobalInvocationID);\n",
2611 					};
2612 
2613 					src << "layout (binding = 0, "<<formatQualifierStr<<") readonly uniform "<<imageTypeStr<<" u_image0;\n"
2614 						<< "layout (binding = 1, "<<formatQualifierStr<<") writeonly uniform "<<imageTypeStr<<" u_image1;\n\n"
2615 						<< "void main (void)\n"
2616 						<< "{\n"
2617 						<< posDefinitions[imageTypeIndex]
2618 						<< "    imageStore(u_image1, pos, imageLoad(u_image0, pos));\n"
2619 						<< "}\n";
2620 
2621 					break;
2622 				}
2623 
2624 				case OPERATION_TEXEL_FETCH:
2625 				{
2626 					const char* storeDefinitions[3] =
2627 					{
2628 						// IMAGE_TYPE_1D
2629 						"    imageStore(u_image1, pos.x, texelFetch(u_image0, pos.x, pos.z));\n",
2630 						// IMAGE_TYPE_2D
2631 						"    imageStore(u_image1, pos.xy, texelFetch(u_image0, pos.xy, pos.z));\n",
2632 						// IMAGE_TYPE_3D
2633 						"    imageStore(u_image1, pos, texelFetch(u_image0, pos, pos.z));\n",
2634 					};
2635 
2636 					src << "layout (binding = 0) uniform "<<getGlslSamplerType(mapVkFormat(m_parameters.formatUncompressed), mapImageViewType(m_parameters.imageType))<<" u_image0;\n"
2637 						<< "layout (binding = 1, "<<formatQualifierStr<<") writeonly uniform "<<imageTypeStr<<" u_image1;\n\n"
2638 						<< "void main (void)\n"
2639 						<< "{\n"
2640 						<< "    ivec3 pos = ivec3(gl_GlobalInvocationID.xyz);\n"
2641 						<< storeDefinitions[imageTypeIndex]
2642 						<< "}\n";
2643 
2644 					break;
2645 				}
2646 
2647 				case OPERATION_TEXTURE:
2648 				{
2649 					const char* coordDefinitions[3] =
2650 					{
2651 						// IMAGE_TYPE_1D
2652 						"    const int     pos = int(gl_GlobalInvocationID.x);\n"
2653 						"    const float coord = (float(gl_GlobalInvocationID.x) + 0.5) / pixels_resolution.x;\n",
2654 						// IMAGE_TYPE_2D
2655 						"    const ivec2  pos = ivec2(gl_GlobalInvocationID.xy);\n"
2656 						"    const vec2 coord = (vec2(gl_GlobalInvocationID.xy) + 0.5) / vec2(pixels_resolution);\n",
2657 						// IMAGE_TYPE_3D
2658 						"    const ivec3  pos = ivec3(gl_GlobalInvocationID.xy, 0);\n"
2659 						"    const vec2    v2 = (vec2(gl_GlobalInvocationID.xy) + 0.5) / vec2(pixels_resolution);\n"
2660 						"    const vec3 coord = vec3(v2, 0.0);\n",
2661 					};
2662 
2663 					src << "layout (binding = 0) uniform "<<getGlslSamplerType(mapVkFormat(m_parameters.formatUncompressed), mapImageViewType(m_parameters.imageType))<<" u_image0;\n"
2664 						<< "layout (binding = 1, "<<formatQualifierStr<<") writeonly uniform "<<imageTypeStr<<" u_image1;\n\n"
2665 						<< "void main (void)\n"
2666 						<< "{\n"
2667 						<< "    const vec2 pixels_resolution = vec2(gl_NumWorkGroups.x, gl_NumWorkGroups.y);\n"
2668 						<< coordDefinitions[imageTypeIndex]
2669 						<< "    imageStore(u_image1, pos, texture(u_image0, coord));\n"
2670 						<< "}\n";
2671 
2672 					break;
2673 				}
2674 
2675 				case OPERATION_IMAGE_STORE:
2676 				{
2677 					const char* posDefinitions[3] =
2678 					{
2679 						// IMAGE_TYPE_1D
2680 						"    highp int pos = int(gl_GlobalInvocationID.x);\n",
2681 						// IMAGE_TYPE_2D
2682 						"    ivec2 pos = ivec2(gl_GlobalInvocationID.xy);\n",
2683 						// IMAGE_TYPE_3D
2684 						"    ivec3 pos = ivec3(gl_GlobalInvocationID);\n",
2685 					};
2686 
2687 					src << "layout (binding = 0, "<<formatQualifierStr<<") uniform "<<imageTypeStr<<"           u_image0;\n"
2688 						<< "layout (binding = 1, "<<formatQualifierStr<<") readonly uniform "<<imageTypeStr<<"  u_image1;\n"
2689 						<< "layout (binding = 2, "<<formatQualifierStr<<") writeonly uniform "<<imageTypeStr<<" u_image2;\n\n"
2690 						<< "void main (void)\n"
2691 						<< "{\n"
2692 						<< posDefinitions[imageTypeIndex]
2693 						<< "    imageStore(u_image0, pos, imageLoad(u_image1, pos));\n"
2694 						<< "    imageStore(u_image2, pos, imageLoad(u_image0, pos));\n"
2695 						<< "}\n";
2696 
2697 					break;
2698 				}
2699 
2700 				default:
2701 					DE_ASSERT(false);
2702 			}
2703 
2704 			const ImageType compressedReferenceImageType = (m_parameters.imageType == IMAGE_TYPE_2D && m_parameters.layers > 1u) ? IMAGE_TYPE_2D_ARRAY : m_parameters.imageType;
2705 			const char* cordDefinitions[3] =
2706 			{
2707 				// IMAGE_TYPE_1D
2708 				"    const highp float cord = float(gl_GlobalInvocationID.x) / pixels_resolution.x;\n"
2709 				"    const highp int    pos = int(gl_GlobalInvocationID.x); \n",
2710 				// IMAGE_TYPE_2D
2711 				"    const vec2 cord = vec2(gl_GlobalInvocationID.xy) / vec2(pixels_resolution);\n"
2712 				"    const ivec2 pos = ivec2(gl_GlobalInvocationID.xy); \n",
2713 				// IMAGE_TYPE_3D
2714 				"    const vec2 v2 = vec2(gl_GlobalInvocationID.xy) / vec2(pixels_resolution);\n"
2715 				"    const vec3 cord = vec3(v2, 0.0);\n"
2716 				"    const ivec3 pos = ivec3(gl_GlobalInvocationID); \n",
2717 			};
2718 			src_decompress	<< "layout (binding = 0) uniform "<<getGlslSamplerType(mapVkFormat(m_parameters.formatForVerify), mapImageViewType(m_parameters.imageType))<<" compressed_result;\n"
2719 							<< "layout (binding = 1) uniform "<<getGlslSamplerType(mapVkFormat(m_parameters.formatForVerify), mapImageViewType(compressedReferenceImageType))<<" compressed_reference;\n"
2720 							<< "layout (binding = 2, "<<getShaderImageFormatQualifier(mapVkFormat(m_parameters.formatForVerify))<<") writeonly uniform "<<getShaderImageType(mapVkFormat(m_parameters.formatForVerify), m_parameters.imageType)<<" decompressed_result;\n"
2721 							<< "layout (binding = 3, "<<getShaderImageFormatQualifier(mapVkFormat(m_parameters.formatForVerify))<<") writeonly uniform "<<getShaderImageType(mapVkFormat(m_parameters.formatForVerify), m_parameters.imageType)<<" decompressed_reference;\n"
2722 							<< "layout (push_constant, std430) uniform PushConstants { uint layer; uint level; };\n\n"
2723 							<< "void main (void)\n"
2724 							<< "{\n"
2725 							<< "    const vec2 pixels_resolution = vec2(gl_NumWorkGroups.xy);\n"
2726 							<< cordDefinitions[imageTypeIndex]
2727 							<< "    imageStore(decompressed_result, pos, texture(compressed_result, cord));\n";
2728 			if (compressedReferenceImageType == IMAGE_TYPE_2D_ARRAY)
2729 				src_decompress	<< "    imageStore(decompressed_reference, pos, textureLod(compressed_reference, vec3(cord, layer), level));\n";
2730 			else
2731 				src_decompress	<< "    imageStore(decompressed_reference, pos, texture(compressed_reference, cord));\n";
2732 			src_decompress	<< "}\n";
2733 			programCollection.glslSources.add("comp") << glu::ComputeSource(src.str());
2734 			programCollection.glslSources.add("decompress") << glu::ComputeSource(src_decompress.str());
2735 
2736 			break;
2737 		}
2738 
2739 		case SHADER_TYPE_FRAGMENT:
2740 		{
2741 			ImageType	imageTypeForFS = (m_parameters.imageType == IMAGE_TYPE_2D_ARRAY) ? IMAGE_TYPE_2D : m_parameters.imageType;
2742 
2743 			// Vertex shader
2744 			{
2745 				std::ostringstream src;
2746 				src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_450) << "\n\n"
2747 					<< "layout(location = 0) in vec4 v_in_position;\n"
2748 					<< "\n"
2749 					<< "void main (void)\n"
2750 					<< "{\n"
2751 					<< "    gl_Position = v_in_position;\n"
2752 					<< "}\n";
2753 
2754 				programCollection.glslSources.add("vert") << glu::VertexSource(src.str());
2755 			}
2756 
2757 			// Fragment shader
2758 			{
2759 				switch(m_parameters.operation)
2760 				{
2761 					case OPERATION_ATTACHMENT_READ:
2762 					case OPERATION_ATTACHMENT_WRITE:
2763 					{
2764 						std::ostringstream	src;
2765 
2766 						const std::string	dstTypeStr	= getGlslFormatType(m_parameters.formatUncompressed);
2767 						const std::string	srcTypeStr	= getGlslInputFormatType(m_parameters.formatUncompressed);
2768 
2769 						src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_450) << "\n\n"
2770 							<< "precision highp int;\n"
2771 							<< "precision highp float;\n"
2772 							<< "\n"
2773 							<< "layout (location = 0) out highp " << dstTypeStr << " o_color;\n"
2774 							<< "layout (input_attachment_index = 0, set = 0, binding = 0) uniform highp " << srcTypeStr << " inputImage1;\n"
2775 							<< "\n"
2776 							<< "void main (void)\n"
2777 							<< "{\n"
2778 							<< "    o_color = " << dstTypeStr << "(subpassLoad(inputImage1));\n"
2779 							<< "}\n";
2780 
2781 						programCollection.glslSources.add("frag") << glu::FragmentSource(src.str());
2782 
2783 						break;
2784 					}
2785 
2786 					case OPERATION_TEXTURE_READ:
2787 					case OPERATION_TEXTURE_WRITE:
2788 					{
2789 						std::ostringstream	src;
2790 
2791 						const std::string	srcSamplerTypeStr		= getGlslSamplerType(mapVkFormat(m_parameters.formatUncompressed), mapImageViewType(imageTypeForFS));
2792 						const std::string	dstImageTypeStr			= getShaderImageType(mapVkFormat(m_parameters.formatUncompressed), imageTypeForFS);
2793 						const std::string	dstFormatQualifierStr	= getShaderImageFormatQualifier(mapVkFormat(m_parameters.formatUncompressed));
2794 
2795 						const char* inDefinitions[3] =
2796 						{
2797 							// IMAGE_TYPE_1D
2798 							"    const highp int out_pos = int(gl_FragCoord.x);\n"
2799 							"    const highp float pixels_resolution = textureSize(u_imageIn, 0);\n"
2800 							"    const highp float in_pos = gl_FragCoord.x / pixels_resolution;\n",
2801 							// IMAGE_TYPE_2D
2802 							"    const ivec2 out_pos = ivec2(gl_FragCoord.xy);\n"
2803 							"    const vec2 pixels_resolution = vec2(textureSize(u_imageIn, 0));\n"
2804 							"    const vec2 in_pos = vec2(gl_FragCoord.xy) / vec2(pixels_resolution);\n",
2805 							// IMAGE_TYPE_3D
2806 							"    const ivec3 out_pos = ivec3(gl_FragCoord.xy, 0);\n"
2807 							"    const vec3 pixels_resolution = vec3(textureSize(u_imageIn, 0));\n"
2808 							"    const vec3 in_pos = vec3(gl_FragCoord.xy, 0) / vec3(pixels_resolution.xy, 1.0);\n",
2809 						};
2810 
2811 						src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_450) << "\n\n"
2812 							<< "layout (binding = 0) uniform " << srcSamplerTypeStr << " u_imageIn;\n"
2813 							<< "layout (binding = 1, " << dstFormatQualifierStr << ") writeonly uniform " << dstImageTypeStr << " u_imageOut;\n"
2814 							<< "\n"
2815 							<< "void main (void)\n"
2816 							<< "{\n"
2817 							<< inDefinitions[imageTypeIndex]
2818 							<< "    imageStore(u_imageOut, out_pos, texture(u_imageIn, in_pos));\n"
2819 							<< "}\n";
2820 
2821 						programCollection.glslSources.add("frag") << glu::FragmentSource(src.str());
2822 
2823 						break;
2824 					}
2825 
2826 					default:
2827 						DE_ASSERT(false);
2828 				}
2829 			}
2830 
2831 			// Verification fragment shader
2832 			{
2833 				std::ostringstream	src;
2834 
2835 				const std::string	samplerType			= getGlslSamplerType(mapVkFormat(m_parameters.formatForVerify), mapImageViewType(imageTypeForFS));
2836 				const std::string	imageTypeStr		= getShaderImageType(mapVkFormat(m_parameters.formatForVerify), imageTypeForFS);
2837 				const std::string	formatQualifierStr	= getShaderImageFormatQualifier(mapVkFormat(m_parameters.formatForVerify));
2838 
2839 				const char* pos0Definitions[3] =
2840 				{
2841 					// IMAGE_TYPE_1D
2842 					"    const highp int out_pos = int(gl_FragCoord.x);\n"
2843 					"    const highp float pixels_resolution0 = textureSize(u_imageIn0, 0);\n"
2844 					"    const highp float in_pos0 = gl_FragCoord.x / pixels_resolution0;\n",
2845 					// IMAGE_TYPE_2D
2846 					"    const ivec2 out_pos = ivec2(gl_FragCoord.xy);\n"
2847 					"    const vec2 pixels_resolution0 = vec2(textureSize(u_imageIn0, 0));\n"
2848 					"    const vec2 in_pos0 = vec2(gl_FragCoord.xy) / vec2(pixels_resolution0);\n",
2849 					// IMAGE_TYPE_3D
2850 					"    const ivec3 out_pos = ivec3(ivec2(gl_FragCoord.xy), 0);\n"
2851 					"    const vec3 pixels_resolution0 = vec3(textureSize(u_imageIn0, 0));\n"
2852 					"    const vec3 in_pos0 = vec3(gl_FragCoord.xy, 0) / vec3(pixels_resolution0.xy, 1.0);\n",
2853 				};
2854 				const char* pos1Definitions[3] =
2855 				{
2856 					// IMAGE_TYPE_1D
2857 					"    const highp float pixels_resolution1 = textureSize(u_imageIn1, 0);\n"
2858 					"    const highp float in_pos1 = gl_FragCoord.x / pixels_resolution1;\n",
2859 					// IMAGE_TYPE_2D
2860 					"    const vec2 pixels_resolution1 = vec2(textureSize(u_imageIn1, 0));\n"
2861 					"    const vec2 in_pos1 = vec2(gl_FragCoord.xy) / vec2(pixels_resolution1);\n",
2862 					// IMAGE_TYPE_3D
2863 					"    const vec3 pixels_resolution1 = vec3(textureSize(u_imageIn1, 0));\n"
2864 					"    const vec3 in_pos1 = vec3(gl_FragCoord.xy, 0) / vec3(pixels_resolution1.xy, 1.0);\n",
2865 				};
2866 
2867 				src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_450) << "\n\n"
2868 					<< "layout (binding = 0) uniform " << getGlslSamplerType(mapVkFormat(m_parameters.formatForVerify), mapImageViewType(m_parameters.imageType)) << " u_imageIn0;\n"
2869 					<< "layout (binding = 1) uniform " << getGlslSamplerType(mapVkFormat(m_parameters.formatForVerify), mapImageViewType(m_parameters.imageType)) << " u_imageIn1;\n"
2870 					<< "layout (binding = 2, " << getShaderImageFormatQualifier(mapVkFormat(m_parameters.formatForVerify)) << ") writeonly uniform " << getShaderImageType(mapVkFormat(m_parameters.formatForVerify), m_parameters.imageType) << " u_imageOut0;\n"
2871 					<< "layout (binding = 3, " << getShaderImageFormatQualifier(mapVkFormat(m_parameters.formatForVerify)) << ") writeonly uniform " << getShaderImageType(mapVkFormat(m_parameters.formatForVerify), m_parameters.imageType) << " u_imageOut1;\n"
2872 					<< "\n"
2873 					<< "void main (void)\n"
2874 					<< "{\n"
2875 					<< pos0Definitions[imageTypeIndex]
2876 					<< "    imageStore(u_imageOut0, out_pos, texture(u_imageIn0, in_pos0));\n"
2877 					<< "\n"
2878 					<< pos1Definitions[imageTypeIndex]
2879 					<< "    imageStore(u_imageOut1, out_pos, texture(u_imageIn1, in_pos1));\n"
2880 					<< "}\n";
2881 
2882 				programCollection.glslSources.add("frag_verify") << glu::FragmentSource(src.str());
2883 			}
2884 
2885 			break;
2886 		}
2887 
2888 		default:
2889 			DE_ASSERT(false);
2890 	}
2891 }
2892 
2893 void TexelViewCompatibleCase::checkSupport (Context& context) const
2894 {
2895 	const VkPhysicalDevice			physicalDevice			= context.getPhysicalDevice();
2896 	const InstanceInterface&		vk						= context.getInstanceInterface();
2897 
2898 	context.requireDeviceFunctionality("VK_KHR_maintenance2");
2899 
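	// Both the uncompressed view format and the compressed format must be creatable with the requested usage and optimal tiling.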
2900 	{
2901 		VkImageFormatProperties imageFormatProperties;
2902 
2903 		if (vk.getPhysicalDeviceImageFormatProperties(physicalDevice, m_parameters.formatUncompressed,
2904 													  mapImageType(m_parameters.imageType), VK_IMAGE_TILING_OPTIMAL,
2905 													  m_parameters.uncompressedImageUsage, 0u, &imageFormatProperties) == VK_ERROR_FORMAT_NOT_SUPPORTED)
2906 			TCU_THROW(NotSupportedError, "Operation not supported with this image format");
2907 
2908 		if (VK_ERROR_FORMAT_NOT_SUPPORTED == vk.getPhysicalDeviceImageFormatProperties(physicalDevice, m_parameters.formatCompressed,
2909 												mapImageType(m_parameters.imageType), VK_IMAGE_TILING_OPTIMAL,
2910 												m_parameters.compressedImageUsage,
2911 												VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT | VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT | VK_IMAGE_CREATE_EXTENDED_USAGE_BIT,
2912 												&imageFormatProperties))
2913 			TCU_THROW(NotSupportedError, "Operation not supported with this image format");
2914 	}
2915 
2916 	{
2917 		const VkPhysicalDeviceFeatures	physicalDeviceFeatures	= getPhysicalDeviceFeatures(vk, physicalDevice);
2918 
2919 		if (deInRange32(m_parameters.formatCompressed, VK_FORMAT_BC1_RGB_UNORM_BLOCK, VK_FORMAT_BC7_SRGB_BLOCK) &&
2920 			!physicalDeviceFeatures.textureCompressionBC)
2921 			TCU_THROW(NotSupportedError, "textureCompressionBC not supported");
2922 
2923 		if (deInRange32(m_parameters.formatCompressed, VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK, VK_FORMAT_EAC_R11G11_SNORM_BLOCK) &&
2924 			!physicalDeviceFeatures.textureCompressionETC2)
2925 			TCU_THROW(NotSupportedError, "textureCompressionETC2 not supported");
2926 
2927 		if (m_parameters.formatIsASTC &&
2928 			!physicalDeviceFeatures.textureCompressionASTC_LDR)
2929 			TCU_THROW(NotSupportedError, "textureCompressionASTC_LDR not supported");
2930 
2931 		if (m_parameters.uncompressedImageUsage & VK_IMAGE_USAGE_STORAGE_BIT)
2932 		{
2933 			const VkFormatProperties p = getPhysicalDeviceFormatProperties(vk, physicalDevice, m_parameters.formatUncompressed);
2934 			if ((p.optimalTilingFeatures & VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT) == 0)
2935 				TCU_THROW(NotSupportedError, "Storage view format not supported");
2936 		}
2937 	}
2938 }
2939 
2940 TestInstance* TexelViewCompatibleCase::createInstance (Context& context) const
2941 {
2942 	if (!m_parameters.useMipmaps)
2943 		DE_ASSERT(getNumLayers(m_parameters.imageType, m_parameters.size) == 1u);
2944 
2945 	DE_ASSERT(getLayerSize(m_parameters.imageType, m_parameters.size).x() > 0u);
2946 	DE_ASSERT(getLayerSize(m_parameters.imageType, m_parameters.size).y() > 0u);
2947 
2948 	switch (m_parameters.shader)
2949 	{
2950 		case SHADER_TYPE_COMPUTE:
2951 		{
2952 			switch (m_parameters.operation)
2953 			{
2954 				case OPERATION_IMAGE_LOAD:
2955 				case OPERATION_TEXEL_FETCH:
2956 				case OPERATION_TEXTURE:
2957 					return new BasicComputeTestInstance(context, m_parameters);
2958 				case OPERATION_IMAGE_STORE:
2959 					return new ImageStoreComputeTestInstance(context, m_parameters);
2960 				default:
2961 					TCU_THROW(InternalError, "Impossible");
2962 			}
2963 		}
2964 
2965 		case SHADER_TYPE_FRAGMENT:
2966 		{
2967 			switch (m_parameters.operation)
2968 			{
2969 				case OPERATION_ATTACHMENT_READ:
2970 				case OPERATION_ATTACHMENT_WRITE:
2971 					return new GraphicsAttachmentsTestInstance(context, m_parameters);
2972 
2973 				case OPERATION_TEXTURE_READ:
2974 				case OPERATION_TEXTURE_WRITE:
2975 					return new GraphicsTextureTestInstance(context, m_parameters);
2976 
2977 				default:
2978 					TCU_THROW(InternalError, "Impossible");
2979 			}
2980 		}
2981 
2982 		default:
2983 			TCU_THROW(InternalError, "Impossible");
2984 	}
2985 }
2986 
2987 } // anonymous namespace
2988 
2989 static tcu::UVec3 getUnniceResolution (const VkFormat format, const deUint32 layers)
2990 {
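	// Pick an "unnice" base resolution from a table indexed by the format's block width/height, scaled so both axes yield the same number of mip levels.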
2991 	const deUint32	unniceMipmapTextureSize[]	= { 1, 1, 1, 8, 22, 48, 117, 275, 604, 208, 611, 274, 1211 };
2992 	const deUint32	baseTextureWidth			= unniceMipmapTextureSize[getBlockWidth(format)];
2993 	const deUint32	baseTextureHeight			= unniceMipmapTextureSize[getBlockHeight(format)];
2994 	const deUint32	baseTextureWidthLevels		= deLog2Floor32(baseTextureWidth);
2995 	const deUint32	baseTextureHeightLevels		= deLog2Floor32(baseTextureHeight);
2996 	const deUint32	widthMultiplier				= (baseTextureHeightLevels > baseTextureWidthLevels) ? 1u << (baseTextureHeightLevels - baseTextureWidthLevels) : 1u;
2997 	const deUint32	heightMultiplier			= (baseTextureWidthLevels > baseTextureHeightLevels) ? 1u << (baseTextureWidthLevels - baseTextureHeightLevels) : 1u;
2998 	const deUint32	width						= baseTextureWidth * widthMultiplier;
2999 	const deUint32	height						= baseTextureHeight * heightMultiplier;
3000 
3001 	// Number of mip levels should be the same on both axes
3002 	DE_ASSERT(deLog2Floor32(width) == deLog2Floor32(height));
3003 
3004 	return tcu::UVec3(width, height, layers);
3005 }
3006 
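// Builds the test hierarchy texel_view_compatible.<compute|graphic>.<basic|extended>.<image type>.<operation>.<compressed format>.<uncompressed format>.
// Assuming getFormatShortString() returns the lower-case format name without the VK_FORMAT_ prefix,
// a leaf case would look roughly like texel_view_compatible.compute.basic.2d_image.image_load.bc1_rgb_unorm_block.r32g32_uint.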
3007 tcu::TestCaseGroup* createImageCompressionTranscodingTests (tcu::TestContext& testCtx)
3008 {
3009 	struct FormatsArray
3010 	{
3011 		const VkFormat*	formats;
3012 		deUint32		count;
3013 	};
3014 
3015 	const bool					mipmapness[]									=
3016 	{
3017 		false,
3018 		true,
3019 	};
3020 
3021 	const std::string			pipelineName[SHADER_TYPE_LAST]					=
3022 	{
3023 		"compute",
3024 		"graphic",
3025 	};
3026 
3027 	const std::string			mipmapnessName[DE_LENGTH_OF_ARRAY(mipmapness)]	=
3028 	{
3029 		"basic",
3030 		"extended",
3031 	};
3032 
3033 	const std::string			operationName[OPERATION_LAST]					=
3034 	{
3035 		"image_load",
3036 		"texel_fetch",
3037 		"texture",
3038 		"image_store",
3039 		"attachment_read",
3040 		"attachment_write",
3041 		"texture_read",
3042 		"texture_write",
3043 	};
3044 
3045 	struct ImageTypeName
3046 	{
3047 		ImageType		type;
3048 		std::string		name;
3049 	};
3050 	ImageTypeName imageTypes[] =
3051 	{
3052 		{ IMAGE_TYPE_1D, "1d_image" },
3053 		{ IMAGE_TYPE_2D, "2d_image" },
3054 		{ IMAGE_TYPE_3D, "3d_image" },
3055 	};
3056 
3057 	const VkImageUsageFlags		baseImageUsageFlagSet							= VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
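	// The three usage tables below are indexed by the Operation enum (the trailing comments
	// name the operation), so their entries must stay in the same order as the enum values.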
3058 	const VkImageUsageFlags		compressedImageUsageFlags[OPERATION_LAST]		=
3059 	{
3060 		baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_STORAGE_BIT),											// "image_load"
3061 		baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_STORAGE_BIT),				// "texel_fetch"
3062 		baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_STORAGE_BIT),				// "texture"
3063 		baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_STORAGE_BIT),				// "image_store"
3064 		baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT | VK_IMAGE_USAGE_SAMPLED_BIT),	// "attachment_read"
3065 		baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT),	// "attachment_write"
3066 		baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_SAMPLED_BIT),											// "texture_read"
3067 		baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_STORAGE_BIT),				// "texture_write"
3068 	};
3069 
3070 	const VkImageUsageFlags		compressedImageViewUsageFlags[OPERATION_LAST]	=
3071 	{
3072 		compressedImageUsageFlags[0],																									//"image_load"
3073 		compressedImageUsageFlags[1],																									//"texel_fetch"
3074 		compressedImageUsageFlags[2],																									//"texture"
3075 		compressedImageUsageFlags[3],																									//"image_store"
3076 		compressedImageUsageFlags[4],																									//"attachment_read"
3077 		compressedImageUsageFlags[5] | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,																//"attachment_write"
3078 		compressedImageUsageFlags[6],																									//"texture_read"
3079 		compressedImageUsageFlags[7],																									//"texture_write"
3080 	};
3081 
3082 	const VkImageUsageFlags		uncompressedImageUsageFlags[OPERATION_LAST]		=
3083 	{
3084 		baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_STORAGE_BIT),											//"image_load"
3085 		baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_STORAGE_BIT),				//"texel_fetch"
3086 		baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_STORAGE_BIT),				//"texture"
3087 		baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_STORAGE_BIT),				//"image_store"
3088 		baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_SAMPLED_BIT),	//"attachment_read"
3089 		baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT),									//"attachment_write"
3090 		baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_STORAGE_BIT | VK_IMAGE_USAGE_SAMPLED_BIT),				//"texture_read"
3091 		baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_SAMPLED_BIT),											//"texture_write"
3092 	};
3093 
3094 	const VkFormat				compressedFormats64bit[]						=
3095 	{
3096 		VK_FORMAT_BC1_RGB_UNORM_BLOCK,
3097 		VK_FORMAT_BC1_RGB_SRGB_BLOCK,
3098 		VK_FORMAT_BC1_RGBA_UNORM_BLOCK,
3099 		VK_FORMAT_BC1_RGBA_SRGB_BLOCK,
3100 		VK_FORMAT_BC4_UNORM_BLOCK,
3101 		VK_FORMAT_BC4_SNORM_BLOCK,
3102 		VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK,
3103 		VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK,
3104 		VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK,
3105 		VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK,
3106 		VK_FORMAT_EAC_R11_UNORM_BLOCK,
3107 		VK_FORMAT_EAC_R11_SNORM_BLOCK,
3108 	};
3109 
3110 	const VkFormat				compressedFormats128bit[]						=
3111 	{
3112 		VK_FORMAT_BC2_UNORM_BLOCK,
3113 		VK_FORMAT_BC2_SRGB_BLOCK,
3114 		VK_FORMAT_BC3_UNORM_BLOCK,
3115 		VK_FORMAT_BC3_SRGB_BLOCK,
3116 		VK_FORMAT_BC5_UNORM_BLOCK,
3117 		VK_FORMAT_BC5_SNORM_BLOCK,
3118 		VK_FORMAT_BC6H_UFLOAT_BLOCK,
3119 		VK_FORMAT_BC6H_SFLOAT_BLOCK,
3120 		VK_FORMAT_BC7_UNORM_BLOCK,
3121 		VK_FORMAT_BC7_SRGB_BLOCK,
3122 		VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK,
3123 		VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK,
3124 		VK_FORMAT_EAC_R11G11_UNORM_BLOCK,
3125 		VK_FORMAT_EAC_R11G11_SNORM_BLOCK,
3126 		VK_FORMAT_ASTC_4x4_UNORM_BLOCK,
3127 		VK_FORMAT_ASTC_4x4_SRGB_BLOCK,
3128 		VK_FORMAT_ASTC_5x4_UNORM_BLOCK,
3129 		VK_FORMAT_ASTC_5x4_SRGB_BLOCK,
3130 		VK_FORMAT_ASTC_5x5_UNORM_BLOCK,
3131 		VK_FORMAT_ASTC_5x5_SRGB_BLOCK,
3132 		VK_FORMAT_ASTC_6x5_UNORM_BLOCK,
3133 		VK_FORMAT_ASTC_6x5_SRGB_BLOCK,
3134 		VK_FORMAT_ASTC_6x6_UNORM_BLOCK,
3135 		VK_FORMAT_ASTC_6x6_SRGB_BLOCK,
3136 		VK_FORMAT_ASTC_8x5_UNORM_BLOCK,
3137 		VK_FORMAT_ASTC_8x5_SRGB_BLOCK,
3138 		VK_FORMAT_ASTC_8x6_UNORM_BLOCK,
3139 		VK_FORMAT_ASTC_8x6_SRGB_BLOCK,
3140 		VK_FORMAT_ASTC_8x8_UNORM_BLOCK,
3141 		VK_FORMAT_ASTC_8x8_SRGB_BLOCK,
3142 		VK_FORMAT_ASTC_10x5_UNORM_BLOCK,
3143 		VK_FORMAT_ASTC_10x5_SRGB_BLOCK,
3144 		VK_FORMAT_ASTC_10x6_UNORM_BLOCK,
3145 		VK_FORMAT_ASTC_10x6_SRGB_BLOCK,
3146 		VK_FORMAT_ASTC_10x8_UNORM_BLOCK,
3147 		VK_FORMAT_ASTC_10x8_SRGB_BLOCK,
3148 		VK_FORMAT_ASTC_10x10_UNORM_BLOCK,
3149 		VK_FORMAT_ASTC_10x10_SRGB_BLOCK,
3150 		VK_FORMAT_ASTC_12x10_UNORM_BLOCK,
3151 		VK_FORMAT_ASTC_12x10_SRGB_BLOCK,
3152 		VK_FORMAT_ASTC_12x12_UNORM_BLOCK,
3153 		VK_FORMAT_ASTC_12x12_SRGB_BLOCK,
3154 	};
3155 
3156 	const VkFormat				uncompressedFormats64bit[]						=
3157 	{
3158 		VK_FORMAT_R16G16B16A16_UNORM,
3159 		VK_FORMAT_R16G16B16A16_SNORM,
3160 		VK_FORMAT_R16G16B16A16_USCALED,
3161 		VK_FORMAT_R16G16B16A16_SSCALED,
3162 		VK_FORMAT_R16G16B16A16_UINT,
3163 		VK_FORMAT_R16G16B16A16_SINT,
3164 		//VK_FORMAT_R16G16B16A16_SFLOAT,	removed as float views can't preserve NAN/INF/Denorm values
3165 		VK_FORMAT_R32G32_UINT,
3166 		VK_FORMAT_R32G32_SINT,
3167 		//VK_FORMAT_R32G32_SFLOAT,			removed as float views can't preserve NAN/INF/Denorm values
3168 		//VK_FORMAT_R64_UINT,				removed from the test as it cannot be used
3169 		//VK_FORMAT_R64_SINT,				removed from the test as it cannot be used
3170 		//VK_FORMAT_R64_SFLOAT,				removed from the test as it cannot be used
3171 	};
3172 
3173 	const VkFormat				uncompressedFormats128bit[]						=
3174 	{
3175 		VK_FORMAT_R32G32B32A32_UINT,
3176 		VK_FORMAT_R32G32B32A32_SINT,
3177 		//VK_FORMAT_R32G32B32A32_SFLOAT,	removed as float views can't preserve NAN/INF/Denorm values
3178 		//VK_FORMAT_R64G64_UINT,			removed from the test as it cannot be used
3179 		//VK_FORMAT_R64G64_SINT,			removed from the test as it cannot be used
3180 		//VK_FORMAT_R64G64_SFLOAT,			removed from the test as it cannot be used
3181 	};
3182 
3183 	const FormatsArray			formatsCompressedSets[]							=
3184 	{
3185 		{
3186 			compressedFormats64bit,
3187 			DE_LENGTH_OF_ARRAY(compressedFormats64bit)
3188 		},
3189 		{
3190 			compressedFormats128bit,
3191 			DE_LENGTH_OF_ARRAY(compressedFormats128bit)
3192 		},
3193 	};
3194 
3195 	// Uncompressed formats - floating point formats should not be used in these
3196 	// tests as they cannot be relied upon to preserve all possible values in the
3197 	// underlying texture data. Refer to the note under the 'VkImageViewCreateInfo'
3198 	// section of the specification.
3199 	const FormatsArray			formatsUncompressedSets[]						=
3200 	{
3201 		{
3202 			uncompressedFormats64bit,
3203 			DE_LENGTH_OF_ARRAY(uncompressedFormats64bit)
3204 		},
3205 		{
3206 			uncompressedFormats128bit,
3207 			DE_LENGTH_OF_ARRAY(uncompressedFormats128bit)
3208 		},
3209 	};
3210 
3211 	DE_ASSERT(DE_LENGTH_OF_ARRAY(formatsCompressedSets) == DE_LENGTH_OF_ARRAY(formatsUncompressedSets));
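	// Compressed and uncompressed sets are paired index-wise by texel block size: a compressed
	// format may only be viewed through an uncompressed format whose texel size matches its
	// block size (64-bit with 64-bit, 128-bit with 128-bit), as required for images created
	// with VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT.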
3212 
3213 	MovePtr<tcu::TestCaseGroup>	texelViewCompatibleTests							(new tcu::TestCaseGroup(testCtx, "texel_view_compatible"));
3214 
3215 	for (int shaderType = SHADER_TYPE_COMPUTE; shaderType < SHADER_TYPE_LAST; ++shaderType)
3216 	{
3217 		MovePtr<tcu::TestCaseGroup>	pipelineTypeGroup	(new tcu::TestCaseGroup(testCtx, pipelineName[shaderType].c_str()));
3218 
3219 		for (int mipmapTestNdx = 0; mipmapTestNdx < DE_LENGTH_OF_ARRAY(mipmapness); mipmapTestNdx++)
3220 		{
3221 			const bool mipmapTest = mipmapness[mipmapTestNdx];
3222 
3223 			MovePtr<tcu::TestCaseGroup>	mipmapTypeGroup	(new tcu::TestCaseGroup(testCtx, mipmapnessName[mipmapTestNdx].c_str()));
3224 
3225 			for (int imageTypeNdx = 0; imageTypeNdx < DE_LENGTH_OF_ARRAY(imageTypes); imageTypeNdx++)
3226 			{
3227 				MovePtr<tcu::TestCaseGroup> imageTypeGroup	(new tcu::TestCaseGroup(testCtx, imageTypes[imageTypeNdx].name.c_str()));
3228 				ImageType					imageType = imageTypes[imageTypeNdx].type;
3229 
3230 				for (int operationNdx = OPERATION_IMAGE_LOAD; operationNdx < OPERATION_LAST; ++operationNdx)
3231 				{
3232 					if (shaderType != SHADER_TYPE_FRAGMENT && deInRange32(operationNdx, OPERATION_ATTACHMENT_READ, OPERATION_TEXTURE_WRITE))
3233 						continue;
3234 
3235 					if (shaderType != SHADER_TYPE_COMPUTE && deInRange32(operationNdx, OPERATION_IMAGE_LOAD, OPERATION_IMAGE_STORE))
3236 						continue;
3237 
3238 					if (imageType == IMAGE_TYPE_3D &&
3239 						(operationNdx == OPERATION_ATTACHMENT_READ || operationNdx == OPERATION_ATTACHMENT_WRITE))
3240 						continue;
3241 
3242 					MovePtr<tcu::TestCaseGroup>	imageOperationGroup	(new tcu::TestCaseGroup(testCtx, operationName[operationNdx].c_str()));
3243 
3244 					deUint32 depth		= 1u + 2 * (imageType == IMAGE_TYPE_3D);
3245 					deUint32 imageCount	= 2u + (operationNdx == OPERATION_IMAGE_STORE);
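					// depth evaluates to 3 for 3D images and 1 otherwise; imageCount evaluates to 3 for
					// image_store (which presumably needs an extra destination image) and 2 for the rest.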
3246 
3247 					// Iterate through texel block bitness groups (64-bit and 128-bit)
3248 					for (deUint32 formatBitnessGroup = 0; formatBitnessGroup < DE_LENGTH_OF_ARRAY(formatsCompressedSets); ++formatBitnessGroup)
3249 					{
3250 						for (deUint32 formatCompressedNdx = 0; formatCompressedNdx < formatsCompressedSets[formatBitnessGroup].count; ++formatCompressedNdx)
3251 						{
3252 							const VkFormat				formatCompressed			= formatsCompressedSets[formatBitnessGroup].formats[formatCompressedNdx];
3253 							const std::string			compressedFormatGroupName	= getFormatShortString(formatCompressed);
3254 							MovePtr<tcu::TestCaseGroup>	compressedFormatGroup		(new tcu::TestCaseGroup(testCtx, compressedFormatGroupName.c_str()));
3255 
3256 							for (deUint32 formatUncompressedNdx = 0; formatUncompressedNdx < formatsUncompressedSets[formatBitnessGroup].count; ++formatUncompressedNdx)
3257 							{
3258 								const VkFormat			formatUncompressed			= formatsUncompressedSets[formatBitnessGroup].formats[formatUncompressedNdx];
3259 								const std::string		uncompressedFormatGroupName	= getFormatShortString(formatUncompressed);
3260 
3261 								const TestParameters	parameters					=
3262 								{
3263 									static_cast<Operation>(operationNdx),
3264 									static_cast<ShaderType>(shaderType),
3265 									mipmapTest ? getUnniceResolution(formatCompressed, 1u) : UVec3(64u, 64u, depth),
3266 									1u + 2u * mipmapTest * (imageType != IMAGE_TYPE_3D),		// 3 layers when mipmapTest is enabled and the image is not 3D, otherwise 1
3267 									imageType,
3268 									formatCompressed,
3269 									formatUncompressed,
3270 									imageCount,
3271 									compressedImageUsageFlags[operationNdx],
3272 									compressedImageViewUsageFlags[operationNdx],
3273 									uncompressedImageUsageFlags[operationNdx],
3274 									mipmapTest,
3275 									VK_FORMAT_R8G8B8A8_UNORM,
3276 									FormatIsASTC(formatCompressed)
3277 								};
3278 
3279 								compressedFormatGroup->addChild(new TexelViewCompatibleCase(testCtx, uncompressedFormatGroupName, parameters));
3280 							}
3281 
3282 							imageOperationGroup->addChild(compressedFormatGroup.release());
3283 						}
3284 					}
3285 
3286 					imageTypeGroup->addChild(imageOperationGroup.release());
3287 				}
3288 
3289 				mipmapTypeGroup->addChild(imageTypeGroup.release());
3290 			}
3291 
3292 			pipelineTypeGroup->addChild(mipmapTypeGroup.release());
3293 		}
3294 
3295 		texelViewCompatibleTests->addChild(pipelineTypeGroup.release());
3296 	}
3297 
3298 	return texelViewCompatibleTests.release();
3299 }
3300 
3301 } // image
3302 } // vkt
3303