• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /*------------------------------------------------------------------------
2  * Vulkan Conformance Tests
3  * ------------------------
4  *
5  * Copyright (c) 2017 The Khronos Group Inc.
6  *
7  * Licensed under the Apache License, Version 2.0 (the "License");
8  * you may not use this file except in compliance with the License.
9  * You may obtain a copy of the License at
10  *
11  *      http://www.apache.org/licenses/LICENSE-2.0
12  *
13  * Unless required by applicable law or agreed to in writing, software
14  * distributed under the License is distributed on an "AS IS" BASIS,
15  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16  * See the License for the specific language governing permissions and
17  * limitations under the License.
18  *
19  *//*!
20  * \file  vktImageCompressionTranscodingSupport.cpp
21  * \brief Compression transcoding support
22  *//*--------------------------------------------------------------------*/
23 
24 #include "vktImageCompressionTranscodingSupport.hpp"
25 
26 #include "deUniquePtr.hpp"
27 #include "deStringUtil.hpp"
28 #include "deSharedPtr.hpp"
29 #include "deRandom.hpp"
30 
31 #include "vktTestCaseUtil.hpp"
32 #include "vkPrograms.hpp"
33 #include "vkImageUtil.hpp"
34 #include "vktImageTestsUtil.hpp"
35 #include "vkBuilderUtil.hpp"
36 #include "vkRef.hpp"
37 #include "vkRefUtil.hpp"
38 #include "vkTypeUtil.hpp"
39 #include "vkQueryUtil.hpp"
40 
41 #include "tcuTextureUtil.hpp"
42 #include "tcuTexture.hpp"
43 #include "tcuCompressedTexture.hpp"
44 #include "tcuVectorType.hpp"
45 #include "tcuResource.hpp"
46 #include "tcuImageIO.hpp"
47 #include "tcuImageCompare.hpp"
48 #include "tcuTestLog.hpp"
49 #include "tcuRGBA.hpp"
50 #include "tcuSurface.hpp"
51 
52 #include <vector>
53 using namespace vk;
54 namespace vkt
55 {
56 namespace image
57 {
58 namespace
59 {
60 using std::string;
61 using std::vector;
62 using tcu::TestContext;
63 using tcu::TestStatus;
64 using tcu::UVec3;
65 using tcu::IVec3;
66 using tcu::CompressedTexFormat;
67 using tcu::CompressedTexture;
68 using tcu::Resource;
69 using tcu::Archive;
70 using tcu::ConstPixelBufferAccess;
71 using de::MovePtr;
72 using de::SharedPtr;
73 using de::Random;
74 
// Shared-pointer aliases so Vulkan handles and images can live in std::vector
// (Move/MovePtr themselves are move-only and cannot be copied into containers).
typedef SharedPtr<MovePtr<Image> >			ImageSp;
typedef SharedPtr<Move<VkImageView> >		ImageViewSp;
typedef SharedPtr<Move<VkDescriptorSet> >	SharedVkDescriptorSet;
78 
// Shader stage in which the transcoding operation is exercised.
enum ShaderType
{
	SHADER_TYPE_COMPUTE,
	SHADER_TYPE_FRAGMENT,
	SHADER_TYPE_LAST	// Count of valid values; not a real shader type.
};
85 
// Image access operation performed by the test shader on the transcoded view.
enum Operation
{
	OPERATION_IMAGE_LOAD,
	OPERATION_TEXEL_FETCH,
	OPERATION_TEXTURE,
	OPERATION_IMAGE_STORE,
	OPERATION_ATTACHMENT_READ,
	OPERATION_ATTACHMENT_WRITE,
	OPERATION_TEXTURE_READ,
	OPERATION_TEXTURE_WRITE,
	OPERATION_LAST	// Count of valid values; not a real operation.
};
98 
// Full description of a single transcoding test case.
struct TestParameters
{
	Operation			operation;					// Access operation under test.
	ShaderType			shader;						// Shader stage performing the operation.
	UVec3				size;						// Base image size; z is used as the layer count (see getLayerCount()).
	ImageType			imageType;					// 2D, 2D array, etc.
	VkFormat			formatCompressed;			// Compressed format of the source image.
	VkFormat			formatUncompressed;			// Uncompressed format used to view/reinterpret the data.
	deUint32			imagesCount;				// Number of image slots used; the last one holds the result.
	VkImageUsageFlags	compressedImageUsage;		// Usage flags for the compressed image.
	VkImageUsageFlags	compressedImageViewUsage;	// Usage flags for views of the compressed image.
	VkImageUsageFlags	uncompressedImageUsage;		// Usage flags for the uncompressed image(s).
	bool				useMipmaps;					// Whether to exercise a mip chain instead of a single level.
	VkFormat			formatForVerify;			// Format used by the verification pass (presumably decompress/compare; see decompressImage).
};
114 
// Wrap a move-only Vulkan handle in a copyable shared pointer so it can be
// stored in standard containers.
template<typename T>
inline SharedPtr<Move<T> > makeVkSharedPtr (Move<T> move)
{
	return SharedPtr<Move<T> >(new Move<T>(move));
}
120 
// Same as above, but for de::MovePtr-owned objects (e.g. Image).
template<typename T>
inline SharedPtr<MovePtr<T> > makeVkSharedPtr (MovePtr<T> movePtr)
{
	return SharedPtr<MovePtr<T> >(new MovePtr<T>(movePtr));
}
126 
// Level/layer counts used when an image is neither mipmapped nor arrayed.
const deUint32 SINGLE_LEVEL = 1u;
const deUint32 SINGLE_LAYER = 1u;
129 
// Common base for the transcoding test instances: owns the test parameters,
// derives block/level/layer geometry from them, and provides reproducible
// test-data generation for the compressed image contents.
class BasicTranscodingTestInstance : public TestInstance
{
public:
							BasicTranscodingTestInstance	(Context&						context,
															 const TestParameters&			parameters);
	virtual TestStatus		iterate							(void) = 0;
protected:
	// Fills 'toFill' with deterministic data for the given format/layer/level,
	// patched so the uncompressed view format can round-trip every value.
	void					generateData					(deUint8*						toFill,
															 const size_t					size,
															 const VkFormat					format,
															 const deUint32					layer = 0u,
															 const deUint32					level = 0u);
	deUint32				getLevelCount					();
	deUint32				getLayerCount					();
	UVec3					getLayerDims					();
	// Per-level sizes of the uncompressed mip chain for 'baseSize'.
	vector<UVec3>			getMipLevelSizes				(UVec3							baseSize);
	// Per-level sizes in compressed blocks for the given uncompressed sizes.
	vector<UVec3>			getCompressedMipLevelSizes		(const VkFormat					compressedFormat,
															 const vector<UVec3>&			uncompressedSizes);

	const TestParameters	m_parameters;
	const deUint32			m_blockWidth;	// Compressed block width in texels.
	const deUint32			m_blockHeight;	// Compressed block height in texels.
	const deUint32			m_levelCount;	// Usable mip levels (see findMipMapLevelCount()).
	const UVec3				m_layerSize;	// Size of a single layer.

private:
	deUint32				findMipMapLevelCount			();
};
158 
// Compute how many mip levels this test can use for the configured base size.
// Levels whose dimensions would not strictly exceed the compressed block size
// are excluded, so the chain may be shorter than a full mip pyramid.
deUint32 BasicTranscodingTestInstance::findMipMapLevelCount ()
{
	deUint32 levelCount = 1;

	// We cannot use mipmap levels which have resolution below block size.
	// Reduce number of mipmap levels
	if (m_parameters.useMipmaps)
	{
		deUint32 w = m_parameters.size.x();
		deUint32 h = m_parameters.size.y();

		DE_ASSERT(m_blockWidth > 0u && m_blockHeight > 0u);

		// Halve the dimensions; count a level only while both still exceed
		// the block dimensions *after* the shift.
		while (w > m_blockWidth && h > m_blockHeight)
		{
			w >>= 1;
			h >>= 1;

			if (w > m_blockWidth && h > m_blockHeight)
				levelCount++;
		}

		// The smallest selected level must still hold at least one full block.
		DE_ASSERT((m_parameters.size.x() >> (levelCount - 1u)) >= m_blockWidth);
		DE_ASSERT((m_parameters.size.y() >> (levelCount - 1u)) >= m_blockHeight);
	}

	return levelCount;
}
187 
BasicTranscodingTestInstance::BasicTranscodingTestInstance (Context& context, const TestParameters& parameters)
	: TestInstance	(context)
	, m_parameters	(parameters)
	// NOTE: member-initialization order matters here — findMipMapLevelCount()
	// reads m_blockWidth and m_blockHeight, so m_levelCount must come after them.
	, m_blockWidth	(getBlockWidth(m_parameters.formatCompressed))
	, m_blockHeight	(getBlockHeight(m_parameters.formatCompressed))
	, m_levelCount	(findMipMapLevelCount())
	, m_layerSize	(getLayerSize(m_parameters.imageType, m_parameters.size))
{
	// The tests assume width and height share the same log2 (square-ish sizes).
	DE_ASSERT(deLog2Floor32(m_parameters.size.x()) == deLog2Floor32(m_parameters.size.y()));
}
198 
// Number of usable mip levels, as computed at construction time.
deUint32 BasicTranscodingTestInstance::getLevelCount()
{
	return m_levelCount;
}
203 
// Layer count; the z component of the configured size doubles as the layer count.
deUint32 BasicTranscodingTestInstance::getLayerCount()
{
	return m_parameters.size.z();
}
208 
// Dimensions of a single layer (as derived from imageType and size).
UVec3 BasicTranscodingTestInstance::getLayerDims()
{
	return m_layerSize;
}
213 
getMipLevelSizes(UVec3 baseSize)214 vector<UVec3> BasicTranscodingTestInstance::getMipLevelSizes (UVec3 baseSize)
215 {
216 	vector<UVec3>	levelSizes;
217 	const deUint32	levelCount = getLevelCount();
218 
219 	DE_ASSERT(m_parameters.imageType == IMAGE_TYPE_2D || m_parameters.imageType == IMAGE_TYPE_2D_ARRAY);
220 
221 	baseSize.z() = 1u;
222 
223 	levelSizes.push_back(baseSize);
224 
225 	while (levelSizes.size() < levelCount && (baseSize.x() != 1 || baseSize.y() != 1))
226 	{
227 		baseSize.x() = deMax32(baseSize.x() >> 1, 1);
228 		baseSize.y() = deMax32(baseSize.y() >> 1, 1);
229 		levelSizes.push_back(baseSize);
230 	}
231 
232 	DE_ASSERT(levelSizes.size() == getLevelCount());
233 
234 	return levelSizes;
235 }
236 
// Convert per-level uncompressed sizes into per-level sizes expressed in
// compressed blocks for the given compressed format.
vector<UVec3> BasicTranscodingTestInstance::getCompressedMipLevelSizes (const VkFormat compressedFormat, const vector<UVec3>& uncompressedSizes)
{
	vector<UVec3> levelSizes;
	levelSizes.reserve(uncompressedSizes.size());

	for (size_t levelNdx = 0; levelNdx < uncompressedSizes.size(); ++levelNdx)
		levelSizes.push_back(getCompressedImageResolutionInBlocks(compressedFormat, uncompressedSizes[levelNdx]));

	return levelSizes;
}
247 
// Fill 'toFill' with 'size' bytes of reproducible test data.
// For layer 0 / level 0 (when there is room) the first two pattern-sized
// chunks carry deliberately nasty bit patterns — 64- and 32-bit IEEE INF/NaN
// encodings and alternating bits — once reversed and once verbatim. The rest
// is PRNG data seeded from (layer, level, format) so every subresource is
// unique but deterministic. Finally, byte patterns that the uncompressed view
// format cannot round-trip (SNORM -128, half/float INF/NaN, float denorms)
// are patched out so a write/read round trip compares equal.
void BasicTranscodingTestInstance::generateData (deUint8*		toFill,
												 const size_t	size,
												 const VkFormat format,
												 const deUint32 layer,
												 const deUint32 level)
{
	const deUint8 pattern[] =
	{
		// 64-bit values
		0x11, 0x11, 0x11, 0x11, 0x22, 0x22, 0x22, 0x22,
		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01,
		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00,
		0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00,
		0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,
		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF,
		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0x00,
		0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0x00, 0x00,
		0x00, 0x00, 0x00, 0x00, 0xFF, 0x00, 0x00, 0x00,
		0x7F, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,		// Positive infinity
		0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,		// Negative infinity
		0x7F, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01,		// Start of a signalling NaN (NANS)
		0x7F, 0xF7, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,		// End of a signalling NaN (NANS)
		0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01,		// Start of a signalling NaN (NANS)
		0xFF, 0xF7, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,		// End of a signalling NaN (NANS)
		0x7F, 0xF8, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,		// Start of a quiet NaN (NANQ)
		0x7F, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,		// End of of a quiet NaN (NANQ)
		0xFF, 0xF8, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,		// Start of a quiet NaN (NANQ)
		0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,		// End of a quiet NaN (NANQ)
		// 32-bit values
		0x7F, 0x80, 0x00, 0x00,								// Positive infinity
		0xFF, 0x80, 0x00, 0x00,								// Negative infinity
		0x7F, 0x80, 0x00, 0x01,								// Start of a signalling NaN (NANS)
		0x7F, 0xBF, 0xFF, 0xFF,								// End of a signalling NaN (NANS)
		0xFF, 0x80, 0x00, 0x01,								// Start of a signalling NaN (NANS)
		0xFF, 0xBF, 0xFF, 0xFF,								// End of a signalling NaN (NANS)
		0x7F, 0xC0, 0x00, 0x00,								// Start of a quiet NaN (NANQ)
		0x7F, 0xFF, 0xFF, 0xFF,								// End of of a quiet NaN (NANQ)
		0xFF, 0xC0, 0x00, 0x00,								// Start of a quiet NaN (NANQ)
		0xFF, 0xFF, 0xFF, 0xFF,								// End of a quiet NaN (NANQ)
		0xAA, 0xAA, 0xAA, 0xAA,
		0x55, 0x55, 0x55, 0x55,
	};

	deUint8*	start		= toFill;
	size_t		sizeToRnd	= size;

	// Pattern part
	if (layer == 0 && level == 0 && size >= 2 * sizeof(pattern))
	{
		// Rotated pattern
		for (size_t i = 0; i < sizeof(pattern); i++)
			start[sizeof(pattern) - i - 1] = pattern[i];

		start		+= sizeof(pattern);
		sizeToRnd	-= sizeof(pattern);

		// Direct pattern
		deMemcpy(start, pattern, sizeof(pattern));

		start		+= sizeof(pattern);
		sizeToRnd	-= sizeof(pattern);
	}

	// Random part
	{
		DE_ASSERT(sizeToRnd % sizeof(deUint32) == 0);

		deUint32*	start32		= reinterpret_cast<deUint32*>(start);
		size_t		sizeToRnd32	= sizeToRnd / sizeof(deUint32);
		// Seed combines layer, level and format so each subresource differs.
		deUint32	seed		= (layer << 24) ^ (level << 16) ^ static_cast<deUint32>(format);
		Random		rnd			(seed);

		for (size_t i = 0; i < sizeToRnd32; i++)
			start32[i] = rnd.getUint32();
	}

	{
		// Remove certain values that may not be preserved based on the uncompressed view format
		if (isSnormFormat(m_parameters.formatUncompressed))
		{
			for (size_t i = 0; i < size; i += 2)
			{
				// SNORM fix: due to write operation in SNORM format
				// replaces 0x00 0x80 to 0x01 0x80
				if (toFill[i] == 0x00 && toFill[i+1] == 0x80)
					toFill[i+1] = 0x81;
			}
		}
		else if (isFloatFormat(m_parameters.formatUncompressed))
		{
			tcu::TextureFormat textureFormat = mapVkFormat(m_parameters.formatUncompressed);

			if (textureFormat.type == tcu::TextureFormat::HALF_FLOAT)
			{
				for (size_t i = 0; i < size; i += 2)
				{
					// HALF_FLOAT fix: remove INF and NaN
					// (all-ones half-float exponent in byte 1 means INF/NaN)
					if ((toFill[i+1] & 0x7C) == 0x7C)
						toFill[i+1] = 0x00;
				}
			}
			else if (textureFormat.type == tcu::TextureFormat::FLOAT)
			{
				// NOTE(review): this first loop strides 4 bytes but applies the
				// half-float INF/NaN mask to byte i+1 — presumably to also scrub
				// the low 16-bit lane of each word; confirm intent (the original
				// comment labelled it "HALF_FLOAT fix" inside the FLOAT branch).
				for (size_t i = 0; i < size; i += 4)
				{
					if ((toFill[i+1] & 0x7C) == 0x7C)
						toFill[i+1] = 0x00;
				}

				for (size_t i = 0; i < size; i += 4)
				{
					// FLOAT fix: remove INF, NaN, and denorm
					// Little endian fix
					if (((toFill[i+3] & 0x7F) == 0x7F && (toFill[i+2] & 0x80) == 0x80) || ((toFill[i+3] & 0x7F) == 0x00 && (toFill[i+2] & 0x80) == 0x00))
						toFill[i+3] = 0x01;
					// Big endian fix
					if (((toFill[i+0] & 0x7F) == 0x7F && (toFill[i+1] & 0x80) == 0x80) || ((toFill[i+0] & 0x7F) == 0x00 && (toFill[i+1] & 0x80) == 0x00))
						toFill[i+0] = 0x01;
				}
			}
		}
	}
}
373 
// Compute-shader variant of the transcoding test: uploads compressed data,
// runs a compute shader that reads/writes through uncompressed views, then
// compares and finally decompresses for verification.
class BasicComputeTestInstance : public BasicTranscodingTestInstance
{
public:
					BasicComputeTestInstance	(Context&							context,
												const TestParameters&				parameters);
	TestStatus		iterate						(void);
protected:
	// Owning bundle of images, image views and their create-infos for one
	// logical image slot (e.g. slot 0 = compressed, last slot = result).
	struct ImageData
	{
		deUint32			getImagesCount		(void)									{ return static_cast<deUint32>(images.size());		}
		deUint32			getImageViewCount	(void)									{ return static_cast<deUint32>(imagesViews.size());	}
		deUint32			getImageInfoCount	(void)									{ return static_cast<deUint32>(imagesInfos.size());	}
		VkImage				getImage			(const deUint32				ndx)		{ return **images[ndx]->get();						}
		VkImageView			getImageView		(const deUint32				ndx)		{ return **imagesViews[ndx];						}
		VkImageCreateInfo	getImageInfo		(const deUint32				ndx)		{ return imagesInfos[ndx];							}
		void				addImage			(MovePtr<Image>				image)		{ images.push_back(makeVkSharedPtr(image));			}
		void				addImageView		(Move<VkImageView>			imageView)	{ imagesViews.push_back(makeVkSharedPtr(imageView));}
		void				addImageInfo		(const VkImageCreateInfo	imageInfo)	{ imagesInfos.push_back(imageInfo);					}
		void				resetViews			()										{ imagesViews.clear();								}
	private:
		vector<ImageSp>				images;
		vector<ImageViewSp>			imagesViews;
		vector<VkImageCreateInfo>	imagesInfos;
	};
	// Upload m_data into every image of 'imageData' via a staging buffer.
	void			copyDataToImage				(const VkCommandBuffer&				cmdBuffer,
												 ImageData&							imageData,
												 const vector<UVec3>&				mipMapSizes,
												 const bool							isCompressed);
	// Bind pipeline/descriptors and dispatch one compute job per image view.
	virtual void	executeShader				(const VkCommandBuffer&				cmdBuffer,
												 const VkDescriptorSetLayout&		descriptorSetLayout,
												 const VkDescriptorPool&			descriptorPool,
												vector<ImageData>&					imageData);
	// Read back one uncompressed image and compare it against m_data at 'offset'.
	bool			copyResultAndCompare		(const VkCommandBuffer&				cmdBuffer,
												 const VkImage&						uncompressed,
												 const VkDeviceSize					offset,
												 const UVec3&						size);
	void			descriptorSetUpdate			(VkDescriptorSet					descriptorSet,
												 const VkDescriptorImageInfo*		descriptorImageInfos);
	// Populate imageData with VkImageCreateInfos for a compressed or uncompressed slot.
	void			createImageInfos			(ImageData&							imageData,
												 const vector<UVec3>&				mipMapSizes,
												 const bool							isCompressed);
	// Decompress the result and verify it; returns false on mismatch.
	bool			decompressImage				(const VkCommandBuffer&				cmdBuffer,
												 vector<ImageData>&					imageData,
												 const vector<UVec3>&				mipMapSizes);
	vector<deUint8>	m_data;		// Generated source bytes for the compressed image.
};
420 
421 
// All state is derived in the base class; nothing extra to initialize here.
BasicComputeTestInstance::BasicComputeTestInstance (Context& context, const TestParameters& parameters)
	:BasicTranscodingTestInstance	(context, parameters)
{
}
426 
// Main test body: create the image slots (slot 0 compressed, the rest
// uncompressed), generate and upload source data, run the compute shader,
// then compare the result image(s) and finally decompress for verification.
TestStatus BasicComputeTestInstance::iterate (void)
{
	const DeviceInterface&					vk					= m_context.getDeviceInterface();
	const VkDevice							device				= m_context.getDevice();
	const deUint32							queueFamilyIndex	= m_context.getUniversalQueueFamilyIndex();
	Allocator&								allocator			= m_context.getDefaultAllocator();
	const Unique<VkCommandPool>				cmdPool				(createCommandPool(vk, device, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT, queueFamilyIndex));
	const Unique<VkCommandBuffer>			cmdBuffer			(allocateCommandBuffer(vk, device, *cmdPool, VK_COMMAND_BUFFER_LEVEL_PRIMARY));
	// Without mipmaps the "chain" is just the base size.
	const vector<UVec3>						mipMapSizes			= m_parameters.useMipmaps ? getMipLevelSizes (getLayerDims()) : vector<UVec3>(1, m_parameters.size);
	vector<ImageData>						imageData			(m_parameters.imagesCount);
	const deUint32							compressedNdx		= 0u;			// Slot 0 holds the compressed image.
	const deUint32							resultImageNdx		= m_parameters.imagesCount -1u;	// Last slot holds the result.

	// Create images and views for every slot.
	for (deUint32 imageNdx = 0u; imageNdx < m_parameters.imagesCount; ++imageNdx)
	{
		const bool isCompressed = compressedNdx == imageNdx ? true : false;
		createImageInfos(imageData[imageNdx], mipMapSizes, isCompressed);
		for (deUint32 infoNdx = 0u; infoNdx < imageData[imageNdx].getImageInfoCount(); ++infoNdx)
		{
			imageData[imageNdx].addImage(MovePtr<Image>(new Image(vk, device, allocator, imageData[imageNdx].getImageInfo(infoNdx), MemoryRequirement::Any)));
			if (isCompressed)
			{
				// The compressed image is viewed through the uncompressed
				// format; one view per (mip, layer) subresource.
				const VkImageViewUsageCreateInfoKHR	imageViewUsageKHR	=
				{
					VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO_KHR,				//VkStructureType		sType;
					DE_NULL,														//const void*			pNext;
					m_parameters.compressedImageUsage,								//VkImageUsageFlags		usage;
				};
				for (deUint32 mipNdx = 0u; mipNdx < mipMapSizes.size(); ++mipNdx)
				for (deUint32 layerNdx = 0u; layerNdx < getLayerCount(); ++layerNdx)
				{
					imageData[imageNdx].addImageView(makeImageView(vk, device, imageData[imageNdx].getImage(infoNdx),
														mapImageViewType(m_parameters.imageType), m_parameters.formatUncompressed,
														makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, mipNdx, 1u, layerNdx, 1u),
														&imageViewUsageKHR));
				}
			}
			else
			{
				// Uncompressed images get a single full view.
				imageData[imageNdx].addImageView(makeImageView(vk, device, imageData[imageNdx].getImage(infoNdx),
													mapImageViewType(m_parameters.imageType), m_parameters.formatUncompressed,
													makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 1u, 0u, 1u)));
			}
		}
	}

	// Size and generate the source data covering all mips and layers.
	{
		size_t size = 0ull;
		for(deUint32 mipNdx = 0u; mipNdx < mipMapSizes.size(); ++mipNdx)
		{
			size += static_cast<size_t>(getCompressedImageSizeInBytes(m_parameters.formatCompressed, mipMapSizes[mipNdx]) * getLayerCount());
		}
		m_data.resize(size);
		generateData (&m_data[0], m_data.size(), m_parameters.formatCompressed);
	}

	// Upload either to the compressed source (read tests) or to the
	// intermediate uncompressed image (store test).
	switch(m_parameters.operation)
	{
		case OPERATION_IMAGE_LOAD:
		case OPERATION_TEXEL_FETCH:
		case OPERATION_TEXTURE:
			copyDataToImage(*cmdBuffer, imageData[compressedNdx], mipMapSizes, true);
			break;
		case OPERATION_IMAGE_STORE:
			copyDataToImage(*cmdBuffer, imageData[1], mipMapSizes, false);
			break;
		default:
			DE_ASSERT(false);
			break;
	}

	{
		Move<VkDescriptorSetLayout>	descriptorSetLayout;
		Move<VkDescriptorPool>		descriptorPool;

		// One binding per image slot; sampled for the compressed input of
		// fetch/texture operations, storage otherwise.
		DescriptorSetLayoutBuilder	descriptorSetLayoutBuilder;
		DescriptorPoolBuilder		descriptorPoolBuilder;
		for (deUint32 imageNdx = 0u; imageNdx < m_parameters.imagesCount; ++imageNdx)
		{
			switch(m_parameters.operation)
			{
				case OPERATION_IMAGE_LOAD:
				case OPERATION_IMAGE_STORE:
					descriptorSetLayoutBuilder.addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, VK_SHADER_STAGE_COMPUTE_BIT);
					descriptorPoolBuilder.addType(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, imageData[0].getImageViewCount());
					break;
				case OPERATION_TEXEL_FETCH:
				case OPERATION_TEXTURE:
					descriptorSetLayoutBuilder.addSingleBinding((compressedNdx == imageNdx) ? VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER : VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, VK_SHADER_STAGE_COMPUTE_BIT);
					descriptorPoolBuilder.addType((compressedNdx == imageNdx) ? VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER : VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, imageData[0].getImageViewCount());
					break;
				default:
					DE_ASSERT(false);
					break;
			}
		}
		descriptorSetLayout	= descriptorSetLayoutBuilder.build(vk, device);
		descriptorPool		= descriptorPoolBuilder.build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, imageData[0].getImageViewCount());
		executeShader(*cmdBuffer, *descriptorSetLayout, *descriptorPool, imageData);

		// Compare each (mip, layer) of the result image against the source
		// bytes; 'offset' walks m_data in the same order it was generated.
		{
			VkDeviceSize offset = 0ull;
			for (deUint32 mipNdx = 0u; mipNdx < mipMapSizes.size(); ++mipNdx)
			for (deUint32 layerNdx = 0u; layerNdx < getLayerCount(); ++layerNdx)
			{
				const deUint32	imageNdx	= layerNdx + mipNdx * getLayerCount();
				const UVec3		size		= UVec3(imageData[resultImageNdx].getImageInfo(imageNdx).extent.width,
													imageData[resultImageNdx].getImageInfo(imageNdx).extent.height,
													imageData[resultImageNdx].getImageInfo(imageNdx).extent.depth);
				if (!copyResultAndCompare(*cmdBuffer, imageData[resultImageNdx].getImage(imageNdx), offset, size))
					return TestStatus::fail("Fail");
				offset += getCompressedImageSizeInBytes(m_parameters.formatCompressed, mipMapSizes[mipNdx]);
			}
		}
	};
	if (!decompressImage(*cmdBuffer, imageData, mipMapSizes))
			return TestStatus::fail("Fail");
	return TestStatus::pass("Pass");
}
546 
// Upload the whole of m_data into every image of 'imageData' through a
// host-visible staging buffer: one buffer-to-image copy per mip level,
// covering all array layers at once. 'isCompressed' selects whether the copy
// extents come from the compressed mip sizes or from the image create-info.
void BasicComputeTestInstance::copyDataToImage (const VkCommandBuffer&	cmdBuffer,
												ImageData&				imageData,
												const vector<UVec3>&	mipMapSizes,
												const bool				isCompressed)
{
	const DeviceInterface&		vk			= m_context.getDeviceInterface();
	const VkDevice				device		= m_context.getDevice();
	const VkQueue				queue		= m_context.getUniversalQueue();
	Allocator&					allocator	= m_context.getDefaultAllocator();

	Buffer						imageBuffer	(vk, device, allocator,
												makeBufferCreateInfo(m_data.size(), VK_BUFFER_USAGE_TRANSFER_SRC_BIT),
												MemoryRequirement::HostVisible);
	VkDeviceSize				offset		= 0ull;
	{
		// Stage the generated bytes and make them visible to the device.
		const Allocation& alloc = imageBuffer.getAllocation();
		deMemcpy(alloc.getHostPtr(), &m_data[0], m_data.size());
		flushMappedMemoryRange(vk, device, alloc.getMemory(), alloc.getOffset(), m_data.size());
	}

	beginCommandBuffer(vk, cmdBuffer);
	// Full subresource range of the (first) image: all mips, all layers.
	const VkImageSubresourceRange	subresourceRange		=
	{
		VK_IMAGE_ASPECT_COLOR_BIT,					//VkImageAspectFlags	aspectMask
		0u,											//deUint32				baseMipLevel
		imageData.getImageInfo(0u).mipLevels,		//deUint32				levelCount
		0u,											//deUint32				baseArrayLayer
		imageData.getImageInfo(0u).arrayLayers		//deUint32				layerCount
	};

	for (deUint32 imageNdx = 0u; imageNdx < imageData.getImagesCount(); ++imageNdx)
	{
		// Transition the destination image to TRANSFER_DST and make the host
		// writes to the staging buffer visible to the transfer stage.
		const VkImageMemoryBarrier		preCopyImageBarrier		= makeImageMemoryBarrier(
																	0u, VK_ACCESS_TRANSFER_WRITE_BIT,
																	VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
																	imageData.getImage(imageNdx), subresourceRange);

		const VkBufferMemoryBarrier		FlushHostCopyBarrier	= makeBufferMemoryBarrier(
																	VK_ACCESS_HOST_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
																	imageBuffer.get(), 0ull, m_data.size());

		vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT,
				(VkDependencyFlags)0, 0u, (const VkMemoryBarrier*)DE_NULL, 1u, &FlushHostCopyBarrier, 1u, &preCopyImageBarrier);

		for (deUint32 mipNdx = 0u; mipNdx < imageData.getImageInfo(imageNdx).mipLevels; ++mipNdx)
		{
			const VkExtent3D				imageExtent				= isCompressed ?
																		makeExtent3D(mipMapSizes[mipNdx]) :
																		imageData.getImageInfo(imageNdx).extent;
			const VkBufferImageCopy			copyRegion				=
			{
				offset,																												//VkDeviceSize				bufferOffset;
				0u,																													//deUint32					bufferRowLength;
				0u,																													//deUint32					bufferImageHeight;
				makeImageSubresourceLayers(VK_IMAGE_ASPECT_COLOR_BIT, mipNdx, 0u, imageData.getImageInfo(imageNdx).arrayLayers),	//VkImageSubresourceLayers	imageSubresource;
				makeOffset3D(0, 0, 0),																								//VkOffset3D				imageOffset;
				imageExtent,																										//VkExtent3D				imageExtent;
			};

			vk.cmdCopyBufferToImage(cmdBuffer, imageBuffer.get(), imageData.getImage(imageNdx), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &copyRegion);
			// Advance by the compressed byte size of this level across all
			// layers; uncompressed extents are scaled up to texel dimensions
			// (extent is expressed in blocks for the uncompressed image).
			offset += getCompressedImageSizeInBytes(m_parameters.formatCompressed,
						UVec3(isCompressed ? imageExtent.width : imageExtent.width * m_blockWidth, isCompressed? imageExtent.height :imageExtent.height * m_blockHeight,imageExtent.depth)) *
						imageData.getImageInfo(imageNdx).arrayLayers;
		}
	}
	endCommandBuffer(vk, cmdBuffer);
	submitCommandsAndWait(vk, device, queue, cmdBuffer);
}
615 
// Build the compute pipeline and a nearest-filter sampler, create one
// descriptor set per view of the compressed image, transition all images to
// GENERAL, then bind and dispatch one workgroup grid per (mip, layer) view.
void BasicComputeTestInstance::executeShader (const VkCommandBuffer&		cmdBuffer,
											  const VkDescriptorSetLayout&	descriptorSetLayout,
											  const VkDescriptorPool&		descriptorPool,
											  vector<ImageData>&			imageData)
{
	const DeviceInterface&			vk						= m_context.getDeviceInterface();
	const VkDevice					device					= m_context.getDevice();
	const VkQueue					queue					= m_context.getUniversalQueue();
	const Unique<VkShaderModule>	shaderModule			(createShaderModule(vk, device, m_context.getBinaryCollection().get("comp"), 0));
	vector<SharedVkDescriptorSet>	descriptorSets			(imageData[0].getImageViewCount());
	const Unique<VkPipelineLayout>	pipelineLayout			(makePipelineLayout(vk, device, descriptorSetLayout));
	const Unique<VkPipeline>		pipeline				(makeComputePipeline(vk, device, *pipelineLayout, *shaderModule));
	Move<VkSampler>					sampler;
	{
		// Nearest/clamp sampler: the shader must fetch exact texel values.
		const VkSamplerCreateInfo createInfo =
		{
			VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO,		//VkStructureType		sType;
			DE_NULL,									//const void*			pNext;
			0u,											//VkSamplerCreateFlags	flags;
			VK_FILTER_NEAREST,							//VkFilter				magFilter;
			VK_FILTER_NEAREST,							//VkFilter				minFilter;
			VK_SAMPLER_MIPMAP_MODE_NEAREST,				//VkSamplerMipmapMode	mipmapMode;
			VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,		//VkSamplerAddressMode	addressModeU;
			VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,		//VkSamplerAddressMode	addressModeV;
			VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,		//VkSamplerAddressMode	addressModeW;
			0.0f,										//float					mipLodBias;
			VK_FALSE,									//VkBool32				anisotropyEnable;
			1.0f,										//float					maxAnisotropy;
			VK_FALSE,									//VkBool32				compareEnable;
			VK_COMPARE_OP_EQUAL,						//VkCompareOp			compareOp;
			0.0f,										//float					minLod;
			0.0f,										//float					maxLod;
			VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK,	//VkBorderColor			borderColor;
			VK_FALSE,									//VkBool32				unnormalizedCoordinates;
		};
		sampler = createSampler(vk, device, &createInfo);
	}

	// One VkDescriptorImageInfo per (view, image slot) pair, laid out so that
	// set 'viewNdx' starts at index viewNdx * imagesCount.
	vector<VkDescriptorImageInfo>	descriptorImageInfos	(descriptorSets.size() * m_parameters.imagesCount);
	for (deUint32 viewNdx = 0u; viewNdx < descriptorSets.size(); ++viewNdx)
	{
		const deUint32 descriptorNdx = viewNdx * m_parameters.imagesCount;
		for (deUint32 imageNdx = 0; imageNdx < m_parameters.imagesCount; ++imageNdx)
		{
			descriptorImageInfos[descriptorNdx+imageNdx] = makeDescriptorImageInfo(*sampler,
															imageData[imageNdx].getImageView(viewNdx), VK_IMAGE_LAYOUT_GENERAL);
		}
	}

	for (deUint32 ndx = 0u; ndx < descriptorSets.size(); ++ndx)
		descriptorSets[ndx] = makeVkSharedPtr(makeDescriptorSet(vk, device, descriptorPool, descriptorSetLayout));

	beginCommandBuffer(vk, cmdBuffer);
	{
		const VkImageSubresourceRange	compressedRange				=
		{
			VK_IMAGE_ASPECT_COLOR_BIT,					//VkImageAspectFlags	aspectMask
			0u,											//deUint32				baseMipLevel
			imageData[0].getImageInfo(0u).mipLevels,	//deUint32				levelCount
			0u,											//deUint32				baseArrayLayer
			imageData[0].getImageInfo(0u).arrayLayers	//deUint32				layerCount
		};
		const VkImageSubresourceRange	uncompressedRange			=
		{
			VK_IMAGE_ASPECT_COLOR_BIT,					//VkImageAspectFlags	aspectMask
			0u,											//deUint32				baseMipLevel
			1u,											//deUint32				levelCount
			0u,											//deUint32				baseArrayLayer
			1u											//deUint32				layerCount
		};

		vk.cmdBindPipeline(cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *pipeline);

		// Barriers: every uncompressed image to GENERAL for shader writes,
		// plus the compressed image (last entry) to GENERAL for shader reads.
		// NOTE(review): the vector is sized descriptorSets.size() + 1 but the
		// loop fills imageData[1].getImagesCount() entries — assumes those
		// counts match; confirm for all parameter combinations.
		vector<VkImageMemoryBarrier>		preShaderImageBarriers;
		preShaderImageBarriers.resize(descriptorSets.size() + 1u);
		for (deUint32 imageNdx = 0u; imageNdx < imageData[1].getImagesCount(); ++imageNdx)
		{
			preShaderImageBarriers[imageNdx]= makeImageMemoryBarrier(
												VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_SHADER_WRITE_BIT,
												VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL,
												imageData[1].getImage(imageNdx), uncompressedRange);
		}

		preShaderImageBarriers[descriptorSets.size()] = makeImageMemoryBarrier(
															VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_SHADER_READ_BIT,
															VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL,
															imageData[0].getImage(0), compressedRange);

		vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT,
			(VkDependencyFlags)0, 0u, (const VkMemoryBarrier*)DE_NULL, 0u, (const VkBufferMemoryBarrier*)DE_NULL,
			static_cast<deUint32>(preShaderImageBarriers.size()), &preShaderImageBarriers[0]);

		// One dispatch per view; the grid matches the target image extent.
		for (deUint32 ndx = 0u; ndx <descriptorSets.size(); ++ndx)
		{
			descriptorSetUpdate (**descriptorSets[ndx], &descriptorImageInfos[ndx* m_parameters.imagesCount]);
			vk.cmdBindDescriptorSets(cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *pipelineLayout, 0u, 1u, &(**descriptorSets[ndx]), 0u, DE_NULL);
			vk.cmdDispatch(cmdBuffer,	imageData[1].getImageInfo(ndx).extent.width,
										imageData[1].getImageInfo(ndx).extent.height,
										imageData[1].getImageInfo(ndx).extent.depth);
		}
	}
	endCommandBuffer(vk, cmdBuffer);
	submitCommandsAndWait(vk, device, queue, cmdBuffer);
}
720 
// Downloads the given uncompressed image into a host-visible buffer and compares
// it byte-for-byte against the expected reference bytes stored in m_data,
// starting at 'offset'. Returns true on an exact match.
bool BasicComputeTestInstance::copyResultAndCompare (const VkCommandBuffer&	cmdBuffer,
													 const VkImage&			uncompressed,
													 const VkDeviceSize		offset,
													 const UVec3&			size)
{
	const DeviceInterface&	vk					= m_context.getDeviceInterface();
	const VkQueue			queue				= m_context.getUniversalQueue();
	const VkDevice			device				= m_context.getDevice();
	Allocator&				allocator			= m_context.getDefaultAllocator();

	// Size of the readback buffer: tightly-packed pixels of the uncompressed format.
	VkDeviceSize			imageResultSize		= getImageSizeBytes (tcu::IVec3(size.x(), size.y(), size.z()), m_parameters.formatUncompressed);
	Buffer					imageBufferResult	(vk, device, allocator,
													makeBufferCreateInfo(imageResultSize, VK_BUFFER_USAGE_TRANSFER_DST_BIT),
													MemoryRequirement::HostVisible);

	beginCommandBuffer(vk, cmdBuffer);
	{
		// Only the first mip level and array layer of the image are read back.
		const VkImageSubresourceRange	subresourceRange	=
		{
			VK_IMAGE_ASPECT_COLOR_BIT,											//VkImageAspectFlags	aspectMask
			0u,																	//deUint32				baseMipLevel
			1u,																	//deUint32				levelCount
			0u,																	//deUint32				baseArrayLayer
			1u																	//deUint32				layerCount
		};

		const VkBufferImageCopy			copyRegion			=
		{
			0ull,																//	VkDeviceSize				bufferOffset;
			0u,																	//	deUint32					bufferRowLength;
			0u,																	//	deUint32					bufferImageHeight;
			makeImageSubresourceLayers(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 0u, 1u),	//	VkImageSubresourceLayers	imageSubresource;
			makeOffset3D(0, 0, 0),												//	VkOffset3D					imageOffset;
			makeExtent3D(size),													//	VkExtent3D					imageExtent;
		};

		// Wait for compute-shader writes and move the image into a transfer-source layout.
		const VkImageMemoryBarrier prepareForTransferBarrier = makeImageMemoryBarrier(
																VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
																VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
																uncompressed, subresourceRange);

		// Make the transfer write visible to host reads once the submit completes.
		const VkBufferMemoryBarrier copyBarrier = makeBufferMemoryBarrier(
													VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_HOST_READ_BIT,
													imageBufferResult.get(), 0ull, imageResultSize);

		vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0, (const VkBufferMemoryBarrier*)DE_NULL, 1u, &prepareForTransferBarrier);
		vk.cmdCopyImageToBuffer(cmdBuffer, uncompressed, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, imageBufferResult.get(), 1u, &copyRegion);
		vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_HOST_BIT, (VkDependencyFlags)0, 0u, (const VkMemoryBarrier*)DE_NULL, 1, &copyBarrier, 0u, (const VkImageMemoryBarrier*)DE_NULL);
	}
	endCommandBuffer(vk, cmdBuffer);
	submitCommandsAndWait(vk, device, queue, cmdBuffer);

	// Invalidate the mapped range (required for non-coherent memory) before the
	// host reads the downloaded pixels.
	const Allocation& allocResult = imageBufferResult.getAllocation();
	invalidateMappedMemoryRange(vk, device, allocResult.getMemory(), allocResult.getOffset(), imageResultSize);
	if (deMemCmp((const void *)allocResult.getHostPtr(), (const void *)&m_data[static_cast<size_t>(offset)], static_cast<size_t>(imageResultSize)) == 0ull)
		return true;
	return false;
}
779 
descriptorSetUpdate(VkDescriptorSet descriptorSet,const VkDescriptorImageInfo * descriptorImageInfos)780 void BasicComputeTestInstance::descriptorSetUpdate (VkDescriptorSet descriptorSet, const VkDescriptorImageInfo* descriptorImageInfos)
781 {
782 	const DeviceInterface&		vk		= m_context.getDeviceInterface();
783 	const VkDevice				device	= m_context.getDevice();
784 	DescriptorSetUpdateBuilder	descriptorSetUpdateBuilder;
785 
786 	switch(m_parameters.operation)
787 	{
788 		case OPERATION_IMAGE_LOAD:
789 		case OPERATION_IMAGE_STORE:
790 		{
791 			for (deUint32 bindingNdx = 0u; bindingNdx < m_parameters.imagesCount; ++bindingNdx)
792 				descriptorSetUpdateBuilder.writeSingle(descriptorSet, DescriptorSetUpdateBuilder::Location::binding(bindingNdx), VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, &descriptorImageInfos[bindingNdx]);
793 
794 			break;
795 		}
796 
797 		case OPERATION_TEXEL_FETCH:
798 		case OPERATION_TEXTURE:
799 		{
800 			for (deUint32 bindingNdx = 0u; bindingNdx < m_parameters.imagesCount; ++bindingNdx)
801 			{
802 				descriptorSetUpdateBuilder.writeSingle(descriptorSet, DescriptorSetUpdateBuilder::Location::binding(bindingNdx),
803 					bindingNdx == 0u ? VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER : VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, &descriptorImageInfos[bindingNdx]);
804 			}
805 
806 			break;
807 		}
808 
809 		default:
810 			DE_ASSERT(false);
811 	}
812 	descriptorSetUpdateBuilder.update(vk, device);
813 }
814 
createImageInfos(ImageData & imageData,const vector<UVec3> & mipMapSizes,const bool isCompressed)815 void BasicComputeTestInstance::createImageInfos (ImageData& imageData, const vector<UVec3>& mipMapSizes, const bool isCompressed)
816 {
817 	const VkImageType			imageType			= mapImageType(m_parameters.imageType);
818 
819 	if (isCompressed)
820 	{
821 		const VkExtent3D			extentCompressed	= makeExtent3D(getLayerSize(m_parameters.imageType, m_parameters.size));
822 		const VkImageCreateInfo compressedInfo =
823 		{
824 			VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,					// VkStructureType			sType;
825 			DE_NULL,												// const void*				pNext;
826 			VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT |
827 			VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT_KHR |
828 			VK_IMAGE_CREATE_EXTENDED_USAGE_BIT_KHR,					// VkImageCreateFlags		flags;
829 			imageType,												// VkImageType				imageType;
830 			m_parameters.formatCompressed,							// VkFormat					format;
831 			extentCompressed,										// VkExtent3D				extent;
832 			static_cast<deUint32>(mipMapSizes.size()),				// deUint32					mipLevels;
833 			getLayerCount(),										// deUint32					arrayLayers;
834 			VK_SAMPLE_COUNT_1_BIT,									// VkSampleCountFlagBits	samples;
835 			VK_IMAGE_TILING_OPTIMAL,								// VkImageTiling			tiling;
836 			VK_IMAGE_USAGE_SAMPLED_BIT |
837 			VK_IMAGE_USAGE_STORAGE_BIT |
838 			VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
839 			VK_IMAGE_USAGE_TRANSFER_DST_BIT,						// VkImageUsageFlags		usage;
840 			VK_SHARING_MODE_EXCLUSIVE,								// VkSharingMode			sharingMode;
841 			0u,														// deUint32					queueFamilyIndexCount;
842 			DE_NULL,												// const deUint32*			pQueueFamilyIndices;
843 			VK_IMAGE_LAYOUT_UNDEFINED,								// VkImageLayout			initialLayout;
844 		};
845 		imageData.addImageInfo(compressedInfo);
846 	}
847 	else
848 	{
849 		for (size_t mipNdx = 0ull; mipNdx < mipMapSizes.size(); ++mipNdx)
850 		for (size_t layerNdx = 0ull; layerNdx < getLayerCount(); ++layerNdx)
851 		{
852 			const VkExtent3D		extentUncompressed	= m_parameters.useMipmaps ?
853 															makeExtent3D(getCompressedImageResolutionInBlocks(m_parameters.formatCompressed, mipMapSizes[mipNdx])) :
854 															makeExtent3D(getCompressedImageResolutionInBlocks(m_parameters.formatCompressed, m_parameters.size));
855 			const VkImageCreateInfo	uncompressedInfo	=
856 			{
857 				VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,				// VkStructureType			sType;
858 				DE_NULL,											// const void*				pNext;
859 				0u,													// VkImageCreateFlags		flags;
860 				imageType,											// VkImageType				imageType;
861 				m_parameters.formatUncompressed,					// VkFormat					format;
862 				extentUncompressed,									// VkExtent3D				extent;
863 				1u,													// deUint32					mipLevels;
864 				1u,													// deUint32					arrayLayers;
865 				VK_SAMPLE_COUNT_1_BIT,								// VkSampleCountFlagBits	samples;
866 				VK_IMAGE_TILING_OPTIMAL,							// VkImageTiling			tiling;
867 				m_parameters.uncompressedImageUsage |
868 				VK_IMAGE_USAGE_SAMPLED_BIT,							// VkImageUsageFlags		usage;
869 				VK_SHARING_MODE_EXCLUSIVE,							// VkSharingMode			sharingMode;
870 				0u,													// deUint32					queueFamilyIndexCount;
871 				DE_NULL,											// const deUint32*			pQueueFamilyIndices;
872 				VK_IMAGE_LAYOUT_UNDEFINED,							// VkImageLayout			initialLayout;
873 			};
874 			imageData.addImageInfo(uncompressedInfo);
875 		}
876 	}
877 }
878 
decompressImage(const VkCommandBuffer & cmdBuffer,vector<ImageData> & imageData,const vector<UVec3> & mipMapSizes)879 bool BasicComputeTestInstance::decompressImage (const VkCommandBuffer&	cmdBuffer,
880 												 vector<ImageData>&		imageData,
881 												 const vector<UVec3>&	mipMapSizes)
882 {
883 	const DeviceInterface&			vk						= m_context.getDeviceInterface();
884 	const VkDevice					device					= m_context.getDevice();
885 	const VkQueue					queue					= m_context.getUniversalQueue();
886 	Allocator&						allocator				= m_context.getDefaultAllocator();
887 	const Unique<VkShaderModule>	shaderModule			(createShaderModule(vk, device, m_context.getBinaryCollection().get("decompress"), 0));
888 	const VkImage&					compressed				= imageData[0].getImage(0);
889 
890 	for (deUint32 ndx = 0u; ndx < imageData.size(); ndx++)
891 		imageData[ndx].resetViews();
892 
893 	for (deUint32 mipNdx = 0u; mipNdx < mipMapSizes.size(); ++mipNdx)
894 	for (deUint32 layerNdx = 0u; layerNdx < getLayerCount(); ++layerNdx)
895 	{
896 		const bool						layoutShaderReadOnly	= (layerNdx % 2u) == 1;
897 		const deUint32					imageNdx				= layerNdx + mipNdx * getLayerCount();
898 		const VkExtent3D				extentCompressed		= makeExtent3D(mipMapSizes[mipNdx]);
899 		const VkImage&					uncompressed			= imageData[m_parameters.imagesCount -1].getImage(imageNdx);
900 		const VkExtent3D				extentUncompressed		= imageData[m_parameters.imagesCount -1].getImageInfo(imageNdx).extent;
901 		const VkDeviceSize				bufferSizeComp			= getCompressedImageSizeInBytes(m_parameters.formatCompressed, mipMapSizes[mipNdx]);
902 
903 		const VkImageCreateInfo			decompressedImageInfo	=
904 		{
905 			VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,								// VkStructureType			sType;
906 			DE_NULL,															// const void*				pNext;
907 			0u,																	// VkImageCreateFlags		flags;
908 			VK_IMAGE_TYPE_2D,													// VkImageType				imageType;
909 			VK_FORMAT_R8G8B8A8_UNORM,											// VkFormat					format;
910 			extentCompressed,													// VkExtent3D				extent;
911 			1u,																	// deUint32					mipLevels;
912 			1u,																	// deUint32					arrayLayers;
913 			VK_SAMPLE_COUNT_1_BIT,												// VkSampleCountFlagBits	samples;
914 			VK_IMAGE_TILING_OPTIMAL,											// VkImageTiling			tiling;
915 			VK_IMAGE_USAGE_SAMPLED_BIT |
916 			VK_IMAGE_USAGE_STORAGE_BIT |
917 			VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
918 			VK_IMAGE_USAGE_TRANSFER_DST_BIT,									// VkImageUsageFlags		usage;
919 			VK_SHARING_MODE_EXCLUSIVE,											// VkSharingMode			sharingMode;
920 			0u,																	// deUint32					queueFamilyIndexCount;
921 			DE_NULL,															// const deUint32*			pQueueFamilyIndices;
922 			VK_IMAGE_LAYOUT_UNDEFINED,											// VkImageLayout			initialLayout;
923 		};
924 
925 		const VkImageCreateInfo			compressedImageInfo		=
926 		{
927 			VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,								// VkStructureType			sType;
928 			DE_NULL,															// const void*				pNext;
929 			0u,																	// VkImageCreateFlags		flags;
930 			VK_IMAGE_TYPE_2D,													// VkImageType				imageType;
931 			m_parameters.formatCompressed,										// VkFormat					format;
932 			extentCompressed,													// VkExtent3D				extent;
933 			1u,																	// deUint32					mipLevels;
934 			1u,																	// deUint32					arrayLayers;
935 			VK_SAMPLE_COUNT_1_BIT,												// VkSampleCountFlagBits	samples;
936 			VK_IMAGE_TILING_OPTIMAL,											// VkImageTiling			tiling;
937 			VK_IMAGE_USAGE_SAMPLED_BIT |
938 			VK_IMAGE_USAGE_TRANSFER_DST_BIT,									// VkImageUsageFlags		usage;
939 			VK_SHARING_MODE_EXCLUSIVE,											// VkSharingMode			sharingMode;
940 			0u,																	// deUint32					queueFamilyIndexCount;
941 			DE_NULL,															// const deUint32*			pQueueFamilyIndices;
942 			VK_IMAGE_LAYOUT_UNDEFINED,											// VkImageLayout			initialLayout;
943 		};
944 		const VkImageUsageFlags				compressedViewUsageFlags	= VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
945 		const VkImageViewUsageCreateInfoKHR	compressedViewUsageCI		=
946 		{
947 			VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO_KHR,					//VkStructureType		sType;
948 			DE_NULL,															//const void*			pNext;
949 			compressedViewUsageFlags,											//VkImageUsageFlags		usage;
950 		};
951 		Image							resultImage				(vk, device, allocator, decompressedImageInfo, MemoryRequirement::Any);
952 		Image							referenceImage			(vk, device, allocator, decompressedImageInfo, MemoryRequirement::Any);
953 		Image							uncompressedImage		(vk, device, allocator, compressedImageInfo, MemoryRequirement::Any);
954 		Move<VkImageView>				resultView				= makeImageView(vk, device, resultImage.get(), mapImageViewType(m_parameters.imageType), decompressedImageInfo.format,
955 																	makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, decompressedImageInfo.extent.depth, 0u, decompressedImageInfo.arrayLayers));
956 		Move<VkImageView>				referenceView			= makeImageView(vk, device, referenceImage.get(), mapImageViewType(m_parameters.imageType), decompressedImageInfo.format,
957 																	makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, decompressedImageInfo.extent.depth, 0u, decompressedImageInfo.arrayLayers));
958 		Move<VkImageView>				uncompressedView		= makeImageView(vk, device, uncompressedImage.get(), mapImageViewType(m_parameters.imageType), m_parameters.formatCompressed,
959 																	makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, compressedImageInfo.extent.depth, 0u, compressedImageInfo.arrayLayers));
960 		Move<VkImageView>				compressedView			= makeImageView(vk, device, compressed, mapImageViewType(m_parameters.imageType), m_parameters.formatCompressed,
961 																	makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, mipNdx, 1u, layerNdx, 1u), &compressedViewUsageCI);
962 		Move<VkDescriptorSetLayout>		descriptorSetLayout		= DescriptorSetLayoutBuilder()
963 																	.addSingleBinding(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, VK_SHADER_STAGE_COMPUTE_BIT)
964 																	.addSingleBinding(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, VK_SHADER_STAGE_COMPUTE_BIT)
965 																	.addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, VK_SHADER_STAGE_COMPUTE_BIT)
966 																	.addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, VK_SHADER_STAGE_COMPUTE_BIT)
967 																	.build(vk, device);
968 		Move<VkDescriptorPool>			descriptorPool			= DescriptorPoolBuilder()
969 																	.addType(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, decompressedImageInfo.arrayLayers)
970 																	.addType(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, decompressedImageInfo.arrayLayers)
971 																	.addType(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, decompressedImageInfo.arrayLayers)
972 																	.addType(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, decompressedImageInfo.arrayLayers)
973 																	.build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, decompressedImageInfo.arrayLayers);
974 
975 		Move<VkDescriptorSet>			descriptorSet			= makeDescriptorSet(vk, device, *descriptorPool, *descriptorSetLayout);
976 		const Unique<VkPipelineLayout>	pipelineLayout			(makePipelineLayout(vk, device, *descriptorSetLayout));
977 		const Unique<VkPipeline>		pipeline				(makeComputePipeline(vk, device, *pipelineLayout, *shaderModule));
978 		const VkDeviceSize				bufferSize				= getImageSizeBytes(IVec3((int)extentCompressed.width, (int)extentCompressed.height, (int)extentCompressed.depth), VK_FORMAT_R8G8B8A8_UNORM);
979 		Buffer							resultBuffer			(vk, device, allocator,
980 																	makeBufferCreateInfo(bufferSize, VK_BUFFER_USAGE_TRANSFER_DST_BIT), MemoryRequirement::HostVisible);
981 		Buffer							referenceBuffer			(vk, device, allocator,
982 																	makeBufferCreateInfo(bufferSize, VK_BUFFER_USAGE_TRANSFER_DST_BIT), MemoryRequirement::HostVisible);
983 		Buffer							transferBuffer			(vk, device, allocator,
984 																	makeBufferCreateInfo(bufferSizeComp, VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT), MemoryRequirement::HostVisible);
985 		Move<VkSampler>					sampler;
986 		{
987 			const VkSamplerCreateInfo createInfo	=
988 			{
989 				VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO,							//VkStructureType		sType;
990 				DE_NULL,														//const void*			pNext;
991 				0u,																//VkSamplerCreateFlags	flags;
992 				VK_FILTER_NEAREST,												//VkFilter				magFilter;
993 				VK_FILTER_NEAREST,												//VkFilter				minFilter;
994 				VK_SAMPLER_MIPMAP_MODE_NEAREST,									//VkSamplerMipmapMode	mipmapMode;
995 				VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,							//VkSamplerAddressMode	addressModeU;
996 				VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,							//VkSamplerAddressMode	addressModeV;
997 				VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,							//VkSamplerAddressMode	addressModeW;
998 				0.0f,															//float					mipLodBias;
999 				VK_FALSE,														//VkBool32				anisotropyEnable;
1000 				1.0f,															//float					maxAnisotropy;
1001 				VK_FALSE,														//VkBool32				compareEnable;
1002 				VK_COMPARE_OP_EQUAL,											//VkCompareOp			compareOp;
1003 				0.0f,															//float					minLod;
1004 				1.0f,															//float					maxLod;
1005 				VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK,						//VkBorderColor			borderColor;
1006 				VK_FALSE,														//VkBool32				unnormalizedCoordinates;
1007 			};
1008 			sampler = createSampler(vk, device, &createInfo);
1009 		}
1010 
1011 		VkDescriptorImageInfo			descriptorImageInfos[]	=
1012 		{
1013 			makeDescriptorImageInfo(*sampler,	*uncompressedView,	layoutShaderReadOnly ? VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL : VK_IMAGE_LAYOUT_GENERAL),
1014 			makeDescriptorImageInfo(*sampler,	*compressedView,	layoutShaderReadOnly ? VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL : VK_IMAGE_LAYOUT_GENERAL),
1015 			makeDescriptorImageInfo(DE_NULL,	*resultView,		VK_IMAGE_LAYOUT_GENERAL),
1016 			makeDescriptorImageInfo(DE_NULL,	*referenceView,		VK_IMAGE_LAYOUT_GENERAL)
1017 		};
1018 		DescriptorSetUpdateBuilder()
1019 			.writeSingle(descriptorSet.get(), DescriptorSetUpdateBuilder::Location::binding(0u), VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, &descriptorImageInfos[0])
1020 			.writeSingle(descriptorSet.get(), DescriptorSetUpdateBuilder::Location::binding(1u), VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, &descriptorImageInfos[1])
1021 			.writeSingle(descriptorSet.get(), DescriptorSetUpdateBuilder::Location::binding(2u), VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, &descriptorImageInfos[2])
1022 			.writeSingle(descriptorSet.get(), DescriptorSetUpdateBuilder::Location::binding(3u), VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, &descriptorImageInfos[3])
1023 			.update(vk, device);
1024 
1025 
1026 		beginCommandBuffer(vk, cmdBuffer);
1027 		{
1028 			const VkImageSubresourceRange	subresourceRange		=
1029 			{
1030 				VK_IMAGE_ASPECT_COLOR_BIT,											//VkImageAspectFlags			aspectMask
1031 				0u,																	//deUint32						baseMipLevel
1032 				1u,																	//deUint32						levelCount
1033 				0u,																	//deUint32						baseArrayLayer
1034 				1u																	//deUint32						layerCount
1035 			};
1036 
1037 			const VkImageSubresourceRange	subresourceRangeComp	=
1038 			{
1039 				VK_IMAGE_ASPECT_COLOR_BIT,											//VkImageAspectFlags			aspectMask
1040 				mipNdx,																//deUint32						baseMipLevel
1041 				1u,																	//deUint32						levelCount
1042 				layerNdx,															//deUint32						baseArrayLayer
1043 				1u																	//deUint32						layerCount
1044 			};
1045 
1046 			const VkBufferImageCopy			copyRegion				=
1047 			{
1048 				0ull,																//	VkDeviceSize				bufferOffset;
1049 				0u,																	//	deUint32					bufferRowLength;
1050 				0u,																	//	deUint32					bufferImageHeight;
1051 				makeImageSubresourceLayers(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 0u, 1u),	//	VkImageSubresourceLayers	imageSubresource;
1052 				makeOffset3D(0, 0, 0),												//	VkOffset3D					imageOffset;
1053 				decompressedImageInfo.extent,										//	VkExtent3D					imageExtent;
1054 			};
1055 
1056 			const VkBufferImageCopy			compressedCopyRegion	=
1057 			{
1058 				0ull,																//	VkDeviceSize				bufferOffset;
1059 				0u,																	//	deUint32					bufferRowLength;
1060 				0u,																	//	deUint32					bufferImageHeight;
1061 				makeImageSubresourceLayers(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 0u, 1u),	//	VkImageSubresourceLayers	imageSubresource;
1062 				makeOffset3D(0, 0, 0),												//	VkOffset3D					imageOffset;
1063 				extentUncompressed,													//	VkExtent3D					imageExtent;
1064 			};
1065 
1066 			{
1067 
1068 				const VkBufferMemoryBarrier		preCopyBufferBarriers	= makeBufferMemoryBarrier(0u, VK_ACCESS_TRANSFER_WRITE_BIT,
1069 																			transferBuffer.get(), 0ull, bufferSizeComp);
1070 
1071 				vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT,
1072 					(VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1u, &preCopyBufferBarriers, 0u, (const VkImageMemoryBarrier*)DE_NULL);
1073 			}
1074 
1075 			vk.cmdCopyImageToBuffer(cmdBuffer, uncompressed, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, transferBuffer.get(), 1u, &compressedCopyRegion);
1076 
1077 			{
1078 				const VkBufferMemoryBarrier		postCopyBufferBarriers	= makeBufferMemoryBarrier(VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
1079 																			transferBuffer.get(), 0ull, bufferSizeComp);
1080 
1081 				const VkImageMemoryBarrier		preCopyImageBarriers	= makeImageMemoryBarrier(0u, VK_ACCESS_TRANSFER_WRITE_BIT,
1082 																			VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, uncompressedImage.get(), subresourceRange);
1083 
1084 				vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT,
1085 					(VkDependencyFlags)0, 0u, (const VkMemoryBarrier*)DE_NULL, 1u, &postCopyBufferBarriers, 1u, &preCopyImageBarriers);
1086 			}
1087 
1088 			vk.cmdCopyBufferToImage(cmdBuffer, transferBuffer.get(), uncompressedImage.get(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &copyRegion);
1089 
1090 			vk.cmdBindPipeline(cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *pipeline);
1091 			vk.cmdBindDescriptorSets(cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *pipelineLayout, 0u, 1u, &descriptorSet.get(), 0u, DE_NULL);
1092 
1093 			{
1094 				const VkImageMemoryBarrier		preShaderImageBarriers[]	=
1095 				{
1096 
1097 					makeImageMemoryBarrier(VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_SHADER_READ_BIT,
1098 						VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, layoutShaderReadOnly ? VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL : VK_IMAGE_LAYOUT_GENERAL,
1099 						uncompressedImage.get(), subresourceRange),
1100 
1101 					makeImageMemoryBarrier(0, VK_ACCESS_SHADER_READ_BIT,
1102 						VK_IMAGE_LAYOUT_GENERAL, layoutShaderReadOnly ? VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL : VK_IMAGE_LAYOUT_GENERAL,
1103 						compressed, subresourceRangeComp),
1104 
1105 					makeImageMemoryBarrier(0u, VK_ACCESS_SHADER_WRITE_BIT,
1106 						VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL,
1107 						resultImage.get(), subresourceRange),
1108 
1109 					makeImageMemoryBarrier(0u, VK_ACCESS_SHADER_WRITE_BIT,
1110 						VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL,
1111 						referenceImage.get(), subresourceRange)
1112 				};
1113 
1114 				vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT,
1115 					(VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, (const VkBufferMemoryBarrier*)DE_NULL,
1116 					DE_LENGTH_OF_ARRAY(preShaderImageBarriers), preShaderImageBarriers);
1117 			}
1118 
1119 			vk.cmdDispatch(cmdBuffer, extentCompressed.width, extentCompressed.height, extentCompressed.depth);
1120 
1121 			{
1122 				const VkImageMemoryBarrier		postShaderImageBarriers[]	=
1123 				{
1124 					makeImageMemoryBarrier(VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
1125 					VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
1126 					resultImage.get(), subresourceRange),
1127 
1128 					makeImageMemoryBarrier(VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
1129 						VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
1130 						referenceImage.get(), subresourceRange)
1131 				};
1132 
1133 				 const VkBufferMemoryBarrier		preCopyBufferBarrier[]		=
1134 				{
1135 					makeBufferMemoryBarrier( 0, VK_BUFFER_USAGE_TRANSFER_DST_BIT,
1136 						resultBuffer.get(), 0ull, bufferSize),
1137 
1138 					makeBufferMemoryBarrier( 0, VK_BUFFER_USAGE_TRANSFER_DST_BIT,
1139 						referenceBuffer.get(), 0ull, bufferSize),
1140 				};
1141 
1142 				vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT,
1143 					(VkDependencyFlags)0, 0u, (const VkMemoryBarrier*)DE_NULL, DE_LENGTH_OF_ARRAY(preCopyBufferBarrier), preCopyBufferBarrier,
1144 					DE_LENGTH_OF_ARRAY(postShaderImageBarriers), postShaderImageBarriers);
1145 			}
1146 			vk.cmdCopyImageToBuffer(cmdBuffer, resultImage.get(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, resultBuffer.get(), 1u, &copyRegion);
1147 			vk.cmdCopyImageToBuffer(cmdBuffer, referenceImage.get(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, referenceBuffer.get(), 1u, &copyRegion);
1148 		}
1149 		endCommandBuffer(vk, cmdBuffer);
1150 		submitCommandsAndWait(vk, device, queue, cmdBuffer);
1151 
1152 		const Allocation&		resultAlloc		= resultBuffer.getAllocation();
1153 		const Allocation&		referenceAlloc	= referenceBuffer.getAllocation();
1154 		invalidateMappedMemoryRange(vk, device, resultAlloc.getMemory(), resultAlloc.getOffset(), bufferSize);
1155 		invalidateMappedMemoryRange(vk, device, referenceAlloc.getMemory(), referenceAlloc.getOffset(), bufferSize);
1156 
1157 		if (deMemCmp(resultAlloc.getHostPtr(), referenceAlloc.getHostPtr(), (size_t)bufferSize) != 0)
1158 		{
1159 			ConstPixelBufferAccess	resultPixels		(mapVkFormat(decompressedImageInfo.format), decompressedImageInfo.extent.width, decompressedImageInfo.extent.height, decompressedImageInfo.extent.depth, resultAlloc.getHostPtr());
1160 			ConstPixelBufferAccess	referencePixels		(mapVkFormat(decompressedImageInfo.format), decompressedImageInfo.extent.width, decompressedImageInfo.extent.height, decompressedImageInfo.extent.depth, referenceAlloc.getHostPtr());
1161 
1162 			if(!fuzzyCompare(m_context.getTestContext().getLog(), "Image Comparison", "Image Comparison", resultPixels, referencePixels, 0.001f, tcu::COMPARE_LOG_EVERYTHING))
1163 				return false;
1164 		}
1165 	}
1166 
1167 	return true;
1168 }
1169 
// Compute-test variant that overrides the shader-execution step; its
// executeShader records barriers/dispatches for the imageStore path.
// NOTE(review): exact shader behavior is defined by the "comp" program
// registered elsewhere -- confirm against the shader-source builder.
class ImageStoreComputeTestInstance : public BasicComputeTestInstance
{
public:
					ImageStoreComputeTestInstance	(Context&							context,
													 const TestParameters&				parameters);
protected:
	// Records and submits the compute work; overrides the base-class step.
	virtual void	executeShader					(const VkCommandBuffer&				cmdBuffer,
													 const VkDescriptorSetLayout&		descriptorSetLayout,
													 const VkDescriptorPool&			descriptorPool,
													 vector<ImageData>&					imageData);
private:
};
1182 
// Forwarding constructor: all state lives in BasicComputeTestInstance.
ImageStoreComputeTestInstance::ImageStoreComputeTestInstance (Context& context, const TestParameters& parameters)
	:BasicComputeTestInstance	(context, parameters)
{
}
1187 
executeShader(const VkCommandBuffer & cmdBuffer,const VkDescriptorSetLayout & descriptorSetLayout,const VkDescriptorPool & descriptorPool,vector<ImageData> & imageData)1188 void ImageStoreComputeTestInstance::executeShader (const VkCommandBuffer&		cmdBuffer,
1189 												   const VkDescriptorSetLayout&	descriptorSetLayout,
1190 												   const VkDescriptorPool&		descriptorPool,
1191 												   vector<ImageData>&			imageData)
1192 {
1193 	const DeviceInterface&			vk						= m_context.getDeviceInterface();
1194 	const VkDevice					device					= m_context.getDevice();
1195 	const VkQueue					queue					= m_context.getUniversalQueue();
1196 	const Unique<VkShaderModule>	shaderModule			(createShaderModule(vk, device, m_context.getBinaryCollection().get("comp"), 0));
1197 	vector<SharedVkDescriptorSet>	descriptorSets			(imageData[0].getImageViewCount());
1198 	const Unique<VkPipelineLayout>	pipelineLayout			(makePipelineLayout(vk, device, descriptorSetLayout));
1199 	const Unique<VkPipeline>		pipeline				(makeComputePipeline(vk, device, *pipelineLayout, *shaderModule));
1200 	Move<VkSampler>					sampler;
1201 	{
1202 		const VkSamplerCreateInfo createInfo =
1203 		{
1204 			VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO,		//VkStructureType		sType;
1205 			DE_NULL,									//const void*			pNext;
1206 			0u,											//VkSamplerCreateFlags	flags;
1207 			VK_FILTER_NEAREST,							//VkFilter				magFilter;
1208 			VK_FILTER_NEAREST,							//VkFilter				minFilter;
1209 			VK_SAMPLER_MIPMAP_MODE_NEAREST,				//VkSamplerMipmapMode	mipmapMode;
1210 			VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,		//VkSamplerAddressMode	addressModeU;
1211 			VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,		//VkSamplerAddressMode	addressModeV;
1212 			VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,		//VkSamplerAddressMode	addressModeW;
1213 			0.0f,										//float					mipLodBias;
1214 			VK_FALSE,									//VkBool32				anisotropyEnable;
1215 			1.0f,										//float					maxAnisotropy;
1216 			VK_FALSE,									//VkBool32				compareEnable;
1217 			VK_COMPARE_OP_EQUAL,						//VkCompareOp			compareOp;
1218 			0.0f,										//float					minLod;
1219 			0.0f,										//float					maxLod;
1220 			VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK,	//VkBorderColor			borderColor;
1221 			VK_TRUE,									//VkBool32				unnormalizedCoordinates;
1222 		};
1223 		sampler = createSampler(vk, device, &createInfo);
1224 	}
1225 
1226 	vector<VkDescriptorImageInfo>	descriptorImageInfos	(descriptorSets.size() * m_parameters.imagesCount);
1227 	for (deUint32 viewNdx = 0u; viewNdx < descriptorSets.size(); ++viewNdx)
1228 	{
1229 		const deUint32 descriptorNdx = viewNdx * m_parameters.imagesCount;
1230 		for (deUint32 imageNdx = 0u; imageNdx < m_parameters.imagesCount; ++imageNdx)
1231 		{
1232 			descriptorImageInfos[descriptorNdx+imageNdx] = makeDescriptorImageInfo(*sampler,
1233 															imageData[imageNdx].getImageView(viewNdx), VK_IMAGE_LAYOUT_GENERAL);
1234 		}
1235 	}
1236 
1237 	for (deUint32 ndx = 0u; ndx < descriptorSets.size(); ++ndx)
1238 		descriptorSets[ndx] = makeVkSharedPtr(makeDescriptorSet(vk, device, descriptorPool, descriptorSetLayout));
1239 
1240 	beginCommandBuffer(vk, cmdBuffer);
1241 	{
1242 		const VkImageSubresourceRange	compressedRange				=
1243 		{
1244 			VK_IMAGE_ASPECT_COLOR_BIT,					//VkImageAspectFlags	aspectMask
1245 			0u,											//deUint32				baseMipLevel
1246 			imageData[0].getImageInfo(0).mipLevels,		//deUint32				levelCount
1247 			0u,											//deUint32				baseArrayLayer
1248 			imageData[0].getImageInfo(0).arrayLayers	//deUint32				layerCount
1249 		};
1250 
1251 		const VkImageSubresourceRange	uncompressedRange			=
1252 		{
1253 			VK_IMAGE_ASPECT_COLOR_BIT,					//VkImageAspectFlags	aspectMask
1254 			0u,											//deUint32				baseMipLevel
1255 			1u,											//deUint32				levelCount
1256 			0u,											//deUint32				baseArrayLayer
1257 			1u											//deUint32				layerCount
1258 		};
1259 
1260 		vk.cmdBindPipeline(cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *pipeline);
1261 
1262 		vector<VkImageMemoryBarrier>		preShaderImageBarriers	(descriptorSets.size() * 2u + 1u);
1263 		for (deUint32 imageNdx = 0u; imageNdx < imageData[1].getImagesCount(); ++imageNdx)
1264 		{
1265 			preShaderImageBarriers[imageNdx]									= makeImageMemoryBarrier(
1266 																					VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_SHADER_WRITE_BIT,
1267 																					VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL,
1268 																					imageData[1].getImage(imageNdx), uncompressedRange);
1269 
1270 			preShaderImageBarriers[imageNdx + imageData[1].getImagesCount()]	= makeImageMemoryBarrier(
1271 																					VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_SHADER_WRITE_BIT,
1272 																					VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL,
1273 																					imageData[2].getImage(imageNdx), uncompressedRange);
1274 		}
1275 
1276 		preShaderImageBarriers[preShaderImageBarriers.size()-1] = makeImageMemoryBarrier(
1277 																	VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_SHADER_READ_BIT,
1278 																	VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL,
1279 																	imageData[0].getImage(0u), compressedRange);
1280 
1281 		vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT,
1282 			(VkDependencyFlags)0, 0u, (const VkMemoryBarrier*)DE_NULL, 0u, (const VkBufferMemoryBarrier*)DE_NULL,
1283 			static_cast<deUint32>(preShaderImageBarriers.size()), &preShaderImageBarriers[0]);
1284 
1285 		for (deUint32 ndx = 0u; ndx <descriptorSets.size(); ++ndx)
1286 		{
1287 			descriptorSetUpdate (**descriptorSets[ndx], &descriptorImageInfos[ndx* m_parameters.imagesCount]);
1288 			vk.cmdBindDescriptorSets(cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *pipelineLayout, 0u, 1u, &(**descriptorSets[ndx]), 0u, DE_NULL);
1289 			vk.cmdDispatch(cmdBuffer,	imageData[1].getImageInfo(ndx).extent.width,
1290 										imageData[1].getImageInfo(ndx).extent.height,
1291 										imageData[1].getImageInfo(ndx).extent.depth);
1292 		}
1293 	}
1294 	endCommandBuffer(vk, cmdBuffer);
1295 	submitCommandsAndWait(vk, device, queue, cmdBuffer);
1296 }
1297 
// Base test instance for transcoding through graphics-pipeline attachments:
// a compressed image is accessed through uncompressed-format views, and data
// is moved between it and uncompressed images by rendering full-screen quads,
// one (level, layer) subresource at a time.
class GraphicsAttachmentsTestInstance : public BasicTranscodingTestInstance
{
public:
										GraphicsAttachmentsTestInstance	(Context& context, const TestParameters& parameters);
	virtual TestStatus					iterate							(void);

protected:
	// True when the tested operation writes into the compressed image
	// (OPERATION_ATTACHMENT_WRITE); false for the read direction.
	virtual bool						isWriteToCompressedOperation	();
	// Builds the VkImageCreateInfo for a source/destination image;
	// createFlags may be DE_NULL to use the flags derived from the format.
	VkImageCreateInfo					makeCreateImageInfo				(const VkFormat					format,
																		 const ImageType				type,
																		 const UVec3&					size,
																		 const VkImageUsageFlags		usageFlags,
																		 const VkImageCreateFlags*		createFlags,
																		 const deUint32					levels,
																		 const deUint32					layers);
	// Fills 'data' with generated compressed texel data for one subresource;
	// returns the data size in bytes.
	VkDeviceSize						getCompressedImageData			(const VkFormat					format,
																		 const UVec3&					size,
																		 std::vector<deUint8>&			data,
																		 const deUint32					layer,
																		 const deUint32					level);
	// Fills 'data' with generated uncompressed texel data for one subresource;
	// returns the data size in bytes.
	VkDeviceSize						getUncompressedImageData		(const VkFormat					format,
																		 const UVec3&					size,
																		 std::vector<deUint8>&			data,
																		 const deUint32					layer,
																		 const deUint32					level);
	// Derives formats/usages/resolutions from the operation direction and
	// generates the per-(level, layer) reference data.
	virtual void						prepareData						();
	// Creates and uploads the full-screen quad vertex buffer.
	virtual void						prepareVertexBuffer				();
	// Read-direction transcode: compressed image -> uncompressed readback.
	virtual void						transcodeRead					();
	// Write-direction transcode: uncompressed upload -> compressed image.
	virtual void						transcodeWrite					();
	// Compares 'refCompressedData' with the contents of 'resCompressedImage'
	// after decompression.
	// NOTE(review): parameters are declared in (layer, level) order, while the
	// call sites in iterate() pass (levelNdx, layerNdx) -- verify against the
	// method's definition which side is mislabeled.
	bool								verifyDecompression				(const std::vector<deUint8>&	refCompressedData,
																		 const de::MovePtr<Image>&		resCompressedImage,
																		 const deUint32					layer,
																		 const deUint32					level,
																		 const UVec3&					mipmapDims);

	typedef std::vector<deUint8>		RawDataVector;	// raw bytes of one image subresource
	typedef SharedPtr<RawDataVector>	RawDataPtr;
	typedef std::vector<RawDataPtr>		LevelData;		// one entry per array layer
	typedef std::vector<LevelData>		FullImageData;	// one entry per mip level

	FullImageData						m_srcData;		// reference input data, indexed [level][layer]
	FullImageData						m_dstData;		// readback data, indexed [level][layer]

	typedef SharedPtr<Image>			ImagePtr;
	typedef std::vector<ImagePtr>		LevelImages;
	typedef std::vector<LevelImages>	ImagesArray;

	ImagesArray							m_uncompressedImages;
	MovePtr<Image>						m_compressedImage;

	// View-usage chaining (VK_KHR_maintenance2): exactly one of the src/dst
	// pointers below aims at m_imageViewUsageKHR, depending on the direction.
	VkImageViewUsageCreateInfoKHR		m_imageViewUsageKHR;
	VkImageViewUsageCreateInfoKHR*		m_srcImageViewUsageKHR;
	VkImageViewUsageCreateInfoKHR*		m_dstImageViewUsageKHR;
	std::vector<tcu::UVec3>				m_compressedImageResVec;	// per-level compressed-image sizes
	std::vector<tcu::UVec3>				m_uncompressedImageResVec;	// derived via getCompressedMipLevelSizes()
	VkFormat							m_srcFormat;
	VkFormat							m_dstFormat;
	VkImageUsageFlags					m_srcImageUsageFlags;
	VkImageUsageFlags					m_dstImageUsageFlags;
	std::vector<tcu::UVec3>				m_srcImageResolutions;
	std::vector<tcu::UVec3>				m_dstImageResolutions;

	MovePtr<Buffer>						m_vertexBuffer;		// full-screen quad vertices
	deUint32							m_vertexCount;
	VkDeviceSize						m_vertexBufferOffset;
};
1364 
// Value-initializes every member explicitly: several are raw pointers or plain
// Vulkan structs that would otherwise be left indeterminate. The actual setup
// happens later in prepareData()/prepareVertexBuffer().
GraphicsAttachmentsTestInstance::GraphicsAttachmentsTestInstance (Context& context, const TestParameters& parameters)
	: BasicTranscodingTestInstance(context, parameters)
	, m_srcData()
	, m_dstData()
	, m_uncompressedImages()
	, m_compressedImage()
	, m_imageViewUsageKHR()
	, m_srcImageViewUsageKHR()
	, m_dstImageViewUsageKHR()
	, m_compressedImageResVec()
	, m_uncompressedImageResVec()
	, m_srcFormat()
	, m_dstFormat()
	, m_srcImageUsageFlags()
	, m_dstImageUsageFlags()
	, m_srcImageResolutions()
	, m_dstImageResolutions()
	, m_vertexBuffer()
	, m_vertexCount(0u)
	, m_vertexBufferOffset(0ull)
{
}
1387 
// Runs one transcode pass (read or write direction) and verifies the result
// by decompressing the compressed image and comparing against reference data.
TestStatus GraphicsAttachmentsTestInstance::iterate (void)
{
	// Generate reference data and the full-screen quad used by the render pass.
	prepareData();
	prepareVertexBuffer();

	// Sanity check: src and dst raw data must match in size per (level, layer).
	for (deUint32 levelNdx = 0; levelNdx < getLevelCount(); ++levelNdx)
		for (deUint32 layerNdx = 0; layerNdx < getLayerCount(); ++layerNdx)
			DE_ASSERT(m_srcData[levelNdx][layerNdx]->size() == m_dstData[levelNdx][layerNdx]->size());

	if (isWriteToCompressedOperation())
		transcodeWrite();
	else
		transcodeRead();

	// In the write direction the source data is the reference; in the read
	// direction the readback data is.
	// NOTE(review): verifyDecompression() is declared with parameters in
	// (layer, level) order, but levelNdx/layerNdx are passed here in
	// (level, layer) order -- confirm against the definition whether the
	// declaration's parameter names or this call site is swapped.
	for (deUint32 levelNdx = 0; levelNdx < getLevelCount(); ++levelNdx)
		for (deUint32 layerNdx = 0; layerNdx < getLayerCount(); ++layerNdx)
			if (isWriteToCompressedOperation())
			{
				if (!verifyDecompression(*m_srcData[levelNdx][layerNdx], m_compressedImage, levelNdx, layerNdx, m_compressedImageResVec[levelNdx]))
					return TestStatus::fail("Images difference detected");
			}
			else
			{
				if (!verifyDecompression(*m_dstData[levelNdx][layerNdx], m_compressedImage, levelNdx, layerNdx, m_compressedImageResVec[levelNdx]))
					return TestStatus::fail("Images difference detected");
			}

	return TestStatus::pass("Pass");
}
1417 
prepareData()1418 void GraphicsAttachmentsTestInstance::prepareData ()
1419 {
1420 	VkImageViewUsageCreateInfoKHR*	imageViewUsageKHRNull	= (VkImageViewUsageCreateInfoKHR*)DE_NULL;
1421 
1422 	m_imageViewUsageKHR			= makeImageViewUsageCreateInfo(m_parameters.compressedImageViewUsage);
1423 
1424 	m_srcImageViewUsageKHR		= isWriteToCompressedOperation() ? imageViewUsageKHRNull : &m_imageViewUsageKHR;
1425 	m_dstImageViewUsageKHR		= isWriteToCompressedOperation() ? &m_imageViewUsageKHR : imageViewUsageKHRNull;
1426 
1427 	m_srcFormat					= isWriteToCompressedOperation() ? m_parameters.formatUncompressed : m_parameters.formatCompressed;
1428 	m_dstFormat					= isWriteToCompressedOperation() ? m_parameters.formatCompressed : m_parameters.formatUncompressed;
1429 
1430 	m_srcImageUsageFlags		= isWriteToCompressedOperation() ? m_parameters.uncompressedImageUsage : m_parameters.compressedImageUsage;
1431 	m_dstImageUsageFlags		= isWriteToCompressedOperation() ? m_parameters.compressedImageUsage : m_parameters.uncompressedImageUsage;
1432 
1433 	m_compressedImageResVec		= getMipLevelSizes(getLayerDims());
1434 	m_uncompressedImageResVec	= getCompressedMipLevelSizes(m_parameters.formatCompressed, m_compressedImageResVec);
1435 
1436 	m_srcImageResolutions		= isWriteToCompressedOperation() ? m_uncompressedImageResVec : m_compressedImageResVec;
1437 	m_dstImageResolutions		= isWriteToCompressedOperation() ? m_compressedImageResVec : m_uncompressedImageResVec;
1438 
1439 	m_srcData.resize(getLevelCount());
1440 	m_dstData.resize(getLevelCount());
1441 	m_uncompressedImages.resize(getLevelCount());
1442 
1443 	for (deUint32 levelNdx = 0; levelNdx < getLevelCount(); ++levelNdx)
1444 	{
1445 		m_srcData[levelNdx].resize(getLayerCount());
1446 		m_dstData[levelNdx].resize(getLayerCount());
1447 		m_uncompressedImages[levelNdx].resize(getLayerCount());
1448 
1449 		for (deUint32 layerNdx = 0; layerNdx < getLayerCount(); ++layerNdx)
1450 		{
1451 			m_srcData[levelNdx][layerNdx] = SharedPtr<RawDataVector>(new RawDataVector);
1452 			m_dstData[levelNdx][layerNdx] = SharedPtr<RawDataVector>(new RawDataVector);
1453 
1454 			if (isWriteToCompressedOperation())
1455 			{
1456 				getUncompressedImageData(m_srcFormat, m_srcImageResolutions[levelNdx], *m_srcData[levelNdx][layerNdx], layerNdx, levelNdx);
1457 
1458 				m_dstData[levelNdx][layerNdx]->resize((size_t)getCompressedImageSizeInBytes(m_dstFormat, m_dstImageResolutions[levelNdx]));
1459 			}
1460 			else
1461 			{
1462 				getCompressedImageData(m_srcFormat, m_srcImageResolutions[levelNdx], *m_srcData[levelNdx][layerNdx], layerNdx, levelNdx);
1463 
1464 				m_dstData[levelNdx][layerNdx]->resize((size_t)getUncompressedImageSizeInBytes(m_dstFormat, m_dstImageResolutions[levelNdx]));
1465 			}
1466 
1467 			DE_ASSERT(m_srcData[levelNdx][layerNdx]->size() == m_dstData[levelNdx][layerNdx]->size());
1468 		}
1469 	}
1470 }
1471 
prepareVertexBuffer()1472 void GraphicsAttachmentsTestInstance::prepareVertexBuffer ()
1473 {
1474 	const DeviceInterface&			vk						= m_context.getDeviceInterface();
1475 	const VkDevice					device					= m_context.getDevice();
1476 	Allocator&						allocator				= m_context.getDefaultAllocator();
1477 
1478 	const std::vector<tcu::Vec4>	vertexArray				= createFullscreenQuad();
1479 	const size_t					vertexBufferSizeInBytes	= vertexArray.size() * sizeof(vertexArray[0]);
1480 
1481 	m_vertexCount	= static_cast<deUint32>(vertexArray.size());
1482 	m_vertexBuffer	= MovePtr<Buffer>(new Buffer(vk, device, allocator, makeBufferCreateInfo(vertexBufferSizeInBytes, VK_BUFFER_USAGE_VERTEX_BUFFER_BIT), MemoryRequirement::HostVisible));
1483 
1484 	// Upload vertex data
1485 	const Allocation&	vertexBufferAlloc	= m_vertexBuffer->getAllocation();
1486 	deMemcpy(vertexBufferAlloc.getHostPtr(), &vertexArray[0], vertexBufferSizeInBytes);
1487 	flushMappedMemoryRange(vk, device, vertexBufferAlloc.getMemory(), vertexBufferAlloc.getOffset(), vertexBufferSizeInBytes);
1488 }
1489 
// Read path: uploads reference data into every (level, layer) of a single
// compressed source image, then for each subresource renders a full-screen
// quad that reads the data through an uncompressed-format input-attachment
// view and writes it to an uncompressed color attachment, which is read back
// into m_dstData. Ownership of the compressed image is transferred to
// m_compressedImage at the end for later verification.
void GraphicsAttachmentsTestInstance::transcodeRead ()
{
	const DeviceInterface&				vk						= m_context.getDeviceInterface();
	const VkDevice						device					= m_context.getDevice();
	const deUint32						queueFamilyIndex		= m_context.getUniversalQueueFamilyIndex();
	const VkQueue						queue					= m_context.getUniversalQueue();
	Allocator&							allocator				= m_context.getDefaultAllocator();

	const VkImageCreateFlags*			imgCreateFlagsOverride	= DE_NULL;

	// One compressed source image holds all mip levels and array layers.
	const VkImageCreateInfo				srcImageCreateInfo		= makeCreateImageInfo(m_srcFormat, m_parameters.imageType, m_srcImageResolutions[0], m_srcImageUsageFlags, imgCreateFlagsOverride, getLevelCount(), getLayerCount());
	MovePtr<Image>						srcImage				(new Image(vk, device, allocator, srcImageCreateInfo, MemoryRequirement::Any));

	const Unique<VkShaderModule>		vertShaderModule		(createShaderModule(vk, device, m_context.getBinaryCollection().get("vert"), 0));
	const Unique<VkShaderModule>		fragShaderModule		(createShaderModule(vk, device, m_context.getBinaryCollection().get("frag"), 0));

	// Both attachments (input and color) use the uncompressed format.
	const Unique<VkRenderPass>			renderPass				(makeRenderPass(vk, device, m_parameters.formatUncompressed, m_parameters.formatUncompressed));

	const Move<VkDescriptorSetLayout>	descriptorSetLayout		(DescriptorSetLayoutBuilder()
																	.addSingleBinding(VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, VK_SHADER_STAGE_FRAGMENT_BIT)
																	.build(vk, device));
	const Move<VkDescriptorPool>		descriptorPool			(DescriptorPoolBuilder()
																	.addType(VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT)
																	.build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u));
	const Move<VkDescriptorSet>			descriptorSet			(makeDescriptorSet(vk, device, *descriptorPool, *descriptorSetLayout));

	// The pipeline uses dynamic viewport/scissor, so the creation-time render
	// size is a dummy; the real size is set per mip level below.
	const VkExtent2D					renderSizeDummy			(makeExtent2D(1u, 1u));
	const Unique<VkPipelineLayout>		pipelineLayout			(makePipelineLayout(vk, device, *descriptorSetLayout));
	const Unique<VkPipeline>			pipeline				(makeGraphicsPipeline(vk, device, *pipelineLayout, *renderPass, *vertShaderModule, *fragShaderModule, renderSizeDummy, 1u, true));

	const Unique<VkCommandPool>			cmdPool					(createCommandPool(vk, device, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT, queueFamilyIndex));
	const Unique<VkCommandBuffer>		cmdBuffer				(allocateCommandBuffer(vk, device, *cmdPool, VK_COMMAND_BUFFER_LEVEL_PRIMARY));

	for (deUint32 levelNdx = 0; levelNdx < getLevelCount(); ++levelNdx)
	{
		const UVec3&				uncompressedImageRes	= m_uncompressedImageResVec[levelNdx];
		const UVec3&				srcImageResolution		= m_srcImageResolutions[levelNdx];
		const UVec3&				dstImageResolution		= m_dstImageResolutions[levelNdx];
		const size_t				srcImageSizeInBytes		= m_srcData[levelNdx][0]->size();
		const size_t				dstImageSizeInBytes		= m_dstData[levelNdx][0]->size();
		const UVec3					srcImageResBlocked		= getCompressedImageResolutionBlockCeil(m_parameters.formatCompressed, srcImageResolution);

		// Per-layer destination image: single level, single layer.
		const VkImageCreateInfo		dstImageCreateInfo		= makeCreateImageInfo(m_dstFormat, m_parameters.imageType, dstImageResolution, m_dstImageUsageFlags, imgCreateFlagsOverride, SINGLE_LEVEL, SINGLE_LAYER);

		const VkBufferCreateInfo	srcImageBufferInfo		= makeBufferCreateInfo(srcImageSizeInBytes, VK_BUFFER_USAGE_TRANSFER_SRC_BIT);
		const MovePtr<Buffer>		srcImageBuffer			= MovePtr<Buffer>(new Buffer(vk, device, allocator, srcImageBufferInfo, MemoryRequirement::HostVisible));

		const VkBufferCreateInfo	dstImageBufferInfo		= makeBufferCreateInfo(dstImageSizeInBytes, VK_BUFFER_USAGE_TRANSFER_DST_BIT);
		MovePtr<Buffer>				dstImageBuffer			= MovePtr<Buffer>(new Buffer(vk, device, allocator, dstImageBufferInfo, MemoryRequirement::HostVisible));

		const VkExtent2D			renderSize				(makeExtent2D(uncompressedImageRes.x(), uncompressedImageRes.y()));
		const VkViewport			viewport				= makeViewport(0.0f, 0.0f, static_cast<float>(renderSize.width), static_cast<float>(renderSize.height), 0.0f, 1.0f);
		const VkRect2D				scissor					= makeScissor(renderSize.width, renderSize.height);

		for (deUint32 layerNdx = 0; layerNdx < getLayerCount(); ++layerNdx)
		{
			// The source view selects exactly one (level, layer) of the
			// compressed image; the destination image has a single subresource.
			const VkImageSubresourceRange	srcSubresourceRange		= makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, levelNdx, SINGLE_LEVEL, layerNdx, SINGLE_LAYER);
			const VkImageSubresourceRange	dstSubresourceRange		= makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, SINGLE_LEVEL, 0u, SINGLE_LAYER);

			Move<VkImageView>				srcImageView			(makeImageView(vk, device, srcImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatUncompressed, srcSubresourceRange, m_srcImageViewUsageKHR));

			de::MovePtr<Image>				dstImage				(new Image(vk, device, allocator, dstImageCreateInfo, MemoryRequirement::Any));
			Move<VkImageView>				dstImageView			(makeImageView(vk, device, dstImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatUncompressed, dstSubresourceRange, m_dstImageViewUsageKHR));

			const VkBufferImageCopy			srcCopyRegion			= makeBufferImageCopy(srcImageResolution.x(), srcImageResolution.y(), levelNdx, layerNdx, srcImageResBlocked.x(), srcImageResBlocked.y());
			const VkBufferMemoryBarrier		srcCopyBufferBarrierPre	= makeBufferMemoryBarrier(VK_ACCESS_HOST_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT, srcImageBuffer->get(), 0ull, srcImageSizeInBytes);
			const VkImageMemoryBarrier		srcCopyImageBarrierPre	= makeImageMemoryBarrier(0u, VK_ACCESS_TRANSFER_WRITE_BIT, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, srcImage->get(), srcSubresourceRange);
			const VkImageMemoryBarrier		srcCopyImageBarrierPost	= makeImageMemoryBarrier(VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_SHADER_READ_BIT, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL, srcImage->get(), srcSubresourceRange);
			const VkBufferImageCopy			dstCopyRegion			= makeBufferImageCopy(dstImageResolution.x(), dstImageResolution.y());
			const VkImageMemoryBarrier		dstInitImageBarrier		= makeImageMemoryBarrier(0u, VK_ACCESS_SHADER_READ_BIT, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, dstImage->get(), dstSubresourceRange);

			const VkImageView				attachmentBindInfos[]	= { *srcImageView, *dstImageView };
			const VkExtent2D				framebufferSize			(makeExtent2D(dstImageResolution[0], dstImageResolution[1]));
			const Move<VkFramebuffer>		framebuffer				(makeFramebuffer(vk, device, *renderPass, DE_LENGTH_OF_ARRAY(attachmentBindInfos), attachmentBindInfos, framebufferSize, SINGLE_LAYER));

			// Upload source image data
			const Allocation& alloc = srcImageBuffer->getAllocation();
			deMemcpy(alloc.getHostPtr(), &m_srcData[levelNdx][layerNdx]->at(0), srcImageSizeInBytes);
			flushMappedMemoryRange(vk, device, alloc.getMemory(), alloc.getOffset(), srcImageSizeInBytes);

			beginCommandBuffer(vk, *cmdBuffer);
			vk.cmdBindPipeline(*cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *pipeline);

			// Copy buffer to image
			// NOTE(review): the post-copy barrier uses dstStageMask
			// TOP_OF_PIPE with VK_ACCESS_SHADER_READ_BIT; TOP_OF_PIPE as a
			// destination stage does not cover the fragment-shader read --
			// confirm whether FRAGMENT_SHADER was intended here.
			vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1u, &srcCopyBufferBarrierPre, 1u, &srcCopyImageBarrierPre);
			vk.cmdCopyBufferToImage(*cmdBuffer, srcImageBuffer->get(), srcImage->get(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &srcCopyRegion);
			vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, DE_NULL, 1u, &srcCopyImageBarrierPost);

			// Define destination image layout
			vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, DE_NULL, 1u, &dstInitImageBarrier);

			beginRenderPass(vk, *cmdBuffer, *renderPass, *framebuffer, renderSize);

			// Bind the source view as the input attachment and draw the quad.
			const VkDescriptorImageInfo	descriptorSrcImageInfo(makeDescriptorImageInfo(DE_NULL, *srcImageView, VK_IMAGE_LAYOUT_GENERAL));
			DescriptorSetUpdateBuilder()
				.writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u), VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, &descriptorSrcImageInfo)
				.update(vk, device);

			vk.cmdBindDescriptorSets(*cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *pipelineLayout, 0u, 1u, &descriptorSet.get(), 0u, DE_NULL);
			vk.cmdBindVertexBuffers(*cmdBuffer, 0u, 1u, &m_vertexBuffer->get(), &m_vertexBufferOffset);

			vk.cmdSetViewport(*cmdBuffer, 0u, 1u, &viewport);
			vk.cmdSetScissor(*cmdBuffer, 0u, 1u, &scissor);

			vk.cmdDraw(*cmdBuffer, (deUint32)m_vertexCount, 1, 0, 0);

			vk.cmdEndRenderPass(*cmdBuffer);

			// Read the rendered result back into the host-visible buffer.
			const VkImageMemoryBarrier prepareForTransferBarrier = makeImageMemoryBarrier(
				VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
				VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_GENERAL,
				dstImage->get(), dstSubresourceRange);

			const VkBufferMemoryBarrier copyBarrier = makeBufferMemoryBarrier(
				VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_HOST_READ_BIT,
				dstImageBuffer->get(), 0ull, dstImageSizeInBytes);

			vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0, (const VkBufferMemoryBarrier*)DE_NULL, 1, &prepareForTransferBarrier);
			vk.cmdCopyImageToBuffer(*cmdBuffer, dstImage->get(), VK_IMAGE_LAYOUT_GENERAL, dstImageBuffer->get(), 1u, &dstCopyRegion);
			vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_HOST_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1, &copyBarrier, 0, (const VkImageMemoryBarrier*)DE_NULL);

			endCommandBuffer(vk, *cmdBuffer);

			submitCommandsAndWait(vk, device, queue, *cmdBuffer);

			// Invalidate before reading the coherent-unknown allocation, then
			// store the readback into m_dstData for verification in iterate().
			const Allocation& dstImageBufferAlloc = dstImageBuffer->getAllocation();
			invalidateMappedMemoryRange(vk, device, dstImageBufferAlloc.getMemory(), dstImageBufferAlloc.getOffset(), dstImageSizeInBytes);
			deMemcpy(&m_dstData[levelNdx][layerNdx]->at(0), dstImageBufferAlloc.getHostPtr(), dstImageSizeInBytes);
		}
	}

	// Hand the compressed image over for decompression-based verification.
	m_compressedImage = srcImage;
}
1623 
// Write path: for each (level, layer), uploads uncompressed reference data
// into a per-subresource source image, renders a full-screen quad that writes
// through an uncompressed-format view into the corresponding subresource of a
// single compressed destination image, and reads the result back into
// m_dstData. Ownership of the compressed image is transferred to
// m_compressedImage at the end for later verification.
void GraphicsAttachmentsTestInstance::transcodeWrite ()
{
	const DeviceInterface&				vk						= m_context.getDeviceInterface();
	const VkDevice						device					= m_context.getDevice();
	const deUint32						queueFamilyIndex		= m_context.getUniversalQueueFamilyIndex();
	const VkQueue						queue					= m_context.getUniversalQueue();
	Allocator&							allocator				= m_context.getDefaultAllocator();

	const VkImageCreateFlags*			imgCreateFlagsOverride	= DE_NULL;

	// One compressed destination image holds all mip levels and array layers.
	const VkImageCreateInfo				dstImageCreateInfo		= makeCreateImageInfo(m_dstFormat, m_parameters.imageType, m_dstImageResolutions[0], m_dstImageUsageFlags, imgCreateFlagsOverride, getLevelCount(), getLayerCount());
	MovePtr<Image>						dstImage				(new Image(vk, device, allocator, dstImageCreateInfo, MemoryRequirement::Any));

	const Unique<VkShaderModule>		vertShaderModule		(createShaderModule(vk, device, m_context.getBinaryCollection().get("vert"), 0));
	const Unique<VkShaderModule>		fragShaderModule		(createShaderModule(vk, device, m_context.getBinaryCollection().get("frag"), 0));

	// Both attachments (input and color) use the uncompressed format.
	const Unique<VkRenderPass>			renderPass				(makeRenderPass(vk, device, m_parameters.formatUncompressed, m_parameters.formatUncompressed));

	const Move<VkDescriptorSetLayout>	descriptorSetLayout		(DescriptorSetLayoutBuilder()
																	.addSingleBinding(VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, VK_SHADER_STAGE_FRAGMENT_BIT)
																	.build(vk, device));
	const Move<VkDescriptorPool>		descriptorPool			(DescriptorPoolBuilder()
																	.addType(VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT)
																	.build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u));
	const Move<VkDescriptorSet>			descriptorSet			(makeDescriptorSet(vk, device, *descriptorPool, *descriptorSetLayout));

	// The pipeline uses dynamic viewport/scissor, so the creation-time render
	// size is a dummy; the real size is set per mip level below.
	const VkExtent2D					renderSizeDummy			(makeExtent2D(1u, 1u));
	const Unique<VkPipelineLayout>		pipelineLayout			(makePipelineLayout(vk, device, *descriptorSetLayout));
	const Unique<VkPipeline>			pipeline				(makeGraphicsPipeline(vk, device, *pipelineLayout, *renderPass, *vertShaderModule, *fragShaderModule, renderSizeDummy, 1u, true));

	const Unique<VkCommandPool>			cmdPool					(createCommandPool(vk, device, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT, queueFamilyIndex));
	const Unique<VkCommandBuffer>		cmdBuffer				(allocateCommandBuffer(vk, device, *cmdPool, VK_COMMAND_BUFFER_LEVEL_PRIMARY));

	for (deUint32 levelNdx = 0; levelNdx < getLevelCount(); ++levelNdx)
	{
		const UVec3&				uncompressedImageRes	= m_uncompressedImageResVec[levelNdx];
		const UVec3&				srcImageResolution		= m_srcImageResolutions[levelNdx];
		const UVec3&				dstImageResolution		= m_dstImageResolutions[levelNdx];
		const UVec3					dstImageResBlocked		= getCompressedImageResolutionBlockCeil(m_parameters.formatCompressed, dstImageResolution);
		const size_t				srcImageSizeInBytes		= m_srcData[levelNdx][0]->size();
		const size_t				dstImageSizeInBytes		= m_dstData[levelNdx][0]->size();

		// Per-layer source image: single level, single layer.
		const VkImageCreateInfo		srcImageCreateInfo		= makeCreateImageInfo(m_srcFormat, m_parameters.imageType, srcImageResolution, m_srcImageUsageFlags, imgCreateFlagsOverride, SINGLE_LEVEL, SINGLE_LAYER);

		const VkExtent2D			renderSize				(makeExtent2D(uncompressedImageRes.x(), uncompressedImageRes.y()));
		const VkViewport			viewport				= makeViewport(0.0f, 0.0f, static_cast<float>(renderSize.width), static_cast<float>(renderSize.height), 0.0f, 1.0f);
		const VkRect2D				scissor					= makeScissor(renderSize.width, renderSize.height);

		for (deUint32 layerNdx = 0; layerNdx < getLayerCount(); ++layerNdx)
		{
			const VkBufferCreateInfo		srcImageBufferInfo		= makeBufferCreateInfo(srcImageSizeInBytes, VK_BUFFER_USAGE_TRANSFER_SRC_BIT);
			const MovePtr<Buffer>			srcImageBuffer			= MovePtr<Buffer>(new Buffer(vk, device, allocator, srcImageBufferInfo, MemoryRequirement::HostVisible));

			const VkBufferCreateInfo		dstImageBufferInfo		= makeBufferCreateInfo(dstImageSizeInBytes, VK_BUFFER_USAGE_TRANSFER_DST_BIT);
			MovePtr<Buffer>					dstImageBuffer			= MovePtr<Buffer>(new Buffer(vk, device, allocator, dstImageBufferInfo, MemoryRequirement::HostVisible));

			// The destination view selects exactly one (level, layer) of the
			// compressed image; the source image has a single subresource.
			const VkImageSubresourceRange	srcSubresourceRange		= makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, SINGLE_LEVEL, 0u, SINGLE_LAYER);
			const VkImageSubresourceRange	dstSubresourceRange		= makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, levelNdx, SINGLE_LEVEL, layerNdx, SINGLE_LAYER);

			Move<VkImageView>				dstImageView			(makeImageView(vk, device, dstImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatUncompressed, dstSubresourceRange, m_dstImageViewUsageKHR));

			de::MovePtr<Image>				srcImage				(new Image(vk, device, allocator, srcImageCreateInfo, MemoryRequirement::Any));
			Move<VkImageView>				srcImageView			(makeImageView(vk, device, srcImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatUncompressed, srcSubresourceRange, m_srcImageViewUsageKHR));

			const VkBufferImageCopy			srcCopyRegion			= makeBufferImageCopy(srcImageResolution.x(), srcImageResolution.y(), 0u, 0u);
			const VkBufferMemoryBarrier		srcCopyBufferBarrierPre	= makeBufferMemoryBarrier(VK_ACCESS_HOST_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT, srcImageBuffer->get(), 0ull, srcImageSizeInBytes);
			const VkImageMemoryBarrier		srcCopyImageBarrierPre	= makeImageMemoryBarrier(0u, VK_ACCESS_TRANSFER_WRITE_BIT, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, srcImage->get(), srcSubresourceRange);
			const VkImageMemoryBarrier		srcCopyImageBarrierPost	= makeImageMemoryBarrier(VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_INPUT_ATTACHMENT_READ_BIT, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL, srcImage->get(), srcSubresourceRange);
			const VkBufferImageCopy			dstCopyRegion			= makeBufferImageCopy(dstImageResolution.x(), dstImageResolution.y(), levelNdx, layerNdx, dstImageResBlocked.x(), dstImageResBlocked.y());
			const VkImageMemoryBarrier		dstInitImageBarrier		= makeImageMemoryBarrier(0u, VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL, dstImage->get(), dstSubresourceRange);

			const VkImageView				attachmentBindInfos[]	= { *srcImageView, *dstImageView };
			const VkExtent2D				framebufferSize			(renderSize);
			const Move<VkFramebuffer>		framebuffer				(makeFramebuffer(vk, device, *renderPass, DE_LENGTH_OF_ARRAY(attachmentBindInfos), attachmentBindInfos, framebufferSize, SINGLE_LAYER));

			// Upload source image data
			const Allocation& alloc = srcImageBuffer->getAllocation();
			deMemcpy(alloc.getHostPtr(), &m_srcData[levelNdx][layerNdx]->at(0), srcImageSizeInBytes);
			flushMappedMemoryRange(vk, device, alloc.getMemory(), alloc.getOffset(), srcImageSizeInBytes);

			beginCommandBuffer(vk, *cmdBuffer);
			vk.cmdBindPipeline(*cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *pipeline);

			// Copy buffer to image
			// NOTE(review): the post-copy barrier uses dstStageMask
			// TOP_OF_PIPE with VK_ACCESS_INPUT_ATTACHMENT_READ_BIT;
			// TOP_OF_PIPE as a destination stage does not cover the
			// fragment-shader read -- confirm whether FRAGMENT_SHADER was
			// intended here.
			vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1u, &srcCopyBufferBarrierPre, 1u, &srcCopyImageBarrierPre);
			vk.cmdCopyBufferToImage(*cmdBuffer, srcImageBuffer->get(), srcImage->get(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &srcCopyRegion);
			vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, DE_NULL, 1u, &srcCopyImageBarrierPost);

			// Define destination image layout
			vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, DE_NULL, 1u, &dstInitImageBarrier);

			beginRenderPass(vk, *cmdBuffer, *renderPass, *framebuffer, renderSize);

			// Bind the source view as the input attachment and draw the quad.
			const VkDescriptorImageInfo	descriptorSrcImageInfo(makeDescriptorImageInfo(DE_NULL, *srcImageView, VK_IMAGE_LAYOUT_GENERAL));
			DescriptorSetUpdateBuilder()
				.writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u), VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, &descriptorSrcImageInfo)
				.update(vk, device);

			vk.cmdBindDescriptorSets(*cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *pipelineLayout, 0u, 1u, &descriptorSet.get(), 0u, DE_NULL);
			vk.cmdBindVertexBuffers(*cmdBuffer, 0u, 1u, &m_vertexBuffer->get(), &m_vertexBufferOffset);

			vk.cmdSetViewport(*cmdBuffer, 0u, 1u, &viewport);
			vk.cmdSetScissor(*cmdBuffer, 0u, 1u, &scissor);

			vk.cmdDraw(*cmdBuffer, (deUint32)m_vertexCount, 1, 0, 0);

			vk.cmdEndRenderPass(*cmdBuffer);

			// Read the rendered result back into the host-visible buffer.
			const VkImageMemoryBarrier prepareForTransferBarrier = makeImageMemoryBarrier(
				VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
				VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_GENERAL,
				dstImage->get(), dstSubresourceRange);

			const VkBufferMemoryBarrier copyBarrier = makeBufferMemoryBarrier(
				VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_HOST_READ_BIT,
				dstImageBuffer->get(), 0ull, dstImageSizeInBytes);

			vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0, (const VkBufferMemoryBarrier*)DE_NULL, 1, &prepareForTransferBarrier);
			vk.cmdCopyImageToBuffer(*cmdBuffer, dstImage->get(), VK_IMAGE_LAYOUT_GENERAL, dstImageBuffer->get(), 1u, &dstCopyRegion);
			vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_HOST_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1, &copyBarrier, 0, (const VkImageMemoryBarrier*)DE_NULL);

			endCommandBuffer(vk, *cmdBuffer);

			submitCommandsAndWait(vk, device, queue, *cmdBuffer);

			// Invalidate before reading the coherent-unknown allocation, then
			// store the readback into m_dstData for verification in iterate().
			const Allocation& dstImageBufferAlloc = dstImageBuffer->getAllocation();
			invalidateMappedMemoryRange(vk, device, dstImageBufferAlloc.getMemory(), dstImageBufferAlloc.getOffset(), dstImageSizeInBytes);
			deMemcpy(&m_dstData[levelNdx][layerNdx]->at(0), dstImageBufferAlloc.getHostPtr(), dstImageSizeInBytes);
		}
	}

	// Hand the compressed image over for decompression-based verification.
	m_compressedImage = dstImage;
}
1757 
isWriteToCompressedOperation()1758 bool GraphicsAttachmentsTestInstance::isWriteToCompressedOperation ()
1759 {
1760 	return (m_parameters.operation == OPERATION_ATTACHMENT_WRITE);
1761 }
1762 
makeCreateImageInfo(const VkFormat format,const ImageType type,const UVec3 & size,const VkImageUsageFlags usageFlags,const VkImageCreateFlags * createFlags,const deUint32 levels,const deUint32 layers)1763 VkImageCreateInfo GraphicsAttachmentsTestInstance::makeCreateImageInfo (const VkFormat				format,
1764 																	    const ImageType				type,
1765 																	    const UVec3&				size,
1766 																	    const VkImageUsageFlags		usageFlags,
1767 																	    const VkImageCreateFlags*	createFlags,
1768 																	    const deUint32				levels,
1769 																	    const deUint32				layers)
1770 {
1771 	const VkImageType			imageType				= mapImageType(type);
1772 	const VkImageCreateFlags	imageCreateFlagsBase	= VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT;
1773 	const VkImageCreateFlags	imageCreateFlagsAddOn	= isCompressedFormat(format) ? VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT_KHR | VK_IMAGE_CREATE_EXTENDED_USAGE_BIT_KHR : 0;
1774 	const VkImageCreateFlags	imageCreateFlags		= (createFlags != DE_NULL) ? *createFlags : (imageCreateFlagsBase | imageCreateFlagsAddOn);
1775 
1776 	const VkImageCreateInfo createImageInfo =
1777 	{
1778 		VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,			// VkStructureType			sType;
1779 		DE_NULL,										// const void*				pNext;
1780 		imageCreateFlags,								// VkImageCreateFlags		flags;
1781 		imageType,										// VkImageType				imageType;
1782 		format,											// VkFormat					format;
1783 		makeExtent3D(getLayerSize(type, size)),			// VkExtent3D				extent;
1784 		levels,											// deUint32					mipLevels;
1785 		layers,											// deUint32					arrayLayers;
1786 		VK_SAMPLE_COUNT_1_BIT,							// VkSampleCountFlagBits	samples;
1787 		VK_IMAGE_TILING_OPTIMAL,						// VkImageTiling			tiling;
1788 		usageFlags,										// VkImageUsageFlags		usage;
1789 		VK_SHARING_MODE_EXCLUSIVE,						// VkSharingMode			sharingMode;
1790 		0u,												// deUint32					queueFamilyIndexCount;
1791 		DE_NULL,										// const deUint32*			pQueueFamilyIndices;
1792 		VK_IMAGE_LAYOUT_UNDEFINED,						// VkImageLayout			initialLayout;
1793 	};
1794 
1795 	return createImageInfo;
1796 }
1797 
getCompressedImageData(const VkFormat format,const UVec3 & size,std::vector<deUint8> & data,const deUint32 layer,const deUint32 level)1798 VkDeviceSize GraphicsAttachmentsTestInstance::getCompressedImageData (const VkFormat			format,
1799 																	  const UVec3&				size,
1800 																	  std::vector<deUint8>&		data,
1801 																	  const deUint32			layer,
1802 																	  const deUint32			level)
1803 {
1804 	VkDeviceSize	sizeBytes	= getCompressedImageSizeInBytes(format, size);
1805 
1806 	data.resize((size_t)sizeBytes);
1807 	generateData(&data[0], data.size(), format, layer, level);
1808 
1809 	return sizeBytes;
1810 }
1811 
getUncompressedImageData(const VkFormat format,const UVec3 & size,std::vector<deUint8> & data,const deUint32 layer,const deUint32 level)1812 VkDeviceSize GraphicsAttachmentsTestInstance::getUncompressedImageData (const VkFormat			format,
1813 																		const UVec3&			size,
1814 																		std::vector<deUint8>&	data,
1815 																		const deUint32			layer,
1816 																		const deUint32			level)
1817 {
1818 	tcu::IVec3				sizeAsIVec3	= tcu::IVec3(static_cast<int>(size[0]), static_cast<int>(size[1]), static_cast<int>(size[2]));
1819 	VkDeviceSize			sizeBytes	= getImageSizeBytes(sizeAsIVec3, format);
1820 
1821 	data.resize((size_t)sizeBytes);
1822 	generateData(&data[0], data.size(), format, layer, level);
1823 
1824 	return sizeBytes;
1825 }
1826 
// Verifies that transcoding did not corrupt the compressed data for one
// (level, layer) of the result image.
//
// Strategy: upload the reference compressed bytes into a freshly created
// compressed image, then run a fragment shader ("frag_verify") that samples
// both the reference image and the result image (resCompressedImage, viewed
// with the compressed format) and stores what it reads into two storage
// images of m_parameters.formatForVerify. Both storage images are copied to
// host-visible buffers and compared byte-for-byte.
//
// refCompressedData  - expected compressed bytes for this (level, layer)
// resCompressedImage - image produced by the transcode operation under test
// level, layer       - mip level / array layer of the result image to check
// mipmapDims         - extent of that mip level in texels
//
// Returns true when the decompressed images are identical; on mismatch a
// fuzzy comparison is logged (for the error mask) and false is returned.
bool GraphicsAttachmentsTestInstance::verifyDecompression (const std::vector<deUint8>&	refCompressedData,
														   const de::MovePtr<Image>&	resCompressedImage,
														   const deUint32				level,
														   const deUint32				layer,
														   const UVec3&					mipmapDims)
{
	const DeviceInterface&				vk							= m_context.getDeviceInterface();
	const VkDevice						device						= m_context.getDevice();
	const deUint32						queueFamilyIndex			= m_context.getUniversalQueueFamilyIndex();
	const VkQueue						queue						= m_context.getUniversalQueue();
	Allocator&							allocator					= m_context.getDefaultAllocator();

	// Alternate the sampled-image layout per layer so both SHADER_READ_ONLY_OPTIMAL
	// and GENERAL read layouts get coverage.
	const bool							layoutShaderReadOnly		= (layer % 2u) == 1;
	// Mip extent rounded up to whole compressed blocks.
	const UVec3							mipmapDimsBlocked			= getCompressedImageResolutionBlockCeil(m_parameters.formatCompressed, mipmapDims);

	const VkImageSubresourceRange		subresourceRange			= makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, SINGLE_LEVEL, 0u, SINGLE_LAYER);
	const VkImageSubresourceRange		resSubresourceRange			= makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, level, SINGLE_LEVEL, layer, SINGLE_LAYER);

	const VkDeviceSize					dstBufferSize				= getUncompressedImageSizeInBytes(m_parameters.formatForVerify, mipmapDims);
	const VkImageUsageFlags				refSrcImageUsageFlags		= VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;

	// Staging buffer + compressed image holding the reference data.
	const VkBufferCreateInfo			refSrcImageBufferInfo		(makeBufferCreateInfo(refCompressedData.size(), VK_BUFFER_USAGE_TRANSFER_SRC_BIT));
	const MovePtr<Buffer>				refSrcImageBuffer			= MovePtr<Buffer>(new Buffer(vk, device, allocator, refSrcImageBufferInfo, MemoryRequirement::HostVisible));

	const VkImageCreateFlags			refSrcImageCreateFlags		= 0;
	const VkImageCreateInfo				refSrcImageCreateInfo		= makeCreateImageInfo(m_parameters.formatCompressed, m_parameters.imageType, mipmapDimsBlocked, refSrcImageUsageFlags, &refSrcImageCreateFlags, SINGLE_LEVEL, SINGLE_LAYER);
	const MovePtr<Image>				refSrcImage					(new Image(vk, device, allocator, refSrcImageCreateInfo, MemoryRequirement::Any));
	Move<VkImageView>					refSrcImageView				(makeImageView(vk, device, refSrcImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatCompressed, subresourceRange));

	// Compressed-format view over the (level, layer) of the result image.
	const VkImageUsageFlags				resSrcImageUsageFlags		= VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
	const VkImageViewUsageCreateInfoKHR	resSrcImageViewUsageKHR		= makeImageViewUsageCreateInfo(resSrcImageUsageFlags);
	Move<VkImageView>					resSrcImageView				(makeImageView(vk, device, resCompressedImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatCompressed, resSubresourceRange, &resSrcImageViewUsageKHR));

	// Storage image + readback buffer receiving the decompressed reference.
	const VkImageCreateFlags			refDstImageCreateFlags		= 0;
	const VkImageUsageFlags				refDstImageUsageFlags		= VK_IMAGE_USAGE_STORAGE_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
	const VkImageCreateInfo				refDstImageCreateInfo		= makeCreateImageInfo(m_parameters.formatForVerify, m_parameters.imageType, mipmapDims, refDstImageUsageFlags, &refDstImageCreateFlags, SINGLE_LEVEL, SINGLE_LAYER);
	const MovePtr<Image>				refDstImage					(new Image(vk, device, allocator, refDstImageCreateInfo, MemoryRequirement::Any));
	const Move<VkImageView>				refDstImageView				(makeImageView(vk, device, refDstImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatForVerify, subresourceRange));
	const VkImageMemoryBarrier			refDstInitImageBarrier		= makeImageMemoryBarrier(0u, VK_ACCESS_SHADER_WRITE_BIT, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL, refDstImage->get(), subresourceRange);
	const VkBufferCreateInfo			refDstBufferInfo			(makeBufferCreateInfo(dstBufferSize, VK_BUFFER_USAGE_TRANSFER_DST_BIT));
	const MovePtr<Buffer>				refDstBuffer				= MovePtr<Buffer>(new Buffer(vk, device, allocator, refDstBufferInfo, MemoryRequirement::HostVisible));

	// Storage image + readback buffer receiving the decompressed result.
	const VkImageCreateFlags			resDstImageCreateFlags		= 0;
	const VkImageUsageFlags				resDstImageUsageFlags		= VK_IMAGE_USAGE_STORAGE_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
	const VkImageCreateInfo				resDstImageCreateInfo		= makeCreateImageInfo(m_parameters.formatForVerify, m_parameters.imageType, mipmapDims, resDstImageUsageFlags, &resDstImageCreateFlags, SINGLE_LEVEL, SINGLE_LAYER);
	const MovePtr<Image>				resDstImage					(new Image(vk, device, allocator, resDstImageCreateInfo, MemoryRequirement::Any));
	const Move<VkImageView>				resDstImageView				(makeImageView(vk, device, resDstImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatForVerify, subresourceRange));
	const VkImageMemoryBarrier			resDstInitImageBarrier		= makeImageMemoryBarrier(0u, VK_ACCESS_SHADER_WRITE_BIT, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL, resDstImage->get(), subresourceRange);
	const VkBufferCreateInfo			resDstBufferInfo			(makeBufferCreateInfo(dstBufferSize, VK_BUFFER_USAGE_TRANSFER_DST_BIT));
	const MovePtr<Buffer>				resDstBuffer				= MovePtr<Buffer>(new Buffer(vk, device, allocator, resDstBufferInfo, MemoryRequirement::HostVisible));

	const Unique<VkShaderModule>		vertShaderModule			(createShaderModule(vk, device, m_context.getBinaryCollection().get("vert"), 0));
	const Unique<VkShaderModule>		fragShaderModule			(createShaderModule(vk, device, m_context.getBinaryCollection().get("frag_verify"), 0));

	const Unique<VkRenderPass>			renderPass					(makeRenderPass(vk, device));

	// Bindings 0/1: reference and result compressed-view samplers;
	// bindings 2/3: reference and result decompressed storage images.
	const Move<VkDescriptorSetLayout>	descriptorSetLayout			(DescriptorSetLayoutBuilder()
																		.addSingleBinding(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, VK_SHADER_STAGE_FRAGMENT_BIT)
																		.addSingleBinding(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, VK_SHADER_STAGE_FRAGMENT_BIT)
																		.addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, VK_SHADER_STAGE_FRAGMENT_BIT)
																		.addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, VK_SHADER_STAGE_FRAGMENT_BIT)
																		.build(vk, device));
	const Move<VkDescriptorPool>		descriptorPool				(DescriptorPoolBuilder()
																		.addType(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER)
																		.addType(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER)
																		.addType(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE)
																		.addType(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE)
																		.build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u));
	const Move<VkDescriptorSet>			descriptorSet				(makeDescriptorSet(vk, device, *descriptorPool, *descriptorSetLayout));
	const VkSamplerCreateInfo			refSrcSamplerInfo			(makeSamplerCreateInfo());
	const Move<VkSampler>				refSrcSampler				= vk::createSampler(vk, device, &refSrcSamplerInfo);
	const VkSamplerCreateInfo			resSrcSamplerInfo			(makeSamplerCreateInfo());
	const Move<VkSampler>				resSrcSampler				= vk::createSampler(vk, device, &resSrcSamplerInfo);
	const VkDescriptorImageInfo			descriptorRefSrcImage		(makeDescriptorImageInfo(*refSrcSampler, *refSrcImageView, layoutShaderReadOnly ? VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL : VK_IMAGE_LAYOUT_GENERAL));
	const VkDescriptorImageInfo			descriptorResSrcImage		(makeDescriptorImageInfo(*resSrcSampler, *resSrcImageView, layoutShaderReadOnly ? VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL : VK_IMAGE_LAYOUT_GENERAL));
	const VkDescriptorImageInfo			descriptorRefDstImage		(makeDescriptorImageInfo(DE_NULL, *refDstImageView, VK_IMAGE_LAYOUT_GENERAL));
	const VkDescriptorImageInfo			descriptorResDstImage		(makeDescriptorImageInfo(DE_NULL, *resDstImageView, VK_IMAGE_LAYOUT_GENERAL));

	const VkExtent2D					renderSize					(makeExtent2D(mipmapDims.x(), mipmapDims.y()));
	const Unique<VkPipelineLayout>		pipelineLayout				(makePipelineLayout(vk, device, *descriptorSetLayout));
	const Unique<VkPipeline>			pipeline					(makeGraphicsPipeline(vk, device, *pipelineLayout, *renderPass, *vertShaderModule, *fragShaderModule, renderSize, 0u));
	const Unique<VkCommandPool>			cmdPool						(createCommandPool(vk, device, VK_COMMAND_POOL_RESET_RELEASE_RESOURCES_BIT, queueFamilyIndex));
	const Unique<VkCommandBuffer>		cmdBuffer					(allocateCommandBuffer(vk, device, *cmdPool, VK_COMMAND_BUFFER_LEVEL_PRIMARY));

	// The buffer-to-image upload works in whole blocks; readback copies use texel dims.
	const VkBufferImageCopy				copyBufferToImageRegion		= makeBufferImageCopy(mipmapDimsBlocked.x(), mipmapDimsBlocked.y(), 0u, 0u, mipmapDimsBlocked.x(), mipmapDimsBlocked.y());
	const VkBufferImageCopy				copyRegion					= makeBufferImageCopy(mipmapDims.x(), mipmapDims.y(), 0u, 0u);
	const VkBufferMemoryBarrier			refSrcCopyBufferBarrierPre	= makeBufferMemoryBarrier(VK_ACCESS_HOST_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT, refSrcImageBuffer->get(), 0ull, refCompressedData.size());
	const VkImageMemoryBarrier			refSrcCopyImageBarrierPre	= makeImageMemoryBarrier(0u, VK_ACCESS_TRANSFER_WRITE_BIT, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL, refSrcImage->get(), subresourceRange);
	const VkImageMemoryBarrier			refSrcCopyImageBarrierPost	= makeImageMemoryBarrier(VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_SHADER_READ_BIT, VK_IMAGE_LAYOUT_GENERAL, layoutShaderReadOnly ? VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL : VK_IMAGE_LAYOUT_GENERAL, refSrcImage->get(), subresourceRange);
	const VkImageMemoryBarrier			resCompressedImageBarrier	= makeImageMemoryBarrier(0, VK_ACCESS_SHADER_READ_BIT, VK_IMAGE_LAYOUT_GENERAL, layoutShaderReadOnly ? VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL : VK_IMAGE_LAYOUT_GENERAL, resCompressedImage->get(), resSubresourceRange);

	const Move<VkFramebuffer>			framebuffer					(makeFramebuffer(vk, device, *renderPass, 0, DE_NULL, renderSize, getLayerCount()));

	// Upload source image data
	{
		const Allocation& refSrcImageBufferAlloc = refSrcImageBuffer->getAllocation();
		deMemcpy(refSrcImageBufferAlloc.getHostPtr(), &refCompressedData[0], refCompressedData.size());
		flushMappedMemoryRange(vk, device, refSrcImageBufferAlloc.getMemory(), refSrcImageBufferAlloc.getOffset(), refCompressedData.size());
	}

	beginCommandBuffer(vk, *cmdBuffer);
	vk.cmdBindPipeline(*cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *pipeline);

	// Copy buffer to image
	vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1u, &refSrcCopyBufferBarrierPre, 1u, &refSrcCopyImageBarrierPre);
	vk.cmdCopyBufferToImage(*cmdBuffer, refSrcImageBuffer->get(), refSrcImage->get(), VK_IMAGE_LAYOUT_GENERAL, 1u, &copyBufferToImageRegion);
	vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0, DE_NULL, 1u, &refSrcCopyImageBarrierPost);

	// Make reference and result images readable
	vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, DE_NULL, 1u, &refDstInitImageBarrier);
	vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, DE_NULL, 1u, &resDstInitImageBarrier);
	{
		// Transition the result image subresource to the layout the sampler descriptor expects.
		vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, DE_NULL, 1u, &resCompressedImageBarrier);
	}

	// Full-screen draw: the fragment shader samples both compressed views and
	// writes the decoded texels into the two storage images.
	beginRenderPass(vk, *cmdBuffer, *renderPass, *framebuffer, renderSize);
	{
		DescriptorSetUpdateBuilder()
			.writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u), VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, &descriptorRefSrcImage)
			.writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(1u), VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, &descriptorResSrcImage)
			.writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(2u), VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, &descriptorRefDstImage)
			.writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(3u), VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, &descriptorResDstImage)
			.update(vk, device);

		vk.cmdBindDescriptorSets(*cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *pipelineLayout, 0u, 1u, &descriptorSet.get(), 0u, DE_NULL);
		vk.cmdBindVertexBuffers(*cmdBuffer, 0, 1, &m_vertexBuffer->get(), &m_vertexBufferOffset);
		vk.cmdDraw(*cmdBuffer, m_vertexCount, 1, 0, 0);
	}
	vk.cmdEndRenderPass(*cmdBuffer);

	// Decompress reference image
	{
		const VkImageMemoryBarrier refDstImageBarrier = makeImageMemoryBarrier(
			VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
			VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_GENERAL,
			refDstImage->get(), subresourceRange);

		const VkBufferMemoryBarrier refDstBufferBarrier = makeBufferMemoryBarrier(
			VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_HOST_READ_BIT,
			refDstBuffer->get(), 0ull, dstBufferSize);

		vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0, (const VkBufferMemoryBarrier*)DE_NULL, 1, &refDstImageBarrier);
		vk.cmdCopyImageToBuffer(*cmdBuffer, refDstImage->get(), VK_IMAGE_LAYOUT_GENERAL, refDstBuffer->get(), 1u, &copyRegion);
		vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_HOST_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1, &refDstBufferBarrier, 0, (const VkImageMemoryBarrier*)DE_NULL);
	}

	// Decompress result image
	{
		const VkImageMemoryBarrier resDstImageBarrier = makeImageMemoryBarrier(
			VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
			VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_GENERAL,
			resDstImage->get(), subresourceRange);

		const VkBufferMemoryBarrier resDstBufferBarrier = makeBufferMemoryBarrier(
			VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_HOST_READ_BIT,
			resDstBuffer->get(), 0ull, dstBufferSize);

		vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0, (const VkBufferMemoryBarrier*)DE_NULL, 1, &resDstImageBarrier);
		vk.cmdCopyImageToBuffer(*cmdBuffer, resDstImage->get(), VK_IMAGE_LAYOUT_GENERAL, resDstBuffer->get(), 1u, &copyRegion);
		vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_HOST_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1, &resDstBufferBarrier, 0, (const VkImageMemoryBarrier*)DE_NULL);
	}

	endCommandBuffer(vk, *cmdBuffer);

	submitCommandsAndWait(vk, device, queue, *cmdBuffer);

	// Compare decompressed pixel data in reference and result images
	{
		const Allocation&	refDstBufferAlloc	= refDstBuffer->getAllocation();
		invalidateMappedMemoryRange(vk, device, refDstBufferAlloc.getMemory(), refDstBufferAlloc.getOffset(), dstBufferSize);

		const Allocation&	resDstBufferAlloc	= resDstBuffer->getAllocation();
		invalidateMappedMemoryRange(vk, device, resDstBufferAlloc.getMemory(), resDstBufferAlloc.getOffset(), dstBufferSize);

		if (deMemCmp(refDstBufferAlloc.getHostPtr(), resDstBufferAlloc.getHostPtr(), (size_t)dstBufferSize) != 0)
		{
			// Do fuzzy to log error mask
			invalidateMappedMemoryRange(vk, device, resDstBufferAlloc.getMemory(), resDstBufferAlloc.getOffset(), dstBufferSize);
			invalidateMappedMemoryRange(vk, device, refDstBufferAlloc.getMemory(), refDstBufferAlloc.getOffset(), dstBufferSize);

			tcu::ConstPixelBufferAccess	resPixels	(mapVkFormat(m_parameters.formatForVerify), renderSize.width, renderSize.height, 1u, resDstBufferAlloc.getHostPtr());
			tcu::ConstPixelBufferAccess	refPixels	(mapVkFormat(m_parameters.formatForVerify), renderSize.width, renderSize.height, 1u, refDstBufferAlloc.getHostPtr());

			string	comment	= string("Image Comparison (level=") + de::toString(level) + string(", layer=") + de::toString(layer) + string(")");

			// Reference/result roles swap depending on the operation direction;
			// the comparison result is ignored — this call only logs the error mask.
			if (isWriteToCompressedOperation())
				tcu::fuzzyCompare(m_context.getTestContext().getLog(), "ImageComparison", comment.c_str(), refPixels, resPixels, 0.001f, tcu::COMPARE_LOG_EVERYTHING);
			else
				tcu::fuzzyCompare(m_context.getTestContext().getLog(), "ImageComparison", comment.c_str(), resPixels, refPixels, 0.001f, tcu::COMPARE_LOG_EVERYTHING);

			return false;
		}
	}

	return true;
}
2023 
2024 
// Variant of the attachments test in which the compressed data is accessed
// as a sampled texture (combined image sampler) in the fragment shader
// rather than as an input attachment. Reuses the attachments test's setup
// and overrides only the operation-specific pieces.
class GraphicsTextureTestInstance : public GraphicsAttachmentsTestInstance
{
public:
						GraphicsTextureTestInstance		(Context& context, const TestParameters& parameters);

protected:
	// True when the operation under test writes into the compressed image.
	virtual bool		isWriteToCompressedOperation	();
	// Transcodes by sampling the compressed source through an uncompressed view.
	virtual void		transcodeRead					();
	// Transcodes by writing into the compressed destination through an uncompressed view.
	virtual void		transcodeWrite					();
};
2035 
// All state setup is shared with the attachments variant; this constructor
// only forwards to the base class.
GraphicsTextureTestInstance::GraphicsTextureTestInstance (Context& context, const TestParameters& parameters)
	: GraphicsAttachmentsTestInstance(context, parameters)
{
}
2040 
isWriteToCompressedOperation()2041 bool GraphicsTextureTestInstance::isWriteToCompressedOperation ()
2042 {
2043 	return (m_parameters.operation == OPERATION_TEXTURE_WRITE);
2044 }
2045 
transcodeRead()2046 void GraphicsTextureTestInstance::transcodeRead ()
2047 {
2048 	const DeviceInterface&				vk						= m_context.getDeviceInterface();
2049 	const VkDevice						device					= m_context.getDevice();
2050 	const deUint32						queueFamilyIndex		= m_context.getUniversalQueueFamilyIndex();
2051 	const VkQueue						queue					= m_context.getUniversalQueue();
2052 	Allocator&							allocator				= m_context.getDefaultAllocator();
2053 
2054 	const VkImageCreateFlags*			imgCreateFlagsOverride	= DE_NULL;
2055 
2056 	const VkImageCreateInfo				srcImageCreateInfo		= makeCreateImageInfo(m_srcFormat, m_parameters.imageType, m_srcImageResolutions[0], m_srcImageUsageFlags, imgCreateFlagsOverride, getLevelCount(), getLayerCount());
2057 	MovePtr<Image>						srcImage				(new Image(vk, device, allocator, srcImageCreateInfo, MemoryRequirement::Any));
2058 
2059 	const Unique<VkShaderModule>		vertShaderModule		(createShaderModule(vk, device, m_context.getBinaryCollection().get("vert"), 0));
2060 	const Unique<VkShaderModule>		fragShaderModule		(createShaderModule(vk, device, m_context.getBinaryCollection().get("frag"), 0));
2061 
2062 	const Unique<VkRenderPass>			renderPass				(makeRenderPass(vk, device));
2063 
2064 	const Move<VkDescriptorSetLayout>	descriptorSetLayout		(DescriptorSetLayoutBuilder()
2065 																	.addSingleBinding(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, VK_SHADER_STAGE_FRAGMENT_BIT)
2066 																	.addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, VK_SHADER_STAGE_FRAGMENT_BIT)
2067 																	.build(vk, device));
2068 	const Move<VkDescriptorPool>		descriptorPool			(DescriptorPoolBuilder()
2069 																	.addType(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER)
2070 																	.addType(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE)
2071 																	.build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u));
2072 	const Move<VkDescriptorSet>			descriptorSet			(makeDescriptorSet(vk, device, *descriptorPool, *descriptorSetLayout));
2073 
2074 	const VkExtent2D					renderSizeDummy			(makeExtent2D(1u, 1u));
2075 	const Unique<VkPipelineLayout>		pipelineLayout			(makePipelineLayout(vk, device, *descriptorSetLayout));
2076 	const Unique<VkPipeline>			pipeline				(makeGraphicsPipeline(vk, device, *pipelineLayout, *renderPass, *vertShaderModule, *fragShaderModule, renderSizeDummy, 0u, true));
2077 
2078 	const Unique<VkCommandPool>			cmdPool					(createCommandPool(vk, device, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT, queueFamilyIndex));
2079 	const Unique<VkCommandBuffer>		cmdBuffer				(allocateCommandBuffer(vk, device, *cmdPool, VK_COMMAND_BUFFER_LEVEL_PRIMARY));
2080 
2081 	for (deUint32 levelNdx = 0; levelNdx < getLevelCount(); ++levelNdx)
2082 	{
2083 		const UVec3&				uncompressedImageRes	= m_uncompressedImageResVec[levelNdx];
2084 		const UVec3&				srcImageResolution		= m_srcImageResolutions[levelNdx];
2085 		const UVec3&				dstImageResolution		= m_dstImageResolutions[levelNdx];
2086 		const size_t				srcImageSizeInBytes		= m_srcData[levelNdx][0]->size();
2087 		const size_t				dstImageSizeInBytes		= m_dstData[levelNdx][0]->size();
2088 		const UVec3					srcImageResBlocked		= getCompressedImageResolutionBlockCeil(m_parameters.formatCompressed, srcImageResolution);
2089 
2090 		const VkImageCreateInfo		dstImageCreateInfo		= makeCreateImageInfo(m_dstFormat, m_parameters.imageType, dstImageResolution, m_dstImageUsageFlags, imgCreateFlagsOverride, SINGLE_LEVEL, SINGLE_LAYER);
2091 
2092 		const VkBufferCreateInfo	srcImageBufferInfo		= makeBufferCreateInfo(srcImageSizeInBytes, VK_BUFFER_USAGE_TRANSFER_SRC_BIT);
2093 		const MovePtr<Buffer>		srcImageBuffer			= MovePtr<Buffer>(new Buffer(vk, device, allocator, srcImageBufferInfo, MemoryRequirement::HostVisible));
2094 
2095 		const VkBufferCreateInfo	dstImageBufferInfo		= makeBufferCreateInfo(dstImageSizeInBytes, VK_BUFFER_USAGE_TRANSFER_DST_BIT);
2096 		MovePtr<Buffer>				dstImageBuffer			= MovePtr<Buffer>(new Buffer(vk, device, allocator, dstImageBufferInfo, MemoryRequirement::HostVisible));
2097 
2098 		const VkExtent2D			renderSize				(makeExtent2D(uncompressedImageRes.x(), uncompressedImageRes.y()));
2099 		const VkViewport			viewport				= makeViewport(0.0f, 0.0f, static_cast<float>(renderSize.width), static_cast<float>(renderSize.height), 0.0f, 1.0f);
2100 		const VkRect2D				scissor					= makeScissor(renderSize.width, renderSize.height);
2101 
2102 		for (deUint32 layerNdx = 0; layerNdx < getLayerCount(); ++layerNdx)
2103 		{
2104 			const VkImageSubresourceRange	srcSubresourceRange		= makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, levelNdx, SINGLE_LEVEL, layerNdx, SINGLE_LAYER);
2105 			const VkImageSubresourceRange	dstSubresourceRange		= makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, SINGLE_LEVEL, 0u, SINGLE_LAYER);
2106 
2107 			Move<VkImageView>				srcImageView			(makeImageView(vk, device, srcImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatUncompressed, srcSubresourceRange, m_srcImageViewUsageKHR));
2108 
2109 			de::MovePtr<Image>				dstImage				(new Image(vk, device, allocator, dstImageCreateInfo, MemoryRequirement::Any));
2110 			Move<VkImageView>				dstImageView			(makeImageView(vk, device, dstImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatUncompressed, dstSubresourceRange, m_dstImageViewUsageKHR));
2111 
2112 			const VkSamplerCreateInfo		srcSamplerInfo			(makeSamplerCreateInfo());
2113 			const Move<VkSampler>			srcSampler				= vk::createSampler(vk, device, &srcSamplerInfo);
2114 			const VkDescriptorImageInfo		descriptorSrcImage		(makeDescriptorImageInfo(*srcSampler, *srcImageView, VK_IMAGE_LAYOUT_GENERAL));
2115 			const VkDescriptorImageInfo		descriptorDstImage		(makeDescriptorImageInfo(DE_NULL, *dstImageView, VK_IMAGE_LAYOUT_GENERAL));
2116 
2117 			const VkBufferImageCopy			srcCopyRegion			= makeBufferImageCopy(srcImageResolution.x(), srcImageResolution.y(), levelNdx, layerNdx, srcImageResBlocked.x(), srcImageResBlocked.y());
2118 			const VkBufferMemoryBarrier		srcCopyBufferBarrierPre	= makeBufferMemoryBarrier(VK_ACCESS_HOST_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT, srcImageBuffer->get(), 0ull, srcImageSizeInBytes);
2119 			const VkImageMemoryBarrier		srcCopyImageBarrierPre	= makeImageMemoryBarrier(0u, VK_ACCESS_TRANSFER_WRITE_BIT, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, srcImage->get(), srcSubresourceRange);
2120 			const VkImageMemoryBarrier		srcCopyImageBarrierPost	= makeImageMemoryBarrier(VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_SHADER_READ_BIT, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL, srcImage->get(), srcSubresourceRange);
2121 			const VkBufferImageCopy			dstCopyRegion			= makeBufferImageCopy(dstImageResolution.x(), dstImageResolution.y());
2122 			const VkImageMemoryBarrier		dstInitImageBarrier		= makeImageMemoryBarrier(0u, VK_ACCESS_SHADER_READ_BIT, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL, dstImage->get(), dstSubresourceRange);
2123 
2124 			const VkExtent2D				framebufferSize			(makeExtent2D(dstImageResolution[0], dstImageResolution[1]));
2125 			const Move<VkFramebuffer>		framebuffer				(makeFramebuffer(vk, device, *renderPass, 0, DE_NULL, framebufferSize, SINGLE_LAYER));
2126 
2127 			// Upload source image data
2128 			const Allocation& alloc = srcImageBuffer->getAllocation();
2129 			deMemcpy(alloc.getHostPtr(), &m_srcData[levelNdx][layerNdx]->at(0), srcImageSizeInBytes);
2130 			flushMappedMemoryRange(vk, device, alloc.getMemory(), alloc.getOffset(), srcImageSizeInBytes);
2131 
2132 			beginCommandBuffer(vk, *cmdBuffer);
2133 			vk.cmdBindPipeline(*cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *pipeline);
2134 
2135 			// Copy buffer to image
2136 			vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1u, &srcCopyBufferBarrierPre, 1u, &srcCopyImageBarrierPre);
2137 			vk.cmdCopyBufferToImage(*cmdBuffer, srcImageBuffer->get(), srcImage->get(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &srcCopyRegion);
2138 			vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, DE_NULL, 1u, &srcCopyImageBarrierPost);
2139 
2140 			// Define destination image layout
2141 			vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, DE_NULL, 1u, &dstInitImageBarrier);
2142 
2143 			beginRenderPass(vk, *cmdBuffer, *renderPass, *framebuffer, renderSize);
2144 
2145 			DescriptorSetUpdateBuilder()
2146 				.writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u), VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, &descriptorSrcImage)
2147 				.writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(1u), VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, &descriptorDstImage)
2148 				.update(vk, device);
2149 
2150 			vk.cmdBindDescriptorSets(*cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *pipelineLayout, 0u, 1u, &descriptorSet.get(), 0u, DE_NULL);
2151 			vk.cmdBindVertexBuffers(*cmdBuffer, 0u, 1u, &m_vertexBuffer->get(), &m_vertexBufferOffset);
2152 
2153 			vk.cmdSetViewport(*cmdBuffer, 0u, 1u, &viewport);
2154 			vk.cmdSetScissor(*cmdBuffer, 0u, 1u, &scissor);
2155 
2156 			vk.cmdDraw(*cmdBuffer, (deUint32)m_vertexCount, 1, 0, 0);
2157 
2158 			vk.cmdEndRenderPass(*cmdBuffer);
2159 
2160 			const VkImageMemoryBarrier prepareForTransferBarrier = makeImageMemoryBarrier(
2161 				VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
2162 				VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_GENERAL,
2163 				dstImage->get(), dstSubresourceRange);
2164 
2165 			const VkBufferMemoryBarrier copyBarrier = makeBufferMemoryBarrier(
2166 				VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_HOST_READ_BIT,
2167 				dstImageBuffer->get(), 0ull, dstImageSizeInBytes);
2168 
2169 			vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0, (const VkBufferMemoryBarrier*)DE_NULL, 1, &prepareForTransferBarrier);
2170 			vk.cmdCopyImageToBuffer(*cmdBuffer, dstImage->get(), VK_IMAGE_LAYOUT_GENERAL, dstImageBuffer->get(), 1u, &dstCopyRegion);
2171 			vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_HOST_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1, &copyBarrier, 0, (const VkImageMemoryBarrier*)DE_NULL);
2172 
2173 			endCommandBuffer(vk, *cmdBuffer);
2174 
2175 			submitCommandsAndWait(vk, device, queue, *cmdBuffer);
2176 
2177 			const Allocation& dstImageBufferAlloc = dstImageBuffer->getAllocation();
2178 			invalidateMappedMemoryRange(vk, device, dstImageBufferAlloc.getMemory(), dstImageBufferAlloc.getOffset(), dstImageSizeInBytes);
2179 			deMemcpy(&m_dstData[levelNdx][layerNdx]->at(0), dstImageBufferAlloc.getHostPtr(), dstImageSizeInBytes);
2180 		}
2181 	}
2182 
2183 	m_compressedImage = srcImage;
2184 }
2185 
// Executes the texture "write" transcoding path: for each mip level and array
// layer, the prepared source data is uploaded into a temporary sampled image,
// one draw is submitted whose fragment shader samples that image and
// image-stores the result into the corresponding subresource of the shared
// destination image, and the written subresource is copied back to host
// memory (m_dstData) for later verification. The completed destination image
// is retained in m_compressedImage.
void GraphicsTextureTestInstance::transcodeWrite ()
{
	const DeviceInterface&				vk						= m_context.getDeviceInterface();
	const VkDevice						device					= m_context.getDevice();
	const deUint32						queueFamilyIndex		= m_context.getUniversalQueueFamilyIndex();
	const VkQueue						queue					= m_context.getUniversalQueue();
	Allocator&							allocator				= m_context.getDefaultAllocator();

	const VkImageCreateFlags*			imgCreateFlagsOverride	= DE_NULL;

	// Destination image carries the full mip/layer chain; each iteration of the
	// loops below writes exactly one subresource of it.
	const VkImageCreateInfo				dstImageCreateInfo		= makeCreateImageInfo(m_dstFormat, m_parameters.imageType, m_dstImageResolutions[0], m_dstImageUsageFlags, imgCreateFlagsOverride, getLevelCount(), getLayerCount());
	MovePtr<Image>						dstImage				(new Image(vk, device, allocator, dstImageCreateInfo, MemoryRequirement::Any));

	const Unique<VkShaderModule>		vertShaderModule		(createShaderModule(vk, device, m_context.getBinaryCollection().get("vert"), 0));
	const Unique<VkShaderModule>		fragShaderModule		(createShaderModule(vk, device, m_context.getBinaryCollection().get("frag"), 0));

	const Unique<VkRenderPass>			renderPass				(makeRenderPass(vk, device));

	// Binding 0: sampled source image; binding 1: storage destination image
	// (matches the "frag" shader interface built in initPrograms).
	const Move<VkDescriptorSetLayout>	descriptorSetLayout		(DescriptorSetLayoutBuilder()
																	.addSingleBinding(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, VK_SHADER_STAGE_FRAGMENT_BIT)
																	.addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, VK_SHADER_STAGE_FRAGMENT_BIT)
																	.build(vk, device));
	const Move<VkDescriptorPool>		descriptorPool			(DescriptorPoolBuilder()
																	.addType(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER)
																	.addType(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE)
																	.build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u));
	const Move<VkDescriptorSet>			descriptorSet			(makeDescriptorSet(vk, device, *descriptorPool, *descriptorSetLayout));

	// The pipeline is created once with a dummy 1x1 render size; the real
	// viewport/scissor are set dynamically per level below (the trailing 'true'
	// presumably enables dynamic viewport/scissor state -- confirm in
	// makeGraphicsPipeline).
	const VkExtent2D					renderSizeDummy			(makeExtent2D(1u, 1u));
	const Unique<VkPipelineLayout>		pipelineLayout			(makePipelineLayout(vk, device, *descriptorSetLayout));
	const Unique<VkPipeline>			pipeline				(makeGraphicsPipeline(vk, device, *pipelineLayout, *renderPass, *vertShaderModule, *fragShaderModule, renderSizeDummy, 0u, true));

	const Unique<VkCommandPool>			cmdPool					(createCommandPool(vk, device, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT, queueFamilyIndex));
	const Unique<VkCommandBuffer>		cmdBuffer				(allocateCommandBuffer(vk, device, *cmdPool, VK_COMMAND_BUFFER_LEVEL_PRIMARY));

	for (deUint32 levelNdx = 0; levelNdx < getLevelCount(); ++levelNdx)
	{
		const UVec3&				uncompressedImageRes	= m_uncompressedImageResVec[levelNdx];
		const UVec3&				srcImageResolution		= m_srcImageResolutions[levelNdx];
		const UVec3&				dstImageResolution		= m_dstImageResolutions[levelNdx];
		const size_t				srcImageSizeInBytes		= m_srcData[levelNdx][0]->size();
		const size_t				dstImageSizeInBytes		= m_dstData[levelNdx][0]->size();
		// Destination readback is addressed in whole compressed blocks, hence
		// the block-ceiled resolution for the buffer row/image pitch.
		const UVec3					dstImageResBlocked		= getCompressedImageResolutionBlockCeil(m_parameters.formatCompressed, dstImageResolution);

		const VkImageCreateInfo		srcImageCreateInfo		= makeCreateImageInfo(m_srcFormat, m_parameters.imageType, srcImageResolution, m_srcImageUsageFlags, imgCreateFlagsOverride, SINGLE_LEVEL, SINGLE_LAYER);

		const VkExtent2D			renderSize				(makeExtent2D(uncompressedImageRes.x(), uncompressedImageRes.y()));
		const VkViewport			viewport				= makeViewport(0.0f, 0.0f, static_cast<float>(renderSize.width), static_cast<float>(renderSize.height), 0.0f, 1.0f);
		const VkRect2D				scissor					= makeScissor(renderSize.width, renderSize.height);

		for (deUint32 layerNdx = 0; layerNdx < getLayerCount(); ++layerNdx)
		{
			// Host-visible staging buffers for the upload and the readback.
			const VkBufferCreateInfo		srcImageBufferInfo		= makeBufferCreateInfo(srcImageSizeInBytes, VK_BUFFER_USAGE_TRANSFER_SRC_BIT);
			const MovePtr<Buffer>			srcImageBuffer			= MovePtr<Buffer>(new Buffer(vk, device, allocator, srcImageBufferInfo, MemoryRequirement::HostVisible));

			const VkBufferCreateInfo		dstImageBufferInfo		= makeBufferCreateInfo(dstImageSizeInBytes, VK_BUFFER_USAGE_TRANSFER_DST_BIT);
			MovePtr<Buffer>					dstImageBuffer			= MovePtr<Buffer>(new Buffer(vk, device, allocator, dstImageBufferInfo, MemoryRequirement::HostVisible));

			// Source image is single-level/single-layer; the destination view
			// targets exactly this (levelNdx, layerNdx) subresource.
			const VkImageSubresourceRange	srcSubresourceRange		= makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, SINGLE_LEVEL, 0u, SINGLE_LAYER);
			const VkImageSubresourceRange	dstSubresourceRange		= makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, levelNdx, SINGLE_LEVEL, layerNdx, SINGLE_LAYER);

			// Both views use the uncompressed view format (texel-view-compatible access).
			Move<VkImageView>				dstImageView			(makeImageView(vk, device, dstImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatUncompressed, dstSubresourceRange, m_dstImageViewUsageKHR));

			de::MovePtr<Image>				srcImage				(new Image(vk, device, allocator, srcImageCreateInfo, MemoryRequirement::Any));
			Move<VkImageView>				srcImageView			(makeImageView(vk, device, srcImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatUncompressed, srcSubresourceRange, m_srcImageViewUsageKHR));

			const VkSamplerCreateInfo		srcSamplerInfo			(makeSamplerCreateInfo());
			const Move<VkSampler>			srcSampler				= vk::createSampler(vk, device, &srcSamplerInfo);
			const VkDescriptorImageInfo		descriptorSrcImage		(makeDescriptorImageInfo(*srcSampler, *srcImageView, VK_IMAGE_LAYOUT_GENERAL));
			const VkDescriptorImageInfo		descriptorDstImage		(makeDescriptorImageInfo(DE_NULL, *dstImageView, VK_IMAGE_LAYOUT_GENERAL));

			const VkBufferImageCopy			srcCopyRegion			= makeBufferImageCopy(srcImageResolution.x(), srcImageResolution.y(), 0u, 0u);
			const VkBufferMemoryBarrier		srcCopyBufferBarrierPre	= makeBufferMemoryBarrier(VK_ACCESS_HOST_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT, srcImageBuffer->get(), 0ull, srcImageSizeInBytes);
			const VkImageMemoryBarrier		srcCopyImageBarrierPre	= makeImageMemoryBarrier(0u, VK_ACCESS_TRANSFER_WRITE_BIT, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, srcImage->get(), srcSubresourceRange);
			const VkImageMemoryBarrier		srcCopyImageBarrierPost	= makeImageMemoryBarrier(VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_SHADER_READ_BIT, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL, srcImage->get(), srcSubresourceRange);
			const VkBufferImageCopy			dstCopyRegion			= makeBufferImageCopy(dstImageResolution.x(), dstImageResolution.y(), levelNdx, layerNdx, dstImageResBlocked.x(), dstImageResBlocked.y());
			const VkImageMemoryBarrier		dstInitImageBarrier		= makeImageMemoryBarrier(0u, VK_ACCESS_SHADER_READ_BIT, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL, dstImage->get(), dstSubresourceRange);

			// Attachment-less framebuffer (0 attachments): the fragment shader
			// writes its output via imageStore only.
			const VkExtent2D				framebufferSize			(makeExtent2D(dstImageResolution[0], dstImageResolution[1]));
			const Move<VkFramebuffer>		framebuffer				(makeFramebuffer(vk, device, *renderPass, 0, DE_NULL, framebufferSize, SINGLE_LAYER));

			// Upload source image data
			const Allocation& alloc = srcImageBuffer->getAllocation();
			deMemcpy(alloc.getHostPtr(), &m_srcData[levelNdx][layerNdx]->at(0), srcImageSizeInBytes);
			flushMappedMemoryRange(vk, device, alloc.getMemory(), alloc.getOffset(), srcImageSizeInBytes);

			beginCommandBuffer(vk, *cmdBuffer);
			vk.cmdBindPipeline(*cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *pipeline);

			// Copy buffer to image
			vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1u, &srcCopyBufferBarrierPre, 1u, &srcCopyImageBarrierPre);
			vk.cmdCopyBufferToImage(*cmdBuffer, srcImageBuffer->get(), srcImage->get(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &srcCopyRegion);
			vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, DE_NULL, 1u, &srcCopyImageBarrierPost);

			// Define destination image layout
			vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, DE_NULL, 1u, &dstInitImageBarrier);

			beginRenderPass(vk, *cmdBuffer, *renderPass, *framebuffer, renderSize);

			// Descriptors are rewritten every iteration because the image views
			// change per (level, layer) subresource.
			DescriptorSetUpdateBuilder()
				.writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u), VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, &descriptorSrcImage)
				.writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(1u), VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, &descriptorDstImage)
				.update(vk, device);

			vk.cmdBindDescriptorSets(*cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *pipelineLayout, 0u, 1u, &descriptorSet.get(), 0u, DE_NULL);
			vk.cmdBindVertexBuffers(*cmdBuffer, 0u, 1u, &m_vertexBuffer->get(), &m_vertexBufferOffset);

			vk.cmdSetViewport(*cmdBuffer, 0u, 1u, &viewport);
			vk.cmdSetScissor(*cmdBuffer, 0u, 1u, &scissor);

			// Draws m_vertexCount vertices from m_vertexBuffer (presumably a
			// full-screen quad set up elsewhere -- confirm against the vertex
			// buffer initialization).
			vk.cmdDraw(*cmdBuffer, (deUint32)m_vertexCount, 1, 0, 0);

			vk.cmdEndRenderPass(*cmdBuffer);

			// Make the fragment-shader image stores available to the transfer
			// stage before copying the destination image back to the buffer.
			const VkImageMemoryBarrier prepareForTransferBarrier = makeImageMemoryBarrier(
				VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
				VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_GENERAL,
				dstImage->get(), dstSubresourceRange);

			const VkBufferMemoryBarrier copyBarrier = makeBufferMemoryBarrier(
				VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_HOST_READ_BIT,
				dstImageBuffer->get(), 0ull, dstImageSizeInBytes);

			vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0, (const VkBufferMemoryBarrier*)DE_NULL, 1, &prepareForTransferBarrier);
			vk.cmdCopyImageToBuffer(*cmdBuffer, dstImage->get(), VK_IMAGE_LAYOUT_GENERAL, dstImageBuffer->get(), 1u, &dstCopyRegion);
			vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_HOST_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1, &copyBarrier, 0, (const VkImageMemoryBarrier*)DE_NULL);

			endCommandBuffer(vk, *cmdBuffer);

			submitCommandsAndWait(vk, device, queue, *cmdBuffer);

			// Read the written subresource back for host-side verification.
			const Allocation& dstImageBufferAlloc = dstImageBuffer->getAllocation();
			invalidateMappedMemoryRange(vk, device, dstImageBufferAlloc.getMemory(), dstImageBufferAlloc.getOffset(), dstImageSizeInBytes);
			deMemcpy(&m_dstData[levelNdx][layerNdx]->at(0), dstImageBufferAlloc.getHostPtr(), dstImageSizeInBytes);
		}
	}

	// The written destination image is treated as the compressed result in
	// subsequent verification.
	m_compressedImage = dstImage;
}
2325 
// Test case for texel-view-compatible image transcoding: a compressed image is
// created with VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT_KHR (from
// VK_KHR_maintenance2, checked in createInstance) and accessed through
// uncompressed-format views. The shader type and operation in m_parameters
// select the generated programs and the concrete TestInstance.
class TexelViewCompatibleCase : public TestCase
{
public:
							// Stores the parameters; no Vulkan work is done here.
							TexelViewCompatibleCase		(TestContext&				testCtx,
														 const std::string&			name,
														 const std::string&			desc,
														 const TestParameters&		parameters);
	// Builds the GLSL sources required by the selected shader type/operation.
	void					initPrograms				(SourceCollections&			programCollection) const;
	// Verifies implementation support and creates the matching instance.
	TestInstance*			createInstance				(Context&					context) const;
protected:
	const TestParameters	m_parameters;	// Immutable test configuration.
};
2338 
// Trivial constructor: forwards the test-case identity to TestCase and copies
// the parameter struct that later drives program generation and instance
// creation.
TexelViewCompatibleCase::TexelViewCompatibleCase (TestContext& testCtx, const std::string& name, const std::string& desc, const TestParameters& parameters)
	: TestCase				(testCtx, name, desc)
	, m_parameters			(parameters)
{
}
2344 
// Generates the GLSL sources for the selected shader type and operation.
// Compute variants produce a transcode shader ("comp") plus a decompress-and-
// compare shader ("decompress"); fragment variants produce a pass-through
// vertex shader ("vert"), an operation-specific fragment shader ("frag") and a
// verification fragment shader ("frag_verify").
void TexelViewCompatibleCase::initPrograms (vk::SourceCollections&	programCollection) const
{
	DE_ASSERT(m_parameters.size.x() > 0);
	DE_ASSERT(m_parameters.size.y() > 0);

	switch (m_parameters.shader)
	{
		case SHADER_TYPE_COMPUTE:
		{
			const std::string	imageTypeStr		= getShaderImageType(mapVkFormat(m_parameters.formatUncompressed), m_parameters.imageType);
			const std::string	formatQualifierStr	= getShaderImageFormatQualifier(mapVkFormat(m_parameters.formatUncompressed));
			std::ostringstream	src;
			std::ostringstream	src_decompress;

			// Both compute shaders share the version declaration and a 1x1x1
			// local workgroup size (one invocation per texel/workgroup).
			src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_450) << "\n"
				<< "layout (local_size_x = 1, local_size_y = 1, local_size_z = 1) in;\n\n";
			src_decompress << src.str();

			switch(m_parameters.operation)
			{
				// Texel-by-texel copy via imageLoad/imageStore through uncompressed views.
				case OPERATION_IMAGE_LOAD:
				{
					src << "layout (binding = 0, "<<formatQualifierStr<<") readonly uniform "<<imageTypeStr<<" u_image0;\n"
						<< "layout (binding = 1, "<<formatQualifierStr<<") writeonly uniform "<<imageTypeStr<<" u_image1;\n\n"
						<< "void main (void)\n"
						<< "{\n"
						<< "    ivec2 pos = ivec2(gl_GlobalInvocationID.xy);\n"
						<< "    imageStore(u_image1, pos, imageLoad(u_image0, pos));\n"
						<< "}\n";

					break;
				}

				// Read via texelFetch from a sampled image (pos.z selects the LOD), store via imageStore.
				case OPERATION_TEXEL_FETCH:
				{
					src << "layout (binding = 0) uniform "<<getGlslSamplerType(mapVkFormat(m_parameters.formatUncompressed), mapImageViewType(m_parameters.imageType))<<" u_image0;\n"
						<< "layout (binding = 1, "<<formatQualifierStr<<") writeonly uniform "<<imageTypeStr<<" u_image1;\n\n"
						<< "void main (void)\n"
						<< "{\n"
						<< "    ivec3 pos = ivec3(gl_GlobalInvocationID.xyz);\n"
						<< "    imageStore(u_image1, pos.xy, texelFetch(u_image0, pos.xy, pos.z));\n"
						<< "}\n";

					break;
				}

				// Read via normalized-coordinate texture() sampling, store via imageStore.
				case OPERATION_TEXTURE:
				{
					src << "layout (binding = 0) uniform "<<getGlslSamplerType(mapVkFormat(m_parameters.formatUncompressed), mapImageViewType(m_parameters.imageType))<<" u_image0;\n"
						<< "layout (binding = 1, "<<formatQualifierStr<<") writeonly uniform "<<imageTypeStr<<" u_image1;\n\n"
						<< "void main (void)\n"
						<< "{\n"
						<< "    const vec2 pixels_resolution = vec2(gl_NumWorkGroups.x - 1, gl_NumWorkGroups.y - 1);\n"
						<< "    const ivec2 pos = ivec2(gl_GlobalInvocationID.xy);\n"
						<< "    const vec2 coord = vec2(gl_GlobalInvocationID.xy) / vec2(pixels_resolution);\n"
						<< "    imageStore(u_image1, pos, texture(u_image0, coord));\n"
						<< "}\n";

					break;
				}

				// Two-hop copy: u_image1 -> u_image0 (read/write image) -> u_image2.
				case OPERATION_IMAGE_STORE:
				{
					src << "layout (binding = 0, "<<formatQualifierStr<<") uniform "<<imageTypeStr<<"           u_image0;\n"
						<< "layout (binding = 1, "<<formatQualifierStr<<") readonly uniform "<<imageTypeStr<<"  u_image1;\n"
						<< "layout (binding = 2, "<<formatQualifierStr<<") writeonly uniform "<<imageTypeStr<<" u_image2;\n\n"
						<< "void main (void)\n"
						<< "{\n"
						<< "    ivec2 pos = ivec2(gl_GlobalInvocationID.xy);\n"
						<< "    imageStore(u_image0, pos, imageLoad(u_image1, pos));\n"
						<< "    imageStore(u_image2, pos, imageLoad(u_image0, pos));\n"
						<< "}\n";

					break;
				}

				default:
					DE_ASSERT(false);
			}

			// Decompression shader: samples the transcoded result and the
			// reference as compressed textures and stores both into plain
			// images for host-side comparison.
			src_decompress	<< "layout (binding = 0) uniform "<<getGlslSamplerType(mapVkFormat(m_parameters.formatUncompressed), mapImageViewType(m_parameters.imageType))<<" compressed_result;\n"
							<< "layout (binding = 1) uniform "<<getGlslSamplerType(mapVkFormat(m_parameters.formatUncompressed), mapImageViewType(m_parameters.imageType))<<" compressed_reference;\n"
							<< "layout (binding = 2, "<<formatQualifierStr<<") writeonly uniform "<<imageTypeStr<<" decompressed_result;\n"
							<< "layout (binding = 3, "<<formatQualifierStr<<") writeonly uniform "<<imageTypeStr<<" decompressed_reference;\n\n"
							<< "void main (void)\n"
							<< "{\n"
							<< "    const vec2 pixels_resolution = vec2(gl_NumWorkGroups.xy);\n"
							<< "    const vec2 cord = vec2(gl_GlobalInvocationID.xy) / vec2(pixels_resolution);\n"
							<< "    const ivec2 pos = ivec2(gl_GlobalInvocationID.xy); \n"
							<< "    imageStore(decompressed_result, pos, texture(compressed_result, cord));\n"
							<< "    imageStore(decompressed_reference, pos, texture(compressed_reference, cord));\n"
							<< "}\n";
			programCollection.glslSources.add("comp") << glu::ComputeSource(src.str());
			programCollection.glslSources.add("decompress") << glu::ComputeSource(src_decompress.str());

			break;
		}

		case SHADER_TYPE_FRAGMENT:
		{
			// 2D-array images are accessed one layer at a time in the fragment
			// path, so the shaders are written against plain 2D types.
			ImageType	imageTypeForFS = (m_parameters.imageType == IMAGE_TYPE_2D_ARRAY) ? IMAGE_TYPE_2D : m_parameters.imageType;

			// Vertex shader
			{
				// Pass-through: positions come pre-transformed from the vertex buffer.
				std::ostringstream src;
				src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_450) << "\n\n"
					<< "layout(location = 0) in vec4 v_in_position;\n"
					<< "\n"
					<< "void main (void)\n"
					<< "{\n"
					<< "    gl_Position = v_in_position;\n"
					<< "}\n";

				programCollection.glslSources.add("vert") << glu::VertexSource(src.str());
			}

			// Fragment shader
			{
				switch(m_parameters.operation)
				{
					// Copy each fragment from an input attachment to the color attachment.
					case OPERATION_ATTACHMENT_READ:
					case OPERATION_ATTACHMENT_WRITE:
					{
						std::ostringstream	src;

						const std::string	dstTypeStr	= getGlslFormatType(m_parameters.formatUncompressed);
						const std::string	srcTypeStr	= getGlslInputFormatType(m_parameters.formatUncompressed);

						src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_450) << "\n\n"
							<< "precision highp int;\n"
							<< "precision highp float;\n"
							<< "\n"
							<< "layout (location = 0) out highp " << dstTypeStr << " o_color;\n"
							<< "layout (input_attachment_index = 0, set = 0, binding = 0) uniform highp " << srcTypeStr << " inputImage1;\n"
							<< "\n"
							<< "void main (void)\n"
							<< "{\n"
							<< "    o_color = " << dstTypeStr << "(subpassLoad(inputImage1));\n"
							<< "}\n";

						programCollection.glslSources.add("frag") << glu::FragmentSource(src.str());

						break;
					}

					// Sample the source texture and image-store into the destination (no color attachment).
					case OPERATION_TEXTURE_READ:
					case OPERATION_TEXTURE_WRITE:
					{
						std::ostringstream	src;

						const std::string	srcSamplerTypeStr		= getGlslSamplerType(mapVkFormat(m_parameters.formatUncompressed), mapImageViewType(imageTypeForFS));
						const std::string	dstImageTypeStr			= getShaderImageType(mapVkFormat(m_parameters.formatUncompressed), imageTypeForFS);
						const std::string	dstFormatQualifierStr	= getShaderImageFormatQualifier(mapVkFormat(m_parameters.formatUncompressed));

						src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_450) << "\n\n"
							<< "layout (binding = 0) uniform " << srcSamplerTypeStr << " u_imageIn;\n"
							<< "layout (binding = 1, " << dstFormatQualifierStr << ") writeonly uniform " << dstImageTypeStr << " u_imageOut;\n"
							<< "\n"
							<< "void main (void)\n"
							<< "{\n"
							<< "    const ivec2 out_pos = ivec2(gl_FragCoord.xy);\n"
							<< "    const ivec2 pixels_resolution = ivec2(textureSize(u_imageIn, 0)) - ivec2(1,1);\n"
							<< "    const vec2 in_pos = vec2(out_pos) / vec2(pixels_resolution);\n"
							<< "    imageStore(u_imageOut, out_pos, texture(u_imageIn, in_pos));\n"
							<< "}\n";

						programCollection.glslSources.add("frag") << glu::FragmentSource(src.str());

						break;
					}

					default:
						DE_ASSERT(false);
				}
			}

			// Verification fragment shader
			{
				// Decompresses both the result and the reference texture into
				// plain storage images for host-side comparison (formatForVerify).
				std::ostringstream	src;

				const std::string	samplerType			= getGlslSamplerType(mapVkFormat(m_parameters.formatForVerify), mapImageViewType(imageTypeForFS));
				const std::string	imageTypeStr		= getShaderImageType(mapVkFormat(m_parameters.formatForVerify), imageTypeForFS);
				const std::string	formatQualifierStr	= getShaderImageFormatQualifier(mapVkFormat(m_parameters.formatForVerify));

				src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_450) << "\n\n"
					<< "layout (binding = 0) uniform " << samplerType << " u_imageIn0;\n"
					<< "layout (binding = 1) uniform " << samplerType << " u_imageIn1;\n"
					<< "layout (binding = 2, " << formatQualifierStr << ") writeonly uniform " << imageTypeStr << " u_imageOut0;\n"
					<< "layout (binding = 3, " << formatQualifierStr << ") writeonly uniform " << imageTypeStr << " u_imageOut1;\n"
					<< "\n"
					<< "void main (void)\n"
					<< "{\n"
					<< "    const ivec2 out_pos = ivec2(gl_FragCoord.xy);\n"
					<< "\n"
					<< "    const ivec2 pixels_resolution0 = ivec2(textureSize(u_imageIn0, 0)) - ivec2(1,1);\n"
					<< "    const vec2 in_pos0 = vec2(out_pos) / vec2(pixels_resolution0);\n"
					<< "    imageStore(u_imageOut0, out_pos, texture(u_imageIn0, in_pos0));\n"
					<< "\n"
					<< "    const ivec2 pixels_resolution1 = ivec2(textureSize(u_imageIn1, 0)) - ivec2(1,1);\n"
					<< "    const vec2 in_pos1 = vec2(out_pos) / vec2(pixels_resolution1);\n"
					<< "    imageStore(u_imageOut1, out_pos, texture(u_imageIn1, in_pos1));\n"
					<< "}\n";

				programCollection.glslSources.add("frag_verify") << glu::FragmentSource(src.str());
			}

			break;
		}

		default:
			DE_ASSERT(false);
	}
}
2558 
// Validates that the implementation supports everything the selected
// parameters require (VK_KHR_maintenance2, image format support with the
// needed usage/create flags, and the relevant texture compression feature),
// throwing NotSupportedError otherwise, then creates the concrete test
// instance matching the shader type and operation.
TestInstance* TexelViewCompatibleCase::createInstance (Context& context) const
{
	const VkPhysicalDevice			physicalDevice			= context.getPhysicalDevice();
	const InstanceInterface&		vk						= context.getInstanceInterface();

	// Non-mipmapped ("basic") variants are expected to use single-layer sizes
	// with depth 1.
	if (!m_parameters.useMipmaps)
	{
		DE_ASSERT(getNumLayers(m_parameters.imageType, m_parameters.size)     == 1u);
		DE_ASSERT(getLayerSize(m_parameters.imageType, m_parameters.size).z() == 1u);
	}

	DE_ASSERT(getLayerSize(m_parameters.imageType, m_parameters.size).x() >  0u);
	DE_ASSERT(getLayerSize(m_parameters.imageType, m_parameters.size).y() >  0u);

	// VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT_KHR (used below) comes
	// from VK_KHR_maintenance2, so the whole test requires the extension.
	if (std::find(context.getDeviceExtensions().begin(), context.getDeviceExtensions().end(), "VK_KHR_maintenance2") == context.getDeviceExtensions().end())
		TCU_THROW(NotSupportedError, "Extension VK_KHR_maintenance2 not supported");

	{
		VkImageFormatProperties imageFormatProperties;

		// Only VK_ERROR_FORMAT_NOT_SUPPORTED is treated as "unsupported"; the
		// returned properties themselves are not inspected further here.
		if (VK_ERROR_FORMAT_NOT_SUPPORTED == vk.getPhysicalDeviceImageFormatProperties(physicalDevice, m_parameters.formatUncompressed,
												mapImageType(m_parameters.imageType), VK_IMAGE_TILING_OPTIMAL,
												m_parameters.uncompressedImageUsage, 0u, &imageFormatProperties))
			TCU_THROW(NotSupportedError, "Operation not supported with this image format");

		// The compressed format must additionally be creatable with the
		// texel-view-compatible / mutable-format / extended-usage flags.
		if (VK_ERROR_FORMAT_NOT_SUPPORTED == vk.getPhysicalDeviceImageFormatProperties(physicalDevice, m_parameters.formatCompressed,
												mapImageType(m_parameters.imageType), VK_IMAGE_TILING_OPTIMAL,
												VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT,
												VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT_KHR | VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT | VK_IMAGE_CREATE_EXTENDED_USAGE_BIT_KHR,
												&imageFormatProperties))
			TCU_THROW(NotSupportedError, "Operation not supported with this image format");
	}

	{
		// Require the physical-device feature matching the compressed format's
		// family (BC / ETC2+EAC / ASTC LDR), identified by format-enum range.
		const VkPhysicalDeviceFeatures	physicalDeviceFeatures	= getPhysicalDeviceFeatures (vk, physicalDevice);

		if (deInRange32(m_parameters.formatCompressed, VK_FORMAT_BC1_RGB_UNORM_BLOCK, VK_FORMAT_BC7_SRGB_BLOCK) &&
			!physicalDeviceFeatures.textureCompressionBC)
			TCU_THROW(NotSupportedError, "textureCompressionBC not supported");

		if (deInRange32(m_parameters.formatCompressed, VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK, VK_FORMAT_EAC_R11G11_SNORM_BLOCK) &&
			!physicalDeviceFeatures.textureCompressionETC2)
			TCU_THROW(NotSupportedError, "textureCompressionETC2 not supported");

		if (deInRange32(m_parameters.formatCompressed, VK_FORMAT_ASTC_4x4_UNORM_BLOCK, VK_FORMAT_ASTC_12x12_SRGB_BLOCK) &&
			!physicalDeviceFeatures.textureCompressionASTC_LDR)
			TCU_THROW(NotSupportedError, "textureCompressionASTC_LDR not supported");
	}

	// Dispatch to the instance type implementing the requested operation.
	switch (m_parameters.shader)
	{
		case SHADER_TYPE_COMPUTE:
		{
			switch (m_parameters.operation)
			{
				case OPERATION_IMAGE_LOAD:
				case OPERATION_TEXEL_FETCH:
				case OPERATION_TEXTURE:
					return new BasicComputeTestInstance(context, m_parameters);
				case OPERATION_IMAGE_STORE:
					return new ImageStoreComputeTestInstance(context, m_parameters);
				default:
					TCU_THROW(InternalError, "Impossible");
			}
		}

		case SHADER_TYPE_FRAGMENT:
		{
			switch (m_parameters.operation)
			{
				case OPERATION_ATTACHMENT_READ:
				case OPERATION_ATTACHMENT_WRITE:
					return new GraphicsAttachmentsTestInstance(context, m_parameters);

				case OPERATION_TEXTURE_READ:
				case OPERATION_TEXTURE_WRITE:
					return new GraphicsTextureTestInstance(context, m_parameters);

				default:
					TCU_THROW(InternalError, "Impossible");
			}
		}

		default:
			TCU_THROW(InternalError, "Impossible");
	}
}
2646 
} // anonymous namespace
2648 
getUnniceResolution(const VkFormat format,const deUint32 layers)2649 static tcu::UVec3 getUnniceResolution(const VkFormat format, const deUint32 layers)
2650 {
2651 	const deUint32	unniceMipmapTextureSize[]	= { 1, 1, 1, 8, 22, 48, 117, 275, 604, 208, 611, 274, 1211 };
2652 	const deUint32	baseTextureWidth			= unniceMipmapTextureSize[getBlockWidth(format)];
2653 	const deUint32	baseTextureHeight			= unniceMipmapTextureSize[getBlockHeight(format)];
2654 	const deUint32	baseTextureWidthLevels		= deLog2Floor32(baseTextureWidth);
2655 	const deUint32	baseTextureHeightLevels		= deLog2Floor32(baseTextureHeight);
2656 	const deUint32	widthMultiplier				= (baseTextureHeightLevels > baseTextureWidthLevels) ? 1u << (baseTextureHeightLevels - baseTextureWidthLevels) : 1u;
2657 	const deUint32	heightMultiplier			= (baseTextureWidthLevels > baseTextureHeightLevels) ? 1u << (baseTextureWidthLevels - baseTextureHeightLevels) : 1u;
2658 	const deUint32	width						= baseTextureWidth * widthMultiplier;
2659 	const deUint32	height						= baseTextureHeight * heightMultiplier;
2660 
2661 	// Number of levels should be same on both axises
2662 	DE_ASSERT(deLog2Floor32(width) == deLog2Floor32(height));
2663 
2664 	return tcu::UVec3(width, height, layers);
2665 }
2666 
createImageCompressionTranscodingTests(tcu::TestContext & testCtx)2667 tcu::TestCaseGroup* createImageCompressionTranscodingTests (tcu::TestContext& testCtx)
2668 {
2669 	struct FormatsArray
2670 	{
2671 		const VkFormat*	formats;
2672 		deUint32		count;
2673 	};
2674 
2675 	const bool					mipmapness[]									=
2676 	{
2677 		false,
2678 		true,
2679 	};
2680 
2681 	const std::string			pipelineName[SHADER_TYPE_LAST]					=
2682 	{
2683 		"compute",
2684 		"graphic",
2685 	};
2686 
2687 	const std::string			mipmanpnessName[DE_LENGTH_OF_ARRAY(mipmapness)]	=
2688 	{
2689 		"basic",
2690 		"extended",
2691 	};
2692 
2693 	const std::string			operationName[OPERATION_LAST]					=
2694 	{
2695 		"image_load",
2696 		"texel_fetch",
2697 		"texture",
2698 		"image_store",
2699 		"attachment_read",
2700 		"attachment_write",
2701 		"texture_read",
2702 		"texture_write",
2703 	};
2704 
2705 	const VkImageUsageFlags		baseImageUsageFlagSet							= VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
2706 	const VkImageUsageFlags		compressedImageUsageFlags[OPERATION_LAST]		=
2707 	{
2708 		baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_STORAGE_BIT),											// "image_load"
2709 		baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_STORAGE_BIT),				// "texel_fetch"
2710 		baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_STORAGE_BIT),				// "texture"
2711 		baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_STORAGE_BIT),				// "image_store"
2712 		baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT | VK_IMAGE_USAGE_SAMPLED_BIT),	// "attachment_read"
2713 		baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT),	// "attachment_write"
2714 		baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_SAMPLED_BIT),											// "texture_read"
2715 		baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_STORAGE_BIT),				// "texture_write"
2716 	};
2717 
2718 	const VkImageUsageFlags		compressedImageViewUsageFlags[OPERATION_LAST]	=
2719 	{
2720 		compressedImageUsageFlags[0],																									//"image_load"
2721 		compressedImageUsageFlags[1],																									//"texel_fetch"
2722 		compressedImageUsageFlags[2],																									//"texture"
2723 		compressedImageUsageFlags[3],																									//"image_store"
2724 		compressedImageUsageFlags[4],																									//"attachment_read"
2725 		compressedImageUsageFlags[5] | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,																//"attachment_write"
2726 		compressedImageUsageFlags[6],																									//"texture_read"
2727 		compressedImageUsageFlags[7],																									//"texture_write"
2728 	};
2729 
2730 	const VkImageUsageFlags		uncompressedImageUsageFlags[OPERATION_LAST]		=
2731 	{
2732 		baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_STORAGE_BIT),											//"image_load"
2733 		baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_STORAGE_BIT),				//"texel_fetch"
2734 		baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_STORAGE_BIT),				//"texture"
2735 		baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_STORAGE_BIT),				//"image_store"
2736 		baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_SAMPLED_BIT),	//"attachment_read"
2737 		baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT),									//"attachment_write"
2738 		baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_STORAGE_BIT | VK_IMAGE_USAGE_SAMPLED_BIT),				//"texture_read"
2739 		baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_SAMPLED_BIT),											//"texture_write"
2740 	};
2741 
2742 	const VkFormat				compressedFormats64bit[]						=
2743 	{
2744 		VK_FORMAT_BC1_RGB_UNORM_BLOCK,
2745 		VK_FORMAT_BC1_RGB_SRGB_BLOCK,
2746 		VK_FORMAT_BC1_RGBA_UNORM_BLOCK,
2747 		VK_FORMAT_BC1_RGBA_SRGB_BLOCK,
2748 		VK_FORMAT_BC4_UNORM_BLOCK,
2749 		VK_FORMAT_BC4_SNORM_BLOCK,
2750 		VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK,
2751 		VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK,
2752 		VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK,
2753 		VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK,
2754 		VK_FORMAT_EAC_R11_UNORM_BLOCK,
2755 		VK_FORMAT_EAC_R11_SNORM_BLOCK,
2756 	};
2757 
2758 	const VkFormat				compressedFormats128bit[]						=
2759 	{
2760 		VK_FORMAT_BC2_UNORM_BLOCK,
2761 		VK_FORMAT_BC2_SRGB_BLOCK,
2762 		VK_FORMAT_BC3_UNORM_BLOCK,
2763 		VK_FORMAT_BC3_SRGB_BLOCK,
2764 		VK_FORMAT_BC5_UNORM_BLOCK,
2765 		VK_FORMAT_BC5_SNORM_BLOCK,
2766 		VK_FORMAT_BC6H_UFLOAT_BLOCK,
2767 		VK_FORMAT_BC6H_SFLOAT_BLOCK,
2768 		VK_FORMAT_BC7_UNORM_BLOCK,
2769 		VK_FORMAT_BC7_SRGB_BLOCK,
2770 		VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK,
2771 		VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK,
2772 		VK_FORMAT_EAC_R11G11_UNORM_BLOCK,
2773 		VK_FORMAT_EAC_R11G11_SNORM_BLOCK,
2774 		VK_FORMAT_ASTC_4x4_UNORM_BLOCK,
2775 		VK_FORMAT_ASTC_4x4_SRGB_BLOCK,
2776 		VK_FORMAT_ASTC_5x4_UNORM_BLOCK,
2777 		VK_FORMAT_ASTC_5x4_SRGB_BLOCK,
2778 		VK_FORMAT_ASTC_5x5_UNORM_BLOCK,
2779 		VK_FORMAT_ASTC_5x5_SRGB_BLOCK,
2780 		VK_FORMAT_ASTC_6x5_UNORM_BLOCK,
2781 		VK_FORMAT_ASTC_6x5_SRGB_BLOCK,
2782 		VK_FORMAT_ASTC_6x6_UNORM_BLOCK,
2783 		VK_FORMAT_ASTC_6x6_SRGB_BLOCK,
2784 		VK_FORMAT_ASTC_8x5_UNORM_BLOCK,
2785 		VK_FORMAT_ASTC_8x5_SRGB_BLOCK,
2786 		VK_FORMAT_ASTC_8x6_UNORM_BLOCK,
2787 		VK_FORMAT_ASTC_8x6_SRGB_BLOCK,
2788 		VK_FORMAT_ASTC_8x8_UNORM_BLOCK,
2789 		VK_FORMAT_ASTC_8x8_SRGB_BLOCK,
2790 		VK_FORMAT_ASTC_10x5_UNORM_BLOCK,
2791 		VK_FORMAT_ASTC_10x5_SRGB_BLOCK,
2792 		VK_FORMAT_ASTC_10x6_UNORM_BLOCK,
2793 		VK_FORMAT_ASTC_10x6_SRGB_BLOCK,
2794 		VK_FORMAT_ASTC_10x8_UNORM_BLOCK,
2795 		VK_FORMAT_ASTC_10x8_SRGB_BLOCK,
2796 		VK_FORMAT_ASTC_10x10_UNORM_BLOCK,
2797 		VK_FORMAT_ASTC_10x10_SRGB_BLOCK,
2798 		VK_FORMAT_ASTC_12x10_UNORM_BLOCK,
2799 		VK_FORMAT_ASTC_12x10_SRGB_BLOCK,
2800 		VK_FORMAT_ASTC_12x12_UNORM_BLOCK,
2801 		VK_FORMAT_ASTC_12x12_SRGB_BLOCK,
2802 	};
2803 
2804 	const VkFormat				uncompressedFormats64bit[]						=
2805 	{
2806 		VK_FORMAT_R16G16B16A16_UNORM,
2807 		VK_FORMAT_R16G16B16A16_SNORM,
2808 		VK_FORMAT_R16G16B16A16_USCALED,
2809 		VK_FORMAT_R16G16B16A16_SSCALED,
2810 		VK_FORMAT_R16G16B16A16_UINT,
2811 		VK_FORMAT_R16G16B16A16_SINT,
2812 		VK_FORMAT_R16G16B16A16_SFLOAT,
2813 		VK_FORMAT_R32G32_UINT,
2814 		VK_FORMAT_R32G32_SINT,
2815 		VK_FORMAT_R32G32_SFLOAT,
		//VK_FORMAT_R64_UINT, removed from the test: it could not be used
		//VK_FORMAT_R64_SINT, removed from the test: it could not be used
		//VK_FORMAT_R64_SFLOAT, removed from the test: it could not be used
2819 	};
2820 
2821 	const VkFormat				uncompressedFormats128bit[]						=
2822 	{
2823 		VK_FORMAT_R32G32B32A32_UINT,
2824 		VK_FORMAT_R32G32B32A32_SINT,
2825 		VK_FORMAT_R32G32B32A32_SFLOAT,
		//VK_FORMAT_R64G64_UINT, removed from the test: it could not be used
		//VK_FORMAT_R64G64_SINT, removed from the test: it could not be used
		//VK_FORMAT_R64G64_SFLOAT, removed from the test: it could not be used
2829 	};
2830 
2831 	const FormatsArray			formatsCompressedSets[]							=
2832 	{
2833 		{
2834 			compressedFormats64bit,
2835 			DE_LENGTH_OF_ARRAY(compressedFormats64bit)
2836 		},
2837 		{
2838 			compressedFormats128bit,
2839 			DE_LENGTH_OF_ARRAY(compressedFormats128bit)
2840 		},
2841 	};
2842 
2843 	const FormatsArray			formatsUncompressedSets[]						=
2844 	{
2845 		{
2846 			uncompressedFormats64bit,
2847 			DE_LENGTH_OF_ARRAY(uncompressedFormats64bit)
2848 		},
2849 		{
2850 			uncompressedFormats128bit,
2851 			DE_LENGTH_OF_ARRAY(uncompressedFormats128bit)
2852 		},
2853 	};
2854 
2855 	DE_ASSERT(DE_LENGTH_OF_ARRAY(formatsCompressedSets) == DE_LENGTH_OF_ARRAY(formatsUncompressedSets));
2856 
2857 	MovePtr<tcu::TestCaseGroup>	texelViewCompatibleTests							(new tcu::TestCaseGroup(testCtx, "texel_view_compatible", "Texel view compatible cases"));
2858 
2859 	for (int shaderType = SHADER_TYPE_COMPUTE; shaderType < SHADER_TYPE_LAST; ++shaderType)
2860 	{
2861 		MovePtr<tcu::TestCaseGroup>	pipelineTypeGroup	(new tcu::TestCaseGroup(testCtx, pipelineName[shaderType].c_str(), ""));
2862 
2863 		for (int mipmapTestNdx = 0; mipmapTestNdx < DE_LENGTH_OF_ARRAY(mipmapness); mipmapTestNdx++)
2864 		{
2865 			const bool mipmapTest = mipmapness[mipmapTestNdx];
2866 
2867 			MovePtr<tcu::TestCaseGroup>	mipmapTypeGroup	(new tcu::TestCaseGroup(testCtx, mipmanpnessName[mipmapTestNdx].c_str(), ""));
2868 
2869 			for (int operationNdx = OPERATION_IMAGE_LOAD; operationNdx < OPERATION_LAST; ++operationNdx)
2870 			{
2871 				if (shaderType != SHADER_TYPE_FRAGMENT && deInRange32(operationNdx, OPERATION_ATTACHMENT_READ, OPERATION_TEXTURE_WRITE))
2872 					continue;
2873 
2874 				if (shaderType != SHADER_TYPE_COMPUTE && deInRange32(operationNdx, OPERATION_IMAGE_LOAD, OPERATION_IMAGE_STORE))
2875 					continue;
2876 
2877 				MovePtr<tcu::TestCaseGroup>	imageOperationGroup	(new tcu::TestCaseGroup(testCtx, operationName[operationNdx].c_str(), ""));
2878 
2879 				// Iterate through bitness groups (64 bit, 128 bit, etc)
2880 				for (deUint32 formatBitnessGroup = 0; formatBitnessGroup < DE_LENGTH_OF_ARRAY(formatsCompressedSets); ++formatBitnessGroup)
2881 				{
2882 					for (deUint32 formatCompressedNdx = 0; formatCompressedNdx < formatsCompressedSets[formatBitnessGroup].count; ++formatCompressedNdx)
2883 					{
2884 						const VkFormat				formatCompressed			= formatsCompressedSets[formatBitnessGroup].formats[formatCompressedNdx];
2885 						const std::string			compressedFormatGroupName	= getFormatShortString(formatCompressed);
2886 						MovePtr<tcu::TestCaseGroup>	compressedFormatGroup		(new tcu::TestCaseGroup(testCtx, compressedFormatGroupName.c_str(), ""));
2887 
2888 						for (deUint32 formatUncompressedNdx = 0; formatUncompressedNdx < formatsUncompressedSets[formatBitnessGroup].count; ++formatUncompressedNdx)
2889 						{
2890 							const VkFormat			formatUncompressed			= formatsUncompressedSets[formatBitnessGroup].formats[formatUncompressedNdx];
2891 							const std::string		uncompressedFormatGroupName	= getFormatShortString(formatUncompressed);
2892 							const TestParameters	parameters					=
2893 							{
2894 								static_cast<Operation>(operationNdx),
2895 								static_cast<ShaderType>(shaderType),
2896 								mipmapTest ? getUnniceResolution(formatCompressed, 3u) : UVec3(64u, 64u, 1u),
2897 								IMAGE_TYPE_2D,
2898 								formatCompressed,
2899 								formatUncompressed,
2900 								(operationNdx == OPERATION_IMAGE_STORE) ? 3u : 2u,
2901 								compressedImageUsageFlags[operationNdx],
2902 								compressedImageViewUsageFlags[operationNdx],
2903 								uncompressedImageUsageFlags[operationNdx],
2904 								mipmapTest,
2905 								VK_FORMAT_R8G8B8A8_UNORM
2906 							};
2907 
2908 							compressedFormatGroup->addChild(new TexelViewCompatibleCase(testCtx, uncompressedFormatGroupName, "", parameters));
2909 						}
2910 
2911 						imageOperationGroup->addChild(compressedFormatGroup.release());
2912 					}
2913 				}
2914 
2915 				mipmapTypeGroup->addChild(imageOperationGroup.release());
2916 			}
2917 
2918 			pipelineTypeGroup->addChild(mipmapTypeGroup.release());
2919 		}
2920 
2921 		texelViewCompatibleTests->addChild(pipelineTypeGroup.release());
2922 	}
2923 
2924 	return texelViewCompatibleTests.release();
2925 }
2926 
2927 } // image
2928 } // vkt
2929