/*------------------------------------------------------------------------
 * Vulkan Conformance Tests
 * ------------------------
 *
 * Copyright (c) 2015 The Khronos Group Inc.
 * Copyright (c) 2015 Intel Corporation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 *//*!
 * \file
 * \brief Draw Indexed Tests
 *//*--------------------------------------------------------------------*/

#include "vktDrawIndexedTest.hpp"

#include "vktTestCaseUtil.hpp"
#include "vktDrawTestCaseUtil.hpp"

#include "vktDrawBaseClass.hpp"

#include "tcuTestLog.hpp"
#include "tcuResource.hpp"
#include "tcuImageCompare.hpp"
#include "tcuTextureUtil.hpp"
#include "tcuRGBA.hpp"

#include "vkDefs.hpp"
#include "vkCmdUtil.hpp"

namespace vkt
{
namespace Draw
{
namespace
{

enum
{
	VERTEX_OFFSET_DEFAULT	= 13,
	VERTEX_OFFSET_MINUS_ONE	= -1,
	VERTEX_OFFSET_NEGATIVE	= -13,
};

enum class IndexBindOffset
{
	DEFAULT		= 0,
	POSITIVE	= 16,	// Must be aligned to the index data type size.
};

enum class MemoryBindOffset
{
	DEFAULT		= 0,
	POSITIVE	= 16,	// Will be rounded up to the alignment requirement.
};

struct TestSpec2 : TestSpecBase
{
	const int32_t			vertexOffset;
	const vk::VkDeviceSize	bindIndexBufferOffset;
	const vk::VkDeviceSize	memoryBindOffset;
	bool	useMaintenance5Ext;

	TestSpec2 (const ShaderMap& shaders_,
			  vk::VkPrimitiveTopology topology_,
			  SharedGroupParams groupParams_,
			  int32_t vertexOffset_,
			  vk::VkDeviceSize bindIndexBufferOffset_,
			  vk::VkDeviceSize memoryBindOffset_,
			  bool useMaintenance5Ext_)
		: TestSpecBase			{shaders_, topology_, groupParams_}
		, vertexOffset			(vertexOffset_)
		, bindIndexBufferOffset	(bindIndexBufferOffset_)
		, memoryBindOffset		(memoryBindOffset_)
		, useMaintenance5Ext	(useMaintenance5Ext_)
	{
	}
};

class DrawIndexed : public DrawTestsBaseClass
{
public:
	typedef		TestSpec2	TestSpec;

								DrawIndexed				(Context				&context,
														 TestSpec				testSpec);
	virtual		tcu::TestStatus	iterate					(void);
protected:
				void			cmdBindIndexBufferImpl	(vk::VkCommandBuffer	commandBuffer,
														 vk::VkBuffer			indexBuffer,
														 vk::VkDeviceSize		offset,
														 vk::VkDeviceSize		size,
														 vk::VkIndexType		indexType);
	std::vector<deUint32>		m_indexes;
	de::SharedPtr<Buffer>		m_indexBuffer;
	const TestSpec				m_testSpec;
};

class DrawInstancedIndexed : public DrawIndexed
{
public:
								DrawInstancedIndexed	(Context &context, TestSpec testSpec);
	virtual		tcu::TestStatus	iterate					(void);
};

DrawIndexed::DrawIndexed (Context &context, TestSpec testSpec)
	: DrawTestsBaseClass(context, testSpec.shaders[glu::SHADERTYPE_VERTEX], testSpec.shaders[glu::SHADERTYPE_FRAGMENT], testSpec.groupParams, testSpec.topology)
	, m_testSpec(testSpec)
{
	// When using a positive vertex offset, the strategy is:
	// - Storing vertices with that offset in the vertex buffer.
	// - Using indices normally as if they were stored at the start of the buffer.
	//
	// When using a negative vertex offset, the strategy is:
	// - Store vertices at the start of the vertex buffer.
	// - Increase indices by abs(offset) so that, once the offset is subtracted, they resolve to the regular positions.
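	//
	// For example, with vertexOffset == -13 the indices pushed below are increased by 13, so
	// index 13 resolves to vertex 13 + (-13) == 0, i.e. the first element stored in m_data.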

	const uint32_t indexOffset = (testSpec.vertexOffset < 0 ? static_cast<uint32_t>(-testSpec.vertexOffset) : 0u);
	switch (m_topology)
	{
		case vk::VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST:
			m_indexes.push_back(0 + indexOffset);
			m_indexes.push_back(0 + indexOffset);
			m_indexes.push_back(2 + indexOffset);
			m_indexes.push_back(0 + indexOffset);
			m_indexes.push_back(6 + indexOffset);
			m_indexes.push_back(6 + indexOffset);
			m_indexes.push_back(0 + indexOffset);
			m_indexes.push_back(7 + indexOffset);
			break;
		case vk::VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP:
			m_indexes.push_back(0 + indexOffset);
			m_indexes.push_back(0 + indexOffset);
			m_indexes.push_back(2 + indexOffset);
			m_indexes.push_back(0 + indexOffset);
			m_indexes.push_back(6 + indexOffset);
			m_indexes.push_back(5 + indexOffset);
			m_indexes.push_back(0 + indexOffset);
			m_indexes.push_back(7 + indexOffset);
			break;

		case vk::VK_PRIMITIVE_TOPOLOGY_POINT_LIST:
		case vk::VK_PRIMITIVE_TOPOLOGY_LINE_LIST:
		case vk::VK_PRIMITIVE_TOPOLOGY_LINE_STRIP:
		case vk::VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN:
		case vk::VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY:
		case vk::VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY:
		case vk::VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY:
		case vk::VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY:
		case vk::VK_PRIMITIVE_TOPOLOGY_PATCH_LIST:
		case vk::VK_PRIMITIVE_TOPOLOGY_LAST:
			DE_FATAL("Topology not implemented");
			break;
		default:
			DE_FATAL("Unknown topology");
			break;
	}

	// This works for both positive and negative vertex offsets.
	for (int unusedIdx = 0; unusedIdx < testSpec.vertexOffset; unusedIdx++)
	{
		m_data.push_back(VertexElementData(tcu::Vec4(-1.0f, 1.0f, 1.0f, 1.0f), tcu::RGBA::blue().toVec(), -1));
	}

	int vertexIndex = (testSpec.vertexOffset >= 0 ? testSpec.vertexOffset : 0);

	m_data.push_back(VertexElementData(tcu::Vec4(	-0.3f,	 0.3f,	1.0f,	1.0f), tcu::RGBA::blue().toVec(), vertexIndex++));
	m_data.push_back(VertexElementData(tcu::Vec4(	-1.0f,	 1.0f,	1.0f,	1.0f), tcu::RGBA::blue().toVec(), vertexIndex++));
	m_data.push_back(VertexElementData(tcu::Vec4(	-0.3f,	-0.3f,	1.0f,	1.0f), tcu::RGBA::blue().toVec(), vertexIndex++));
	m_data.push_back(VertexElementData(tcu::Vec4(	 1.0f,	-1.0f,	1.0f,	1.0f), tcu::RGBA::blue().toVec(), vertexIndex++));
	m_data.push_back(VertexElementData(tcu::Vec4(	-0.3f,	-0.3f,	1.0f,	1.0f), tcu::RGBA::blue().toVec(), vertexIndex++));
	m_data.push_back(VertexElementData(tcu::Vec4(	 0.3f,	 0.3f,	1.0f,	1.0f), tcu::RGBA::blue().toVec(), vertexIndex++));
	m_data.push_back(VertexElementData(tcu::Vec4(	 0.3f,	-0.3f,	1.0f,	1.0f), tcu::RGBA::blue().toVec(), vertexIndex++));
	m_data.push_back(VertexElementData(tcu::Vec4(	 0.3f,	 0.3f,	1.0f,	1.0f), tcu::RGBA::blue().toVec(), vertexIndex++));

	m_data.push_back(VertexElementData(tcu::Vec4(	-1.0f,	 1.0f,	1.0f,	1.0f), tcu::RGBA::blue().toVec(), -1));

	initialize();
}

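// Binds the index buffer for the draw: when the test exercises VK_KHR_maintenance5 it uses
// vkCmdBindIndexBuffer2KHR, which additionally takes the size of the bound range; otherwise
// it falls back to the classic vkCmdBindIndexBuffer and ignores dataSize.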
void DrawIndexed::cmdBindIndexBufferImpl(vk::VkCommandBuffer	commandBuffer,
										 vk::VkBuffer			indexBuffer,
										 vk::VkDeviceSize		offset,
										 vk::VkDeviceSize		dataSize,
										 vk::VkIndexType		indexType)
{
#ifndef CTS_USES_VULKANSC
	if (m_testSpec.useMaintenance5Ext)
		m_vk.cmdBindIndexBuffer2KHR(commandBuffer, indexBuffer, offset, dataSize, indexType);
	else
#endif
	{
		DE_UNREF(dataSize);
		m_vk.cmdBindIndexBuffer(commandBuffer, indexBuffer, offset, indexType);
	}
}

tcu::TestStatus DrawIndexed::iterate (void)
{
	tcu::TestLog&			log			= m_context.getTestContext().getLog();
	const auto&				vki			= m_context.getInstanceInterface();
	const auto				physDev		= m_context.getPhysicalDevice();
	const vk::VkQueue		queue		= m_context.getUniversalQueue();
	const vk::VkDevice		device		= m_context.getDevice();
	const auto				memProps	= vk::getPhysicalDeviceMemoryProperties(vki, physDev);
	const auto				atomSize	= m_context.getDeviceProperties().limits.nonCoherentAtomSize;
	const auto				dataSize	= static_cast<vk::VkDeviceSize>(de::dataSize(m_indexes));
	const auto				bufferSize	= dataSize + m_testSpec.bindIndexBufferOffset;
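	// Pass nonCoherentAtomSize and the requested extra memory bind offset to the allocator;
	// per MemoryBindOffset, the offset is rounded up to the allocation's alignment requirement.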
	vk::SimpleAllocator		allocator	(m_vk, device, memProps, vk::SimpleAllocator::OptionalOffsetParams({ atomSize, m_testSpec.memoryBindOffset }));

	m_indexBuffer = Buffer::createAndAlloc(	m_vk, device,
											BufferCreateInfo(bufferSize,
															 vk::VK_BUFFER_USAGE_INDEX_BUFFER_BIT),
											allocator,
											vk::MemoryRequirement::HostVisible);

	uint8_t* ptr = reinterpret_cast<uint8_t*>(m_indexBuffer->getBoundMemory().getHostPtr());

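	// Index buffer layout: the first bindIndexBufferOffset bytes are 0xFF padding that the
	// bind offset skips over, followed by the actual index data.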
	deMemset(ptr, 0xFF, static_cast<size_t>(m_testSpec.bindIndexBufferOffset));
	deMemcpy(ptr + m_testSpec.bindIndexBufferOffset, de::dataOrNull(m_indexes), de::dataSize(m_indexes));
	vk::flushAlloc(m_vk, device, m_indexBuffer->getBoundMemory());

	const vk::VkDeviceSize	vertexBufferOffset	= 0;
	const vk::VkBuffer		vertexBuffer		= m_vertexBuffer->object();
	const vk::VkBuffer		indexBuffer			= m_indexBuffer->object();

#ifndef CTS_USES_VULKANSC
	if (m_groupParams->useSecondaryCmdBuffer)
	{
		// record secondary command buffer
		if (m_groupParams->secondaryCmdBufferCompletelyContainsDynamicRenderpass)
		{
			beginSecondaryCmdBuffer(m_vk, vk::VK_RENDERING_CONTENTS_SECONDARY_COMMAND_BUFFERS_BIT);
			beginDynamicRender(*m_secCmdBuffer);
		}
		else
			beginSecondaryCmdBuffer(m_vk);

		m_vk.cmdBindVertexBuffers(*m_secCmdBuffer, 0, 1, &vertexBuffer, &vertexBufferOffset);
		cmdBindIndexBufferImpl(*m_secCmdBuffer, indexBuffer, m_testSpec.bindIndexBufferOffset, dataSize, vk::VK_INDEX_TYPE_UINT32);
		m_vk.cmdBindPipeline(*m_secCmdBuffer, vk::VK_PIPELINE_BIND_POINT_GRAPHICS, *m_pipeline);
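		// vkCmdDrawIndexed arguments: indexCount, instanceCount, firstIndex, vertexOffset, firstInstance.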
		m_vk.cmdDrawIndexed(*m_secCmdBuffer, 6, 1, 2, m_testSpec.vertexOffset, 0);

		if (m_groupParams->secondaryCmdBufferCompletelyContainsDynamicRenderpass)
			endDynamicRender(*m_secCmdBuffer);

		endCommandBuffer(m_vk, *m_secCmdBuffer);

		// record primary command buffer
		beginCommandBuffer(m_vk, *m_cmdBuffer, 0u);
		preRenderBarriers();

		if (!m_groupParams->secondaryCmdBufferCompletelyContainsDynamicRenderpass)
			beginDynamicRender(*m_cmdBuffer, vk::VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS);

		m_vk.cmdExecuteCommands(*m_cmdBuffer, 1u, &*m_secCmdBuffer);

		if (!m_groupParams->secondaryCmdBufferCompletelyContainsDynamicRenderpass)
			endDynamicRender(*m_cmdBuffer);

		endCommandBuffer(m_vk, *m_cmdBuffer);
	}
	else if (m_groupParams->useDynamicRendering)
	{
		beginCommandBuffer(m_vk, *m_cmdBuffer, 0u);
		preRenderBarriers();
		beginDynamicRender(*m_cmdBuffer);

		m_vk.cmdBindVertexBuffers(*m_cmdBuffer, 0, 1, &vertexBuffer, &vertexBufferOffset);
		cmdBindIndexBufferImpl(*m_cmdBuffer, indexBuffer, m_testSpec.bindIndexBufferOffset, dataSize, vk::VK_INDEX_TYPE_UINT32);
		m_vk.cmdBindPipeline(*m_cmdBuffer, vk::VK_PIPELINE_BIND_POINT_GRAPHICS, *m_pipeline);
		m_vk.cmdDrawIndexed(*m_cmdBuffer, 6, 1, 2, m_testSpec.vertexOffset, 0);

		endDynamicRender(*m_cmdBuffer);
		endCommandBuffer(m_vk, *m_cmdBuffer);
	}
#endif // CTS_USES_VULKANSC

	if (!m_groupParams->useDynamicRendering)
	{
		beginCommandBuffer(m_vk, *m_cmdBuffer, 0u);
		preRenderBarriers();
		beginLegacyRender(*m_cmdBuffer);

		m_vk.cmdBindVertexBuffers(*m_cmdBuffer, 0, 1, &vertexBuffer, &vertexBufferOffset);
		cmdBindIndexBufferImpl(*m_cmdBuffer, indexBuffer, m_testSpec.bindIndexBufferOffset, dataSize, vk::VK_INDEX_TYPE_UINT32);
		m_vk.cmdBindPipeline(*m_cmdBuffer, vk::VK_PIPELINE_BIND_POINT_GRAPHICS, *m_pipeline);
		m_vk.cmdDrawIndexed(*m_cmdBuffer, 6, 1, 2, m_testSpec.vertexOffset, 0);

		endLegacyRender(*m_cmdBuffer);
		endCommandBuffer(m_vk, *m_cmdBuffer);
	}

	submitCommandsAndWait(m_vk, device, queue, m_cmdBuffer.get());

	// Validation
	tcu::Texture2D referenceFrame(vk::mapVkFormat(m_colorAttachmentFormat), (int)(0.5f + static_cast<float>(WIDTH)), (int)(0.5f + static_cast<float>(HEIGHT)));
	referenceFrame.allocLevel(0);

	const deInt32 frameWidth	= referenceFrame.getWidth();
	const deInt32 frameHeight	= referenceFrame.getHeight();

	tcu::clear(referenceFrame.getLevel(0), tcu::Vec4(0.0f, 0.0f, 0.0f, 1.0f));

	ReferenceImageCoordinates refCoords;

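	// Rasterize the reference image: map each pixel to normalized device coordinates in [-1, 1)
	// and paint it blue when it falls inside the reference rectangle.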
	for (int y = 0; y < frameHeight; y++)
	{
		const float yCoord = (float)(y / (0.5*frameHeight)) - 1.0f;

		for (int x = 0; x < frameWidth; x++)
		{
			const float xCoord = (float)(x / (0.5*frameWidth)) - 1.0f;

			if ((yCoord >= refCoords.bottom &&
				 yCoord <= refCoords.top	&&
				 xCoord >= refCoords.left	&&
				 xCoord <= refCoords.right))
				referenceFrame.getLevel(0).setPixel(tcu::Vec4(0.0f, 0.0f, 1.0f, 1.0f), x, y);
		}
	}

	const vk::VkOffset3D zeroOffset = { 0, 0, 0 };
	const tcu::ConstPixelBufferAccess renderedFrame = m_colorTargetImage->readSurface(queue, m_context.getDefaultAllocator(),
		vk::VK_IMAGE_LAYOUT_GENERAL, zeroOffset, WIDTH, HEIGHT, vk::VK_IMAGE_ASPECT_COLOR_BIT);

	qpTestResult res = QP_TEST_RESULT_PASS;

	if (!tcu::fuzzyCompare(log, "Result", "Image comparison result",
		referenceFrame.getLevel(0), renderedFrame, 0.05f,
		tcu::COMPARE_LOG_RESULT)) {
		res = QP_TEST_RESULT_FAIL;
	}

	return tcu::TestStatus(res, qpGetTestResultName(res));
}

DrawInstancedIndexed::DrawInstancedIndexed (Context &context, TestSpec testSpec)
	: DrawIndexed	(context, testSpec)
{
}

tcu::TestStatus DrawInstancedIndexed::iterate (void)
{
	tcu::TestLog&			log			= m_context.getTestContext().getLog();
	const auto&				vki			= m_context.getInstanceInterface();
	const auto				physDev		= m_context.getPhysicalDevice();
	const vk::VkQueue		queue		= m_context.getUniversalQueue();
	const vk::VkDevice		device		= m_context.getDevice();
	const auto				memProps	= vk::getPhysicalDeviceMemoryProperties(vki, physDev);
	const auto				dataSize	= static_cast<vk::VkDeviceSize>(de::dataSize(m_indexes));
	const vk::VkDeviceSize	bufferSize	= dataSize + m_testSpec.bindIndexBufferOffset;
	const auto				atomSize	= m_context.getDeviceProperties().limits.nonCoherentAtomSize;
	vk::SimpleAllocator		allocator	(m_vk, device, memProps, vk::SimpleAllocator::OptionalOffsetParams({ atomSize, m_testSpec.memoryBindOffset }));

	beginCommandBuffer(m_vk, *m_cmdBuffer, 0u);
	preRenderBarriers();

#ifndef CTS_USES_VULKANSC
	if (m_groupParams->useDynamicRendering)
		beginDynamicRender(*m_cmdBuffer);
	else
		beginLegacyRender(*m_cmdBuffer);
#else
	beginLegacyRender(*m_cmdBuffer);
#endif // CTS_USES_VULKANSC

	m_indexBuffer = Buffer::createAndAlloc(	m_vk, device,
											BufferCreateInfo(bufferSize,
															 vk::VK_BUFFER_USAGE_INDEX_BUFFER_BIT),
											allocator,
											vk::MemoryRequirement::HostVisible);

	uint8_t* ptr = reinterpret_cast<uint8_t*>(m_indexBuffer->getBoundMemory().getHostPtr());

	deMemset(ptr, 0xFF, static_cast<size_t>(m_testSpec.bindIndexBufferOffset));
	deMemcpy(ptr + m_testSpec.bindIndexBufferOffset, de::dataOrNull(m_indexes), de::dataSize(m_indexes));
	vk::flushAlloc(m_vk, device, m_indexBuffer->getBoundMemory());

	const vk::VkDeviceSize	vertexBufferOffset	= 0;
	const vk::VkBuffer		vertexBuffer		= m_vertexBuffer->object();
	const vk::VkBuffer		indexBuffer			= m_indexBuffer->object();

	m_vk.cmdBindVertexBuffers(*m_cmdBuffer, 0, 1, &vertexBuffer, &vertexBufferOffset);
	cmdBindIndexBufferImpl(*m_cmdBuffer, indexBuffer, m_testSpec.bindIndexBufferOffset, dataSize, vk::VK_INDEX_TYPE_UINT32);
	m_vk.cmdBindPipeline(*m_cmdBuffer, vk::VK_PIPELINE_BIND_POINT_GRAPHICS, *m_pipeline);

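	// vkCmdDrawIndexed arguments: indexCount, instanceCount, firstIndex, vertexOffset, firstInstance.
	// Here four instances are drawn, starting at firstInstance 2.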
	switch (m_topology)
	{
		case vk::VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST:
			m_vk.cmdDrawIndexed(*m_cmdBuffer, 6, 4, 2, m_testSpec.vertexOffset, 2);
			break;
		case vk::VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP:
			m_vk.cmdDrawIndexed(*m_cmdBuffer, 4, 4, 2, m_testSpec.vertexOffset, 2);
			break;
		case vk::VK_PRIMITIVE_TOPOLOGY_POINT_LIST:
		case vk::VK_PRIMITIVE_TOPOLOGY_LINE_LIST:
		case vk::VK_PRIMITIVE_TOPOLOGY_LINE_STRIP:
		case vk::VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN:
		case vk::VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY:
		case vk::VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY:
		case vk::VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY:
		case vk::VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY:
		case vk::VK_PRIMITIVE_TOPOLOGY_PATCH_LIST:
		case vk::VK_PRIMITIVE_TOPOLOGY_LAST:
			DE_FATAL("Topology not implemented");
			break;
		default:
			DE_FATAL("Unknown topology");
			break;
	}

#ifndef CTS_USES_VULKANSC
	if (m_groupParams->useDynamicRendering)
		endDynamicRender(*m_cmdBuffer);
	else
		endLegacyRender(*m_cmdBuffer);
#else
	endLegacyRender(*m_cmdBuffer);
#endif // CTS_USES_VULKANSC

	endCommandBuffer(m_vk, *m_cmdBuffer);

	submitCommandsAndWait(m_vk, device, queue, m_cmdBuffer.get());

	// Validation
	VK_CHECK(m_vk.queueWaitIdle(queue));

	tcu::Texture2D referenceFrame(vk::mapVkFormat(m_colorAttachmentFormat), (int)(0.5f + static_cast<float>(WIDTH)), (int)(0.5f + static_cast<float>(HEIGHT)));
	referenceFrame.allocLevel(0);

	const deInt32 frameWidth = referenceFrame.getWidth();
	const deInt32 frameHeight = referenceFrame.getHeight();

	tcu::clear(referenceFrame.getLevel(0), tcu::Vec4(0.0f, 0.0f, 0.0f, 1.0f));

	ReferenceImageInstancedCoordinates refInstancedCoords;

	for (int y = 0; y < frameHeight; y++)
	{
		const float yCoord = (float)(y / (0.5*frameHeight)) - 1.0f;

		for (int x = 0; x < frameWidth; x++)
		{
			const float xCoord = (float)(x / (0.5*frameWidth)) - 1.0f;

			if ((yCoord >= refInstancedCoords.bottom	&&
				 yCoord <= refInstancedCoords.top		&&
				 xCoord >= refInstancedCoords.left		&&
				 xCoord <= refInstancedCoords.right))
				referenceFrame.getLevel(0).setPixel(tcu::Vec4(0.0f, 0.0f, 1.0f, 1.0f), x, y);
		}
	}

	const vk::VkOffset3D zeroOffset = { 0, 0, 0 };
	const tcu::ConstPixelBufferAccess renderedFrame = m_colorTargetImage->readSurface(queue, m_context.getDefaultAllocator(),
		vk::VK_IMAGE_LAYOUT_GENERAL, zeroOffset, WIDTH, HEIGHT, vk::VK_IMAGE_ASPECT_COLOR_BIT);

	qpTestResult res = QP_TEST_RESULT_PASS;

	if (!tcu::fuzzyCompare(log, "Result", "Image comparison result",
		referenceFrame.getLevel(0), renderedFrame, 0.05f,
		tcu::COMPARE_LOG_RESULT)) {
		res = QP_TEST_RESULT_FAIL;
	}

	return tcu::TestStatus(res, qpGetTestResultName(res));
}

void checkSupport(Context& context, DrawIndexed::TestSpec testSpec)
{
	if (testSpec.groupParams->useDynamicRendering)
		context.requireDeviceFunctionality("VK_KHR_dynamic_rendering");

#ifndef CTS_USES_VULKANSC
	if (testSpec.useMaintenance5Ext)
		context.requireDeviceFunctionality(VK_KHR_MAINTENANCE_5_EXTENSION_NAME);
#endif
}

}	// anonymous

DrawIndexedTests::DrawIndexedTests (tcu::TestContext &testCtx, const SharedGroupParams groupParams)
	: TestCaseGroup		(testCtx, "indexed_draw")
	, m_groupParams		(groupParams)
{
	/* Left blank on purpose */
}

DrawIndexedTests::~DrawIndexedTests (void) {}


void DrawIndexedTests::init	(void)
{
	init(false);
#ifndef CTS_USES_VULKANSC
	init(true);
#endif
}

void DrawIndexedTests::init (bool useMaintenance5Ext)
{
	std::string	maintenance5ExtNameSuffix;
	std::string	maintenance5ExtDescSuffix;

	if (useMaintenance5Ext)
	{
		maintenance5ExtNameSuffix = "_maintenance_5";
		maintenance5ExtDescSuffix = " using vkCmdBindIndexBuffer2KHR() introduced in VK_KHR_maintenance5";
	}

	const struct {
		const vk::VkPrimitiveTopology		topology;
		const char*							nameSuffix;
	} TopologyCases[] =
	{
		// triangle list
		{ vk::VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST,	"triangle_list"},
		// triangle strip
		{ vk::VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP,	"triangle_strip"},
	};

	const struct
	{
		const int		offset;
		const char*		nameSuffix;
	} OffsetCases[] =
	{
		{ VERTEX_OFFSET_DEFAULT,	""},
		// using -1 as the vertex offset
		{ VERTEX_OFFSET_MINUS_ONE,	"_offset_minus_one"},
		// using a large negative number as the vertex offset
		{ VERTEX_OFFSET_NEGATIVE,	"_offset_negative_large"},
	};

	const struct
	{
		IndexBindOffset	bindOffset;
		const char*		nameSuffix;
	} IndexBindOffsetCases[] =
	{
		{ IndexBindOffset::DEFAULT,		""},
		// and applying an index buffer bind offset
		{ IndexBindOffset::POSITIVE,	"_with_bind_offset"},
	};

	const struct
	{
		MemoryBindOffset	memoryBindOffset;
		const char*			nameSuffix;
	} MemoryBindOffsetCases[] =
	{
		{ MemoryBindOffset::DEFAULT,	""},
		// and applying an extra memory allocation offset
		{ MemoryBindOffset::POSITIVE,	"_with_alloc_offset"},
	};

	for (const auto& offsetCase : OffsetCases)
	{
		for (const auto& indexBindOffsetCase : IndexBindOffsetCases)
		{
			const auto indexBindOffset = static_cast<vk::VkDeviceSize>(indexBindOffsetCase.bindOffset);

			for (const auto& memoryBindOffsetCase : MemoryBindOffsetCases)
			{
				const auto memoryBindOffset = static_cast<vk::VkDeviceSize>(memoryBindOffsetCase.memoryBindOffset);

				for (const auto& topologyCase : TopologyCases)
				{
					{
						DrawIndexed::TestSpec testSpec
						(
							{
								{ glu::SHADERTYPE_VERTEX, "vulkan/draw/VertexFetch.vert" },
								{ glu::SHADERTYPE_FRAGMENT, "vulkan/draw/VertexFetch.frag" }
							},
							topologyCase.topology,
							m_groupParams,
							offsetCase.offset,
							indexBindOffset,
							memoryBindOffset,
							useMaintenance5Ext
						);

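						// Example of a resulting name: "draw_indexed_triangle_strip_offset_minus_one_with_bind_offset_with_alloc_offset_maintenance_5".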
						const auto testName = std::string("draw_indexed_") + topologyCase.nameSuffix + offsetCase.nameSuffix + indexBindOffsetCase.nameSuffix + memoryBindOffsetCase.nameSuffix + maintenance5ExtNameSuffix;

						addChild(new InstanceFactory<DrawIndexed, FunctionSupport1<DrawIndexed::TestSpec> >
							(m_testCtx, testName, testSpec, FunctionSupport1<DrawIndexed::TestSpec>::Args(checkSupport, testSpec)));
					}
					{
						DrawInstancedIndexed::TestSpec testSpec
						(
							{
								{ glu::SHADERTYPE_VERTEX, "vulkan/draw/VertexFetchInstancedFirstInstance.vert" },
								{ glu::SHADERTYPE_FRAGMENT, "vulkan/draw/VertexFetch.frag" }
							},
							topologyCase.topology,
							m_groupParams,
							offsetCase.offset,
							indexBindOffset,
							memoryBindOffset,
							useMaintenance5Ext
						);

						const auto testName = std::string("draw_instanced_indexed_") + topologyCase.nameSuffix + offsetCase.nameSuffix + indexBindOffsetCase.nameSuffix + memoryBindOffsetCase.nameSuffix + maintenance5ExtNameSuffix;

						addChild(new InstanceFactory<DrawInstancedIndexed, FunctionSupport1<DrawInstancedIndexed::TestSpec> >
							(m_testCtx, testName, testSpec, FunctionSupport1<DrawInstancedIndexed::TestSpec>::Args(checkSupport, testSpec)));
					}
				}
			}
		}
	}
}

}	// Draw
}	// vkt