1 /*-------------------------------------------------------------------------
2 * Vulkan CTS Framework
3 * --------------------
4 *
5 * Copyright (c) 2021 The Khronos Group Inc.
6 *
7 * Licensed under the Apache License, Version 2.0 (the "License");
8 * you may not use this file except in compliance with the License.
9 * You may obtain a copy of the License at
10 *
11 * http://www.apache.org/licenses/LICENSE-2.0
12 *
13 * Unless required by applicable law or agreed to in writing, software
14 * distributed under the License is distributed on an "AS IS" BASIS,
15 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 * See the License for the specific language governing permissions and
17 * limitations under the License.
18 *
19 *//*!
20 * \file
21 * \brief Defines class for handling resources ( programs, pipelines, files, etc. )
22 *//*--------------------------------------------------------------------*/
23
24 #include "vkResourceInterface.hpp"
25 #include "vkQueryUtil.hpp"
26
27 #ifdef CTS_USES_VULKANSC
28 #include <functional>
29 #include <fstream>
30 #include "vkSafetyCriticalUtil.hpp"
31 #include "vkRefUtil.hpp"
32 #include "tcuCommandLine.hpp"
33 #include "vksCacheBuilder.hpp"
34 #include "vksSerializer.hpp"
35 #include "vkApiVersion.hpp"
36 using namespace vksc_server::json;
37 #endif // CTS_USES_VULKANSC
38
39 namespace vk
40 {
41
// Constructs the resource interface shared by tests of one test context.
// Under Vulkan SC the object-reservation statistics are reset to defaults and
// the pipeline-cache request counters are pre-seeded (see comment in the body).
ResourceInterface::ResourceInterface (tcu::TestContext& testCtx)
	: m_testCtx					(testCtx)
#ifdef CTS_USES_VULKANSC
	, m_commandPoolIndex		(0u)
	, m_resourceCounter			(0u)
	, m_statCurrent				(resetDeviceObjectReservationCreateInfo())
	, m_statMax					(resetDeviceObjectReservationCreateInfo())
	, m_enabledHandleDestroy	(true)
#endif // CTS_USES_VULKANSC
{
#ifdef CTS_USES_VULKANSC
	// pipelineCacheRequestCount does not contain one instance of createPipelineCache call that happens only in subprocess
	m_statCurrent.pipelineCacheRequestCount	= 1u;
	m_statMax.pipelineCacheRequestCount		= 1u;
#endif // CTS_USES_VULKANSC
}
58
// Out-of-line empty destructor.
// NOTE(review): presumably declared virtual in the header since
// ResourceInterfaceStandard derives from this class — confirm there.
ResourceInterface::~ResourceInterface ()
{
}
62
// Records the path of the test case that is about to run; the path is later
// used to tag pipelines created on behalf of that test (see m_currentTestPath
// usages in createGraphicsPipelines / createComputePipelines).
void ResourceInterface::initTestCase (const std::string& casePath)
{
	m_currentTestPath = casePath;
}

// Returns the path of the currently running test case.
const std::string& ResourceInterface::getCasePath() const
{
	return m_currentTestPath;
}
72
73 #ifdef CTS_USES_VULKANSC
initApiVersion(const deUint32 version)74 void ResourceInterface::initApiVersion (const deUint32 version)
75 {
76 const ApiVersion apiVersion = unpackVersion(version);
77 const bool vulkanSC = (apiVersion.variantNum == 1);
78
79 m_version = tcu::Maybe<deUint32>(version);
80 m_vulkanSC = vulkanSC;
81 }
82
// True when running against Vulkan SC (API variant 1, see initApiVersion).
// NOTE(review): m_vulkanSC.get() requires initApiVersion() to have been called
// first — confirm callers guarantee this ordering.
bool ResourceInterface::isVulkanSC (void) const
{
	return m_vulkanSC.get();
}

// Returns the next value of a monotonically increasing counter used to mint
// unique fake handles (e.g. shader modules in createShaderModule).
deUint64 ResourceInterface::incResourceCounter ()
{
	return ++m_resourceCounter;
}

// Mutex handed out to callers that update the reservation statistics below.
std::mutex& ResourceInterface::getStatMutex ()
{
	return m_mutex;
}

// Object-reservation statistics of the currently running test.
VkDeviceObjectReservationCreateInfo& ResourceInterface::getStatCurrent ()
{
	return m_statCurrent;
}

// Maximum object-reservation statistics observed across tests (mutable access).
VkDeviceObjectReservationCreateInfo& ResourceInterface::getStatMax ()
{
	return m_statMax;
}

// Maximum object-reservation statistics observed across tests (read-only).
const VkDeviceObjectReservationCreateInfo& ResourceInterface::getStatMax () const
{
	return m_statMax;
}

// Enables/disables handle destruction; semantics are defined at the call sites
// (used by the SC device driver code paths).
void ResourceInterface::setHandleDestroy(bool value)
{
	m_enabledHandleDestroy = value;
}

// Returns whether handle destruction is currently enabled.
bool ResourceInterface::isEnabledHandleDestroy() const
{
	return m_enabledHandleDestroy;
}
122
// Strips m_pipelineInput of every object that is not referenced, directly or
// transitively, by a recorded pipeline: shader modules, render passes,
// pipeline layouts, descriptor set layouts, samplers and Ycbcr conversions.
// References are discovered by re-parsing the stored JSON create infos.
void ResourceInterface::removeRedundantObjects ()
{
	// At the end of the day we only need to export objects used in pipelines.
	// Rest of the objects may be removed from m_json* structures as redundant
	std::set<VkSamplerYcbcrConversion>	samplerYcbcrConversionsInPipeline;
	std::set<VkSampler>					samplersInPipeline;
	std::set<VkShaderModule>			shadersInPipeline;
	std::set<VkRenderPass>				renderPassesInPipeline;
	std::set<VkPipelineLayout>			pipelineLayoutsInPipeline;
	std::set<VkDescriptorSetLayout>		descriptorSetLayoutsInPipeline;

	Context jsonReader;

	// Pass 1: collect handles referenced directly by each pipeline. The
	// pipeline type is recognized from the sType string inside its JSON text.
	for (auto it = begin(m_pipelineInput.pipelines); it != end(m_pipelineInput.pipelines); ++it)
	{
		if (it->pipelineContents.find("VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO") != std::string::npos)
		{
			VkGraphicsPipelineCreateInfo gpCI;
			deMemset(&gpCI, 0, sizeof(gpCI));
			readJSON_VkGraphicsPipelineCreateInfo(jsonReader, it->pipelineContents, gpCI);

			for (deUint32 i = 0; i < gpCI.stageCount; ++i)
				shadersInPipeline.insert(gpCI.pStages[i].module);
			renderPassesInPipeline.insert(gpCI.renderPass);
			pipelineLayoutsInPipeline.insert(gpCI.layout);
		}
		else if (it->pipelineContents.find("VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO") != std::string::npos)
		{
			VkComputePipelineCreateInfo cpCI;
			deMemset(&cpCI, 0, sizeof(cpCI));
			readJSON_VkComputePipelineCreateInfo(jsonReader, it->pipelineContents, cpCI);

			shadersInPipeline.insert(cpCI.stage.module);
			pipelineLayoutsInPipeline.insert(cpCI.layout);
		}
		else
			TCU_THROW(InternalError, "Could not recognize pipeline type");
	}
	// Pass 2: erase unreferenced shader modules.
	for (auto it = begin(m_pipelineInput.shaderModules); it != end(m_pipelineInput.shaderModules); )
	{
		if (shadersInPipeline.find(it->first) == end(shadersInPipeline))
			it = m_pipelineInput.shaderModules.erase(it);
		else
			++it;
	}
	// Erase unreferenced render passes.
	for (auto it = begin(m_pipelineInput.renderPasses); it != end(m_pipelineInput.renderPasses); )
	{
		if (renderPassesInPipeline.find(it->first) == end(renderPassesInPipeline))
			it = m_pipelineInput.renderPasses.erase(it);
		else
			++it;
	}
	// Erase unreferenced pipeline layouts; surviving layouts are re-parsed to
	// find the descriptor set layouts they reference.
	for (auto it = begin(m_pipelineInput.pipelineLayouts); it != end(m_pipelineInput.pipelineLayouts); )
	{
		if (pipelineLayoutsInPipeline.find(it->first) == end(pipelineLayoutsInPipeline))
		{
			it = m_pipelineInput.pipelineLayouts.erase(it);
		}
		else
		{
			VkPipelineLayoutCreateInfo plCI;
			deMemset(&plCI, 0, sizeof(plCI));
			readJSON_VkPipelineLayoutCreateInfo(jsonReader, it->second, plCI);
			for (deUint32 i = 0; i < plCI.setLayoutCount; ++i)
				descriptorSetLayoutsInPipeline.insert(plCI.pSetLayouts[i]);
			++it;
		}
	}
	// Erase unreferenced descriptor set layouts; surviving layouts are
	// re-parsed to find immutable samplers they reference.
	for (auto it = begin(m_pipelineInput.descriptorSetLayouts); it != end(m_pipelineInput.descriptorSetLayouts); )
	{
		if (descriptorSetLayoutsInPipeline.find(it->first) == end(descriptorSetLayoutsInPipeline))
			it = m_pipelineInput.descriptorSetLayouts.erase(it);
		else
		{
			VkDescriptorSetLayoutCreateInfo dsCI;
			deMemset(&dsCI, 0, sizeof(dsCI));
			readJSON_VkDescriptorSetLayoutCreateInfo(jsonReader, it->second, dsCI);

			for (deUint32 i = 0; i < dsCI.bindingCount; ++i)
			{
				if (dsCI.pBindings[i].pImmutableSamplers == NULL)
					continue;
				for (deUint32 j = 0; j < dsCI.pBindings[i].descriptorCount; ++j)
				{
					if (dsCI.pBindings[i].pImmutableSamplers[j] == DE_NULL)
						continue;
					samplersInPipeline.insert(dsCI.pBindings[i].pImmutableSamplers[j]);
				}
			}
			++it;
		}
	}

	// Erase unreferenced samplers; surviving samplers are re-parsed to find
	// Ycbcr conversions chained through pNext.
	for (auto it = begin(m_pipelineInput.samplers); it != end(m_pipelineInput.samplers); )
	{
		if (samplersInPipeline.find(it->first) == end(samplersInPipeline))
			it = m_pipelineInput.samplers.erase(it);
		else
		{
			VkSamplerCreateInfo sCI;
			deMemset(&sCI, 0, sizeof(sCI));
			readJSON_VkSamplerCreateInfo(jsonReader, it->second, sCI);

			if (sCI.pNext != DE_NULL)
			{
				// Only a VkSamplerYcbcrConversionInfo is expected in the chain;
				// the sType check guards against other extension structs.
				VkSamplerYcbcrConversionInfo* info = (VkSamplerYcbcrConversionInfo*)(sCI.pNext);
				if (info->sType == VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO)
					samplerYcbcrConversionsInPipeline.insert(info->conversion);
			}
			++it;
		}
	}
	// Finally erase unreferenced sampler Ycbcr conversions.
	for (auto it = begin(m_pipelineInput.samplerYcbcrConversions); it != end(m_pipelineInput.samplerYcbcrConversions); )
	{
		if (samplerYcbcrConversionsInPipeline.find(it->first) == end(samplerYcbcrConversionsInPipeline))
			it = m_pipelineInput.samplerYcbcrConversions.erase(it);
		else
			++it;
	}
}
243
finalizeCommandBuffers()244 void ResourceInterface::finalizeCommandBuffers ()
245 {
246 // We have information about command buffer sizes
247 // Now we have to convert it into command pool sizes
248 std::map<deUint64, std::size_t> cpToIndex;
249 for (std::size_t i = 0; i < m_commandPoolMemoryConsumption.size(); ++i)
250 cpToIndex.insert({ m_commandPoolMemoryConsumption[i].commandPool, i });
251 for (const auto& memC : m_commandBufferMemoryConsumption)
252 {
253 std::size_t j = cpToIndex[memC.second.commandPool];
254 m_commandPoolMemoryConsumption[j].updateValues
255 (
256 memC.second.maxCommandPoolAllocated,
257 memC.second.maxCommandPoolReservedSize,
258 memC.second.maxCommandBufferAllocated
259 );
260 m_commandPoolMemoryConsumption[j].commandBufferCount++;
261 }
262 // Each m_commandPoolMemoryConsumption element must have at least one command buffer ( see DeviceDriverSC::createCommandPoolHandlerNorm() )
263 // As a result we have to ensure that commandBufferRequestCount is not less than the number of command pools
264 m_statMax.commandBufferRequestCount = de::max(deUint32(m_commandPoolMemoryConsumption.size()), m_statMax.commandBufferRequestCount);
265 }
266
exportData() const267 std::vector<deUint8> ResourceInterface::exportData () const
268 {
269 vksc_server::VulkanDataTransmittedFromMainToSubprocess vdtfmtsp(m_pipelineInput, m_statMax, m_commandPoolMemoryConsumption, m_pipelineSizes);
270
271 return vksc_server::Serialize(vdtfmtsp);
272 }
273
importData(std::vector<deUint8> & importText) const274 void ResourceInterface::importData (std::vector<deUint8>& importText) const
275 {
276 vksc_server::VulkanDataTransmittedFromMainToSubprocess vdtfmtsp = vksc_server::Deserialize<vksc_server::VulkanDataTransmittedFromMainToSubprocess>(importText);
277
278 m_pipelineInput = vdtfmtsp.pipelineCacheInput;
279 m_statMax = vdtfmtsp.memoryReservation;
280 m_commandPoolMemoryConsumption = vdtfmtsp.commandPoolMemoryConsumption;
281 m_pipelineSizes = vdtfmtsp.pipelineSizes;
282 }
283
registerObjectHash(deUint64 handle,std::size_t hashValue) const284 void ResourceInterface::registerObjectHash (deUint64 handle, std::size_t hashValue) const
285 {
286 m_objectHashes[handle] = hashValue;
287 }
288
getObjectHashes() const289 const std::map<deUint64,std::size_t>& ResourceInterface::getObjectHashes () const
290 {
291 return m_objectHashes;
292 }
293
// Helper aggregating, for one pipeline pool entry size, how many pipelines of
// that size a single test needs concurrently (used by preparePipelinePoolSizes).
struct PipelinePoolSizeInfo
{
	deUint32	maxTestCount;	// pipelines of this size required at once within one test
	deUint32	size;			// pool entry size, as recorded in m_pipelineSizes
};
299
preparePipelinePoolSizes()300 void ResourceInterface::preparePipelinePoolSizes()
301 {
302 std::map<std::string, std::vector<PipelinePoolSizeInfo>> pipelineInfoPerTest;
303
304 // Step 1: collect information about all pipelines in each test, group by size
305 for (const auto& pipeline : m_pipelineInput.pipelines)
306 {
307 auto it = std::find_if(begin(m_pipelineSizes), end(m_pipelineSizes), vksc_server::PipelineIdentifierEqual(pipeline.id));
308 if (it == end(m_pipelineSizes))
309 TCU_THROW(InternalError, "VkPipelinePoolEntrySizeCreateInfo not created for pipelineIdentifier");
310
311 PipelinePoolSizeInfo ppsi
312 {
313 it->count,
314 it->size
315 };
316
317 for (const auto& test : pipeline.tests)
318 {
319 auto pit = pipelineInfoPerTest.find(test);
320 if (pit == end(pipelineInfoPerTest))
321 pit = pipelineInfoPerTest.insert({ test, std::vector<PipelinePoolSizeInfo>() }).first;
322 // group by the same sizes in a test
323 bool found = false;
324 for (size_t i = 0; i<pit->second.size(); ++i)
325 {
326 if (pit->second[i].size == ppsi.size)
327 {
328 pit->second[i].maxTestCount += ppsi.maxTestCount;
329 found = true;
330 break;
331 }
332 }
333 if(!found)
334 pit->second.push_back(ppsi);
335 }
336 }
337
338 // Step 2: choose pipeline pool sizes
339 std::vector<PipelinePoolSizeInfo> finalPoolSizes;
340 for (const auto& pInfo : pipelineInfoPerTest)
341 {
342 for (const auto& ppsi1 : pInfo.second)
343 {
344 auto it = std::find_if(begin(finalPoolSizes), end(finalPoolSizes), [&ppsi1](const PipelinePoolSizeInfo& x) { return (x.size == ppsi1.size); });
345 if (it != end(finalPoolSizes))
346 it->maxTestCount = de::max(it->maxTestCount, ppsi1.maxTestCount);
347 else
348 finalPoolSizes.push_back(ppsi1);
349 }
350 }
351
352 // Step 3: convert results to VkPipelinePoolSize
353 m_pipelinePoolSizes.clear();
354 for (const auto& ppsi : finalPoolSizes)
355 {
356 VkPipelinePoolSize poolSize =
357 {
358 VK_STRUCTURE_TYPE_PIPELINE_POOL_SIZE, // VkStructureType sType;
359 DE_NULL, // const void* pNext;
360 ppsi.size, // VkDeviceSize poolEntrySize;
361 ppsi.maxTestCount // deUint32 poolEntryCount;
362 };
363 m_pipelinePoolSizes.emplace_back(poolSize);
364 }
365 }
366
getPipelinePoolSizes() const367 std::vector<VkPipelinePoolSize> ResourceInterface::getPipelinePoolSizes () const
368 {
369 return m_pipelinePoolSizes;
370 }
371
fillPoolEntrySize(vk::VkPipelineOfflineCreateInfo & pipelineIdentifier) const372 void ResourceInterface::fillPoolEntrySize (vk::VkPipelineOfflineCreateInfo& pipelineIdentifier) const
373 {
374 auto it = std::find_if(begin(m_pipelineSizes), end(m_pipelineSizes), vksc_server::PipelineIdentifierEqual(pipelineIdentifier));
375 if( it == end(m_pipelineSizes) )
376 TCU_THROW(InternalError, "VkPipelinePoolEntrySizeCreateInfo not created for pipelineIdentifier");
377 pipelineIdentifier.poolEntrySize = it->size;
378 }
379
getNextCommandPoolSize()380 vksc_server::VulkanCommandMemoryConsumption ResourceInterface::getNextCommandPoolSize ()
381 {
382 if (m_commandPoolMemoryConsumption.empty())
383 return vksc_server::VulkanCommandMemoryConsumption();
384
385 vksc_server::VulkanCommandMemoryConsumption result = m_commandPoolMemoryConsumption[m_commandPoolIndex];
386 // modulo operation is just a safeguard against excessive number of requests
387 m_commandPoolIndex = (m_commandPoolIndex + 1) % deUint32(m_commandPoolMemoryConsumption.size());
388 return result;
389 }
390
getCacheDataSize() const391 std::size_t ResourceInterface::getCacheDataSize () const
392 {
393 return m_cacheData.size();
394 }
395
getCacheData() const396 const deUint8* ResourceInterface::getCacheData () const
397 {
398 return m_cacheData.data();
399 }
400
getPipelineCache(VkDevice device) const401 VkPipelineCache ResourceInterface::getPipelineCache(VkDevice device) const
402 {
403 auto pit = m_pipelineCache.find(device);
404 if (pit == end(m_pipelineCache))
405 TCU_THROW(InternalError, "m_pipelineCache not found for this device");
406 return pit->second.get()->get();
407 }
408
409 #endif // CTS_USES_VULKANSC
410
// Standard (non-SC-driver) implementation of the resource interface; simply
// forwards the test context to the base class.
ResourceInterfaceStandard::ResourceInterfaceStandard (tcu::TestContext& testCtx)
	: ResourceInterface(testCtx)
{
}
415
initDevice(DeviceInterface & deviceInterface,VkDevice device)416 void ResourceInterfaceStandard::initDevice (DeviceInterface& deviceInterface, VkDevice device)
417 {
418 // ResourceInterfaceStandard is a class for running VulkanSC tests on normal Vulkan driver.
419 // CTS does not have vkCreateShaderModule function defined for Vulkan SC driver, but we need this function
420 // So ResourceInterfaceStandard class must have its own vkCreateShaderModule function pointer
421 // Moreover - we create additional function pointers for vkCreateGraphicsPipelines, vkCreateComputePipelines, etc.
422 // BTW: although ResourceInterfaceStandard exists in normal Vulkan tests - only initDevice and buildProgram functions are used by Vulkan tests
423 // Other functions are called from within DeviceDriverSC which does not exist in these tests ( DeviceDriver class is used instead )
424 m_createShaderModuleFunc[device] = (CreateShaderModuleFunc) deviceInterface.getDeviceProcAddr(device, "vkCreateShaderModule");
425 m_createGraphicsPipelinesFunc[device] = (CreateGraphicsPipelinesFunc) deviceInterface.getDeviceProcAddr(device, "vkCreateGraphicsPipelines");
426 m_createComputePipelinesFunc[device] = (CreateComputePipelinesFunc) deviceInterface.getDeviceProcAddr(device, "vkCreateComputePipelines");
427 #ifdef CTS_USES_VULKANSC
428 if (m_testCtx.getCommandLine().isSubProcess())
429 {
430 if (m_cacheData.size() > 0)
431 {
432 VkPipelineCacheCreateInfo pCreateInfo =
433 {
434 VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO, // VkStructureType sType;
435 DE_NULL, // const void* pNext;
436 VK_PIPELINE_CACHE_CREATE_READ_ONLY_BIT |
437 VK_PIPELINE_CACHE_CREATE_USE_APPLICATION_STORAGE_BIT, // VkPipelineCacheCreateFlags flags;
438 m_cacheData.size(), // deUintptr initialDataSize;
439 m_cacheData.data() // const void* pInitialData;
440 };
441 m_pipelineCache[device] = de::SharedPtr<Move<VkPipelineCache>>(new Move<VkPipelineCache>(createPipelineCache(deviceInterface, device, &pCreateInfo, DE_NULL)));
442 }
443 }
444 #endif // CTS_USES_VULKANSC
445 }
446
deinitDevice(VkDevice device)447 void ResourceInterfaceStandard::deinitDevice (VkDevice device)
448 {
449 #ifdef CTS_USES_VULKANSC
450 if (m_testCtx.getCommandLine().isSubProcess())
451 {
452 m_pipelineCache.erase(device);
453 }
454 #else
455 DE_UNREF(device);
456 #endif // CTS_USES_VULKANSC
457 }
458
459 #ifdef CTS_USES_VULKANSC
460
registerDeviceFeatures(VkDevice device,const VkDeviceCreateInfo * pCreateInfo) const461 void ResourceInterfaceStandard::registerDeviceFeatures (VkDevice device,
462 const VkDeviceCreateInfo* pCreateInfo) const
463 {
464 VkPhysicalDeviceFeatures2* chainedFeatures = (VkPhysicalDeviceFeatures2*)findStructureInChain(pCreateInfo->pNext, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2);
465 if (chainedFeatures != NULL)
466 {
467 m_deviceFeatures[device] = writeJSON_pNextChain(pCreateInfo->pNext);
468 }
469 else
470 {
471 VkPhysicalDeviceFeatures2 deviceFeatures2 = initVulkanStructure();
472 if (pCreateInfo->pEnabledFeatures != NULL)
473 deviceFeatures2.features = *(pCreateInfo->pEnabledFeatures);
474
475 deviceFeatures2.pNext = (void *)pCreateInfo->pNext;
476 m_deviceFeatures[device] = writeJSON_VkPhysicalDeviceFeatures2(deviceFeatures2);
477 }
478
479 std::vector<std::string> extensions;
480 for (deUint32 i = 0; i < pCreateInfo->enabledExtensionCount; ++i)
481 extensions.push_back(pCreateInfo->ppEnabledExtensionNames[i]);
482 m_deviceExtensions[device] = extensions;
483 }
484
unregisterDeviceFeatures(VkDevice device) const485 void ResourceInterfaceStandard::unregisterDeviceFeatures(VkDevice device) const
486 {
487 m_deviceFeatures.erase(device);
488 m_deviceExtensions.erase(device);
489 }
490
createShaderModule(VkDevice device,const VkShaderModuleCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkShaderModule * pShaderModule,bool normalMode) const491 VkResult ResourceInterfaceStandard::createShaderModule (VkDevice device,
492 const VkShaderModuleCreateInfo* pCreateInfo,
493 const VkAllocationCallbacks* pAllocator,
494 VkShaderModule* pShaderModule,
495 bool normalMode) const
496 {
497 if (normalMode)
498 {
499 if (isVulkanSC())
500 {
501 *pShaderModule = VkShaderModule(++m_resourceCounter);
502 registerObjectHash(pShaderModule->getInternal(), calculateShaderModuleHash(*pCreateInfo, getObjectHashes()));
503 return VK_SUCCESS;
504 }
505 else
506 {
507 const auto it = m_createShaderModuleFunc.find(device);
508 if (it != end(m_createShaderModuleFunc))
509 {
510 VkResult result = it->second(device, pCreateInfo, pAllocator, pShaderModule);
511 registerObjectHash(pShaderModule->getInternal(), calculateShaderModuleHash(*pCreateInfo, getObjectHashes()));
512 return result;
513 }
514 TCU_THROW(InternalError, "vkCreateShaderModule not defined");
515 }
516 }
517
518 // main process: store VkShaderModuleCreateInfo in JSON format. Shaders will be sent later for m_pipelineCache creation ( and sent through file to another process )
519 *pShaderModule = VkShaderModule(++m_resourceCounter);
520 registerObjectHash(pShaderModule->getInternal(), calculateShaderModuleHash(*pCreateInfo, getObjectHashes()));
521 m_pipelineInput.shaderModules.insert({ *pShaderModule, writeJSON_VkShaderModuleCreateInfo(*pCreateInfo) });
522 return VK_SUCCESS;
523 }
524
makeGraphicsPipelineIdentifier(const std::string & testPath,const VkGraphicsPipelineCreateInfo & gpCI,const std::map<deUint64,std::size_t> & objectHashes)525 VkPipelineOfflineCreateInfo makeGraphicsPipelineIdentifier (const std::string& testPath, const VkGraphicsPipelineCreateInfo& gpCI, const std::map<deUint64, std::size_t>& objectHashes)
526 {
527 DE_UNREF(testPath);
528 VkPipelineOfflineCreateInfo pipelineID = resetPipelineOfflineCreateInfo();
529 std::size_t hashValue = calculateGraphicsPipelineHash(gpCI, objectHashes);
530 memcpy(pipelineID.pipelineIdentifier, &hashValue, sizeof(std::size_t));
531 return pipelineID;
532 }
533
makeComputePipelineIdentifier(const std::string & testPath,const VkComputePipelineCreateInfo & cpCI,const std::map<deUint64,std::size_t> & objectHashes)534 VkPipelineOfflineCreateInfo makeComputePipelineIdentifier (const std::string& testPath, const VkComputePipelineCreateInfo& cpCI, const std::map<deUint64, std::size_t>& objectHashes)
535 {
536 DE_UNREF(testPath);
537 VkPipelineOfflineCreateInfo pipelineID = resetPipelineOfflineCreateInfo();
538 std::size_t hashValue = calculateComputePipelineHash(cpCI, objectHashes);
539 memcpy(pipelineID.pipelineIdentifier, &hashValue, sizeof(std::size_t));
540 return pipelineID;
541 }
542
// Creates graphics pipelines. In normal mode (subprocess) the call is forwarded
// to the driver using OUR pipeline cache; in the main process the create infos
// are instead serialized to JSON (with unused state pointers pruned) and stored
// for later cache construction, and fake handles in pPipelines are tagged with
// their pipeline identifiers.
VkResult ResourceInterfaceStandard::createGraphicsPipelines (VkDevice device,
															 VkPipelineCache pipelineCache,
															 deUint32 createInfoCount,
															 const VkGraphicsPipelineCreateInfo* pCreateInfos,
															 const VkAllocationCallbacks* pAllocator,
															 VkPipeline* pPipelines,
															 bool normalMode) const
{
	DE_UNREF(pipelineCache);

	// build pipeline identifiers (if required), make a copy of pCreateInfos
	std::vector<VkPipelineOfflineCreateInfo>	pipelineIDs;
	std::vector<deUint8>						idInPNextChain;
	std::vector<VkGraphicsPipelineCreateInfo>	pCreateInfoCopies;

	for (deUint32 i = 0; i < createInfoCount; ++i)
	{
		pCreateInfoCopies.push_back(pCreateInfos[i]);

		// Check if test added pipeline identifier on its own
		VkPipelineOfflineCreateInfo* idInfo = (VkPipelineOfflineCreateInfo*)findStructureInChain(pCreateInfos[i].pNext, VK_STRUCTURE_TYPE_PIPELINE_OFFLINE_CREATE_INFO);
		if (idInfo == DE_NULL)
		{
			pipelineIDs.push_back(makeGraphicsPipelineIdentifier(m_currentTestPath, pCreateInfos[i], getObjectHashes()));
			idInPNextChain.push_back(0);
		}
		else
		{
			pipelineIDs.push_back(*idInfo);
			idInPNextChain.push_back(1);
		}

		if (normalMode)
			fillPoolEntrySize(pipelineIDs.back());
	}

	// reset not used pointers, so that JSON generation does not crash
	// (only needed in the main process, where the create infos get serialized)
	std::vector<VkPipelineViewportStateCreateInfo> viewportStateCopies (createInfoCount);
	if (!normalMode)
	{
		for (deUint32 i = 0; i < createInfoCount; ++i)
		{
			// Work out which state structs the pipeline actually uses; any
			// pointer not required below is nulled so the JSON writer does not
			// follow a stale/garbage pointer.
			bool vertexInputStateRequired		= false;
			bool inputAssemblyStateRequired		= false;
			bool tessellationStateRequired		= false;
			bool viewportStateRequired			= false;
			bool viewportStateViewportsRequired	= false;
			bool viewportStateScissorsRequired	= false;
			bool multiSampleStateRequired		= false;
			bool depthStencilStateRequired		= false;
			bool colorBlendStateRequired		= false;

			// Shader stages determine whether vertex input / input assembly /
			// tessellation state is meaningful.
			if (pCreateInfoCopies[i].pStages != DE_NULL)
			{
				for (deUint32 j = 0; j < pCreateInfoCopies[i].stageCount; ++j)
				{
					if (pCreateInfoCopies[i].pStages[j].stage == VK_SHADER_STAGE_VERTEX_BIT)
					{
						vertexInputStateRequired	= true;
						inputAssemblyStateRequired	= true;
					}
					if (pCreateInfoCopies[i].pStages[j].stage == VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT)
					{
						tessellationStateRequired	= true;
					}
				}
			}
			// Dynamic viewport/scissor states imply the viewport state struct
			// is read even when its arrays are unused.
			if (pCreateInfoCopies[i].pDynamicState != DE_NULL)
			{
				if (pCreateInfoCopies[i].pDynamicState->pDynamicStates != DE_NULL)
					for (deUint32 j = 0; j < pCreateInfoCopies[i].pDynamicState->dynamicStateCount; ++j)
					{
						if (pCreateInfoCopies[i].pDynamicState->pDynamicStates[j] == VK_DYNAMIC_STATE_VIEWPORT || pCreateInfoCopies[i].pDynamicState->pDynamicStates[j] == VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT)
						{
							viewportStateRequired			= true;
							viewportStateViewportsRequired	= true;
						}
						if (pCreateInfoCopies[i].pDynamicState->pDynamicStates[j] == VK_DYNAMIC_STATE_SCISSOR || pCreateInfoCopies[i].pDynamicState->pDynamicStates[j] == VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT)
						{
							viewportStateRequired			= true;
							viewportStateScissorsRequired	= true;
						}
						if (pCreateInfoCopies[i].pDynamicState->pDynamicStates[j] == VK_DYNAMIC_STATE_RASTERIZER_DISCARD_ENABLE_EXT)
							viewportStateRequired			= true;
					}
			}
			// With rasterization enabled all raster-dependent state is in use.
			if (pCreateInfoCopies[i].pRasterizationState != DE_NULL)
			{
				if (pCreateInfoCopies[i].pRasterizationState->rasterizerDiscardEnable == VK_FALSE)
				{
					viewportStateRequired			= true;
					viewportStateViewportsRequired	= true;
					viewportStateScissorsRequired	= true;
					multiSampleStateRequired		= true;
					depthStencilStateRequired		= true;
					colorBlendStateRequired			= true;
				}
			}
			if (pCreateInfoCopies[i].pVertexInputState != DE_NULL && !vertexInputStateRequired)
				pCreateInfoCopies[i].pVertexInputState = DE_NULL;
			if (pCreateInfoCopies[i].pInputAssemblyState != DE_NULL && !inputAssemblyStateRequired)
				pCreateInfoCopies[i].pInputAssemblyState = DE_NULL;
			if (pCreateInfoCopies[i].pTessellationState != DE_NULL && !tessellationStateRequired)
				pCreateInfoCopies[i].pTessellationState = DE_NULL;
			if (pCreateInfoCopies[i].pViewportState != DE_NULL)
			{
				if (viewportStateRequired)
				{
					// Viewport state is needed, but its viewport/scissor arrays
					// may not be; in that case point the copy at a patched
					// VkPipelineViewportStateCreateInfo with those arrays cleared.
					viewportStateCopies[i] = *(pCreateInfoCopies[i].pViewportState);
					bool exchangeVP = false;
					if (pCreateInfoCopies[i].pViewportState->pViewports != DE_NULL && !viewportStateViewportsRequired)
					{
						viewportStateCopies[i].pViewports		= DE_NULL;
						viewportStateCopies[i].viewportCount	= 0u;
						exchangeVP = true;
					}
					if (pCreateInfoCopies[i].pViewportState->pScissors != DE_NULL && !viewportStateScissorsRequired)
					{
						viewportStateCopies[i].pScissors	= DE_NULL;
						viewportStateCopies[i].scissorCount	= 0u;
						exchangeVP = true;
					}
					if (exchangeVP)
						pCreateInfoCopies[i].pViewportState = &(viewportStateCopies[i]);
				}
				else
					pCreateInfoCopies[i].pViewportState = DE_NULL;
			}
			if (pCreateInfoCopies[i].pMultisampleState != DE_NULL && !multiSampleStateRequired)
				pCreateInfoCopies[i].pMultisampleState = DE_NULL;
			if (pCreateInfoCopies[i].pDepthStencilState != DE_NULL && !depthStencilStateRequired)
				pCreateInfoCopies[i].pDepthStencilState = DE_NULL;
			if (pCreateInfoCopies[i].pColorBlendState != DE_NULL && !colorBlendStateRequired)
				pCreateInfoCopies[i].pColorBlendState = DE_NULL;
		}
	}

	// Include pipelineIdentifiers into pNext chain of pCreateInfoCopies - skip this operation if pipeline identifier was created inside test
	// (done after all push_backs so the &pipelineIDs[i] addresses are stable)
	for (deUint32 i = 0; i < createInfoCount; ++i)
	{
		if (idInPNextChain[i] == 0)
		{
			pipelineIDs[i].pNext		= pCreateInfoCopies[i].pNext;
			pCreateInfoCopies[i].pNext	= &pipelineIDs[i];
		}
	}

	// subprocess: load graphics pipelines from OUR m_pipelineCache cache
	if (normalMode)
	{
		const auto it = m_createGraphicsPipelinesFunc.find(device);
		if (it != end(m_createGraphicsPipelinesFunc))
		{
			auto pit = m_pipelineCache.find(device);
			if ( pit != end(m_pipelineCache) )
			{
				VkPipelineCache pCache = pit->second->get();
				return it->second(device, pCache, createInfoCount, pCreateInfoCopies.data(), pAllocator, pPipelines);
			}
			TCU_THROW(InternalError, "m_pipelineCache not initialized for this device");
		}
		TCU_THROW(InternalError, "vkCreateGraphicsPipelines not defined");
	}

	// main process: store pipelines in JSON format. Pipelines will be sent later for m_pipelineCache creation ( and sent through file to another process )
	for (deUint32 i = 0; i < createInfoCount; ++i)
	{
		m_pipelineIdentifiers.insert({ pPipelines[i], pipelineIDs[i] });

		auto it = std::find_if(begin(m_pipelineInput.pipelines), end(m_pipelineInput.pipelines), vksc_server::PipelineIdentifierEqual(pipelineIDs[i]));
		// identifier is stored detached from the chain it was spliced into
		pipelineIDs[i].pNext = DE_NULL;
		if (it == end(m_pipelineInput.pipelines))
		{
			const auto& featIt = m_deviceFeatures.find(device);
			if(featIt == end(m_deviceFeatures))
				TCU_THROW(InternalError, "Can't find device features for this pipeline");
			const auto& extIt = m_deviceExtensions.find(device);
			if (extIt == end(m_deviceExtensions))
				TCU_THROW(InternalError, "Can't find device extensions for this pipeline");

			m_pipelineInput.pipelines.push_back(vksc_server::VulkanJsonPipelineDescription(
				pipelineIDs[i],
				writeJSON_VkGraphicsPipelineCreateInfo(pCreateInfoCopies[i]),
				featIt->second,
				extIt->second,
				m_currentTestPath));
		}
		else
			// already known pipeline: just record that this test uses it too
			it->add(m_currentTestPath);
	}
	return VK_SUCCESS;
}
735
// Creates compute pipelines. Mirrors createGraphicsPipelines: in normal mode
// (subprocess) forwards to the driver using OUR pipeline cache; in the main
// process serializes the create infos to JSON and records which tests use
// which pipeline identifier. No state pruning is needed for compute pipelines.
VkResult ResourceInterfaceStandard::createComputePipelines (VkDevice device,
															VkPipelineCache pipelineCache,
															deUint32 createInfoCount,
															const VkComputePipelineCreateInfo* pCreateInfos,
															const VkAllocationCallbacks* pAllocator,
															VkPipeline* pPipelines,
															bool normalMode) const
{
	DE_UNREF(pipelineCache);

	// build pipeline identifiers (if required), make a copy of pCreateInfos
	std::vector<VkPipelineOfflineCreateInfo>	pipelineIDs;
	std::vector<deUint8>						idInPNextChain;
	std::vector<VkComputePipelineCreateInfo>	pCreateInfoCopies;

	for (deUint32 i = 0; i < createInfoCount; ++i)
	{
		pCreateInfoCopies.push_back(pCreateInfos[i]);

		// Check if test added pipeline identifier on its own
		VkPipelineOfflineCreateInfo* idInfo = (VkPipelineOfflineCreateInfo*)findStructureInChain(pCreateInfos[i].pNext, VK_STRUCTURE_TYPE_PIPELINE_OFFLINE_CREATE_INFO);
		if (idInfo == DE_NULL)
		{
			pipelineIDs.push_back(makeComputePipelineIdentifier(m_currentTestPath, pCreateInfos[i], getObjectHashes()));
			idInPNextChain.push_back(0);
		}
		else
		{
			pipelineIDs.push_back(*idInfo);
			idInPNextChain.push_back(1);
		}

		if (normalMode)
			fillPoolEntrySize(pipelineIDs.back());
	}

	// Include pipelineIdentifiers into pNext chain of pCreateInfoCopies - skip this operation if pipeline identifier was created inside test
	// (done after all push_backs so the &pipelineIDs[i] addresses are stable)
	for (deUint32 i = 0; i < createInfoCount; ++i)
	{
		if (idInPNextChain[i] == 0)
		{
			pipelineIDs[i].pNext		= pCreateInfoCopies[i].pNext;
			pCreateInfoCopies[i].pNext	= &pipelineIDs[i];
		}
	}

	// subprocess: load compute pipelines from OUR pipeline cache
	if (normalMode)
	{
		const auto it = m_createComputePipelinesFunc.find(device);
		if (it != end(m_createComputePipelinesFunc))
		{
			auto pit = m_pipelineCache.find(device);
			if ( pit != end(m_pipelineCache) )
			{
				VkPipelineCache pCache = pit->second->get();
				return it->second(device, pCache, createInfoCount, pCreateInfoCopies.data(), pAllocator, pPipelines);
			}
			TCU_THROW(InternalError, "m_pipelineCache not initialized for this device");
		}
		TCU_THROW(InternalError, "vkCreateComputePipelines not defined");
	}

	// main process: store pipelines in JSON format. Pipelines will be sent later for m_pipelineCache creation ( and sent through file to another process )
	for (deUint32 i = 0; i < createInfoCount; ++i)
	{
		m_pipelineIdentifiers.insert({ pPipelines[i], pipelineIDs[i] });

		auto it = std::find_if(begin(m_pipelineInput.pipelines), end(m_pipelineInput.pipelines), vksc_server::PipelineIdentifierEqual(pipelineIDs[i]));
		// identifier is stored detached from the chain it was spliced into
		pipelineIDs[i].pNext = DE_NULL;
		if (it == end(m_pipelineInput.pipelines))
		{
			const auto& featIt = m_deviceFeatures.find(device);
			if (featIt == end(m_deviceFeatures))
				TCU_THROW(InternalError, "Can't find device features for this pipeline");
			const auto& extIt = m_deviceExtensions.find(device);
			if (extIt == end(m_deviceExtensions))
				TCU_THROW(InternalError, "Can't find device extensions for this pipeline");

			m_pipelineInput.pipelines.push_back(vksc_server::VulkanJsonPipelineDescription(
				pipelineIDs[i],
				writeJSON_VkComputePipelineCreateInfo(pCreateInfoCopies[i]),
				featIt->second,
				extIt->second,
				m_currentTestPath));
		}
		else
			// already known pipeline: just record that this test uses it too
			it->add(m_currentTestPath);
	}
	return VK_SUCCESS;
}
827
destroyPipeline(VkDevice device,VkPipeline pipeline,const VkAllocationCallbacks * pAllocator) const828 void ResourceInterfaceStandard::destroyPipeline (VkDevice device,
829 VkPipeline pipeline,
830 const VkAllocationCallbacks* pAllocator) const
831 {
832 DE_UNREF(device);
833 DE_UNREF(pAllocator);
834
835 auto it = m_pipelineIdentifiers.find(pipeline);
836 if(it==end(m_pipelineIdentifiers))
837 TCU_THROW(InternalError, "Can't find pipeline");
838
839 auto pit = std::find_if(begin(m_pipelineInput.pipelines), end(m_pipelineInput.pipelines), vksc_server::PipelineIdentifierEqual(it->second));
840 if (pit == end(m_pipelineInput.pipelines))
841 TCU_THROW(InternalError, "Can't find pipeline identifier");
842 pit->remove();
843 }
844
createRenderPass(VkDevice device,const VkRenderPassCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkRenderPass * pRenderPass) const845 void ResourceInterfaceStandard::createRenderPass (VkDevice device,
846 const VkRenderPassCreateInfo* pCreateInfo,
847 const VkAllocationCallbacks* pAllocator,
848 VkRenderPass* pRenderPass) const
849 {
850 DE_UNREF(device);
851 DE_UNREF(pAllocator);
852 m_pipelineInput.renderPasses.insert({ *pRenderPass, writeJSON_VkRenderPassCreateInfo(*pCreateInfo) });
853 }
854
createRenderPass2(VkDevice device,const VkRenderPassCreateInfo2 * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkRenderPass * pRenderPass) const855 void ResourceInterfaceStandard::createRenderPass2 (VkDevice device,
856 const VkRenderPassCreateInfo2* pCreateInfo,
857 const VkAllocationCallbacks* pAllocator,
858 VkRenderPass* pRenderPass) const
859 {
860 DE_UNREF(device);
861 DE_UNREF(pAllocator);
862 m_pipelineInput.renderPasses.insert({ *pRenderPass, writeJSON_VkRenderPassCreateInfo2(*pCreateInfo) });
863 }
864
createPipelineLayout(VkDevice device,const VkPipelineLayoutCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkPipelineLayout * pPipelineLayout) const865 void ResourceInterfaceStandard::createPipelineLayout (VkDevice device,
866 const VkPipelineLayoutCreateInfo* pCreateInfo,
867 const VkAllocationCallbacks* pAllocator,
868 VkPipelineLayout* pPipelineLayout) const
869 {
870 DE_UNREF(device);
871 DE_UNREF(pAllocator);
872 m_pipelineInput.pipelineLayouts.insert({*pPipelineLayout, writeJSON_VkPipelineLayoutCreateInfo(*pCreateInfo) });
873 }
874
createDescriptorSetLayout(VkDevice device,const VkDescriptorSetLayoutCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkDescriptorSetLayout * pSetLayout) const875 void ResourceInterfaceStandard::createDescriptorSetLayout (VkDevice device,
876 const VkDescriptorSetLayoutCreateInfo* pCreateInfo,
877 const VkAllocationCallbacks* pAllocator,
878 VkDescriptorSetLayout* pSetLayout) const
879 {
880 DE_UNREF(device);
881 DE_UNREF(pAllocator);
882 m_pipelineInput.descriptorSetLayouts.insert({ *pSetLayout, writeJSON_VkDescriptorSetLayoutCreateInfo(*pCreateInfo) });
883 }
884
createSampler(VkDevice device,const VkSamplerCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkSampler * pSampler) const885 void ResourceInterfaceStandard::createSampler (VkDevice device,
886 const VkSamplerCreateInfo* pCreateInfo,
887 const VkAllocationCallbacks* pAllocator,
888 VkSampler* pSampler) const
889 {
890 DE_UNREF(device);
891 DE_UNREF(pAllocator);
892 m_pipelineInput.samplers.insert({ *pSampler, writeJSON_VkSamplerCreateInfo(*pCreateInfo) });
893 }
894
createSamplerYcbcrConversion(VkDevice device,const VkSamplerYcbcrConversionCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkSamplerYcbcrConversion * pYcbcrConversion) const895 void ResourceInterfaceStandard::createSamplerYcbcrConversion (VkDevice device,
896 const VkSamplerYcbcrConversionCreateInfo* pCreateInfo,
897 const VkAllocationCallbacks* pAllocator,
898 VkSamplerYcbcrConversion* pYcbcrConversion) const
899 {
900 DE_UNREF(device);
901 DE_UNREF(pAllocator);
902 m_pipelineInput.samplerYcbcrConversions.insert({ *pYcbcrConversion, writeJSON_VkSamplerYcbcrConversionCreateInfo(*pCreateInfo) });
903 }
904
createCommandPool(VkDevice device,const VkCommandPoolCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkCommandPool * pCommandPool) const905 void ResourceInterfaceStandard::createCommandPool (VkDevice device,
906 const VkCommandPoolCreateInfo* pCreateInfo,
907 const VkAllocationCallbacks* pAllocator,
908 VkCommandPool* pCommandPool) const
909 {
910 DE_UNREF(device);
911 DE_UNREF(pCreateInfo);
912 DE_UNREF(pAllocator);
913 m_commandPoolMemoryConsumption.push_back(vksc_server::VulkanCommandMemoryConsumption(pCommandPool->getInternal()));
914 }
915
allocateCommandBuffers(VkDevice device,const VkCommandBufferAllocateInfo * pAllocateInfo,VkCommandBuffer * pCommandBuffers) const916 void ResourceInterfaceStandard::allocateCommandBuffers (VkDevice device,
917 const VkCommandBufferAllocateInfo* pAllocateInfo,
918 VkCommandBuffer* pCommandBuffers) const
919 {
920 DE_UNREF(device);
921 for (deUint32 i = 0; i < pAllocateInfo->commandBufferCount; ++i)
922 {
923 m_commandBufferMemoryConsumption.insert({ pCommandBuffers[i], vksc_server::VulkanCommandMemoryConsumption(pAllocateInfo->commandPool.getInternal()) });
924 }
925 }
926
increaseCommandBufferSize(VkCommandBuffer commandBuffer,VkDeviceSize commandSize) const927 void ResourceInterfaceStandard::increaseCommandBufferSize (VkCommandBuffer commandBuffer,
928 VkDeviceSize commandSize) const
929 {
930 auto it = m_commandBufferMemoryConsumption.find(commandBuffer);
931 if (it == end(m_commandBufferMemoryConsumption))
932 TCU_THROW(InternalError, "Unregistered command buffer");
933
934 it->second.updateValues(commandSize, commandSize, commandSize);
935 }
936
resetCommandPool(VkDevice device,VkCommandPool commandPool,VkCommandPoolResetFlags flags) const937 void ResourceInterfaceStandard::resetCommandPool (VkDevice device,
938 VkCommandPool commandPool,
939 VkCommandPoolResetFlags flags) const
940 {
941 DE_UNREF(device);
942 DE_UNREF(flags);
943
944 for (auto& memC : m_commandBufferMemoryConsumption)
945 {
946 if (memC.second.commandPool == commandPool.getInternal())
947 memC.second.resetValues();
948 }
949 }
950
importPipelineCacheData(const PlatformInterface & vkp,VkInstance instance,const InstanceInterface & vki,VkPhysicalDevice physicalDevice,deUint32 queueIndex)951 void ResourceInterfaceStandard::importPipelineCacheData (const PlatformInterface& vkp,
952 VkInstance instance,
953 const InstanceInterface& vki,
954 VkPhysicalDevice physicalDevice,
955 deUint32 queueIndex)
956 {
957 if(!std::string(m_testCtx.getCommandLine().getPipelineCompilerPath()).empty())
958 {
959 m_cacheData = vksc_server::buildOfflinePipelineCache(m_pipelineInput,
960 std::string( m_testCtx.getCommandLine().getPipelineCompilerPath()),
961 std::string( m_testCtx.getCommandLine().getPipelineCompilerDataDir()),
962 std::string( m_testCtx.getCommandLine().getPipelineCompilerArgs()),
963 std::string( m_testCtx.getCommandLine().getPipelineCompilerOutputFile()),
964 std::string( m_testCtx.getCommandLine().getPipelineCompilerLogFile()),
965 std::string( m_testCtx.getCommandLine().getPipelineCompilerFilePrefix()) );
966 }
967 else
968 {
969 m_cacheData = vksc_server::buildPipelineCache(m_pipelineInput, vkp, instance, vki, physicalDevice, queueIndex);
970 }
971
972 VkPhysicalDeviceVulkanSC10Properties vulkanSC10Properties = initVulkanStructure();
973 VkPhysicalDeviceProperties2 deviceProperties2 = initVulkanStructure(&vulkanSC10Properties);
974 vki.getPhysicalDeviceProperties2(physicalDevice, &deviceProperties2);
975
976 m_pipelineSizes = vksc_server::extractSizesFromPipelineCache( m_pipelineInput, m_cacheData, deUint32(m_testCtx.getCommandLine().getPipelineDefaultSize()), vulkanSC10Properties.recyclePipelineMemory == VK_TRUE);
977 preparePipelinePoolSizes();
978 }
979
resetObjects()980 void ResourceInterfaceStandard::resetObjects ()
981 {
982 m_pipelineInput = {};
983 m_objectHashes.clear();
984 m_commandPoolMemoryConsumption.clear();
985 m_commandPoolIndex = 0u;
986 m_commandBufferMemoryConsumption.clear();
987 m_resourceCounter = 0u;
988 m_statCurrent = resetDeviceObjectReservationCreateInfo();
989 m_statMax = resetDeviceObjectReservationCreateInfo();
990 // pipelineCacheRequestCount does not contain one instance of createPipelineCache call that happens only in subprocess
991 m_statCurrent.pipelineCacheRequestCount = 1u;
992 m_statMax.pipelineCacheRequestCount = 1u;
993 m_cacheData.clear();
994 m_pipelineIdentifiers.clear();
995 m_pipelineSizes.clear();
996 m_pipelinePoolSizes.clear();
997 runGarbageCollection();
998 }
999
resetPipelineCaches()1000 void ResourceInterfaceStandard::resetPipelineCaches ()
1001 {
1002 if (m_testCtx.getCommandLine().isSubProcess())
1003 {
1004 m_pipelineCache.clear();
1005 }
1006 }
1007
1008 #endif // CTS_USES_VULKANSC
1009
// Compiles a GLSL program locally; the program identifier is not needed here.
vk::ProgramBinary* ResourceInterfaceStandard::compileProgram (const vk::ProgramIdentifier& progId,
															  const vk::GlslSource& source,
															  glu::ShaderProgramInfo* buildInfo,
															  const tcu::CommandLine& commandLine)
{
	DE_UNREF(progId);

	return vk::buildProgram(source, buildInfo, commandLine);
}
1018
// Compiles an HLSL program locally; the program identifier is not needed here.
vk::ProgramBinary* ResourceInterfaceStandard::compileProgram (const vk::ProgramIdentifier& progId,
															  const vk::HlslSource& source,
															  glu::ShaderProgramInfo* buildInfo,
															  const tcu::CommandLine& commandLine)
{
	DE_UNREF(progId);

	return vk::buildProgram(source, buildInfo, commandLine);
}
1027
// Assembles a SPIR-V program locally; the program identifier is not needed here.
vk::ProgramBinary* ResourceInterfaceStandard::compileProgram (const vk::ProgramIdentifier& progId,
															  const vk::SpirVAsmSource& source,
															  vk::SpirVProgramInfo* buildInfo,
															  const tcu::CommandLine& commandLine)
{
	DE_UNREF(progId);

	return vk::assembleProgram(source, buildInfo, commandLine);
}
1036
1037 #ifdef CTS_USES_VULKANSC
1038
ResourceInterfaceVKSC(tcu::TestContext & testCtx)1039 ResourceInterfaceVKSC::ResourceInterfaceVKSC (tcu::TestContext& testCtx)
1040 : ResourceInterfaceStandard(testCtx)
1041 {
1042 m_address = std::string(testCtx.getCommandLine().getServerAddress());
1043 }
1044
getServer()1045 vksc_server::Server* ResourceInterfaceVKSC::getServer ()
1046 {
1047 if (!m_server)
1048 {
1049 m_server = std::make_shared<vksc_server::Server>(m_address);
1050 }
1051 return m_server.get();
1052 }
1053
noServer() const1054 bool ResourceInterfaceVKSC::noServer () const
1055 {
1056 return m_address.empty();
1057 }
1058
// Compiles GLSL through the vksc server; falls back to local compilation when
// no server is configured.
vk::ProgramBinary* ResourceInterfaceVKSC::compileProgram (const vk::ProgramIdentifier& progId,
														  const vk::GlslSource& source,
														  glu::ShaderProgramInfo* buildInfo,
														  const tcu::CommandLine& commandLine)
{
	if (noServer())
		return ResourceInterfaceStandard::compileProgram(progId, source, buildInfo, commandLine);

	DE_UNREF(progId);
	DE_UNREF(buildInfo);

	// Ship the source and the initial command line to the server for compilation.
	vksc_server::CompileShaderRequest	request;
	request.source.active	= "glsl";
	request.source.glsl		= source;
	request.commandLine		= commandLine.getInitialCmdLine();

	vksc_server::CompileShaderResponse	response;
	getServer()->SendRequest(request, response);

	// Wrap the returned SPIR-V blob; ProgramBinary copies the data.
	return new ProgramBinary(PROGRAM_FORMAT_SPIRV, response.binary.size(), response.binary.data());
}
1078
// Compiles HLSL through the vksc server; falls back to local compilation when
// no server is configured.
vk::ProgramBinary* ResourceInterfaceVKSC::compileProgram (const vk::ProgramIdentifier& progId,
														  const vk::HlslSource& source,
														  glu::ShaderProgramInfo* buildInfo,
														  const tcu::CommandLine& commandLine)
{
	if (noServer())
		return ResourceInterfaceStandard::compileProgram(progId, source, buildInfo, commandLine);

	DE_UNREF(progId);
	DE_UNREF(buildInfo);

	// Ship the source and the initial command line to the server for compilation.
	vksc_server::CompileShaderRequest	request;
	request.source.active	= "hlsl";
	request.source.hlsl		= source;
	request.commandLine		= commandLine.getInitialCmdLine();

	vksc_server::CompileShaderResponse	response;
	getServer()->SendRequest(request, response);

	// Wrap the returned SPIR-V blob; ProgramBinary copies the data.
	return new ProgramBinary(PROGRAM_FORMAT_SPIRV, response.binary.size(), response.binary.data());
}
1098
// Assembles SPIR-V through the vksc server; falls back to local assembly when
// no server is configured.
vk::ProgramBinary* ResourceInterfaceVKSC::compileProgram (const vk::ProgramIdentifier& progId,
														  const vk::SpirVAsmSource& source,
														  vk::SpirVProgramInfo* buildInfo,
														  const tcu::CommandLine& commandLine)
{
	if (noServer())
		return ResourceInterfaceStandard::compileProgram(progId, source, buildInfo, commandLine);

	DE_UNREF(progId);
	DE_UNREF(buildInfo);

	// Ship the source and the initial command line to the server for assembly.
	vksc_server::CompileShaderRequest	request;
	request.source.active	= "spirv";
	request.source.spirv	= source;
	request.commandLine		= commandLine.getInitialCmdLine();

	vksc_server::CompileShaderResponse	response;
	getServer()->SendRequest(request, response);

	// Wrap the returned SPIR-V blob; ProgramBinary copies the data.
	return new ProgramBinary(PROGRAM_FORMAT_SPIRV, response.binary.size(), response.binary.data());
}
1118
createShaderModule(VkDevice device,const VkShaderModuleCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkShaderModule * pShaderModule,bool normalMode) const1119 VkResult ResourceInterfaceVKSC::createShaderModule (VkDevice device,
1120 const VkShaderModuleCreateInfo* pCreateInfo,
1121 const VkAllocationCallbacks* pAllocator,
1122 VkShaderModule* pShaderModule,
1123 bool normalMode) const
1124 {
1125 if (noServer() || !normalMode || !isVulkanSC())
1126 return ResourceInterfaceStandard::createShaderModule(device, pCreateInfo, pAllocator, pShaderModule, normalMode);
1127
1128 // We will reach this place only in one case:
1129 // - server exists
1130 // - subprocess asks for creation of VkShaderModule which will be later ignored, because it will receive the whole pipeline from server
1131 // ( Are there any tests which receive VkShaderModule and do not use it in any pipeline ? )
1132 *pShaderModule = VkShaderModule(++m_resourceCounter);
1133 registerObjectHash(pShaderModule->getInternal(), calculateShaderModuleHash(*pCreateInfo, getObjectHashes()));
1134 return VK_SUCCESS;
1135 }
1136
1137
importPipelineCacheData(const PlatformInterface & vkp,VkInstance instance,const InstanceInterface & vki,VkPhysicalDevice physicalDevice,deUint32 queueIndex)1138 void ResourceInterfaceVKSC::importPipelineCacheData (const PlatformInterface& vkp,
1139 VkInstance instance,
1140 const InstanceInterface& vki,
1141 VkPhysicalDevice physicalDevice,
1142 deUint32 queueIndex)
1143 {
1144 if (noServer())
1145 {
1146 ResourceInterfaceStandard::importPipelineCacheData(vkp, instance, vki, physicalDevice, queueIndex);
1147 return;
1148 }
1149
1150 vksc_server::CreateCacheRequest request;
1151 request.input = m_pipelineInput;
1152 std::vector<int> caseFraction = m_testCtx.getCommandLine().getCaseFraction();
1153 request.caseFraction = caseFraction.empty() ? -1 : caseFraction[0];
1154
1155 vksc_server::CreateCacheResponse response;
1156 getServer()->SendRequest(request, response);
1157
1158 if (response.status)
1159 {
1160 m_cacheData = std::move(response.binary);
1161
1162 VkPhysicalDeviceVulkanSC10Properties vulkanSC10Properties = initVulkanStructure();
1163 VkPhysicalDeviceProperties2 deviceProperties2 = initVulkanStructure(&vulkanSC10Properties);
1164 vki.getPhysicalDeviceProperties2(physicalDevice, &deviceProperties2);
1165
1166 m_pipelineSizes = vksc_server::extractSizesFromPipelineCache( m_pipelineInput, m_cacheData, deUint32(m_testCtx.getCommandLine().getPipelineDefaultSize()), vulkanSC10Properties.recyclePipelineMemory == VK_TRUE);
1167 preparePipelinePoolSizes();
1168 }
1169 else { TCU_THROW(InternalError, "Server did not return pipeline cache data when requested (check server log for details)"); }
1170 }
1171
MultithreadedDestroyGuard(de::SharedPtr<vk::ResourceInterface> resourceInterface)1172 MultithreadedDestroyGuard::MultithreadedDestroyGuard (de::SharedPtr<vk::ResourceInterface> resourceInterface)
1173 : m_resourceInterface{ resourceInterface }
1174 {
1175 if (m_resourceInterface.get() != DE_NULL)
1176 m_resourceInterface->setHandleDestroy(false);
1177 }
1178
~MultithreadedDestroyGuard()1179 MultithreadedDestroyGuard::~MultithreadedDestroyGuard ()
1180 {
1181 if (m_resourceInterface.get() != DE_NULL)
1182 m_resourceInterface->setHandleDestroy(true);
1183 }
1184
1185 #endif // CTS_USES_VULKANSC
1186
1187
1188 } // namespace vk
1189