// Copyright 2018 The SwiftShader Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//    http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "VkBuffer.hpp"

#include "VkConfig.hpp"
#include "VkDeviceMemory.hpp"

#include <cstring>
#include <limits>

namespace vk {

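// A Buffer owns no device memory of its own: it records the creation parameters
// and is later bound to a range of a DeviceMemory allocation. 'mem' points to
// host memory reserved via ComputeRequiredAllocationSize(), used here to keep a
// copy of the queue family index array when the sharing mode is concurrent.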
Buffer::Buffer(const VkBufferCreateInfo *pCreateInfo, void *mem)
    : flags(pCreateInfo->flags)
    , size(pCreateInfo->size)
    , usage(pCreateInfo->usage)
    , sharingMode(pCreateInfo->sharingMode)
{
	if(pCreateInfo->sharingMode == VK_SHARING_MODE_CONCURRENT)
	{
		queueFamilyIndexCount = pCreateInfo->queueFamilyIndexCount;
		queueFamilyIndices = reinterpret_cast<uint32_t *>(mem);
		memcpy(queueFamilyIndices, pCreateInfo->pQueueFamilyIndices, sizeof(uint32_t) * queueFamilyIndexCount);
	}

	const auto *nextInfo = reinterpret_cast<const VkBaseInStructure *>(pCreateInfo->pNext);
	for(; nextInfo != nullptr; nextInfo = nextInfo->pNext)
	{
		if(nextInfo->sType == VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO)
		{
			const auto *externalInfo = reinterpret_cast<const VkExternalMemoryBufferCreateInfo *>(nextInfo);
			supportedExternalMemoryHandleTypes = externalInfo->handleTypes;
		}
	}
}

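// Releases the host memory that backs the queue family index array, if any.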
void Buffer::destroy(const VkAllocationCallbacks *pAllocator)
{
	vk::freeHostMemory(queueFamilyIndices, pAllocator);
}

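// Returns the amount of extra host memory the Buffer object needs beyond the
// object itself: space for a copy of the queue family indices when the buffer
// uses VK_SHARING_MODE_CONCURRENT, and zero otherwise.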
size_t Buffer::ComputeRequiredAllocationSize(const VkBufferCreateInfo *pCreateInfo)
{
	return (pCreateInfo->sharingMode == VK_SHARING_MODE_CONCURRENT) ? sizeof(uint32_t) * pCreateInfo->queueFamilyIndexCount : 0;
}

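// Computes the memory requirements for a buffer of the given size and usage.
// The required alignment depends on how the buffer may be used: texel, storage
// and uniform buffers each have their own minimum offset alignment.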
const VkMemoryRequirements Buffer::GetMemoryRequirements(VkDeviceSize size, VkBufferUsageFlags usage)
{
	VkMemoryRequirements memoryRequirements = {};

	memoryRequirements.size = size;

	if(usage & (VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT | VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT))
	{
		memoryRequirements.alignment = vk::MIN_TEXEL_BUFFER_OFFSET_ALIGNMENT;
	}
	else if(usage & VK_BUFFER_USAGE_STORAGE_BUFFER_BIT)
	{
		memoryRequirements.alignment = vk::MIN_STORAGE_BUFFER_OFFSET_ALIGNMENT;
	}
	else if(usage & VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT)
	{
		memoryRequirements.alignment = vk::MIN_UNIFORM_BUFFER_OFFSET_ALIGNMENT;
	}
	else
	{
		memoryRequirements.alignment = REQUIRED_MEMORY_ALIGNMENT;
	}

	memoryRequirements.memoryTypeBits = vk::MEMORY_TYPE_GENERIC_BIT;

	return memoryRequirements;
}

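// Returns the memory requirements for this buffer, based on its own size and usage.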
const VkMemoryRequirements Buffer::getMemoryRequirements() const
{
	return GetMemoryRequirements(size, usage);
}

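// Checks whether this buffer can be bound to the given memory, based on the
// external memory handle types declared when the buffer was created.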
bool Buffer::canBindToMemory(DeviceMemory *pDeviceMemory) const
{
	return pDeviceMemory->checkExternalMemoryHandleType(supportedExternalMemoryHandleTypes);
}

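// Binds the buffer to device memory by caching a pointer to its backing storage
// at the given offset.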
void Buffer::bind(DeviceMemory *pDeviceMemory, VkDeviceSize pMemoryOffset)
{
	memory = pDeviceMemory->getOffsetPointer(pMemoryOffset);
}

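// Copies pSize bytes from host memory into the buffer, starting at pOffset.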
void Buffer::copyFrom(const void *srcMemory, VkDeviceSize pSize, VkDeviceSize pOffset)
{
	ASSERT((pSize + pOffset) <= size);

	memcpy(getOffsetPointer(pOffset), srcMemory, pSize);
}

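// Copies pSize bytes out of the buffer, starting at pOffset, into host memory.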
void Buffer::copyTo(void *dstMemory, VkDeviceSize pSize, VkDeviceSize pOffset) const
{
	ASSERT((pSize + pOffset) <= size);

	memcpy(dstMemory, getOffsetPointer(pOffset), pSize);
}

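// Copies a region of this buffer into another buffer, as described by the
// source offset, destination offset, and size of the copy region.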
void Buffer::copyTo(Buffer *dstBuffer, const VkBufferCopy2KHR &pRegion) const
{
	copyTo(dstBuffer->getOffsetPointer(pRegion.dstOffset), pRegion.size, pRegion.srcOffset);
}

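// Fills a range of the buffer with copies of a 32-bit value, matching the
// semantics of vkCmdFillBuffer. A fillSize of VK_WHOLE_SIZE fills from dstOffset
// to the end of the buffer.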
void Buffer::fill(VkDeviceSize dstOffset, VkDeviceSize fillSize, uint32_t data)
{
	size_t bytes = (fillSize == VK_WHOLE_SIZE) ? (size - dstOffset) : fillSize;

	ASSERT((bytes + dstOffset) <= size);

	uint32_t *memToWrite = static_cast<uint32_t *>(getOffsetPointer(dstOffset));

	// Vulkan 1.1 spec: "If VK_WHOLE_SIZE is used and the remaining size of the buffer is
	// not a multiple of 4, then the nearest smaller multiple is used."
	for(; bytes >= 4; bytes -= 4, memToWrite++)
	{
		*memToWrite = data;
	}
}

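// Writes dataSize bytes of host data into the buffer at dstOffset, matching the
// semantics of vkCmdUpdateBuffer.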
void Buffer::update(VkDeviceSize dstOffset, VkDeviceSize dataSize, const void *pData)
{
	ASSERT((dataSize + dstOffset) <= size);

	memcpy(getOffsetPointer(dstOffset), pData, dataSize);
}

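// Returns a host pointer to the buffer's contents at the given byte offset.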
void *Buffer::getOffsetPointer(VkDeviceSize offset) const
{
	return reinterpret_cast<uint8_t *>(memory) + offset;
}

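// Returns a host pointer past the end of the buffer's data.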
uint8_t *Buffer::end() const
{
	return reinterpret_cast<uint8_t *>(getOffsetPointer(size + 1));
}

}  // namespace vk