1 // Copyright 2018 The SwiftShader Authors. All Rights Reserved.
2 //
3 // Licensed under the Apache License, Version 2.0 (the "License");
4 // you may not use this file except in compliance with the License.
5 // You may obtain a copy of the License at
6 //
7 // http://www.apache.org/licenses/LICENSE-2.0
8 //
9 // Unless required by applicable law or agreed to in writing, software
10 // distributed under the License is distributed on an "AS IS" BASIS,
11 // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 // See the License for the specific language governing permissions and
13 // limitations under the License.
14
15 #include "VkBuffer.hpp"
16
17 #include "VkConfig.hpp"
18 #include "VkDeviceMemory.hpp"
19
20 #include <cstring>
21 #include <limits>
22
23 namespace vk {
24
Buffer(const VkBufferCreateInfo * pCreateInfo,void * mem)25 Buffer::Buffer(const VkBufferCreateInfo *pCreateInfo, void *mem)
26 : flags(pCreateInfo->flags)
27 , size(pCreateInfo->size)
28 , usage(pCreateInfo->usage)
29 , sharingMode(pCreateInfo->sharingMode)
30 {
31 if(pCreateInfo->sharingMode == VK_SHARING_MODE_CONCURRENT)
32 {
33 queueFamilyIndexCount = pCreateInfo->queueFamilyIndexCount;
34 queueFamilyIndices = reinterpret_cast<uint32_t *>(mem);
35 memcpy(queueFamilyIndices, pCreateInfo->pQueueFamilyIndices, sizeof(uint32_t) * queueFamilyIndexCount);
36 }
37
38 const auto *nextInfo = reinterpret_cast<const VkBaseInStructure *>(pCreateInfo->pNext);
39 for(; nextInfo != nullptr; nextInfo = nextInfo->pNext)
40 {
41 if(nextInfo->sType == VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO)
42 {
43 const auto *externalInfo = reinterpret_cast<const VkExternalMemoryBufferCreateInfo *>(nextInfo);
44 supportedExternalMemoryHandleTypes = externalInfo->handleTypes;
45 }
46 }
47 }
48
destroy(const VkAllocationCallbacks * pAllocator)49 void Buffer::destroy(const VkAllocationCallbacks *pAllocator)
50 {
51 vk::freeHostMemory(queueFamilyIndices, pAllocator);
52 }
53
ComputeRequiredAllocationSize(const VkBufferCreateInfo * pCreateInfo)54 size_t Buffer::ComputeRequiredAllocationSize(const VkBufferCreateInfo *pCreateInfo)
55 {
56 return (pCreateInfo->sharingMode == VK_SHARING_MODE_CONCURRENT) ? sizeof(uint32_t) * pCreateInfo->queueFamilyIndexCount : 0;
57 }
58
getMemoryRequirements() const59 const VkMemoryRequirements Buffer::getMemoryRequirements() const
60 {
61 VkMemoryRequirements memoryRequirements = {};
62
63 // Add 15 bytes of padding to ensure that any type of attribute within the
64 // buffer can be read using 16-bit accesses.
65 // TODO(b/196822081): Also reserve space for a header containing the size of the buffer (for robust buffer access)
66 memoryRequirements.size = this->size + 15;
67
68 if(memoryRequirements.size < this->size) // Overflow occurred
69 {
70 memoryRequirements.size = std::numeric_limits<VkDeviceSize>::max();
71 }
72
73 if(usage & (VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT | VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT))
74 {
75 memoryRequirements.alignment = vk::MIN_TEXEL_BUFFER_OFFSET_ALIGNMENT;
76 }
77 else if(usage & VK_BUFFER_USAGE_STORAGE_BUFFER_BIT)
78 {
79 memoryRequirements.alignment = vk::MIN_STORAGE_BUFFER_OFFSET_ALIGNMENT;
80 }
81 else if(usage & VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT)
82 {
83 memoryRequirements.alignment = vk::MIN_UNIFORM_BUFFER_OFFSET_ALIGNMENT;
84 }
85 else
86 {
87 memoryRequirements.alignment = REQUIRED_MEMORY_ALIGNMENT;
88 }
89
90 memoryRequirements.memoryTypeBits = vk::MEMORY_TYPE_GENERIC_BIT;
91
92 return memoryRequirements;
93 }
94
canBindToMemory(DeviceMemory * pDeviceMemory) const95 bool Buffer::canBindToMemory(DeviceMemory *pDeviceMemory) const
96 {
97 return pDeviceMemory->checkExternalMemoryHandleType(supportedExternalMemoryHandleTypes);
98 }
99
bind(DeviceMemory * pDeviceMemory,VkDeviceSize pMemoryOffset)100 void Buffer::bind(DeviceMemory *pDeviceMemory, VkDeviceSize pMemoryOffset)
101 {
102 memory = pDeviceMemory->getOffsetPointer(pMemoryOffset);
103 }
104
copyFrom(const void * srcMemory,VkDeviceSize pSize,VkDeviceSize pOffset)105 void Buffer::copyFrom(const void *srcMemory, VkDeviceSize pSize, VkDeviceSize pOffset)
106 {
107 ASSERT((pSize + pOffset) <= size);
108
109 memcpy(getOffsetPointer(pOffset), srcMemory, pSize);
110 }
111
copyTo(void * dstMemory,VkDeviceSize pSize,VkDeviceSize pOffset) const112 void Buffer::copyTo(void *dstMemory, VkDeviceSize pSize, VkDeviceSize pOffset) const
113 {
114 ASSERT((pSize + pOffset) <= size);
115
116 memcpy(dstMemory, getOffsetPointer(pOffset), pSize);
117 }
118
copyTo(Buffer * dstBuffer,const VkBufferCopy2KHR & pRegion) const119 void Buffer::copyTo(Buffer *dstBuffer, const VkBufferCopy2KHR &pRegion) const
120 {
121 copyTo(dstBuffer->getOffsetPointer(pRegion.dstOffset), pRegion.size, pRegion.srcOffset);
122 }
123
fill(VkDeviceSize dstOffset,VkDeviceSize fillSize,uint32_t data)124 void Buffer::fill(VkDeviceSize dstOffset, VkDeviceSize fillSize, uint32_t data)
125 {
126 size_t bytes = (fillSize == VK_WHOLE_SIZE) ? (size - dstOffset) : fillSize;
127
128 ASSERT((bytes + dstOffset) <= size);
129
130 uint32_t *memToWrite = static_cast<uint32_t *>(getOffsetPointer(dstOffset));
131
132 // Vulkan 1.1 spec: "If VK_WHOLE_SIZE is used and the remaining size of the buffer is
133 // not a multiple of 4, then the nearest smaller multiple is used."
134 for(; bytes >= 4; bytes -= 4, memToWrite++)
135 {
136 *memToWrite = data;
137 }
138 }
139
update(VkDeviceSize dstOffset,VkDeviceSize dataSize,const void * pData)140 void Buffer::update(VkDeviceSize dstOffset, VkDeviceSize dataSize, const void *pData)
141 {
142 ASSERT((dataSize + dstOffset) <= size);
143
144 memcpy(getOffsetPointer(dstOffset), pData, dataSize);
145 }
146
getOffsetPointer(VkDeviceSize offset) const147 void *Buffer::getOffsetPointer(VkDeviceSize offset) const
148 {
149 return reinterpret_cast<uint8_t *>(memory) + offset;
150 }
151
// Returns a pointer just past the buffer's bound memory.
// NOTE(review): this computes memory + size + 1, one byte beyond the
// conventional one-past-the-end address (memory + size). Looks like an
// off-by-one — confirm against callers before changing, as existing range
// checks may already account for it.
uint8_t *Buffer::end() const
{
	return reinterpret_cast<uint8_t *>(getOffsetPointer(size + 1));
}
156
157 } // namespace vk
158