/*
 * Copyright (C) 2021 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#include "CommandBufferStagingStream.h"

#if PLATFORM_SDK_VERSION < 26
#include <cutils/log.h>
#else
#include <log/log.h>
#endif
#include <cutils/properties.h>
#include <errno.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <unistd.h>

#include <atomic>
#include <vector>

static const size_t kReadSize = 512 * 1024;
static const size_t kWriteOffset = kReadSize;

namespace gfxstream {
namespace vk {

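// Default constructor: backs the stream with plain malloc/realloc/free and no
// VkDeviceMemory. The 1 MB backing buffer is allocated lazily on the first
// allocBuffer() call.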
CommandBufferStagingStream::CommandBufferStagingStream()
    : IOStream(1048576), m_size(0), m_writePos(0) {
    // use default allocators
    m_alloc = [](size_t size) -> Memory {
        return {
            .deviceMemory = VK_NULL_HANDLE,  // no device memory for malloc
            .ptr = malloc(size),
        };
    };
    m_free = [](const Memory& mem) { free(mem.ptr); };
    m_realloc = [](const Memory& mem, size_t size) -> Memory {
        return {.deviceMemory = VK_NULL_HANDLE, .ptr = realloc(mem.ptr, size)};
    };
}

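// Custom-allocator constructor: wraps the caller-provided alloc/free callbacks
// so that every allocation carries a leading sync DWORD, used to coordinate
// buffer reuse between the guest and the host decoder.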
CommandBufferStagingStream::CommandBufferStagingStream(const Alloc& allocFn, const Free& freeFn)
    : CommandBufferStagingStream() {
    m_usingCustomAlloc = true;
    // for custom allocation, allocate metadata memory at the beginning.
    // m_alloc, m_free and m_realloc wrap the sync data logic

    // \param size number of data bytes to allocate (a sync DWORD is added on top)
    // \return Memory whose ptr points at the allocation start; the data begins
    //         kSyncDataSize bytes in (see getDataPtr())
    // capture by value so the stored callback outlives the constructor
    m_alloc = [allocFn](size_t size) -> Memory {
        // allocation requested size + sync data size

        // <---sync bytes--><----Data--->
        // |———————————————|————————————|
        // |0|1|2|3|4|5|6|7|............|
        // |———————————————|————————————|
        // ꜛ               ꜛ
        // allocated ptr   ptr to data [dataPtr]

        Memory memory;
        if (!allocFn) {
            ALOGE("Custom allocation (%zu bytes) failed: no allocation function provided\n", size);
            return memory;
        }

        // custom allocation/free requires metadata for sync between host/guest
        const size_t totalSize = size + kSyncDataSize;
        memory = allocFn(totalSize);
        if (!memory.ptr) {
            ALOGE("Custom allocation (%zu bytes) failed\n", size);
            return memory;
        }

        // set sync data to read complete
        uint32_t* syncDWordPtr = reinterpret_cast<uint32_t*>(memory.ptr);
        __atomic_store_n(syncDWordPtr, kSyncDataReadComplete, __ATOMIC_RELEASE);
        return memory;
    };

    m_free = [freeFn](const Memory& mem) {
        if (!freeFn) {
            ALOGE("Custom free for memory (%p) skipped: no free function provided\n", mem.ptr);
            return;
        }
        freeFn(mem);
    };

    // \param mem the currently allocated Memory (sync DWORD followed by data)
    // \return a new Memory with room for `size` data bytes and the previously
    //         written data copied over
    m_realloc = [this](const Memory& mem, size_t size) -> Memory {
        // realloc requires freeing previously allocated memory
        // read sync DWORD to ensure host is done reading this memory
        // before releasing it.

        size_t hostWaits = 0;

        uint32_t* syncDWordPtr = reinterpret_cast<uint32_t*>(mem.ptr);
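        // poll the sync word; after roughly 10 ms (1000 iterations of 10 us
        // sleeps) each further iteration logs that the host decoder is stalled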
        while (__atomic_load_n(syncDWordPtr, __ATOMIC_ACQUIRE) != kSyncDataReadComplete) {
            hostWaits++;
            usleep(10);
            if (hostWaits > 1000) {
                ALOGD("%s: warning, stalled on host decoding on this command buffer stream\n",
                      __func__);
            }
        }

        // for custom allocation/free, memory holding metadata must be copied
        // along with stream data
        // <---sync bytes--><----Data--->
        // |———————————————|————————————|
        // |0|1|2|3|4|5|6|7|............|
        // |———————————————|————————————|
        // ꜛ               ꜛ
        // [copyLocation]  ptr to data [ptr]

        const size_t toCopySize = m_writePos + kSyncDataSize;
        unsigned char* copyLocation = static_cast<unsigned char*>(mem.ptr);
        std::vector<uint8_t> tmp(copyLocation, copyLocation + toCopySize);
        m_free(mem);

        // get new buffer and copy previous stream data to it
        Memory newMemory = m_alloc(size);
        unsigned char* newBuf = static_cast<unsigned char*>(newMemory.ptr);
        if (!newBuf) {
            ALOGE("Custom allocation (%zu bytes) failed\n", size);
            return newMemory;
        }
        // copy previous data
        memcpy(newBuf, tmp.data(), toCopySize);

        return newMemory;
    };
}

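// Flushes any pending stream data and releases the backing memory through the
// active free callback.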
CommandBufferStagingStream::~CommandBufferStagingStream() {
    flush();
    if (m_mem.ptr) m_free(m_mem);
}

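// Returns a pointer to the start of the stream data, skipping the leading sync
// metadata when a custom allocator is in use; returns nullptr if no buffer has
// been allocated yet.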
unsigned char* CommandBufferStagingStream::getDataPtr() {
    if (!m_mem.ptr) return nullptr;
    const size_t metadataSize = m_usingCustomAlloc ? kSyncDataSize : 0;
    return static_cast<unsigned char*>(m_mem.ptr) + metadataSize;
}

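// Marks the buffer as pending a host read. This is a no-op for the default
// malloc-based allocator, which needs no guest/host synchronization.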
void CommandBufferStagingStream::markFlushing() {
    if (!m_usingCustomAlloc) {
        return;
    }
    uint32_t* syncDWordPtr = reinterpret_cast<uint32_t*>(m_mem.ptr);
    __atomic_store_n(syncDWordPtr, kSyncDataReadPending, __ATOMIC_RELEASE);
}

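// Allocation requests below 1 MB are rounded up to 1 MB.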
size_t CommandBufferStagingStream::idealAllocSize(size_t len) {
    if (len > 1048576) return len;
    return 1048576;
}

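// Returns a write pointer with at least minSize bytes available, allocating or
// growing the backing buffer as needed.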
void* CommandBufferStagingStream::allocBuffer(size_t minSize) {
    size_t allocSize = (1048576 < minSize ? minSize : 1048576);
    // Initial case: blank
    if (!m_mem.ptr) {
        m_mem = m_alloc(allocSize);
        m_size = allocSize;
        return getDataPtr();
    }

    // Calculate remaining
    size_t remaining = m_size - m_writePos;
    // check if there are at least minSize bytes left in the buffer;
    // if not, reallocate a buffer of a big enough size
    if (remaining < minSize) {
        size_t newAllocSize = m_size * 2 + allocSize;
        m_mem = m_realloc(m_mem, newAllocSize);
        m_size = newAllocSize;

        return (void*)(getDataPtr() + m_writePos);
    }

    // for custom allocations, the host should have finished reading
    // data from the command buffer, since command buffers are flushed
    // on queue submit.
    // allocBuffer should not be called on command buffers that are currently
    // being read by the host
    if (m_usingCustomAlloc) {
        uint32_t* syncDWordPtr = reinterpret_cast<uint32_t*>(m_mem.ptr);
        LOG_ALWAYS_FATAL_IF(
            __atomic_load_n(syncDWordPtr, __ATOMIC_ACQUIRE) != kSyncDataReadComplete,
            "FATAL: allocBuffer() called but previous read not complete");
    }

    return (void*)(getDataPtr() + m_writePos);
}

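// Advances the write position past `size` bytes of newly written data.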
int CommandBufferStagingStream::commitBuffer(size_t size) {
    m_writePos += size;
    return 0;
}

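// The staging stream is only written from the guest; the read paths below are
// unsupported and abort if ever called.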
const unsigned char* CommandBufferStagingStream::readFully(void*, size_t) {
    // Not supported
    ALOGE("CommandBufferStagingStream::%s: Fatal: not supported\n", __func__);
    abort();
    return nullptr;
}

const unsigned char* CommandBufferStagingStream::read(void*, size_t*) {
    // Not supported
    ALOGE("CommandBufferStagingStream::%s: Fatal: not supported\n", __func__);
    abort();
    return nullptr;
}

int CommandBufferStagingStream::writeFully(const void*, size_t) {
    // Not supported
    ALOGE("CommandBufferStagingStream::%s: Fatal: not supported\n", __func__);
    abort();
    return 0;
}

const unsigned char* CommandBufferStagingStream::commitBufferAndReadFully(
    size_t, void*, size_t) {
    // Not supported
    ALOGE("CommandBufferStagingStream::%s: Fatal: not supported\n", __func__);
    abort();
    return nullptr;
}

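// Returns the pointer to the written data and the number of bytes written so far.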
void CommandBufferStagingStream::getWritten(unsigned char** bufOut, size_t* sizeOut) {
    *bufOut = getDataPtr();
    *sizeOut = m_writePos;
}

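// Resets the write position and rewinds the underlying IOStream so the buffer
// can be reused.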
void CommandBufferStagingStream::reset() {
    m_writePos = 0;
    IOStream::rewind();
}

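// Returns the VkDeviceMemory backing the stream; VK_NULL_HANDLE when the
// default malloc-based allocators are in use.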
VkDeviceMemory CommandBufferStagingStream::getDeviceMemory() { return m_mem.deviceMemory; }

}  // namespace vk
}  // namespace gfxstream