1 /*
2 * Copyright (C) 2011 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16 #include "HostConnection.h"
17
18 #include "cutils/properties.h"
19
20 #ifdef HOST_BUILD
21 #include "android/base/Tracing.h"
22 #endif
23
24 #ifdef GOLDFISH_NO_GL
// Stub GL types used when the build excludes real GL support
// (GOLDFISH_NO_GL). They satisfy the references in this file without
// pulling in the actual encoders; every operation is a no-op.
struct gl_client_context_t {
    int placeholder;
};
// No-op stand-in for the GLES1 command encoder.
class GLEncoder : public gl_client_context_t {
public:
    GLEncoder(IOStream*, ChecksumCalculator*) { }
    void setContextAccessor(gl_client_context_t *()) { }
};
struct gl2_client_context_t {
    int placeholder;
};
// No-op stand-in for the GLES2+ command encoder.
class GL2Encoder : public gl2_client_context_t {
public:
    GL2Encoder(IOStream*, ChecksumCalculator*) { }
    void setContextAccessor(gl2_client_context_t *()) { }
    void setNoHostError(bool) { }
    void setDrawCallFlushInterval(uint32_t) { }
    void setHasAsyncUnmapBuffer(int) { }
    void setHasSyncBufferData(int) { }
};
45 #else
46 #include "GLEncoder.h"
47 #include "GL2Encoder.h"
48 #endif
49
50 #ifdef GFXSTREAM
51 #include "VkEncoder.h"
52 #include "AddressSpaceStream.h"
53 #else
// Stub Vulkan encoder and address-space-stream factories used when not
// building for gfxstream (GFXSTREAM undefined); these code paths are
// unsupported in such builds.
namespace goldfish_vk {
struct VkEncoder {
    VkEncoder(IOStream*) { }
    void decRef() { }
    int placeholder;
};
} // namespace goldfish_vk
class QemuPipeStream;
typedef QemuPipeStream AddressSpaceStream;
// Unsupported in this build: log and abort rather than return a bogus stream.
AddressSpaceStream* createAddressSpaceStream(size_t bufSize) {
    ALOGE("%s: FATAL: Trying to create ASG stream in unsupported build\n", __func__);
    abort();
}
AddressSpaceStream* createVirtioGpuAddressSpaceStream(size_t bufSize) {
    ALOGE("%s: FATAL: Trying to create virtgpu ASG stream in unsupported build\n", __func__);
    abort();
}
71 #endif
72
73 using goldfish_vk::VkEncoder;
74
75 #include "ProcessPipe.h"
76 #include "QemuPipeStream.h"
77 #include "TcpStream.h"
78 #include "ThreadInfo.h"
79 #include <gralloc_cb_bp.h>
80 #include <unistd.h>
81
82 #ifdef VIRTIO_GPU
83
84 #include "VirtioGpuStream.h"
85 #include "VirtioGpuPipeStream.h"
86
87 #include <cros_gralloc_handle.h>
88 #include <drm/virtgpu_drm.h>
89 #include <xf86drm.h>
90
91 #endif
92
93 #undef LOG_TAG
94 #define LOG_TAG "HostConnection"
95 #if PLATFORM_SDK_VERSION < 26
96 #include <cutils/log.h>
97 #else
98 #include <log/log.h>
99 #endif
100
101 #define STREAM_BUFFER_SIZE (4*1024*1024)
102 #define STREAM_PORT_NUM 22468
103
getConnectionTypeFromProperty()104 static HostConnectionType getConnectionTypeFromProperty() {
105 #ifdef __Fuchsia__
106 return HOST_CONNECTION_ADDRESS_SPACE;
107 #else
108 char transportValue[PROPERTY_VALUE_MAX] = "";
109
110 do {
111 property_get("ro.boot.qemu.gltransport.name", transportValue, "");
112 if (transportValue[0]) { break; }
113
114 property_get("ro.boot.qemu.gltransport", transportValue, "");
115 if (transportValue[0]) { break; }
116
117 property_get("ro.boot.hardware.gltransport", transportValue, "");
118 } while (false);
119
120 if (!transportValue[0]) return HOST_CONNECTION_QEMU_PIPE;
121
122 if (!strcmp("tcp", transportValue)) return HOST_CONNECTION_TCP;
123 if (!strcmp("pipe", transportValue)) return HOST_CONNECTION_QEMU_PIPE;
124 if (!strcmp("virtio-gpu", transportValue)) return HOST_CONNECTION_VIRTIO_GPU;
125 if (!strcmp("asg", transportValue)) return HOST_CONNECTION_ADDRESS_SPACE;
126 if (!strcmp("virtio-gpu-pipe", transportValue)) return HOST_CONNECTION_VIRTIO_GPU_PIPE;
127 if (!strcmp("virtio-gpu-asg", transportValue)) return HOST_CONNECTION_VIRTIO_GPU_ADDRESS_SPACE;
128
129 return HOST_CONNECTION_QEMU_PIPE;
130 #endif
131 }
132
getDrawCallFlushIntervalFromProperty()133 static uint32_t getDrawCallFlushIntervalFromProperty() {
134 constexpr uint32_t kDefaultValue = 800;
135
136 char flushValue[PROPERTY_VALUE_MAX] = "";
137 property_get("ro.boot.qemu.gltransport.drawFlushInterval", flushValue, "");
138 if (!flushValue[0]) return kDefaultValue;
139
140 const long interval = strtol(flushValue, 0, 10);
141 return (interval > 0) ? uint32_t(interval) : kDefaultValue;
142 }
143
getGrallocTypeFromProperty()144 static GrallocType getGrallocTypeFromProperty() {
145 char value[PROPERTY_VALUE_MAX] = "";
146 property_get("ro.hardware.gralloc", value, "");
147
148 if (!value[0]) return GRALLOC_TYPE_RANCHU;
149
150 if (!strcmp("ranchu", value)) return GRALLOC_TYPE_RANCHU;
151 if (!strcmp("minigbm", value)) return GRALLOC_TYPE_MINIGBM;
152 return GRALLOC_TYPE_RANCHU;
153 }
154
// Gralloc implementation for the goldfish/ranchu (emulator) gralloc HAL.
// Host handles, formats, and sizes are read straight out of the
// cb_handle_t that backs each native_handle_t.
class GoldfishGralloc : public Gralloc
{
public:
    // Ask the host renderer to create a color buffer of the given
    // dimensions and GL format; returns the host-side handle.
    virtual uint32_t createColorBuffer(
        ExtendedRCEncoderContext* rcEnc,
        int width, int height, uint32_t glformat) {
        return rcEnc->rcCreateColorBuffer(
            rcEnc, width, height, glformat);
    }

    // Host-side color buffer handle stored in the gralloc buffer.
    virtual uint32_t getHostHandle(native_handle_t const* handle)
    {
        return cb_handle_t::from(handle)->hostHandle;
    }

    // Pixel format recorded in the gralloc buffer.
    virtual int getFormat(native_handle_t const* handle)
    {
        return cb_handle_t::from(handle)->format;
    }

    // Total bytes allocated for the buffer.
    virtual size_t getAllocatedSize(native_handle_t const* handle)
    {
        return static_cast<size_t>(cb_handle_t::from(handle)->allocatedSize());
    }
};
180
align_up(uint32_t n,uint32_t a)181 static inline uint32_t align_up(uint32_t n, uint32_t a) {
182 return ((n + a - 1) / a) * a;
183 }
184
185 #ifdef VIRTIO_GPU
186
// Gralloc implementation for minigbm buffers backed by the virtio-gpu DRM
// render node. Host handles and allocated sizes are looked up with
// DRM_IOCTL_VIRTGPU_RESOURCE_INFO via the buffer's prime fd.
class MinigbmGralloc : public Gralloc {
public:
    // Create a host color buffer by issuing DRM_IOCTL_VIRTGPU_RESOURCE_CREATE
    // directly on the render node. Aborts the process on ioctl failure.
    virtual uint32_t createColorBuffer(
        ExtendedRCEncoderContext*,
        int width, int height, uint32_t glformat) {

        // Only supported format for pbuffers in gfxstream
        // should be RGBA8
        const uint32_t kGlRGB = 0x1907;
        const uint32_t kGlRGBA = 0x1908;
        const uint32_t kVirglFormatRGBA = 67; // VIRGL_FORMAT_R8G8B8A8_UNORM;
        uint32_t virtgpu_format = 0;
        uint32_t bpp = 0;
        // Every requested GL format is mapped onto 4-byte RGBA; non-RGBA
        // requests are logged but still serviced with RGBA.
        switch (glformat) {
            case kGlRGB:
                ALOGD("Note: egl wanted GL_RGB, still using RGBA");
                virtgpu_format = kVirglFormatRGBA;
                bpp = 4;
                break;
            case kGlRGBA:
                virtgpu_format = kVirglFormatRGBA;
                bpp = 4;
                break;
            default:
                ALOGD("Note: egl wanted 0x%x, still using RGBA", glformat);
                virtgpu_format = kVirglFormatRGBA;
                bpp = 4;
                break;
        }
        const uint32_t kPipeTexture2D = 2; // PIPE_TEXTURE_2D
        const uint32_t kBindRenderTarget = 1 << 1; // VIRGL_BIND_RENDER_TARGET
        struct drm_virtgpu_resource_create res_create;
        memset(&res_create, 0, sizeof(res_create));
        res_create.target = kPipeTexture2D;
        res_create.format = virtgpu_format;
        res_create.bind = kBindRenderTarget;
        res_create.width = width;
        res_create.height = height;
        res_create.depth = 1;
        res_create.array_size = 1;
        res_create.last_level = 0;
        res_create.nr_samples = 0;
        res_create.stride = bpp * width;
        // Size must be page-aligned for the host resource allocation.
        res_create.size = align_up(bpp * width * height, PAGE_SIZE);

        int ret = drmIoctl(m_fd, DRM_IOCTL_VIRTGPU_RESOURCE_CREATE, &res_create);
        if (ret) {
            ALOGE("%s: DRM_IOCTL_VIRTGPU_RESOURCE_CREATE failed with %s (%d)\n", __func__,
                  strerror(errno), errno);
            abort();
        }

        return res_create.res_handle;
    }

    // Host-visible resource handle for the buffer, or 0 on failure.
    virtual uint32_t getHostHandle(native_handle_t const* handle) {
        struct drm_virtgpu_resource_info info;
        if (!getResInfo(handle, &info)) {
            ALOGE("%s: failed to get resource info\n", __func__);
            return 0;
        }

        return info.res_handle;
    }

    // Android (HAL) pixel format recorded by minigbm in the cros handle.
    virtual int getFormat(native_handle_t const* handle) {
        return ((cros_gralloc_handle *)handle)->droid_format;
    }

    // Size of the backing virtgpu resource, or 0 on failure.
    virtual size_t getAllocatedSize(native_handle_t const* handle) {
        struct drm_virtgpu_resource_info info;
        if (!getResInfo(handle, &info)) {
            ALOGE("%s: failed to get resource info\n", __func__);
            return 0;
        }

        return info.size;
    }

    // The render node fd is only borrowed here; the caller retains
    // ownership and is responsible for closing it.
    void setFd(int fd) { m_fd = fd; }

private:

    // Resolve the buffer's prime fd into a temporary GEM handle, query its
    // virtgpu resource info, then close the GEM handle again (on both the
    // success and the ioctl-failure path).
    bool getResInfo(native_handle_t const* handle,
                    struct drm_virtgpu_resource_info* info) {
        memset(info, 0x0, sizeof(*info));
        if (m_fd < 0) {
            ALOGE("%s: Error, rendernode fd missing\n", __func__);
            return false;
        }

        struct drm_gem_close gem_close;
        memset(&gem_close, 0x0, sizeof(gem_close));

        cros_gralloc_handle const* cros_handle =
            reinterpret_cast<cros_gralloc_handle const*>(handle);

        uint32_t prime_handle;
        int ret = drmPrimeFDToHandle(m_fd, cros_handle->fds[0], &prime_handle);
        if (ret) {
            ALOGE("%s: DRM_IOCTL_PRIME_FD_TO_HANDLE failed: %s (errno %d)\n",
                  __func__, strerror(errno), errno);
            return false;
        }

        info->bo_handle = prime_handle;
        gem_close.handle = prime_handle;

        ret = drmIoctl(m_fd, DRM_IOCTL_VIRTGPU_RESOURCE_INFO, info);
        if (ret) {
            ALOGE("%s: DRM_IOCTL_VIRTGPU_RESOURCE_INFO failed: %s (errno %d)\n",
                  __func__, strerror(errno), errno);
            drmIoctl(m_fd, DRM_IOCTL_GEM_CLOSE, &gem_close);
            return false;
        }

        drmIoctl(m_fd, DRM_IOCTL_GEM_CLOSE, &gem_close);
        return true;
    }

    // Borrowed virtio-gpu render node fd; -1 until setFd() is called.
    int m_fd = -1;
};
309
310 #else
311
// Fallback MinigbmGralloc used when built without -DVIRTIO_GPU.
// Minigbm requires virtio-gpu, so every operation logs an error and
// returns a failure value.
class MinigbmGralloc : public Gralloc {
public:
    virtual uint32_t createColorBuffer(
        ExtendedRCEncoderContext*,
        int width, int height, uint32_t glformat) {
        ALOGE("%s: Error: using minigbm without -DVIRTIO_GPU\n", __func__);
        return 0;
    }

    virtual uint32_t getHostHandle(native_handle_t const* handle) {
        ALOGE("%s: Error: using minigbm without -DVIRTIO_GPU\n", __func__);
        return 0;
    }

    virtual int getFormat(native_handle_t const* handle) {
        ALOGE("%s: Error: using minigbm without -DVIRTIO_GPU\n", __func__);
        return 0;
    }

    virtual size_t getAllocatedSize(native_handle_t const* handle) {
        ALOGE("%s: Error: using minigbm without -DVIRTIO_GPU\n", __func__);
        return 0;
    }

    // Kept for interface parity with the VIRTIO_GPU variant; the fd is
    // stored but never used in this stub.
    void setFd(int fd) { m_fd = fd; }

private:

    int m_fd = -1;
};
342
343 #endif
344
// Thin adapter that exposes the free function ::processPipeInit() through
// the ProcessPipe interface.
class GoldfishProcessPipe : public ProcessPipe
{
public:
    // Initialize the per-process pipe for this connection type; forwards
    // directly to the global ::processPipeInit().
    bool processPipeInit(HostConnectionType connType, renderControl_encoder_context_t *rcEnc)
    {
        return ::processPipeInit(connType, rcEnc);
    }

};
354
// File-scope singletons shared by every HostConnection instance that uses
// the ranchu gralloc / goldfish process pipe (never deleted per-connection).
static GoldfishGralloc m_goldfishGralloc;
static GoldfishProcessPipe m_goldfishProcessPipe;
357
HostConnection()358 HostConnection::HostConnection() :
359 exitUncleanly(false),
360 m_checksumHelper(),
361 m_glExtensions(),
362 m_grallocOnly(true),
363 m_noHostError(true),
364 m_rendernodeFd(-1),
365 m_rendernodeFdOwned(false) {
366 #ifdef HOST_BUILD
367 android::base::initializeTracing();
368 #endif
369 }
370
~HostConnection()371 HostConnection::~HostConnection()
372 {
373 // round-trip to ensure that queued commands have been processed
374 // before process pipe closure is detected.
375 if (m_rcEnc && !exitUncleanly) {
376 (void)m_rcEnc->rcGetRendererVersion(m_rcEnc.get());
377 }
378
379 if (m_grallocType == GRALLOC_TYPE_MINIGBM) {
380 delete m_grallocHelper;
381 }
382
383 if (m_rendernodeFdOwned) {
384 close(m_rendernodeFd);
385 }
386
387 if (m_vkEnc) {
388 m_vkEnc->decRef();
389 }
390
391 if (m_stream) {
392 m_stream->decRef();
393 }
394 }
395
396 // static
connect()397 std::unique_ptr<HostConnection> HostConnection::connect() {
398 const enum HostConnectionType connType = getConnectionTypeFromProperty();
399 // const enum HostConnectionType connType = HOST_CONNECTION_VIRTIO_GPU;
400
401 // Use "new" to access a non-public constructor.
402 auto con = std::unique_ptr<HostConnection>(new HostConnection);
403 switch (connType) {
404 case HOST_CONNECTION_ADDRESS_SPACE: {
405 auto stream = createAddressSpaceStream(STREAM_BUFFER_SIZE);
406 if (!stream) {
407 ALOGE("Failed to create AddressSpaceStream for host connection!!!\n");
408 return nullptr;
409 }
410 con->m_connectionType = HOST_CONNECTION_ADDRESS_SPACE;
411 con->m_grallocType = GRALLOC_TYPE_RANCHU;
412 con->m_stream = stream;
413 con->m_grallocHelper = &m_goldfishGralloc;
414 con->m_processPipe = &m_goldfishProcessPipe;
415 break;
416 }
417 case HOST_CONNECTION_QEMU_PIPE: {
418 auto stream = new QemuPipeStream(STREAM_BUFFER_SIZE);
419 if (!stream) {
420 ALOGE("Failed to create QemuPipeStream for host connection!!!\n");
421 return nullptr;
422 }
423 if (stream->connect() < 0) {
424 ALOGE("Failed to connect to host (QemuPipeStream)!!!\n");
425 return nullptr;
426 }
427 con->m_connectionType = HOST_CONNECTION_QEMU_PIPE;
428 con->m_grallocType = GRALLOC_TYPE_RANCHU;
429 con->m_stream = stream;
430 con->m_grallocHelper = &m_goldfishGralloc;
431 con->m_processPipe = &m_goldfishProcessPipe;
432 break;
433 }
434 case HOST_CONNECTION_TCP: {
435 #ifdef __Fuchsia__
436 ALOGE("Fuchsia doesn't support HOST_CONNECTION_TCP!!!\n");
437 return nullptr;
438 break;
439 #else
440 auto stream = new TcpStream(STREAM_BUFFER_SIZE);
441 if (!stream) {
442 ALOGE("Failed to create TcpStream for host connection!!!\n");
443 return nullptr;
444 }
445
446 if (stream->connect("10.0.2.2", STREAM_PORT_NUM) < 0) {
447 ALOGE("Failed to connect to host (TcpStream)!!!\n");
448 return nullptr;
449 }
450 con->m_connectionType = HOST_CONNECTION_TCP;
451 con->m_grallocType = GRALLOC_TYPE_RANCHU;
452 con->m_stream = stream;
453 con->m_grallocHelper = &m_goldfishGralloc;
454 con->m_processPipe = &m_goldfishProcessPipe;
455 break;
456 #endif
457 }
458 #ifdef VIRTIO_GPU
459 case HOST_CONNECTION_VIRTIO_GPU: {
460 auto stream = new VirtioGpuStream(STREAM_BUFFER_SIZE);
461 if (!stream) {
462 ALOGE("Failed to create VirtioGpu for host connection!!!\n");
463 return nullptr;
464 }
465 if (stream->connect() < 0) {
466 ALOGE("Failed to connect to host (VirtioGpu)!!!\n");
467 return nullptr;
468 }
469 con->m_connectionType = HOST_CONNECTION_VIRTIO_GPU;
470 con->m_grallocType = GRALLOC_TYPE_MINIGBM;
471 auto rendernodeFd = stream->getRendernodeFd();
472 con->m_processPipe = stream->getProcessPipe();
473 con->m_stream = stream;
474 con->m_rendernodeFdOwned = false;
475 con->m_rendernodeFdOwned = rendernodeFd;
476 MinigbmGralloc* m = new MinigbmGralloc;
477 m->setFd(rendernodeFd);
478 con->m_grallocHelper = m;
479 break;
480 }
481 case HOST_CONNECTION_VIRTIO_GPU_PIPE: {
482 auto stream = new VirtioGpuPipeStream(STREAM_BUFFER_SIZE);
483 if (!stream) {
484 ALOGE("Failed to create VirtioGpu for host connection!!!\n");
485 return nullptr;
486 }
487 if (stream->connect() < 0) {
488 ALOGE("Failed to connect to host (VirtioGpu)!!!\n");
489 return nullptr;
490 }
491 con->m_connectionType = HOST_CONNECTION_VIRTIO_GPU_PIPE;
492 con->m_grallocType = getGrallocTypeFromProperty();
493 con->m_rendernodeFdOwned = false;
494 auto rendernodeFd = stream->getRendernodeFd();
495 con->m_stream = stream;
496 con->m_rendernodeFd = rendernodeFd;
497 switch (con->m_grallocType) {
498 case GRALLOC_TYPE_RANCHU:
499 con->m_grallocHelper = &m_goldfishGralloc;
500 break;
501 case GRALLOC_TYPE_MINIGBM: {
502 MinigbmGralloc* m = new MinigbmGralloc;
503 m->setFd(rendernodeFd);
504 con->m_grallocHelper = m;
505 break;
506 }
507 default:
508 ALOGE("Fatal: Unknown gralloc type 0x%x\n", con->m_grallocType);
509 abort();
510 }
511 con->m_processPipe = &m_goldfishProcessPipe;
512 break;
513 }
514 #if !defined(HOST_BUILD) && !defined(__Fuchsia__)
515 case HOST_CONNECTION_VIRTIO_GPU_ADDRESS_SPACE: {
516 auto stream = createVirtioGpuAddressSpaceStream(STREAM_BUFFER_SIZE);
517 if (!stream) {
518 ALOGE("Failed to create virtgpu AddressSpaceStream for host connection!!!\n");
519 return nullptr;
520 }
521 con->m_connectionType = HOST_CONNECTION_VIRTIO_GPU_ADDRESS_SPACE;
522 con->m_grallocType = getGrallocTypeFromProperty();
523 con->m_rendernodeFdOwned = false;
524 auto rendernodeFd = stream->getRendernodeFd();
525 con->m_stream = stream;
526 con->m_rendernodeFd = rendernodeFd;
527 switch (con->m_grallocType) {
528 case GRALLOC_TYPE_RANCHU:
529 con->m_grallocHelper = &m_goldfishGralloc;
530 break;
531 case GRALLOC_TYPE_MINIGBM: {
532 MinigbmGralloc* m = new MinigbmGralloc;
533 m->setFd(rendernodeFd);
534 con->m_grallocHelper = m;
535 break;
536 }
537 default:
538 ALOGE("Fatal: Unknown gralloc type 0x%x\n", con->m_grallocType);
539 abort();
540 }
541 con->m_processPipe = &m_goldfishProcessPipe;
542 break;
543 }
544 #endif // !HOST_BUILD && !__Fuchsia__
545 #else
546 default:
547 break;
548 #endif
549 }
550
551 // send zero 'clientFlags' to the host.
552 unsigned int *pClientFlags =
553 (unsigned int *)con->m_stream->allocBuffer(sizeof(unsigned int));
554 *pClientFlags = 0;
555 con->m_stream->commitBuffer(sizeof(unsigned int));
556
557 ALOGD("HostConnection::get() New Host Connection established %p, tid %d\n",
558 con.get(), getCurrentThreadId());
559
560 // ALOGD("Address space echo latency check done\n");
561 return con;
562 }
563
get()564 HostConnection *HostConnection::get() {
565 return getWithThreadInfo(getEGLThreadInfo());
566 }
567
getWithThreadInfo(EGLThreadInfo * tinfo)568 HostConnection *HostConnection::getWithThreadInfo(EGLThreadInfo* tinfo) {
569 // Get thread info
570 if (!tinfo) {
571 return NULL;
572 }
573
574 if (tinfo->hostConn == NULL) {
575 tinfo->hostConn = HostConnection::createUnique();
576 }
577
578 return tinfo->hostConn.get();
579 }
580
exit()581 void HostConnection::exit() {
582 EGLThreadInfo *tinfo = getEGLThreadInfo();
583 if (!tinfo) {
584 return;
585 }
586
587 tinfo->hostConn.reset();
588 }
589
exitUnclean()590 void HostConnection::exitUnclean() {
591 EGLThreadInfo *tinfo = getEGLThreadInfo();
592 if (!tinfo) {
593 return;
594 }
595
596 tinfo->hostConn->exitUncleanly = true;
597 tinfo->hostConn.reset();
598 }
599
600 // static
createUnique()601 std::unique_ptr<HostConnection> HostConnection::createUnique() {
602 ALOGD("%s: call\n", __func__);
603 return connect();
604 }
605
glEncoder()606 GLEncoder *HostConnection::glEncoder()
607 {
608 if (!m_glEnc) {
609 m_glEnc = std::make_unique<GLEncoder>(m_stream, checksumHelper());
610 DBG("HostConnection::glEncoder new encoder %p, tid %d",
611 m_glEnc, getCurrentThreadId());
612 m_glEnc->setContextAccessor(s_getGLContext);
613 }
614 return m_glEnc.get();
615 }
616
gl2Encoder()617 GL2Encoder *HostConnection::gl2Encoder()
618 {
619 if (!m_gl2Enc) {
620 m_gl2Enc =
621 std::make_unique<GL2Encoder>(m_stream, checksumHelper());
622 DBG("HostConnection::gl2Encoder new encoder %p, tid %d",
623 m_gl2Enc, getCurrentThreadId());
624 m_gl2Enc->setContextAccessor(s_getGL2Context);
625 m_gl2Enc->setNoHostError(m_noHostError);
626 m_gl2Enc->setDrawCallFlushInterval(
627 getDrawCallFlushIntervalFromProperty());
628 m_gl2Enc->setHasAsyncUnmapBuffer(m_rcEnc->hasAsyncUnmapBuffer());
629 m_gl2Enc->setHasSyncBufferData(m_rcEnc->hasSyncBufferData());
630 }
631 return m_gl2Enc.get();
632 }
633
vkEncoder()634 VkEncoder *HostConnection::vkEncoder()
635 {
636 rcEncoder();
637 if (!m_vkEnc) {
638 m_vkEnc = new VkEncoder(m_stream);
639 }
640 return m_vkEnc;
641 }
642
rcEncoder()643 ExtendedRCEncoderContext *HostConnection::rcEncoder()
644 {
645 if (!m_rcEnc) {
646 m_rcEnc = std::make_unique<ExtendedRCEncoderContext>(m_stream,
647 checksumHelper());
648
649 ExtendedRCEncoderContext* rcEnc = m_rcEnc.get();
650 setChecksumHelper(rcEnc);
651 queryAndSetSyncImpl(rcEnc);
652 queryAndSetDmaImpl(rcEnc);
653 queryAndSetGLESMaxVersion(rcEnc);
654 queryAndSetNoErrorState(rcEnc);
655 queryAndSetHostCompositionImpl(rcEnc);
656 queryAndSetDirectMemSupport(rcEnc);
657 queryAndSetVulkanSupport(rcEnc);
658 queryAndSetDeferredVulkanCommandsSupport(rcEnc);
659 queryAndSetVulkanNullOptionalStringsSupport(rcEnc);
660 queryAndSetVulkanCreateResourcesWithRequirementsSupport(rcEnc);
661 queryAndSetVulkanIgnoredHandles(rcEnc);
662 queryAndSetYUVCache(rcEnc);
663 queryAndSetAsyncUnmapBuffer(rcEnc);
664 queryAndSetVirtioGpuNext(rcEnc);
665 queryHasSharedSlotsHostMemoryAllocator(rcEnc);
666 queryAndSetVulkanFreeMemorySync(rcEnc);
667 queryAndSetVirtioGpuNativeSync(rcEnc);
668 queryAndSetVulkanShaderFloat16Int8Support(rcEnc);
669 queryAndSetVulkanAsyncQueueSubmitSupport(rcEnc);
670 queryAndSetHostSideTracingSupport(rcEnc);
671 queryAndSetAsyncFrameCommands(rcEnc);
672 queryAndSetVulkanQueueSubmitWithCommandsSupport(rcEnc);
673 queryAndSetVulkanBatchedDescriptorSetUpdateSupport(rcEnc);
674 queryAndSetSyncBufferData(rcEnc);
675 queryAndSetReadColorBufferDma(rcEnc);
676 queryVersion(rcEnc);
677 if (m_processPipe) {
678 m_processPipe->processPipeInit(m_connectionType, rcEnc);
679 }
680 }
681 return m_rcEnc.get();
682 }
683
getOrCreateRendernodeFd()684 int HostConnection::getOrCreateRendernodeFd() {
685 if (m_rendernodeFd >= 0) return m_rendernodeFd;
686 #ifdef __Fuchsia__
687 return -1;
688 #else
689 #ifdef VIRTIO_GPU
690 m_rendernodeFd = VirtioGpuPipeStream::openRendernode();
691 if (m_rendernodeFd < 0) {
692 ALOGE("%s: failed to create secondary "
693 "rendernode for host connection. "
694 "error: %s (%d)\n", __FUNCTION__,
695 strerror(errno), errno);
696 return -1;
697 }
698
699 // Remember to close it on exit
700 m_rendernodeFdOwned = true;
701 return m_rendernodeFd;
702 #else
703 return -1;
704 #endif
705 #endif
706 }
707
s_getGLContext()708 gl_client_context_t *HostConnection::s_getGLContext()
709 {
710 EGLThreadInfo *ti = getEGLThreadInfo();
711 if (ti->hostConn) {
712 return ti->hostConn->m_glEnc.get();
713 }
714 return NULL;
715 }
716
s_getGL2Context()717 gl2_client_context_t *HostConnection::s_getGL2Context()
718 {
719 EGLThreadInfo *ti = getEGLThreadInfo();
720 if (ti->hostConn) {
721 return ti->hostConn->m_gl2Enc.get();
722 }
723 return NULL;
724 }
725
queryGLExtensions(ExtendedRCEncoderContext * rcEnc)726 const std::string& HostConnection::queryGLExtensions(ExtendedRCEncoderContext *rcEnc) {
727 if (!m_glExtensions.empty()) {
728 return m_glExtensions;
729 }
730
731 // Extensions strings are usually quite long, preallocate enough here.
732 std::string extensions_buffer(1023, '\0');
733
734 // rcGetGLString() returns required size including the 0-terminator, so
735 // account it when passing/using the sizes.
736 int extensionSize = rcEnc->rcGetGLString(rcEnc, GL_EXTENSIONS,
737 &extensions_buffer[0],
738 extensions_buffer.size() + 1);
739 if (extensionSize < 0) {
740 extensions_buffer.resize(-extensionSize);
741 extensionSize = rcEnc->rcGetGLString(rcEnc, GL_EXTENSIONS,
742 &extensions_buffer[0],
743 -extensionSize + 1);
744 }
745
746 if (extensionSize > 0) {
747 extensions_buffer.resize(extensionSize - 1);
748 m_glExtensions.swap(extensions_buffer);
749 }
750
751 return m_glExtensions;
752 }
753
queryAndSetHostCompositionImpl(ExtendedRCEncoderContext * rcEnc)754 void HostConnection::queryAndSetHostCompositionImpl(ExtendedRCEncoderContext *rcEnc) {
755 const std::string& glExtensions = queryGLExtensions(rcEnc);
756 ALOGD("HostComposition ext %s", glExtensions.c_str());
757 // make sure V2 is checked first before V1, as host may declare supporting both
758 if (glExtensions.find(kHostCompositionV2) != std::string::npos) {
759 rcEnc->setHostComposition(HOST_COMPOSITION_V2);
760 }
761 else if (glExtensions.find(kHostCompositionV1) != std::string::npos) {
762 rcEnc->setHostComposition(HOST_COMPOSITION_V1);
763 }
764 else {
765 rcEnc->setHostComposition(HOST_COMPOSITION_NONE);
766 }
767 }
768
setChecksumHelper(ExtendedRCEncoderContext * rcEnc)769 void HostConnection::setChecksumHelper(ExtendedRCEncoderContext *rcEnc) {
770 const std::string& glExtensions = queryGLExtensions(rcEnc);
771 // check the host supported version
772 uint32_t checksumVersion = 0;
773 const char* checksumPrefix = ChecksumCalculator::getMaxVersionStrPrefix();
774 const char* glProtocolStr = strstr(glExtensions.c_str(), checksumPrefix);
775 if (glProtocolStr) {
776 uint32_t maxVersion = ChecksumCalculator::getMaxVersion();
777 sscanf(glProtocolStr+strlen(checksumPrefix), "%d", &checksumVersion);
778 if (maxVersion < checksumVersion) {
779 checksumVersion = maxVersion;
780 }
781 // The ordering of the following two commands matters!
782 // Must tell the host first before setting it in the guest
783 rcEnc->rcSelectChecksumHelper(rcEnc, checksumVersion, 0);
784 m_checksumHelper.setVersion(checksumVersion);
785 }
786 }
787
queryAndSetSyncImpl(ExtendedRCEncoderContext * rcEnc)788 void HostConnection::queryAndSetSyncImpl(ExtendedRCEncoderContext *rcEnc) {
789 const std::string& glExtensions = queryGLExtensions(rcEnc);
790 #if PLATFORM_SDK_VERSION <= 16 || (!defined(__i386__) && !defined(__x86_64__))
791 rcEnc->setSyncImpl(SYNC_IMPL_NONE);
792 #else
793 if (glExtensions.find(kRCNativeSyncV4) != std::string::npos) {
794 rcEnc->setSyncImpl(SYNC_IMPL_NATIVE_SYNC_V4);
795 } else if (glExtensions.find(kRCNativeSyncV3) != std::string::npos) {
796 rcEnc->setSyncImpl(SYNC_IMPL_NATIVE_SYNC_V3);
797 } else if (glExtensions.find(kRCNativeSyncV2) != std::string::npos) {
798 rcEnc->setSyncImpl(SYNC_IMPL_NATIVE_SYNC_V2);
799 } else {
800 rcEnc->setSyncImpl(SYNC_IMPL_NONE);
801 }
802 #endif
803 }
804
queryAndSetDmaImpl(ExtendedRCEncoderContext * rcEnc)805 void HostConnection::queryAndSetDmaImpl(ExtendedRCEncoderContext *rcEnc) {
806 std::string glExtensions = queryGLExtensions(rcEnc);
807 #if PLATFORM_SDK_VERSION <= 16 || (!defined(__i386__) && !defined(__x86_64__))
808 rcEnc->setDmaImpl(DMA_IMPL_NONE);
809 #else
810 if (glExtensions.find(kDmaExtStr_v1) != std::string::npos) {
811 rcEnc->setDmaImpl(DMA_IMPL_v1);
812 } else {
813 rcEnc->setDmaImpl(DMA_IMPL_NONE);
814 }
815 #endif
816 }
817
queryAndSetGLESMaxVersion(ExtendedRCEncoderContext * rcEnc)818 void HostConnection::queryAndSetGLESMaxVersion(ExtendedRCEncoderContext* rcEnc) {
819 std::string glExtensions = queryGLExtensions(rcEnc);
820 if (glExtensions.find(kGLESMaxVersion_2) != std::string::npos) {
821 rcEnc->setGLESMaxVersion(GLES_MAX_VERSION_2);
822 } else if (glExtensions.find(kGLESMaxVersion_3_0) != std::string::npos) {
823 rcEnc->setGLESMaxVersion(GLES_MAX_VERSION_3_0);
824 } else if (glExtensions.find(kGLESMaxVersion_3_1) != std::string::npos) {
825 rcEnc->setGLESMaxVersion(GLES_MAX_VERSION_3_1);
826 } else if (glExtensions.find(kGLESMaxVersion_3_2) != std::string::npos) {
827 rcEnc->setGLESMaxVersion(GLES_MAX_VERSION_3_2);
828 } else {
829 ALOGW("Unrecognized GLES max version string in extensions: %s",
830 glExtensions.c_str());
831 rcEnc->setGLESMaxVersion(GLES_MAX_VERSION_2);
832 }
833 }
834
queryAndSetNoErrorState(ExtendedRCEncoderContext * rcEnc)835 void HostConnection::queryAndSetNoErrorState(ExtendedRCEncoderContext* rcEnc) {
836 std::string glExtensions = queryGLExtensions(rcEnc);
837 if (glExtensions.find(kGLESUseHostError) != std::string::npos) {
838 m_noHostError = false;
839 }
840 }
841
queryAndSetDirectMemSupport(ExtendedRCEncoderContext * rcEnc)842 void HostConnection::queryAndSetDirectMemSupport(ExtendedRCEncoderContext* rcEnc) {
843 std::string glExtensions = queryGLExtensions(rcEnc);
844 if (glExtensions.find(kGLDirectMem) != std::string::npos) {
845 rcEnc->featureInfo()->hasDirectMem = true;
846 }
847 }
848
queryAndSetVulkanSupport(ExtendedRCEncoderContext * rcEnc)849 void HostConnection::queryAndSetVulkanSupport(ExtendedRCEncoderContext* rcEnc) {
850 std::string glExtensions = queryGLExtensions(rcEnc);
851 if (glExtensions.find(kVulkan) != std::string::npos) {
852 rcEnc->featureInfo()->hasVulkan = true;
853 }
854 }
855
queryAndSetDeferredVulkanCommandsSupport(ExtendedRCEncoderContext * rcEnc)856 void HostConnection::queryAndSetDeferredVulkanCommandsSupport(ExtendedRCEncoderContext* rcEnc) {
857 std::string glExtensions = queryGLExtensions(rcEnc);
858 if (glExtensions.find(kDeferredVulkanCommands) != std::string::npos) {
859 rcEnc->featureInfo()->hasDeferredVulkanCommands = true;
860 }
861 }
862
queryAndSetVulkanNullOptionalStringsSupport(ExtendedRCEncoderContext * rcEnc)863 void HostConnection::queryAndSetVulkanNullOptionalStringsSupport(ExtendedRCEncoderContext* rcEnc) {
864 std::string glExtensions = queryGLExtensions(rcEnc);
865 if (glExtensions.find(kVulkanNullOptionalStrings) != std::string::npos) {
866 rcEnc->featureInfo()->hasVulkanNullOptionalStrings = true;
867 }
868 }
869
queryAndSetVulkanCreateResourcesWithRequirementsSupport(ExtendedRCEncoderContext * rcEnc)870 void HostConnection::queryAndSetVulkanCreateResourcesWithRequirementsSupport(ExtendedRCEncoderContext* rcEnc) {
871 std::string glExtensions = queryGLExtensions(rcEnc);
872 if (glExtensions.find(kVulkanCreateResourcesWithRequirements) != std::string::npos) {
873 rcEnc->featureInfo()->hasVulkanCreateResourcesWithRequirements = true;
874 }
875 }
876
queryAndSetVulkanIgnoredHandles(ExtendedRCEncoderContext * rcEnc)877 void HostConnection::queryAndSetVulkanIgnoredHandles(ExtendedRCEncoderContext* rcEnc) {
878 std::string glExtensions = queryGLExtensions(rcEnc);
879 if (glExtensions.find(kVulkanIgnoredHandles) != std::string::npos) {
880 rcEnc->featureInfo()->hasVulkanIgnoredHandles = true;
881 }
882 }
883
queryAndSetYUVCache(ExtendedRCEncoderContext * rcEnc)884 void HostConnection::queryAndSetYUVCache(ExtendedRCEncoderContext* rcEnc) {
885 std::string glExtensions = queryGLExtensions(rcEnc);
886 if (glExtensions.find(kYUVCache) != std::string::npos) {
887 rcEnc->featureInfo()->hasYUVCache = true;
888 }
889 }
890
queryAndSetAsyncUnmapBuffer(ExtendedRCEncoderContext * rcEnc)891 void HostConnection::queryAndSetAsyncUnmapBuffer(ExtendedRCEncoderContext* rcEnc) {
892 std::string glExtensions = queryGLExtensions(rcEnc);
893 if (glExtensions.find(kAsyncUnmapBuffer) != std::string::npos) {
894 rcEnc->featureInfo()->hasAsyncUnmapBuffer = true;
895 }
896 }
897
queryAndSetVirtioGpuNext(ExtendedRCEncoderContext * rcEnc)898 void HostConnection::queryAndSetVirtioGpuNext(ExtendedRCEncoderContext* rcEnc) {
899 std::string glExtensions = queryGLExtensions(rcEnc);
900 if (glExtensions.find(kVirtioGpuNext) != std::string::npos) {
901 rcEnc->featureInfo()->hasVirtioGpuNext = true;
902 }
903 }
904
queryHasSharedSlotsHostMemoryAllocator(ExtendedRCEncoderContext * rcEnc)905 void HostConnection::queryHasSharedSlotsHostMemoryAllocator(ExtendedRCEncoderContext *rcEnc) {
906 const std::string& glExtensions = queryGLExtensions(rcEnc);
907 if (glExtensions.find(kHasSharedSlotsHostMemoryAllocator) != std::string::npos) {
908 rcEnc->featureInfo()->hasSharedSlotsHostMemoryAllocator = true;
909 }
910 }
911
queryAndSetVulkanFreeMemorySync(ExtendedRCEncoderContext * rcEnc)912 void HostConnection::queryAndSetVulkanFreeMemorySync(ExtendedRCEncoderContext *rcEnc) {
913 const std::string& glExtensions = queryGLExtensions(rcEnc);
914 if (glExtensions.find(kVulkanFreeMemorySync) != std::string::npos) {
915 rcEnc->featureInfo()->hasVulkanFreeMemorySync = true;
916 }
917 }
918
queryAndSetVirtioGpuNativeSync(ExtendedRCEncoderContext * rcEnc)919 void HostConnection::queryAndSetVirtioGpuNativeSync(ExtendedRCEncoderContext* rcEnc) {
920 std::string glExtensions = queryGLExtensions(rcEnc);
921 if (glExtensions.find(kVirtioGpuNativeSync) != std::string::npos) {
922 rcEnc->featureInfo()->hasVirtioGpuNativeSync = true;
923 }
924 }
925
queryAndSetVulkanShaderFloat16Int8Support(ExtendedRCEncoderContext * rcEnc)926 void HostConnection::queryAndSetVulkanShaderFloat16Int8Support(ExtendedRCEncoderContext* rcEnc) {
927 std::string glExtensions = queryGLExtensions(rcEnc);
928 if (glExtensions.find(kVulkanShaderFloat16Int8) != std::string::npos) {
929 rcEnc->featureInfo()->hasVulkanShaderFloat16Int8 = true;
930 }
931 }
932
queryAndSetVulkanAsyncQueueSubmitSupport(ExtendedRCEncoderContext * rcEnc)933 void HostConnection::queryAndSetVulkanAsyncQueueSubmitSupport(ExtendedRCEncoderContext* rcEnc) {
934 std::string glExtensions = queryGLExtensions(rcEnc);
935 if (glExtensions.find(kVulkanAsyncQueueSubmit) != std::string::npos) {
936 rcEnc->featureInfo()->hasVulkanAsyncQueueSubmit = true;
937 }
938 }
939
queryAndSetHostSideTracingSupport(ExtendedRCEncoderContext * rcEnc)940 void HostConnection::queryAndSetHostSideTracingSupport(ExtendedRCEncoderContext* rcEnc) {
941 std::string glExtensions = queryGLExtensions(rcEnc);
942 if (glExtensions.find(kHostSideTracing) != std::string::npos) {
943 rcEnc->featureInfo()->hasHostSideTracing = true;
944 }
945 }
946
queryAndSetAsyncFrameCommands(ExtendedRCEncoderContext * rcEnc)947 void HostConnection::queryAndSetAsyncFrameCommands(ExtendedRCEncoderContext* rcEnc) {
948 std::string glExtensions = queryGLExtensions(rcEnc);
949 if (glExtensions.find(kAsyncFrameCommands) != std::string::npos) {
950 rcEnc->featureInfo()->hasAsyncFrameCommands = true;
951 }
952 }
953
queryAndSetVulkanQueueSubmitWithCommandsSupport(ExtendedRCEncoderContext * rcEnc)954 void HostConnection::queryAndSetVulkanQueueSubmitWithCommandsSupport(ExtendedRCEncoderContext* rcEnc) {
955 std::string glExtensions = queryGLExtensions(rcEnc);
956 if (glExtensions.find(kVulkanQueueSubmitWithCommands) != std::string::npos) {
957 rcEnc->featureInfo()->hasVulkanQueueSubmitWithCommands = true;
958 }
959 }
960
queryAndSetVulkanBatchedDescriptorSetUpdateSupport(ExtendedRCEncoderContext * rcEnc)961 void HostConnection::queryAndSetVulkanBatchedDescriptorSetUpdateSupport(ExtendedRCEncoderContext* rcEnc) {
962 std::string glExtensions = queryGLExtensions(rcEnc);
963 if (glExtensions.find(kVulkanBatchedDescriptorSetUpdate) != std::string::npos) {
964 rcEnc->featureInfo()->hasVulkanBatchedDescriptorSetUpdate = true;
965 }
966 }
967
queryAndSetSyncBufferData(ExtendedRCEncoderContext * rcEnc)968 void HostConnection::queryAndSetSyncBufferData(ExtendedRCEncoderContext* rcEnc) {
969 std::string glExtensions = queryGLExtensions(rcEnc);
970 if (glExtensions.find(kSyncBufferData) != std::string::npos) {
971 rcEnc->featureInfo()->hasSyncBufferData = true;
972 }
973 }
974
queryAndSetReadColorBufferDma(ExtendedRCEncoderContext * rcEnc)975 void HostConnection::queryAndSetReadColorBufferDma(ExtendedRCEncoderContext* rcEnc) {
976 std::string glExtensions = queryGLExtensions(rcEnc);
977 if (glExtensions.find(kReadColorBufferDma) != std::string::npos) {
978 rcEnc->featureInfo()->hasReadColorBufferDma = true;
979 }
980 }
981
queryVersion(ExtendedRCEncoderContext * rcEnc)982 GLint HostConnection::queryVersion(ExtendedRCEncoderContext* rcEnc) {
983 GLint version = m_rcEnc->rcGetRendererVersion(m_rcEnc.get());
984 return version;
985 }
986