1 /*
2 * Copyright (C) 2019 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 //#define LOG_NDEBUG 0
18 #define LOG_TAG "FrameCaptureLayer"
19
20 #include <include/FrameCaptureLayer.h>
21 #include <media/stagefright/FrameCaptureProcessor.h>
22 #include <gui/BufferQueue.h>
23 #include <gui/GLConsumer.h>
24 #include <gui/IGraphicBufferConsumer.h>
25 #include <gui/Surface.h>
26 #include <media/stagefright/foundation/ADebug.h>
27 #include <media/stagefright/foundation/AMessage.h>
28 #include <media/stagefright/MediaErrors.h>
29 #include <renderengine/RenderEngine.h>
30 #include <utils/Log.h>
31
32 namespace android {
33
// How long acquireBuffer() blocks waiting for the next frame (100ms).
static const int64_t kAcquireBufferTimeoutNs = 100000000LL;
// Fallback luminance values (in nits) used when the buffer carries no
// SMPTE 2086 / CTA-861.3 HDR metadata.
static constexpr float kDefaultMaxMasteringLuminance = 1000.0;
static constexpr float kDefaultMaxContentLuminance = 1000.0;
37
translateDataspace(ui::Dataspace dataspace)38 ui::Dataspace translateDataspace(ui::Dataspace dataspace) {
39 ui::Dataspace updatedDataspace = dataspace;
40 // translate legacy dataspaces to modern dataspaces
41 switch (dataspace) {
42 case ui::Dataspace::SRGB:
43 updatedDataspace = ui::Dataspace::V0_SRGB;
44 break;
45 case ui::Dataspace::SRGB_LINEAR:
46 updatedDataspace = ui::Dataspace::V0_SRGB_LINEAR;
47 break;
48 case ui::Dataspace::JFIF:
49 updatedDataspace = ui::Dataspace::V0_JFIF;
50 break;
51 case ui::Dataspace::BT601_625:
52 updatedDataspace = ui::Dataspace::V0_BT601_625;
53 break;
54 case ui::Dataspace::BT601_525:
55 updatedDataspace = ui::Dataspace::V0_BT601_525;
56 break;
57 case ui::Dataspace::BT709:
58 updatedDataspace = ui::Dataspace::V0_BT709;
59 break;
60 default:
61 break;
62 }
63
64 return updatedDataspace;
65 }
66
isHdrY410(const BufferItem & bi)67 bool isHdrY410(const BufferItem &bi) {
68 ui::Dataspace dataspace = translateDataspace(static_cast<ui::Dataspace>(bi.mDataSpace));
69 // pixel format is HDR Y410 masquerading as RGBA_1010102
70 return ((dataspace == ui::Dataspace::BT2020_ITU_PQ ||
71 dataspace == ui::Dataspace::BT2020_ITU_HLG) &&
72 bi.mGraphicBuffer->getPixelFormat() == HAL_PIXEL_FORMAT_RGBA_1010102);
73 }
74
75 struct FrameCaptureLayer::BufferLayer : public FrameCaptureProcessor::Layer {
BufferLayerandroid::FrameCaptureLayer::BufferLayer76 BufferLayer(const BufferItem &bi) : mBufferItem(bi) {}
77 void getLayerSettings(
78 const Rect &sourceCrop, uint32_t textureName,
79 renderengine::LayerSettings *layerSettings) override;
80 BufferItem mBufferItem;
81 };
82
getLayerSettings(const Rect & sourceCrop,uint32_t textureName,renderengine::LayerSettings * layerSettings)83 void FrameCaptureLayer::BufferLayer::getLayerSettings(
84 const Rect &sourceCrop, uint32_t textureName,
85 renderengine::LayerSettings *layerSettings) {
86 layerSettings->geometry.boundaries = sourceCrop.toFloatRect();
87 layerSettings->alpha = 1.0f;
88
89 layerSettings->sourceDataspace = translateDataspace(
90 static_cast<ui::Dataspace>(mBufferItem.mDataSpace));
91
92 // from BufferLayer
93 layerSettings->source.buffer.buffer = mBufferItem.mGraphicBuffer;
94 layerSettings->source.buffer.isOpaque = true;
95 layerSettings->source.buffer.fence = mBufferItem.mFence;
96 layerSettings->source.buffer.textureName = textureName;
97 layerSettings->source.buffer.usePremultipliedAlpha = false;
98 layerSettings->source.buffer.isY410BT2020 = isHdrY410(mBufferItem);
99 bool hasSmpte2086 = mBufferItem.mHdrMetadata.validTypes & HdrMetadata::SMPTE2086;
100 bool hasCta861_3 = mBufferItem.mHdrMetadata.validTypes & HdrMetadata::CTA861_3;
101 layerSettings->source.buffer.maxMasteringLuminance = hasSmpte2086
102 ? mBufferItem.mHdrMetadata.smpte2086.maxLuminance
103 : kDefaultMaxMasteringLuminance;
104 layerSettings->source.buffer.maxContentLuminance = hasCta861_3
105 ? mBufferItem.mHdrMetadata.cta8613.maxContentLightLevel
106 : kDefaultMaxContentLuminance;
107
108 // Set filtering to false since the capture itself doesn't involve
109 // any scaling, metadata retriever JNI is scaling the bitmap if
110 // display size is different from decoded size. If that scaling
111 // needs to be handled by server side, consider enable this based
112 // display size vs decoded size.
113 const bool useFiltering = false;
114 layerSettings->source.buffer.useTextureFiltering = useFiltering;
115
116 float textureMatrix[16];
117 GLConsumer::computeTransformMatrix(
118 textureMatrix, mBufferItem.mGraphicBuffer,
119 mBufferItem.mCrop, mBufferItem.mTransform, useFiltering);
120
121 // Flip y-coordinates because GLConsumer expects OpenGL convention.
122 mat4 tr = mat4::translate(vec4(.5, .5, 0, 1)) * mat4::scale(vec4(1, -1, 1, 1)) *
123 mat4::translate(vec4(-.5, -.5, 0, 1));
124
125 layerSettings->source.buffer.textureTransform =
126 mat4(static_cast<const float*>(textureMatrix)) * tr;
127 }
128
init()129 status_t FrameCaptureLayer::init() {
130 if (FrameCaptureProcessor::getInstance() == nullptr) {
131 ALOGE("failed to get capture processor");
132 return ERROR_UNSUPPORTED;
133 }
134
135 // Mimic surfaceflinger's BufferQueueLayer::onFirstRef() to create a
136 // BufferQueue for encoder output
137 sp<IGraphicBufferProducer> producer;
138 sp<IGraphicBufferConsumer> consumer;
139
140 BufferQueue::createBufferQueue(&producer, &consumer);
141 // We don't need HW_COMPOSER usage since we're not using hwc to compose.
142 // The buffer is only used as a GL texture.
143 consumer->setConsumerUsageBits(GraphicBuffer::USAGE_HW_TEXTURE);
144 consumer->setConsumerName(String8("FrameDecoder"));
145
146 status_t err = consumer->consumerConnect(
147 new BufferQueue::ProxyConsumerListener(this), false);
148 if (NO_ERROR != err) {
149 ALOGE("Error connecting to BufferQueue: %s (%d)", strerror(-err), err);
150 return err;
151 }
152
153 mConsumer = consumer;
154 mSurface = new Surface(producer);
155
156 return OK;
157 }
158
capture(const ui::PixelFormat reqPixelFormat,const Rect & sourceCrop,sp<GraphicBuffer> * outBuffer)159 status_t FrameCaptureLayer::capture(const ui::PixelFormat reqPixelFormat,
160 const Rect &sourceCrop, sp<GraphicBuffer> *outBuffer) {
161 ALOGV("capture: reqPixelFormat %d, crop {%d, %d, %d, %d}", reqPixelFormat,
162 sourceCrop.left, sourceCrop.top, sourceCrop.right, sourceCrop.bottom);
163
164 BufferItem bi;
165 status_t err = acquireBuffer(&bi);
166 if (err != OK) {
167 return err;
168 }
169
170 // create out buffer
171 const uint32_t usage =
172 GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_SW_WRITE_OFTEN |
173 GRALLOC_USAGE_HW_RENDER | GRALLOC_USAGE_HW_TEXTURE;
174 sp<GraphicBuffer> buffer = new GraphicBuffer(
175 sourceCrop.getWidth(), sourceCrop.getHeight(),
176 static_cast<android_pixel_format>(reqPixelFormat),
177 1, usage, std::string("thumbnail"));
178
179 err = FrameCaptureProcessor::getInstance()->capture(
180 new BufferLayer(bi), sourceCrop, buffer);
181 if (err == OK) {
182 *outBuffer = buffer;
183 }
184
185 (void)releaseBuffer(bi);
186 return err;
187 }
188
FrameCaptureLayer()189 FrameCaptureLayer::FrameCaptureLayer() : mFrameAvailable(false) {}
190
// ConsumerListener callback: the producer queued a new frame. Record the
// fact and wake any thread blocked in acquireBuffer().
void FrameCaptureLayer::onFrameAvailable(const BufferItem& /*item*/) {
    ALOGV("onFrameAvailable");
    Mutex::Autolock _lock(mLock);

    mFrameAvailable = true;
    mCondition.signal();
}
198
onBuffersReleased()199 void FrameCaptureLayer::onBuffersReleased() {
200 ALOGV("onBuffersReleased");
201 Mutex::Autolock _lock(mLock);
202
203 uint64_t mask = 0;
204 mConsumer->getReleasedBuffers(&mask);
205 for (int i = 0; i < BufferQueue::NUM_BUFFER_SLOTS; i++) {
206 if (mask & (1ULL << i)) {
207 mSlotToBufferMap[i] = nullptr;
208 }
209 }
210 }
211
// ConsumerListener callback: sideband streams are not used by this
// consumer, so this is a no-op beyond logging.
void FrameCaptureLayer::onSidebandStreamChanged() {
    ALOGV("onSidebandStreamChanged");
}
215
acquireBuffer(BufferItem * bi)216 status_t FrameCaptureLayer::acquireBuffer(BufferItem *bi) {
217 ALOGV("acquireBuffer");
218 Mutex::Autolock _lock(mLock);
219
220 if (!mFrameAvailable) {
221 // The output buffer is already released to the codec at this point.
222 // Use a small timeout of 100ms in case the buffer hasn't arrived
223 // at the consumer end of the output surface yet.
224 if (mCondition.waitRelative(mLock, kAcquireBufferTimeoutNs) != OK) {
225 ALOGE("wait for buffer timed out");
226 return TIMED_OUT;
227 }
228 }
229 mFrameAvailable = false;
230
231 status_t err = mConsumer->acquireBuffer(bi, 0);
232 if (err != OK) {
233 ALOGE("failed to acquire buffer!");
234 return err;
235 }
236
237 if (bi->mGraphicBuffer != nullptr) {
238 mSlotToBufferMap[bi->mSlot] = bi->mGraphicBuffer;
239 } else {
240 bi->mGraphicBuffer = mSlotToBufferMap[bi->mSlot];
241 }
242
243 if (bi->mGraphicBuffer == nullptr) {
244 ALOGE("acquired null buffer!");
245 return BAD_VALUE;
246 }
247 return OK;
248 }
249
releaseBuffer(const BufferItem & bi)250 status_t FrameCaptureLayer::releaseBuffer(const BufferItem &bi) {
251 ALOGV("releaseBuffer");
252 Mutex::Autolock _lock(mLock);
253
254 return mConsumer->releaseBuffer(bi.mSlot, bi.mFrameNumber,
255 EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, bi.mFence);
256 }
257
258 } // namespace android
259