1 /*
2  * Copyright 2012, The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *     http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 //#define LOG_NDEBUG 0
18 #include "hidl/HidlSupport.h"
19 #define LOG_TAG "MediaCodec"
20 #define ATRACE_TAG  ATRACE_TAG_VIDEO
21 #include <utils/Log.h>
22 #include <utils/Trace.h>
23 
24 #include <dlfcn.h>
25 #include <inttypes.h>
26 #include <future>
27 #include <random>
28 #include <set>
29 #include <string>
30 
31 #include <C2Buffer.h>
32 
33 #include "include/SoftwareRenderer.h"
34 
35 #include <android_media_codec.h>
36 #include <android_media_tv_flags.h>
37 
38 #include <android/api-level.h>
39 #include <android/content/pm/IPackageManagerNative.h>
40 #include <android/hardware/cas/native/1.0/IDescrambler.h>
41 #include <android/hardware/media/omx/1.0/IGraphicBufferSource.h>
42 #include <android/media/quality/IMediaQualityManager.h>
43 
44 #include <aidl/android/media/BnResourceManagerClient.h>
45 #include <aidl/android/media/IResourceManagerService.h>
46 #include <android/binder_ibinder.h>
47 #include <android/binder_manager.h>
48 #include <android/dlext.h>
49 #include <android-base/stringprintf.h>
50 #include <binder/IMemory.h>
51 #include <binder/IServiceManager.h>
52 #include <binder/MemoryDealer.h>
53 #include <com_android_graphics_libgui_flags.h>
54 #include <cutils/properties.h>
55 #include <gui/BufferItem.h>
56 #include <gui/BufferItemConsumer.h>
57 #include <gui/BufferQueue.h>
58 #include <gui/Surface.h>
59 #include <hidlmemory/FrameworkUtils.h>
60 #include <mediadrm/ICrypto.h>
61 #include <media/IOMX.h>
62 #include <media/MediaCodecBuffer.h>
63 #include <media/MediaCodecInfo.h>
64 #include <media/MediaMetricsItem.h>
65 #include <media/MediaResource.h>
66 #include <media/NdkMediaErrorPriv.h>
67 #include <media/NdkMediaFormat.h>
68 #include <media/NdkMediaFormatPriv.h>
69 #include <media/formatshaper/FormatShaper.h>
70 #include <media/stagefright/foundation/ABuffer.h>
71 #include <media/stagefright/foundation/ADebug.h>
72 #include <media/stagefright/foundation/AMessage.h>
73 #include <media/stagefright/foundation/AString.h>
74 #include <media/stagefright/foundation/AUtils.h>
75 #include <media/stagefright/foundation/avc_utils.h>
76 #include <media/stagefright/foundation/hexdump.h>
77 #include <media/stagefright/ACodec.h>
78 #include <media/stagefright/BatteryChecker.h>
79 #include <media/stagefright/BufferProducerWrapper.h>
80 #include <media/stagefright/CCodec.h>
81 #include <media/stagefright/CryptoAsync.h>
82 #include <media/stagefright/MediaCodec.h>
83 #include <media/stagefright/MediaCodecConstants.h>
84 #include <media/stagefright/MediaCodecList.h>
85 #include <media/stagefright/MediaDefs.h>
86 #include <media/stagefright/MediaErrors.h>
87 #include <media/stagefright/OMXClient.h>
88 #include <media/stagefright/PersistentSurface.h>
89 #include <media/stagefright/RenderedFrameInfo.h>
90 #include <media/stagefright/SurfaceUtils.h>
91 #include <nativeloader/dlext_namespaces.h>
92 #include <private/android_filesystem_config.h>
93 #include <server_configurable_flags/get_flags.h>
94 #include <utils/Singleton.h>
95 
96 namespace android {
97 
98 using Status = ::ndk::ScopedAStatus;
99 using aidl::android::media::BnResourceManagerClient;
100 using aidl::android::media::ClientInfoParcel;
101 using aidl::android::media::IResourceManagerClient;
102 using aidl::android::media::IResourceManagerService;
103 using media::quality::IMediaQualityManager;
104 using server_configurable_flags::GetServerConfigurableFlag;
105 using FreezeEvent = VideoRenderQualityTracker::FreezeEvent;
106 using JudderEvent = VideoRenderQualityTracker::JudderEvent;
107 
108 // key for media statistics
109 static const char *kCodecKeyName = "codec";
110 // attrs for media statistics
111 // NB: these are matched with public Java API constants defined
112 // in frameworks/base/media/java/android/media/MediaCodec.java
113 // These must be kept synchronized with the constants there.
114 static const char *kCodecLogSessionId = "android.media.mediacodec.log-session-id";
115 static const char *kCodecCodec = "android.media.mediacodec.codec";  /* e.g. OMX.google.aac.decoder */
116 static const char *kCodecId = "android.media.mediacodec.id";
117 static const char *kCodecMime = "android.media.mediacodec.mime";    /* e.g. audio/mime */
118 static const char *kCodecMode = "android.media.mediacodec.mode";    /* audio, video */
119 static const char *kCodecModeVideo = "video";            /* values returned for kCodecMode */
120 static const char *kCodecModeAudio = "audio";
121 static const char *kCodecModeImage = "image";
122 static const char *kCodecModeUnknown = "unknown";
123 static const char *kCodecEncoder = "android.media.mediacodec.encoder"; /* 0,1 */
124 static const char *kCodecHardware = "android.media.mediacodec.hardware"; /* 0,1 */
125 static const char *kCodecSecure = "android.media.mediacodec.secure";   /* 0, 1 */
126 static const char *kCodecTunneled = "android.media.mediacodec.tunneled"; /* 0,1 */
127 static const char *kCodecWidth = "android.media.mediacodec.width";     /* 0..n */
128 static const char *kCodecHeight = "android.media.mediacodec.height";   /* 0..n */
129 static const char *kCodecRotation = "android.media.mediacodec.rotation-degrees";  /* 0/90/180/270 */
130 static const char *kCodecColorFormat = "android.media.mediacodec.color-format";
131 static const char *kCodecFrameRate = "android.media.mediacodec.frame-rate";
132 static const char *kCodecCaptureRate = "android.media.mediacodec.capture-rate";
133 static const char *kCodecOperatingRate = "android.media.mediacodec.operating-rate";
134 static const char *kCodecPriority = "android.media.mediacodec.priority";
135 
136 // Min/Max QP before shaping
137 static const char *kCodecOriginalVideoQPIMin = "android.media.mediacodec.original-video-qp-i-min";
138 static const char *kCodecOriginalVideoQPIMax = "android.media.mediacodec.original-video-qp-i-max";
139 static const char *kCodecOriginalVideoQPPMin = "android.media.mediacodec.original-video-qp-p-min";
140 static const char *kCodecOriginalVideoQPPMax = "android.media.mediacodec.original-video-qp-p-max";
141 static const char *kCodecOriginalVideoQPBMin = "android.media.mediacodec.original-video-qp-b-min";
142 static const char *kCodecOriginalVideoQPBMax = "android.media.mediacodec.original-video-qp-b-max";
143 
144 // Min/Max QP after shaping
145 static const char *kCodecRequestedVideoQPIMin = "android.media.mediacodec.video-qp-i-min";
146 static const char *kCodecRequestedVideoQPIMax = "android.media.mediacodec.video-qp-i-max";
147 static const char *kCodecRequestedVideoQPPMin = "android.media.mediacodec.video-qp-p-min";
148 static const char *kCodecRequestedVideoQPPMax = "android.media.mediacodec.video-qp-p-max";
149 static const char *kCodecRequestedVideoQPBMin = "android.media.mediacodec.video-qp-b-min";
150 static const char *kCodecRequestedVideoQPBMax = "android.media.mediacodec.video-qp-b-max";
151 
152 // NB: These are not yet exposed as public Java API constants.
153 static const char *kCodecCrypto = "android.media.mediacodec.crypto";   /* 0,1 */
154 static const char *kCodecProfile = "android.media.mediacodec.profile";  /* 0..n */
155 static const char *kCodecLevel = "android.media.mediacodec.level";  /* 0..n */
156 static const char *kCodecBitrateMode = "android.media.mediacodec.bitrate_mode";  /* CQ/VBR/CBR */
157 static const char *kCodecBitrate = "android.media.mediacodec.bitrate";  /* 0..n */
158 static const char *kCodecOriginalBitrate = "android.media.mediacodec.original.bitrate";  /* 0..n */
159 static const char *kCodecMaxWidth = "android.media.mediacodec.maxwidth";  /* 0..n */
160 static const char *kCodecMaxHeight = "android.media.mediacodec.maxheight";  /* 0..n */
161 static const char *kCodecError = "android.media.mediacodec.errcode";
162 static const char *kCodecLifetimeMs = "android.media.mediacodec.lifetimeMs";   /* 0..n ms*/
163 static const char *kCodecErrorState = "android.media.mediacodec.errstate";
164 static const char *kCodecLatencyMax = "android.media.mediacodec.latency.max";   /* in us */
165 static const char *kCodecLatencyMin = "android.media.mediacodec.latency.min";   /* in us */
166 static const char *kCodecLatencyAvg = "android.media.mediacodec.latency.avg";   /* in us */
167 static const char *kCodecLatencyCount = "android.media.mediacodec.latency.n";
168 static const char *kCodecLatencyHist = "android.media.mediacodec.latency.hist"; /* in us */
169 static const char *kCodecLatencyUnknown = "android.media.mediacodec.latency.unknown";
170 static const char *kCodecQueueSecureInputBufferError = "android.media.mediacodec.queueSecureInputBufferError";
171 static const char *kCodecQueueInputBufferError = "android.media.mediacodec.queueInputBufferError";
172 static const char *kCodecComponentColorFormat = "android.media.mediacodec.component-color-format";
173 
174 static const char *kCodecNumLowLatencyModeOn = "android.media.mediacodec.low-latency.on";  /* 0..n */
175 static const char *kCodecNumLowLatencyModeOff = "android.media.mediacodec.low-latency.off";  /* 0..n */
176 static const char *kCodecFirstFrameIndexLowLatencyModeOn = "android.media.mediacodec.low-latency.first-frame";  /* 0..n */
177 static const char *kCodecChannelCount = "android.media.mediacodec.channelCount";
178 static const char *kCodecSampleRate = "android.media.mediacodec.sampleRate";
179 static const char *kCodecVideoEncodedBytes = "android.media.mediacodec.vencode.bytes";
180 static const char *kCodecVideoEncodedFrames = "android.media.mediacodec.vencode.frames";
181 static const char *kCodecVideoInputBytes = "android.media.mediacodec.video.input.bytes";
182 static const char *kCodecVideoInputFrames = "android.media.mediacodec.video.input.frames";
183 static const char *kCodecVideoEncodedDurationUs = "android.media.mediacodec.vencode.durationUs";
184 // HDR metrics
185 static const char *kCodecConfigColorStandard = "android.media.mediacodec.config-color-standard";
186 static const char *kCodecConfigColorRange = "android.media.mediacodec.config-color-range";
187 static const char *kCodecConfigColorTransfer = "android.media.mediacodec.config-color-transfer";
188 static const char *kCodecParsedColorStandard = "android.media.mediacodec.parsed-color-standard";
189 static const char *kCodecParsedColorRange = "android.media.mediacodec.parsed-color-range";
190 static const char *kCodecParsedColorTransfer = "android.media.mediacodec.parsed-color-transfer";
191 static const char *kCodecHdrStaticInfo = "android.media.mediacodec.hdr-static-info";
192 static const char *kCodecHdr10PlusInfo = "android.media.mediacodec.hdr10-plus-info";
193 static const char *kCodecHdrFormat = "android.media.mediacodec.hdr-format";
194 // array/sync/async/block modes
195 static const char *kCodecArrayMode = "android.media.mediacodec.array-mode";
196 static const char *kCodecOperationMode = "android.media.mediacodec.operation-mode";
197 static const char *kCodecOutputSurface = "android.media.mediacodec.output-surface";
198 // max size configured by the app
199 static const char *kCodecAppMaxInputSize = "android.media.mediacodec.app-max-input-size";
200 // max size actually used
201 static const char *kCodecUsedMaxInputSize = "android.media.mediacodec.used-max-input-size";
202 // max size suggested by the codec
203 static const char *kCodecCodecMaxInputSize = "android.media.mediacodec.codec-max-input-size";
204 static const char *kCodecFlushCount = "android.media.mediacodec.flush-count";
205 static const char *kCodecSetSurfaceCount = "android.media.mediacodec.set-surface-count";
206 static const char *kCodecResolutionChangeCount = "android.media.mediacodec.resolution-change-count";
207 
208 // the kCodecRecent* fields appear only in getMetrics() results
209 static const char *kCodecRecentLatencyMax = "android.media.mediacodec.recent.max";      /* in us */
210 static const char *kCodecRecentLatencyMin = "android.media.mediacodec.recent.min";      /* in us */
211 static const char *kCodecRecentLatencyAvg = "android.media.mediacodec.recent.avg";      /* in us */
212 static const char *kCodecRecentLatencyCount = "android.media.mediacodec.recent.n";
213 static const char *kCodecRecentLatencyHist = "android.media.mediacodec.recent.hist";    /* in us */
214 
215 /* -1: shaper disabled
216    >=0: number of fields changed */
217 static const char *kCodecShapingEnhanced = "android.media.mediacodec.shaped";
218 
219 // Render metrics
220 static const char *kCodecPlaybackDurationSec = "android.media.mediacodec.playback-duration-sec";
221 static const char *kCodecFirstRenderTimeUs = "android.media.mediacodec.first-render-time-us";
222 static const char *kCodecLastRenderTimeUs = "android.media.mediacodec.last-render-time-us";
223 static const char *kCodecFramesReleased = "android.media.mediacodec.frames-released";
224 static const char *kCodecFramesRendered = "android.media.mediacodec.frames-rendered";
225 static const char *kCodecFramesDropped = "android.media.mediacodec.frames-dropped";
226 static const char *kCodecFramesSkipped = "android.media.mediacodec.frames-skipped";
227 static const char *kCodecFramerateContent = "android.media.mediacodec.framerate-content";
228 static const char *kCodecFramerateDesired = "android.media.mediacodec.framerate-desired";
229 static const char *kCodecFramerateActual = "android.media.mediacodec.framerate-actual";
230 // Freeze
231 static const char *kCodecFreezeCount = "android.media.mediacodec.freeze-count";
232 static const char *kCodecFreezeScore = "android.media.mediacodec.freeze-score";
233 static const char *kCodecFreezeRate = "android.media.mediacodec.freeze-rate";
234 static const char *kCodecFreezeDurationMsAvg = "android.media.mediacodec.freeze-duration-ms-avg";
235 static const char *kCodecFreezeDurationMsMax = "android.media.mediacodec.freeze-duration-ms-max";
236 static const char *kCodecFreezeDurationMsHistogram =
237         "android.media.mediacodec.freeze-duration-ms-histogram";
238 static const char *kCodecFreezeDurationMsHistogramBuckets =
239         "android.media.mediacodec.freeze-duration-ms-histogram-buckets";
240 static const char *kCodecFreezeDistanceMsAvg = "android.media.mediacodec.freeze-distance-ms-avg";
241 static const char *kCodecFreezeDistanceMsHistogram =
242         "android.media.mediacodec.freeze-distance-ms-histogram";
243 static const char *kCodecFreezeDistanceMsHistogramBuckets =
244         "android.media.mediacodec.freeze-distance-ms-histogram-buckets";
245 // Judder
246 static const char *kCodecJudderCount = "android.media.mediacodec.judder-count";
247 static const char *kCodecJudderScore = "android.media.mediacodec.judder-score";
248 static const char *kCodecJudderRate = "android.media.mediacodec.judder-rate";
249 static const char *kCodecJudderScoreAvg = "android.media.mediacodec.judder-score-avg";
250 static const char *kCodecJudderScoreMax = "android.media.mediacodec.judder-score-max";
251 static const char *kCodecJudderScoreHistogram = "android.media.mediacodec.judder-score-histogram";
252 static const char *kCodecJudderScoreHistogramBuckets =
253         "android.media.mediacodec.judder-score-histogram-buckets";
254 // Freeze event
255 static const char *kCodecFreezeEventCount = "android.media.mediacodec.freeze-event-count";
256 static const char *kFreezeEventKeyName = "videofreeze";
257 static const char *kFreezeEventInitialTimeUs = "android.media.mediacodec.freeze.initial-time-us";
258 static const char *kFreezeEventDurationMs = "android.media.mediacodec.freeze.duration-ms";
259 static const char *kFreezeEventCount = "android.media.mediacodec.freeze.count";
260 static const char *kFreezeEventAvgDurationMs = "android.media.mediacodec.freeze.avg-duration-ms";
261 static const char *kFreezeEventAvgDistanceMs = "android.media.mediacodec.freeze.avg-distance-ms";
262 static const char *kFreezeEventDetailsDurationMs =
263         "android.media.mediacodec.freeze.details-duration-ms";
264 static const char *kFreezeEventDetailsDistanceMs =
265         "android.media.mediacodec.freeze.details-distance-ms";
266 // Judder event
267 static const char *kCodecJudderEventCount = "android.media.mediacodec.judder-event-count";
268 static const char *kJudderEventKeyName = "videojudder";
269 static const char *kJudderEventInitialTimeUs = "android.media.mediacodec.judder.initial-time-us";
270 static const char *kJudderEventDurationMs = "android.media.mediacodec.judder.duration-ms";
271 static const char *kJudderEventCount = "android.media.mediacodec.judder.count";
272 static const char *kJudderEventAvgScore = "android.media.mediacodec.judder.avg-score";
273 static const char *kJudderEventAvgDistanceMs = "android.media.mediacodec.judder.avg-distance-ms";
274 static const char *kJudderEventDetailsActualDurationUs =
275         "android.media.mediacodec.judder.details-actual-duration-us";
276 static const char *kJudderEventDetailsContentDurationUs =
277         "android.media.mediacodec.judder.details-content-duration-us";
278 static const char *kJudderEventDetailsDistanceMs =
279         "android.media.mediacodec.judder.details-distance-ms";
280 
281 // XXX suppress until we get our representation right
282 static bool kEmitHistogram = false;
283 
284 typedef WrapperObject<std::vector<AccessUnitInfo>> BufferInfosWrapper;
285 
286 // Multi access unit helpers
287 static status_t generateFlagsFromAccessUnitInfo(
288         sp<AMessage> &msg, const sp<BufferInfosWrapper> &bufferInfos) {
289     msg->setInt64("timeUs", bufferInfos->value[0].mTimestamp);
290     msg->setInt32("flags", bufferInfos->value[0].mFlags);
291     // will prevent any access-unit info copy.
292     if (bufferInfos->value.size() > 1) {
293         uint32_t bufferFlags = 0;
294         uint32_t flagsInAllAU = BUFFER_FLAG_DECODE_ONLY | BUFFER_FLAG_CODEC_CONFIG;
295         uint32_t andFlags = flagsInAllAU;
296         int infoIdx = 0;
297         bool foundEndOfStream = false;
298         for ( ; infoIdx < bufferInfos->value.size() && !foundEndOfStream; ++infoIdx) {
299             bufferFlags |= bufferInfos->value[infoIdx].mFlags;
300             andFlags &= bufferInfos->value[infoIdx].mFlags;
301             if (bufferFlags & BUFFER_FLAG_END_OF_STREAM) {
302                 foundEndOfStream = true;
303             }
304         }
305         bufferFlags = bufferFlags & (andFlags | (~flagsInAllAU));
306         if (infoIdx != bufferInfos->value.size()) {
307             ALOGE("Error: incorrect access-units");
308             return -EINVAL;
309         }
310         msg->setInt32("flags", bufferFlags);
311     }
312     return OK;
313 }
314 
315 static int64_t getId(IResourceManagerClient const * client) {
316     return (int64_t) client;
317 }
318 
319 static int64_t getId(const std::shared_ptr<IResourceManagerClient> &client) {
320     return getId(client.get());
321 }
322 
323 static bool isResourceError(status_t err) {
324     return (err == NO_MEMORY);
325 }
326 
327 static bool areRenderMetricsEnabled() {
328     std::string v = GetServerConfigurableFlag("media_native", "render_metrics_enabled", "false");
329     return v == "true";
330 }
331 
332 static const int kMaxRetry = 2;
333 static const int kMaxReclaimWaitTimeInUs = 500000;  // 0.5s
334 static const int kNumBuffersAlign = 16;
335 
336 static const C2MemoryUsage kDefaultReadWriteUsage{
337     C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE};
338 
339 ////////////////////////////////////////////////////////////////////////////////
340 
341 /*
342  * Implementation of the IResourceManagerClient interface that facilitates
343  * MediaCodec reclaim for the ResourceManagerService.
344  */
345 struct ResourceManagerClient : public BnResourceManagerClient {
346     explicit ResourceManagerClient(MediaCodec* codec, int32_t pid, int32_t uid) :
347             mMediaCodec(codec), mPid(pid), mUid(uid) {}
348 
349     Status reclaimResource(bool* _aidl_return) override {
350         sp<MediaCodec> codec = mMediaCodec.promote();
351         if (codec == NULL) {
352             // Codec is already gone, so remove the resources as well
353             ::ndk::SpAIBinder binder(AServiceManager_waitForService("media.resource_manager"));
354             std::shared_ptr<IResourceManagerService> service =
355                     IResourceManagerService::fromBinder(binder);
356             if (service == nullptr) {
357                 ALOGE("MediaCodec::ResourceManagerClient unable to find ResourceManagerService");
358                 *_aidl_return = false;
359                 return Status::fromStatus(STATUS_INVALID_OPERATION);
360             }
361             ClientInfoParcel clientInfo{.pid = static_cast<int32_t>(mPid),
362                                         .uid = static_cast<int32_t>(mUid),
363                                         .id = getId(this)};
364             service->removeClient(clientInfo);
365             *_aidl_return = true;
366             return Status::ok();
367         }
368         status_t err = codec->reclaim();
369         if (err == WOULD_BLOCK) {
370             ALOGD("Wait for the client to release codec.");
371             usleep(kMaxReclaimWaitTimeInUs);
372             ALOGD("Try to reclaim again.");
373             err = codec->reclaim(true /* force */);
374         }
375         if (err != OK) {
376             ALOGW("ResourceManagerClient failed to release codec with err %d", err);
377         }
378         *_aidl_return = (err == OK);
379         return Status::ok();
380     }
381 
382     Status getName(::std::string* _aidl_return) override {
383         _aidl_return->clear();
384         sp<MediaCodec> codec = mMediaCodec.promote();
385         if (codec == NULL) {
386             // codec is already gone.
387             return Status::ok();
388         }
389 
390         AString name;
391         if (codec->getName(&name) == OK) {
392             *_aidl_return = name.c_str();
393         }
394         return Status::ok();
395     }
396 
397     virtual ~ResourceManagerClient() {}
398 
399 private:
400     wp<MediaCodec> mMediaCodec;
401     int32_t mPid;
402     int32_t mUid;
403 
404     DISALLOW_EVIL_CONSTRUCTORS(ResourceManagerClient);
405 };
406 
407 /*
408  * Proxy for ResourceManagerService that communicates with the
409  * ResourceManagerService for MediaCodec
410  */
411 struct MediaCodec::ResourceManagerServiceProxy :
412     public std::enable_shared_from_this<ResourceManagerServiceProxy> {
413 
414     // BinderDiedContext defines the cookie that is passed as DeathRecipient.
415     // Since this can maintain more context than a raw pointer, we can
416     // validate the scope of ResourceManagerServiceProxy,
417     // before dereferencing it upon the binder death.
418     struct BinderDiedContext {
419         std::weak_ptr<ResourceManagerServiceProxy> mRMServiceProxy;
420     };
421 
422     ResourceManagerServiceProxy(pid_t pid, uid_t uid,
423             const std::shared_ptr<IResourceManagerClient> &client);
424     ~ResourceManagerServiceProxy();
425     status_t init();
426     void addResource(const MediaResourceParcel &resource);
427     void addResource(const std::vector<MediaResourceParcel>& resources);
428     void updateResource(const std::vector<MediaResourceParcel>& resources);
429     void removeResource(const MediaResourceParcel &resource);
430     void removeResource(const std::vector<MediaResourceParcel>& resources);
431     void removeClient();
432     void markClientForPendingRemoval();
433     bool reclaimResource(const std::vector<MediaResourceParcel> &resources);
434     void notifyClientCreated();
435     void notifyClientStarted(ClientConfigParcel& clientConfig);
436     void notifyClientStopped(ClientConfigParcel& clientConfig);
437     void notifyClientConfigChanged(ClientConfigParcel& clientConfig);
438 
439     inline void setCodecName(const char* name) {
440         mCodecName = name;
441     }
442 
443     inline void setImportance(int importance) {
444         mImportance = importance;
445     }
446 
447 private:
448     // To get the binder interface to ResourceManagerService.
449     void getService() {
450         std::scoped_lock lock{mLock};
451         getService_l();
452     }
453 
454     std::shared_ptr<IResourceManagerService> getService_l();
455 
456     // To add/register all the resources currently added/registered with
457     // the ResourceManagerService.
458     // This function will be called right after the death of the Resource
459     // Manager to make sure that the newly started ResourceManagerService
460     // knows about the current resource usage.
461     void reRegisterAllResources_l();
462 
463     void deinit() {
464         std::scoped_lock lock{mLock};
465         // Unregistering from DeathRecipient notification.
466         if (mService != nullptr) {
467             AIBinder_unlinkToDeath(mService->asBinder().get(), mDeathRecipient.get(), mCookie);
468             mService = nullptr;
469         }
470     }
471 
472     // For binder death handling
473     static void BinderDiedCallback(void* cookie);
474     static void BinderUnlinkedCallback(void* cookie);
475 
476     void binderDied() {
477         std::scoped_lock lock{mLock};
478         ALOGE("ResourceManagerService died.");
479         mService = nullptr;
480         mBinderDied = true;
481         // start an async operation that will reconnect with the RM and
482         // re-register all the resources.
483         mGetServiceFuture = std::async(std::launch::async, [this] { getService(); });
484     }
485 
486     /**
487      * Get the ClientInfo to communicate with the ResourceManager.
488      *
489      * ClientInfo includes:
490      *   - {pid, uid} of the process
491      *   - identifier for the client
492      *   - name of the client/codec
493      *   - importance associated with the client
494      */
495     inline ClientInfoParcel getClientInfo() const {
496         ClientInfoParcel clientInfo{.pid = static_cast<int32_t>(mPid),
497                                     .uid = static_cast<int32_t>(mUid),
498                                     .id = getId(mClient),
499                                     .name = mCodecName,
500                                     .importance = mImportance};
501         return clientInfo;
502     }
503 
504 private:
505     std::mutex  mLock;
506     bool        mBinderDied = false;
507     pid_t       mPid;
508     uid_t       mUid;
509     int         mImportance = 0;
510     std::string mCodecName;
511     /**
512      * Reconnecting with the ResourceManagerService, after its binder interface dies,
513      * is done asynchronously. It also makes sure that all the resources
514      * associated with this Proxy (MediaCodec) are added to the new instance
515      * of the ResourceManagerService to persist the state of the resources.
516      * We must store the reference to the future to guarantee a truly asynchronous operation.
517      */
518     std::future<void> mGetServiceFuture;
519     // To maintain the list of all the resources currently added/registered with
520     // the ResourceManagerService.
521     std::set<MediaResourceParcel> mMediaResourceParcel;
522     std::shared_ptr<IResourceManagerClient> mClient;
523     ::ndk::ScopedAIBinder_DeathRecipient mDeathRecipient;
524     std::shared_ptr<IResourceManagerService> mService;
525     BinderDiedContext* mCookie;
526 };
527 
528 MediaCodec::ResourceManagerServiceProxy::ResourceManagerServiceProxy(
529         pid_t pid, uid_t uid, const std::shared_ptr<IResourceManagerClient> &client) :
530     mPid(pid), mUid(uid), mClient(client),
531     mDeathRecipient(::ndk::ScopedAIBinder_DeathRecipient(
532             AIBinder_DeathRecipient_new(BinderDiedCallback))),
533     mCookie(nullptr) {
534     if (mUid == MediaCodec::kNoUid) {
535         mUid = AIBinder_getCallingUid();
536     }
537     if (mPid == MediaCodec::kNoPid) {
538         mPid = AIBinder_getCallingPid();
539     }
540     // Set a callback to be notified when the DeathRecipient gets unlinked (deleted).
541     AIBinder_DeathRecipient_setOnUnlinked(mDeathRecipient.get(), BinderUnlinkedCallback);
542 }
543 
544 MediaCodec::ResourceManagerServiceProxy::~ResourceManagerServiceProxy() {
545     deinit();
546 }
547 
548 status_t MediaCodec::ResourceManagerServiceProxy::init() {
549     std::scoped_lock lock{mLock};
550 
551     int callerPid = AIBinder_getCallingPid();
552     int callerUid = AIBinder_getCallingUid();
553 
554     if (mPid != callerPid || mUid != callerUid) {
555         // Media processes don't need special permissions to act on behalf of other processes.
556         if (callerUid != AID_MEDIA) {
557             char const * permission = "android.permission.MEDIA_RESOURCE_OVERRIDE_PID";
558             if (!checkCallingPermission(String16(permission))) {
559                 ALOGW("%s is required to override the caller's PID for media resource management.",
560                         permission);
561                 return PERMISSION_DENIED;
562             }
563         }
564     }
565 
566     mService = getService_l();
567     if (mService == nullptr) {
568         return DEAD_OBJECT;
569     }
570 
571     // Kill clients pending removal.
572     mService->reclaimResourcesFromClientsPendingRemoval(mPid);
573     return OK;
574 }
575 
576 std::shared_ptr<IResourceManagerService> MediaCodec::ResourceManagerServiceProxy::getService_l() {
577     if (mService != nullptr) {
578         return mService;
579     }
580 
581     // Get binder interface to resource manager.
582     ::ndk::SpAIBinder binder(AServiceManager_waitForService("media.resource_manager"));
583     mService = IResourceManagerService::fromBinder(binder);
584     if (mService == nullptr) {
585         ALOGE("Failed to get ResourceManagerService");
586         return mService;
587     }
588 
589     // Create the context that is passed as cookie to the binder death notification.
590     // The context gets deleted at BinderUnlinkedCallback.
591     mCookie = new BinderDiedContext{.mRMServiceProxy = weak_from_this()};
592     // Register for the callbacks by linking to death notification.
593     AIBinder_linkToDeath(mService->asBinder().get(), mDeathRecipient.get(), mCookie);
594 
595     // If the RM was restarted, re-register all the resources.
596     if (mBinderDied) {
597         reRegisterAllResources_l();
598         mBinderDied = false;
599     }
600     return mService;
601 }
602 
603 void MediaCodec::ResourceManagerServiceProxy::reRegisterAllResources_l() {
604     if (mMediaResourceParcel.empty()) {
605         ALOGV("No resources to add");
606         return;
607     }
608 
609     if (mService == nullptr) {
610         ALOGW("Service isn't available");
611         return;
612     }
613 
614     std::vector<MediaResourceParcel> resources;
615     std::copy(mMediaResourceParcel.begin(), mMediaResourceParcel.end(),
616               std::back_inserter(resources));
617     mService->addResource(getClientInfo(), mClient, resources);
618 }
619 
620 void MediaCodec::ResourceManagerServiceProxy::BinderDiedCallback(void* cookie) {
621     BinderDiedContext* context = reinterpret_cast<BinderDiedContext*>(cookie);
622 
623     // Validate the context and check if the ResourceManagerServiceProxy object is still in scope.
624     if (context != nullptr) {
625         std::shared_ptr<ResourceManagerServiceProxy> thiz = context->mRMServiceProxy.lock();
626         if (thiz != nullptr) {
627             thiz->binderDied();
628         } else {
629             ALOGI("ResourceManagerServiceProxy is out of scope already");
630         }
631     }
632 }
633 
634 void MediaCodec::ResourceManagerServiceProxy::BinderUnlinkedCallback(void* cookie) {
635     BinderDiedContext* context = reinterpret_cast<BinderDiedContext*>(cookie);
636     // Since we don't need the context anymore, we are deleting it now.
637     delete context;
638 }
639 
640 void MediaCodec::ResourceManagerServiceProxy::addResource(
641         const MediaResourceParcel &resource) {
642     std::vector<MediaResourceParcel> resources;
643     resources.push_back(resource);
644     addResource(resources);
645 }
646 
647 void MediaCodec::ResourceManagerServiceProxy::addResource(
648         const std::vector<MediaResourceParcel>& resources) {
649     std::scoped_lock lock{mLock};
650     std::shared_ptr<IResourceManagerService> service = getService_l();
651     if (service == nullptr) {
652         ALOGW("Service isn't available");
653         return;
654     }
655     service->addResource(getClientInfo(), mClient, resources);
656     std::copy(resources.begin(), resources.end(),
657               std::inserter(mMediaResourceParcel, mMediaResourceParcel.end()));
658 }
659 
660 void MediaCodec::ResourceManagerServiceProxy::updateResource(
661         const std::vector<MediaResourceParcel>& resources) {
662     std::scoped_lock lock{mLock};
663     std::shared_ptr<IResourceManagerService> service = getService_l();
664     if (service == nullptr) {
665         ALOGW("Service isn't available");
666         return;
667     }
668     service->updateResource(getClientInfo(), resources);
669 }
670 
671 void MediaCodec::ResourceManagerServiceProxy::removeResource(
672         const MediaResourceParcel &resource) {
673     std::vector<MediaResourceParcel> resources;
674     resources.push_back(resource);
675     removeResource(resources);
676 }
677 
678 void MediaCodec::ResourceManagerServiceProxy::removeResource(
679         const std::vector<MediaResourceParcel>& resources) {
680     std::scoped_lock lock{mLock};
681     std::shared_ptr<IResourceManagerService> service = getService_l();
682     if (service == nullptr) {
683         ALOGW("Service isn't available");
684         return;
685     }
686     service->removeResource(getClientInfo(), resources);
687     for (const MediaResourceParcel& resource : resources) {
688         mMediaResourceParcel.erase(resource);
689     }
690 }
691 
692 void MediaCodec::ResourceManagerServiceProxy::removeClient() {
693     std::scoped_lock lock{mLock};
694     std::shared_ptr<IResourceManagerService> service = getService_l();
695     if (service == nullptr) {
696         ALOGW("Service isn't available");
697         return;
698     }
699     service->removeClient(getClientInfo());
700     mMediaResourceParcel.clear();
701 }
702 
703 void MediaCodec::ResourceManagerServiceProxy::markClientForPendingRemoval() {
704     std::scoped_lock lock{mLock};
705     std::shared_ptr<IResourceManagerService> service = getService_l();
706     if (service == nullptr) {
707         ALOGW("Service isn't available");
708         return;
709     }
710     service->markClientForPendingRemoval(getClientInfo());
711     mMediaResourceParcel.clear();
712 }
713 
714 bool MediaCodec::ResourceManagerServiceProxy::reclaimResource(
715         const std::vector<MediaResourceParcel> &resources) {
716     std::scoped_lock lock{mLock};
717     std::shared_ptr<IResourceManagerService> service = getService_l();
718     if (service == nullptr) {
719         ALOGW("Service isn't available");
720         return false;
721     }
722     bool success;
723     Status status = service->reclaimResource(getClientInfo(), resources, &success);
724     return status.isOk() && success;
725 }
726 
727 void MediaCodec::ResourceManagerServiceProxy::notifyClientCreated() {
728     std::scoped_lock lock{mLock};
729     std::shared_ptr<IResourceManagerService> service = getService_l();
730     if (service == nullptr) {
731         ALOGW("Service isn't available");
732         return;
733     }
734     service->notifyClientCreated(getClientInfo());
735 }
736 
737 void MediaCodec::ResourceManagerServiceProxy::notifyClientStarted(
738         ClientConfigParcel& clientConfig) {
739     std::scoped_lock lock{mLock};
740     std::shared_ptr<IResourceManagerService> service = getService_l();
741     if (service == nullptr) {
742         ALOGW("Service isn't available");
743         return;
744     }
745     clientConfig.clientInfo = getClientInfo();
746     service->notifyClientStarted(clientConfig);
747 }
748 
749 void MediaCodec::ResourceManagerServiceProxy::notifyClientStopped(
750         ClientConfigParcel& clientConfig) {
751     std::scoped_lock lock{mLock};
752     std::shared_ptr<IResourceManagerService> service = getService_l();
753     if (service == nullptr) {
754         ALOGW("Service isn't available");
755         return;
756     }
757     clientConfig.clientInfo = getClientInfo();
758     service->notifyClientStopped(clientConfig);
759 }
760 
761 void MediaCodec::ResourceManagerServiceProxy::notifyClientConfigChanged(
762         ClientConfigParcel& clientConfig) {
763     std::scoped_lock lock{mLock};
764     std::shared_ptr<IResourceManagerService> service = getService_l();
765     if (service == nullptr) {
766         ALOGW("Service isn't available");
767         return;
768     }
769     clientConfig.clientInfo = getClientInfo();
770     service->notifyClientConfigChanged(clientConfig);
771 }
772 
773 ////////////////////////////////////////////////////////////////////////////////
774 
775 MediaCodec::BufferInfo::BufferInfo() : mOwnedByClient(false) {}
776 
777 ////////////////////////////////////////////////////////////////////////////////
778 
779 #if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_MEDIA_MIGRATION)
780 class MediaCodec::ReleaseSurface {
781     public:
782         explicit ReleaseSurface(uint64_t usage) {
783             std::tie(mConsumer, mSurface) = BufferItemConsumer::create(usage);
784 
785             struct FrameAvailableListener : public BufferItemConsumer::FrameAvailableListener {
786                 FrameAvailableListener(const sp<BufferItemConsumer> &consumer) {
787                     mConsumer = consumer;
788                 }
789                 void onFrameAvailable(const BufferItem&) override {
790                     BufferItem buffer;
791                     // consume buffer
792                     sp<BufferItemConsumer> consumer = mConsumer.promote();
793                     if (consumer != nullptr && consumer->acquireBuffer(&buffer, 0) == NO_ERROR) {
794                         consumer->releaseBuffer(buffer.mGraphicBuffer, buffer.mFence);
795                     }
796                 }
797 
798                 wp<BufferItemConsumer> mConsumer;
799             };
800             mFrameAvailableListener = sp<FrameAvailableListener>::make(mConsumer);
801             mConsumer->setFrameAvailableListener(mFrameAvailableListener);
802             mConsumer->setName(String8{"MediaCodec.release"});
803         }
804 
805         const sp<Surface> &getSurface() {
806             return mSurface;
807         }
808 
809     private:
810         sp<BufferItemConsumer> mConsumer;
811         sp<Surface> mSurface;
812         sp<BufferItemConsumer::FrameAvailableListener> mFrameAvailableListener;
813     };
814 #else
815 class MediaCodec::ReleaseSurface {
816 public:
817     explicit ReleaseSurface(uint64_t usage) {
818         BufferQueue::createBufferQueue(&mProducer, &mConsumer);
819         mSurface = sp<Surface>::make(mProducer, false /* controlledByApp */);
820         struct ConsumerListener : public IConsumerListener {
821             ConsumerListener(const sp<IGraphicBufferConsumer> &consumer) {
822                 mConsumer = consumer;
823             }
824             void onFrameAvailable(const BufferItem&) override {
825                 BufferItem buffer;
826                 // consume buffer
827                 sp<IGraphicBufferConsumer> consumer = mConsumer.promote();
828                 if (consumer != nullptr && consumer->acquireBuffer(&buffer, 0) == NO_ERROR) {
829                     consumer->releaseBuffer(buffer.mSlot, buffer.mFrameNumber, buffer.mFence);
830                 }
831             }
832 
833             wp<IGraphicBufferConsumer> mConsumer;
834             void onBuffersReleased() override {}
835             void onSidebandStreamChanged() override {}
836         };
837         sp<ConsumerListener> listener{new ConsumerListener(mConsumer)};
838         mConsumer->consumerConnect(listener, false);
839         mConsumer->setConsumerName(String8{"MediaCodec.release"});
840         mConsumer->setConsumerUsageBits(usage);
841     }
842 
843     const sp<Surface> &getSurface() {
844         return mSurface;
845     }
846 
847 private:
848     sp<IGraphicBufferProducer> mProducer;
849     sp<IGraphicBufferConsumer> mConsumer;
850     sp<Surface> mSurface;
851 };
852 #endif // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_MEDIA_MIGRATION)
853 
854 ////////////////////////////////////////////////////////////////////////////////
855 
856 namespace {
857 
858 enum {
859     kWhatFillThisBuffer      = 'fill',
860     kWhatDrainThisBuffer     = 'drai',
861     kWhatEOS                 = 'eos ',
862     kWhatStartCompleted      = 'Scom',
863     kWhatStopCompleted       = 'scom',
864     kWhatReleaseCompleted    = 'rcom',
865     kWhatFlushCompleted      = 'fcom',
866     kWhatError               = 'erro',
867     kWhatCryptoError         = 'ercp',
868     kWhatComponentAllocated  = 'cAll',
869     kWhatComponentConfigured = 'cCon',
870     kWhatInputSurfaceCreated = 'isfc',
871     kWhatInputSurfaceAccepted = 'isfa',
872     kWhatSignaledInputEOS    = 'seos',
873     kWhatOutputFramesRendered = 'outR',
874     kWhatOutputBuffersChanged = 'outC',
875     kWhatFirstTunnelFrameReady = 'ftfR',
876     kWhatPollForRenderedBuffers = 'plrb',
877     kWhatMetricsUpdated      = 'mtru',
878     kWhatRequiredResourcesChanged = 'reqR',
879 };
880 
881 class CryptoAsyncCallback : public CryptoAsync::CryptoAsyncCallback {
882 public:
883 
884     explicit CryptoAsyncCallback(const sp<AMessage> & notify):mNotify(notify) {
885     }
886 
887     ~CryptoAsyncCallback() {}
888 
889     void onDecryptComplete(const sp<AMessage> &result) override {
890         (void)result;
891     }
892 
893     void onDecryptError(const std::list<sp<AMessage>> &errorMsgs) override {
894         // This error may be a decrypt or queue error.
895         status_t errorCode;
896         for (auto &emsg : errorMsgs) {
897              sp<AMessage> notify(mNotify->dup());
898              if (emsg->findInt32("err", &errorCode)) {
899                  if (isCryptoError(errorCode)) {
900                      notify->setInt32("what", kWhatCryptoError);
901                  } else {
902                      notify->setInt32("what", kWhatError);
903                  }
904                  notify->extend(emsg);
905                  notify->post();
906              } else {
907                  ALOGW("Buffers with no errorCode are not expected");
908              }
909         }
910     }
911 private:
912     const sp<AMessage> mNotify;
913 };
914 
915 class OnBufferReleasedListener : public ::android::SurfaceListener{
916 private:
917     uint32_t mGeneration;
918     std::weak_ptr<BufferChannelBase> mBufferChannel;
919 
920     void notifyBufferReleased() {
921         auto p = mBufferChannel.lock();
922         if (p) {
923             p->onBufferReleasedFromOutputSurface(mGeneration);
924         }
925     }
926 
927     void notifyBufferAttached() {
928         auto p = mBufferChannel.lock();
929         if (p) {
930             p->onBufferAttachedToOutputSurface(mGeneration);
931         }
932     }
933 
934 public:
935     explicit OnBufferReleasedListener(
936             uint32_t generation,
937             const std::shared_ptr<BufferChannelBase> &bufferChannel)
938             : mGeneration(generation), mBufferChannel(bufferChannel) {}
939 
940     virtual ~OnBufferReleasedListener() = default;
941 
942     void onBufferReleased() override {
943         notifyBufferReleased();
944     }
945 
946     void onBuffersDiscarded([[maybe_unused]] const std::vector<sp<GraphicBuffer>>& buffers)
947         override { }
948 
949     void onBufferDetached([[maybe_unused]] int slot) override {
950         notifyBufferReleased();
951     }
952 
953     bool needsReleaseNotify() override { return true; }
954 
955 #if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(BQ_CONSUMER_ATTACH_CALLBACK)
956     void onBufferAttached() override {
957         notifyBufferAttached();
958     }
959 
960     bool needsAttachNotify() override { return true; }
961 #endif
962 };
963 
964 class BufferCallback : public CodecBase::BufferCallback {
965 public:
966     explicit BufferCallback(const sp<AMessage> &notify);
967     virtual ~BufferCallback() = default;
968 
969     virtual void onInputBufferAvailable(
970             size_t index, const sp<MediaCodecBuffer> &buffer) override;
971     virtual void onOutputBufferAvailable(
972             size_t index, const sp<MediaCodecBuffer> &buffer) override;
973 private:
974     const sp<AMessage> mNotify;
975 };
976 
977 BufferCallback::BufferCallback(const sp<AMessage> &notify)
978     : mNotify(notify) {}
979 
980 void BufferCallback::onInputBufferAvailable(
981         size_t index, const sp<MediaCodecBuffer> &buffer) {
982     sp<AMessage> notify(mNotify->dup());
983     notify->setInt32("what", kWhatFillThisBuffer);
984     notify->setSize("index", index);
985     notify->setObject("buffer", buffer);
986     notify->post();
987 }
988 
989 void BufferCallback::onOutputBufferAvailable(
990         size_t index, const sp<MediaCodecBuffer> &buffer) {
991     sp<AMessage> notify(mNotify->dup());
992     notify->setInt32("what", kWhatDrainThisBuffer);
993     notify->setSize("index", index);
994     notify->setObject("buffer", buffer);
995     notify->post();
996 }
997 
998 class CodecCallback : public CodecBase::CodecCallback {
999 public:
1000     explicit CodecCallback(const sp<AMessage> &notify);
1001     virtual ~CodecCallback() = default;
1002 
1003     virtual void onEos(status_t err) override;
1004     virtual void onStartCompleted() override;
1005     virtual void onStopCompleted() override;
1006     virtual void onReleaseCompleted() override;
1007     virtual void onFlushCompleted() override;
1008     virtual void onError(status_t err, enum ActionCode actionCode) override;
1009     virtual void onComponentAllocated(const char *componentName) override;
1010     virtual void onComponentConfigured(
1011             const sp<AMessage> &inputFormat, const sp<AMessage> &outputFormat) override;
1012     virtual void onInputSurfaceCreated(
1013             const sp<AMessage> &inputFormat,
1014             const sp<AMessage> &outputFormat,
1015             const sp<BufferProducerWrapper> &inputSurface) override;
1016     virtual void onInputSurfaceCreationFailed(status_t err) override;
1017     virtual void onInputSurfaceAccepted(
1018             const sp<AMessage> &inputFormat,
1019             const sp<AMessage> &outputFormat) override;
1020     virtual void onInputSurfaceDeclined(status_t err) override;
1021     virtual void onSignaledInputEOS(status_t err) override;
1022     virtual void onOutputFramesRendered(const std::list<RenderedFrameInfo> &done) override;
1023     virtual void onOutputBuffersChanged() override;
1024     virtual void onFirstTunnelFrameReady() override;
1025     virtual void onMetricsUpdated(const sp<AMessage> &updatedMetrics) override;
1026     virtual void onRequiredResourcesChanged() override;
1027 private:
1028     const sp<AMessage> mNotify;
1029 };
1030 
1031 CodecCallback::CodecCallback(const sp<AMessage> &notify) : mNotify(notify) {}
1032 
1033 void CodecCallback::onEos(status_t err) {
1034     sp<AMessage> notify(mNotify->dup());
1035     notify->setInt32("what", kWhatEOS);
1036     notify->setInt32("err", err);
1037     notify->post();
1038 }
1039 
1040 void CodecCallback::onStartCompleted() {
1041     sp<AMessage> notify(mNotify->dup());
1042     notify->setInt32("what", kWhatStartCompleted);
1043     notify->post();
1044 }
1045 
1046 void CodecCallback::onStopCompleted() {
1047     sp<AMessage> notify(mNotify->dup());
1048     notify->setInt32("what", kWhatStopCompleted);
1049     notify->post();
1050 }
1051 
1052 void CodecCallback::onReleaseCompleted() {
1053     sp<AMessage> notify(mNotify->dup());
1054     notify->setInt32("what", kWhatReleaseCompleted);
1055     notify->post();
1056 }
1057 
1058 void CodecCallback::onFlushCompleted() {
1059     sp<AMessage> notify(mNotify->dup());
1060     notify->setInt32("what", kWhatFlushCompleted);
1061     notify->post();
1062 }
1063 
1064 void CodecCallback::onError(status_t err, enum ActionCode actionCode) {
1065     sp<AMessage> notify(mNotify->dup());
1066     notify->setInt32("what", kWhatError);
1067     notify->setInt32("err", err);
1068     notify->setInt32("actionCode", actionCode);
1069     notify->post();
1070 }
1071 
1072 void CodecCallback::onComponentAllocated(const char *componentName) {
1073     sp<AMessage> notify(mNotify->dup());
1074     notify->setInt32("what", kWhatComponentAllocated);
1075     notify->setString("componentName", componentName);
1076     notify->post();
1077 }
1078 
1079 void CodecCallback::onComponentConfigured(
1080         const sp<AMessage> &inputFormat, const sp<AMessage> &outputFormat) {
1081     sp<AMessage> notify(mNotify->dup());
1082     notify->setInt32("what", kWhatComponentConfigured);
1083     notify->setMessage("input-format", inputFormat);
1084     notify->setMessage("output-format", outputFormat);
1085     notify->post();
1086 }
1087 
1088 void CodecCallback::onInputSurfaceCreated(
1089         const sp<AMessage> &inputFormat,
1090         const sp<AMessage> &outputFormat,
1091         const sp<BufferProducerWrapper> &inputSurface) {
1092     sp<AMessage> notify(mNotify->dup());
1093     notify->setInt32("what", kWhatInputSurfaceCreated);
1094     notify->setMessage("input-format", inputFormat);
1095     notify->setMessage("output-format", outputFormat);
1096     notify->setObject("input-surface", inputSurface);
1097     notify->post();
1098 }
1099 
1100 void CodecCallback::onInputSurfaceCreationFailed(status_t err) {
1101     sp<AMessage> notify(mNotify->dup());
1102     notify->setInt32("what", kWhatInputSurfaceCreated);
1103     notify->setInt32("err", err);
1104     notify->post();
1105 }
1106 
1107 void CodecCallback::onInputSurfaceAccepted(
1108         const sp<AMessage> &inputFormat,
1109         const sp<AMessage> &outputFormat) {
1110     sp<AMessage> notify(mNotify->dup());
1111     notify->setInt32("what", kWhatInputSurfaceAccepted);
1112     notify->setMessage("input-format", inputFormat);
1113     notify->setMessage("output-format", outputFormat);
1114     notify->post();
1115 }
1116 
1117 void CodecCallback::onInputSurfaceDeclined(status_t err) {
1118     sp<AMessage> notify(mNotify->dup());
1119     notify->setInt32("what", kWhatInputSurfaceAccepted);
1120     notify->setInt32("err", err);
1121     notify->post();
1122 }
1123 
1124 void CodecCallback::onSignaledInputEOS(status_t err) {
1125     sp<AMessage> notify(mNotify->dup());
1126     notify->setInt32("what", kWhatSignaledInputEOS);
1127     if (err != OK) {
1128         notify->setInt32("err", err);
1129     }
1130     notify->post();
1131 }
1132 
1133 void CodecCallback::onOutputFramesRendered(const std::list<RenderedFrameInfo> &done) {
1134     sp<AMessage> notify(mNotify->dup());
1135     notify->setInt32("what", kWhatOutputFramesRendered);
1136     if (MediaCodec::CreateFramesRenderedMessage(done, notify)) {
1137         notify->post();
1138     }
1139 }
1140 
1141 void CodecCallback::onOutputBuffersChanged() {
1142     sp<AMessage> notify(mNotify->dup());
1143     notify->setInt32("what", kWhatOutputBuffersChanged);
1144     notify->post();
1145 }
1146 
1147 void CodecCallback::onFirstTunnelFrameReady() {
1148     sp<AMessage> notify(mNotify->dup());
1149     notify->setInt32("what", kWhatFirstTunnelFrameReady);
1150     notify->post();
1151 }
1152 
1153 void CodecCallback::onMetricsUpdated(const sp<AMessage> &updatedMetrics) {
1154     sp<AMessage> notify(mNotify->dup());
1155     notify->setInt32("what", kWhatMetricsUpdated);
1156     notify->setMessage("updated-metrics", updatedMetrics);
1157     notify->post();
1158 }
1159 
1160 void CodecCallback::onRequiredResourcesChanged() {
1161     sp<AMessage> notify(mNotify->dup());
1162     notify->setInt32("what", kWhatRequiredResourcesChanged);
1163     notify->post();
1164 }
1165 
1166 static MediaResourceSubType toMediaResourceSubType(bool isHardware, MediaCodec::Domain domain) {
1167     switch (domain) {
1168     case MediaCodec::DOMAIN_VIDEO:
1169         return isHardware? MediaResourceSubType::kHwVideoCodec :
1170                            MediaResourceSubType::kSwVideoCodec;
1171     case MediaCodec::DOMAIN_AUDIO:
1172         return isHardware? MediaResourceSubType::kHwAudioCodec :
1173                            MediaResourceSubType::kSwAudioCodec;
1174     case MediaCodec::DOMAIN_IMAGE:
1175         return isHardware? MediaResourceSubType::kHwImageCodec :
1176                            MediaResourceSubType::kSwImageCodec;
1177     default:
1178         return MediaResourceSubType::kUnspecifiedSubType;
1179     }
1180 }
1181 
1182 static const char * toCodecMode(MediaCodec::Domain domain) {
1183     switch (domain) {
1184         case MediaCodec::DOMAIN_VIDEO: return kCodecModeVideo;
1185         case MediaCodec::DOMAIN_AUDIO: return kCodecModeAudio;
1186         case MediaCodec::DOMAIN_IMAGE: return kCodecModeImage;
1187         default:                       return kCodecModeUnknown;
1188     }
1189 }
1190 
1191 }  // namespace
1192 
1193 ////////////////////////////////////////////////////////////////////////////////
1194 
1195 // static
1196 sp<MediaCodec> MediaCodec::CreateByType(
1197         const sp<ALooper> &looper, const AString &mime, bool encoder, status_t *err, pid_t pid,
1198         uid_t uid) {
1199     sp<AMessage> format;
1200     return CreateByType(looper, mime, encoder, err, pid, uid, format);
1201 }
1202 
1203 sp<MediaCodec> MediaCodec::CreateByType(
1204         const sp<ALooper> &looper, const AString &mime, bool encoder, status_t *err, pid_t pid,
1205         uid_t uid, sp<AMessage> format) {
1206     Vector<AString> matchingCodecs;
1207 
1208     MediaCodecList::findMatchingCodecs(
1209             mime.c_str(),
1210             encoder,
1211             0,
1212             format,
1213             &matchingCodecs);
1214 
1215     if (err != NULL) {
1216         *err = NAME_NOT_FOUND;
1217     }
1218     for (size_t i = 0; i < matchingCodecs.size(); ++i) {
1219         sp<MediaCodec> codec = new MediaCodec(looper, pid, uid);
1220         AString componentName = matchingCodecs[i];
1221         status_t ret = codec->init(componentName);
1222         if (err != NULL) {
1223             *err = ret;
1224         }
1225         if (ret == OK) {
1226             return codec;
1227         }
1228         ALOGD("Allocating component '%s' failed (%d), try next one.",
1229                 componentName.c_str(), ret);
1230     }
1231     return NULL;
1232 }
1233 
1234 // static
1235 sp<MediaCodec> MediaCodec::CreateByComponentName(
1236         const sp<ALooper> &looper, const AString &name, status_t *err, pid_t pid, uid_t uid) {
1237     sp<MediaCodec> codec = new MediaCodec(looper, pid, uid);
1238 
1239     const status_t ret = codec->init(name);
1240     if (err != NULL) {
1241         *err = ret;
1242     }
1243     return ret == OK ? codec : NULL; // NULL deallocates codec.
1244 }
1245 
1246 // static
1247 sp<PersistentSurface> MediaCodec::CreatePersistentInputSurface() {
1248     sp<PersistentSurface> pluginSurface = CCodec::CreateInputSurface();
1249     if (pluginSurface != nullptr) {
1250         return pluginSurface;
1251     }
1252 
1253     OMXClient client;
1254     if (client.connect() != OK) {
1255         ALOGE("Failed to connect to OMX to create persistent input surface.");
1256         return NULL;
1257     }
1258 
1259     sp<IOMX> omx = client.interface();
1260 
1261     sp<IGraphicBufferProducer> bufferProducer;
1262     sp<hardware::media::omx::V1_0::IGraphicBufferSource> bufferSource;
1263 
1264     status_t err = omx->createInputSurface(&bufferProducer, &bufferSource);
1265 
1266     if (err != OK) {
1267         ALOGE("Failed to create persistent input surface.");
1268         return NULL;
1269     }
1270 
1271     return new PersistentSurface(bufferProducer, bufferSource);
1272 }
1273 
1274 inline MediaResourceType getResourceType(const std::string& resourceName) {
1275     // Extract id from the resource name ==> resource name = "componentStoreName-id"
1276     std::size_t pos = resourceName.rfind("-");
1277     if (pos != std::string::npos) {
1278         return static_cast<MediaResourceType>(std::atoi(resourceName.substr(pos + 1).c_str()));
1279     }
1280 
1281     ALOGE("Resource ID missing in resource Name: [%s]!", resourceName.c_str());
1282     return MediaResourceType::kUnspecified;
1283 }
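// Illustrative example (hypothetical store name): a resource named "somestore-3", following the
// "<componentStoreName>-<id>" convention above, is mapped to the MediaResourceType with numeric id 3.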
1284 
1285 /**
1286  * Get the float/integer value associated with the given key.
1287  *
1288  * If no such key is found, it will return false without updating
1289  * the value.
1290  */
1291 static bool getValueFor(const sp<AMessage>& msg,
1292                         const char* key,
1293                         float* value) {
1294     if (msg->findFloat(key, value)) {
1295         return true;
1296     }
1297 
1298     int32_t intValue = 0;
1299     if (msg->findInt32(key, &intValue)) {
1300         *value = (float)intValue;
1301         return true;
1302     }
1303 
1304     return false;
1305 }
1306 
1307 /*
1308  * Use operating frame rate for per frame resource calculation as below:
1309  * - Check if operating-rate is available. If so, use it.
1310  * - If it's an encoder and we have capture-rate, use that as the frame rate.
1311  * - Else, check if frame-rate is available. If so, use it.
1312  * - Else, use the default value.
1313  *
1314  * NOTE: This function is called with a format that could be:
1315  *   - format used to configure the codec
1316  *   - codec's input format
1317  *   - codec's output format
1318  *
1319  * Some of the keys may not be present in the input format, the output format,
1320  * or both.
1321  * For example, "capture-rate" is currently only used in the configure format.
1322  *
1323  * For encoders, in rare cases, we would expect "operating-rate" to be set
1324  * for high-speed capture and it's only used during configuration.
1325  */
1326 static float getOperatingFrameRate(const sp<AMessage>& format,
1327                                    float defaultFrameRate,
1328                                    bool isEncoder) {
1329     float operatingRate = 0;
1330     if (getValueFor(format, "operating-rate", &operatingRate)) {
1331         // Use operating rate to convert per-frame resources into a whole.
1332         return operatingRate;
1333     }
1334 
1335     float captureRate = 0;
1336     if (isEncoder && getValueFor(format, "capture-rate", &captureRate)) {
1337         // Use capture rate to convert per-frame resources into a whole.
1338         return captureRate;
1339     }
1340 
1341     // Otherwise use frame-rate (or fallback to the default framerate passed)
1342     float frameRate = defaultFrameRate;
1343     getValueFor(format, "frame-rate", &frameRate);
1344     return frameRate;
1345 }
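// Illustrative example: an encoder configured with "capture-rate" = 240 and "frame-rate" = 30
// but no "operating-rate" resolves to 240 in the function above.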
1346 
1347 inline MediaResourceParcel getMediaResourceParcel(const InstanceResourceInfo& resourceInfo) {
1348     MediaResourceParcel resource;
1349     resource.type = getResourceType(resourceInfo.mName);
1350     resource.value = resourceInfo.mStaticCount;
1351     return resource;
1352 }
1353 
1354 void MediaCodec::updateResourceUsage(
1355         const std::vector<InstanceResourceInfo>& oldResources,
1356         const std::vector<InstanceResourceInfo>& newResources) {
1357     std::vector<MediaResourceParcel> resources;
1358 
1359     // Add all the new resources first.
1360     for (const InstanceResourceInfo& resource : newResources) {
1361         resources.push_back(getMediaResourceParcel(resource));
1362     }
1363 
1364     // Look for resources that aren't required anymore.
1365     for (const InstanceResourceInfo& oldRes : oldResources) {
1366         auto found = std::find_if(newResources.begin(),
1367                                   newResources.end(),
1368                                   [oldRes](const InstanceResourceInfo& newRes) {
1369                                       return oldRes.mName == newRes.mName; });
1370 
1371         // If this old resource isn't found in the updated resources, it is
1372         // no longer required.
1373         // Set the count to 0, so that it will be removed from the RM.
1374         if (found == newResources.end()) {
1375             MediaResourceParcel res = getMediaResourceParcel(oldRes);
1376             res.value = 0;
1377             resources.push_back(res);
1378         }
1379     }
1380 
1381     // update/notify the RM about change in resource usage.
1382     if (!resources.empty()) {
1383         mResourceManagerProxy->updateResource(resources);
1384     }
1385 }
1386 
1387 bool MediaCodec::getRequiredSystemResources() {
1388     bool success = false;
1389     std::vector<InstanceResourceInfo> oldResources;
1390     std::vector<InstanceResourceInfo> newResources;
1391 
1392     if (android::media::codec::codec_availability() &&
1393         android::media::codec::codec_availability_support()) {
1394         Mutexed<std::vector<InstanceResourceInfo>>::Locked resourcesLocked(
1395                 mRequiredResourceInfo);
1396         // Make a copy of the previous required resources, if there were any.
1397         oldResources = *resourcesLocked;
1398         // Get the required system resources now.
1399         newResources = mCodec->getRequiredSystemResources();
1400         // Update the dynamic resource usage with the current operating frame-rate.
1401         newResources = computeDynamicResources(newResources);
1402         *resourcesLocked = newResources;
1403         success  = !newResources.empty();
1404     }
1405 
1406     // Since the required resources have been updated/changed,
1407     // we should update/notify the RM with the updated usage.
1408     if (!oldResources.empty()) {
1409         updateResourceUsage(oldResources, newResources);
1410     }
1411     return success;
1412 }
1413 
1414 /**
1415  * Convert per frame/input/output resources into static_count
1416  *
1417  * TODO: (girishshetty): In the future, change InstanceResourceInfo to hold:
1418  * - resource type (const, per frame, per input/output)
1419  * - resource count
1420  */
1421 std::vector<InstanceResourceInfo> MediaCodec::computeDynamicResources(
1422         const std::vector<InstanceResourceInfo>& inResources) {
1423     std::vector<InstanceResourceInfo> dynamicResources;
1424     for (const InstanceResourceInfo& resource : inResources) {
1425         // If mStaticCount isn't 0, nothing to be changed because effectively this is a union.
1426         if (resource.mStaticCount != 0) {
1427             dynamicResources.push_back(resource);
1428             continue;
1429         }
1430         if (resource.mPerFrameCount != 0) {
1431             uint64_t staticCount = resource.mPerFrameCount * mFrameRate;
1432             // We are tracking everything as static count here. So set per frame count to 0.
1433             dynamicResources.emplace_back(resource.mName, staticCount, 0);
1434         }
1435         // TODO: (girishshetty): Add per input/output resource conversion here.
1436     }
1437 
1438     return dynamicResources;
1439 }
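// Illustrative example: a resource with mStaticCount == 0 and mPerFrameCount == 2, with the codec
// operating at mFrameRate = 30 fps, is converted above into a static count of 60.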
1440 
1441 //static
1442 status_t MediaCodec::getGloballyAvailableResources(std::vector<GlobalResourceInfo>& resources) {
1443     resources.clear();
1444     // Make sure codec availability feature is on.
1445     if (!android::media::codec::codec_availability() ||
1446         !android::media::codec::codec_availability_support()) {
1447         return ERROR_UNSUPPORTED;
1448     }
1449 
1450     // Get binder interface to resource manager.
1451     ::ndk::SpAIBinder binder(AServiceManager_waitForService("media.resource_manager"));
1452     std::shared_ptr<IResourceManagerService> service = IResourceManagerService::fromBinder(binder);
1453     if (service == nullptr) {
1454         ALOGE("Failed to get ResourceManagerService");
1455         return INVALID_OPERATION;
1456     }
1457 
1458     // Since codec availability is implemented only through the C2 HAL,
1459     // we aren't tracking (accounting for) any resources for OMX-based codecs.
1460     // But:
1461     // - if a C2 component store is implemented, we can query it for the system resources
1462     // - if there is no such HAL, resources will be empty and this function returns
1463     // ERROR_UNSUPPORTED
1464     resources = CCodec::GetGloballyAvailableResources();
1465     if (!resources.empty()) {
1466         // Get the current resource usage.
1467         std::vector<MediaResourceParcel> currentResourceUsage;
1468         service->getMediaResourceUsageReport(&currentResourceUsage);
1469 
1470         // Subtract the current usage from the globally available resources.
1471         for (GlobalResourceInfo& res : resources) {
1472             MediaResourceType type = getResourceType(res.mName);
1473             auto used = std::find_if(currentResourceUsage.begin(),
1474                                      currentResourceUsage.end(),
1475                                      [type](const MediaResourceParcel& item) {
1476                                          return item.type == type; });
1477 
1478             if (used != currentResourceUsage.end() && used->value > 0) {
1479                 // Exclude the used resources.
1480                 if (res.mAvailable < used->value) {
1481                     ALOGW("%s: Resources used (%jd) is more than the Resource Capacity (%jd)!",
1482                           __func__, used->value, res.mAvailable);
1483                     res.mAvailable = 0;
1484                 } else {
1485                     res.mAvailable -= used->value;
1486                 }
1487             }
1488         }
1489 
1490         return OK;
1491     }
1492 
1493     return ERROR_UNSUPPORTED;
1494 }
1495 
1496 // GenerateCodecId generates a 64-bit random ID for each codec that is created.
1497 // The codec ID is composed of:
1498 //   - a process-unique random value in the high 32 bits
1499 //   - an atomically incremented sequence in the low 32 bits
1500 //
1501 static uint64_t GenerateCodecId() {
1502     static std::atomic_uint64_t sId = [] {
1503         std::random_device rd;
1504         std::mt19937 gen(rd());
1505         std::uniform_int_distribution<uint32_t> distrib(0, UINT32_MAX);
1506         uint32_t randomID = distrib(gen);
1507         uint64_t id = randomID;
1508         return id << 32;
1509     }();
1510     return sId++;
1511 }
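// As a consequence, all codec IDs generated within one process share the same random high
// 32 bits and differ only in the monotonically increasing low 32 bits.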
1512 
1513 MediaCodec::MediaCodec(
1514         const sp<ALooper> &looper, pid_t pid, uid_t uid,
1515         std::function<sp<CodecBase>(const AString &, const char *)> getCodecBase,
1516         std::function<status_t(const AString &, sp<MediaCodecInfo> *)> getCodecInfo)
1517     : mState(UNINITIALIZED),
1518       mReleasedByResourceManager(false),
1519       mLooper(looper),
1520       mCodec(NULL),
1521       mReplyID(0),
1522       mFlags(0),
1523       mStickyError(OK),
1524       mSoftRenderer(NULL),
1525       mDomain(DOMAIN_UNKNOWN),
1526       mWidth(0),
1527       mHeight(0),
1528       mRotationDegrees(0),
1529       mDequeueInputTimeoutGeneration(0),
1530       mDequeueInputReplyID(0),
1531       mDequeueOutputTimeoutGeneration(0),
1532       mDequeueOutputReplyID(0),
1533       mTunneledInputWidth(0),
1534       mTunneledInputHeight(0),
1535       mTunneled(false),
1536       mTunnelPeekState(TunnelPeekState::kLegacyMode),
1537       mTunnelPeekEnabled(false),
1538       mHaveInputSurface(false),
1539       mHavePendingInputBuffers(false),
1540       mCpuBoostRequested(false),
1541       mIsSurfaceToDisplay(false),
1542       mAreRenderMetricsEnabled(areRenderMetricsEnabled()),
1543       mVideoRenderQualityTracker(
1544               VideoRenderQualityTracker::Configuration::getFromServerConfigurableFlags(
1545                       GetServerConfigurableFlag)),
1546       mLatencyUnknown(0),
1547       mBytesEncoded(0),
1548       mEarliestEncodedPtsUs(INT64_MAX),
1549       mLatestEncodedPtsUs(INT64_MIN),
1550       mFramesEncoded(0),
1551       mNumLowLatencyEnables(0),
1552       mNumLowLatencyDisables(0),
1553       mIsLowLatencyModeOn(false),
1554       mIndexOfFirstFrameWhenLowLatencyOn(-1),
1555       mInputBufferCounter(0),
1556       mGetCodecBase(getCodecBase),
1557       mGetCodecInfo(getCodecInfo) {
1558     mCodecId = GenerateCodecId();
1559     mResourceManagerProxy = std::make_shared<ResourceManagerServiceProxy>(pid, uid,
1560             ::ndk::SharedRefBase::make<ResourceManagerClient>(this, pid, uid));
1561     if (!mGetCodecBase) {
1562         mGetCodecBase = [](const AString &name, const char *owner) {
1563             return GetCodecBase(name, owner);
1564         };
1565     }
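    // Default codec-info lookup: try the requested name first and then, if the name ends in
    // ".secure", the base name with that suffix stripped, so secure variants can resolve to
    // their non-secure component's MediaCodecInfo.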
1566     if (!mGetCodecInfo) {
1567         mGetCodecInfo = [&log = mErrorLog](const AString &name,
1568                                            sp<MediaCodecInfo> *info) -> status_t {
1569             *info = nullptr;
1570             const sp<IMediaCodecList> mcl = MediaCodecList::getInstance();
1571             if (!mcl) {
1572                 log.log(LOG_TAG, "Fatal error: failed to initialize MediaCodecList");
1573                 return NO_INIT;  // if called from Java should raise IOException
1574             }
1575             AString tmp = name;
1576             if (tmp.endsWith(".secure")) {
1577                 tmp.erase(tmp.size() - 7, 7);
1578             }
1579             for (const AString &codecName : { name, tmp }) {
1580                 ssize_t codecIdx = mcl->findCodecByName(codecName.c_str());
1581                 if (codecIdx < 0) {
1582                     continue;
1583                 }
1584                 *info = mcl->getCodecInfo(codecIdx);
1585                 return OK;
1586             }
1587             log.log(LOG_TAG, base::StringPrintf("Codec with name '%s' is not found on the device.",
1588                                   name.c_str()));
1589             return NAME_NOT_FOUND;
1590         };
1591     }
1592 
1593     // we want an empty metrics record for any early getMetrics() call
1594     // this should be the *only* initMediametrics() call that's not on the Looper thread
1595     initMediametrics();
1596 }
1597 
1598 MediaCodec::~MediaCodec() {
1599     CHECK_EQ(mState, UNINITIALIZED);
1600     mResourceManagerProxy->removeClient();
1601 
1602     flushMediametrics();  // this deletes mMetricsHandle
1603     // don't keep the last metrics handle around
1604     if (mLastMetricsHandle != 0) {
1605         mediametrics_delete(mLastMetricsHandle);
1606         mLastMetricsHandle = 0;
1607     }
1608 
1609     // clean any saved metrics info we stored as part of configure()
1610     if (mConfigureMsg != nullptr) {
1611         mediametrics_handle_t metricsHandle;
1612         if (mConfigureMsg->findInt64("metrics", &metricsHandle)) {
1613             mediametrics_delete(metricsHandle);
1614         }
1615     }
1616 }
1617 
1618 // except in the constructor, this is called from the looper thread (and therefore not mutexed)
1619 void MediaCodec::initMediametrics() {
1620     if (mMetricsHandle == 0) {
1621         mMetricsHandle = mediametrics_create(kCodecKeyName);
1622     }
1623 
1624     mLatencyHist.setup(kLatencyHistBuckets, kLatencyHistWidth, kLatencyHistFloor);
1625 
1626     {
1627         Mutex::Autolock al(mRecentLock);
1628         for (int i = 0; i<kRecentLatencyFrames; i++) {
1629             mRecentSamples[i] = kRecentSampleInvalid;
1630         }
1631         mRecentHead = 0;
1632     }
1633 
1634     {
1635         Mutex::Autolock al(mLatencyLock);
1636         mBuffersInFlight.clear();
1637         mNumLowLatencyEnables = 0;
1638         mNumLowLatencyDisables = 0;
1639         mIsLowLatencyModeOn = false;
1640         mIndexOfFirstFrameWhenLowLatencyOn = -1;
1641         mInputBufferCounter = 0;
1642     }
1643 
1644     mSubsessionCount = 0;
1645     mLifetimeStartNs = systemTime(SYSTEM_TIME_MONOTONIC);
1646     resetMetricsFields();
1647 }
1648 
1649 void MediaCodec::resetMetricsFields() {
1650     mHdrInfoFlags = 0;
1651 
1652     mApiUsageMetrics = ApiUsageMetrics();
1653     mReliabilityContextMetrics = ReliabilityContextMetrics();
1654 }
1655 
1656 // always called from the looper thread (and therefore not mutexed)
1657 void MediaCodec::resetSubsessionMetricsFields() {
1658     mBytesEncoded = 0;
1659     mFramesEncoded = 0;
1660     mFramesInput = 0;
1661     mBytesInput = 0;
1662     mEarliestEncodedPtsUs = INT64_MAX;
1663     mLatestEncodedPtsUs = INT64_MIN;
1664 }
1665 
1666 // always called from the looper thread
1667 void MediaCodec::updateMediametrics() {
1668     if (mMetricsHandle == 0) {
1669         ALOGV("no metrics handle found");
1670         return;
1671     }
1672 
1673     Mutex::Autolock _lock(mMetricsLock);
1674 
1675     mediametrics_setInt32(mMetricsHandle, kCodecArrayMode, mApiUsageMetrics.isArrayMode ? 1 : 0);
1676     mApiUsageMetrics.operationMode = (mFlags & kFlagIsAsync) ?
1677             ((mFlags & kFlagUseBlockModel) ? ApiUsageMetrics::kBlockMode
1678                     : ApiUsageMetrics::kAsynchronousMode)
1679             : ApiUsageMetrics::kSynchronousMode;
1680     mediametrics_setInt32(mMetricsHandle, kCodecOperationMode, mApiUsageMetrics.operationMode);
1681     mediametrics_setInt32(mMetricsHandle, kCodecOutputSurface,
1682             mApiUsageMetrics.isUsingOutputSurface ? 1 : 0);
1683 
1684     mediametrics_setInt32(mMetricsHandle, kCodecAppMaxInputSize,
1685             mApiUsageMetrics.inputBufferSize.appMax);
1686     mediametrics_setInt32(mMetricsHandle, kCodecUsedMaxInputSize,
1687             mApiUsageMetrics.inputBufferSize.usedMax);
1688     mediametrics_setInt32(mMetricsHandle, kCodecCodecMaxInputSize,
1689             mApiUsageMetrics.inputBufferSize.codecMax);
1690 
1691     mediametrics_setInt32(mMetricsHandle, kCodecFlushCount, mReliabilityContextMetrics.flushCount);
1692     mediametrics_setInt32(mMetricsHandle, kCodecSetSurfaceCount,
1693             mReliabilityContextMetrics.setOutputSurfaceCount);
1694     mediametrics_setInt32(mMetricsHandle, kCodecResolutionChangeCount,
1695             mReliabilityContextMetrics.resolutionChangeCount);
1696 
1697     // Video rendering quality metrics
1698     {
1699         const VideoRenderQualityMetrics &m = mVideoRenderQualityTracker.getMetrics();
1700         if (m.frameReleasedCount > 0) {
1701             mediametrics_setInt64(mMetricsHandle, kCodecFirstRenderTimeUs, m.firstRenderTimeUs);
1702             mediametrics_setInt64(mMetricsHandle, kCodecLastRenderTimeUs, m.lastRenderTimeUs);
1703             mediametrics_setInt64(mMetricsHandle, kCodecFramesReleased, m.frameReleasedCount);
1704             mediametrics_setInt64(mMetricsHandle, kCodecFramesRendered, m.frameRenderedCount);
1705             mediametrics_setInt64(mMetricsHandle, kCodecFramesSkipped, m.frameSkippedCount);
1706             mediametrics_setInt64(mMetricsHandle, kCodecFramesDropped, m.frameDroppedCount);
1707             mediametrics_setDouble(mMetricsHandle, kCodecFramerateContent, m.contentFrameRate);
1708             mediametrics_setDouble(mMetricsHandle, kCodecFramerateDesired, m.desiredFrameRate);
1709             mediametrics_setDouble(mMetricsHandle, kCodecFramerateActual, m.actualFrameRate);
1710         }
1711         if (m.freezeDurationMsHistogram.getCount() >= 1) {
1712             const MediaHistogram<int32_t> &h = m.freezeDurationMsHistogram;
1713             mediametrics_setInt64(mMetricsHandle, kCodecFreezeScore, m.freezeScore);
1714             mediametrics_setDouble(mMetricsHandle, kCodecFreezeRate, m.freezeRate);
1715             mediametrics_setInt64(mMetricsHandle, kCodecFreezeCount, h.getCount());
1716             mediametrics_setInt32(mMetricsHandle, kCodecFreezeDurationMsAvg, h.getAvg());
1717             mediametrics_setInt32(mMetricsHandle, kCodecFreezeDurationMsMax, h.getMax());
1718             mediametrics_setString(mMetricsHandle, kCodecFreezeDurationMsHistogram, h.emit());
1719             mediametrics_setString(mMetricsHandle, kCodecFreezeDurationMsHistogramBuckets,
1720                                    h.emitBuckets());
1721         }
1722         if (m.freezeDistanceMsHistogram.getCount() >= 1) {
1723             const MediaHistogram<int32_t> &h = m.freezeDistanceMsHistogram;
1724             mediametrics_setInt32(mMetricsHandle, kCodecFreezeDistanceMsAvg, h.getAvg());
1725             mediametrics_setString(mMetricsHandle, kCodecFreezeDistanceMsHistogram, h.emit());
1726             mediametrics_setString(mMetricsHandle, kCodecFreezeDistanceMsHistogramBuckets,
1727                                    h.emitBuckets());
1728         }
1729         if (m.judderScoreHistogram.getCount() >= 1) {
1730             const MediaHistogram<int32_t> &h = m.judderScoreHistogram;
1731             mediametrics_setInt64(mMetricsHandle, kCodecJudderScore, m.judderScore);
1732             mediametrics_setDouble(mMetricsHandle, kCodecJudderRate, m.judderRate);
1733             mediametrics_setInt64(mMetricsHandle, kCodecJudderCount, h.getCount());
1734             mediametrics_setInt32(mMetricsHandle, kCodecJudderScoreAvg, h.getAvg());
1735             mediametrics_setInt32(mMetricsHandle, kCodecJudderScoreMax, h.getMax());
1736             mediametrics_setString(mMetricsHandle, kCodecJudderScoreHistogram, h.emit());
1737             mediametrics_setString(mMetricsHandle, kCodecJudderScoreHistogramBuckets,
1738                                    h.emitBuckets());
1739         }
1740         if (m.freezeEventCount != 0) {
1741             mediametrics_setInt32(mMetricsHandle, kCodecFreezeEventCount, m.freezeEventCount);
1742         }
1743         if (m.judderEventCount != 0) {
1744             mediametrics_setInt32(mMetricsHandle, kCodecJudderEventCount, m.judderEventCount);
1745         }
1746     }
1747 
1748     if (mLatencyHist.getCount() != 0 ) {
1749         mediametrics_setInt64(mMetricsHandle, kCodecLatencyMax, mLatencyHist.getMax());
1750         mediametrics_setInt64(mMetricsHandle, kCodecLatencyMin, mLatencyHist.getMin());
1751         mediametrics_setInt64(mMetricsHandle, kCodecLatencyAvg, mLatencyHist.getAvg());
1752         mediametrics_setInt64(mMetricsHandle, kCodecLatencyCount, mLatencyHist.getCount());
1753 
1754         if (kEmitHistogram) {
1755             // and the histogram itself
1756             std::string hist = mLatencyHist.emit();
1757             mediametrics_setCString(mMetricsHandle, kCodecLatencyHist, hist.c_str());
1758         }
1759     }
1760     if (mLatencyUnknown > 0) {
1761         mediametrics_setInt64(mMetricsHandle, kCodecLatencyUnknown, mLatencyUnknown);
1762     }
1763     int64_t playbackDurationSec = mPlaybackDurationAccumulator.getDurationInSeconds();
1764     if (playbackDurationSec > 0) {
1765         mediametrics_setInt64(mMetricsHandle, kCodecPlaybackDurationSec, playbackDurationSec);
1766     }
1767     if (mLifetimeStartNs > 0) {
1768         nsecs_t lifetime = systemTime(SYSTEM_TIME_MONOTONIC) - mLifetimeStartNs;
1769         lifetime = lifetime / (1000 * 1000);    // emitted in ms, truncated not rounded
1770         mediametrics_setInt64(mMetricsHandle, kCodecLifetimeMs, lifetime);
1771     }
1772 
1773     if (mBytesEncoded) {
1774         Mutex::Autolock al(mOutputStatsLock);
1775 
1776         mediametrics_setInt64(mMetricsHandle, kCodecVideoEncodedBytes, mBytesEncoded);
1777         int64_t duration = 0;
1778         if (mLatestEncodedPtsUs > mEarliestEncodedPtsUs) {
1779             duration = mLatestEncodedPtsUs - mEarliestEncodedPtsUs;
1780         }
1781         mediametrics_setInt64(mMetricsHandle, kCodecVideoEncodedDurationUs, duration);
1782         mediametrics_setInt64(mMetricsHandle, kCodecVideoEncodedFrames, mFramesEncoded);
1783         mediametrics_setInt64(mMetricsHandle, kCodecVideoInputFrames, mFramesInput);
1784         mediametrics_setInt64(mMetricsHandle, kCodecVideoInputBytes, mBytesInput);
1785     }
1786 
1787     {
1788         Mutex::Autolock al(mLatencyLock);
1789         mediametrics_setInt64(mMetricsHandle, kCodecNumLowLatencyModeOn, mNumLowLatencyEnables);
1790         mediametrics_setInt64(mMetricsHandle, kCodecNumLowLatencyModeOff, mNumLowLatencyDisables);
1791         mediametrics_setInt64(mMetricsHandle, kCodecFirstFrameIndexLowLatencyModeOn,
1792                               mIndexOfFirstFrameWhenLowLatencyOn);
1793     }
1794 
1795 #if 0
1796     // enable for short term, only while debugging
1797     updateEphemeralMediametrics(mMetricsHandle);
1798 #endif
1799 }
1800 
1801 void MediaCodec::updateHdrMetrics(bool isConfig) {
1802     if ((mDomain != DOMAIN_VIDEO && mDomain != DOMAIN_IMAGE) || mMetricsHandle == 0) {
1803         return;
1804     }
1805 
1806     int32_t colorStandard = -1;
1807     if (mOutputFormat->findInt32(KEY_COLOR_STANDARD, &colorStandard)) {
1808         mediametrics_setInt32(mMetricsHandle,
1809                 isConfig ? kCodecConfigColorStandard : kCodecParsedColorStandard, colorStandard);
1810     }
1811     int32_t colorRange = -1;
1812     if (mOutputFormat->findInt32(KEY_COLOR_RANGE, &colorRange)) {
1813         mediametrics_setInt32(mMetricsHandle,
1814                 isConfig ? kCodecConfigColorRange : kCodecParsedColorRange, colorRange);
1815     }
1816     int32_t colorTransfer = -1;
1817     if (mOutputFormat->findInt32(KEY_COLOR_TRANSFER, &colorTransfer)) {
1818         mediametrics_setInt32(mMetricsHandle,
1819                 isConfig ? kCodecConfigColorTransfer : kCodecParsedColorTransfer, colorTransfer);
1820     }
1821     HDRStaticInfo info;
1822     if (ColorUtils::getHDRStaticInfoFromFormat(mOutputFormat, &info)
1823             && ColorUtils::isHDRStaticInfoValid(&info)) {
1824         mHdrInfoFlags |= kFlagHasHdrStaticInfo;
1825     }
1826     mediametrics_setInt32(mMetricsHandle, kCodecHdrStaticInfo,
1827             (mHdrInfoFlags & kFlagHasHdrStaticInfo) ? 1 : 0);
1828     sp<ABuffer> hdr10PlusInfo;
1829     if (mOutputFormat->findBuffer("hdr10-plus-info", &hdr10PlusInfo)
1830             && hdr10PlusInfo != nullptr && hdr10PlusInfo->size() > 0) {
1831         mHdrInfoFlags |= kFlagHasHdr10PlusInfo;
1832     }
1833     mediametrics_setInt32(mMetricsHandle, kCodecHdr10PlusInfo,
1834             (mHdrInfoFlags & kFlagHasHdr10PlusInfo) ? 1 : 0);
1835 
1836     // hdr format
1837     sp<AMessage> codedFormat = (mFlags & kFlagIsEncoder) ? mOutputFormat : mInputFormat;
1838 
1839     AString mime;
1840     int32_t profile = -1;
1841 
1842     if (codedFormat->findString("mime", &mime)
1843             && codedFormat->findInt32(KEY_PROFILE, &profile)
1844             && colorTransfer != -1) {
1845         hdr_format hdrFormat = getHdrFormat(mime, profile, colorTransfer);
1846         mediametrics_setInt32(mMetricsHandle, kCodecHdrFormat, static_cast<int>(hdrFormat));
1847     }
1848 }
1849 
1850 hdr_format MediaCodec::getHdrFormat(const AString &mime, const int32_t profile,
1851         const int32_t colorTransfer) {
1852     return (mFlags & kFlagIsEncoder)
1853             ? getHdrFormatForEncoder(mime, profile, colorTransfer)
1854             : getHdrFormatForDecoder(mime, profile, colorTransfer);
1855 }
1856 
1857 hdr_format MediaCodec::getHdrFormatForEncoder(const AString &mime, const int32_t profile,
1858         const int32_t colorTransfer) {
1859     switch (colorTransfer) {
1860         case COLOR_TRANSFER_ST2084:
1861             if (mime.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_VP9)) {
1862                 switch (profile) {
1863                     case VP9Profile2HDR:
1864                         return HDR_FORMAT_HDR10;
1865                     case VP9Profile2HDR10Plus:
1866                         return HDR_FORMAT_HDR10PLUS;
1867                     default:
1868                         return HDR_FORMAT_NONE;
1869                 }
1870             } else if (mime.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_AV1)) {
1871                 switch (profile) {
1872                     case AV1ProfileMain10HDR10:
1873                         return HDR_FORMAT_HDR10;
1874                     case AV1ProfileMain10HDR10Plus:
1875                         return HDR_FORMAT_HDR10PLUS;
1876                     default:
1877                         return HDR_FORMAT_NONE;
1878                 }
1879             } else if (mime.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_HEVC)) {
1880                 switch (profile) {
1881                     case HEVCProfileMain10HDR10:
1882                         return HDR_FORMAT_HDR10;
1883                     case HEVCProfileMain10HDR10Plus:
1884                         return HDR_FORMAT_HDR10PLUS;
1885                     default:
1886                         return HDR_FORMAT_NONE;
1887                 }
1888             } else {
1889                 return HDR_FORMAT_NONE;
1890             }
1891         case COLOR_TRANSFER_HLG:
1892             if (!mime.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_DOLBY_VISION)) {
1893                 return HDR_FORMAT_HLG;
1894             } else {
1895                 // TODO: DOLBY format
1896                 return HDR_FORMAT_NONE;
1897             }
1898         default:
1899             return HDR_FORMAT_NONE;
1900     }
1901 }
1902 
1903 hdr_format MediaCodec::getHdrFormatForDecoder(const AString &mime, const int32_t profile,
1904         const int32_t colorTransfer) {
1905     switch (colorTransfer) {
1906         case COLOR_TRANSFER_ST2084:
1907             if (!(mHdrInfoFlags & kFlagHasHdrStaticInfo) || !profileSupport10Bits(mime, profile)) {
1908                 return HDR_FORMAT_NONE;
1909             }
1910             return mHdrInfoFlags & kFlagHasHdr10PlusInfo ? HDR_FORMAT_HDR10PLUS : HDR_FORMAT_HDR10;
1911         case COLOR_TRANSFER_HLG:
1912             if (!mime.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_DOLBY_VISION)) {
1913                 return HDR_FORMAT_HLG;
1914             }
1915             // TODO: DOLBY format
1916     }
1917     return HDR_FORMAT_NONE;
1918 }
1919 
1920 bool MediaCodec::profileSupport10Bits(const AString &mime, const int32_t profile) {
1921     if (mime.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_AV1)) {
1922         return true;
1923     } else if (mime.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_VP9)) {
1924         switch (profile) {
1925             case VP9Profile2:
1926             case VP9Profile3:
1927             case VP9Profile2HDR:
1928             case VP9Profile3HDR:
1929             case VP9Profile2HDR10Plus:
1930             case VP9Profile3HDR10Plus:
1931                 return true;
1932         }
1933     } else if (mime.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_HEVC)) {
1934         switch (profile) {
1935             case HEVCProfileMain10:
1936             case HEVCProfileMain10HDR10:
1937             case HEVCProfileMain10HDR10Plus:
1938                 return true;
1939         }
1940     }
1941     return false;
1942 }
1943 
1944 
1945 // called to update the info passed back via getMetrics(), which is a
1946 // unique copy for that call, so there are no concurrent access worries.
1947 void MediaCodec::updateEphemeralMediametrics(mediametrics_handle_t item) {
1948     ALOGD("MediaCodec::updateEphemeralMediametrics()");
1949 
1950     if (item == 0) {
1951         return;
1952     }
1953 
1954     // build an empty histogram
1955     MediaHistogram<int64_t> recentHist;
1956     recentHist.setup(kLatencyHistBuckets, kLatencyHistWidth, kLatencyHistFloor);
1957 
1958     // stuff it with the samples in the ring buffer
1959     {
1960         Mutex::Autolock al(mRecentLock);
1961 
1962         for (int i = 0; i < kRecentLatencyFrames; i++) {
1963             if (mRecentSamples[i] != kRecentSampleInvalid) {
1964                 recentHist.insert(mRecentSamples[i]);
1965             }
1966         }
1967     }
1968 
1969     // spit the data (if any) into the supplied analytics record
1970     if (recentHist.getCount() != 0 ) {
1971         mediametrics_setInt64(item, kCodecRecentLatencyMax, recentHist.getMax());
1972         mediametrics_setInt64(item, kCodecRecentLatencyMin, recentHist.getMin());
1973         mediametrics_setInt64(item, kCodecRecentLatencyAvg, recentHist.getAvg());
1974         mediametrics_setInt64(item, kCodecRecentLatencyCount, recentHist.getCount());
1975 
1976         if (kEmitHistogram) {
1977             // and the histogram itself
1978             std::string hist = recentHist.emit();
1979             mediametrics_setCString(item, kCodecRecentLatencyHist, hist.c_str());
1980         }
1981     }
1982 }
1983 
1984 static std::string emitVector(std::vector<int32_t> vector) {
1985     std::ostringstream sstr;
1986     for (size_t i = 0; i < vector.size(); ++i) {
1987         if (i != 0) {
1988             sstr << ',';
1989         }
1990         sstr << vector[i];
1991     }
1992     return sstr.str();
1993 }
1994 
1995 static void reportToMediaMetricsIfValid(const FreezeEvent &e) {
1996     if (e.valid) {
1997         mediametrics_handle_t handle = mediametrics_create(kFreezeEventKeyName);
1998         mediametrics_setInt64(handle, kFreezeEventInitialTimeUs, e.initialTimeUs);
1999         mediametrics_setInt32(handle, kFreezeEventDurationMs, e.durationMs);
2000         mediametrics_setInt64(handle, kFreezeEventCount, e.count);
2001         mediametrics_setInt32(handle, kFreezeEventAvgDurationMs, e.sumDurationMs / e.count);
2002         mediametrics_setInt32(handle, kFreezeEventAvgDistanceMs, e.sumDistanceMs / e.count);
2003         mediametrics_setString(handle, kFreezeEventDetailsDurationMs,
2004                                emitVector(e.details.durationMs));
2005         mediametrics_setString(handle, kFreezeEventDetailsDistanceMs,
2006                                emitVector(e.details.distanceMs));
2007         mediametrics_selfRecord(handle);
2008         mediametrics_delete(handle);
2009     }
2010 }
2011 
2012 static void reportToMediaMetricsIfValid(const JudderEvent &e) {
2013     if (e.valid) {
2014         mediametrics_handle_t handle = mediametrics_create(kJudderEventKeyName);
2015         mediametrics_setInt64(handle, kJudderEventInitialTimeUs, e.initialTimeUs);
2016         mediametrics_setInt32(handle, kJudderEventDurationMs, e.durationMs);
2017         mediametrics_setInt64(handle, kJudderEventCount, e.count);
2018         mediametrics_setInt32(handle, kJudderEventAvgScore, e.sumScore / e.count);
2019         mediametrics_setInt32(handle, kJudderEventAvgDistanceMs, e.sumDistanceMs / e.count);
2020         mediametrics_setString(handle, kJudderEventDetailsActualDurationUs,
2021                                emitVector(e.details.actualRenderDurationUs));
2022         mediametrics_setString(handle, kJudderEventDetailsContentDurationUs,
2023                                emitVector(e.details.contentRenderDurationUs));
2024         mediametrics_setString(handle, kJudderEventDetailsDistanceMs,
2025                                emitVector(e.details.distanceMs));
2026         mediametrics_selfRecord(handle);
2027         mediametrics_delete(handle);
2028     }
2029 }
2030 
2031 // except in the destructor, this is called from the looper thread
2032 void MediaCodec::flushMediametrics() {
2033     ALOGV("flushMediametrics");
2034 
2035     // update does its own mutex locking
2036     updateMediametrics();
2037     resetMetricsFields();
2038 
2039     // ensure mutex while we do our own work
2040     Mutex::Autolock _lock(mMetricsLock);
2041     if (mMetricsHandle != 0) {
2042         if (mMetricsToUpload && mediametrics_count(mMetricsHandle) > 0) {
2043             mediametrics_selfRecord(mMetricsHandle);
2044         }
2045         // keep previous metrics handle for subsequent getMetrics() calls.
2046         // NOTE: There could be multiple error events, each flushing the metrics.
2047         // We keep the last non-empty metrics handle, so getMetrics() in the
2048         // next call will get the latest metrics prior to the errors.
2049         if (mLastMetricsHandle != 0) {
2050             mediametrics_delete(mLastMetricsHandle);
2051         }
2052         mLastMetricsHandle = mMetricsHandle;
2053         mMetricsHandle = 0;
2054     }
2055     // we no longer have anything pending upload
2056     mMetricsToUpload = false;
2057 
2058     // Freeze and judder events are reported separately
2059     reportToMediaMetricsIfValid(mVideoRenderQualityTracker.getAndResetFreezeEvent());
2060     reportToMediaMetricsIfValid(mVideoRenderQualityTracker.getAndResetJudderEvent());
2061 }
2062 
2063 void MediaCodec::updateLowLatency(const sp<AMessage> &msg) {
2064     int32_t lowLatency = 0;
2065     if (msg->findInt32("low-latency", &lowLatency)) {
2066         Mutex::Autolock al(mLatencyLock);
2067         if (lowLatency > 0) {
2068             ++mNumLowLatencyEnables;
2069             // This is just an estimate since low latency mode change happens ONLY at key frame
2070             mIsLowLatencyModeOn = true;
2071         } else if (lowLatency == 0) {
2072             ++mNumLowLatencyDisables;
2073             // This is just an estimate since low latency mode change happens ONLY at key frame
2074             mIsLowLatencyModeOn = false;
2075         }
2076     }
2077 }
2078 
2079 void MediaCodec::updateCodecImportance(const sp<AMessage>& msg) {
2080     // Update the codec importance.
2081     int32_t importance = 0;
2082     if (msg->findInt32(KEY_IMPORTANCE, &importance)) {
2083         // Ignoring the negative importance.
2084         if (importance >= 0) {
2085             // Notify RM about the change in the importance.
2086             mResourceManagerProxy->setImportance(importance);
2087             ClientConfigParcel clientConfig;
2088             initClientConfigParcel(clientConfig);
2089             mResourceManagerProxy->notifyClientConfigChanged(clientConfig);
2090         }
2091     }
2092 }
2093 
2094 void MediaCodec::updatePictureProfile(const sp<AMessage>& msg, bool applyDefaultProfile) {
2095     if (!(msg->contains(KEY_PICTURE_PROFILE_HANDLE) || msg->contains(KEY_PICTURE_PROFILE_ID) ||
2096           applyDefaultProfile)) {
2097         return;
2098     }
2099 
2100     sp<IMediaQualityManager> mediaQualityMgr =
2101             waitForDeclaredService<IMediaQualityManager>(String16("media_quality"));
2102     if (mediaQualityMgr == nullptr) {
2103         ALOGE("Media Quality Service not found.");
2104         return;
2105     }
2106 
2107     int64_t pictureProfileHandle;
2108     AString pictureProfileId;
2109 
2110     if (msg->findInt64(KEY_PICTURE_PROFILE_HANDLE, &pictureProfileHandle)) {
2111         binder::Status status =
2112                 mediaQualityMgr->notifyPictureProfileHandleSelection(pictureProfileHandle, 0);
2113         if (!status.isOk()) {
2114             ALOGE("unexpected status when calling "
2115                   "MediaQualityManager.notifyPictureProfileHandleSelection(): %s",
2116                   status.toString8().c_str());
2117         }
2118         msg->setInt64(KEY_PICTURE_PROFILE_HANDLE, pictureProfileHandle);
2119         return;
2120     } else if (msg->findString(KEY_PICTURE_PROFILE_ID, &pictureProfileId)) {
2121         binder::Status status = mediaQualityMgr->getPictureProfileHandleValue(
2122                 String16(pictureProfileId.c_str()), 0, &pictureProfileHandle);
2123         if (status.isOk()) {
2124             if (pictureProfileHandle != -1) {
2125                 msg->setInt64(KEY_PICTURE_PROFILE_HANDLE, pictureProfileHandle);
2126             } else {
2127                 ALOGW("PictureProfileHandle not found for pictureProfileId %s",
2128                       pictureProfileId.c_str());
2129             }
2130         } else {
2131             ALOGE("unexpected status when calling "
2132                   "MediaQualityManager.getPictureProfileHandleValue(): %s",
2133                   status.toString8().c_str());
2134         }
2135         return;
2136     } else {  // applyDefaultProfile
2137         binder::Status status =
2138                 mediaQualityMgr->getDefaultPictureProfileHandleValue(0, &pictureProfileHandle);
2139         if (status.isOk()) {
2140             if (pictureProfileHandle != -1) {
2141                 msg->setInt64(KEY_PICTURE_PROFILE_HANDLE, pictureProfileHandle);
2142             } else {
2143                 ALOGW("Default PictureProfileHandle not found");
2144             }
2145         } else {
2146             ALOGE("unexpected status when calling "
2147                   "MediaQualityManager.getDefaultPictureProfileHandleValue(): %s",
2148                   status.toString8().c_str());
2149         }
2150         return;
2151     }
2152 }
2153 
2154 constexpr const char *MediaCodec::asString(TunnelPeekState state, const char *default_string) {
2155     switch(state) {
2156         case TunnelPeekState::kLegacyMode:
2157             return "LegacyMode";
2158         case TunnelPeekState::kEnabledNoBuffer:
2159             return "EnabledNoBuffer";
2160         case TunnelPeekState::kDisabledNoBuffer:
2161             return "DisabledNoBuffer";
2162         case TunnelPeekState::kBufferDecoded:
2163             return "BufferDecoded";
2164         case TunnelPeekState::kBufferRendered:
2165             return "BufferRendered";
2166         case TunnelPeekState::kDisabledQueued:
2167             return "DisabledQueued";
2168         case TunnelPeekState::kEnabledQueued:
2169             return "EnabledQueued";
2170         default:
2171             return default_string;
2172     }
2173 }
2174 
2175 void MediaCodec::updateTunnelPeek(const sp<AMessage> &msg) {
2176     int32_t tunnelPeek = 0;
2177     if (!msg->findInt32("tunnel-peek", &tunnelPeek)){
2178         return;
2179     }
2180 
2181     TunnelPeekState previousState = mTunnelPeekState;
2182     if(tunnelPeek == 0){
2183         mTunnelPeekEnabled = false;
2184         switch (mTunnelPeekState) {
2185             case TunnelPeekState::kLegacyMode:
2186                 msg->setInt32("android._tunnel-peek-set-legacy", 0);
2187                 [[fallthrough]];
2188             case TunnelPeekState::kEnabledNoBuffer:
2189                 mTunnelPeekState = TunnelPeekState::kDisabledNoBuffer;
2190                 break;
2191             case TunnelPeekState::kEnabledQueued:
2192                 mTunnelPeekState = TunnelPeekState::kDisabledQueued;
2193                 break;
2194             default:
2195                 ALOGV("Ignoring tunnel-peek=%d for %s", tunnelPeek, asString(mTunnelPeekState));
2196                 return;
2197         }
2198     } else {
2199         mTunnelPeekEnabled = true;
2200         switch (mTunnelPeekState) {
2201             case TunnelPeekState::kLegacyMode:
2202                 msg->setInt32("android._tunnel-peek-set-legacy", 0);
2203                 [[fallthrough]];
2204             case TunnelPeekState::kDisabledNoBuffer:
2205                 mTunnelPeekState = TunnelPeekState::kEnabledNoBuffer;
2206                 break;
2207             case TunnelPeekState::kDisabledQueued:
2208                 mTunnelPeekState = TunnelPeekState::kEnabledQueued;
2209                 break;
2210             case TunnelPeekState::kBufferDecoded:
2211                 msg->setInt32("android._trigger-tunnel-peek", 1);
2212                 mTunnelPeekState = TunnelPeekState::kBufferRendered;
2213                 break;
2214             default:
2215                 ALOGV("Ignoring tunnel-peek=%d for %s", tunnelPeek, asString(mTunnelPeekState));
2216                 return;
2217         }
2218     }
2219 
2220     ALOGV("TunnelPeekState: %s -> %s", asString(previousState), asString(mTunnelPeekState));
2221 }
2222 
2223 void MediaCodec::processRenderedFrames(const sp<AMessage> &msg) {
2224     int what = 0;
2225     msg->findInt32("what", &what);
2226     if (msg->what() != kWhatCodecNotify && what != kWhatOutputFramesRendered) {
2227         static bool logged = false;
2228         if (!logged) {
2229             logged = true;
2230             ALOGE("processRenderedFrames: expected kWhatOutputFramesRendered (%d)", msg->what());
2231         }
2232         return;
2233     }
2234     // Rendered frames only matter if they're being sent to the display
2235     if (mIsSurfaceToDisplay) {
2236         int64_t renderTimeNs;
2237         for (size_t index = 0;
2238             msg->findInt64(AStringPrintf("%zu-system-nano", index).c_str(), &renderTimeNs);
2239             index++) {
2240             // Capture metrics for playback duration
2241             mPlaybackDurationAccumulator.onFrameRendered(renderTimeNs);
2242             // Capture metrics for quality
2243             int64_t mediaTimeUs = 0;
2244             if (!msg->findInt64(AStringPrintf("%zu-media-time-us", index).c_str(), &mediaTimeUs)) {
2245                 ALOGE("processRenderedFrames: no media time found");
2246                 continue;
2247             }
2248             // Tunneled frames use INT64_MAX to indicate end-of-stream, so don't report it as a
2249             // rendered frame.
2250             if (!mTunneled || mediaTimeUs != INT64_MAX) {
2251                 FreezeEvent freezeEvent;
2252                 JudderEvent judderEvent;
2253                 mVideoRenderQualityTracker.onFrameRendered(mediaTimeUs, renderTimeNs, &freezeEvent,
2254                                                            &judderEvent);
2255                 reportToMediaMetricsIfValid(freezeEvent);
2256                 reportToMediaMetricsIfValid(judderEvent);
2257             }
2258         }
2259     }
2260 }
2261 
2262 // when we send a buffer to the codec
2263 void MediaCodec::statsBufferSent(int64_t presentationUs, const sp<MediaCodecBuffer> &buffer) {
2264 
2265     // only enqueue if we have a legitimate time
2266     if (presentationUs <= 0) {
2267         ALOGV("presentation time: %" PRId64, presentationUs);
2268         return;
2269     }
2270 
2271     if (mBatteryChecker != nullptr) {
2272         mBatteryChecker->onCodecActivity([this] () {
2273             mResourceManagerProxy->addResource(MediaResource::VideoBatteryResource(mIsHardware));
2274         });
2275     }
2276 
2277     // NOTE: these were erroneously restricted to video encoders, but we want them for all
2278     // codecs.
2279     if (android::media::codec::provider_->subsession_metrics()
2280             || (mDomain == DOMAIN_VIDEO && (mFlags & kFlagIsEncoder))) {
2281         mBytesInput += buffer->size();
2282         mFramesInput++;
2283     }
2284 
2285     // mutex access to mBuffersInFlight and other stats
2286     Mutex::Autolock al(mLatencyLock);
2287 
2288     // XXX: we *could* make sure that the time is later than the end of queue
2289     // as part of a consistency check...
2290     if (!mTunneled) {
2291         const int64_t nowNs = systemTime(SYSTEM_TIME_MONOTONIC);
2292         BufferFlightTiming_t startdata = { presentationUs, nowNs };
2293         mBuffersInFlight.push_back(startdata);
2294     }
2295 
2296     if (mIsLowLatencyModeOn && mIndexOfFirstFrameWhenLowLatencyOn < 0) {
2297         mIndexOfFirstFrameWhenLowLatencyOn = mInputBufferCounter;
2298     }
2299     ++mInputBufferCounter;
2300 }
2301 
2302 // when we get a buffer back from the codec, always called from the looper thread
2303 void MediaCodec::statsBufferReceived(int64_t presentationUs, const sp<MediaCodecBuffer> &buffer) {
2304 
2305     CHECK_NE(mState, UNINITIALIZED);
2306 
2307     // NOTE: these were erroneously restricted to video encoders, but we want them for all
2308     // codecs.
2309     if (android::media::codec::provider_->subsession_metrics()
2310             || (mDomain == DOMAIN_VIDEO && (mFlags & kFlagIsEncoder))) {
2311         int32_t flags = 0;
2312         (void) buffer->meta()->findInt32("flags", &flags);
2313 
2314         // some of these frames we don't want to count:
2315         // a standalone EOS buffer has an invalid timestamp
2316         if ((flags & (BUFFER_FLAG_CODECCONFIG|BUFFER_FLAG_EOS)) == 0) {
2317             mBytesEncoded += buffer->size();
2318             mFramesEncoded++;
2319 
2320             Mutex::Autolock al(mOutputStatsLock);
2321             int64_t timeUs = 0;
2322             if (buffer->meta()->findInt64("timeUs", &timeUs)) {
2323                 if (timeUs > mLatestEncodedPtsUs) {
2324                     mLatestEncodedPtsUs = timeUs;
2325                 }
2326                 // can't chain as an else-if or this never triggers
2327                 if (timeUs < mEarliestEncodedPtsUs) {
2328                     mEarliestEncodedPtsUs = timeUs;
2329                 }
2330             }
2331         }
2332     }
2333 
2334     // mutex access to mBuffersInFlight and other stats
2335     Mutex::Autolock al(mLatencyLock);
2336 
2337     // how long this buffer took for the round trip through the codec
2338     // NB: pipelining can/will make these times larger. e.g., if each packet
2339     // is always 2 msec and we have 3 in flight at any given time, we're going to
2340     // see "6 msec" as an answer.
2341 
2342     // ignore stuff with no presentation time
2343     if (presentationUs <= 0) {
2344         ALOGV("-- returned buffer timestamp %" PRId64 " <= 0, ignore it", presentationUs);
2345         mLatencyUnknown++;
2346         return;
2347     }
2348 
2349     if (mBatteryChecker != nullptr) {
2350         mBatteryChecker->onCodecActivity([this] () {
2351             mResourceManagerProxy->addResource(MediaResource::VideoBatteryResource(mIsHardware));
2352         });
2353     }
2354 
2355     BufferFlightTiming_t startdata;
2356     bool valid = false;
2357     while (mBuffersInFlight.size() > 0) {
2358         startdata = *mBuffersInFlight.begin();
2359         ALOGV("-- Looking at startdata. presentation %" PRId64 ", start %" PRId64,
2360               startdata.presentationUs, startdata.startedNs);
2361         if (startdata.presentationUs == presentationUs) {
2362             // a match
2363             ALOGV("-- match entry for %" PRId64 ", hits our frame of %" PRId64,
2364                   startdata.presentationUs, presentationUs);
2365             mBuffersInFlight.pop_front();
2366             valid = true;
2367             break;
2368         } else if (startdata.presentationUs < presentationUs) {
2369             // we must have missed the match for this, drop it and keep looking
2370             ALOGV("--  drop entry for %" PRId64 ", before our frame of %" PRId64,
2371                   startdata.presentationUs, presentationUs);
2372             mBuffersInFlight.pop_front();
2373             continue;
2374         } else {
2375             // head is after, so we don't have a frame for ourselves
2376             ALOGV("--  found entry for %" PRId64 ", AFTER our frame of %" PRId64
2377                   " we have nothing to pair with",
2378                   startdata.presentationUs, presentationUs);
2379             mLatencyUnknown++;
2380             return;
2381         }
2382     }
2383     if (!valid) {
2384         ALOGV("-- empty queue, so ignore that.");
2385         mLatencyUnknown++;
2386         return;
2387     }
2388 
2389     // now start our calculations
2390     const int64_t nowNs = systemTime(SYSTEM_TIME_MONOTONIC);
2391     int64_t latencyUs = (nowNs - startdata.startedNs + 500) / 1000;
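    // (the "+ 500" rounds the nanosecond-to-microsecond conversion to the nearest microsecond)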
2392 
2393     mLatencyHist.insert(latencyUs);
2394 
2395     // push into the recent samples
2396     {
2397         Mutex::Autolock al(mRecentLock);
2398 
2399         if (mRecentHead >= kRecentLatencyFrames) {
2400             mRecentHead = 0;
2401         }
2402         mRecentSamples[mRecentHead++] = latencyUs;
2403     }
2404 }
2405 
2406 bool MediaCodec::discardDecodeOnlyOutputBuffer(size_t index) {
2407     Mutex::Autolock al(mBufferLock);
2408     BufferInfo *info = &mPortBuffers[kPortIndexOutput][index];
2409     sp<MediaCodecBuffer> buffer = info->mData;
2410     int32_t flags;
2411     CHECK(buffer->meta()->findInt32("flags", &flags));
2412     if (flags & BUFFER_FLAG_DECODE_ONLY) {
2413         ALOGV("discardDecodeOnlyOutputBuffer: mPortBuffers[out][%zu] NOT owned by client", index);
2414         info->mOwnedByClient = false;
2415         info->mData.clear();
2416         mBufferChannel->discardBuffer(buffer);
2417         return true;
2418     }
2419     return false;
2420 }
2421 
2422 // static
2423 status_t MediaCodec::PostAndAwaitResponse(
2424         const sp<AMessage> &msg, sp<AMessage> *response) {
2425     status_t err = msg->postAndAwaitResponse(response);
2426 
2427     if (err != OK) {
2428         return err;
2429     }
2430 
2431     if (!(*response)->findInt32("err", &err)) {
2432         err = OK;
2433     }
2434 
2435     return err;
2436 }
2437 
2438 void MediaCodec::PostReplyWithError(const sp<AMessage> &msg, int32_t err) {
2439     sp<AReplyToken> replyID;
2440     CHECK(msg->senderAwaitsResponse(&replyID));
2441     PostReplyWithError(replyID, err);
2442 }
2443 
2444 void MediaCodec::PostReplyWithError(const sp<AReplyToken> &replyID, int32_t err) {
2445     int32_t finalErr = err;
2446     if (mReleasedByResourceManager) {
2447         // override the err code if MediaCodec has been released by ResourceManager.
2448         finalErr = DEAD_OBJECT;
2449     }
2450 
2451     sp<AMessage> response = new AMessage;
2452     response->setInt32("err", finalErr);
2453     response->postReply(replyID);
2454 }
2455 
2456 static CodecBase *CreateCCodec() {
2457     return new CCodec;
2458 }
2459 
2460 //static
2461 sp<CodecBase> MediaCodec::GetCodecBase(const AString &name, const char *owner) {
2462     if (owner) {
2463         if (strcmp(owner, "default") == 0) {
2464             return new ACodec;
2465         } else if (strncmp(owner, "codec2", 6) == 0) {
2466             return CreateCCodec();
2467         }
2468     }
2469 
2470     if (name.startsWithIgnoreCase("c2.")) {
2471         return CreateCCodec();
2472     } else if (name.startsWithIgnoreCase("omx.")) {
2473         // at this time only ACodec specifies a mime type.
2474         return new ACodec;
2475     } else {
2476         return NULL;
2477     }
2478 }
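// Illustrative examples (not an exhaustive mapping): a Codec2 name such as
// "c2.android.avc.decoder" selects CCodec via the "c2." prefix, while a legacy
// "omx."-prefixed name selects ACodec; any other name yields NULL here.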
2479 
2480 struct CodecListCache {
2481     CodecListCache()
2482         : mCodecInfoMap{[] {
2483               const sp<IMediaCodecList> mcl = MediaCodecList::getInstance();
2484               size_t count = mcl->countCodecs();
2485               std::map<std::string, sp<MediaCodecInfo>> codecInfoMap;
2486               for (size_t i = 0; i < count; ++i) {
2487                   sp<MediaCodecInfo> info = mcl->getCodecInfo(i);
2488                   codecInfoMap.emplace(info->getCodecName(), info);
2489               }
2490               return codecInfoMap;
2491           }()} {
2492     }
2493 
2494     const std::map<std::string, sp<MediaCodecInfo>> mCodecInfoMap;
2495 };
2496 
2497 static const CodecListCache &GetCodecListCache() {
2498     static CodecListCache sCache{};
2499     return sCache;
2500 }
2501 
2502 status_t MediaCodec::init(const AString &name) {
2503     ScopedTrace trace(ATRACE_TAG, "MediaCodec::Init#native");
2504     status_t err = mResourceManagerProxy->init();
2505     if (err != OK) {
2506         mErrorLog.log(LOG_TAG, base::StringPrintf(
2507                 "Fatal error: failed to initialize ResourceManager (err=%d)", err));
2508         mCodec = NULL; // remove the codec
2509         return err;
2510     }
2511 
2512     // save init parameters for reset
2513     mInitName = name;
2514 
2515     // Current video decoders do not return from OMX_FillThisBuffer
2516     // quickly, violating the OpenMAX specs; until that is remedied
2517     // we need to invest in an extra looper to free the main event
2518     // queue.
2519 
2520     mCodecInfo.clear();
2521 
2522     bool secureCodec = false;
2523     const char *owner = "";
2524     if (!name.startsWith("android.filter.")) {
2525         err = mGetCodecInfo(name, &mCodecInfo);
2526         if (err != OK) {
2527             mErrorLog.log(LOG_TAG, base::StringPrintf(
2528                     "Getting codec info with name '%s' failed (err=%d)", name.c_str(), err));
2529             mCodec = NULL;  // remove the codec.
2530             return err;
2531         }
2532         if (mCodecInfo == nullptr) {
2533             mErrorLog.log(LOG_TAG, base::StringPrintf(
2534                     "Getting codec info with name '%s' failed", name.c_str()));
2535             return NAME_NOT_FOUND;
2536         }
2537         secureCodec = name.endsWith(".secure");
2538         Vector<AString> mediaTypes;
2539         mCodecInfo->getSupportedMediaTypes(&mediaTypes);
2540         for (size_t i = 0; i < mediaTypes.size(); ++i) {
2541             if (mediaTypes[i].startsWith("video/")) {
2542                 mDomain = DOMAIN_VIDEO;
2543                 break;
2544             } else if (mediaTypes[i].startsWith("audio/")) {
2545                 mDomain = DOMAIN_AUDIO;
2546                 break;
2547             } else if (mediaTypes[i].startsWith("image/")) {
2548                 mDomain = DOMAIN_IMAGE;
2549                 break;
2550             }
2551         }
2552         owner = mCodecInfo->getOwnerName();
2553     }
2554 
2555     mCodec = mGetCodecBase(name, owner);
2556     if (mCodec == NULL) {
2557         mErrorLog.log(LOG_TAG, base::StringPrintf(
2558                 "Getting codec base with name '%s' (from '%s' HAL) failed", name.c_str(), owner));
2559         return NAME_NOT_FOUND;
2560     }
2561 
2562     if (mDomain == DOMAIN_VIDEO) {
2563         // video codec needs dedicated looper
2564         if (mCodecLooper == NULL) {
2565             status_t err = OK;
2566             mCodecLooper = new ALooper;
2567             mCodecLooper->setName("CodecLooper");
2568             err = mCodecLooper->start(false, false, ANDROID_PRIORITY_AUDIO);
2569             if (OK != err) {
2570                 mErrorLog.log(LOG_TAG, "Fatal error: codec looper failed to start");
2571                 return err;
2572             }
2573         }
2574 
2575         mCodecLooper->registerHandler(mCodec);
2576     } else {
2577         mLooper->registerHandler(mCodec);
2578     }
2579 
2580     mLooper->registerHandler(this);
2581 
2582     mCodec->setCallback(
2583             std::unique_ptr<CodecBase::CodecCallback>(
2584                     new CodecCallback(new AMessage(kWhatCodecNotify, this))));
2585     mBufferChannel = mCodec->getBufferChannel();
2586     mBufferChannel->setCallback(
2587             std::unique_ptr<CodecBase::BufferCallback>(
2588                     new BufferCallback(new AMessage(kWhatCodecNotify, this))));
2589     sp<AMessage> msg = new AMessage(kWhatInit, this);
2590     if (mCodecInfo) {
2591         msg->setObject("codecInfo", mCodecInfo);
2592         // name may be different from mCodecInfo->getCodecName() if we stripped
2593         // ".secure"
2594     }
2595     msg->setString("name", name);
2596 
2597     // initial naming setup covers the period before the first call to ::configure().
2598     // after that, we manage this through ::configure() and the setup message.
2599     if (mMetricsHandle != 0) {
2600         mediametrics_setCString(mMetricsHandle, kCodecCodec, name.c_str());
2601         mediametrics_setCString(mMetricsHandle, kCodecMode, toCodecMode(mDomain));
2602     }
2603 
2604     if (mDomain == DOMAIN_VIDEO) {
2605         mBatteryChecker = new BatteryChecker(new AMessage(kWhatCheckBatteryStats, this));
2606     }
2607 
2608     // If the ComponentName is not set yet, use the name passed by the user.
2609     if (mComponentName.empty()) {
2610         mIsHardware = !MediaCodecList::isSoftwareCodec(name);
2611         mResourceManagerProxy->setCodecName(name.c_str());
2612     }
2613 
2614     std::vector<MediaResourceParcel> resources;
2615     resources.push_back(MediaResource::CodecResource(secureCodec,
2616                                                      toMediaResourceSubType(mIsHardware, mDomain)));
2617 
2618     for (int i = 0; i <= kMaxRetry; ++i) {
2619         if (i > 0) {
2620             // Don't try to reclaim resource for the first time.
2621             if (!mResourceManagerProxy->reclaimResource(resources)) {
2622                 break;
2623             }
2624         }
2625 
2626         sp<AMessage> response;
2627         err = PostAndAwaitResponse(msg, &response);
2628         if (!isResourceError(err)) {
2629             break;
2630         }
2631     }
2632 
2633     if (OK == err) {
2634         // Notify the ResourceManager that this codec has been created
2635         // (initialized) successfully.
2636         mResourceManagerProxy->notifyClientCreated();
2637     }
2638     return err;
2639 }
2640 
2641 status_t MediaCodec::setCallback(const sp<AMessage> &callback) {
2642     sp<AMessage> msg = new AMessage(kWhatSetCallback, this);
2643     msg->setMessage("callback", callback);
2644 
2645     sp<AMessage> response;
2646     return PostAndAwaitResponse(msg, &response);
2647 }
2648 
2649 status_t MediaCodec::setOnFrameRenderedNotification(const sp<AMessage> &notify) {
2650     sp<AMessage> msg = new AMessage(kWhatSetNotification, this);
2651     msg->setMessage("on-frame-rendered", notify);
2652     return msg->post();
2653 }
2654 
2655 status_t MediaCodec::setOnFirstTunnelFrameReadyNotification(const sp<AMessage> &notify) {
2656     sp<AMessage> msg = new AMessage(kWhatSetNotification, this);
2657     msg->setMessage("first-tunnel-frame-ready", notify);
2658     return msg->post();
2659 }
2660 
2661 /*
2662  * MediaFormat Shaping forward declarations
2663  * including the property name we use for control.
2664  */
2665 static int enableMediaFormatShapingDefault = 1;
2666 static const char enableMediaFormatShapingProperty[] = "debug.stagefright.enableshaping";
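// Sketch of how one might toggle shaping for local debugging (whether the property
// can be set depends on the build): adb shell setprop debug.stagefright.enableshaping 0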
2667 static void mapFormat(AString componentName, const sp<AMessage> &format, const char *kind,
2668                       bool reverse);
2669 
2670 mediametrics_handle_t MediaCodec::createMediaMetrics(const sp<AMessage>& format,
2671                                                      uint32_t flags,
2672                                                      status_t* err) {
2673     *err = OK;
2674     mediametrics_handle_t nextMetricsHandle = mediametrics_create(kCodecKeyName);
2675     bool isEncoder = (flags & CONFIGURE_FLAG_ENCODE);
2676 
2677     // TODO: validity check log-session-id: it should be a 32-digit hex string.
2678     format->findString("log-session-id", &mLogSessionId);
2679 
2680     if (nextMetricsHandle != 0) {
2681         mediametrics_setInt64(nextMetricsHandle, kCodecId, mCodecId);
2682         int32_t profile = 0;
2683         if (format->findInt32("profile", &profile)) {
2684             mediametrics_setInt32(nextMetricsHandle, kCodecProfile, profile);
2685         }
2686         int32_t level = 0;
2687         if (format->findInt32("level", &level)) {
2688             mediametrics_setInt32(nextMetricsHandle, kCodecLevel, level);
2689         }
2690         mediametrics_setInt32(nextMetricsHandle, kCodecEncoder, isEncoder);
2691 
2692         if (!mLogSessionId.empty()) {
2693             mediametrics_setCString(nextMetricsHandle, kCodecLogSessionId, mLogSessionId.c_str());
2694         }
2695 
2696         // moved here from ::init()
2697         mediametrics_setCString(nextMetricsHandle, kCodecCodec, mInitName.c_str());
2698         mediametrics_setCString(nextMetricsHandle, kCodecMode, toCodecMode(mDomain));
2699     }
2700 
2701     if (mDomain == DOMAIN_VIDEO || mDomain == DOMAIN_IMAGE) {
2702         format->findInt32("width", &mWidth);
2703         format->findInt32("height", &mHeight);
2704         if (!format->findInt32("rotation-degrees", &mRotationDegrees)) {
2705             mRotationDegrees = 0;
2706         }
2707         if (nextMetricsHandle != 0) {
2708             mediametrics_setInt32(nextMetricsHandle, kCodecWidth, mWidth);
2709             mediametrics_setInt32(nextMetricsHandle, kCodecHeight, mHeight);
2710             mediametrics_setInt32(nextMetricsHandle, kCodecRotation, mRotationDegrees);
2711             int32_t maxWidth = 0;
2712             if (format->findInt32("max-width", &maxWidth)) {
2713                 mediametrics_setInt32(nextMetricsHandle, kCodecMaxWidth, maxWidth);
2714             }
2715             int32_t maxHeight = 0;
2716             if (format->findInt32("max-height", &maxHeight)) {
2717                 mediametrics_setInt32(nextMetricsHandle, kCodecMaxHeight, maxHeight);
2718             }
2719             int32_t colorFormat = -1;
2720             if (format->findInt32("color-format", &colorFormat)) {
2721                 mediametrics_setInt32(nextMetricsHandle, kCodecColorFormat, colorFormat);
2722             }
2723             int32_t appMaxInputSize = -1;
2724             if (format->findInt32(KEY_MAX_INPUT_SIZE, &appMaxInputSize)) {
2725                 mApiUsageMetrics.inputBufferSize.appMax = appMaxInputSize;
2726             }
2727             if (mDomain == DOMAIN_VIDEO) {
2728                 float frameRate = -1.0;
2729                 if (format->findFloat("frame-rate", &frameRate)) {
2730                     mediametrics_setDouble(nextMetricsHandle, kCodecFrameRate, frameRate);
2731                 }
2732                 float captureRate = -1.0;
2733                 if (format->findFloat("capture-rate", &captureRate)) {
2734                     mediametrics_setDouble(nextMetricsHandle, kCodecCaptureRate, captureRate);
2735                 }
2736                 float operatingRate = -1.0;
2737                 if (format->findFloat("operating-rate", &operatingRate)) {
2738                     mediametrics_setDouble(nextMetricsHandle, kCodecOperatingRate, operatingRate);
2739                 }
2740                 int32_t priority = -1;
2741                 if (format->findInt32("priority", &priority)) {
2742                     mediametrics_setInt32(nextMetricsHandle, kCodecPriority, priority);
2743                 }
2744             }
2745         }
2746 
2747         // Prevent possible integer overflow in downstream code.
2748         if (mWidth < 0 || mHeight < 0 ||
2749                (uint64_t)mWidth * mHeight > (uint64_t)INT32_MAX / 4) {
2750             mErrorLog.log(LOG_TAG, base::StringPrintf(
2751                     "Invalid size(s), width=%d, height=%d", mWidth, mHeight));
2752             mediametrics_delete(nextMetricsHandle);
2753             // Set the error code and return null handle.
2754             *err = BAD_VALUE;
2755             return 0;
2756         }
2757 
2758     } else {
2759         if (nextMetricsHandle != 0) {
2760             int32_t channelCount;
2761             if (format->findInt32(KEY_CHANNEL_COUNT, &channelCount)) {
2762                 mediametrics_setInt32(nextMetricsHandle, kCodecChannelCount, channelCount);
2763             }
2764             int32_t sampleRate;
2765             if (format->findInt32(KEY_SAMPLE_RATE, &sampleRate)) {
2766                 mediametrics_setInt32(nextMetricsHandle, kCodecSampleRate, sampleRate);
2767             }
2768         }
2769     }
2770 
2771     if (isEncoder) {
2772         int8_t enableShaping = property_get_bool(enableMediaFormatShapingProperty,
2773                                                  enableMediaFormatShapingDefault);
2774         if (!enableShaping) {
2775             ALOGI("format shaping disabled, property '%s'", enableMediaFormatShapingProperty);
2776             if (nextMetricsHandle != 0) {
2777                 mediametrics_setInt32(nextMetricsHandle, kCodecShapingEnhanced, -1);
2778             }
2779         } else {
2780             (void) shapeMediaFormat(format, flags, nextMetricsHandle);
2781             // XXX: do we want to do this regardless of shaping enablement?
2782             mapFormat(mComponentName, format, nullptr, false);
2783         }
2784     }
2785 
2786     // push min/max QP to MediaMetrics after shaping
2787     if (mDomain == DOMAIN_VIDEO && nextMetricsHandle != 0) {
2788         int32_t qpIMin = -1;
2789         if (format->findInt32("video-qp-i-min", &qpIMin)) {
2790             mediametrics_setInt32(nextMetricsHandle, kCodecRequestedVideoQPIMin, qpIMin);
2791         }
2792         int32_t qpIMax = -1;
2793         if (format->findInt32("video-qp-i-max", &qpIMax)) {
2794             mediametrics_setInt32(nextMetricsHandle, kCodecRequestedVideoQPIMax, qpIMax);
2795         }
2796         int32_t qpPMin = -1;
2797         if (format->findInt32("video-qp-p-min", &qpPMin)) {
2798             mediametrics_setInt32(nextMetricsHandle, kCodecRequestedVideoQPPMin, qpPMin);
2799         }
2800         int32_t qpPMax = -1;
2801         if (format->findInt32("video-qp-p-max", &qpPMax)) {
2802             mediametrics_setInt32(nextMetricsHandle, kCodecRequestedVideoQPPMax, qpPMax);
2803         }
2804         int32_t qpBMin = -1;
2805         if (format->findInt32("video-qp-b-min", &qpBMin)) {
2806             mediametrics_setInt32(nextMetricsHandle, kCodecRequestedVideoQPBMin, qpBMin);
2807         }
2808         int32_t qpBMax = -1;
2809         if (format->findInt32("video-qp-b-max", &qpBMax)) {
2810             mediametrics_setInt32(nextMetricsHandle, kCodecRequestedVideoQPBMax, qpBMax);
2811         }
2812     }
2813 
2814     updateLowLatency(format);
2815 
2816     return nextMetricsHandle;
2817 }
2818 
2819 status_t MediaCodec::configure(
2820         const sp<AMessage> &format,
2821         const sp<Surface> &nativeWindow,
2822         const sp<ICrypto> &crypto,
2823         uint32_t flags) {
2824     return configure(format, nativeWindow, crypto, NULL, flags);
2825 }
2826 
2827 status_t MediaCodec::configure(
2828         const sp<AMessage> &format,
2829         const sp<Surface> &surface,
2830         const sp<ICrypto> &crypto,
2831         const sp<IDescrambler> &descrambler,
2832         uint32_t flags) {
2833     ScopedTrace trace(ATRACE_TAG, "MediaCodec::configure#native");
2834     // Update the codec importance.
2835     updateCodecImportance(format);
2836 
2837     if (android::media::tv::flags::apply_picture_profiles()) {
2838         updatePictureProfile(format, true /* applyDefaultProfile */);
2839     }
2840 
2841     // Create and set up metrics for this codec.
2842     status_t err = OK;
2843     mediametrics_handle_t nextMetricsHandle = createMediaMetrics(format, flags, &err);
2844     if (err != OK) {
2845         return err;
2846     }
2847 
2848     sp<AMessage> msg = new AMessage(kWhatConfigure, this);
2849     msg->setMessage("format", format);
2850     msg->setInt32("flags", flags);
2851     msg->setObject("surface", surface);
2852 
2853     if (crypto != NULL || descrambler != NULL) {
2854         if (crypto != NULL) {
2855             msg->setPointer("crypto", crypto.get());
2856         } else {
2857             msg->setPointer("descrambler", descrambler.get());
2858         }
2859         if (nextMetricsHandle != 0) {
2860             mediametrics_setInt32(nextMetricsHandle, kCodecCrypto, 1);
2861         }
2862     } else if (mFlags & kFlagIsSecure) {
2863         // We'll catch this later when we process the buffers.
2864         ALOGW("Crypto or descrambler should be given for secure codec");
2865     }
2866 
2867     if (mConfigureMsg != nullptr) {
2868         // if re-configuring, we have one of these from before.
2869         // Recover the space before we discard the old mConfigureMsg
2870         mediametrics_handle_t metricsHandle;
2871         if (mConfigureMsg->findInt64("metrics", &metricsHandle)) {
2872             mediametrics_delete(metricsHandle);
2873         }
2874     }
2875     msg->setInt64("metrics", nextMetricsHandle);
2876 
2877     // save msg for reset
2878     mConfigureMsg = msg;
2879 
2880     sp<AMessage> callback = mCallback;
2881 
2882     if (mDomain == DOMAIN_VIDEO) {
2883         // Use format to compute initial operating frame rate.
2884         // After the successful configuration (and also possibly when output
2885         // format change notification), this value will be recalculated.
2886         bool isEncoder = (flags & CONFIGURE_FLAG_ENCODE);
2887         mFrameRate = getOperatingFrameRate(format, mFrameRate, isEncoder);
2888     }
2889 
2890     std::vector<MediaResourceParcel> resources;
2891     resources.push_back(MediaResource::CodecResource(mFlags & kFlagIsSecure,
2892             toMediaResourceSubType(mIsHardware, mDomain)));
2893     if (mDomain == DOMAIN_VIDEO || mDomain == DOMAIN_IMAGE) {
2894         // Don't know the buffer size at this point, but it's fine to use 1 because
2895         // the reclaimResource call doesn't consider the requester's buffer size for now.
2896         resources.push_back(MediaResource::GraphicMemoryResource(1));
2897     }
2898     for (int i = 0; i <= kMaxRetry; ++i) {
2899         sp<AMessage> response;
2900         err = PostAndAwaitResponse(msg, &response);
2901         if (err != OK && err != INVALID_OPERATION) {
2902             if (isResourceError(err) && !mResourceManagerProxy->reclaimResource(resources)) {
2903                 break;
2904             }
2905             // MediaCodec now set state to UNINITIALIZED upon any fatal error.
2906             // To maintain backward-compatibility, do a reset() to put codec
2907             // back into INITIALIZED state.
2908             // But don't reset if the err is INVALID_OPERATION, which means
2909             // the configure failure is due to wrong state.
2910 
2911             ALOGE("configure failed with err 0x%08x, resetting...", err);
2912             status_t err2 = reset();
2913             if (err2 != OK) {
2914                 ALOGE("retrying configure: failed to reset codec (%08x)", err2);
2915                 break;
2916             }
2917             if (callback != nullptr) {
2918                 err2 = setCallback(callback);
2919                 if (err2 != OK) {
2920                     ALOGE("retrying configure: failed to set callback (%08x)", err2);
2921                     break;
2922                 }
2923             }
2924         }
2925         if (!isResourceError(err)) {
2926             break;
2927         }
2928     }
2929 
2930     return err;
2931 }
2932 
2933 status_t MediaCodec::getRequiredResources(std::vector<InstanceResourceInfo>& resources) {
2934     resources.clear();
2935     // Make sure codec availability feature is on.
2936     if (!android::media::codec::codec_availability() ||
2937         !android::media::codec::codec_availability_support()) {
2938         return ERROR_UNSUPPORTED;
2939     }
2940     // Make sure that the codec was configured already.
2941     if (mState != CONFIGURED && mState != STARTING && mState != STARTED &&
2942         mState != FLUSHING && mState != FLUSHED) {
2943         ALOGE("Codec wasn't configured yet!");
2944         return INVALID_OPERATION;
2945     }
2946 
2947     Mutexed<std::vector<InstanceResourceInfo>>::Locked resourcesLocked(mRequiredResourceInfo);
2948     if (!(*resourcesLocked).empty()) {
2949         resources = *resourcesLocked;
2950         return OK;
2951     }
2952 
2953     return ERROR_UNSUPPORTED;
2954 }
2955 
2956 // Media Format Shaping support
2957 //
2958 
2959 static android::mediaformatshaper::FormatShaperOps_t *sShaperOps = NULL;
2960 static bool sIsHandheld = true;
2961 
2962 static bool connectFormatShaper() {
2963     static std::once_flag sCheckOnce;
2964 
2965     ALOGV("connectFormatShaper...");
2966 
2967     std::call_once(sCheckOnce, [&](){
2968 
2969         void *libHandle = NULL;
2970         nsecs_t loading_started = systemTime(SYSTEM_TIME_MONOTONIC);
2971 
2972         // prefer any copy in the mainline module
2973         //
2974         android_namespace_t *mediaNs = android_get_exported_namespace("com_android_media");
2975         AString libraryName = "libmediaformatshaper.so";
2976 
2977         if (mediaNs != NULL) {
2978             static const android_dlextinfo dlextinfo = {
2979                 .flags = ANDROID_DLEXT_USE_NAMESPACE,
2980                 .library_namespace = mediaNs,
2981             };
2982 
2983             AString libraryMainline = "/apex/com.android.media/";
2984 #if __LP64__
2985             libraryMainline.append("lib64/");
2986 #else
2987             libraryMainline.append("lib/");
2988 #endif
2989             libraryMainline.append(libraryName);
2990 
2991             libHandle = android_dlopen_ext(libraryMainline.c_str(), RTLD_NOW|RTLD_NODELETE,
2992                                                  &dlextinfo);
2993 
2994             if (libHandle != NULL) {
2995                 sShaperOps = (android::mediaformatshaper::FormatShaperOps_t*)
2996                                 dlsym(libHandle, "shaper_ops");
2997             } else {
2998                 ALOGW("connectFormatShaper: unable to load mainline formatshaper %s",
2999                       libraryMainline.c_str());
3000             }
3001         } else {
3002             ALOGV("connectFormatShaper: couldn't find media namespace.");
3003         }
3004 
3005         // fall back to the system partition, if present.
3006         //
3007         if (sShaperOps == NULL) {
3008 
3009             libHandle = dlopen(libraryName.c_str(), RTLD_NOW|RTLD_NODELETE);
3010 
3011             if (libHandle != NULL) {
3012                 sShaperOps = (android::mediaformatshaper::FormatShaperOps_t*)
3013                                 dlsym(libHandle, "shaper_ops");
3014             } else {
3015                 ALOGW("connectFormatShaper: unable to load formatshaper %s", libraryName.c_str());
3016             }
3017         }
3018 
3019         if (sShaperOps != nullptr
3020             && sShaperOps->version != android::mediaformatshaper::SHAPER_VERSION_V1) {
3021             ALOGW("connectFormatShaper: unhandled version ShaperOps: %d, DISABLED",
3022                   sShaperOps->version);
3023             sShaperOps = nullptr;
3024         }
3025 
3026         if (sShaperOps != nullptr) {
3027             ALOGV("connectFormatShaper: connected to library %s", libraryName.c_str());
3028         }
3029 
3030         nsecs_t loading_finished = systemTime(SYSTEM_TIME_MONOTONIC);
3031         ALOGV("connectFormatShaper: loaded libraries: %" PRId64 " us",
3032               (loading_finished - loading_started)/1000);
3033 
3034 
3035         // we also want to know whether this is a handheld device
3036         // start with assumption that the device is handheld.
3037         sIsHandheld = true;
3038         sp<IServiceManager> serviceMgr = defaultServiceManager();
3039         sp<content::pm::IPackageManagerNative> packageMgr;
3040         if (serviceMgr.get() != nullptr) {
3041             sp<IBinder> binder = serviceMgr->waitForService(String16("package_native"));
3042             packageMgr = interface_cast<content::pm::IPackageManagerNative>(binder);
3043         }
3044         // if we didn't get serviceMgr, we'll leave packageMgr as default null
3045         if (packageMgr != nullptr) {
3046 
3047             // MUST have these
3048             static const String16 featuresNeeded[] = {
3049                 String16("android.hardware.touchscreen")
3050             };
3051             // these must be present to be a handheld
3052             for (::android::String16 required : featuresNeeded) {
3053                 bool hasFeature = false;
3054                 binder::Status status = packageMgr->hasSystemFeature(required, 0, &hasFeature);
3055                 if (!status.isOk()) {
3056                     ALOGE("%s: hasSystemFeature failed: %s",
3057                         __func__, status.exceptionMessage().c_str());
3058                     continue;
3059                 }
3060                 ALOGV("feature %s says %d", String8(required).c_str(), hasFeature);
3061                 if (!hasFeature) {
3062                     ALOGV("... which means we are not handheld");
3063                     sIsHandheld = false;
3064                     break;
3065                 }
3066             }
3067 
3068             // MUST NOT have these
3069             static const String16 featuresDisallowed[] = {
3070                 String16("android.hardware.type.automotive"),
3071                 String16("android.hardware.type.television"),
3072                 String16("android.hardware.type.watch")
3073             };
3074             // any of these present -- we aren't a handheld
3075             for (::android::String16 forbidden : featuresDisallowed) {
3076                 bool hasFeature = false;
3077                 binder::Status status = packageMgr->hasSystemFeature(forbidden, 0, &hasFeature);
3078                 if (!status.isOk()) {
3079                     ALOGE("%s: hasSystemFeature failed: %s",
3080                         __func__, status.exceptionMessage().c_str());
3081                     continue;
3082                 }
3083                 ALOGV("feature %s says %d", String8(forbidden).c_str(), hasFeature);
3084                 if (hasFeature) {
3085                     ALOGV("... which means we are not handheld");
3086                     sIsHandheld = false;
3087                     break;
3088                 }
3089             }
3090         }
3091 
3092     });
3093 
3094     return true;
3095 }
3096 
3097 
3098 #if 0
3099 // a construct to force the above dlopen() to run very early.
3100 // goal: keep the dlopen() off the critical path of latency-sensitive apps
3101 // if this doesn't run, cold start of those apps is slower by the time it takes to dlopen()
3102 // TODO(b/183454066): tradeoffs between memory of early loading vs latency of late loading
3103 //
3104 static bool forceEarlyLoadingShaper = connectFormatShaper();
3105 #endif
3106 
3107 // parse the codec's properties: mapping, whether it meets min quality, etc
3108 // and pass them into the video quality code
3109 //
3110 static void loadCodecProperties(mediaformatshaper::shaperHandle_t shaperHandle,
3111                                   sp<MediaCodecInfo> codecInfo, AString mediaType) {
3112 
3113     sp<MediaCodecInfo::Capabilities> capabilities =
3114                     codecInfo->getCapabilitiesFor(mediaType.c_str());
3115     if (capabilities == nullptr) {
3116         ALOGI("no capabilities as part of the codec?");
3117     } else {
3118         const sp<AMessage> &details = capabilities->getDetails();
3119         AString mapTarget;
3120         int count = details->countEntries();
3121         for(int ix = 0; ix < count; ix++) {
3122             AMessage::Type entryType;
3123             const char *mapSrc = details->getEntryNameAt(ix, &entryType);
3124             // XXX: re-use ix from getEntryAt() to avoid additional findXXX() invocation
3125             //
3126             static const char *featurePrefix = "feature-";
3127             static const int featurePrefixLen = strlen(featurePrefix);
3128             static const char *tuningPrefix = "tuning-";
3129             static const int tuningPrefixLen = strlen(tuningPrefix);
3130             static const char *mappingPrefix = "mapping-";
3131             static const int mappingPrefixLen = strlen(mappingPrefix);
3132 
3133             if (mapSrc == NULL) {
3134                 continue;
3135             } else if (!strncmp(mapSrc, featurePrefix, featurePrefixLen)) {
3136                 int32_t intValue;
3137                 if (details->findInt32(mapSrc, &intValue)) {
3138                     ALOGV("-- feature '%s' -> %d", mapSrc, intValue);
3139                     (void)(sShaperOps->setFeature)(shaperHandle, &mapSrc[featurePrefixLen],
3140                                                    intValue);
3141                 }
3142                 continue;
3143             } else if (!strncmp(mapSrc, tuningPrefix, tuningPrefixLen)) {
3144                 AString value;
3145                 if (details->findString(mapSrc, &value)) {
3146                     ALOGV("-- tuning '%s' -> '%s'", mapSrc, value.c_str());
3147                     (void)(sShaperOps->setTuning)(shaperHandle, &mapSrc[tuningPrefixLen],
3148                                                    value.c_str());
3149                 }
3150                 continue;
3151             } else if (!strncmp(mapSrc, mappingPrefix, mappingPrefixLen)) {
3152                 AString target;
3153                 if (details->findString(mapSrc, &target)) {
3154                     ALOGV("-- mapping %s: map %s to %s", mapSrc, &mapSrc[mappingPrefixLen],
3155                           target.c_str());
3156                     // key is really "kind-key"
3157                     // separate that, so setMap() sees the triple  kind, key, value
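                    // e.g. an entry named "mapping-tuning-qp-min" (illustrative name)
                    // yields kind "tuning" and key "qp-min"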
3158                     const char *kind = &mapSrc[mappingPrefixLen];
3159                     const char *sep = strchr(kind, '-');
3160                     if (sep != NULL) {
3161                         const char *key = sep + 1;
3162                         std::string xkind = std::string(kind, sep - kind);
3163                         (void)(sShaperOps->setMap)(shaperHandle, xkind.c_str(),
3164                                                    key, target.c_str());
3165                     }
3166                 }
3167             }
3168         }
3169     }
3170 
3171     // we also carry in the codec description whether we are on a handheld device.
3172     // this info is eventually used by both the Codec and the C2 machinery to inform
3173     // the underlying codec whether to do any shaping.
3174     //
3175     if (sIsHandheld) {
3176         // set if we are indeed a handheld device (or, in future, any eligible device);
3177         // missing on devices that aren't eligible for minimum quality enforcement.
3178         (void)(sShaperOps->setFeature)(shaperHandle, "_vq_eligible.device", 1);
3179         // strictly speaking, this is a tuning, but tunings are strings while features store ints
3180         (void)(sShaperOps->setFeature)(shaperHandle, "_quality.target", 1 /* S_HANDHELD */);
3181     }
3182 }
3183 
3184 status_t MediaCodec::setupFormatShaper(AString mediaType) {
3185     ALOGV("setupFormatShaper: initializing shaper data for codec %s mediaType %s",
3186           mComponentName.c_str(), mediaType.c_str());
3187 
3188     nsecs_t mapping_started = systemTime(SYSTEM_TIME_MONOTONIC);
3189 
3190     // someone might have beaten us to it.
3191     mediaformatshaper::shaperHandle_t shaperHandle;
3192     shaperHandle = sShaperOps->findShaper(mComponentName.c_str(), mediaType.c_str());
3193     if (shaperHandle != nullptr) {
3194         ALOGV("shaperhandle %p -- no initialization needed", shaperHandle);
3195         return OK;
3196     }
3197 
3198     // we get to build & register one
3199     shaperHandle = sShaperOps->createShaper(mComponentName.c_str(), mediaType.c_str());
3200     if (shaperHandle == nullptr) {
3201         ALOGW("unable to create a shaper for codec %s mediaType %s",
3202               mComponentName.c_str(), mediaType.c_str());
3203         return OK;
3204     }
3205 
3206     (void) loadCodecProperties(shaperHandle, mCodecInfo, mediaType);
3207 
3208     shaperHandle = sShaperOps->registerShaper(shaperHandle,
3209                                               mComponentName.c_str(), mediaType.c_str());
3210 
3211     nsecs_t mapping_finished = systemTime(SYSTEM_TIME_MONOTONIC);
3212     ALOGV("setupFormatShaper: populated shaper node for codec %s: %" PRId64 " us",
3213           mComponentName.c_str(), (mapping_finished - mapping_started)/1000);
3214 
3215     return OK;
3216 }
3217 
3218 
3219 // Format Shaping
3220 //      Mapping and Manipulation of encoding parameters
3221 //
3222 //      All of these decisions are pushed into the shaper instead of here within MediaCodec.
3223 //      this includes decisions based on whether the codec implements minimum quality bars
3224 //      itself or needs to be shaped outside of the codec.
3225 //      This keeps all those decisions in one place.
3226 //      It also means that we push some extra decision information (is this a handheld device
3227 //      or one that is otherwise eligible for minimum quality manipulation, which generational
3228 //      quality target is in force, etc).  This allows those values to be cached in the
3229 //      per-codec structures that are done 1 time within a process instead of for each
3230 //      codec instantiation.
3231 //
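//      Rough call flow, as a reading aid for the functions below (not a separate API):
//          connectFormatShaper()          -- dlopen the shaper library once per process
//          setupFormatShaper(mediaType)   -- create and register a per-codec shaper node
//          shapeMediaFormat(format, ...)  -- run shapeFormat() and fold the deltas into 'format'
//          mapFormat(...)                 -- rename parameters using the codec's mappings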
3232 
3233 status_t MediaCodec::shapeMediaFormat(
3234             const sp<AMessage> &format,
3235             uint32_t flags,
3236             mediametrics_handle_t metricsHandle) {
3237     ALOGV("shapeMediaFormat entry");
3238 
3239     if (!(flags & CONFIGURE_FLAG_ENCODE)) {
3240         ALOGW("shapeMediaFormat: not encoder");
3241         return OK;
3242     }
3243     if (mCodecInfo == NULL) {
3244         ALOGW("shapeMediaFormat: no codecinfo");
3245         return OK;
3246     }
3247 
3248     AString mediaType;
3249     if (!format->findString("mime", &mediaType)) {
3250         ALOGW("shapeMediaFormat: no mediaType information");
3251         return OK;
3252     }
3253 
3254     // make sure we have the function entry points for the shaper library
3255     //
3256 
3257     connectFormatShaper();
3258     if (sShaperOps == nullptr) {
3259         ALOGW("shapeMediaFormat: no MediaFormatShaper hooks available");
3260         return OK;
3261     }
3262 
3263     // find the shaper information for this codec+mediaType pair
3264     //
3265     mediaformatshaper::shaperHandle_t shaperHandle;
3266     shaperHandle = sShaperOps->findShaper(mComponentName.c_str(), mediaType.c_str());
3267     if (shaperHandle == nullptr)  {
3268         setupFormatShaper(mediaType);
3269         shaperHandle = sShaperOps->findShaper(mComponentName.c_str(), mediaType.c_str());
3270     }
3271     if (shaperHandle == nullptr) {
3272         ALOGW("shapeMediaFormat: no handler for codec %s mediatype %s",
3273               mComponentName.c_str(), mediaType.c_str());
3274         return OK;
3275     }
3276 
3277     // run the shaper
3278     //
3279 
3280     ALOGV("Shaping input: %s", format->debugString(0).c_str());
3281 
3282     sp<AMessage> updatedFormat = format->dup();
3283     AMediaFormat *updatedNdkFormat = AMediaFormat_fromMsg(&updatedFormat);
3284 
3285     int result = (*sShaperOps->shapeFormat)(shaperHandle, updatedNdkFormat, flags);
3286     if (result == 0) {
3287         AMediaFormat_getFormat(updatedNdkFormat, &updatedFormat);
3288 
3289         sp<AMessage> deltas = updatedFormat->changesFrom(format, false /* deep */);
3290         size_t changeCount = deltas->countEntries();
3291         ALOGD("shapeMediaFormat: deltas(%zu): %s", changeCount, deltas->debugString(2).c_str());
3292         if (metricsHandle != 0) {
3293             mediametrics_setInt32(metricsHandle, kCodecShapingEnhanced, changeCount);
3294         }
3295         if (changeCount > 0) {
3296             if (metricsHandle != 0) {
3297                 // save some old properties before we fold in the new ones
3298                 int32_t bitrate;
3299                 if (format->findInt32(KEY_BIT_RATE, &bitrate)) {
3300                     mediametrics_setInt32(metricsHandle, kCodecOriginalBitrate, bitrate);
3301                 }
3302                 int32_t qpIMin = -1;
3303                 if (format->findInt32("original-video-qp-i-min", &qpIMin)) {
3304                     mediametrics_setInt32(metricsHandle, kCodecOriginalVideoQPIMin, qpIMin);
3305                 }
3306                 int32_t qpIMax = -1;
3307                 if (format->findInt32("original-video-qp-i-max", &qpIMax)) {
3308                     mediametrics_setInt32(metricsHandle, kCodecOriginalVideoQPIMax, qpIMax);
3309                 }
3310                 int32_t qpPMin = -1;
3311                 if (format->findInt32("original-video-qp-p-min", &qpPMin)) {
3312                     mediametrics_setInt32(metricsHandle, kCodecOriginalVideoQPPMin, qpPMin);
3313                 }
3314                 int32_t qpPMax = -1;
3315                 if (format->findInt32("original-video-qp-p-max", &qpPMax)) {
3316                     mediametrics_setInt32(metricsHandle, kCodecOriginalVideoQPPMax, qpPMax);
3317                 }
3318                 int32_t qpBMin = -1;
3319                 if (format->findInt32("original-video-qp-b-min", &qpBMin)) {
3320                     mediametrics_setInt32(metricsHandle, kCodecOriginalVideoQPBMin, qpBMin);
3321                 }
3322                 int32_t qpBMax = -1;
3323                 if (format->findInt32("original-video-qp-b-max", &qpBMax)) {
3324                     mediametrics_setInt32(metricsHandle, kCodecOriginalVideoQPBMax, qpBMax);
3325                 }
3326             }
3327             // NB: for any field in both format and deltas, the deltas copy wins
3328             format->extend(deltas);
3329         }
3330     }
3331 
3332     AMediaFormat_delete(updatedNdkFormat);
3333     return OK;
3334 }
3335 
3336 static void mapFormat(AString componentName, const sp<AMessage> &format, const char *kind,
3337                       bool reverse) {
3338     AString mediaType;
3339     if (!format->findString("mime", &mediaType)) {
3340         ALOGV("mapFormat: no mediaType information");
3341         return;
3342     }
3343     ALOGV("mapFormat: codec %s mediatype %s kind %s reverse %d", componentName.c_str(),
3344           mediaType.c_str(), kind ? kind : "<all>", reverse);
3345 
3346     // make sure we have the function entry points for the shaper library
3347     //
3348 
3349 #if 0
3350     // let's play it faster: only do the mapping if we've already loaded the library
3351     connectFormatShaper();
3352 #endif
3353     if (sShaperOps == nullptr) {
3354         ALOGV("mapFormat: no MediaFormatShaper hooks available");
3355         return;
3356     }
3357 
3358     // find the shaper information for this codec+mediaType pair
3359     //
3360     mediaformatshaper::shaperHandle_t shaperHandle;
3361     shaperHandle = sShaperOps->findShaper(componentName.c_str(), mediaType.c_str());
3362     if (shaperHandle == nullptr) {
3363         ALOGV("mapFormat: no shaper handle");
3364         return;
3365     }
3366 
3367     const char **mappings;
3368     if (reverse)
3369         mappings = sShaperOps->getReverseMappings(shaperHandle, kind);
3370     else
3371         mappings = sShaperOps->getMappings(shaperHandle, kind);
3372 
3373     if (mappings == nullptr) {
3374         ALOGV("no mappings returned");
3375         return;
3376     }
3377 
3378     ALOGV("Pre-mapping: %s",  format->debugString(2).c_str());
3379     // do the mapping
3380     //
3381     int entries = format->countEntries();
3382     for (int i = 0; ; i += 2) {
3383         if (mappings[i] == nullptr) {
3384             break;
3385         }
3386 
3387         size_t ix = format->findEntryByName(mappings[i]);
3388         if (ix < entries) {
3389             ALOGV("map '%s' to '%s'", mappings[i], mappings[i+1]);
3390             status_t status = format->setEntryNameAt(ix, mappings[i+1]);
3391             if (status != OK) {
3392                 ALOGW("Unable to map from '%s' to '%s': status %d",
3393                       mappings[i], mappings[i+1], status);
3394             }
3395         }
3396     }
3397     ALOGV("Post-mapping: %s",  format->debugString(2).c_str());
3398 
3399 
3400     // reclaim the mapping memory
3401     for (int i = 0; ; i += 2) {
3402         if (mappings[i] == nullptr) {
3403             break;
3404         }
3405         free((void*)mappings[i]);
3406         free((void*)mappings[i + 1]);
3407     }
3408     free(mappings);
3409     mappings = nullptr;
3410 }
3411 
3412 //
3413 // end of Format Shaping hooks within MediaCodec
3414 //
3415 
3416 status_t MediaCodec::releaseCrypto()
3417 {
3418     ALOGV("releaseCrypto");
3419 
3420     sp<AMessage> msg = new AMessage(kWhatDrmReleaseCrypto, this);
3421 
3422     sp<AMessage> response;
3423     status_t status = msg->postAndAwaitResponse(&response);
3424 
3425     if (status == OK && response != NULL) {
3426         CHECK(response->findInt32("status", &status));
3427         ALOGV("releaseCrypto ret: %d ", status);
3428     }
3429     else {
3430         ALOGE("releaseCrypto err: %d", status);
3431     }
3432 
3433     return status;
3434 }
3435 
3436 void MediaCodec::onReleaseCrypto(const sp<AMessage>& msg)
3437 {
3438     status_t status = INVALID_OPERATION;
3439     if (mCrypto != NULL) {
3440         ALOGV("onReleaseCrypto: mCrypto: %p (%d)", mCrypto.get(), mCrypto->getStrongCount());
3441         mBufferChannel->setCrypto(NULL);
3442         // TODO change to ALOGV
3443         ALOGD("onReleaseCrypto: [before clear]  mCrypto: %p (%d)",
3444                 mCrypto.get(), mCrypto->getStrongCount());
3445         mCrypto.clear();
3446 
3447         status = OK;
3448     }
3449     else {
3450         ALOGW("onReleaseCrypto: No mCrypto. err: %d", status);
3451     }
3452 
3453     sp<AMessage> response = new AMessage;
3454     response->setInt32("status", status);
3455 
3456     sp<AReplyToken> replyID;
3457     CHECK(msg->senderAwaitsResponse(&replyID));
3458     response->postReply(replyID);
3459 }
3460 
3461 status_t MediaCodec::setInputSurface(
3462         const sp<PersistentSurface> &surface) {
3463     sp<AMessage> msg = new AMessage(kWhatSetInputSurface, this);
3464     msg->setObject("input-surface", surface.get());
3465 
3466     sp<AMessage> response;
3467     return PostAndAwaitResponse(msg, &response);
3468 }
3469 
3470 status_t MediaCodec::detachOutputSurface() {
3471     sp<AMessage> msg = new AMessage(kWhatDetachSurface, this);
3472 
3473     sp<AMessage> response;
3474     return PostAndAwaitResponse(msg, &response);
3475 }
3476 
3477 status_t MediaCodec::setSurface(const sp<Surface> &surface) {
3478     sp<AMessage> msg = new AMessage(kWhatSetSurface, this);
3479     msg->setObject("surface", surface);
3480 
3481     sp<AMessage> response;
3482     return PostAndAwaitResponse(msg, &response);
3483 }
3484 
3485 status_t MediaCodec::createInputSurface(
3486         sp<IGraphicBufferProducer>* bufferProducer) {
3487     sp<AMessage> msg = new AMessage(kWhatCreateInputSurface, this);
3488 
3489     sp<AMessage> response;
3490     status_t err = PostAndAwaitResponse(msg, &response);
3491     if (err == NO_ERROR) {
3492         // unwrap the sp<IGraphicBufferProducer>
3493         sp<RefBase> obj;
3494         bool found = response->findObject("input-surface", &obj);
3495         CHECK(found);
3496         sp<BufferProducerWrapper> wrapper(
3497                 static_cast<BufferProducerWrapper*>(obj.get()));
3498         *bufferProducer = wrapper->getBufferProducer();
3499     } else {
3500         ALOGW("createInputSurface failed, err=%d", err);
3501     }
3502     return err;
3503 }
3504 
3505 uint64_t MediaCodec::getGraphicBufferSize() {
3506     if (mDomain != DOMAIN_VIDEO && mDomain != DOMAIN_IMAGE) {
3507         return 0;
3508     }
3509 
3510     uint64_t size = 0;
3511     size_t portNum = sizeof(mPortBuffers) / sizeof((mPortBuffers)[0]);
3512     for (size_t i = 0; i < portNum; ++i) {
3513         // TODO: this is just an estimation, we should get the real buffer size from ACodec.
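        // the 3/2 bytes-per-pixel factor below assumes a YUV 4:2:0 frame layout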
3514         size += mPortBuffers[i].size() * mWidth * mHeight * 3 / 2;
3515     }
3516     return size;
3517 }
3518 
3519 status_t MediaCodec::start() {
3520     ScopedTrace trace(ATRACE_TAG, "MediaCodec::start#native");
3521     sp<AMessage> msg = new AMessage(kWhatStart, this);
3522 
3523     sp<AMessage> callback;
3524 
3525     status_t err;
3526     std::vector<MediaResourceParcel> resources;
3527     resources.push_back(MediaResource::CodecResource(mFlags & kFlagIsSecure,
3528             toMediaResourceSubType(mIsHardware, mDomain)));
3529     if (mDomain == DOMAIN_VIDEO || mDomain == DOMAIN_IMAGE) {
3530         // Don't know the buffer size at this point, but it's fine to use 1 because
3531         // the reclaimResource call doesn't consider the requester's buffer size for now.
3532         resources.push_back(MediaResource::GraphicMemoryResource(1));
3533     }
3534     for (int i = 0; i <= kMaxRetry; ++i) {
3535         if (i > 0) {
3536             // Don't try to reclaim resource for the first time.
3537             if (!mResourceManagerProxy->reclaimResource(resources)) {
3538                 break;
3539             }
3540             // Recover codec from previous error before retry start.
3541             err = reset();
3542             if (err != OK) {
3543                 ALOGE("retrying start: failed to reset codec");
3544                 break;
3545             }
3546             if (callback != nullptr) {
3547                 err = setCallback(callback);
3548                 if (err != OK) {
3549                     ALOGE("retrying start: failed to set callback");
3550                     break;
3551                 }
3552                 ALOGD("succeeded in setting callback for reclaim");
3553             }
3554             sp<AMessage> response;
3555             err = PostAndAwaitResponse(mConfigureMsg, &response);
3556             if (err != OK) {
3557                 ALOGE("retrying start: failed to configure codec");
3558                 break;
3559             }
3560         }
3561 
3562         // Keep callback message after the first iteration if necessary.
3563         if (i == 0 && mCallback != nullptr && mFlags & kFlagIsAsync) {
3564             callback = mCallback;
3565             ALOGD("keep callback message for reclaim");
3566         }
3567 
3568         sp<AMessage> response;
3569         err = PostAndAwaitResponse(msg, &response);
3570         if (!isResourceError(err)) {
3571             break;
3572         }
3573     }
3574     return err;
3575 }
3576 
3577 status_t MediaCodec::stop() {
3578     ScopedTrace trace(ATRACE_TAG, "MediaCodec::stop#native");
3579     sp<AMessage> msg = new AMessage(kWhatStop, this);
3580 
3581     sp<AMessage> response;
3582     return PostAndAwaitResponse(msg, &response);
3583 }
3584 
3585 bool MediaCodec::hasPendingBuffer(int portIndex) {
3586     return std::any_of(
3587             mPortBuffers[portIndex].begin(), mPortBuffers[portIndex].end(),
3588             [](const BufferInfo &info) { return info.mOwnedByClient; });
3589 }
3590 
3591 bool MediaCodec::hasPendingBuffer() {
3592     return hasPendingBuffer(kPortIndexInput) || hasPendingBuffer(kPortIndexOutput);
3593 }
3594 
3595 status_t MediaCodec::reclaim(bool force) {
3596     ALOGD("MediaCodec::reclaim(%p) %s", this, mInitName.c_str());
3597     sp<AMessage> msg = new AMessage(kWhatRelease, this);
3598     msg->setInt32("reclaimed", 1);
3599     msg->setInt32("force", force ? 1 : 0);
3600 
3601     sp<AMessage> response;
3602     status_t ret = PostAndAwaitResponse(msg, &response);
3603     if (ret == -ENOENT) {
3604         ALOGD("MediaCodec looper is gone, skip reclaim");
3605         ret = OK;
3606     }
3607     return ret;
3608 }
3609 
3610 status_t MediaCodec::release() {
3611     sp<AMessage> msg = new AMessage(kWhatRelease, this);
3612     sp<AMessage> response;
3613     return PostAndAwaitResponse(msg, &response);
3614 }
3615 
3616 status_t MediaCodec::releaseAsync(const sp<AMessage> &notify) {
3617     sp<AMessage> msg = new AMessage(kWhatRelease, this);
3618     msg->setMessage("async", notify);
3619     sp<AMessage> response;
3620     return PostAndAwaitResponse(msg, &response);
3621 }
3622 
3623 status_t MediaCodec::reset() {
3624     /* When external-facing MediaCodec object is created,
3625        it is already initialized.  Thus, reset is essentially
3626        release() followed by init(), plus clearing the state */
3627     ScopedTrace trace(ATRACE_TAG, "MediaCodec::reset#native");
3628     status_t err = release();
3629 
3630     // unregister handlers
3631     if (mCodec != NULL) {
3632         if (mCodecLooper != NULL) {
3633             mCodecLooper->unregisterHandler(mCodec->id());
3634         } else {
3635             mLooper->unregisterHandler(mCodec->id());
3636         }
3637         mCodec = NULL;
3638     }
3639     mLooper->unregisterHandler(id());
3640 
3641     mFlags = 0;    // clear all flags
3642     mStickyError = OK;
3643 
3644     // reset state not reset by setState(UNINITIALIZED)
3645     mDequeueInputReplyID = 0;
3646     mDequeueOutputReplyID = 0;
3647     mDequeueInputTimeoutGeneration = 0;
3648     mDequeueOutputTimeoutGeneration = 0;
3649     mHaveInputSurface = false;
3650 
3651     if (err == OK) {
3652         err = init(mInitName);
3653     }
3654     return err;
3655 }
3656 
3657 status_t MediaCodec::queueInputBuffer(
3658         size_t index,
3659         size_t offset,
3660         size_t size,
3661         int64_t presentationTimeUs,
3662         uint32_t flags,
3663         AString *errorDetailMsg) {
3664     ScopedTrace trace(ATRACE_TAG, "MediaCodec::queueInputBuffer#native");
3665     if (errorDetailMsg != NULL) {
3666         errorDetailMsg->clear();
3667     }
3668 
3669     sp<AMessage> msg = new AMessage(kWhatQueueInputBuffer, this);
3670     msg->setSize("index", index);
3671     msg->setSize("offset", offset);
3672     msg->setSize("size", size);
3673     msg->setInt64("timeUs", presentationTimeUs);
3674     msg->setInt32("flags", flags);
3675     msg->setPointer("errorDetailMsg", errorDetailMsg);
3676     sp<AMessage> response;
3677     return PostAndAwaitResponse(msg, &response);
3678 }
3679 
3680 status_t MediaCodec::queueInputBuffers(
3681         size_t index,
3682         size_t offset,
3683         size_t size,
3684         const sp<BufferInfosWrapper> &infos,
3685         AString *errorDetailMsg) {
3686     ScopedTrace trace(ATRACE_TAG, "MediaCodec::queueInputBuffers#native");
3687     sp<AMessage> msg = new AMessage(kWhatQueueInputBuffer, this);
3688     uint32_t bufferFlags = 0;
3689     uint32_t flagsinAllAU = BUFFER_FLAG_DECODE_ONLY | BUFFER_FLAG_CODECCONFIG;
3690     uint32_t andFlags = flagsinAllAU;
3691     if (infos == nullptr || infos->value.empty()) {
3692         ALOGE("ERROR: Large Audio frame with no BufferInfo");
3693         return BAD_VALUE;
3694     }
3695     int infoIdx = 0;
3696     std::vector<AccessUnitInfo> &accessUnitInfo = infos->value;
3697     int64_t minTimeUs = accessUnitInfo.front().mTimestamp;
3698     bool foundEndOfStream = false;
3699     for ( ; infoIdx < accessUnitInfo.size() && !foundEndOfStream; ++infoIdx) {
3700         bufferFlags |= accessUnitInfo[infoIdx].mFlags;
3701         andFlags &= accessUnitInfo[infoIdx].mFlags;
3702         if (bufferFlags & BUFFER_FLAG_END_OF_STREAM) {
3703             foundEndOfStream = true;
3704         }
3705     }
3706     bufferFlags = bufferFlags & (andFlags | (~flagsinAllAU));
3707     if (infoIdx != accessUnitInfo.size()) {
3708         ALOGE("queueInputBuffers has incorrect access-units");
3709         return -EINVAL;
3710     }
3711     msg->setSize("index", index);
3712     msg->setSize("offset", offset);
3713     msg->setSize("size", size);
3714     msg->setInt64("timeUs", minTimeUs);
3715     // Make this represent flags for the entire buffer
3716     // decodeOnly Flag is set only when all buffers are decodeOnly
3717     msg->setInt32("flags", bufferFlags);
3718     msg->setObject("accessUnitInfo", infos);
3719     msg->setPointer("errorDetailMsg", errorDetailMsg);
3720     sp<AMessage> response;
3721     return PostAndAwaitResponse(msg, &response);
3722 }
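
// Worked example of the flag folding above (illustrative values only):
//
//     AU0.mFlags = BUFFER_FLAG_DECODE_ONLY
//     AU1.mFlags = BUFFER_FLAG_DECODE_ONLY
//     AU2.mFlags = BUFFER_FLAG_DECODE_ONLY | BUFFER_FLAG_END_OF_STREAM
//
//     OR  pass:  bufferFlags = DECODE_ONLY | END_OF_STREAM
//     AND pass:  andFlags    = DECODE_ONLY
//     bufferFlags & (andFlags | ~flagsinAllAU) = DECODE_ONLY | END_OF_STREAM
//
// Had only AU0 carried DECODE_ONLY, the AND pass would clear it from the
// buffer-level flags while still propagating END_OF_STREAM, matching the
// "decodeOnly is set only when all access units are decodeOnly" rule above.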
3723 
3724 status_t MediaCodec::queueSecureInputBuffer(
3725         size_t index,
3726         size_t offset,
3727         const CryptoPlugin::SubSample *subSamples,
3728         size_t numSubSamples,
3729         const uint8_t key[16],
3730         const uint8_t iv[16],
3731         CryptoPlugin::Mode mode,
3732         const CryptoPlugin::Pattern &pattern,
3733         int64_t presentationTimeUs,
3734         uint32_t flags,
3735         AString *errorDetailMsg) {
3736     ScopedTrace trace(ATRACE_TAG, "MediaCodec::queueSecureInputBuffer#native");
3737     if (errorDetailMsg != NULL) {
3738         errorDetailMsg->clear();
3739     }
3740 
3741     sp<AMessage> msg = new AMessage(kWhatQueueInputBuffer, this);
3742     msg->setSize("index", index);
3743     msg->setSize("offset", offset);
3744     msg->setPointer("subSamples", (void *)subSamples);
3745     msg->setSize("numSubSamples", numSubSamples);
3746     msg->setPointer("key", (void *)key);
3747     msg->setPointer("iv", (void *)iv);
3748     msg->setInt32("mode", mode);
3749     msg->setInt32("encryptBlocks", pattern.mEncryptBlocks);
3750     msg->setInt32("skipBlocks", pattern.mSkipBlocks);
3751     msg->setInt64("timeUs", presentationTimeUs);
3752     msg->setInt32("flags", flags);
3753     msg->setPointer("errorDetailMsg", errorDetailMsg);
3754 
3755     sp<AMessage> response;
3756     status_t err = PostAndAwaitResponse(msg, &response);
3757 
3758     return err;
3759 }
3760 
3761 status_t MediaCodec::queueSecureInputBuffers(
3762         size_t index,
3763         size_t offset,
3764         size_t size,
3765         const sp<BufferInfosWrapper> &auInfo,
3766         const sp<CryptoInfosWrapper> &cryptoInfos,
3767         AString *errorDetailMsg) {
3768     ScopedTrace trace(ATRACE_TAG, "MediaCodec::queueSecureInputBuffers#native");
3769     if (errorDetailMsg != NULL) {
3770         errorDetailMsg->clear();
3771     }
3772     sp<AMessage> msg = new AMessage(kWhatQueueInputBuffer, this);
3773     uint32_t bufferFlags = 0;
3774     uint32_t flagsinAllAU = BUFFER_FLAG_DECODE_ONLY | BUFFER_FLAG_CODECCONFIG;
3775     uint32_t andFlags = flagsinAllAU;
3776     if (auInfo == nullptr
3777             || auInfo->value.empty()
3778             || cryptoInfos == nullptr
3779             || cryptoInfos->value.empty()) {
3780         ALOGE("ERROR: Large Audio frame with no BufferInfo/CryptoInfo");
3781         return BAD_VALUE;
3782     }
3783     int infoIdx = 0;
3784     std::vector<AccessUnitInfo> &accessUnitInfo = auInfo->value;
3785     int64_t minTimeUs = accessUnitInfo.front().mTimestamp;
3786     bool foundEndOfStream = false;
3787     for ( ; infoIdx < accessUnitInfo.size() && !foundEndOfStream; ++infoIdx) {
3788         bufferFlags |= accessUnitInfo[infoIdx].mFlags;
3789         andFlags &= accessUnitInfo[infoIdx].mFlags;
3790         if (bufferFlags & BUFFER_FLAG_END_OF_STREAM) {
3791             foundEndOfStream = true;
3792         }
3793     }
3794     bufferFlags = bufferFlags & (andFlags | (~flagsinAllAU));
3795     if (infoIdx != accessUnitInfo.size()) {
3796         ALOGE("queueSecureInputBuffers has incorrect access-units");
3797         return -EINVAL;
3798     }
3799     msg->setSize("index", index);
3800     msg->setSize("offset", offset);
3801     msg->setSize("ssize", size);
3802     msg->setInt64("timeUs", minTimeUs);
3803     msg->setInt32("flags", bufferFlags);
3804     msg->setObject("accessUnitInfo", auInfo);
3805     msg->setObject("cryptoInfos", cryptoInfos);
3806     msg->setPointer("errorDetailMsg", errorDetailMsg);
3807 
3808     sp<AMessage> response;
3809     status_t err = PostAndAwaitResponse(msg, &response);
3810 
3811     return err;
3812 }
3813 
3814 status_t MediaCodec::queueBuffer(
3815         size_t index,
3816         const std::shared_ptr<C2Buffer> &buffer,
3817         const sp<BufferInfosWrapper> &bufferInfos,
3818         const sp<AMessage> &tunings,
3819         AString *errorDetailMsg) {
3820     ScopedTrace trace(ATRACE_TAG, "MediaCodec::queueBuffer#native");
3821     if (errorDetailMsg != NULL) {
3822         errorDetailMsg->clear();
3823     }
3824     if (bufferInfos == nullptr || bufferInfos->value.empty()) {
3825         return BAD_VALUE;
3826     }
3827     status_t err = OK;
3828     sp<AMessage> msg = new AMessage(kWhatQueueInputBuffer, this);
3829     msg->setSize("index", index);
3830     sp<WrapperObject<std::shared_ptr<C2Buffer>>> obj{
3831         new WrapperObject<std::shared_ptr<C2Buffer>>{buffer}};
3832     msg->setObject("c2buffer", obj);
3833     if (OK != (err = generateFlagsFromAccessUnitInfo(msg, bufferInfos))) {
3834         return err;
3835     }
3836     msg->setObject("accessUnitInfo", bufferInfos);
3837     if (tunings && tunings->countEntries() > 0) {
3838         msg->setMessage("tunings", tunings);
3839     }
3840     msg->setPointer("errorDetailMsg", errorDetailMsg);
3841     sp<AMessage> response;
3842     err = PostAndAwaitResponse(msg, &response);
3843 
3844     return err;
3845 }
3846 
3847 status_t MediaCodec::queueEncryptedBuffer(
3848         size_t index,
3849         const sp<hardware::HidlMemory> &buffer,
3850         size_t offset,
3851         size_t size,
3852         const sp<BufferInfosWrapper> &bufferInfos,
3853         const sp<CryptoInfosWrapper> &cryptoInfos,
3854         const sp<AMessage> &tunings,
3855         AString *errorDetailMsg) {
3856     ScopedTrace trace(ATRACE_TAG, "MediaCodec::queueEncryptedBuffer#native");
3857     if (errorDetailMsg != NULL) {
3858         errorDetailMsg->clear();
3859     }
3860     if (bufferInfos == nullptr || bufferInfos->value.empty()) {
3861         return BAD_VALUE;
3862     }
3863     status_t err = OK;
3864     sp<AMessage> msg = new AMessage(kWhatQueueInputBuffer, this);
3865     msg->setSize("index", index);
3866     sp<WrapperObject<sp<hardware::HidlMemory>>> memory{
3867         new WrapperObject<sp<hardware::HidlMemory>>{buffer}};
3868     msg->setObject("memory", memory);
3869     msg->setSize("offset", offset);
3870     if (cryptoInfos != nullptr) {
3871         msg->setSize("ssize", size);
3872         msg->setObject("cryptoInfos", cryptoInfos);
3873     } else {
3874         msg->setSize("size", size);
3875     }
3876     msg->setObject("accessUnitInfo", bufferInfos);
3877     if (OK != (err = generateFlagsFromAccessUnitInfo(msg, bufferInfos))) {
3878         return err;
3879     }
3880     if (tunings && tunings->countEntries() > 0) {
3881         msg->setMessage("tunings", tunings);
3882     }
3883     msg->setPointer("errorDetailMsg", errorDetailMsg);
3884 
3885     sp<AMessage> response;
3886     err = PostAndAwaitResponse(msg, &response);
3887 
3888     return err;
3889 }
3890 
3891 status_t MediaCodec::dequeueInputBuffer(size_t *index, int64_t timeoutUs) {
3892     sp<AMessage> msg = new AMessage(kWhatDequeueInputBuffer, this);
3893     msg->setInt64("timeoutUs", timeoutUs);
3894 
3895     sp<AMessage> response;
3896     status_t err;
3897     if ((err = PostAndAwaitResponse(msg, &response)) != OK) {
3898         return err;
3899     }
3900 
3901     CHECK(response->findSize("index", index));
3902 
3903     return OK;
3904 }
3905 
3906 status_t MediaCodec::dequeueOutputBuffer(
3907         size_t *index,
3908         size_t *offset,
3909         size_t *size,
3910         int64_t *presentationTimeUs,
3911         uint32_t *flags,
3912         int64_t timeoutUs) {
3913     ScopedTrace trace(ATRACE_TAG, "MediaCodec::dequeueOutputBuffer#native");
3914     sp<AMessage> msg = new AMessage(kWhatDequeueOutputBuffer, this);
3915     msg->setInt64("timeoutUs", timeoutUs);
3916 
3917     sp<AMessage> response;
3918     status_t err;
3919     if ((err = PostAndAwaitResponse(msg, &response)) != OK) {
3920         return err;
3921     }
3922 
3923     CHECK(response->findSize("index", index));
3924     CHECK(response->findSize("offset", offset));
3925     CHECK(response->findSize("size", size));
3926     CHECK(response->findInt64("timeUs", presentationTimeUs));
3927     CHECK(response->findInt32("flags", (int32_t *)flags));
3928 
3929     return OK;
3930 }
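
// Minimal synchronous-mode sketch built from the entry points in this file
// (illustrative; assumes a configured/started codec, raw input in inData/inSize,
// a timestamp ptsUs, and a 10ms poll):
//
//     size_t inIdx;
//     if (codec->dequeueInputBuffer(&inIdx, 10000ll) == OK) {
//         sp<MediaCodecBuffer> in;
//         CHECK_EQ(codec->getInputBuffer(inIdx, &in), OK);
//         memcpy(in->data(), inData, inSize);
//         codec->queueInputBuffer(inIdx, 0 /* offset */, inSize, ptsUs, 0 /* flags */, nullptr);
//     }
//
//     size_t outIdx, offset, size; int64_t timeUs; uint32_t flags;
//     status_t err = codec->dequeueOutputBuffer(
//             &outIdx, &offset, &size, &timeUs, &flags, 10000ll);
//     if (err == OK) {
//         codec->renderOutputBufferAndRelease(outIdx);    // or releaseOutputBuffer()
//     } else if (err == INFO_FORMAT_CHANGED) {
//         sp<AMessage> fmt;
//         codec->getOutputFormat(&fmt);                   // pick up the new format
//     } else if (err != INFO_OUTPUT_BUFFERS_CHANGED && err != -EAGAIN) {
//         // sticky or fatal error; see the error handling in onMessageReceived()
//     }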
3931 
3932 status_t MediaCodec::renderOutputBufferAndRelease(size_t index) {
3933     ScopedTrace trace(ATRACE_TAG, "MediaCodec::renderOutputBufferAndRelease#native");
3934     sp<AMessage> msg = new AMessage(kWhatReleaseOutputBuffer, this);
3935     msg->setSize("index", index);
3936     msg->setInt32("render", true);
3937 
3938     sp<AMessage> response;
3939     return PostAndAwaitResponse(msg, &response);
3940 }
3941 
3942 status_t MediaCodec::renderOutputBufferAndRelease(size_t index, int64_t timestampNs) {
3943     ScopedTrace trace(ATRACE_TAG, "MediaCodec::renderOutputBufferAndRelease#native");
3944     sp<AMessage> msg = new AMessage(kWhatReleaseOutputBuffer, this);
3945     msg->setSize("index", index);
3946     msg->setInt32("render", true);
3947     msg->setInt64("timestampNs", timestampNs);
3948 
3949     sp<AMessage> response;
3950     return PostAndAwaitResponse(msg, &response);
3951 }
3952 
3953 status_t MediaCodec::releaseOutputBuffer(size_t index) {
3954     ScopedTrace trace(ATRACE_TAG, "MediaCodec::releaseOutputBuffer#native");
3955     sp<AMessage> msg = new AMessage(kWhatReleaseOutputBuffer, this);
3956     msg->setSize("index", index);
3957 
3958     sp<AMessage> response;
3959     return PostAndAwaitResponse(msg, &response);
3960 }
3961 
3962 status_t MediaCodec::signalEndOfInputStream() {
3963     sp<AMessage> msg = new AMessage(kWhatSignalEndOfInputStream, this);
3964 
3965     sp<AMessage> response;
3966     return PostAndAwaitResponse(msg, &response);
3967 }
3968 
3969 status_t MediaCodec::getOutputFormat(sp<AMessage> *format) const {
3970     sp<AMessage> msg = new AMessage(kWhatGetOutputFormat, this);
3971 
3972     sp<AMessage> response;
3973     status_t err;
3974     if ((err = PostAndAwaitResponse(msg, &response)) != OK) {
3975         return err;
3976     }
3977 
3978     CHECK(response->findMessage("format", format));
3979 
3980     return OK;
3981 }
3982 
3983 status_t MediaCodec::getInputFormat(sp<AMessage> *format) const {
3984     sp<AMessage> msg = new AMessage(kWhatGetInputFormat, this);
3985 
3986     sp<AMessage> response;
3987     status_t err;
3988     if ((err = PostAndAwaitResponse(msg, &response)) != OK) {
3989         return err;
3990     }
3991 
3992     CHECK(response->findMessage("format", format));
3993 
3994     return OK;
3995 }
3996 
3997 status_t MediaCodec::getName(AString *name) const {
3998     sp<AMessage> msg = new AMessage(kWhatGetName, this);
3999 
4000     sp<AMessage> response;
4001     status_t err;
4002     if ((err = PostAndAwaitResponse(msg, &response)) != OK) {
4003         return err;
4004     }
4005 
4006     CHECK(response->findString("name", name));
4007 
4008     return OK;
4009 }
4010 
4011 status_t MediaCodec::getCodecInfo(sp<MediaCodecInfo> *codecInfo) const {
4012     sp<AMessage> msg = new AMessage(kWhatGetCodecInfo, this);
4013 
4014     sp<AMessage> response;
4015     status_t err;
4016     if ((err = PostAndAwaitResponse(msg, &response)) != OK) {
4017         return err;
4018     }
4019 
4020     sp<RefBase> obj;
4021     CHECK(response->findObject("codecInfo", &obj));
4022     *codecInfo = static_cast<MediaCodecInfo *>(obj.get());
4023 
4024     return OK;
4025 }
4026 
4027 // this is the user-callable entry point
4028 status_t MediaCodec::getMetrics(mediametrics_handle_t &reply) {
4029 
4030     reply = 0;
4031 
4032     sp<AMessage> msg = new AMessage(kWhatGetMetrics, this);
4033     sp<AMessage> response;
4034     status_t err;
4035     if ((err = PostAndAwaitResponse(msg, &response)) != OK) {
4036         return err;
4037     }
4038 
4039     CHECK(response->findInt64("metrics", &reply));
4040 
4041     return OK;
4042 }
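
// Caller-side note (illustrative): the handle returned through |reply| is a
// mediametrics_dup() of the codec's item (see onGetMetrics() below), so the
// caller owns it; the assumption here is that it is freed with
// mediametrics_delete() when no longer needed:
//
//     mediametrics_handle_t metrics = 0;
//     if (codec->getMetrics(metrics) == OK && metrics != 0) {
//         // ... inspect or dump the metrics item ...
//         mediametrics_delete(metrics);
//     }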
4043 
4044 // runs on the looper thread (for mutex purposes)
4045 void MediaCodec::onGetMetrics(const sp<AMessage>& msg) {
4046 
4047     mediametrics_handle_t results = 0;
4048 
4049     sp<AReplyToken> replyID;
4050     CHECK(msg->senderAwaitsResponse(&replyID));
4051 
4052     if (mMetricsHandle != 0) {
4053         updateMediametrics();
4054         results = mediametrics_dup(mMetricsHandle);
4055         updateEphemeralMediametrics(results);
4056     } else if (mLastMetricsHandle != 0) {
4057         // After error, mMetricsHandle is cleared, but we keep the last
4058         // metrics around so that it can be queried by getMetrics().
4059         results = mediametrics_dup(mLastMetricsHandle);
4060     } else {
4061         results = mediametrics_dup(mMetricsHandle);
4062     }
4063 
4064     sp<AMessage> response = new AMessage;
4065     response->setInt64("metrics", results);
4066     response->postReply(replyID);
4067 }
4068 
4069 status_t MediaCodec::getInputBuffers(Vector<sp<MediaCodecBuffer> > *buffers) const {
4070     sp<AMessage> msg = new AMessage(kWhatGetBuffers, this);
4071     msg->setInt32("portIndex", kPortIndexInput);
4072     msg->setPointer("buffers", buffers);
4073 
4074     sp<AMessage> response;
4075     return PostAndAwaitResponse(msg, &response);
4076 }
4077 
4078 status_t MediaCodec::getOutputBuffers(Vector<sp<MediaCodecBuffer> > *buffers) const {
4079     sp<AMessage> msg = new AMessage(kWhatGetBuffers, this);
4080     msg->setInt32("portIndex", kPortIndexOutput);
4081     msg->setPointer("buffers", buffers);
4082 
4083     sp<AMessage> response;
4084     return PostAndAwaitResponse(msg, &response);
4085 }
4086 
4087 status_t MediaCodec::getOutputBuffer(size_t index, sp<MediaCodecBuffer> *buffer) {
4088     sp<AMessage> format;
4089     return getBufferAndFormat(kPortIndexOutput, index, buffer, &format);
4090 }
4091 
4092 status_t MediaCodec::getOutputFormat(size_t index, sp<AMessage> *format) {
4093     sp<MediaCodecBuffer> buffer;
4094     return getBufferAndFormat(kPortIndexOutput, index, &buffer, format);
4095 }
4096 
4097 status_t MediaCodec::getInputBuffer(size_t index, sp<MediaCodecBuffer> *buffer) {
4098     sp<AMessage> format;
4099     return getBufferAndFormat(kPortIndexInput, index, buffer, &format);
4100 }
4101 
4102 bool MediaCodec::isExecuting() const {
4103     return mState == STARTED || mState == FLUSHED;
4104 }
4105 
4106 status_t MediaCodec::getBufferAndFormat(
4107         size_t portIndex, size_t index,
4108         sp<MediaCodecBuffer> *buffer, sp<AMessage> *format) {
4109     // use mutex instead of a context switch
4110     if (mReleasedByResourceManager) {
4111         mErrorLog.log(LOG_TAG, "resource already released");
4112         return DEAD_OBJECT;
4113     }
4114 
4115     if (buffer == NULL) {
4116         mErrorLog.log(LOG_TAG, "null buffer");
4117         return INVALID_OPERATION;
4118     }
4119 
4120     if (format == NULL) {
4121         mErrorLog.log(LOG_TAG, "null format");
4122         return INVALID_OPERATION;
4123     }
4124 
4125     buffer->clear();
4126     format->clear();
4127 
4128     if (!isExecuting()) {
4129         mErrorLog.log(LOG_TAG, base::StringPrintf(
4130                 "Invalid to call %s; only valid in Executing states",
4131                 apiStateString().c_str()));
4132         return INVALID_OPERATION;
4133     }
4134 
4135     // we do not want mPortBuffers to change during this section
4136     // we also don't want mOwnedByClient to change during this
4137     Mutex::Autolock al(mBufferLock);
4138 
4139     std::vector<BufferInfo> &buffers = mPortBuffers[portIndex];
4140     if (index >= buffers.size()) {
4141         ALOGE("getBufferAndFormat - trying to get buffer with "
4142               "bad index (index=%zu buffer_size=%zu)", index, buffers.size());
4143         mErrorLog.log(LOG_TAG, base::StringPrintf("Bad index (index=%zu)", index));
4144         return INVALID_OPERATION;
4145     }
4146 
4147     const BufferInfo &info = buffers[index];
4148     if (!info.mOwnedByClient) {
4149         ALOGE("getBufferAndFormat - invalid operation "
4150               "(the index %zu is not owned by client)", index);
4151         mErrorLog.log(LOG_TAG, base::StringPrintf("index %zu is not owned by client", index));
4152         return INVALID_OPERATION;
4153     }
4154 
4155     *buffer = info.mData;
4156     *format = info.mData->format();
4157 
4158     return OK;
4159 }
4160 
4161 status_t MediaCodec::flush() {
4162     ScopedTrace trace(ATRACE_TAG, "MediaCodec::flush#native");
4163     sp<AMessage> msg = new AMessage(kWhatFlush, this);
4164 
4165     sp<AMessage> response;
4166     return PostAndAwaitResponse(msg, &response);
4167 }
4168 
4169 status_t MediaCodec::requestIDRFrame() {
4170     (new AMessage(kWhatRequestIDRFrame, this))->post();
4171 
4172     return OK;
4173 }
4174 
4175 status_t MediaCodec::querySupportedVendorParameters(std::vector<std::string> *names) {
4176     return mCodec->querySupportedParameters(names);
4177 }
4178 
4179 status_t MediaCodec::describeParameter(const std::string &name, CodecParameterDescriptor *desc) {
4180     return mCodec->describeParameter(name, desc);
4181 }
4182 
4183 status_t MediaCodec::subscribeToVendorParameters(const std::vector<std::string> &names) {
4184     return mCodec->subscribeToParameters(names);
4185 }
4186 
4187 status_t MediaCodec::unsubscribeFromVendorParameters(const std::vector<std::string> &names) {
4188     return mCodec->unsubscribeFromParameters(names);
4189 }
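
// Illustrative discovery flow for the vendor-parameter hooks above (assumes an
// initialized codec, since these calls forward straight to mCodec):
//
//     std::vector<std::string> names;
//     if (codec->querySupportedVendorParameters(&names) == OK) {
//         CodecParameterDescriptor desc;
//         for (const std::string &name : names) {
//             if (codec->describeParameter(name, &desc) == OK) {
//                 // inspect desc before setting or subscribing to the parameter
//             }
//         }
//         codec->subscribeToVendorParameters(names);
//     }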
4190 
4191 void MediaCodec::requestActivityNotification(const sp<AMessage> &notify) {
4192     sp<AMessage> msg = new AMessage(kWhatRequestActivityNotification, this);
4193     msg->setMessage("notify", notify);
4194     msg->post();
4195 }
4196 
4197 void MediaCodec::requestCpuBoostIfNeeded() {
4198     if (mCpuBoostRequested) {
4199         return;
4200     }
4201     int32_t colorFormat;
4202     if (mOutputFormat->contains("hdr-static-info")
4203             && mOutputFormat->findInt32("color-format", &colorFormat)
4204             // check format for OMX only, for C2 the format is always opaque since the
4205             // software rendering doesn't go through client
4206             && ((mSoftRenderer != NULL && colorFormat == OMX_COLOR_FormatYUV420Planar16)
4207                     || mOwnerName.equalsIgnoreCase("codec2::software"))) {
4208         int32_t left, top, right, bottom, width, height;
4209         int64_t totalPixel = 0;
4210         if (mOutputFormat->findRect("crop", &left, &top, &right, &bottom)) {
4211             totalPixel = (right - left + 1) * (bottom - top + 1);
4212         } else if (mOutputFormat->findInt32("width", &width)
4213                 && mOutputFormat->findInt32("height", &height)) {
4214             totalPixel = width * height;
4215         }
4216         if (totalPixel >= 1920 * 1080) {
4217             mResourceManagerProxy->addResource(MediaResource::CpuBoostResource());
4218             mCpuBoostRequested = true;
4219         }
4220     }
4221 }
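
// Arithmetic behind the boost threshold above (illustrative): a 3840x2160 HDR
// clip rendered in software reports crop (0, 0, 3839, 2159), so
// totalPixel = 3840 * 2160 = 8294400 >= 1920 * 1080, and a CpuBoost resource is
// requested once per codec instance (mCpuBoostRequested latches the request).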
4222 
4223 BatteryChecker::BatteryChecker(const sp<AMessage> &msg, int64_t timeoutUs)
4224     : mTimeoutUs(timeoutUs)
4225     , mLastActivityTimeUs(-1ll)
4226     , mBatteryStatNotified(false)
4227     , mBatteryCheckerGeneration(0)
4228     , mIsExecuting(false)
4229     , mBatteryCheckerMsg(msg) {}
4230 
4231 void BatteryChecker::onCodecActivity(std::function<void()> batteryOnCb) {
4232     if (!isExecuting()) {
4233         // ignore if not executing
4234         return;
4235     }
4236     if (!mBatteryStatNotified) {
4237         batteryOnCb();
4238         mBatteryStatNotified = true;
4239         sp<AMessage> msg = mBatteryCheckerMsg->dup();
4240         msg->setInt32("generation", mBatteryCheckerGeneration);
4241 
4242         // post checker and clear last activity time
4243         msg->post(mTimeoutUs);
4244         mLastActivityTimeUs = -1ll;
4245     } else {
4246         // update last activity time
4247         mLastActivityTimeUs = ALooper::GetNowUs();
4248     }
4249 }
4250 
4251 void BatteryChecker::onCheckBatteryTimer(
4252         const sp<AMessage> &msg, std::function<void()> batteryOffCb) {
4253     // ignore if this checker already expired because the client resource was removed
4254     int32_t generation;
4255     if (!msg->findInt32("generation", &generation)
4256             || generation != mBatteryCheckerGeneration) {
4257         return;
4258     }
4259 
4260     if (mLastActivityTimeUs < 0ll) {
4261         // timed out inactive, do not repost checker
4262         batteryOffCb();
4263         mBatteryStatNotified = false;
4264     } else {
4265         // repost checker and clear last activity time
4266         msg->post(mTimeoutUs + mLastActivityTimeUs - ALooper::GetNowUs());
4267         mLastActivityTimeUs = -1ll;
4268     }
4269 }
4270 
4271 void BatteryChecker::onClientRemoved() {
4272     mBatteryStatNotified = false;
4273     mBatteryCheckerGeneration++;
4274 }
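
// Timeline sketch for the checker above (illustrative, with timeout T):
//
//     t0     : onCodecActivity()     -> batteryOnCb(), post check for t0 + T
//     t1     : onCodecActivity()     -> records mLastActivityTimeUs = t1 (t1 < t0 + T)
//     t0 + T : onCheckBatteryTimer() -> activity seen, repost check for t1 + T
//     t1 + T : onCheckBatteryTimer() -> no activity since, batteryOffCb()
//
// A timer posted before onClientRemoved() is ignored afterwards because its
// "generation" no longer matches mBatteryCheckerGeneration.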
4275 
4276 ////////////////////////////////////////////////////////////////////////////////
4277 
4278 void MediaCodec::cancelPendingDequeueOperations() {
4279     if (mFlags & kFlagDequeueInputPending) {
4280         mErrorLog.log(LOG_TAG, "Pending dequeue input buffer request cancelled");
4281         PostReplyWithError(mDequeueInputReplyID, INVALID_OPERATION);
4282 
4283         ++mDequeueInputTimeoutGeneration;
4284         mDequeueInputReplyID = 0;
4285         mFlags &= ~kFlagDequeueInputPending;
4286     }
4287 
4288     if (mFlags & kFlagDequeueOutputPending) {
4289         mErrorLog.log(LOG_TAG, "Pending dequeue output buffer request cancelled");
4290         PostReplyWithError(mDequeueOutputReplyID, INVALID_OPERATION);
4291 
4292         ++mDequeueOutputTimeoutGeneration;
4293         mDequeueOutputReplyID = 0;
4294         mFlags &= ~kFlagDequeueOutputPending;
4295     }
4296 }
4297 
4298 bool MediaCodec::handleDequeueInputBuffer(const sp<AReplyToken> &replyID, bool newRequest) {
4299     if (!isExecuting()) {
4300         mErrorLog.log(LOG_TAG, base::StringPrintf(
4301                 "Invalid to call %s; only valid in executing state",
4302                 apiStateString().c_str()));
4303         PostReplyWithError(replyID, INVALID_OPERATION);
4304     } else if (mFlags & kFlagIsAsync) {
4305         mErrorLog.log(LOG_TAG, "Invalid to call in async mode");
4306         PostReplyWithError(replyID, INVALID_OPERATION);
4307     } else if (newRequest && (mFlags & kFlagDequeueInputPending)) {
4308         mErrorLog.log(LOG_TAG, "Invalid to call while another dequeue input request is pending");
4309         PostReplyWithError(replyID, INVALID_OPERATION);
4310         return true;
4311     } else if (mFlags & kFlagStickyError) {
4312         PostReplyWithError(replyID, getStickyError());
4313         return true;
4314     }
4315 
4316     ssize_t index = dequeuePortBuffer(kPortIndexInput);
4317 
4318     if (index < 0) {
4319         CHECK_EQ(index, -EAGAIN);
4320         return false;
4321     }
4322 
4323     sp<AMessage> response = new AMessage;
4324     response->setSize("index", index);
4325     response->postReply(replyID);
4326 
4327     return true;
4328 }
4329 
4330 // always called from the looper thread
4331 MediaCodec::DequeueOutputResult MediaCodec::handleDequeueOutputBuffer(
4332         const sp<AReplyToken> &replyID, bool newRequest) {
4333     if (!isExecuting()) {
4334         mErrorLog.log(LOG_TAG, base::StringPrintf(
4335                 "Invalid to call %s; only valid in executing state",
4336                 apiStateString().c_str()));
4337         PostReplyWithError(replyID, INVALID_OPERATION);
4338     } else if (mFlags & kFlagIsAsync) {
4339         mErrorLog.log(LOG_TAG, "Invalid to call in async mode");
4340         PostReplyWithError(replyID, INVALID_OPERATION);
4341     } else if (newRequest && (mFlags & kFlagDequeueOutputPending)) {
4342         mErrorLog.log(LOG_TAG, "Invalid to call while another dequeue output request is pending");
4343         PostReplyWithError(replyID, INVALID_OPERATION);
4344     } else if (mFlags & kFlagStickyError) {
4345         PostReplyWithError(replyID, getStickyError());
4346     } else if (mFlags & kFlagOutputBuffersChanged) {
4347         PostReplyWithError(replyID, INFO_OUTPUT_BUFFERS_CHANGED);
4348         mFlags &= ~kFlagOutputBuffersChanged;
4349     } else {
4350         sp<AMessage> response = new AMessage;
4351         BufferInfo *info = peekNextPortBuffer(kPortIndexOutput);
4352         if (!info) {
4353             return DequeueOutputResult::kNoBuffer;
4354         }
4355 
4356         // In synchronous mode, output format change should be handled
4357         // at dequeue to put the event at the correct order.
4358 
4359         const sp<MediaCodecBuffer> &buffer = info->mData;
4360         handleOutputFormatChangeIfNeeded(buffer);
4361         if (mFlags & kFlagOutputFormatChanged) {
4362             PostReplyWithError(replyID, INFO_FORMAT_CHANGED);
4363             mFlags &= ~kFlagOutputFormatChanged;
4364             return DequeueOutputResult::kRepliedWithError;
4365         }
4366 
4367         ssize_t index = dequeuePortBuffer(kPortIndexOutput);
4368         if (discardDecodeOnlyOutputBuffer(index)) {
4369             return DequeueOutputResult::kDiscardedBuffer;
4370         }
4371 
4372         response->setSize("index", index);
4373         response->setSize("offset", buffer->offset());
4374         response->setSize("size", buffer->size());
4375 
4376         int64_t timeUs;
4377         CHECK(buffer->meta()->findInt64("timeUs", &timeUs));
4378 
4379         response->setInt64("timeUs", timeUs);
4380 
4381         int32_t flags;
4382         CHECK(buffer->meta()->findInt32("flags", &flags));
4383 
4384         response->setInt32("flags", flags);
4385 
4386         // NOTE: we must account the stats for an output buffer only after we
4387         // already handled a potential output format change that could have
4388         // started a new subsession.
4389         statsBufferReceived(timeUs, buffer);
4390 
4391         response->postReply(replyID);
4392         return DequeueOutputResult::kSuccess;
4393     }
4394 
4395     return DequeueOutputResult::kRepliedWithError;
4396 }
4397 
4398 
4399 inline void MediaCodec::initClientConfigParcel(ClientConfigParcel& clientConfig) {
4400     clientConfig.codecType = toMediaResourceSubType(mIsHardware, mDomain);
4401     clientConfig.isEncoder = mFlags & kFlagIsEncoder;
4402     clientConfig.width = mWidth;
4403     clientConfig.height = mHeight;
4404     clientConfig.timeStamp = systemTime(SYSTEM_TIME_MONOTONIC) / 1000LL;
4405     clientConfig.id = mCodecId;
4406 }
4407 
4408 void MediaCodec::stopCryptoAsync() {
4409     if (mCryptoAsync) {
4410         sp<RefBase> obj;
4411         sp<MediaCodecBuffer> buffer;
4412         std::list<sp<AMessage>> stalebuffers;
4413         mCryptoAsync->stop(&stalebuffers);
4414         for (sp<AMessage> &msg : stalebuffers) {
4415             if (msg->findObject("buffer", &obj)) {
4416                 buffer = decltype(buffer.get())(obj.get());
4417                 mBufferChannel->discardBuffer(buffer);
4418             }
4419         }
4420     }
4421 }
4422 
4423 void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
4424     switch (msg->what()) {
4425         case kWhatCodecNotify:
4426         {
4427             int32_t what;
4428             CHECK(msg->findInt32("what", &what));
4429             AString codecErrorState;
4430             switch (what) {
4431                 case kWhatError:
4432                 case kWhatCryptoError:
4433                 {
4434                     int32_t err, actionCode;
4435                     CHECK(msg->findInt32("err", &err));
4436                     CHECK(msg->findInt32("actionCode", &actionCode));
4437 
4438                     ALOGE("Codec reported err %#x/%s, actionCode %d, while in state %d/%s",
4439                                               err, StrMediaError(err).c_str(), actionCode,
4440                                               mState, stateString(mState).c_str());
4441                     if (err == DEAD_OBJECT) {
4442                         mFlags |= kFlagSawMediaServerDie;
4443                         mFlags &= ~kFlagIsComponentAllocated;
4444                     }
4445                     bool sendErrorResponse = true;
4446                     std::string origin;
4447                     if (what == kWhatCryptoError) {
4448                         origin = "kWhatCryptoError:";
4449                     } else {
4450                         origin = "kWhatError:";
4451                         //TODO: add a new error state
4452                     }
4453                     codecErrorState = kCodecErrorState;
4454                     origin += stateString(mState);
4455                     stopCryptoAsync();
4456                     switch (mState) {
4457                         case INITIALIZING:
4458                         {
4459                             // Resource error during INITIALIZING state needs to be logged
4460                             // through metrics, to be able to track such occurrences.
4461                             if (isResourceError(err)) {
4462                                 mediametrics_setInt32(mMetricsHandle, kCodecError, err);
4463                                 mediametrics_setCString(mMetricsHandle, kCodecErrorState,
4464                                                         stateString(mState).c_str());
4465                                 flushMediametrics();
4466                                 initMediametrics();
4467                             }
4468                             setState(UNINITIALIZED);
4469                             break;
4470                         }
4471 
4472                         case CONFIGURING:
4473                         {
4474                             if (actionCode == ACTION_CODE_FATAL) {
4475                                 mediametrics_setInt32(mMetricsHandle, kCodecError, err);
4476                                 mediametrics_setCString(mMetricsHandle, kCodecErrorState,
4477                                                         stateString(mState).c_str());
4478                                 flushMediametrics();
4479                                 initMediametrics();
4480                             }
4481                             setState(actionCode == ACTION_CODE_FATAL ?
4482                                     UNINITIALIZED : INITIALIZED);
4483                             break;
4484                         }
4485 
4486                         case STARTING:
4487                         {
4488                             if (actionCode == ACTION_CODE_FATAL) {
4489                                 mediametrics_setInt32(mMetricsHandle, kCodecError, err);
4490                                 mediametrics_setCString(mMetricsHandle, kCodecErrorState,
4491                                                         stateString(mState).c_str());
4492                                 flushMediametrics();
4493                                 initMediametrics();
4494                             }
4495                             setState(actionCode == ACTION_CODE_FATAL ?
4496                                     UNINITIALIZED : CONFIGURED);
4497                             break;
4498                         }
4499 
4500                         case RELEASING:
4501                         {
4502                             // Ignore the error, assuming we'll still get
4503                             // the shutdown complete notification. If we
4504                             // don't, we'll timeout and force release.
4505                             sendErrorResponse = false;
4506                             FALLTHROUGH_INTENDED;
4507                         }
4508                         case STOPPING:
4509                         {
4510                             if (mFlags & kFlagSawMediaServerDie) {
4511                                 if (mState == RELEASING && !mReplyID) {
4512                                     ALOGD("Releasing asynchronously, so nothing to reply here.");
4513                                 }
4514                                 // MediaServer died, there definitely won't
4515                                 // be a shutdown complete notification after
4516                                 // all.
4517 
4518                                 // note that we may be directly going from
4519                                 // STOPPING->UNINITIALIZED, instead of the
4520                                 // usual STOPPING->INITIALIZED state.
4521                                 setState(UNINITIALIZED);
4522                                 if (mState == RELEASING) {
4523                                     mComponentName.clear();
4524                                 }
4525                                 if (mReplyID) {
4526                                     postPendingRepliesAndDeferredMessages(origin + ":dead");
4527                                 } else {
4528                                     ALOGD("no pending replies: %s:dead following %s",
4529                                           origin.c_str(), mLastReplyOrigin.c_str());
4530                                 }
4531                                 sendErrorResponse = false;
4532                             } else if (!mReplyID) {
4533                                 sendErrorResponse = false;
4534                             }
4535                             break;
4536                         }
4537 
4538                         case FLUSHING:
4539                         {
4540                             if (actionCode == ACTION_CODE_FATAL) {
4541                                 mediametrics_setInt32(mMetricsHandle, kCodecError, err);
4542                                 mediametrics_setCString(mMetricsHandle, kCodecErrorState,
4543                                                         stateString(mState).c_str());
4544                                 flushMediametrics();
4545                                 initMediametrics();
4546 
4547                                 setState(UNINITIALIZED);
4548                             } else {
4549                                 setState((mFlags & kFlagIsAsync) ? FLUSHED : STARTED);
4550                             }
4551                             break;
4552                         }
4553 
4554                         case FLUSHED:
4555                         case STARTED:
4556                         {
4557                             sendErrorResponse = (mReplyID != nullptr);
4558 
4559                             setStickyError(err);
4560                             postActivityNotificationIfPossible();
4561 
4562                             cancelPendingDequeueOperations();
4563 
4564                             if (mFlags & kFlagIsAsync) {
4565                                 if (what == kWhatError) {
4566                                     onError(err, actionCode);
4567                                 } else if (what == kWhatCryptoError) {
4568                                     onCryptoError(msg);
4569                                 }
4570                             }
4571                             switch (actionCode) {
4572                             case ACTION_CODE_TRANSIENT:
4573                                 break;
4574                             case ACTION_CODE_RECOVERABLE:
4575                                 setState(INITIALIZED);
4576                                 break;
4577                             default:
4578                                 mediametrics_setInt32(mMetricsHandle, kCodecError, err);
4579                                 mediametrics_setCString(mMetricsHandle, kCodecErrorState,
4580                                                         stateString(mState).c_str());
4581                                 flushMediametrics();
4582                                 initMediametrics();
4583                                 setState(UNINITIALIZED);
4584                                 break;
4585                             }
4586                             break;
4587                         }
4588 
4589                         default:
4590                         {
4591                             sendErrorResponse = (mReplyID != nullptr);
4592 
4593                             setStickyError(err);
4594                             postActivityNotificationIfPossible();
4595 
4596                             // actionCode in an uninitialized state is always fatal.
4597                             if (mState == UNINITIALIZED) {
4598                                 actionCode = ACTION_CODE_FATAL;
4599                             }
4600                             if (mFlags & kFlagIsAsync) {
4601                                 if (what == kWhatError) {
4602                                     onError(err, actionCode);
4603                                 } else if (what == kWhatCryptoError) {
4604                                     onCryptoError(msg);
4605                                 }
4606                             }
4607                             switch (actionCode) {
4608                             case ACTION_CODE_TRANSIENT:
4609                                 break;
4610                             case ACTION_CODE_RECOVERABLE:
4611                                 setState(INITIALIZED);
4612                                 break;
4613                             default:
4614                                 setState(UNINITIALIZED);
4615                                 break;
4616                             }
4617                             break;
4618                         }
4619                     }
4620 
4621                     if (sendErrorResponse) {
4622                         // TRICKY: replicate PostReplyWithError logic for
4623                         //         err code override
4624                         int32_t finalErr = err;
4625                         if (mReleasedByResourceManager) {
4626                             // override the err code if MediaCodec has been
4627                             // released by ResourceManager.
4628                             finalErr = DEAD_OBJECT;
4629                         }
4630                         postPendingRepliesAndDeferredMessages(origin, finalErr);
4631                     }
4632                     break;
4633                 }
4634 
4635                 case kWhatComponentAllocated:
4636                 {
4637                     if (mState == RELEASING || mState == UNINITIALIZED) {
4638                         // In case a kWhatError or kWhatRelease message came in and replied,
4639                         // we log a warning and ignore.
4640                         ALOGW("allocate interrupted by error or release, current state %d/%s",
4641                               mState, stateString(mState).c_str());
4642                         break;
4643                     }
4644                     CHECK_EQ(mState, INITIALIZING);
4645                     setState(INITIALIZED);
4646                     mFlags |= kFlagIsComponentAllocated;
4647 
4648                     CHECK(msg->findString("componentName", &mComponentName));
4649 
4650                     if (mComponentName.c_str()) {
4651                         mIsHardware = !MediaCodecList::isSoftwareCodec(mComponentName);
4652                         mediametrics_setCString(mMetricsHandle, kCodecCodec,
4653                                                 mComponentName.c_str());
4654                         // Update the codec name.
4655                         mResourceManagerProxy->setCodecName(mComponentName.c_str());
4656                     }
4657 
4658                     const char *owner = mCodecInfo ? mCodecInfo->getOwnerName() : "";
4659                     if (mComponentName.startsWith("OMX.google.")
4660                             && strncmp(owner, "default", 8) == 0) {
4661                         mFlags |= kFlagUsesSoftwareRenderer;
4662                     } else {
4663                         mFlags &= ~kFlagUsesSoftwareRenderer;
4664                     }
4665                     mOwnerName = owner;
4666 
4667                     if (mComponentName.endsWith(".secure")) {
4668                         mFlags |= kFlagIsSecure;
4669                         mediametrics_setInt32(mMetricsHandle, kCodecSecure, 1);
4670                     } else {
4671                         mFlags &= ~kFlagIsSecure;
4672                         mediametrics_setInt32(mMetricsHandle, kCodecSecure, 0);
4673                     }
4674 
4675                     mediametrics_setInt32(mMetricsHandle, kCodecHardware,
4676                                           MediaCodecList::isSoftwareCodec(mComponentName) ? 0 : 1);
4677 
4678                     mResourceManagerProxy->addResource(MediaResource::CodecResource(
4679                             mFlags & kFlagIsSecure, toMediaResourceSubType(mIsHardware, mDomain)));
4680 
4681                     postPendingRepliesAndDeferredMessages("kWhatComponentAllocated");
4682                     break;
4683                 }
4684 
4685                 case kWhatComponentConfigured:
4686                 {
4687                     if (mState == RELEASING || mState == UNINITIALIZED || mState == INITIALIZED) {
4688                         // In case a kWhatError or kWhatRelease message came in and replied,
4689                         // we log a warning and ignore.
4690                         ALOGW("configure interrupted by error or release, current state %d/%s",
4691                               mState, stateString(mState).c_str());
4692                         break;
4693                     }
4694                     CHECK_EQ(mState, CONFIGURING);
4695 
4696                     // reset input surface flag
4697                     mHaveInputSurface = false;
4698 
4699                     CHECK(msg->findMessage("input-format", &mInputFormat));
4700                     CHECK(msg->findMessage("output-format", &mOutputFormat));
4701 
4702                     // limit to confirming the opt-in behavior to minimize any behavioral change
4703                     if (mSurface != nullptr && !mAllowFrameDroppingBySurface) {
4704                         // signal frame dropping mode in the input format as this may also be
4705                         // meaningful and confusing for an encoder in a transcoder scenario
4706                         mInputFormat->setInt32(KEY_ALLOW_FRAME_DROP, mAllowFrameDroppingBySurface);
4707                     }
4708                     sp<AMessage> interestingFormat =
4709                             (mFlags & kFlagIsEncoder) ? mOutputFormat : mInputFormat;
4710                     ALOGV("[%s] configured as input format: %s, output format: %s",
4711                             mComponentName.c_str(),
4712                             mInputFormat->debugString(4).c_str(),
4713                             mOutputFormat->debugString(4).c_str());
4714                     int32_t usingSwRenderer;
4715                     if (mOutputFormat->findInt32("using-sw-renderer", &usingSwRenderer)
4716                             && usingSwRenderer) {
4717                         mFlags |= kFlagUsesSoftwareRenderer;
4718                     }
4719 
4720                     // Use input and output formats to get operating frame-rate.
4721                     bool isEncoder = mFlags & kFlagIsEncoder;
4722                     mFrameRate = getOperatingFrameRate(mInputFormat, mFrameRate, isEncoder);
4723                     mFrameRate = getOperatingFrameRate(mOutputFormat, mFrameRate, isEncoder);
4724                     getRequiredSystemResources();
4725 
4726                     setState(CONFIGURED);
4727                     postPendingRepliesAndDeferredMessages("kWhatComponentConfigured");
4728 
4729                     // augment our media metrics info, now that we know more things
4730                     // such as what the codec extracted from any CSD passed in.
4731                     if (mMetricsHandle != 0) {
4732                         sp<AMessage> format;
4733                         if (mConfigureMsg != NULL &&
4734                             mConfigureMsg->findMessage("format", &format)) {
4735                                 // format includes: mime
4736                                 AString mime;
4737                                 if (format->findString("mime", &mime)) {
4738                                     mediametrics_setCString(mMetricsHandle, kCodecMime,
4739                                                             mime.c_str());
4740                                 }
4741                             }
4742                         // perhaps video only?
4743                         int32_t profile = 0;
4744                         if (interestingFormat->findInt32("profile", &profile)) {
4745                             mediametrics_setInt32(mMetricsHandle, kCodecProfile, profile);
4746                         }
4747                         int32_t level = 0;
4748                         if (interestingFormat->findInt32("level", &level)) {
4749                             mediametrics_setInt32(mMetricsHandle, kCodecLevel, level);
4750                         }
4751                         sp<AMessage> uncompressedFormat =
4752                                 (mFlags & kFlagIsEncoder) ? mInputFormat : mOutputFormat;
4753                         int32_t componentColorFormat  = -1;
4754                         if (uncompressedFormat->findInt32("android._color-format",
4755                                 &componentColorFormat)) {
4756                             mediametrics_setInt32(mMetricsHandle,
4757                                     kCodecComponentColorFormat, componentColorFormat);
4758                         }
4759                         updateHdrMetrics(true /* isConfig */);
4760                         int32_t codecMaxInputSize = -1;
4761                         if (mInputFormat->findInt32(KEY_MAX_INPUT_SIZE, &codecMaxInputSize)) {
4762                             mApiUsageMetrics.inputBufferSize.codecMax = codecMaxInputSize;
4763                         }
4764                         // bitrate and bitrate mode, encoder only
4765                         if (mFlags & kFlagIsEncoder) {
4766                             // encoder specific values
4767                             int32_t bitrate_mode = -1;
4768                             if (mOutputFormat->findInt32(KEY_BITRATE_MODE, &bitrate_mode)) {
4769                                     mediametrics_setCString(mMetricsHandle, kCodecBitrateMode,
4770                                           asString_BitrateMode(bitrate_mode));
4771                             }
4772                             int32_t bitrate = -1;
4773                             if (mOutputFormat->findInt32(KEY_BIT_RATE, &bitrate)) {
4774                                     mediametrics_setInt32(mMetricsHandle, kCodecBitrate, bitrate);
4775                             }
4776                         } else {
4777                             // decoder specific values
4778                         }
4779                     }
4780                     break;
4781                 }
4782 
4783                 case kWhatInputSurfaceCreated:
4784                 {
4785                     if (mState != CONFIGURED) {
4786                         // state transitioned unexpectedly; we should have replied already.
4787                         ALOGD("received kWhatInputSurfaceCreated message in state %s",
4788                                 stateString(mState).c_str());
4789                         break;
4790                     }
4791                     // response to initiateCreateInputSurface()
4792                     status_t err = NO_ERROR;
4793                     sp<AMessage> response = new AMessage;
4794                     if (!msg->findInt32("err", &err)) {
4795                         sp<RefBase> obj;
4796                         msg->findObject("input-surface", &obj);
4797                         CHECK(msg->findMessage("input-format", &mInputFormat));
4798                         CHECK(msg->findMessage("output-format", &mOutputFormat));
4799                         ALOGV("[%s] input surface created as input format: %s, output format: %s",
4800                                 mComponentName.c_str(),
4801                                 mInputFormat->debugString(4).c_str(),
4802                                 mOutputFormat->debugString(4).c_str());
4803                         CHECK(obj != NULL);
4804                         response->setObject("input-surface", obj);
4805                         mHaveInputSurface = true;
4806                     } else {
4807                         response->setInt32("err", err);
4808                     }
4809                     postPendingRepliesAndDeferredMessages("kWhatInputSurfaceCreated", response);
4810                     break;
4811                 }
4812 
4813                 case kWhatInputSurfaceAccepted:
4814                 {
4815                     if (mState != CONFIGURED) {
4816                         // state transitioned unexpectedly; we should have replied already.
4817                         ALOGD("received kWhatInputSurfaceAccepted message in state %s",
4818                                 stateString(mState).c_str());
4819                         break;
4820                     }
4821                     // response to initiateSetInputSurface()
4822                     status_t err = NO_ERROR;
4823                     sp<AMessage> response = new AMessage();
4824                     if (!msg->findInt32("err", &err)) {
4825                         CHECK(msg->findMessage("input-format", &mInputFormat));
4826                         CHECK(msg->findMessage("output-format", &mOutputFormat));
4827                         mHaveInputSurface = true;
4828                     } else {
4829                         response->setInt32("err", err);
4830                     }
4831                     postPendingRepliesAndDeferredMessages("kWhatInputSurfaceAccepted", response);
4832                     break;
4833                 }
4834 
4835                 case kWhatSignaledInputEOS:
4836                 {
4837                     if (!isExecuting()) {
4838                         // state transitioned unexpectedly; we should have replied already.
4839                         ALOGD("received kWhatSignaledInputEOS message in state %s",
4840                                 stateString(mState).c_str());
4841                         break;
4842                     }
4843                     // response to signalEndOfInputStream()
4844                     sp<AMessage> response = new AMessage;
4845                     status_t err;
4846                     if (msg->findInt32("err", &err)) {
4847                         response->setInt32("err", err);
4848                     }
4849                     postPendingRepliesAndDeferredMessages("kWhatSignaledInputEOS", response);
4850                     break;
4851                 }
4852 
4853                 case kWhatStartCompleted:
4854                 {
4855                     if (mState == RELEASING || mState == UNINITIALIZED) {
4856                         // In case a kWhatRelease message came in and replied,
4857                         // we log a warning and ignore.
4858                         ALOGW("start interrupted by release, current state %d/%s",
4859                               mState, stateString(mState).c_str());
4860                         break;
4861                     }
4862 
4863                     CHECK_EQ(mState, STARTING);
4864 
4865                     // Add the codec resources upon start.
4866                     std::vector<MediaResourceParcel> resources;
4867                     if (mDomain == DOMAIN_VIDEO || mDomain == DOMAIN_IMAGE) {
4868                         resources.push_back(
4869                                 MediaResource::GraphicMemoryResource(getGraphicBufferSize()));
4870                     }
4871                     if (android::media::codec::codec_availability() &&
4872                         android::media::codec::codec_availability_support()) {
4873                         Mutexed<std::vector<InstanceResourceInfo>>::Locked resourcesLocked(
4874                                 mRequiredResourceInfo);
4875                         for (const InstanceResourceInfo& resource : *resourcesLocked) {
4876                             resources.push_back(getMediaResourceParcel(resource));
4877                         }
4878                     }
4879                     if (!resources.empty()) {
4880                         mResourceManagerProxy->addResource(resources);
4881                     }
4882                     // Notify the RM that the codec is in use (has been started).
4883                     ClientConfigParcel clientConfig;
4884                     initClientConfigParcel(clientConfig);
4885                     mResourceManagerProxy->notifyClientStarted(clientConfig);
4886 
4887                     setState(STARTED);
4888                     postPendingRepliesAndDeferredMessages("kWhatStartCompleted");
4889 
4890                     // Now that the codec has started, the peek behavior defaults to
4891                     // undefined for backwards compatibility with older releases. Later,
4892                     // once an app explicitly enables or disables peek, legacy mode is
4893                     // turned off and the undefined legacy behavior is disallowed.
4894                     // See updateTunnelPeek called in onSetParameters for more details.
4895                     if (mTunneled && mTunnelPeekState == TunnelPeekState::kLegacyMode) {
4896                         sp<AMessage> params = new AMessage;
4897                         params->setInt32("android._tunnel-peek-set-legacy", 1);
4898                         mCodec->signalSetParameters(params);
4899                     }
4900                     break;
4901                 }
4902 
4903                 case kWhatOutputBuffersChanged:
4904                 {
4905                     mFlags |= kFlagOutputBuffersChanged;
4906                     postActivityNotificationIfPossible();
4907                     break;
4908                 }
4909 
4910                 case kWhatOutputFramesRendered:
4911                 {
4912                     // ignore these in all states except running
4913                     if (mState != STARTED) {
4914                         break;
4915                     }
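                    // A frame has reached the display: update the tunnel-peek state (unless
                    // legacy mode is in effect) and forward the rendered-frame info to any
                    // registered on-frame-rendered listener.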
4916                     TunnelPeekState previousState = mTunnelPeekState;
4917                     if (mTunnelPeekState != TunnelPeekState::kLegacyMode) {
4918                         mTunnelPeekState = TunnelPeekState::kBufferRendered;
4919                         ALOGV("TunnelPeekState: %s -> %s",
4920                                 asString(previousState),
4921                                 asString(TunnelPeekState::kBufferRendered));
4922                     }
4923                     processRenderedFrames(msg);
4924                     // check that we have a notification set
4925                     if (mOnFrameRenderedNotification != NULL) {
4926                         sp<AMessage> notify = mOnFrameRenderedNotification->dup();
4927                         notify->setMessage("data", msg);
4928                         notify->post();
4929                     }
4930                     break;
4931                 }
4932 
4933                 case kWhatFirstTunnelFrameReady:
4934                 {
4935                     if (mState != STARTED) {
4936                         break;
4937                     }
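                    // The first tunneled frame has been decoded. If tunnel peek is enabled,
                    // ask the codec to render it now; otherwise just record that a decoded
                    // buffer is ready.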
4938                     TunnelPeekState previousState = mTunnelPeekState;
4939                     switch(mTunnelPeekState) {
4940                         case TunnelPeekState::kDisabledNoBuffer:
4941                         case TunnelPeekState::kDisabledQueued:
4942                             mTunnelPeekState = TunnelPeekState::kBufferDecoded;
4943                             ALOGV("First tunnel frame ready");
4944                             ALOGV("TunnelPeekState: %s -> %s",
4945                                   asString(previousState),
4946                                   asString(mTunnelPeekState));
4947                             break;
4948                         case TunnelPeekState::kEnabledNoBuffer:
4949                         case TunnelPeekState::kEnabledQueued:
4950                             {
4951                                 sp<AMessage> parameters = new AMessage();
4952                                 parameters->setInt32("android._trigger-tunnel-peek", 1);
4953                                 mCodec->signalSetParameters(parameters);
4954                             }
4955                             mTunnelPeekState = TunnelPeekState::kBufferRendered;
4956                             ALOGV("First tunnel frame ready");
4957                             ALOGV("TunnelPeekState: %s -> %s",
4958                                   asString(previousState),
4959                                   asString(mTunnelPeekState));
4960                             break;
4961                         default:
4962                             ALOGV("Ignoring first tunnel frame ready, TunnelPeekState: %s",
4963                                   asString(mTunnelPeekState));
4964                             break;
4965                     }
4966 
4967                     if (mOnFirstTunnelFrameReadyNotification != nullptr) {
4968                         sp<AMessage> notify = mOnFirstTunnelFrameReadyNotification->dup();
4969                         notify->setMessage("data", msg);
4970                         notify->post();
4971                     }
4972                     break;
4973                 }
4974 
4975                 case kWhatFillThisBuffer:
4976                 {
4977                     /* size_t index = */updateBuffers(kPortIndexInput, msg);
4978 
4979                     bool inStateToReturnBuffers =
4980                         mState == FLUSHING || mState == STOPPING || mState == RELEASING;
4981                     if (android::media::codec::provider_->codec_buffer_state_cleanup()) {
4982                         // Late callbacks from the codec could arrive here
4983                         // after the codec is already stopped or released.
4984                         inStateToReturnBuffers = mState == FLUSHING ||
4985                                                  mState == STOPPING || mState == INITIALIZED ||
4986                                                  mState == RELEASING || mState == UNINITIALIZED;
4987                     }
4988                     if (inStateToReturnBuffers) {
4989                         returnBuffersToCodecOnPort(kPortIndexInput);
4990                         break;
4991                     }
4992 
4993                     if (!mCSD.empty()) {
4994                         ssize_t index = dequeuePortBuffer(kPortIndexInput);
4995                         CHECK_GE(index, 0);
4996 
4997                         // If codec-specific data was specified as
4998                         // part of the format in the call to configure() and
4999                         // there is more CSD left, we submit it here;
5000                         // clients only get access to input buffers once
5001                         // this data has been exhausted.
5002 
5003                         status_t err = queueCSDInputBuffer(index);
5004 
5005                         if (err != OK) {
5006                             ALOGE("queueCSDInputBuffer failed w/ error %d",
5007                                   err);
5008 
5009                             setStickyError(err);
5010                             postActivityNotificationIfPossible();
5011 
5012                             cancelPendingDequeueOperations();
5013                         }
5014                         break;
5015                     }
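                    // Leftover input from an earlier queueInputBuffer() call is fed into the
                    // newly available buffer before the buffer is exposed to the client.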
5016                     if (!mLeftover.empty()) {
5017                         ssize_t index = dequeuePortBuffer(kPortIndexInput);
5018                         CHECK_GE(index, 0);
5019 
5020                         status_t err = handleLeftover(index);
5021                         if (err != OK) {
5022                             setStickyError(err);
5023                             postActivityNotificationIfPossible();
5024                             cancelPendingDequeueOperations();
5025                         }
5026                         break;
5027                     }
5028 
5029                     if (mFlags & kFlagIsAsync) {
5030                         if (!mHaveInputSurface) {
5031                             if (mState == FLUSHED) {
5032                                 mHavePendingInputBuffers = true;
5033                             } else {
5034                                 onInputBufferAvailable();
5035                             }
5036                         }
5037                     } else if (mFlags & kFlagDequeueInputPending) {
5038                         CHECK(handleDequeueInputBuffer(mDequeueInputReplyID));
5039 
5040                         ++mDequeueInputTimeoutGeneration;
5041                         mFlags &= ~kFlagDequeueInputPending;
5042                         mDequeueInputReplyID = 0;
5043                     } else {
5044                         postActivityNotificationIfPossible();
5045                     }
5046                     break;
5047                 }
5048 
5049                 case kWhatDrainThisBuffer:
5050                 {
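                    // In tunneled playback (without block model) the codec renders output
                    // itself, so only handle a possible output format change and discard the
                    // buffer instead of handing it to the client.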
5051                     if ((mFlags & kFlagUseBlockModel) == 0 && mTunneled) {
5052                         sp<RefBase> obj;
5053                         CHECK(msg->findObject("buffer", &obj));
5054                         sp<MediaCodecBuffer> buffer = static_cast<MediaCodecBuffer *>(obj.get());
5055                         if (mFlags & kFlagIsAsync) {
5056                             // In asynchronous mode, output format change is processed immediately.
5057                             handleOutputFormatChangeIfNeeded(buffer);
5058                         } else {
5059                             postActivityNotificationIfPossible();
5060                         }
5061                         mBufferChannel->discardBuffer(buffer);
5062                         break;
5063                     }
5064 
5065                     /* size_t index = */updateBuffers(kPortIndexOutput, msg);
5066 
5067                     bool inStateToReturnBuffers =
5068                         mState == FLUSHING || mState == STOPPING || mState == RELEASING;
5069                     if (android::media::codec::provider_->codec_buffer_state_cleanup()) {
5070                         // Late callbacks from the codec could arrive here
5071                         // after the codec is already stopped or released.
5072                         inStateToReturnBuffers = mState == FLUSHING ||
5073                                                  mState == STOPPING || mState == INITIALIZED ||
5074                                                  mState == RELEASING || mState == UNINITIALIZED;
5075                     }
5076                     if (inStateToReturnBuffers) {
5077                         returnBuffersToCodecOnPort(kPortIndexOutput);
5078                         break;
5079                     }
5080 
5081                     if (mFlags & kFlagIsAsync) {
5082                         sp<RefBase> obj;
5083                         CHECK(msg->findObject("buffer", &obj));
5084                         sp<MediaCodecBuffer> buffer = static_cast<MediaCodecBuffer *>(obj.get());
5085 
5086                         // In asynchronous mode, output format change is processed immediately.
5087                         handleOutputFormatChangeIfNeeded(buffer);
5088                         onOutputBufferAvailable();
5089                     } else if (mFlags & kFlagDequeueOutputPending) {
5090                         DequeueOutputResult dequeueResult =
5091                             handleDequeueOutputBuffer(mDequeueOutputReplyID);
5092                         switch (dequeueResult) {
5093                             case DequeueOutputResult::kNoBuffer:
5094                                 TRESPASS();
5095                                 break;
5096                             case DequeueOutputResult::kDiscardedBuffer:
5097                                 break;
5098                             case DequeueOutputResult::kRepliedWithError:
5099                                 [[fallthrough]];
5100                             case DequeueOutputResult::kSuccess:
5101                             {
5102                                 ++mDequeueOutputTimeoutGeneration;
5103                                 mFlags &= ~kFlagDequeueOutputPending;
5104                                 mDequeueOutputReplyID = 0;
5105                                 break;
5106                             }
5107                             default:
5108                                 TRESPASS();
5109                         }
5110                     } else {
5111                         postActivityNotificationIfPossible();
5112                     }
5113 
5114                     break;
5115                 }
5116 
5117                 case kWhatMetricsUpdated:
5118                 {
5119                     sp<AMessage> updatedMetrics;
5120                     CHECK(msg->findMessage("updated-metrics", &updatedMetrics));
5121 
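                    // Mirror each updated entry into the metrics handle, dispatching on the
                    // entry's type.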
5122                     size_t numEntries = updatedMetrics->countEntries();
5123                     AMessage::Type type;
5124                     for (size_t i = 0; i < numEntries; ++i) {
5125                         const char *name = updatedMetrics->getEntryNameAt(i, &type);
5126                         AMessage::ItemData itemData = updatedMetrics->getEntryAt(i);
5127                         switch (type) {
5128                             case AMessage::kTypeInt32: {
5129                                 int32_t metricValue;
5130                                 itemData.find(&metricValue);
5131                                 mediametrics_setInt32(mMetricsHandle, name, metricValue);
5132                                 break;
5133                             }
5134                             case AMessage::kTypeInt64: {
5135                                 int64_t metricValue;
5136                                 itemData.find(&metricValue);
5137                                 mediametrics_setInt64(mMetricsHandle, name, metricValue);
5138                                 break;
5139                             }
5140                             case AMessage::kTypeDouble: {
5141                                 double metricValue;
5142                                 itemData.find(&metricValue);
5143                                 mediametrics_setDouble(mMetricsHandle, name, metricValue);
5144                                 break;
5145                             }
5146                             case AMessage::kTypeString: {
5147                                 AString metricValue;
5148                                 itemData.find(&metricValue);
5149                                 mediametrics_setCString(mMetricsHandle, name, metricValue.c_str());
5150                                 break;
5151                             }
5152                             // TODO: add support for other types
5153                             default:
5154                                 ALOGW("Updated metrics type not supported.");
5155                         }
5156                     }
5157                     break;
5158                 }
5159 
5160                 case kWhatRequiredResourcesChanged:
5161                 {
5162                     // Get the updated required system resources.
5163                     if (getRequiredSystemResources()) {
5164                         onRequiredResourcesChanged();
5165                     }
5166 
5167                     break;
5168                 }
5169 
5170                 case kWhatEOS:
5171                 {
5172                     // We already notify the client of this by using the
5173                     // corresponding flag in "onOutputBufferReady".
5174                     break;
5175                 }
5176 
5177                 case kWhatStopCompleted:
5178                 {
5179                     if (mState != STOPPING) {
5180                         ALOGW("Received kWhatStopCompleted in state %d/%s",
5181                               mState, stateString(mState).c_str());
5182                         break;
5183                     }
5184 
5185                     if (mIsSurfaceToDisplay) {
5186                         mVideoRenderQualityTracker.resetForDiscontinuity();
5187                     }
5188 
5189                     // Remove the codec resources upon stop.
5190                     std::vector<MediaResourceParcel> resources;
5191                     if (android::media::codec::codec_availability() &&
5192                         android::media::codec::codec_availability_support()) {
5193                         Mutexed<std::vector<InstanceResourceInfo>>::Locked resourcesLocked(
5194                                 mRequiredResourceInfo);
5195                         for (const InstanceResourceInfo& resource : *resourcesLocked) {
5196                             resources.push_back(getMediaResourceParcel(resource));
5197                         }
5198                         (*resourcesLocked).clear();
5199                     }
5200                     // Notify the RM to remove those resources.
5201                     if (!resources.empty()) {
5202                         mResourceManagerProxy->removeResource(resources);
5203                     }
5204 
5205                     // Notify the RM that the codec has been stopped.
5206                     ClientConfigParcel clientConfig;
5207                     initClientConfigParcel(clientConfig);
5208                     mResourceManagerProxy->notifyClientStopped(clientConfig);
5209 
5210                     setState(INITIALIZED);
5211                     if (mReplyID) {
5212                         postPendingRepliesAndDeferredMessages("kWhatStopCompleted");
5213                     } else {
5214                         ALOGW("kWhatStopCompleted: presumably an error occurred earlier, "
5215                               "but the operation completed anyway. (last reply origin=%s)",
5216                               mLastReplyOrigin.c_str());
5217                     }
5218                     break;
5219                 }
5220 
5221                 case kWhatReleaseCompleted:
5222                 {
5223                     if (mState != RELEASING) {
5224                         ALOGW("Received kWhatReleaseCompleted in state %d/%s",
5225                               mState, stateString(mState).c_str());
5226                         break;
5227                     }
5228                     setState(UNINITIALIZED);
5229                     mComponentName.clear();
5230 
5231                     mFlags &= ~kFlagIsComponentAllocated;
5232 
5233                     // turn battery tracking off since we're removing all resources, including battery-on
5234                     if (mBatteryChecker != nullptr) {
5235                         mBatteryChecker->onClientRemoved();
5236                     }
5237 
5238                     mResourceManagerProxy->removeClient();
5239                     mDetachedSurface.reset();
5240 
5241                     if (mReplyID != nullptr) {
5242                         postPendingRepliesAndDeferredMessages("kWhatReleaseCompleted");
5243                     }
5244                     if (mAsyncReleaseCompleteNotification != nullptr) {
5245                         flushMediametrics();
5246                         mAsyncReleaseCompleteNotification->post();
5247                         mAsyncReleaseCompleteNotification.clear();
5248                     }
5249                     break;
5250                 }
5251 
5252                 case kWhatFlushCompleted:
5253                 {
5254                     if (mState != FLUSHING) {
5255                         ALOGW("received FlushCompleted message in state %d/%s",
5256                                 mState, stateString(mState).c_str());
5257                         break;
5258                     }
5259 
5260                     if (mIsSurfaceToDisplay) {
5261                         mVideoRenderQualityTracker.resetForDiscontinuity();
5262                     }
5263 
5264                     if (mFlags & kFlagIsAsync) {
5265                         setState(FLUSHED);
5266                     } else {
5267                         setState(STARTED);
5268                         mCodec->signalResume();
5269                     }
5270                     mReliabilityContextMetrics.flushCount++;
5271 
5272                     postPendingRepliesAndDeferredMessages("kWhatFlushCompleted");
5273                     break;
5274                 }
5275 
5276                 default:
5277                     TRESPASS();
5278             }
5279             break;
5280         }
5281 
5282         case kWhatInit:
5283         {
5284             if (mState != UNINITIALIZED) {
5285                 PostReplyWithError(msg, INVALID_OPERATION);
5286                 break;
5287             }
5288 
5289             if (mReplyID) {
5290                 mDeferredMessages.push_back(msg);
5291                 break;
5292             }
5293             sp<AReplyToken> replyID;
5294             CHECK(msg->senderAwaitsResponse(&replyID));
5295 
5296             mReplyID = replyID;
5297             setState(INITIALIZING);
5298 
5299             sp<RefBase> codecInfo;
5300             (void)msg->findObject("codecInfo", &codecInfo);
5301             AString name;
5302             CHECK(msg->findString("name", &name));
5303 
5304             sp<AMessage> format = new AMessage;
5305             if (codecInfo) {
5306                 format->setObject("codecInfo", codecInfo);
5307             }
5308             format->setString("componentName", name);
5309 
5310             mCodec->initiateAllocateComponent(format);
5311             break;
5312         }
5313 
5314         case kWhatSetNotification:
5315         {
5316             sp<AMessage> notify;
5317             if (msg->findMessage("on-frame-rendered", &notify)) {
5318                 mOnFrameRenderedNotification = notify;
5319             }
5320             if (msg->findMessage("first-tunnel-frame-ready", &notify)) {
5321                 mOnFirstTunnelFrameReadyNotification = notify;
5322             }
5323             break;
5324         }
5325 
5326         case kWhatSetCallback:
5327         {
5328             sp<AReplyToken> replyID;
5329             CHECK(msg->senderAwaitsResponse(&replyID));
5330 
5331             if (mState == UNINITIALIZED
5332                     || mState == INITIALIZING
5333                     || isExecuting()) {
5334                 // callback can't be set after codec is executing,
5335                 // or before it's initialized (as the callback
5336                 // will be cleared when it goes to INITIALIZED)
5337                 mErrorLog.log(LOG_TAG, base::StringPrintf(
5338                         "setCallback() is valid only at Initialized state; currently %s",
5339                         apiStateString().c_str()));
5340                 PostReplyWithError(replyID, INVALID_OPERATION);
5341                 break;
5342             }
5343 
5344             sp<AMessage> callback;
5345             CHECK(msg->findMessage("callback", &callback));
5346 
5347             mCallback = callback;
5348 
5349             if (mCallback != NULL) {
5350                 ALOGI("MediaCodec will operate in async mode");
5351                 mFlags |= kFlagIsAsync;
5352             } else {
5353                 mFlags &= ~kFlagIsAsync;
5354             }
5355 
5356             sp<AMessage> response = new AMessage;
5357             response->postReply(replyID);
5358             break;
5359         }
5360 
5361         case kWhatGetMetrics:
5362         {
5363             onGetMetrics(msg);
5364             break;
5365         }
5366 
5367 
5368         case kWhatConfigure:
5369         {
5370             if (mState != INITIALIZED) {
5371                 mErrorLog.log(LOG_TAG, base::StringPrintf(
5372                         "configure() is valid only at Initialized state; currently %s",
5373                         apiStateString().c_str()));
5374                 PostReplyWithError(msg, INVALID_OPERATION);
5375                 break;
5376             }
5377 
5378             if (mReplyID) {
5379                 mDeferredMessages.push_back(msg);
5380                 break;
5381             }
5382             sp<AReplyToken> replyID;
5383             CHECK(msg->senderAwaitsResponse(&replyID));
5384 
5385             sp<RefBase> obj;
5386             CHECK(msg->findObject("surface", &obj));
5387 
5388             sp<AMessage> format;
5389             CHECK(msg->findMessage("format", &format));
5390 
5391             // start with a copy of the passed metrics info for use in this run
5392             mediametrics_handle_t handle;
5393             CHECK(msg->findInt64("metrics", &handle));
5394             if (handle != 0) {
5395                 if (mMetricsHandle != 0) {
5396                     flushMediametrics();
5397                 }
5398                 mMetricsHandle = mediametrics_dup(handle);
5399                 // and set some additional metrics values
5400                 initMediametrics();
5401             }
5402 
5403             // from this point forward, in this configure/use/release lifecycle, we want to
5404             // upload our data
5405             mMetricsToUpload = true;
5406 
5407             int32_t push;
5408             if (msg->findInt32("push-blank-buffers-on-shutdown", &push) && push != 0) {
5409                 mFlags |= kFlagPushBlankBuffersOnShutdown;
5410             }
5411 
5412             uint32_t flags;
5413             CHECK(msg->findInt32("flags", (int32_t *)&flags));
5414 
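            // If no output surface was supplied but detached-surface mode was requested for
            // a decoder, substitute an internally owned detached surface so configuration can
            // proceed as if a surface had been set.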
5415             if (android::media::codec::provider_->null_output_surface_support()) {
5416                 if (obj == nullptr
5417                         && (flags & CONFIGURE_FLAG_DETACHED_SURFACE)
5418                         && !(flags & CONFIGURE_FLAG_ENCODE)) {
5419                     sp<Surface> surface = getOrCreateDetachedSurface();
5420                     if (surface == nullptr) {
5421                         mErrorLog.log(
5422                                 LOG_TAG, "Detached surface mode is not supported by this codec");
5423                         PostReplyWithError(replyID, INVALID_OPERATION);
5424                     }
5425                     obj = surface;
5426                 }
5427             }
5428 
5429             if (obj != NULL) {
5430                 if (!format->findInt32(KEY_ALLOW_FRAME_DROP, &mAllowFrameDroppingBySurface)) {
5431                     // allow frame dropping by surface by default
5432                     mAllowFrameDroppingBySurface = true;
5433                 }
5434 
5435                 format->setObject("native-window", obj);
5436                 status_t err = handleSetSurface(static_cast<Surface *>(obj.get()));
5437                 if (err != OK) {
5438                     PostReplyWithError(replyID, err);
5439                     break;
5440                 }
5441                 uint32_t generation = mSurfaceGeneration;
5442                 format->setInt32("native-window-generation", generation);
5443             } else {
5444                 // we are not using a surface, so this variable is unused; initialize it sensibly anyway
5445                 mAllowFrameDroppingBySurface = false;
5446 
5447                 handleSetSurface(NULL);
5448             }
5449 
5450             mApiUsageMetrics.isUsingOutputSurface = true;
5451 
5452             if (flags & CONFIGURE_FLAG_USE_BLOCK_MODEL ||
5453                 flags & CONFIGURE_FLAG_USE_CRYPTO_ASYNC) {
5454                 if (!(mFlags & kFlagIsAsync)) {
5455                     mErrorLog.log(
5456                             LOG_TAG, "Block model / crypto-async is only valid with callback set (async mode)");
5457                     PostReplyWithError(replyID, INVALID_OPERATION);
5458                     break;
5459                 }
5460                 if (flags & CONFIGURE_FLAG_USE_BLOCK_MODEL) {
5461                     mFlags |= kFlagUseBlockModel;
5462                 }
5463                 if (flags & CONFIGURE_FLAG_USE_CRYPTO_ASYNC) {
5464                     mFlags |= kFlagUseCryptoAsync;
5465                     if ((mFlags & kFlagUseBlockModel)) {
5466                         ALOGW("CryptoAsync not yet enabled for block model, "
5467                                 "falling back to normal");
5468                     }
5469                 }
5470             }
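            // Large-frame (batched) audio parameters are only supported on C2 audio codecs
            // operating in async mode; reject any other combination up front.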
5471             int32_t largeFrameParamMax = 0, largeFrameParamThreshold = 0;
5472             if (format->findInt32(KEY_BUFFER_BATCH_MAX_OUTPUT_SIZE, &largeFrameParamMax) ||
5473                     format->findInt32(KEY_BUFFER_BATCH_THRESHOLD_OUTPUT_SIZE,
5474                     &largeFrameParamThreshold)) {
5475                 if (largeFrameParamMax > 0 || largeFrameParamThreshold > 0) {
5476                     if (mComponentName.startsWith("OMX")) {
5477                         mErrorLog.log(LOG_TAG,
5478                                 "Large Frame params are not supported on OMX codecs. "
5479                                 "Currently they are only supported on C2 audio codecs.");
5480                         PostReplyWithError(replyID, INVALID_OPERATION);
5481                         break;
5482                     }
5483                     AString mime;
5484                     CHECK(format->findString("mime", &mime));
5485                     if (!mime.startsWith("audio")) {
5486                         mErrorLog.log(LOG_TAG,
5487                                 "Large Frame params only work with audio codecs");
5488                         PostReplyWithError(replyID, INVALID_OPERATION);
5489                         break;
5490                     }
5491                     if (!(mFlags & kFlagIsAsync)) {
5492                         mErrorLog.log(LOG_TAG, "Large Frame audio "
5493                                 "config works only with async mode");
5494                         PostReplyWithError(replyID, INVALID_OPERATION);
5495                         break;
5496                     }
5497                 }
5498             }
5499 
5500             mReplyID = replyID;
5501             setState(CONFIGURING);
5502 
5503             void *crypto;
5504             if (!msg->findPointer("crypto", &crypto)) {
5505                 crypto = NULL;
5506             }
5507 
5508             ALOGV("kWhatConfigure: Old mCrypto: %p (%d)",
5509                     mCrypto.get(), (mCrypto != NULL ? mCrypto->getStrongCount() : 0));
5510 
5511             mCrypto = static_cast<ICrypto *>(crypto);
5512             mBufferChannel->setCrypto(mCrypto);
5513 
5514             ALOGV("kWhatConfigure: New mCrypto: %p (%d)",
5515                     mCrypto.get(), (mCrypto != NULL ? mCrypto->getStrongCount() : 0));
5516 
5517             void *descrambler;
5518             if (!msg->findPointer("descrambler", &descrambler)) {
5519                 descrambler = NULL;
5520             }
5521 
5522             mDescrambler = static_cast<IDescrambler *>(descrambler);
5523             mBufferChannel->setDescrambler(mDescrambler);
5524             if ((mFlags & kFlagUseCryptoAsync) && mCrypto) {
5525                 // kFlagUseCryptoAsync is set, but do not use CryptoAsync for block model;
5526                 // this is to propagate the error in onCryptoError()
5527                 // TODO (b/274628160): Enable use of CONFIGURE_FLAG_USE_CRYPTO_ASYNC
5528                 //                     with CONFIGURE_FLAG_USE_BLOCK_MODEL
5529                 if (!(mFlags & kFlagUseBlockModel)) {
5530                     mCryptoAsync = new CryptoAsync(mBufferChannel);
5531                     mCryptoAsync->setCallback(
5532                     std::make_unique<CryptoAsyncCallback>(new AMessage(kWhatCodecNotify, this)));
5533                     mCryptoLooper = new ALooper();
5534                     mCryptoLooper->setName("CryptoAsyncLooper");
5535                     mCryptoLooper->registerHandler(mCryptoAsync);
5536                     status_t err = mCryptoLooper->start();
5537                     if (err != OK) {
5538                         ALOGE("Crypto Looper failed to start");
5539                         mCryptoAsync = nullptr;
5540                         mCryptoLooper = nullptr;
5541                     }
5542                 }
5543             }
5544 
5545             format->setInt32("flags", flags);
5546             if (flags & CONFIGURE_FLAG_ENCODE) {
5547                 format->setInt32("encoder", true);
5548                 mFlags |= kFlagIsEncoder;
5549             }
5550 
5551             extractCSD(format);
5552 
5553             int32_t tunneled;
5554             if (format->findInt32("feature-tunneled-playback", &tunneled) && tunneled != 0) {
5555                 ALOGI("Configuring TUNNELED video playback.");
5556                 mTunneled = true;
5557             } else {
5558                 mTunneled = false;
5559             }
5560             mediametrics_setInt32(mMetricsHandle, kCodecTunneled, mTunneled ? 1 : 0);
5561 
5562             int32_t background = 0;
5563             if (format->findInt32("android._background-mode", &background) && background) {
5564                 androidSetThreadPriority(gettid(), ANDROID_PRIORITY_BACKGROUND);
5565             }
5566 
5567             mCodec->initiateConfigureComponent(format);
5568             break;
5569         }
5570 
5571         case kWhatDetachSurface:
5572         {
5573             // detach surface is equivalent to setSurface(mDetachedSurface)
5574             sp<Surface> surface = getOrCreateDetachedSurface();
5575 
5576             if (surface == nullptr) {
5577                 sp<AReplyToken> replyID;
5578                 CHECK(msg->senderAwaitsResponse(&replyID));
5579                 mErrorLog.log(LOG_TAG, "Detaching surface is not supported by the codec.");
5580                 PostReplyWithError(replyID, INVALID_OPERATION);
5581                 break;
5582             }
5583 
5584             msg->setObject("surface", surface);
5585         }
5586         [[fallthrough]];
5587 
5588         case kWhatSetSurface:
5589         {
5590             sp<AReplyToken> replyID;
5591             CHECK(msg->senderAwaitsResponse(&replyID));
5592 
5593             status_t err = OK;
5594 
5595             switch (mState) {
5596                 case CONFIGURED:
5597                 case STARTED:
5598                 case FLUSHED:
5599                 {
5600                     sp<RefBase> obj;
5601                     (void)msg->findObject("surface", &obj);
5602                     sp<Surface> surface = static_cast<Surface *>(obj.get());
5603                     if (mSurface == NULL) {
5604                         // do not support setting surface if it was not set
5605                         mErrorLog.log(LOG_TAG, base::StringPrintf(
5606                                       "Cannot %s surface if the codec is not configured with "
5607                                       "a surface already",
5608                                       msg->what() == kWhatDetachSurface ? "detach" : "set"));
5609                         err = INVALID_OPERATION;
5610                     } else if (obj == NULL) {
5611                         // do not support unsetting surface
5612                         mErrorLog.log(LOG_TAG, "Unsetting surface is not supported");
5613                         err = BAD_VALUE;
5614                     } else if (android::media::codec::provider_->null_output_surface_support()) {
5615                         err = handleSetSurface(surface, true /* callCodec */);
5616                     } else {
5617                         uint32_t generation;
5618                         err = connectToSurface(surface, &generation);
5619                         if (err == ALREADY_EXISTS) {
5620                             // reconnecting to same surface
5621                             err = OK;
5622                         } else {
5623                             if (err == OK) {
5624                                 if (mFlags & kFlagUsesSoftwareRenderer) {
5625                                     if (mSoftRenderer != NULL
5626                                             && (mFlags & kFlagPushBlankBuffersOnShutdown)) {
5627                                         pushBlankBuffersToNativeWindow(mSurface.get());
5628                                     }
5629                                     surface->setDequeueTimeout(-1);
5630                                     mSoftRenderer = new SoftwareRenderer(surface);
5631                                     // TODO: check if this was successful
5632                                 } else {
5633                                     err = mCodec->setSurface(surface, generation);
5634                                 }
5635                             }
5636                             if (err == OK) {
5637                                 (void)disconnectFromSurface();
5638                                 mSurface = surface;
5639                                 mSurfaceGeneration = generation;
5640                             }
5641                             mReliabilityContextMetrics.setOutputSurfaceCount++;
5642                         }
5643                     }
5644                     break;
5645                 }
5646 
5647                 default:
5648                     mErrorLog.log(LOG_TAG, base::StringPrintf(
5649                             "%sSurface() is valid only at Executing states; currently %s",
5650                             msg->what() == kWhatDetachSurface ? "detach" : "set",
5651                             apiStateString().c_str()));
5652                     err = INVALID_OPERATION;
5653                     break;
5654             }
5655 
5656             PostReplyWithError(replyID, err);
5657             break;
5658         }
5659 
5660         case kWhatCreateInputSurface:
5661         case kWhatSetInputSurface:
5662         {
5663             // Must be configured, but can't have been started yet.
5664             if (mState != CONFIGURED) {
5665                 mErrorLog.log(LOG_TAG, base::StringPrintf(
5666                         "setInputSurface() is valid only at Configured state; currently %s",
5667                         apiStateString().c_str()));
5668                 PostReplyWithError(msg, INVALID_OPERATION);
5669                 break;
5670             }
5671 
5672             if (mReplyID) {
5673                 mDeferredMessages.push_back(msg);
5674                 break;
5675             }
5676             sp<AReplyToken> replyID;
5677             CHECK(msg->senderAwaitsResponse(&replyID));
5678 
5679             mReplyID = replyID;
5680             if (msg->what() == kWhatCreateInputSurface) {
5681                 mCodec->initiateCreateInputSurface();
5682             } else {
5683                 sp<RefBase> obj;
5684                 CHECK(msg->findObject("input-surface", &obj));
5685 
5686                 mCodec->initiateSetInputSurface(
5687                         static_cast<PersistentSurface *>(obj.get()));
5688             }
5689             break;
5690         }
5691         case kWhatStart:
5692         {
5693             if (mState == FLUSHED) {
5694                 setState(STARTED);
5695                 if (mHavePendingInputBuffers) {
5696                     onInputBufferAvailable();
5697                     mHavePendingInputBuffers = false;
5698                 }
5699                 mCodec->signalResume();
5700                 PostReplyWithError(msg, OK);
5701                 break;
5702             } else if (mState != CONFIGURED) {
5703                 mErrorLog.log(LOG_TAG, base::StringPrintf(
5704                         "start() is valid only at Configured state; currently %s",
5705                         apiStateString().c_str()));
5706                 PostReplyWithError(msg, INVALID_OPERATION);
5707                 break;
5708             }
5709 
5710             if (mReplyID) {
5711                 mDeferredMessages.push_back(msg);
5712                 break;
5713             }
5714             sp<AReplyToken> replyID;
5715             CHECK(msg->senderAwaitsResponse(&replyID));
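            // Unless legacy behavior is in effect, reset the tunnel peek state for the new
            // session according to whether peek is currently enabled.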
5716             TunnelPeekState previousState = mTunnelPeekState;
5717             if (previousState != TunnelPeekState::kLegacyMode) {
5718                 mTunnelPeekState = mTunnelPeekEnabled ? TunnelPeekState::kEnabledNoBuffer :
5719                     TunnelPeekState::kDisabledNoBuffer;
5720                 ALOGV("TunnelPeekState: %s -> %s",
5721                         asString(previousState),
5722                         asString(mTunnelPeekState));
5723             }
5724 
5725             mReplyID = replyID;
5726             setState(STARTING);
5727 
5728             mCodec->initiateStart();
5729             break;
5730         }
5731 
5732         case kWhatStop: {
5733             if (mReplyID) {
5734                 mDeferredMessages.push_back(msg);
5735                 break;
5736             }
5737             [[fallthrough]];
5738         }
5739         case kWhatRelease:
5740         {
5741             State targetState =
5742                 (msg->what() == kWhatStop) ? INITIALIZED : UNINITIALIZED;
5743 
5744             if ((mState == RELEASING && targetState == UNINITIALIZED)
5745                     || (mState == STOPPING && targetState == INITIALIZED)) {
5746                 mDeferredMessages.push_back(msg);
5747                 break;
5748             }
5749 
5750             sp<AReplyToken> replyID;
5751             CHECK(msg->senderAwaitsResponse(&replyID));
5752             stopCryptoAsync();
5753             sp<AMessage> asyncNotify;
5754             (void)msg->findMessage("async", &asyncNotify);
5755             // post asyncNotify if going out of scope.
5756             struct AsyncNotifyPost {
5757                 AsyncNotifyPost(const sp<AMessage> &asyncNotify) : mAsyncNotify(asyncNotify) {}
5758                 ~AsyncNotifyPost() {
5759                     if (mAsyncNotify) {
5760                         mAsyncNotify->post();
5761                     }
5762                 }
5763                 void clear() { mAsyncNotify.clear(); }
5764             private:
5765                 sp<AMessage> mAsyncNotify;
5766             } asyncNotifyPost{asyncNotify};
5767 
5768             // already stopped/released
5769             if (mState == UNINITIALIZED && mReleasedByResourceManager) {
5770                 sp<AMessage> response = new AMessage;
5771                 response->setInt32("err", OK);
5772                 response->postReply(replyID);
5773                 break;
5774             }
5775 
5776             int32_t reclaimed = 0;
5777             msg->findInt32("reclaimed", &reclaimed);
5778             if (reclaimed) {
5779                 if (!mReleasedByResourceManager) {
5780                     // notify the async client
5781                     if (mFlags & kFlagIsAsync) {
5782                         onError(DEAD_OBJECT, ACTION_CODE_FATAL);
5783                     }
5784                     mErrorLog.log(LOG_TAG, "Released by resource manager");
5785                     mReleasedByResourceManager = true;
5786                 }
5787 
5788                 int32_t force = 0;
5789                 msg->findInt32("force", &force);
5790                 if (!force && hasPendingBuffer()) {
5791                     ALOGW("Can't reclaim codec right now due to pending buffers.");
5792 
5793                     // return WOULD_BLOCK to ask resource manager to retry later.
5794                     sp<AMessage> response = new AMessage;
5795                     response->setInt32("err", WOULD_BLOCK);
5796                     response->postReply(replyID);
5797 
5798                     break;
5799                 }
5800             }
5801 
5802             bool isReleasingAllocatedComponent =
5803                     (mFlags & kFlagIsComponentAllocated) && targetState == UNINITIALIZED;
5804             if (!isReleasingAllocatedComponent // See 1
5805                     && mState != INITIALIZED
5806                     && mState != CONFIGURED && !isExecuting()) {
5807                 // 1) Permit release to shut down the component if allocated.
5808                 //
5809                 // 2) We may already be in the "UNINITIALIZED" state and may
5810                 // also have shut down the encoder/decoder without the
5811                 // client being aware of it, if the media server died while
5812                 // we were being stopped. The client would assume that after
5813                 // stop() returned it would be safe to call release(), and it
5814                 // should be; in this case there is no harm in allowing a
5815                 // release() even though we're already uninitialized.
5816                 sp<AMessage> response = new AMessage;
5817                 // TODO: we shouldn't throw an exception for stop/release. Change this to wait until
5818                 // the previous stop/release completes and then reply with OK.
5819                 status_t err = mState == targetState ? OK : INVALID_OPERATION;
5820                 response->setInt32("err", err);
5821                 // TODO: mErrorLog
5822                 if (err == OK && targetState == UNINITIALIZED) {
5823                     mComponentName.clear();
5824                 }
5825                 response->postReply(replyID);
5826                 break;
5827             }
5828 
5829             // If we're flushing, configuring or starting but
5830             // received a release request, post the reply for the pending call
5831             // first, and consider it done. The reply token will be replaced
5832             // after this, and we'll no longer be able to reply.
5833             if (mState == FLUSHING || mState == CONFIGURING || mState == STARTING) {
5834                 // mReplyID is always set in these states.
5835                 postPendingRepliesAndDeferredMessages(
5836                         std::string("kWhatRelease:") + stateString(mState));
5837             }
5838             // If we're stopping but received a release request, post the reply
5839             // for the pending call if necessary. Note that the reply may have been
5840             // already posted due to an error.
5841             if (mState == STOPPING && mReplyID) {
5842                 postPendingRepliesAndDeferredMessages("kWhatRelease:STOPPING");
5843             }
5844 
5845             if (mFlags & kFlagSawMediaServerDie) {
5846                 // It's dead, Jim. Don't expect initiateShutdown to yield
5847                 // any useful results now...
5848                 // Any pending reply would have been handled at kWhatError.
5849                 setState(UNINITIALIZED);
5850                 if (targetState == UNINITIALIZED) {
5851                     mComponentName.clear();
5852                 }
5853                 (new AMessage)->postReply(replyID);
5854                 break;
5855             }
5856 
5857             // If we already have an error, component may not be able to
5858             // complete the shutdown properly. If we're stopping, post the
5859             // reply now with an error to unblock the client, so the client
5860             // can release() after the failure (instead of an ANR).
5861             if (msg->what() == kWhatStop && (mFlags & kFlagStickyError)) {
5862                 // Any pending reply would have been handled at kWhatError.
5863                 PostReplyWithError(replyID, getStickyError());
5864                 break;
5865             }
5866 
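            // For an asynchronous release while an output surface is attached, first try to
            // switch the codec over to an internally owned release surface; if that fails,
            // fall back to a synchronous release.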
5867             bool forceSync = false;
5868             if (asyncNotify != nullptr && mSurface != NULL) {
5869                 if (android::media::codec::provider_->null_output_surface_support()) {
5870                     if (handleSetSurface(getOrCreateDetachedSurface(), true /* callCodec */,
5871                                          true /* onShutDown */) != OK) {
5872                         // We were not able to detach the surface, so force
5873                         // synchronous release.
5874                         forceSync = true;
5875                     }
5876                 } else {
5877                     if (!mDetachedSurface) {
5878                         uint64_t usage = 0;
5879                         if (mSurface->getConsumerUsage(&usage) != OK) {
5880                             usage = 0;
5881                         }
5882                         mDetachedSurface.reset(new ReleaseSurface(usage));
5883                     }
5884                     if (mSurface != mDetachedSurface->getSurface()) {
5885                         uint32_t generation;
5886                         status_t err =
5887                             connectToSurface(mDetachedSurface->getSurface(), &generation);
5888                         ALOGW_IF(err != OK, "error connecting to release surface: err = %d", err);
5889                         if (err == OK && !(mFlags & kFlagUsesSoftwareRenderer)) {
5890                             err = mCodec->setSurface(mDetachedSurface->getSurface(), generation);
5891                             ALOGW_IF(err != OK, "error setting release surface: err = %d", err);
5892                         }
5893                         if (err == OK) {
5894                             (void)disconnectFromSurface();
5895                             mSurface = mDetachedSurface->getSurface();
5896                             mSurfaceGeneration = generation;
5897                         } else {
5898                             // We were not able to switch the surface, so force
5899                             // synchronous release.
5900                             forceSync = true;
5901                         }
5902                     }
5903                 }
5904             }
5905 
5906             if (mReplyID) {
5907                 // State transition replies are handled above, so this reply
5908                 // would not be related to state transition. As we are
5909                 // shutting down the component, just fail the operation.
5910                 postPendingRepliesAndDeferredMessages("kWhatRelease:reply", UNKNOWN_ERROR);
5911             }
5912             mReplyID = replyID;
5913             setState(msg->what() == kWhatStop ? STOPPING : RELEASING);
5914 
5915             mCodec->initiateShutdown(
5916                     msg->what() == kWhatStop /* keepComponentAllocated */);
5917 
5918             returnBuffersToCodec(reclaimed);
5919 
5920             if (mSoftRenderer != NULL && (mFlags & kFlagPushBlankBuffersOnShutdown)) {
5921                 pushBlankBuffersToNativeWindow(mSurface.get());
5922             }
5923 
5924             if (asyncNotify != nullptr) {
5925                 if (!forceSync) {
5926                     mResourceManagerProxy->markClientForPendingRemoval();
5927                     postPendingRepliesAndDeferredMessages("kWhatRelease:async");
5928                 }
5929                 asyncNotifyPost.clear();
5930                 mAsyncReleaseCompleteNotification = asyncNotify;
5931             }
5932 
5933             break;
5934         }
5935 
5936         case kWhatDequeueInputBuffer:
5937         {
5938             sp<AReplyToken> replyID;
5939             CHECK(msg->senderAwaitsResponse(&replyID));
5940 
5941             if (mFlags & kFlagIsAsync) {
5942                 mErrorLog.log(LOG_TAG, "dequeueInputBuffer can't be used in async mode");
5943                 PostReplyWithError(replyID, INVALID_OPERATION);
5944                 break;
5945             }
5946 
5947             if (mHaveInputSurface) {
5948                 mErrorLog.log(LOG_TAG, "dequeueInputBuffer can't be used with input surface");
5949                 PostReplyWithError(replyID, INVALID_OPERATION);
5950                 break;
5951             }
5952 
5953             if (handleDequeueInputBuffer(replyID, true /* new request */)) {
5954                 break;
5955             }
5956 
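            // No input buffer is available yet. With a zero timeout, reply -EAGAIN right
            // away; otherwise remember the reply token and, for a finite timeout, post a
            // timeout message tagged with the current generation so stale timeouts can be
            // ignored.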
5957             int64_t timeoutUs;
5958             CHECK(msg->findInt64("timeoutUs", &timeoutUs));
5959 
5960             if (timeoutUs == 0LL) {
5961                 PostReplyWithError(replyID, -EAGAIN);
5962                 break;
5963             }
5964 
5965             mFlags |= kFlagDequeueInputPending;
5966             mDequeueInputReplyID = replyID;
5967 
5968             if (timeoutUs > 0LL) {
5969                 sp<AMessage> timeoutMsg =
5970                     new AMessage(kWhatDequeueInputTimedOut, this);
5971                 timeoutMsg->setInt32(
5972                         "generation", ++mDequeueInputTimeoutGeneration);
5973                 timeoutMsg->post(timeoutUs);
5974             }
5975             break;
5976         }
5977 
5978         case kWhatDequeueInputTimedOut:
5979         {
5980             int32_t generation;
5981             CHECK(msg->findInt32("generation", &generation));
5982 
5983             if (generation != mDequeueInputTimeoutGeneration) {
5984                 // Obsolete
5985                 break;
5986             }
5987 
5988             CHECK(mFlags & kFlagDequeueInputPending);
5989 
5990             PostReplyWithError(mDequeueInputReplyID, -EAGAIN);
5991 
5992             mFlags &= ~kFlagDequeueInputPending;
5993             mDequeueInputReplyID = 0;
5994             break;
5995         }
5996 
5997         case kWhatQueueInputBuffer:
5998         {
5999             sp<AReplyToken> replyID;
6000             CHECK(msg->senderAwaitsResponse(&replyID));
6001 
6002             if (!isExecuting()) {
6003                 mErrorLog.log(LOG_TAG, base::StringPrintf(
6004                         "queueInputBuffer() is valid only at Executing states; currently %s",
6005                         apiStateString().c_str()));
6006                 PostReplyWithError(replyID, INVALID_OPERATION);
6007                 break;
6008             } else if (mFlags & kFlagStickyError) {
6009                 PostReplyWithError(replyID, getStickyError());
6010                 break;
6011             }
6012 
6013             status_t err = UNKNOWN_ERROR;
6014             if (!mLeftover.empty()) {
6015                 mLeftover.push_back(msg);
6016                 size_t index;
6017                 msg->findSize("index", &index);
6018                 err = handleLeftover(index);
6019             } else {
6020                 err = onQueueInputBuffer(msg);
6021             }
6022 
6023             PostReplyWithError(replyID, err);
6024             break;
6025         }
6026 
6027         case kWhatDequeueOutputBuffer:
6028         {
6029             sp<AReplyToken> replyID;
6030             CHECK(msg->senderAwaitsResponse(&replyID));
6031 
6032             if (mFlags & kFlagIsAsync) {
6033                 mErrorLog.log(LOG_TAG, "dequeueOutputBuffer can't be used in async mode");
6034                 PostReplyWithError(replyID, INVALID_OPERATION);
6035                 break;
6036             }
6037 
6038             DequeueOutputResult dequeueResult =
6039                 handleDequeueOutputBuffer(replyID, true /* new request */);
6040             switch (dequeueResult) {
6041                 case DequeueOutputResult::kNoBuffer:
6042                     [[fallthrough]];
6043                 case DequeueOutputResult::kDiscardedBuffer:
6044                 {
6045                     int64_t timeoutUs;
6046                     CHECK(msg->findInt64("timeoutUs", &timeoutUs));
6047 
6048                     if (timeoutUs == 0LL) {
6049                         PostReplyWithError(replyID, -EAGAIN);
6050                         break;
6051                     }
6052 
6053                     mFlags |= kFlagDequeueOutputPending;
6054                     mDequeueOutputReplyID = replyID;
6055 
6056                     if (timeoutUs > 0LL) {
6057                         sp<AMessage> timeoutMsg =
6058                             new AMessage(kWhatDequeueOutputTimedOut, this);
6059                         timeoutMsg->setInt32(
6060                                 "generation", ++mDequeueOutputTimeoutGeneration);
6061                         timeoutMsg->post(timeoutUs);
6062                     }
6063                     break;
6064                 }
6065                 case DequeueOutputResult::kRepliedWithError:
6066                     [[fallthrough]];
6067                 case DequeueOutputResult::kSuccess:
6068                     break;
6069                 default:
6070                     TRESPASS();
6071             }
6072             break;
6073         }
6074 
6075         case kWhatDequeueOutputTimedOut:
6076         {
6077             int32_t generation;
6078             CHECK(msg->findInt32("generation", &generation));
6079 
6080             if (generation != mDequeueOutputTimeoutGeneration) {
6081                 // Obsolete
6082                 break;
6083             }
6084 
6085             CHECK(mFlags & kFlagDequeueOutputPending);
6086 
6087             PostReplyWithError(mDequeueOutputReplyID, -EAGAIN);
6088 
6089             mFlags &= ~kFlagDequeueOutputPending;
6090             mDequeueOutputReplyID = 0;
6091             break;
6092         }
6093 
6094         case kWhatReleaseOutputBuffer:
6095         {
6096             sp<AReplyToken> replyID;
6097             CHECK(msg->senderAwaitsResponse(&replyID));
6098 
6099             if (!isExecuting()) {
6100                 mErrorLog.log(LOG_TAG, base::StringPrintf(
6101                         "releaseOutputBuffer() is valid only at Executing states; currently %s",
6102                         apiStateString().c_str()));
6103                 PostReplyWithError(replyID, INVALID_OPERATION);
6104                 break;
6105             } else if (mFlags & kFlagStickyError) {
6106                 PostReplyWithError(replyID, getStickyError());
6107                 break;
6108             }
6109 
6110             status_t err = onReleaseOutputBuffer(msg);
6111 
6112             PostReplyWithError(replyID, err);
6113             break;
6114         }
6115 
6116         case kWhatPollForRenderedBuffers:
6117         {
6118             if (isExecuting()) {
6119                 mBufferChannel->pollForRenderedBuffers();
6120             }
6121             break;
6122         }
6123 
6124         case kWhatSignalEndOfInputStream:
6125         {
6126             if (!isExecuting()) {
6127                 mErrorLog.log(LOG_TAG, base::StringPrintf(
6128                         "signalEndOfInputStream() is valid only at Executing states; currently %s",
6129                         apiStateString().c_str()));
6130                 PostReplyWithError(msg, INVALID_OPERATION);
6131                 break;
6132             } else if (!mHaveInputSurface) {
6133                 mErrorLog.log(
6134                         LOG_TAG, "signalEndOfInputStream() called without an input surface set");
6135                 PostReplyWithError(msg, INVALID_OPERATION);
6136                 break;
6137             } else if (mFlags & kFlagStickyError) {
6138                 PostReplyWithError(msg, getStickyError());
6139                 break;
6140             }
6141 
6142             if (mReplyID) {
6143                 mDeferredMessages.push_back(msg);
6144                 break;
6145             }
6146             sp<AReplyToken> replyID;
6147             CHECK(msg->senderAwaitsResponse(&replyID));
6148 
6149             mReplyID = replyID;
6150             mCodec->signalEndOfInputStream();
6151             break;
6152         }
6153 
6154         case kWhatGetBuffers:
6155         {
6156             sp<AReplyToken> replyID;
6157             CHECK(msg->senderAwaitsResponse(&replyID));
6158             if (!isExecuting()) {
6159                 mErrorLog.log(LOG_TAG, base::StringPrintf(
6160                         "getInput/OutputBuffers() is valid only at Executing states; currently %s",
6161                         apiStateString().c_str()));
6162                 PostReplyWithError(replyID, INVALID_OPERATION);
6163                 break;
6164             } else if (mFlags & kFlagIsAsync) {
6165                 mErrorLog.log(LOG_TAG, "getInput/OutputBuffers() is not supported with callbacks");
6166                 PostReplyWithError(replyID, INVALID_OPERATION);
6167                 break;
6168             } else if (mFlags & kFlagStickyError) {
6169                 PostReplyWithError(replyID, getStickyError());
6170                 break;
6171             }
6172 
6173             int32_t portIndex;
6174             CHECK(msg->findInt32("portIndex", &portIndex));
6175 
6176             Vector<sp<MediaCodecBuffer> > *dstBuffers;
6177             CHECK(msg->findPointer("buffers", (void **)&dstBuffers));
6178 
6179             dstBuffers->clear();
6180             // If we're using an input surface (either a non-persistent one created by
6181             // createInputSurface(), or a persistent one set by setInputSurface()),
6182             // give the client an empty input buffer array.
6183             if (portIndex != kPortIndexInput || !mHaveInputSurface) {
6184                 if (portIndex == kPortIndexInput) {
6185                     mBufferChannel->getInputBufferArray(dstBuffers);
6186                 } else {
6187                     mBufferChannel->getOutputBufferArray(dstBuffers);
6188                 }
6189             }
6190 
6191             mApiUsageMetrics.isArrayMode = true;
6192 
6193             (new AMessage)->postReply(replyID);
6194             break;
6195         }
6196 
6197         case kWhatFlush:
6198         {
6199             if (!isExecuting()) {
6200                 mErrorLog.log(LOG_TAG, base::StringPrintf(
6201                         "flush() is valid only at Executing states; currently %s",
6202                         apiStateString().c_str()));
6203                 PostReplyWithError(msg, INVALID_OPERATION);
6204                 break;
6205             } else if (mFlags & kFlagStickyError) {
6206                 PostReplyWithError(msg, getStickyError());
6207                 break;
6208             }
6209 
6210             if (mReplyID) {
6211                 mDeferredMessages.push_back(msg);
6212                 break;
6213             }
6214             sp<AReplyToken> replyID;
6215             CHECK(msg->senderAwaitsResponse(&replyID));
6216 
6217             mReplyID = replyID;
6218             // TODO: skip flushing if already FLUSHED
6219             setState(FLUSHING);
6220             stopCryptoAsync();
6221             mCodec->signalFlush();
6222             returnBuffersToCodec();
6223             TunnelPeekState previousState = mTunnelPeekState;
6224             if (previousState != TunnelPeekState::kLegacyMode) {
6225                 mTunnelPeekState = mTunnelPeekEnabled ? TunnelPeekState::kEnabledNoBuffer :
6226                     TunnelPeekState::kDisabledNoBuffer;
6227                 ALOGV("TunnelPeekState: %s -> %s",
6228                         asString(previousState),
6229                         asString(mTunnelPeekState));
6230             }
6231             break;
6232         }
6233 
6234         case kWhatGetInputFormat:
6235         case kWhatGetOutputFormat:
6236         {
6237             sp<AMessage> format =
6238                 (msg->what() == kWhatGetOutputFormat ? mOutputFormat : mInputFormat);
6239 
6240             sp<AReplyToken> replyID;
6241             CHECK(msg->senderAwaitsResponse(&replyID));
6242 
6243             if (mState != CONFIGURED && mState != STARTING &&
6244                     mState != STARTED && mState != FLUSHING &&
6245                     mState != FLUSHED) {
6246                 mErrorLog.log(LOG_TAG, base::StringPrintf(
6247                         "getInput/OutputFormat() is valid at Executing states "
6248                         "and Configured state; currently %s",
6249                         apiStateString().c_str()));
6250                 PostReplyWithError(replyID, INVALID_OPERATION);
6251                 break;
6252             } else if (format == NULL) {
6253                 mErrorLog.log(LOG_TAG, "Fatal error: format is not initialized");
6254                 PostReplyWithError(replyID, INVALID_OPERATION);
6255                 break;
6256             } else if (mFlags & kFlagStickyError) {
6257                 PostReplyWithError(replyID, getStickyError());
6258                 break;
6259             }
6260 
6261             sp<AMessage> response = new AMessage;
6262             response->setMessage("format", format);
6263             response->postReply(replyID);
6264             break;
6265         }
6266 
6267         case kWhatRequestIDRFrame:
6268         {
6269             mCodec->signalRequestIDRFrame();
6270             break;
6271         }
6272 
6273         case kWhatRequestActivityNotification:
6274         {
6275             CHECK(mActivityNotify == NULL);
6276             CHECK(msg->findMessage("notify", &mActivityNotify));
6277 
6278             postActivityNotificationIfPossible();
6279             break;
6280         }
6281 
6282         case kWhatGetName:
6283         {
6284             sp<AReplyToken> replyID;
6285             CHECK(msg->senderAwaitsResponse(&replyID));
6286 
6287             if (mComponentName.empty()) {
6288                 mErrorLog.log(LOG_TAG, "Fatal error: name is not set");
6289                 PostReplyWithError(replyID, INVALID_OPERATION);
6290                 break;
6291             }
6292 
6293             sp<AMessage> response = new AMessage;
6294             response->setString("name", mComponentName.c_str());
6295             response->postReply(replyID);
6296             break;
6297         }
6298 
6299         case kWhatGetCodecInfo:
6300         {
6301             sp<AReplyToken> replyID;
6302             CHECK(msg->senderAwaitsResponse(&replyID));
6303 
6304             sp<AMessage> response = new AMessage;
6305             response->setObject("codecInfo", mCodecInfo);
6306             response->postReply(replyID);
6307             break;
6308         }
6309 
6310         case kWhatSetParameters:
6311         {
6312             sp<AReplyToken> replyID;
6313             CHECK(msg->senderAwaitsResponse(&replyID));
6314 
6315             sp<AMessage> params;
6316             CHECK(msg->findMessage("params", &params));
6317 
6318             status_t err = onSetParameters(params);
6319 
6320             PostReplyWithError(replyID, err);
6321             break;
6322         }
6323 
6324         case kWhatDrmReleaseCrypto:
6325         {
6326             onReleaseCrypto(msg);
6327             break;
6328         }
6329 
6330         case kWhatCheckBatteryStats:
6331         {
6332             if (mBatteryChecker != nullptr) {
6333                 mBatteryChecker->onCheckBatteryTimer(msg, [this] () {
6334                     mResourceManagerProxy->removeResource(
6335                             MediaResource::VideoBatteryResource(mIsHardware));
6336                 });
6337             }
6338             break;
6339         }
6340 
6341         default:
6342             TRESPASS();
6343     }
6344 }
6345 
6346 // always called from the looper thread
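// Compares the buffer's format against mOutputFormat and, if it changed, adopts the new format:
// in block model the set of changed keys is attached to the buffer's meta, the output surface is
// updated with dataspace/HDR metadata (and a SoftwareRenderer is created when needed), encoder
// codec-specific data is folded into the format, subsession and resolution changes are detected,
// and the client is notified either through the async callback or the output-format-changed flag.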
6347 void MediaCodec::handleOutputFormatChangeIfNeeded(const sp<MediaCodecBuffer> &buffer) {
6348     sp<AMessage> format = buffer->format();
6349     if (mOutputFormat == format) {
6350         return;
6351     }
6352     if (mFlags & kFlagUseBlockModel) {
6353         sp<AMessage> diff1 = mOutputFormat->changesFrom(format);
6354         sp<AMessage> diff2 = format->changesFrom(mOutputFormat);
6355         std::set<std::string> keys;
6356         size_t numEntries = diff1->countEntries();
6357         AMessage::Type type;
6358         for (size_t i = 0; i < numEntries; ++i) {
6359             keys.emplace(diff1->getEntryNameAt(i, &type));
6360         }
6361         numEntries = diff2->countEntries();
6362         for (size_t i = 0; i < numEntries; ++i) {
6363             keys.emplace(diff2->getEntryNameAt(i, &type));
6364         }
6365         sp<WrapperObject<std::set<std::string>>> changedKeys{
6366             new WrapperObject<std::set<std::string>>{std::move(keys)}};
6367         buffer->meta()->setObject("changedKeys", changedKeys);
6368     }
6369     mOutputFormat = format;
6370     mapFormat(mComponentName, format, nullptr, true);
6371     ALOGV("[%s] output format changed to: %s",
6372             mComponentName.c_str(), mOutputFormat->debugString(4).c_str());
6373 
6374     if (mSoftRenderer == NULL &&
6375             mSurface != NULL &&
6376             (mFlags & kFlagUsesSoftwareRenderer)) {
6377         AString mime;
6378         CHECK(mOutputFormat->findString("mime", &mime));
6379 
6380         // TODO: propagate color aspects to software renderer to allow better
6381         // color conversion to RGB. For now, just mark dataspace for YUV
6382         // rendering.
6383         int32_t dataSpace;
6384         if (mOutputFormat->findInt32("android._dataspace", &dataSpace)) {
6385             ALOGD("[%s] setting dataspace on output surface to %#x",
6386                     mComponentName.c_str(), dataSpace);
6387             int err = native_window_set_buffers_data_space(
6388                     mSurface.get(), (android_dataspace)dataSpace);
6389             ALOGW_IF(err != 0, "failed to set dataspace on surface (%d)", err);
6390         }
6391         if (mOutputFormat->contains("hdr-static-info")) {
6392             HDRStaticInfo info;
6393             if (ColorUtils::getHDRStaticInfoFromFormat(mOutputFormat, &info)) {
6394                 setNativeWindowHdrMetadata(mSurface.get(), &info);
6395             }
6396         }
6397 
6398         sp<ABuffer> hdr10PlusInfo;
6399         if (mOutputFormat->findBuffer("hdr10-plus-info", &hdr10PlusInfo)
6400                 && hdr10PlusInfo != nullptr && hdr10PlusInfo->size() > 0) {
6401             native_window_set_buffers_hdr10_plus_metadata(mSurface.get(),
6402                     hdr10PlusInfo->size(), hdr10PlusInfo->data());
6403         }
6404 
6405         if (mime.startsWithIgnoreCase("video/")) {
6406             mSurface->setDequeueTimeout(-1);
6407             mSoftRenderer = new SoftwareRenderer(mSurface, mRotationDegrees);
6408         }
6409     }
6410 
6411     requestCpuBoostIfNeeded();
6412 
6413     if (mFlags & kFlagIsEncoder) {
6414         // Before we announce the format change we should
6415         // collect codec specific data and amend the output
6416         // format as necessary.
6417         int32_t flags = 0;
6418         (void) buffer->meta()->findInt32("flags", &flags);
6419         if ((flags & BUFFER_FLAG_CODECCONFIG) && !(mFlags & kFlagIsSecure)
6420                 && !mOwnerName.startsWith("codec2::")) {
6421             status_t err =
6422                 amendOutputFormatWithCodecSpecificData(buffer);
6423 
6424             if (err != OK) {
6425                 ALOGE("Codec spit out malformed codec "
6426                       "specific data!");
6427             }
6428         }
6429     }
6430 
6431     // Update the width and the height.
6432     int32_t left = 0, top = 0, right = 0, bottom = 0, width = 0, height = 0;
6433     bool newSubsession = false;
6434     if (android::media::codec::provider_->subsession_metrics()) {
6435         // consider a new subsession if the actual video size changes
6436         // TODO: if the clip resolution changes "mid-stream" but the crop parameters do not
6437         // change (or change such that the actual video size stays the same), a new subsession
6438         // is not detected.
6439         // TODO: although rare, the buffer attributes (rect(...), width, height) need not be a
6440         // true representation of the actual stream attributes (rect(...), width, height). It is
6441         // only required that the actual video frame is correctly presented in the rect() region
6442         // of the buffer, which makes this approach to detecting a subsession less reliable.
6443         if (mOutputFormat->findRect("crop", &left, &top, &right, &bottom)) {
6444             if ((right - left + 1) != mWidth || (bottom - top + 1) != mHeight) {
6445                 newSubsession = true;
6446             }
6447         } else if (mOutputFormat->findInt32("width", &width) &&
6448                    mOutputFormat->findInt32("height", &height) &&
6449                    (width != mWidth || height != mHeight)) {
6450             newSubsession = true;
6451         }
6452     }
6453     // TODO: properly detect new audio subsession
6454 
6455     // Only consider a new subsession if we already have output (from a previous subsession).
6456     if (newSubsession && mMetricsToUpload && mBytesEncoded > 0) {
6457         handleStartingANewSubsession();
6458     }
6459 
6460     if (mFlags & kFlagIsAsync) {
6461         onOutputFormatChanged();
6462     } else {
6463         mFlags |= kFlagOutputFormatChanged;
6464         postActivityNotificationIfPossible();
6465     }
6466 
6467     bool resolutionChanged = false;
6468     if (mOutputFormat->findRect("crop", &left, &top, &right, &bottom)) {
6469         mWidth = right - left + 1;
6470         mHeight = bottom - top + 1;
6471         resolutionChanged = true;
6472     } else if (mOutputFormat->findInt32("width", &width) &&
6473                mOutputFormat->findInt32("height", &height)) {
6474         mWidth = width;
6475         mHeight = height;
6476         resolutionChanged = true;
6477     }
6478 
6479     // Notify mCrypto and the RM of video resolution changes
6480     if (resolutionChanged) {
6481         if (mCrypto != NULL) {
6482             mCrypto->notifyResolution(mWidth, mHeight);
6483         }
6484         ClientConfigParcel clientConfig;
6485         initClientConfigParcel(clientConfig);
6486         mResourceManagerProxy->notifyClientConfigChanged(clientConfig);
6487         mReliabilityContextMetrics.resolutionChangeCount++;
6488     }
6489 
6490     updateHdrMetrics(false /* isConfig */);
6491 
6492     if (mDomain == DOMAIN_VIDEO) {
6493         bool isEncoder = mFlags & kFlagIsEncoder;
6494         // Since the output format has changed, see if we need to update
6495         // operating frame-rate.
6496         float frameRate = getOperatingFrameRate(mOutputFormat, mFrameRate, isEncoder);
6497         // if the operating frame-rate has changed, we need to recalibrate the
6498         // required system resources again and notify the caller.
6499         if (frameRate != mFrameRate) {
6500             mFrameRate = frameRate;
6501             if (getRequiredSystemResources()) {
6502                 onRequiredResourcesChanged();
6503             }
6504         }
6505     }
6506 }
6507 
6508 // always called from the looper thread (and therefore not mutexed)
6509 void MediaCodec::handleStartingANewSubsession() {
6510     // create a new metrics item for the subsession with the new resolution.
6511     // TODO: properly account input counts for the previous and the new
6512     // subsessions. We only find out that a new subsession started from the
6513     // output format, but by that time we already accounted the input counts
6514     // to the previous subsession.
6515     flushMediametrics(); // this deletes mMetricsHandle, but stores it in mLastMetricsHandle
6516 
6517     // hence mLastMetricsHandle has the metrics item for the previous subsession.
6518     if ((mFlags & kFlagIsAsync) && mCallback != nullptr) {
6519         sp<AMessage> msg = mCallback->dup();
6520         msg->setInt32("callbackID", CB_METRICS_FLUSHED);
6521         std::unique_ptr<mediametrics::Item> flushedMetrics(
6522                 mediametrics::Item::convert(mediametrics_dup(mLastMetricsHandle)));
6523         msg->setObject("metrics", new WrapperObject<std::unique_ptr<mediametrics::Item>>(
6524                 std::move(flushedMetrics)));
6525         msg->post();
6526     }
6527 
6528     // reuse/continue old metrics item for the new subsession.
6529     mMetricsHandle = mediametrics_dup(mLastMetricsHandle);
6530     mMetricsToUpload = true;
6531     // TODO: configured width/height for the new subsession should be the
6532     // previous width/height.
6533     mSubsessionCount++;
6534     resetSubsessionMetricsFields();
6535 }
6536 
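// Collects codec specific data (CSD) buffers from the configure() format. CSD is passed as
// consecutively numbered "csd-0", "csd-1", ... entries; iteration stops at the first missing
// index, and empty entries are skipped with a warning. As an illustrative sketch (not taken from
// this file), a caller might populate these entries roughly like:
//
//     sp<AMessage> format = new AMessage;
//     format->setBuffer("csd-0", spsBuffer);  // e.g. SPS for AVC (hypothetical buffers)
//     format->setBuffer("csd-1", ppsBuffer);  // e.g. PPS for AVC
//
// The extracted buffers are later submitted by queueCSDInputBuffer() below.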
6537 void MediaCodec::extractCSD(const sp<AMessage> &format) {
6538     mCSD.clear();
6539 
6540     size_t i = 0;
6541     for (;;) {
6542         sp<ABuffer> csd;
6543         if (!format->findBuffer(base::StringPrintf("csd-%zu", i).c_str(), &csd)) {
6544             break;
6545         }
6546         if (csd->size() == 0) {
6547             ALOGW("csd-%zu size is 0", i);
6548         } else {
6549             mCSD.push_back(csd);
6550         }
6551         ++i;
6552     }
6553 
6554     ALOGV("Found %zu pieces of codec specific data.", mCSD.size());
6555 }
6556 
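// Takes the next pending CSD buffer from mCSD and submits it to the codec as a regular input
// buffer flagged BUFFER_FLAG_CODECCONFIG. In block model the CSD is copied into either a
// shared-memory region (crypto/descrambler path) or a freshly fetched C2 linear block; otherwise
// it is copied directly into the client-visible input buffer at bufferIndex.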
6557 status_t MediaCodec::queueCSDInputBuffer(size_t bufferIndex) {
6558     CHECK(!mCSD.empty());
6559 
6560     sp<ABuffer> csd = *mCSD.begin();
6561     mCSD.erase(mCSD.begin());
6562     std::shared_ptr<C2Buffer> c2Buffer;
6563     sp<hardware::HidlMemory> memory;
6564 
6565     if (mFlags & kFlagUseBlockModel) {
6566         if (hasCryptoOrDescrambler()) {
6567             constexpr size_t kInitialDealerCapacity = 1048576;  // 1MB
6568             thread_local sp<MemoryDealer> sDealer = new MemoryDealer(
6569                     kInitialDealerCapacity, "CSD(1MB)");
6570             sp<IMemory> mem = sDealer->allocate(csd->size());
6571             if (mem == nullptr) {
6572                 size_t newDealerCapacity = sDealer->getMemoryHeap()->getSize() * 2;
6573                 while (csd->size() * 2 > newDealerCapacity) {
6574                     newDealerCapacity *= 2;
6575                 }
6576                 sDealer = new MemoryDealer(
6577                         newDealerCapacity,
6578                         base::StringPrintf("CSD(%zuMB)", newDealerCapacity / 1048576).c_str());
6579                 mem = sDealer->allocate(csd->size());
6580             }
6581             memcpy(mem->unsecurePointer(), csd->data(), csd->size());
6582             ssize_t heapOffset;
6583             memory = hardware::fromHeap(mem->getMemory(&heapOffset, nullptr));
6584         } else {
6585             std::shared_ptr<C2LinearBlock> block =
6586                 FetchLinearBlock(csd->size(), {std::string{mComponentName.c_str()}});
6587             C2WriteView view{block->map().get()};
6588             if (view.error() != C2_OK) {
6589                 mErrorLog.log(LOG_TAG, "Fatal error: failed to allocate and map a block");
6590                 return -EINVAL;
6591             }
6592             if (csd->size() > view.capacity()) {
6593                 mErrorLog.log(LOG_TAG, base::StringPrintf(
6594                         "Fatal error: allocated block is too small "
6595                         "(csd size %zu; block cap %u)",
6596                         csd->size(), view.capacity()));
6597                 return -EINVAL;
6598             }
6599             memcpy(view.base(), csd->data(), csd->size());
6600             c2Buffer = C2Buffer::CreateLinearBuffer(block->share(0, csd->size(), C2Fence{}));
6601         }
6602     } else {
6603         const BufferInfo &info = mPortBuffers[kPortIndexInput][bufferIndex];
6604         const sp<MediaCodecBuffer> &codecInputData = info.mData;
6605 
6606         if (csd->size() > codecInputData->capacity()) {
6607             mErrorLog.log(LOG_TAG, base::StringPrintf(
6608                     "CSD is too large to fit in input buffer "
6609                     "(csd size %zu; buffer cap %zu)",
6610                     csd->size(), codecInputData->capacity()));
6611             return -EINVAL;
6612         }
6613         if (codecInputData->data() == NULL) {
6614             mErrorLog.log(LOG_TAG, base::StringPrintf(
6615                     "Fatal error: input buffer %zu is not properly allocated", bufferIndex));
6616             return -EINVAL;
6617         }
6618 
6619         memcpy(codecInputData->data(), csd->data(), csd->size());
6620     }
6621 
6622     AString errorDetailMsg;
6623 
6624     sp<AMessage> msg = new AMessage(kWhatQueueInputBuffer, this);
6625     msg->setSize("index", bufferIndex);
6626     msg->setSize("offset", 0);
6627     msg->setSize("size", csd->size());
6628     msg->setInt64("timeUs", 0LL);
6629     msg->setInt32("flags", BUFFER_FLAG_CODECCONFIG);
6630     msg->setPointer("errorDetailMsg", &errorDetailMsg);
6631     if (c2Buffer) {
6632         sp<WrapperObject<std::shared_ptr<C2Buffer>>> obj{
6633             new WrapperObject<std::shared_ptr<C2Buffer>>{c2Buffer}};
6634         msg->setObject("c2buffer", obj);
6635     } else if (memory) {
6636         sp<WrapperObject<sp<hardware::HidlMemory>>> obj{
6637             new WrapperObject<sp<hardware::HidlMemory>>{memory}};
6638         msg->setObject("memory", obj);
6639     }
6640 
6641     return onQueueInputBuffer(msg);
6642 }
6643 
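// Transitions the state machine. Entering INITIALIZED or UNINITIALIZED tears down per-session
// state (software renderer, crypto/descrambler, surface, formats, sticky error and mode flags);
// entering UNINITIALIZED additionally returns any straggling buffers. Whether mState is updated
// before or after this cleanup is controlled by the set_state_early flag. Finally the battery
// checker is told whether we are executing and any pending dequeue operations are cancelled.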
6644 void MediaCodec::setState(State newState) {
6645     if (newState == INITIALIZED || newState == UNINITIALIZED) {
6646         delete mSoftRenderer;
6647         mSoftRenderer = NULL;
6648 
6649         if ( mCrypto != NULL ) {
6650             ALOGV("setState: ~mCrypto: %p (%d)",
6651                     mCrypto.get(), (mCrypto != NULL ? mCrypto->getStrongCount() : 0));
6652         }
6653         mCrypto.clear();
6654         mDescrambler.clear();
6655         handleSetSurface(NULL);
6656 
6657         mInputFormat.clear();
6658         mOutputFormat.clear();
6659         if (android::media::codec::provider_->codec_buffer_state_cleanup()) {
6660             mCSD.clear();
6661             mLeftover.clear();
6662         }
6663         mFlags &= ~kFlagOutputFormatChanged;
6664         mFlags &= ~kFlagOutputBuffersChanged;
6665         mFlags &= ~kFlagStickyError;
6666         mFlags &= ~kFlagIsEncoder;
6667         mFlags &= ~kFlagIsAsync;
6668         mStickyError = OK;
6669 
6670         mActivityNotify.clear();
6671         mCallback.clear();
6672         mErrorLog.clear();
6673     }
6674 
6675     if (android::media::codec::provider_->set_state_early()) {
6676         mState = newState;
6677     }
6678 
6679     if (newState == UNINITIALIZED) {
6680         // return any straggling buffers, e.g. if we got here on an error
6681         returnBuffersToCodec();
6682 
6683         // The component is gone; mediaserver is probably back up already,
6684         // and should definitely be back up by the time we try to instantiate
6685         // another component... and the cycle continues.
6686         mFlags &= ~kFlagSawMediaServerDie;
6687     }
6688 
6689     if (!android::media::codec::provider_->set_state_early()) {
6690         mState = newState;
6691     }
6692 
6693     if (mBatteryChecker != nullptr) {
6694         mBatteryChecker->setExecuting(isExecuting());
6695     }
6696 
6697     cancelPendingDequeueOperations();
6698 }
6699 
6700 void MediaCodec::returnBuffersToCodec(bool isReclaim) {
6701     returnBuffersToCodecOnPort(kPortIndexInput, isReclaim);
6702     returnBuffersToCodecOnPort(kPortIndexOutput, isReclaim);
6703 }
6704 
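// Discards every buffer on the given port back to the buffer channel and clears the port's
// available list (plus mLeftover for the input port). During a reclaim, buffers still owned by
// the client are logged and keep their ownership flag; in all other cases ownership is dropped
// before the buffer is handed back.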
6705 void MediaCodec::returnBuffersToCodecOnPort(int32_t portIndex, bool isReclaim) {
6706     CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);
6707     Mutex::Autolock al(mBufferLock);
6708 
6709     if (portIndex == kPortIndexInput) {
6710         mLeftover.clear();
6711     }
6712     for (size_t i = 0; i < mPortBuffers[portIndex].size(); ++i) {
6713         BufferInfo *info = &mPortBuffers[portIndex][i];
6714 
6715         if (info->mData != nullptr) {
6716             sp<MediaCodecBuffer> buffer = info->mData;
6717             if (isReclaim && info->mOwnedByClient) {
6718                 ALOGD("port %d buffer %zu still owned by client when codec is reclaimed",
6719                         portIndex, i);
6720             } else {
6721                 ALOGV("returnBuffersToCodecOnPort: mPortBuffers[%s][%zu] NOT owned by client",
6722                       portIndex == kPortIndexInput ? "in" : "out", i);
6723                 info->mOwnedByClient = false;
6724                 info->mData.clear();
6725             }
6726             mBufferChannel->discardBuffer(buffer);
6727         }
6728     }
6729 
6730     mAvailPortBuffers[portIndex].clear();
6731 }
6732 
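// Records a buffer newly provided by the codec for the given port: the "index" and "buffer"
// entries of msg are stored in mPortBuffers (growing the vector in kNumBuffersAlign steps as
// needed) and the index is appended to the port's available list. Returns the buffer index.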
6733 size_t MediaCodec::updateBuffers(
6734         int32_t portIndex, const sp<AMessage> &msg) {
6735     CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);
6736     size_t index;
6737     CHECK(msg->findSize("index", &index));
6738     sp<RefBase> obj;
6739     CHECK(msg->findObject("buffer", &obj));
6740     sp<MediaCodecBuffer> buffer = static_cast<MediaCodecBuffer *>(obj.get());
6741 
6742     {
6743         Mutex::Autolock al(mBufferLock);
6744         if (mPortBuffers[portIndex].size() <= index) {
6745             mPortBuffers[portIndex].resize(align(index + 1, kNumBuffersAlign));
6746         }
6747         mPortBuffers[portIndex][index].mData = buffer;
6748     }
6749     mAvailPortBuffers[portIndex].push_back(index);
6750 
6751     return index;
6752 }
6753 
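// Queues one input buffer described by msg. The message carries at least "index", "timeUs" and
// "flags", plus either "size"/"offset" for an ordinary buffer, a "c2buffer" or "memory" object in
// block model, or crypto parameters ("subSamples"/"key"/"iv"/... or "cryptoInfos") for secure
// input. After validating client ownership, the buffer is attached or queued through the buffer
// channel (optionally via CryptoAsync); linear blocks larger than the buffer are split into
// mLeftover entries, and on success ownership returns to the codec and input stats are updated.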
6754 status_t MediaCodec::onQueueInputBuffer(const sp<AMessage> &msg) {
6755     size_t index;
6756     size_t offset = 0;
6757     size_t size = 0;
6758     int64_t timeUs = 0;
6759     uint32_t flags = 0;
6760     ScopedTrace trace(ATRACE_TAG, "MediaCodec::onQueueInputBuffer#native");
6761     CHECK(msg->findSize("index", &index));
6762     CHECK(msg->findInt64("timeUs", &timeUs));
6763     CHECK(msg->findInt32("flags", (int32_t *)&flags));
6764     std::shared_ptr<C2Buffer> c2Buffer;
6765     sp<hardware::HidlMemory> memory;
6766     sp<RefBase> obj;
6767     if (msg->findObject("c2buffer", &obj)) {
6768         CHECK(obj);
6769         c2Buffer = static_cast<WrapperObject<std::shared_ptr<C2Buffer>> *>(obj.get())->value;
6770     } else if (msg->findObject("memory", &obj)) {
6771         CHECK(obj);
6772         memory = static_cast<WrapperObject<sp<hardware::HidlMemory>> *>(obj.get())->value;
6773         CHECK(msg->findSize("offset", &offset));
6774     } else {
6775         CHECK(msg->findSize("offset", &offset));
6776     }
6777     const CryptoPlugin::SubSample *subSamples;
6778     size_t numSubSamples = 0;
6779     const uint8_t *key = NULL;
6780     const uint8_t *iv = NULL;
6781     CryptoPlugin::Mode mode = CryptoPlugin::kMode_Unencrypted;
6782 
6783     // We allow the simpler queueInputBuffer API to be used even in
6784     // secure mode, by fabricating a single unencrypted subSample.
6785     CryptoPlugin::SubSample ss;
6786     CryptoPlugin::Pattern pattern;
6787 
6788     if (android::media::codec::provider_->secure_codecs_require_crypto()
6789             && (mFlags & kFlagIsSecure) && !hasCryptoOrDescrambler()) {
6790         mErrorLog.log(LOG_TAG, "Crypto or descrambler must be given for secure codec");
6791         return INVALID_OPERATION;
6792     }
6793 
6794     if (msg->findSize("size", &size)) {
6795         if (hasCryptoOrDescrambler()) {
6796             ss.mNumBytesOfClearData = size;
6797             ss.mNumBytesOfEncryptedData = 0;
6798 
6799             subSamples = &ss;
6800             numSubSamples = 1;
6801             pattern.mEncryptBlocks = 0;
6802             pattern.mSkipBlocks = 0;
6803         }
6804     } else if (!c2Buffer) {
6805         if (!hasCryptoOrDescrambler()) {
6806             ALOGE("[%s] queuing secure buffer without mCrypto or mDescrambler!",
6807                     mComponentName.c_str());
6808             mErrorLog.log(LOG_TAG, "queuing secure buffer without mCrypto or mDescrambler!");
6809             return -EINVAL;
6810         }
6811         sp<RefBase> obj;
6812         if (msg->findObject("cryptoInfos", &obj)) {
6813             CHECK(msg->findSize("ssize", &size));
6814         } else {
6815             CHECK(msg->findPointer("subSamples", (void **)&subSamples));
6816             CHECK(msg->findSize("numSubSamples", &numSubSamples));
6817             CHECK(msg->findPointer("key", (void **)&key));
6818             CHECK(msg->findPointer("iv", (void **)&iv));
6819             CHECK(msg->findInt32("encryptBlocks", (int32_t *)&pattern.mEncryptBlocks));
6820             CHECK(msg->findInt32("skipBlocks", (int32_t *)&pattern.mSkipBlocks));
6821 
6822             int32_t tmp;
6823             CHECK(msg->findInt32("mode", &tmp));
6824 
6825             mode = (CryptoPlugin::Mode)tmp;
6826             size = 0;
6827             for (size_t i = 0; i < numSubSamples; ++i) {
6828                 size += subSamples[i].mNumBytesOfClearData;
6829                 size += subSamples[i].mNumBytesOfEncryptedData;
6830             }
6831         }
6832     }
6833 
6834     if (index >= mPortBuffers[kPortIndexInput].size()) {
6835         mErrorLog.log(LOG_TAG, base::StringPrintf(
6836                 "index out of range (index=%zu)", mPortBuffers[kPortIndexInput].size()));
6837         return -ERANGE;
6838     }
6839 
6840     BufferInfo *info = &mPortBuffers[kPortIndexInput][index];
6841     sp<MediaCodecBuffer> buffer = info->mData;
6842     if (buffer == nullptr) {
6843         mErrorLog.log(LOG_TAG, base::StringPrintf(
6844                 "Fatal error: failed to fetch buffer for index %zu", index));
6845         return -EACCES;
6846     }
6847     if (!info->mOwnedByClient) {
6848         mErrorLog.log(LOG_TAG, base::StringPrintf(
6849                 "client does not own the buffer #%zu", index));
6850         return -EACCES;
6851     }
6852     auto setInputBufferParams = [this, &msg, &buffer]
6853         (int64_t timeUs, uint32_t flags = 0) -> status_t {
6854         status_t err = OK;
6855         sp<RefBase> obj;
6856         if (msg->findObject("accessUnitInfo", &obj)) {
6857             buffer->meta()->setObject("accessUnitInfo", obj);
6858         }
6859         buffer->meta()->setInt64("timeUs", timeUs);
6860         if (flags & BUFFER_FLAG_EOS) {
6861             buffer->meta()->setInt32("eos", true);
6862         }
6863 
6864         if (flags & BUFFER_FLAG_CODECCONFIG) {
6865             buffer->meta()->setInt32("csd", true);
6866         }
6867         bool isBufferDecodeOnly = ((flags & BUFFER_FLAG_DECODE_ONLY) != 0);
6868         if (isBufferDecodeOnly) {
6869             buffer->meta()->setInt32("decode-only", true);
6870         }
6871         if (mTunneled && !isBufferDecodeOnly && !(flags & BUFFER_FLAG_CODECCONFIG)) {
6872             TunnelPeekState previousState = mTunnelPeekState;
6873             switch (mTunnelPeekState) {
6874                 case TunnelPeekState::kEnabledNoBuffer:
6875                     buffer->meta()->setInt32("tunnel-first-frame", 1);
6876                     mTunnelPeekState = TunnelPeekState::kEnabledQueued;
6877                     ALOGV("TunnelPeekState: %s -> %s",
6878                             asString(previousState),
6879                             asString(mTunnelPeekState));
6880                     break;
6881                 case TunnelPeekState::kDisabledNoBuffer:
6882                     buffer->meta()->setInt32("tunnel-first-frame", 1);
6883                     mTunnelPeekState = TunnelPeekState::kDisabledQueued;
6884                     ALOGV("TunnelPeekState: %s -> %s",
6885                             asString(previousState),
6886                             asString(mTunnelPeekState));
6887                     break;
6888                 default:
6889                     break;
6890             }
6891         }
6892         return err;
6893     };
6894     auto buildCryptoInfoAMessage = [&](const sp<AMessage> & cryptoInfo, int32_t action) {
6895         // set decrypt Action
6896         cryptoInfo->setInt32("action", action);
6897         cryptoInfo->setObject("buffer", buffer);
6898         cryptoInfo->setInt32("secure", mFlags & kFlagIsSecure);
6899         sp<RefBase> obj;
6900         if (msg->findObject("cryptoInfos", &obj)) {
6901             // this object is standalone when created (no copy required here)
6902             buffer->meta()->setObject("cryptoInfos", obj);
6903         } else {
6904             size_t key_len = (key != nullptr)? 16 : 0;
6905             size_t iv_len = (iv != nullptr)? 16 : 0;
6906             sp<ABuffer> shared_key;
6907             sp<ABuffer> shared_iv;
6908             if (key_len > 0) {
6909                 shared_key = ABuffer::CreateAsCopy((void*)key, key_len);
6910             }
6911             if (iv_len > 0) {
6912                 shared_iv = ABuffer::CreateAsCopy((void*)iv, iv_len);
6913             }
6914             sp<ABuffer> subSamples_buffer =
6915                 new ABuffer(sizeof(CryptoPlugin::SubSample) * numSubSamples);
6916             CryptoPlugin::SubSample * samples =
6917                (CryptoPlugin::SubSample *)(subSamples_buffer.get()->data());
6918             for (size_t s = 0; s < numSubSamples; s++) {
6919                 samples[s].mNumBytesOfClearData = subSamples[s].mNumBytesOfClearData;
6920                 samples[s].mNumBytesOfEncryptedData = subSamples[s].mNumBytesOfEncryptedData;
6921             }
6922             cryptoInfo->setBuffer("key", shared_key);
6923             cryptoInfo->setBuffer("iv", shared_iv);
6924             cryptoInfo->setInt32("mode", (int)mode);
6925             cryptoInfo->setInt32("encryptBlocks", pattern.mEncryptBlocks);
6926             cryptoInfo->setInt32("skipBlocks", pattern.mSkipBlocks);
6927             cryptoInfo->setBuffer("subSamples", subSamples_buffer);
6928             cryptoInfo->setSize("numSubSamples", numSubSamples);
6929         }
6930     };
6931     if (c2Buffer || memory) {
6932         sp<AMessage> tunings = NULL;
6933         if (msg->findMessage("tunings", &tunings) && tunings != NULL) {
6934             onSetParameters(tunings);
6935         }
6936         status_t err = OK;
6937         if (c2Buffer) {
6938             err = mBufferChannel->attachBuffer(c2Buffer, buffer);
6939             // Avoid an unnecessary copy in the single-info case.
6940             if (msg->findObject("accessUnitInfo", &obj)) {
6941                 sp<BufferInfosWrapper> infos{(BufferInfosWrapper*)(obj.get())};
6942                 if (infos->value.size() == 1) {
6943                    msg->removeEntryByName("accessUnitInfo");
6944                 }
6945             }
6946         } else if (memory) {
6947             AString errorDetailMsg;
6948             if (msg->findObject("cryptoInfos", &obj)) {
6949                 buffer->meta()->setSize("ssize", size);
6950                 buffer->meta()->setObject("cryptoInfos", obj);
6951                 if (msg->findObject("accessUnitInfo", &obj)) {
6952                     // the same reference is used here and in
6953                     // setInputBufferParams()
6954                     buffer->meta()->setObject("accessUnitInfo", obj);
6955                 }
6956                 err = mBufferChannel->attachEncryptedBuffers(
6957                     memory,
6958                     offset,
6959                     buffer,
6960                     (mFlags & kFlagIsSecure),
6961                     &errorDetailMsg);
6962             } else {
6963                 err = mBufferChannel->attachEncryptedBuffer(
6964                         memory, (mFlags & kFlagIsSecure), key, iv, mode, pattern,
6965                         offset, subSamples, numSubSamples, buffer, &errorDetailMsg);
6966             }
6967             if (err != OK && hasCryptoOrDescrambler()
6968                     && (mFlags & kFlagUseCryptoAsync)) {
6969                 // create error detail
6970                 sp<AMessage> cryptoErrorInfo = new AMessage();
6971                 if (msg->findObject("cryptoInfos", &obj)) {
6972                     cryptoErrorInfo->setObject("cryptoInfos", obj);
6973                 } else {
6974                     buildCryptoInfoAMessage(cryptoErrorInfo, CryptoAsync::kActionDecrypt);
6975                 }
6976                 cryptoErrorInfo->setInt32("err", err);
6977                 cryptoErrorInfo->setInt32("actionCode", ACTION_CODE_FATAL);
6978                 cryptoErrorInfo->setString("errorDetail", errorDetailMsg);
6979                 onCryptoError(cryptoErrorInfo);
6980                 // report the crypto error through the callback,
6981                 // but still trigger the codec IllegalStateException.
6982                 err = INVALID_OPERATION;
6983             }
6984         } else {
6985             mErrorLog.log(LOG_TAG, "Fatal error: invalid queue request without a buffer");
6986             err = UNKNOWN_ERROR;
6987         }
6988         if (err == OK && !buffer->asC2Buffer()
6989                 && c2Buffer && c2Buffer->data().type() == C2BufferData::LINEAR) {
6990             C2ConstLinearBlock block{c2Buffer->data().linearBlocks().front()};
6991             if (block.size() > buffer->size()) {
6992                 C2ConstLinearBlock leftover = block.subBlock(
6993                         block.offset() + buffer->size(), block.size() - buffer->size());
6994                 sp<WrapperObject<std::shared_ptr<C2Buffer>>> obj{
6995                     new WrapperObject<std::shared_ptr<C2Buffer>>{
6996                         C2Buffer::CreateLinearBuffer(leftover)}};
6997                 msg->setObject("c2buffer", obj);
6998                 mLeftover.push_front(msg);
6999                 // Not sending EOS if we have leftovers
7000                 flags &= ~BUFFER_FLAG_EOS;
7001             }
7002         }
7003         offset = buffer->offset();
7004         size = buffer->size();
7005         if (err != OK) {
7006             ALOGE("block model buffer attach failed: err = %s (%d)",
7007                   StrMediaError(err).c_str(), err);
7008             return err;
7009         }
7010     }
7011 
7012     if (offset + size > buffer->capacity()) {
7013         mErrorLog.log(LOG_TAG, base::StringPrintf(
7014                 "buffer offset and size goes beyond the capacity: "
7015                 "offset=%zu, size=%zu, cap=%zu",
7016                 offset, size, buffer->capacity()));
7017         return -EINVAL;
7018     }
7019     buffer->setRange(offset, size);
7020     status_t err = OK;
7021     err = setInputBufferParams(timeUs, flags);
7022     if (err != OK) {
7023         return -EINVAL;
7024     }
7025 
7026     int32_t usedMaxInputSize = mApiUsageMetrics.inputBufferSize.usedMax;
7027     mApiUsageMetrics.inputBufferSize.usedMax = size > usedMaxInputSize ? size : usedMaxInputSize;
7028 
7029     if (hasCryptoOrDescrambler() && !c2Buffer && !memory) {
7030         AString *errorDetailMsg;
7031         CHECK(msg->findPointer("errorDetailMsg", (void **)&errorDetailMsg));
7032         // Notify mCrypto of video resolution changes
7033         if (mTunneled && mCrypto != NULL) {
7034             int32_t width, height;
7035             if (mInputFormat->findInt32("width", &width) &&
7036                 mInputFormat->findInt32("height", &height) && width > 0 && height > 0) {
7037                 if (width != mTunneledInputWidth || height != mTunneledInputHeight) {
7038                     mTunneledInputWidth = width;
7039                     mTunneledInputHeight = height;
7040                     mCrypto->notifyResolution(width, height);
7041                 }
7042             }
7043         }
7044         if (mCryptoAsync) {
7045             // prepare a message and enqueue
7046             sp<AMessage> cryptoInfo = new AMessage();
7047             buildCryptoInfoAMessage(cryptoInfo, CryptoAsync::kActionDecrypt);
7048             err = mCryptoAsync->decrypt(cryptoInfo);
7049         } else if (msg->findObject("cryptoInfos", &obj)) {
7050             buffer->meta()->setObject("cryptoInfos", obj);
7051             err = mBufferChannel->queueSecureInputBuffers(
7052                     buffer,
7053                     (mFlags & kFlagIsSecure),
7054                     errorDetailMsg);
7055         } else {
7056             err = mBufferChannel->queueSecureInputBuffer(
7057                 buffer,
7058                 (mFlags & kFlagIsSecure),
7059                 key,
7060                 iv,
7061                 mode,
7062                 pattern,
7063                 subSamples,
7064                 numSubSamples,
7065                 errorDetailMsg);
7066         }
7067         if (err != OK) {
7068             mediametrics_setInt32(mMetricsHandle, kCodecQueueSecureInputBufferError, err);
7069             ALOGW("Log queueSecureInputBuffer error: %d", err);
7070         }
7071     } else {
7072         err = mBufferChannel->queueInputBuffer(buffer);
7073         if (err != OK) {
7074             mediametrics_setInt32(mMetricsHandle, kCodecQueueInputBufferError, err);
7075             ALOGW("Log queueInputBuffer error: %d", err);
7076         }
7077     }
7078 
7079     if (err == OK) {
7080         if (mTunneled && (flags & (BUFFER_FLAG_DECODE_ONLY | BUFFER_FLAG_END_OF_STREAM)) == 0) {
7081             mVideoRenderQualityTracker.onTunnelFrameQueued(timeUs);
7082         }
7083 
7084         // synchronization boundary for getBufferAndFormat
7085         Mutex::Autolock al(mBufferLock);
7086         ALOGV("onQueueInputBuffer: mPortBuffers[in][%zu] NOT owned by client", index);
7087         info->mOwnedByClient = false;
7088         info->mData.clear();
7089 
7090         statsBufferSent(timeUs, buffer);
7091     }
7092 
7093     return err;
7094 }
7095 
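// Requeues the oldest leftover input message (created when a linear block did not fit into a
// single input buffer) using the input buffer slot that just became available at `index`.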
7096 status_t MediaCodec::handleLeftover(size_t index) {
7097     if (mLeftover.empty()) {
7098         return OK;
7099     }
7100     sp<AMessage> msg = mLeftover.front();
7101     mLeftover.pop_front();
7102     msg->setSize("index", index);
7103     ALOGV("handleLeftover(%zu)", index);
7104     return onQueueInputBuffer(msg);
7105 }
7106 
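// Flattens a list of rendered-frame records into msg as indexed key pairs and returns how many
// frames were added. Frames whose render time is negative (dropped from tracking) are skipped.
// The resulting message looks roughly like:
//
//     "0-media-time-us" -> media timestamp of the first rendered frame
//     "0-system-nano"   -> render time of the first rendered frame
//     "1-media-time-us" -> ...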
7107 template<typename T>
7108 static size_t CreateFramesRenderedMessageInternal(const std::list<T> &done, sp<AMessage> &msg) {
7109     size_t index = 0;
7110     for (typename std::list<T>::const_iterator it = done.cbegin(); it != done.cend(); ++it) {
7111         if (it->getRenderTimeNs() < 0) {
7112             continue; // dropped frame from tracking
7113         }
7114         msg->setInt64(base::StringPrintf("%zu-media-time-us", index).c_str(), it->getMediaTimeUs());
7115         msg->setInt64(base::StringPrintf("%zu-system-nano", index).c_str(), it->getRenderTimeNs());
7116         ++index;
7117     }
7118     return index;
7119 }
7120 
7121 //static
7122 size_t MediaCodec::CreateFramesRenderedMessage(
7123         const std::list<RenderedFrameInfo> &done, sp<AMessage> &msg) {
7124     return CreateFramesRenderedMessageInternal(done, msg);
7125 }
7126 
7127 //static
7128 size_t MediaCodec::CreateFramesRenderedMessage(
7129         const std::list<FrameRenderTracker::Info> &done, sp<AMessage> &msg) {
7130     return CreateFramesRenderedMessageInternal(done, msg);
7131 }
7132 
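// Releases one client-owned output buffer. If "render" is set and the buffer has data, it is
// rendered either through the software renderer or the buffer channel, using "timestampNs" if
// the client supplied one and the media timestamp otherwise; for surface-to-display sessions a
// kWhatPollForRenderedBuffers message is scheduled ~100ms after the render time to seed
// onFrameRendered callbacks. Otherwise the buffer is discarded (and counted as a skipped frame
// for render-quality tracking when applicable).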
7133 status_t MediaCodec::onReleaseOutputBuffer(const sp<AMessage> &msg) {
7134     size_t index;
7135     CHECK(msg->findSize("index", &index));
7136 
7137     int32_t render;
7138     if (!msg->findInt32("render", &render)) {
7139         render = 0;
7140     }
7141 
7142     if (!isExecuting()) {
7143         mErrorLog.log(LOG_TAG, base::StringPrintf(
7144                 "releaseOutputBuffer() is valid at Executing states; currently %s",
7145                 apiStateString().c_str()));
7146         return -EINVAL;
7147     }
7148 
7149     if (index >= mPortBuffers[kPortIndexOutput].size()) {
7150         mErrorLog.log(LOG_TAG, base::StringPrintf(
7151                 "index out of range (index=%zu)", mPortBuffers[kPortIndexOutput].size()));
7152         return -ERANGE;
7153     }
7154 
7155     BufferInfo *info = &mPortBuffers[kPortIndexOutput][index];
7156 
7157     if (!info->mOwnedByClient) {
7158         mErrorLog.log(LOG_TAG, base::StringPrintf(
7159                 "client does not own the buffer #%zu", index));
7160         return -EACCES;
7161     }
7162     if (info->mData == nullptr) {
7163         mErrorLog.log(LOG_TAG, base::StringPrintf(
7164                 "Fatal error: null buffer for index %zu", index));
7165         return -EACCES;
7166     }
7167 
7168     // synchronization boundary for getBufferAndFormat
7169     sp<MediaCodecBuffer> buffer;
7170     {
7171         Mutex::Autolock al(mBufferLock);
7172         ALOGV("onReleaseOutputBuffer: mPortBuffers[out][%zu] NOT owned by client", index);
7173         info->mOwnedByClient = false;
7174         buffer = info->mData;
7175         info->mData.clear();
7176     }
7177 
7178     if (render && buffer->size() != 0) {
7179         int64_t mediaTimeUs = INT64_MIN;
7180         buffer->meta()->findInt64("timeUs", &mediaTimeUs);
7181 
7182         bool noRenderTime = false;
7183         int64_t renderTimeNs = 0;
7184         if (!msg->findInt64("timestampNs", &renderTimeNs)) {
7185             // use media timestamp if client did not request a specific render timestamp
7186             ALOGV("using buffer PTS of %lld", (long long)mediaTimeUs);
7187             renderTimeNs = mediaTimeUs * 1000;
7188             noRenderTime = true;
7189         }
7190 
7191         if (mSoftRenderer != NULL) {
7192             std::list<FrameRenderTracker::Info> doneFrames = mSoftRenderer->render(
7193                     buffer->data(), buffer->size(), mediaTimeUs, renderTimeNs,
7194                     mPortBuffers[kPortIndexOutput].size(), buffer->format());
7195 
7196             // if we are running, notify rendered frames
7197             if (!doneFrames.empty() && mState == STARTED && mOnFrameRenderedNotification != NULL) {
7198                 sp<AMessage> notify = mOnFrameRenderedNotification->dup();
7199                 sp<AMessage> data = new AMessage;
7200                 if (CreateFramesRenderedMessage(doneFrames, data)) {
7201                     notify->setMessage("data", data);
7202                     notify->post();
7203                 }
7204             }
7205         }
7206 
7207         // If rendering to the screen, then schedule a time in the future to poll to see if this
7208         // frame was ever rendered to seed onFrameRendered callbacks.
7209         if (mAreRenderMetricsEnabled && mIsSurfaceToDisplay) {
7210             if (mediaTimeUs != INT64_MIN) {
7211                 noRenderTime ? mVideoRenderQualityTracker.onFrameReleased(mediaTimeUs)
7212                              : mVideoRenderQualityTracker.onFrameReleased(mediaTimeUs,
7213                                                                           renderTimeNs);
7214             }
7215             // can't initialize this in the constructor because the Looper parent class needs to be
7216             // initialized first
7217             if (mMsgPollForRenderedBuffers == nullptr) {
7218                 mMsgPollForRenderedBuffers = new AMessage(kWhatPollForRenderedBuffers, this);
7219             }
7220             // Schedule the poll to occur 100ms after the render time - should be safe for
7221             // determining if the frame was ever rendered. If no render time was specified, the
7222             // presentation timestamp is used instead, which almost certainly occurs in the past,
7223             // since it's almost always a zero-based offset from the start of the stream. In these
7224             // scenarios, we expect the frame to be rendered with no delay.
7225             int64_t nowUs = ALooper::GetNowUs();
7226             int64_t renderTimeUs = renderTimeNs / 1000;
7227             int64_t delayUs = renderTimeUs < nowUs ? 0 : renderTimeUs - nowUs;
7228             delayUs += 100 * 1000; /* 100ms in microseconds */
7229             status_t err =
7230                     mMsgPollForRenderedBuffers->postUnique(/* token= */ mMsgPollForRenderedBuffers,
7231                                                            delayUs);
7232             if (err != OK) {
7233                 ALOGE("unexpected failure to post pollForRenderedBuffers: %d", err);
7234             }
7235         }
7236         status_t err = mBufferChannel->renderOutputBuffer(buffer, renderTimeNs);
7237 
7238         if (err == NO_INIT) {
7239             mErrorLog.log(LOG_TAG, "rendering to non-initialized(obsolete) surface");
7240             return err;
7241         }
7242         if (err != OK) {
7243             ALOGI("rendring output error %d", err);
7244         }
7245     } else {
7246         if (mIsSurfaceToDisplay && buffer->size() != 0) {
7247             int64_t mediaTimeUs = INT64_MIN;
7248             if (buffer->meta()->findInt64("timeUs", &mediaTimeUs)) {
7249                 mVideoRenderQualityTracker.onFrameSkipped(mediaTimeUs);
7250             }
7251         }
7252         mBufferChannel->discardBuffer(buffer);
7253     }
7254 
7255     return OK;
7256 }
7257 
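// Returns the BufferInfo for the next available buffer on the port without consuming it, or
// nullptr if no buffer is currently available.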
7258 MediaCodec::BufferInfo *MediaCodec::peekNextPortBuffer(int32_t portIndex) {
7259     CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);
7260 
7261     std::list<size_t> *availBuffers = &mAvailPortBuffers[portIndex];
7262 
7263     if (availBuffers->empty()) {
7264         return nullptr;
7265     }
7266 
7267     return &mPortBuffers[portIndex][*availBuffers->begin()];
7268 }
7269 
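// Pops the next available buffer index on the port and hands ownership to the client, copying
// "image-data" and the crop rect from the buffer's format into its meta so the client can read
// them. Returns the index, or -EAGAIN if no buffer is available.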
7270 ssize_t MediaCodec::dequeuePortBuffer(int32_t portIndex) {
7271     CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);
7272 
7273     BufferInfo *info = peekNextPortBuffer(portIndex);
7274     if (!info) {
7275         return -EAGAIN;
7276     }
7277 
7278     std::list<size_t> *availBuffers = &mAvailPortBuffers[portIndex];
7279     size_t index = *availBuffers->begin();
7280     CHECK_EQ(info, &mPortBuffers[portIndex][index]);
7281     availBuffers->erase(availBuffers->begin());
7282 
7283     {
7284         Mutex::Autolock al(mBufferLock);
7285         ALOGV("dequeuePortBuffer: mPortBuffers[%s][%zu] checking if not owned by client",
7286               portIndex == kPortIndexInput ? "in" : "out", index);
7287         CHECK(!info->mOwnedByClient);
7288         info->mOwnedByClient = true;
7289 
7290         // set image-data
7291         if (info->mData->format() != NULL) {
7292             sp<ABuffer> imageData;
7293             if (info->mData->format()->findBuffer("image-data", &imageData)) {
7294                 info->mData->meta()->setBuffer("image-data", imageData);
7295             }
7296             int32_t left, top, right, bottom;
7297             if (info->mData->format()->findRect("crop", &left, &top, &right, &bottom)) {
7298                 info->mData->meta()->setRect("crop-rect", left, top, right, bottom);
7299             }
7300         }
7301     }
7302 
7303     return index;
7304 }
7305 
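// Lazily creates the placeholder surface used when a video decoder is detached from its output
// surface. The consumer usage bits are copied from the current surface when possible, falling
// back to HW_TEXTURE | HW_COMPOSER so buffers remain displayable on common consumers. Returns
// nullptr for encoders and non-video codecs.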
7306 sp<Surface> MediaCodec::getOrCreateDetachedSurface() {
7307     if (mDomain != DOMAIN_VIDEO || (mFlags & kFlagIsEncoder)) {
7308         return nullptr;
7309     }
7310 
7311     if (!mDetachedSurface) {
7312         uint64_t usage = 0;
7313         if (!mSurface || mSurface->getConsumerUsage(&usage) != OK) {
7314             // By default prepare buffer to be displayed on any of the common surfaces
7315             usage = (GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_HW_COMPOSER);
7316         }
7317         mDetachedSurface.reset(new ReleaseSurface(usage));
7318     }
7319 
7320     return mDetachedSurface->getSurface();
7321 }
7322 
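// Connects the codec to a new output surface. Reconnecting to the very same surface returns
// ALREADY_EXISTS immediately; any other ALREADY_EXISTS reported while connecting is converted to
// BAD_VALUE. Each successful connect stamps the surface with a unique generation number, laid
// out roughly as:
//
//     generation = (getpid() << 10) | (monotonic counter & 0x3FF)
//
// so buffers from a previous connection can be told apart. The surface is then disconnected and
// reconnected with an OnBufferReleasedListener, legacy buffer dropping is disabled when frame
// dropping is not allowed, and mIsSurfaceToDisplay records whether buffers go to the composer.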
7323 status_t MediaCodec::connectToSurface(const sp<Surface> &surface, uint32_t *generation) {
7324     status_t err = OK;
7325     if (surface != NULL) {
7326         uint64_t oldId, newId;
7327         if (mSurface != NULL
7328                 && surface->getUniqueId(&newId) == NO_ERROR
7329                 && mSurface->getUniqueId(&oldId) == NO_ERROR
7330                 && newId == oldId) {
7331             ALOGI("[%s] connecting to the same surface. Nothing to do.", mComponentName.c_str());
7332             return ALREADY_EXISTS;
7333         }
7334 
7335         // in case we don't connect, ensure that we don't signal the surface is
7336         // connected to the screen
7337         mIsSurfaceToDisplay = false;
7338 
7339         err = nativeWindowConnect(surface.get(), "connectToSurface");
7340         if (err == OK) {
7341             // Require a fresh set of buffers after each connect by using a unique generation
7342             // number. Rely on the fact that max supported process id by Linux is 2^22.
7343             // PID is never 0 so we don't have to worry that we use the default generation of 0.
7344             // TODO: come up with a unique scheme if other producers also set the generation number.
7345             static uint32_t sSurfaceGeneration = 0;
7346             *generation = (getpid() << 10) | (++sSurfaceGeneration & ((1 << 10) - 1));
7347             surface->setGenerationNumber(*generation);
7348             ALOGI("[%s] setting surface generation to %u", mComponentName.c_str(), *generation);
7349 
7350             // HACK: clear any free buffers. Remove this once connect does it automatically.
7351             // This is needed as the consumer may be holding onto stale frames that it can reattach
7352             // to this surface after disconnect/connect, and those free frames would inherit the new
7353             // generation number. Disconnecting after setting a unique generation prevents this.
7354             nativeWindowDisconnect(surface.get(), "connectToSurface(reconnect)");
7355             sp<SurfaceListener> listener =
7356                     new OnBufferReleasedListener(*generation, mBufferChannel);
7357             err = surfaceConnectWithListener(
7358                     surface, listener, "connectToSurface(reconnect-with-listener)");
7359         }
7360 
7361         if (err != OK) {
7362             *generation = 0;
7363             ALOGE("nativeWindowConnect/surfaceConnectWithListener returned an error: %s (%d)",
7364                     strerror(-err), err);
7365         } else {
7366             if (!mAllowFrameDroppingBySurface) {
7367                 disableLegacyBufferDropPostQ(surface);
7368             }
7369             // keep track of whether the buffers of the connected surface go to the screen
7370             int result = 0;
7371             surface->query(NATIVE_WINDOW_QUEUES_TO_WINDOW_COMPOSER, &result);
7372             mIsSurfaceToDisplay = result != 0;
7373         }
7374     }
7375     // do not return ALREADY_EXISTS unless surfaces are the same
7376     return err == ALREADY_EXISTS ? BAD_VALUE : err;
7377 }
7378 
7379 status_t MediaCodec::disconnectFromSurface() {
7380     status_t err = OK;
7381     if (mSurface != NULL) {
7382         // Resetting generation is not technically needed, but there is no need to keep it either
7383         mSurface->setGenerationNumber(0);
7384         err = nativeWindowDisconnect(mSurface.get(), "disconnectFromSurface");
7385         if (err != OK) {
7386             ALOGW("nativeWindowDisconnect returned an error: %s (%d)", strerror(-err), err);
7387         }
7388         // assume disconnected even on error
7389         mSurface.clear();
7390         mSurfaceGeneration = 0;
7391         mIsSurfaceToDisplay = false;
7392     }
7393     return err;
7394 }
7395 
7396 status_t MediaCodec::handleSetSurface(const sp<Surface> &surface, bool callCodec, bool onShutDown) {
7397     uint32_t generation;
7398     status_t err = OK;
7399     if (surface != nullptr) {
7400         err = connectToSurface(surface, &generation);
7401         if (err == ALREADY_EXISTS) {
7402             // reconnecting to same surface
7403             return OK;
7404         }
7405 
7406         if (err == OK && callCodec) {
7407             if (mFlags & kFlagUsesSoftwareRenderer) {
7408                 if (mSoftRenderer != NULL
7409                         && (mFlags & kFlagPushBlankBuffersOnShutdown)) {
7410                     pushBlankBuffersToNativeWindow(mSurface.get());
7411                 }
7412                 // do not create a new software renderer on shutdown (release)
7413                 // as it will not be used anyway
7414                 if (!onShutDown) {
7415                     surface->setDequeueTimeout(-1);
7416                     mSoftRenderer = new SoftwareRenderer(surface);
7417                     // TODO: check if this was successful
7418                 }
7419             } else {
7420                 err = mCodec->setSurface(surface, generation);
7421             }
7422 
7423             mReliabilityContextMetrics.setOutputSurfaceCount++;
7424         }
7425     }
7426 
7427     if (err == OK) {
7428         if (mSurface != NULL) {
7429             (void)disconnectFromSurface();
7430         }
7431 
7432         if (surface != NULL) {
7433             mSurface = surface;
7434             mSurfaceGeneration = generation;
7435         }
7436     }
7437 
7438     return err;
7439 }
7440 
7441 status_t MediaCodec::handleSetSurface(const sp<Surface> &surface) {
7442     if (android::media::codec::provider_->null_output_surface_support()) {
7443         return handleSetSurface(surface, false /* callCodec */);
7444     }
7445 
7446     status_t err = OK;
7447     if (mSurface != NULL) {
7448         (void)disconnectFromSurface();
7449     }
7450     if (surface != NULL) {
7451         uint32_t generation;
7452         err = connectToSurface(surface, &generation);
7453         if (err == OK) {
7454             mSurface = surface;
7455             mSurfaceGeneration = generation;
7456         }
7457     }
7458     return err;
7459 }
7460 
7461 void MediaCodec::onInputBufferAvailable() {
7462     int32_t index;
7463     while ((index = dequeuePortBuffer(kPortIndexInput)) >= 0) {
7464         sp<AMessage> msg = mCallback->dup();
7465         msg->setInt32("callbackID", CB_INPUT_AVAILABLE);
7466         msg->setInt32("index", index);
7467         msg->post();
7468     }
7469 }
7470 
7471 void MediaCodec::onOutputBufferAvailable() {
7472     ScopedTrace trace(ATRACE_TAG, "MediaCodec::onOutputBufferAvailable#native");
7473     int32_t index;
7474     while ((index = dequeuePortBuffer(kPortIndexOutput)) >= 0) {
7475         if (discardDecodeOnlyOutputBuffer(index)) {
7476             continue;
7477         }
7478         sp<AMessage> msg = mCallback->dup();
7479         const sp<MediaCodecBuffer> &buffer =
7480             mPortBuffers[kPortIndexOutput][index].mData;
7481         int32_t outputCallbackID = CB_OUTPUT_AVAILABLE;
7482         sp<RefBase> accessUnitInfoObj;
7483         msg->setInt32("index", index);
7484         msg->setSize("offset", buffer->offset());
7485         msg->setSize("size", buffer->size());
7486 
7487         int64_t timeUs;
7488         CHECK(buffer->meta()->findInt64("timeUs", &timeUs));
7489 
7490         msg->setInt64("timeUs", timeUs);
7491 
7492         int32_t flags;
7493         CHECK(buffer->meta()->findInt32("flags", &flags));
7494 
7495         msg->setInt32("flags", flags);
7496         buffer->meta()->findObject("accessUnitInfo", &accessUnitInfoObj);
7497         if (accessUnitInfoObj) {
7498             outputCallbackID = CB_LARGE_FRAME_OUTPUT_AVAILABLE;
7499             msg->setObject("accessUnitInfo", accessUnitInfoObj);
7500             sp<BufferInfosWrapper> auInfo(
7501                     (decltype(auInfo.get()))accessUnitInfoObj.get());
7502             auInfo->value.back().mFlags |= flags & BUFFER_FLAG_END_OF_STREAM;
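            // In large-audio-frame mode one output buffer carries several access units; the
            // buffer-level EOS flag is folded into the last entry of the attached
            // BufferInfosWrapper so the final access unit also reports end of stream.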
7503         }
7504         msg->setInt32("callbackID", outputCallbackID);
7505 
7506         statsBufferReceived(timeUs, buffer);
7507 
7508         msg->post();
7509     }
7510 }
7511 void MediaCodec::onCryptoError(const sp<AMessage> & msg) {
7512     if (mCallback != NULL) {
7513         sp<AMessage> cb_msg = mCallback->dup();
7514         cb_msg->setInt32("callbackID", CB_CRYPTO_ERROR);
7515         cb_msg->extend(msg);
7516         cb_msg->post();
7517     }
7518 }
7519 void MediaCodec::onError(status_t err, int32_t actionCode, const char *detail) {
7520     if (mCallback != NULL) {
7521         sp<AMessage> msg = mCallback->dup();
7522         msg->setInt32("callbackID", CB_ERROR);
7523         msg->setInt32("err", err);
7524         msg->setInt32("actionCode", actionCode);
7525 
7526         if (detail != NULL) {
7527             msg->setString("detail", detail);
7528         }
7529 
7530         msg->post();
7531     }
7532 }
7533 
7534 void MediaCodec::onOutputFormatChanged() {
7535     if (mCallback != NULL) {
7536         sp<AMessage> msg = mCallback->dup();
7537         msg->setInt32("callbackID", CB_OUTPUT_FORMAT_CHANGED);
7538         msg->setMessage("format", mOutputFormat);
7539         msg->post();
7540     }
7541 }
7542 
7543 void MediaCodec::onRequiredResourcesChanged() {
7544     if (mCallback != nullptr) {
7545         // Post the callback
7546         sp<AMessage> msg = mCallback->dup();
7547         msg->setInt32("callbackID", CB_REQUIRED_RESOURCES_CHANGED);
7548         msg->post();
7549     }
7550 }
7551 
7552 void MediaCodec::postActivityNotificationIfPossible() {
7553     if (mActivityNotify == NULL) {
7554         return;
7555     }
7556 
7557     bool isErrorOrOutputChanged =
7558             (mFlags & (kFlagStickyError
7559                     | kFlagOutputBuffersChanged
7560                     | kFlagOutputFormatChanged));
7561 
7562     if (isErrorOrOutputChanged
7563             || !mAvailPortBuffers[kPortIndexInput].empty()
7564             || !mAvailPortBuffers[kPortIndexOutput].empty()) {
7565         mActivityNotify->setInt32("input-buffers",
7566                 mAvailPortBuffers[kPortIndexInput].size());
7567 
7568         if (isErrorOrOutputChanged) {
7569             // we want consumer to dequeue as many times as it can
7570             mActivityNotify->setInt32("output-buffers", INT32_MAX);
7571         } else {
7572             mActivityNotify->setInt32("output-buffers",
7573                     mAvailPortBuffers[kPortIndexOutput].size());
7574         }
7575         mActivityNotify->post();
7576         mActivityNotify.clear();
7577     }
7578 }
7579 
7580 status_t MediaCodec::setParameters(const sp<AMessage> &params) {
7581     sp<AMessage> msg = new AMessage(kWhatSetParameters, this);
7582     msg->setMessage("params", params);
7583 
7584     sp<AMessage> response;
7585     return PostAndAwaitResponse(msg, &response);
7586 }
7587 
7588 status_t MediaCodec::onSetParameters(const sp<AMessage> &params) {
7589     if (mState == UNINITIALIZED || mState == INITIALIZING) {
7590         return NO_INIT;
7591     }
7592     updateLowLatency(params);
7593     updateCodecImportance(params);
7594     if (android::media::tv::flags::apply_picture_profiles()) {
7595         updatePictureProfile(params, false /* applyDefaultProfile */);
7596     }
7597     mapFormat(mComponentName, params, nullptr, false);
7598     updateTunnelPeek(params);
7599     mCodec->signalSetParameters(params);
7600 
7601     return OK;
7602 }
7603 
7604 status_t MediaCodec::amendOutputFormatWithCodecSpecificData(
7605         const sp<MediaCodecBuffer> &buffer) {
7606     AString mime;
7607     CHECK(mOutputFormat->findString("mime", &mime));
7608 
7609     if (!strcasecmp(mime.c_str(), MEDIA_MIMETYPE_VIDEO_AVC)) {
7610         // Codec specific data should be SPS and PPS in a single buffer,
7611         // each prefixed by a startcode (0x00 0x00 0x00 0x01).
7612         // We separate the two and put them into the output format
7613         // under the keys "csd-0" and "csd-1".
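        // Illustrative layout (byte values are examples only):
        //     00 00 00 01 <SPS ...> 00 00 00 01 <PPS ...>
        // The loop below re-emits each NAL unit with a 4-byte start code, so the SPS
        // becomes "csd-0" and the PPS becomes "csd-1".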
7614 
7615         unsigned csdIndex = 0;
7616 
7617         const uint8_t *data = buffer->data();
7618         size_t size = buffer->size();
7619 
7620         const uint8_t *nalStart;
7621         size_t nalSize;
7622         while (getNextNALUnit(&data, &size, &nalStart, &nalSize, true) == OK) {
7623             sp<ABuffer> csd = new ABuffer(nalSize + 4);
7624             memcpy(csd->data(), "\x00\x00\x00\x01", 4);
7625             memcpy(csd->data() + 4, nalStart, nalSize);
7626 
7627             mOutputFormat->setBuffer(
7628                     base::StringPrintf("csd-%u", csdIndex).c_str(), csd);
7629 
7630             ++csdIndex;
7631         }
7632 
7633         if (csdIndex != 2) {
7634             mErrorLog.log(LOG_TAG, base::StringPrintf(
7635                     "codec config data contains %u NAL units; expected 2.", csdIndex));
7636             return ERROR_MALFORMED;
7637         }
7638     } else {
7639         // For everything else we just stash the codec specific data into
7640         // the output format as a single piece of csd under "csd-0".
7641         sp<ABuffer> csd = new ABuffer(buffer->size());
7642         memcpy(csd->data(), buffer->data(), buffer->size());
7643         csd->setRange(0, buffer->size());
7644         mOutputFormat->setBuffer("csd-0", csd);
7645     }
7646 
7647     return OK;
7648 }
7649 
7650 void MediaCodec::postPendingRepliesAndDeferredMessages(
7651         std::string origin, status_t err /* = OK */) {
7652     sp<AMessage> response{new AMessage};
7653     if (err != OK) {
7654         response->setInt32("err", err);
7655     }
7656     postPendingRepliesAndDeferredMessages(origin, response);
7657 }
7658 
7659 void MediaCodec::postPendingRepliesAndDeferredMessages(
7660         std::string origin, const sp<AMessage> &response) {
7661     LOG_ALWAYS_FATAL_IF(
7662             !mReplyID,
7663             "postPendingRepliesAndDeferredMessages: mReplyID == null, from %s following %s",
7664             origin.c_str(),
7665             mLastReplyOrigin.c_str());
7666     mLastReplyOrigin = origin;
7667     response->postReply(mReplyID);
7668     mReplyID.clear();
7669     ALOGV_IF(!mDeferredMessages.empty(),
7670             "posting %zu deferred messages", mDeferredMessages.size());
7671     for (sp<AMessage> msg : mDeferredMessages) {
7672         msg->post();
7673     }
7674     mDeferredMessages.clear();
7675 }
7676 
7677 std::string MediaCodec::apiStateString() {
7678     const char *rval = NULL;
7679     char rawbuffer[16]; // room for "%d"
7680 
7681     switch (mState) {
7682         case UNINITIALIZED:
7683             rval = (mFlags & kFlagStickyError) ? "at Error state" : "at Released state";
7684             break;
7685         case INITIALIZING: rval = "while constructing"; break;
7686         case INITIALIZED: rval = "at Uninitialized state"; break;
7687         case CONFIGURING: rval = "during configure()"; break;
7688         case CONFIGURED: rval = "at Configured state"; break;
7689         case STARTING: rval = "during start()"; break;
7690         case STARTED: rval = "at Running state"; break;
7691         case FLUSHING: rval = "during flush()"; break;
7692         case FLUSHED: rval = "at Flushed state"; break;
7693         case STOPPING: rval = "during stop()"; break;
7694         case RELEASING: rval = "during release()"; break;
7695         default:
7696             snprintf(rawbuffer, sizeof(rawbuffer), "at %d", mState);
7697             rval = rawbuffer;
7698             break;
7699     }
7700     return rval;
7701 }
7702 
7703 std::string MediaCodec::stateString(State state) {
7704     const char *rval = NULL;
7705     char rawbuffer[16]; // room for "%d"
7706 
7707     switch (state) {
7708         case UNINITIALIZED: rval = "UNINITIALIZED"; break;
7709         case INITIALIZING: rval = "INITIALIZING"; break;
7710         case INITIALIZED: rval = "INITIALIZED"; break;
7711         case CONFIGURING: rval = "CONFIGURING"; break;
7712         case CONFIGURED: rval = "CONFIGURED"; break;
7713         case STARTING: rval = "STARTING"; break;
7714         case STARTED: rval = "STARTED"; break;
7715         case FLUSHING: rval = "FLUSHING"; break;
7716         case FLUSHED: rval = "FLUSHED"; break;
7717         case STOPPING: rval = "STOPPING"; break;
7718         case RELEASING: rval = "RELEASING"; break;
7719         default:
7720             snprintf(rawbuffer, sizeof(rawbuffer), "%d", state);
7721             rval = rawbuffer;
7722             break;
7723     }
7724     return rval;
7725 }
7726 
7727 // static
7728 status_t MediaCodec::CanFetchLinearBlock(
7729         const std::vector<std::string> &names, bool *isCompatible) {
7730     *isCompatible = false;
7731     if (names.size() == 0) {
7732         *isCompatible = true;
7733         return OK;
7734     }
7735     const CodecListCache &cache = GetCodecListCache();
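    // Only Codec2-backed components can vend linear blocks: owner names of the form
    // "codec2::<store>" (e.g. "codec2::software", an assumed example) are accepted, while
    // components owned by the legacy "default" (OMX) plugin are reported as incompatible.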
7736     for (const std::string &name : names) {
7737         auto it = cache.mCodecInfoMap.find(name);
7738         if (it == cache.mCodecInfoMap.end()) {
7739             return NAME_NOT_FOUND;
7740         }
7741         const char *owner = it->second->getOwnerName();
7742         if (owner == nullptr || strncmp(owner, "default", 8) == 0) {
7743             *isCompatible = false;
7744             return OK;
7745         } else if (strncmp(owner, "codec2::", 8) != 0) {
7746             return NAME_NOT_FOUND;
7747         }
7748     }
7749     return CCodec::CanFetchLinearBlock(names, kDefaultReadWriteUsage, isCompatible);
7750 }
7751 
7752 // static
7753 std::shared_ptr<C2LinearBlock> MediaCodec::FetchLinearBlock(
7754         size_t capacity, const std::vector<std::string> &names) {
7755     return CCodec::FetchLinearBlock(capacity, kDefaultReadWriteUsage, names);
7756 }
7757 
7758 // static
7759 status_t MediaCodec::CanFetchGraphicBlock(
7760         const std::vector<std::string> &names, bool *isCompatible) {
7761     *isCompatible = false;
7762     if (names.size() == 0) {
7763         *isCompatible = true;
7764         return OK;
7765     }
7766     const CodecListCache &cache = GetCodecListCache();
7767     for (const std::string &name : names) {
7768         auto it = cache.mCodecInfoMap.find(name);
7769         if (it == cache.mCodecInfoMap.end()) {
7770             return NAME_NOT_FOUND;
7771         }
7772         const char *owner = it->second->getOwnerName();
7773         if (owner == nullptr || strncmp(owner, "default", 8) == 0) {
7774             *isCompatible = false;
7775             return OK;
7776         } else if (strncmp(owner, "codec2::", 8) != 0) {
7777             return NAME_NOT_FOUND;
7778         }
7779     }
7780     return CCodec::CanFetchGraphicBlock(names, isCompatible);
7781 }
7782 
7783 // static
7784 std::shared_ptr<C2GraphicBlock> MediaCodec::FetchGraphicBlock(
7785         int32_t width,
7786         int32_t height,
7787         int32_t format,
7788         uint64_t usage,
7789         const std::vector<std::string> &names) {
7790     return CCodec::FetchGraphicBlock(width, height, format, usage, names);
7791 }
7792 
7793 }  // namespace android
7794