/*
 * Copyright 2012, The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#include "hidl/HidlSupport.h"
#define LOG_TAG "MediaCodec"
#include <utils/Log.h>

#include <dlfcn.h>
#include <inttypes.h>
#include <random>
#include <set>
#include <stdlib.h>
#include <string>

#include <C2Buffer.h>

#include "include/SoftwareRenderer.h"

#include <android/api-level.h>
#include <android/binder_manager.h>
#include <android/content/pm/IPackageManagerNative.h>
#include <android/hardware/cas/native/1.0/IDescrambler.h>
#include <android/hardware/media/omx/1.0/IGraphicBufferSource.h>

#include <aidl/android/media/BnResourceManagerClient.h>
#include <aidl/android/media/IResourceManagerService.h>
#include <android/binder_ibinder.h>
#include <android/binder_manager.h>
#include <android/dlext.h>
#include <android-base/stringprintf.h>
#include <binder/IMemory.h>
#include <binder/IServiceManager.h>
#include <binder/MemoryDealer.h>
#include <cutils/properties.h>
#include <gui/BufferQueue.h>
#include <gui/Surface.h>
#include <hidlmemory/FrameworkUtils.h>
#include <mediadrm/ICrypto.h>
#include <media/IOMX.h>
#include <media/MediaCodecBuffer.h>
#include <media/MediaCodecInfo.h>
#include <media/MediaMetricsItem.h>
#include <media/MediaResource.h>
#include <media/NdkMediaErrorPriv.h>
#include <media/NdkMediaFormat.h>
#include <media/NdkMediaFormatPriv.h>
#include <media/formatshaper/FormatShaper.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/foundation/AString.h>
#include <media/stagefright/foundation/AUtils.h>
#include <media/stagefright/foundation/avc_utils.h>
#include <media/stagefright/foundation/hexdump.h>
#include <media/stagefright/ACodec.h>
#include <media/stagefright/BatteryChecker.h>
#include <media/stagefright/BufferProducerWrapper.h>
#include <media/stagefright/CCodec.h>
#include <media/stagefright/CryptoAsync.h>
#include <media/stagefright/MediaCodec.h>
#include <media/stagefright/MediaCodecConstants.h>
#include <media/stagefright/MediaCodecList.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/OMXClient.h>
#include <media/stagefright/PersistentSurface.h>
#include <media/stagefright/RenderedFrameInfo.h>
#include <media/stagefright/SurfaceUtils.h>
#include <nativeloader/dlext_namespaces.h>
#include <private/android_filesystem_config.h>
#include <server_configurable_flags/get_flags.h>
#include <utils/Singleton.h>

namespace android {

using Status = ::ndk::ScopedAStatus;
using aidl::android::media::BnResourceManagerClient;
using aidl::android::media::IResourceManagerClient;
using aidl::android::media::IResourceManagerService;
using aidl::android::media::ClientInfoParcel;
using server_configurable_flags::GetServerConfigurableFlag;
using FreezeEvent = VideoRenderQualityTracker::FreezeEvent;
using JudderEvent = VideoRenderQualityTracker::JudderEvent;

// key for media statistics
static const char *kCodecKeyName = "codec";
// attrs for media statistics
// NB: these are matched with public Java API constants defined
//     in frameworks/base/media/java/android/media/MediaCodec.java
// These must be kept synchronized with the constants there.
static const char *kCodecLogSessionId = "android.media.mediacodec.log-session-id";
static const char *kCodecCodec = "android.media.mediacodec.codec"; /* e.g. OMX.google.aac.decoder */
static const char *kCodecId = "android.media.mediacodec.id";
static const char *kCodecMime = "android.media.mediacodec.mime"; /* e.g. audio/mime */
static const char *kCodecMode = "android.media.mediacodec.mode"; /* audio, video */
static const char *kCodecModeVideo = "video"; /* values returned for kCodecMode */
static const char *kCodecModeAudio = "audio";
static const char *kCodecModeImage = "image";
static const char *kCodecModeUnknown = "unknown";
static const char *kCodecEncoder = "android.media.mediacodec.encoder"; /* 0,1 */
static const char *kCodecHardware = "android.media.mediacodec.hardware"; /* 0,1 */
static const char *kCodecSecure = "android.media.mediacodec.secure"; /* 0, 1 */
static const char *kCodecTunneled = "android.media.mediacodec.tunneled"; /* 0,1 */
static const char *kCodecWidth = "android.media.mediacodec.width"; /* 0..n */
static const char *kCodecHeight = "android.media.mediacodec.height"; /* 0..n */
static const char *kCodecRotation = "android.media.mediacodec.rotation-degrees"; /* 0/90/180/270 */
static const char *kCodecColorFormat = "android.media.mediacodec.color-format";
static const char *kCodecFrameRate = "android.media.mediacodec.frame-rate";
static const char *kCodecCaptureRate = "android.media.mediacodec.capture-rate";
static const char *kCodecOperatingRate = "android.media.mediacodec.operating-rate";
static const char *kCodecPriority = "android.media.mediacodec.priority";

// Min/Max QP before shaping
static const char *kCodecOriginalVideoQPIMin = "android.media.mediacodec.original-video-qp-i-min";
static const char *kCodecOriginalVideoQPIMax = "android.media.mediacodec.original-video-qp-i-max";
static const char *kCodecOriginalVideoQPPMin = "android.media.mediacodec.original-video-qp-p-min";
static const char *kCodecOriginalVideoQPPMax = "android.media.mediacodec.original-video-qp-p-max";
static const char *kCodecOriginalVideoQPBMin = "android.media.mediacodec.original-video-qp-b-min";
static const char *kCodecOriginalVideoQPBMax = "android.media.mediacodec.original-video-qp-b-max";

// Min/Max QP after shaping
static const char *kCodecRequestedVideoQPIMin = "android.media.mediacodec.video-qp-i-min";
static const char *kCodecRequestedVideoQPIMax = "android.media.mediacodec.video-qp-i-max";
static const char *kCodecRequestedVideoQPPMin = "android.media.mediacodec.video-qp-p-min";
static const char *kCodecRequestedVideoQPPMax = "android.media.mediacodec.video-qp-p-max";
static const char *kCodecRequestedVideoQPBMin = "android.media.mediacodec.video-qp-b-min";
static const char *kCodecRequestedVideoQPBMax = "android.media.mediacodec.video-qp-b-max";

// NB: These are not yet exposed as public Java API constants.
static const char *kCodecCrypto = "android.media.mediacodec.crypto"; /* 0,1 */
static const char *kCodecProfile = "android.media.mediacodec.profile"; /* 0..n */
static const char *kCodecLevel = "android.media.mediacodec.level"; /* 0..n */
static const char *kCodecBitrateMode = "android.media.mediacodec.bitrate_mode"; /* CQ/VBR/CBR */
static const char *kCodecBitrate = "android.media.mediacodec.bitrate"; /* 0..n */
static const char *kCodecOriginalBitrate = "android.media.mediacodec.original.bitrate"; /* 0..n */
static const char *kCodecMaxWidth = "android.media.mediacodec.maxwidth"; /* 0..n */
static const char *kCodecMaxHeight = "android.media.mediacodec.maxheight"; /* 0..n */
static const char *kCodecError = "android.media.mediacodec.errcode";
static const char *kCodecLifetimeMs = "android.media.mediacodec.lifetimeMs"; /* 0..n ms*/
static const char *kCodecErrorState = "android.media.mediacodec.errstate";
static const char *kCodecLatencyMax = "android.media.mediacodec.latency.max"; /* in us */
static const char *kCodecLatencyMin = "android.media.mediacodec.latency.min"; /* in us */
static const char *kCodecLatencyAvg = "android.media.mediacodec.latency.avg"; /* in us */
static const char *kCodecLatencyCount = "android.media.mediacodec.latency.n";
static const char *kCodecLatencyHist = "android.media.mediacodec.latency.hist"; /* in us */
static const char *kCodecLatencyUnknown = "android.media.mediacodec.latency.unknown";
static const char *kCodecQueueSecureInputBufferError = "android.media.mediacodec.queueSecureInputBufferError";
static const char *kCodecQueueInputBufferError = "android.media.mediacodec.queueInputBufferError";
static const char *kCodecComponentColorFormat = "android.media.mediacodec.component-color-format";

static const char *kCodecNumLowLatencyModeOn = "android.media.mediacodec.low-latency.on"; /* 0..n */
static const char *kCodecNumLowLatencyModeOff = "android.media.mediacodec.low-latency.off"; /* 0..n */
static const char *kCodecFirstFrameIndexLowLatencyModeOn = "android.media.mediacodec.low-latency.first-frame"; /* 0..n */
static const char *kCodecChannelCount = "android.media.mediacodec.channelCount";
static const char *kCodecSampleRate = "android.media.mediacodec.sampleRate";
static const char *kCodecVideoEncodedBytes = "android.media.mediacodec.vencode.bytes";
static const char *kCodecVideoEncodedFrames = "android.media.mediacodec.vencode.frames";
static const char *kCodecVideoInputBytes = "android.media.mediacodec.video.input.bytes";
static const char *kCodecVideoInputFrames = "android.media.mediacodec.video.input.frames";
static const char *kCodecVideoEncodedDurationUs = "android.media.mediacodec.vencode.durationUs";
// HDR metrics
static const char *kCodecConfigColorStandard = "android.media.mediacodec.config-color-standard";
static const char *kCodecConfigColorRange = "android.media.mediacodec.config-color-range";
static const char *kCodecConfigColorTransfer = "android.media.mediacodec.config-color-transfer";
static const char *kCodecParsedColorStandard = "android.media.mediacodec.parsed-color-standard";
static const char *kCodecParsedColorRange = "android.media.mediacodec.parsed-color-range";
static const char *kCodecParsedColorTransfer = "android.media.mediacodec.parsed-color-transfer";
static const char *kCodecHdrStaticInfo = "android.media.mediacodec.hdr-static-info";
static const char *kCodecHdr10PlusInfo = "android.media.mediacodec.hdr10-plus-info";
static const char *kCodecHdrFormat = "android.media.mediacodec.hdr-format";
// array/sync/async/block modes
static const char *kCodecArrayMode = "android.media.mediacodec.array-mode";
static const char *kCodecOperationMode = "android.media.mediacodec.operation-mode";
static const char *kCodecOutputSurface = "android.media.mediacodec.output-surface";
// max size configured by the app
static const char *kCodecAppMaxInputSize = "android.media.mediacodec.app-max-input-size";
// max size actually used
static const char *kCodecUsedMaxInputSize = "android.media.mediacodec.used-max-input-size";
// max size suggested by the codec
static const char *kCodecCodecMaxInputSize = "android.media.mediacodec.codec-max-input-size";
static const char *kCodecFlushCount = "android.media.mediacodec.flush-count";
static const char *kCodecSetSurfaceCount = "android.media.mediacodec.set-surface-count";
static const char *kCodecResolutionChangeCount = "android.media.mediacodec.resolution-change-count";

// the kCodecRecent* fields appear only in getMetrics() results
static const char *kCodecRecentLatencyMax = "android.media.mediacodec.recent.max"; /* in us */
static const char *kCodecRecentLatencyMin = "android.media.mediacodec.recent.min"; /* in us */
static const char *kCodecRecentLatencyAvg = "android.media.mediacodec.recent.avg"; /* in us */
static const char *kCodecRecentLatencyCount = "android.media.mediacodec.recent.n";
static const char *kCodecRecentLatencyHist = "android.media.mediacodec.recent.hist"; /* in us */

/* -1: shaper disabled
   >=0: number of fields changed */
static const char *kCodecShapingEnhanced = "android.media.mediacodec.shaped";

// Render metrics
static const char *kCodecPlaybackDurationSec = "android.media.mediacodec.playback-duration-sec";
static const char *kCodecFirstRenderTimeUs = "android.media.mediacodec.first-render-time-us";
static const char *kCodecLastRenderTimeUs = "android.media.mediacodec.last-render-time-us";
static const char *kCodecFramesReleased = "android.media.mediacodec.frames-released";
static const char *kCodecFramesRendered = "android.media.mediacodec.frames-rendered";
static const char *kCodecFramesDropped = "android.media.mediacodec.frames-dropped";
static const char *kCodecFramesSkipped = "android.media.mediacodec.frames-skipped";
static const char *kCodecFramerateContent = "android.media.mediacodec.framerate-content";
static const char *kCodecFramerateDesired = "android.media.mediacodec.framerate-desired";
static const char *kCodecFramerateActual = "android.media.mediacodec.framerate-actual";
// Freeze
static const char *kCodecFreezeCount = "android.media.mediacodec.freeze-count";
static const char *kCodecFreezeScore = "android.media.mediacodec.freeze-score";
static const char *kCodecFreezeRate = "android.media.mediacodec.freeze-rate";
static const char *kCodecFreezeDurationMsAvg = "android.media.mediacodec.freeze-duration-ms-avg";
static const char *kCodecFreezeDurationMsMax = "android.media.mediacodec.freeze-duration-ms-max";
static const char *kCodecFreezeDurationMsHistogram =
        "android.media.mediacodec.freeze-duration-ms-histogram";
static const char *kCodecFreezeDurationMsHistogramBuckets =
        "android.media.mediacodec.freeze-duration-ms-histogram-buckets";
static const char *kCodecFreezeDistanceMsAvg = "android.media.mediacodec.freeze-distance-ms-avg";
static const char *kCodecFreezeDistanceMsHistogram =
        "android.media.mediacodec.freeze-distance-ms-histogram";
static const char *kCodecFreezeDistanceMsHistogramBuckets =
        "android.media.mediacodec.freeze-distance-ms-histogram-buckets";
// Judder
static const char *kCodecJudderCount = "android.media.mediacodec.judder-count";
static const char *kCodecJudderScore = "android.media.mediacodec.judder-score";
static const char *kCodecJudderRate = "android.media.mediacodec.judder-rate";
static const char *kCodecJudderScoreAvg = "android.media.mediacodec.judder-score-avg";
static const char *kCodecJudderScoreMax = "android.media.mediacodec.judder-score-max";
static const char *kCodecJudderScoreHistogram = "android.media.mediacodec.judder-score-histogram";
static const char *kCodecJudderScoreHistogramBuckets =
        "android.media.mediacodec.judder-score-histogram-buckets";
// Freeze event
static const char *kCodecFreezeEventCount = "android.media.mediacodec.freeze-event-count";
static const char *kFreezeEventKeyName = "freeze";
static const char *kFreezeEventInitialTimeUs = "android.media.mediacodec.freeze.initial-time-us";
static const char *kFreezeEventDurationMs = "android.media.mediacodec.freeze.duration-ms";
static const char *kFreezeEventCount = "android.media.mediacodec.freeze.count";
static const char *kFreezeEventAvgDurationMs = "android.media.mediacodec.freeze.avg-duration-ms";
static const char *kFreezeEventAvgDistanceMs = "android.media.mediacodec.freeze.avg-distance-ms";
static const char *kFreezeEventDetailsDurationMs =
        "android.media.mediacodec.freeze.details-duration-ms";
static const char *kFreezeEventDetailsDistanceMs =
        "android.media.mediacodec.freeze.details-distance-ms";
// Judder event
static const char *kCodecJudderEventCount = "android.media.mediacodec.judder-event-count";
static const char *kJudderEventKeyName = "judder";
static const char *kJudderEventInitialTimeUs = "android.media.mediacodec.judder.initial-time-us";
static const char *kJudderEventDurationMs = "android.media.mediacodec.judder.duration-ms";
static const char *kJudderEventCount = "android.media.mediacodec.judder.count";
static const char *kJudderEventAvgScore = "android.media.mediacodec.judder.avg-score";
static const char *kJudderEventAvgDistanceMs = "android.media.mediacodec.judder.avg-distance-ms";
static const char *kJudderEventDetailsActualDurationUs =
        "android.media.mediacodec.judder.details-actual-duration-us";
static const char *kJudderEventDetailsContentDurationUs =
        "android.media.mediacodec.judder.details-content-duration-us";
static const char *kJudderEventDetailsDistanceMs =
        "android.media.mediacodec.judder.details-distance-ms";

// XXX suppress until we get our representation right
static bool kEmitHistogram = false;

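// The resource manager identifies each client with an opaque 64-bit id; it is derived here
// from the client pointer value.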
static int64_t getId(IResourceManagerClient const * client) {
    return (int64_t) client;
}

static int64_t getId(const std::shared_ptr<IResourceManagerClient> &client) {
    return getId(client.get());
}

static bool isResourceError(status_t err) {
    return (err == NO_MEMORY);
}

static bool areRenderMetricsEnabled() {
    std::string v = GetServerConfigurableFlag("media_native", "render_metrics_enabled", "false");
    return v == "true";
}

static const int kMaxRetry = 2;
static const int kMaxReclaimWaitTimeInUs = 500000;  // 0.5s
static const int kNumBuffersAlign = 16;

static const C2MemoryUsage kDefaultReadWriteUsage{
        C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE};

////////////////////////////////////////////////////////////////////////////////

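// Binder object handed to ResourceManagerService so it can ask this process to reclaim
// (release) the codec, or query its name, when media resources run low.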
struct ResourceManagerClient : public BnResourceManagerClient {
    explicit ResourceManagerClient(MediaCodec* codec, int32_t pid, int32_t uid) :
            mMediaCodec(codec), mPid(pid), mUid(uid) {}

    Status reclaimResource(bool* _aidl_return) override {
        sp<MediaCodec> codec = mMediaCodec.promote();
        if (codec == NULL) {
            // Codec is already gone, so remove the resources as well
            ::ndk::SpAIBinder binder(AServiceManager_waitForService("media.resource_manager"));
            std::shared_ptr<IResourceManagerService> service =
                    IResourceManagerService::fromBinder(binder);
            if (service == nullptr) {
                ALOGW("MediaCodec::ResourceManagerClient unable to find ResourceManagerService");
                // Without the service there is nothing to remove; report failure.
                *_aidl_return = false;
                return Status::ok();
            }
            ClientInfoParcel clientInfo{.pid = static_cast<int32_t>(mPid),
                                        .uid = static_cast<int32_t>(mUid),
                                        .id = getId(this)};
            service->removeClient(clientInfo);
            *_aidl_return = true;
            return Status::ok();
        }
        status_t err = codec->reclaim();
        if (err == WOULD_BLOCK) {
            ALOGD("Wait for the client to release codec.");
            usleep(kMaxReclaimWaitTimeInUs);
            ALOGD("Try to reclaim again.");
            err = codec->reclaim(true /* force */);
        }
        if (err != OK) {
            ALOGW("ResourceManagerClient failed to release codec with err %d", err);
        }
        *_aidl_return = (err == OK);
        return Status::ok();
    }

    Status getName(::std::string* _aidl_return) override {
        _aidl_return->clear();
        sp<MediaCodec> codec = mMediaCodec.promote();
        if (codec == NULL) {
            // codec is already gone.
            return Status::ok();
        }

        AString name;
        if (codec->getName(&name) == OK) {
            *_aidl_return = name.c_str();
        }
        return Status::ok();
    }

    virtual ~ResourceManagerClient() {}

private:
    wp<MediaCodec> mMediaCodec;
    int32_t mPid;
    int32_t mUid;

    DISALLOW_EVIL_CONSTRUCTORS(ResourceManagerClient);
};

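// Proxy through which MediaCodec registers and removes its resources with
// ResourceManagerService, and which tracks the service's lifetime via a death recipient.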
struct MediaCodec::ResourceManagerServiceProxy : public RefBase {
    ResourceManagerServiceProxy(pid_t pid, uid_t uid,
            const std::shared_ptr<IResourceManagerClient> &client);
    virtual ~ResourceManagerServiceProxy();

    status_t init();

    // implements DeathRecipient
    static void BinderDiedCallback(void* cookie);
    void binderDied();
    static Mutex sLockCookies;
    static std::set<void*> sCookies;
    static void addCookie(void* cookie);
    static void removeCookie(void* cookie);

    void addResource(const MediaResourceParcel &resource);
    void removeResource(const MediaResourceParcel &resource);
    void removeClient();
    void markClientForPendingRemoval();
    bool reclaimResource(const std::vector<MediaResourceParcel> &resources);
    void notifyClientCreated();
    void notifyClientStarted(ClientConfigParcel& clientConfig);
    void notifyClientStopped(ClientConfigParcel& clientConfig);
    void notifyClientConfigChanged(ClientConfigParcel& clientConfig);

    inline void setCodecName(const char* name) {
        mCodecName = name;
    }

private:
    Mutex mLock;
    pid_t mPid;
    uid_t mUid;
    std::string mCodecName;
    std::shared_ptr<IResourceManagerService> mService;
    std::shared_ptr<IResourceManagerClient> mClient;
    ::ndk::ScopedAIBinder_DeathRecipient mDeathRecipient;
};

MediaCodec::ResourceManagerServiceProxy::ResourceManagerServiceProxy(
        pid_t pid, uid_t uid, const std::shared_ptr<IResourceManagerClient> &client)
        : mPid(pid), mUid(uid), mClient(client),
          mDeathRecipient(AIBinder_DeathRecipient_new(BinderDiedCallback)) {
    if (mUid == MediaCodec::kNoUid) {
        mUid = AIBinder_getCallingUid();
    }
    if (mPid == MediaCodec::kNoPid) {
        mPid = AIBinder_getCallingPid();
    }
}

MediaCodec::ResourceManagerServiceProxy::~ResourceManagerServiceProxy() {

    // remove the cookie, so any in-flight death notification will get dropped
    // by our handler.
    removeCookie(this);

    Mutex::Autolock _l(mLock);
    if (mService != nullptr) {
        AIBinder_unlinkToDeath(mService->asBinder().get(), mDeathRecipient.get(), this);
        mService = nullptr;
    }
}

status_t MediaCodec::ResourceManagerServiceProxy::init() {
    ::ndk::SpAIBinder binder(AServiceManager_waitForService("media.resource_manager"));
    mService = IResourceManagerService::fromBinder(binder);
    if (mService == nullptr) {
        ALOGE("Failed to get ResourceManagerService");
        return UNKNOWN_ERROR;
    }

    int callerPid = AIBinder_getCallingPid();
    int callerUid = AIBinder_getCallingUid();
    if (mPid != callerPid || mUid != callerUid) {
        // Media processes don't need special permissions to act on behalf of other processes.
        if (callerUid != AID_MEDIA) {
            char const * permission = "android.permission.MEDIA_RESOURCE_OVERRIDE_PID";
            if (!checkCallingPermission(String16(permission))) {
                ALOGW("%s is required to override the caller's PID for media resource management.",
                        permission);
                return PERMISSION_DENIED;
            }
        }
    }

    // Kill clients pending removal.
    mService->reclaimResourcesFromClientsPendingRemoval(mPid);

    // so our handler will process the death notifications
    addCookie(this);

    // after this, require mLock whenever using mService
    AIBinder_linkToDeath(mService->asBinder().get(), mDeathRecipient.get(), this);
    return OK;
}

//static
// these are no_destroy to keep them from being destroyed at process exit
// where some thread calls exit() while other threads are still running.
// see b/194783918
[[clang::no_destroy]] Mutex MediaCodec::ResourceManagerServiceProxy::sLockCookies;
[[clang::no_destroy]] std::set<void*> MediaCodec::ResourceManagerServiceProxy::sCookies;

//static
void MediaCodec::ResourceManagerServiceProxy::addCookie(void* cookie) {
    Mutex::Autolock _l(sLockCookies);
    sCookies.insert(cookie);
}

//static
void MediaCodec::ResourceManagerServiceProxy::removeCookie(void* cookie) {
    Mutex::Autolock _l(sLockCookies);
    sCookies.erase(cookie);
}

//static
void MediaCodec::ResourceManagerServiceProxy::BinderDiedCallback(void* cookie) {
    Mutex::Autolock _l(sLockCookies);
    if (sCookies.find(cookie) != sCookies.end()) {
        auto thiz = static_cast<ResourceManagerServiceProxy*>(cookie);
        thiz->binderDied();
    }
}

void MediaCodec::ResourceManagerServiceProxy::binderDied() {
    ALOGW("ResourceManagerService died.");
    Mutex::Autolock _l(mLock);
    mService = nullptr;
}

void MediaCodec::ResourceManagerServiceProxy::addResource(
        const MediaResourceParcel &resource) {
    std::vector<MediaResourceParcel> resources;
    resources.push_back(resource);

    Mutex::Autolock _l(mLock);
    if (mService == nullptr) {
        return;
    }
    ClientInfoParcel clientInfo{.pid = static_cast<int32_t>(mPid),
                                .uid = static_cast<int32_t>(mUid),
                                .id = getId(mClient),
                                .name = mCodecName};
    mService->addResource(clientInfo, mClient, resources);
}

void MediaCodec::ResourceManagerServiceProxy::removeResource(
        const MediaResourceParcel &resource) {
    std::vector<MediaResourceParcel> resources;
    resources.push_back(resource);

    Mutex::Autolock _l(mLock);
    if (mService == nullptr) {
        return;
    }
    ClientInfoParcel clientInfo{.pid = static_cast<int32_t>(mPid),
                                .uid = static_cast<int32_t>(mUid),
                                .id = getId(mClient),
                                .name = mCodecName};
    mService->removeResource(clientInfo, resources);
}

void MediaCodec::ResourceManagerServiceProxy::removeClient() {
    Mutex::Autolock _l(mLock);
    if (mService == nullptr) {
        return;
    }
    ClientInfoParcel clientInfo{.pid = static_cast<int32_t>(mPid),
                                .uid = static_cast<int32_t>(mUid),
                                .id = getId(mClient),
                                .name = mCodecName};
    mService->removeClient(clientInfo);
}

void MediaCodec::ResourceManagerServiceProxy::markClientForPendingRemoval() {
    Mutex::Autolock _l(mLock);
    if (mService == nullptr) {
        return;
    }
    ClientInfoParcel clientInfo{.pid = static_cast<int32_t>(mPid),
                                .uid = static_cast<int32_t>(mUid),
                                .id = getId(mClient),
                                .name = mCodecName};
    mService->markClientForPendingRemoval(clientInfo);
}

bool MediaCodec::ResourceManagerServiceProxy::reclaimResource(
        const std::vector<MediaResourceParcel> &resources) {
    Mutex::Autolock _l(mLock);
    if (mService == NULL) {
        return false;
    }
    bool success;
    ClientInfoParcel clientInfo{.pid = static_cast<int32_t>(mPid),
                                .uid = static_cast<int32_t>(mUid),
                                .id = getId(mClient),
                                .name = mCodecName};
    Status status = mService->reclaimResource(clientInfo, resources, &success);
    return status.isOk() && success;
}

void MediaCodec::ResourceManagerServiceProxy::notifyClientCreated() {
    ClientInfoParcel clientInfo{.pid = static_cast<int32_t>(mPid),
                                .uid = static_cast<int32_t>(mUid),
                                .id = getId(mClient),
                                .name = mCodecName};
    mService->notifyClientCreated(clientInfo);
}

void MediaCodec::ResourceManagerServiceProxy::notifyClientStarted(
        ClientConfigParcel& clientConfig) {
    clientConfig.clientInfo.pid = static_cast<int32_t>(mPid);
    clientConfig.clientInfo.uid = static_cast<int32_t>(mUid);
    clientConfig.clientInfo.id = getId(mClient);
    clientConfig.clientInfo.name = mCodecName;
    mService->notifyClientStarted(clientConfig);
}

void MediaCodec::ResourceManagerServiceProxy::notifyClientStopped(
        ClientConfigParcel& clientConfig) {
    clientConfig.clientInfo.pid = static_cast<int32_t>(mPid);
    clientConfig.clientInfo.uid = static_cast<int32_t>(mUid);
    clientConfig.clientInfo.id = getId(mClient);
    clientConfig.clientInfo.name = mCodecName;
    mService->notifyClientStopped(clientConfig);
}

void MediaCodec::ResourceManagerServiceProxy::notifyClientConfigChanged(
        ClientConfigParcel& clientConfig) {
    clientConfig.clientInfo.pid = static_cast<int32_t>(mPid);
    clientConfig.clientInfo.uid = static_cast<int32_t>(mUid);
    clientConfig.clientInfo.id = getId(mClient);
    clientConfig.clientInfo.name = mCodecName;
    mService->notifyClientConfigChanged(clientConfig);
}

////////////////////////////////////////////////////////////////////////////////

MediaCodec::BufferInfo::BufferInfo() : mOwnedByClient(false) {}

////////////////////////////////////////////////////////////////////////////////

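// Surface used while releasing a codec: any frame queued to it is acquired and immediately
// released, so the codec can drain without a client-provided surface.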
class MediaCodec::ReleaseSurface {
public:
    explicit ReleaseSurface(uint64_t usage) {
        BufferQueue::createBufferQueue(&mProducer, &mConsumer);
        mSurface = new Surface(mProducer, false /* controlledByApp */);
        struct ConsumerListener : public BnConsumerListener {
            ConsumerListener(const sp<IGraphicBufferConsumer> &consumer) {
                mConsumer = consumer;
            }
            void onFrameAvailable(const BufferItem&) override {
                BufferItem buffer;
                // consume buffer
                sp<IGraphicBufferConsumer> consumer = mConsumer.promote();
                if (consumer != nullptr && consumer->acquireBuffer(&buffer, 0) == NO_ERROR) {
                    consumer->releaseBuffer(buffer.mSlot, buffer.mFrameNumber,
                                            EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, buffer.mFence);
                }
            }

            wp<IGraphicBufferConsumer> mConsumer;
            void onBuffersReleased() override {}
            void onSidebandStreamChanged() override {}
        };
        sp<ConsumerListener> listener{new ConsumerListener(mConsumer)};
        mConsumer->consumerConnect(listener, false);
        mConsumer->setConsumerName(String8{"MediaCodec.release"});
        mConsumer->setConsumerUsageBits(usage);
    }

    const sp<Surface> &getSurface() {
        return mSurface;
    }

private:
    sp<IGraphicBufferProducer> mProducer;
    sp<IGraphicBufferConsumer> mConsumer;
    sp<Surface> mSurface;
};

////////////////////////////////////////////////////////////////////////////////

namespace {

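// Internal message codes posted from the CodecBase callbacks to MediaCodec's looper.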
enum {
    kWhatFillThisBuffer = 'fill',
    kWhatDrainThisBuffer = 'drai',
    kWhatEOS = 'eos ',
    kWhatStartCompleted = 'Scom',
    kWhatStopCompleted = 'scom',
    kWhatReleaseCompleted = 'rcom',
    kWhatFlushCompleted = 'fcom',
    kWhatError = 'erro',
    kWhatCryptoError = 'ercp',
    kWhatComponentAllocated = 'cAll',
    kWhatComponentConfigured = 'cCon',
    kWhatInputSurfaceCreated = 'isfc',
    kWhatInputSurfaceAccepted = 'isfa',
    kWhatSignaledInputEOS = 'seos',
    kWhatOutputFramesRendered = 'outR',
    kWhatOutputBuffersChanged = 'outC',
    kWhatFirstTunnelFrameReady = 'ftfR',
    kWhatPollForRenderedBuffers = 'plrb',
    kWhatMetricsUpdated = 'mtru',
};

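// Forwards decrypt errors from the CryptoAsync thread back to MediaCodec, either as a crypto
// error or as a generic codec error.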
class CryptoAsyncCallback : public CryptoAsync::CryptoAsyncCallback {
public:

    explicit CryptoAsyncCallback(const sp<AMessage> & notify):mNotify(notify) {
    }

    ~CryptoAsyncCallback() {}

    void onDecryptComplete(const sp<AMessage> &result) override {
        (void)result;
    }

    void onDecryptError(const std::list<sp<AMessage>> &errorMsgs) override {
        // This error may be decrypt/queue error.
        status_t errorCode;
        for (auto &emsg : errorMsgs) {
            sp<AMessage> notify(mNotify->dup());
            if (emsg->findInt32("err", &errorCode)) {
                if (isCryptoError(errorCode)) {
                    notify->setInt32("what", kWhatCryptoError);
                } else {
                    notify->setInt32("what", kWhatError);
                }
                notify->extend(emsg);
                notify->post();
            } else {
                ALOGW("Buffers with no errorCode are not expected");
            }
        }
    }
private:
    const sp<AMessage> mNotify;
};

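// Relays input/output buffer availability from the codec implementation to MediaCodec.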
class BufferCallback : public CodecBase::BufferCallback {
public:
    explicit BufferCallback(const sp<AMessage> &notify);
    virtual ~BufferCallback() = default;

    virtual void onInputBufferAvailable(
            size_t index, const sp<MediaCodecBuffer> &buffer) override;
    virtual void onOutputBufferAvailable(
            size_t index, const sp<MediaCodecBuffer> &buffer) override;
private:
    const sp<AMessage> mNotify;
};

BufferCallback::BufferCallback(const sp<AMessage> &notify)
    : mNotify(notify) {}

void BufferCallback::onInputBufferAvailable(
        size_t index, const sp<MediaCodecBuffer> &buffer) {
    sp<AMessage> notify(mNotify->dup());
    notify->setInt32("what", kWhatFillThisBuffer);
    notify->setSize("index", index);
    notify->setObject("buffer", buffer);
    notify->post();
}

void BufferCallback::onOutputBufferAvailable(
        size_t index, const sp<MediaCodecBuffer> &buffer) {
    sp<AMessage> notify(mNotify->dup());
    notify->setInt32("what", kWhatDrainThisBuffer);
    notify->setSize("index", index);
    notify->setObject("buffer", buffer);
    notify->post();
}

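// Relays codec state changes (EOS, start/stop/flush/release completion, errors, format
// changes, rendered frames, metrics updates) from the codec implementation to MediaCodec.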
class CodecCallback : public CodecBase::CodecCallback {
public:
    explicit CodecCallback(const sp<AMessage> &notify);
    virtual ~CodecCallback() = default;

    virtual void onEos(status_t err) override;
    virtual void onStartCompleted() override;
    virtual void onStopCompleted() override;
    virtual void onReleaseCompleted() override;
    virtual void onFlushCompleted() override;
    virtual void onError(status_t err, enum ActionCode actionCode) override;
    virtual void onComponentAllocated(const char *componentName) override;
    virtual void onComponentConfigured(
            const sp<AMessage> &inputFormat, const sp<AMessage> &outputFormat) override;
    virtual void onInputSurfaceCreated(
            const sp<AMessage> &inputFormat,
            const sp<AMessage> &outputFormat,
            const sp<BufferProducerWrapper> &inputSurface) override;
    virtual void onInputSurfaceCreationFailed(status_t err) override;
    virtual void onInputSurfaceAccepted(
            const sp<AMessage> &inputFormat,
            const sp<AMessage> &outputFormat) override;
    virtual void onInputSurfaceDeclined(status_t err) override;
    virtual void onSignaledInputEOS(status_t err) override;
    virtual void onOutputFramesRendered(const std::list<RenderedFrameInfo> &done) override;
    virtual void onOutputBuffersChanged() override;
    virtual void onFirstTunnelFrameReady() override;
    virtual void onMetricsUpdated(const sp<AMessage> &updatedMetrics) override;
private:
    const sp<AMessage> mNotify;
};

CodecCallback::CodecCallback(const sp<AMessage> &notify) : mNotify(notify) {}

void CodecCallback::onEos(status_t err) {
    sp<AMessage> notify(mNotify->dup());
    notify->setInt32("what", kWhatEOS);
    notify->setInt32("err", err);
    notify->post();
}

void CodecCallback::onStartCompleted() {
    sp<AMessage> notify(mNotify->dup());
    notify->setInt32("what", kWhatStartCompleted);
    notify->post();
}

void CodecCallback::onStopCompleted() {
    sp<AMessage> notify(mNotify->dup());
    notify->setInt32("what", kWhatStopCompleted);
    notify->post();
}

void CodecCallback::onReleaseCompleted() {
    sp<AMessage> notify(mNotify->dup());
    notify->setInt32("what", kWhatReleaseCompleted);
    notify->post();
}

void CodecCallback::onFlushCompleted() {
    sp<AMessage> notify(mNotify->dup());
    notify->setInt32("what", kWhatFlushCompleted);
    notify->post();
}

void CodecCallback::onError(status_t err, enum ActionCode actionCode) {
    sp<AMessage> notify(mNotify->dup());
    notify->setInt32("what", kWhatError);
    notify->setInt32("err", err);
    notify->setInt32("actionCode", actionCode);
    notify->post();
}

void CodecCallback::onComponentAllocated(const char *componentName) {
    sp<AMessage> notify(mNotify->dup());
    notify->setInt32("what", kWhatComponentAllocated);
    notify->setString("componentName", componentName);
    notify->post();
}

void CodecCallback::onComponentConfigured(
        const sp<AMessage> &inputFormat, const sp<AMessage> &outputFormat) {
    sp<AMessage> notify(mNotify->dup());
    notify->setInt32("what", kWhatComponentConfigured);
    notify->setMessage("input-format", inputFormat);
    notify->setMessage("output-format", outputFormat);
    notify->post();
}

void CodecCallback::onInputSurfaceCreated(
        const sp<AMessage> &inputFormat,
        const sp<AMessage> &outputFormat,
        const sp<BufferProducerWrapper> &inputSurface) {
    sp<AMessage> notify(mNotify->dup());
    notify->setInt32("what", kWhatInputSurfaceCreated);
    notify->setMessage("input-format", inputFormat);
    notify->setMessage("output-format", outputFormat);
    notify->setObject("input-surface", inputSurface);
    notify->post();
}

void CodecCallback::onInputSurfaceCreationFailed(status_t err) {
    sp<AMessage> notify(mNotify->dup());
    notify->setInt32("what", kWhatInputSurfaceCreated);
    notify->setInt32("err", err);
    notify->post();
}

void CodecCallback::onInputSurfaceAccepted(
        const sp<AMessage> &inputFormat,
        const sp<AMessage> &outputFormat) {
    sp<AMessage> notify(mNotify->dup());
    notify->setInt32("what", kWhatInputSurfaceAccepted);
    notify->setMessage("input-format", inputFormat);
    notify->setMessage("output-format", outputFormat);
    notify->post();
}

void CodecCallback::onInputSurfaceDeclined(status_t err) {
    sp<AMessage> notify(mNotify->dup());
    notify->setInt32("what", kWhatInputSurfaceAccepted);
    notify->setInt32("err", err);
    notify->post();
}

void CodecCallback::onSignaledInputEOS(status_t err) {
    sp<AMessage> notify(mNotify->dup());
    notify->setInt32("what", kWhatSignaledInputEOS);
    if (err != OK) {
        notify->setInt32("err", err);
    }
    notify->post();
}

void CodecCallback::onOutputFramesRendered(const std::list<RenderedFrameInfo> &done) {
    sp<AMessage> notify(mNotify->dup());
    notify->setInt32("what", kWhatOutputFramesRendered);
    if (MediaCodec::CreateFramesRenderedMessage(done, notify)) {
        notify->post();
    }
}

void CodecCallback::onOutputBuffersChanged() {
    sp<AMessage> notify(mNotify->dup());
    notify->setInt32("what", kWhatOutputBuffersChanged);
    notify->post();
}

void CodecCallback::onFirstTunnelFrameReady() {
    sp<AMessage> notify(mNotify->dup());
    notify->setInt32("what", kWhatFirstTunnelFrameReady);
    notify->post();
}

void CodecCallback::onMetricsUpdated(const sp<AMessage> &updatedMetrics) {
    sp<AMessage> notify(mNotify->dup());
    notify->setInt32("what", kWhatMetricsUpdated);
    notify->setMessage("updated-metrics", updatedMetrics);
    notify->post();
}

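// Map the codec domain to the ResourceManager subtype and to the mode string used in metrics.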
static MediaResourceSubType toMediaResourceSubType(MediaCodec::Domain domain) {
    switch (domain) {
        case MediaCodec::DOMAIN_VIDEO: return MediaResourceSubType::kVideoCodec;
        case MediaCodec::DOMAIN_AUDIO: return MediaResourceSubType::kAudioCodec;
        case MediaCodec::DOMAIN_IMAGE: return MediaResourceSubType::kImageCodec;
        default: return MediaResourceSubType::kUnspecifiedSubType;
    }
}

static const char * toCodecMode(MediaCodec::Domain domain) {
    switch (domain) {
        case MediaCodec::DOMAIN_VIDEO: return kCodecModeVideo;
        case MediaCodec::DOMAIN_AUDIO: return kCodecModeAudio;
        case MediaCodec::DOMAIN_IMAGE: return kCodecModeImage;
        default: return kCodecModeUnknown;
    }
}

}  // namespace

////////////////////////////////////////////////////////////////////////////////

// static
sp<MediaCodec> MediaCodec::CreateByType(
        const sp<ALooper> &looper, const AString &mime, bool encoder, status_t *err, pid_t pid,
        uid_t uid) {
    sp<AMessage> format;
    return CreateByType(looper, mime, encoder, err, pid, uid, format);
}

sp<MediaCodec> MediaCodec::CreateByType(
        const sp<ALooper> &looper, const AString &mime, bool encoder, status_t *err, pid_t pid,
        uid_t uid, sp<AMessage> format) {
    Vector<AString> matchingCodecs;

    MediaCodecList::findMatchingCodecs(
            mime.c_str(),
            encoder,
            0,
            format,
            &matchingCodecs);

    if (err != NULL) {
        *err = NAME_NOT_FOUND;
    }
    for (size_t i = 0; i < matchingCodecs.size(); ++i) {
        sp<MediaCodec> codec = new MediaCodec(looper, pid, uid);
        AString componentName = matchingCodecs[i];
        status_t ret = codec->init(componentName);
        if (err != NULL) {
            *err = ret;
        }
        if (ret == OK) {
            return codec;
        }
        ALOGD("Allocating component '%s' failed (%d), try next one.",
                componentName.c_str(), ret);
    }
    return NULL;
}

// static
sp<MediaCodec> MediaCodec::CreateByComponentName(
        const sp<ALooper> &looper, const AString &name, status_t *err, pid_t pid, uid_t uid) {
    sp<MediaCodec> codec = new MediaCodec(looper, pid, uid);

    const status_t ret = codec->init(name);
    if (err != NULL) {
        *err = ret;
    }
    return ret == OK ? codec : NULL; // NULL deallocates codec.
}

// static
sp<PersistentSurface> MediaCodec::CreatePersistentInputSurface() {
    sp<PersistentSurface> pluginSurface = CCodec::CreateInputSurface();
    if (pluginSurface != nullptr) {
        return pluginSurface;
    }

    OMXClient client;
    if (client.connect() != OK) {
        ALOGE("Failed to connect to OMX to create persistent input surface.");
        return NULL;
    }

    sp<IOMX> omx = client.interface();

    sp<IGraphicBufferProducer> bufferProducer;
    sp<hardware::media::omx::V1_0::IGraphicBufferSource> bufferSource;

    status_t err = omx->createInputSurface(&bufferProducer, &bufferSource);

    if (err != OK) {
        ALOGE("Failed to create persistent input surface.");
        return NULL;
    }

    return new PersistentSurface(bufferProducer, bufferSource);
}

// GenerateCodecId generates a 64bit Random ID for each codec that is created.
// The Codec ID is generated as:
//   - A process-unique random high 32bits
//   - An atomic sequence low 32bits
//
static uint64_t GenerateCodecId() {
    static std::atomic_uint64_t sId = [] {
        std::random_device rd;
        std::mt19937 gen(rd());
        std::uniform_int_distribution<uint32_t> distrib(0, UINT32_MAX);
        uint32_t randomID = distrib(gen);
        uint64_t id = randomID;
        return id << 32;
    }();
    return sId++;
}

MediaCodec::MediaCodec(
        const sp<ALooper> &looper, pid_t pid, uid_t uid,
        std::function<sp<CodecBase>(const AString &, const char *)> getCodecBase,
        std::function<status_t(const AString &, sp<MediaCodecInfo> *)> getCodecInfo)
    : mState(UNINITIALIZED),
      mReleasedByResourceManager(false),
      mLooper(looper),
      mCodec(NULL),
      mReplyID(0),
      mFlags(0),
      mStickyError(OK),
      mSoftRenderer(NULL),
      mDomain(DOMAIN_UNKNOWN),
      mWidth(0),
      mHeight(0),
      mRotationDegrees(0),
      mDequeueInputTimeoutGeneration(0),
      mDequeueInputReplyID(0),
      mDequeueOutputTimeoutGeneration(0),
      mDequeueOutputReplyID(0),
      mTunneledInputWidth(0),
      mTunneledInputHeight(0),
      mTunneled(false),
      mTunnelPeekState(TunnelPeekState::kLegacyMode),
      mHaveInputSurface(false),
      mHavePendingInputBuffers(false),
      mCpuBoostRequested(false),
      mIsSurfaceToDisplay(false),
      mAreRenderMetricsEnabled(areRenderMetricsEnabled()),
      mVideoRenderQualityTracker(
              VideoRenderQualityTracker::Configuration::getFromServerConfigurableFlags(
                      GetServerConfigurableFlag)),
      mLatencyUnknown(0),
      mBytesEncoded(0),
      mEarliestEncodedPtsUs(INT64_MAX),
      mLatestEncodedPtsUs(INT64_MIN),
      mFramesEncoded(0),
      mNumLowLatencyEnables(0),
      mNumLowLatencyDisables(0),
      mIsLowLatencyModeOn(false),
      mIndexOfFirstFrameWhenLowLatencyOn(-1),
      mInputBufferCounter(0),
      mGetCodecBase(getCodecBase),
      mGetCodecInfo(getCodecInfo) {
    mCodecId = GenerateCodecId();
    mResourceManagerProxy = new ResourceManagerServiceProxy(pid, uid,
            ::ndk::SharedRefBase::make<ResourceManagerClient>(this, pid, uid));
    if (!mGetCodecBase) {
        mGetCodecBase = [](const AString &name, const char *owner) {
            return GetCodecBase(name, owner);
        };
    }
    if (!mGetCodecInfo) {
        mGetCodecInfo = [&log = mErrorLog](const AString &name,
                                           sp<MediaCodecInfo> *info) -> status_t {
            *info = nullptr;
            const sp<IMediaCodecList> mcl = MediaCodecList::getInstance();
            if (!mcl) {
                log.log(LOG_TAG, "Fatal error: failed to initialize MediaCodecList");
                return NO_INIT;  // if called from Java should raise IOException
            }
            AString tmp = name;
            if (tmp.endsWith(".secure")) {
                tmp.erase(tmp.size() - 7, 7);
            }
            for (const AString &codecName : { name, tmp }) {
                ssize_t codecIdx = mcl->findCodecByName(codecName.c_str());
                if (codecIdx < 0) {
                    continue;
                }
                *info = mcl->getCodecInfo(codecIdx);
                return OK;
            }
            log.log(LOG_TAG, base::StringPrintf("Codec with name '%s' is not found on the device.",
                    name.c_str()));
            return NAME_NOT_FOUND;
        };
    }

    // we want an empty metrics record for any early getMetrics() call
    // this should be the *only* initMediametrics() call that's not on the Looper thread
    initMediametrics();
}

MediaCodec::~MediaCodec() {
    CHECK_EQ(mState, UNINITIALIZED);
    mResourceManagerProxy->removeClient();

    flushMediametrics();

    // clean any saved metrics info we stored as part of configure()
    if (mConfigureMsg != nullptr) {
        mediametrics_handle_t metricsHandle;
        if (mConfigureMsg->findInt64("metrics", &metricsHandle)) {
            mediametrics_delete(metricsHandle);
        }
    }
}

// except for in constructor, called from the looper thread (and therefore mutexed)
void MediaCodec::initMediametrics() {
    if (mMetricsHandle == 0) {
        mMetricsHandle = mediametrics_create(kCodecKeyName);
    }

    mLatencyHist.setup(kLatencyHistBuckets, kLatencyHistWidth, kLatencyHistFloor);

    {
        Mutex::Autolock al(mRecentLock);
        for (int i = 0; i < kRecentLatencyFrames; i++) {
            mRecentSamples[i] = kRecentSampleInvalid;
        }
        mRecentHead = 0;
    }

    {
        Mutex::Autolock al(mLatencyLock);
        mBuffersInFlight.clear();
        mNumLowLatencyEnables = 0;
        mNumLowLatencyDisables = 0;
        mIsLowLatencyModeOn = false;
        mIndexOfFirstFrameWhenLowLatencyOn = -1;
        mInputBufferCounter = 0;
    }

    mLifetimeStartNs = systemTime(SYSTEM_TIME_MONOTONIC);
    resetMetricsFields();
}

void MediaCodec::resetMetricsFields() {
    mHdrInfoFlags = 0;

    mApiUsageMetrics = ApiUsageMetrics();
    mReliabilityContextMetrics = ReliabilityContextMetrics();
}

void MediaCodec::updateMediametrics() {
    if (mMetricsHandle == 0) {
        ALOGW("no metrics handle found");
        return;
    }

    Mutex::Autolock _lock(mMetricsLock);

    mediametrics_setInt32(mMetricsHandle, kCodecArrayMode, mApiUsageMetrics.isArrayMode ? 1 : 0);
    mApiUsageMetrics.operationMode = (mFlags & kFlagIsAsync) ?
            ((mFlags & kFlagUseBlockModel) ? ApiUsageMetrics::kBlockMode
                    : ApiUsageMetrics::kAsynchronousMode)
            : ApiUsageMetrics::kSynchronousMode;
    mediametrics_setInt32(mMetricsHandle, kCodecOperationMode, mApiUsageMetrics.operationMode);
    mediametrics_setInt32(mMetricsHandle, kCodecOutputSurface,
            mApiUsageMetrics.isUsingOutputSurface ? 1 : 0);

    mediametrics_setInt32(mMetricsHandle, kCodecAppMaxInputSize,
            mApiUsageMetrics.inputBufferSize.appMax);
    mediametrics_setInt32(mMetricsHandle, kCodecUsedMaxInputSize,
            mApiUsageMetrics.inputBufferSize.usedMax);
    mediametrics_setInt32(mMetricsHandle, kCodecCodecMaxInputSize,
            mApiUsageMetrics.inputBufferSize.codecMax);

    mediametrics_setInt32(mMetricsHandle, kCodecFlushCount, mReliabilityContextMetrics.flushCount);
    mediametrics_setInt32(mMetricsHandle, kCodecSetSurfaceCount,
            mReliabilityContextMetrics.setOutputSurfaceCount);
    mediametrics_setInt32(mMetricsHandle, kCodecResolutionChangeCount,
            mReliabilityContextMetrics.resolutionChangeCount);

    // Video rendering quality metrics
    {
        const VideoRenderQualityMetrics &m = mVideoRenderQualityTracker.getMetrics();
        if (m.frameReleasedCount > 0) {
            mediametrics_setInt64(mMetricsHandle, kCodecFirstRenderTimeUs, m.firstRenderTimeUs);
            mediametrics_setInt64(mMetricsHandle, kCodecLastRenderTimeUs, m.lastRenderTimeUs);
            mediametrics_setInt64(mMetricsHandle, kCodecFramesReleased, m.frameReleasedCount);
            mediametrics_setInt64(mMetricsHandle, kCodecFramesRendered, m.frameRenderedCount);
            mediametrics_setInt64(mMetricsHandle, kCodecFramesSkipped, m.frameSkippedCount);
            mediametrics_setInt64(mMetricsHandle, kCodecFramesDropped, m.frameDroppedCount);
            mediametrics_setDouble(mMetricsHandle, kCodecFramerateContent, m.contentFrameRate);
            mediametrics_setDouble(mMetricsHandle, kCodecFramerateDesired, m.desiredFrameRate);
            mediametrics_setDouble(mMetricsHandle, kCodecFramerateActual, m.actualFrameRate);
        }
        if (m.freezeDurationMsHistogram.getCount() >= 1) {
            const MediaHistogram<int32_t> &h = m.freezeDurationMsHistogram;
            mediametrics_setInt64(mMetricsHandle, kCodecFreezeScore, m.freezeScore);
            mediametrics_setDouble(mMetricsHandle, kCodecFreezeRate, m.freezeRate);
            mediametrics_setInt64(mMetricsHandle, kCodecFreezeCount, h.getCount());
            mediametrics_setInt32(mMetricsHandle, kCodecFreezeDurationMsAvg, h.getAvg());
            mediametrics_setInt32(mMetricsHandle, kCodecFreezeDurationMsMax, h.getMax());
            mediametrics_setString(mMetricsHandle, kCodecFreezeDurationMsHistogram, h.emit());
            mediametrics_setString(mMetricsHandle, kCodecFreezeDurationMsHistogramBuckets,
                    h.emitBuckets());
        }
        if (m.freezeDistanceMsHistogram.getCount() >= 1) {
            const MediaHistogram<int32_t> &h = m.freezeDistanceMsHistogram;
            mediametrics_setInt32(mMetricsHandle, kCodecFreezeDistanceMsAvg, h.getAvg());
            mediametrics_setString(mMetricsHandle, kCodecFreezeDistanceMsHistogram, h.emit());
            mediametrics_setString(mMetricsHandle, kCodecFreezeDistanceMsHistogramBuckets,
                    h.emitBuckets());
        }
        if (m.judderScoreHistogram.getCount() >= 1) {
            const MediaHistogram<int32_t> &h = m.judderScoreHistogram;
            mediametrics_setInt64(mMetricsHandle, kCodecJudderScore, m.judderScore);
            mediametrics_setDouble(mMetricsHandle, kCodecJudderRate, m.judderRate);
            mediametrics_setInt64(mMetricsHandle, kCodecJudderCount, h.getCount());
            mediametrics_setInt32(mMetricsHandle, kCodecJudderScoreAvg, h.getAvg());
            mediametrics_setInt32(mMetricsHandle, kCodecJudderScoreMax, h.getMax());
            mediametrics_setString(mMetricsHandle, kCodecJudderScoreHistogram, h.emit());
            mediametrics_setString(mMetricsHandle, kCodecJudderScoreHistogramBuckets,
                    h.emitBuckets());
        }
        if (m.freezeEventCount != 0) {
            mediametrics_setInt32(mMetricsHandle, kCodecFreezeEventCount, m.freezeEventCount);
        }
        if (m.judderEventCount != 0) {
            mediametrics_setInt32(mMetricsHandle, kCodecJudderEventCount, m.judderEventCount);
        }
    }

    if (mLatencyHist.getCount() != 0) {
        mediametrics_setInt64(mMetricsHandle, kCodecLatencyMax, mLatencyHist.getMax());
        mediametrics_setInt64(mMetricsHandle, kCodecLatencyMin, mLatencyHist.getMin());
        mediametrics_setInt64(mMetricsHandle, kCodecLatencyAvg, mLatencyHist.getAvg());
        mediametrics_setInt64(mMetricsHandle, kCodecLatencyCount, mLatencyHist.getCount());

        if (kEmitHistogram) {
            // and the histogram itself
            std::string hist = mLatencyHist.emit();
            mediametrics_setCString(mMetricsHandle, kCodecLatencyHist, hist.c_str());
        }
    }
    if (mLatencyUnknown > 0) {
        mediametrics_setInt64(mMetricsHandle, kCodecLatencyUnknown, mLatencyUnknown);
    }
    int64_t playbackDurationSec = mPlaybackDurationAccumulator.getDurationInSeconds();
    if (playbackDurationSec > 0) {
        mediametrics_setInt64(mMetricsHandle, kCodecPlaybackDurationSec, playbackDurationSec);
    }
    if (mLifetimeStartNs > 0) {
        nsecs_t lifetime = systemTime(SYSTEM_TIME_MONOTONIC) - mLifetimeStartNs;
        lifetime = lifetime / (1000 * 1000);  // emitted in ms, truncated not rounded
        mediametrics_setInt64(mMetricsHandle, kCodecLifetimeMs, lifetime);
    }

    if (mBytesEncoded) {
        Mutex::Autolock al(mOutputStatsLock);

        mediametrics_setInt64(mMetricsHandle, kCodecVideoEncodedBytes, mBytesEncoded);
        int64_t duration = 0;
        if (mLatestEncodedPtsUs > mEarliestEncodedPtsUs) {
            duration = mLatestEncodedPtsUs - mEarliestEncodedPtsUs;
        }
        mediametrics_setInt64(mMetricsHandle, kCodecVideoEncodedDurationUs, duration);
        mediametrics_setInt64(mMetricsHandle, kCodecVideoEncodedFrames, mFramesEncoded);
        mediametrics_setInt64(mMetricsHandle, kCodecVideoInputFrames, mFramesInput);
        mediametrics_setInt64(mMetricsHandle, kCodecVideoInputBytes, mBytesInput);
    }

    {
        Mutex::Autolock al(mLatencyLock);
        mediametrics_setInt64(mMetricsHandle, kCodecNumLowLatencyModeOn, mNumLowLatencyEnables);
        mediametrics_setInt64(mMetricsHandle, kCodecNumLowLatencyModeOff, mNumLowLatencyDisables);
        mediametrics_setInt64(mMetricsHandle, kCodecFirstFrameIndexLowLatencyModeOn,
                mIndexOfFirstFrameWhenLowLatencyOn);
    }

#if 0
    // enable for short term, only while debugging
    updateEphemeralMediametrics(mMetricsHandle);
#endif
}

void MediaCodec::updateHdrMetrics(bool isConfig) {
    if ((mDomain != DOMAIN_VIDEO && mDomain != DOMAIN_IMAGE) || mMetricsHandle == 0) {
        return;
    }

    int32_t colorStandard = -1;
    if (mOutputFormat->findInt32(KEY_COLOR_STANDARD, &colorStandard)) {
        mediametrics_setInt32(mMetricsHandle,
                isConfig ? kCodecConfigColorStandard : kCodecParsedColorStandard, colorStandard);
    }
    int32_t colorRange = -1;
    if (mOutputFormat->findInt32(KEY_COLOR_RANGE, &colorRange)) {
        mediametrics_setInt32(mMetricsHandle,
                isConfig ? kCodecConfigColorRange : kCodecParsedColorRange, colorRange);
    }
    int32_t colorTransfer = -1;
    if (mOutputFormat->findInt32(KEY_COLOR_TRANSFER, &colorTransfer)) {
        mediametrics_setInt32(mMetricsHandle,
                isConfig ? kCodecConfigColorTransfer : kCodecParsedColorTransfer, colorTransfer);
    }
    HDRStaticInfo info;
    if (ColorUtils::getHDRStaticInfoFromFormat(mOutputFormat, &info)
            && ColorUtils::isHDRStaticInfoValid(&info)) {
        mHdrInfoFlags |= kFlagHasHdrStaticInfo;
    }
    mediametrics_setInt32(mMetricsHandle, kCodecHdrStaticInfo,
            (mHdrInfoFlags & kFlagHasHdrStaticInfo) ? 1 : 0);
    sp<ABuffer> hdr10PlusInfo;
    if (mOutputFormat->findBuffer("hdr10-plus-info", &hdr10PlusInfo)
            && hdr10PlusInfo != nullptr && hdr10PlusInfo->size() > 0) {
        mHdrInfoFlags |= kFlagHasHdr10PlusInfo;
    }
    mediametrics_setInt32(mMetricsHandle, kCodecHdr10PlusInfo,
            (mHdrInfoFlags & kFlagHasHdr10PlusInfo) ? 1 : 0);

    // hdr format
    sp<AMessage> codedFormat = (mFlags & kFlagIsEncoder) ? mOutputFormat : mInputFormat;

    AString mime;
    int32_t profile = -1;

    if (codedFormat->findString("mime", &mime)
            && codedFormat->findInt32(KEY_PROFILE, &profile)
            && colorTransfer != -1) {
        hdr_format hdrFormat = getHdrFormat(mime, profile, colorTransfer);
        mediametrics_setInt32(mMetricsHandle, kCodecHdrFormat, static_cast<int>(hdrFormat));
    }
}
1335
1336 hdr_format MediaCodec::getHdrFormat(const AString &mime, const int32_t profile,
1337 const int32_t colorTransfer) {
1338 return (mFlags & kFlagIsEncoder)
1339 ? getHdrFormatForEncoder(mime, profile, colorTransfer)
1340 : getHdrFormatForDecoder(mime, profile, colorTransfer);
1341 }
1342
1343 hdr_format MediaCodec::getHdrFormatForEncoder(const AString &mime, const int32_t profile,
1344 const int32_t colorTransfer) {
1345 switch (colorTransfer) {
1346 case COLOR_TRANSFER_ST2084:
1347 if (mime.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_VP9)) {
1348 switch (profile) {
1349 case VP9Profile2HDR:
1350 return HDR_FORMAT_HDR10;
1351 case VP9Profile2HDR10Plus:
1352 return HDR_FORMAT_HDR10PLUS;
1353 default:
1354 return HDR_FORMAT_NONE;
1355 }
1356 } else if (mime.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_AV1)) {
1357 switch (profile) {
1358 case AV1ProfileMain10HDR10:
1359 return HDR_FORMAT_HDR10;
1360 case AV1ProfileMain10HDR10Plus:
1361 return HDR_FORMAT_HDR10PLUS;
1362 default:
1363 return HDR_FORMAT_NONE;
1364 }
1365 } else if (mime.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_HEVC)) {
1366 switch (profile) {
1367 case HEVCProfileMain10HDR10:
1368 return HDR_FORMAT_HDR10;
1369 case HEVCProfileMain10HDR10Plus:
1370 return HDR_FORMAT_HDR10PLUS;
1371 default:
1372 return HDR_FORMAT_NONE;
1373 }
1374 } else {
1375 return HDR_FORMAT_NONE;
1376 }
1377 case COLOR_TRANSFER_HLG:
1378 if (!mime.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_DOLBY_VISION)) {
1379 return HDR_FORMAT_HLG;
1380 } else {
1381 // TODO: DOLBY format
1382 return HDR_FORMAT_NONE;
1383 }
1384 default:
1385 return HDR_FORMAT_NONE;
1386 }
1387 }
1388
1389 hdr_format MediaCodec::getHdrFormatForDecoder(const AString &mime, const int32_t profile,
1390 const int32_t colorTransfer) {
1391 switch (colorTransfer) {
1392 case COLOR_TRANSFER_ST2084:
1393 if (!(mHdrInfoFlags & kFlagHasHdrStaticInfo) || !profileSupport10Bits(mime, profile)) {
1394 return HDR_FORMAT_NONE;
1395 }
1396 return mHdrInfoFlags & kFlagHasHdr10PlusInfo ? HDR_FORMAT_HDR10PLUS : HDR_FORMAT_HDR10;
1397 case COLOR_TRANSFER_HLG:
1398 if (!mime.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_DOLBY_VISION)) {
1399 return HDR_FORMAT_HLG;
1400 }
1401 // TODO: DOLBY format
1402 }
1403 return HDR_FORMAT_NONE;
1404 }
1405
1406 bool MediaCodec::profileSupport10Bits(const AString &mime, const int32_t profile) {
1407 if (mime.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_AV1)) {
1408 return true;
1409 } else if (mime.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_VP9)) {
1410 switch (profile) {
1411 case VP9Profile2:
1412 case VP9Profile3:
1413 case VP9Profile2HDR:
1414 case VP9Profile3HDR:
1415 case VP9Profile2HDR10Plus:
1416 case VP9Profile3HDR10Plus:
1417 return true;
1418 }
1419 } else if (mime.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_HEVC)) {
1420 switch (profile) {
1421 case HEVCProfileMain10:
1422 case HEVCProfileMain10HDR10:
1423 case HEVCProfileMain10HDR10Plus:
1424 return true;
1425 }
1426 }
1427 return false;
1428 }
1429
1430
1431 // called to update info being passed back via getMetrics(), which is a
1432 // unique copy for that call, no concurrent access worries.
1433 void MediaCodec::updateEphemeralMediametrics(mediametrics_handle_t item) {
1434 ALOGD("MediaCodec::updateEphemeralMediametrics()");
1435
1436 if (item == 0) {
1437 return;
1438 }
1439
1440 // build an empty histogram
1441 MediaHistogram<int64_t> recentHist;
1442 recentHist.setup(kLatencyHistBuckets, kLatencyHistWidth, kLatencyHistFloor);
1443
1444 // stuff it with the samples in the ring buffer
1445 {
1446 Mutex::Autolock al(mRecentLock);
1447
1448 for (int i = 0; i < kRecentLatencyFrames; i++) {
1449 if (mRecentSamples[i] != kRecentSampleInvalid) {
1450 recentHist.insert(mRecentSamples[i]);
1451 }
1452 }
1453 }
1454
1455 // spit the data (if any) into the supplied analytics record
1456 if (recentHist.getCount() != 0 ) {
1457 mediametrics_setInt64(item, kCodecRecentLatencyMax, recentHist.getMax());
1458 mediametrics_setInt64(item, kCodecRecentLatencyMin, recentHist.getMin());
1459 mediametrics_setInt64(item, kCodecRecentLatencyAvg, recentHist.getAvg());
1460 mediametrics_setInt64(item, kCodecRecentLatencyCount, recentHist.getCount());
1461
1462 if (kEmitHistogram) {
1463 // and the histogram itself
1464 std::string hist = recentHist.emit();
1465 mediametrics_setCString(item, kCodecRecentLatencyHist, hist.c_str());
1466 }
1467 }
1468 }
1469
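// Serializes a vector of ints as a comma-separated string for a mediametrics field,
// e.g. {10, 32, 7} becomes "10,32,7".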
1470 static std::string emitVector(std::vector<int32_t> vector) {
1471 std::ostringstream sstr;
1472 for (size_t i = 0; i < vector.size(); ++i) {
1473 if (i != 0) {
1474 sstr << ',';
1475 }
1476 sstr << vector[i];
1477 }
1478 return sstr.str();
1479 }
1480
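// Freeze and judder events are uploaded as standalone mediametrics records, separate from
// the per-codec record; per-event averages are derived here from the accumulated sums and
// counts.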
1481 static void reportToMediaMetricsIfValid(const FreezeEvent &e) {
1482 if (e.valid) {
1483 mediametrics_handle_t handle = mediametrics_create(kFreezeEventKeyName);
1484 mediametrics_setInt64(handle, kFreezeEventInitialTimeUs, e.initialTimeUs);
1485 mediametrics_setInt32(handle, kFreezeEventDurationMs, e.durationMs);
1486 mediametrics_setInt64(handle, kFreezeEventCount, e.count);
1487 mediametrics_setInt32(handle, kFreezeEventAvgDurationMs, e.sumDurationMs / e.count);
1488 mediametrics_setInt32(handle, kFreezeEventAvgDistanceMs, e.sumDistanceMs / e.count);
1489 mediametrics_setString(handle, kFreezeEventDetailsDurationMs,
1490 emitVector(e.details.durationMs));
1491 mediametrics_setString(handle, kFreezeEventDetailsDistanceMs,
1492 emitVector(e.details.distanceMs));
1493 mediametrics_selfRecord(handle);
1494 mediametrics_delete(handle);
1495 }
1496 }
1497
1498 static void reportToMediaMetricsIfValid(const JudderEvent &e) {
1499 if (e.valid) {
1500 mediametrics_handle_t handle = mediametrics_create(kJudderEventKeyName);
1501 mediametrics_setInt64(handle, kJudderEventInitialTimeUs, e.initialTimeUs);
1502 mediametrics_setInt32(handle, kJudderEventDurationMs, e.durationMs);
1503 mediametrics_setInt64(handle, kJudderEventCount, e.count);
1504 mediametrics_setInt32(handle, kJudderEventAvgScore, e.sumScore / e.count);
1505 mediametrics_setInt32(handle, kJudderEventAvgDistanceMs, e.sumDistanceMs / e.count);
1506 mediametrics_setString(handle, kJudderEventDetailsActualDurationUs,
1507 emitVector(e.details.actualRenderDurationUs));
1508 mediametrics_setString(handle, kJudderEventDetailsContentDurationUs,
1509 emitVector(e.details.contentRenderDurationUs));
1510 mediametrics_setString(handle, kJudderEventDetailsDistanceMs,
1511 emitVector(e.details.distanceMs));
1512 mediametrics_selfRecord(handle);
1513 mediametrics_delete(handle);
1514 }
1515 }
1516
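// Finalizes the per-codec metrics record: refreshes the values, uploads the record if
// anything is pending, and releases the handle so the next configure() starts a fresh one.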
1517 void MediaCodec::flushMediametrics() {
1518 ALOGD("flushMediametrics");
1519
1520 // update does its own mutex locking
1521 updateMediametrics();
1522 resetMetricsFields();
1523
1524 // ensure mutex while we do our own work
1525 Mutex::Autolock _lock(mMetricsLock);
1526 if (mMetricsHandle != 0) {
1527 if (mMetricsToUpload && mediametrics_count(mMetricsHandle) > 0) {
1528 mediametrics_selfRecord(mMetricsHandle);
1529 }
1530 mediametrics_delete(mMetricsHandle);
1531 mMetricsHandle = 0;
1532 }
1533 // we no longer have anything pending upload
1534 mMetricsToUpload = false;
1535
1536 // Freeze and judder events are reported separately
1537 reportToMediaMetricsIfValid(mVideoRenderQualityTracker.getAndResetFreezeEvent());
1538 reportToMediaMetricsIfValid(mVideoRenderQualityTracker.getAndResetJudderEvent());
1539 }
1540
1541 void MediaCodec::updateLowLatency(const sp<AMessage> &msg) {
1542 int32_t lowLatency = 0;
1543 if (msg->findInt32("low-latency", &lowLatency)) {
1544 Mutex::Autolock al(mLatencyLock);
1545 if (lowLatency > 0) {
1546 ++mNumLowLatencyEnables;
1547 // This is just an estimate since low latency mode change happens ONLY at key frame
1548 mIsLowLatencyModeOn = true;
1549 } else if (lowLatency == 0) {
1550 ++mNumLowLatencyDisables;
1551 // This is just an estimate since low latency mode change happens ONLY at key frame
1552 mIsLowLatencyModeOn = false;
1553 }
1554 }
1555 }
1556
1557 constexpr const char *MediaCodec::asString(TunnelPeekState state, const char *default_string) {
1558 switch(state) {
1559 case TunnelPeekState::kLegacyMode:
1560 return "LegacyMode";
1561 case TunnelPeekState::kEnabledNoBuffer:
1562 return "EnabledNoBuffer";
1563 case TunnelPeekState::kDisabledNoBuffer:
1564 return "DisabledNoBuffer";
1565 case TunnelPeekState::kBufferDecoded:
1566 return "BufferDecoded";
1567 case TunnelPeekState::kBufferRendered:
1568 return "BufferRendered";
1569 case TunnelPeekState::kDisabledQueued:
1570 return "DisabledQueued";
1571 case TunnelPeekState::kEnabledQueued:
1572 return "EnabledQueued";
1573 default:
1574 return default_string;
1575 }
1576 }
1577
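// Drives the TunnelPeekState machine from the "tunnel-peek" parameter. Enabling peek while
// a buffer has already been decoded triggers an immediate render of that buffer via the
// "android._trigger-tunnel-peek" parameter.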
1578 void MediaCodec::updateTunnelPeek(const sp<AMessage> &msg) {
1579 int32_t tunnelPeek = 0;
1580 if (!msg->findInt32("tunnel-peek", &tunnelPeek)){
1581 return;
1582 }
1583
1584 TunnelPeekState previousState = mTunnelPeekState;
1585 if(tunnelPeek == 0){
1586 switch (mTunnelPeekState) {
1587 case TunnelPeekState::kLegacyMode:
1588 msg->setInt32("android._tunnel-peek-set-legacy", 0);
1589 [[fallthrough]];
1590 case TunnelPeekState::kEnabledNoBuffer:
1591 mTunnelPeekState = TunnelPeekState::kDisabledNoBuffer;
1592 break;
1593 case TunnelPeekState::kEnabledQueued:
1594 mTunnelPeekState = TunnelPeekState::kDisabledQueued;
1595 break;
1596 default:
1597 ALOGV("Ignoring tunnel-peek=%d for %s", tunnelPeek, asString(mTunnelPeekState));
1598 return;
1599 }
1600 } else {
1601 switch (mTunnelPeekState) {
1602 case TunnelPeekState::kLegacyMode:
1603 msg->setInt32("android._tunnel-peek-set-legacy", 0);
1604 [[fallthrough]];
1605 case TunnelPeekState::kDisabledNoBuffer:
1606 mTunnelPeekState = TunnelPeekState::kEnabledNoBuffer;
1607 break;
1608 case TunnelPeekState::kDisabledQueued:
1609 mTunnelPeekState = TunnelPeekState::kEnabledQueued;
1610 break;
1611 case TunnelPeekState::kBufferDecoded:
1612 msg->setInt32("android._trigger-tunnel-peek", 1);
1613 mTunnelPeekState = TunnelPeekState::kBufferRendered;
1614 break;
1615 default:
1616 ALOGV("Ignoring tunnel-peek=%d for %s", tunnelPeek, asString(mTunnelPeekState));
1617 return;
1618 }
1619 }
1620
1621 ALOGV("TunnelPeekState: %s -> %s", asString(previousState), asString(mTunnelPeekState));
1622 }
1623
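// Handles kWhatOutputFramesRendered: for surface output, each rendered frame feeds the
// playback-duration accumulator and the video render-quality tracker, which may emit
// freeze/judder events that are reported immediately.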
1624 void MediaCodec::processRenderedFrames(const sp<AMessage> &msg) {
1625 int what = 0;
1626 msg->findInt32("what", &what);
1627 if (msg->what() != kWhatCodecNotify && what != kWhatOutputFramesRendered) {
1628 static bool logged = false;
1629 if (!logged) {
1630 logged = true;
1631 ALOGE("processRenderedFrames: expected kWhatOutputFramesRendered (%d)", msg->what());
1632 }
1633 return;
1634 }
1635 // Rendered frames only matter if they're being sent to the display
1636 if (mIsSurfaceToDisplay) {
1637 int64_t renderTimeNs;
1638 for (size_t index = 0;
1639 msg->findInt64(AStringPrintf("%zu-system-nano", index).c_str(), &renderTimeNs);
1640 index++) {
1641 // Capture metrics for playback duration
1642 mPlaybackDurationAccumulator.onFrameRendered(renderTimeNs);
1643 // Capture metrics for quality
1644 int64_t mediaTimeUs = 0;
1645 if (!msg->findInt64(AStringPrintf("%zu-media-time-us", index).c_str(), &mediaTimeUs)) {
1646 ALOGE("processRenderedFrames: no media time found");
1647 continue;
1648 }
1649 // Tunneled frames use INT64_MAX to indicate end-of-stream, so don't report it as a
1650 // rendered frame.
1651 if (!mTunneled || mediaTimeUs != INT64_MAX) {
1652 FreezeEvent freezeEvent;
1653 JudderEvent judderEvent;
1654 mVideoRenderQualityTracker.onFrameRendered(mediaTimeUs, renderTimeNs, &freezeEvent,
1655 &judderEvent);
1656 reportToMediaMetricsIfValid(freezeEvent);
1657 reportToMediaMetricsIfValid(judderEvent);
1658 }
1659 }
1660 }
1661 }
1662
1663 // when we send a buffer to the codec;
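// record the enqueue time so that statsBufferReceived() can later compute the per-frame
// round-trip latency through the codec.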
1664 void MediaCodec::statsBufferSent(int64_t presentationUs, const sp<MediaCodecBuffer> &buffer) {
1665
1666 // only enqueue if we have a legitimate time
1667 if (presentationUs <= 0) {
1668 ALOGV("presentation time: %" PRId64, presentationUs);
1669 return;
1670 }
1671
1672 if (mBatteryChecker != nullptr) {
1673 mBatteryChecker->onCodecActivity([this] () {
1674 mResourceManagerProxy->addResource(MediaResource::VideoBatteryResource());
1675 });
1676 }
1677
1678 if (mDomain == DOMAIN_VIDEO && (mFlags & kFlagIsEncoder)) {
1679 mBytesInput += buffer->size();
1680 mFramesInput++;
1681 }
1682
1683 const int64_t nowNs = systemTime(SYSTEM_TIME_MONOTONIC);
1684 BufferFlightTiming_t startdata = { presentationUs, nowNs };
1685
1686 {
1687 // mutex access to mBuffersInFlight and other stats
1688 Mutex::Autolock al(mLatencyLock);
1689
1690
1691 // XXX: we *could* make sure that the time is later than the end of queue
1692 // as part of a consistency check...
1693 mBuffersInFlight.push_back(startdata);
1694
1695 if (mIsLowLatencyModeOn && mIndexOfFirstFrameWhenLowLatencyOn < 0) {
1696 mIndexOfFirstFrameWhenLowLatencyOn = mInputBufferCounter;
1697 }
1698 ++mInputBufferCounter;
1699 }
1700 }
1701
1702 // when we get a buffer back from the codec
1703 void MediaCodec::statsBufferReceived(int64_t presentationUs, const sp<MediaCodecBuffer> &buffer) {
1704
1705 CHECK_NE(mState, UNINITIALIZED);
1706
1707 if (mDomain == DOMAIN_VIDEO && (mFlags & kFlagIsEncoder)) {
1708 int32_t flags = 0;
1709 (void) buffer->meta()->findInt32("flags", &flags);
1710
1711 // some of these frames, we don't want to count
1712 // standalone EOS.... has an invalid timestamp
1713 if ((flags & (BUFFER_FLAG_CODECCONFIG|BUFFER_FLAG_EOS)) == 0) {
1714 mBytesEncoded += buffer->size();
1715 mFramesEncoded++;
1716
1717 Mutex::Autolock al(mOutputStatsLock);
1718 int64_t timeUs = 0;
1719 if (buffer->meta()->findInt64("timeUs", &timeUs)) {
1720 if (timeUs > mLatestEncodedPtsUs) {
1721 mLatestEncodedPtsUs = timeUs;
1722 }
1723 // can't chain as an else-if or this never triggers
1724 if (timeUs < mEarliestEncodedPtsUs) {
1725 mEarliestEncodedPtsUs = timeUs;
1726 }
1727 }
1728 }
1729 }
1730
1731 // mutex access to mBuffersInFlight and other stats
1732 Mutex::Autolock al(mLatencyLock);
1733
1734 // how long this buffer took for the round trip through the codec
1735 // NB: pipelining can/will make these times larger. e.g., if each packet
1736 // is always 2 msec and we have 3 in flight at any given time, we're going to
1737 // see "6 msec" as an answer.
1738
1739 // ignore stuff with no presentation time
1740 if (presentationUs <= 0) {
1741 ALOGV("-- returned buffer timestamp %" PRId64 " <= 0, ignore it", presentationUs);
1742 mLatencyUnknown++;
1743 return;
1744 }
1745
1746 if (mBatteryChecker != nullptr) {
1747 mBatteryChecker->onCodecActivity([this] () {
1748 mResourceManagerProxy->addResource(MediaResource::VideoBatteryResource());
1749 });
1750 }
1751
1752 BufferFlightTiming_t startdata;
1753 bool valid = false;
1754 while (mBuffersInFlight.size() > 0) {
1755 startdata = *mBuffersInFlight.begin();
1756 ALOGV("-- Looking at startdata. presentation %" PRId64 ", start %" PRId64,
1757 startdata.presentationUs, startdata.startedNs);
1758 if (startdata.presentationUs == presentationUs) {
1759 // a match
1760 ALOGV("-- match entry for %" PRId64 ", hits our frame of %" PRId64,
1761 startdata.presentationUs, presentationUs);
1762 mBuffersInFlight.pop_front();
1763 valid = true;
1764 break;
1765 } else if (startdata.presentationUs < presentationUs) {
1766 // we must have missed the match for this, drop it and keep looking
1767 ALOGV("-- drop entry for %" PRId64 ", before our frame of %" PRId64,
1768 startdata.presentationUs, presentationUs);
1769 mBuffersInFlight.pop_front();
1770 continue;
1771 } else {
1772 // head is after, so we don't have a frame for ourselves
1773 ALOGV("-- found entry for %" PRId64 ", AFTER our frame of %" PRId64
1774 " we have nothing to pair with",
1775 startdata.presentationUs, presentationUs);
1776 mLatencyUnknown++;
1777 return;
1778 }
1779 }
1780 if (!valid) {
1781 ALOGV("-- empty queue, so ignore that.");
1782 mLatencyUnknown++;
1783 return;
1784 }
1785
1786 // now start our calculations
1787 const int64_t nowNs = systemTime(SYSTEM_TIME_MONOTONIC);
1788 int64_t latencyUs = (nowNs - startdata.startedNs + 500) / 1000;
1789
1790 mLatencyHist.insert(latencyUs);
1791
1792 // push into the recent samples
1793 {
1794 Mutex::Autolock al(mRecentLock);
1795
1796 if (mRecentHead >= kRecentLatencyFrames) {
1797 mRecentHead = 0;
1798 }
1799 mRecentSamples[mRecentHead++] = latencyUs;
1800 }
1801 }
1802
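// Returns true (and releases the buffer back to the codec) when the output buffer at
// |index| is flagged BUFFER_FLAG_DECODE_ONLY and therefore must not be surfaced to the
// client.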
1803 bool MediaCodec::discardDecodeOnlyOutputBuffer(size_t index) {
1804 Mutex::Autolock al(mBufferLock);
1805 BufferInfo *info = &mPortBuffers[kPortIndexOutput][index];
1806 sp<MediaCodecBuffer> buffer = info->mData;
1807 int32_t flags;
1808 CHECK(buffer->meta()->findInt32("flags", &flags));
1809 if (flags & BUFFER_FLAG_DECODE_ONLY) {
1810 info->mOwnedByClient = false;
1811 info->mData.clear();
1812 mBufferChannel->discardBuffer(buffer);
1813 return true;
1814 }
1815 return false;
1816 }
1817
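// Posts a message to the looper and blocks for the reply; an "err" field in the reply,
// if present, becomes the return value.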
1818 // static
1819 status_t MediaCodec::PostAndAwaitResponse(
1820 const sp<AMessage> &msg, sp<AMessage> *response) {
1821 status_t err = msg->postAndAwaitResponse(response);
1822
1823 if (err != OK) {
1824 return err;
1825 }
1826
1827 if (!(*response)->findInt32("err", &err)) {
1828 err = OK;
1829 }
1830
1831 return err;
1832 }
1833
1834 void MediaCodec::PostReplyWithError(const sp<AMessage> &msg, int32_t err) {
1835 sp<AReplyToken> replyID;
1836 CHECK(msg->senderAwaitsResponse(&replyID));
1837 PostReplyWithError(replyID, err);
1838 }
1839
1840 void MediaCodec::PostReplyWithError(const sp<AReplyToken> &replyID, int32_t err) {
1841 int32_t finalErr = err;
1842 if (mReleasedByResourceManager) {
1843 // override the err code if MediaCodec has been released by ResourceManager.
1844 finalErr = DEAD_OBJECT;
1845 }
1846
1847 sp<AMessage> response = new AMessage;
1848 response->setInt32("err", finalErr);
1849 response->postReply(replyID);
1850 }
1851
1852 static CodecBase *CreateCCodec() {
1853 return new CCodec;
1854 }
1855
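// Chooses the CodecBase implementation: ACodec for OMX components ("omx.*" or the
// "default" HAL), CCodec for Codec 2.0 components ("c2.*" or a "codec2" owner).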
1856 //static
1857 sp<CodecBase> MediaCodec::GetCodecBase(const AString &name, const char *owner) {
1858 if (owner) {
1859 if (strcmp(owner, "default") == 0) {
1860 return new ACodec;
1861 } else if (strncmp(owner, "codec2", 6) == 0) {
1862 return CreateCCodec();
1863 }
1864 }
1865
1866 if (name.startsWithIgnoreCase("c2.")) {
1867 return CreateCCodec();
1868 } else if (name.startsWithIgnoreCase("omx.")) {
1869 // at this time only ACodec specifies a mime type.
1870 return new ACodec;
1871 } else {
1872 return NULL;
1873 }
1874 }
1875
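// Snapshot of the codec list, built once on first use and then looked up by codec name.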
1876 struct CodecListCache {
1877 CodecListCache()
1878 : mCodecInfoMap{[] {
1879 const sp<IMediaCodecList> mcl = MediaCodecList::getInstance();
1880 size_t count = mcl->countCodecs();
1881 std::map<std::string, sp<MediaCodecInfo>> codecInfoMap;
1882 for (size_t i = 0; i < count; ++i) {
1883 sp<MediaCodecInfo> info = mcl->getCodecInfo(i);
1884 codecInfoMap.emplace(info->getCodecName(), info);
1885 }
1886 return codecInfoMap;
1887 }()} {
1888 }
1889
1890 const std::map<std::string, sp<MediaCodecInfo>> mCodecInfoMap;
1891 };
1892
1893 static const CodecListCache &GetCodecListCache() {
1894 static CodecListCache sCache{};
1895 return sCache;
1896 }
1897
1898 status_t MediaCodec::init(const AString &name) {
1899 status_t err = mResourceManagerProxy->init();
1900 if (err != OK) {
1901 mErrorLog.log(LOG_TAG, base::StringPrintf(
1902 "Fatal error: failed to initialize ResourceManager (err=%d)", err));
1903 mCodec = NULL; // remove the codec
1904 return err;
1905 }
1906
1907 // save init parameters for reset
1908 mInitName = name;
1909
1910 // Current video decoders do not return from OMX_FillThisBuffer
1911 // quickly, violating the OpenMAX specs, until that is remedied
1912 // we need to invest in an extra looper to free the main event
1913 // queue.
1914
1915 mCodecInfo.clear();
1916
1917 bool secureCodec = false;
1918 const char *owner = "";
1919 if (!name.startsWith("android.filter.")) {
1920 err = mGetCodecInfo(name, &mCodecInfo);
1921 if (err != OK) {
1922 mErrorLog.log(LOG_TAG, base::StringPrintf(
1923 "Getting codec info with name '%s' failed (err=%d)", name.c_str(), err));
1924 mCodec = NULL; // remove the codec.
1925 return err;
1926 }
1927 if (mCodecInfo == nullptr) {
1928 mErrorLog.log(LOG_TAG, base::StringPrintf(
1929 "Getting codec info with name '%s' failed", name.c_str()));
1930 return NAME_NOT_FOUND;
1931 }
1932 secureCodec = name.endsWith(".secure");
1933 Vector<AString> mediaTypes;
1934 mCodecInfo->getSupportedMediaTypes(&mediaTypes);
1935 for (size_t i = 0; i < mediaTypes.size(); ++i) {
1936 if (mediaTypes[i].startsWith("video/")) {
1937 mDomain = DOMAIN_VIDEO;
1938 break;
1939 } else if (mediaTypes[i].startsWith("audio/")) {
1940 mDomain = DOMAIN_AUDIO;
1941 break;
1942 } else if (mediaTypes[i].startsWith("image/")) {
1943 mDomain = DOMAIN_IMAGE;
1944 break;
1945 }
1946 }
1947 owner = mCodecInfo->getOwnerName();
1948 }
1949
1950 mCodec = mGetCodecBase(name, owner);
1951 if (mCodec == NULL) {
1952 mErrorLog.log(LOG_TAG, base::StringPrintf(
1953 "Getting codec base with name '%s' (from '%s' HAL) failed", name.c_str(), owner));
1954 return NAME_NOT_FOUND;
1955 }
1956
1957 if (mDomain == DOMAIN_VIDEO) {
1958 // video codec needs dedicated looper
1959 if (mCodecLooper == NULL) {
1960 status_t err = OK;
1961 mCodecLooper = new ALooper;
1962 mCodecLooper->setName("CodecLooper");
1963 err = mCodecLooper->start(false, false, ANDROID_PRIORITY_AUDIO);
1964 if (OK != err) {
1965 mErrorLog.log(LOG_TAG, "Fatal error: codec looper failed to start");
1966 return err;
1967 }
1968 }
1969
1970 mCodecLooper->registerHandler(mCodec);
1971 } else {
1972 mLooper->registerHandler(mCodec);
1973 }
1974
1975 mLooper->registerHandler(this);
1976
1977 mCodec->setCallback(
1978 std::unique_ptr<CodecBase::CodecCallback>(
1979 new CodecCallback(new AMessage(kWhatCodecNotify, this))));
1980 mBufferChannel = mCodec->getBufferChannel();
1981 mBufferChannel->setCallback(
1982 std::unique_ptr<CodecBase::BufferCallback>(
1983 new BufferCallback(new AMessage(kWhatCodecNotify, this))));
1984 sp<AMessage> msg = new AMessage(kWhatInit, this);
1985 if (mCodecInfo) {
1986 msg->setObject("codecInfo", mCodecInfo);
1987 // name may be different from mCodecInfo->getCodecName() if we stripped
1988 // ".secure"
1989 }
1990 msg->setString("name", name);
1991
1992 // initial naming setup covers the period before the first call to ::configure().
1993 // after that, we manage this through ::configure() and the setup message.
1994 if (mMetricsHandle != 0) {
1995 mediametrics_setCString(mMetricsHandle, kCodecCodec, name.c_str());
1996 mediametrics_setCString(mMetricsHandle, kCodecMode, toCodecMode(mDomain));
1997 }
1998
1999 if (mDomain == DOMAIN_VIDEO) {
2000 mBatteryChecker = new BatteryChecker(new AMessage(kWhatCheckBatteryStats, this));
2001 }
2002
2003 std::vector<MediaResourceParcel> resources;
2004 resources.push_back(MediaResource::CodecResource(secureCodec, toMediaResourceSubType(mDomain)));
2005
2006 // If the ComponentName is not set yet, use the name passed by the user.
2007 if (mComponentName.empty()) {
2008 mResourceManagerProxy->setCodecName(name.c_str());
2009 }
2010 for (int i = 0; i <= kMaxRetry; ++i) {
2011 if (i > 0) {
2012 // Don't try to reclaim resource for the first time.
2013 if (!mResourceManagerProxy->reclaimResource(resources)) {
2014 break;
2015 }
2016 }
2017
2018 sp<AMessage> response;
2019 err = PostAndAwaitResponse(msg, &response);
2020 if (!isResourceError(err)) {
2021 break;
2022 }
2023 }
2024
2025 if (OK == err) {
2026 // Notify the ResourceManager that, this codec has been created
2027 // (initialized) successfully.
2028 mResourceManagerProxy->notifyClientCreated();
2029 }
2030 return err;
2031 }
2032
2033 status_t MediaCodec::setCallback(const sp<AMessage> &callback) {
2034 sp<AMessage> msg = new AMessage(kWhatSetCallback, this);
2035 msg->setMessage("callback", callback);
2036
2037 sp<AMessage> response;
2038 return PostAndAwaitResponse(msg, &response);
2039 }
2040
2041 status_t MediaCodec::setOnFrameRenderedNotification(const sp<AMessage> &notify) {
2042 sp<AMessage> msg = new AMessage(kWhatSetNotification, this);
2043 msg->setMessage("on-frame-rendered", notify);
2044 return msg->post();
2045 }
2046
2047 status_t MediaCodec::setOnFirstTunnelFrameReadyNotification(const sp<AMessage> &notify) {
2048 sp<AMessage> msg = new AMessage(kWhatSetNotification, this);
2049 msg->setMessage("first-tunnel-frame-ready", notify);
2050 return msg->post();
2051 }
2052
2053 /*
2054 * MediaFormat Shaping forward declarations
2055 * including the property name we use for control.
2056 */
2057 static int enableMediaFormatShapingDefault = 1;
2058 static const char enableMediaFormatShapingProperty[] = "debug.stagefright.enableshaping";
2059 static void mapFormat(AString componentName, const sp<AMessage> &format, const char *kind,
2060 bool reverse);
2061
2062 status_t MediaCodec::configure(
2063 const sp<AMessage> &format,
2064 const sp<Surface> &nativeWindow,
2065 const sp<ICrypto> &crypto,
2066 uint32_t flags) {
2067 return configure(format, nativeWindow, crypto, NULL, flags);
2068 }
2069
2070 status_t MediaCodec::configure(
2071 const sp<AMessage> &format,
2072 const sp<Surface> &surface,
2073 const sp<ICrypto> &crypto,
2074 const sp<IDescrambler> &descrambler,
2075 uint32_t flags) {
2076
2077 sp<AMessage> msg = new AMessage(kWhatConfigure, this);
2078 mediametrics_handle_t nextMetricsHandle = mediametrics_create(kCodecKeyName);
2079
2080 // TODO: validity check log-session-id: it should be a 32-hex-digit.
2081 format->findString("log-session-id", &mLogSessionId);
2082
2083 if (nextMetricsHandle != 0) {
2084 mediametrics_setInt64(nextMetricsHandle, kCodecId, mCodecId);
2085 int32_t profile = 0;
2086 if (format->findInt32("profile", &profile)) {
2087 mediametrics_setInt32(nextMetricsHandle, kCodecProfile, profile);
2088 }
2089 int32_t level = 0;
2090 if (format->findInt32("level", &level)) {
2091 mediametrics_setInt32(nextMetricsHandle, kCodecLevel, level);
2092 }
2093 mediametrics_setInt32(nextMetricsHandle, kCodecEncoder,
2094 (flags & CONFIGURE_FLAG_ENCODE) ? 1 : 0);
2095
2096 mediametrics_setCString(nextMetricsHandle, kCodecLogSessionId, mLogSessionId.c_str());
2097
2098 // moved here from ::init()
2099 mediametrics_setCString(nextMetricsHandle, kCodecCodec, mInitName.c_str());
2100 mediametrics_setCString(nextMetricsHandle, kCodecMode, toCodecMode(mDomain));
2101 }
2102
2103 if (mDomain == DOMAIN_VIDEO || mDomain == DOMAIN_IMAGE) {
2104 format->findInt32("width", &mWidth);
2105 format->findInt32("height", &mHeight);
2106 if (!format->findInt32("rotation-degrees", &mRotationDegrees)) {
2107 mRotationDegrees = 0;
2108 }
2109 if (nextMetricsHandle != 0) {
2110 mediametrics_setInt32(nextMetricsHandle, kCodecWidth, mWidth);
2111 mediametrics_setInt32(nextMetricsHandle, kCodecHeight, mHeight);
2112 mediametrics_setInt32(nextMetricsHandle, kCodecRotation, mRotationDegrees);
2113 int32_t maxWidth = 0;
2114 if (format->findInt32("max-width", &maxWidth)) {
2115 mediametrics_setInt32(nextMetricsHandle, kCodecMaxWidth, maxWidth);
2116 }
2117 int32_t maxHeight = 0;
2118 if (format->findInt32("max-height", &maxHeight)) {
2119 mediametrics_setInt32(nextMetricsHandle, kCodecMaxHeight, maxHeight);
2120 }
2121 int32_t colorFormat = -1;
2122 if (format->findInt32("color-format", &colorFormat)) {
2123 mediametrics_setInt32(nextMetricsHandle, kCodecColorFormat, colorFormat);
2124 }
2125 int32_t appMaxInputSize = -1;
2126 if (format->findInt32(KEY_MAX_INPUT_SIZE, &appMaxInputSize)) {
2127 mApiUsageMetrics.inputBufferSize.appMax = appMaxInputSize;
2128 }
2129 if (mDomain == DOMAIN_VIDEO) {
2130 float frameRate = -1.0;
2131 if (format->findFloat("frame-rate", &frameRate)) {
2132 mediametrics_setDouble(nextMetricsHandle, kCodecFrameRate, frameRate);
2133 }
2134 float captureRate = -1.0;
2135 if (format->findFloat("capture-rate", &captureRate)) {
2136 mediametrics_setDouble(nextMetricsHandle, kCodecCaptureRate, captureRate);
2137 }
2138 float operatingRate = -1.0;
2139 if (format->findFloat("operating-rate", &operatingRate)) {
2140 mediametrics_setDouble(nextMetricsHandle, kCodecOperatingRate, operatingRate);
2141 }
2142 int32_t priority = -1;
2143 if (format->findInt32("priority", &priority)) {
2144 mediametrics_setInt32(nextMetricsHandle, kCodecPriority, priority);
2145 }
2146 }
2147 }
2148
2149 // Prevent possible integer overflow in downstream code.
2150 if (mWidth < 0 || mHeight < 0 ||
2151 (uint64_t)mWidth * mHeight > (uint64_t)INT32_MAX / 4) {
2152 mErrorLog.log(LOG_TAG, base::StringPrintf(
2153 "Invalid size(s), width=%d, height=%d", mWidth, mHeight));
2154 mediametrics_delete(nextMetricsHandle);
2155 return BAD_VALUE;
2156 }
2157
2158 } else {
2159 if (nextMetricsHandle != 0) {
2160 int32_t channelCount;
2161 if (format->findInt32(KEY_CHANNEL_COUNT, &channelCount)) {
2162 mediametrics_setInt32(nextMetricsHandle, kCodecChannelCount, channelCount);
2163 }
2164 int32_t sampleRate;
2165 if (format->findInt32(KEY_SAMPLE_RATE, &sampleRate)) {
2166 mediametrics_setInt32(nextMetricsHandle, kCodecSampleRate, sampleRate);
2167 }
2168 }
2169 }
2170
2171 if (flags & CONFIGURE_FLAG_ENCODE) {
2172 int8_t enableShaping = property_get_bool(enableMediaFormatShapingProperty,
2173 enableMediaFormatShapingDefault);
2174 if (!enableShaping) {
2175 ALOGI("format shaping disabled, property '%s'", enableMediaFormatShapingProperty);
2176 if (nextMetricsHandle != 0) {
2177 mediametrics_setInt32(nextMetricsHandle, kCodecShapingEnhanced, -1);
2178 }
2179 } else {
2180 (void) shapeMediaFormat(format, flags, nextMetricsHandle);
2181 // XXX: do we want to do this regardless of shaping enablement?
2182 mapFormat(mComponentName, format, nullptr, false);
2183 }
2184 }
2185
2186 // push min/max QP to MediaMetrics after shaping
2187 if (mDomain == DOMAIN_VIDEO && nextMetricsHandle != 0) {
2188 int32_t qpIMin = -1;
2189 if (format->findInt32("video-qp-i-min", &qpIMin)) {
2190 mediametrics_setInt32(nextMetricsHandle, kCodecRequestedVideoQPIMin, qpIMin);
2191 }
2192 int32_t qpIMax = -1;
2193 if (format->findInt32("video-qp-i-max", &qpIMax)) {
2194 mediametrics_setInt32(nextMetricsHandle, kCodecRequestedVideoQPIMax, qpIMax);
2195 }
2196 int32_t qpPMin = -1;
2197 if (format->findInt32("video-qp-p-min", &qpPMin)) {
2198 mediametrics_setInt32(nextMetricsHandle, kCodecRequestedVideoQPPMin, qpPMin);
2199 }
2200 int32_t qpPMax = -1;
2201 if (format->findInt32("video-qp-p-max", &qpPMax)) {
2202 mediametrics_setInt32(nextMetricsHandle, kCodecRequestedVideoQPPMax, qpPMax);
2203 }
2204 int32_t qpBMin = -1;
2205 if (format->findInt32("video-qp-b-min", &qpBMin)) {
2206 mediametrics_setInt32(nextMetricsHandle, kCodecRequestedVideoQPBMin, qpBMin);
2207 }
2208 int32_t qpBMax = -1;
2209 if (format->findInt32("video-qp-b-max", &qpBMax)) {
2210 mediametrics_setInt32(nextMetricsHandle, kCodecRequestedVideoQPBMax, qpBMax);
2211 }
2212 }
2213
2214 updateLowLatency(format);
2215
2216 msg->setMessage("format", format);
2217 msg->setInt32("flags", flags);
2218 msg->setObject("surface", surface);
2219
2220 if (crypto != NULL || descrambler != NULL) {
2221 if (crypto != NULL) {
2222 msg->setPointer("crypto", crypto.get());
2223 } else {
2224 msg->setPointer("descrambler", descrambler.get());
2225 }
2226 if (nextMetricsHandle != 0) {
2227 mediametrics_setInt32(nextMetricsHandle, kCodecCrypto, 1);
2228 }
2229 } else if (mFlags & kFlagIsSecure) {
2230 ALOGW("Crypto or descrambler should be given for secure codec");
2231 }
2232
2233 if (mConfigureMsg != nullptr) {
2234 // if re-configuring, we have one of these from before.
2235 // Recover the space before we discard the old mConfigureMsg
2236 mediametrics_handle_t metricsHandle;
2237 if (mConfigureMsg->findInt64("metrics", &metricsHandle)) {
2238 mediametrics_delete(metricsHandle);
2239 }
2240 }
2241 msg->setInt64("metrics", nextMetricsHandle);
2242
2243 // save msg for reset
2244 mConfigureMsg = msg;
2245
2246 sp<AMessage> callback = mCallback;
2247
2248 status_t err;
2249 std::vector<MediaResourceParcel> resources;
2250 resources.push_back(MediaResource::CodecResource(mFlags & kFlagIsSecure,
2251 toMediaResourceSubType(mDomain)));
2252 // Don't know the buffer size at this point, but it's fine to use 1 because
2253 // the reclaimResource call doesn't consider the requester's buffer size for now.
2254 resources.push_back(MediaResource::GraphicMemoryResource(1));
2255 for (int i = 0; i <= kMaxRetry; ++i) {
2256 sp<AMessage> response;
2257 err = PostAndAwaitResponse(msg, &response);
2258 if (err != OK && err != INVALID_OPERATION) {
2259 if (isResourceError(err) && !mResourceManagerProxy->reclaimResource(resources)) {
2260 break;
2261 }
2262 // MediaCodec now set state to UNINITIALIZED upon any fatal error.
2263 // To maintain backward-compatibility, do a reset() to put codec
2264 // back into INITIALIZED state.
2265 // But don't reset if the err is INVALID_OPERATION, which means
2266 // the configure failure is due to wrong state.
2267
2268 ALOGE("configure failed with err 0x%08x, resetting...", err);
2269 status_t err2 = reset();
2270 if (err2 != OK) {
2271 ALOGE("retrying configure: failed to reset codec (%08x)", err2);
2272 break;
2273 }
2274 if (callback != nullptr) {
2275 err2 = setCallback(callback);
2276 if (err2 != OK) {
2277 ALOGE("retrying configure: failed to set callback (%08x)", err2);
2278 break;
2279 }
2280 }
2281 }
2282 if (!isResourceError(err)) {
2283 break;
2284 }
2285 }
2286
2287 return err;
2288 }
2289
2290 // Media Format Shaping support
2291 //
2292
2293 static android::mediaformatshaper::FormatShaperOps_t *sShaperOps = NULL;
2294 static bool sIsHandheld = true;
2295
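// Loads libmediaformatshaper at most once per process, preferring the copy in the
// com.android.media APEX and falling back to the system image, then queries the native
// package manager for hardware features to decide whether this is a handheld device.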
2296 static bool connectFormatShaper() {
2297 static std::once_flag sCheckOnce;
2298
2299 ALOGV("connectFormatShaper...");
2300
2301 std::call_once(sCheckOnce, [&](){
2302
2303 void *libHandle = NULL;
2304 nsecs_t loading_started = systemTime(SYSTEM_TIME_MONOTONIC);
2305
2306 // prefer any copy in the mainline module
2307 //
2308 android_namespace_t *mediaNs = android_get_exported_namespace("com_android_media");
2309 AString libraryName = "libmediaformatshaper.so";
2310
2311 if (mediaNs != NULL) {
2312 static const android_dlextinfo dlextinfo = {
2313 .flags = ANDROID_DLEXT_USE_NAMESPACE,
2314 .library_namespace = mediaNs,
2315 };
2316
2317 AString libraryMainline = "/apex/com.android.media/";
2318 #if __LP64__
2319 libraryMainline.append("lib64/");
2320 #else
2321 libraryMainline.append("lib/");
2322 #endif
2323 libraryMainline.append(libraryName);
2324
2325 libHandle = android_dlopen_ext(libraryMainline.c_str(), RTLD_NOW|RTLD_NODELETE,
2326 &dlextinfo);
2327
2328 if (libHandle != NULL) {
2329 sShaperOps = (android::mediaformatshaper::FormatShaperOps_t*)
2330 dlsym(libHandle, "shaper_ops");
2331 } else {
2332 ALOGW("connectFormatShaper: unable to load mainline formatshaper %s",
2333 libraryMainline.c_str());
2334 }
2335 } else {
2336 ALOGV("connectFormatShaper: couldn't find media namespace.");
2337 }
2338
2339 // fall back to the system partition, if present.
2340 //
2341 if (sShaperOps == NULL) {
2342
2343 libHandle = dlopen(libraryName.c_str(), RTLD_NOW|RTLD_NODELETE);
2344
2345 if (libHandle != NULL) {
2346 sShaperOps = (android::mediaformatshaper::FormatShaperOps_t*)
2347 dlsym(libHandle, "shaper_ops");
2348 } else {
2349 ALOGW("connectFormatShaper: unable to load formatshaper %s", libraryName.c_str());
2350 }
2351 }
2352
2353 if (sShaperOps != nullptr
2354 && sShaperOps->version != android::mediaformatshaper::SHAPER_VERSION_V1) {
2355 ALOGW("connectFormatShaper: unhandled version ShaperOps: %d, DISABLED",
2356 sShaperOps->version);
2357 sShaperOps = nullptr;
2358 }
2359
2360 if (sShaperOps != nullptr) {
2361 ALOGV("connectFormatShaper: connected to library %s", libraryName.c_str());
2362 }
2363
2364 nsecs_t loading_finished = systemTime(SYSTEM_TIME_MONOTONIC);
2365 ALOGV("connectFormatShaper: loaded libraries: %" PRId64 " us",
2366 (loading_finished - loading_started)/1000);
2367
2368
2369 // we also want to know whether this is a handheld device
2370 // start with assumption that the device is handheld.
2371 sIsHandheld = true;
2372 sp<IServiceManager> serviceMgr = defaultServiceManager();
2373 sp<content::pm::IPackageManagerNative> packageMgr;
2374 if (serviceMgr.get() != nullptr) {
2375 sp<IBinder> binder = serviceMgr->waitForService(String16("package_native"));
2376 packageMgr = interface_cast<content::pm::IPackageManagerNative>(binder);
2377 }
2378 // if we didn't get serviceMgr, we'll leave packageMgr as default null
2379 if (packageMgr != nullptr) {
2380
2381 // MUST have these
2382 static const String16 featuresNeeded[] = {
2383 String16("android.hardware.touchscreen")
2384 };
2385 // these must be present to be a handheld
2386 for (::android::String16 required : featuresNeeded) {
2387 bool hasFeature = false;
2388 binder::Status status = packageMgr->hasSystemFeature(required, 0, &hasFeature);
2389 if (!status.isOk()) {
2390 ALOGE("%s: hasSystemFeature failed: %s",
2391 __func__, status.exceptionMessage().c_str());
2392 continue;
2393 }
2394 ALOGV("feature %s says %d", String8(required).c_str(), hasFeature);
2395 if (!hasFeature) {
2396 ALOGV("... which means we are not handheld");
2397 sIsHandheld = false;
2398 break;
2399 }
2400 }
2401
2402 // MUST NOT have these
2403 static const String16 featuresDisallowed[] = {
2404 String16("android.hardware.type.automotive"),
2405 String16("android.hardware.type.television"),
2406 String16("android.hardware.type.watch")
2407 };
2408 // any of these present -- we aren't a handheld
2409 for (::android::String16 forbidden : featuresDisallowed) {
2410 bool hasFeature = false;
2411 binder::Status status = packageMgr->hasSystemFeature(forbidden, 0, &hasFeature);
2412 if (!status.isOk()) {
2413 ALOGE("%s: hasSystemFeature failed: %s",
2414 __func__, status.exceptionMessage().c_str());
2415 continue;
2416 }
2417 ALOGV("feature %s says %d", String8(forbidden).c_str(), hasFeature);
2418 if (hasFeature) {
2419 ALOGV("... which means we are not handheld");
2420 sIsHandheld = false;
2421 break;
2422 }
2423 }
2424 }
2425
2426 });
2427
2428 return true;
2429 }
2430
2431
2432 #if 0
2433 // a construct to force the above dlopen() to run very early.
2434 // goal: so the dlopen() doesn't happen on critical path of latency sensitive apps
2435 // failure of this means that cold start of those apps is slower by the time to dlopen()
2436 // TODO(b/183454066): tradeoffs between memory of early loading vs latency of late loading
2437 //
2438 static bool forceEarlyLoadingShaper = connectFormatShaper();
2439 #endif
2440
2441 // parse the codec's properties: mapping, whether it meets min quality, etc
2442 // and pass them into the video quality code
2443 //
2444 static void loadCodecProperties(mediaformatshaper::shaperHandle_t shaperHandle,
2445 sp<MediaCodecInfo> codecInfo, AString mediaType) {
2446
2447 sp<MediaCodecInfo::Capabilities> capabilities =
2448 codecInfo->getCapabilitiesFor(mediaType.c_str());
2449 if (capabilities == nullptr) {
2450 ALOGI("no capabilities as part of the codec?");
2451 } else {
2452 const sp<AMessage> &details = capabilities->getDetails();
2453 AString mapTarget;
2454 int count = details->countEntries();
2455 for(int ix = 0; ix < count; ix++) {
2456 AMessage::Type entryType;
2457 const char *mapSrc = details->getEntryNameAt(ix, &entryType);
2458 // XXX: re-use ix from getEntryAt() to avoid additional findXXX() invocation
2459 //
2460 static const char *featurePrefix = "feature-";
2461 static const int featurePrefixLen = strlen(featurePrefix);
2462 static const char *tuningPrefix = "tuning-";
2463 static const int tuningPrefixLen = strlen(tuningPrefix);
2464 static const char *mappingPrefix = "mapping-";
2465 static const int mappingPrefixLen = strlen(mappingPrefix);
2466
2467 if (mapSrc == NULL) {
2468 continue;
2469 } else if (!strncmp(mapSrc, featurePrefix, featurePrefixLen)) {
2470 int32_t intValue;
2471 if (details->findInt32(mapSrc, &intValue)) {
2472 ALOGV("-- feature '%s' -> %d", mapSrc, intValue);
2473 (void)(sShaperOps->setFeature)(shaperHandle, &mapSrc[featurePrefixLen],
2474 intValue);
2475 }
2476 continue;
2477 } else if (!strncmp(mapSrc, tuningPrefix, tuningPrefixLen)) {
2478 AString value;
2479 if (details->findString(mapSrc, &value)) {
2480 ALOGV("-- tuning '%s' -> '%s'", mapSrc, value.c_str());
2481 (void)(sShaperOps->setTuning)(shaperHandle, &mapSrc[tuningPrefixLen],
2482 value.c_str());
2483 }
2484 continue;
2485 } else if (!strncmp(mapSrc, mappingPrefix, mappingPrefixLen)) {
2486 AString target;
2487 if (details->findString(mapSrc, &target)) {
2488 ALOGV("-- mapping %s: map %s to %s", mapSrc, &mapSrc[mappingPrefixLen],
2489 target.c_str());
2490 // key is really "kind-key"
2491 // separate that, so setMap() sees the triple kind, key, value
2492 const char *kind = &mapSrc[mappingPrefixLen];
2493 const char *sep = strchr(kind, '-');
2494 const char *key = sep+1;
2495 if (sep != NULL) {
2496 std::string xkind = std::string(kind, sep-kind);
2497 (void)(sShaperOps->setMap)(shaperHandle, xkind.c_str(),
2498 key, target.c_str());
2499 }
2500 }
2501 }
2502 }
2503 }
2504
2505 // we also carry in the codec description whether we are on a handheld device.
2506 // this info is eventually used by both the Codec and the C2 machinery to inform
2507 // the underlying codec whether to do any shaping.
2508 //
2509 if (sIsHandheld) {
2510 // set if we are indeed a handheld device (or, in future, 'any eligible device');
2511 // it is missing on devices that aren't eligible for minimum quality enforcement.
2512 (void)(sShaperOps->setFeature)(shaperHandle, "_vq_eligible.device", 1);
2513 // strictly speaking, it's a tuning, but those are strings and feature stores int
2514 (void)(sShaperOps->setFeature)(shaperHandle, "_quality.target", 1 /* S_HANDHELD */);
2515 }
2516 }
2517
2518 status_t MediaCodec::setupFormatShaper(AString mediaType) {
2519 ALOGV("setupFormatShaper: initializing shaper data for codec %s mediaType %s",
2520 mComponentName.c_str(), mediaType.c_str());
2521
2522 nsecs_t mapping_started = systemTime(SYSTEM_TIME_MONOTONIC);
2523
2524 // someone might have beaten us to it.
2525 mediaformatshaper::shaperHandle_t shaperHandle;
2526 shaperHandle = sShaperOps->findShaper(mComponentName.c_str(), mediaType.c_str());
2527 if (shaperHandle != nullptr) {
2528 ALOGV("shaperhandle %p -- no initialization needed", shaperHandle);
2529 return OK;
2530 }
2531
2532 // we get to build & register one
2533 shaperHandle = sShaperOps->createShaper(mComponentName.c_str(), mediaType.c_str());
2534 if (shaperHandle == nullptr) {
2535 ALOGW("unable to create a shaper for codec %s mediaType %s",
2536 mComponentName.c_str(), mediaType.c_str());
2537 return OK;
2538 }
2539
2540 (void) loadCodecProperties(shaperHandle, mCodecInfo, mediaType);
2541
2542 shaperHandle = sShaperOps->registerShaper(shaperHandle,
2543 mComponentName.c_str(), mediaType.c_str());
2544
2545 nsecs_t mapping_finished = systemTime(SYSTEM_TIME_MONOTONIC);
2546 ALOGV("setupFormatShaper: populated shaper node for codec %s: %" PRId64 " us",
2547 mComponentName.c_str(), (mapping_finished - mapping_started)/1000);
2548
2549 return OK;
2550 }
2551
2552
2553 // Format Shaping
2554 // Mapping and Manipulation of encoding parameters
2555 //
2556 // All of these decisions are pushed into the shaper instead of here within MediaCodec.
2557 // this includes decisions based on whether the codec implements minimum quality bars
2558 // itself or needs to be shaped outside of the codec.
2559 // This keeps all those decisions in one place.
2560 // It also means that we push some extra decision information (is this a handheld device
2561 // or one that is otherwise eligible for minimum quality manipulation, which generational
2562 // quality target is in force, etc). This allows those values to be cached in the
2563 // per-codec structures that are done 1 time within a process instead of for each
2564 // codec instantiation.
2565 //
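// For example, on an eligible device the shaper may adjust KEY_BIT_RATE or add
// video-qp-*-max bounds; shapeMediaFormat() below merges any such deltas back into the
// format that is sent to the codec (see the kCodecOriginal* metrics keys).
//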
2566
2567 status_t MediaCodec::shapeMediaFormat(
2568 const sp<AMessage> &format,
2569 uint32_t flags,
2570 mediametrics_handle_t metricsHandle) {
2571 ALOGV("shapeMediaFormat entry");
2572
2573 if (!(flags & CONFIGURE_FLAG_ENCODE)) {
2574 ALOGW("shapeMediaFormat: not encoder");
2575 return OK;
2576 }
2577 if (mCodecInfo == NULL) {
2578 ALOGW("shapeMediaFormat: no codecinfo");
2579 return OK;
2580 }
2581
2582 AString mediaType;
2583 if (!format->findString("mime", &mediaType)) {
2584 ALOGW("shapeMediaFormat: no mediaType information");
2585 return OK;
2586 }
2587
2588 // make sure we have the function entry points for the shaper library
2589 //
2590
2591 connectFormatShaper();
2592 if (sShaperOps == nullptr) {
2593 ALOGW("shapeMediaFormat: no MediaFormatShaper hooks available");
2594 return OK;
2595 }
2596
2597 // find the shaper information for this codec+mediaType pair
2598 //
2599 mediaformatshaper::shaperHandle_t shaperHandle;
2600 shaperHandle = sShaperOps->findShaper(mComponentName.c_str(), mediaType.c_str());
2601 if (shaperHandle == nullptr) {
2602 setupFormatShaper(mediaType);
2603 shaperHandle = sShaperOps->findShaper(mComponentName.c_str(), mediaType.c_str());
2604 }
2605 if (shaperHandle == nullptr) {
2606 ALOGW("shapeMediaFormat: no handler for codec %s mediatype %s",
2607 mComponentName.c_str(), mediaType.c_str());
2608 return OK;
2609 }
2610
2611 // run the shaper
2612 //
2613
2614 ALOGV("Shaping input: %s", format->debugString(0).c_str());
2615
2616 sp<AMessage> updatedFormat = format->dup();
2617 AMediaFormat *updatedNdkFormat = AMediaFormat_fromMsg(&updatedFormat);
2618
2619 int result = (*sShaperOps->shapeFormat)(shaperHandle, updatedNdkFormat, flags);
2620 if (result == 0) {
2621 AMediaFormat_getFormat(updatedNdkFormat, &updatedFormat);
2622
2623 sp<AMessage> deltas = updatedFormat->changesFrom(format, false /* deep */);
2624 size_t changeCount = deltas->countEntries();
2625 ALOGD("shapeMediaFormat: deltas(%zu): %s", changeCount, deltas->debugString(2).c_str());
2626 if (metricsHandle != 0) {
2627 mediametrics_setInt32(metricsHandle, kCodecShapingEnhanced, changeCount);
2628 }
2629 if (changeCount > 0) {
2630 if (metricsHandle != 0) {
2631 // save some old properties before we fold in the new ones
2632 int32_t bitrate;
2633 if (format->findInt32(KEY_BIT_RATE, &bitrate)) {
2634 mediametrics_setInt32(metricsHandle, kCodecOriginalBitrate, bitrate);
2635 }
2636 int32_t qpIMin = -1;
2637 if (format->findInt32("original-video-qp-i-min", &qpIMin)) {
2638 mediametrics_setInt32(metricsHandle, kCodecOriginalVideoQPIMin, qpIMin);
2639 }
2640 int32_t qpIMax = -1;
2641 if (format->findInt32("original-video-qp-i-max", &qpIMax)) {
2642 mediametrics_setInt32(metricsHandle, kCodecOriginalVideoQPIMax, qpIMax);
2643 }
2644 int32_t qpPMin = -1;
2645 if (format->findInt32("original-video-qp-p-min", &qpPMin)) {
2646 mediametrics_setInt32(metricsHandle, kCodecOriginalVideoQPPMin, qpPMin);
2647 }
2648 int32_t qpPMax = -1;
2649 if (format->findInt32("original-video-qp-p-max", &qpPMax)) {
2650 mediametrics_setInt32(metricsHandle, kCodecOriginalVideoQPPMax, qpPMax);
2651 }
2652 int32_t qpBMin = -1;
2653 if (format->findInt32("original-video-qp-b-min", &qpBMin)) {
2654 mediametrics_setInt32(metricsHandle, kCodecOriginalVideoQPBMin, qpBMin);
2655 }
2656 int32_t qpBMax = -1;
2657 if (format->findInt32("original-video-qp-b-max", &qpBMax)) {
2658 mediametrics_setInt32(metricsHandle, kCodecOriginalVideoQPBMax, qpBMax);
2659 }
2660 }
2661 // NB: for any field in both format and deltas, the deltas copy wins
2662 format->extend(deltas);
2663 }
2664 }
2665
2666 AMediaFormat_delete(updatedNdkFormat);
2667 return OK;
2668 }
2669
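// Renames format entries according to the shaper's mapping tables (or the reverse mapping
// when unwinding), e.g. translating generic keys into the component's own parameter names.
// A no-op if the shaper library or a mapping for this codec is unavailable.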
2670 static void mapFormat(AString componentName, const sp<AMessage> &format, const char *kind,
2671 bool reverse) {
2672 AString mediaType;
2673 if (!format->findString("mime", &mediaType)) {
2674 ALOGV("mapFormat: no mediaType information");
2675 return;
2676 }
2677 ALOGV("mapFormat: codec %s mediatype %s kind %s reverse %d", componentName.c_str(),
2678 mediaType.c_str(), kind ? kind : "<all>", reverse);
2679
2680 // make sure we have the function entry points for the shaper library
2681 //
2682
2683 #if 0
2684 // let's play it faster: only do the mapping if we've already loaded the library
2685 connectFormatShaper();
2686 #endif
2687 if (sShaperOps == nullptr) {
2688 ALOGV("mapFormat: no MediaFormatShaper hooks available");
2689 return;
2690 }
2691
2692 // find the shaper information for this codec+mediaType pair
2693 //
2694 mediaformatshaper::shaperHandle_t shaperHandle;
2695 shaperHandle = sShaperOps->findShaper(componentName.c_str(), mediaType.c_str());
2696 if (shaperHandle == nullptr) {
2697 ALOGV("mapFormat: no shaper handle");
2698 return;
2699 }
2700
2701 const char **mappings;
2702 if (reverse)
2703 mappings = sShaperOps->getReverseMappings(shaperHandle, kind);
2704 else
2705 mappings = sShaperOps->getMappings(shaperHandle, kind);
2706
2707 if (mappings == nullptr) {
2708 ALOGV("no mappings returned");
2709 return;
2710 }
2711
2712 ALOGV("Pre-mapping: %s", format->debugString(2).c_str());
2713 // do the mapping
2714 //
2715 int entries = format->countEntries();
2716 for (int i = 0; ; i += 2) {
2717 if (mappings[i] == nullptr) {
2718 break;
2719 }
2720
2721 size_t ix = format->findEntryByName(mappings[i]);
2722 if (ix < entries) {
2723 ALOGV("map '%s' to '%s'", mappings[i], mappings[i+1]);
2724 status_t status = format->setEntryNameAt(ix, mappings[i+1]);
2725 if (status != OK) {
2726 ALOGW("Unable to map from '%s' to '%s': status %d",
2727 mappings[i], mappings[i+1], status);
2728 }
2729 }
2730 }
2731 ALOGV("Post-mapping: %s", format->debugString(2).c_str());
2732
2733
2734 // reclaim the mapping memory
2735 for (int i = 0; ; i += 2) {
2736 if (mappings[i] == nullptr) {
2737 break;
2738 }
2739 free((void*)mappings[i]);
2740 free((void*)mappings[i + 1]);
2741 }
2742 free(mappings);
2743 mappings = nullptr;
2744 }
2745
2746 //
2747 // end of Format Shaping hooks within MediaCodec
2748 //
2749
2750 status_t MediaCodec::releaseCrypto()
2751 {
2752 ALOGV("releaseCrypto");
2753
2754 sp<AMessage> msg = new AMessage(kWhatDrmReleaseCrypto, this);
2755
2756 sp<AMessage> response;
2757 status_t status = msg->postAndAwaitResponse(&response);
2758
2759 if (status == OK && response != NULL) {
2760 CHECK(response->findInt32("status", &status));
2761 ALOGV("releaseCrypto ret: %d ", status);
2762 }
2763 else {
2764 ALOGE("releaseCrypto err: %d", status);
2765 }
2766
2767 return status;
2768 }
2769
2770 void MediaCodec::onReleaseCrypto(const sp<AMessage>& msg)
2771 {
2772 status_t status = INVALID_OPERATION;
2773 if (mCrypto != NULL) {
2774 ALOGV("onReleaseCrypto: mCrypto: %p (%d)", mCrypto.get(), mCrypto->getStrongCount());
2775 mBufferChannel->setCrypto(NULL);
2776 // TODO change to ALOGV
2777 ALOGD("onReleaseCrypto: [before clear] mCrypto: %p (%d)",
2778 mCrypto.get(), mCrypto->getStrongCount());
2779 mCrypto.clear();
2780
2781 status = OK;
2782 }
2783 else {
2784 ALOGW("onReleaseCrypto: No mCrypto. err: %d", status);
2785 }
2786
2787 sp<AMessage> response = new AMessage;
2788 response->setInt32("status", status);
2789
2790 sp<AReplyToken> replyID;
2791 CHECK(msg->senderAwaitsResponse(&replyID));
2792 response->postReply(replyID);
2793 }
2794
2795 status_t MediaCodec::setInputSurface(
2796 const sp<PersistentSurface> &surface) {
2797 sp<AMessage> msg = new AMessage(kWhatSetInputSurface, this);
2798 msg->setObject("input-surface", surface.get());
2799
2800 sp<AMessage> response;
2801 return PostAndAwaitResponse(msg, &response);
2802 }
2803
2804 status_t MediaCodec::setSurface(const sp<Surface> &surface) {
2805 sp<AMessage> msg = new AMessage(kWhatSetSurface, this);
2806 msg->setObject("surface", surface);
2807
2808 sp<AMessage> response;
2809 return PostAndAwaitResponse(msg, &response);
2810 }
2811
2812 status_t MediaCodec::createInputSurface(
2813 sp<IGraphicBufferProducer>* bufferProducer) {
2814 sp<AMessage> msg = new AMessage(kWhatCreateInputSurface, this);
2815
2816 sp<AMessage> response;
2817 status_t err = PostAndAwaitResponse(msg, &response);
2818 if (err == NO_ERROR) {
2819 // unwrap the sp<IGraphicBufferProducer>
2820 sp<RefBase> obj;
2821 bool found = response->findObject("input-surface", &obj);
2822 CHECK(found);
2823 sp<BufferProducerWrapper> wrapper(
2824 static_cast<BufferProducerWrapper*>(obj.get()));
2825 *bufferProducer = wrapper->getBufferProducer();
2826 } else {
2827 ALOGW("createInputSurface failed, err=%d", err);
2828 }
2829 return err;
2830 }
2831
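// Rough estimate of graphic memory in use: assumes each allocated port buffer is an
// uncompressed YUV 4:2:0 frame (width * height * 3/2 bytes).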
2832 uint64_t MediaCodec::getGraphicBufferSize() {
2833 if (mDomain != DOMAIN_VIDEO && mDomain != DOMAIN_IMAGE) {
2834 return 0;
2835 }
2836
2837 uint64_t size = 0;
2838 size_t portNum = sizeof(mPortBuffers) / sizeof((mPortBuffers)[0]);
2839 for (size_t i = 0; i < portNum; ++i) {
2840 // TODO: this is just an estimation, we should get the real buffer size from ACodec.
2841 size += mPortBuffers[i].size() * mWidth * mHeight * 3 / 2;
2842 }
2843 return size;
2844 }
2845
2846 status_t MediaCodec::start() {
2847 sp<AMessage> msg = new AMessage(kWhatStart, this);
2848
2849 sp<AMessage> callback;
2850
2851 status_t err;
2852 std::vector<MediaResourceParcel> resources;
2853 resources.push_back(MediaResource::CodecResource(mFlags & kFlagIsSecure,
2854 toMediaResourceSubType(mDomain)));
2855 // Don't know the buffer size at this point, but it's fine to use 1 because
2856 // the reclaimResource call doesn't consider the requester's buffer size for now.
2857 resources.push_back(MediaResource::GraphicMemoryResource(1));
2858 for (int i = 0; i <= kMaxRetry; ++i) {
2859 if (i > 0) {
2860 // Don't try to reclaim resources on the first attempt.
2861 if (!mResourceManagerProxy->reclaimResource(resources)) {
2862 break;
2863 }
2864 // Recover the codec from the previous error before retrying start.
2865 err = reset();
2866 if (err != OK) {
2867 ALOGE("retrying start: failed to reset codec");
2868 break;
2869 }
2870 sp<AMessage> response;
2871 err = PostAndAwaitResponse(mConfigureMsg, &response);
2872 if (err != OK) {
2873 ALOGE("retrying start: failed to configure codec");
2874 break;
2875 }
2876 if (callback != nullptr) {
2877 err = setCallback(callback);
2878 if (err != OK) {
2879 ALOGE("retrying start: failed to set callback");
2880 break;
2881 }
2882 ALOGD("succeed to set callback for reclaim");
2883 }
2884 }
2885
2886 // Keep callback message after the first iteration if necessary.
2887 if (i == 0 && mCallback != nullptr && mFlags & kFlagIsAsync) {
2888 callback = mCallback;
2889 ALOGD("keep callback message for reclaim");
2890 }
2891
2892 sp<AMessage> response;
2893 err = PostAndAwaitResponse(msg, &response);
2894 if (!isResourceError(err)) {
2895 break;
2896 }
2897 }
2898 return err;
2899 }
2900
2901 status_t MediaCodec::stop() {
2902 sp<AMessage> msg = new AMessage(kWhatStop, this);
2903
2904 sp<AMessage> response;
2905 return PostAndAwaitResponse(msg, &response);
2906 }
2907
2908 bool MediaCodec::hasPendingBuffer(int portIndex) {
2909 return std::any_of(
2910 mPortBuffers[portIndex].begin(), mPortBuffers[portIndex].end(),
2911 [](const BufferInfo &info) { return info.mOwnedByClient; });
2912 }
2913
2914 bool MediaCodec::hasPendingBuffer() {
2915 return hasPendingBuffer(kPortIndexInput) || hasPendingBuffer(kPortIndexOutput);
2916 }
2917
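// reclaim() releases the codec on behalf of resource reclamation: it posts kWhatRelease
// tagged with "reclaimed" (and "force" when requested), and treats a dead looper as OK.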
2918 status_t MediaCodec::reclaim(bool force) {
2919 ALOGD("MediaCodec::reclaim(%p) %s", this, mInitName.c_str());
2920 sp<AMessage> msg = new AMessage(kWhatRelease, this);
2921 msg->setInt32("reclaimed", 1);
2922 msg->setInt32("force", force ? 1 : 0);
2923
2924 sp<AMessage> response;
2925 status_t ret = PostAndAwaitResponse(msg, &response);
2926 if (ret == -ENOENT) {
2927 ALOGD("MediaCodec looper is gone, skip reclaim");
2928 ret = OK;
2929 }
2930 return ret;
2931 }
2932
2933 status_t MediaCodec::release() {
2934 sp<AMessage> msg = new AMessage(kWhatRelease, this);
2935 sp<AMessage> response;
2936 return PostAndAwaitResponse(msg, &response);
2937 }
2938
2939 status_t MediaCodec::releaseAsync(const sp<AMessage> &notify) {
2940 sp<AMessage> msg = new AMessage(kWhatRelease, this);
2941 msg->setMessage("async", notify);
2942 sp<AMessage> response;
2943 return PostAndAwaitResponse(msg, &response);
2944 }
2945
2946 status_t MediaCodec::reset() {
2947 /* When an external-facing MediaCodec object is created,
2948 it is already initialized. Thus, reset is essentially
2949 release() followed by init(), plus clearing the state. */
2950
2951 status_t err = release();
2952
2953 // unregister handlers
2954 if (mCodec != NULL) {
2955 if (mCodecLooper != NULL) {
2956 mCodecLooper->unregisterHandler(mCodec->id());
2957 } else {
2958 mLooper->unregisterHandler(mCodec->id());
2959 }
2960 mCodec = NULL;
2961 }
2962 mLooper->unregisterHandler(id());
2963
2964 mFlags = 0; // clear all flags
2965 mStickyError = OK;
2966
2967 // reset state not reset by setState(UNINITIALIZED)
2968 mDequeueInputReplyID = 0;
2969 mDequeueOutputReplyID = 0;
2970 mDequeueInputTimeoutGeneration = 0;
2971 mDequeueOutputTimeoutGeneration = 0;
2972 mHaveInputSurface = false;
2973
2974 if (err == OK) {
2975 err = init(mInitName);
2976 }
2977 return err;
2978 }
2979
2980 status_t MediaCodec::queueInputBuffer(
2981 size_t index,
2982 size_t offset,
2983 size_t size,
2984 int64_t presentationTimeUs,
2985 uint32_t flags,
2986 AString *errorDetailMsg) {
2987 if (errorDetailMsg != NULL) {
2988 errorDetailMsg->clear();
2989 }
2990
2991 sp<AMessage> msg = new AMessage(kWhatQueueInputBuffer, this);
2992 msg->setSize("index", index);
2993 msg->setSize("offset", offset);
2994 msg->setSize("size", size);
2995 msg->setInt64("timeUs", presentationTimeUs);
2996 msg->setInt32("flags", flags);
2997 msg->setPointer("errorDetailMsg", errorDetailMsg);
2998
2999 sp<AMessage> response;
3000 return PostAndAwaitResponse(msg, &response);
3001 }
3002
3003 status_t MediaCodec::queueSecureInputBuffer(
3004 size_t index,
3005 size_t offset,
3006 const CryptoPlugin::SubSample *subSamples,
3007 size_t numSubSamples,
3008 const uint8_t key[16],
3009 const uint8_t iv[16],
3010 CryptoPlugin::Mode mode,
3011 const CryptoPlugin::Pattern &pattern,
3012 int64_t presentationTimeUs,
3013 uint32_t flags,
3014 AString *errorDetailMsg) {
3015 if (errorDetailMsg != NULL) {
3016 errorDetailMsg->clear();
3017 }
3018
3019 sp<AMessage> msg = new AMessage(kWhatQueueInputBuffer, this);
3020 msg->setSize("index", index);
3021 msg->setSize("offset", offset);
3022 msg->setPointer("subSamples", (void *)subSamples);
3023 msg->setSize("numSubSamples", numSubSamples);
3024 msg->setPointer("key", (void *)key);
3025 msg->setPointer("iv", (void *)iv);
3026 msg->setInt32("mode", mode);
3027 msg->setInt32("encryptBlocks", pattern.mEncryptBlocks);
3028 msg->setInt32("skipBlocks", pattern.mSkipBlocks);
3029 msg->setInt64("timeUs", presentationTimeUs);
3030 msg->setInt32("flags", flags);
3031 msg->setPointer("errorDetailMsg", errorDetailMsg);
3032
3033 sp<AMessage> response;
3034 status_t err = PostAndAwaitResponse(msg, &response);
3035
3036 return err;
3037 }
3038
3039 status_t MediaCodec::queueBuffer(
3040 size_t index,
3041 const std::shared_ptr<C2Buffer> &buffer,
3042 int64_t presentationTimeUs,
3043 uint32_t flags,
3044 const sp<AMessage> &tunings,
3045 AString *errorDetailMsg) {
3046 if (errorDetailMsg != NULL) {
3047 errorDetailMsg->clear();
3048 }
3049
3050 sp<AMessage> msg = new AMessage(kWhatQueueInputBuffer, this);
3051 msg->setSize("index", index);
3052 sp<WrapperObject<std::shared_ptr<C2Buffer>>> obj{
3053 new WrapperObject<std::shared_ptr<C2Buffer>>{buffer}};
3054 msg->setObject("c2buffer", obj);
3055 msg->setInt64("timeUs", presentationTimeUs);
3056 msg->setInt32("flags", flags);
3057 if (tunings && tunings->countEntries() > 0) {
3058 msg->setMessage("tunings", tunings);
3059 }
3060 msg->setPointer("errorDetailMsg", errorDetailMsg);
3061
3062 sp<AMessage> response;
3063 status_t err = PostAndAwaitResponse(msg, &response);
3064
3065 return err;
3066 }
3067
3068 status_t MediaCodec::queueEncryptedBuffer(
3069 size_t index,
3070 const sp<hardware::HidlMemory> &buffer,
3071 size_t offset,
3072 const CryptoPlugin::SubSample *subSamples,
3073 size_t numSubSamples,
3074 const uint8_t key[16],
3075 const uint8_t iv[16],
3076 CryptoPlugin::Mode mode,
3077 const CryptoPlugin::Pattern &pattern,
3078 int64_t presentationTimeUs,
3079 uint32_t flags,
3080 const sp<AMessage> &tunings,
3081 AString *errorDetailMsg) {
3082 if (errorDetailMsg != NULL) {
3083 errorDetailMsg->clear();
3084 }
3085
3086 sp<AMessage> msg = new AMessage(kWhatQueueInputBuffer, this);
3087 msg->setSize("index", index);
3088 sp<WrapperObject<sp<hardware::HidlMemory>>> memory{
3089 new WrapperObject<sp<hardware::HidlMemory>>{buffer}};
3090 msg->setObject("memory", memory);
3091 msg->setSize("offset", offset);
3092 msg->setPointer("subSamples", (void *)subSamples);
3093 msg->setSize("numSubSamples", numSubSamples);
3094 msg->setPointer("key", (void *)key);
3095 msg->setPointer("iv", (void *)iv);
3096 msg->setInt32("mode", mode);
3097 msg->setInt32("encryptBlocks", pattern.mEncryptBlocks);
3098 msg->setInt32("skipBlocks", pattern.mSkipBlocks);
3099 msg->setInt64("timeUs", presentationTimeUs);
3100 msg->setInt32("flags", flags);
3101 if (tunings && tunings->countEntries() > 0) {
3102 msg->setMessage("tunings", tunings);
3103 }
3104 msg->setPointer("errorDetailMsg", errorDetailMsg);
3105
3106 sp<AMessage> response;
3107 status_t err = PostAndAwaitResponse(msg, &response);
3108
3109 return err;
3110 }
3111
3112 status_t MediaCodec::dequeueInputBuffer(size_t *index, int64_t timeoutUs) {
3113 sp<AMessage> msg = new AMessage(kWhatDequeueInputBuffer, this);
3114 msg->setInt64("timeoutUs", timeoutUs);
3115
3116 sp<AMessage> response;
3117 status_t err;
3118 if ((err = PostAndAwaitResponse(msg, &response)) != OK) {
3119 return err;
3120 }
3121
3122 CHECK(response->findSize("index", index));
3123
3124 return OK;
3125 }
3126
3127 status_t MediaCodec::dequeueOutputBuffer(
3128 size_t *index,
3129 size_t *offset,
3130 size_t *size,
3131 int64_t *presentationTimeUs,
3132 uint32_t *flags,
3133 int64_t timeoutUs) {
3134 sp<AMessage> msg = new AMessage(kWhatDequeueOutputBuffer, this);
3135 msg->setInt64("timeoutUs", timeoutUs);
3136
3137 sp<AMessage> response;
3138 status_t err;
3139 if ((err = PostAndAwaitResponse(msg, &response)) != OK) {
3140 return err;
3141 }
3142
3143 CHECK(response->findSize("index", index));
3144 CHECK(response->findSize("offset", offset));
3145 CHECK(response->findSize("size", size));
3146 CHECK(response->findInt64("timeUs", presentationTimeUs));
3147 CHECK(response->findInt32("flags", (int32_t *)flags));
3148
3149 return OK;
3150 }
3151
3152 status_t MediaCodec::renderOutputBufferAndRelease(size_t index) {
3153 sp<AMessage> msg = new AMessage(kWhatReleaseOutputBuffer, this);
3154 msg->setSize("index", index);
3155 msg->setInt32("render", true);
3156
3157 sp<AMessage> response;
3158 return PostAndAwaitResponse(msg, &response);
3159 }
3160
3161 status_t MediaCodec::renderOutputBufferAndRelease(size_t index, int64_t timestampNs) {
3162 sp<AMessage> msg = new AMessage(kWhatReleaseOutputBuffer, this);
3163 msg->setSize("index", index);
3164 msg->setInt32("render", true);
3165 msg->setInt64("timestampNs", timestampNs);
3166
3167 sp<AMessage> response;
3168 return PostAndAwaitResponse(msg, &response);
3169 }
3170
3171 status_t MediaCodec::releaseOutputBuffer(size_t index) {
3172 sp<AMessage> msg = new AMessage(kWhatReleaseOutputBuffer, this);
3173 msg->setSize("index", index);
3174
3175 sp<AMessage> response;
3176 return PostAndAwaitResponse(msg, &response);
3177 }
3178
3179 status_t MediaCodec::signalEndOfInputStream() {
3180 sp<AMessage> msg = new AMessage(kWhatSignalEndOfInputStream, this);
3181
3182 sp<AMessage> response;
3183 return PostAndAwaitResponse(msg, &response);
3184 }
3185
3186 status_t MediaCodec::getOutputFormat(sp<AMessage> *format) const {
3187 sp<AMessage> msg = new AMessage(kWhatGetOutputFormat, this);
3188
3189 sp<AMessage> response;
3190 status_t err;
3191 if ((err = PostAndAwaitResponse(msg, &response)) != OK) {
3192 return err;
3193 }
3194
3195 CHECK(response->findMessage("format", format));
3196
3197 return OK;
3198 }
3199
3200 status_t MediaCodec::getInputFormat(sp<AMessage> *format) const {
3201 sp<AMessage> msg = new AMessage(kWhatGetInputFormat, this);
3202
3203 sp<AMessage> response;
3204 status_t err;
3205 if ((err = PostAndAwaitResponse(msg, &response)) != OK) {
3206 return err;
3207 }
3208
3209 CHECK(response->findMessage("format", format));
3210
3211 return OK;
3212 }
3213
3214 status_t MediaCodec::getName(AString *name) const {
3215 sp<AMessage> msg = new AMessage(kWhatGetName, this);
3216
3217 sp<AMessage> response;
3218 status_t err;
3219 if ((err = PostAndAwaitResponse(msg, &response)) != OK) {
3220 return err;
3221 }
3222
3223 CHECK(response->findString("name", name));
3224
3225 return OK;
3226 }
3227
3228 status_t MediaCodec::getCodecInfo(sp<MediaCodecInfo> *codecInfo) const {
3229 sp<AMessage> msg = new AMessage(kWhatGetCodecInfo, this);
3230
3231 sp<AMessage> response;
3232 status_t err;
3233 if ((err = PostAndAwaitResponse(msg, &response)) != OK) {
3234 return err;
3235 }
3236
3237 sp<RefBase> obj;
3238 CHECK(response->findObject("codecInfo", &obj));
3239 *codecInfo = static_cast<MediaCodecInfo *>(obj.get());
3240
3241 return OK;
3242 }
3243
3244 // this is the user-callable entry point
3245 status_t MediaCodec::getMetrics(mediametrics_handle_t &reply) {
3246
3247 reply = 0;
3248
3249 sp<AMessage> msg = new AMessage(kWhatGetMetrics, this);
3250 sp<AMessage> response;
3251 status_t err;
3252 if ((err = PostAndAwaitResponse(msg, &response)) != OK) {
3253 return err;
3254 }
3255
3256 CHECK(response->findInt64("metrics", &reply));
3257
3258 return OK;
3259 }
3260
3261 // runs on the looper thread (for mutex purposes)
3262 void MediaCodec::onGetMetrics(const sp<AMessage>& msg) {
3263
3264 mediametrics_handle_t results = 0;
3265
3266 sp<AReplyToken> replyID;
3267 CHECK(msg->senderAwaitsResponse(&replyID));
3268
3269 if (mMetricsHandle != 0) {
3270 updateMediametrics();
3271 results = mediametrics_dup(mMetricsHandle);
3272 updateEphemeralMediametrics(results);
3273 } else {
3274 results = mediametrics_dup(mMetricsHandle);
3275 }
3276
3277 sp<AMessage> response = new AMessage;
3278 response->setInt64("metrics", results);
3279 response->postReply(replyID);
3280 }
3281
3282 status_t MediaCodec::getInputBuffers(Vector<sp<MediaCodecBuffer> > *buffers) const {
3283 sp<AMessage> msg = new AMessage(kWhatGetBuffers, this);
3284 msg->setInt32("portIndex", kPortIndexInput);
3285 msg->setPointer("buffers", buffers);
3286
3287 sp<AMessage> response;
3288 return PostAndAwaitResponse(msg, &response);
3289 }
3290
3291 status_t MediaCodec::getOutputBuffers(Vector<sp<MediaCodecBuffer> > *buffers) const {
3292 sp<AMessage> msg = new AMessage(kWhatGetBuffers, this);
3293 msg->setInt32("portIndex", kPortIndexOutput);
3294 msg->setPointer("buffers", buffers);
3295
3296 sp<AMessage> response;
3297 return PostAndAwaitResponse(msg, &response);
3298 }
3299
3300 status_t MediaCodec::getOutputBuffer(size_t index, sp<MediaCodecBuffer> *buffer) {
3301 sp<AMessage> format;
3302 return getBufferAndFormat(kPortIndexOutput, index, buffer, &format);
3303 }
3304
3305 status_t MediaCodec::getOutputFormat(size_t index, sp<AMessage> *format) {
3306 sp<MediaCodecBuffer> buffer;
3307 return getBufferAndFormat(kPortIndexOutput, index, &buffer, format);
3308 }
3309
3310 status_t MediaCodec::getInputBuffer(size_t index, sp<MediaCodecBuffer> *buffer) {
3311 sp<AMessage> format;
3312 return getBufferAndFormat(kPortIndexInput, index, buffer, &format);
3313 }
3314
3315 bool MediaCodec::isExecuting() const {
3316 return mState == STARTED || mState == FLUSHED;
3317 }
3318
3319 status_t MediaCodec::getBufferAndFormat(
3320 size_t portIndex, size_t index,
3321 sp<MediaCodecBuffer> *buffer, sp<AMessage> *format) {
3322 // use mutex instead of a context switch
3323 if (mReleasedByResourceManager) {
3324 mErrorLog.log(LOG_TAG, "resource already released");
3325 return DEAD_OBJECT;
3326 }
3327
3328 if (buffer == NULL) {
3329 mErrorLog.log(LOG_TAG, "null buffer");
3330 return INVALID_OPERATION;
3331 }
3332
3333 if (format == NULL) {
3334 mErrorLog.log(LOG_TAG, "null format");
3335 return INVALID_OPERATION;
3336 }
3337
3338 buffer->clear();
3339 format->clear();
3340
3341 if (!isExecuting()) {
3342 mErrorLog.log(LOG_TAG, base::StringPrintf(
3343 "Invalid to call %s; only valid in Executing states",
3344 apiStateString().c_str()));
3345 return INVALID_OPERATION;
3346 }
3347
3348 // we do not want mPortBuffers to change during this section;
3349 // we also don't want mOwnedByClient to change during this time
3350 Mutex::Autolock al(mBufferLock);
3351
3352 std::vector<BufferInfo> &buffers = mPortBuffers[portIndex];
3353 if (index >= buffers.size()) {
3354 ALOGE("getBufferAndFormat - trying to get buffer with "
3355 "bad index (index=%zu buffer_size=%zu)", index, buffers.size());
3356 mErrorLog.log(LOG_TAG, base::StringPrintf("Bad index (index=%zu)", index));
3357 return INVALID_OPERATION;
3358 }
3359
3360 const BufferInfo &info = buffers[index];
3361 if (!info.mOwnedByClient) {
3362 ALOGE("getBufferAndFormat - invalid operation "
3363 "(the index %zu is not owned by client)", index);
3364 mErrorLog.log(LOG_TAG, base::StringPrintf("index %zu is not owned by client", index));
3365 return INVALID_OPERATION;
3366 }
3367
3368 *buffer = info.mData;
3369 *format = info.mData->format();
3370
3371 return OK;
3372 }
3373
3374 status_t MediaCodec::flush() {
3375 sp<AMessage> msg = new AMessage(kWhatFlush, this);
3376
3377 sp<AMessage> response;
3378 return PostAndAwaitResponse(msg, &response);
3379 }
3380
3381 status_t MediaCodec::requestIDRFrame() {
3382 (new AMessage(kWhatRequestIDRFrame, this))->post();
3383
3384 return OK;
3385 }
3386
3387 status_t MediaCodec::querySupportedVendorParameters(std::vector<std::string> *names) {
3388 return mCodec->querySupportedParameters(names);
3389 }
3390
3391 status_t MediaCodec::describeParameter(const std::string &name, CodecParameterDescriptor *desc) {
3392 return mCodec->describeParameter(name, desc);
3393 }
3394
3395 status_t MediaCodec::subscribeToVendorParameters(const std::vector<std::string> &names) {
3396 return mCodec->subscribeToParameters(names);
3397 }
3398
3399 status_t MediaCodec::unsubscribeFromVendorParameters(const std::vector<std::string> &names) {
3400 return mCodec->unsubscribeFromParameters(names);
3401 }
3402
3403 void MediaCodec::requestActivityNotification(const sp<AMessage> &notify) {
3404 sp<AMessage> msg = new AMessage(kWhatRequestActivityNotification, this);
3405 msg->setMessage("notify", notify);
3406 msg->post();
3407 }
3408
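// Request a CPU boost resource (at most once per codec instance) when HDR content is
// software-rendered (OMX YUV420Planar16 or codec2::software) at 1080p or larger.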
3409 void MediaCodec::requestCpuBoostIfNeeded() {
3410 if (mCpuBoostRequested) {
3411 return;
3412 }
3413 int32_t colorFormat;
3414 if (mOutputFormat->contains("hdr-static-info")
3415 && mOutputFormat->findInt32("color-format", &colorFormat)
3416 // check format for OMX only; for C2 the format is always opaque since
3417 // software rendering doesn't go through the client
3418 && ((mSoftRenderer != NULL && colorFormat == OMX_COLOR_FormatYUV420Planar16)
3419 || mOwnerName.equalsIgnoreCase("codec2::software"))) {
3420 int32_t left, top, right, bottom, width, height;
3421 int64_t totalPixel = 0;
3422 if (mOutputFormat->findRect("crop", &left, &top, &right, &bottom)) {
3423 totalPixel = (right - left + 1) * (bottom - top + 1);
3424 } else if (mOutputFormat->findInt32("width", &width)
3425 && mOutputFormat->findInt32("height", &height)) {
3426 totalPixel = width * height;
3427 }
3428 if (totalPixel >= 1920 * 1080) {
3429 mResourceManagerProxy->addResource(MediaResource::CpuBoostResource());
3430 mCpuBoostRequested = true;
3431 }
3432 }
3433 }
3434
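// BatteryChecker turns the battery stat on while the codec shows activity and back off
// after mTimeoutUs of inactivity; the generation counter invalidates timers belonging to
// a client that has since been removed.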
3435 BatteryChecker::BatteryChecker(const sp<AMessage> &msg, int64_t timeoutUs)
3436 : mTimeoutUs(timeoutUs)
3437 , mLastActivityTimeUs(-1ll)
3438 , mBatteryStatNotified(false)
3439 , mBatteryCheckerGeneration(0)
3440 , mIsExecuting(false)
3441 , mBatteryCheckerMsg(msg) {}
3442
3443 void BatteryChecker::onCodecActivity(std::function<void()> batteryOnCb) {
3444 if (!isExecuting()) {
3445 // ignore if not executing
3446 return;
3447 }
3448 if (!mBatteryStatNotified) {
3449 batteryOnCb();
3450 mBatteryStatNotified = true;
3451 sp<AMessage> msg = mBatteryCheckerMsg->dup();
3452 msg->setInt32("generation", mBatteryCheckerGeneration);
3453
3454 // post checker and clear last activity time
3455 msg->post(mTimeoutUs);
3456 mLastActivityTimeUs = -1ll;
3457 } else {
3458 // update last activity time
3459 mLastActivityTimeUs = ALooper::GetNowUs();
3460 }
3461 }
3462
3463 void BatteryChecker::onCheckBatteryTimer(
3464 const sp<AMessage> &msg, std::function<void()> batteryOffCb) {
3465 // ignore if this checker already expired because the client resource was removed
3466 int32_t generation;
3467 if (!msg->findInt32("generation", &generation)
3468 || generation != mBatteryCheckerGeneration) {
3469 return;
3470 }
3471
3472 if (mLastActivityTimeUs < 0ll) {
3473 // timed out inactive, do not repost checker
3474 batteryOffCb();
3475 mBatteryStatNotified = false;
3476 } else {
3477 // repost checker and clear last activity time
3478 msg->post(mTimeoutUs + mLastActivityTimeUs - ALooper::GetNowUs());
3479 mLastActivityTimeUs = -1ll;
3480 }
3481 }
3482
3483 void BatteryChecker::onClientRemoved() {
3484 mBatteryStatNotified = false;
3485 mBatteryCheckerGeneration++;
3486 }
3487
3488 ////////////////////////////////////////////////////////////////////////////////
3489
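// Fail any dequeueInputBuffer()/dequeueOutputBuffer() call that is still waiting for a
// buffer, and bump the timeout generations so stale timeout messages are ignored.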
3490 void MediaCodec::cancelPendingDequeueOperations() {
3491 if (mFlags & kFlagDequeueInputPending) {
3492 mErrorLog.log(LOG_TAG, "Pending dequeue input buffer request cancelled");
3493 PostReplyWithError(mDequeueInputReplyID, INVALID_OPERATION);
3494
3495 ++mDequeueInputTimeoutGeneration;
3496 mDequeueInputReplyID = 0;
3497 mFlags &= ~kFlagDequeueInputPending;
3498 }
3499
3500 if (mFlags & kFlagDequeueOutputPending) {
3501 mErrorLog.log(LOG_TAG, "Pending dequeue output buffer request cancelled");
3502 PostReplyWithError(mDequeueOutputReplyID, INVALID_OPERATION);
3503
3504 ++mDequeueOutputTimeoutGeneration;
3505 mDequeueOutputReplyID = 0;
3506 mFlags &= ~kFlagDequeueOutputPending;
3507 }
3508 }
3509
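// Returns true if the pending dequeue-input request was answered (with a buffer index or
// an error), false if no input buffer is available yet and the request remains pending.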
3510 bool MediaCodec::handleDequeueInputBuffer(const sp<AReplyToken> &replyID, bool newRequest) {
3511 if (!isExecuting()) {
3512 mErrorLog.log(LOG_TAG, base::StringPrintf(
3513 "Invalid to call %s; only valid in executing state",
3514 apiStateString().c_str()));
3515 PostReplyWithError(replyID, INVALID_OPERATION);
3516 } else if (mFlags & kFlagIsAsync) {
3517 mErrorLog.log(LOG_TAG, "Invalid to call in async mode");
3518 PostReplyWithError(replyID, INVALID_OPERATION);
3519 } else if (newRequest && (mFlags & kFlagDequeueInputPending)) {
3520 mErrorLog.log(LOG_TAG, "Invalid to call while another dequeue input request is pending");
3521 PostReplyWithError(replyID, INVALID_OPERATION);
3522 return true;
3523 } else if (mFlags & kFlagStickyError) {
3524 PostReplyWithError(replyID, getStickyError());
3525 return true;
3526 }
3527
3528 ssize_t index = dequeuePortBuffer(kPortIndexInput);
3529
3530 if (index < 0) {
3531 CHECK_EQ(index, -EAGAIN);
3532 return false;
3533 }
3534
3535 sp<AMessage> response = new AMessage;
3536 response->setSize("index", index);
3537 response->postReply(replyID);
3538
3539 return true;
3540 }
3541
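// Resolves a pending dequeue-output request. In synchronous mode, buffers-changed and
// format-changed events are reported here, at dequeue time, to preserve event ordering.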
3542 MediaCodec::DequeueOutputResult MediaCodec::handleDequeueOutputBuffer(
3543 const sp<AReplyToken> &replyID, bool newRequest) {
3544 if (!isExecuting()) {
3545 mErrorLog.log(LOG_TAG, base::StringPrintf(
3546 "Invalid to call %s; only valid in executing state",
3547 apiStateString().c_str()));
3548 PostReplyWithError(replyID, INVALID_OPERATION);
3549 } else if (mFlags & kFlagIsAsync) {
3550 mErrorLog.log(LOG_TAG, "Invalid to call in async mode");
3551 PostReplyWithError(replyID, INVALID_OPERATION);
3552 } else if (newRequest && (mFlags & kFlagDequeueOutputPending)) {
3553 mErrorLog.log(LOG_TAG, "Invalid to call while another dequeue output request is pending");
3554 PostReplyWithError(replyID, INVALID_OPERATION);
3555 } else if (mFlags & kFlagStickyError) {
3556 PostReplyWithError(replyID, getStickyError());
3557 } else if (mFlags & kFlagOutputBuffersChanged) {
3558 PostReplyWithError(replyID, INFO_OUTPUT_BUFFERS_CHANGED);
3559 mFlags &= ~kFlagOutputBuffersChanged;
3560 } else {
3561 sp<AMessage> response = new AMessage;
3562 BufferInfo *info = peekNextPortBuffer(kPortIndexOutput);
3563 if (!info) {
3564 return DequeueOutputResult::kNoBuffer;
3565 }
3566
3567 // In synchronous mode, output format changes should be handled
3568 // at dequeue time to keep the events in the correct order.
3569
3570 const sp<MediaCodecBuffer> &buffer = info->mData;
3571 handleOutputFormatChangeIfNeeded(buffer);
3572 if (mFlags & kFlagOutputFormatChanged) {
3573 PostReplyWithError(replyID, INFO_FORMAT_CHANGED);
3574 mFlags &= ~kFlagOutputFormatChanged;
3575 return DequeueOutputResult::kRepliedWithError;
3576 }
3577
3578 ssize_t index = dequeuePortBuffer(kPortIndexOutput);
3579 if (discardDecodeOnlyOutputBuffer(index)) {
3580 return DequeueOutputResult::kDiscardedBuffer;
3581 }
3582
3583 response->setSize("index", index);
3584 response->setSize("offset", buffer->offset());
3585 response->setSize("size", buffer->size());
3586
3587 int64_t timeUs;
3588 CHECK(buffer->meta()->findInt64("timeUs", &timeUs));
3589
3590 response->setInt64("timeUs", timeUs);
3591
3592 int32_t flags;
3593 CHECK(buffer->meta()->findInt32("flags", &flags));
3594
3595 response->setInt32("flags", flags);
3596
3597 statsBufferReceived(timeUs, buffer);
3598
3599 response->postReply(replyID);
3600 return DequeueOutputResult::kSuccess;
3601 }
3602
3603 return DequeueOutputResult::kRepliedWithError;
3604 }
3605
3606
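// Fill the ClientConfigParcel passed to the resource manager when notifying that this
// codec instance has started or stopped (see kWhatStartCompleted / kWhatStopCompleted).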
3607 inline void MediaCodec::initClientConfigParcel(ClientConfigParcel& clientConfig) {
3608 clientConfig.codecType = toMediaResourceSubType(mDomain);
3609 clientConfig.isEncoder = mFlags & kFlagIsEncoder;
3610 clientConfig.isHardware = !MediaCodecList::isSoftwareCodec(mComponentName);
3611 clientConfig.width = mWidth;
3612 clientConfig.height = mHeight;
3613 clientConfig.timeStamp = systemTime(SYSTEM_TIME_MONOTONIC) / 1000LL;
3614 clientConfig.id = mCodecId;
3615 }
3616
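// Main looper dispatch. kWhatCodecNotify carries notifications from the underlying
// CodecBase (errors, allocation/configure/start completion, buffer availability, EOS),
// while the remaining cases implement the public API calls posted above.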
3617 void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
3618 switch (msg->what()) {
3619 case kWhatCodecNotify:
3620 {
3621 int32_t what;
3622 CHECK(msg->findInt32("what", &what));
3623 AString codecErrorState;
3624 switch (what) {
3625 case kWhatError:
3626 case kWhatCryptoError:
3627 {
3628 int32_t err, actionCode;
3629 CHECK(msg->findInt32("err", &err));
3630 CHECK(msg->findInt32("actionCode", &actionCode));
3631
3632 ALOGE("Codec reported err %#x/%s, actionCode %d, while in state %d/%s",
3633 err, StrMediaError(err).c_str(), actionCode,
3634 mState, stateString(mState).c_str());
3635 if (err == DEAD_OBJECT) {
3636 mFlags |= kFlagSawMediaServerDie;
3637 mFlags &= ~kFlagIsComponentAllocated;
3638 }
3639 bool sendErrorResponse = true;
3640 std::string origin;
3641 if (what == kWhatCryptoError) {
3642 origin = "kWhatCryptoError:";
3643 } else {
3644 origin = "kWhatError:";
3645 //TODO: add a new error state
3646 }
3647 codecErrorState = kCodecErrorState;
3648 origin += stateString(mState);
3649 if (mCryptoAsync) {
3650 //TODO: do some book keeping on the buffers
3651 mCryptoAsync->stop();
3652 }
3653 switch (mState) {
3654 case INITIALIZING:
3655 {
3656 setState(UNINITIALIZED);
3657 break;
3658 }
3659
3660 case CONFIGURING:
3661 {
3662 if (actionCode == ACTION_CODE_FATAL) {
3663 mediametrics_setInt32(mMetricsHandle, kCodecError, err);
3664 mediametrics_setCString(mMetricsHandle, kCodecErrorState,
3665 stateString(mState).c_str());
3666 flushMediametrics();
3667 initMediametrics();
3668 }
3669 setState(actionCode == ACTION_CODE_FATAL ?
3670 UNINITIALIZED : INITIALIZED);
3671 break;
3672 }
3673
3674 case STARTING:
3675 {
3676 if (actionCode == ACTION_CODE_FATAL) {
3677 mediametrics_setInt32(mMetricsHandle, kCodecError, err);
3678 mediametrics_setCString(mMetricsHandle, kCodecErrorState,
3679 stateString(mState).c_str());
3680 flushMediametrics();
3681 initMediametrics();
3682 }
3683 setState(actionCode == ACTION_CODE_FATAL ?
3684 UNINITIALIZED : CONFIGURED);
3685 break;
3686 }
3687
3688 case RELEASING:
3689 {
3690 // Ignore the error, assuming we'll still get
3691 // the shutdown complete notification. If we
3692 // don't, we'll time out and force the release.
3693 sendErrorResponse = false;
3694 FALLTHROUGH_INTENDED;
3695 }
3696 case STOPPING:
3697 {
3698 if (mFlags & kFlagSawMediaServerDie) {
3699 if (mState == RELEASING && !mReplyID) {
3700 ALOGD("Releasing asynchronously, so nothing to reply here.");
3701 }
3702 // MediaServer died, there definitely won't
3703 // be a shutdown complete notification after
3704 // all.
3705
3706 // note that we may be directly going from
3707 // STOPPING->UNINITIALIZED, instead of the
3708 // usual STOPPING->INITIALIZED state.
3709 setState(UNINITIALIZED);
3710 if (mState == RELEASING) {
3711 mComponentName.clear();
3712 }
3713 if (mReplyID) {
3714 postPendingRepliesAndDeferredMessages(origin + ":dead");
3715 } else {
3716 ALOGD("no pending replies: %s:dead following %s",
3717 origin.c_str(), mLastReplyOrigin.c_str());
3718 }
3719 sendErrorResponse = false;
3720 } else if (!mReplyID) {
3721 sendErrorResponse = false;
3722 }
3723 break;
3724 }
3725
3726 case FLUSHING:
3727 {
3728 if (actionCode == ACTION_CODE_FATAL) {
3729 mediametrics_setInt32(mMetricsHandle, kCodecError, err);
3730 mediametrics_setCString(mMetricsHandle, kCodecErrorState,
3731 stateString(mState).c_str());
3732 flushMediametrics();
3733 initMediametrics();
3734
3735 setState(UNINITIALIZED);
3736 } else {
3737 setState((mFlags & kFlagIsAsync) ? FLUSHED : STARTED);
3738 }
3739 break;
3740 }
3741
3742 case FLUSHED:
3743 case STARTED:
3744 {
3745 sendErrorResponse = (mReplyID != nullptr);
3746
3747 setStickyError(err);
3748 postActivityNotificationIfPossible();
3749
3750 cancelPendingDequeueOperations();
3751
3752 if (mFlags & kFlagIsAsync) {
3753 if (what == kWhatError) {
3754 onError(err, actionCode);
3755 } else if (what == kWhatCryptoError) {
3756 onCryptoError(msg);
3757 }
3758 }
3759 switch (actionCode) {
3760 case ACTION_CODE_TRANSIENT:
3761 break;
3762 case ACTION_CODE_RECOVERABLE:
3763 setState(INITIALIZED);
3764 break;
3765 default:
3766 mediametrics_setInt32(mMetricsHandle, kCodecError, err);
3767 mediametrics_setCString(mMetricsHandle, kCodecErrorState,
3768 stateString(mState).c_str());
3769 flushMediametrics();
3770 initMediametrics();
3771 setState(UNINITIALIZED);
3772 break;
3773 }
3774 break;
3775 }
3776
3777 default:
3778 {
3779 sendErrorResponse = (mReplyID != nullptr);
3780
3781 setStickyError(err);
3782 postActivityNotificationIfPossible();
3783
3784 // actionCode in an uninitialized state is always fatal.
3785 if (mState == UNINITIALIZED) {
3786 actionCode = ACTION_CODE_FATAL;
3787 }
3788 if (mFlags & kFlagIsAsync) {
3789 if (what == kWhatError) {
3790 onError(err, actionCode);
3791 } else if (what == kWhatCryptoError) {
3792 onCryptoError(msg);
3793 }
3794 }
3795 switch (actionCode) {
3796 case ACTION_CODE_TRANSIENT:
3797 break;
3798 case ACTION_CODE_RECOVERABLE:
3799 setState(INITIALIZED);
3800 break;
3801 default:
3802 setState(UNINITIALIZED);
3803 break;
3804 }
3805 break;
3806 }
3807 }
3808
3809 if (sendErrorResponse) {
3810 // TRICKY: replicate PostReplyWithError logic for
3811 // err code override
3812 int32_t finalErr = err;
3813 if (mReleasedByResourceManager) {
3814 // override the err code if MediaCodec has been
3815 // released by ResourceManager.
3816 finalErr = DEAD_OBJECT;
3817 }
3818 postPendingRepliesAndDeferredMessages(origin, finalErr);
3819 }
3820 break;
3821 }
3822
3823 case kWhatComponentAllocated:
3824 {
3825 if (mState == RELEASING || mState == UNINITIALIZED) {
3826 // In case a kWhatError or kWhatRelease message came in and replied,
3827 // we log a warning and ignore.
3828 ALOGW("allocate interrupted by error or release, current state %d/%s",
3829 mState, stateString(mState).c_str());
3830 break;
3831 }
3832 CHECK_EQ(mState, INITIALIZING);
3833 setState(INITIALIZED);
3834 mFlags |= kFlagIsComponentAllocated;
3835
3836 CHECK(msg->findString("componentName", &mComponentName));
3837
3838 if (mComponentName.c_str()) {
3839 mediametrics_setCString(mMetricsHandle, kCodecCodec,
3840 mComponentName.c_str());
3841 // Update the codec name.
3842 mResourceManagerProxy->setCodecName(mComponentName.c_str());
3843 }
3844
3845 const char *owner = mCodecInfo ? mCodecInfo->getOwnerName() : "";
3846 if (mComponentName.startsWith("OMX.google.")
3847 && strncmp(owner, "default", 8) == 0) {
3848 mFlags |= kFlagUsesSoftwareRenderer;
3849 } else {
3850 mFlags &= ~kFlagUsesSoftwareRenderer;
3851 }
3852 mOwnerName = owner;
3853
3854 if (mComponentName.endsWith(".secure")) {
3855 mFlags |= kFlagIsSecure;
3856 mediametrics_setInt32(mMetricsHandle, kCodecSecure, 1);
3857 } else {
3858 mFlags &= ~kFlagIsSecure;
3859 mediametrics_setInt32(mMetricsHandle, kCodecSecure, 0);
3860 }
3861
3862 mediametrics_setInt32(mMetricsHandle, kCodecHardware,
3863 MediaCodecList::isSoftwareCodec(mComponentName) ? 0 : 1);
3864
3865 mResourceManagerProxy->addResource(MediaResource::CodecResource(
3866 mFlags & kFlagIsSecure, toMediaResourceSubType(mDomain)));
3867
3868 postPendingRepliesAndDeferredMessages("kWhatComponentAllocated");
3869 break;
3870 }
3871
3872 case kWhatComponentConfigured:
3873 {
3874 if (mState == RELEASING || mState == UNINITIALIZED || mState == INITIALIZED) {
3875 // In case a kWhatError or kWhatRelease message came in and replied,
3876 // we log a warning and ignore.
3877 ALOGW("configure interrupted by error or release, current state %d/%s",
3878 mState, stateString(mState).c_str());
3879 break;
3880 }
3881 CHECK_EQ(mState, CONFIGURING);
3882
3883 // reset input surface flag
3884 mHaveInputSurface = false;
3885
3886 CHECK(msg->findMessage("input-format", &mInputFormat));
3887 CHECK(msg->findMessage("output-format", &mOutputFormat));
3888
3889 // limit to confirming the opt-in behavior to minimize any behavioral change
3890 if (mSurface != nullptr && !mAllowFrameDroppingBySurface) {
3891 // signal frame dropping mode in the input format as this may also be
3892 // meaningful and confusing for an encoder in a transcoder scenario
3893 mInputFormat->setInt32(KEY_ALLOW_FRAME_DROP, mAllowFrameDroppingBySurface);
3894 }
3895 sp<AMessage> interestingFormat =
3896 (mFlags & kFlagIsEncoder) ? mOutputFormat : mInputFormat;
3897 ALOGV("[%s] configured as input format: %s, output format: %s",
3898 mComponentName.c_str(),
3899 mInputFormat->debugString(4).c_str(),
3900 mOutputFormat->debugString(4).c_str());
3901 int32_t usingSwRenderer;
3902 if (mOutputFormat->findInt32("using-sw-renderer", &usingSwRenderer)
3903 && usingSwRenderer) {
3904 mFlags |= kFlagUsesSoftwareRenderer;
3905 }
3906 setState(CONFIGURED);
3907 postPendingRepliesAndDeferredMessages("kWhatComponentConfigured");
3908
3909 // augment our media metrics info, now that we know more things
3910 // such as what the codec extracted from any CSD passed in.
3911 if (mMetricsHandle != 0) {
3912 sp<AMessage> format;
3913 if (mConfigureMsg != NULL &&
3914 mConfigureMsg->findMessage("format", &format)) {
3915 // format includes: mime
3916 AString mime;
3917 if (format->findString("mime", &mime)) {
3918 mediametrics_setCString(mMetricsHandle, kCodecMime,
3919 mime.c_str());
3920 }
3921 }
3922 // perhaps video only?
3923 int32_t profile = 0;
3924 if (interestingFormat->findInt32("profile", &profile)) {
3925 mediametrics_setInt32(mMetricsHandle, kCodecProfile, profile);
3926 }
3927 int32_t level = 0;
3928 if (interestingFormat->findInt32("level", &level)) {
3929 mediametrics_setInt32(mMetricsHandle, kCodecLevel, level);
3930 }
3931 sp<AMessage> uncompressedFormat =
3932 (mFlags & kFlagIsEncoder) ? mInputFormat : mOutputFormat;
3933 int32_t componentColorFormat = -1;
3934 if (uncompressedFormat->findInt32("android._color-format",
3935 &componentColorFormat)) {
3936 mediametrics_setInt32(mMetricsHandle,
3937 kCodecComponentColorFormat, componentColorFormat);
3938 }
3939 updateHdrMetrics(true /* isConfig */);
3940 int32_t codecMaxInputSize = -1;
3941 if (mInputFormat->findInt32(KEY_MAX_INPUT_SIZE, &codecMaxInputSize)) {
3942 mApiUsageMetrics.inputBufferSize.codecMax = codecMaxInputSize;
3943 }
3944 // bitrate and bitrate mode, encoder only
3945 if (mFlags & kFlagIsEncoder) {
3946 // encoder specific values
3947 int32_t bitrate_mode = -1;
3948 if (mOutputFormat->findInt32(KEY_BITRATE_MODE, &bitrate_mode)) {
3949 mediametrics_setCString(mMetricsHandle, kCodecBitrateMode,
3950 asString_BitrateMode(bitrate_mode));
3951 }
3952 int32_t bitrate = -1;
3953 if (mOutputFormat->findInt32(KEY_BIT_RATE, &bitrate)) {
3954 mediametrics_setInt32(mMetricsHandle, kCodecBitrate, bitrate);
3955 }
3956 } else {
3957 // decoder specific values
3958 }
3959 }
3960 break;
3961 }
3962
3963 case kWhatInputSurfaceCreated:
3964 {
3965 if (mState != CONFIGURED) {
3966 // state transitioned unexpectedly; we should have replied already.
3967 ALOGD("received kWhatInputSurfaceCreated message in state %s",
3968 stateString(mState).c_str());
3969 break;
3970 }
3971 // response to initiateCreateInputSurface()
3972 status_t err = NO_ERROR;
3973 sp<AMessage> response = new AMessage;
3974 if (!msg->findInt32("err", &err)) {
3975 sp<RefBase> obj;
3976 msg->findObject("input-surface", &obj);
3977 CHECK(msg->findMessage("input-format", &mInputFormat));
3978 CHECK(msg->findMessage("output-format", &mOutputFormat));
3979 ALOGV("[%s] input surface created as input format: %s, output format: %s",
3980 mComponentName.c_str(),
3981 mInputFormat->debugString(4).c_str(),
3982 mOutputFormat->debugString(4).c_str());
3983 CHECK(obj != NULL);
3984 response->setObject("input-surface", obj);
3985 mHaveInputSurface = true;
3986 } else {
3987 response->setInt32("err", err);
3988 }
3989 postPendingRepliesAndDeferredMessages("kWhatInputSurfaceCreated", response);
3990 break;
3991 }
3992
3993 case kWhatInputSurfaceAccepted:
3994 {
3995 if (mState != CONFIGURED) {
3996 // state transitioned unexpectedly; we should have replied already.
3997 ALOGD("received kWhatInputSurfaceAccepted message in state %s",
3998 stateString(mState).c_str());
3999 break;
4000 }
4001 // response to initiateSetInputSurface()
4002 status_t err = NO_ERROR;
4003 sp<AMessage> response = new AMessage();
4004 if (!msg->findInt32("err", &err)) {
4005 CHECK(msg->findMessage("input-format", &mInputFormat));
4006 CHECK(msg->findMessage("output-format", &mOutputFormat));
4007 mHaveInputSurface = true;
4008 } else {
4009 response->setInt32("err", err);
4010 }
4011 postPendingRepliesAndDeferredMessages("kWhatInputSurfaceAccepted", response);
4012 break;
4013 }
4014
4015 case kWhatSignaledInputEOS:
4016 {
4017 if (!isExecuting()) {
4018 // state transitioned unexpectedly; we should have replied already.
4019 ALOGD("received kWhatSignaledInputEOS message in state %s",
4020 stateString(mState).c_str());
4021 break;
4022 }
4023 // response to signalEndOfInputStream()
4024 sp<AMessage> response = new AMessage;
4025 status_t err;
4026 if (msg->findInt32("err", &err)) {
4027 response->setInt32("err", err);
4028 }
4029 postPendingRepliesAndDeferredMessages("kWhatSignaledInputEOS", response);
4030 break;
4031 }
4032
4033 case kWhatStartCompleted:
4034 {
4035 if (mState == RELEASING || mState == UNINITIALIZED) {
4036 // In case a kWhatRelease message came in and replied,
4037 // we log a warning and ignore.
4038 ALOGW("start interrupted by release, current state %d/%s",
4039 mState, stateString(mState).c_str());
4040 break;
4041 }
4042
4043 CHECK_EQ(mState, STARTING);
4044 if (mDomain == DOMAIN_VIDEO || mDomain == DOMAIN_IMAGE) {
4045 mResourceManagerProxy->addResource(
4046 MediaResource::GraphicMemoryResource(getGraphicBufferSize()));
4047 }
4048 // Notify the RM that the codec is in use (has been started).
4049 ClientConfigParcel clientConfig;
4050 initClientConfigParcel(clientConfig);
4051 mResourceManagerProxy->notifyClientStarted(clientConfig);
4052
4053 setState(STARTED);
4054 postPendingRepliesAndDeferredMessages("kWhatStartCompleted");
4055
4056 // Now that the codec has started, configure, by default, the peek behavior to
4057 // be undefined for backwards compatibility with older releases. Later, if an
4058 // app explicitly enables or disables peek, the parameter will be turned off and
4059 // the legacy undefined behavior is disallowed.
4060 // See updateTunnelPeek called in onSetParameters for more details.
4061 if (mTunneled && mTunnelPeekState == TunnelPeekState::kLegacyMode) {
4062 sp<AMessage> params = new AMessage;
4063 params->setInt32("android._tunnel-peek-set-legacy", 1);
4064 mCodec->signalSetParameters(params);
4065 }
4066 break;
4067 }
4068
4069 case kWhatOutputBuffersChanged:
4070 {
4071 mFlags |= kFlagOutputBuffersChanged;
4072 postActivityNotificationIfPossible();
4073 break;
4074 }
4075
4076 case kWhatOutputFramesRendered:
4077 {
4078 // ignore these in all states except running
4079 if (mState != STARTED) {
4080 break;
4081 }
4082 TunnelPeekState previousState = mTunnelPeekState;
4083 if (mTunnelPeekState != TunnelPeekState::kLegacyMode) {
4084 mTunnelPeekState = TunnelPeekState::kBufferRendered;
4085 ALOGV("TunnelPeekState: %s -> %s",
4086 asString(previousState),
4087 asString(TunnelPeekState::kBufferRendered));
4088 }
4089 processRenderedFrames(msg);
4090 // check that we have a notification set
4091 if (mOnFrameRenderedNotification != NULL) {
4092 sp<AMessage> notify = mOnFrameRenderedNotification->dup();
4093 notify->setMessage("data", msg);
4094 notify->post();
4095 }
4096 break;
4097 }
4098
4099 case kWhatFirstTunnelFrameReady:
4100 {
4101 if (mState != STARTED) {
4102 break;
4103 }
4104 TunnelPeekState previousState = mTunnelPeekState;
4105 switch(mTunnelPeekState) {
4106 case TunnelPeekState::kDisabledNoBuffer:
4107 case TunnelPeekState::kDisabledQueued:
4108 mTunnelPeekState = TunnelPeekState::kBufferDecoded;
4109 ALOGV("First tunnel frame ready");
4110 ALOGV("TunnelPeekState: %s -> %s",
4111 asString(previousState),
4112 asString(mTunnelPeekState));
4113 break;
4114 case TunnelPeekState::kEnabledNoBuffer:
4115 case TunnelPeekState::kEnabledQueued:
4116 {
4117 sp<AMessage> parameters = new AMessage();
4118 parameters->setInt32("android._trigger-tunnel-peek", 1);
4119 mCodec->signalSetParameters(parameters);
4120 }
4121 mTunnelPeekState = TunnelPeekState::kBufferRendered;
4122 ALOGV("First tunnel frame ready");
4123 ALOGV("TunnelPeekState: %s -> %s",
4124 asString(previousState),
4125 asString(mTunnelPeekState));
4126 break;
4127 default:
4128 ALOGV("Ignoring first tunnel frame ready, TunnelPeekState: %s",
4129 asString(mTunnelPeekState));
4130 break;
4131 }
4132
4133 if (mOnFirstTunnelFrameReadyNotification != nullptr) {
4134 sp<AMessage> notify = mOnFirstTunnelFrameReadyNotification->dup();
4135 notify->setMessage("data", msg);
4136 notify->post();
4137 }
4138 break;
4139 }
4140
4141 case kWhatFillThisBuffer:
4142 {
4143 /* size_t index = */updateBuffers(kPortIndexInput, msg);
4144
4145 if (mState == FLUSHING
4146 || mState == STOPPING
4147 || mState == RELEASING) {
4148 returnBuffersToCodecOnPort(kPortIndexInput);
4149 break;
4150 }
4151
4152 if (!mCSD.empty()) {
4153 ssize_t index = dequeuePortBuffer(kPortIndexInput);
4154 CHECK_GE(index, 0);
4155
4156 // If codec specific data had been specified as
4157 // part of the format in the call to configure and
4158 // if there's more csd left, we submit it here;
4159 // clients only get access to input buffers once
4160 // this data has been exhausted.
4161
4162 status_t err = queueCSDInputBuffer(index);
4163
4164 if (err != OK) {
4165 ALOGE("queueCSDInputBuffer failed w/ error %d",
4166 err);
4167
4168 setStickyError(err);
4169 postActivityNotificationIfPossible();
4170
4171 cancelPendingDequeueOperations();
4172 }
4173 break;
4174 }
4175 if (!mLeftover.empty()) {
4176 ssize_t index = dequeuePortBuffer(kPortIndexInput);
4177 CHECK_GE(index, 0);
4178
4179 status_t err = handleLeftover(index);
4180 if (err != OK) {
4181 setStickyError(err);
4182 postActivityNotificationIfPossible();
4183 cancelPendingDequeueOperations();
4184 }
4185 break;
4186 }
4187
4188 if (mFlags & kFlagIsAsync) {
4189 if (!mHaveInputSurface) {
4190 if (mState == FLUSHED) {
4191 mHavePendingInputBuffers = true;
4192 } else {
4193 onInputBufferAvailable();
4194 }
4195 }
4196 } else if (mFlags & kFlagDequeueInputPending) {
4197 CHECK(handleDequeueInputBuffer(mDequeueInputReplyID));
4198
4199 ++mDequeueInputTimeoutGeneration;
4200 mFlags &= ~kFlagDequeueInputPending;
4201 mDequeueInputReplyID = 0;
4202 } else {
4203 postActivityNotificationIfPossible();
4204 }
4205 break;
4206 }
4207
4208 case kWhatDrainThisBuffer:
4209 {
4210 if ((mFlags & kFlagUseBlockModel) == 0 && mTunneled) {
4211 sp<RefBase> obj;
4212 CHECK(msg->findObject("buffer", &obj));
4213 sp<MediaCodecBuffer> buffer = static_cast<MediaCodecBuffer *>(obj.get());
4214 if (mFlags & kFlagIsAsync) {
4215 // In asynchronous mode, output format change is processed immediately.
4216 handleOutputFormatChangeIfNeeded(buffer);
4217 } else {
4218 postActivityNotificationIfPossible();
4219 }
4220 mBufferChannel->discardBuffer(buffer);
4221 break;
4222 }
4223
4224 /* size_t index = */updateBuffers(kPortIndexOutput, msg);
4225
4226 if (mState == FLUSHING
4227 || mState == STOPPING
4228 || mState == RELEASING) {
4229 returnBuffersToCodecOnPort(kPortIndexOutput);
4230 break;
4231 }
4232
4233 if (mFlags & kFlagIsAsync) {
4234 sp<RefBase> obj;
4235 CHECK(msg->findObject("buffer", &obj));
4236 sp<MediaCodecBuffer> buffer = static_cast<MediaCodecBuffer *>(obj.get());
4237
4238 // In asynchronous mode, output format change is processed immediately.
4239 handleOutputFormatChangeIfNeeded(buffer);
4240 onOutputBufferAvailable();
4241 } else if (mFlags & kFlagDequeueOutputPending) {
4242 DequeueOutputResult dequeueResult =
4243 handleDequeueOutputBuffer(mDequeueOutputReplyID);
4244 switch (dequeueResult) {
4245 case DequeueOutputResult::kNoBuffer:
4246 TRESPASS();
4247 break;
4248 case DequeueOutputResult::kDiscardedBuffer:
4249 break;
4250 case DequeueOutputResult::kRepliedWithError:
4251 [[fallthrough]];
4252 case DequeueOutputResult::kSuccess:
4253 {
4254 ++mDequeueOutputTimeoutGeneration;
4255 mFlags &= ~kFlagDequeueOutputPending;
4256 mDequeueOutputReplyID = 0;
4257 break;
4258 }
4259 default:
4260 TRESPASS();
4261 }
4262 } else {
4263 postActivityNotificationIfPossible();
4264 }
4265
4266 break;
4267 }
4268
4269 case kWhatMetricsUpdated:
4270 {
4271 sp<AMessage> updatedMetrics;
4272 CHECK(msg->findMessage("updated-metrics", &updatedMetrics));
4273
4274 size_t numEntries = updatedMetrics->countEntries();
4275 AMessage::Type type;
4276 for (size_t i = 0; i < numEntries; ++i) {
4277 const char *name = updatedMetrics->getEntryNameAt(i, &type);
4278 AMessage::ItemData itemData = updatedMetrics->getEntryAt(i);
4279 switch (type) {
4280 case AMessage::kTypeInt32: {
4281 int32_t metricValue;
4282 itemData.find(&metricValue);
4283 mediametrics_setInt32(mMetricsHandle, name, metricValue);
4284 break;
4285 }
4286 case AMessage::kTypeInt64: {
4287 int64_t metricValue;
4288 itemData.find(&metricValue);
4289 mediametrics_setInt64(mMetricsHandle, name, metricValue);
4290 break;
4291 }
4292 case AMessage::kTypeDouble: {
4293 double metricValue;
4294 itemData.find(&metricValue);
4295 mediametrics_setDouble(mMetricsHandle, name, metricValue);
4296 break;
4297 }
4298 case AMessage::kTypeString: {
4299 AString metricValue;
4300 itemData.find(&metricValue);
4301 mediametrics_setCString(mMetricsHandle, name, metricValue.c_str());
4302 break;
4303 }
4304 // ToDo: add support for other types
4305 default:
4306 ALOGW("Updated metrics type not supported.");
4307 }
4308 }
4309 break;
4310 }
4311
4312 case kWhatEOS:
4313 {
4314 // We already notify the client of this by using the
4315 // corresponding flag in "onOutputBufferReady".
4316 break;
4317 }
4318
4319 case kWhatStopCompleted:
4320 {
4321 if (mState != STOPPING) {
4322 ALOGW("Received kWhatStopCompleted in state %d/%s",
4323 mState, stateString(mState).c_str());
4324 break;
4325 }
4326
4327 if (mIsSurfaceToDisplay) {
4328 mVideoRenderQualityTracker.resetForDiscontinuity();
4329 }
4330
4331 // Notify the RM that the codec has been stopped.
4332 ClientConfigParcel clientConfig;
4333 initClientConfigParcel(clientConfig);
4334 mResourceManagerProxy->notifyClientStopped(clientConfig);
4335
4336 setState(INITIALIZED);
4337 if (mReplyID) {
4338 postPendingRepliesAndDeferredMessages("kWhatStopCompleted");
4339 } else {
4340 ALOGW("kWhatStopCompleted: presumably an error occurred earlier, "
4341 "but the operation completed anyway. (last reply origin=%s)",
4342 mLastReplyOrigin.c_str());
4343 }
4344 break;
4345 }
4346
4347 case kWhatReleaseCompleted:
4348 {
4349 if (mState != RELEASING) {
4350 ALOGW("Received kWhatReleaseCompleted in state %d/%s",
4351 mState, stateString(mState).c_str());
4352 break;
4353 }
4354 setState(UNINITIALIZED);
4355 mComponentName.clear();
4356
4357 mFlags &= ~kFlagIsComponentAllocated;
4358
4359 // turn battery stats off since we're removing all resources, including the battery-on state
4360 if (mBatteryChecker != nullptr) {
4361 mBatteryChecker->onClientRemoved();
4362 }
4363
4364 mResourceManagerProxy->removeClient();
4365 mReleaseSurface.reset();
4366
4367 if (mReplyID != nullptr) {
4368 postPendingRepliesAndDeferredMessages("kWhatReleaseCompleted");
4369 }
4370 if (mAsyncReleaseCompleteNotification != nullptr) {
4371 flushMediametrics();
4372 mAsyncReleaseCompleteNotification->post();
4373 mAsyncReleaseCompleteNotification.clear();
4374 }
4375 break;
4376 }
4377
4378 case kWhatFlushCompleted:
4379 {
4380 if (mState != FLUSHING) {
4381 ALOGW("received FlushCompleted message in state %d/%s",
4382 mState, stateString(mState).c_str());
4383 break;
4384 }
4385
4386 if (mIsSurfaceToDisplay) {
4387 mVideoRenderQualityTracker.resetForDiscontinuity();
4388 }
4389
4390 if (mFlags & kFlagIsAsync) {
4391 setState(FLUSHED);
4392 } else {
4393 setState(STARTED);
4394 mCodec->signalResume();
4395 }
4396 mReliabilityContextMetrics.flushCount++;
4397
4398 postPendingRepliesAndDeferredMessages("kWhatFlushCompleted");
4399 break;
4400 }
4401
4402 default:
4403 TRESPASS();
4404 }
4405 break;
4406 }
4407
4408 case kWhatInit:
4409 {
4410 if (mState != UNINITIALIZED) {
4411 PostReplyWithError(msg, INVALID_OPERATION);
4412 break;
4413 }
4414
4415 if (mReplyID) {
4416 mDeferredMessages.push_back(msg);
4417 break;
4418 }
4419 sp<AReplyToken> replyID;
4420 CHECK(msg->senderAwaitsResponse(&replyID));
4421
4422 mReplyID = replyID;
4423 setState(INITIALIZING);
4424
4425 sp<RefBase> codecInfo;
4426 (void)msg->findObject("codecInfo", &codecInfo);
4427 AString name;
4428 CHECK(msg->findString("name", &name));
4429
4430 sp<AMessage> format = new AMessage;
4431 if (codecInfo) {
4432 format->setObject("codecInfo", codecInfo);
4433 }
4434 format->setString("componentName", name);
4435
4436 mCodec->initiateAllocateComponent(format);
4437 break;
4438 }
4439
4440 case kWhatSetNotification:
4441 {
4442 sp<AMessage> notify;
4443 if (msg->findMessage("on-frame-rendered", ¬ify)) {
4444 mOnFrameRenderedNotification = notify;
4445 }
4446 if (msg->findMessage("first-tunnel-frame-ready", ¬ify)) {
4447 mOnFirstTunnelFrameReadyNotification = notify;
4448 }
4449 break;
4450 }
4451
4452 case kWhatSetCallback:
4453 {
4454 sp<AReplyToken> replyID;
4455 CHECK(msg->senderAwaitsResponse(&replyID));
4456
4457 if (mState == UNINITIALIZED
4458 || mState == INITIALIZING
4459 || isExecuting()) {
4460 // callback can't be set after codec is executing,
4461 // or before it's initialized (as the callback
4462 // will be cleared when it goes to INITIALIZED)
4463 mErrorLog.log(LOG_TAG, base::StringPrintf(
4464 "Invalid to call %s; only valid at Initialized state",
4465 apiStateString().c_str()));
4466 PostReplyWithError(replyID, INVALID_OPERATION);
4467 break;
4468 }
4469
4470 sp<AMessage> callback;
4471 CHECK(msg->findMessage("callback", &callback));
4472
4473 mCallback = callback;
4474
4475 if (mCallback != NULL) {
4476 ALOGI("MediaCodec will operate in async mode");
4477 mFlags |= kFlagIsAsync;
4478 } else {
4479 mFlags &= ~kFlagIsAsync;
4480 }
4481
4482 sp<AMessage> response = new AMessage;
4483 response->postReply(replyID);
4484 break;
4485 }
4486
4487 case kWhatGetMetrics:
4488 {
4489 onGetMetrics(msg);
4490 break;
4491 }
4492
4493
4494 case kWhatConfigure:
4495 {
4496 if (mState != INITIALIZED) {
4497 mErrorLog.log(LOG_TAG, base::StringPrintf(
4498 "configure() is valid only at Initialized state; currently %s",
4499 apiStateString().c_str()));
4500 PostReplyWithError(msg, INVALID_OPERATION);
4501 break;
4502 }
4503
4504 if (mReplyID) {
4505 mDeferredMessages.push_back(msg);
4506 break;
4507 }
4508 sp<AReplyToken> replyID;
4509 CHECK(msg->senderAwaitsResponse(&replyID));
4510
4511 sp<RefBase> obj;
4512 CHECK(msg->findObject("surface", &obj));
4513
4514 sp<AMessage> format;
4515 CHECK(msg->findMessage("format", &format));
4516
4517 // start with a copy of the passed metrics info for use in this run
4518 mediametrics_handle_t handle;
4519 CHECK(msg->findInt64("metrics", &handle));
4520 if (handle != 0) {
4521 if (mMetricsHandle != 0) {
4522 flushMediametrics();
4523 }
4524 mMetricsHandle = mediametrics_dup(handle);
4525 // and set some additional metrics values
4526 initMediametrics();
4527 }
4528
4529 // from this point forward, in this configure/use/release lifecycle, we want to
4530 // upload our data
4531 mMetricsToUpload = true;
4532
4533 int32_t push;
4534 if (msg->findInt32("push-blank-buffers-on-shutdown", &push) && push != 0) {
4535 mFlags |= kFlagPushBlankBuffersOnShutdown;
4536 }
4537
4538 if (obj != NULL) {
4539 if (!format->findInt32(KEY_ALLOW_FRAME_DROP, &mAllowFrameDroppingBySurface)) {
4540 // allow frame dropping by surface by default
4541 mAllowFrameDroppingBySurface = true;
4542 }
4543
4544 format->setObject("native-window", obj);
4545 status_t err = handleSetSurface(static_cast<Surface *>(obj.get()));
4546 if (err != OK) {
4547 PostReplyWithError(replyID, err);
4548 break;
4549 }
4550 } else {
4551 // we are not using a surface, so this variable is unused; initialize it sensibly anyway
4552 mAllowFrameDroppingBySurface = false;
4553
4554 handleSetSurface(NULL);
4555 }
4556
4557 mApiUsageMetrics.isUsingOutputSurface = true;
4558
4559 uint32_t flags;
4560 CHECK(msg->findInt32("flags", (int32_t *)&flags));
4561 if (flags & CONFIGURE_FLAG_USE_BLOCK_MODEL ||
4562 flags & CONFIGURE_FLAG_USE_CRYPTO_ASYNC) {
4563 if (!(mFlags & kFlagIsAsync)) {
4564 mErrorLog.log(
4565 LOG_TAG, "Block model is only valid with callback set (async mode)");
4566 PostReplyWithError(replyID, INVALID_OPERATION);
4567 break;
4568 }
4569 if (flags & CONFIGURE_FLAG_USE_BLOCK_MODEL) {
4570 mFlags |= kFlagUseBlockModel;
4571 }
4572 if (flags & CONFIGURE_FLAG_USE_CRYPTO_ASYNC) {
4573 mFlags |= kFlagUseCryptoAsync;
4574 if ((mFlags & kFlagUseBlockModel)) {
4575 ALOGW("CrytoAsync not yet enabled for block model,\
4576 falling back to normal");
4577 }
4578 }
4579 }
4580 mReplyID = replyID;
4581 setState(CONFIGURING);
4582
4583 void *crypto;
4584 if (!msg->findPointer("crypto", &crypto)) {
4585 crypto = NULL;
4586 }
4587
4588 ALOGV("kWhatConfigure: Old mCrypto: %p (%d)",
4589 mCrypto.get(), (mCrypto != NULL ? mCrypto->getStrongCount() : 0));
4590
4591 mCrypto = static_cast<ICrypto *>(crypto);
4592 mBufferChannel->setCrypto(mCrypto);
4593
4594 ALOGV("kWhatConfigure: New mCrypto: %p (%d)",
4595 mCrypto.get(), (mCrypto != NULL ? mCrypto->getStrongCount() : 0));
4596
4597 void *descrambler;
4598 if (!msg->findPointer("descrambler", &descrambler)) {
4599 descrambler = NULL;
4600 }
4601
4602 mDescrambler = static_cast<IDescrambler *>(descrambler);
4603 mBufferChannel->setDescrambler(mDescrambler);
4604 if ((mFlags & kFlagUseCryptoAsync) &&
4605 mCrypto && (mDomain == DOMAIN_VIDEO)) {
4606                 // kFlagUseCryptoAsync is set, but CryptoAsync is not used for the block model;
4607                 // keeping the flag lets the error be propagated via onCryptoError().
4608                 // TODO (b/274628160): enable use of CONFIGURE_FLAG_USE_CRYPTO_ASYNC
4609                 // together with CONFIGURE_FLAG_USE_BLOCK_MODEL
4610 if (!(mFlags & kFlagUseBlockModel)) {
4611 mCryptoAsync = new CryptoAsync(mBufferChannel);
4612 mCryptoAsync->setCallback(
4613 std::make_unique<CryptoAsyncCallback>(new AMessage(kWhatCodecNotify, this)));
4614 mCryptoLooper = new ALooper();
4615 mCryptoLooper->setName("CryptoAsyncLooper");
4616 mCryptoLooper->registerHandler(mCryptoAsync);
4617 status_t err = mCryptoLooper->start();
4618 if (err != OK) {
4619 ALOGE("Crypto Looper failed to start");
4620 mCryptoAsync = nullptr;
4621 mCryptoLooper = nullptr;
4622 }
4623 }
4624 }
4625
4626 format->setInt32("flags", flags);
4627 if (flags & CONFIGURE_FLAG_ENCODE) {
4628 format->setInt32("encoder", true);
4629 mFlags |= kFlagIsEncoder;
4630 }
4631
4632 extractCSD(format);
4633
4634 int32_t tunneled;
4635 if (format->findInt32("feature-tunneled-playback", &tunneled) && tunneled != 0) {
4636 ALOGI("Configuring TUNNELED video playback.");
4637 mTunneled = true;
4638 } else {
4639 mTunneled = false;
4640 }
4641 mediametrics_setInt32(mMetricsHandle, kCodecTunneled, mTunneled ? 1 : 0);
4642
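// The optional android._background-mode key drops the handler thread to
// background priority for this codec instance.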
4643 int32_t background = 0;
4644 if (format->findInt32("android._background-mode", &background) && background) {
4645 androidSetThreadPriority(gettid(), ANDROID_PRIORITY_BACKGROUND);
4646 }
4647
4648 mCodec->initiateConfigureComponent(format);
4649 break;
4650 }
4651
4652 case kWhatSetSurface:
4653 {
4654 sp<AReplyToken> replyID;
4655 CHECK(msg->senderAwaitsResponse(&replyID));
4656
4657 status_t err = OK;
4658
4659 switch (mState) {
4660 case CONFIGURED:
4661 case STARTED:
4662 case FLUSHED:
4663 {
4664 sp<RefBase> obj;
4665 (void)msg->findObject("surface", &obj);
4666 sp<Surface> surface = static_cast<Surface *>(obj.get());
4667 if (mSurface == NULL) {
4668 // do not support setting surface if it was not set
4669 mErrorLog.log(LOG_TAG,
4670 "Cannot set surface if the codec is not configured with "
4671 "a surface already");
4672 err = INVALID_OPERATION;
4673 } else if (obj == NULL) {
4674 // do not support unsetting surface
4675 mErrorLog.log(LOG_TAG, "Unsetting surface is not supported");
4676 err = BAD_VALUE;
4677 } else {
4678 err = connectToSurface(surface);
4679 if (err == ALREADY_EXISTS) {
4680 // reconnecting to same surface
4681 err = OK;
4682 } else {
4683 if (err == OK) {
4684 if (mFlags & kFlagUsesSoftwareRenderer) {
4685 if (mSoftRenderer != NULL
4686 && (mFlags & kFlagPushBlankBuffersOnShutdown)) {
4687 pushBlankBuffersToNativeWindow(mSurface.get());
4688 }
4689 surface->setDequeueTimeout(-1);
4690 mSoftRenderer = new SoftwareRenderer(surface);
4691 // TODO: check if this was successful
4692 } else {
4693 err = mCodec->setSurface(surface);
4694 }
4695 }
4696 if (err == OK) {
4697 (void)disconnectFromSurface();
4698 mSurface = surface;
4699 }
4700 mReliabilityContextMetrics.setOutputSurfaceCount++;
4701 }
4702 }
4703 break;
4704 }
4705
4706 default:
4707 mErrorLog.log(LOG_TAG, base::StringPrintf(
4708 "setSurface() is valid only at Executing states; currently %s",
4709 apiStateString().c_str()));
4710 err = INVALID_OPERATION;
4711 break;
4712 }
4713
4714 PostReplyWithError(replyID, err);
4715 break;
4716 }
4717
4718 case kWhatCreateInputSurface:
4719 case kWhatSetInputSurface:
4720 {
4721 // Must be configured, but can't have been started yet.
4722 if (mState != CONFIGURED) {
4723 mErrorLog.log(LOG_TAG, base::StringPrintf(
4724 "setInputSurface() is valid only at Configured state; currently %s",
4725 apiStateString().c_str()));
4726 PostReplyWithError(msg, INVALID_OPERATION);
4727 break;
4728 }
4729
4730 if (mReplyID) {
4731 mDeferredMessages.push_back(msg);
4732 break;
4733 }
4734 sp<AReplyToken> replyID;
4735 CHECK(msg->senderAwaitsResponse(&replyID));
4736
4737 mReplyID = replyID;
4738 if (msg->what() == kWhatCreateInputSurface) {
4739 mCodec->initiateCreateInputSurface();
4740 } else {
4741 sp<RefBase> obj;
4742 CHECK(msg->findObject("input-surface", &obj));
4743
4744 mCodec->initiateSetInputSurface(
4745 static_cast<PersistentSurface *>(obj.get()));
4746 }
4747 break;
4748 }
4749 case kWhatStart:
4750 {
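// start() from the FLUSHED state is just a resume: re-deliver any input
// buffers withheld during the flush and signal the codec to resume.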
4751 if (mState == FLUSHED) {
4752 setState(STARTED);
4753 if (mHavePendingInputBuffers) {
4754 onInputBufferAvailable();
4755 mHavePendingInputBuffers = false;
4756 }
4757 mCodec->signalResume();
4758 PostReplyWithError(msg, OK);
4759 break;
4760 } else if (mState != CONFIGURED) {
4761 mErrorLog.log(LOG_TAG, base::StringPrintf(
4762 "start() is valid only at Configured state; currently %s",
4763 apiStateString().c_str()));
4764 PostReplyWithError(msg, INVALID_OPERATION);
4765 break;
4766 }
4767
4768 if (mReplyID) {
4769 mDeferredMessages.push_back(msg);
4770 break;
4771 }
4772 sp<AReplyToken> replyID;
4773 CHECK(msg->senderAwaitsResponse(&replyID));
4774 TunnelPeekState previousState = mTunnelPeekState;
4775 if (previousState != TunnelPeekState::kLegacyMode) {
4776 mTunnelPeekState = TunnelPeekState::kEnabledNoBuffer;
4777 ALOGV("TunnelPeekState: %s -> %s",
4778 asString(previousState),
4779 asString(TunnelPeekState::kEnabledNoBuffer));
4780 }
4781
4782 mReplyID = replyID;
4783 setState(STARTING);
4784
4785 mCodec->initiateStart();
4786 break;
4787 }
4788
4789 case kWhatStop: {
4790 if (mReplyID) {
4791 mDeferredMessages.push_back(msg);
4792 break;
4793 }
4794 [[fallthrough]];
4795 }
4796 case kWhatRelease:
4797 {
4798 State targetState =
4799 (msg->what() == kWhatStop) ? INITIALIZED : UNINITIALIZED;
4800
4801 if ((mState == RELEASING && targetState == UNINITIALIZED)
4802 || (mState == STOPPING && targetState == INITIALIZED)) {
4803 mDeferredMessages.push_back(msg);
4804 break;
4805 }
4806
4807 sp<AReplyToken> replyID;
4808 CHECK(msg->senderAwaitsResponse(&replyID));
4809 if (mCryptoAsync) {
4810 mCryptoAsync->stop();
4811 }
4812 sp<AMessage> asyncNotify;
4813 (void)msg->findMessage("async", &asyncNotify);
4814 // post asyncNotify if going out of scope.
4815 struct AsyncNotifyPost {
4816 AsyncNotifyPost(const sp<AMessage> &asyncNotify) : mAsyncNotify(asyncNotify) {}
4817 ~AsyncNotifyPost() {
4818 if (mAsyncNotify) {
4819 mAsyncNotify->post();
4820 }
4821 }
4822 void clear() { mAsyncNotify.clear(); }
4823 private:
4824 sp<AMessage> mAsyncNotify;
4825 } asyncNotifyPost{asyncNotify};
4826
4827 // already stopped/released
4828 if (mState == UNINITIALIZED && mReleasedByResourceManager) {
4829 sp<AMessage> response = new AMessage;
4830 response->setInt32("err", OK);
4831 response->postReply(replyID);
4832 break;
4833 }
4834
4835 int32_t reclaimed = 0;
4836 msg->findInt32("reclaimed", &reclaimed);
4837 if (reclaimed) {
4838 if (!mReleasedByResourceManager) {
4839 // notify the async client
4840 if (mFlags & kFlagIsAsync) {
4841 onError(DEAD_OBJECT, ACTION_CODE_FATAL);
4842 }
4843 mErrorLog.log(LOG_TAG, "Released by resource manager");
4844 mReleasedByResourceManager = true;
4845 }
4846
4847 int32_t force = 0;
4848 msg->findInt32("force", &force);
4849 if (!force && hasPendingBuffer()) {
4850 ALOGW("Can't reclaim codec right now due to pending buffers.");
4851
4852 // return WOULD_BLOCK to ask resource manager to retry later.
4853 sp<AMessage> response = new AMessage;
4854 response->setInt32("err", WOULD_BLOCK);
4855 response->postReply(replyID);
4856
4857 break;
4858 }
4859 }
4860
4861 bool isReleasingAllocatedComponent =
4862 (mFlags & kFlagIsComponentAllocated) && targetState == UNINITIALIZED;
4863 if (!isReleasingAllocatedComponent // See 1
4864 && mState != INITIALIZED
4865 && mState != CONFIGURED && !isExecuting()) {
4866 // 1) Permit release to shut down the component if allocated.
4867 //
4868                 // 2) We may already be in the "UNINITIALIZED" state and may
4869                 //    also have shut down the encoder/decoder without the
4870                 //    client being aware of it, if the media server died while
4871                 //    we were being stopped. The client would assume that
4872                 //    after stop() returned it would be safe to call release(),
4873                 //    and it should be; there is no harm in allowing a release()
4874                 //    if we're already uninitialized.
4875 sp<AMessage> response = new AMessage;
4876 // TODO: we shouldn't throw an exception for stop/release. Change this to wait until
4877 // the previous stop/release completes and then reply with OK.
4878 status_t err = mState == targetState ? OK : INVALID_OPERATION;
4879 response->setInt32("err", err);
4880 // TODO: mErrorLog
4881 if (err == OK && targetState == UNINITIALIZED) {
4882 mComponentName.clear();
4883 }
4884 response->postReply(replyID);
4885 break;
4886 }
4887
4888 // If we're flushing, configuring or starting but
4889 // received a release request, post the reply for the pending call
4890 // first, and consider it done. The reply token will be replaced
4891 // after this, and we'll no longer be able to reply.
4892 if (mState == FLUSHING || mState == CONFIGURING || mState == STARTING) {
4893 // mReply is always set if in these states.
4894 postPendingRepliesAndDeferredMessages(
4895 std::string("kWhatRelease:") + stateString(mState));
4896 }
4897 // If we're stopping but received a release request, post the reply
4898 // for the pending call if necessary. Note that the reply may have been
4899 // already posted due to an error.
4900 if (mState == STOPPING && mReplyID) {
4901 postPendingRepliesAndDeferredMessages("kWhatRelease:STOPPING");
4902 }
4903
4904 if (mFlags & kFlagSawMediaServerDie) {
4905 // It's dead, Jim. Don't expect initiateShutdown to yield
4906 // any useful results now...
4907 // Any pending reply would have been handled at kWhatError.
4908 setState(UNINITIALIZED);
4909 if (targetState == UNINITIALIZED) {
4910 mComponentName.clear();
4911 }
4912 (new AMessage)->postReply(replyID);
4913 break;
4914 }
4915
4916             // If we already have an error, the component may not be able to
4917             // complete the shutdown properly. If we're stopping, post the
4918             // reply now with an error to unblock the client; the client can
4919             // then release after the failure (instead of hitting an ANR).
4920 if (msg->what() == kWhatStop && (mFlags & kFlagStickyError)) {
4921 // Any pending reply would have been handled at kWhatError.
4922 PostReplyWithError(replyID, getStickyError());
4923 break;
4924 }
4925
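// For an asynchronous release, switch the codec to an internal placeholder
// surface first so the client's surface is no longer held while the release
// completes; if the switch fails, fall back to a synchronous release.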
4926 bool forceSync = false;
4927 if (asyncNotify != nullptr && mSurface != NULL) {
4928 if (!mReleaseSurface) {
4929 uint64_t usage = 0;
4930 if (mSurface->getConsumerUsage(&usage) != OK) {
4931 usage = 0;
4932 }
4933 mReleaseSurface.reset(new ReleaseSurface(usage));
4934 }
4935 if (mSurface != mReleaseSurface->getSurface()) {
4936 status_t err = connectToSurface(mReleaseSurface->getSurface());
4937 ALOGW_IF(err != OK, "error connecting to release surface: err = %d", err);
4938 if (err == OK && !(mFlags & kFlagUsesSoftwareRenderer)) {
4939 err = mCodec->setSurface(mReleaseSurface->getSurface());
4940 ALOGW_IF(err != OK, "error setting release surface: err = %d", err);
4941 }
4942 if (err == OK) {
4943 (void)disconnectFromSurface();
4944 mSurface = mReleaseSurface->getSurface();
4945 } else {
4946 // We were not able to switch the surface, so force
4947 // synchronous release.
4948 forceSync = true;
4949 }
4950 }
4951 }
4952
4953 if (mReplyID) {
4954 // State transition replies are handled above, so this reply
4955 // would not be related to state transition. As we are
4956 // shutting down the component, just fail the operation.
4957 postPendingRepliesAndDeferredMessages("kWhatRelease:reply", UNKNOWN_ERROR);
4958 }
4959 mReplyID = replyID;
4960 setState(msg->what() == kWhatStop ? STOPPING : RELEASING);
4961
4962 mCodec->initiateShutdown(
4963 msg->what() == kWhatStop /* keepComponentAllocated */);
4964
4965 returnBuffersToCodec(reclaimed);
4966
4967 if (mSoftRenderer != NULL && (mFlags & kFlagPushBlankBuffersOnShutdown)) {
4968 pushBlankBuffersToNativeWindow(mSurface.get());
4969 }
4970
4971 if (asyncNotify != nullptr) {
4972 if (!forceSync) {
4973 mResourceManagerProxy->markClientForPendingRemoval();
4974 postPendingRepliesAndDeferredMessages("kWhatRelease:async");
4975 }
4976 asyncNotifyPost.clear();
4977 mAsyncReleaseCompleteNotification = asyncNotify;
4978 }
4979
4980 break;
4981 }
4982
4983 case kWhatDequeueInputBuffer:
4984 {
4985 sp<AReplyToken> replyID;
4986 CHECK(msg->senderAwaitsResponse(&replyID));
4987
4988 if (mFlags & kFlagIsAsync) {
4989 mErrorLog.log(LOG_TAG, "dequeueInputBuffer can't be used in async mode");
4990 PostReplyWithError(replyID, INVALID_OPERATION);
4991 break;
4992 }
4993
4994 if (mHaveInputSurface) {
4995 mErrorLog.log(LOG_TAG, "dequeueInputBuffer can't be used with input surface");
4996 PostReplyWithError(replyID, INVALID_OPERATION);
4997 break;
4998 }
4999
5000 if (handleDequeueInputBuffer(replyID, true /* new request */)) {
5001 break;
5002 }
5003
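// No buffer yet: timeoutUs == 0 replies -EAGAIN immediately; a positive
// timeout arms a generation-tagged timeout message (stale timeouts are
// ignored); a negative timeout waits until a buffer becomes available.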
5004 int64_t timeoutUs;
5005 CHECK(msg->findInt64("timeoutUs", &timeoutUs));
5006
5007 if (timeoutUs == 0LL) {
5008 PostReplyWithError(replyID, -EAGAIN);
5009 break;
5010 }
5011
5012 mFlags |= kFlagDequeueInputPending;
5013 mDequeueInputReplyID = replyID;
5014
5015 if (timeoutUs > 0LL) {
5016 sp<AMessage> timeoutMsg =
5017 new AMessage(kWhatDequeueInputTimedOut, this);
5018 timeoutMsg->setInt32(
5019 "generation", ++mDequeueInputTimeoutGeneration);
5020 timeoutMsg->post(timeoutUs);
5021 }
5022 break;
5023 }
5024
5025 case kWhatDequeueInputTimedOut:
5026 {
5027 int32_t generation;
5028 CHECK(msg->findInt32("generation", &generation));
5029
5030 if (generation != mDequeueInputTimeoutGeneration) {
5031 // Obsolete
5032 break;
5033 }
5034
5035 CHECK(mFlags & kFlagDequeueInputPending);
5036
5037 PostReplyWithError(mDequeueInputReplyID, -EAGAIN);
5038
5039 mFlags &= ~kFlagDequeueInputPending;
5040 mDequeueInputReplyID = 0;
5041 break;
5042 }
5043
5044 case kWhatQueueInputBuffer:
5045 {
5046 sp<AReplyToken> replyID;
5047 CHECK(msg->senderAwaitsResponse(&replyID));
5048
5049 if (!isExecuting()) {
5050 mErrorLog.log(LOG_TAG, base::StringPrintf(
5051 "queueInputBuffer() is valid only at Executing states; currently %s",
5052 apiStateString().c_str()));
5053 PostReplyWithError(replyID, INVALID_OPERATION);
5054 break;
5055 } else if (mFlags & kFlagStickyError) {
5056 PostReplyWithError(replyID, getStickyError());
5057 break;
5058 }
5059
5060 status_t err = UNKNOWN_ERROR;
5061 if (!mLeftover.empty()) {
5062 mLeftover.push_back(msg);
5063 size_t index;
5064 msg->findSize("index", &index);
5065 err = handleLeftover(index);
5066 } else {
5067 err = onQueueInputBuffer(msg);
5068 }
5069
5070 PostReplyWithError(replyID, err);
5071 break;
5072 }
5073
5074 case kWhatDequeueOutputBuffer:
5075 {
5076 sp<AReplyToken> replyID;
5077 CHECK(msg->senderAwaitsResponse(&replyID));
5078
5079 if (mFlags & kFlagIsAsync) {
5080 mErrorLog.log(LOG_TAG, "dequeueOutputBuffer can't be used in async mode");
5081 PostReplyWithError(replyID, INVALID_OPERATION);
5082 break;
5083 }
5084
5085 DequeueOutputResult dequeueResult =
5086 handleDequeueOutputBuffer(replyID, true /* new request */);
5087 switch (dequeueResult) {
5088 case DequeueOutputResult::kNoBuffer:
5089 [[fallthrough]];
5090 case DequeueOutputResult::kDiscardedBuffer:
5091 {
5092 int64_t timeoutUs;
5093 CHECK(msg->findInt64("timeoutUs", &timeoutUs));
5094
5095 if (timeoutUs == 0LL) {
5096 PostReplyWithError(replyID, -EAGAIN);
5097 break;
5098 }
5099
5100 mFlags |= kFlagDequeueOutputPending;
5101 mDequeueOutputReplyID = replyID;
5102
5103 if (timeoutUs > 0LL) {
5104 sp<AMessage> timeoutMsg =
5105 new AMessage(kWhatDequeueOutputTimedOut, this);
5106 timeoutMsg->setInt32(
5107 "generation", ++mDequeueOutputTimeoutGeneration);
5108 timeoutMsg->post(timeoutUs);
5109 }
5110 break;
5111 }
5112 case DequeueOutputResult::kRepliedWithError:
5113 [[fallthrough]];
5114 case DequeueOutputResult::kSuccess:
5115 break;
5116 default:
5117 TRESPASS();
5118 }
5119 break;
5120 }
5121
5122 case kWhatDequeueOutputTimedOut:
5123 {
5124 int32_t generation;
5125 CHECK(msg->findInt32("generation", &generation));
5126
5127 if (generation != mDequeueOutputTimeoutGeneration) {
5128 // Obsolete
5129 break;
5130 }
5131
5132 CHECK(mFlags & kFlagDequeueOutputPending);
5133
5134 PostReplyWithError(mDequeueOutputReplyID, -EAGAIN);
5135
5136 mFlags &= ~kFlagDequeueOutputPending;
5137 mDequeueOutputReplyID = 0;
5138 break;
5139 }
5140
5141 case kWhatReleaseOutputBuffer:
5142 {
5143 sp<AReplyToken> replyID;
5144 CHECK(msg->senderAwaitsResponse(&replyID));
5145
5146 if (!isExecuting()) {
5147 mErrorLog.log(LOG_TAG, base::StringPrintf(
5148 "releaseOutputBuffer() is valid only at Executing states; currently %s",
5149 apiStateString().c_str()));
5150 PostReplyWithError(replyID, INVALID_OPERATION);
5151 break;
5152 } else if (mFlags & kFlagStickyError) {
5153 PostReplyWithError(replyID, getStickyError());
5154 break;
5155 }
5156
5157 status_t err = onReleaseOutputBuffer(msg);
5158
5159 PostReplyWithError(replyID, err);
5160 break;
5161 }
5162
5163 case kWhatPollForRenderedBuffers:
5164 {
5165 if (isExecuting()) {
5166 mBufferChannel->pollForRenderedBuffers();
5167 }
5168 break;
5169 }
5170
5171 case kWhatSignalEndOfInputStream:
5172 {
5173 if (!isExecuting()) {
5174 mErrorLog.log(LOG_TAG, base::StringPrintf(
5175 "signalEndOfInputStream() is valid only at Executing states; currently %s",
5176 apiStateString().c_str()));
5177 PostReplyWithError(msg, INVALID_OPERATION);
5178 break;
5179 } else if (!mHaveInputSurface) {
5180 mErrorLog.log(
5181 LOG_TAG, "signalEndOfInputStream() called without an input surface set");
5182 PostReplyWithError(msg, INVALID_OPERATION);
5183 break;
5184 } else if (mFlags & kFlagStickyError) {
5185 PostReplyWithError(msg, getStickyError());
5186 break;
5187 }
5188
5189 if (mReplyID) {
5190 mDeferredMessages.push_back(msg);
5191 break;
5192 }
5193 sp<AReplyToken> replyID;
5194 CHECK(msg->senderAwaitsResponse(&replyID));
5195
5196 mReplyID = replyID;
5197 mCodec->signalEndOfInputStream();
5198 break;
5199 }
5200
5201 case kWhatGetBuffers:
5202 {
5203 sp<AReplyToken> replyID;
5204 CHECK(msg->senderAwaitsResponse(&replyID));
5205 if (!isExecuting()) {
5206 mErrorLog.log(LOG_TAG, base::StringPrintf(
5207 "getInput/OutputBuffers() is valid only at Executing states; currently %s",
5208 apiStateString().c_str()));
5209 PostReplyWithError(replyID, INVALID_OPERATION);
5210 break;
5211 } else if (mFlags & kFlagIsAsync) {
5212 mErrorLog.log(LOG_TAG, "getInput/OutputBuffers() is not supported with callbacks");
5213 PostReplyWithError(replyID, INVALID_OPERATION);
5214 break;
5215 } else if (mFlags & kFlagStickyError) {
5216 PostReplyWithError(replyID, getStickyError());
5217 break;
5218 }
5219
5220 int32_t portIndex;
5221 CHECK(msg->findInt32("portIndex", &portIndex));
5222
5223 Vector<sp<MediaCodecBuffer> > *dstBuffers;
5224 CHECK(msg->findPointer("buffers", (void **)&dstBuffers));
5225
5226 dstBuffers->clear();
5227 // If we're using input surface (either non-persistent created by
5228 // createInputSurface(), or persistent set by setInputSurface()),
5229 // give the client an empty input buffers array.
5230 if (portIndex != kPortIndexInput || !mHaveInputSurface) {
5231 if (portIndex == kPortIndexInput) {
5232 mBufferChannel->getInputBufferArray(dstBuffers);
5233 } else {
5234 mBufferChannel->getOutputBufferArray(dstBuffers);
5235 }
5236 }
5237
5238 mApiUsageMetrics.isArrayMode = true;
5239
5240 (new AMessage)->postReply(replyID);
5241 break;
5242 }
5243
5244 case kWhatFlush:
5245 {
5246 if (!isExecuting()) {
5247 mErrorLog.log(LOG_TAG, base::StringPrintf(
5248 "flush() is valid only at Executing states; currently %s",
5249 apiStateString().c_str()));
5250 PostReplyWithError(msg, INVALID_OPERATION);
5251 break;
5252 } else if (mFlags & kFlagStickyError) {
5253 PostReplyWithError(msg, getStickyError());
5254 break;
5255 }
5256
5257 if (mReplyID) {
5258 mDeferredMessages.push_back(msg);
5259 break;
5260 }
5261 sp<AReplyToken> replyID;
5262 CHECK(msg->senderAwaitsResponse(&replyID));
5263
5264 mReplyID = replyID;
5265 // TODO: skip flushing if already FLUSHED
5266 setState(FLUSHING);
5267 if (mCryptoAsync) {
5268 std::list<sp<AMessage>> pendingBuffers;
5269 mCryptoAsync->stop(&pendingBuffers);
5270 //TODO: do something with these buffers
5271 }
5272 mCodec->signalFlush();
5273 returnBuffersToCodec();
5274 TunnelPeekState previousState = mTunnelPeekState;
5275 if (previousState != TunnelPeekState::kLegacyMode) {
5276 mTunnelPeekState = TunnelPeekState::kEnabledNoBuffer;
5277 ALOGV("TunnelPeekState: %s -> %s",
5278 asString(previousState),
5279 asString(TunnelPeekState::kEnabledNoBuffer));
5280 }
5281 break;
5282 }
5283
5284 case kWhatGetInputFormat:
5285 case kWhatGetOutputFormat:
5286 {
5287 sp<AMessage> format =
5288 (msg->what() == kWhatGetOutputFormat ? mOutputFormat : mInputFormat);
5289
5290 sp<AReplyToken> replyID;
5291 CHECK(msg->senderAwaitsResponse(&replyID));
5292
5293 if (mState != CONFIGURED && mState != STARTING &&
5294 mState != STARTED && mState != FLUSHING &&
5295 mState != FLUSHED) {
5296 mErrorLog.log(LOG_TAG, base::StringPrintf(
5297 "getInput/OutputFormat() is valid at Executing states "
5298 "and Configured state; currently %s",
5299 apiStateString().c_str()));
5300 PostReplyWithError(replyID, INVALID_OPERATION);
5301 break;
5302 } else if (format == NULL) {
5303 mErrorLog.log(LOG_TAG, "Fatal error: format is not initialized");
5304 PostReplyWithError(replyID, INVALID_OPERATION);
5305 break;
5306 } else if (mFlags & kFlagStickyError) {
5307 PostReplyWithError(replyID, getStickyError());
5308 break;
5309 }
5310
5311 sp<AMessage> response = new AMessage;
5312 response->setMessage("format", format);
5313 response->postReply(replyID);
5314 break;
5315 }
5316
5317 case kWhatRequestIDRFrame:
5318 {
5319 mCodec->signalRequestIDRFrame();
5320 break;
5321 }
5322
5323 case kWhatRequestActivityNotification:
5324 {
5325 CHECK(mActivityNotify == NULL);
5326 CHECK(msg->findMessage("notify", &mActivityNotify));
5327
5328 postActivityNotificationIfPossible();
5329 break;
5330 }
5331
5332 case kWhatGetName:
5333 {
5334 sp<AReplyToken> replyID;
5335 CHECK(msg->senderAwaitsResponse(&replyID));
5336
5337 if (mComponentName.empty()) {
5338 mErrorLog.log(LOG_TAG, "Fatal error: name is not set");
5339 PostReplyWithError(replyID, INVALID_OPERATION);
5340 break;
5341 }
5342
5343 sp<AMessage> response = new AMessage;
5344 response->setString("name", mComponentName.c_str());
5345 response->postReply(replyID);
5346 break;
5347 }
5348
5349 case kWhatGetCodecInfo:
5350 {
5351 sp<AReplyToken> replyID;
5352 CHECK(msg->senderAwaitsResponse(&replyID));
5353
5354 sp<AMessage> response = new AMessage;
5355 response->setObject("codecInfo", mCodecInfo);
5356 response->postReply(replyID);
5357 break;
5358 }
5359
5360 case kWhatSetParameters:
5361 {
5362 sp<AReplyToken> replyID;
5363 CHECK(msg->senderAwaitsResponse(&replyID));
5364
5365 sp<AMessage> params;
5366 CHECK(msg->findMessage("params", ¶ms));
5367
5368 status_t err = onSetParameters(params);
5369
5370 PostReplyWithError(replyID, err);
5371 break;
5372 }
5373
5374 case kWhatDrmReleaseCrypto:
5375 {
5376 onReleaseCrypto(msg);
5377 break;
5378 }
5379
5380 case kWhatCheckBatteryStats:
5381 {
5382 if (mBatteryChecker != nullptr) {
5383 mBatteryChecker->onCheckBatteryTimer(msg, [this] () {
5384 mResourceManagerProxy->removeResource(
5385 MediaResource::VideoBatteryResource());
5386 });
5387 }
5388 break;
5389 }
5390
5391 default:
5392 TRESPASS();
5393 }
5394 }
5395
5396 void MediaCodec::handleOutputFormatChangeIfNeeded(const sp<MediaCodecBuffer> &buffer) {
5397 sp<AMessage> format = buffer->format();
5398 if (mOutputFormat == format) {
5399 return;
5400 }
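// In block model, attach the union of keys that differ between the old and
// new output formats to the buffer's meta so the client can tell what changed.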
5401 if (mFlags & kFlagUseBlockModel) {
5402 sp<AMessage> diff1 = mOutputFormat->changesFrom(format);
5403 sp<AMessage> diff2 = format->changesFrom(mOutputFormat);
5404 std::set<std::string> keys;
5405 size_t numEntries = diff1->countEntries();
5406 AMessage::Type type;
5407 for (size_t i = 0; i < numEntries; ++i) {
5408 keys.emplace(diff1->getEntryNameAt(i, &type));
5409 }
5410 numEntries = diff2->countEntries();
5411 for (size_t i = 0; i < numEntries; ++i) {
5412 keys.emplace(diff2->getEntryNameAt(i, &type));
5413 }
5414 sp<WrapperObject<std::set<std::string>>> changedKeys{
5415 new WrapperObject<std::set<std::string>>{std::move(keys)}};
5416 buffer->meta()->setObject("changedKeys", changedKeys);
5417 }
5418 mOutputFormat = format;
5419 mapFormat(mComponentName, format, nullptr, true);
5420 ALOGV("[%s] output format changed to: %s",
5421 mComponentName.c_str(), mOutputFormat->debugString(4).c_str());
5422
5423 if (mSoftRenderer == NULL &&
5424 mSurface != NULL &&
5425 (mFlags & kFlagUsesSoftwareRenderer)) {
5426 AString mime;
5427 CHECK(mOutputFormat->findString("mime", &mime));
5428
5429 // TODO: propagate color aspects to software renderer to allow better
5430 // color conversion to RGB. For now, just mark dataspace for YUV
5431 // rendering.
5432 int32_t dataSpace;
5433 if (mOutputFormat->findInt32("android._dataspace", &dataSpace)) {
5434 ALOGD("[%s] setting dataspace on output surface to %#x",
5435 mComponentName.c_str(), dataSpace);
5436 int err = native_window_set_buffers_data_space(
5437 mSurface.get(), (android_dataspace)dataSpace);
5438 ALOGW_IF(err != 0, "failed to set dataspace on surface (%d)", err);
5439 }
5440 if (mOutputFormat->contains("hdr-static-info")) {
5441 HDRStaticInfo info;
5442 if (ColorUtils::getHDRStaticInfoFromFormat(mOutputFormat, &info)) {
5443 setNativeWindowHdrMetadata(mSurface.get(), &info);
5444 }
5445 }
5446
5447 sp<ABuffer> hdr10PlusInfo;
5448 if (mOutputFormat->findBuffer("hdr10-plus-info", &hdr10PlusInfo)
5449 && hdr10PlusInfo != nullptr && hdr10PlusInfo->size() > 0) {
5450 native_window_set_buffers_hdr10_plus_metadata(mSurface.get(),
5451 hdr10PlusInfo->size(), hdr10PlusInfo->data());
5452 }
5453
5454 if (mime.startsWithIgnoreCase("video/")) {
5455 mSurface->setDequeueTimeout(-1);
5456 mSoftRenderer = new SoftwareRenderer(mSurface, mRotationDegrees);
5457 }
5458 }
5459
5460 requestCpuBoostIfNeeded();
5461
5462 if (mFlags & kFlagIsEncoder) {
5463 // Before we announce the format change we should
5464 // collect codec specific data and amend the output
5465 // format as necessary.
5466 int32_t flags = 0;
5467 (void) buffer->meta()->findInt32("flags", &flags);
5468 if ((flags & BUFFER_FLAG_CODECCONFIG) && !(mFlags & kFlagIsSecure)
5469 && !mOwnerName.startsWith("codec2::")) {
5470 status_t err =
5471 amendOutputFormatWithCodecSpecificData(buffer);
5472
5473 if (err != OK) {
5474 ALOGE("Codec spit out malformed codec "
5475 "specific data!");
5476 }
5477 }
5478 }
5479 if (mFlags & kFlagIsAsync) {
5480 onOutputFormatChanged();
5481 } else {
5482 mFlags |= kFlagOutputFormatChanged;
5483 postActivityNotificationIfPossible();
5484 }
5485
5486 // Update the width and the height.
5487 int32_t left = 0, top = 0, right = 0, bottom = 0, width = 0, height = 0;
5488 bool resolutionChanged = false;
5489 if (mOutputFormat->findRect("crop", &left, &top, &right, &bottom)) {
5490 mWidth = right - left + 1;
5491 mHeight = bottom - top + 1;
5492 resolutionChanged = true;
5493 } else if (mOutputFormat->findInt32("width", &width) &&
5494 mOutputFormat->findInt32("height", &height)) {
5495 mWidth = width;
5496 mHeight = height;
5497 resolutionChanged = true;
5498 }
5499
5500 // Notify mCrypto and the RM of video resolution changes
5501 if (resolutionChanged) {
5502 if (mCrypto != NULL) {
5503 mCrypto->notifyResolution(mWidth, mHeight);
5504 }
5505 ClientConfigParcel clientConfig;
5506 initClientConfigParcel(clientConfig);
5507 mResourceManagerProxy->notifyClientConfigChanged(clientConfig);
5508 mReliabilityContextMetrics.resolutionChangeCount++;
5509 }
5510
5511 updateHdrMetrics(false /* isConfig */);
5512 }
5513
5514 void MediaCodec::extractCSD(const sp<AMessage> &format) {
5515 mCSD.clear();
5516
5517 size_t i = 0;
5518 for (;;) {
5519 sp<ABuffer> csd;
5520 if (!format->findBuffer(base::StringPrintf("csd-%zu", i).c_str(), &csd)) {
5521 break;
5522 }
5523 if (csd->size() == 0) {
5524 ALOGW("csd-%zu size is 0", i);
5525 }
5526
5527 mCSD.push_back(csd);
5528 ++i;
5529 }
5530
5531 ALOGV("Found %zu pieces of codec specific data.", mCSD.size());
5532 }
5533
5534 status_t MediaCodec::queueCSDInputBuffer(size_t bufferIndex) {
5535 CHECK(!mCSD.empty());
5536
5537 sp<ABuffer> csd = *mCSD.begin();
5538 mCSD.erase(mCSD.begin());
5539 std::shared_ptr<C2Buffer> c2Buffer;
5540 sp<hardware::HidlMemory> memory;
5541
5542 if (mFlags & kFlagUseBlockModel) {
5543 if (hasCryptoOrDescrambler()) {
5544 constexpr size_t kInitialDealerCapacity = 1048576; // 1MB
5545 thread_local sp<MemoryDealer> sDealer = new MemoryDealer(
5546 kInitialDealerCapacity, "CSD(1MB)");
5547 sp<IMemory> mem = sDealer->allocate(csd->size());
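// Allocation failed: replace the thread-local dealer with one whose capacity
// is at least twice the CSD size (doubling from the current heap size).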
5548 if (mem == nullptr) {
5549 size_t newDealerCapacity = sDealer->getMemoryHeap()->getSize() * 2;
5550 while (csd->size() * 2 > newDealerCapacity) {
5551 newDealerCapacity *= 2;
5552 }
5553 sDealer = new MemoryDealer(
5554 newDealerCapacity,
5555 base::StringPrintf("CSD(%zuMB)", newDealerCapacity / 1048576).c_str());
5556 mem = sDealer->allocate(csd->size());
5557 }
5558 memcpy(mem->unsecurePointer(), csd->data(), csd->size());
5559 ssize_t heapOffset;
5560 memory = hardware::fromHeap(mem->getMemory(&heapOffset, nullptr));
5561 } else {
5562 std::shared_ptr<C2LinearBlock> block =
5563 FetchLinearBlock(csd->size(), {std::string{mComponentName.c_str()}});
5564 C2WriteView view{block->map().get()};
5565 if (view.error() != C2_OK) {
5566 mErrorLog.log(LOG_TAG, "Fatal error: failed to allocate and map a block");
5567 return -EINVAL;
5568 }
5569 if (csd->size() > view.capacity()) {
5570 mErrorLog.log(LOG_TAG, base::StringPrintf(
5571 "Fatal error: allocated block is too small "
5572 "(csd size %zu; block cap %u)",
5573 csd->size(), view.capacity()));
5574 return -EINVAL;
5575 }
5576 memcpy(view.base(), csd->data(), csd->size());
5577 c2Buffer = C2Buffer::CreateLinearBuffer(block->share(0, csd->size(), C2Fence{}));
5578 }
5579 } else {
5580 const BufferInfo &info = mPortBuffers[kPortIndexInput][bufferIndex];
5581 const sp<MediaCodecBuffer> &codecInputData = info.mData;
5582
5583 if (csd->size() > codecInputData->capacity()) {
5584 mErrorLog.log(LOG_TAG, base::StringPrintf(
5585 "CSD is too large to fit in input buffer "
5586 "(csd size %zu; buffer cap %zu)",
5587 csd->size(), codecInputData->capacity()));
5588 return -EINVAL;
5589 }
5590 if (codecInputData->data() == NULL) {
5591 ALOGV("Input buffer %zu is not properly allocated", bufferIndex);
5592 mErrorLog.log(LOG_TAG, base::StringPrintf(
5593 "Fatal error: input buffer %zu is not properly allocated", bufferIndex));
5594 return -EINVAL;
5595 }
5596
5597 memcpy(codecInputData->data(), csd->data(), csd->size());
5598 }
5599
5600 AString errorDetailMsg;
5601
5602 sp<AMessage> msg = new AMessage(kWhatQueueInputBuffer, this);
5603 msg->setSize("index", bufferIndex);
5604 msg->setSize("offset", 0);
5605 msg->setSize("size", csd->size());
5606 msg->setInt64("timeUs", 0LL);
5607 msg->setInt32("flags", BUFFER_FLAG_CODECCONFIG);
5608 msg->setPointer("errorDetailMsg", &errorDetailMsg);
5609 if (c2Buffer) {
5610 sp<WrapperObject<std::shared_ptr<C2Buffer>>> obj{
5611 new WrapperObject<std::shared_ptr<C2Buffer>>{c2Buffer}};
5612 msg->setObject("c2buffer", obj);
5613 } else if (memory) {
5614 sp<WrapperObject<sp<hardware::HidlMemory>>> obj{
5615 new WrapperObject<sp<hardware::HidlMemory>>{memory}};
5616 msg->setObject("memory", obj);
5617 }
5618
5619 return onQueueInputBuffer(msg);
5620 }
5621
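// Entering INITIALIZED or UNINITIALIZED tears down per-configuration state
// (software renderer, crypto, surface, formats, callback); entering
// UNINITIALIZED additionally returns any straggling buffers to the codec.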
5622 void MediaCodec::setState(State newState) {
5623 if (newState == INITIALIZED || newState == UNINITIALIZED) {
5624 delete mSoftRenderer;
5625 mSoftRenderer = NULL;
5626
5627 if ( mCrypto != NULL ) {
5628 ALOGV("setState: ~mCrypto: %p (%d)",
5629 mCrypto.get(), (mCrypto != NULL ? mCrypto->getStrongCount() : 0));
5630 }
5631 mCrypto.clear();
5632 mDescrambler.clear();
5633 handleSetSurface(NULL);
5634
5635 mInputFormat.clear();
5636 mOutputFormat.clear();
5637 mFlags &= ~kFlagOutputFormatChanged;
5638 mFlags &= ~kFlagOutputBuffersChanged;
5639 mFlags &= ~kFlagStickyError;
5640 mFlags &= ~kFlagIsEncoder;
5641 mFlags &= ~kFlagIsAsync;
5642 mStickyError = OK;
5643
5644 mActivityNotify.clear();
5645 mCallback.clear();
5646 mErrorLog.clear();
5647 }
5648
5649 if (newState == UNINITIALIZED) {
5650 // return any straggling buffers, e.g. if we got here on an error
5651 returnBuffersToCodec();
5652
5653         // The component is gone; mediaserver is probably back up already,
5654         // and should definitely be back up by the time we try to instantiate
5655         // another component... and the cycle continues.
5656 mFlags &= ~kFlagSawMediaServerDie;
5657 }
5658
5659 mState = newState;
5660
5661 if (mBatteryChecker != nullptr) {
5662 mBatteryChecker->setExecuting(isExecuting());
5663 }
5664
5665 cancelPendingDequeueOperations();
5666 }
5667
5668 void MediaCodec::returnBuffersToCodec(bool isReclaim) {
5669 returnBuffersToCodecOnPort(kPortIndexInput, isReclaim);
5670 returnBuffersToCodecOnPort(kPortIndexOutput, isReclaim);
5671 }
5672
5673 void MediaCodec::returnBuffersToCodecOnPort(int32_t portIndex, bool isReclaim) {
5674 CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);
5675 Mutex::Autolock al(mBufferLock);
5676
5677 if (portIndex == kPortIndexInput) {
5678 mLeftover.clear();
5679 }
5680 for (size_t i = 0; i < mPortBuffers[portIndex].size(); ++i) {
5681 BufferInfo *info = &mPortBuffers[portIndex][i];
5682
5683 if (info->mData != nullptr) {
5684 sp<MediaCodecBuffer> buffer = info->mData;
5685 if (isReclaim && info->mOwnedByClient) {
5686 ALOGD("port %d buffer %zu still owned by client when codec is reclaimed",
5687 portIndex, i);
5688 } else {
5689 info->mOwnedByClient = false;
5690 info->mData.clear();
5691 }
5692 mBufferChannel->discardBuffer(buffer);
5693 }
5694 }
5695
5696 mAvailPortBuffers[portIndex].clear();
5697 }
5698
5699 size_t MediaCodec::updateBuffers(
5700 int32_t portIndex, const sp<AMessage> &msg) {
5701 CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);
5702 size_t index;
5703 CHECK(msg->findSize("index", &index));
5704 sp<RefBase> obj;
5705 CHECK(msg->findObject("buffer", &obj));
5706 sp<MediaCodecBuffer> buffer = static_cast<MediaCodecBuffer *>(obj.get());
5707
5708 {
5709 Mutex::Autolock al(mBufferLock);
5710 if (mPortBuffers[portIndex].size() <= index) {
5711 mPortBuffers[portIndex].resize(align(index + 1, kNumBuffersAlign));
5712 }
5713 mPortBuffers[portIndex][index].mData = buffer;
5714 }
5715 mAvailPortBuffers[portIndex].push_back(index);
5716
5717 return index;
5718 }
5719
5720 status_t MediaCodec::onQueueInputBuffer(const sp<AMessage> &msg) {
5721 size_t index;
5722 size_t offset;
5723 size_t size;
5724 int64_t timeUs;
5725 uint32_t flags;
5726 CHECK(msg->findSize("index", &index));
5727 CHECK(msg->findInt64("timeUs", &timeUs));
5728 CHECK(msg->findInt32("flags", (int32_t *)&flags));
5729 std::shared_ptr<C2Buffer> c2Buffer;
5730 sp<hardware::HidlMemory> memory;
5731 sp<RefBase> obj;
5732 if (msg->findObject("c2buffer", &obj)) {
5733 CHECK(obj);
5734 c2Buffer = static_cast<WrapperObject<std::shared_ptr<C2Buffer>> *>(obj.get())->value;
5735 } else if (msg->findObject("memory", &obj)) {
5736 CHECK(obj);
5737 memory = static_cast<WrapperObject<sp<hardware::HidlMemory>> *>(obj.get())->value;
5738 CHECK(msg->findSize("offset", &offset));
5739 } else {
5740 CHECK(msg->findSize("offset", &offset));
5741 }
5742 const CryptoPlugin::SubSample *subSamples;
5743 size_t numSubSamples = 0;
5744 const uint8_t *key = NULL;
5745 const uint8_t *iv = NULL;
5746 CryptoPlugin::Mode mode = CryptoPlugin::kMode_Unencrypted;
5747
5748 // We allow the simpler queueInputBuffer API to be used even in
5749 // secure mode, by fabricating a single unencrypted subSample.
5750 CryptoPlugin::SubSample ss;
5751 CryptoPlugin::Pattern pattern;
5752
5753 if (msg->findSize("size", &size)) {
5754 if (hasCryptoOrDescrambler()) {
5755 ss.mNumBytesOfClearData = size;
5756 ss.mNumBytesOfEncryptedData = 0;
5757
5758 subSamples = &ss;
5759 numSubSamples = 1;
5760 pattern.mEncryptBlocks = 0;
5761 pattern.mSkipBlocks = 0;
5762 }
5763 } else if (!c2Buffer) {
5764 if (!hasCryptoOrDescrambler()) {
5765 ALOGE("[%s] queuing secure buffer without mCrypto or mDescrambler!",
5766 mComponentName.c_str());
5767 mErrorLog.log(LOG_TAG, "queuing secure buffer without mCrypto or mDescrambler!");
5768 return -EINVAL;
5769 }
5770 CHECK(msg->findPointer("subSamples", (void **)&subSamples));
5771 CHECK(msg->findSize("numSubSamples", &numSubSamples));
5772 CHECK(msg->findPointer("key", (void **)&key));
5773 CHECK(msg->findPointer("iv", (void **)&iv));
5774 CHECK(msg->findInt32("encryptBlocks", (int32_t *)&pattern.mEncryptBlocks));
5775 CHECK(msg->findInt32("skipBlocks", (int32_t *)&pattern.mSkipBlocks));
5776
5777 int32_t tmp;
5778 CHECK(msg->findInt32("mode", &tmp));
5779
5780 mode = (CryptoPlugin::Mode)tmp;
5781
5782 size = 0;
5783 for (size_t i = 0; i < numSubSamples; ++i) {
5784 size += subSamples[i].mNumBytesOfClearData;
5785 size += subSamples[i].mNumBytesOfEncryptedData;
5786 }
5787 }
5788
5789 if (index >= mPortBuffers[kPortIndexInput].size()) {
5790 mErrorLog.log(LOG_TAG, base::StringPrintf(
5791 "index out of range (index=%zu)", mPortBuffers[kPortIndexInput].size()));
5792 return -ERANGE;
5793 }
5794
5795 BufferInfo *info = &mPortBuffers[kPortIndexInput][index];
5796 sp<MediaCodecBuffer> buffer = info->mData;
5797 if (buffer == nullptr) {
5798 mErrorLog.log(LOG_TAG, base::StringPrintf(
5799 "Fatal error: failed to fetch buffer for index %zu", index));
5800 return -EACCES;
5801 }
5802 if (!info->mOwnedByClient) {
5803 mErrorLog.log(LOG_TAG, base::StringPrintf(
5804 "client does not own the buffer #%zu", index));
5805 return -EACCES;
5806 }
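// Copies the timestamp and flag metadata onto the buffer and, for tunneled
// playback, tags the first frame queued since start/flush with "tunnel-first-frame".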
5807 auto setInputBufferParams = [this, &buffer]
5808 (int64_t timeUs, uint32_t flags = 0) -> status_t {
5809 status_t err = OK;
5810 buffer->meta()->setInt64("timeUs", timeUs);
5811 if (flags & BUFFER_FLAG_EOS) {
5812 buffer->meta()->setInt32("eos", true);
5813 }
5814
5815 if (flags & BUFFER_FLAG_CODECCONFIG) {
5816 buffer->meta()->setInt32("csd", true);
5817 }
5818 bool isBufferDecodeOnly = ((flags & BUFFER_FLAG_DECODE_ONLY) != 0);
5819 if (isBufferDecodeOnly) {
5820 buffer->meta()->setInt32("decode-only", true);
5821 }
5822 if (mTunneled && !isBufferDecodeOnly) {
5823 TunnelPeekState previousState = mTunnelPeekState;
5824 switch(mTunnelPeekState){
5825 case TunnelPeekState::kEnabledNoBuffer:
5826 buffer->meta()->setInt32("tunnel-first-frame", 1);
5827 mTunnelPeekState = TunnelPeekState::kEnabledQueued;
5828 ALOGV("TunnelPeekState: %s -> %s",
5829 asString(previousState),
5830 asString(mTunnelPeekState));
5831 break;
5832 case TunnelPeekState::kDisabledNoBuffer:
5833 buffer->meta()->setInt32("tunnel-first-frame", 1);
5834 mTunnelPeekState = TunnelPeekState::kDisabledQueued;
5835 ALOGV("TunnelPeekState: %s -> %s",
5836 asString(previousState),
5837 asString(mTunnelPeekState));
5838 break;
5839 default:
5840 break;
5841 }
5842 }
5843 return err;
5844 };
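// Packages the key, IV, subsample array and crypto pattern into an AMessage,
// used both to enqueue an async decrypt request and to report a crypto error.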
5845 auto buildCryptoInfoAMessage = [&](const sp<AMessage> & cryptoInfo, int32_t action) {
5846 size_t key_len = (key != nullptr)? 16 : 0;
5847 size_t iv_len = (iv != nullptr)? 16 : 0;
5848 sp<ABuffer> shared_key;
5849 sp<ABuffer> shared_iv;
5850 if (key_len > 0) {
5851 shared_key = ABuffer::CreateAsCopy((void*)key, key_len);
5852 }
5853 if (iv_len > 0) {
5854 shared_iv = ABuffer::CreateAsCopy((void*)iv, iv_len);
5855 }
5856 sp<ABuffer> subSamples_buffer =
5857 new ABuffer(sizeof(CryptoPlugin::SubSample) * numSubSamples);
5858 CryptoPlugin::SubSample * samples =
5859 (CryptoPlugin::SubSample *)(subSamples_buffer.get()->data());
5860         for (size_t s = 0; s < numSubSamples; s++) {
5861 samples[s].mNumBytesOfClearData = subSamples[s].mNumBytesOfClearData;
5862 samples[s].mNumBytesOfEncryptedData = subSamples[s].mNumBytesOfEncryptedData;
5863 }
5864 // set decrypt Action
5865 cryptoInfo->setInt32("action", action);
5866 cryptoInfo->setObject("buffer", buffer);
5867 cryptoInfo->setInt32("secure", mFlags & kFlagIsSecure);
5868 cryptoInfo->setBuffer("key", shared_key);
5869 cryptoInfo->setBuffer("iv", shared_iv);
5870 cryptoInfo->setInt32("mode", (int)mode);
5871 cryptoInfo->setInt32("encryptBlocks", pattern.mEncryptBlocks);
5872 cryptoInfo->setInt32("skipBlocks", pattern.mSkipBlocks);
5873 cryptoInfo->setBuffer("subSamples", subSamples_buffer);
5874 cryptoInfo->setSize("numSubSamples", numSubSamples);
5875 };
5876 if (c2Buffer || memory) {
5877 sp<AMessage> tunings = NULL;
5878 if (msg->findMessage("tunings", &tunings) && tunings != NULL) {
5879 onSetParameters(tunings);
5880 }
5881 status_t err = OK;
5882 if (c2Buffer) {
5883 err = mBufferChannel->attachBuffer(c2Buffer, buffer);
5884 } else if (memory) {
5885 AString errorDetailMsg;
5886 err = mBufferChannel->attachEncryptedBuffer(
5887 memory, (mFlags & kFlagIsSecure), key, iv, mode, pattern,
5888 offset, subSamples, numSubSamples, buffer, &errorDetailMsg);
5889 if (err != OK && hasCryptoOrDescrambler()
5890 && (mFlags & kFlagUseCryptoAsync)) {
5891                 // build the crypto error info, reusing the error detail from the failed attach
5893 sp<AMessage> cryptoErrorInfo = new AMessage();
5894 buildCryptoInfoAMessage(cryptoErrorInfo, CryptoAsync::kActionDecrypt);
5895 cryptoErrorInfo->setInt32("err", err);
5896 cryptoErrorInfo->setInt32("actionCode", ACTION_CODE_FATAL);
5897 cryptoErrorInfo->setString("errorDetail", errorDetailMsg);
5898 onCryptoError(cryptoErrorInfo);
5899                 // we want the crypto error to be delivered via the callback,
5900                 // but also a codec IllegalStateException to be triggered.
5901 err = INVALID_OPERATION;
5902 }
5903 } else {
5904 mErrorLog.log(LOG_TAG, "Fatal error: invalid queue request without a buffer");
5905 err = UNKNOWN_ERROR;
5906 }
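// If the attached C2 linear buffer holds more data than the codec-facing
// buffer accepted, split off the remainder as a leftover to queue later and
// defer EOS until that leftover has been sent.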
5907 if (err == OK && !buffer->asC2Buffer()
5908 && c2Buffer && c2Buffer->data().type() == C2BufferData::LINEAR) {
5909 C2ConstLinearBlock block{c2Buffer->data().linearBlocks().front()};
5910 if (block.size() > buffer->size()) {
5911 C2ConstLinearBlock leftover = block.subBlock(
5912 block.offset() + buffer->size(), block.size() - buffer->size());
5913 sp<WrapperObject<std::shared_ptr<C2Buffer>>> obj{
5914 new WrapperObject<std::shared_ptr<C2Buffer>>{
5915 C2Buffer::CreateLinearBuffer(leftover)}};
5916 msg->setObject("c2buffer", obj);
5917 mLeftover.push_front(msg);
5918 // Not sending EOS if we have leftovers
5919 flags &= ~BUFFER_FLAG_EOS;
5920 }
5921 }
5922 offset = buffer->offset();
5923 size = buffer->size();
5924 if (err != OK) {
5925 ALOGE("block model buffer attach failed: err = %s (%d)",
5926 StrMediaError(err).c_str(), err);
5927 return err;
5928 }
5929 }
5930
5931 if (offset + size > buffer->capacity()) {
5932 mErrorLog.log(LOG_TAG, base::StringPrintf(
5933 "buffer offset and size goes beyond the capacity: "
5934 "offset=%zu, size=%zu, cap=%zu",
5935 offset, size, buffer->capacity()));
5936 return -EINVAL;
5937 }
5938 buffer->setRange(offset, size);
5939 status_t err = OK;
5940 err = setInputBufferParams(timeUs, flags);
5941 if (err != OK) {
5942 return -EINVAL;
5943 }
5944
5945 int32_t usedMaxInputSize = mApiUsageMetrics.inputBufferSize.usedMax;
5946 mApiUsageMetrics.inputBufferSize.usedMax = size > usedMaxInputSize ? size : usedMaxInputSize;
5947
5948 if (hasCryptoOrDescrambler() && !c2Buffer && !memory) {
5949 AString *errorDetailMsg;
5950 CHECK(msg->findPointer("errorDetailMsg", (void **)&errorDetailMsg));
5951 // Notify mCrypto of video resolution changes
5952 if (mTunneled && mCrypto != NULL) {
5953 int32_t width, height;
5954 if (mInputFormat->findInt32("width", &width) &&
5955 mInputFormat->findInt32("height", &height) && width > 0 && height > 0) {
5956 if (width != mTunneledInputWidth || height != mTunneledInputHeight) {
5957 mTunneledInputWidth = width;
5958 mTunneledInputHeight = height;
5959 mCrypto->notifyResolution(width, height);
5960 }
5961 }
5962 }
5963 if (mCryptoAsync) {
5964 // prepare a message and enqueue
5965 sp<AMessage> cryptoInfo = new AMessage();
5966 buildCryptoInfoAMessage(cryptoInfo, CryptoAsync::kActionDecrypt);
5967 mCryptoAsync->decrypt(cryptoInfo);
5968 } else {
5969 err = mBufferChannel->queueSecureInputBuffer(
5970 buffer,
5971 (mFlags & kFlagIsSecure),
5972 key,
5973 iv,
5974 mode,
5975 pattern,
5976 subSamples,
5977 numSubSamples,
5978 errorDetailMsg);
5979 }
5980 if (err != OK) {
5981 mediametrics_setInt32(mMetricsHandle, kCodecQueueSecureInputBufferError, err);
5982 ALOGW("Log queueSecureInputBuffer error: %d", err);
5983 }
5984 } else {
5985 err = mBufferChannel->queueInputBuffer(buffer);
5986 if (err != OK) {
5987 mediametrics_setInt32(mMetricsHandle, kCodecQueueInputBufferError, err);
5988 ALOGW("Log queueInputBuffer error: %d", err);
5989 }
5990 }
5991
5992 if (err == OK) {
5993 if (mTunneled && (flags & (BUFFER_FLAG_DECODE_ONLY | BUFFER_FLAG_END_OF_STREAM)) == 0) {
5994 mVideoRenderQualityTracker.onTunnelFrameQueued(timeUs);
5995 }
5996
5997 // synchronization boundary for getBufferAndFormat
5998 Mutex::Autolock al(mBufferLock);
5999 info->mOwnedByClient = false;
6000 info->mData.clear();
6001
6002 statsBufferSent(timeUs, buffer);
6003 }
6004
6005 return err;
6006 }
6007
6008 status_t MediaCodec::handleLeftover(size_t index) {
6009 if (mLeftover.empty()) {
6010 return OK;
6011 }
6012 sp<AMessage> msg = mLeftover.front();
6013 mLeftover.pop_front();
6014 msg->setSize("index", index);
6015 return onQueueInputBuffer(msg);
6016 }
6017
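// Shared helper for the CreateFramesRenderedMessage() overloads: emits one
// "<index>-media-time-us" / "<index>-system-nano" pair per rendered frame and
// returns the number of frames added; frames with a negative render time were
// dropped from tracking and are skipped.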
6018 template<typename T>
6019 static size_t CreateFramesRenderedMessageInternal(const std::list<T> &done, sp<AMessage> &msg) {
6020 size_t index = 0;
6021 for (typename std::list<T>::const_iterator it = done.cbegin(); it != done.cend(); ++it) {
6022 if (it->getRenderTimeNs() < 0) {
6023 continue; // dropped frame from tracking
6024 }
6025 msg->setInt64(base::StringPrintf("%zu-media-time-us", index).c_str(), it->getMediaTimeUs());
6026 msg->setInt64(base::StringPrintf("%zu-system-nano", index).c_str(), it->getRenderTimeNs());
6027 ++index;
6028 }
6029 return index;
6030 }
6031
6032 //static
6033 size_t MediaCodec::CreateFramesRenderedMessage(
6034 const std::list<RenderedFrameInfo> &done, sp<AMessage> &msg) {
6035 return CreateFramesRenderedMessageInternal(done, msg);
6036 }
6037
6038 //static
6039 size_t MediaCodec::CreateFramesRenderedMessage(
6040 const std::list<FrameRenderTracker::Info> &done, sp<AMessage> &msg) {
6041 return CreateFramesRenderedMessageInternal(done, msg);
6042 }
6043
6044 status_t MediaCodec::onReleaseOutputBuffer(const sp<AMessage> &msg) {
6045 size_t index;
6046 CHECK(msg->findSize("index", &index));
6047
6048 int32_t render;
6049 if (!msg->findInt32("render", &render)) {
6050 render = 0;
6051 }
6052
6053 if (!isExecuting()) {
6054 mErrorLog.log(LOG_TAG, base::StringPrintf(
6055 "releaseOutputBuffer() is valid at Executing states; currently %s",
6056 apiStateString().c_str()));
6057 return -EINVAL;
6058 }
6059
6060 if (index >= mPortBuffers[kPortIndexOutput].size()) {
6061 mErrorLog.log(LOG_TAG, base::StringPrintf(
6062 "index out of range (index=%zu)", mPortBuffers[kPortIndexOutput].size()));
6063 return -ERANGE;
6064 }
6065
6066 BufferInfo *info = &mPortBuffers[kPortIndexOutput][index];
6067
6068 if (!info->mOwnedByClient) {
6069 mErrorLog.log(LOG_TAG, base::StringPrintf(
6070 "client does not own the buffer #%zu", index));
6071 return -EACCES;
6072 }
6073 if (info->mData == nullptr) {
6074 mErrorLog.log(LOG_TAG, base::StringPrintf(
6075 "Fatal error: null buffer for index %zu", index));
6076 return -EACCES;
6077 }
6078
6079 // synchronization boundary for getBufferAndFormat
6080 sp<MediaCodecBuffer> buffer;
6081 {
6082 Mutex::Autolock al(mBufferLock);
6083 info->mOwnedByClient = false;
6084 buffer = info->mData;
6085 info->mData.clear();
6086 }
6087
6088 if (render && buffer->size() != 0) {
6089 int64_t mediaTimeUs = INT64_MIN;
6090 buffer->meta()->findInt64("timeUs", &mediaTimeUs);
6091
6092 bool noRenderTime = false;
6093 int64_t renderTimeNs = 0;
6094 if (!msg->findInt64("timestampNs", &renderTimeNs)) {
6095 // use media timestamp if client did not request a specific render timestamp
6096 ALOGV("using buffer PTS of %lld", (long long)mediaTimeUs);
6097 renderTimeNs = mediaTimeUs * 1000;
6098 noRenderTime = true;
6099 }
6100
6101 if (mSoftRenderer != NULL) {
6102 std::list<FrameRenderTracker::Info> doneFrames = mSoftRenderer->render(
6103 buffer->data(), buffer->size(), mediaTimeUs, renderTimeNs,
6104 mPortBuffers[kPortIndexOutput].size(), buffer->format());
6105
6106 // if we are running, notify rendered frames
6107 if (!doneFrames.empty() && mState == STARTED && mOnFrameRenderedNotification != NULL) {
6108 sp<AMessage> notify = mOnFrameRenderedNotification->dup();
6109 sp<AMessage> data = new AMessage;
6110 if (CreateFramesRenderedMessage(doneFrames, data)) {
6111 notify->setMessage("data", data);
6112 notify->post();
6113 }
6114 }
6115 }
6116
6117 // If rendering to the screen, then schedule a time in the future to poll to see if this
6118 // frame was ever rendered to seed onFrameRendered callbacks.
6119 if (mAreRenderMetricsEnabled && mIsSurfaceToDisplay) {
6120 if (mediaTimeUs != INT64_MIN) {
6121 noRenderTime ? mVideoRenderQualityTracker.onFrameReleased(mediaTimeUs)
6122 : mVideoRenderQualityTracker.onFrameReleased(mediaTimeUs,
6123 renderTimeNs);
6124 }
6125 // can't initialize this in the constructor because the Looper parent class needs to be
6126 // initialized first
6127 if (mMsgPollForRenderedBuffers == nullptr) {
6128 mMsgPollForRenderedBuffers = new AMessage(kWhatPollForRenderedBuffers, this);
6129 }
6130 // Schedule the poll to occur 100ms after the render time - should be safe for
6131 // determining if the frame was ever rendered. If no render time was specified, the
6132 // presentation timestamp is used instead, which almost certainly occurs in the past,
6133 // since it's almost always a zero-based offset from the start of the stream. In these
6134 // scenarios, we expect the frame to be rendered with no delay.
6135 int64_t nowUs = ALooper::GetNowUs();
6136 int64_t renderTimeUs = renderTimeNs / 1000;
6137 int64_t delayUs = renderTimeUs < nowUs ? 0 : renderTimeUs - nowUs;
6138 delayUs += 100 * 1000; /* 100ms in microseconds */
6139 status_t err =
6140 mMsgPollForRenderedBuffers->postUnique(/* token= */ mMsgPollForRenderedBuffers,
6141 delayUs);
6142 if (err != OK) {
6143 ALOGE("unexpected failure to post pollForRenderedBuffers: %d", err);
6144 }
6145 }
6146 status_t err = mBufferChannel->renderOutputBuffer(buffer, renderTimeNs);
6147
6148 if (err == NO_INIT) {
6149 mErrorLog.log(LOG_TAG, "rendering to non-initialized(obsolete) surface");
6150 return err;
6151 }
6152 if (err != OK) {
6153 ALOGI("rendring output error %d", err);
6154 }
6155 } else {
6156 if (mIsSurfaceToDisplay && buffer->size() != 0) {
6157 int64_t mediaTimeUs = INT64_MIN;
6158 if (buffer->meta()->findInt64("timeUs", &mediaTimeUs)) {
6159 mVideoRenderQualityTracker.onFrameSkipped(mediaTimeUs);
6160 }
6161 }
6162 mBufferChannel->discardBuffer(buffer);
6163 }
6164
6165 return OK;
6166 }
6167
6168 MediaCodec::BufferInfo *MediaCodec::peekNextPortBuffer(int32_t portIndex) {
6169 CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);
6170
6171 std::list<size_t> *availBuffers = &mAvailPortBuffers[portIndex];
6172
6173 if (availBuffers->empty()) {
6174 return nullptr;
6175 }
6176
6177 return &mPortBuffers[portIndex][*availBuffers->begin()];
6178 }
6179
6180 ssize_t MediaCodec::dequeuePortBuffer(int32_t portIndex) {
6181 CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);
6182
6183 BufferInfo *info = peekNextPortBuffer(portIndex);
6184 if (!info) {
6185 return -EAGAIN;
6186 }
6187
6188 std::list<size_t> *availBuffers = &mAvailPortBuffers[portIndex];
6189 size_t index = *availBuffers->begin();
6190 CHECK_EQ(info, &mPortBuffers[portIndex][index]);
6191 availBuffers->erase(availBuffers->begin());
6192
6193 CHECK(!info->mOwnedByClient);
6194 {
6195 Mutex::Autolock al(mBufferLock);
6196 info->mOwnedByClient = true;
6197
6198 // set image-data
6199 if (info->mData->format() != NULL) {
6200 sp<ABuffer> imageData;
6201 if (info->mData->format()->findBuffer("image-data", &imageData)) {
6202 info->mData->meta()->setBuffer("image-data", imageData);
6203 }
6204 int32_t left, top, right, bottom;
6205 if (info->mData->format()->findRect("crop", &left, &top, &right, &bottom)) {
6206 info->mData->meta()->setRect("crop-rect", left, top, right, bottom);
6207 }
6208 }
6209 }
6210
6211 return index;
6212 }
6213
6214 status_t MediaCodec::connectToSurface(const sp<Surface> &surface) {
6215 status_t err = OK;
6216 if (surface != NULL) {
6217 uint64_t oldId, newId;
6218 if (mSurface != NULL
6219 && surface->getUniqueId(&newId) == NO_ERROR
6220 && mSurface->getUniqueId(&oldId) == NO_ERROR
6221 && newId == oldId) {
6222 ALOGI("[%s] connecting to the same surface. Nothing to do.", mComponentName.c_str());
6223 return ALREADY_EXISTS;
6224 }
6225
6226 // in case we don't connect, ensure that we don't signal the surface is
6227 // connected to the screen
6228 mIsSurfaceToDisplay = false;
6229
6230 err = nativeWindowConnect(surface.get(), "connectToSurface");
6231 if (err == OK) {
6232 // Require a fresh set of buffers after each connect by using a unique generation
6233 // number. Rely on the fact that max supported process id by Linux is 2^22.
6234 // PID is never 0 so we don't have to worry that we use the default generation of 0.
6235 // TODO: come up with a unique scheme if other producers also set the generation number.
6236 static uint32_t mSurfaceGeneration = 0;
6237 uint32_t generation = (getpid() << 10) | (++mSurfaceGeneration & ((1 << 10) - 1));
6238 surface->setGenerationNumber(generation);
6239 ALOGI("[%s] setting surface generation to %u", mComponentName.c_str(), generation);
6240
6241 // HACK: clear any free buffers. Remove when connect will automatically do this.
6242 // This is needed as the consumer may be holding onto stale frames that it can reattach
6243 // to this surface after disconnect/connect, and those free frames would inherit the new
6244 // generation number. Disconnecting after setting a unique generation prevents this.
6245 nativeWindowDisconnect(surface.get(), "connectToSurface(reconnect)");
6246 err = nativeWindowConnect(surface.get(), "connectToSurface(reconnect)");
6247 }
6248
6249 if (err != OK) {
6250 ALOGE("nativeWindowConnect returned an error: %s (%d)", strerror(-err), err);
6251 } else {
6252 if (!mAllowFrameDroppingBySurface) {
6253 disableLegacyBufferDropPostQ(surface);
6254 }
6255 // keep track whether or not the buffers of the connected surface go to the screen
6256 int result = 0;
6257 surface->query(NATIVE_WINDOW_QUEUES_TO_WINDOW_COMPOSER, &result);
6258 mIsSurfaceToDisplay = result != 0;
6259 }
6260 }
6261 // do not return ALREADY_EXISTS unless surfaces are the same
6262 return err == ALREADY_EXISTS ? BAD_VALUE : err;
6263 }
6264
6265 status_t MediaCodec::disconnectFromSurface() {
6266 status_t err = OK;
6267 if (mSurface != NULL) {
6268 // Resetting generation is not technically needed, but there is no need to keep it either
6269 mSurface->setGenerationNumber(0);
6270 err = nativeWindowDisconnect(mSurface.get(), "disconnectFromSurface");
6271 if (err != OK) {
6272 ALOGW("nativeWindowDisconnect returned an error: %s (%d)", strerror(-err), err);
6273 }
6274 // assume disconnected even on error
6275 mSurface.clear();
6276 mIsSurfaceToDisplay = false;
6277 }
6278 return err;
6279 }
6280
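// Swap the output surface: always detach from the current surface first, and only
// remember the new surface once connectToSurface() succeeds.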
6281 status_t MediaCodec::handleSetSurface(const sp<Surface> &surface) {
6282 status_t err = OK;
6283 if (mSurface != NULL) {
6284 (void)disconnectFromSurface();
6285 }
6286 if (surface != NULL) {
6287 err = connectToSurface(surface);
6288 if (err == OK) {
6289 mSurface = surface;
6290 }
6291 }
6292 return err;
6293 }
6294
6295 void MediaCodec::onInputBufferAvailable() {
6296 int32_t index;
6297 while ((index = dequeuePortBuffer(kPortIndexInput)) >= 0) {
6298 sp<AMessage> msg = mCallback->dup();
6299 msg->setInt32("callbackID", CB_INPUT_AVAILABLE);
6300 msg->setInt32("index", index);
6301 msg->post();
6302 }
6303 }
6304
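// Drain every available output buffer: decode-only frames are silently dropped,
// all others are reported to the client callback with their offset, size,
// timestamp and flags.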
6305 void MediaCodec::onOutputBufferAvailable() {
6306 int32_t index;
6307 while ((index = dequeuePortBuffer(kPortIndexOutput)) >= 0) {
6308 if (discardDecodeOnlyOutputBuffer(index)) {
6309 continue;
6310 }
6311 const sp<MediaCodecBuffer> &buffer =
6312 mPortBuffers[kPortIndexOutput][index].mData;
6313 sp<AMessage> msg = mCallback->dup();
6314 msg->setInt32("callbackID", CB_OUTPUT_AVAILABLE);
6315 msg->setInt32("index", index);
6316 msg->setSize("offset", buffer->offset());
6317 msg->setSize("size", buffer->size());
6318
6319 int64_t timeUs;
6320 CHECK(buffer->meta()->findInt64("timeUs", &timeUs));
6321
6322 msg->setInt64("timeUs", timeUs);
6323
6324 int32_t flags;
6325 CHECK(buffer->meta()->findInt32("flags", &flags));
6326
6327 msg->setInt32("flags", flags);
6328
6329 statsBufferReceived(timeUs, buffer);
6330
6331 msg->post();
6332 }
6333 }
6334 void MediaCodec::onCryptoError(const sp<AMessage> & msg) {
6335 if (mCallback != NULL) {
6336 sp<AMessage> cb_msg = mCallback->dup();
6337 cb_msg->setInt32("callbackID", CB_CRYPTO_ERROR);
6338 cb_msg->extend(msg);
6339 cb_msg->post();
6340 }
6341 }
6342 void MediaCodec::onError(status_t err, int32_t actionCode, const char *detail) {
6343 if (mCallback != NULL) {
6344 sp<AMessage> msg = mCallback->dup();
6345 msg->setInt32("callbackID", CB_ERROR);
6346 msg->setInt32("err", err);
6347 msg->setInt32("actionCode", actionCode);
6348
6349 if (detail != NULL) {
6350 msg->setString("detail", detail);
6351 }
6352
6353 msg->post();
6354 }
6355 }
6356
6357 void MediaCodec::onOutputFormatChanged() {
6358 if (mCallback != NULL) {
6359 sp<AMessage> msg = mCallback->dup();
6360 msg->setInt32("callbackID", CB_OUTPUT_FORMAT_CHANGED);
6361 msg->setMessage("format", mOutputFormat);
6362 msg->post();
6363 }
6364 }
6365
6366 void MediaCodec::postActivityNotificationIfPossible() {
6367 if (mActivityNotify == NULL) {
6368 return;
6369 }
6370
6371 bool isErrorOrOutputChanged =
6372 (mFlags & (kFlagStickyError
6373 | kFlagOutputBuffersChanged
6374 | kFlagOutputFormatChanged));
6375
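// Wake the client whenever there is anything to report; a sticky error or an
// output change must be delivered even if no buffers are currently available.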
6376 if (isErrorOrOutputChanged
6377 || !mAvailPortBuffers[kPortIndexInput].empty()
6378 || !mAvailPortBuffers[kPortIndexOutput].empty()) {
6379 mActivityNotify->setInt32("input-buffers",
6380 mAvailPortBuffers[kPortIndexInput].size());
6381
6382 if (isErrorOrOutputChanged) {
6383 // we want consumer to dequeue as many times as it can
6384 mActivityNotify->setInt32("output-buffers", INT32_MAX);
6385 } else {
6386 mActivityNotify->setInt32("output-buffers",
6387 mAvailPortBuffers[kPortIndexOutput].size());
6388 }
6389 mActivityNotify->post();
6390 mActivityNotify.clear();
6391 }
6392 }
6393
6394 status_t MediaCodec::setParameters(const sp<AMessage> &params) {
6395 sp<AMessage> msg = new AMessage(kWhatSetParameters, this);
6396 msg->setMessage("params", params);
6397
6398 sp<AMessage> response;
6399 return PostAndAwaitResponse(msg, &response);
6400 }
6401
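// Handled on the looper thread. Runtime parameter updates are rejected until the
// codec exists; otherwise low-latency and tunnel-peek hints are extracted here and
// the remaining keys are forwarded to the codec. As an illustrative example, a
// client might pass keys such as "video-bitrate" or "request-sync" at runtime.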
6402 status_t MediaCodec::onSetParameters(const sp<AMessage> &params) {
6403 if (mState == UNINITIALIZED || mState == INITIALIZING) {
6404 return NO_INIT;
6405 }
6406 updateLowLatency(params);
6407 mapFormat(mComponentName, params, nullptr, false);
6408 updateTunnelPeek(params);
6409 mCodec->signalSetParameters(params);
6410
6411 return OK;
6412 }
6413
6414 status_t MediaCodec::amendOutputFormatWithCodecSpecificData(
6415 const sp<MediaCodecBuffer> &buffer) {
6416 AString mime;
6417 CHECK(mOutputFormat->findString("mime", &mime));
6418
6419 if (!strcasecmp(mime.c_str(), MEDIA_MIMETYPE_VIDEO_AVC)) {
6420 // Codec specific data should be SPS and PPS in a single buffer,
6421 // each prefixed by a startcode (0x00 0x00 0x00 0x01).
6422 // We separate the two and put them into the output format
6423 // under the keys "csd-0" and "csd-1".
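// For example (illustrative layout), an input buffer of
//   00 00 00 01 <SPS NAL> 00 00 00 01 <PPS NAL>
// yields csd-0 = startcode + SPS and csd-1 = startcode + PPS.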
6424
6425 unsigned csdIndex = 0;
6426
6427 const uint8_t *data = buffer->data();
6428 size_t size = buffer->size();
6429
6430 const uint8_t *nalStart;
6431 size_t nalSize;
6432 while (getNextNALUnit(&data, &size, &nalStart, &nalSize, true) == OK) {
6433 sp<ABuffer> csd = new ABuffer(nalSize + 4);
6434 memcpy(csd->data(), "\x00\x00\x00\x01", 4);
6435 memcpy(csd->data() + 4, nalStart, nalSize);
6436
6437 mOutputFormat->setBuffer(
6438 base::StringPrintf("csd-%u", csdIndex).c_str(), csd);
6439
6440 ++csdIndex;
6441 }
6442
6443 if (csdIndex != 2) {
6444 mErrorLog.log(LOG_TAG, base::StringPrintf(
6445 "codec config data contains %u NAL units; expected 2.", csdIndex));
6446 return ERROR_MALFORMED;
6447 }
6448 } else {
6449 // For everything else we just stash the codec specific data into
6450 // the output format as a single piece of csd under "csd-0".
6451 sp<ABuffer> csd = new ABuffer(buffer->size());
6452 memcpy(csd->data(), buffer->data(), buffer->size());
6453 csd->setRange(0, buffer->size());
6454 mOutputFormat->setBuffer("csd-0", csd);
6455 }
6456
6457 return OK;
6458 }
6459
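// Convenience overload: wrap a bare status_t into a response message (attaching
// "err" only on failure) and forward to the sp<AMessage> overload below.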
6460 void MediaCodec::postPendingRepliesAndDeferredMessages(
6461 std::string origin, status_t err /* = OK */) {
6462 sp<AMessage> response{new AMessage};
6463 if (err != OK) {
6464 response->setInt32("err", err);
6465 }
6466 postPendingRepliesAndDeferredMessages(origin, response);
6467 }
6468
6469 void MediaCodec::postPendingRepliesAndDeferredMessages(
6470 std::string origin, const sp<AMessage> &response) {
6471 LOG_ALWAYS_FATAL_IF(
6472 !mReplyID,
6473 "postPendingRepliesAndDeferredMessages: mReplyID == null, from %s following %s",
6474 origin.c_str(),
6475 mLastReplyOrigin.c_str());
6476 mLastReplyOrigin = origin;
6477 response->postReply(mReplyID);
6478 mReplyID.clear();
6479 ALOGV_IF(!mDeferredMessages.empty(),
6480 "posting %zu deferred messages", mDeferredMessages.size());
6481 for (sp<AMessage> msg : mDeferredMessages) {
6482 msg->post();
6483 }
6484 mDeferredMessages.clear();
6485 }
6486
6487 std::string MediaCodec::apiStateString() {
6488 const char *rval = NULL;
6489 char rawbuffer[16]; // room for "%d"
6490
6491 switch (mState) {
6492 case UNINITIALIZED:
6493 rval = (mFlags & kFlagStickyError) ? "at Error state" : "at Released state";
6494 break;
6495 case INITIALIZING: rval = "while constructing"; break;
6496 case INITIALIZED: rval = "at Uninitialized state"; break;
6497 case CONFIGURING: rval = "during configure()"; break;
6498 case CONFIGURED: rval = "at Configured state"; break;
6499 case STARTING: rval = "during start()"; break;
6500 case STARTED: rval = "at Running state"; break;
6501 case FLUSHING: rval = "during flush()"; break;
6502 case FLUSHED: rval = "at Flushed state"; break;
6503 case STOPPING: rval = "during stop()"; break;
6504 case RELEASING: rval = "during release()"; break;
6505 default:
6506 snprintf(rawbuffer, sizeof(rawbuffer), "at %d", mState);
6507 rval = rawbuffer;
6508 break;
6509 }
6510 return rval;
6511 }
6512
6513 std::string MediaCodec::stateString(State state) {
6514 const char *rval = NULL;
6515 char rawbuffer[16]; // room for "%d"
6516
6517 switch (state) {
6518 case UNINITIALIZED: rval = "UNINITIALIZED"; break;
6519 case INITIALIZING: rval = "INITIALIZING"; break;
6520 case INITIALIZED: rval = "INITIALIZED"; break;
6521 case CONFIGURING: rval = "CONFIGURING"; break;
6522 case CONFIGURED: rval = "CONFIGURED"; break;
6523 case STARTING: rval = "STARTING"; break;
6524 case STARTED: rval = "STARTED"; break;
6525 case FLUSHING: rval = "FLUSHING"; break;
6526 case FLUSHED: rval = "FLUSHED"; break;
6527 case STOPPING: rval = "STOPPING"; break;
6528 case RELEASING: rval = "RELEASING"; break;
6529 default:
6530 snprintf(rawbuffer, sizeof(rawbuffer), "%d", state);
6531 rval = rawbuffer;
6532 break;
6533 }
6534 return rval;
6535 }
6536
6537 // static
6538 status_t MediaCodec::CanFetchLinearBlock(
6539 const std::vector<std::string> &names, bool *isCompatible) {
6540 *isCompatible = false;
6541 if (names.size() == 0) {
6542 *isCompatible = true;
6543 return OK;
6544 }
6545 const CodecListCache &cache = GetCodecListCache();
6546 for (const std::string &name : names) {
6547 auto it = cache.mCodecInfoMap.find(name);
6548 if (it == cache.mCodecInfoMap.end()) {
6549 return NAME_NOT_FOUND;
6550 }
6551 const char *owner = it->second->getOwnerName();
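// Codecs owned by the "default" (non-Codec2) store cannot share C2 linear blocks;
// any owner other than a "codec2::" store is unrecognized and reported as
// NAME_NOT_FOUND. The final compatibility decision is delegated to CCodec below.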
6552 if (owner == nullptr || strncmp(owner, "default", 8) == 0) {
6553 *isCompatible = false;
6554 return OK;
6555 } else if (strncmp(owner, "codec2::", 8) != 0) {
6556 return NAME_NOT_FOUND;
6557 }
6558 }
6559 return CCodec::CanFetchLinearBlock(names, kDefaultReadWriteUsage, isCompatible);
6560 }
6561
6562 // static
6563 std::shared_ptr<C2LinearBlock> MediaCodec::FetchLinearBlock(
6564 size_t capacity, const std::vector<std::string> &names) {
6565 return CCodec::FetchLinearBlock(capacity, kDefaultReadWriteUsage, names);
6566 }
6567
6568 // static
6569 status_t MediaCodec::CanFetchGraphicBlock(
6570 const std::vector<std::string> &names, bool *isCompatible) {
6571 *isCompatible = false;
6572 if (names.size() == 0) {
6573 *isCompatible = true;
6574 return OK;
6575 }
6576 const CodecListCache &cache = GetCodecListCache();
6577 for (const std::string &name : names) {
6578 auto it = cache.mCodecInfoMap.find(name);
6579 if (it == cache.mCodecInfoMap.end()) {
6580 return NAME_NOT_FOUND;
6581 }
6582 const char *owner = it->second->getOwnerName();
6583 if (owner == nullptr || strncmp(owner, "default", 8) == 0) {
6584 *isCompatible = false;
6585 return OK;
6586 } else if (strncmp(owner, "codec2.", 7) != 0) {
6587 return NAME_NOT_FOUND;
6588 }
6589 }
6590 return CCodec::CanFetchGraphicBlock(names, isCompatible);
6591 }
6592
6593 // static
6594 std::shared_ptr<C2GraphicBlock> MediaCodec::FetchGraphicBlock(
6595 int32_t width,
6596 int32_t height,
6597 int32_t format,
6598 uint64_t usage,
6599 const std::vector<std::string> &names) {
6600 return CCodec::FetchGraphicBlock(width, height, format, usage, names);
6601 }
6602
6603 } // namespace android
6604