/*
 * Copyright 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#include <algorithm>
#include <string_view>
#include <type_traits>

#include <assert.h>
#include <ctype.h>
#include <errno.h>
#include <fcntl.h>
#include <getopt.h>
#include <inttypes.h>
#include <signal.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <sys/stat.h>
#include <sys/types.h>
#include <sys/wait.h>

#include <termios.h>
#include <unistd.h>

#define LOG_TAG "ScreenRecord"
#define ATRACE_TAG ATRACE_TAG_GRAPHICS
//#define LOG_NDEBUG 0
#include <utils/Log.h>

#include <binder/IPCThreadState.h>
#include <utils/Errors.h>
#include <utils/SystemClock.h>
#include <utils/Timers.h>
#include <utils/Trace.h>

#include <gui/ISurfaceComposer.h>
#include <gui/Surface.h>
#include <gui/SurfaceComposerClient.h>
#include <media/MediaCodecBuffer.h>
#include <media/NdkMediaCodec.h>
#include <media/NdkMediaFormatPriv.h>
#include <media/NdkMediaMuxer.h>
#include <media/openmax/OMX_IVCommon.h>
#include <media/stagefright/MediaCodec.h>
#include <media/stagefright/MediaCodecConstants.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/PersistentSurface.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/AMessage.h>
#include <mediadrm/ICrypto.h>
#include <ui/DisplayMode.h>
#include <ui/DisplayState.h>

#include "screenrecord.h"
#include "Overlay.h"
#include "FrameOutput.h"

using android::ABuffer;
using android::ALooper;
using android::AMessage;
using android::AString;
using android::ui::DisplayMode;
using android::FrameOutput;
using android::IBinder;
using android::IGraphicBufferProducer;
using android::ISurfaceComposer;
using android::MediaCodec;
using android::MediaCodecBuffer;
using android::Overlay;
using android::PersistentSurface;
using android::PhysicalDisplayId;
using android::ProcessState;
using android::Rect;
using android::String8;
using android::SurfaceComposerClient;
using android::Vector;
using android::sp;
using android::status_t;

using android::INVALID_OPERATION;
using android::NAME_NOT_FOUND;
using android::NO_ERROR;
using android::UNKNOWN_ERROR;

namespace ui = android::ui;

static const uint32_t kMinBitRate = 100000;         // 0.1Mbps
static const uint32_t kMaxBitRate = 200 * 1000000;  // 200Mbps
static const uint32_t kMaxTimeLimitSec = 180;       // 3 minutes
static const uint32_t kFallbackWidth = 1280;        // 720p
static const uint32_t kFallbackHeight = 720;
static const char* kMimeTypeAvc = "video/avc";
static const char* kMimeTypeApplicationOctetstream = "application/octet-stream";

// Command-line parameters.
static bool gVerbose = false;           // chatty on stdout
static bool gRotate = false;            // rotate 90 degrees
static bool gMonotonicTime = false;     // use system monotonic time for timestamps
static bool gPersistentSurface = false; // use persistent surface
static enum {
    FORMAT_MP4, FORMAT_H264, FORMAT_WEBM, FORMAT_3GPP, FORMAT_FRAMES, FORMAT_RAW_FRAMES
} gOutputFormat = FORMAT_MP4;           // data format for output
static AString gCodecName = "";         // codec name override
static bool gSizeSpecified = false;     // was size explicitly requested?
static bool gWantInfoScreen = false;    // do we want initial info screen?
static bool gWantFrameTime = false;     // do we want times on each frame?
static uint32_t gVideoWidth = 0;        // default width+height
static uint32_t gVideoHeight = 0;
static uint32_t gBitRate = 20000000;    // 20Mbps
static uint32_t gTimeLimitSec = kMaxTimeLimitSec;
static uint32_t gBframes = 0;
static PhysicalDisplayId gPhysicalDisplayId;
// Set by signal handler to stop recording.
static volatile bool gStopRequested = false;

// Previous signal handler state, restored after first hit.
static struct sigaction gOrigSigactionINT;
static struct sigaction gOrigSigactionHUP;


/*
 * Catch keyboard interrupt signals. On receipt, the "stop requested"
 * flag is raised, and the original handler is restored (so that, if
 * we get stuck finishing, a second Ctrl-C will kill the process).
 */
static void signalCatcher(int signum)
{
    gStopRequested = true;
    switch (signum) {
    case SIGINT:
    case SIGHUP:
        sigaction(SIGINT, &gOrigSigactionINT, NULL);
        sigaction(SIGHUP, &gOrigSigactionHUP, NULL);
        break;
    default:
        abort();
        break;
    }
}

/*
 * Configures signal handlers. The previous handlers are saved.
 *
 * If the command is run from an interactive adb shell, we get SIGINT
 * when Ctrl-C is hit. If we're run from the host, the local adb process
 * gets the signal, and we get a SIGHUP when the terminal disconnects.
 */
static status_t configureSignals() {
    struct sigaction act;
    memset(&act, 0, sizeof(act));
    act.sa_handler = signalCatcher;
    if (sigaction(SIGINT, &act, &gOrigSigactionINT) != 0) {
        status_t err = -errno;
        fprintf(stderr, "Unable to configure SIGINT handler: %s\n",
                strerror(errno));
        return err;
    }
    if (sigaction(SIGHUP, &act, &gOrigSigactionHUP) != 0) {
        status_t err = -errno;
        fprintf(stderr, "Unable to configure SIGHUP handler: %s\n",
                strerror(errno));
        return err;
    }
    signal(SIGPIPE, SIG_IGN);
    return NO_ERROR;
}

/*
 * Configures and starts the MediaCodec encoder. Obtains an input surface
 * from the codec.
 */
static status_t prepareEncoder(float displayFps, sp<MediaCodec>* pCodec,
        sp<IGraphicBufferProducer>* pBufferProducer) {
    status_t err;

    if (gVerbose) {
        printf("Configuring recorder for %dx%d %s at %.2fMbps\n",
                gVideoWidth, gVideoHeight, kMimeTypeAvc, gBitRate / 1000000.0);
        fflush(stdout);
    }

    sp<AMessage> format = new AMessage;
    format->setInt32(KEY_WIDTH, gVideoWidth);
    format->setInt32(KEY_HEIGHT, gVideoHeight);
    format->setString(KEY_MIME, kMimeTypeAvc);
    format->setInt32(KEY_COLOR_FORMAT, OMX_COLOR_FormatAndroidOpaque);
    format->setInt32(KEY_BIT_RATE, gBitRate);
    format->setFloat(KEY_FRAME_RATE, displayFps);
    format->setInt32(KEY_I_FRAME_INTERVAL, 10);
    format->setInt32(KEY_MAX_B_FRAMES, gBframes);
    if (gBframes > 0) {
        format->setInt32(KEY_PROFILE, AVCProfileMain);
        format->setInt32(KEY_LEVEL, AVCLevel41);
    }

    sp<android::ALooper> looper = new android::ALooper;
    looper->setName("screenrecord_looper");
    looper->start();
    ALOGV("Creating codec");
    sp<MediaCodec> codec;
    if (gCodecName.empty()) {
        codec = MediaCodec::CreateByType(looper, kMimeTypeAvc, true);
        if (codec == NULL) {
            fprintf(stderr, "ERROR: unable to create %s codec instance\n",
                    kMimeTypeAvc);
            return UNKNOWN_ERROR;
        }
    } else {
        codec = MediaCodec::CreateByComponentName(looper, gCodecName);
        if (codec == NULL) {
            fprintf(stderr, "ERROR: unable to create %s codec instance\n",
                    gCodecName.c_str());
            return UNKNOWN_ERROR;
        }
    }

    err = codec->configure(format, NULL, NULL,
            MediaCodec::CONFIGURE_FLAG_ENCODE);
    if (err != NO_ERROR) {
        fprintf(stderr, "ERROR: unable to configure %s codec at %dx%d (err=%d)\n",
                kMimeTypeAvc, gVideoWidth, gVideoHeight, err);
        codec->release();
        return err;
    }

    ALOGV("Creating encoder input surface");
    sp<IGraphicBufferProducer> bufferProducer;
    if (gPersistentSurface) {
        sp<PersistentSurface> surface = MediaCodec::CreatePersistentInputSurface();
        bufferProducer = surface->getBufferProducer();
        err = codec->setInputSurface(surface);
    } else {
        err = codec->createInputSurface(&bufferProducer);
    }
    if (err != NO_ERROR) {
        fprintf(stderr,
            "ERROR: unable to %s encoder input surface (err=%d)\n",
            gPersistentSurface ? "set" : "create",
            err);
        codec->release();
        return err;
    }

    ALOGV("Starting codec");
    err = codec->start();
    if (err != NO_ERROR) {
        fprintf(stderr, "ERROR: unable to start codec (err=%d)\n", err);
        codec->release();
        return err;
    }

    ALOGV("Codec prepared");
    *pCodec = codec;
    *pBufferProducer = bufferProducer;
    return 0;
}

/*
 * Sets the display projection, based on the display dimensions, video size,
 * and device orientation.
 */
static status_t setDisplayProjection(
        SurfaceComposerClient::Transaction& t,
        const sp<IBinder>& dpy,
        const ui::DisplayState& displayState) {
    // Set the region of the layer stack we're interested in, which in our case is "all of it".
    Rect layerStackRect(displayState.layerStackSpaceRect);

    // We need to preserve the aspect ratio of the display.
    float displayAspect = layerStackRect.getHeight() / static_cast<float>(layerStackRect.getWidth());

    // Set the way we map the output onto the display surface (which will
    // be e.g. 1280x720 for a 720p video). The rect is interpreted
    // post-rotation, so if the display is rotated 90 degrees we need to
    // "pre-rotate" it by flipping width/height, so that the orientation
    // adjustment changes it back.
    //
    // We might want to encode a portrait display as landscape to use more
    // of the screen real estate. (If players respect a 90-degree rotation
    // hint, we can essentially get a 720x1280 video instead of 1280x720.)
    // In that case, we swap the configured video width/height and then
    // supply a rotation value to the display projection.
    uint32_t videoWidth, videoHeight;
    uint32_t outWidth, outHeight;
    if (!gRotate) {
        videoWidth = gVideoWidth;
        videoHeight = gVideoHeight;
    } else {
        videoWidth = gVideoHeight;
        videoHeight = gVideoWidth;
    }
    if (videoHeight > (uint32_t)(videoWidth * displayAspect)) {
        // limited by narrow width; reduce height
        outWidth = videoWidth;
        outHeight = (uint32_t)(videoWidth * displayAspect);
    } else {
        // limited by short height; restrict width
        outHeight = videoHeight;
        outWidth = (uint32_t)(videoHeight / displayAspect);
    }
    uint32_t offX, offY;
    offX = (videoWidth - outWidth) / 2;
    offY = (videoHeight - outHeight) / 2;
    Rect displayRect(offX, offY, offX + outWidth, offY + outHeight);

    if (gVerbose) {
        if (gRotate) {
            printf("Rotated content area is %ux%u at offset x=%d y=%d\n",
                    outHeight, outWidth, offY, offX);
            fflush(stdout);
        } else {
            printf("Content area is %ux%u at offset x=%d y=%d\n",
                    outWidth, outHeight, offX, offY);
            fflush(stdout);
        }
    }

    t.setDisplayProjection(dpy,
            gRotate ? ui::ROTATION_90 : ui::ROTATION_0,
            layerStackRect, displayRect);
    return NO_ERROR;
}
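
// Worked example: a 1080x1920 portrait display recorded into a 1280x720
// video without --rotate gives displayAspect = 1920/1080 ~= 1.78. Since
// videoHeight (720) <= 1280 * 1.78, height is the limiting dimension:
// outHeight = 720, outWidth = (uint32_t)(720 / 1.78) = 405, and the
// content is pillarboxed at offset x = (1280 - 405) / 2 = 437, y = 0.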

/*
 * Configures the virtual display. When this completes, virtual display
 * frames will start arriving from the buffer producer.
 */
static status_t prepareVirtualDisplay(
        const ui::DisplayState& displayState,
        const sp<IGraphicBufferProducer>& bufferProducer,
        sp<IBinder>* pDisplayHandle) {
    sp<IBinder> dpy = SurfaceComposerClient::createDisplay(
            String8("ScreenRecorder"), false /*secure*/);
    SurfaceComposerClient::Transaction t;
    t.setDisplaySurface(dpy, bufferProducer);
    setDisplayProjection(t, dpy, displayState);
    t.setDisplayLayerStack(dpy, displayState.layerStack);
    t.apply();

    *pDisplayHandle = dpy;

    return NO_ERROR;
}

/*
 * Writes an unsigned/signed integer byte-by-byte in little endian order regardless
 * of the platform endianness.
 */
template <typename T>
static void writeValueLE(T value, uint8_t* buffer) {
    std::remove_const_t<T> temp = value;
    for (size_t i = 0; i < sizeof(T); ++i) {
        buffer[i] = static_cast<std::uint8_t>(temp & 0xff);
        temp >>= 8;
    }
}
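
// Example: writeValueLE<uint32_t>(0x11223344, buf) stores the bytes
// {0x44, 0x33, 0x22, 0x11} into buf, on both little- and big-endian hosts.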

/*
 * Saves each frame's presentation time, expressed in microseconds relative
 * to the elapsed realtime clock, to a metadata track, preceded by a Winscope
 * magic string and the frame count. This metadata is used by the Winscope
 * tool to sync video with SurfaceFlinger and WindowManager traces.
 *
 * The metadata is written as a binary array as follows:
 * - winscope magic string (kWinscopeMagicStringLegacy constant), without trailing null char,
 * - the number of recorded frames (as little endian uint32),
 * - for every frame its presentation time relative to the elapsed realtime clock in microseconds
 *   (as little endian uint64).
 */
static status_t writeWinscopeMetadataLegacy(const Vector<int64_t>& timestamps,
        const ssize_t metaTrackIdx, AMediaMuxer *muxer) {
    static constexpr auto kWinscopeMagicStringLegacy = "#VV1NSC0PET1ME!#";

    ALOGV("Writing winscope metadata legacy");
    int64_t systemTimeToElapsedTimeOffsetMicros = (android::elapsedRealtimeNano()
        - systemTime(SYSTEM_TIME_MONOTONIC)) / 1000;
    sp<ABuffer> buffer = new ABuffer(timestamps.size() * sizeof(int64_t)
        + sizeof(uint32_t) + strlen(kWinscopeMagicStringLegacy));
    uint8_t* pos = buffer->data();
    strcpy(reinterpret_cast<char*>(pos), kWinscopeMagicStringLegacy);
    pos += strlen(kWinscopeMagicStringLegacy);
    writeValueLE<uint32_t>(timestamps.size(), pos);
    pos += sizeof(uint32_t);
    for (size_t idx = 0; idx < timestamps.size(); ++idx) {
        writeValueLE<uint64_t>(static_cast<uint64_t>(timestamps[idx]
                + systemTimeToElapsedTimeOffsetMicros), pos);
        pos += sizeof(uint64_t);
    }
    AMediaCodecBufferInfo bufferInfo = {
        0 /* offset */,
        static_cast<int32_t>(buffer->size()),
        timestamps[0] /* presentationTimeUs */,
        0 /* flags */
    };
    return AMediaMuxer_writeSampleData(muxer, metaTrackIdx, buffer->data(), &bufferInfo);
}

/*
 * Saves metadata needed by Winscope to synchronize the screen recording playback with other traces.
 *
 * The metadata (version 2) is written as a binary array with the following format:
 * - winscope magic string (#VV1NSC0PET1ME2#, 16B),
 * - the metadata version number (4B little endian),
 * - the realtime-to-elapsed time offset in nanoseconds (8B little endian),
 * - the recorded frames count (4B little endian),
 * - for each recorded frame:
 *     - its timestamp in the elapsed clock timebase, in nanoseconds (8B little endian).
 *
 * Changes in metadata version 2: use elapsed time for compatibility with
 * most other UI traces, i.e. a realtime-to-elapsed time offset (instead of
 * realtime-to-monotonic) and frame timestamps in the elapsed clock timebase
 * (instead of monotonic).
 */
static status_t writeWinscopeMetadata(const Vector<std::int64_t>& timestampsMonotonicUs,
        const ssize_t metaTrackIdx, AMediaMuxer *muxer) {
    ALOGV("Writing winscope metadata");

    static constexpr auto kWinscopeMagicString = std::string_view {"#VV1NSC0PET1ME2#"};
    static constexpr std::uint32_t metadataVersion = 2;

    const auto elapsedTimeNs = android::elapsedRealtimeNano();
    const std::int64_t elapsedToMonotonicTimeOffsetNs =
            elapsedTimeNs - systemTime(SYSTEM_TIME_MONOTONIC);
    const std::int64_t realToElapsedTimeOffsetNs =
            systemTime(SYSTEM_TIME_REALTIME) - elapsedTimeNs;
    const std::uint32_t framesCount = static_cast<std::uint32_t>(timestampsMonotonicUs.size());

    sp<ABuffer> buffer = new ABuffer(
            kWinscopeMagicString.size() +
            sizeof(decltype(metadataVersion)) +
            sizeof(decltype(realToElapsedTimeOffsetNs)) +
            sizeof(decltype(framesCount)) +
            framesCount * sizeof(std::uint64_t)
    );
    std::uint8_t* pos = buffer->data();

    std::copy(kWinscopeMagicString.cbegin(), kWinscopeMagicString.cend(), pos);
    pos += kWinscopeMagicString.size();

    writeValueLE(metadataVersion, pos);
    pos += sizeof(decltype(metadataVersion));

    writeValueLE(realToElapsedTimeOffsetNs, pos);
    pos += sizeof(decltype(realToElapsedTimeOffsetNs));

    writeValueLE(framesCount, pos);
    pos += sizeof(decltype(framesCount));

    for (const auto timestampMonotonicUs : timestampsMonotonicUs) {
        const auto timestampElapsedNs =
                elapsedToMonotonicTimeOffsetNs + timestampMonotonicUs * 1000;
        writeValueLE<std::uint64_t>(timestampElapsedNs, pos);
        pos += sizeof(std::uint64_t);
    }

    AMediaCodecBufferInfo bufferInfo = {
        0 /* offset */,
        static_cast<std::int32_t>(buffer->size()),
        timestampsMonotonicUs[0] /* presentationTimeUs */,
        0 /* flags */
    };
    return AMediaMuxer_writeSampleData(muxer, metaTrackIdx, buffer->data(), &bufferInfo);
}
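
// Illustration (hypothetical values): one frame captured at elapsed time
// 1000000 ns, with a realtime-to-elapsed offset of 0, would serialize as:
//   "#VV1NSC0PET1ME2#"          (16B magic)
//   02 00 00 00                 (version 2, uint32 LE)
//   00 00 00 00 00 00 00 00     (offset 0 ns, int64 LE)
//   01 00 00 00                 (1 frame, uint32 LE)
//   40 42 0f 00 00 00 00 00     (1000000 = 0xf4240 ns, uint64 LE)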

/*
 * Runs the MediaCodec encoder, sending the output to the MediaMuxer. The
 * input frames are coming from the virtual display as fast as SurfaceFlinger
 * wants to send them.
 *
 * Exactly one of muxer or rawFp must be non-null.
 *
 * The muxer must *not* have been started before calling.
 */
static status_t runEncoder(const sp<MediaCodec>& encoder,
        AMediaMuxer *muxer, FILE* rawFp, const sp<IBinder>& display,
        const sp<IBinder>& virtualDpy, ui::Rotation orientation) {
    static int kTimeout = 250000;   // be responsive on signal
    status_t err;
    ssize_t trackIdx = -1;
    ssize_t metaLegacyTrackIdx = -1;
    ssize_t metaTrackIdx = -1;
    uint32_t debugNumFrames = 0;
    int64_t startWhenNsec = systemTime(CLOCK_MONOTONIC);
    int64_t endWhenNsec = startWhenNsec + seconds_to_nanoseconds(gTimeLimitSec);
    Vector<int64_t> timestampsMonotonicUs;
    bool firstFrame = true;

    assert((rawFp == NULL && muxer != NULL) || (rawFp != NULL && muxer == NULL));

    Vector<sp<MediaCodecBuffer> > buffers;
    err = encoder->getOutputBuffers(&buffers);
    if (err != NO_ERROR) {
        fprintf(stderr, "Unable to get output buffers (err=%d)\n", err);
        return err;
    }

    // Run until we're signaled.
    while (!gStopRequested) {
        size_t bufIndex, offset, size;
        int64_t ptsUsec;
        uint32_t flags;

        if (firstFrame) {
            ATRACE_NAME("first_frame");
            firstFrame = false;
        }

        if (systemTime(CLOCK_MONOTONIC) > endWhenNsec) {
            if (gVerbose) {
                printf("Time limit reached\n");
                fflush(stdout);
            }
            break;
        }

        ALOGV("Calling dequeueOutputBuffer");
        err = encoder->dequeueOutputBuffer(&bufIndex, &offset, &size, &ptsUsec,
                &flags, kTimeout);
        ALOGV("dequeueOutputBuffer returned %d", err);
        switch (err) {
        case NO_ERROR:
            // got a buffer
            if ((flags & MediaCodec::BUFFER_FLAG_CODECCONFIG) != 0) {
                ALOGV("Got codec config buffer (%zu bytes)", size);
                if (muxer != NULL) {
                    // ignore this -- we passed the CSD into MediaMuxer when
                    // we got the format change notification
                    size = 0;
                }
            }
            if (size != 0) {
                ALOGV("Got data in buffer %zu, size=%zu, pts=%" PRId64,
                        bufIndex, size, ptsUsec);

                { // scope
                    ATRACE_NAME("orientation");
                    // Check orientation, update if it has changed.
                    //
                    // Polling for changes is inefficient and wrong, but the
                    // useful stuff is hard to get at without a Dalvik VM.
                    ui::DisplayState displayState;
                    err = SurfaceComposerClient::getDisplayState(display, &displayState);
                    if (err != NO_ERROR) {
                        ALOGW("getDisplayState() failed: %d", err);
                    } else if (orientation != displayState.orientation) {
                        ALOGD("orientation changed, now %s", toCString(displayState.orientation));
                        SurfaceComposerClient::Transaction t;
                        setDisplayProjection(t, virtualDpy, displayState);
                        t.apply();
                        orientation = displayState.orientation;
                    }
                }

                // If the virtual display isn't providing us with timestamps,
                // use the current time. This isn't great -- we could get
                // encoded data in clusters -- but we're not expecting
                // to hit this anyway.
                if (ptsUsec == 0) {
                    ptsUsec = systemTime(SYSTEM_TIME_MONOTONIC) / 1000;
                }

                if (muxer == NULL) {
                    fwrite(buffers[bufIndex]->data(), 1, size, rawFp);
                    // Flush the data immediately in case we're streaming.
                    // We don't want to do this if all we've written is
                    // the SPS/PPS data because mplayer gets confused.
                    if ((flags & MediaCodec::BUFFER_FLAG_CODECCONFIG) == 0) {
                        fflush(rawFp);
                    }
                } else {
                    // The MediaMuxer docs are unclear, but it appears that we
                    // need to pass either the full set of BufferInfo flags, or
                    // (flags & BUFFER_FLAG_SYNCFRAME).
                    //
                    // If this blocks for too long we could drop frames. We may
                    // want to queue these up and do them on a different thread.
                    ATRACE_NAME("write sample");
                    assert(trackIdx != -1);
                    // TODO
                    sp<ABuffer> buffer = new ABuffer(
                            buffers[bufIndex]->data(), buffers[bufIndex]->size());
                    AMediaCodecBufferInfo bufferInfo = {
                        0 /* offset */,
                        static_cast<int32_t>(buffer->size()),
                        ptsUsec /* presentationTimeUs */,
                        flags
                    };
                    err = AMediaMuxer_writeSampleData(muxer, trackIdx, buffer->data(), &bufferInfo);
                    if (err != NO_ERROR) {
                        fprintf(stderr,
                                "Failed writing data to muxer (err=%d)\n", err);
                        return err;
                    }
                    if (gOutputFormat == FORMAT_MP4) {
                        timestampsMonotonicUs.add(ptsUsec);
                    }
                }
                debugNumFrames++;
            }
            err = encoder->releaseOutputBuffer(bufIndex);
            if (err != NO_ERROR) {
                fprintf(stderr, "Unable to release output buffer (err=%d)\n",
                        err);
                return err;
            }
            if ((flags & MediaCodec::BUFFER_FLAG_EOS) != 0) {
                // Not expecting EOS from SurfaceFlinger. Go with it.
                ALOGI("Received end-of-stream");
                gStopRequested = true;
            }
            break;
        case -EAGAIN:                       // INFO_TRY_AGAIN_LATER
            ALOGV("Got -EAGAIN, looping");
            break;
        case android::INFO_FORMAT_CHANGED:  // INFO_OUTPUT_FORMAT_CHANGED
            {
                // Format includes CSD, which we must provide to muxer.
                ALOGV("Encoder format changed");
                sp<AMessage> newFormat;
                encoder->getOutputFormat(&newFormat);
                // TODO remove when MediaCodec has been replaced with AMediaCodec
                AMediaFormat *ndkFormat = AMediaFormat_fromMsg(&newFormat);
                if (muxer != NULL) {
                    trackIdx = AMediaMuxer_addTrack(muxer, ndkFormat);
                    if (gOutputFormat == FORMAT_MP4) {
                        AMediaFormat *metaFormat = AMediaFormat_new();
                        AMediaFormat_setString(metaFormat, AMEDIAFORMAT_KEY_MIME, kMimeTypeApplicationOctetstream);
                        metaLegacyTrackIdx = AMediaMuxer_addTrack(muxer, metaFormat);
                        metaTrackIdx = AMediaMuxer_addTrack(muxer, metaFormat);
                        AMediaFormat_delete(metaFormat);
                    }
                    ALOGV("Starting muxer");
                    err = AMediaMuxer_start(muxer);
                    if (err != NO_ERROR) {
                        fprintf(stderr, "Unable to start muxer (err=%d)\n", err);
                        return err;
                    }
                }
            }
            break;
        case android::INFO_OUTPUT_BUFFERS_CHANGED:
            // Not expected for an encoder; handle it anyway.
            ALOGV("Encoder buffers changed");
            err = encoder->getOutputBuffers(&buffers);
            if (err != NO_ERROR) {
                fprintf(stderr,
                        "Unable to get new output buffers (err=%d)\n", err);
                return err;
            }
            break;
        case INVALID_OPERATION:
            ALOGW("dequeueOutputBuffer returned INVALID_OPERATION");
            return err;
        default:
            fprintf(stderr,
                    "Got weird result %d from dequeueOutputBuffer\n", err);
            return err;
        }
    }

    ALOGV("Encoder stopping (req=%d)", gStopRequested);
    if (gVerbose) {
        printf("Encoder stopping; recorded %u frames in %" PRId64 " seconds\n",
                debugNumFrames, nanoseconds_to_seconds(
                        systemTime(CLOCK_MONOTONIC) - startWhenNsec));
        fflush(stdout);
    }
    if (metaLegacyTrackIdx >= 0 && metaTrackIdx >= 0 && !timestampsMonotonicUs.isEmpty()) {
        err = writeWinscopeMetadataLegacy(timestampsMonotonicUs, metaLegacyTrackIdx, muxer);
        if (err != NO_ERROR) {
            fprintf(stderr, "Failed writing legacy winscope metadata to muxer (err=%d)\n", err);
            return err;
        }

        err = writeWinscopeMetadata(timestampsMonotonicUs, metaTrackIdx, muxer);
        if (err != NO_ERROR) {
            fprintf(stderr, "Failed writing winscope metadata to muxer (err=%d)\n", err);
            return err;
        }
    }
    return NO_ERROR;
}

/*
 * Raw H.264 byte stream output requested. Send the output to stdout
 * if desired. If the output is a tty, reconfigure it to avoid the
 * CRLF line termination that we see with "adb shell" commands.
 */
static FILE* prepareRawOutput(const char* fileName) {
    FILE* rawFp = NULL;

    if (strcmp(fileName, "-") == 0) {
        if (gVerbose) {
            fprintf(stderr, "ERROR: verbose output and '-' not compatible\n");
            return NULL;
        }
        rawFp = stdout;
    } else {
        rawFp = fopen(fileName, "w");
        if (rawFp == NULL) {
            fprintf(stderr, "fopen raw failed: %s\n", strerror(errno));
            return NULL;
        }
    }

    int fd = fileno(rawFp);
    if (isatty(fd)) {
        // best effort -- reconfigure tty for "raw"
        ALOGD("raw video output to tty (fd=%d)", fd);
        struct termios term;
        if (tcgetattr(fd, &term) == 0) {
            cfmakeraw(&term);
            if (tcsetattr(fd, TCSANOW, &term) == 0) {
                ALOGD("tty successfully configured for raw");
            }
        }
    }

    return rawFp;
}

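// Rounds down to the nearest even value by clearing the low bit. The AVC
// encoder can't be configured with odd dimensions, so odd display sizes
// are trimmed by one pixel (see recordScreen() below).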
static inline uint32_t floorToEven(uint32_t num) {
    return num & ~1;
}

/*
 * Main "do work" start point.
 *
 * Configures codec, muxer, and virtual display, then starts moving bits
 * around.
 */
static status_t recordScreen(const char* fileName) {
    status_t err;

    // Configure signal handler.
    err = configureSignals();
    if (err != NO_ERROR) return err;

    // Start Binder thread pool. MediaCodec needs to be able to receive
    // messages from mediaserver.
    sp<ProcessState> self = ProcessState::self();
    self->startThreadPool();

    // Get main display parameters.
    sp<IBinder> display = SurfaceComposerClient::getPhysicalDisplayToken(
            gPhysicalDisplayId);
    if (display == nullptr) {
        fprintf(stderr, "ERROR: no display\n");
        return NAME_NOT_FOUND;
    }

    ui::DisplayState displayState;
    err = SurfaceComposerClient::getDisplayState(display, &displayState);
    if (err != NO_ERROR) {
        fprintf(stderr, "ERROR: unable to get display state\n");
        return err;
    }

    DisplayMode displayMode;
    err = SurfaceComposerClient::getActiveDisplayMode(display, &displayMode);
    if (err != NO_ERROR) {
        fprintf(stderr, "ERROR: unable to get display config\n");
        return err;
    }

    const ui::Size& layerStackSpaceRect = displayState.layerStackSpaceRect;
    if (gVerbose) {
        printf("Display is %dx%d @%.2ffps (orientation=%s), layerStack=%u\n",
                layerStackSpaceRect.getWidth(), layerStackSpaceRect.getHeight(),
                displayMode.refreshRate, toCString(displayState.orientation),
                displayState.layerStack.id);
        fflush(stdout);
    }

    // The encoder can't be configured with odd width or height values.
    if (gVideoWidth == 0) {
        gVideoWidth = floorToEven(layerStackSpaceRect.getWidth());
    }
    if (gVideoHeight == 0) {
        gVideoHeight = floorToEven(layerStackSpaceRect.getHeight());
    }

    // Configure and start the encoder.
    sp<MediaCodec> encoder;
    sp<FrameOutput> frameOutput;
    sp<IGraphicBufferProducer> encoderInputSurface;
    if (gOutputFormat != FORMAT_FRAMES && gOutputFormat != FORMAT_RAW_FRAMES) {
        err = prepareEncoder(displayMode.refreshRate, &encoder, &encoderInputSurface);

        if (err != NO_ERROR && !gSizeSpecified) {
            // fallback is defined for landscape; swap if we're in portrait
            bool needSwap = gVideoWidth < gVideoHeight;
            uint32_t newWidth = needSwap ? kFallbackHeight : kFallbackWidth;
            uint32_t newHeight = needSwap ? kFallbackWidth : kFallbackHeight;
            if (gVideoWidth != newWidth && gVideoHeight != newHeight) {
                ALOGV("Retrying with 720p");
                fprintf(stderr, "WARNING: failed at %dx%d, retrying at %dx%d\n",
                        gVideoWidth, gVideoHeight, newWidth, newHeight);
                gVideoWidth = newWidth;
                gVideoHeight = newHeight;
                err = prepareEncoder(displayMode.refreshRate, &encoder, &encoderInputSurface);
            }
        }
        if (err != NO_ERROR) return err;

        // From here on, we must explicitly release() the encoder before it goes
        // out of scope, or we will get an assertion failure from stagefright
        // later on in a different thread.
    } else {
        // We're not using an encoder at all. The "encoder input surface" we hand to
        // SurfaceFlinger will just feed directly to us.
        frameOutput = new FrameOutput();
        err = frameOutput->createInputSurface(gVideoWidth, gVideoHeight, &encoderInputSurface);
        if (err != NO_ERROR) {
            return err;
        }
    }

    // Draw the "info" page by rendering a frame with GLES and sending
    // it directly to the encoder.
    // TODO: consider displaying this as a regular layer to avoid b/11697754
    if (gWantInfoScreen) {
        Overlay::drawInfoPage(encoderInputSurface);
    }

    // Configure optional overlay.
    sp<IGraphicBufferProducer> bufferProducer;
    sp<Overlay> overlay;
    if (gWantFrameTime) {
        // Send virtual display frames to an external texture.
        overlay = new Overlay(gMonotonicTime);
        err = overlay->start(encoderInputSurface, &bufferProducer);
        if (err != NO_ERROR) {
            if (encoder != NULL) encoder->release();
            return err;
        }
        if (gVerbose) {
            printf("Bugreport overlay created\n");
            fflush(stdout);
        }
    } else {
        // Use the encoder's input surface as the virtual display surface.
        bufferProducer = encoderInputSurface;
    }

    // Configure virtual display.
    sp<IBinder> dpy;
    err = prepareVirtualDisplay(displayState, bufferProducer, &dpy);
    if (err != NO_ERROR) {
        if (encoder != NULL) encoder->release();
        return err;
    }

    AMediaMuxer *muxer = nullptr;
    FILE* rawFp = NULL;
    switch (gOutputFormat) {
    case FORMAT_MP4:
    case FORMAT_WEBM:
    case FORMAT_3GPP: {
        // Configure muxer. We have to wait for the CSD blob from the encoder
        // before we can start it.
        err = unlink(fileName);
        if (err != 0 && errno != ENOENT) {
            fprintf(stderr, "ERROR: couldn't remove existing file\n");
            abort();
        }
        int fd = open(fileName, O_CREAT | O_LARGEFILE | O_TRUNC | O_RDWR, S_IRUSR | S_IWUSR);
        if (fd < 0) {
            fprintf(stderr, "ERROR: couldn't open file\n");
            abort();
        }
        if (gOutputFormat == FORMAT_MP4) {
            muxer = AMediaMuxer_new(fd, AMEDIAMUXER_OUTPUT_FORMAT_MPEG_4);
        } else if (gOutputFormat == FORMAT_WEBM) {
            muxer = AMediaMuxer_new(fd, AMEDIAMUXER_OUTPUT_FORMAT_WEBM);
        } else {
            muxer = AMediaMuxer_new(fd, AMEDIAMUXER_OUTPUT_FORMAT_THREE_GPP);
        }
        close(fd);
        if (gRotate) {
            AMediaMuxer_setOrientationHint(muxer, 90);  // TODO: does this do anything?
        }
        break;
    }
    case FORMAT_H264:
    case FORMAT_FRAMES:
    case FORMAT_RAW_FRAMES: {
        rawFp = prepareRawOutput(fileName);
        if (rawFp == NULL) {
            if (encoder != NULL) encoder->release();
            return -1;
        }
        break;
    }
    default:
        fprintf(stderr, "ERROR: unknown format %d\n", gOutputFormat);
        abort();
    }

    if (gOutputFormat == FORMAT_FRAMES || gOutputFormat == FORMAT_RAW_FRAMES) {
        // TODO: if we want to make this a proper feature, we should output
        //       an outer header with version info. Right now we never change
        //       the frame size or format, so we could conceivably just send
        //       the current frame header once and then follow it with an
        //       unbroken stream of data.

        // Make the EGL context current again. This gets unhooked if we're
        // using "--bugreport" mode.
        // TODO: figure out if we can eliminate this
        frameOutput->prepareToCopy();

        while (!gStopRequested) {
            // Poll for frames, the same way we do for MediaCodec. We do
            // all of the work on the main thread.
            //
            // Ideally we'd sleep indefinitely and wake when the
            // stop was requested, but this will do for now. (It almost
            // works because wait() wakes when a signal hits, but we
            // need to handle the edge cases.)
            bool rawFrames = gOutputFormat == FORMAT_RAW_FRAMES;
            err = frameOutput->copyFrame(rawFp, 250000, rawFrames);
            if (err == ETIMEDOUT) {
                err = NO_ERROR;
            } else if (err != NO_ERROR) {
                ALOGE("Got error %d from copyFrame()", err);
                break;
            }
        }
    } else {
        // Main encoder loop.
        err = runEncoder(encoder, muxer, rawFp, display, dpy, displayState.orientation);
        if (err != NO_ERROR) {
            fprintf(stderr, "Encoder failed (err=%d)\n", err);
            // fall through to cleanup
        }

        if (gVerbose) {
            printf("Stopping encoder and muxer\n");
            fflush(stdout);
        }
    }

    // Shut everything down, starting with the producer side.
    encoderInputSurface = NULL;
    SurfaceComposerClient::destroyDisplay(dpy);
    if (overlay != NULL) overlay->stop();
    if (encoder != NULL) encoder->stop();
    if (muxer != NULL) {
        // If we don't stop muxer explicitly, i.e. let the destructor run,
        // it may hang (b/11050628).
        err = AMediaMuxer_stop(muxer);
    } else if (rawFp != stdout) {
        fclose(rawFp);
    }
    if (encoder != NULL) encoder->release();

    return err;
}

/*
 * Sends a broadcast to the media scanner to tell it about the new video.
 *
 * This is optional, but nice to have.
 */
static status_t notifyMediaScanner(const char* fileName) {
    // need to do allocations before the fork()
    String8 fileUrl("file://");
    fileUrl.append(fileName);

    const char* kCommand = "/system/bin/am";
    const char* const argv[] = {
            kCommand,
            "broadcast",
            "-a",
            "android.intent.action.MEDIA_SCANNER_SCAN_FILE",
            "-d",
            fileUrl.string(),
            NULL
    };
    if (gVerbose) {
        printf("Executing:");
        for (int i = 0; argv[i] != NULL; i++) {
            printf(" %s", argv[i]);
        }
        putchar('\n');
        fflush(stdout);
    }

    pid_t pid = fork();
    if (pid < 0) {
        int err = errno;
        ALOGW("fork() failed: %s", strerror(err));
        return -err;
    } else if (pid > 0) {
        // parent; wait for the child, mostly to make the verbose-mode output
        // look right, but also to check for and log failures
        int status;
        pid_t actualPid = TEMP_FAILURE_RETRY(waitpid(pid, &status, 0));
        if (actualPid != pid) {
            ALOGW("waitpid(%d) returned %d (errno=%d)", pid, actualPid, errno);
        } else if (status != 0) {
            ALOGW("'am broadcast' exited with status=%d", status);
        } else {
            ALOGV("'am broadcast' exited successfully");
        }
    } else {
        if (!gVerbose) {
            // non-verbose, suppress 'am' output
            ALOGV("closing stdout/stderr in child");
            int fd = open("/dev/null", O_WRONLY);
            if (fd >= 0) {
                dup2(fd, STDOUT_FILENO);
                dup2(fd, STDERR_FILENO);
                close(fd);
            }
        }
        execv(kCommand, const_cast<char* const*>(argv));
        ALOGE("execv(%s) failed: %s\n", kCommand, strerror(errno));
        exit(1);
    }
    return NO_ERROR;
}

/*
 * Parses a string of the form "1280x720".
 *
 * Returns true on success.
 */
static bool parseWidthHeight(const char* widthHeight, uint32_t* pWidth,
        uint32_t* pHeight) {
    long width, height;
    char* end;

    // Must specify base 10, or "0x0" gets parsed differently.
    width = strtol(widthHeight, &end, 10);
    if (end == widthHeight || *end != 'x' || *(end+1) == '\0') {
        // invalid chars in width, or missing 'x', or missing height
        return false;
    }
    height = strtol(end + 1, &end, 10);
    if (*end != '\0') {
        // invalid chars in height
        return false;
    }

    *pWidth = width;
    *pHeight = height;
    return true;
}
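
// Example: parseWidthHeight("1280x720", &w, &h) sets w=1280, h=720 and
// returns true; "1280x" and "1280x720p" both return false.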

/*
 * Accepts a string with a bare number ("4000000") or with a single-character
 * unit ("4m").
 *
 * Returns an error if parsing fails.
 */
static status_t parseValueWithUnit(const char* str, uint32_t* pValue) {
    long value;
    char* endptr;

    value = strtol(str, &endptr, 10);
    if (*endptr == '\0') {
        // bare number
        *pValue = value;
        return NO_ERROR;
    } else if (toupper(*endptr) == 'M' && *(endptr+1) == '\0') {
        *pValue = value * 1000000;  // check for overflow?
        return NO_ERROR;
    } else {
        fprintf(stderr, "Unrecognized value: %s\n", str);
        return UNKNOWN_ERROR;
    }
}
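
// Example: "4000000", "4M", and "4m" all parse to 4000000; "4k" is rejected.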

/*
 * Dumps usage on stderr.
 */
static void usage() {
    fprintf(stderr,
        "Usage: screenrecord [options] <filename>\n"
        "\n"
        "Android screenrecord v%d.%d. Records the device's display to a .mp4 file.\n"
        "\n"
        "Options:\n"
        "--size WIDTHxHEIGHT\n"
        "    Set the video size, e.g. \"1280x720\". Default is the device's main\n"
        "    display resolution (if supported), 1280x720 if not. For best results,\n"
        "    use a size supported by the AVC encoder.\n"
        "--bit-rate RATE\n"
        "    Set the video bit rate, in bits per second. Value may be specified as\n"
        "    bits or megabits, e.g. '4000000' is equivalent to '4M'. Default %dMbps.\n"
        "--bugreport\n"
        "    Add additional information, such as a timestamp overlay, that is helpful\n"
        "    in videos captured to illustrate bugs.\n"
        "--time-limit TIME\n"
        "    Set the maximum recording time, in seconds. Default / maximum is %d.\n"
        "--display-id ID\n"
        "    Specify the physical display ID to record. Default is the primary display.\n"
        "    See \"dumpsys SurfaceFlinger --display-id\" for valid display IDs.\n"
        "--verbose\n"
        "    Display interesting information on stdout.\n"
        "--help\n"
        "    Show this message.\n"
        "\n"
        "Recording continues until Ctrl-C is hit or the time limit is reached.\n"
        "\n",
        kVersionMajor, kVersionMinor, gBitRate / 1000000, gTimeLimitSec
        );
}

/*
 * Parses args and kicks things off.
 */
int main(int argc, char* const argv[]) {
    static const struct option longOptions[] = {
        { "help",               no_argument,        NULL, 'h' },
        { "verbose",            no_argument,        NULL, 'v' },
        { "size",               required_argument,  NULL, 's' },
        { "bit-rate",           required_argument,  NULL, 'b' },
        { "time-limit",         required_argument,  NULL, 't' },
        { "bugreport",          no_argument,        NULL, 'u' },
        // "unofficial" options
        { "show-device-info",   no_argument,        NULL, 'i' },
        { "show-frame-time",    no_argument,        NULL, 'f' },
        { "rotate",             no_argument,        NULL, 'r' },
        { "output-format",      required_argument,  NULL, 'o' },
        { "codec-name",         required_argument,  NULL, 'N' },
        { "monotonic-time",     no_argument,        NULL, 'm' },
        { "persistent-surface", no_argument,        NULL, 'p' },
        { "bframes",            required_argument,  NULL, 'B' },
        { "display-id",         required_argument,  NULL, 'd' },
        { NULL,                 0,                  NULL, 0 }
    };

    std::optional<PhysicalDisplayId> displayId = SurfaceComposerClient::getInternalDisplayId();
    if (!displayId) {
        fprintf(stderr, "Failed to get ID for internal display\n");
        return 1;
    }

    gPhysicalDisplayId = *displayId;

    while (true) {
        int optionIndex = 0;
        int ic = getopt_long(argc, argv, "", longOptions, &optionIndex);
        if (ic == -1) {
            break;
        }

        switch (ic) {
        case 'h':
            usage();
            return 0;
        case 'v':
            gVerbose = true;
            break;
        case 's':
            if (!parseWidthHeight(optarg, &gVideoWidth, &gVideoHeight)) {
                fprintf(stderr, "Invalid size '%s', must be width x height\n",
                        optarg);
                return 2;
            }
            if (gVideoWidth == 0 || gVideoHeight == 0) {
                fprintf(stderr,
                        "Invalid size %ux%u, width and height may not be zero\n",
                        gVideoWidth, gVideoHeight);
                return 2;
            }
            gSizeSpecified = true;
            break;
        case 'b':
            if (parseValueWithUnit(optarg, &gBitRate) != NO_ERROR) {
                return 2;
            }
            if (gBitRate < kMinBitRate || gBitRate > kMaxBitRate) {
                fprintf(stderr,
                        "Bit rate %dbps outside acceptable range [%d,%d]\n",
                        gBitRate, kMinBitRate, kMaxBitRate);
                return 2;
            }
            break;
        case 't':
            gTimeLimitSec = atoi(optarg);
            if (gTimeLimitSec == 0 || gTimeLimitSec > kMaxTimeLimitSec) {
                fprintf(stderr,
                        "Time limit %ds outside acceptable range [1,%d]\n",
                        gTimeLimitSec, kMaxTimeLimitSec);
                return 2;
            }
            break;
        case 'u':
            gWantInfoScreen = true;
            gWantFrameTime = true;
            break;
        case 'i':
            gWantInfoScreen = true;
            break;
        case 'f':
            gWantFrameTime = true;
            break;
        case 'r':
            // experimental feature
            gRotate = true;
            break;
        case 'o':
            if (strcmp(optarg, "mp4") == 0) {
                gOutputFormat = FORMAT_MP4;
            } else if (strcmp(optarg, "h264") == 0) {
                gOutputFormat = FORMAT_H264;
            } else if (strcmp(optarg, "webm") == 0) {
                gOutputFormat = FORMAT_WEBM;
            } else if (strcmp(optarg, "3gpp") == 0) {
                gOutputFormat = FORMAT_3GPP;
            } else if (strcmp(optarg, "frames") == 0) {
                gOutputFormat = FORMAT_FRAMES;
            } else if (strcmp(optarg, "raw-frames") == 0) {
                gOutputFormat = FORMAT_RAW_FRAMES;
            } else {
                fprintf(stderr, "Unknown format '%s'\n", optarg);
                return 2;
            }
            break;
        case 'N':
            gCodecName = optarg;
            break;
        case 'm':
            gMonotonicTime = true;
            break;
        case 'p':
            gPersistentSurface = true;
            break;
        case 'B':
            if (parseValueWithUnit(optarg, &gBframes) != NO_ERROR) {
                return 2;
            }
            break;
        case 'd':
            if (const auto id = android::DisplayId::fromValue<PhysicalDisplayId>(atoll(optarg));
                    id && SurfaceComposerClient::getPhysicalDisplayToken(*id)) {
                gPhysicalDisplayId = *id;
                break;
            }

            fprintf(stderr, "Invalid physical display ID\n");
            return 2;
        default:
            if (ic != '?') {
                fprintf(stderr, "getopt_long returned unexpected value 0x%x\n", ic);
            }
            return 2;
        }
    }

    if (optind != argc - 1) {
        fprintf(stderr, "Must specify output file (see --help).\n");
        return 2;
    }

    const char* fileName = argv[optind];
    if (gOutputFormat == FORMAT_MP4) {
        // MediaMuxer tries to create the file in the constructor, but we don't
        // learn about the failure until muxer.start(), which returns a generic
        // error code without logging anything. We attempt to create the file
        // now for better diagnostics.
        int fd = open(fileName, O_CREAT | O_RDWR, 0644);
        if (fd < 0) {
            fprintf(stderr, "Unable to open '%s': %s\n", fileName, strerror(errno));
            return 1;
        }
        close(fd);
    }

    status_t err = recordScreen(fileName);
    if (err == NO_ERROR) {
        // Try to notify the media scanner. Not fatal if this fails.
        notifyMediaScanner(fileName);
    }
    ALOGD(err == NO_ERROR ? "success" : "failed");
    return (int) err;
}