1 /*
2 * Copyright 2013 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 #include <assert.h>
18 #include <ctype.h>
19 #include <fcntl.h>
20 #include <inttypes.h>
21 #include <getopt.h>
22 #include <signal.h>
23 #include <stdio.h>
24 #include <stdlib.h>
25 #include <string.h>
26 #include <sys/stat.h>
27 #include <sys/types.h>
28 #include <sys/wait.h>
29
30 #include <termios.h>
31 #include <unistd.h>
32
33 #define LOG_TAG "ScreenRecord"
34 #define ATRACE_TAG ATRACE_TAG_GRAPHICS
35 //#define LOG_NDEBUG 0
36 #include <utils/Log.h>
37
38 #include <binder/IPCThreadState.h>
39 #include <utils/Errors.h>
40 #include <utils/Timers.h>
41 #include <utils/Trace.h>
42
43 #include <gui/Surface.h>
44 #include <gui/SurfaceComposerClient.h>
45 #include <gui/ISurfaceComposer.h>
46 #include <ui/DisplayInfo.h>
47 #include <media/openmax/OMX_IVCommon.h>
48 #include <media/stagefright/foundation/ABuffer.h>
49 #include <media/stagefright/foundation/AMessage.h>
50 #include <media/stagefright/MediaCodec.h>
51 #include <media/stagefright/MediaErrors.h>
52 #include <media/stagefright/MediaMuxer.h>
53 #include <media/ICrypto.h>
54
55 #include "screenrecord.h"
56 #include "Overlay.h"
57 #include "FrameOutput.h"
58
59 using namespace android;
60
61 static const uint32_t kMinBitRate = 100000; // 0.1Mbps
62 static const uint32_t kMaxBitRate = 200 * 1000000; // 200Mbps
63 static const uint32_t kMaxTimeLimitSec = 180; // 3 minutes
64 static const uint32_t kFallbackWidth = 1280; // 720p
65 static const uint32_t kFallbackHeight = 720;
66 static const char* kMimeTypeAvc = "video/avc";
67
68 // Command-line parameters.
69 static bool gVerbose = false; // chatty on stdout
70 static bool gRotate = false; // rotate 90 degrees
71 static enum {
72 FORMAT_MP4, FORMAT_H264, FORMAT_FRAMES, FORMAT_RAW_FRAMES
73 } gOutputFormat = FORMAT_MP4; // data format for output
74 static bool gSizeSpecified = false; // was size explicitly requested?
75 static bool gWantInfoScreen = false; // do we want initial info screen?
76 static bool gWantFrameTime = false; // do we want times on each frame?
77 static uint32_t gVideoWidth = 0; // default width+height
78 static uint32_t gVideoHeight = 0;
79 static uint32_t gBitRate = 4000000; // 4Mbps
80 static uint32_t gTimeLimitSec = kMaxTimeLimitSec;
81
82 // Set by signal handler to stop recording.
83 static volatile bool gStopRequested = false;
84
85 // Previous signal handler state, restored after first hit.
86 static struct sigaction gOrigSigactionINT;
87 static struct sigaction gOrigSigactionHUP;
88
89
90 /*
91 * Catch keyboard interrupt signals. On receipt, the "stop requested"
92 * flag is raised, and the original handler is restored (so that, if
93 * we get stuck finishing, a second Ctrl-C will kill the process).
94 */
95 static void signalCatcher(int signum)
96 {
97 gStopRequested = true;
98 switch (signum) {
99 case SIGINT:
100 case SIGHUP:
101 sigaction(SIGINT, &gOrigSigactionINT, NULL);
102 sigaction(SIGHUP, &gOrigSigactionHUP, NULL);
103 break;
104 default:
105 abort();
106 break;
107 }
108 }
109
110 /*
111 * Configures signal handlers. The previous handlers are saved.
112 *
113 * If the command is run from an interactive adb shell, we get SIGINT
114 * when Ctrl-C is hit. If we're run from the host, the local adb process
115 * gets the signal, and we get a SIGHUP when the terminal disconnects.
116 */
117 static status_t configureSignals() {
118 struct sigaction act;
119 memset(&act, 0, sizeof(act));
120 act.sa_handler = signalCatcher;
121 if (sigaction(SIGINT, &act, &gOrigSigactionINT) != 0) {
122 status_t err = -errno;
123 fprintf(stderr, "Unable to configure SIGINT handler: %s\n",
124 strerror(errno));
125 return err;
126 }
127 if (sigaction(SIGHUP, &act, &gOrigSigactionHUP) != 0) {
128 status_t err = -errno;
129 fprintf(stderr, "Unable to configure SIGHUP handler: %s\n",
130 strerror(errno));
131 return err;
132 }
133 return NO_ERROR;
134 }
135
136 /*
137 * Returns "true" if the device is rotated 90 degrees.
138 */
139 static bool isDeviceRotated(int orientation) {
140 return orientation != DISPLAY_ORIENTATION_0 &&
141 orientation != DISPLAY_ORIENTATION_180;
142 }
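// (DISPLAY_ORIENTATION_90 and DISPLAY_ORIENTATION_270 both count as
// "rotated" for the width/height swap done by callers.)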
143
144 /*
145 * Configures and starts the MediaCodec encoder. Obtains an input surface
146 * from the codec.
147 */
148 static status_t prepareEncoder(float displayFps, sp<MediaCodec>* pCodec,
149 sp<IGraphicBufferProducer>* pBufferProducer) {
150 status_t err;
151
152 if (gVerbose) {
153 printf("Configuring recorder for %dx%d %s at %.2fMbps\n",
154 gVideoWidth, gVideoHeight, kMimeTypeAvc, gBitRate / 1000000.0);
155 }
156
157 sp<AMessage> format = new AMessage;
158 format->setInt32("width", gVideoWidth);
159 format->setInt32("height", gVideoHeight);
160 format->setString("mime", kMimeTypeAvc);
161 format->setInt32("color-format", OMX_COLOR_FormatAndroidOpaque);
162 format->setInt32("bitrate", gBitRate);
163 format->setFloat("frame-rate", displayFps);
164 format->setInt32("i-frame-interval", 10);
165
166 sp<ALooper> looper = new ALooper;
167 looper->setName("screenrecord_looper");
168 looper->start();
169 ALOGV("Creating codec");
170 sp<MediaCodec> codec = MediaCodec::CreateByType(looper, kMimeTypeAvc, true);
171 if (codec == NULL) {
172 fprintf(stderr, "ERROR: unable to create %s codec instance\n",
173 kMimeTypeAvc);
174 return UNKNOWN_ERROR;
175 }
176
177 err = codec->configure(format, NULL, NULL,
178 MediaCodec::CONFIGURE_FLAG_ENCODE);
179 if (err != NO_ERROR) {
180 fprintf(stderr, "ERROR: unable to configure %s codec at %dx%d (err=%d)\n",
181 kMimeTypeAvc, gVideoWidth, gVideoHeight, err);
182 codec->release();
183 return err;
184 }
185
186 ALOGV("Creating encoder input surface");
187 sp<IGraphicBufferProducer> bufferProducer;
188 err = codec->createInputSurface(&bufferProducer);
189 if (err != NO_ERROR) {
190 fprintf(stderr,
191 "ERROR: unable to create encoder input surface (err=%d)\n", err);
192 codec->release();
193 return err;
194 }
195
196 ALOGV("Starting codec");
197 err = codec->start();
198 if (err != NO_ERROR) {
199 fprintf(stderr, "ERROR: unable to start codec (err=%d)\n", err);
200 codec->release();
201 return err;
202 }
203
204 ALOGV("Codec prepared");
205 *pCodec = codec;
206 *pBufferProducer = bufferProducer;
207 return 0;
208 }
209
210 /*
211 * Sets the display projection, based on the display dimensions, video size,
212 * and device orientation.
213 */
214 static status_t setDisplayProjection(const sp<IBinder>& dpy,
215 const DisplayInfo& mainDpyInfo) {
216 status_t err;
217
218 // Set the region of the layer stack we're interested in, which in our
219 // case is "all of it". If the app is rotated (so that the width of the
220 // app is based on the height of the display), reverse width/height.
221 bool deviceRotated = isDeviceRotated(mainDpyInfo.orientation);
222 uint32_t sourceWidth, sourceHeight;
223 if (!deviceRotated) {
224 sourceWidth = mainDpyInfo.w;
225 sourceHeight = mainDpyInfo.h;
226 } else {
227 ALOGV("using rotated width/height");
228 sourceHeight = mainDpyInfo.w;
229 sourceWidth = mainDpyInfo.h;
230 }
231 Rect layerStackRect(sourceWidth, sourceHeight);
232
233 // We need to preserve the aspect ratio of the display.
234 float displayAspect = (float) sourceHeight / (float) sourceWidth;
235
236
237 // Set the way we map the output onto the display surface (which will
238 // be e.g. 1280x720 for a 720p video). The rect is interpreted
239 // post-rotation, so if the display is rotated 90 degrees we need to
240 // "pre-rotate" it by flipping width/height, so that the orientation
241 // adjustment changes it back.
242 //
243 // We might want to encode a portrait display as landscape to use more
244 // of the screen real estate. (If players respect a 90-degree rotation
245 // hint, we can essentially get a 720x1280 video instead of 1280x720.)
246 // In that case, we swap the configured video width/height and then
247 // supply a rotation value to the display projection.
248 uint32_t videoWidth, videoHeight;
249 uint32_t outWidth, outHeight;
250 if (!gRotate) {
251 videoWidth = gVideoWidth;
252 videoHeight = gVideoHeight;
253 } else {
254 videoWidth = gVideoHeight;
255 videoHeight = gVideoWidth;
256 }
257 if (videoHeight > (uint32_t)(videoWidth * displayAspect)) {
258 // limited by narrow width; reduce height
259 outWidth = videoWidth;
260 outHeight = (uint32_t)(videoWidth * displayAspect);
261 } else {
262 // limited by short height; restrict width
263 outHeight = videoHeight;
264 outWidth = (uint32_t)(videoHeight / displayAspect);
265 }
266 uint32_t offX, offY;
267 offX = (videoWidth - outWidth) / 2;
268 offY = (videoHeight - outHeight) / 2;
269 Rect displayRect(offX, offY, offX + outWidth, offY + outHeight);
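// Worked example of the math above (numbers are illustrative, not taken from
// this code): a 1080x1920 portrait display captured to a 1280x720 video
// without --rotate gives displayAspect = 1920/1080 ~= 1.78. Since 720 is not
// greater than 1280 * 1.78 ~= 2275, the else branch runs: outHeight = 720,
// outWidth = 720 / 1.78 ~= 405, offX = (1280 - 405) / 2 = 437, offY = 0,
// i.e. a pillar-boxed 405x720 image centered in the 1280x720 frame.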
270
271 if (gVerbose) {
272 if (gRotate) {
273 printf("Rotated content area is %ux%u at offset x=%d y=%d\n",
274 outHeight, outWidth, offY, offX);
275 } else {
276 printf("Content area is %ux%u at offset x=%d y=%d\n",
277 outWidth, outHeight, offX, offY);
278 }
279 }
280
281 SurfaceComposerClient::setDisplayProjection(dpy,
282 gRotate ? DISPLAY_ORIENTATION_90 : DISPLAY_ORIENTATION_0,
283 layerStackRect, displayRect);
284 return NO_ERROR;
285 }
286
287 /*
288 * Configures the virtual display. When this completes, virtual display
289 * frames will start arriving from the buffer producer.
290 */
291 static status_t prepareVirtualDisplay(const DisplayInfo& mainDpyInfo,
292 const sp<IGraphicBufferProducer>& bufferProducer,
293 sp<IBinder>* pDisplayHandle) {
294 sp<IBinder> dpy = SurfaceComposerClient::createDisplay(
295 String8("ScreenRecorder"), false /*secure*/);
296
297 SurfaceComposerClient::openGlobalTransaction();
298 SurfaceComposerClient::setDisplaySurface(dpy, bufferProducer);
299 setDisplayProjection(dpy, mainDpyInfo);
300 SurfaceComposerClient::setDisplayLayerStack(dpy, 0); // default stack
301 SurfaceComposerClient::closeGlobalTransaction();
302
303 *pDisplayHandle = dpy;
304
305 return NO_ERROR;
306 }
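// The virtual display mirrors layer stack 0, which is normally the stack
// assigned to the built-in display, so everything composited to the main
// screen shows up in the recording. Because the display is created with
// secure=false above, SurfaceFlinger should exclude secure windows from it.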
307
308 /*
309 * Runs the MediaCodec encoder, sending the output to the MediaMuxer. The
310 * input frames are coming from the virtual display as fast as SurfaceFlinger
311 * wants to send them.
312 *
313 * Exactly one of muxer or rawFp must be non-null.
314 *
315 * The muxer must *not* have been started before calling.
316 */
317 static status_t runEncoder(const sp<MediaCodec>& encoder,
318 const sp<MediaMuxer>& muxer, FILE* rawFp, const sp<IBinder>& mainDpy,
319 const sp<IBinder>& virtualDpy, uint8_t orientation) {
320 static int kTimeout = 250000; // be responsive on signal
321 status_t err;
322 ssize_t trackIdx = -1;
323 uint32_t debugNumFrames = 0;
324 int64_t startWhenNsec = systemTime(CLOCK_MONOTONIC);
325 int64_t endWhenNsec = startWhenNsec + seconds_to_nanoseconds(gTimeLimitSec);
326 DisplayInfo mainDpyInfo;
327
328 assert((rawFp == NULL && muxer != NULL) || (rawFp != NULL && muxer == NULL));
329
330 Vector<sp<ABuffer> > buffers;
331 err = encoder->getOutputBuffers(&buffers);
332 if (err != NO_ERROR) {
333 fprintf(stderr, "Unable to get output buffers (err=%d)\n", err);
334 return err;
335 }
336
337 // Run until we're signaled.
338 while (!gStopRequested) {
339 size_t bufIndex, offset, size;
340 int64_t ptsUsec;
341 uint32_t flags;
342
343 if (systemTime(CLOCK_MONOTONIC) > endWhenNsec) {
344 if (gVerbose) {
345 printf("Time limit reached\n");
346 }
347 break;
348 }
349
350 ALOGV("Calling dequeueOutputBuffer");
351 err = encoder->dequeueOutputBuffer(&bufIndex, &offset, &size, &ptsUsec,
352 &flags, kTimeout);
353 ALOGV("dequeueOutputBuffer returned %d", err);
354 switch (err) {
355 case NO_ERROR:
356 // got a buffer
357 if ((flags & MediaCodec::BUFFER_FLAG_CODECCONFIG) != 0) {
358 ALOGV("Got codec config buffer (%zu bytes)", size);
359 if (muxer != NULL) {
360 // ignore this -- we passed the CSD into MediaMuxer when
361 // we got the format change notification
362 size = 0;
363 }
364 }
365 if (size != 0) {
366 ALOGV("Got data in buffer %zu, size=%zu, pts=%" PRId64,
367 bufIndex, size, ptsUsec);
368
369 { // scope
370 ATRACE_NAME("orientation");
371 // Check orientation, update if it has changed.
372 //
373 // Polling for changes is inefficient and wrong, but the
374 // useful stuff is hard to get at without a Dalvik VM.
375 err = SurfaceComposerClient::getDisplayInfo(mainDpy,
376 &mainDpyInfo);
377 if (err != NO_ERROR) {
378 ALOGW("getDisplayInfo(main) failed: %d", err);
379 } else if (orientation != mainDpyInfo.orientation) {
380 ALOGD("orientation changed, now %d", mainDpyInfo.orientation);
381 SurfaceComposerClient::openGlobalTransaction();
382 setDisplayProjection(virtualDpy, mainDpyInfo);
383 SurfaceComposerClient::closeGlobalTransaction();
384 orientation = mainDpyInfo.orientation;
385 }
386 }
387
388 // If the virtual display isn't providing us with timestamps,
389 // use the current time. This isn't great -- we could get
390 // encoded data in clusters -- but we're not expecting
391 // to hit this anyway.
392 if (ptsUsec == 0) {
393 ptsUsec = systemTime(SYSTEM_TIME_MONOTONIC) / 1000;
394 }
395
396 if (muxer == NULL) {
397 fwrite(buffers[bufIndex]->data(), 1, size, rawFp);
398 // Flush the data immediately in case we're streaming.
399 // We don't want to do this if all we've written is
400 // the SPS/PPS data because mplayer gets confused.
401 if ((flags & MediaCodec::BUFFER_FLAG_CODECCONFIG) == 0) {
402 fflush(rawFp);
403 }
404 } else {
405 // The MediaMuxer docs are unclear, but it appears that we
406 // need to pass either the full set of BufferInfo flags, or
407 // (flags & BUFFER_FLAG_SYNCFRAME).
408 //
409 // If this blocks for too long we could drop frames. We may
410 // want to queue these up and do them on a different thread.
411 ATRACE_NAME("write sample");
412 assert(trackIdx != -1);
413 err = muxer->writeSampleData(buffers[bufIndex], trackIdx,
414 ptsUsec, flags);
415 if (err != NO_ERROR) {
416 fprintf(stderr,
417 "Failed writing data to muxer (err=%d)\n", err);
418 return err;
419 }
420 }
421 debugNumFrames++;
422 }
423 err = encoder->releaseOutputBuffer(bufIndex);
424 if (err != NO_ERROR) {
425 fprintf(stderr, "Unable to release output buffer (err=%d)\n",
426 err);
427 return err;
428 }
429 if ((flags & MediaCodec::BUFFER_FLAG_EOS) != 0) {
430 // Not expecting EOS from SurfaceFlinger. Go with it.
431 ALOGI("Received end-of-stream");
432 gStopRequested = true;
433 }
434 break;
435 case -EAGAIN: // INFO_TRY_AGAIN_LATER
436 ALOGV("Got -EAGAIN, looping");
437 break;
438 case INFO_FORMAT_CHANGED: // INFO_OUTPUT_FORMAT_CHANGED
439 {
440 // Format includes CSD, which we must provide to muxer.
441 ALOGV("Encoder format changed");
442 sp<AMessage> newFormat;
443 encoder->getOutputFormat(&newFormat);
444 if (muxer != NULL) {
445 trackIdx = muxer->addTrack(newFormat);
446 ALOGV("Starting muxer");
447 err = muxer->start();
448 if (err != NO_ERROR) {
449 fprintf(stderr, "Unable to start muxer (err=%d)\n", err);
450 return err;
451 }
452 }
453 }
454 break;
455 case INFO_OUTPUT_BUFFERS_CHANGED: // INFO_OUTPUT_BUFFERS_CHANGED
456 // Not expected for an encoder; handle it anyway.
457 ALOGV("Encoder buffers changed");
458 err = encoder->getOutputBuffers(&buffers);
459 if (err != NO_ERROR) {
460 fprintf(stderr,
461 "Unable to get new output buffers (err=%d)\n", err);
462 return err;
463 }
464 break;
465 case INVALID_OPERATION:
466 ALOGW("dequeueOutputBuffer returned INVALID_OPERATION");
467 return err;
468 default:
469 fprintf(stderr,
470 "Got weird result %d from dequeueOutputBuffer\n", err);
471 return err;
472 }
473 }
474
475 ALOGV("Encoder stopping (req=%d)", gStopRequested);
476 if (gVerbose) {
477 printf("Encoder stopping; recorded %u frames in %" PRId64 " seconds\n",
478 debugNumFrames, nanoseconds_to_seconds(
479 systemTime(CLOCK_MONOTONIC) - startWhenNsec));
480 }
481 return NO_ERROR;
482 }
483
484 /*
485 * Raw H.264 byte stream output requested. Send the output to stdout
486 * if desired. If the output is a tty, reconfigure it to avoid the
487 * CRLF line termination that we see with "adb shell" commands.
488 */
489 static FILE* prepareRawOutput(const char* fileName) {
490 FILE* rawFp = NULL;
491
492 if (strcmp(fileName, "-") == 0) {
493 if (gVerbose) {
494 fprintf(stderr, "ERROR: verbose output and '-' not compatible");
495 return NULL;
496 }
497 rawFp = stdout;
498 } else {
499 rawFp = fopen(fileName, "w");
500 if (rawFp == NULL) {
501 fprintf(stderr, "fopen raw failed: %s\n", strerror(errno));
502 return NULL;
503 }
504 }
505
506 int fd = fileno(rawFp);
507 if (isatty(fd)) {
508 // best effort -- reconfigure tty for "raw"
509 ALOGD("raw video output to tty (fd=%d)", fd);
510 struct termios term;
511 if (tcgetattr(fd, &term) == 0) {
512 cfmakeraw(&term);
513 if (tcsetattr(fd, TCSANOW, &term) == 0) {
514 ALOGD("tty successfully configured for raw");
515 }
516 }
517 }
518
519 return rawFp;
520 }
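// One plausible way to consume the raw stream (a hypothetical invocation,
// not exercised by this file) is to pipe it straight off the device, e.g.:
//   adb exec-out screenrecord --output-format=h264 - | ffplay -
// The "-" filename selects stdout above, and the tty reconfiguration keeps an
// interactive "adb shell" from corrupting the byte stream with CR/LF
// translation.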
521
522 /*
523 * Main "do work" start point.
524 *
525 * Configures codec, muxer, and virtual display, then starts moving bits
526 * around.
527 */
528 static status_t recordScreen(const char* fileName) {
529 status_t err;
530
531 // Configure signal handler.
532 err = configureSignals();
533 if (err != NO_ERROR) return err;
534
535 // Start Binder thread pool. MediaCodec needs to be able to receive
536 // messages from mediaserver.
537 sp<ProcessState> self = ProcessState::self();
538 self->startThreadPool();
539
540 // Get main display parameters.
541 sp<IBinder> mainDpy = SurfaceComposerClient::getBuiltInDisplay(
542 ISurfaceComposer::eDisplayIdMain);
543 DisplayInfo mainDpyInfo;
544 err = SurfaceComposerClient::getDisplayInfo(mainDpy, &mainDpyInfo);
545 if (err != NO_ERROR) {
546 fprintf(stderr, "ERROR: unable to get display characteristics\n");
547 return err;
548 }
549 if (gVerbose) {
550 printf("Main display is %dx%d @%.2ffps (orientation=%u)\n",
551 mainDpyInfo.w, mainDpyInfo.h, mainDpyInfo.fps,
552 mainDpyInfo.orientation);
553 }
554
555 bool rotated = isDeviceRotated(mainDpyInfo.orientation);
556 if (gVideoWidth == 0) {
557 gVideoWidth = rotated ? mainDpyInfo.h : mainDpyInfo.w;
558 }
559 if (gVideoHeight == 0) {
560 gVideoHeight = rotated ? mainDpyInfo.w : mainDpyInfo.h;
561 }
562
563 // Configure and start the encoder.
564 sp<MediaCodec> encoder;
565 sp<FrameOutput> frameOutput;
566 sp<IGraphicBufferProducer> encoderInputSurface;
567 if (gOutputFormat != FORMAT_FRAMES && gOutputFormat != FORMAT_RAW_FRAMES) {
568 err = prepareEncoder(mainDpyInfo.fps, &encoder, &encoderInputSurface);
569
570 if (err != NO_ERROR && !gSizeSpecified) {
571 // fallback is defined for landscape; swap if we're in portrait
572 bool needSwap = gVideoWidth < gVideoHeight;
573 uint32_t newWidth = needSwap ? kFallbackHeight : kFallbackWidth;
574 uint32_t newHeight = needSwap ? kFallbackWidth : kFallbackHeight;
575 if (gVideoWidth != newWidth && gVideoHeight != newHeight) {
576 ALOGV("Retrying with 720p");
577 fprintf(stderr, "WARNING: failed at %dx%d, retrying at %dx%d\n",
578 gVideoWidth, gVideoHeight, newWidth, newHeight);
579 gVideoWidth = newWidth;
580 gVideoHeight = newHeight;
581 err = prepareEncoder(mainDpyInfo.fps, &encoder,
582 &encoderInputSurface);
583 }
584 }
585 if (err != NO_ERROR) return err;
586
587 // From here on, we must explicitly release() the encoder before it goes
588 // out of scope, or we will get an assertion failure from stagefright
589 // later on in a different thread.
590 } else {
591 // We're not using an encoder at all. The "encoder input surface" we hand to
592 // SurfaceFlinger will just feed directly to us.
593 frameOutput = new FrameOutput();
594 err = frameOutput->createInputSurface(gVideoWidth, gVideoHeight, &encoderInputSurface);
595 if (err != NO_ERROR) {
596 return err;
597 }
598 }
599
600 // Draw the "info" page by rendering a frame with GLES and sending
601 // it directly to the encoder.
602 // TODO: consider displaying this as a regular layer to avoid b/11697754
603 if (gWantInfoScreen) {
604 Overlay::drawInfoPage(encoderInputSurface);
605 }
606
607 // Configure optional overlay.
608 sp<IGraphicBufferProducer> bufferProducer;
609 sp<Overlay> overlay;
610 if (gWantFrameTime) {
611 // Send virtual display frames to an external texture.
612 overlay = new Overlay();
613 err = overlay->start(encoderInputSurface, &bufferProducer);
614 if (err != NO_ERROR) {
615 if (encoder != NULL) encoder->release();
616 return err;
617 }
618 if (gVerbose) {
619 printf("Bugreport overlay created\n");
620 }
621 } else {
622 // Use the encoder's input surface as the virtual display surface.
623 bufferProducer = encoderInputSurface;
624 }
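// At this point the producer chain is either
//   virtual display -> encoder input surface
// in the normal case, or
//   virtual display -> Overlay (GLES, adds frame-time text) -> encoder input surface
// when --bugreport / --show-frame-time is used; "bufferProducer" is whichever
// surface the virtual display should feed.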
625
626 // Configure virtual display.
627 sp<IBinder> dpy;
628 err = prepareVirtualDisplay(mainDpyInfo, bufferProducer, &dpy);
629 if (err != NO_ERROR) {
630 if (encoder != NULL) encoder->release();
631 return err;
632 }
633
634 sp<MediaMuxer> muxer = NULL;
635 FILE* rawFp = NULL;
636 switch (gOutputFormat) {
637 case FORMAT_MP4: {
638 // Configure muxer. We have to wait for the CSD blob from the encoder
639 // before we can start it.
640 err = unlink(fileName);
641 if (err != 0 && errno != ENOENT) {
642 fprintf(stderr, "ERROR: couldn't remove existing file\n");
643 abort();
644 }
645 int fd = open(fileName, O_CREAT | O_LARGEFILE | O_TRUNC | O_RDWR, S_IRUSR | S_IWUSR);
646 if (fd < 0) {
647 fprintf(stderr, "ERROR: couldn't open file\n");
648 abort();
649 }
650 muxer = new MediaMuxer(fd, MediaMuxer::OUTPUT_FORMAT_MPEG_4);
651 close(fd);
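// Closing the descriptor immediately after constructing the muxer implies
// that MediaMuxer keeps its own reference to it (presumably via dup());
// otherwise the sample writes later in runEncoder() would fail.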
652 if (gRotate) {
653 muxer->setOrientationHint(90); // TODO: does this do anything?
654 }
655 break;
656 }
657 case FORMAT_H264:
658 case FORMAT_FRAMES:
659 case FORMAT_RAW_FRAMES: {
660 rawFp = prepareRawOutput(fileName);
661 if (rawFp == NULL) {
662 if (encoder != NULL) encoder->release();
663 return -1;
664 }
665 break;
666 }
667 default:
668 fprintf(stderr, "ERROR: unknown format %d\n", gOutputFormat);
669 abort();
670 }
671
672 if (gOutputFormat == FORMAT_FRAMES || gOutputFormat == FORMAT_RAW_FRAMES) {
673 // TODO: if we want to make this a proper feature, we should output
674 // an outer header with version info. Right now we never change
675 // the frame size or format, so we could conceivably just send
676 // the current frame header once and then follow it with an
677 // unbroken stream of data.
678
679 // Make the EGL context current again. This gets unhooked if we're
680 // using "--bugreport" mode.
681 // TODO: figure out if we can eliminate this
682 frameOutput->prepareToCopy();
683
684 while (!gStopRequested) {
685 // Poll for frames, the same way we do for MediaCodec. We do
686 // all of the work on the main thread.
687 //
688 // Ideally we'd sleep indefinitely and wake when the
689 // stop was requested, but this will do for now. (It almost
690 // works because wait() wakes when a signal hits, but we
691 // need to handle the edge cases.)
692 bool rawFrames = gOutputFormat == FORMAT_RAW_FRAMES;
693 err = frameOutput->copyFrame(rawFp, 250000, rawFrames);
694 if (err == ETIMEDOUT) {
695 err = NO_ERROR;
696 } else if (err != NO_ERROR) {
697 ALOGE("Got error %d from copyFrame()", err);
698 break;
699 }
700 }
701 } else {
702 // Main encoder loop.
703 err = runEncoder(encoder, muxer, rawFp, mainDpy, dpy,
704 mainDpyInfo.orientation);
705 if (err != NO_ERROR) {
706 fprintf(stderr, "Encoder failed (err=%d)\n", err);
707 // fall through to cleanup
708 }
709
710 if (gVerbose) {
711 printf("Stopping encoder and muxer\n");
712 }
713 }
714
715 // Shut everything down, starting with the producer side.
716 encoderInputSurface = NULL;
717 SurfaceComposerClient::destroyDisplay(dpy);
718 if (overlay != NULL) overlay->stop();
719 if (encoder != NULL) encoder->stop();
720 if (muxer != NULL) {
721 // If we don't stop muxer explicitly, i.e. let the destructor run,
722 // it may hang (b/11050628).
723 err = muxer->stop();
724 } else if (rawFp != stdout) {
725 fclose(rawFp);
726 }
727 if (encoder != NULL) encoder->release();
728
729 return err;
730 }
731
732 /*
733 * Sends a broadcast to the media scanner to tell it about the new video.
734 *
735 * This is optional, but nice to have.
736 */
737 static status_t notifyMediaScanner(const char* fileName) {
738 // need to do allocations before the fork()
739 String8 fileUrl("file://");
740 fileUrl.append(fileName);
741
742 const char* kCommand = "/system/bin/am";
743 const char* const argv[] = {
744 kCommand,
745 "broadcast",
746 "-a",
747 "android.intent.action.MEDIA_SCANNER_SCAN_FILE",
748 "-d",
749 fileUrl.string(),
750 NULL
751 };
752 if (gVerbose) {
753 printf("Executing:");
754 for (int i = 0; argv[i] != NULL; i++) {
755 printf(" %s", argv[i]);
756 }
757 putchar('\n');
758 }
759
760 pid_t pid = fork();
761 if (pid < 0) {
762 int err = errno;
763 ALOGW("fork() failed: %s", strerror(err));
764 return -err;
765 } else if (pid > 0) {
766 // parent; wait for the child, mostly to make the verbose-mode output
767 // look right, but also to check for and log failures
768 int status;
769 pid_t actualPid = TEMP_FAILURE_RETRY(waitpid(pid, &status, 0));
770 if (actualPid != pid) {
771 ALOGW("waitpid(%d) returned %d (errno=%d)", pid, actualPid, errno);
772 } else if (status != 0) {
773 ALOGW("'am broadcast' exited with status=%d", status);
774 } else {
775 ALOGV("'am broadcast' exited successfully");
776 }
777 } else {
778 if (!gVerbose) {
779 // non-verbose, suppress 'am' output
780 ALOGV("closing stdout/stderr in child");
781 int fd = open("/dev/null", O_WRONLY);
782 if (fd >= 0) {
783 dup2(fd, STDOUT_FILENO);
784 dup2(fd, STDERR_FILENO);
785 close(fd);
786 }
787 }
788 execv(kCommand, const_cast<char* const*>(argv));
789 ALOGE("execv(%s) failed: %s\n", kCommand, strerror(errno));
790 exit(1);
791 }
792 return NO_ERROR;
793 }
794
795 /*
796 * Parses a string of the form "1280x720".
797 *
798 * Returns true on success.
799 */
800 static bool parseWidthHeight(const char* widthHeight, uint32_t* pWidth,
801 uint32_t* pHeight) {
802 long width, height;
803 char* end;
804
805 // Must specify base 10, or "0x0" gets parsed differently.
806 width = strtol(widthHeight, &end, 10);
807 if (end == widthHeight || *end != 'x' || *(end+1) == '\0') {
808 // invalid chars in width, or missing 'x', or missing height
809 return false;
810 }
811 height = strtol(end + 1, &end, 10);
812 if (*end != '\0') {
813 // invalid chars in height
814 return false;
815 }
816
817 *pWidth = width;
818 *pHeight = height;
819 return true;
820 }
821
822 /*
823 * Accepts a string with a bare number ("4000000") or with a single-character
824 * unit ("4m").
825 *
826 * Returns an error if parsing fails.
827 */
828 static status_t parseValueWithUnit(const char* str, uint32_t* pValue) {
829 long value;
830 char* endptr;
831
832 value = strtol(str, &endptr, 10);
833 if (*endptr == '\0') {
834 // bare number
835 *pValue = value;
836 return NO_ERROR;
837 } else if (toupper(*endptr) == 'M' && *(endptr+1) == '\0') {
838 *pValue = value * 1000000; // check for overflow?
839 return NO_ERROR;
840 } else {
841 fprintf(stderr, "Unrecognized value: %s\n", str);
842 return UNKNOWN_ERROR;
843 }
844 }
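// For example, "4000000" and "4M" both yield 4,000,000 here; only an
// 'M'/'m' suffix is recognized, so a value like "500k" is rejected as
// unrecognized.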
845
846 /*
847 * Dumps usage on stderr.
848 */
849 static void usage() {
850 fprintf(stderr,
851 "Usage: screenrecord [options] <filename>\n"
852 "\n"
853 "Android screenrecord v%d.%d. Records the device's display to a .mp4 file.\n"
854 "\n"
855 "Options:\n"
856 "--size WIDTHxHEIGHT\n"
857 " Set the video size, e.g. \"1280x720\". Default is the device's main\n"
858 " display resolution (if supported), 1280x720 if not. For best results,\n"
859 " use a size supported by the AVC encoder.\n"
860 "--bit-rate RATE\n"
861 " Set the video bit rate, in bits per second. Value may be specified as\n"
862 " bits or megabits, e.g. '4000000' is equivalent to '4M'. Default %dMbps.\n"
863 "--bugreport\n"
864 " Add additional information, such as a timestamp overlay, that is helpful\n"
865 " in videos captured to illustrate bugs.\n"
866 "--time-limit TIME\n"
867 " Set the maximum recording time, in seconds. Default / maximum is %d.\n"
868 "--verbose\n"
869 " Display interesting information on stdout.\n"
870 "--help\n"
871 " Show this message.\n"
872 "\n"
873 "Recording continues until Ctrl-C is hit or the time limit is reached.\n"
874 "\n",
875 kVersionMajor, kVersionMinor, gBitRate / 1000000, gTimeLimitSec
876 );
877 }
878
879 /*
880 * Parses args and kicks things off.
881 */
882 int main(int argc, char* const argv[]) {
883 static const struct option longOptions[] = {
884 { "help", no_argument, NULL, 'h' },
885 { "verbose", no_argument, NULL, 'v' },
886 { "size", required_argument, NULL, 's' },
887 { "bit-rate", required_argument, NULL, 'b' },
888 { "time-limit", required_argument, NULL, 't' },
889 { "bugreport", no_argument, NULL, 'u' },
890 // "unofficial" options
891 { "show-device-info", no_argument, NULL, 'i' },
892 { "show-frame-time", no_argument, NULL, 'f' },
893 { "rotate", no_argument, NULL, 'r' },
894 { "output-format", required_argument, NULL, 'o' },
895 { NULL, 0, NULL, 0 }
896 };
897
898 while (true) {
899 int optionIndex = 0;
900 int ic = getopt_long(argc, argv, "", longOptions, &optionIndex);
901 if (ic == -1) {
902 break;
903 }
904
905 switch (ic) {
906 case 'h':
907 usage();
908 return 0;
909 case 'v':
910 gVerbose = true;
911 break;
912 case 's':
913 if (!parseWidthHeight(optarg, &gVideoWidth, &gVideoHeight)) {
914 fprintf(stderr, "Invalid size '%s', must be width x height\n",
915 optarg);
916 return 2;
917 }
918 if (gVideoWidth == 0 || gVideoHeight == 0) {
919 fprintf(stderr,
920 "Invalid size %ux%u, width and height may not be zero\n",
921 gVideoWidth, gVideoHeight);
922 return 2;
923 }
924 gSizeSpecified = true;
925 break;
926 case 'b':
927 if (parseValueWithUnit(optarg, &gBitRate) != NO_ERROR) {
928 return 2;
929 }
930 if (gBitRate < kMinBitRate || gBitRate > kMaxBitRate) {
931 fprintf(stderr,
932 "Bit rate %dbps outside acceptable range [%d,%d]\n",
933 gBitRate, kMinBitRate, kMaxBitRate);
934 return 2;
935 }
936 break;
937 case 't':
938 gTimeLimitSec = atoi(optarg);
939 if (gTimeLimitSec == 0 || gTimeLimitSec > kMaxTimeLimitSec) {
940 fprintf(stderr,
941 "Time limit %ds outside acceptable range [1,%d]\n",
942 gTimeLimitSec, kMaxTimeLimitSec);
943 return 2;
944 }
945 break;
946 case 'u':
947 gWantInfoScreen = true;
948 gWantFrameTime = true;
949 break;
950 case 'i':
951 gWantInfoScreen = true;
952 break;
953 case 'f':
954 gWantFrameTime = true;
955 break;
956 case 'r':
957 // experimental feature
958 gRotate = true;
959 break;
960 case 'o':
961 if (strcmp(optarg, "mp4") == 0) {
962 gOutputFormat = FORMAT_MP4;
963 } else if (strcmp(optarg, "h264") == 0) {
964 gOutputFormat = FORMAT_H264;
965 } else if (strcmp(optarg, "frames") == 0) {
966 gOutputFormat = FORMAT_FRAMES;
967 } else if (strcmp(optarg, "raw-frames") == 0) {
968 gOutputFormat = FORMAT_RAW_FRAMES;
969 } else {
970 fprintf(stderr, "Unknown format '%s'\n", optarg);
971 return 2;
972 }
973 break;
974 default:
975 if (ic != '?') {
976 fprintf(stderr, "getopt_long returned unexpected value 0x%x\n", ic);
977 }
978 return 2;
979 }
980 }
981
982 if (optind != argc - 1) {
983 fprintf(stderr, "Must specify output file (see --help).\n");
984 return 2;
985 }
986
987 const char* fileName = argv[optind];
988 if (gOutputFormat == FORMAT_MP4) {
989 // MediaMuxer tries to create the file in the constructor, but we don't
990 // learn about the failure until muxer.start(), which returns a generic
991 // error code without logging anything. We attempt to create the file
992 // now for better diagnostics.
993 int fd = open(fileName, O_CREAT | O_RDWR, 0644);
994 if (fd < 0) {
995 fprintf(stderr, "Unable to open '%s': %s\n", fileName, strerror(errno));
996 return 1;
997 }
998 close(fd);
999 }
1000
1001 status_t err = recordScreen(fileName);
1002 if (err == NO_ERROR) {
1003 // Try to notify the media scanner. Not fatal if this fails.
1004 notifyMediaScanner(fileName);
1005 }
1006 ALOGD(err == NO_ERROR ? "success" : "failed");
1007 return (int) err;
1008 }
1009