1 /*
2 * Copyright 2013 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 #include <assert.h>
18 #include <ctype.h>
19 #include <fcntl.h>
20 #include <inttypes.h>
21 #include <getopt.h>
22 #include <signal.h>
23 #include <stdio.h>
24 #include <stdlib.h>
25 #include <string.h>
26 #include <sys/stat.h>
27 #include <sys/types.h>
28 #include <sys/wait.h>
29
30 #include <termios.h>
31 #include <unistd.h>
32
33 #define LOG_TAG "ScreenRecord"
34 #define ATRACE_TAG ATRACE_TAG_GRAPHICS
35 //#define LOG_NDEBUG 0
36 #include <utils/Log.h>
37
38 #include <binder/IPCThreadState.h>
39 #include <utils/Errors.h>
40 #include <utils/SystemClock.h>
41 #include <utils/Timers.h>
42 #include <utils/Trace.h>
43
44 #include <gui/ISurfaceComposer.h>
45 #include <gui/Surface.h>
46 #include <gui/SurfaceComposerClient.h>
47 #include <gui/ISurfaceComposer.h>
48 #include <media/MediaCodecBuffer.h>
49 #include <media/NdkMediaCodec.h>
50 #include <media/NdkMediaFormatPriv.h>
51 #include <media/NdkMediaMuxer.h>
52 #include <media/openmax/OMX_IVCommon.h>
53 #include <media/stagefright/MediaCodec.h>
54 #include <media/stagefright/MediaCodecConstants.h>
55 #include <media/stagefright/MediaErrors.h>
56 #include <media/stagefright/PersistentSurface.h>
57 #include <media/stagefright/foundation/ABuffer.h>
58 #include <media/stagefright/foundation/AMessage.h>
59 #include <mediadrm/ICrypto.h>
60 #include <ui/DisplayMode.h>
61 #include <ui/DisplayState.h>
62
63 #include "screenrecord.h"
64 #include "Overlay.h"
65 #include "FrameOutput.h"
66
67 using android::ABuffer;
68 using android::ALooper;
69 using android::AMessage;
70 using android::AString;
71 using android::ui::DisplayMode;
72 using android::FrameOutput;
73 using android::IBinder;
74 using android::IGraphicBufferProducer;
75 using android::ISurfaceComposer;
76 using android::MediaCodec;
77 using android::MediaCodecBuffer;
78 using android::Overlay;
79 using android::PersistentSurface;
80 using android::PhysicalDisplayId;
81 using android::ProcessState;
82 using android::Rect;
83 using android::String8;
84 using android::SurfaceComposerClient;
85 using android::Vector;
86 using android::sp;
87 using android::status_t;
88
89 using android::INVALID_OPERATION;
90 using android::NAME_NOT_FOUND;
91 using android::NO_ERROR;
92 using android::UNKNOWN_ERROR;
93
94 namespace ui = android::ui;
95
// Hard limits and fallbacks for encoder configuration.
static const uint32_t kMinBitRate = 100000;         // 0.1Mbps
static const uint32_t kMaxBitRate = 200 * 1000000;  // 200Mbps
static const uint32_t kMaxTimeLimitSec = 180;       // 3 minutes
static const uint32_t kFallbackWidth = 1280;        // 720p
static const uint32_t kFallbackHeight = 720;
static const char* kMimeTypeAvc = "video/avc";
static const char* kMimeTypeApplicationOctetstream = "application/octet-stream";
// Magic string prepended to the Winscope metadata track (see writeWinscopeMetadata).
static const char* kWinscopeMagicString = "#VV1NSC0PET1ME!#";

// Command-line parameters.
static bool gVerbose = false;           // chatty on stdout
static bool gRotate = false;            // rotate 90 degrees
static bool gMonotonicTime = false;     // use system monotonic time for timestamps
static bool gPersistentSurface = false; // use persistent surface
static enum {
    FORMAT_MP4, FORMAT_H264, FORMAT_WEBM, FORMAT_3GPP, FORMAT_FRAMES, FORMAT_RAW_FRAMES
} gOutputFormat = FORMAT_MP4;           // data format for output
static AString gCodecName = "";         // codec name override
static bool gSizeSpecified = false;     // was size explicitly requested?
static bool gWantInfoScreen = false;    // do we want initial info screen?
static bool gWantFrameTime = false;     // do we want times on each frame?
static uint32_t gVideoWidth = 0;        // default width+height (0 = use display size)
static uint32_t gVideoHeight = 0;
static uint32_t gBitRate = 20000000;    // 20Mbps
static uint32_t gTimeLimitSec = kMaxTimeLimitSec;
static uint32_t gBframes = 0;           // max B-frames; >0 forces Main profile
static PhysicalDisplayId gPhysicalDisplayId;
// Set by signal handler to stop recording.
static volatile bool gStopRequested = false;

// Previous signal handler state, restored after first hit.
static struct sigaction gOrigSigactionINT;
static struct sigaction gOrigSigactionHUP;
129
130
131 /*
132 * Catch keyboard interrupt signals. On receipt, the "stop requested"
133 * flag is raised, and the original handler is restored (so that, if
134 * we get stuck finishing, a second Ctrl-C will kill the process).
135 */
signalCatcher(int signum)136 static void signalCatcher(int signum)
137 {
138 gStopRequested = true;
139 switch (signum) {
140 case SIGINT:
141 case SIGHUP:
142 sigaction(SIGINT, &gOrigSigactionINT, NULL);
143 sigaction(SIGHUP, &gOrigSigactionHUP, NULL);
144 break;
145 default:
146 abort();
147 break;
148 }
149 }
150
151 /*
152 * Configures signal handlers. The previous handlers are saved.
153 *
154 * If the command is run from an interactive adb shell, we get SIGINT
155 * when Ctrl-C is hit. If we're run from the host, the local adb process
156 * gets the signal, and we get a SIGHUP when the terminal disconnects.
157 */
configureSignals()158 static status_t configureSignals() {
159 struct sigaction act;
160 memset(&act, 0, sizeof(act));
161 act.sa_handler = signalCatcher;
162 if (sigaction(SIGINT, &act, &gOrigSigactionINT) != 0) {
163 status_t err = -errno;
164 fprintf(stderr, "Unable to configure SIGINT handler: %s\n",
165 strerror(errno));
166 return err;
167 }
168 if (sigaction(SIGHUP, &act, &gOrigSigactionHUP) != 0) {
169 status_t err = -errno;
170 fprintf(stderr, "Unable to configure SIGHUP handler: %s\n",
171 strerror(errno));
172 return err;
173 }
174 signal(SIGPIPE, SIG_IGN);
175 return NO_ERROR;
176 }
177
178 /*
179 * Configures and starts the MediaCodec encoder. Obtains an input surface
180 * from the codec.
181 */
static status_t prepareEncoder(float displayFps, sp<MediaCodec>* pCodec,
        sp<IGraphicBufferProducer>* pBufferProducer) {
    status_t err;

    if (gVerbose) {
        printf("Configuring recorder for %dx%d %s at %.2fMbps\n",
                gVideoWidth, gVideoHeight, kMimeTypeAvc, gBitRate / 1000000.0);
        fflush(stdout);
    }

    // Describe the desired stream: AVC at the global size/bitrate, taking
    // opaque GPU buffers from a Surface, keyframe every 10 seconds.
    sp<AMessage> format = new AMessage;
    format->setInt32(KEY_WIDTH, gVideoWidth);
    format->setInt32(KEY_HEIGHT, gVideoHeight);
    format->setString(KEY_MIME, kMimeTypeAvc);
    format->setInt32(KEY_COLOR_FORMAT, OMX_COLOR_FormatAndroidOpaque);
    format->setInt32(KEY_BIT_RATE, gBitRate);
    format->setFloat(KEY_FRAME_RATE, displayFps);
    format->setInt32(KEY_I_FRAME_INTERVAL, 10);
    format->setInt32(KEY_MAX_B_FRAMES, gBframes);
    if (gBframes > 0) {
        // B-frames require at least Main profile; Baseline doesn't allow them.
        format->setInt32(KEY_PROFILE, AVCProfileMain);
        format->setInt32(KEY_LEVEL, AVCLevel41);
    }

    // MediaCodec delivers its callbacks through an ALooper thread.
    sp<android::ALooper> looper = new android::ALooper;
    looper->setName("screenrecord_looper");
    looper->start();
    ALOGV("Creating codec");
    sp<MediaCodec> codec;
    if (gCodecName.empty()) {
        // No override: let the framework pick an encoder for the mime type.
        codec = MediaCodec::CreateByType(looper, kMimeTypeAvc, true);
        if (codec == NULL) {
            fprintf(stderr, "ERROR: unable to create %s codec instance\n",
                    kMimeTypeAvc);
            return UNKNOWN_ERROR;
        }
    } else {
        // User named a specific codec component on the command line.
        codec = MediaCodec::CreateByComponentName(looper, gCodecName);
        if (codec == NULL) {
            fprintf(stderr, "ERROR: unable to create %s codec instance\n",
                    gCodecName.c_str());
            return UNKNOWN_ERROR;
        }
    }

    err = codec->configure(format, NULL, NULL,
            MediaCodec::CONFIGURE_FLAG_ENCODE);
    if (err != NO_ERROR) {
        fprintf(stderr, "ERROR: unable to configure %s codec at %dx%d (err=%d)\n",
                kMimeTypeAvc, gVideoWidth, gVideoHeight, err);
        codec->release();
        return err;
    }

    ALOGV("Creating encoder input surface");
    sp<IGraphicBufferProducer> bufferProducer;
    if (gPersistentSurface) {
        // Persistent surface outlives codec reconfiguration (--persistent-surface).
        sp<PersistentSurface> surface = MediaCodec::CreatePersistentInputSurface();
        bufferProducer = surface->getBufferProducer();
        err = codec->setInputSurface(surface);
    } else {
        err = codec->createInputSurface(&bufferProducer);
    }
    if (err != NO_ERROR) {
        fprintf(stderr,
            "ERROR: unable to %s encoder input surface (err=%d)\n",
            gPersistentSurface ? "set" : "create",
            err);
        codec->release();
        return err;
    }

    ALOGV("Starting codec");
    err = codec->start();
    if (err != NO_ERROR) {
        fprintf(stderr, "ERROR: unable to start codec (err=%d)\n", err);
        codec->release();
        return err;
    }

    ALOGV("Codec prepared");
    // Hand back the running codec and its producer; caller owns release().
    *pCodec = codec;
    *pBufferProducer = bufferProducer;
    return 0;
}
267
268 /*
269 * Sets the display projection, based on the display dimensions, video size,
270 * and device orientation.
271 */
setDisplayProjection(SurfaceComposerClient::Transaction & t,const sp<IBinder> & dpy,const ui::DisplayState & displayState)272 static status_t setDisplayProjection(
273 SurfaceComposerClient::Transaction& t,
274 const sp<IBinder>& dpy,
275 const ui::DisplayState& displayState) {
276 // Set the region of the layer stack we're interested in, which in our case is "all of it".
277 Rect layerStackRect(displayState.layerStackSpaceRect);
278
279 // We need to preserve the aspect ratio of the display.
280 float displayAspect = layerStackRect.getHeight() / static_cast<float>(layerStackRect.getWidth());
281
282
283 // Set the way we map the output onto the display surface (which will
284 // be e.g. 1280x720 for a 720p video). The rect is interpreted
285 // post-rotation, so if the display is rotated 90 degrees we need to
286 // "pre-rotate" it by flipping width/height, so that the orientation
287 // adjustment changes it back.
288 //
289 // We might want to encode a portrait display as landscape to use more
290 // of the screen real estate. (If players respect a 90-degree rotation
291 // hint, we can essentially get a 720x1280 video instead of 1280x720.)
292 // In that case, we swap the configured video width/height and then
293 // supply a rotation value to the display projection.
294 uint32_t videoWidth, videoHeight;
295 uint32_t outWidth, outHeight;
296 if (!gRotate) {
297 videoWidth = gVideoWidth;
298 videoHeight = gVideoHeight;
299 } else {
300 videoWidth = gVideoHeight;
301 videoHeight = gVideoWidth;
302 }
303 if (videoHeight > (uint32_t)(videoWidth * displayAspect)) {
304 // limited by narrow width; reduce height
305 outWidth = videoWidth;
306 outHeight = (uint32_t)(videoWidth * displayAspect);
307 } else {
308 // limited by short height; restrict width
309 outHeight = videoHeight;
310 outWidth = (uint32_t)(videoHeight / displayAspect);
311 }
312 uint32_t offX, offY;
313 offX = (videoWidth - outWidth) / 2;
314 offY = (videoHeight - outHeight) / 2;
315 Rect displayRect(offX, offY, offX + outWidth, offY + outHeight);
316
317 if (gVerbose) {
318 if (gRotate) {
319 printf("Rotated content area is %ux%u at offset x=%d y=%d\n",
320 outHeight, outWidth, offY, offX);
321 fflush(stdout);
322 } else {
323 printf("Content area is %ux%u at offset x=%d y=%d\n",
324 outWidth, outHeight, offX, offY);
325 fflush(stdout);
326 }
327 }
328
329 t.setDisplayProjection(dpy,
330 gRotate ? ui::ROTATION_90 : ui::ROTATION_0,
331 layerStackRect, displayRect);
332 return NO_ERROR;
333 }
334
335 /*
336 * Configures the virtual display. When this completes, virtual display
337 * frames will start arriving from the buffer producer.
338 */
prepareVirtualDisplay(const ui::DisplayState & displayState,const sp<IGraphicBufferProducer> & bufferProducer,sp<IBinder> * pDisplayHandle)339 static status_t prepareVirtualDisplay(
340 const ui::DisplayState& displayState,
341 const sp<IGraphicBufferProducer>& bufferProducer,
342 sp<IBinder>* pDisplayHandle) {
343 sp<IBinder> dpy = SurfaceComposerClient::createDisplay(
344 String8("ScreenRecorder"), false /*secure*/);
345 SurfaceComposerClient::Transaction t;
346 t.setDisplaySurface(dpy, bufferProducer);
347 setDisplayProjection(t, dpy, displayState);
348 t.setDisplayLayerStack(dpy, displayState.layerStack);
349 t.apply();
350
351 *pDisplayHandle = dpy;
352
353 return NO_ERROR;
354 }
355
356 /*
357 * Writes an unsigned integer byte-by-byte in little endian order regardless
358 * of the platform endianness.
359 */
/*
 * Writes an unsigned integer byte-by-byte in little endian order regardless
 * of the platform endianness.
 *
 * @param value   Value to serialize; the low-order byte is written first.
 * @param buffer  Destination; must have room for sizeof(UINT) bytes.
 */
template <typename UINT>
static void writeValueLE(UINT value, uint8_t* buffer) {
    // size_t index: sizeof() is unsigned, so an int index triggers a
    // signed/unsigned comparison warning (-Wsign-compare).
    for (size_t i = 0; i < sizeof(UINT); ++i) {
        buffer[i] = static_cast<uint8_t>(value);
        value >>= 8;
    }
}
367
368 /*
369 * Saves frames presentation time relative to the elapsed realtime clock in microseconds
370 * preceded by a Winscope magic string and frame count to a metadata track.
371 * This metadata is used by the Winscope tool to sync video with SurfaceFlinger
372 * and WindowManager traces.
373 *
374 * The metadata is written as a binary array as follows:
375 * - winscope magic string (kWinscopeMagicString constant), without trailing null char,
376 * - the number of recorded frames (as little endian uint32),
377 * - for every frame its presentation time relative to the elapsed realtime clock in microseconds
378 * (as little endian uint64).
379 */
static status_t writeWinscopeMetadata(const Vector<int64_t>& timestamps,
        const ssize_t metaTrackIdx, AMediaMuxer *muxer) {
    ALOGV("Writing metadata");
    // Offset between CLOCK_MONOTONIC (the encoder's pts base) and the
    // elapsed-realtime clock, in microseconds; Winscope wants the latter.
    int64_t systemTimeToElapsedTimeOffsetMicros = (android::elapsedRealtimeNano()
        - systemTime(SYSTEM_TIME_MONOTONIC)) / 1000;
    // Buffer layout: magic string | frame count (LE u32) | one LE u64 per frame.
    sp<ABuffer> buffer = new ABuffer(timestamps.size() * sizeof(int64_t)
        + sizeof(uint32_t) + strlen(kWinscopeMagicString));
    uint8_t* pos = buffer->data();
    // strcpy's trailing NUL lands in the count field, which is overwritten next.
    strcpy(reinterpret_cast<char*>(pos), kWinscopeMagicString);
    pos += strlen(kWinscopeMagicString);
    writeValueLE<uint32_t>(timestamps.size(), pos);
    pos += sizeof(uint32_t);
    for (size_t idx = 0; idx < timestamps.size(); ++idx) {
        writeValueLE<uint64_t>(static_cast<uint64_t>(timestamps[idx]
            + systemTimeToElapsedTimeOffsetMicros), pos);
        pos += sizeof(uint64_t);
    }
    // NOTE(review): indexes timestamps[0] — caller must guarantee a non-empty
    // vector (runEncoder checks !timestamps.isEmpty() before calling).
    AMediaCodecBufferInfo bufferInfo = {
        0 /* offset */,
        static_cast<int32_t>(buffer->size()),
        timestamps[0] /* presentationTimeUs */,
        0 /* flags */
    };
    return AMediaMuxer_writeSampleData(muxer, metaTrackIdx, buffer->data(), &bufferInfo);
}
405
406 /*
407 * Runs the MediaCodec encoder, sending the output to the MediaMuxer. The
408 * input frames are coming from the virtual display as fast as SurfaceFlinger
409 * wants to send them.
410 *
411 * Exactly one of muxer or rawFp must be non-null.
412 *
413 * The muxer must *not* have been started before calling.
414 */
static status_t runEncoder(const sp<MediaCodec>& encoder,
        AMediaMuxer *muxer, FILE* rawFp, const sp<IBinder>& display,
        const sp<IBinder>& virtualDpy, ui::Rotation orientation) {
    // Dequeue timeout in microseconds: short enough to notice gStopRequested
    // promptly after a signal.
    static int kTimeout = 250000;   // be responsive on signal
    status_t err;
    ssize_t trackIdx = -1;          // video track, assigned on format change
    ssize_t metaTrackIdx = -1;      // Winscope metadata track (MP4 only)
    uint32_t debugNumFrames = 0;
    int64_t startWhenNsec = systemTime(CLOCK_MONOTONIC);
    int64_t endWhenNsec = startWhenNsec + seconds_to_nanoseconds(gTimeLimitSec);
    Vector<int64_t> timestamps;     // per-frame pts for Winscope metadata
    bool firstFrame = true;

    // Exactly one output sink must be provided (see function contract above).
    assert((rawFp == NULL && muxer != NULL) || (rawFp != NULL && muxer == NULL));

    Vector<sp<MediaCodecBuffer> > buffers;
    err = encoder->getOutputBuffers(&buffers);
    if (err != NO_ERROR) {
        fprintf(stderr, "Unable to get output buffers (err=%d)\n", err);
        return err;
    }

    // Run until we're signaled.
    while (!gStopRequested) {
        size_t bufIndex, offset, size;
        int64_t ptsUsec;
        uint32_t flags;

        if (firstFrame) {
            ATRACE_NAME("first_frame");
            firstFrame = false;
        }

        // Enforce the recording time limit.
        if (systemTime(CLOCK_MONOTONIC) > endWhenNsec) {
            if (gVerbose) {
                printf("Time limit reached\n");
                fflush(stdout);
            }
            break;
        }

        ALOGV("Calling dequeueOutputBuffer");
        err = encoder->dequeueOutputBuffer(&bufIndex, &offset, &size, &ptsUsec,
                &flags, kTimeout);
        ALOGV("dequeueOutputBuffer returned %d", err);
        switch (err) {
        case NO_ERROR:
            // got a buffer
            if ((flags & MediaCodec::BUFFER_FLAG_CODECCONFIG) != 0) {
                ALOGV("Got codec config buffer (%zu bytes)", size);
                if (muxer != NULL) {
                    // ignore this -- we passed the CSD into MediaMuxer when
                    // we got the format change notification
                    size = 0;
                }
            }
            if (size != 0) {
                ALOGV("Got data in buffer %zu, size=%zu, pts=%" PRId64,
                        bufIndex, size, ptsUsec);

                { // scope
                    ATRACE_NAME("orientation");
                    // Check orientation, update if it has changed.
                    //
                    // Polling for changes is inefficient and wrong, but the
                    // useful stuff is hard to get at without a Dalvik VM.
                    ui::DisplayState displayState;
                    err = SurfaceComposerClient::getDisplayState(display, &displayState);
                    if (err != NO_ERROR) {
                        ALOGW("getDisplayState() failed: %d", err);
                    } else if (orientation != displayState.orientation) {
                        // Re-project the virtual display so the recording
                        // follows the device rotation.
                        ALOGD("orientation changed, now %s", toCString(displayState.orientation));
                        SurfaceComposerClient::Transaction t;
                        setDisplayProjection(t, virtualDpy, displayState);
                        t.apply();
                        orientation = displayState.orientation;
                    }
                }

                // If the virtual display isn't providing us with timestamps,
                // use the current time. This isn't great -- we could get
                // decoded data in clusters -- but we're not expecting
                // to hit this anyway.
                if (ptsUsec == 0) {
                    ptsUsec = systemTime(SYSTEM_TIME_MONOTONIC) / 1000;
                }

                if (muxer == NULL) {
                    // Raw H.264 output: write the elementary stream directly.
                    fwrite(buffers[bufIndex]->data(), 1, size, rawFp);
                    // Flush the data immediately in case we're streaming.
                    // We don't want to do this if all we've written is
                    // the SPS/PPS data because mplayer gets confused.
                    if ((flags & MediaCodec::BUFFER_FLAG_CODECCONFIG) == 0) {
                        fflush(rawFp);
                    }
                } else {
                    // The MediaMuxer docs are unclear, but it appears that we
                    // need to pass either the full set of BufferInfo flags, or
                    // (flags & BUFFER_FLAG_SYNCFRAME).
                    //
                    // If this blocks for too long we could drop frames. We may
                    // want to queue these up and do them on a different thread.
                    ATRACE_NAME("write sample");
                    // Format-change arrives before the first data buffer, so
                    // the video track must already exist here.
                    assert(trackIdx != -1);
                    // TODO
                    sp<ABuffer> buffer = new ABuffer(
                            buffers[bufIndex]->data(), buffers[bufIndex]->size());
                    AMediaCodecBufferInfo bufferInfo = {
                        0 /* offset */,
                        static_cast<int32_t>(buffer->size()),
                        ptsUsec /* presentationTimeUs */,
                        flags
                    };
                    err = AMediaMuxer_writeSampleData(muxer, trackIdx, buffer->data(), &bufferInfo);
                    if (err != NO_ERROR) {
                        fprintf(stderr,
                                "Failed writing data to muxer (err=%d)\n", err);
                        return err;
                    }
                    if (gOutputFormat == FORMAT_MP4) {
                        // Remember pts for the Winscope metadata track.
                        timestamps.add(ptsUsec);
                    }
                }
                debugNumFrames++;
            }
            err = encoder->releaseOutputBuffer(bufIndex);
            if (err != NO_ERROR) {
                fprintf(stderr, "Unable to release output buffer (err=%d)\n",
                        err);
                return err;
            }
            if ((flags & MediaCodec::BUFFER_FLAG_EOS) != 0) {
                // Not expecting EOS from SurfaceFlinger. Go with it.
                ALOGI("Received end-of-stream");
                gStopRequested = true;
            }
            break;
        case -EAGAIN:           // INFO_TRY_AGAIN_LATER
            // Dequeue timed out; loop again so we can notice stop/time-limit.
            ALOGV("Got -EAGAIN, looping");
            break;
        case android::INFO_FORMAT_CHANGED:    // INFO_OUTPUT_FORMAT_CHANGED
            {
                // Format includes CSD, which we must provide to muxer.
                ALOGV("Encoder format changed");
                sp<AMessage> newFormat;
                encoder->getOutputFormat(&newFormat);
                // TODO remove when MediaCodec has been replaced with AMediaCodec
                AMediaFormat *ndkFormat = AMediaFormat_fromMsg(&newFormat);
                if (muxer != NULL) {
                    trackIdx = AMediaMuxer_addTrack(muxer, ndkFormat);
                    if (gOutputFormat == FORMAT_MP4) {
                        // Add a secondary octet-stream track for Winscope data.
                        AMediaFormat *metaFormat = AMediaFormat_new();
                        AMediaFormat_setString(metaFormat, AMEDIAFORMAT_KEY_MIME, kMimeTypeApplicationOctetstream);
                        metaTrackIdx = AMediaMuxer_addTrack(muxer, metaFormat);
                        AMediaFormat_delete(metaFormat);
                    }
                    ALOGV("Starting muxer");
                    err = AMediaMuxer_start(muxer);
                    if (err != NO_ERROR) {
                        fprintf(stderr, "Unable to start muxer (err=%d)\n", err);
                        return err;
                    }
                }
            }
            break;
        case android::INFO_OUTPUT_BUFFERS_CHANGED:   // INFO_OUTPUT_BUFFERS_CHANGED
            // Not expected for an encoder; handle it anyway.
            ALOGV("Encoder buffers changed");
            err = encoder->getOutputBuffers(&buffers);
            if (err != NO_ERROR) {
                fprintf(stderr,
                        "Unable to get new output buffers (err=%d)\n", err);
                return err;
            }
            break;
        case INVALID_OPERATION:
            ALOGW("dequeueOutputBuffer returned INVALID_OPERATION");
            return err;
        default:
            fprintf(stderr,
                    "Got weird result %d from dequeueOutputBuffer\n", err);
            return err;
        }
    }

    ALOGV("Encoder stopping (req=%d)", gStopRequested);
    if (gVerbose) {
        printf("Encoder stopping; recorded %u frames in %" PRId64 " seconds\n",
                debugNumFrames, nanoseconds_to_seconds(
                        systemTime(CLOCK_MONOTONIC) - startWhenNsec));
        fflush(stdout);
    }
    // Append the Winscope metadata track last, after all video samples.
    if (metaTrackIdx >= 0 && !timestamps.isEmpty()) {
        err = writeWinscopeMetadata(timestamps, metaTrackIdx, muxer);
        if (err != NO_ERROR) {
            fprintf(stderr, "Failed writing metadata to muxer (err=%d)\n", err);
            return err;
        }
    }
    return NO_ERROR;
}
616
617 /*
618 * Raw H.264 byte stream output requested. Send the output to stdout
619 * if desired. If the output is a tty, reconfigure it to avoid the
620 * CRLF line termination that we see with "adb shell" commands.
621 */
prepareRawOutput(const char * fileName)622 static FILE* prepareRawOutput(const char* fileName) {
623 FILE* rawFp = NULL;
624
625 if (strcmp(fileName, "-") == 0) {
626 if (gVerbose) {
627 fprintf(stderr, "ERROR: verbose output and '-' not compatible");
628 return NULL;
629 }
630 rawFp = stdout;
631 } else {
632 rawFp = fopen(fileName, "w");
633 if (rawFp == NULL) {
634 fprintf(stderr, "fopen raw failed: %s\n", strerror(errno));
635 return NULL;
636 }
637 }
638
639 int fd = fileno(rawFp);
640 if (isatty(fd)) {
641 // best effort -- reconfigure tty for "raw"
642 ALOGD("raw video output to tty (fd=%d)", fd);
643 struct termios term;
644 if (tcgetattr(fd, &term) == 0) {
645 cfmakeraw(&term);
646 if (tcsetattr(fd, TCSANOW, &term) == 0) {
647 ALOGD("tty successfully configured for raw");
648 }
649 }
650 }
651
652 return rawFp;
653 }
654
/*
 * Rounds a dimension down to the nearest even value (video encoders reject
 * odd widths/heights).
 */
static inline uint32_t floorToEven(uint32_t num) {
    return num - (num % 2);
}
658
659 /*
660 * Main "do work" start point.
661 *
662 * Configures codec, muxer, and virtual display, then starts moving bits
663 * around.
664 */
static status_t recordScreen(const char* fileName) {
    status_t err;

    // Configure signal handler.
    err = configureSignals();
    if (err != NO_ERROR) return err;

    // Start Binder thread pool. MediaCodec needs to be able to receive
    // messages from mediaserver.
    sp<ProcessState> self = ProcessState::self();
    self->startThreadPool();

    // Get main display parameters.
    sp<IBinder> display = SurfaceComposerClient::getPhysicalDisplayToken(
            gPhysicalDisplayId);
    if (display == nullptr) {
        fprintf(stderr, "ERROR: no display\n");
        return NAME_NOT_FOUND;
    }

    ui::DisplayState displayState;
    err = SurfaceComposerClient::getDisplayState(display, &displayState);
    if (err != NO_ERROR) {
        fprintf(stderr, "ERROR: unable to get display state\n");
        return err;
    }

    DisplayMode displayMode;
    err = SurfaceComposerClient::getActiveDisplayMode(display, &displayMode);
    if (err != NO_ERROR) {
        fprintf(stderr, "ERROR: unable to get display config\n");
        return err;
    }

    const ui::Size& layerStackSpaceRect = displayState.layerStackSpaceRect;
    if (gVerbose) {
        printf("Display is %dx%d @%.2ffps (orientation=%s), layerStack=%u\n",
                layerStackSpaceRect.getWidth(), layerStackSpaceRect.getHeight(),
                displayMode.refreshRate, toCString(displayState.orientation),
                displayState.layerStack);
        fflush(stdout);
    }

    // Encoder can't take odd number as config
    if (gVideoWidth == 0) {
        gVideoWidth = floorToEven(layerStackSpaceRect.getWidth());
    }
    if (gVideoHeight == 0) {
        gVideoHeight = floorToEven(layerStackSpaceRect.getHeight());
    }

    // Configure and start the encoder.
    sp<MediaCodec> encoder;
    sp<FrameOutput> frameOutput;
    sp<IGraphicBufferProducer> encoderInputSurface;
    if (gOutputFormat != FORMAT_FRAMES && gOutputFormat != FORMAT_RAW_FRAMES) {
        err = prepareEncoder(displayMode.refreshRate, &encoder, &encoderInputSurface);

        // If the encoder rejected the display's native size and the user
        // didn't ask for a specific one, retry once at the 720p fallback.
        if (err != NO_ERROR && !gSizeSpecified) {
            // fallback is defined for landscape; swap if we're in portrait
            bool needSwap = gVideoWidth < gVideoHeight;
            uint32_t newWidth = needSwap ? kFallbackHeight : kFallbackWidth;
            uint32_t newHeight = needSwap ? kFallbackWidth : kFallbackHeight;
            if (gVideoWidth != newWidth && gVideoHeight != newHeight) {
                ALOGV("Retrying with 720p");
                fprintf(stderr, "WARNING: failed at %dx%d, retrying at %dx%d\n",
                        gVideoWidth, gVideoHeight, newWidth, newHeight);
                gVideoWidth = newWidth;
                gVideoHeight = newHeight;
                err = prepareEncoder(displayMode.refreshRate, &encoder, &encoderInputSurface);
            }
        }
        if (err != NO_ERROR) return err;

        // From here on, we must explicitly release() the encoder before it goes
        // out of scope, or we will get an assertion failure from stagefright
        // later on in a different thread.
    } else {
        // We're not using an encoder at all. The "encoder input surface" we hand to
        // SurfaceFlinger will just feed directly to us.
        frameOutput = new FrameOutput();
        err = frameOutput->createInputSurface(gVideoWidth, gVideoHeight, &encoderInputSurface);
        if (err != NO_ERROR) {
            return err;
        }
    }

    // Draw the "info" page by rendering a frame with GLES and sending
    // it directly to the encoder.
    // TODO: consider displaying this as a regular layer to avoid b/11697754
    if (gWantInfoScreen) {
        Overlay::drawInfoPage(encoderInputSurface);
    }

    // Configure optional overlay.
    sp<IGraphicBufferProducer> bufferProducer;
    sp<Overlay> overlay;
    if (gWantFrameTime) {
        // Send virtual display frames to an external texture.
        overlay = new Overlay(gMonotonicTime);
        err = overlay->start(encoderInputSurface, &bufferProducer);
        if (err != NO_ERROR) {
            if (encoder != NULL) encoder->release();
            return err;
        }
        if (gVerbose) {
            printf("Bugreport overlay created\n");
            fflush(stdout);
        }
    } else {
        // Use the encoder's input surface as the virtual display surface.
        bufferProducer = encoderInputSurface;
    }

    // Configure virtual display.
    sp<IBinder> dpy;
    err = prepareVirtualDisplay(displayState, bufferProducer, &dpy);
    if (err != NO_ERROR) {
        if (encoder != NULL) encoder->release();
        return err;
    }

    // Open the output sink appropriate for the requested format.
    AMediaMuxer *muxer = nullptr;
    FILE* rawFp = NULL;
    switch (gOutputFormat) {
        case FORMAT_MP4:
        case FORMAT_WEBM:
        case FORMAT_3GPP: {
            // Configure muxer. We have to wait for the CSD blob from the encoder
            // before we can start it.
            err = unlink(fileName);
            if (err != 0 && errno != ENOENT) {
                fprintf(stderr, "ERROR: couldn't remove existing file\n");
                abort();
            }
            int fd = open(fileName, O_CREAT | O_LARGEFILE | O_TRUNC | O_RDWR, S_IRUSR | S_IWUSR);
            if (fd < 0) {
                fprintf(stderr, "ERROR: couldn't open file\n");
                abort();
            }
            if (gOutputFormat == FORMAT_MP4) {
                muxer = AMediaMuxer_new(fd, AMEDIAMUXER_OUTPUT_FORMAT_MPEG_4);
            } else if (gOutputFormat == FORMAT_WEBM) {
                muxer = AMediaMuxer_new(fd, AMEDIAMUXER_OUTPUT_FORMAT_WEBM);
            } else {
                muxer = AMediaMuxer_new(fd, AMEDIAMUXER_OUTPUT_FORMAT_THREE_GPP);
            }
            // The muxer dup()s the fd; we can close our copy immediately.
            close(fd);
            if (gRotate) {
                AMediaMuxer_setOrientationHint(muxer, 90);  // TODO: does this do anything?
            }
            break;
        }
        case FORMAT_H264:
        case FORMAT_FRAMES:
        case FORMAT_RAW_FRAMES: {
            rawFp = prepareRawOutput(fileName);
            if (rawFp == NULL) {
                if (encoder != NULL) encoder->release();
                return -1;
            }
            break;
        }
        default:
            fprintf(stderr, "ERROR: unknown format %d\n", gOutputFormat);
            abort();
    }

    if (gOutputFormat == FORMAT_FRAMES || gOutputFormat == FORMAT_RAW_FRAMES) {
        // TODO: if we want to make this a proper feature, we should output
        // an outer header with version info. Right now we never change
        // the frame size or format, so we could conceivably just send
        // the current frame header once and then follow it with an
        // unbroken stream of data.

        // Make the EGL context current again. This gets unhooked if we're
        // using "--bugreport" mode.
        // TODO: figure out if we can eliminate this
        frameOutput->prepareToCopy();

        while (!gStopRequested) {
            // Poll for frames, the same way we do for MediaCodec. We do
            // all of the work on the main thread.
            //
            // Ideally we'd sleep indefinitely and wake when the
            // stop was requested, but this will do for now. (It almost
            // works because wait() wakes when a signal hits, but we
            // need to handle the edge cases.)
            bool rawFrames = gOutputFormat == FORMAT_RAW_FRAMES;
            err = frameOutput->copyFrame(rawFp, 250000, rawFrames);
            if (err == ETIMEDOUT) {
                // Timeout just means no frame arrived yet; keep polling.
                err = NO_ERROR;
            } else if (err != NO_ERROR) {
                ALOGE("Got error %d from copyFrame()", err);
                break;
            }
        }
    } else {
        // Main encoder loop.
        err = runEncoder(encoder, muxer, rawFp, display, dpy, displayState.orientation);
        if (err != NO_ERROR) {
            fprintf(stderr, "Encoder failed (err=%d)\n", err);
            // fall through to cleanup
        }

        if (gVerbose) {
            printf("Stopping encoder and muxer\n");
            fflush(stdout);
        }
    }

    // Shut everything down, starting with the producer side.
    encoderInputSurface = NULL;
    SurfaceComposerClient::destroyDisplay(dpy);
    if (overlay != NULL) overlay->stop();
    if (encoder != NULL) encoder->stop();
    if (muxer != NULL) {
        // If we don't stop muxer explicitly, i.e. let the destructor run,
        // it may hang (b/11050628).
        err = AMediaMuxer_stop(muxer);
    } else if (rawFp != stdout) {
        fclose(rawFp);
    }
    if (encoder != NULL) encoder->release();

    return err;
}
892
893 /*
894 * Sends a broadcast to the media scanner to tell it about the new video.
895 *
896 * This is optional, but nice to have.
897 */
static status_t notifyMediaScanner(const char* fileName) {
    // need to do allocations before the fork()
    String8 fileUrl("file://");
    fileUrl.append(fileName);

    // Broadcast MEDIA_SCANNER_SCAN_FILE via the "am" command-line tool.
    const char* kCommand = "/system/bin/am";
    const char* const argv[] = {
            kCommand,
            "broadcast",
            "-a",
            "android.intent.action.MEDIA_SCANNER_SCAN_FILE",
            "-d",
            fileUrl.string(),
            NULL
    };
    if (gVerbose) {
        printf("Executing:");
        for (int i = 0; argv[i] != NULL; i++) {
            printf(" %s", argv[i]);
        }
        putchar('\n');
        fflush(stdout);
    }

    pid_t pid = fork();
    if (pid < 0) {
        int err = errno;
        ALOGW("fork() failed: %s", strerror(err));
        return -err;
    } else if (pid > 0) {
        // parent; wait for the child, mostly to make the verbose-mode output
        // look right, but also to check for and log failures
        int status;
        pid_t actualPid = TEMP_FAILURE_RETRY(waitpid(pid, &status, 0));
        if (actualPid != pid) {
            ALOGW("waitpid(%d) returned %d (errno=%d)", pid, actualPid, errno);
        } else if (status != 0) {
            ALOGW("'am broadcast' exited with status=%d", status);
        } else {
            ALOGV("'am broadcast' exited successfully");
        }
    } else {
        // child process
        if (!gVerbose) {
            // non-verbose, suppress 'am' output
            ALOGV("closing stdout/stderr in child");
            int fd = open("/dev/null", O_WRONLY);
            if (fd >= 0) {
                dup2(fd, STDOUT_FILENO);
                dup2(fd, STDERR_FILENO);
                close(fd);
            }
        }
        // Replace the child image with "am"; only returns on failure.
        execv(kCommand, const_cast<char* const*>(argv));
        ALOGE("execv(%s) failed: %s\n", kCommand, strerror(errno));
        exit(1);
    }
    return NO_ERROR;
}
956
957 /*
958 * Parses a string of the form "1280x720".
959 *
960 * Returns true on success.
961 */
parseWidthHeight(const char * widthHeight,uint32_t * pWidth,uint32_t * pHeight)962 static bool parseWidthHeight(const char* widthHeight, uint32_t* pWidth,
963 uint32_t* pHeight) {
964 long width, height;
965 char* end;
966
967 // Must specify base 10, or "0x0" gets parsed differently.
968 width = strtol(widthHeight, &end, 10);
969 if (end == widthHeight || *end != 'x' || *(end+1) == '\0') {
970 // invalid chars in width, or missing 'x', or missing height
971 return false;
972 }
973 height = strtol(end + 1, &end, 10);
974 if (*end != '\0') {
975 // invalid chars in height
976 return false;
977 }
978
979 *pWidth = width;
980 *pHeight = height;
981 return true;
982 }
983
984 /*
985 * Accepts a string with a bare number ("4000000") or with a single-character
986 * unit ("4m").
987 *
988 * Returns an error if parsing fails.
989 */
parseValueWithUnit(const char * str,uint32_t * pValue)990 static status_t parseValueWithUnit(const char* str, uint32_t* pValue) {
991 long value;
992 char* endptr;
993
994 value = strtol(str, &endptr, 10);
995 if (*endptr == '\0') {
996 // bare number
997 *pValue = value;
998 return NO_ERROR;
999 } else if (toupper(*endptr) == 'M' && *(endptr+1) == '\0') {
1000 *pValue = value * 1000000; // check for overflow?
1001 return NO_ERROR;
1002 } else {
1003 fprintf(stderr, "Unrecognized value: %s\n", str);
1004 return UNKNOWN_ERROR;
1005 }
1006 }
1007
1008 /*
1009 * Dumps usage on stderr.
1010 */
usage()1011 static void usage() {
1012 fprintf(stderr,
1013 "Usage: screenrecord [options] <filename>\n"
1014 "\n"
1015 "Android screenrecord v%d.%d. Records the device's display to a .mp4 file.\n"
1016 "\n"
1017 "Options:\n"
1018 "--size WIDTHxHEIGHT\n"
1019 " Set the video size, e.g. \"1280x720\". Default is the device's main\n"
1020 " display resolution (if supported), 1280x720 if not. For best results,\n"
1021 " use a size supported by the AVC encoder.\n"
1022 "--bit-rate RATE\n"
1023 " Set the video bit rate, in bits per second. Value may be specified as\n"
1024 " bits or megabits, e.g. '4000000' is equivalent to '4M'. Default %dMbps.\n"
1025 "--bugreport\n"
1026 " Add additional information, such as a timestamp overlay, that is helpful\n"
1027 " in videos captured to illustrate bugs.\n"
1028 "--time-limit TIME\n"
1029 " Set the maximum recording time, in seconds. Default / maximum is %d.\n"
1030 "--display-id ID\n"
1031 " specify the physical display ID to record. Default is the primary display.\n"
1032 " see \"dumpsys SurfaceFlinger --display-id\" for valid display IDs.\n"
1033 "--verbose\n"
1034 " Display interesting information on stdout.\n"
1035 "--help\n"
1036 " Show this message.\n"
1037 "\n"
1038 "Recording continues until Ctrl-C is hit or the time limit is reached.\n"
1039 "\n",
1040 kVersionMajor, kVersionMinor, gBitRate / 1000000, gTimeLimitSec
1041 );
1042 }
1043
1044 /*
1045 * Parses args and kicks things off.
1046 */
main(int argc,char * const argv[])1047 int main(int argc, char* const argv[]) {
1048 static const struct option longOptions[] = {
1049 { "help", no_argument, NULL, 'h' },
1050 { "verbose", no_argument, NULL, 'v' },
1051 { "size", required_argument, NULL, 's' },
1052 { "bit-rate", required_argument, NULL, 'b' },
1053 { "time-limit", required_argument, NULL, 't' },
1054 { "bugreport", no_argument, NULL, 'u' },
1055 // "unofficial" options
1056 { "show-device-info", no_argument, NULL, 'i' },
1057 { "show-frame-time", no_argument, NULL, 'f' },
1058 { "rotate", no_argument, NULL, 'r' },
1059 { "output-format", required_argument, NULL, 'o' },
1060 { "codec-name", required_argument, NULL, 'N' },
1061 { "monotonic-time", no_argument, NULL, 'm' },
1062 { "persistent-surface", no_argument, NULL, 'p' },
1063 { "bframes", required_argument, NULL, 'B' },
1064 { "display-id", required_argument, NULL, 'd' },
1065 { NULL, 0, NULL, 0 }
1066 };
1067
1068 std::optional<PhysicalDisplayId> displayId = SurfaceComposerClient::getInternalDisplayId();
1069 if (!displayId) {
1070 fprintf(stderr, "Failed to get token for internal display\n");
1071 return 1;
1072 }
1073
1074 gPhysicalDisplayId = *displayId;
1075
1076 while (true) {
1077 int optionIndex = 0;
1078 int ic = getopt_long(argc, argv, "", longOptions, &optionIndex);
1079 if (ic == -1) {
1080 break;
1081 }
1082
1083 switch (ic) {
1084 case 'h':
1085 usage();
1086 return 0;
1087 case 'v':
1088 gVerbose = true;
1089 break;
1090 case 's':
1091 if (!parseWidthHeight(optarg, &gVideoWidth, &gVideoHeight)) {
1092 fprintf(stderr, "Invalid size '%s', must be width x height\n",
1093 optarg);
1094 return 2;
1095 }
1096 if (gVideoWidth == 0 || gVideoHeight == 0) {
1097 fprintf(stderr,
1098 "Invalid size %ux%u, width and height may not be zero\n",
1099 gVideoWidth, gVideoHeight);
1100 return 2;
1101 }
1102 gSizeSpecified = true;
1103 break;
1104 case 'b':
1105 if (parseValueWithUnit(optarg, &gBitRate) != NO_ERROR) {
1106 return 2;
1107 }
1108 if (gBitRate < kMinBitRate || gBitRate > kMaxBitRate) {
1109 fprintf(stderr,
1110 "Bit rate %dbps outside acceptable range [%d,%d]\n",
1111 gBitRate, kMinBitRate, kMaxBitRate);
1112 return 2;
1113 }
1114 break;
1115 case 't':
1116 gTimeLimitSec = atoi(optarg);
1117 if (gTimeLimitSec == 0 || gTimeLimitSec > kMaxTimeLimitSec) {
1118 fprintf(stderr,
1119 "Time limit %ds outside acceptable range [1,%d]\n",
1120 gTimeLimitSec, kMaxTimeLimitSec);
1121 return 2;
1122 }
1123 break;
1124 case 'u':
1125 gWantInfoScreen = true;
1126 gWantFrameTime = true;
1127 break;
1128 case 'i':
1129 gWantInfoScreen = true;
1130 break;
1131 case 'f':
1132 gWantFrameTime = true;
1133 break;
1134 case 'r':
1135 // experimental feature
1136 gRotate = true;
1137 break;
1138 case 'o':
1139 if (strcmp(optarg, "mp4") == 0) {
1140 gOutputFormat = FORMAT_MP4;
1141 } else if (strcmp(optarg, "h264") == 0) {
1142 gOutputFormat = FORMAT_H264;
1143 } else if (strcmp(optarg, "webm") == 0) {
1144 gOutputFormat = FORMAT_WEBM;
1145 } else if (strcmp(optarg, "3gpp") == 0) {
1146 gOutputFormat = FORMAT_3GPP;
1147 } else if (strcmp(optarg, "frames") == 0) {
1148 gOutputFormat = FORMAT_FRAMES;
1149 } else if (strcmp(optarg, "raw-frames") == 0) {
1150 gOutputFormat = FORMAT_RAW_FRAMES;
1151 } else {
1152 fprintf(stderr, "Unknown format '%s'\n", optarg);
1153 return 2;
1154 }
1155 break;
1156 case 'N':
1157 gCodecName = optarg;
1158 break;
1159 case 'm':
1160 gMonotonicTime = true;
1161 break;
1162 case 'p':
1163 gPersistentSurface = true;
1164 break;
1165 case 'B':
1166 if (parseValueWithUnit(optarg, &gBframes) != NO_ERROR) {
1167 return 2;
1168 }
1169 break;
1170 case 'd':
1171 gPhysicalDisplayId = PhysicalDisplayId(atoll(optarg));
1172 if (gPhysicalDisplayId.value == 0) {
1173 fprintf(stderr, "Please specify a valid physical display id\n");
1174 return 2;
1175 } else if (SurfaceComposerClient::
1176 getPhysicalDisplayToken(gPhysicalDisplayId) == nullptr) {
1177 fprintf(stderr, "Invalid physical display id: %s\n",
1178 to_string(gPhysicalDisplayId).c_str());
1179 return 2;
1180 }
1181 break;
1182 default:
1183 if (ic != '?') {
1184 fprintf(stderr, "getopt_long returned unexpected value 0x%x\n", ic);
1185 }
1186 return 2;
1187 }
1188 }
1189
1190 if (optind != argc - 1) {
1191 fprintf(stderr, "Must specify output file (see --help).\n");
1192 return 2;
1193 }
1194
1195 const char* fileName = argv[optind];
1196 if (gOutputFormat == FORMAT_MP4) {
1197 // MediaMuxer tries to create the file in the constructor, but we don't
1198 // learn about the failure until muxer.start(), which returns a generic
1199 // error code without logging anything. We attempt to create the file
1200 // now for better diagnostics.
1201 int fd = open(fileName, O_CREAT | O_RDWR, 0644);
1202 if (fd < 0) {
1203 fprintf(stderr, "Unable to open '%s': %s\n", fileName, strerror(errno));
1204 return 1;
1205 }
1206 close(fd);
1207 }
1208
1209 status_t err = recordScreen(fileName);
1210 if (err == NO_ERROR) {
1211 // Try to notify the media scanner. Not fatal if this fails.
1212 notifyMediaScanner(fileName);
1213 }
1214 ALOGD(err == NO_ERROR ? "success" : "failed");
1215 return (int) err;
1216 }
1217