/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */

package com.android.camera.one.v2;

import android.annotation.TargetApi;
import android.app.Activity;
import android.graphics.ImageFormat;
import android.graphics.Rect;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraMetadata;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.CaptureResult.Key;
import android.hardware.camera2.TotalCaptureResult;
import android.hardware.camera2.params.MeteringRectangle;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.location.Location;
import android.media.CameraProfile;
import android.media.Image;
import android.media.ImageReader;
import android.media.MediaActionSound;
import android.net.Uri;
import android.os.Build;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.SystemClock;
import androidx.core.util.Pools;
import android.view.Surface;

import com.android.camera.CaptureModuleUtil;
import com.android.camera.debug.Log;
import com.android.camera.debug.Log.Tag;
import com.android.camera.exif.ExifInterface;
import com.android.camera.exif.ExifTag;
import com.android.camera.exif.Rational;
import com.android.camera.one.AbstractOneCamera;
import com.android.camera.one.CameraDirectionProvider;
import com.android.camera.one.OneCamera;
import com.android.camera.one.OneCamera.PhotoCaptureParameters.Flash;
import com.android.camera.one.Settings3A;
import com.android.camera.one.v2.ImageCaptureManager.ImageCaptureListener;
import com.android.camera.one.v2.ImageCaptureManager.MetadataChangeListener;
import com.android.camera.one.v2.camera2proxy.AndroidCaptureResultProxy;
import com.android.camera.one.v2.camera2proxy.AndroidImageProxy;
import com.android.camera.one.v2.camera2proxy.CaptureResultProxy;
import com.android.camera.processing.imagebackend.TaskImageContainer;
import com.android.camera.session.CaptureSession;
import com.android.camera.ui.focus.LensRangeCalculator;
import com.android.camera.ui.motion.LinearScale;
import com.android.camera.util.CameraUtil;
import com.android.camera.util.ExifUtil;
import com.android.camera.util.JpegUtilNative;
import com.android.camera.util.ListenerCombiner;
import com.android.camera.util.Size;
import com.google.common.base.Optional;
import com.google.common.util.concurrent.FutureCallback;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.MoreExecutors;

import java.nio.ByteBuffer;
import java.security.InvalidParameterException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

/**
 * {@link OneCamera} implementation directly on top of the Camera2 API with zero
 * shutter lag.<br>
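 * <p>
 * A repeating zero-shutter-lag request continuously streams full-size frames
 * into a ring buffer managed by {@link ImageCaptureManager}; {@link #takePicture}
 * then either saves an acceptable frame already in the buffer or waits for the
 * next acceptable frame to arrive.<br>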
 * TODO: Determine what the maximum number of full YUV capture frames is.
 */
@TargetApi(Build.VERSION_CODES.LOLLIPOP)
@Deprecated
public class OneCameraZslImpl extends AbstractOneCamera {
    private static final Tag TAG = new Tag("OneCameraZslImpl2");

    /** Default JPEG encoding quality. */
    private static final int JPEG_QUALITY =
            CameraProfile.getJpegEncodingQualityParameter(CameraProfile.QUALITY_HIGH);
    /**
     * The maximum number of images to store in the full-size ZSL ring buffer.
     * <br>
     * TODO: Determine this number dynamically based on available memory and the
     * size of frames.
     */
    private static final int MAX_CAPTURE_IMAGES = 12;
    /**
     * True if zero-shutter-lag images should be captured. Some devices produce
     * lower-quality images for the high-frequency stream, so we may wish to
     * disable ZSL in that case.
     */
    private static final boolean ZSL_ENABLED = true;

    /**
     * Tags which may be used in CaptureRequests.
     */
    private static enum RequestTag {
        /**
         * Indicates that the request was explicitly sent for a single
         * high-quality still capture. Unlike other requests, such as the
         * repeating (ZSL) stream and AF/AE triggers, requests with this tag
         * should always be saved.
         */
        EXPLICIT_CAPTURE
    }

    /**
     * Set to ImageFormat.JPEG to use the hardware encoder, or
     * ImageFormat.YUV_420_888 to use the software encoder. No other image
     * formats are supported.
     */
    private static final int sCaptureImageFormat = ImageFormat.YUV_420_888;
    /**
     * Token for callbacks posted to {@link #mCameraHandler} to resume
     * continuous AF.
     */
    private static final String FOCUS_RESUME_CALLBACK_TOKEN = "RESUME_CONTINUOUS_AF";

    /** Zero weight 3A region, to reset regions per API. */
    /*package*/ MeteringRectangle[] ZERO_WEIGHT_3A_REGION = AutoFocusHelper.getZeroWeightRegion();

    /**
     * Thread on which high-priority camera operations, such as grabbing preview
     * frames for the viewfinder, are running.
     */
    private final HandlerThread mCameraThread;
    /** Handler of the {@link #mCameraThread}. */
    private final Handler mCameraHandler;

    /** Thread on which low-priority camera listeners are running. */
    private final HandlerThread mCameraListenerThread;
    private final Handler mCameraListenerHandler;

    /** The characteristics of this camera. */
    private final CameraCharacteristics mCharacteristics;
    /** Converts focus distance units into ratio values */
    private final LinearScale mLensRange;
    /** The underlying Camera2 API camera device. */
    private final CameraDevice mDevice;
    private final CameraDirectionProvider mDirection;

    /**
     * The aspect ratio (width/height) of the full resolution for this camera.
     * Usually the native aspect ratio of this camera.
     */
    private final float mFullSizeAspectRatio;
    /** The Camera2 API capture session currently active. */
    private CameraCaptureSession mCaptureSession;
    /** The surface onto which to render the preview. */
    private Surface mPreviewSurface;
    /** Whether closing of this device has been requested. */
    private volatile boolean mIsClosed = false;

    /** Receives the normal captured images. */
    private final ImageReader mCaptureImageReader;

    /**
     * Maintains a buffer of images and their associated {@link CaptureResult}s.
     */
    private ImageCaptureManager mCaptureManager;

    /**
     * The sensor timestamps (which may not be relative to the system time) of
     * the most recently captured images.
     */
    private final Set<Long> mCapturedImageTimestamps = Collections.synchronizedSet(
            new HashSet<Long>());

    /** Thread pool for performing slow jpeg encoding and saving tasks. */
    private final ThreadPoolExecutor mImageSaverThreadPool;

    /** Pool of native byte buffers on which to store jpeg-encoded images. */
    private final Pools.SynchronizedPool<ByteBuffer> mJpegByteBufferPool =
            new Pools.SynchronizedPool<ByteBuffer>(64);

    /** Current zoom value. 1.0 is no zoom. */
    private float mZoomValue = 1f;
    /** Current crop region: set from mZoomValue. */
    private Rect mCropRegion;
    /** Current AE and AF regions. */
    private MeteringRectangle[] mAFRegions = ZERO_WEIGHT_3A_REGION;
    private MeteringRectangle[] mAERegions = ZERO_WEIGHT_3A_REGION;

    private MediaActionSound mMediaActionSound = new MediaActionSound();

    /**
     * Ready state (typically displayed by the UI shutter-button) depends on two
     * things:<br>
     * <ol>
     * <li>{@link #mCaptureManager} must be ready.</li>
     * <li>We must not be in the process of capturing a single, high-quality
     * image.</li>
     * </ol>
     * See {@link ListenerCombiner} and {@link #mReadyStateManager} for
     * details of how this is managed.
     */
    private static enum ReadyStateRequirement {
        CAPTURE_MANAGER_READY, CAPTURE_NOT_IN_PROGRESS
    }

    /**
     * Handles the thread-safe logic of dispatching whenever the logical AND of
     * these constraints changes.
     */
    private final ListenerCombiner<ReadyStateRequirement>
            mReadyStateManager = new ListenerCombiner<ReadyStateRequirement>(
                    ReadyStateRequirement.class, new ListenerCombiner.StateChangeListener() {
                            @Override
                        public void onStateChange(boolean state) {
                            broadcastReadyState(state);
                        }
                    });

    /**
     * An {@link ImageCaptureListener} which will compress and save an image to
     * disk.
     */
    private class ImageCaptureTask implements ImageCaptureListener {
        private final PhotoCaptureParameters mParams;
        private final CaptureSession mSession;

        public ImageCaptureTask(PhotoCaptureParameters parameters, CaptureSession session) {
            mParams = parameters;
            mSession = session;
        }

        @Override
        public void onImageCaptured(Image image, TotalCaptureResult captureResult) {
            long timestamp = captureResult.get(CaptureResult.SENSOR_TIMESTAMP);

            // We should only capture the image if it hasn't been captured
            // before. Synchronization is necessary since
            // mCapturedImageTimestamps is read & modified elsewhere.
            synchronized (mCapturedImageTimestamps) {
                if (!mCapturedImageTimestamps.contains(timestamp)) {
                    mCapturedImageTimestamps.add(timestamp);
                } else {
                    // There was a more recent (or identical) image which has
                    // begun being saved, so abort.
                    return;
                }

                // Clear out old timestamps from the set.
                // We must keep old timestamps in the set a little longer (a
                // factor of 2 seems adequate) to ensure they are cleared out of
                // the ring buffer before their timestamp is removed from the
                // set.
                long maxTimestamps = MAX_CAPTURE_IMAGES * 2;
                if (mCapturedImageTimestamps.size() > maxTimestamps) {
                    ArrayList<Long> timestamps = new ArrayList<Long>(mCapturedImageTimestamps);
                    Collections.sort(timestamps);
                    for (int i = 0; i < timestamps.size()
                            && mCapturedImageTimestamps.size() > maxTimestamps; i++) {
                        mCapturedImageTimestamps.remove(timestamps.get(i));
                    }
                }
            }

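            // Mark the single capture as no longer in progress so the ready
            // state (see ReadyStateRequirement) can be re-asserted before the
            // slow JPEG compression and save below.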
            mReadyStateManager.setInput(ReadyStateRequirement.CAPTURE_NOT_IN_PROGRESS, true);

            savePicture(image, mParams, mSession, captureResult);
            mParams.callback.onPictureTaken(mSession);
            Log.v(TAG, "Image saved.  Frame number = " + captureResult.getFrameNumber());
        }
    }

    /**
     * Instantiates a new camera based on Camera 2 API.
     *
     * @param device The underlying Camera 2 device.
     * @param characteristics The device's characteristics.
     * @param pictureSize the size of the final image to be taken.
     */
    OneCameraZslImpl(CameraDevice device, CameraCharacteristics characteristics, Size pictureSize) {
        Log.v(TAG, "Creating new OneCameraZslImpl");

        mDevice = device;
        mCharacteristics = characteristics;
        mLensRange = LensRangeCalculator
              .getDiopterToRatioCalculator(characteristics);
        mDirection = new CameraDirectionProvider(mCharacteristics);
        mFullSizeAspectRatio = calculateFullSizeAspectRatio(characteristics);

        mCameraThread = new HandlerThread("OneCamera2");
        // If this thread stalls, it will delay viewfinder frames.
        mCameraThread.setPriority(Thread.MAX_PRIORITY);
        mCameraThread.start();
        mCameraHandler = new Handler(mCameraThread.getLooper());

        mCameraListenerThread = new HandlerThread("OneCamera2-Listener");
        mCameraListenerThread.start();
        mCameraListenerHandler = new Handler(mCameraListenerThread.getLooper());

        // TODO: Encoding on multiple cores results in preview jank due to
        // excessive GC.
        int numEncodingCores = CameraUtil.getNumCpuCores();
        mImageSaverThreadPool = new ThreadPoolExecutor(numEncodingCores, numEncodingCores, 10,
                TimeUnit.SECONDS, new LinkedBlockingQueue<Runnable>());

        mCaptureManager =
                new ImageCaptureManager(MAX_CAPTURE_IMAGES, mCameraListenerHandler,
                        mImageSaverThreadPool);
        mCaptureManager.setCaptureReadyListener(new ImageCaptureManager.CaptureReadyListener() {
                @Override
            public void onReadyStateChange(boolean capturePossible) {
                mReadyStateManager.setInput(ReadyStateRequirement.CAPTURE_MANAGER_READY,
                        capturePossible);
            }
        });

        // Listen for changes to auto focus state and dispatch to
        // mFocusStateListener.
        mCaptureManager.addMetadataChangeListener(CaptureResult.CONTROL_AF_STATE,
                new ImageCaptureManager.MetadataChangeListener() {
                @Override
                    public void onImageMetadataChange(Key<?> key, Object oldValue, Object newValue,
                            CaptureResult result) {
                        FocusStateListener listener = mFocusStateListener;
                        if (listener != null) {
                            listener.onFocusStatusUpdate(
                                    AutoFocusHelper.stateFromCamera2State(
                                            result.get(CaptureResult.CONTROL_AF_STATE)),
                                result.getFrameNumber());
                        }
                    }
                });

        // Allocate the image reader to store all images received from the
        // camera.
        if (pictureSize == null) {
            // TODO The default should be selected by the caller, and
            // pictureSize should never be null.
            pictureSize = getDefaultPictureSize();
        }
        mCaptureImageReader = ImageReader.newInstance(pictureSize.getWidth(),
                pictureSize.getHeight(),
                sCaptureImageFormat, MAX_CAPTURE_IMAGES);

        mCaptureImageReader.setOnImageAvailableListener(mCaptureManager, mCameraHandler);
        mMediaActionSound.load(MediaActionSound.SHUTTER_CLICK);
    }

    @Override
    public void setFocusDistanceListener(FocusDistanceListener focusDistanceListener) {
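        // The LENS_FOCUS_DISTANCE metadata listener only needs to be registered
        // once; later calls to this method simply replace the delegate it
        // forwards to.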
        if (mFocusDistanceListener == null) {
            mCaptureManager.addMetadataChangeListener(CaptureResult.LENS_FOCUS_DISTANCE,
                  new ImageCaptureManager.MetadataChangeListener() {
                      @Override
                      public void onImageMetadataChange(Key<?> key, Object oldValue,
                            Object newValue,
                            CaptureResult result) {
                          Integer state = result.get(CaptureResult.LENS_STATE);

                          // Forward the new focus distance if one is present and
                          // the camera either (a) does not report lens state or
                          // (b) reports the lens as moving.
                          if (newValue != null &&
                                (state == null || state == CameraMetadata.LENS_STATE_MOVING)) {
                              mFocusDistanceListener.onFocusDistance((float) newValue, mLensRange);
                          }
                      }
                  });
        }
        mFocusDistanceListener = focusDistanceListener;
    }

    /**
     * @return The largest supported picture size.
     */
    public Size getDefaultPictureSize() {
        StreamConfigurationMap configs =
                mCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
        android.util.Size[] supportedSizes = configs.getOutputSizes(sCaptureImageFormat);

        // Find the largest supported size.
        android.util.Size largestSupportedSize = supportedSizes[0];
        long largestSupportedSizePixels =
                largestSupportedSize.getWidth() * largestSupportedSize.getHeight();
        for (int i = 0; i < supportedSizes.length; i++) {
            long numPixels = supportedSizes[i].getWidth() * supportedSizes[i].getHeight();
            if (numPixels > largestSupportedSizePixels) {
                largestSupportedSize = supportedSizes[i];
                largestSupportedSizePixels = numPixels;
            }
        }

        return new Size(largestSupportedSize.getWidth(), largestSupportedSize.getHeight());
    }

    private void onShutterInvokeUI(final PhotoCaptureParameters params) {
        // Tell CaptureModule shutter has occurred so it can flash the screen.
        params.callback.onQuickExpose();
        // Play shutter click sound.
        mMediaActionSound.play(MediaActionSound.SHUTTER_CLICK);
    }

    /**
     * Take a picture.
     */
    @Override
    public void takePicture(final PhotoCaptureParameters params, final CaptureSession session) {
        mReadyStateManager.setInput(ReadyStateRequirement.CAPTURE_NOT_IN_PROGRESS, false);

        boolean useZSL = ZSL_ENABLED;

        // We will only capture images from the zsl ring-buffer which satisfy
        // this constraint.
        ArrayList<ImageCaptureManager.CapturedImageConstraint> zslConstraints =
                new ArrayList<ImageCaptureManager.CapturedImageConstraint>();
        zslConstraints.add(new ImageCaptureManager.CapturedImageConstraint() {
                @Override
            public boolean satisfiesConstraint(TotalCaptureResult captureResult) {
                Long timestamp = captureResult.get(CaptureResult.SENSOR_TIMESTAMP);
                Integer lensState = captureResult.get(CaptureResult.LENS_STATE);
                Integer flashState = captureResult.get(CaptureResult.FLASH_STATE);
                Integer flashMode = captureResult.get(CaptureResult.FLASH_MODE);
                Integer aeState = captureResult.get(CaptureResult.CONTROL_AE_STATE);
                Integer afState = captureResult.get(CaptureResult.CONTROL_AF_STATE);
                Integer awbState = captureResult.get(CaptureResult.CONTROL_AWB_STATE);

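                // Treat any missing metadata as a benign value so the checks
                // below only reject frames that are known to be bad.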
                if (lensState == null) {
                    lensState = CaptureResult.LENS_STATE_STATIONARY;
                }
                if (flashState == null) {
                    flashState = CaptureResult.FLASH_STATE_UNAVAILABLE;
                }
                if (flashMode == null) {
                    flashMode = CaptureResult.FLASH_MODE_OFF;
                }
                if (aeState == null) {
                    aeState = CaptureResult.CONTROL_AE_STATE_INACTIVE;
                }
                if (afState == null) {
                    afState = CaptureResult.CONTROL_AF_STATE_INACTIVE;
                }
                if (awbState == null) {
                    awbState = CaptureResult.CONTROL_AWB_STATE_INACTIVE;
                }

                synchronized (mCapturedImageTimestamps) {
                    if (mCapturedImageTimestamps.contains(timestamp)) {
                        // Don't save frames which we've already saved.
                        return false;
                    }
                }

                if (lensState == CaptureResult.LENS_STATE_MOVING) {
                    // If we know the lens was moving, don't use this image.
                    return false;
                }

                if (aeState == CaptureResult.CONTROL_AE_STATE_SEARCHING
                        || aeState == CaptureResult.CONTROL_AE_STATE_PRECAPTURE) {
                    return false;
                }

                if (afState == CaptureResult.CONTROL_AF_STATE_ACTIVE_SCAN
                        || afState == CaptureResult.CONTROL_AF_STATE_PASSIVE_SCAN) {
                    return false;
                }

                if (awbState == CaptureResult.CONTROL_AWB_STATE_SEARCHING) {
                    return false;
                }

                return true;
            }
        });
        // This constraint lets us capture images which have been explicitly
        // requested. See {@link RequestTag.EXPLICIT_CAPTURE}.
        ArrayList<ImageCaptureManager.CapturedImageConstraint> singleCaptureConstraint =
                new ArrayList<ImageCaptureManager.CapturedImageConstraint>();
        singleCaptureConstraint.add(new ImageCaptureManager.CapturedImageConstraint() {
                @Override
            public boolean satisfiesConstraint(TotalCaptureResult captureResult) {
                Object tag = captureResult.getRequest().getTag();
                return tag == RequestTag.EXPLICIT_CAPTURE;
            }
        });

        // If we can use ZSL, try to save a previously-captured frame, if an
        // acceptable one exists in the buffer.
        if (useZSL) {
            boolean capturedPreviousFrame = mCaptureManager.tryCaptureExistingImage(
                    new ImageCaptureTask(params, session), zslConstraints);
            if (capturedPreviousFrame) {
                Log.v(TAG, "Saving previous frame");
                onShutterInvokeUI(params);
            } else {
                Log.v(TAG, "No good image available. Capturing next available good image.");
                // If there was no good frame available in the ring buffer
                // already, capture the next good image.
                // TODO Disable the shutter button until this image is captured.

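                // NOTE: Flash is currently hard-coded to OFF here, so the
                // AE-precapture/flash branch below is never taken and we always
                // fall through to waiting for the next acceptable ZSL frame.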
                Flash flashMode = Flash.OFF;

                if (flashMode == Flash.ON || flashMode == Flash.AUTO) {
                    // We must issue a request for a single capture using the
                    // flash, including an AE precapture trigger.

                    // The following sets up a sequence of events which will
                    // occur in reverse order to the associated method
                    // calls:
                    // 1. Send a request to trigger the Auto Exposure Precapture
                    // 2. Wait for the AE_STATE to leave the PRECAPTURE state,
                    // and then send a request for a single image, with the
                    // appropriate flash settings.
                    // 3. Capture the next appropriate image, which should be
                    // the one we requested in (2).

                    mCaptureManager.captureNextImage(new ImageCaptureTask(params, session),
                            singleCaptureConstraint);

                    mCaptureManager.addMetadataChangeListener(CaptureResult.CONTROL_AE_STATE,
                            new MetadataChangeListener() {
                            @Override
                                public void onImageMetadataChange(Key<?> key, Object oldValue,
                                        Object newValue,
                                        CaptureResult result) {
                                    Log.v(TAG, "AE State Changed");
                                    if (oldValue.equals(Integer.valueOf(
                                            CaptureResult.CONTROL_AE_STATE_PRECAPTURE))) {
                                        mCaptureManager.removeMetadataChangeListener(key, this);
                                        sendSingleRequest(params);
                                        // TODO: Delay this until
                                        // onCaptureStarted().
                                        onShutterInvokeUI(params);
                                    }
                                }
                            });

                    sendAutoExposureTriggerRequest(flashMode);
                } else {
                    // We may get here if, for example, the auto focus is in the
                    // middle of a scan.
                    // If the flash is off, we should just wait for the next
                    // image that arrives. This will have minimal delay since we
                    // do not need to send a new capture request.
                    mCaptureManager.captureNextImage(new ImageCaptureTask(params, session),
                            zslConstraints);
                }
            }
        } else {
            // TODO If we can't save a previous frame, create a new capture
            // request to do what we need (e.g. flash) and call
            // captureNextImage().
            throw new UnsupportedOperationException("Non-ZSL capture not yet supported");
        }
    }

    @Override
    public void startPreview(Surface previewSurface, CaptureReadyCallback listener) {
        mPreviewSurface = previewSurface;
        setupAsync(mPreviewSurface, listener);
    }

    @Override
    public void close() {
        if (mIsClosed) {
            Log.w(TAG, "Camera is already closed.");
            return;
        }
        try {
            mCaptureSession.stopRepeating();
        } catch (CameraAccessException e) {
            Log.e(TAG, "Could not abort captures in progress.");
        }
        mIsClosed = true;
        mCameraThread.quitSafely();
        mDevice.close();
        mCaptureManager.close();
        mCaptureImageReader.close();
    }

    public Size[] getSupportedPreviewSizes() {
        StreamConfigurationMap config =
                mCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
        return Size.convert(config.getOutputSizes(sCaptureImageFormat));
    }

    public float getFullSizeAspectRatio() {
        return mFullSizeAspectRatio;
    }

    @Override
    public Facing getDirection() {
        return mDirection.getDirection();
    }


    private void savePicture(Image image, final PhotoCaptureParameters captureParams,
            CaptureSession session, CaptureResult result) {
        int heading = captureParams.heading;
        int degrees = CameraUtil.getJpegRotation(captureParams.orientation, mCharacteristics);

        ExifInterface exif = new ExifInterface();
        // TODO: Add more exif tags here.

        Size size = getImageSizeForOrientation(image.getWidth(), image.getHeight(),
                degrees);

        exif.setTag(exif.buildTag(ExifInterface.TAG_PIXEL_X_DIMENSION, size.getWidth()));
        exif.setTag(exif.buildTag(ExifInterface.TAG_PIXEL_Y_DIMENSION, size.getHeight()));

        exif.setTag(
                exif.buildTag(ExifInterface.TAG_ORIENTATION, ExifInterface.Orientation.TOP_LEFT));

        // Set GPS heading direction based on sensor, if location is on.
        if (heading >= 0) {
            ExifTag directionRefTag = exif.buildTag(ExifInterface.TAG_GPS_IMG_DIRECTION_REF,
                    ExifInterface.GpsTrackRef.MAGNETIC_DIRECTION);
            ExifTag directionTag =
                    exif.buildTag(ExifInterface.TAG_GPS_IMG_DIRECTION, new Rational(heading, 1));
            exif.setTag(directionRefTag);
            exif.setTag(directionTag);
        }
        new ExifUtil(exif).populateExif(Optional.<TaskImageContainer.TaskImage>absent(),
                Optional.of((CaptureResultProxy) new AndroidCaptureResultProxy(result)),
                Optional.<Location>absent());
        ListenableFuture<Optional<Uri>> futureUri = session.saveAndFinish(
                acquireJpegBytes(image, degrees),
                size.getWidth(), size.getHeight(), 0, exif);
        Futures.addCallback(futureUri, new FutureCallback<Optional<Uri>>() {
            @Override
            public void onSuccess(Optional<Uri> uriOptional) {
                captureParams.callback.onPictureSaved(uriOptional.orNull());
            }

            @Override
            public void onFailure(Throwable throwable) {
                captureParams.callback.onPictureSaved(null);
            }
        }, MoreExecutors.directExecutor());
    }

    /**
     * Asynchronously sets up the capture session.
     *
     * @param previewSurface the surface onto which the preview should be
     *            rendered.
     * @param listener called when setup is completed.
     */
    private void setupAsync(final Surface previewSurface, final CaptureReadyCallback listener) {
        mCameraHandler.post(new Runnable() {
                @Override
            public void run() {
                setup(previewSurface, listener);
            }
        });
    }

    /**
     * Configures and attempts to create a capture session.
     *
     * @param previewSurface the surface onto which the preview should be
     *            rendered.
     * @param listener called when the setup is completed.
     */
    private void setup(Surface previewSurface, final CaptureReadyCallback listener) {
        try {
            if (mCaptureSession != null) {
                mCaptureSession.abortCaptures();
                mCaptureSession = null;
            }
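            // The session streams to two outputs: the preview surface and the
            // full-size ImageReader that feeds the ZSL ring buffer.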
            List<Surface> outputSurfaces = new ArrayList<Surface>(2);
            outputSurfaces.add(previewSurface);
            outputSurfaces.add(mCaptureImageReader.getSurface());

            mDevice.createCaptureSession(outputSurfaces, new CameraCaptureSession.StateCallback() {
                    @Override
                public void onConfigureFailed(CameraCaptureSession session) {
                    listener.onSetupFailed();
                }

                    @Override
                public void onConfigured(CameraCaptureSession session) {
                    mCaptureSession = session;
                    mAFRegions = ZERO_WEIGHT_3A_REGION;
                    mAERegions = ZERO_WEIGHT_3A_REGION;
                    mZoomValue = 1f;
                    mCropRegion = cropRegionForZoom(mZoomValue);
                    boolean success = sendRepeatingCaptureRequest();
                    if (success) {
                        mReadyStateManager.setInput(ReadyStateRequirement.CAPTURE_NOT_IN_PROGRESS,
                                true);
                        mReadyStateManager.notifyListeners();
                        listener.onReadyForCapture();
                    } else {
                        listener.onSetupFailed();
                    }
                }

                    @Override
                public void onClosed(CameraCaptureSession session) {
                    super.onClosed(session);
                }
            }, mCameraHandler);
        } catch (CameraAccessException ex) {
            Log.e(TAG, "Could not set up capture session", ex);
            listener.onSetupFailed();
        }
    }

    private void addRegionsToCaptureRequestBuilder(CaptureRequest.Builder builder) {
        builder.set(CaptureRequest.CONTROL_AE_REGIONS, mAERegions);
        builder.set(CaptureRequest.CONTROL_AF_REGIONS, mAFRegions);
        builder.set(CaptureRequest.SCALER_CROP_REGION, mCropRegion);
    }

    private void addFlashToCaptureRequestBuilder(CaptureRequest.Builder builder, Flash flashMode) {
        switch (flashMode) {
            case ON:
                builder.set(CaptureRequest.CONTROL_AE_MODE,
                        CaptureRequest.CONTROL_AE_MODE_ON_ALWAYS_FLASH);
                builder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_SINGLE);
                break;
            case OFF:
                builder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON);
                builder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_OFF);
                break;
            case AUTO:
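                // With CONTROL_AE_MODE_ON_AUTO_FLASH the auto-exposure routine
                // decides when to fire the flash, so FLASH_MODE is left unset.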
                builder.set(CaptureRequest.CONTROL_AE_MODE,
                        CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);
                break;
        }
    }

    /**
     * Request a stream of images.
     *
     * @return true if successful, false if there was an error submitting the
     *         capture request.
     */
    private boolean sendRepeatingCaptureRequest() {
        Log.v(TAG, "sendRepeatingCaptureRequest()");
        try {
            CaptureRequest.Builder builder;
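            // With ZSL enabled, use the zero-shutter-lag template and stream
            // full-size frames to the capture ImageReader in addition to the
            // preview surface; otherwise this is a plain preview request.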
            if (ZSL_ENABLED) {
                builder = mDevice.createCaptureRequest(CameraDevice.TEMPLATE_ZERO_SHUTTER_LAG);
            } else {
                builder = mDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
            }

            builder.addTarget(mPreviewSurface);

            if (ZSL_ENABLED) {
                builder.addTarget(mCaptureImageReader.getSurface());
            }

            builder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO);

            builder.set(CaptureRequest.CONTROL_AF_MODE,
                    CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
            builder.set(CaptureRequest.CONTROL_AF_TRIGGER, CaptureRequest.CONTROL_AF_TRIGGER_IDLE);

            builder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON);
            builder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_OFF);

            addRegionsToCaptureRequestBuilder(builder);

            mCaptureSession.setRepeatingRequest(builder.build(), mCaptureManager, mCameraHandler);
            return true;
        } catch (CameraAccessException e) {
            if (ZSL_ENABLED) {
                Log.v(TAG, "Could not execute zero-shutter-lag repeating request.", e);
            } else {
                Log.v(TAG, "Could not execute preview request.", e);
            }
            return false;
        }
    }

    /**
     * Request a single image.
     *
     * @return true if successful, false if there was an error submitting the
     *         capture request.
     */
    private boolean sendSingleRequest(OneCamera.PhotoCaptureParameters params) {
        Log.v(TAG, "sendSingleRequest()");
        try {
            CaptureRequest.Builder builder;
            builder = mDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);

            builder.addTarget(mPreviewSurface);

            // Always add this surface for single image capture requests.
            builder.addTarget(mCaptureImageReader.getSurface());

            builder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO);

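            // Flash is currently hard-coded to OFF for explicit captures;
            // addFlashToCaptureRequestBuilder() translates this into the
            // corresponding AE and flash modes.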
            Flash flashMode = Flash.OFF;
            addFlashToCaptureRequestBuilder(builder, flashMode);
            addRegionsToCaptureRequestBuilder(builder);

            builder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_AUTO);
            builder.set(CaptureRequest.CONTROL_AF_TRIGGER, CaptureRequest.CONTROL_AF_TRIGGER_IDLE);

            // Tag this as a special request which should be saved.
            builder.setTag(RequestTag.EXPLICIT_CAPTURE);

            if (sCaptureImageFormat == ImageFormat.JPEG) {
                builder.set(CaptureRequest.JPEG_QUALITY, (byte) (JPEG_QUALITY));
                builder.set(CaptureRequest.JPEG_ORIENTATION,
                        CameraUtil.getJpegRotation(params.orientation, mCharacteristics));
            }

            mCaptureSession.capture(builder.build(), mCaptureManager, mCameraHandler);
            return true;
        } catch (CameraAccessException e) {
            Log.v(TAG, "Could not execute single still capture request.", e);
            return false;
        }
    }

    private boolean sendRepeatingBurstCaptureRequest() {
        Log.v(TAG, "sendRepeatingBurstCaptureRequest()");
        try {
            CaptureRequest.Builder builder;
            builder = mDevice.createCaptureRequest(CameraDevice.TEMPLATE_VIDEO_SNAPSHOT);
            builder.addTarget(mPreviewSurface);

            if (ZSL_ENABLED) {
                builder.addTarget(mCaptureImageReader.getSurface());
            }

            builder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO);
            builder.set(CaptureRequest.CONTROL_AF_MODE,
                    CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_VIDEO);
            builder.set(CaptureRequest.CONTROL_AF_TRIGGER, CaptureRequest.CONTROL_AF_TRIGGER_IDLE);

            builder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON);
            builder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_OFF);

            addRegionsToCaptureRequestBuilder(builder);

            mCaptureSession.setRepeatingRequest(builder.build(), mCaptureManager, mCameraHandler);
            return true;
        } catch (CameraAccessException e) {
            Log.v(TAG, "Could not send repeating burst capture request.", e);
            return false;
        }
    }

    private boolean sendAutoExposureTriggerRequest(Flash flashMode) {
        Log.v(TAG, "sendAutoExposureTriggerRequest()");
        try {
            CaptureRequest.Builder builder;
            if (ZSL_ENABLED) {
                builder = mDevice.createCaptureRequest(CameraDevice.TEMPLATE_ZERO_SHUTTER_LAG);
            } else {
                builder = mDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
            }

            builder.addTarget(mPreviewSurface);

            if (ZSL_ENABLED) {
                builder.addTarget(mCaptureImageReader.getSurface());
            }

            builder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO);

            builder.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
                    CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_START);

            addRegionsToCaptureRequestBuilder(builder);
            addFlashToCaptureRequestBuilder(builder, flashMode);

            mCaptureSession.capture(builder.build(), mCaptureManager, mCameraHandler);

            return true;
        } catch (CameraAccessException e) {
            Log.v(TAG, "Could not execute auto exposure trigger request.", e);
            return false;
        }
    }

    /**
     * Request a single frame with an auto-focus trigger to start an AF scan.
     */
    private boolean sendAutoFocusTriggerRequest() {
        Log.v(TAG, "sendAutoFocusTriggerRequest()");
        try {
            CaptureRequest.Builder builder;
            if (ZSL_ENABLED) {
                builder = mDevice.createCaptureRequest(CameraDevice.TEMPLATE_ZERO_SHUTTER_LAG);
            } else {
                builder = mDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
            }

            builder.addTarget(mPreviewSurface);

            if (ZSL_ENABLED) {
                builder.addTarget(mCaptureImageReader.getSurface());
            }

            builder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO);

            addRegionsToCaptureRequestBuilder(builder);

            builder.set(CaptureRequest.CONTROL_AF_MODE, CameraMetadata.CONTROL_AF_MODE_AUTO);
            builder.set(CaptureRequest.CONTROL_AF_TRIGGER, CaptureRequest.CONTROL_AF_TRIGGER_START);

            mCaptureSession.capture(builder.build(), mCaptureManager, mCameraHandler);

            return true;
        } catch (CameraAccessException e) {
            Log.v(TAG, "Could not execute auto focus trigger request.", e);
            return false;
        }
    }

    /**
     * Like {@link #sendRepeatingCaptureRequest()}, but with the focus held
     * constant.
     *
     * @return true if successful, false if there was an error submitting the
     *         capture request.
     */
    private boolean sendAutoFocusHoldRequest() {
        Log.v(TAG, "sendAutoFocusHoldRequest()");
        try {
            CaptureRequest.Builder builder;
            if (ZSL_ENABLED) {
                builder = mDevice.createCaptureRequest(CameraDevice.TEMPLATE_ZERO_SHUTTER_LAG);
            } else {
                builder = mDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
            }

            builder.addTarget(mPreviewSurface);

            if (ZSL_ENABLED) {
                builder.addTarget(mCaptureImageReader.getSurface());
            }

            builder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);

            builder.set(CaptureRequest.CONTROL_AF_MODE, CameraMetadata.CONTROL_AF_MODE_AUTO);
            builder.set(CaptureRequest.CONTROL_AF_TRIGGER, CaptureRequest.CONTROL_AF_TRIGGER_IDLE);

            addRegionsToCaptureRequestBuilder(builder);
            // TODO: This should fire the torch, if appropriate.

            mCaptureSession.setRepeatingRequest(builder.build(), mCaptureManager, mCameraHandler);

            return true;
        } catch (CameraAccessException e) {
            Log.v(TAG, "Could not execute auto focus hold request.", e);
            return false;
        }
    }

    /**
     * Calculate the aspect ratio of the full size capture on this device.
     *
     * @param characteristics the characteristics of the camera device.
     * @return The aspect ratio, in terms of width/height of the full capture
     *         size.
     */
    private static float calculateFullSizeAspectRatio(CameraCharacteristics characteristics) {
        Rect activeArraySize =
                characteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
        return ((float) activeArraySize.width()) / activeArraySize.height();
    }

    /**
     * @param originalWidth the width of the original image captured from the
     *            camera
     * @param originalHeight the height of the original image captured from the
     *            camera
     * @param orientation the rotation to apply, in degrees.
     * @return The size of the final rotated image
     */
    private Size getImageSizeForOrientation(int originalWidth, int originalHeight,
            int orientation) {
        if (orientation == 0 || orientation == 180) {
            return new Size(originalWidth, originalHeight);
        } else if (orientation == 90 || orientation == 270) {
            return new Size(originalHeight, originalWidth);
        } else {
            throw new InvalidParameterException("Orientation not supported.");
        }
    }

    /**
     * Extracts the JPEG bytes from the given image, compressing the image
     * first if it is in YUV format.
     *
     * @param img the image from which to extract jpeg bytes or compress to
     *            jpeg.
     * @param degrees the angle to rotate the image clockwise, in degrees. Rotation is
     *            only applied to YUV images.
     * @return The bytes of the JPEG image. Newly allocated.
     */
    private byte[] acquireJpegBytes(Image img, int degrees) {
        ByteBuffer buffer;

        if (img.getFormat() == ImageFormat.JPEG) {
            Image.Plane plane0 = img.getPlanes()[0];
            buffer = plane0.getBuffer();

            byte[] imageBytes = new byte[buffer.remaining()];
            buffer.get(imageBytes);
            buffer.rewind();
            return imageBytes;
        } else if (img.getFormat() == ImageFormat.YUV_420_888) {
            buffer = mJpegByteBufferPool.acquire();
            if (buffer == null) {
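                // No pooled buffer available: allocate a direct buffer sized at
                // 3 bytes per pixel, a generous upper bound for the compressed
                // JPEG output.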
                buffer = ByteBuffer.allocateDirect(img.getWidth() * img.getHeight() * 3);
            }

            int numBytes = JpegUtilNative.compressJpegFromYUV420Image(
                    new AndroidImageProxy(img), buffer, JPEG_QUALITY,
                    degrees);

            if (numBytes < 0) {
                throw new RuntimeException("Error compressing jpeg.");
            }

            buffer.limit(numBytes);

            byte[] imageBytes = new byte[buffer.remaining()];
            buffer.get(imageBytes);

            buffer.clear();
            mJpegByteBufferPool.release(buffer);

            return imageBytes;
        } else {
            throw new RuntimeException("Unsupported image format.");
        }
    }

    private void startAFCycle() {
        // Clean up any existing AF cycle's pending callbacks.
        mCameraHandler.removeCallbacksAndMessages(FOCUS_RESUME_CALLBACK_TOKEN);

        // Send a single CONTROL_AF_TRIGGER_START capture request.
        sendAutoFocusTriggerRequest();

        // Immediately send a request for a regular preview stream, but with
        // CONTROL_AF_MODE_AUTO set so that the focus remains constant after the
        // AF cycle completes.
        sendAutoFocusHoldRequest();

        // Wait for Settings3A.getFocusHoldMillis() milliseconds before sending
        // a request to resume the regular preview stream.
        mCameraHandler.postAtTime(new Runnable() {
                @Override
            public void run() {
                mAERegions = ZERO_WEIGHT_3A_REGION;
                mAFRegions = ZERO_WEIGHT_3A_REGION;
                sendRepeatingCaptureRequest();
            }
        }, FOCUS_RESUME_CALLBACK_TOKEN,
                SystemClock.uptimeMillis() + Settings3A.getFocusHoldMillis());
    }

    /**
     * @see com.android.camera.one.OneCamera#triggerFocusAndMeterAtPoint(float,
     *      float)
     */
    @Override
    public void triggerFocusAndMeterAtPoint(float nx, float ny) {
        int sensorOrientation = mCharacteristics.get(
            CameraCharacteristics.SENSOR_ORIENTATION);
        mAERegions = AutoFocusHelper.aeRegionsForNormalizedCoord(nx, ny, mCropRegion, sensorOrientation);
        mAFRegions = AutoFocusHelper.afRegionsForNormalizedCoord(nx, ny, mCropRegion, sensorOrientation);

        startAFCycle();
    }

    @Override
    public Size pickPreviewSize(Size pictureSize, Activity context) {
        if (pictureSize == null) {
            // TODO The default should be selected by the caller, and
            // pictureSize should never be null.
            pictureSize = getDefaultPictureSize();
        }
        float pictureAspectRatio = pictureSize.getWidth() / (float) pictureSize.getHeight();
        return CaptureModuleUtil.getOptimalPreviewSize(getSupportedPreviewSizes(),
              pictureAspectRatio, context);
    }

    @Override
    public float getMaxZoom() {
        return mCharacteristics.get(CameraCharacteristics.SCALER_AVAILABLE_MAX_DIGITAL_ZOOM);
    }

    @Override
    public void setZoom(float zoom) {
        mZoomValue = zoom;
        mCropRegion = cropRegionForZoom(zoom);
        sendRepeatingCaptureRequest();
    }

    private Rect cropRegionForZoom(float zoom) {
        return AutoFocusHelper.cropRegionForZoom(mCharacteristics, zoom);
    }
}