/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.hardware.camera2.impl;

import android.annotation.NonNull;
import android.compat.annotation.UnsupportedAppUsage;
import android.graphics.ImageFormat;
import android.graphics.Point;
import android.graphics.Rect;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraMetadata;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.marshal.MarshalQueryable;
import android.hardware.camera2.marshal.MarshalRegistry;
import android.hardware.camera2.marshal.Marshaler;
import android.hardware.camera2.marshal.impl.MarshalQueryableArray;
import android.hardware.camera2.marshal.impl.MarshalQueryableBlackLevelPattern;
import android.hardware.camera2.marshal.impl.MarshalQueryableBoolean;
import android.hardware.camera2.marshal.impl.MarshalQueryableColorSpaceTransform;
import android.hardware.camera2.marshal.impl.MarshalQueryableEnum;
import android.hardware.camera2.marshal.impl.MarshalQueryableHighSpeedVideoConfiguration;
import android.hardware.camera2.marshal.impl.MarshalQueryableMeteringRectangle;
import android.hardware.camera2.marshal.impl.MarshalQueryableNativeByteToInteger;
import android.hardware.camera2.marshal.impl.MarshalQueryablePair;
import android.hardware.camera2.marshal.impl.MarshalQueryableParcelable;
import android.hardware.camera2.marshal.impl.MarshalQueryablePrimitive;
import android.hardware.camera2.marshal.impl.MarshalQueryableRange;
import android.hardware.camera2.marshal.impl.MarshalQueryableRecommendedStreamConfiguration;
import android.hardware.camera2.marshal.impl.MarshalQueryableRect;
import android.hardware.camera2.marshal.impl.MarshalQueryableReprocessFormatsMap;
import android.hardware.camera2.marshal.impl.MarshalQueryableRggbChannelVector;
import android.hardware.camera2.marshal.impl.MarshalQueryableSize;
import android.hardware.camera2.marshal.impl.MarshalQueryableSizeF;
import android.hardware.camera2.marshal.impl.MarshalQueryableStreamConfiguration;
import android.hardware.camera2.marshal.impl.MarshalQueryableStreamConfigurationDuration;
import android.hardware.camera2.marshal.impl.MarshalQueryableString;
import android.hardware.camera2.params.Capability;
import android.hardware.camera2.params.ColorSpaceProfiles;
import android.hardware.camera2.params.DeviceStateSensorOrientationMap;
import android.hardware.camera2.params.DynamicRangeProfiles;
import android.hardware.camera2.params.Face;
import android.hardware.camera2.params.HighSpeedVideoConfiguration;
import android.hardware.camera2.params.LensShadingMap;
import android.hardware.camera2.params.MandatoryStreamCombination;
import android.hardware.camera2.params.MultiResolutionStreamConfigurationMap;
import android.hardware.camera2.params.OisSample;
import android.hardware.camera2.params.RecommendedStreamConfiguration;
import android.hardware.camera2.params.RecommendedStreamConfigurationMap;
import android.hardware.camera2.params.ReprocessFormatsMap;
import android.hardware.camera2.params.StreamConfiguration;
import android.hardware.camera2.params.StreamConfigurationDuration;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.hardware.camera2.params.TonemapCurve;
import android.hardware.camera2.utils.ArrayUtils;
import android.hardware.camera2.utils.TypeReference;
import android.location.Location;
import android.location.LocationManager;
import android.os.Build;
import android.os.Parcel;
import android.os.Parcelable;
import android.os.ServiceSpecificException;
import android.util.Log;
import android.util.Range;
import android.util.Size;

import dalvik.annotation.optimization.FastNative;
import dalvik.system.VMRuntime;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;

/**
 * Implementation of camera metadata marshal/unmarshal across Binder to
 * the camera service
 */
public class CameraMetadataNative implements Parcelable {

    public static class Key<T> {
        private boolean mHasTag;
        private int mTag;
        private long mVendorId = Long.MAX_VALUE;
        private final Class<T> mType;
        private final TypeReference<T> mTypeReference;
        private final String mName;
        private final String mFallbackName;
        private final int mHash;

        /**
         * @hide
         */
        public Key(String name, Class<T> type, long vendorId) {
            if (name == null) {
                throw new NullPointerException("Key needs a valid name");
            } else if (type == null) {
                throw new NullPointerException("Type needs to be non-null");
            }
            mName = name;
            mFallbackName = null;
            mType = type;
            mVendorId = vendorId;
            mTypeReference = TypeReference.createSpecializedTypeReference(type);
            mHash = mName.hashCode() ^ mTypeReference.hashCode();
        }

        /**
         * @hide
         */
        public Key(String name, String fallbackName, Class<T> type) {
            if (name == null) {
                throw new NullPointerException("Key needs a valid name");
            } else if (type == null) {
                throw new NullPointerException("Type needs to be non-null");
            }
            mName = name;
            mFallbackName = fallbackName;
            mType = type;
            mTypeReference = TypeReference.createSpecializedTypeReference(type);
            mHash = mName.hashCode() ^ mTypeReference.hashCode();
        }

        /**
         * Visible for testing only.
         *
         * <p>Use the CameraCharacteristics.Key, CaptureResult.Key, or CaptureRequest.Key
         * for application code or vendor-extended keys.</p>
         */
        public Key(String name, Class<T> type) {
            if (name == null) {
                throw new NullPointerException("Key needs a valid name");
            } else if (type == null) {
                throw new NullPointerException("Type needs to be non-null");
            }
            mName = name;
            mFallbackName = null;
            mType = type;
            mTypeReference = TypeReference.createSpecializedTypeReference(type);
            mHash = mName.hashCode() ^ mTypeReference.hashCode();
        }

        /**
         * Visible for testing only.
         *
         * <p>Use the CameraCharacteristics.Key, CaptureResult.Key, or CaptureRequest.Key
         * for application code or vendor-extended keys.</p>
         */
        @SuppressWarnings("unchecked")
        public Key(String name, TypeReference<T> typeReference) {
            if (name == null) {
                throw new NullPointerException("Key needs a valid name");
            } else if (typeReference == null) {
                throw new NullPointerException("TypeReference needs to be non-null");
            }
            mName = name;
            mFallbackName = null;
            mType = (Class<T>)typeReference.getRawType();
            mTypeReference = typeReference;
            mHash = mName.hashCode() ^ mTypeReference.hashCode();
        }

        /**
         * Return a camelCase, period separated name formatted like:
         * {@code "root.section[.subsections].name"}.
         *
         * <p>Built-in keys exposed by the Android SDK are always prefixed with {@code "android."};
         * keys that are device/platform-specific are prefixed with {@code "com."}.</p>
         *
         * <p>For example, {@code CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP} would
         * have a name of {@code "android.scaler.streamConfigurationMap"}; whereas a device
         * specific key might look like {@code "com.google.nexus.data.private"}.</p>
         *
         * @return String representation of the key name
         */
        public final String getName() {
            return mName;
        }

        /**
         * {@inheritDoc}
         */
        @Override
        public final int hashCode() {
            return mHash;
        }

        /**
         * Compare this key against other native keys, request keys, result keys, and
         * characteristics keys.
         *
         * <p>Two keys are considered equal if their name and type reference are equal.</p>
         *
         * <p>Note that the equality against non-native keys is one-way. A native key may be equal
         * to a result key; but that same result key will not be equal to a native key.</p>
         */
        @SuppressWarnings("rawtypes")
        @Override
        public final boolean equals(Object o) {
            if (this == o) {
                return true;
            }

            if (o == null || this.hashCode() != o.hashCode()) {
                return false;
            }

            Key<?> lhs;

            if (o instanceof CaptureResult.Key) {
                lhs = ((CaptureResult.Key)o).getNativeKey();
            } else if (o instanceof CaptureRequest.Key) {
                lhs = ((CaptureRequest.Key)o).getNativeKey();
            } else if (o instanceof CameraCharacteristics.Key) {
                lhs = ((CameraCharacteristics.Key)o).getNativeKey();
            } else if ((o instanceof Key)) {
                lhs = (Key<?>)o;
            } else {
                return false;
            }

            return mName.equals(lhs.mName) && mTypeReference.equals(lhs.mTypeReference);
        }

        /**
         * <p>
         * Get the tag corresponding to this key. This enables insertion into the
         * native metadata.
         * </p>
         *
         * <p>This value is looked up the first time, and cached subsequently.</p>
         *
         * <p>This function may be called without cacheTag() if this is not a vendor key.
         * If this is a vendor key, cacheTag() must be called first before getTag() can
         * be called. Otherwise, mVendorId could be default (Long.MAX_VALUE) and vendor
         * tag lookup could fail.</p>
         *
         * @return The tag numeric value corresponding to the string
         */
        @UnsupportedAppUsage
        public final int getTag() {
            if (!mHasTag) {
                mTag = CameraMetadataNative.getTag(mName, mVendorId);
                mHasTag = true;
            }
            return mTag;
        }

        /**
         * Whether this key's tag is cached.
         *
         * @hide
         */
        @UnsupportedAppUsage(maxTargetSdk = Build.VERSION_CODES.R, trackingBug = 170729553)
        public final boolean hasTag() {
            return mHasTag;
        }

        /**
         * Cache this key's tag.
         *
         * @hide
         */
        @UnsupportedAppUsage(maxTargetSdk = Build.VERSION_CODES.R, trackingBug = 170729553)
        public final void cacheTag(int tag) {
            mHasTag = true;
            mTag = tag;
        }

        /**
         * Get the raw class backing the type {@code T} for this key.
         *
         * <p>The distinction is only important if {@code T} is a generic, e.g.
         * {@code Range<Integer>} since the nested type will be erased.</p>
         */
        public final Class<T> getType() {
            // TODO: remove this; other places should use #getTypeReference() instead
            return mType;
        }

        /**
         * Get the vendor tag provider id.
         *
         * @hide
         */
        public final long getVendorId() {
            return mVendorId;
        }

        /**
         * Get the type reference backing the type {@code T} for this key.
         *
         * <p>The distinction is only important if {@code T} is a generic, e.g.
         * {@code Range<Integer>} since the nested type will be retained.</p>
         */
        public final TypeReference<T> getTypeReference() {
            return mTypeReference;
        }
    }

    private static final String TAG = "CameraMetadataJV";
    private static final boolean DEBUG = false;

    // this should be in sync with HAL_PIXEL_FORMAT_BLOB defined in graphics.h
    public static final int NATIVE_JPEG_FORMAT = 0x21;

    private static final String CELLID_PROCESS = "CELLID";
    private static final String GPS_PROCESS = "GPS";
    private static final int FACE_LANDMARK_SIZE = 6;

    private static final int MANDATORY_STREAM_CONFIGURATIONS_DEFAULT = 0;
    private static final int MANDATORY_STREAM_CONFIGURATIONS_MAX_RESOLUTION = 1;
    private static final int MANDATORY_STREAM_CONFIGURATIONS_CONCURRENT = 2;
    private static final int MANDATORY_STREAM_CONFIGURATIONS_10BIT = 3;
    private static final int MANDATORY_STREAM_CONFIGURATIONS_USE_CASE = 4;
    private static final int MANDATORY_STREAM_CONFIGURATIONS_PREVIEW_STABILIZATION = 5;

    private static String translateLocationProviderToProcess(final String provider) {
        if (provider == null) {
            return null;
        }
        switch (provider) {
            case LocationManager.GPS_PROVIDER:
                return GPS_PROCESS;
            case LocationManager.NETWORK_PROVIDER:
                return CELLID_PROCESS;
            default:
                return null;
        }
    }

    private static String translateProcessToLocationProvider(final String process) {
        if (process == null) {
            return null;
        }
        switch (process) {
            case GPS_PROCESS:
                return LocationManager.GPS_PROVIDER;
            case CELLID_PROCESS:
                return LocationManager.NETWORK_PROVIDER;
            default:
                return null;
        }
    }

    public CameraMetadataNative() {
        super();
        mMetadataPtr = nativeAllocate();
        if (mMetadataPtr == 0) {
            throw new OutOfMemoryError("Failed to allocate native CameraMetadata");
        }
        updateNativeAllocation();
    }

    /**
     * Copy constructor - clone metadata
     */
    public CameraMetadataNative(CameraMetadataNative other) {
        super();
        mMetadataPtr = nativeAllocateCopy(other.mMetadataPtr);
        if (mMetadataPtr == 0) {
            throw new OutOfMemoryError("Failed to allocate native CameraMetadata");
        }
        updateNativeAllocation();
    }

    /**
     * <p>Move the contents from {@code other} into a new camera metadata instance.</p>
     *
     * <p>After this call, {@code other} will become empty.</p>
     *
     * @param other the previous metadata instance which will get pilfered
     * @return a new metadata instance with the values from {@code other} moved into it
     */
    public static CameraMetadataNative move(CameraMetadataNative other) {
        CameraMetadataNative newObject = new CameraMetadataNative();
        newObject.swap(other);
        return newObject;
    }

    /**
     * Set all metadata values in the destination argument by using the corresponding
     * values from the source. Metadata tags present in the destination and absent
     * from the source will remain unmodified.
     *
     * @param dst Destination metadata
     * @param src Source metadata
     * @hide
     */
    public static void update(CameraMetadataNative dst, CameraMetadataNative src) {
        nativeUpdate(dst.mMetadataPtr, src.mMetadataPtr);
    }

    public static final @android.annotation.NonNull Parcelable.Creator<CameraMetadataNative> CREATOR =
            new Parcelable.Creator<CameraMetadataNative>() {
                @Override
                public CameraMetadataNative createFromParcel(Parcel in) {
                    CameraMetadataNative metadata = new CameraMetadataNative();
                    metadata.readFromParcel(in);
                    return metadata;
                }

                @Override
                public CameraMetadataNative[] newArray(int size) {
                    return new CameraMetadataNative[size];
                }
            };

    @Override
    public int describeContents() {
        return 0;
    }

    @Override
    public void writeToParcel(Parcel dest, int flags) {
        nativeWriteToParcel(dest, mMetadataPtr);
    }

    /**
     * @hide
     */
    public <T> T get(CameraCharacteristics.Key<T> key) {
        return get(key.getNativeKey());
    }

    /**
     * @hide
     */
    public <T> T get(CaptureResult.Key<T> key) {
        return get(key.getNativeKey());
    }

    /**
     * @hide
     */
    public <T> T get(CaptureRequest.Key<T> key) {
        return get(key.getNativeKey());
    }

    /**
     * Look-up a metadata field value by its key.
     *
     * @param key a non-{@code null} key instance
     * @return the field corresponding to the {@code key}, or {@code null} if no value was set
     */
    public <T> T get(Key<T> key) {
        Objects.requireNonNull(key, "key must not be null");

        // Check if key has been overridden to use a wrapper class on the java side.
        GetCommand g = sGetCommandMap.get(key);
        if (g != null) {
            return g.getValue(this, key);
        }
        return getBase(key);
    }

    public void readFromParcel(Parcel in) {
        nativeReadFromParcel(in, mMetadataPtr);
        updateNativeAllocation();
    }

    /**
     * Set the global client-side vendor tag descriptor to allow use of vendor
     * tags in camera applications.
     *
     * @throws ServiceSpecificException
     * @hide
     */
    public static void setupGlobalVendorTagDescriptor() throws ServiceSpecificException {
        int err = nativeSetupGlobalVendorTagDescriptor();
        if (err != 0) {
            throw new ServiceSpecificException(err, "Failure to set up global vendor tags");
        }
    }

    /**
     * Set the global client-side vendor tag descriptor to allow use of vendor
     * tags in camera applications.
     *
     * @return int An error code corresponding to one of the
     * {@link ICameraService} error constants, or 0 on success.
     */
    private static native int nativeSetupGlobalVendorTagDescriptor();

    /**
     * Set a camera metadata field to a value. The field definitions can be
     * found in {@link CameraCharacteristics}, {@link CaptureResult}, and
     * {@link CaptureRequest}.
     *
     * @param key The metadata field to write.
     * @param value The value to set the field to, which must be of a matching
     * type to the key.
     */
    public <T> void set(Key<T> key, T value) {
        SetCommand s = sSetCommandMap.get(key);
        if (s != null) {
            s.setValue(this, value);
            return;
        }

        setBase(key, value);
    }

    public <T> void set(CaptureRequest.Key<T> key, T value) {
        set(key.getNativeKey(), value);
    }

    public <T> void set(CaptureResult.Key<T> key, T value) {
        set(key.getNativeKey(), value);
    }

    public <T> void set(CameraCharacteristics.Key<T> key, T value) {
        set(key.getNativeKey(), value);
    }

    // Keep up-to-date with camera_metadata.h
    /**
     * @hide
     */
    public static final int TYPE_BYTE = 0;
    /**
     * @hide
     */
    public static final int TYPE_INT32 = 1;
    /**
     * @hide
     */
    public static final int TYPE_FLOAT = 2;
    /**
     * @hide
     */
    public static final int TYPE_INT64 = 3;
    /**
     * @hide
     */
    public static final int TYPE_DOUBLE = 4;
    /**
     * @hide
     */
    public static final int TYPE_RATIONAL = 5;
    /**
     * @hide
     */
    public static final int NUM_TYPES = 6;

    private void close() {
        // Delete native pointer, but does not clear it
        nativeClose(mMetadataPtr);
        mMetadataPtr = 0;

        if (mBufferSize > 0) {
            VMRuntime.getRuntime().registerNativeFree(mBufferSize);
        }
        mBufferSize = 0;
    }

    private <T> T getBase(CameraCharacteristics.Key<T> key) {
        return getBase(key.getNativeKey());
    }

    private <T> T getBase(CaptureResult.Key<T> key) {
        return getBase(key.getNativeKey());
    }

    private <T> T getBase(CaptureRequest.Key<T> key) {
        return getBase(key.getNativeKey());
    }

    private <T> T getBase(Key<T> key) {
        int tag;
        if (key.hasTag()) {
            tag = key.getTag();
        } else {
            tag = nativeGetTagFromKeyLocal(mMetadataPtr, key.getName());
            key.cacheTag(tag);
        }
        byte[] values = readValues(tag);
        if (values == null) {
            // If the key returns null, use the fallback key if exists.
            // This is to support old key names for the newly published keys.
            if (key.mFallbackName == null) {
                return null;
            }
            tag = nativeGetTagFromKeyLocal(mMetadataPtr, key.mFallbackName);
            values = readValues(tag);
            if (values == null) {
                return null;
            }
        }

        int nativeType = nativeGetTypeFromTagLocal(mMetadataPtr, tag);
        Marshaler<T> marshaler = getMarshalerForKey(key, nativeType);
        ByteBuffer buffer = ByteBuffer.wrap(values).order(ByteOrder.nativeOrder());
        return marshaler.unmarshal(buffer);
    }

    // Use Command pattern here to avoid lots of expensive if/equals checks in get for overridden
    // metadata.
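    // Usage sketch (illustrative only, not part of the class): framework callers normally go
    // through the public key types, which funnel into the Key-based get()/set() above, e.g.
    //
    //     CameraMetadataNative metadata = new CameraMetadataNative();
    //     metadata.set(CaptureRequest.JPEG_GPS_TIMESTAMP, 1234567890L);
    //     Long timestamp = metadata.get(CaptureRequest.JPEG_GPS_TIMESTAMP);
    //
    // The command maps below only intercept keys whose Java-side representation differs from
    // the raw native layout; everything else falls through to getBase()/setBase().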
621 private static final HashMap<Key<?>, GetCommand> sGetCommandMap = 622 new HashMap<Key<?>, GetCommand>(); 623 static { 624 sGetCommandMap.put( GetCommand()625 CameraCharacteristics.SCALER_AVAILABLE_FORMATS.getNativeKey(), new GetCommand() { 626 @Override 627 @SuppressWarnings("unchecked") 628 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 629 return (T) metadata.getAvailableFormats(); 630 } 631 }); 632 sGetCommandMap.put( GetCommand()633 CaptureResult.STATISTICS_FACES.getNativeKey(), new GetCommand() { 634 @Override 635 @SuppressWarnings("unchecked") 636 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 637 return (T) metadata.getFaces(); 638 } 639 }); 640 sGetCommandMap.put( GetCommand()641 CaptureResult.STATISTICS_FACE_RECTANGLES.getNativeKey(), new GetCommand() { 642 @Override 643 @SuppressWarnings("unchecked") 644 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 645 return (T) metadata.getFaceRectangles(); 646 } 647 }); 648 sGetCommandMap.put( CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP.getNativeKey()649 CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP.getNativeKey(), 650 new GetCommand() { 651 @Override 652 @SuppressWarnings("unchecked") 653 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 654 return (T) metadata.getStreamConfigurationMap(); 655 } 656 }); 657 sGetCommandMap.put( CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP_MAXIMUM_RESOLUTION.getNativeKey()658 CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP_MAXIMUM_RESOLUTION.getNativeKey(), 659 new GetCommand() { 660 @Override 661 @SuppressWarnings("unchecked") 662 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 663 return (T) metadata.getStreamConfigurationMapMaximumResolution(); 664 } 665 }); 666 sGetCommandMap.put( CameraCharacteristics.SCALER_MANDATORY_STREAM_COMBINATIONS.getNativeKey()667 CameraCharacteristics.SCALER_MANDATORY_STREAM_COMBINATIONS.getNativeKey(), 668 new GetCommand() { 669 @Override 670 @SuppressWarnings("unchecked") 671 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 672 return (T) metadata.getMandatoryStreamCombinations(); 673 } 674 }); 675 sGetCommandMap.put( CameraCharacteristics.SCALER_MANDATORY_CONCURRENT_STREAM_COMBINATIONS.getNativeKey()676 CameraCharacteristics.SCALER_MANDATORY_CONCURRENT_STREAM_COMBINATIONS.getNativeKey(), 677 new GetCommand() { 678 @Override 679 @SuppressWarnings("unchecked") 680 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 681 return (T) metadata.getMandatoryConcurrentStreamCombinations(); 682 } 683 }); 684 685 sGetCommandMap.put( CameraCharacteristics.SCALER_MANDATORY_TEN_BIT_OUTPUT_STREAM_COMBINATIONS.getNativeKey()686 CameraCharacteristics.SCALER_MANDATORY_TEN_BIT_OUTPUT_STREAM_COMBINATIONS.getNativeKey(), 687 new GetCommand() { 688 @Override 689 @SuppressWarnings("unchecked") 690 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 691 return (T) metadata.getMandatory10BitStreamCombinations(); 692 } 693 }); 694 695 sGetCommandMap.put( CameraCharacteristics.SCALER_MANDATORY_MAXIMUM_RESOLUTION_STREAM_COMBINATIONS.getNativeKey()696 CameraCharacteristics.SCALER_MANDATORY_MAXIMUM_RESOLUTION_STREAM_COMBINATIONS.getNativeKey(), 697 new GetCommand() { 698 @Override 699 @SuppressWarnings("unchecked") 700 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 701 return (T) metadata.getMandatoryMaximumResolutionStreamCombinations(); 702 } 703 }); 704 705 sGetCommandMap.put( 
CameraCharacteristics.SCALER_MANDATORY_USE_CASE_STREAM_COMBINATIONS.getNativeKey()706 CameraCharacteristics.SCALER_MANDATORY_USE_CASE_STREAM_COMBINATIONS.getNativeKey(), 707 new GetCommand() { 708 @Override 709 @SuppressWarnings("unchecked") 710 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 711 return (T) metadata.getMandatoryUseCaseStreamCombinations(); 712 } 713 }); 714 sGetCommandMap.put( CameraCharacteristics.SCALER_MANDATORY_PREVIEW_STABILIZATION_OUTPUT_STREAM_COMBINATIONS.getNativeKey()715 CameraCharacteristics.SCALER_MANDATORY_PREVIEW_STABILIZATION_OUTPUT_STREAM_COMBINATIONS.getNativeKey(), 716 new GetCommand() { 717 @Override 718 @SuppressWarnings("unchecked") 719 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 720 return (T) metadata.getMandatoryPreviewStabilizationStreamCombinations(); 721 } 722 }); 723 724 sGetCommandMap.put( CameraCharacteristics.CONTROL_MAX_REGIONS_AE.getNativeKey()725 CameraCharacteristics.CONTROL_MAX_REGIONS_AE.getNativeKey(), new GetCommand() { 726 @Override 727 @SuppressWarnings("unchecked") 728 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 729 return (T) metadata.getMaxRegions(key); 730 } 731 }); 732 sGetCommandMap.put( GetCommand()733 CameraCharacteristics.CONTROL_MAX_REGIONS_AWB.getNativeKey(), new GetCommand() { 734 @Override 735 @SuppressWarnings("unchecked") 736 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 737 return (T) metadata.getMaxRegions(key); 738 } 739 }); 740 sGetCommandMap.put( CameraCharacteristics.CONTROL_MAX_REGIONS_AF.getNativeKey()741 CameraCharacteristics.CONTROL_MAX_REGIONS_AF.getNativeKey(), new GetCommand() { 742 @Override 743 @SuppressWarnings("unchecked") 744 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 745 return (T) metadata.getMaxRegions(key); 746 } 747 }); 748 sGetCommandMap.put( GetCommand()749 CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_RAW.getNativeKey(), new GetCommand() { 750 @Override 751 @SuppressWarnings("unchecked") 752 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 753 return (T) metadata.getMaxNumOutputs(key); 754 } 755 }); 756 sGetCommandMap.put( GetCommand()757 CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_PROC.getNativeKey(), new GetCommand() { 758 @Override 759 @SuppressWarnings("unchecked") 760 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 761 return (T) metadata.getMaxNumOutputs(key); 762 } 763 }); 764 sGetCommandMap.put( CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_PROC_STALLING.getNativeKey()765 CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_PROC_STALLING.getNativeKey(), 766 new GetCommand() { 767 @Override 768 @SuppressWarnings("unchecked") 769 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 770 return (T) metadata.getMaxNumOutputs(key); 771 } 772 }); 773 sGetCommandMap.put( GetCommand()774 CaptureRequest.TONEMAP_CURVE.getNativeKey(), new GetCommand() { 775 @Override 776 @SuppressWarnings("unchecked") 777 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 778 return (T) metadata.getTonemapCurve(); 779 } 780 }); 781 sGetCommandMap.put( GetCommand()782 CaptureResult.JPEG_GPS_LOCATION.getNativeKey(), new GetCommand() { 783 @Override 784 @SuppressWarnings("unchecked") 785 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 786 return (T) metadata.getGpsLocation(); 787 } 788 }); 789 sGetCommandMap.put( CaptureResult.STATISTICS_LENS_SHADING_CORRECTION_MAP.getNativeKey()790 
CaptureResult.STATISTICS_LENS_SHADING_CORRECTION_MAP.getNativeKey(), 791 new GetCommand() { 792 @Override 793 @SuppressWarnings("unchecked") 794 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 795 return (T) metadata.getLensShadingMap(); 796 } 797 }); 798 sGetCommandMap.put( CameraCharacteristics.INFO_DEVICE_STATE_SENSOR_ORIENTATION_MAP.getNativeKey()799 CameraCharacteristics.INFO_DEVICE_STATE_SENSOR_ORIENTATION_MAP.getNativeKey(), 800 new GetCommand() { 801 @Override 802 @SuppressWarnings("unchecked") 803 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 804 return (T) metadata.getDeviceStateOrientationMap(); 805 } 806 }); 807 sGetCommandMap.put( CameraCharacteristics.REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES.getNativeKey()808 CameraCharacteristics.REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES.getNativeKey(), 809 new GetCommand() { 810 @Override 811 @SuppressWarnings("unchecked") 812 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 813 return (T) metadata.getDynamicRangeProfiles(); 814 } 815 }); 816 sGetCommandMap.put( CameraCharacteristics.REQUEST_AVAILABLE_COLOR_SPACE_PROFILES.getNativeKey()817 CameraCharacteristics.REQUEST_AVAILABLE_COLOR_SPACE_PROFILES.getNativeKey(), 818 new GetCommand() { 819 @Override 820 @SuppressWarnings("unchecked") 821 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 822 return (T) metadata.getColorSpaceProfiles(); 823 } 824 }); 825 sGetCommandMap.put( CaptureResult.STATISTICS_OIS_SAMPLES.getNativeKey()826 CaptureResult.STATISTICS_OIS_SAMPLES.getNativeKey(), 827 new GetCommand() { 828 @Override 829 @SuppressWarnings("unchecked") 830 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 831 return (T) metadata.getOisSamples(); 832 } 833 }); 834 sGetCommandMap.put( CameraCharacteristics.CONTROL_AVAILABLE_EXTENDED_SCENE_MODE_CAPABILITIES.getNativeKey()835 CameraCharacteristics.CONTROL_AVAILABLE_EXTENDED_SCENE_MODE_CAPABILITIES.getNativeKey(), 836 new GetCommand() { 837 @Override 838 @SuppressWarnings("unchecked") 839 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 840 return (T) metadata.getExtendedSceneModeCapabilities(); 841 } 842 }); 843 sGetCommandMap.put( CameraCharacteristics.SCALER_MULTI_RESOLUTION_STREAM_CONFIGURATION_MAP.getNativeKey()844 CameraCharacteristics.SCALER_MULTI_RESOLUTION_STREAM_CONFIGURATION_MAP.getNativeKey(), 845 new GetCommand() { 846 @Override 847 @SuppressWarnings("unchecked") 848 public <T> T getValue(CameraMetadataNative metadata, Key<T> key) { 849 return (T) metadata.getMultiResolutionStreamConfigurationMap(); 850 } 851 }); 852 } 853 getAvailableFormats()854 private int[] getAvailableFormats() { 855 int[] availableFormats = getBase(CameraCharacteristics.SCALER_AVAILABLE_FORMATS); 856 if (availableFormats != null) { 857 for (int i = 0; i < availableFormats.length; i++) { 858 // JPEG has different value between native and managed side, need override. 
                if (availableFormats[i] == NATIVE_JPEG_FORMAT) {
                    availableFormats[i] = ImageFormat.JPEG;
                }
            }
        }

        return availableFormats;
    }

    private boolean setFaces(Face[] faces) {
        if (faces == null) {
            return false;
        }

        int numFaces = faces.length;

        // Detect if all faces are SIMPLE or not; count # of valid faces
        boolean fullMode = true;
        for (Face face : faces) {
            if (face == null) {
                numFaces--;
                Log.w(TAG, "setFaces - null face detected, skipping");
                continue;
            }

            if (face.getId() == Face.ID_UNSUPPORTED) {
                fullMode = false;
            }
        }

        Rect[] faceRectangles = new Rect[numFaces];
        byte[] faceScores = new byte[numFaces];
        int[] faceIds = null;
        int[] faceLandmarks = null;

        if (fullMode) {
            faceIds = new int[numFaces];
            faceLandmarks = new int[numFaces * FACE_LANDMARK_SIZE];
        }

        int i = 0;
        for (Face face : faces) {
            if (face == null) {
                continue;
            }

            faceRectangles[i] = face.getBounds();
            faceScores[i] = (byte) face.getScore();

            if (fullMode) {
                faceIds[i] = face.getId();

                int j = 0;

                faceLandmarks[i * FACE_LANDMARK_SIZE + j++] = face.getLeftEyePosition().x;
                faceLandmarks[i * FACE_LANDMARK_SIZE + j++] = face.getLeftEyePosition().y;
                faceLandmarks[i * FACE_LANDMARK_SIZE + j++] = face.getRightEyePosition().x;
                faceLandmarks[i * FACE_LANDMARK_SIZE + j++] = face.getRightEyePosition().y;
                faceLandmarks[i * FACE_LANDMARK_SIZE + j++] = face.getMouthPosition().x;
                faceLandmarks[i * FACE_LANDMARK_SIZE + j++] = face.getMouthPosition().y;
            }

            i++;
        }

        set(CaptureResult.STATISTICS_FACE_RECTANGLES, faceRectangles);
        set(CaptureResult.STATISTICS_FACE_IDS, faceIds);
        set(CaptureResult.STATISTICS_FACE_LANDMARKS, faceLandmarks);
        set(CaptureResult.STATISTICS_FACE_SCORES, faceScores);

        return true;
    }

    private Face[] getFaces() {
        Integer faceDetectMode = get(CaptureResult.STATISTICS_FACE_DETECT_MODE);
        byte[] faceScores = get(CaptureResult.STATISTICS_FACE_SCORES);
        Rect[] faceRectangles = get(CaptureResult.STATISTICS_FACE_RECTANGLES);
        int[] faceIds = get(CaptureResult.STATISTICS_FACE_IDS);
        int[] faceLandmarks = get(CaptureResult.STATISTICS_FACE_LANDMARKS);

        if (areValuesAllNull(faceDetectMode, faceScores, faceRectangles, faceIds, faceLandmarks)) {
            return null;
        }

        if (faceDetectMode == null) {
            Log.w(TAG, "Face detect mode metadata is null, assuming the mode is SIMPLE");
            faceDetectMode = CaptureResult.STATISTICS_FACE_DETECT_MODE_SIMPLE;
        } else if (faceDetectMode > CaptureResult.STATISTICS_FACE_DETECT_MODE_FULL) {
            // Face detect mode is larger than FULL, assuming the mode is FULL
            faceDetectMode = CaptureResult.STATISTICS_FACE_DETECT_MODE_FULL;
        } else {
            if (faceDetectMode == CaptureResult.STATISTICS_FACE_DETECT_MODE_OFF) {
                return new Face[0];
            }
            if (faceDetectMode != CaptureResult.STATISTICS_FACE_DETECT_MODE_SIMPLE &&
                    faceDetectMode != CaptureResult.STATISTICS_FACE_DETECT_MODE_FULL) {
                Log.w(TAG, "Unknown face detect mode: " + faceDetectMode);
                return new Face[0];
            }
        }

        // Face scores and rectangles are required by SIMPLE and FULL mode.
        if (faceScores == null || faceRectangles == null) {
            Log.w(TAG, "Expect face scores and rectangles to be non-null");
            return new Face[0];
        } else if (faceScores.length != faceRectangles.length) {
            Log.w(TAG, String.format("Face score size(%d) doesn't match face rectangle size(%d)!",
                    faceScores.length, faceRectangles.length));
        }

        // To be safe, make the number of faces the minimum of all face info metadata lengths.
        int numFaces = Math.min(faceScores.length, faceRectangles.length);
        // Face id and landmarks are only required by FULL mode.
        if (faceDetectMode == CaptureResult.STATISTICS_FACE_DETECT_MODE_FULL) {
            if (faceIds == null || faceLandmarks == null) {
                Log.w(TAG, "Expect face ids and landmarks to be non-null for FULL mode, " +
                        "fall back to SIMPLE mode");
                faceDetectMode = CaptureResult.STATISTICS_FACE_DETECT_MODE_SIMPLE;
            } else {
                if (faceIds.length != numFaces ||
                        faceLandmarks.length != numFaces * FACE_LANDMARK_SIZE) {
                    Log.w(TAG, String.format("Face id size(%d), or face landmark size(%d) don't " +
                            "match face number(%d)!",
                            faceIds.length, faceLandmarks.length * FACE_LANDMARK_SIZE, numFaces));
                }
                // To be safe, make the number of faces the minimum of all face info metadata
                // lengths.
                numFaces = Math.min(numFaces, faceIds.length);
                numFaces = Math.min(numFaces, faceLandmarks.length / FACE_LANDMARK_SIZE);
            }
        }

        ArrayList<Face> faceList = new ArrayList<Face>();
        if (faceDetectMode == CaptureResult.STATISTICS_FACE_DETECT_MODE_SIMPLE) {
            for (int i = 0; i < numFaces; i++) {
                if (faceScores[i] <= Face.SCORE_MAX &&
                        faceScores[i] >= Face.SCORE_MIN) {
                    faceList.add(new Face(faceRectangles[i], faceScores[i]));
                }
            }
        } else {
            // CaptureResult.STATISTICS_FACE_DETECT_MODE_FULL
            for (int i = 0; i < numFaces; i++) {
                if (faceScores[i] <= Face.SCORE_MAX &&
                        faceScores[i] >= Face.SCORE_MIN &&
                        faceIds[i] >= 0) {
                    Point leftEye = new Point(faceLandmarks[i*FACE_LANDMARK_SIZE],
                            faceLandmarks[i*FACE_LANDMARK_SIZE+1]);
                    Point rightEye = new Point(faceLandmarks[i*FACE_LANDMARK_SIZE+2],
                            faceLandmarks[i*FACE_LANDMARK_SIZE+3]);
                    Point mouth = new Point(faceLandmarks[i*FACE_LANDMARK_SIZE+4],
                            faceLandmarks[i*FACE_LANDMARK_SIZE+5]);
                    Face face = new Face(faceRectangles[i], faceScores[i], faceIds[i],
                            leftEye, rightEye, mouth);
                    faceList.add(face);
                }
            }
        }
        Face[] faces = new Face[faceList.size()];
        faceList.toArray(faces);
        return faces;
    }

    // Face rectangles are defined as (left, top, right, bottom) instead of
    // (left, top, width, height) at the native level, so the normal Rect
    // conversion that does (l, t, w, h) -> (l, t, r, b) is unnecessary. Undo
    // that conversion here for just the faces.
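    // Worked example (illustrative numbers): if the generic Rect path has turned a native
    // (left=10, top=20, right=110, bottom=220) entry into Rect(10, 20, 120, 240) via the
    // (l, t, w, h) -> (l, t, r, b) conversion, the loop below rebuilds it as
    // new Rect(10, 20, 120 - 10, 240 - 20), restoring the original (10, 20, 110, 220) values.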
    private Rect[] getFaceRectangles() {
        Rect[] faceRectangles = getBase(CaptureResult.STATISTICS_FACE_RECTANGLES);
        if (faceRectangles == null) return null;

        Rect[] fixedFaceRectangles = new Rect[faceRectangles.length];
        for (int i = 0; i < faceRectangles.length; i++) {
            fixedFaceRectangles[i] = new Rect(
                    faceRectangles[i].left,
                    faceRectangles[i].top,
                    faceRectangles[i].right - faceRectangles[i].left,
                    faceRectangles[i].bottom - faceRectangles[i].top);
        }
        return fixedFaceRectangles;
    }

    private boolean setLensShadingMap(LensShadingMap lensShadingMap) {
        if (lensShadingMap == null) {
            return false;
        }
        float[] lsmArray = new float[lensShadingMap.getGainFactorCount()];
        lensShadingMap.copyGainFactors(lsmArray, 0);
        setBase(CaptureResult.STATISTICS_LENS_SHADING_MAP, lsmArray);

        Size s = new Size(lensShadingMap.getRowCount(), lensShadingMap.getColumnCount());
        setBase(CameraCharacteristics.LENS_INFO_SHADING_MAP_SIZE, s);
        return true;
    }

    private LensShadingMap getLensShadingMap() {
        float[] lsmArray = getBase(CaptureResult.STATISTICS_LENS_SHADING_MAP);
        Size s = get(CameraCharacteristics.LENS_INFO_SHADING_MAP_SIZE);

        // Do not warn if lsmArray is null while s is not. This is valid.
        if (lsmArray == null) {
            return null;
        }

        if (s == null) {
            Log.w(TAG, "getLensShadingMap - Lens shading map size was null.");
            return null;
        }

        LensShadingMap map = new LensShadingMap(lsmArray, s.getHeight(), s.getWidth());
        return map;
    }

    private DeviceStateSensorOrientationMap getDeviceStateOrientationMap() {
        long[] mapArray = getBase(CameraCharacteristics.INFO_DEVICE_STATE_ORIENTATIONS);

        // Do not warn if map is null while s is not. This is valid.
        if (mapArray == null) {
            return null;
        }

        DeviceStateSensorOrientationMap map = new DeviceStateSensorOrientationMap(mapArray);
        return map;
    }

    private DynamicRangeProfiles getDynamicRangeProfiles() {
        long[] profileArray = getBase(
                CameraCharacteristics.REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP);

        if (profileArray == null) {
            return null;
        }

        return new DynamicRangeProfiles(profileArray);
    }

    private ColorSpaceProfiles getColorSpaceProfiles() {
        long[] profileArray = getBase(
                CameraCharacteristics.REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP);

        if (profileArray == null) {
            return null;
        }

        return new ColorSpaceProfiles(profileArray);
    }

    private Location getGpsLocation() {
        String processingMethod = get(CaptureResult.JPEG_GPS_PROCESSING_METHOD);
        double[] coords = get(CaptureResult.JPEG_GPS_COORDINATES);
        Long timeStamp = get(CaptureResult.JPEG_GPS_TIMESTAMP);

        if (areValuesAllNull(processingMethod, coords, timeStamp)) {
            return null;
        }

        Location l = new Location(translateProcessToLocationProvider(processingMethod));
        if (timeStamp != null) {
            // Location expects timestamp in [ms.]
            l.setTime(timeStamp * 1000);
        } else {
            Log.w(TAG, "getGpsLocation - No timestamp for GPS location.");
        }

        if (coords != null) {
            l.setLatitude(coords[0]);
            l.setLongitude(coords[1]);
            l.setAltitude(coords[2]);
        } else {
            Log.w(TAG, "getGpsLocation - No coordinates for GPS location");
        }

        return l;
    }

    private boolean setGpsLocation(Location l) {
        if (l == null) {
            // If Location value being set is null, remove corresponding keys.
            // This is safe because api1/client2/CameraParameters.cpp already erases
            // the keys for JPEG_GPS_LOCATION for certain cases.
            setBase(CaptureRequest.JPEG_GPS_TIMESTAMP, null);
            setBase(CaptureRequest.JPEG_GPS_COORDINATES, null);
            setBase(CaptureRequest.JPEG_GPS_PROCESSING_METHOD, null);
            return false;
        }

        double[] coords = { l.getLatitude(), l.getLongitude(), l.getAltitude() };
        String processMethod = translateLocationProviderToProcess(l.getProvider());
        // JPEG_GPS_TIMESTAMP expects sec. instead of msec.
        long timestamp = l.getTime() / 1000;

        set(CaptureRequest.JPEG_GPS_TIMESTAMP, timestamp);
        set(CaptureRequest.JPEG_GPS_COORDINATES, coords);

        if (processMethod == null) {
            Log.w(TAG, "setGpsLocation - No process method, Location is not from a GPS or " +
                    "NETWORK provider");
        } else {
            setBase(CaptureRequest.JPEG_GPS_PROCESSING_METHOD, processMethod);
        }
        return true;
    }

    private void parseRecommendedConfigurations(RecommendedStreamConfiguration[] configurations,
            StreamConfigurationMap fullMap, boolean isDepth,
            ArrayList<ArrayList<StreamConfiguration>> /*out*/streamConfigList,
            ArrayList<ArrayList<StreamConfigurationDuration>> /*out*/streamDurationList,
            ArrayList<ArrayList<StreamConfigurationDuration>> /*out*/streamStallList,
            boolean[] /*out*/supportsPrivate) {

        streamConfigList.ensureCapacity(RecommendedStreamConfigurationMap.MAX_USECASE_COUNT);
        streamDurationList.ensureCapacity(RecommendedStreamConfigurationMap.MAX_USECASE_COUNT);
        streamStallList.ensureCapacity(RecommendedStreamConfigurationMap.MAX_USECASE_COUNT);
        for (int i = 0; i < RecommendedStreamConfigurationMap.MAX_USECASE_COUNT; i++) {
            streamConfigList.add(new ArrayList<StreamConfiguration> ());
            streamDurationList.add(new ArrayList<StreamConfigurationDuration> ());
            streamStallList.add(new ArrayList<StreamConfigurationDuration> ());
        }

        for (RecommendedStreamConfiguration c : configurations) {
            int width = c.getWidth();
            int height = c.getHeight();
            int internalFormat = c.getFormat();
            int publicFormat =
                    (isDepth) ?
StreamConfigurationMap.depthFormatToPublic(internalFormat) : 1183 StreamConfigurationMap.imageFormatToPublic(internalFormat); 1184 Size sz = new Size(width, height); 1185 int usecaseBitmap = c.getUsecaseBitmap(); 1186 1187 if (!c.isInput()) { 1188 StreamConfigurationDuration minDurationConfiguration = null; 1189 StreamConfigurationDuration stallDurationConfiguration = null; 1190 1191 StreamConfiguration streamConfiguration = new StreamConfiguration(internalFormat, 1192 width, height, /*input*/ false); 1193 1194 long minFrameDuration = fullMap.getOutputMinFrameDuration(publicFormat, sz); 1195 if (minFrameDuration > 0) { 1196 minDurationConfiguration = new StreamConfigurationDuration(internalFormat, 1197 width, height, minFrameDuration); 1198 } 1199 1200 long stallDuration = fullMap.getOutputStallDuration(publicFormat, sz); 1201 if (stallDuration > 0) { 1202 stallDurationConfiguration = new StreamConfigurationDuration(internalFormat, 1203 width, height, stallDuration); 1204 } 1205 1206 for (int i = 0; i < RecommendedStreamConfigurationMap.MAX_USECASE_COUNT; i++) { 1207 if ((usecaseBitmap & (1 << i)) != 0) { 1208 ArrayList<StreamConfiguration> sc = streamConfigList.get(i); 1209 sc.add(streamConfiguration); 1210 1211 if (minFrameDuration > 0) { 1212 ArrayList<StreamConfigurationDuration> scd = streamDurationList.get(i); 1213 scd.add(minDurationConfiguration); 1214 } 1215 1216 if (stallDuration > 0) { 1217 ArrayList<StreamConfigurationDuration> scs = streamStallList.get(i); 1218 scs.add(stallDurationConfiguration); 1219 } 1220 1221 if ((supportsPrivate != null) && !supportsPrivate[i] && 1222 (publicFormat == ImageFormat.PRIVATE)) { 1223 supportsPrivate[i] = true; 1224 } 1225 } 1226 } 1227 } else { 1228 if (usecaseBitmap != (1 << RecommendedStreamConfigurationMap.USECASE_ZSL)) { 1229 throw new IllegalArgumentException("Recommended input stream configurations " + 1230 "should only be advertised in the ZSL use case!"); 1231 } 1232 1233 ArrayList<StreamConfiguration> sc = streamConfigList.get( 1234 RecommendedStreamConfigurationMap.USECASE_ZSL); 1235 sc.add(new StreamConfiguration(internalFormat, 1236 width, height, /*input*/ true)); 1237 } 1238 } 1239 } 1240 1241 private class StreamConfigurationData { 1242 StreamConfiguration [] streamConfigurationArray = null; 1243 StreamConfigurationDuration [] minDurationArray = null; 1244 StreamConfigurationDuration [] stallDurationArray = null; 1245 } 1246 initializeStreamConfigurationData(ArrayList<StreamConfiguration> sc, ArrayList<StreamConfigurationDuration> scd, ArrayList<StreamConfigurationDuration> scs, StreamConfigurationData scData)1247 public void initializeStreamConfigurationData(ArrayList<StreamConfiguration> sc, 1248 ArrayList<StreamConfigurationDuration> scd, ArrayList<StreamConfigurationDuration> scs, 1249 StreamConfigurationData /*out*/scData) { 1250 if ((scData == null) || (sc == null)) { 1251 return; 1252 } 1253 1254 scData.streamConfigurationArray = new StreamConfiguration[sc.size()]; 1255 scData.streamConfigurationArray = sc.toArray(scData.streamConfigurationArray); 1256 1257 if ((scd != null) && !scd.isEmpty()) { 1258 scData.minDurationArray = new StreamConfigurationDuration[scd.size()]; 1259 scData.minDurationArray = scd.toArray(scData.minDurationArray); 1260 } else { 1261 scData.minDurationArray = new StreamConfigurationDuration[0]; 1262 } 1263 1264 if ((scs != null) && !scs.isEmpty()) { 1265 scData.stallDurationArray = new StreamConfigurationDuration[scs.size()]; 1266 scData.stallDurationArray = scs.toArray(scData.stallDurationArray); 
1267 } else { 1268 scData.stallDurationArray = new StreamConfigurationDuration[0]; 1269 } 1270 } 1271 1272 /** 1273 * Retrieve the list of recommended stream configurations. 1274 * 1275 * @return A list of recommended stream configuration maps for each common use case or null 1276 * in case the recommended stream configurations are invalid or incomplete. 1277 * @hide 1278 */ getRecommendedStreamConfigurations()1279 public ArrayList<RecommendedStreamConfigurationMap> getRecommendedStreamConfigurations() { 1280 RecommendedStreamConfiguration[] configurations = getBase( 1281 CameraCharacteristics.SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS); 1282 RecommendedStreamConfiguration[] depthConfigurations = getBase( 1283 CameraCharacteristics.DEPTH_AVAILABLE_RECOMMENDED_DEPTH_STREAM_CONFIGURATIONS); 1284 if ((configurations == null) && (depthConfigurations == null)) { 1285 return null; 1286 } 1287 1288 StreamConfigurationMap fullMap = getStreamConfigurationMap(); 1289 ArrayList<RecommendedStreamConfigurationMap> recommendedConfigurations = 1290 new ArrayList<RecommendedStreamConfigurationMap> (); 1291 1292 ArrayList<ArrayList<StreamConfiguration>> streamConfigList = 1293 new ArrayList<ArrayList<StreamConfiguration>>(); 1294 ArrayList<ArrayList<StreamConfigurationDuration>> streamDurationList = 1295 new ArrayList<ArrayList<StreamConfigurationDuration>>(); 1296 ArrayList<ArrayList<StreamConfigurationDuration>> streamStallList = 1297 new ArrayList<ArrayList<StreamConfigurationDuration>>(); 1298 boolean[] supportsPrivate = 1299 new boolean[RecommendedStreamConfigurationMap.MAX_USECASE_COUNT]; 1300 try { 1301 if (configurations != null) { 1302 parseRecommendedConfigurations(configurations, fullMap, /*isDepth*/ false, 1303 streamConfigList, streamDurationList, streamStallList, supportsPrivate); 1304 } 1305 } catch (IllegalArgumentException e) { 1306 Log.e(TAG, "Failed parsing the recommended stream configurations!"); 1307 return null; 1308 } 1309 1310 ArrayList<ArrayList<StreamConfiguration>> depthStreamConfigList = 1311 new ArrayList<ArrayList<StreamConfiguration>>(); 1312 ArrayList<ArrayList<StreamConfigurationDuration>> depthStreamDurationList = 1313 new ArrayList<ArrayList<StreamConfigurationDuration>>(); 1314 ArrayList<ArrayList<StreamConfigurationDuration>> depthStreamStallList = 1315 new ArrayList<ArrayList<StreamConfigurationDuration>>(); 1316 if (depthConfigurations != null) { 1317 try { 1318 parseRecommendedConfigurations(depthConfigurations, fullMap, /*isDepth*/ true, 1319 depthStreamConfigList, depthStreamDurationList, depthStreamStallList, 1320 /*supportsPrivate*/ null); 1321 } catch (IllegalArgumentException e) { 1322 Log.e(TAG, "Failed parsing the recommended depth stream configurations!"); 1323 return null; 1324 } 1325 } 1326 1327 ReprocessFormatsMap inputOutputFormatsMap = getBase( 1328 CameraCharacteristics.SCALER_AVAILABLE_RECOMMENDED_INPUT_OUTPUT_FORMATS_MAP); 1329 HighSpeedVideoConfiguration[] highSpeedVideoConfigurations = getBase( 1330 CameraCharacteristics.CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS); 1331 boolean listHighResolution = isBurstSupported(); 1332 recommendedConfigurations.ensureCapacity( 1333 RecommendedStreamConfigurationMap.MAX_USECASE_COUNT); 1334 for (int i = 0; i < RecommendedStreamConfigurationMap.MAX_USECASE_COUNT; i++) { 1335 StreamConfigurationData scData = new StreamConfigurationData(); 1336 if (configurations != null) { 1337 initializeStreamConfigurationData(streamConfigList.get(i), 1338 streamDurationList.get(i), streamStallList.get(i), 
scData); 1339 } 1340 1341 StreamConfigurationData depthScData = new StreamConfigurationData(); 1342 if (depthConfigurations != null) { 1343 initializeStreamConfigurationData(depthStreamConfigList.get(i), 1344 depthStreamDurationList.get(i), depthStreamStallList.get(i), depthScData); 1345 } 1346 1347 if ((scData.streamConfigurationArray == null || 1348 scData.streamConfigurationArray.length == 0) && 1349 (depthScData.streamConfigurationArray == null || 1350 depthScData.streamConfigurationArray.length == 0)) { 1351 recommendedConfigurations.add(null); 1352 continue; 1353 } 1354 1355 // Dynamic depth streams involve alot of SW processing and currently cannot be 1356 // recommended. 1357 StreamConfigurationMap map = null; 1358 switch (i) { 1359 case RecommendedStreamConfigurationMap.USECASE_PREVIEW: 1360 case RecommendedStreamConfigurationMap.USECASE_RAW: 1361 case RecommendedStreamConfigurationMap.USECASE_LOW_LATENCY_SNAPSHOT: 1362 case RecommendedStreamConfigurationMap.USECASE_VIDEO_SNAPSHOT: 1363 map = new StreamConfigurationMap(scData.streamConfigurationArray, 1364 scData.minDurationArray, scData.stallDurationArray, 1365 /*depthconfiguration*/ null, /*depthminduration*/ null, 1366 /*depthstallduration*/ null, 1367 /*dynamicDepthConfigurations*/ null, 1368 /*dynamicDepthMinFrameDurations*/ null, 1369 /*dynamicDepthStallDurations*/ null, 1370 /*heicconfiguration*/ null, 1371 /*heicminduration*/ null, 1372 /*heicstallduration*/ null, 1373 /*jpegRconfiguration*/ null, 1374 /*jpegRminduration*/ null, 1375 /*jpegRstallduration*/ null, 1376 /*highspeedvideoconfigurations*/ null, 1377 /*inputoutputformatsmap*/ null, listHighResolution, supportsPrivate[i]); 1378 break; 1379 case RecommendedStreamConfigurationMap.USECASE_RECORD: 1380 map = new StreamConfigurationMap(scData.streamConfigurationArray, 1381 scData.minDurationArray, scData.stallDurationArray, 1382 /*depthconfiguration*/ null, /*depthminduration*/ null, 1383 /*depthstallduration*/ null, 1384 /*dynamicDepthConfigurations*/ null, 1385 /*dynamicDepthMinFrameDurations*/ null, 1386 /*dynamicDepthStallDurations*/ null, 1387 /*heicconfiguration*/ null, 1388 /*heicminduration*/ null, 1389 /*heicstallduration*/ null, 1390 /*jpegRconfiguration*/ null, 1391 /*jpegRminduration*/ null, 1392 /*jpegRstallduration*/ null, 1393 highSpeedVideoConfigurations, 1394 /*inputoutputformatsmap*/ null, listHighResolution, supportsPrivate[i]); 1395 break; 1396 case RecommendedStreamConfigurationMap.USECASE_ZSL: 1397 map = new StreamConfigurationMap(scData.streamConfigurationArray, 1398 scData.minDurationArray, scData.stallDurationArray, 1399 depthScData.streamConfigurationArray, depthScData.minDurationArray, 1400 depthScData.stallDurationArray, 1401 /*dynamicDepthConfigurations*/ null, 1402 /*dynamicDepthMinFrameDurations*/ null, 1403 /*dynamicDepthStallDurations*/ null, 1404 /*heicconfiguration*/ null, 1405 /*heicminduration*/ null, 1406 /*heicstallduration*/ null, 1407 /*jpegRconfiguration*/ null, 1408 /*jpegRminduration*/ null, 1409 /*jpegRstallduration*/ null, 1410 /*highSpeedVideoConfigurations*/ null, 1411 inputOutputFormatsMap, listHighResolution, supportsPrivate[i]); 1412 break; 1413 default: 1414 map = new StreamConfigurationMap(scData.streamConfigurationArray, 1415 scData.minDurationArray, scData.stallDurationArray, 1416 depthScData.streamConfigurationArray, depthScData.minDurationArray, 1417 depthScData.stallDurationArray, 1418 /*dynamicDepthConfigurations*/ null, 1419 /*dynamicDepthMinFrameDurations*/ null, 1420 /*dynamicDepthStallDurations*/ null, 1421 
/*heicconfiguration*/ null, 1422 /*heicminduration*/ null, 1423 /*heicstallduration*/ null, 1424 /*jpegRconfiguration*/ null, 1425 /*jpegRminduration*/ null, 1426 /*jpegRstallduration*/ null, 1427 /*highSpeedVideoConfigurations*/ null, 1428 /*inputOutputFormatsMap*/ null, listHighResolution, supportsPrivate[i]); 1429 } 1430 1431 recommendedConfigurations.add(new RecommendedStreamConfigurationMap(map, /*usecase*/i, 1432 supportsPrivate[i])); 1433 } 1434 1435 return recommendedConfigurations; 1436 } 1437 isCapabilitySupported(int capabilityRequested)1438 private boolean isCapabilitySupported(int capabilityRequested) { 1439 boolean ret = false; 1440 1441 int[] capabilities = getBase(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES); 1442 for (int capability : capabilities) { 1443 if (capabilityRequested == capability) { 1444 ret = true; 1445 break; 1446 } 1447 } 1448 1449 return ret; 1450 } 1451 1452 /** 1453 * @hide 1454 */ isUltraHighResolutionSensor()1455 public boolean isUltraHighResolutionSensor() { 1456 return isCapabilitySupported( 1457 CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR); 1458 1459 } isBurstSupported()1460 private boolean isBurstSupported() { 1461 return isCapabilitySupported( 1462 CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE); 1463 } 1464 isPreviewStabilizationSupported()1465 private boolean isPreviewStabilizationSupported() { 1466 boolean ret = false; 1467 1468 int[] videoStabilizationModes = 1469 getBase(CameraCharacteristics.CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES); 1470 if (videoStabilizationModes == null) { 1471 return false; 1472 } 1473 for (int mode : videoStabilizationModes) { 1474 if (mode == CameraMetadata.CONTROL_VIDEO_STABILIZATION_MODE_PREVIEW_STABILIZATION) { 1475 ret = true; 1476 break; 1477 } 1478 } 1479 1480 return ret; 1481 } 1482 isCroppedRawSupported()1483 private boolean isCroppedRawSupported() { 1484 boolean ret = false; 1485 1486 long[] streamUseCases = 1487 getBase(CameraCharacteristics.SCALER_AVAILABLE_STREAM_USE_CASES); 1488 if (streamUseCases == null) { 1489 return false; 1490 } 1491 for (long useCase : streamUseCases) { 1492 if (useCase == CameraMetadata.SCALER_AVAILABLE_STREAM_USE_CASES_CROPPED_RAW) { 1493 return true; 1494 } 1495 } 1496 1497 return ret; 1498 } 1499 getMandatoryStreamCombinationsHelper( int mandatoryStreamsType)1500 private MandatoryStreamCombination[] getMandatoryStreamCombinationsHelper( 1501 int mandatoryStreamsType) { 1502 int[] capabilities = getBase(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES); 1503 ArrayList<Integer> caps = new ArrayList<Integer>(); 1504 caps.ensureCapacity(capabilities.length); 1505 for (int c : capabilities) { 1506 caps.add(new Integer(c)); 1507 } 1508 int hwLevel = getBase(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL); 1509 MandatoryStreamCombination.Builder build = new MandatoryStreamCombination.Builder( 1510 mCameraId, hwLevel, mDisplaySize, caps, getStreamConfigurationMap(), 1511 getStreamConfigurationMapMaximumResolution(), isPreviewStabilizationSupported(), 1512 isCroppedRawSupported()); 1513 1514 List<MandatoryStreamCombination> combs = null; 1515 switch (mandatoryStreamsType) { 1516 case MANDATORY_STREAM_CONFIGURATIONS_CONCURRENT: 1517 combs = build.getAvailableMandatoryConcurrentStreamCombinations(); 1518 break; 1519 case MANDATORY_STREAM_CONFIGURATIONS_MAX_RESOLUTION: 1520 combs = build.getAvailableMandatoryMaximumResolutionStreamCombinations(); 1521 break; 1522 case MANDATORY_STREAM_CONFIGURATIONS_10BIT: 1523 
combs = build.getAvailableMandatory10BitStreamCombinations(); 1524 break; 1525 case MANDATORY_STREAM_CONFIGURATIONS_USE_CASE: 1526 combs = build.getAvailableMandatoryStreamUseCaseCombinations(); 1527 break; 1528 case MANDATORY_STREAM_CONFIGURATIONS_PREVIEW_STABILIZATION: 1529 combs = build.getAvailableMandatoryPreviewStabilizedStreamCombinations(); 1530 break; 1531 default: 1532 combs = build.getAvailableMandatoryStreamCombinations(); 1533 } 1534 if ((combs != null) && (!combs.isEmpty())) { 1535 MandatoryStreamCombination[] combArray = new MandatoryStreamCombination[combs.size()]; 1536 combArray = combs.toArray(combArray); 1537 return combArray; 1538 } 1539 return null; 1540 } 1541 getMandatory10BitStreamCombinations()1542 private MandatoryStreamCombination[] getMandatory10BitStreamCombinations() { 1543 return getMandatoryStreamCombinationsHelper(MANDATORY_STREAM_CONFIGURATIONS_10BIT); 1544 } 1545 getMandatoryConcurrentStreamCombinations()1546 private MandatoryStreamCombination[] getMandatoryConcurrentStreamCombinations() { 1547 if (!mHasMandatoryConcurrentStreams) { 1548 return null; 1549 } 1550 return getMandatoryStreamCombinationsHelper(MANDATORY_STREAM_CONFIGURATIONS_CONCURRENT); 1551 } 1552 getMandatoryMaximumResolutionStreamCombinations()1553 private MandatoryStreamCombination[] getMandatoryMaximumResolutionStreamCombinations() { 1554 if (!isUltraHighResolutionSensor()) { 1555 return null; 1556 } 1557 return getMandatoryStreamCombinationsHelper(MANDATORY_STREAM_CONFIGURATIONS_MAX_RESOLUTION); 1558 } 1559 getMandatoryStreamCombinations()1560 private MandatoryStreamCombination[] getMandatoryStreamCombinations() { 1561 return getMandatoryStreamCombinationsHelper(MANDATORY_STREAM_CONFIGURATIONS_DEFAULT); 1562 } 1563 getMandatoryUseCaseStreamCombinations()1564 private MandatoryStreamCombination[] getMandatoryUseCaseStreamCombinations() { 1565 return getMandatoryStreamCombinationsHelper(MANDATORY_STREAM_CONFIGURATIONS_USE_CASE); 1566 } 1567 getMandatoryPreviewStabilizationStreamCombinations()1568 private MandatoryStreamCombination[] getMandatoryPreviewStabilizationStreamCombinations() { 1569 return getMandatoryStreamCombinationsHelper( 1570 MANDATORY_STREAM_CONFIGURATIONS_PREVIEW_STABILIZATION); 1571 } 1572 getStreamConfigurationMap()1573 private StreamConfigurationMap getStreamConfigurationMap() { 1574 StreamConfiguration[] configurations = getBase( 1575 CameraCharacteristics.SCALER_AVAILABLE_STREAM_CONFIGURATIONS); 1576 StreamConfigurationDuration[] minFrameDurations = getBase( 1577 CameraCharacteristics.SCALER_AVAILABLE_MIN_FRAME_DURATIONS); 1578 StreamConfigurationDuration[] stallDurations = getBase( 1579 CameraCharacteristics.SCALER_AVAILABLE_STALL_DURATIONS); 1580 StreamConfiguration[] depthConfigurations = getBase( 1581 CameraCharacteristics.DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS); 1582 StreamConfigurationDuration[] depthMinFrameDurations = getBase( 1583 CameraCharacteristics.DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS); 1584 StreamConfigurationDuration[] depthStallDurations = getBase( 1585 CameraCharacteristics.DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS); 1586 StreamConfiguration[] dynamicDepthConfigurations = getBase( 1587 CameraCharacteristics.DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS); 1588 StreamConfigurationDuration[] dynamicDepthMinFrameDurations = getBase( 1589 CameraCharacteristics.DEPTH_AVAILABLE_DYNAMIC_DEPTH_MIN_FRAME_DURATIONS); 1590 StreamConfigurationDuration[] dynamicDepthStallDurations = getBase( 1591 
CameraCharacteristics.DEPTH_AVAILABLE_DYNAMIC_DEPTH_STALL_DURATIONS); 1592 StreamConfiguration[] heicConfigurations = getBase( 1593 CameraCharacteristics.HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS); 1594 StreamConfigurationDuration[] heicMinFrameDurations = getBase( 1595 CameraCharacteristics.HEIC_AVAILABLE_HEIC_MIN_FRAME_DURATIONS); 1596 StreamConfigurationDuration[] heicStallDurations = getBase( 1597 CameraCharacteristics.HEIC_AVAILABLE_HEIC_STALL_DURATIONS); 1598 StreamConfiguration[] jpegRConfigurations = getBase( 1599 CameraCharacteristics.JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS); 1600 StreamConfigurationDuration[] jpegRMinFrameDurations = getBase( 1601 CameraCharacteristics.JPEGR_AVAILABLE_JPEG_R_MIN_FRAME_DURATIONS); 1602 StreamConfigurationDuration[] jpegRStallDurations = getBase( 1603 CameraCharacteristics.JPEGR_AVAILABLE_JPEG_R_STALL_DURATIONS); 1604 HighSpeedVideoConfiguration[] highSpeedVideoConfigurations = getBase( 1605 CameraCharacteristics.CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS); 1606 ReprocessFormatsMap inputOutputFormatsMap = getBase( 1607 CameraCharacteristics.SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP); 1608 boolean listHighResolution = isBurstSupported(); 1609 return new StreamConfigurationMap( 1610 configurations, minFrameDurations, stallDurations, 1611 depthConfigurations, depthMinFrameDurations, depthStallDurations, 1612 dynamicDepthConfigurations, dynamicDepthMinFrameDurations, 1613 dynamicDepthStallDurations, heicConfigurations, 1614 heicMinFrameDurations, heicStallDurations, 1615 jpegRConfigurations, jpegRMinFrameDurations, jpegRStallDurations, 1616 highSpeedVideoConfigurations, inputOutputFormatsMap, 1617 listHighResolution); 1618 } 1619 getStreamConfigurationMapMaximumResolution()1620 private StreamConfigurationMap getStreamConfigurationMapMaximumResolution() { 1621 StreamConfiguration[] configurations = getBase( 1622 CameraCharacteristics.SCALER_AVAILABLE_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION); 1623 StreamConfigurationDuration[] minFrameDurations = getBase( 1624 CameraCharacteristics.SCALER_AVAILABLE_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION); 1625 StreamConfigurationDuration[] stallDurations = getBase( 1626 CameraCharacteristics.SCALER_AVAILABLE_STALL_DURATIONS_MAXIMUM_RESOLUTION); 1627 // If at least these keys haven't been advertised, there cannot be a meaningful max 1628 // resolution StreamConfigurationMap 1629 if (configurations == null || 1630 minFrameDurations == null || 1631 stallDurations == null) { 1632 return null; 1633 } 1634 1635 StreamConfiguration[] depthConfigurations = getBase( 1636 CameraCharacteristics.DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION); 1637 StreamConfigurationDuration[] depthMinFrameDurations = getBase( 1638 CameraCharacteristics.DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION); 1639 StreamConfigurationDuration[] depthStallDurations = getBase( 1640 CameraCharacteristics.DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS_MAXIMUM_RESOLUTION); 1641 StreamConfiguration[] dynamicDepthConfigurations = getBase( 1642 CameraCharacteristics.DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION); 1643 StreamConfigurationDuration[] dynamicDepthMinFrameDurations = getBase( 1644 CameraCharacteristics.DEPTH_AVAILABLE_DYNAMIC_DEPTH_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION); 1645 StreamConfigurationDuration[] dynamicDepthStallDurations = getBase( 1646 CameraCharacteristics.DEPTH_AVAILABLE_DYNAMIC_DEPTH_STALL_DURATIONS_MAXIMUM_RESOLUTION); 1647 StreamConfiguration[] heicConfigurations = getBase(
1648 CameraCharacteristics.HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION); 1649 StreamConfigurationDuration[] heicMinFrameDurations = getBase( 1650 CameraCharacteristics.HEIC_AVAILABLE_HEIC_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION); 1651 StreamConfigurationDuration[] heicStallDurations = getBase( 1652 CameraCharacteristics.HEIC_AVAILABLE_HEIC_STALL_DURATIONS_MAXIMUM_RESOLUTION); 1653 StreamConfiguration[] jpegRConfigurations = getBase( 1654 CameraCharacteristics.JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION); 1655 StreamConfigurationDuration[] jpegRMinFrameDurations = getBase( 1656 CameraCharacteristics.JPEGR_AVAILABLE_JPEG_R_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION); 1657 StreamConfigurationDuration[] jpegRStallDurations = getBase( 1658 CameraCharacteristics.JPEGR_AVAILABLE_JPEG_R_STALL_DURATIONS_MAXIMUM_RESOLUTION); 1659 HighSpeedVideoConfiguration[] highSpeedVideoConfigurations = getBase( 1660 CameraCharacteristics.CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS_MAXIMUM_RESOLUTION); 1661 ReprocessFormatsMap inputOutputFormatsMap = getBase( 1662 CameraCharacteristics.SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP_MAXIMUM_RESOLUTION); 1663 // TODO: Is this correct, burst capability shouldn't necessarily correspond to max res mode 1664 boolean listHighResolution = isBurstSupported(); 1665 return new StreamConfigurationMap( 1666 configurations, minFrameDurations, stallDurations, 1667 depthConfigurations, depthMinFrameDurations, depthStallDurations, 1668 dynamicDepthConfigurations, dynamicDepthMinFrameDurations, 1669 dynamicDepthStallDurations, heicConfigurations, 1670 heicMinFrameDurations, heicStallDurations, 1671 jpegRConfigurations, jpegRMinFrameDurations, jpegRStallDurations, 1672 highSpeedVideoConfigurations, inputOutputFormatsMap, 1673 listHighResolution, false); 1674 } 1675 getMaxRegions(Key<T> key)1676 private <T> Integer getMaxRegions(Key<T> key) { 1677 final int AE = 0; 1678 final int AWB = 1; 1679 final int AF = 2; 1680 1681 // The order of the elements is: (AE, AWB, AF) 1682 int[] maxRegions = getBase(CameraCharacteristics.CONTROL_MAX_REGIONS); 1683 1684 if (maxRegions == null) { 1685 return null; 1686 } 1687 1688 if (key.equals(CameraCharacteristics.CONTROL_MAX_REGIONS_AE)) { 1689 return maxRegions[AE]; 1690 } else if (key.equals(CameraCharacteristics.CONTROL_MAX_REGIONS_AWB)) { 1691 return maxRegions[AWB]; 1692 } else if (key.equals(CameraCharacteristics.CONTROL_MAX_REGIONS_AF)) { 1693 return maxRegions[AF]; 1694 } else { 1695 throw new AssertionError("Invalid key " + key); 1696 } 1697 } 1698 getMaxNumOutputs(Key<T> key)1699 private <T> Integer getMaxNumOutputs(Key<T> key) { 1700 final int RAW = 0; 1701 final int PROC = 1; 1702 final int PROC_STALLING = 2; 1703 1704 // The order of the elements is: (raw, proc+nonstalling, proc+stalling) 1705 int[] maxNumOutputs = getBase(CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_STREAMS); 1706 1707 if (maxNumOutputs == null) { 1708 return null; 1709 } 1710 1711 if (key.equals(CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_RAW)) { 1712 return maxNumOutputs[RAW]; 1713 } else if (key.equals(CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_PROC)) { 1714 return maxNumOutputs[PROC]; 1715 } else if (key.equals(CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_PROC_STALLING)) { 1716 return maxNumOutputs[PROC_STALLING]; 1717 } else { 1718 throw new AssertionError("Invalid key " + key); 1719 } 1720 } 1721 getTonemapCurve()1722 private <T> TonemapCurve getTonemapCurve() { 1723 float[] red = getBase(CaptureRequest.TONEMAP_CURVE_RED); 
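                // Editor's note (descriptive comment, not upstream code): each per-channel array
                // read here is a flat list of (input, output) control points, e.g. an identity
                // curve would be { 0.0f, 0.0f, 1.0f, 1.0f }; the three channels are wrapped into
                // a single TonemapCurve below.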
1724 float[] green = getBase(CaptureRequest.TONEMAP_CURVE_GREEN); 1725 float[] blue = getBase(CaptureRequest.TONEMAP_CURVE_BLUE); 1726 1727 if (areValuesAllNull(red, green, blue)) { 1728 return null; 1729 } 1730 1731 if (red == null || green == null || blue == null) { 1732 Log.w(TAG, "getTonemapCurve - missing tone curve components"); 1733 return null; 1734 } 1735 TonemapCurve tc = new TonemapCurve(red, green, blue); 1736 return tc; 1737 } 1738 getOisSamples()1739 private OisSample[] getOisSamples() { 1740 long[] timestamps = getBase(CaptureResult.STATISTICS_OIS_TIMESTAMPS); 1741 float[] xShifts = getBase(CaptureResult.STATISTICS_OIS_X_SHIFTS); 1742 float[] yShifts = getBase(CaptureResult.STATISTICS_OIS_Y_SHIFTS); 1743 1744 if (timestamps == null) { 1745 if (xShifts != null) { 1746 throw new AssertionError("timestamps is null but xShifts is not"); 1747 } 1748 1749 if (yShifts != null) { 1750 throw new AssertionError("timestamps is null but yShifts is not"); 1751 } 1752 1753 return null; 1754 } 1755 1756 if (xShifts == null) { 1757 throw new AssertionError("timestamps is not null but xShifts is"); 1758 } 1759 1760 if (yShifts == null) { 1761 throw new AssertionError("timestamps is not null but yShifts is"); 1762 } 1763 1764 if (xShifts.length != timestamps.length) { 1765 throw new AssertionError(String.format( 1766 "timestamps has %d entries but xShifts has %d", timestamps.length, 1767 xShifts.length)); 1768 } 1769 1770 if (yShifts.length != timestamps.length) { 1771 throw new AssertionError(String.format( 1772 "timestamps has %d entries but yShifts has %d", timestamps.length, 1773 yShifts.length)); 1774 } 1775 1776 OisSample[] samples = new OisSample[timestamps.length]; 1777 for (int i = 0; i < timestamps.length; i++) { 1778 samples[i] = new OisSample(timestamps[i], xShifts[i], yShifts[i]); 1779 } 1780 return samples; 1781 } 1782 getExtendedSceneModeCapabilities()1783 private Capability[] getExtendedSceneModeCapabilities() { 1784 int[] maxSizes = 1785 getBase(CameraCharacteristics.CONTROL_AVAILABLE_EXTENDED_SCENE_MODE_MAX_SIZES); 1786 float[] zoomRanges = getBase( 1787 CameraCharacteristics.CONTROL_AVAILABLE_EXTENDED_SCENE_MODE_ZOOM_RATIO_RANGES); 1788 Range<Float> zoomRange = getBase(CameraCharacteristics.CONTROL_ZOOM_RATIO_RANGE); 1789 float maxDigitalZoom = getBase(CameraCharacteristics.SCALER_AVAILABLE_MAX_DIGITAL_ZOOM); 1790 1791 if (maxSizes == null) { 1792 return null; 1793 } 1794 if (maxSizes.length % 3 != 0) { 1795 throw new AssertionError("availableExtendedSceneModeMaxSizes must be tuples of " 1796 + "[mode, width, height]"); 1797 } 1798 int numExtendedSceneModes = maxSizes.length / 3; 1799 int numExtendedSceneModeZoomRanges = 0; 1800 if (zoomRanges != null) { 1801 if (zoomRanges.length % 2 != 0) { 1802 throw new AssertionError("availableExtendedSceneModeZoomRanges must be tuples of " 1803 + "[minZoom, maxZoom]"); 1804 } 1805 numExtendedSceneModeZoomRanges = zoomRanges.length / 2; 1806 if (numExtendedSceneModes - numExtendedSceneModeZoomRanges != 1) { 1807 throw new AssertionError("Number of extended scene mode zoom ranges must be 1 " 1808 + "less than number of supported modes"); 1809 } 1810 } 1811 1812 float modeOffMinZoomRatio = 1.0f; 1813 float modeOffMaxZoomRatio = maxDigitalZoom; 1814 if (zoomRange != null) { 1815 modeOffMinZoomRatio = zoomRange.getLower(); 1816 modeOffMaxZoomRatio = zoomRange.getUpper(); 1817 } 1818 1819 Capability[] capabilities = new Capability[numExtendedSceneModes]; 1820 for (int i = 0, j = 0; i < numExtendedSceneModes; i++) { 1821 int mode = 
maxSizes[3 * i]; 1822 int width = maxSizes[3 * i + 1]; 1823 int height = maxSizes[3 * i + 2]; 1824 if (mode != CameraMetadata.CONTROL_EXTENDED_SCENE_MODE_DISABLED 1825 && j < numExtendedSceneModeZoomRanges) { 1826 capabilities[i] = new Capability(mode, new Size(width, height), 1827 new Range<Float>(zoomRanges[2 * j], zoomRanges[2 * j + 1])); 1828 j++; 1829 } else { 1830 capabilities[i] = new Capability(mode, new Size(width, height), 1831 new Range<Float>(modeOffMinZoomRatio, modeOffMaxZoomRatio)); 1832 } 1833 } 1834 1835 return capabilities; 1836 } 1837 setBase(CameraCharacteristics.Key<T> key, T value)1838 private <T> void setBase(CameraCharacteristics.Key<T> key, T value) { 1839 setBase(key.getNativeKey(), value); 1840 } 1841 setBase(CaptureResult.Key<T> key, T value)1842 private <T> void setBase(CaptureResult.Key<T> key, T value) { 1843 setBase(key.getNativeKey(), value); 1844 } 1845 setBase(CaptureRequest.Key<T> key, T value)1846 private <T> void setBase(CaptureRequest.Key<T> key, T value) { 1847 setBase(key.getNativeKey(), value); 1848 } 1849 setBase(Key<T> key, T value)1850 private <T> void setBase(Key<T> key, T value) { 1851 int tag; 1852 if (key.hasTag()) { 1853 tag = key.getTag(); 1854 } else { 1855 tag = nativeGetTagFromKeyLocal(mMetadataPtr, key.getName()); 1856 key.cacheTag(tag); 1857 } 1858 if (value == null) { 1859 // Erase the entry 1860 writeValues(tag, /*src*/null); 1861 return; 1862 } // else update the entry to a new value 1863 1864 int nativeType = nativeGetTypeFromTagLocal(mMetadataPtr, tag); 1865 Marshaler<T> marshaler = getMarshalerForKey(key, nativeType); 1866 int size = marshaler.calculateMarshalSize(value); 1867 1868 // TODO: Optimization. Cache the byte[] and reuse if the size is big enough. 1869 byte[] values = new byte[size]; 1870 1871 ByteBuffer buffer = ByteBuffer.wrap(values).order(ByteOrder.nativeOrder()); 1872 marshaler.marshal(value, buffer); 1873 1874 writeValues(tag, values); 1875 } 1876 1877 // Use Command pattern here to avoid lots of expensive if/equals checks in get for overridden 1878 // metadata. 
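    /*
     * Editor's sketch (not part of the upstream class): the sSetCommandMap field declared below
     * keys overridden metadata fields to SetCommand instances, so a set() call can dispatch with
     * a single map lookup instead of a chain of equals() checks. A simplified, assumed shape of
     * that dispatch path; the real set() elsewhere in this class may differ in detail:
     *
     *     private <T> void set(Key<T> key, T value) {
     *         SetCommand command = sSetCommandMap.get(key);
     *         if (command != null) {
     *             command.setValue(this, value);   // synthesized/overridden keys
     *             return;
     *         }
     *         setBase(key, value);                 // generic marshaling path
     *     }
     */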
1879 private static final HashMap<Key<?>, SetCommand> sSetCommandMap = 1880 new HashMap<Key<?>, SetCommand>(); 1881 static { CameraCharacteristics.SCALER_AVAILABLE_FORMATS.getNativeKey()1882 sSetCommandMap.put(CameraCharacteristics.SCALER_AVAILABLE_FORMATS.getNativeKey(), 1883 new SetCommand() { 1884 @Override 1885 public <T> void setValue(CameraMetadataNative metadata, T value) { 1886 metadata.setAvailableFormats((int[]) value); 1887 } 1888 }); CaptureResult.STATISTICS_FACE_RECTANGLES.getNativeKey()1889 sSetCommandMap.put(CaptureResult.STATISTICS_FACE_RECTANGLES.getNativeKey(), 1890 new SetCommand() { 1891 @Override 1892 public <T> void setValue(CameraMetadataNative metadata, T value) { 1893 metadata.setFaceRectangles((Rect[]) value); 1894 } 1895 }); CaptureResult.STATISTICS_FACES.getNativeKey()1896 sSetCommandMap.put(CaptureResult.STATISTICS_FACES.getNativeKey(), 1897 new SetCommand() { 1898 @Override 1899 public <T> void setValue(CameraMetadataNative metadata, T value) { 1900 metadata.setFaces((Face[])value); 1901 } 1902 }); CaptureRequest.TONEMAP_CURVE.getNativeKey()1903 sSetCommandMap.put(CaptureRequest.TONEMAP_CURVE.getNativeKey(), new SetCommand() { 1904 @Override 1905 public <T> void setValue(CameraMetadataNative metadata, T value) { 1906 metadata.setTonemapCurve((TonemapCurve) value); 1907 } 1908 }); CaptureResult.JPEG_GPS_LOCATION.getNativeKey()1909 sSetCommandMap.put(CaptureResult.JPEG_GPS_LOCATION.getNativeKey(), new SetCommand() { 1910 @Override 1911 public <T> void setValue(CameraMetadataNative metadata, T value) { 1912 metadata.setGpsLocation((Location) value); 1913 } 1914 }); CaptureRequest.SCALER_CROP_REGION.getNativeKey()1915 sSetCommandMap.put(CaptureRequest.SCALER_CROP_REGION.getNativeKey(), 1916 new SetCommand() { 1917 @Override 1918 public <T> void setValue(CameraMetadataNative metadata, T value) { 1919 metadata.setScalerCropRegion((Rect) value); 1920 } 1921 }); CaptureRequest.CONTROL_AWB_REGIONS.getNativeKey()1922 sSetCommandMap.put(CaptureRequest.CONTROL_AWB_REGIONS.getNativeKey(), 1923 new SetCommand() { 1924 @Override 1925 public <T> void setValue(CameraMetadataNative metadata, T value) { 1926 metadata.setAWBRegions(value); 1927 } 1928 }); CaptureRequest.CONTROL_AF_REGIONS.getNativeKey()1929 sSetCommandMap.put(CaptureRequest.CONTROL_AF_REGIONS.getNativeKey(), 1930 new SetCommand() { 1931 @Override 1932 public <T> void setValue(CameraMetadataNative metadata, T value) { 1933 metadata.setAFRegions(value); 1934 } 1935 }); CaptureRequest.CONTROL_AE_REGIONS.getNativeKey()1936 sSetCommandMap.put(CaptureRequest.CONTROL_AE_REGIONS.getNativeKey(), 1937 new SetCommand() { 1938 @Override 1939 public <T> void setValue(CameraMetadataNative metadata, T value) { 1940 metadata.setAERegions(value); 1941 } 1942 }); CaptureResult.STATISTICS_LENS_SHADING_CORRECTION_MAP.getNativeKey()1943 sSetCommandMap.put(CaptureResult.STATISTICS_LENS_SHADING_CORRECTION_MAP.getNativeKey(), 1944 new SetCommand() { 1945 @Override 1946 public <T> void setValue(CameraMetadataNative metadata, T value) { 1947 metadata.setLensShadingMap((LensShadingMap) value); 1948 } 1949 }); 1950 } 1951 setAvailableFormats(int[] value)1952 private boolean setAvailableFormats(int[] value) { 1953 int[] availableFormat = value; 1954 if (value == null) { 1955 // Let setBase() to handle the null value case. 
1956 return false; 1957 } 1958 1959 int[] newValues = new int[availableFormat.length]; 1960 for (int i = 0; i < availableFormat.length; i++) { 1961 newValues[i] = availableFormat[i]; 1962 if (availableFormat[i] == ImageFormat.JPEG) { 1963 newValues[i] = NATIVE_JPEG_FORMAT; 1964 } 1965 } 1966 1967 setBase(CameraCharacteristics.SCALER_AVAILABLE_FORMATS, newValues); 1968 return true; 1969 } 1970 1971 /** 1972 * Convert Face Rectangles from managed side to native side as they have different definitions. 1973 * <p> 1974 * Managed side face rectangles are defined as: left, top, width, height. 1975 * Native side face rectangles are defined as: left, top, right, bottom. 1976 * The input face rectangles need to be converted to the native side definition when set is called. 1977 * </p> 1978 * 1979 * @param faceRects Input face rectangles. 1980 * @return true if face rectangles can be set successfully. Otherwise, let the caller 1981 * (setBase) handle it appropriately. 1982 */ setFaceRectangles(Rect[] faceRects)1983 private boolean setFaceRectangles(Rect[] faceRects) { 1984 if (faceRects == null) { 1985 return false; 1986 } 1987 1988 Rect[] newFaceRects = new Rect[faceRects.length]; 1989 for (int i = 0; i < newFaceRects.length; i++) { 1990 newFaceRects[i] = new Rect( 1991 faceRects[i].left, 1992 faceRects[i].top, 1993 faceRects[i].right + faceRects[i].left, 1994 faceRects[i].bottom + faceRects[i].top); 1995 } 1996 1997 setBase(CaptureResult.STATISTICS_FACE_RECTANGLES, newFaceRects); 1998 return true; 1999 } 2000 setTonemapCurve(TonemapCurve tc)2001 private <T> boolean setTonemapCurve(TonemapCurve tc) { 2002 if (tc == null) { 2003 return false; 2004 } 2005 2006 float[][] curve = new float[3][]; 2007 for (int i = TonemapCurve.CHANNEL_RED; i <= TonemapCurve.CHANNEL_BLUE; i++) { 2008 int pointCount = tc.getPointCount(i); 2009 curve[i] = new float[pointCount * TonemapCurve.POINT_SIZE]; 2010 tc.copyColorCurve(i, curve[i], 0); 2011 } 2012 setBase(CaptureRequest.TONEMAP_CURVE_RED, curve[0]); 2013 setBase(CaptureRequest.TONEMAP_CURVE_GREEN, curve[1]); 2014 setBase(CaptureRequest.TONEMAP_CURVE_BLUE, curve[2]); 2015 2016 return true; 2017 } 2018 setScalerCropRegion(Rect cropRegion)2019 private <T> boolean setScalerCropRegion(Rect cropRegion) { 2020 if (cropRegion == null) { 2021 return false; 2022 } 2023 setBase(CaptureRequest.SCALER_CROP_REGION_SET, true); 2024 setBase(CaptureRequest.SCALER_CROP_REGION, cropRegion); 2025 return true; 2026 } 2027 setAFRegions(T afRegions)2028 private <T> boolean setAFRegions(T afRegions) { 2029 if (afRegions == null) { 2030 return false; 2031 } 2032 setBase(CaptureRequest.CONTROL_AF_REGIONS_SET, true); 2033 // The cast to CaptureRequest.Key is needed since java does not support template 2034 // specialization and we need to route this method to 2035 // setBase(CaptureRequest.Key<T> key, T value) 2036 setBase((CaptureRequest.Key)CaptureRequest.CONTROL_AF_REGIONS, afRegions); 2037 return true; 2038 } 2039 setAERegions(T aeRegions)2040 private <T> boolean setAERegions(T aeRegions) { 2041 if (aeRegions == null) { 2042 return false; 2043 } 2044 setBase(CaptureRequest.CONTROL_AE_REGIONS_SET, true); 2045 setBase((CaptureRequest.Key)CaptureRequest.CONTROL_AE_REGIONS, aeRegions); 2046 return true; 2047 } 2048 setAWBRegions(T awbRegions)2049 private <T> boolean setAWBRegions(T awbRegions) { 2050 if (awbRegions == null) { 2051 return false; 2052 } 2053 setBase(CaptureRequest.CONTROL_AWB_REGIONS_SET, true); 2054 setBase((CaptureRequest.Key)CaptureRequest.CONTROL_AWB_REGIONS, awbRegions);
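        // Editor's note (descriptive comment, not upstream code): setAFRegions, setAERegions and
        // setAWBRegions all follow the same two-step pattern seen above: first flip the hidden
        // companion *_REGIONS_SET key to true so the framework knows the app supplied a value,
        // then route the payload through the generic setBase(CaptureRequest.Key, T) overload via
        // the raw-typed cast. A hypothetical caller-side sketch, assuming a CaptureRequest.Builder
        // named builder and a MeteringRectangle named region:
        //
        //     builder.set(CaptureRequest.CONTROL_AWB_REGIONS, new MeteringRectangle[] { region });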
2055 return true; 2056 } 2057 updateNativeAllocation()2058 private void updateNativeAllocation() { 2059 long currentBufferSize = nativeGetBufferSize(mMetadataPtr); 2060 2061 if (currentBufferSize != mBufferSize) { 2062 if (mBufferSize > 0) { 2063 VMRuntime.getRuntime().registerNativeFree(mBufferSize); 2064 } 2065 2066 mBufferSize = currentBufferSize; 2067 2068 if (mBufferSize > 0) { 2069 VMRuntime.getRuntime().registerNativeAllocation(mBufferSize); 2070 } 2071 } 2072 } 2073 2074 private int mCameraId = -1; 2075 private boolean mHasMandatoryConcurrentStreams = false; 2076 private Size mDisplaySize = new Size(0, 0); 2077 private long mBufferSize = 0; 2078 private MultiResolutionStreamConfigurationMap mMultiResolutionStreamConfigurationMap = null; 2079 2080 /** 2081 * Set the current camera Id. 2082 * 2083 * @param cameraId Current camera id. 2084 * 2085 * @hide 2086 */ setCameraId(int cameraId)2087 public void setCameraId(int cameraId) { 2088 mCameraId = cameraId; 2089 } 2090 2091 /** 2092 * Set whether the metadata advertises mandatory concurrent stream combinations. 2093 * 2094 * @param hasMandatoryConcurrentStreams whether the metadata advertises mandatory concurrent 2095 * streams. 2096 * 2097 * @hide 2098 */ setHasMandatoryConcurrentStreams(boolean hasMandatoryConcurrentStreams)2099 public void setHasMandatoryConcurrentStreams(boolean hasMandatoryConcurrentStreams) { 2100 mHasMandatoryConcurrentStreams = hasMandatoryConcurrentStreams; 2101 } 2102 2103 /** 2104 * Set the current display size. 2105 * 2106 * @param displaySize The current display size. 2107 * 2108 * @hide 2109 */ setDisplaySize(Size displaySize)2110 public void setDisplaySize(Size displaySize) { 2111 mDisplaySize = displaySize; 2112 } 2113 2114 /** 2115 * Set the multi-resolution stream configuration map. 2116 * 2117 * @param multiResolutionMap The multi-resolution stream configuration map. 2118 * 2119 * @hide 2120 */ setMultiResolutionStreamConfigurationMap( @onNull Map<String, StreamConfiguration[]> multiResolutionMap)2121 public void setMultiResolutionStreamConfigurationMap( 2122 @NonNull Map<String, StreamConfiguration[]> multiResolutionMap) { 2123 mMultiResolutionStreamConfigurationMap = 2124 new MultiResolutionStreamConfigurationMap(multiResolutionMap); 2125 } 2126 2127 /** 2128 * Get the multi-resolution stream configuration map. 2129 * 2130 * @return The multi-resolution stream configuration map.
2131 * 2132 * @hide 2133 */ getMultiResolutionStreamConfigurationMap()2134 public MultiResolutionStreamConfigurationMap getMultiResolutionStreamConfigurationMap() { 2135 return mMultiResolutionStreamConfigurationMap; 2136 } 2137 2138 @UnsupportedAppUsage(maxTargetSdk = Build.VERSION_CODES.R, trackingBug = 170729553) 2139 private long mMetadataPtr; // native std::shared_ptr<CameraMetadata>* 2140 2141 @FastNative nativeAllocate()2142 private static native long nativeAllocate(); 2143 @FastNative nativeAllocateCopy(long ptr)2144 private static native long nativeAllocateCopy(long ptr) 2145 throws NullPointerException; 2146 2147 2148 @FastNative nativeUpdate(long dst, long src)2149 private static native void nativeUpdate(long dst, long src); nativeWriteToParcel(Parcel dest, long ptr)2150 private static synchronized native void nativeWriteToParcel(Parcel dest, long ptr); nativeReadFromParcel(Parcel source, long ptr)2151 private static synchronized native void nativeReadFromParcel(Parcel source, long ptr); nativeSwap(long ptr, long otherPtr)2152 private static synchronized native void nativeSwap(long ptr, long otherPtr) 2153 throws NullPointerException; 2154 @FastNative nativeSetVendorId(long ptr, long vendorId)2155 private static native void nativeSetVendorId(long ptr, long vendorId); nativeClose(long ptr)2156 private static synchronized native void nativeClose(long ptr); nativeIsEmpty(long ptr)2157 private static synchronized native boolean nativeIsEmpty(long ptr); nativeGetEntryCount(long ptr)2158 private static synchronized native int nativeGetEntryCount(long ptr); nativeGetBufferSize(long ptr)2159 private static synchronized native long nativeGetBufferSize(long ptr); 2160 2161 @UnsupportedAppUsage(maxTargetSdk = Build.VERSION_CODES.R, trackingBug = 170729553) nativeReadValues(int tag, long ptr)2162 private static synchronized native byte[] nativeReadValues(int tag, long ptr); nativeWriteValues(int tag, byte[] src, long ptr)2163 private static synchronized native void nativeWriteValues(int tag, byte[] src, long ptr); nativeDump(long ptr)2164 private static synchronized native void nativeDump(long ptr) throws IOException; // dump to LOGD 2165 nativeGetAllVendorKeys(long ptr, Class keyClass)2166 private static synchronized native ArrayList nativeGetAllVendorKeys(long ptr, Class keyClass); 2167 @UnsupportedAppUsage(maxTargetSdk = Build.VERSION_CODES.R, trackingBug = 170729553) nativeGetTagFromKeyLocal(long ptr, String keyName)2168 private static synchronized native int nativeGetTagFromKeyLocal(long ptr, String keyName) 2169 throws IllegalArgumentException; 2170 @UnsupportedAppUsage(maxTargetSdk = Build.VERSION_CODES.R, trackingBug = 170729553) nativeGetTypeFromTagLocal(long ptr, int tag)2171 private static synchronized native int nativeGetTypeFromTagLocal(long ptr, int tag) 2172 throws IllegalArgumentException; 2173 @FastNative nativeGetTagFromKey(String keyName, long vendorId)2174 private static native int nativeGetTagFromKey(String keyName, long vendorId) 2175 throws IllegalArgumentException; 2176 @FastNative nativeGetTypeFromTag(int tag, long vendorId)2177 private static native int nativeGetTypeFromTag(int tag, long vendorId) 2178 throws IllegalArgumentException; 2179 2180 /** 2181 * <p>Perform a 0-copy swap of the internal metadata with another object.</p> 2182 * 2183 * <p>Useful to convert a CameraMetadata into e.g. 
a CaptureRequest.</p> 2184 * 2185 * @param other Metadata to swap with 2186 * @throws NullPointerException if other was null 2187 * @hide 2188 */ swap(CameraMetadataNative other)2189 public void swap(CameraMetadataNative other) { 2190 nativeSwap(mMetadataPtr, other.mMetadataPtr); 2191 mCameraId = other.mCameraId; 2192 mHasMandatoryConcurrentStreams = other.mHasMandatoryConcurrentStreams; 2193 mDisplaySize = other.mDisplaySize; 2194 mMultiResolutionStreamConfigurationMap = other.mMultiResolutionStreamConfigurationMap; 2195 updateNativeAllocation(); 2196 other.updateNativeAllocation(); 2197 } 2198 2199 /** 2200 * Set the native metadata vendor id. 2201 * 2202 * @hide 2203 */ setVendorId(long vendorId)2204 public void setVendorId(long vendorId) { 2205 nativeSetVendorId(mMetadataPtr, vendorId); 2206 } 2207 2208 /** 2209 * @hide 2210 */ getEntryCount()2211 public int getEntryCount() { 2212 return nativeGetEntryCount(mMetadataPtr); 2213 } 2214 2215 /** 2216 * Returns {@code true} if this metadata contains no entries. 2217 * 2218 * @hide 2219 */ isEmpty()2220 public boolean isEmpty() { 2221 return nativeIsEmpty(mMetadataPtr); 2222 } 2223 2224 2225 /** 2226 * Retrieves the pointer to the native shared_ptr<CameraMetadata> as a Java long. 2227 * 2228 * @hide 2229 */ getMetadataPtr()2230 public long getMetadataPtr() { 2231 return mMetadataPtr; 2232 } 2233 2234 /** 2235 * Return a list containing keys of the given key class for all defined vendor tags. 2236 * 2237 * @hide 2238 */ getAllVendorKeys(Class<K> keyClass)2239 public <K> ArrayList<K> getAllVendorKeys(Class<K> keyClass) { 2240 if (keyClass == null) { 2241 throw new NullPointerException(); 2242 } 2243 return (ArrayList<K>) nativeGetAllVendorKeys(mMetadataPtr, keyClass); 2244 } 2245 2246 /** 2247 * Convert a key string into the equivalent native tag. 2248 * 2249 * @throws IllegalArgumentException if the key was not recognized 2250 * @throws NullPointerException if the key was null 2251 * 2252 * @hide 2253 */ getTag(String key)2254 public static int getTag(String key) { 2255 return nativeGetTagFromKey(key, Long.MAX_VALUE); 2256 } 2257 2258 /** 2259 * Convert a key string into the equivalent native tag. 2260 * 2261 * @throws IllegalArgumentException if the key was not recognized 2262 * @throws NullPointerException if the key was null 2263 * 2264 * @hide 2265 */ getTag(String key, long vendorId)2266 public static int getTag(String key, long vendorId) { 2267 return nativeGetTagFromKey(key, vendorId); 2268 } 2269 2270 /** 2271 * Get the underlying native type for a tag. 2272 * 2273 * @param tag An integer tag, see e.g. {@link #getTag} 2274 * @param vendorId A vendor tag provider id 2275 * @return An int enum for the metadata type, see e.g. {@link #TYPE_BYTE} 2276 * 2277 * @hide 2278 */ getNativeType(int tag, long vendorId)2279 public static int getNativeType(int tag, long vendorId) { 2280 return nativeGetTypeFromTag(tag, vendorId); 2281 } 2282 2283 /** 2284 * <p>Updates the existing entry for tag with the new bytes pointed by src, erasing 2285 * the entry if src was null.</p> 2286 * 2287 * <p>An empty array can be passed in to update the entry to 0 elements.</p> 2288 * 2289 * @param tag An integer tag, see e.g. {@link #getTag} 2290 * @param src An array of bytes, or null to erase the entry 2291 * 2292 * @hide 2293 */ writeValues(int tag, byte[] src)2294 public void writeValues(int tag, byte[] src) { 2295 nativeWriteValues(tag, src, mMetadataPtr); 2296 } 2297 2298 /** 2299 * <p>Returns a byte[] of data corresponding to this tag.
Use a wrapped bytebuffer to unserialize 2300 * the data properly.</p> 2301 * 2302 * <p>An empty array can be returned to denote an existing entry with 0 elements.</p> 2303 * 2304 * @param tag An integer tag, see e.g. {@link #getTag} 2305 * 2306 * @return {@code null} if there were 0 entries for this tag, a byte[] otherwise. 2307 * @hide 2308 */ readValues(int tag)2309 public byte[] readValues(int tag) { 2310 // TODO: Optimization. Native code returns a ByteBuffer instead. 2311 return nativeReadValues(tag, mMetadataPtr); 2312 } 2313 2314 /** 2315 * Dumps the native metadata contents to logcat. 2316 * 2317 * <p>Visibility for testing/debugging only. The results will not 2318 * include any synthesized keys, as they are invisible to the native layer.</p> 2319 * 2320 * @hide 2321 */ dumpToLog()2322 public void dumpToLog() { 2323 try { 2324 nativeDump(mMetadataPtr); 2325 } catch (IOException e) { 2326 Log.wtf(TAG, "Dump logging failed", e); 2327 } 2328 } 2329 2330 @Override finalize()2331 protected void finalize() throws Throwable { 2332 try { 2333 close(); 2334 } finally { 2335 super.finalize(); 2336 } 2337 } 2338 2339 /** 2340 * Get the marshaler compatible with the {@code key} and type {@code T}. 2341 * 2342 * @throws UnsupportedOperationException 2343 * if the native/managed type combination for {@code key} is not supported 2344 */ getMarshalerForKey(Key<T> key, int nativeType)2345 private static <T> Marshaler<T> getMarshalerForKey(Key<T> key, int nativeType) { 2346 return MarshalRegistry.getMarshaler(key.getTypeReference(), 2347 nativeType); 2348 } 2349 2350 @SuppressWarnings({ "unchecked", "rawtypes" }) registerAllMarshalers()2351 private static void registerAllMarshalers() { 2352 if (DEBUG) { 2353 Log.v(TAG, "Shall register metadata marshalers"); 2354 } 2355 2356 MarshalQueryable[] queryList = new MarshalQueryable[] { 2357 // marshalers for standard types 2358 new MarshalQueryablePrimitive(), 2359 new MarshalQueryableEnum(), 2360 new MarshalQueryableArray(), 2361 2362 // pseudo standard types, that expand/narrow the native type into a managed type 2363 new MarshalQueryableBoolean(), 2364 new MarshalQueryableNativeByteToInteger(), 2365 2366 // marshalers for custom types 2367 new MarshalQueryableRect(), 2368 new MarshalQueryableSize(), 2369 new MarshalQueryableSizeF(), 2370 new MarshalQueryableString(), 2371 new MarshalQueryableReprocessFormatsMap(), 2372 new MarshalQueryableRange(), 2373 new MarshalQueryablePair(), 2374 new MarshalQueryableMeteringRectangle(), 2375 new MarshalQueryableColorSpaceTransform(), 2376 new MarshalQueryableStreamConfiguration(), 2377 new MarshalQueryableStreamConfigurationDuration(), 2378 new MarshalQueryableRggbChannelVector(), 2379 new MarshalQueryableBlackLevelPattern(), 2380 new MarshalQueryableHighSpeedVideoConfiguration(), 2381 new MarshalQueryableRecommendedStreamConfiguration(), 2382 2383 // generic parcelable marshaler (MUST BE LAST since it has lowest priority) 2384 new MarshalQueryableParcelable(), 2385 }; 2386 2387 for (MarshalQueryable query : queryList) { 2388 MarshalRegistry.registerMarshalQueryable(query); 2389 } 2390 if (DEBUG) { 2391 Log.v(TAG, "Registered metadata marshalers"); 2392 } 2393 } 2394 2395 /** Check if input arguments are all {@code null}. 2396 * 2397 * @param objs Input arguments for null check 2398 * @return {@code true} if input arguments are all {@code null}, otherwise {@code false} 2399 */ areValuesAllNull(Object... objs)2400 private static boolean areValuesAllNull(Object... 
objs) { 2401 for (Object o : objs) { 2402 if (o != null) return false; 2403 } 2404 return true; 2405 } 2406 2407 /** 2408 * Return the set of physical camera ids that this logical {@link CameraDevice} is made 2409 * up of. 2410 * 2411 * If the camera device isn't a logical camera, return an empty set. 2412 * 2413 * @hide 2414 */ getPhysicalCameraIds()2415 public Set<String> getPhysicalCameraIds() { 2416 int[] availableCapabilities = get(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES); 2417 if (availableCapabilities == null) { 2418 throw new AssertionError("android.request.availableCapabilities must be non-null " 2419 + "in the characteristics"); 2420 } 2421 2422 if (!ArrayUtils.contains(availableCapabilities, 2423 CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_LOGICAL_MULTI_CAMERA)) { 2424 return Collections.emptySet(); 2425 } 2426 byte[] physicalCamIds = get(CameraCharacteristics.LOGICAL_MULTI_CAMERA_PHYSICAL_IDS); 2427 2428 String physicalCamIdString = null; 2429 try { 2430 physicalCamIdString = new String(physicalCamIds, "UTF-8"); 2431 } catch (java.io.UnsupportedEncodingException e) { 2432 throw new AssertionError("android.logicalCam.physicalIds must be UTF-8 string"); 2433 } 2434 String[] physicalCameraIdArray = physicalCamIdString.split("\0"); 2435 2436 return Collections.unmodifiableSet( 2437 new HashSet<String>(Arrays.asList(physicalCameraIdArray))); 2438 } 2439 2440 static { registerAllMarshalers()2441 registerAllMarshalers(); 2442 } 2443 } 2444
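/*
 * Editor's sketch (not part of the upstream file): getPhysicalCameraIds() above decodes
 * LOGICAL_MULTI_CAMERA_PHYSICAL_IDS, a single UTF-8 byte array whose ids are separated by '\0'.
 * A minimal, self-contained version of the same decoding, using a hypothetical hard-coded
 * payload in place of the metadata value:
 *
 *     byte[] raw = ("0" + '\0' + "2" + '\0' + "3")
 *             .getBytes(java.nio.charset.StandardCharsets.UTF_8);
 *     String[] ids = new String(raw, java.nio.charset.StandardCharsets.UTF_8).split("\0");
 *     // ids -> { "0", "2", "3" }, which the method wraps into an unmodifiable Set<String>.
 */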