From c333c794a6859e92d2fccc2eed33bf8ed42cf348 Mon Sep 17 00:00:00 2001 From: shivasku82 Date: Fri, 25 Aug 2023 20:06:48 +0530 Subject: [PATCH] initial submit for camera hal in vendor Signed-off-by: shivasku82 --- camera/README.md | 12 + camera/common/1.0/default/Android.bp | 31 + camera/common/1.0/default/CameraMetadata.cpp | 563 ++ camera/common/1.0/default/CameraModule.cpp | 583 ++ .../common/1.0/default/CameraParameters.cpp | 549 + camera/common/1.0/default/Exif.cpp | 1115 +++ camera/common/1.0/default/HandleImporter.cpp | 457 + camera/common/1.0/default/OWNERS | 1 + .../1.0/default/VendorTagDescriptor.cpp | 538 + .../1.0/default/include/CameraMetadata.h | 230 + .../common/1.0/default/include/CameraModule.h | 101 + .../1.0/default/include/CameraParameters.h | 709 ++ camera/common/1.0/default/include/Exif.h | 256 + .../1.0/default/include/HandleImporter.h | 90 + .../1.0/default/include/VendorTagDescriptor.h | 242 + camera/common/1.0/types.hal | 413 + camera/common/README.md | 21 + camera/device/1.0/ICameraDevice.hal | 419 + camera/device/1.0/ICameraDeviceCallback.hal | 113 + .../1.0/ICameraDevicePreviewCallback.hal | 120 + camera/device/1.0/default/Android.bp | 34 + camera/device/1.0/default/CameraDevice.cpp | 1027 ++ camera/device/1.0/default/CameraDevice_1_0.h | 237 + camera/device/1.0/default/OWNERS | 1 + camera/device/1.0/types.hal | 289 + camera/device/3.2/ICameraDevice.hal | 201 + camera/device/3.2/ICameraDeviceCallback.hal | 154 + camera/device/3.2/ICameraDeviceSession.hal | 407 + camera/device/3.2/default/Android.bp | 32 + camera/device/3.2/default/CameraDevice.cpp | 317 + .../3.2/default/CameraDeviceSession.cpp | 1637 +++ .../device/3.2/default/CameraDeviceSession.h | 421 + camera/device/3.2/default/CameraDevice_3_2.h | 158 + camera/device/3.2/default/OWNERS | 1 + camera/device/3.2/default/convert.cpp | 142 + camera/device/3.2/default/include/convert.h | 66 + camera/device/3.2/types.hal | 998 ++ camera/device/3.3/ICameraDeviceSession.hal | 50 + camera/device/3.3/default/Android.bp | 35 + camera/device/3.3/default/CameraDevice.cpp | 67 + .../3.3/default/CameraDeviceSession.cpp | 117 + .../device/3.3/default/CameraDeviceSession.h | 138 + camera/device/3.3/default/CameraDevice_3_3.h | 75 + camera/device/3.3/default/OWNERS | 1 + camera/device/3.3/default/convert.cpp | 65 + camera/device/3.3/default/include/convert.h | 49 + camera/device/3.3/types.hal | 57 + camera/device/3.4/ICameraDeviceCallback.hal | 43 + camera/device/3.4/ICameraDeviceSession.hal | 109 + camera/device/3.4/default/Android.bp | 108 + camera/device/3.4/default/CameraDevice.cpp | 67 + .../3.4/default/CameraDeviceSession.cpp | 773 ++ .../3.4/default/ExternalCameraDevice.cpp | 1029 ++ .../default/ExternalCameraDeviceSession.cpp | 2652 +++++ .../3.4/default/ExternalCameraUtils.cpp | 900 ++ camera/device/3.4/default/OWNERS | 1 + camera/device/3.4/default/convert.cpp | 59 + camera/device/3.4/default/include/convert.h | 46 + .../device_v3_4_impl/CameraDeviceSession.h | 202 + .../device_v3_4_impl/CameraDevice_3_4.h | 75 + .../ExternalCameraDeviceSession.h | 465 + .../ExternalCameraDevice_3_4.h | 247 + .../ExternalCameraUtils.h | 303 + camera/device/3.4/types.hal | 355 + camera/device/3.5/ICameraDevice.hal | 119 + camera/device/3.5/ICameraDeviceCallback.hal | 70 + camera/device/3.5/ICameraDeviceSession.hal | 154 + camera/device/3.5/default/Android.bp | 107 + camera/device/3.5/default/CameraDevice.cpp | 157 + .../3.5/default/CameraDeviceSession.cpp | 407 + .../3.5/default/ExternalCameraDevice.cpp | 118 + 
.../default/ExternalCameraDeviceSession.cpp | 314 + camera/device/3.5/default/OWNERS | 1 + .../device_v3_5_impl/CameraDeviceSession.h | 261 + .../device_v3_5_impl/CameraDevice_3_5.h | 122 + .../ExternalCameraDeviceSession.h | 275 + .../ExternalCameraDevice_3_5.h | 142 + camera/device/3.5/types.hal | 177 + camera/device/3.6/ICameraDevice.hal | 31 + camera/device/3.6/ICameraDeviceSession.hal | 132 + camera/device/3.6/ICameraOfflineSession.hal | 80 + camera/device/3.6/default/Android.bp | 68 + .../3.6/default/ExternalCameraDevice.cpp | 91 + .../default/ExternalCameraDeviceSession.cpp | 360 + .../default/ExternalCameraOfflineSession.cpp | 554 ++ camera/device/3.6/default/OWNERS | 1 + .../ExternalCameraDeviceSession.h | 208 + .../ExternalCameraDevice_3_6.h | 126 + .../ExternalCameraOfflineSession.h | 232 + camera/device/3.6/types.hal | 149 + camera/device/3.7/ICameraDevice.hal | 43 + camera/device/3.7/ICameraDeviceSession.hal | 124 + camera/device/3.7/ICameraInjectionSession.hal | 85 + camera/device/3.7/types.hal | 146 + camera/device/README.md | 97 + camera/provider/2.4/ICameraProvider.hal | 191 + .../provider/2.4/ICameraProviderCallback.hal | 68 + camera/provider/2.4/default/Android.bp | 232 + .../2.4/default/CameraProvider_2_4.cpp | 69 + .../provider/2.4/default/CameraProvider_2_4.h | 86 + .../ExternalCameraProviderImpl_2_4.cpp | 373 + .../default/ExternalCameraProviderImpl_2_4.h | 118 + .../default/LegacyCameraProviderImpl_2_4.cpp | 658 ++ .../default/LegacyCameraProviderImpl_2_4.h | 134 + camera/provider/2.4/default/OWNERS | 1 + ...re.camera.provider@2.4-external-service.rc | 8 + ...rdware.camera.provider@2.4-service-lazy.rc | 10 + ...are.camera.provider@2.4-service-lazy_64.rc | 10 + ...or.hardware.camera.provider@2.4-service.rc | 8 + ...hardware.camera.provider@2.4-service_64.rc | 8 + .../provider/2.4/default/external-service.cpp | 34 + camera/provider/2.4/default/service.cpp | 67 + camera/provider/2.4/vts/OWNERS | 6 + camera/provider/2.4/vts/functional/Android.bp | 55 + .../2.4/vts/functional/AndroidTest.xml | 33 + .../VtsHalCameraProviderV2_4TargetTest.cpp | 8831 +++++++++++++++++ camera/provider/2.5/ICameraProvider.hal | 68 + camera/provider/2.5/default/Android.bp | 191 + .../provider/2.5/default/CameraProvider_2_5.h | 94 + .../ExternalCameraProviderImpl_2_5.cpp | 47 + .../default/ExternalCameraProviderImpl_2_5.h | 62 + .../default/LegacyCameraProviderImpl_2_5.cpp | 53 + .../default/LegacyCameraProviderImpl_2_5.h | 62 + camera/provider/2.5/default/OWNERS | 1 + ...re.camera.provider@2.5-external-service.rc | 9 + ...rdware.camera.provider@2.5-service-lazy.rc | 11 + ...are.camera.provider@2.5-service-lazy_64.rc | 11 + ...id.hardware.camera.provider@2.5-service.rc | 9 + ...hardware.camera.provider@2.5-service_64.rc | 9 + .../provider/2.5/default/external-service.cpp | 46 + camera/provider/2.5/default/service.cpp | 63 + camera/provider/2.5/types.hal | 52 + camera/provider/2.6/ICameraProvider.hal | 151 + .../provider/2.6/ICameraProviderCallback.hal | 54 + camera/provider/2.6/types.hal | 30 + camera/provider/2.7/ICameraProvider.hal | 51 + camera/provider/2.7/types.hal | 30 + camera/provider/README.md | 37 + 138 files changed, 37865 insertions(+) create mode 100644 camera/README.md create mode 100644 camera/common/1.0/default/Android.bp create mode 100644 camera/common/1.0/default/CameraMetadata.cpp create mode 100644 camera/common/1.0/default/CameraModule.cpp create mode 100644 camera/common/1.0/default/CameraParameters.cpp create mode 100644 camera/common/1.0/default/Exif.cpp create mode 
100644 camera/common/1.0/default/HandleImporter.cpp create mode 100644 camera/common/1.0/default/OWNERS create mode 100644 camera/common/1.0/default/VendorTagDescriptor.cpp create mode 100644 camera/common/1.0/default/include/CameraMetadata.h create mode 100644 camera/common/1.0/default/include/CameraModule.h create mode 100644 camera/common/1.0/default/include/CameraParameters.h create mode 100644 camera/common/1.0/default/include/Exif.h create mode 100644 camera/common/1.0/default/include/HandleImporter.h create mode 100644 camera/common/1.0/default/include/VendorTagDescriptor.h create mode 100644 camera/common/1.0/types.hal create mode 100644 camera/common/README.md create mode 100644 camera/device/1.0/ICameraDevice.hal create mode 100644 camera/device/1.0/ICameraDeviceCallback.hal create mode 100644 camera/device/1.0/ICameraDevicePreviewCallback.hal create mode 100644 camera/device/1.0/default/Android.bp create mode 100644 camera/device/1.0/default/CameraDevice.cpp create mode 100644 camera/device/1.0/default/CameraDevice_1_0.h create mode 100644 camera/device/1.0/default/OWNERS create mode 100644 camera/device/1.0/types.hal create mode 100644 camera/device/3.2/ICameraDevice.hal create mode 100644 camera/device/3.2/ICameraDeviceCallback.hal create mode 100644 camera/device/3.2/ICameraDeviceSession.hal create mode 100644 camera/device/3.2/default/Android.bp create mode 100644 camera/device/3.2/default/CameraDevice.cpp create mode 100644 camera/device/3.2/default/CameraDeviceSession.cpp create mode 100644 camera/device/3.2/default/CameraDeviceSession.h create mode 100644 camera/device/3.2/default/CameraDevice_3_2.h create mode 100644 camera/device/3.2/default/OWNERS create mode 100644 camera/device/3.2/default/convert.cpp create mode 100644 camera/device/3.2/default/include/convert.h create mode 100644 camera/device/3.2/types.hal create mode 100644 camera/device/3.3/ICameraDeviceSession.hal create mode 100644 camera/device/3.3/default/Android.bp create mode 100644 camera/device/3.3/default/CameraDevice.cpp create mode 100644 camera/device/3.3/default/CameraDeviceSession.cpp create mode 100644 camera/device/3.3/default/CameraDeviceSession.h create mode 100644 camera/device/3.3/default/CameraDevice_3_3.h create mode 100644 camera/device/3.3/default/OWNERS create mode 100644 camera/device/3.3/default/convert.cpp create mode 100644 camera/device/3.3/default/include/convert.h create mode 100644 camera/device/3.3/types.hal create mode 100644 camera/device/3.4/ICameraDeviceCallback.hal create mode 100644 camera/device/3.4/ICameraDeviceSession.hal create mode 100644 camera/device/3.4/default/Android.bp create mode 100644 camera/device/3.4/default/CameraDevice.cpp create mode 100644 camera/device/3.4/default/CameraDeviceSession.cpp create mode 100644 camera/device/3.4/default/ExternalCameraDevice.cpp create mode 100644 camera/device/3.4/default/ExternalCameraDeviceSession.cpp create mode 100644 camera/device/3.4/default/ExternalCameraUtils.cpp create mode 100644 camera/device/3.4/default/OWNERS create mode 100644 camera/device/3.4/default/convert.cpp create mode 100644 camera/device/3.4/default/include/convert.h create mode 100644 camera/device/3.4/default/include/device_v3_4_impl/CameraDeviceSession.h create mode 100644 camera/device/3.4/default/include/device_v3_4_impl/CameraDevice_3_4.h create mode 100644 camera/device/3.4/default/include/ext_device_v3_4_impl/ExternalCameraDeviceSession.h create mode 100644 camera/device/3.4/default/include/ext_device_v3_4_impl/ExternalCameraDevice_3_4.h 
create mode 100644 camera/device/3.4/default/include/ext_device_v3_4_impl/ExternalCameraUtils.h create mode 100644 camera/device/3.4/types.hal create mode 100644 camera/device/3.5/ICameraDevice.hal create mode 100644 camera/device/3.5/ICameraDeviceCallback.hal create mode 100644 camera/device/3.5/ICameraDeviceSession.hal create mode 100644 camera/device/3.5/default/Android.bp create mode 100644 camera/device/3.5/default/CameraDevice.cpp create mode 100644 camera/device/3.5/default/CameraDeviceSession.cpp create mode 100644 camera/device/3.5/default/ExternalCameraDevice.cpp create mode 100644 camera/device/3.5/default/ExternalCameraDeviceSession.cpp create mode 100644 camera/device/3.5/default/OWNERS create mode 100644 camera/device/3.5/default/include/device_v3_5_impl/CameraDeviceSession.h create mode 100644 camera/device/3.5/default/include/device_v3_5_impl/CameraDevice_3_5.h create mode 100644 camera/device/3.5/default/include/ext_device_v3_5_impl/ExternalCameraDeviceSession.h create mode 100644 camera/device/3.5/default/include/ext_device_v3_5_impl/ExternalCameraDevice_3_5.h create mode 100644 camera/device/3.5/types.hal create mode 100644 camera/device/3.6/ICameraDevice.hal create mode 100644 camera/device/3.6/ICameraDeviceSession.hal create mode 100644 camera/device/3.6/ICameraOfflineSession.hal create mode 100644 camera/device/3.6/default/Android.bp create mode 100644 camera/device/3.6/default/ExternalCameraDevice.cpp create mode 100644 camera/device/3.6/default/ExternalCameraDeviceSession.cpp create mode 100644 camera/device/3.6/default/ExternalCameraOfflineSession.cpp create mode 100644 camera/device/3.6/default/OWNERS create mode 100644 camera/device/3.6/default/include/ext_device_v3_6_impl/ExternalCameraDeviceSession.h create mode 100644 camera/device/3.6/default/include/ext_device_v3_6_impl/ExternalCameraDevice_3_6.h create mode 100644 camera/device/3.6/default/include/ext_device_v3_6_impl/ExternalCameraOfflineSession.h create mode 100644 camera/device/3.6/types.hal create mode 100644 camera/device/3.7/ICameraDevice.hal create mode 100644 camera/device/3.7/ICameraDeviceSession.hal create mode 100644 camera/device/3.7/ICameraInjectionSession.hal create mode 100644 camera/device/3.7/types.hal create mode 100644 camera/device/README.md create mode 100644 camera/provider/2.4/ICameraProvider.hal create mode 100644 camera/provider/2.4/ICameraProviderCallback.hal create mode 100644 camera/provider/2.4/default/Android.bp create mode 100644 camera/provider/2.4/default/CameraProvider_2_4.cpp create mode 100644 camera/provider/2.4/default/CameraProvider_2_4.h create mode 100644 camera/provider/2.4/default/ExternalCameraProviderImpl_2_4.cpp create mode 100644 camera/provider/2.4/default/ExternalCameraProviderImpl_2_4.h create mode 100644 camera/provider/2.4/default/LegacyCameraProviderImpl_2_4.cpp create mode 100644 camera/provider/2.4/default/LegacyCameraProviderImpl_2_4.h create mode 100644 camera/provider/2.4/default/OWNERS create mode 100644 camera/provider/2.4/default/android.vendor.hardware.camera.provider@2.4-external-service.rc create mode 100644 camera/provider/2.4/default/android.vendor.hardware.camera.provider@2.4-service-lazy.rc create mode 100644 camera/provider/2.4/default/android.vendor.hardware.camera.provider@2.4-service-lazy_64.rc create mode 100644 camera/provider/2.4/default/android.vendor.hardware.camera.provider@2.4-service.rc create mode 100644 camera/provider/2.4/default/android.vendor.hardware.camera.provider@2.4-service_64.rc create mode 100644 
camera/provider/2.4/default/external-service.cpp create mode 100644 camera/provider/2.4/default/service.cpp create mode 100644 camera/provider/2.4/vts/OWNERS create mode 100644 camera/provider/2.4/vts/functional/Android.bp create mode 100644 camera/provider/2.4/vts/functional/AndroidTest.xml create mode 100644 camera/provider/2.4/vts/functional/VtsHalCameraProviderV2_4TargetTest.cpp create mode 100644 camera/provider/2.5/ICameraProvider.hal create mode 100644 camera/provider/2.5/default/Android.bp create mode 100644 camera/provider/2.5/default/CameraProvider_2_5.h create mode 100644 camera/provider/2.5/default/ExternalCameraProviderImpl_2_5.cpp create mode 100644 camera/provider/2.5/default/ExternalCameraProviderImpl_2_5.h create mode 100644 camera/provider/2.5/default/LegacyCameraProviderImpl_2_5.cpp create mode 100644 camera/provider/2.5/default/LegacyCameraProviderImpl_2_5.h create mode 100644 camera/provider/2.5/default/OWNERS create mode 100644 camera/provider/2.5/default/android.hardware.camera.provider@2.5-external-service.rc create mode 100644 camera/provider/2.5/default/android.hardware.camera.provider@2.5-service-lazy.rc create mode 100644 camera/provider/2.5/default/android.hardware.camera.provider@2.5-service-lazy_64.rc create mode 100644 camera/provider/2.5/default/android.hardware.camera.provider@2.5-service.rc create mode 100644 camera/provider/2.5/default/android.hardware.camera.provider@2.5-service_64.rc create mode 100644 camera/provider/2.5/default/external-service.cpp create mode 100644 camera/provider/2.5/default/service.cpp create mode 100644 camera/provider/2.5/types.hal create mode 100644 camera/provider/2.6/ICameraProvider.hal create mode 100644 camera/provider/2.6/ICameraProviderCallback.hal create mode 100644 camera/provider/2.6/types.hal create mode 100644 camera/provider/2.7/ICameraProvider.hal create mode 100644 camera/provider/2.7/types.hal create mode 100644 camera/provider/README.md diff --git a/camera/README.md b/camera/README.md new file mode 100644 index 0000000..8ce3352 --- /dev/null +++ b/camera/README.md @@ -0,0 +1,12 @@ +## Camera HALs ## +--- + +## Overview: ## + +The camera.* HAL tree is used by the Android camera service to discover and +operate camera devices available on the device. + +More details and versioning information can be found within each particular HAL. + +More complete information about the Android camera HAL and subsystem can be found at +[source.android.com](http://source.android.com/devices/camera/index.html). 
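[Reviewer note, not part of the patch] The README above describes the discovery path: the Android camera service finds camera devices through the camera provider interface added under camera/provider/2.4 in this patch. Below is a minimal, hedged sketch of that discovery step from a client's point of view. The package, header path, and instance name ("android.hardware.camera.provider@2.4", "legacy/0") follow stock AOSP HIDL naming and are assumptions here; the vendor-prefixed package introduced by this patch (android.vendor.hardware.camera.provider@2.4) may generate different namespaces and register under a different instance name.

    // Sketch only: enumerate camera device names via the provider HAL.
    // Names/instances are assumptions based on stock AOSP HIDL conventions.
    #include <android/hardware/camera/provider/2.4/ICameraProvider.h>
    #include <android-base/logging.h>

    using ::android::sp;
    using ::android::hardware::hidl_string;
    using ::android::hardware::hidl_vec;
    using ::android::hardware::camera::common::V1_0::Status;
    using ::android::hardware::camera::provider::V2_4::ICameraProvider;

    int main() {
        // "legacy/0" is the conventional instance name for the default provider.
        sp<ICameraProvider> provider = ICameraProvider::getService("legacy/0");
        if (provider == nullptr) {
            LOG(ERROR) << "Camera provider service not found";
            return 1;
        }
        // getCameraIdList() reports status and the list of device names
        // (e.g. "device@3.2/legacy/0") through a synchronous callback.
        provider->getCameraIdList(
                [](Status status, const hidl_vec<hidl_string>& ids) {
                    if (status != Status::OK) return;
                    for (const auto& id : ids) {
                        LOG(INFO) << "Found camera device: " << id.c_str();
                    }
                });
        return 0;
    }

Each returned device name can then be opened through the corresponding ICameraDevice interface (camera/device/3.x in this patch); versioning details live in the per-HAL READMEs also added here.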
diff --git a/camera/common/1.0/default/Android.bp b/camera/common/1.0/default/Android.bp new file mode 100644 index 0000000..5df3ce8 --- /dev/null +++ b/camera/common/1.0/default/Android.bp @@ -0,0 +1,31 @@ + +cc_library_static { + name: "android.vendor.hardware.camera.common@1.0-helper", + vendor_available: true, + defaults: ["hidl_defaults"], + srcs: [ + "CameraModule.cpp", + "CameraMetadata.cpp", + "CameraParameters.cpp", + "VendorTagDescriptor.cpp", + "HandleImporter.cpp", + "Exif.cpp", + ], + cflags: [ + "-Werror", + "-Wextra", + "-Wall", + ], + shared_libs: [ + "liblog", + "libgralloctypes", + "libhardware", + "libcamera_metadata", + "android.hardware.graphics.mapper@2.0", + "android.hardware.graphics.mapper@3.0", + "android.hardware.graphics.mapper@4.0", + "libexif", + ], + include_dirs: ["system/media/private/camera/include"], + export_include_dirs: ["include"], +} diff --git a/camera/common/1.0/default/CameraMetadata.cpp b/camera/common/1.0/default/CameraMetadata.cpp new file mode 100644 index 0000000..eb1bd1c --- /dev/null +++ b/camera/common/1.0/default/CameraMetadata.cpp @@ -0,0 +1,563 @@ +/* + * Copyright (C) 2016 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +// #define LOG_NDEBUG 0 + +#define LOG_TAG "CamComm1.0-MD" +#include +#include + +#include "CameraMetadata.h" +#include "VendorTagDescriptor.h" + +namespace android { +namespace hardware { +namespace camera { +namespace common { +namespace V1_0 { +namespace helper { + +#define ALIGN_TO(val, alignment) \ + (((uintptr_t)(val) + ((alignment) - 1)) & ~((alignment) - 1)) + +CameraMetadata::CameraMetadata() : + mBuffer(NULL), mLocked(false) { +} + +CameraMetadata::CameraMetadata(size_t entryCapacity, size_t dataCapacity) : + mLocked(false) +{ + mBuffer = allocate_camera_metadata(entryCapacity, dataCapacity); +} + +CameraMetadata::CameraMetadata(const CameraMetadata &other) : + mLocked(false) { + mBuffer = clone_camera_metadata(other.mBuffer); +} + +CameraMetadata::CameraMetadata(camera_metadata_t *buffer) : + mBuffer(NULL), mLocked(false) { + acquire(buffer); +} + +CameraMetadata &CameraMetadata::operator=(const CameraMetadata &other) { + return operator=(other.mBuffer); +} + +CameraMetadata &CameraMetadata::operator=(const camera_metadata_t *buffer) { + if (mLocked) { + ALOGE("%s: Assignment to a locked CameraMetadata!", __FUNCTION__); + return *this; + } + + if (CC_LIKELY(buffer != mBuffer)) { + camera_metadata_t *newBuffer = clone_camera_metadata(buffer); + clear(); + mBuffer = newBuffer; + } + return *this; +} + +CameraMetadata::~CameraMetadata() { + mLocked = false; + clear(); +} + +const camera_metadata_t* CameraMetadata::getAndLock() const { + mLocked = true; + return mBuffer; +} + +status_t CameraMetadata::unlock(const camera_metadata_t *buffer) const { + if (!mLocked) { + ALOGE("%s: Can't unlock a non-locked CameraMetadata!", __FUNCTION__); + return INVALID_OPERATION; + } + if (buffer != mBuffer) { + ALOGE("%s: Can't unlock CameraMetadata with wrong pointer!", + 
__FUNCTION__); + return BAD_VALUE; + } + mLocked = false; + return OK; +} + +camera_metadata_t* CameraMetadata::release() { + if (mLocked) { + ALOGE("%s: CameraMetadata is locked", __FUNCTION__); + return NULL; + } + camera_metadata_t *released = mBuffer; + mBuffer = NULL; + return released; +} + +void CameraMetadata::clear() { + if (mLocked) { + ALOGE("%s: CameraMetadata is locked", __FUNCTION__); + return; + } + if (mBuffer) { + free_camera_metadata(mBuffer); + mBuffer = NULL; + } +} + +void CameraMetadata::acquire(camera_metadata_t *buffer) { + if (mLocked) { + ALOGE("%s: CameraMetadata is locked", __FUNCTION__); + return; + } + clear(); + mBuffer = buffer; + + ALOGE_IF(validate_camera_metadata_structure(mBuffer, /*size*/NULL) != OK, + "%s: Failed to validate metadata structure %p", + __FUNCTION__, buffer); +} + +void CameraMetadata::acquire(CameraMetadata &other) { + if (mLocked) { + ALOGE("%s: CameraMetadata is locked", __FUNCTION__); + return; + } + acquire(other.release()); +} + +status_t CameraMetadata::append(const CameraMetadata &other) { + return append(other.mBuffer); +} + +status_t CameraMetadata::append(const camera_metadata_t* other) { + if (mLocked) { + ALOGE("%s: CameraMetadata is locked", __FUNCTION__); + return INVALID_OPERATION; + } + size_t extraEntries = get_camera_metadata_entry_count(other); + size_t extraData = get_camera_metadata_data_count(other); + resizeIfNeeded(extraEntries, extraData); + + return append_camera_metadata(mBuffer, other); +} + +size_t CameraMetadata::entryCount() const { + return (mBuffer == NULL) ? 0 : + get_camera_metadata_entry_count(mBuffer); +} + +bool CameraMetadata::isEmpty() const { + return entryCount() == 0; +} + +status_t CameraMetadata::sort() { + if (mLocked) { + ALOGE("%s: CameraMetadata is locked", __FUNCTION__); + return INVALID_OPERATION; + } + return sort_camera_metadata(mBuffer); +} + +status_t CameraMetadata::checkType(uint32_t tag, uint8_t expectedType) { + int tagType = get_local_camera_metadata_tag_type(tag, mBuffer); + if ( CC_UNLIKELY(tagType == -1)) { + ALOGE("Update metadata entry: Unknown tag %d", tag); + return INVALID_OPERATION; + } + if ( CC_UNLIKELY(tagType != expectedType) ) { + ALOGE("Mismatched tag type when updating entry %s (%d) of type %s; " + "got type %s data instead ", + get_local_camera_metadata_tag_name(tag, mBuffer), tag, + camera_metadata_type_names[tagType], camera_metadata_type_names[expectedType]); + return INVALID_OPERATION; + } + return OK; +} + +status_t CameraMetadata::update(uint32_t tag, + const int32_t *data, size_t data_count) { + status_t res; + if (mLocked) { + ALOGE("%s: CameraMetadata is locked", __FUNCTION__); + return INVALID_OPERATION; + } + if ( (res = checkType(tag, TYPE_INT32)) != OK) { + return res; + } + return updateImpl(tag, (const void*)data, data_count); +} + +status_t CameraMetadata::update(uint32_t tag, + const uint8_t *data, size_t data_count) { + status_t res; + if (mLocked) { + ALOGE("%s: CameraMetadata is locked", __FUNCTION__); + return INVALID_OPERATION; + } + if ( (res = checkType(tag, TYPE_BYTE)) != OK) { + return res; + } + return updateImpl(tag, (const void*)data, data_count); +} + +status_t CameraMetadata::update(uint32_t tag, + const float *data, size_t data_count) { + status_t res; + if (mLocked) { + ALOGE("%s: CameraMetadata is locked", __FUNCTION__); + return INVALID_OPERATION; + } + if ( (res = checkType(tag, TYPE_FLOAT)) != OK) { + return res; + } + return updateImpl(tag, (const void*)data, data_count); +} + +status_t CameraMetadata::update(uint32_t tag, + 
const int64_t *data, size_t data_count) { + status_t res; + if (mLocked) { + ALOGE("%s: CameraMetadata is locked", __FUNCTION__); + return INVALID_OPERATION; + } + if ( (res = checkType(tag, TYPE_INT64)) != OK) { + return res; + } + return updateImpl(tag, (const void*)data, data_count); +} + +status_t CameraMetadata::update(uint32_t tag, + const double *data, size_t data_count) { + status_t res; + if (mLocked) { + ALOGE("%s: CameraMetadata is locked", __FUNCTION__); + return INVALID_OPERATION; + } + if ( (res = checkType(tag, TYPE_DOUBLE)) != OK) { + return res; + } + return updateImpl(tag, (const void*)data, data_count); +} + +status_t CameraMetadata::update(uint32_t tag, + const camera_metadata_rational_t *data, size_t data_count) { + status_t res; + if (mLocked) { + ALOGE("%s: CameraMetadata is locked", __FUNCTION__); + return INVALID_OPERATION; + } + if ( (res = checkType(tag, TYPE_RATIONAL)) != OK) { + return res; + } + return updateImpl(tag, (const void*)data, data_count); +} + +status_t CameraMetadata::update(uint32_t tag, + const String8 &string) { + status_t res; + if (mLocked) { + ALOGE("%s: CameraMetadata is locked", __FUNCTION__); + return INVALID_OPERATION; + } + if ( (res = checkType(tag, TYPE_BYTE)) != OK) { + return res; + } + // string.size() doesn't count the null termination character. + return updateImpl(tag, (const void*)string.string(), string.size() + 1); +} + +status_t CameraMetadata::update(const camera_metadata_ro_entry &entry) { + status_t res; + if (mLocked) { + ALOGE("%s: CameraMetadata is locked", __FUNCTION__); + return INVALID_OPERATION; + } + if ( (res = checkType(entry.tag, entry.type)) != OK) { + return res; + } + return updateImpl(entry.tag, (const void*)entry.data.u8, entry.count); +} + +status_t CameraMetadata::updateImpl(uint32_t tag, const void *data, + size_t data_count) { + status_t res; + if (mLocked) { + ALOGE("%s: CameraMetadata is locked", __FUNCTION__); + return INVALID_OPERATION; + } + int type = get_local_camera_metadata_tag_type(tag, mBuffer); + if (type == -1) { + ALOGE("%s: Tag %d not found", __FUNCTION__, tag); + return BAD_VALUE; + } + // Safety check - ensure that data isn't pointing to this metadata, since + // that would get invalidated if a resize is needed + size_t bufferSize = get_camera_metadata_size(mBuffer); + uintptr_t bufAddr = reinterpret_cast(mBuffer); + uintptr_t dataAddr = reinterpret_cast(data); + if (dataAddr > bufAddr && dataAddr < (bufAddr + bufferSize)) { + ALOGE("%s: Update attempted with data from the same metadata buffer!", + __FUNCTION__); + return INVALID_OPERATION; + } + + size_t data_size = calculate_camera_metadata_entry_data_size(type, + data_count); + + res = resizeIfNeeded(1, data_size); + + if (res == OK) { + camera_metadata_entry_t entry; + res = find_camera_metadata_entry(mBuffer, tag, &entry); + if (res == NAME_NOT_FOUND) { + res = add_camera_metadata_entry(mBuffer, + tag, data, data_count); + } else if (res == OK) { + res = update_camera_metadata_entry(mBuffer, + entry.index, data, data_count, NULL); + } + } + + if (res != OK) { + ALOGE("%s: Unable to update metadata entry %s.%s (%x): %s (%d)", __FUNCTION__, + get_local_camera_metadata_section_name(tag, mBuffer), + get_local_camera_metadata_tag_name(tag, mBuffer), tag, strerror(-res), res); + } + + IF_ALOGV() { + ALOGE_IF(validate_camera_metadata_structure(mBuffer, /*size*/NULL) != + OK, + + "%s: Failed to validate metadata structure after update %p", + __FUNCTION__, mBuffer); + } + + return res; +} + +bool CameraMetadata::exists(uint32_t tag) const { 
+ camera_metadata_ro_entry entry; + return find_camera_metadata_ro_entry(mBuffer, tag, &entry) == 0; +} + +camera_metadata_entry_t CameraMetadata::find(uint32_t tag) { + status_t res; + camera_metadata_entry entry; + if (mLocked) { + ALOGE("%s: CameraMetadata is locked", __FUNCTION__); + entry.count = 0; + return entry; + } + res = find_camera_metadata_entry(mBuffer, tag, &entry); + if (CC_UNLIKELY( res != OK )) { + entry.count = 0; + entry.data.u8 = NULL; + } + return entry; +} + +camera_metadata_ro_entry_t CameraMetadata::find(uint32_t tag) const { + status_t res; + camera_metadata_ro_entry entry; + res = find_camera_metadata_ro_entry(mBuffer, tag, &entry); + if (CC_UNLIKELY( res != OK )) { + entry.count = 0; + entry.data.u8 = NULL; + } + return entry; +} + +status_t CameraMetadata::erase(uint32_t tag) { + camera_metadata_entry_t entry; + status_t res; + if (mLocked) { + ALOGE("%s: CameraMetadata is locked", __FUNCTION__); + return INVALID_OPERATION; + } + res = find_camera_metadata_entry(mBuffer, tag, &entry); + if (res == NAME_NOT_FOUND) { + return OK; + } else if (res != OK) { + ALOGE("%s: Error looking for entry %s.%s (%x): %s %d", __FUNCTION__, + get_local_camera_metadata_section_name(tag, mBuffer), + get_local_camera_metadata_tag_name(tag, mBuffer), tag, strerror(-res), res); + return res; + } + res = delete_camera_metadata_entry(mBuffer, entry.index); + if (res != OK) { + ALOGE("%s: Error deleting entry %s.%s (%x): %s %d", __FUNCTION__, + get_local_camera_metadata_section_name(tag, mBuffer), + get_local_camera_metadata_tag_name(tag, mBuffer), tag, strerror(-res), res); + } + return res; +} + +void CameraMetadata::dump(int fd, int verbosity, int indentation) const { + dump_indented_camera_metadata(mBuffer, fd, verbosity, indentation); +} + +status_t CameraMetadata::resizeIfNeeded(size_t extraEntries, size_t extraData) { + if (mBuffer == NULL) { + mBuffer = allocate_camera_metadata(extraEntries * 2, extraData * 2); + if (mBuffer == NULL) { + ALOGE("%s: Can't allocate larger metadata buffer", __FUNCTION__); + return NO_MEMORY; + } + } else { + size_t currentEntryCount = get_camera_metadata_entry_count(mBuffer); + size_t currentEntryCap = get_camera_metadata_entry_capacity(mBuffer); + size_t newEntryCount = currentEntryCount + + extraEntries; + newEntryCount = (newEntryCount > currentEntryCap) ? + newEntryCount * 2 : currentEntryCap; + + size_t currentDataCount = get_camera_metadata_data_count(mBuffer); + size_t currentDataCap = get_camera_metadata_data_capacity(mBuffer); + size_t newDataCount = currentDataCount + + extraData; + newDataCount = (newDataCount > currentDataCap) ? 
+ newDataCount * 2 : currentDataCap; + + if (newEntryCount > currentEntryCap || + newDataCount > currentDataCap) { + camera_metadata_t *oldBuffer = mBuffer; + mBuffer = allocate_camera_metadata(newEntryCount, + newDataCount); + if (mBuffer == NULL) { + ALOGE("%s: Can't allocate larger metadata buffer", __FUNCTION__); + return NO_MEMORY; + } + append_camera_metadata(mBuffer, oldBuffer); + free_camera_metadata(oldBuffer); + } + } + return OK; +} + +void CameraMetadata::swap(CameraMetadata& other) { + if (mLocked) { + ALOGE("%s: CameraMetadata is locked", __FUNCTION__); + return; + } else if (other.mLocked) { + ALOGE("%s: Other CameraMetadata is locked", __FUNCTION__); + return; + } + + camera_metadata* thisBuf = mBuffer; + camera_metadata* otherBuf = other.mBuffer; + + other.mBuffer = thisBuf; + mBuffer = otherBuf; +} + +status_t CameraMetadata::getTagFromName(const char *name, + const VendorTagDescriptor* vTags, uint32_t *tag) { + + if (name == nullptr || tag == nullptr) return BAD_VALUE; + + size_t nameLength = strlen(name); + + const SortedVector *vendorSections; + size_t vendorSectionCount = 0; + + if (vTags != NULL) { + vendorSections = vTags->getAllSectionNames(); + vendorSectionCount = vendorSections->size(); + } + + // First, find the section by the longest string match + const char *section = NULL; + size_t sectionIndex = 0; + size_t sectionLength = 0; + size_t totalSectionCount = ANDROID_SECTION_COUNT + vendorSectionCount; + for (size_t i = 0; i < totalSectionCount; ++i) { + + const char *str = (i < ANDROID_SECTION_COUNT) ? camera_metadata_section_names[i] : + (*vendorSections)[i - ANDROID_SECTION_COUNT].string(); + + ALOGV("%s: Trying to match against section '%s'", __FUNCTION__, str); + + if (strstr(name, str) == name) { // name begins with the section name + size_t strLength = strlen(str); + + ALOGV("%s: Name begins with section name", __FUNCTION__); + + // section name is the longest we've found so far + if (section == NULL || sectionLength < strLength) { + section = str; + sectionIndex = i; + sectionLength = strLength; + + ALOGV("%s: Found new best section (%s)", __FUNCTION__, section); + } + } + } + + if (section == NULL) { + return NAME_NOT_FOUND; + } else { + ALOGV("%s: Found matched section '%s' (%zu)", + __FUNCTION__, section, sectionIndex); + } + + // Get the tag name component of the name + const char *nameTagName = name + sectionLength + 1; // x.y.z -> z + if (sectionLength + 1 >= nameLength) { + return BAD_VALUE; + } + + // Match rest of name against the tag names in that section only + uint32_t candidateTag = 0; + if (sectionIndex < ANDROID_SECTION_COUNT) { + // Match built-in tags (typically android.*) + uint32_t tagBegin, tagEnd; // [tagBegin, tagEnd) + tagBegin = camera_metadata_section_bounds[sectionIndex][0]; + tagEnd = camera_metadata_section_bounds[sectionIndex][1]; + + for (candidateTag = tagBegin; candidateTag < tagEnd; ++candidateTag) { + const char *tagName = get_camera_metadata_tag_name(candidateTag); + + if (strcmp(nameTagName, tagName) == 0) { + ALOGV("%s: Found matched tag '%s' (%d)", + __FUNCTION__, tagName, candidateTag); + break; + } + } + + if (candidateTag == tagEnd) { + return NAME_NOT_FOUND; + } + } else if (vTags != NULL) { + // Match vendor tags (typically com.*) + const String8 sectionName(section); + const String8 tagName(nameTagName); + + status_t res = OK; + if ((res = vTags->lookupTag(tagName, sectionName, &candidateTag)) != OK) { + return NAME_NOT_FOUND; + } + } + + *tag = candidateTag; + return OK; +} + + +} // namespace helper +} // 
namespace V1_0 +} // namespace common +} // namespace camera +} // namespace hardware +} // namespace android diff --git a/camera/common/1.0/default/CameraModule.cpp b/camera/common/1.0/default/CameraModule.cpp new file mode 100644 index 0000000..16fb85c --- /dev/null +++ b/camera/common/1.0/default/CameraModule.cpp @@ -0,0 +1,583 @@ +/* + * Copyright (C) 2016 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "CamComm1.0-CamModule" +#define ATRACE_TAG ATRACE_TAG_CAMERA +//#define LOG_NDEBUG 0 + +#include + +#include "CameraModule.h" + +namespace android { +namespace hardware { +namespace camera { +namespace common { +namespace V1_0 { +namespace helper { + +void CameraModule::deriveCameraCharacteristicsKeys( + uint32_t deviceVersion, CameraMetadata &chars) { + ATRACE_CALL(); + + Vector derivedCharKeys; + Vector derivedRequestKeys; + Vector derivedResultKeys; + // Keys added in HAL3.3 + if (deviceVersion < CAMERA_DEVICE_API_VERSION_3_3) { + Vector controlModes; + uint8_t data = ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE; + chars.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE, &data, /*count*/1); + data = ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE; + chars.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE, &data, /*count*/1); + controlModes.push(ANDROID_CONTROL_MODE_AUTO); + camera_metadata_entry entry = chars.find(ANDROID_CONTROL_AVAILABLE_SCENE_MODES); + if (entry.count > 1 || entry.data.u8[0] != ANDROID_CONTROL_SCENE_MODE_DISABLED) { + controlModes.push(ANDROID_CONTROL_MODE_USE_SCENE_MODE); + } + + // Only advertise CONTROL_OFF mode if 3A manual controls are supported. 
+ bool isManualAeSupported = false; + bool isManualAfSupported = false; + bool isManualAwbSupported = false; + entry = chars.find(ANDROID_CONTROL_AE_AVAILABLE_MODES); + if (entry.count > 0) { + for (size_t i = 0; i < entry.count; i++) { + if (entry.data.u8[i] == ANDROID_CONTROL_AE_MODE_OFF) { + isManualAeSupported = true; + break; + } + } + } + entry = chars.find(ANDROID_CONTROL_AF_AVAILABLE_MODES); + if (entry.count > 0) { + for (size_t i = 0; i < entry.count; i++) { + if (entry.data.u8[i] == ANDROID_CONTROL_AF_MODE_OFF) { + isManualAfSupported = true; + break; + } + } + } + entry = chars.find(ANDROID_CONTROL_AWB_AVAILABLE_MODES); + if (entry.count > 0) { + for (size_t i = 0; i < entry.count; i++) { + if (entry.data.u8[i] == ANDROID_CONTROL_AWB_MODE_OFF) { + isManualAwbSupported = true; + break; + } + } + } + if (isManualAeSupported && isManualAfSupported && isManualAwbSupported) { + controlModes.push(ANDROID_CONTROL_MODE_OFF); + } + + chars.update(ANDROID_CONTROL_AVAILABLE_MODES, controlModes); + + entry = chars.find(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS); + // HAL3.2 devices passing existing CTS test should all support all LSC modes and LSC map + bool lensShadingModeSupported = false; + if (entry.count > 0) { + for (size_t i = 0; i < entry.count; i++) { + if (entry.data.i32[i] == ANDROID_SHADING_MODE) { + lensShadingModeSupported = true; + break; + } + } + } + Vector lscModes; + Vector lscMapModes; + lscModes.push(ANDROID_SHADING_MODE_FAST); + lscModes.push(ANDROID_SHADING_MODE_HIGH_QUALITY); + lscMapModes.push(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF); + if (lensShadingModeSupported) { + lscModes.push(ANDROID_SHADING_MODE_OFF); + lscMapModes.push(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON); + } + chars.update(ANDROID_SHADING_AVAILABLE_MODES, lscModes); + chars.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES, lscMapModes); + + derivedCharKeys.push(ANDROID_CONTROL_AE_LOCK_AVAILABLE); + derivedCharKeys.push(ANDROID_CONTROL_AWB_LOCK_AVAILABLE); + derivedCharKeys.push(ANDROID_CONTROL_AVAILABLE_MODES); + derivedCharKeys.push(ANDROID_SHADING_AVAILABLE_MODES); + derivedCharKeys.push(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES); + + // Need update android.control.availableHighSpeedVideoConfigurations since HAL3.3 + // adds batch size to this array. + entry = chars.find(ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS); + if (entry.count > 0) { + Vector highSpeedConfig; + for (size_t i = 0; i < entry.count; i += 4) { + highSpeedConfig.add(entry.data.i32[i]); // width + highSpeedConfig.add(entry.data.i32[i + 1]); // height + highSpeedConfig.add(entry.data.i32[i + 2]); // fps_min + highSpeedConfig.add(entry.data.i32[i + 3]); // fps_max + highSpeedConfig.add(1); // batchSize_max. 
default to 1 for HAL3.2 + } + chars.update(ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS, + highSpeedConfig); + } + } + + // Keys added in HAL3.4 + if (deviceVersion < CAMERA_DEVICE_API_VERSION_3_4) { + // Check if HAL supports RAW_OPAQUE output + camera_metadata_entry entry = chars.find(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS); + bool supportRawOpaque = false; + bool supportAnyRaw = false; + const int STREAM_CONFIGURATION_SIZE = 4; + const int STREAM_FORMAT_OFFSET = 0; + const int STREAM_WIDTH_OFFSET = 1; + const int STREAM_HEIGHT_OFFSET = 2; + const int STREAM_IS_INPUT_OFFSET = 3; + Vector rawOpaqueSizes; + + for (size_t i=0; i < entry.count; i += STREAM_CONFIGURATION_SIZE) { + int32_t format = entry.data.i32[i + STREAM_FORMAT_OFFSET]; + int32_t width = entry.data.i32[i + STREAM_WIDTH_OFFSET]; + int32_t height = entry.data.i32[i + STREAM_HEIGHT_OFFSET]; + int32_t isInput = entry.data.i32[i + STREAM_IS_INPUT_OFFSET]; + if (isInput == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT && + format == HAL_PIXEL_FORMAT_RAW_OPAQUE) { + supportRawOpaque = true; + rawOpaqueSizes.push(width); + rawOpaqueSizes.push(height); + // 2 bytes per pixel. This rough estimation is only used when + // HAL does not fill in the opaque raw size + rawOpaqueSizes.push(width * height *2); + } + if (isInput == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT && + (format == HAL_PIXEL_FORMAT_RAW16 || + format == HAL_PIXEL_FORMAT_RAW10 || + format == HAL_PIXEL_FORMAT_RAW12 || + format == HAL_PIXEL_FORMAT_RAW_OPAQUE)) { + supportAnyRaw = true; + } + } + + if (supportRawOpaque) { + entry = chars.find(ANDROID_SENSOR_OPAQUE_RAW_SIZE); + if (entry.count == 0) { + // Fill in estimated value if HAL does not list it + chars.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, rawOpaqueSizes); + derivedCharKeys.push(ANDROID_SENSOR_OPAQUE_RAW_SIZE); + } + } + + // Check if HAL supports any RAW output, if so, fill in postRawSensitivityBoost range + if (supportAnyRaw) { + int32_t defaultRange[2] = {100, 100}; + entry = chars.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE); + if (entry.count == 0) { + // Fill in default value (100, 100) + chars.update( + ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE, + defaultRange, 2); + derivedCharKeys.push(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE); + // Actual request/results will be derived by camera device. + derivedRequestKeys.push(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST); + derivedResultKeys.push(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST); + } + } + } + + // Add those newly added keys to AVAILABLE_CHARACTERISTICS_KEYS + // This has to be done at this end of this function. 
+ if (derivedCharKeys.size() > 0) { + appendAvailableKeys( + chars, ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, derivedCharKeys); + } + if (derivedRequestKeys.size() > 0) { + appendAvailableKeys( + chars, ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, derivedRequestKeys); + } + if (derivedResultKeys.size() > 0) { + appendAvailableKeys( + chars, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS, derivedResultKeys); + } + return; +} + +void CameraModule::appendAvailableKeys(CameraMetadata &chars, + int32_t keyTag, const Vector& appendKeys) { + camera_metadata_entry entry = chars.find(keyTag); + Vector availableKeys; + availableKeys.setCapacity(entry.count + appendKeys.size()); + for (size_t i = 0; i < entry.count; i++) { + availableKeys.push(entry.data.i32[i]); + } + for (size_t i = 0; i < appendKeys.size(); i++) { + availableKeys.push(appendKeys[i]); + } + chars.update(keyTag, availableKeys); +} + +CameraModule::CameraModule(camera_module_t *module) : mNumberOfCameras(0) { + if (module == NULL) { + ALOGE("%s: camera hardware module must not be null", __FUNCTION__); + assert(0); + } + mModule = module; +} + +CameraModule::~CameraModule() +{ + while (mCameraInfoMap.size() > 0) { + camera_info cameraInfo = mCameraInfoMap.editValueAt(0); + if (cameraInfo.static_camera_characteristics != NULL) { + free_camera_metadata( + const_cast(cameraInfo.static_camera_characteristics)); + } + mCameraInfoMap.removeItemsAt(0); + } + + while (mPhysicalCameraInfoMap.size() > 0) { + camera_metadata_t* metadata = mPhysicalCameraInfoMap.editValueAt(0); + if (metadata != NULL) { + free_camera_metadata(metadata); + } + mPhysicalCameraInfoMap.removeItemsAt(0); + } +} + +int CameraModule::init() { + ATRACE_CALL(); + int res = OK; + if (getModuleApiVersion() >= CAMERA_MODULE_API_VERSION_2_4 && + mModule->init != NULL) { + ATRACE_BEGIN("camera_module->init"); + res = mModule->init(); + ATRACE_END(); + } + mNumberOfCameras = getNumberOfCameras(); + mCameraInfoMap.setCapacity(mNumberOfCameras); + return res; +} + +int CameraModule::getCameraInfo(int cameraId, struct camera_info *info) { + ATRACE_CALL(); + Mutex::Autolock lock(mCameraInfoLock); + if (cameraId < 0) { + ALOGE("%s: Invalid camera ID %d", __FUNCTION__, cameraId); + return -EINVAL; + } + + // Only override static_camera_characteristics for API2 devices + int apiVersion = mModule->common.module_api_version; + if (apiVersion < CAMERA_MODULE_API_VERSION_2_0) { + int ret; + ATRACE_BEGIN("camera_module->get_camera_info"); + ret = mModule->get_camera_info(cameraId, info); + // Fill in this so CameraService won't be confused by + // possibly 0 device_version + info->device_version = CAMERA_DEVICE_API_VERSION_1_0; + ATRACE_END(); + return ret; + } + + ssize_t index = mCameraInfoMap.indexOfKey(cameraId); + if (index == NAME_NOT_FOUND) { + // Get camera info from raw module and cache it + camera_info rawInfo, cameraInfo; + ATRACE_BEGIN("camera_module->get_camera_info"); + int ret = mModule->get_camera_info(cameraId, &rawInfo); + ATRACE_END(); + if (ret != 0) { + return ret; + } + int deviceVersion = rawInfo.device_version; + if (deviceVersion < CAMERA_DEVICE_API_VERSION_3_0) { + // static_camera_characteristics is invalid + *info = rawInfo; + return ret; + } + CameraMetadata m; + m.append(rawInfo.static_camera_characteristics); + deriveCameraCharacteristicsKeys(rawInfo.device_version, m); + cameraInfo = rawInfo; + cameraInfo.static_camera_characteristics = m.release(); + index = mCameraInfoMap.add(cameraId, cameraInfo); + } + + assert(index != NAME_NOT_FOUND); + // return the cached 
camera info + *info = mCameraInfoMap[index]; + return OK; +} + +int CameraModule::getPhysicalCameraInfo(int physicalCameraId, camera_metadata_t **physicalInfo) { + ATRACE_CALL(); + Mutex::Autolock lock(mCameraInfoLock); + if (physicalCameraId < mNumberOfCameras) { + ALOGE("%s: Invalid physical camera ID %d", __FUNCTION__, physicalCameraId); + return -EINVAL; + } + + // Only query physical camera info for 2.5 version for newer + int apiVersion = mModule->common.module_api_version; + if (apiVersion < CAMERA_MODULE_API_VERSION_2_5) { + ALOGE("%s: Module version must be at least 2.5 to handle getPhysicalCameraInfo", + __FUNCTION__); + return -ENODEV; + } + if (mModule->get_physical_camera_info == nullptr) { + ALOGE("%s: get_physical_camera is NULL for module version 2.5", __FUNCTION__); + return -EINVAL; + } + + ssize_t index = mPhysicalCameraInfoMap.indexOfKey(physicalCameraId); + if (index == NAME_NOT_FOUND) { + // Get physical camera characteristics, and cache it + camera_metadata_t *info = nullptr; + ATRACE_BEGIN("camera_module->get_physical_camera_info"); + int ret = mModule->get_physical_camera_info(physicalCameraId, &info); + ATRACE_END(); + if (ret != 0) { + return ret; + } + + // The camera_metadata_t returned by get_physical_camera_info could be using + // more memory than necessary due to unused reserved space. Reduce the + // size by appending it to a new CameraMetadata object, which internally + // calls resizeIfNeeded. + CameraMetadata m; + m.append(info); + camera_metadata_t* derivedMetadata = m.release(); + index = mPhysicalCameraInfoMap.add(physicalCameraId, derivedMetadata); + } + + assert(index != NAME_NOT_FOUND); + *physicalInfo = mPhysicalCameraInfoMap[index]; + return OK; +} + +int CameraModule::getDeviceVersion(int cameraId) { + ssize_t index = mDeviceVersionMap.indexOfKey(cameraId); + if (index == NAME_NOT_FOUND) { + int deviceVersion; + if (getModuleApiVersion() >= CAMERA_MODULE_API_VERSION_2_0) { + struct camera_info info; + getCameraInfo(cameraId, &info); + deviceVersion = info.device_version; + } else { + deviceVersion = CAMERA_DEVICE_API_VERSION_1_0; + } + index = mDeviceVersionMap.add(cameraId, deviceVersion); + } + assert(index != NAME_NOT_FOUND); + return mDeviceVersionMap[index]; +} + +int CameraModule::open(const char* id, struct hw_device_t** device) { + int res; + ATRACE_BEGIN("camera_module->open"); + res = filterOpenErrorCode(mModule->common.methods->open(&mModule->common, id, device)); + ATRACE_END(); + return res; +} + +bool CameraModule::isOpenLegacyDefined() const { + if (getModuleApiVersion() < CAMERA_MODULE_API_VERSION_2_3) { + return false; + } + return mModule->open_legacy != NULL; +} + +int CameraModule::openLegacy( + const char* id, uint32_t halVersion, struct hw_device_t** device) { + int res; + ATRACE_BEGIN("camera_module->open_legacy"); + res = mModule->open_legacy(&mModule->common, id, halVersion, device); + ATRACE_END(); + return res; +} + +int CameraModule::getNumberOfCameras() { + int numCameras; + ATRACE_BEGIN("camera_module->get_number_of_cameras"); + numCameras = mModule->get_number_of_cameras(); + ATRACE_END(); + return numCameras; +} + +int CameraModule::setCallbacks(const camera_module_callbacks_t *callbacks) { + int res = OK; + ATRACE_BEGIN("camera_module->set_callbacks"); + if (getModuleApiVersion() >= CAMERA_MODULE_API_VERSION_2_1) { + res = mModule->set_callbacks(callbacks); + } + ATRACE_END(); + return res; +} + +bool CameraModule::isVendorTagDefined() const { + return mModule->get_vendor_tag_ops != NULL; +} + +void 
CameraModule::getVendorTagOps(vendor_tag_ops_t* ops) { + if (mModule->get_vendor_tag_ops) { + ATRACE_BEGIN("camera_module->get_vendor_tag_ops"); + mModule->get_vendor_tag_ops(ops); + ATRACE_END(); + } +} + +bool CameraModule::isSetTorchModeSupported() const { + if (getModuleApiVersion() >= CAMERA_MODULE_API_VERSION_2_4) { + if (mModule->set_torch_mode == NULL) { + ALOGE("%s: Module 2.4 device must support set torch API!", + __FUNCTION__); + return false; + } + return true; + } + return false; +} + +int CameraModule::setTorchMode(const char* camera_id, bool enable) { + int res = INVALID_OPERATION; + if (mModule->set_torch_mode != NULL) { + ATRACE_BEGIN("camera_module->set_torch_mode"); + res = mModule->set_torch_mode(camera_id, enable); + ATRACE_END(); + } + return res; +} + +int CameraModule::isStreamCombinationSupported(int cameraId, camera_stream_combination_t *streams) { + int res = INVALID_OPERATION; + if (mModule->is_stream_combination_supported != NULL) { + ATRACE_BEGIN("camera_module->is_stream_combination_supported"); + res = mModule->is_stream_combination_supported(cameraId, streams); + ATRACE_END(); + } + return res; +} + +void CameraModule::notifyDeviceStateChange(uint64_t deviceState) { + if (getModuleApiVersion() >= CAMERA_MODULE_API_VERSION_2_5 && + mModule->notify_device_state_change != NULL) { + ATRACE_BEGIN("camera_module->notify_device_state_change"); + ALOGI("%s: calling notify_device_state_change with state %" PRId64, __FUNCTION__, + deviceState); + mModule->notify_device_state_change(deviceState); + ATRACE_END(); + } +} + +bool CameraModule::isLogicalMultiCamera( + const common::V1_0::helper::CameraMetadata& metadata, + std::unordered_set* physicalCameraIds) { + if (physicalCameraIds == nullptr) { + ALOGE("%s: physicalCameraIds must not be null", __FUNCTION__); + return false; + } + + bool isLogicalMultiCamera = false; + camera_metadata_ro_entry_t capabilities = + metadata.find(ANDROID_REQUEST_AVAILABLE_CAPABILITIES); + for (size_t i = 0; i < capabilities.count; i++) { + if (capabilities.data.u8[i] == + ANDROID_REQUEST_AVAILABLE_CAPABILITIES_LOGICAL_MULTI_CAMERA) { + isLogicalMultiCamera = true; + break; + } + } + + if (isLogicalMultiCamera) { + camera_metadata_ro_entry_t entry = + metadata.find(ANDROID_LOGICAL_MULTI_CAMERA_PHYSICAL_IDS); + const uint8_t* ids = entry.data.u8; + size_t start = 0; + for (size_t i = 0; i < entry.count; ++i) { + if (ids[i] == '\0') { + if (start != i) { + const char* physicalId = reinterpret_cast(ids+start); + physicalCameraIds->emplace(physicalId); + } + start = i + 1; + } + } + } + return isLogicalMultiCamera; +} + +status_t CameraModule::filterOpenErrorCode(status_t err) { + switch(err) { + case NO_ERROR: + case -EBUSY: + case -EINVAL: + case -EUSERS: + return err; + default: + break; + } + return -ENODEV; +} + +void CameraModule::removeCamera(int cameraId) { + // Skip HAL1 devices which isn't cached in mCameraInfoMap and don't advertise + // static_camera_characteristics + if (getDeviceVersion(cameraId) >= CAMERA_DEVICE_API_VERSION_3_0) { + std::unordered_set physicalIds; + camera_metadata_t *metadata = const_cast( + mCameraInfoMap.valueFor(cameraId).static_camera_characteristics); + common::V1_0::helper::CameraMetadata hidlMetadata(metadata); + + if (isLogicalMultiCamera(hidlMetadata, &physicalIds)) { + for (const auto& id : physicalIds) { + int idInt = std::stoi(id); + if (mPhysicalCameraInfoMap.indexOfKey(idInt) >= 0) { + free_camera_metadata(mPhysicalCameraInfoMap[idInt]); + mPhysicalCameraInfoMap.removeItem(idInt); + } else { + 
ALOGE("%s: Cannot find corresponding static metadata for physical id %s", + __FUNCTION__, id.c_str()); + } + } + } + } + + mCameraInfoMap.removeItem(cameraId); + mDeviceVersionMap.removeItem(cameraId); +} + +uint16_t CameraModule::getModuleApiVersion() const { + return mModule->common.module_api_version; +} + +const char* CameraModule::getModuleName() const { + return mModule->common.name; +} + +uint16_t CameraModule::getHalApiVersion() const { + return mModule->common.hal_api_version; +} + +const char* CameraModule::getModuleAuthor() const { + return mModule->common.author; +} + +void* CameraModule::getDso() { + return mModule->common.dso; +} + +} // namespace helper +} // namespace V1_0 +} // namespace common +} // namespace camera +} // namespace hardware +} // namespace android diff --git a/camera/common/1.0/default/CameraParameters.cpp b/camera/common/1.0/default/CameraParameters.cpp new file mode 100644 index 0000000..e707b08 --- /dev/null +++ b/camera/common/1.0/default/CameraParameters.cpp @@ -0,0 +1,549 @@ +/* +** +** Copyright 2008, The Android Open Source Project +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +#define LOG_TAG "CameraParams" +#include + +#include +#include +#include +#include "CameraParameters.h" +#include + +namespace android { +namespace hardware { +namespace camera { +namespace common { +namespace V1_0 { +namespace helper { + +// Parameter keys to communicate between camera application and driver. 
+const char CameraParameters::KEY_PREVIEW_SIZE[] = "preview-size"; +const char CameraParameters::KEY_SUPPORTED_PREVIEW_SIZES[] = "preview-size-values"; +const char CameraParameters::KEY_PREVIEW_FORMAT[] = "preview-format"; +const char CameraParameters::KEY_SUPPORTED_PREVIEW_FORMATS[] = "preview-format-values"; +const char CameraParameters::KEY_PREVIEW_FRAME_RATE[] = "preview-frame-rate"; +const char CameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATES[] = "preview-frame-rate-values"; +const char CameraParameters::KEY_PREVIEW_FPS_RANGE[] = "preview-fps-range"; +const char CameraParameters::KEY_SUPPORTED_PREVIEW_FPS_RANGE[] = "preview-fps-range-values"; +const char CameraParameters::KEY_PICTURE_SIZE[] = "picture-size"; +const char CameraParameters::KEY_SUPPORTED_PICTURE_SIZES[] = "picture-size-values"; +const char CameraParameters::KEY_PICTURE_FORMAT[] = "picture-format"; +const char CameraParameters::KEY_SUPPORTED_PICTURE_FORMATS[] = "picture-format-values"; +const char CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH[] = "jpeg-thumbnail-width"; +const char CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT[] = "jpeg-thumbnail-height"; +const char CameraParameters::KEY_SUPPORTED_JPEG_THUMBNAIL_SIZES[] = "jpeg-thumbnail-size-values"; +const char CameraParameters::KEY_JPEG_THUMBNAIL_QUALITY[] = "jpeg-thumbnail-quality"; +const char CameraParameters::KEY_JPEG_QUALITY[] = "jpeg-quality"; +const char CameraParameters::KEY_ROTATION[] = "rotation"; +const char CameraParameters::KEY_GPS_LATITUDE[] = "gps-latitude"; +const char CameraParameters::KEY_GPS_LONGITUDE[] = "gps-longitude"; +const char CameraParameters::KEY_GPS_ALTITUDE[] = "gps-altitude"; +const char CameraParameters::KEY_GPS_TIMESTAMP[] = "gps-timestamp"; +const char CameraParameters::KEY_GPS_PROCESSING_METHOD[] = "gps-processing-method"; +const char CameraParameters::KEY_WHITE_BALANCE[] = "whitebalance"; +const char CameraParameters::KEY_SUPPORTED_WHITE_BALANCE[] = "whitebalance-values"; +const char CameraParameters::KEY_EFFECT[] = "effect"; +const char CameraParameters::KEY_SUPPORTED_EFFECTS[] = "effect-values"; +const char CameraParameters::KEY_ANTIBANDING[] = "antibanding"; +const char CameraParameters::KEY_SUPPORTED_ANTIBANDING[] = "antibanding-values"; +const char CameraParameters::KEY_SCENE_MODE[] = "scene-mode"; +const char CameraParameters::KEY_SUPPORTED_SCENE_MODES[] = "scene-mode-values"; +const char CameraParameters::KEY_FLASH_MODE[] = "flash-mode"; +const char CameraParameters::KEY_SUPPORTED_FLASH_MODES[] = "flash-mode-values"; +const char CameraParameters::KEY_FOCUS_MODE[] = "focus-mode"; +const char CameraParameters::KEY_SUPPORTED_FOCUS_MODES[] = "focus-mode-values"; +const char CameraParameters::KEY_MAX_NUM_FOCUS_AREAS[] = "max-num-focus-areas"; +const char CameraParameters::KEY_FOCUS_AREAS[] = "focus-areas"; +const char CameraParameters::KEY_FOCAL_LENGTH[] = "focal-length"; +const char CameraParameters::KEY_HORIZONTAL_VIEW_ANGLE[] = "horizontal-view-angle"; +const char CameraParameters::KEY_VERTICAL_VIEW_ANGLE[] = "vertical-view-angle"; +const char CameraParameters::KEY_EXPOSURE_COMPENSATION[] = "exposure-compensation"; +const char CameraParameters::KEY_MAX_EXPOSURE_COMPENSATION[] = "max-exposure-compensation"; +const char CameraParameters::KEY_MIN_EXPOSURE_COMPENSATION[] = "min-exposure-compensation"; +const char CameraParameters::KEY_EXPOSURE_COMPENSATION_STEP[] = "exposure-compensation-step"; +const char CameraParameters::KEY_AUTO_EXPOSURE_LOCK[] = "auto-exposure-lock"; +const char 
CameraParameters::KEY_AUTO_EXPOSURE_LOCK_SUPPORTED[] = "auto-exposure-lock-supported"; +const char CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK[] = "auto-whitebalance-lock"; +const char CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK_SUPPORTED[] = "auto-whitebalance-lock-supported"; +const char CameraParameters::KEY_MAX_NUM_METERING_AREAS[] = "max-num-metering-areas"; +const char CameraParameters::KEY_METERING_AREAS[] = "metering-areas"; +const char CameraParameters::KEY_ZOOM[] = "zoom"; +const char CameraParameters::KEY_MAX_ZOOM[] = "max-zoom"; +const char CameraParameters::KEY_ZOOM_RATIOS[] = "zoom-ratios"; +const char CameraParameters::KEY_ZOOM_SUPPORTED[] = "zoom-supported"; +const char CameraParameters::KEY_SMOOTH_ZOOM_SUPPORTED[] = "smooth-zoom-supported"; +const char CameraParameters::KEY_FOCUS_DISTANCES[] = "focus-distances"; +const char CameraParameters::KEY_VIDEO_FRAME_FORMAT[] = "video-frame-format"; +const char CameraParameters::KEY_VIDEO_SIZE[] = "video-size"; +const char CameraParameters::KEY_SUPPORTED_VIDEO_SIZES[] = "video-size-values"; +const char CameraParameters::KEY_PREFERRED_PREVIEW_SIZE_FOR_VIDEO[] = "preferred-preview-size-for-video"; +const char CameraParameters::KEY_MAX_NUM_DETECTED_FACES_HW[] = "max-num-detected-faces-hw"; +const char CameraParameters::KEY_MAX_NUM_DETECTED_FACES_SW[] = "max-num-detected-faces-sw"; +const char CameraParameters::KEY_RECORDING_HINT[] = "recording-hint"; +const char CameraParameters::KEY_VIDEO_SNAPSHOT_SUPPORTED[] = "video-snapshot-supported"; +const char CameraParameters::KEY_VIDEO_STABILIZATION[] = "video-stabilization"; +const char CameraParameters::KEY_VIDEO_STABILIZATION_SUPPORTED[] = "video-stabilization-supported"; +const char CameraParameters::KEY_LIGHTFX[] = "light-fx"; + +const char CameraParameters::TRUE[] = "true"; +const char CameraParameters::FALSE[] = "false"; +const char CameraParameters::FOCUS_DISTANCE_INFINITY[] = "Infinity"; + +// Values for white balance settings. +const char CameraParameters::WHITE_BALANCE_AUTO[] = "auto"; +const char CameraParameters::WHITE_BALANCE_INCANDESCENT[] = "incandescent"; +const char CameraParameters::WHITE_BALANCE_FLUORESCENT[] = "fluorescent"; +const char CameraParameters::WHITE_BALANCE_WARM_FLUORESCENT[] = "warm-fluorescent"; +const char CameraParameters::WHITE_BALANCE_DAYLIGHT[] = "daylight"; +const char CameraParameters::WHITE_BALANCE_CLOUDY_DAYLIGHT[] = "cloudy-daylight"; +const char CameraParameters::WHITE_BALANCE_TWILIGHT[] = "twilight"; +const char CameraParameters::WHITE_BALANCE_SHADE[] = "shade"; + +// Values for effect settings. +const char CameraParameters::EFFECT_NONE[] = "none"; +const char CameraParameters::EFFECT_MONO[] = "mono"; +const char CameraParameters::EFFECT_NEGATIVE[] = "negative"; +const char CameraParameters::EFFECT_SOLARIZE[] = "solarize"; +const char CameraParameters::EFFECT_SEPIA[] = "sepia"; +const char CameraParameters::EFFECT_POSTERIZE[] = "posterize"; +const char CameraParameters::EFFECT_WHITEBOARD[] = "whiteboard"; +const char CameraParameters::EFFECT_BLACKBOARD[] = "blackboard"; +const char CameraParameters::EFFECT_AQUA[] = "aqua"; + +// Values for antibanding settings. +const char CameraParameters::ANTIBANDING_AUTO[] = "auto"; +const char CameraParameters::ANTIBANDING_50HZ[] = "50hz"; +const char CameraParameters::ANTIBANDING_60HZ[] = "60hz"; +const char CameraParameters::ANTIBANDING_OFF[] = "off"; + +// Values for flash mode settings. 
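// [Illustrative note, not part of the patch] The "*-values" keys declared above are
// conventionally comma-separated lists built from constant groups such as these, e.g.
// set(KEY_SUPPORTED_FLASH_MODES, "off,auto,on,torch"); readers split the list on ','
// (compare getSupportedPreviewFormats() and parseSizesList() later in this file).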
+const char CameraParameters::FLASH_MODE_OFF[] = "off"; +const char CameraParameters::FLASH_MODE_AUTO[] = "auto"; +const char CameraParameters::FLASH_MODE_ON[] = "on"; +const char CameraParameters::FLASH_MODE_RED_EYE[] = "red-eye"; +const char CameraParameters::FLASH_MODE_TORCH[] = "torch"; + +// Values for scene mode settings. +const char CameraParameters::SCENE_MODE_AUTO[] = "auto"; +const char CameraParameters::SCENE_MODE_ACTION[] = "action"; +const char CameraParameters::SCENE_MODE_PORTRAIT[] = "portrait"; +const char CameraParameters::SCENE_MODE_LANDSCAPE[] = "landscape"; +const char CameraParameters::SCENE_MODE_NIGHT[] = "night"; +const char CameraParameters::SCENE_MODE_NIGHT_PORTRAIT[] = "night-portrait"; +const char CameraParameters::SCENE_MODE_THEATRE[] = "theatre"; +const char CameraParameters::SCENE_MODE_BEACH[] = "beach"; +const char CameraParameters::SCENE_MODE_SNOW[] = "snow"; +const char CameraParameters::SCENE_MODE_SUNSET[] = "sunset"; +const char CameraParameters::SCENE_MODE_STEADYPHOTO[] = "steadyphoto"; +const char CameraParameters::SCENE_MODE_FIREWORKS[] = "fireworks"; +const char CameraParameters::SCENE_MODE_SPORTS[] = "sports"; +const char CameraParameters::SCENE_MODE_PARTY[] = "party"; +const char CameraParameters::SCENE_MODE_CANDLELIGHT[] = "candlelight"; +const char CameraParameters::SCENE_MODE_BARCODE[] = "barcode"; +const char CameraParameters::SCENE_MODE_HDR[] = "hdr"; + +const char CameraParameters::PIXEL_FORMAT_YUV422SP[] = "yuv422sp"; +const char CameraParameters::PIXEL_FORMAT_YUV420SP[] = "yuv420sp"; +const char CameraParameters::PIXEL_FORMAT_YUV422I[] = "yuv422i-yuyv"; +const char CameraParameters::PIXEL_FORMAT_YUV420P[] = "yuv420p"; +const char CameraParameters::PIXEL_FORMAT_RGB565[] = "rgb565"; +const char CameraParameters::PIXEL_FORMAT_RGBA8888[] = "rgba8888"; +const char CameraParameters::PIXEL_FORMAT_JPEG[] = "jpeg"; +const char CameraParameters::PIXEL_FORMAT_BAYER_RGGB[] = "bayer-rggb"; +const char CameraParameters::PIXEL_FORMAT_ANDROID_OPAQUE[] = "android-opaque"; + +// Values for focus mode settings. +const char CameraParameters::FOCUS_MODE_AUTO[] = "auto"; +const char CameraParameters::FOCUS_MODE_INFINITY[] = "infinity"; +const char CameraParameters::FOCUS_MODE_MACRO[] = "macro"; +const char CameraParameters::FOCUS_MODE_FIXED[] = "fixed"; +const char CameraParameters::FOCUS_MODE_EDOF[] = "edof"; +const char CameraParameters::FOCUS_MODE_CONTINUOUS_VIDEO[] = "continuous-video"; +const char CameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE[] = "continuous-picture"; + +// Values for light fx settings +const char CameraParameters::LIGHTFX_LOWLIGHT[] = "low-light"; +const char CameraParameters::LIGHTFX_HDR[] = "high-dynamic-range"; + +CameraParameters::CameraParameters() + : mMap() +{ +} + +CameraParameters::~CameraParameters() +{ +} + +String8 CameraParameters::flatten() const +{ + String8 flattened(""); + size_t size = mMap.size(); + + for (size_t i = 0; i < size; i++) { + String8 k, v; + k = mMap.keyAt(i); + v = mMap.valueAt(i); + + flattened += k; + flattened += "="; + flattened += v; + if (i != size-1) + flattened += ";"; + } + + return flattened; +} + +void CameraParameters::unflatten(const String8 ¶ms) +{ + const char *a = params.string(); + const char *b; + + mMap.clear(); + + for (;;) { + // Find the bounds of the key name. + b = strchr(a, '='); + if (b == 0) + break; + + // Create the key string. + String8 k(a, (size_t)(b-a)); + + // Find the value. 
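// [Illustrative note, not part of the patch] At this point k holds the text before '=';
// the code below takes everything up to the next ';' (or the end of the string) as the
// value, so a string of the form "key1=value1;key2=value2" produced by flatten() above
// round-trips back into the same map.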
+ a = b+1; + b = strchr(a, ';'); + if (b == 0) { + // If there's no semicolon, this is the last item. + String8 v(a); + mMap.add(k, v); + break; + } + + String8 v(a, (size_t)(b-a)); + mMap.add(k, v); + a = b+1; + } +} + + +void CameraParameters::set(const char *key, const char *value) +{ + // i think i can do this with strspn() + if (strchr(key, '=') || strchr(key, ';')) { + // ALOGE("Key \"%s\"contains invalid character (= or ;)", key); + return; + } + + if (strchr(value, '=') || strchr(value, ';')) { + // ALOGE("Value \"%s\"contains invalid character (= or ;)", value); + return; + } + + mMap.replaceValueFor(String8(key), String8(value)); +} + +void CameraParameters::set(const char *key, int value) +{ + char str[16]; + sprintf(str, "%d", value); + set(key, str); +} + +void CameraParameters::setFloat(const char *key, float value) +{ + char str[16]; // 14 should be enough. We overestimate to be safe. + snprintf(str, sizeof(str), "%g", value); + set(key, str); +} + +const char *CameraParameters::get(const char *key) const +{ + String8 v = mMap.valueFor(String8(key)); + if (v.length() == 0) + return 0; + return v.string(); +} + +int CameraParameters::getInt(const char *key) const +{ + const char *v = get(key); + if (v == 0) + return -1; + return strtol(v, 0, 0); +} + +float CameraParameters::getFloat(const char *key) const +{ + const char *v = get(key); + if (v == 0) return -1; + return strtof(v, 0); +} + +void CameraParameters::remove(const char *key) +{ + mMap.removeItem(String8(key)); +} + +// Parse string like "640x480" or "10000,20000" +static int parse_pair(const char *str, int *first, int *second, char delim, + char **endptr = NULL) +{ + // Find the first integer. + char *end; + int w = (int)strtol(str, &end, 10); + // If a delimeter does not immediately follow, give up. + if (*end != delim) { + ALOGE("Cannot find delimeter (%c) in str=%s", delim, str); + return -1; + } + + // Find the second integer, immediately after the delimeter. 
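// [Worked example, not part of the patch] For str = "640x480" and delim = 'x', the
// strtol() above consumes "640" and leaves *end == 'x'; the strtol() below consumes
// "480", so the caller receives first = 640, second = 480 and, when endptr is supplied,
// a pointer to the character after "480" (a ',' or '\0' in a list like "640x480,1280x720").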
+ int h = (int)strtol(end+1, &end, 10); + + *first = w; + *second = h; + + if (endptr) { + *endptr = end; + } + + return 0; +} + +static void parseSizesList(const char *sizesStr, Vector &sizes) +{ + if (sizesStr == 0) { + return; + } + + char *sizeStartPtr = (char *)sizesStr; + + while (true) { + int width, height; + int success = parse_pair(sizeStartPtr, &width, &height, 'x', + &sizeStartPtr); + if (success == -1 || (*sizeStartPtr != ',' && *sizeStartPtr != '\0')) { + ALOGE("Picture sizes string \"%s\" contains invalid character.", sizesStr); + return; + } + sizes.push(Size(width, height)); + + if (*sizeStartPtr == '\0') { + return; + } + sizeStartPtr++; + } +} + +void CameraParameters::setPreviewSize(int width, int height) +{ + char str[32]; + sprintf(str, "%dx%d", width, height); + set(KEY_PREVIEW_SIZE, str); +} + +void CameraParameters::getPreviewSize(int *width, int *height) const +{ + *width = *height = -1; + // Get the current string, if it doesn't exist, leave the -1x-1 + const char *p = get(KEY_PREVIEW_SIZE); + if (p == 0) return; + parse_pair(p, width, height, 'x'); +} + +void CameraParameters::getPreferredPreviewSizeForVideo(int *width, int *height) const +{ + *width = *height = -1; + const char *p = get(KEY_PREFERRED_PREVIEW_SIZE_FOR_VIDEO); + if (p == 0) return; + parse_pair(p, width, height, 'x'); +} + +void CameraParameters::getSupportedPreviewSizes(Vector &sizes) const +{ + const char *previewSizesStr = get(KEY_SUPPORTED_PREVIEW_SIZES); + parseSizesList(previewSizesStr, sizes); +} + +void CameraParameters::setVideoSize(int width, int height) +{ + char str[32]; + sprintf(str, "%dx%d", width, height); + set(KEY_VIDEO_SIZE, str); +} + +void CameraParameters::getVideoSize(int *width, int *height) const +{ + *width = *height = -1; + const char *p = get(KEY_VIDEO_SIZE); + if (p == 0) return; + parse_pair(p, width, height, 'x'); +} + +void CameraParameters::getSupportedVideoSizes(Vector &sizes) const +{ + const char *videoSizesStr = get(KEY_SUPPORTED_VIDEO_SIZES); + parseSizesList(videoSizesStr, sizes); +} + +void CameraParameters::setPreviewFrameRate(int fps) +{ + set(KEY_PREVIEW_FRAME_RATE, fps); +} + +int CameraParameters::getPreviewFrameRate() const +{ + return getInt(KEY_PREVIEW_FRAME_RATE); +} + +void CameraParameters::getPreviewFpsRange(int *min_fps, int *max_fps) const +{ + *min_fps = *max_fps = -1; + const char *p = get(KEY_PREVIEW_FPS_RANGE); + if (p == 0) return; + parse_pair(p, min_fps, max_fps, ','); +} + +void CameraParameters::setPreviewFormat(const char *format) +{ + set(KEY_PREVIEW_FORMAT, format); +} + +const char *CameraParameters::getPreviewFormat() const +{ + return get(KEY_PREVIEW_FORMAT); +} + +void CameraParameters::setPictureSize(int width, int height) +{ + char str[32]; + sprintf(str, "%dx%d", width, height); + set(KEY_PICTURE_SIZE, str); +} + +void CameraParameters::getPictureSize(int *width, int *height) const +{ + *width = *height = -1; + // Get the current string, if it doesn't exist, leave the -1x-1 + const char *p = get(KEY_PICTURE_SIZE); + if (p == 0) return; + parse_pair(p, width, height, 'x'); +} + +void CameraParameters::getSupportedPictureSizes(Vector &sizes) const +{ + const char *pictureSizesStr = get(KEY_SUPPORTED_PICTURE_SIZES); + parseSizesList(pictureSizesStr, sizes); +} + +void CameraParameters::setPictureFormat(const char *format) +{ + set(KEY_PICTURE_FORMAT, format); +} + +const char *CameraParameters::getPictureFormat() const +{ + return get(KEY_PICTURE_FORMAT); +} + +void CameraParameters::dump() const +{ + ALOGD("dump: mMap.size = 
%zu", mMap.size()); + for (size_t i = 0; i < mMap.size(); i++) { + String8 k, v; + k = mMap.keyAt(i); + v = mMap.valueAt(i); + ALOGD("%s: %s\n", k.string(), v.string()); + } +} + +status_t CameraParameters::dump(int fd, const Vector& /*args*/) const +{ + const size_t SIZE = 256; + char buffer[SIZE]; + String8 result; + snprintf(buffer, 255, "CameraParameters::dump: mMap.size = %zu\n", mMap.size()); + result.append(buffer); + for (size_t i = 0; i < mMap.size(); i++) { + String8 k, v; + k = mMap.keyAt(i); + v = mMap.valueAt(i); + snprintf(buffer, 255, "\t%s: %s\n", k.string(), v.string()); + result.append(buffer); + } + write(fd, result.string(), result.size()); + return NO_ERROR; +} + +void CameraParameters::getSupportedPreviewFormats(Vector& formats) const { + const char* supportedPreviewFormats = + get(CameraParameters::KEY_SUPPORTED_PREVIEW_FORMATS); + + if (supportedPreviewFormats == NULL) { + ALOGW("%s: No supported preview formats.", __FUNCTION__); + return; + } + + String8 fmtStr(supportedPreviewFormats); + char* prevFmts = fmtStr.lockBuffer(fmtStr.size()); + + char* savePtr; + char* fmt = strtok_r(prevFmts, ",", &savePtr); + while (fmt) { + int actual = previewFormatToEnum(fmt); + if (actual != -1) { + formats.add(actual); + } + fmt = strtok_r(NULL, ",", &savePtr); + } + fmtStr.unlockBuffer(fmtStr.size()); +} + + +int CameraParameters::previewFormatToEnum(const char* format) { + return + !format ? + HAL_PIXEL_FORMAT_YCrCb_420_SP : + !strcmp(format, PIXEL_FORMAT_YUV422SP) ? + HAL_PIXEL_FORMAT_YCbCr_422_SP : // NV16 + !strcmp(format, PIXEL_FORMAT_YUV420SP) ? + HAL_PIXEL_FORMAT_YCrCb_420_SP : // NV21 + !strcmp(format, PIXEL_FORMAT_YUV422I) ? + HAL_PIXEL_FORMAT_YCbCr_422_I : // YUY2 + !strcmp(format, PIXEL_FORMAT_YUV420P) ? + HAL_PIXEL_FORMAT_YV12 : // YV12 + !strcmp(format, PIXEL_FORMAT_RGB565) ? + HAL_PIXEL_FORMAT_RGB_565 : // RGB565 + !strcmp(format, PIXEL_FORMAT_RGBA8888) ? + HAL_PIXEL_FORMAT_RGBA_8888 : // RGB8888 + !strcmp(format, PIXEL_FORMAT_BAYER_RGGB) ? + HAL_PIXEL_FORMAT_RAW16 : // Raw sensor data + -1; +} + +bool CameraParameters::isEmpty() const { + return mMap.isEmpty(); +} + +}; +}; +}; +}; +}; +}; // namespace android diff --git a/camera/common/1.0/default/Exif.cpp b/camera/common/1.0/default/Exif.cpp new file mode 100644 index 0000000..413b6bb --- /dev/null +++ b/camera/common/1.0/default/Exif.cpp @@ -0,0 +1,1115 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#define LOG_TAG "CamComm1.0-Exif" +#define ATRACE_TAG ATRACE_TAG_CAMERA +//#define LOG_NDEBUG 0 + +#include + +#include +#include +#include +#include +#include + +#include "Exif.h" + +extern "C" { +#include +} + +namespace std { + +template <> +struct default_delete { + inline void operator()(ExifEntry* entry) const { exif_entry_unref(entry); } +}; + +} // namespace std + + +namespace android { +namespace hardware { +namespace camera { +namespace common { +namespace V1_0 { +namespace helper { + + +class ExifUtilsImpl : public ExifUtils { + public: + ExifUtilsImpl(); + + virtual ~ExifUtilsImpl(); + + // Initialize() can be called multiple times. The setting of Exif tags will be + // cleared. + virtual bool initialize(); + + // set all known fields from a metadata structure + virtual bool setFromMetadata(const CameraMetadata& metadata, + const size_t imageWidth, + const size_t imageHeight); + + // sets the len aperture. + // Returns false if memory allocation fails. + virtual bool setAperture(uint32_t numerator, uint32_t denominator); + + // sets the value of brightness. + // Returns false if memory allocation fails. + virtual bool setBrightness(int32_t numerator, int32_t denominator); + + // sets the color space. + // Returns false if memory allocation fails. + virtual bool setColorSpace(uint16_t color_space); + + // sets the information to compressed data. + // Returns false if memory allocation fails. + virtual bool setComponentsConfiguration(const std::string& components_configuration); + + // sets the compression scheme used for the image data. + // Returns false if memory allocation fails. + virtual bool setCompression(uint16_t compression); + + // sets image contrast. + // Returns false if memory allocation fails. + virtual bool setContrast(uint16_t contrast); + + // sets the date and time of image last modified. It takes local time. The + // name of the tag is DateTime in IFD0. + // Returns false if memory allocation fails. + virtual bool setDateTime(const struct tm& t); + + // sets the image description. + // Returns false if memory allocation fails. + virtual bool setDescription(const std::string& description); + + // sets the digital zoom ratio. If the numerator is 0, it means digital zoom + // was not used. + // Returns false if memory allocation fails. + virtual bool setDigitalZoomRatio(uint32_t numerator, uint32_t denominator); + + // sets the exposure bias. + // Returns false if memory allocation fails. + virtual bool setExposureBias(int32_t numerator, int32_t denominator); + + // sets the exposure mode set when the image was shot. + // Returns false if memory allocation fails. + virtual bool setExposureMode(uint16_t exposure_mode); + + // sets the program used by the camera to set exposure when the picture is + // taken. + // Returns false if memory allocation fails. + virtual bool setExposureProgram(uint16_t exposure_program); + + // sets the exposure time, given in seconds. + // Returns false if memory allocation fails. + virtual bool setExposureTime(uint32_t numerator, uint32_t denominator); + + // sets the status of flash. + // Returns false if memory allocation fails. + virtual bool setFlash(uint16_t flash); + + // sets the F number. + // Returns false if memory allocation fails. + virtual bool setFNumber(uint32_t numerator, uint32_t denominator); + + // sets the focal length of lens used to take the image in millimeters. + // Returns false if memory allocation fails. 
+ virtual bool setFocalLength(uint32_t numerator, uint32_t denominator); + + // sets the degree of overall image gain adjustment. + // Returns false if memory allocation fails. + virtual bool setGainControl(uint16_t gain_control); + + // sets the altitude in meters. + // Returns false if memory allocation fails. + virtual bool setGpsAltitude(double altitude); + + // sets the latitude with degrees minutes seconds format. + // Returns false if memory allocation fails. + virtual bool setGpsLatitude(double latitude); + + // sets the longitude with degrees minutes seconds format. + // Returns false if memory allocation fails. + virtual bool setGpsLongitude(double longitude); + + // sets GPS processing method. + // Returns false if memory allocation fails. + virtual bool setGpsProcessingMethod(const std::string& method); + + // sets GPS date stamp and time stamp (atomic clock). It takes UTC time. + // Returns false if memory allocation fails. + virtual bool setGpsTimestamp(const struct tm& t); + + // sets the length (number of rows) of main image. + // Returns false if memory allocation fails. + virtual bool setImageHeight(uint32_t length); + + // sets the width (number of columes) of main image. + // Returns false if memory allocation fails. + virtual bool setImageWidth(uint32_t width); + + // sets the ISO speed. + // Returns false if memory allocation fails. + virtual bool setIsoSpeedRating(uint16_t iso_speed_ratings); + + // sets the kind of light source. + // Returns false if memory allocation fails. + virtual bool setLightSource(uint16_t light_source); + + // sets the smallest F number of the lens. + // Returns false if memory allocation fails. + virtual bool setMaxAperture(uint32_t numerator, uint32_t denominator); + + // sets the metering mode. + // Returns false if memory allocation fails. + virtual bool setMeteringMode(uint16_t metering_mode); + + // sets image orientation. + // Returns false if memory allocation fails. + virtual bool setOrientation(uint16_t orientation); + + // sets the unit for measuring XResolution and YResolution. + // Returns false if memory allocation fails. + virtual bool setResolutionUnit(uint16_t resolution_unit); + + // sets image saturation. + // Returns false if memory allocation fails. + virtual bool setSaturation(uint16_t saturation); + + // sets the type of scene that was shot. + // Returns false if memory allocation fails. + virtual bool setSceneCaptureType(uint16_t type); + + // sets image sharpness. + // Returns false if memory allocation fails. + virtual bool setSharpness(uint16_t sharpness); + + // sets the shutter speed. + // Returns false if memory allocation fails. + virtual bool setShutterSpeed(int32_t numerator, int32_t denominator); + + // sets the distance to the subject, given in meters. + // Returns false if memory allocation fails. + virtual bool setSubjectDistance(uint32_t numerator, uint32_t denominator); + + // sets the fractions of seconds for the tag. + // Returns false if memory allocation fails. + virtual bool setSubsecTime(const std::string& subsec_time); + + // sets the white balance mode set when the image was shot. + // Returns false if memory allocation fails. + virtual bool setWhiteBalance(uint16_t white_balance); + + // sets the number of pixels per resolution unit in the image width. + // Returns false if memory allocation fails. + virtual bool setXResolution(uint32_t numerator, uint32_t denominator); + + // sets the position of chrominance components in relation to the luminance + // component. 
+ // Returns false if memory allocation fails. + virtual bool setYCbCrPositioning(uint16_t ycbcr_positioning); + + // sets the number of pixels per resolution unit in the image length. + // Returns false if memory allocation fails. + virtual bool setYResolution(uint32_t numerator, uint32_t denominator); + + // sets the manufacturer of camera. + // Returns false if memory allocation fails. + virtual bool setMake(const std::string& make); + + // sets the model number of camera. + // Returns false if memory allocation fails. + virtual bool setModel(const std::string& model); + + // Generates APP1 segment. + // Returns false if generating APP1 segment fails. + virtual bool generateApp1(const void* thumbnail_buffer, uint32_t size); + + // Gets buffer of APP1 segment. This method must be called only after calling + // GenerateAPP1(). + virtual const uint8_t* getApp1Buffer(); + + // Gets length of APP1 segment. This method must be called only after calling + // GenerateAPP1(). + virtual unsigned int getApp1Length(); + + protected: + // sets the version of this standard supported. + // Returns false if memory allocation fails. + virtual bool setExifVersion(const std::string& exif_version); + + + // Resets the pointers and memories. + virtual void reset(); + + // Adds a variable length tag to |exif_data_|. It will remove the original one + // if the tag exists. + // Returns the entry of the tag. The reference count of returned ExifEntry is + // two. + virtual std::unique_ptr addVariableLengthEntry(ExifIfd ifd, + ExifTag tag, + ExifFormat format, + uint64_t components, + unsigned int size); + + // Adds a entry of |tag| in |exif_data_|. It won't remove the original one if + // the tag exists. + // Returns the entry of the tag. It adds one reference count to returned + // ExifEntry. + virtual std::unique_ptr addEntry(ExifIfd ifd, ExifTag tag); + + // Helpe functions to add exif data with different types. + virtual bool setShort(ExifIfd ifd, + ExifTag tag, + uint16_t value, + const std::string& msg); + + virtual bool setLong(ExifIfd ifd, + ExifTag tag, + uint32_t value, + const std::string& msg); + + virtual bool setRational(ExifIfd ifd, + ExifTag tag, + uint32_t numerator, + uint32_t denominator, + const std::string& msg); + + virtual bool setSRational(ExifIfd ifd, + ExifTag tag, + int32_t numerator, + int32_t denominator, + const std::string& msg); + + virtual bool setString(ExifIfd ifd, + ExifTag tag, + ExifFormat format, + const std::string& buffer, + const std::string& msg); + + // Destroys the buffer of APP1 segment if exists. + virtual void destroyApp1(); + + // The Exif data (APP1). Owned by this class. + ExifData* exif_data_; + // The raw data of APP1 segment. It's allocated by ExifMem in |exif_data_| but + // owned by this class. + uint8_t* app1_buffer_; + // The length of |app1_buffer_|. 
+ unsigned int app1_length_; + +}; + +#define SET_SHORT(ifd, tag, value) \ + do { \ + if (setShort(ifd, tag, value, #tag) == false) \ + return false; \ + } while (0); + +#define SET_LONG(ifd, tag, value) \ + do { \ + if (setLong(ifd, tag, value, #tag) == false) \ + return false; \ + } while (0); + +#define SET_RATIONAL(ifd, tag, numerator, denominator) \ + do { \ + if (setRational(ifd, tag, numerator, denominator, #tag) == false) \ + return false; \ + } while (0); + +#define SET_SRATIONAL(ifd, tag, numerator, denominator) \ + do { \ + if (setSRational(ifd, tag, numerator, denominator, #tag) == false) \ + return false; \ + } while (0); + +#define SET_STRING(ifd, tag, format, buffer) \ + do { \ + if (setString(ifd, tag, format, buffer, #tag) == false) \ + return false; \ + } while (0); + +// This comes from the Exif Version 2.2 standard table 6. +const char gExifAsciiPrefix[] = {0x41, 0x53, 0x43, 0x49, 0x49, 0x0, 0x0, 0x0}; + +static void setLatitudeOrLongitudeData(unsigned char* data, double num) { + // Take the integer part of |num|. + ExifLong degrees = static_cast(num); + ExifLong minutes = static_cast(60 * (num - degrees)); + ExifLong microseconds = + static_cast(3600000000u * (num - degrees - minutes / 60.0)); + exif_set_rational(data, EXIF_BYTE_ORDER_INTEL, {degrees, 1}); + exif_set_rational(data + sizeof(ExifRational), EXIF_BYTE_ORDER_INTEL, + {minutes, 1}); + exif_set_rational(data + 2 * sizeof(ExifRational), EXIF_BYTE_ORDER_INTEL, + {microseconds, 1000000}); +} + +ExifUtils *ExifUtils::create() { + return new ExifUtilsImpl(); +} + +ExifUtils::~ExifUtils() { +} + +ExifUtilsImpl::ExifUtilsImpl() + : exif_data_(nullptr), app1_buffer_(nullptr), app1_length_(0) {} + +ExifUtilsImpl::~ExifUtilsImpl() { + reset(); +} + + +bool ExifUtilsImpl::initialize() { + reset(); + exif_data_ = exif_data_new(); + if (exif_data_ == nullptr) { + ALOGE("%s: allocate memory for exif_data_ failed", __FUNCTION__); + return false; + } + // set the image options. + exif_data_set_option(exif_data_, EXIF_DATA_OPTION_FOLLOW_SPECIFICATION); + exif_data_set_data_type(exif_data_, EXIF_DATA_TYPE_COMPRESSED); + exif_data_set_byte_order(exif_data_, EXIF_BYTE_ORDER_INTEL); + + // set exif version to 2.2. + if (!setExifVersion("0220")) { + return false; + } + + return true; +} + +bool ExifUtilsImpl::setAperture(uint32_t numerator, uint32_t denominator) { + SET_RATIONAL(EXIF_IFD_EXIF, EXIF_TAG_APERTURE_VALUE, numerator, denominator); + return true; +} + +bool ExifUtilsImpl::setBrightness(int32_t numerator, int32_t denominator) { + SET_SRATIONAL(EXIF_IFD_EXIF, EXIF_TAG_BRIGHTNESS_VALUE, numerator, + denominator); + return true; +} + +bool ExifUtilsImpl::setColorSpace(uint16_t color_space) { + SET_SHORT(EXIF_IFD_EXIF, EXIF_TAG_COLOR_SPACE, color_space); + return true; +} + +bool ExifUtilsImpl::setComponentsConfiguration( + const std::string& components_configuration) { + SET_STRING(EXIF_IFD_EXIF, EXIF_TAG_COMPONENTS_CONFIGURATION, + EXIF_FORMAT_UNDEFINED, components_configuration); + return true; +} + +bool ExifUtilsImpl::setCompression(uint16_t compression) { + SET_SHORT(EXIF_IFD_0, EXIF_TAG_COMPRESSION, compression); + return true; +} + +bool ExifUtilsImpl::setContrast(uint16_t contrast) { + SET_SHORT(EXIF_IFD_EXIF, EXIF_TAG_CONTRAST, contrast); + return true; +} + +bool ExifUtilsImpl::setDateTime(const struct tm& t) { + // The length is 20 bytes including NULL for termination in Exif standard. 
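// [Worked example, not part of the patch] The format string below renders e.g.
// "2023:08:25 20:06:48", i.e. 19 characters plus the terminating NUL, which is why the
// buffer is 20 bytes and the snprintf() result is compared against sizeof(str) - 1.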
+ char str[20]; + int result = snprintf(str, sizeof(str), "%04i:%02i:%02i %02i:%02i:%02i", + t.tm_year + 1900, t.tm_mon + 1, t.tm_mday, t.tm_hour, + t.tm_min, t.tm_sec); + if (result != sizeof(str) - 1) { + ALOGW("%s: Input time is invalid", __FUNCTION__); + return false; + } + std::string buffer(str); + SET_STRING(EXIF_IFD_0, EXIF_TAG_DATE_TIME, EXIF_FORMAT_ASCII, buffer); + SET_STRING(EXIF_IFD_EXIF, EXIF_TAG_DATE_TIME_ORIGINAL, EXIF_FORMAT_ASCII, + buffer); + SET_STRING(EXIF_IFD_EXIF, EXIF_TAG_DATE_TIME_DIGITIZED, EXIF_FORMAT_ASCII, + buffer); + return true; +} + +bool ExifUtilsImpl::setDescription(const std::string& description) { + SET_STRING(EXIF_IFD_0, EXIF_TAG_IMAGE_DESCRIPTION, EXIF_FORMAT_ASCII, + description); + return true; +} + +bool ExifUtilsImpl::setDigitalZoomRatio(uint32_t numerator, uint32_t denominator) { + SET_RATIONAL(EXIF_IFD_EXIF, EXIF_TAG_DIGITAL_ZOOM_RATIO, numerator, + denominator); + return true; +} + +bool ExifUtilsImpl::setExposureBias(int32_t numerator, int32_t denominator) { + SET_SRATIONAL(EXIF_IFD_EXIF, EXIF_TAG_EXPOSURE_BIAS_VALUE, numerator, + denominator); + return true; +} + +bool ExifUtilsImpl::setExposureMode(uint16_t exposure_mode) { + SET_SHORT(EXIF_IFD_EXIF, EXIF_TAG_EXPOSURE_MODE, exposure_mode); + return true; +} + +bool ExifUtilsImpl::setExposureProgram(uint16_t exposure_program) { + SET_SHORT(EXIF_IFD_EXIF, EXIF_TAG_EXPOSURE_PROGRAM, exposure_program); + return true; +} + +bool ExifUtilsImpl::setExposureTime(uint32_t numerator, uint32_t denominator) { + SET_RATIONAL(EXIF_IFD_EXIF, EXIF_TAG_EXPOSURE_TIME, numerator, denominator); + return true; +} + +bool ExifUtilsImpl::setFlash(uint16_t flash) { + SET_SHORT(EXIF_IFD_EXIF, EXIF_TAG_FLASH, flash); + return true; +} + +bool ExifUtilsImpl::setFNumber(uint32_t numerator, uint32_t denominator) { + SET_RATIONAL(EXIF_IFD_EXIF, EXIF_TAG_FNUMBER, numerator, denominator); + return true; +} + +bool ExifUtilsImpl::setFocalLength(uint32_t numerator, uint32_t denominator) { + SET_RATIONAL(EXIF_IFD_EXIF, EXIF_TAG_FOCAL_LENGTH, numerator, denominator); + return true; +} + +bool ExifUtilsImpl::setGainControl(uint16_t gain_control) { + SET_SHORT(EXIF_IFD_EXIF, EXIF_TAG_GAIN_CONTROL, gain_control); + return true; +} + +bool ExifUtilsImpl::setGpsAltitude(double altitude) { + ExifTag refTag = static_cast(EXIF_TAG_GPS_ALTITUDE_REF); + std::unique_ptr refEntry = + addVariableLengthEntry(EXIF_IFD_GPS, refTag, EXIF_FORMAT_BYTE, 1, 1); + if (!refEntry) { + ALOGE("%s: Adding GPSAltitudeRef exif entry failed", __FUNCTION__); + return false; + } + if (altitude >= 0) { + *refEntry->data = 0; + } else { + *refEntry->data = 1; + altitude *= -1; + } + + ExifTag tag = static_cast(EXIF_TAG_GPS_ALTITUDE); + std::unique_ptr entry = addVariableLengthEntry( + EXIF_IFD_GPS, tag, EXIF_FORMAT_RATIONAL, 1, sizeof(ExifRational)); + if (!entry) { + exif_content_remove_entry(exif_data_->ifd[EXIF_IFD_GPS], refEntry.get()); + ALOGE("%s: Adding GPSAltitude exif entry failed", __FUNCTION__); + return false; + } + exif_set_rational(entry->data, EXIF_BYTE_ORDER_INTEL, + {static_cast(altitude * 1000), 1000}); + + return true; +} + +bool ExifUtilsImpl::setGpsLatitude(double latitude) { + const ExifTag refTag = static_cast(EXIF_TAG_GPS_LATITUDE_REF); + std::unique_ptr refEntry = + addVariableLengthEntry(EXIF_IFD_GPS, refTag, EXIF_FORMAT_ASCII, 2, 2); + if (!refEntry) { + ALOGE("%s: Adding GPSLatitudeRef exif entry failed", __FUNCTION__); + return false; + } + if (latitude >= 0) { + memcpy(refEntry->data, "N", sizeof("N")); + } else { + 
memcpy(refEntry->data, "S", sizeof("S")); + latitude *= -1; + } + + const ExifTag tag = static_cast(EXIF_TAG_GPS_LATITUDE); + std::unique_ptr entry = addVariableLengthEntry( + EXIF_IFD_GPS, tag, EXIF_FORMAT_RATIONAL, 3, 3 * sizeof(ExifRational)); + if (!entry) { + exif_content_remove_entry(exif_data_->ifd[EXIF_IFD_GPS], refEntry.get()); + ALOGE("%s: Adding GPSLatitude exif entry failed", __FUNCTION__); + return false; + } + setLatitudeOrLongitudeData(entry->data, latitude); + + return true; +} + +bool ExifUtilsImpl::setGpsLongitude(double longitude) { + ExifTag refTag = static_cast(EXIF_TAG_GPS_LONGITUDE_REF); + std::unique_ptr refEntry = + addVariableLengthEntry(EXIF_IFD_GPS, refTag, EXIF_FORMAT_ASCII, 2, 2); + if (!refEntry) { + ALOGE("%s: Adding GPSLongitudeRef exif entry failed", __FUNCTION__); + return false; + } + if (longitude >= 0) { + memcpy(refEntry->data, "E", sizeof("E")); + } else { + memcpy(refEntry->data, "W", sizeof("W")); + longitude *= -1; + } + + ExifTag tag = static_cast(EXIF_TAG_GPS_LONGITUDE); + std::unique_ptr entry = addVariableLengthEntry( + EXIF_IFD_GPS, tag, EXIF_FORMAT_RATIONAL, 3, 3 * sizeof(ExifRational)); + if (!entry) { + exif_content_remove_entry(exif_data_->ifd[EXIF_IFD_GPS], refEntry.get()); + ALOGE("%s: Adding GPSLongitude exif entry failed", __FUNCTION__); + return false; + } + setLatitudeOrLongitudeData(entry->data, longitude); + + return true; +} + +bool ExifUtilsImpl::setGpsProcessingMethod(const std::string& method) { + std::string buffer = + std::string(gExifAsciiPrefix, sizeof(gExifAsciiPrefix)) + method; + SET_STRING(EXIF_IFD_GPS, static_cast(EXIF_TAG_GPS_PROCESSING_METHOD), + EXIF_FORMAT_UNDEFINED, buffer); + return true; +} + +bool ExifUtilsImpl::setGpsTimestamp(const struct tm& t) { + const ExifTag dateTag = static_cast(EXIF_TAG_GPS_DATE_STAMP); + const size_t kGpsDateStampSize = 11; + std::unique_ptr entry = + addVariableLengthEntry(EXIF_IFD_GPS, dateTag, EXIF_FORMAT_ASCII, + kGpsDateStampSize, kGpsDateStampSize); + if (!entry) { + ALOGE("%s: Adding GPSDateStamp exif entry failed", __FUNCTION__); + return false; + } + int result = + snprintf(reinterpret_cast(entry->data), kGpsDateStampSize, + "%04i:%02i:%02i", t.tm_year + 1900, t.tm_mon + 1, t.tm_mday); + if (result != kGpsDateStampSize - 1) { + ALOGW("%s: Input time is invalid", __FUNCTION__); + return false; + } + + const ExifTag timeTag = static_cast(EXIF_TAG_GPS_TIME_STAMP); + entry = addVariableLengthEntry(EXIF_IFD_GPS, timeTag, EXIF_FORMAT_RATIONAL, 3, + 3 * sizeof(ExifRational)); + if (!entry) { + ALOGE("%s: Adding GPSTimeStamp exif entry failed", __FUNCTION__); + return false; + } + exif_set_rational(entry->data, EXIF_BYTE_ORDER_INTEL, + {static_cast(t.tm_hour), 1}); + exif_set_rational(entry->data + sizeof(ExifRational), EXIF_BYTE_ORDER_INTEL, + {static_cast(t.tm_min), 1}); + exif_set_rational(entry->data + 2 * sizeof(ExifRational), + EXIF_BYTE_ORDER_INTEL, + {static_cast(t.tm_sec), 1}); + + return true; +} + +bool ExifUtilsImpl::setImageHeight(uint32_t length) { + SET_SHORT(EXIF_IFD_0, EXIF_TAG_IMAGE_LENGTH, length); + SET_LONG(EXIF_IFD_EXIF, EXIF_TAG_PIXEL_Y_DIMENSION, length); + return true; +} + +bool ExifUtilsImpl::setImageWidth(uint32_t width) { + SET_SHORT(EXIF_IFD_0, EXIF_TAG_IMAGE_WIDTH, width); + SET_LONG(EXIF_IFD_EXIF, EXIF_TAG_PIXEL_X_DIMENSION, width); + return true; +} + +bool ExifUtilsImpl::setIsoSpeedRating(uint16_t iso_speed_ratings) { + SET_SHORT(EXIF_IFD_EXIF, EXIF_TAG_ISO_SPEED_RATINGS, iso_speed_ratings); + return true; +} + +bool 
ExifUtilsImpl::setLightSource(uint16_t light_source) { + SET_SHORT(EXIF_IFD_EXIF, EXIF_TAG_LIGHT_SOURCE, light_source); + return true; +} + +bool ExifUtilsImpl::setMaxAperture(uint32_t numerator, uint32_t denominator) { + SET_RATIONAL(EXIF_IFD_EXIF, EXIF_TAG_MAX_APERTURE_VALUE, numerator, + denominator); + return true; +} + +bool ExifUtilsImpl::setMeteringMode(uint16_t metering_mode) { + SET_SHORT(EXIF_IFD_EXIF, EXIF_TAG_METERING_MODE, metering_mode); + return true; +} + +bool ExifUtilsImpl::setOrientation(uint16_t orientation) { + /* + * Orientation value: + * 1 2 3 4 5 6 7 8 + * + * 888888 888888 88 88 8888888888 88 88 8888888888 + * 88 88 88 88 88 88 88 88 88 88 88 88 + * 8888 8888 8888 8888 88 8888888888 8888888888 88 + * 88 88 88 88 + * 88 88 888888 888888 + */ + int value = 1; + switch (orientation) { + case 90: + value = 6; + break; + case 180: + value = 3; + break; + case 270: + value = 8; + break; + default: + break; + } + SET_SHORT(EXIF_IFD_0, EXIF_TAG_ORIENTATION, value); + return true; +} + +bool ExifUtilsImpl::setResolutionUnit(uint16_t resolution_unit) { + SET_SHORT(EXIF_IFD_EXIF, EXIF_TAG_RESOLUTION_UNIT, resolution_unit); + return true; +} + +bool ExifUtilsImpl::setSaturation(uint16_t saturation) { + SET_SHORT(EXIF_IFD_EXIF, EXIF_TAG_SATURATION, saturation); + return true; +} + +bool ExifUtilsImpl::setSceneCaptureType(uint16_t type) { + SET_SHORT(EXIF_IFD_EXIF, EXIF_TAG_SCENE_CAPTURE_TYPE, type); + return true; +} + +bool ExifUtilsImpl::setSharpness(uint16_t sharpness) { + SET_SHORT(EXIF_IFD_EXIF, EXIF_TAG_SHARPNESS, sharpness); + return true; +} + +bool ExifUtilsImpl::setShutterSpeed(int32_t numerator, int32_t denominator) { + SET_SRATIONAL(EXIF_IFD_EXIF, EXIF_TAG_SHUTTER_SPEED_VALUE, numerator, + denominator); + return true; +} + +bool ExifUtilsImpl::setSubjectDistance(uint32_t numerator, uint32_t denominator) { + SET_RATIONAL(EXIF_IFD_EXIF, EXIF_TAG_SUBJECT_DISTANCE, numerator, + denominator); + return true; +} + +bool ExifUtilsImpl::setSubsecTime(const std::string& subsec_time) { + SET_STRING(EXIF_IFD_EXIF, EXIF_TAG_SUB_SEC_TIME, EXIF_FORMAT_ASCII, + subsec_time); + SET_STRING(EXIF_IFD_EXIF, EXIF_TAG_SUB_SEC_TIME_ORIGINAL, EXIF_FORMAT_ASCII, + subsec_time); + SET_STRING(EXIF_IFD_EXIF, EXIF_TAG_SUB_SEC_TIME_DIGITIZED, EXIF_FORMAT_ASCII, + subsec_time); + return true; +} + +bool ExifUtilsImpl::setWhiteBalance(uint16_t white_balance) { + SET_SHORT(EXIF_IFD_EXIF, EXIF_TAG_WHITE_BALANCE, white_balance); + return true; +} + +bool ExifUtilsImpl::setXResolution(uint32_t numerator, uint32_t denominator) { + SET_RATIONAL(EXIF_IFD_EXIF, EXIF_TAG_X_RESOLUTION, numerator, denominator); + return true; +} + +bool ExifUtilsImpl::setYCbCrPositioning(uint16_t ycbcr_positioning) { + SET_SHORT(EXIF_IFD_0, EXIF_TAG_YCBCR_POSITIONING, ycbcr_positioning); + return true; +} + +bool ExifUtilsImpl::setYResolution(uint32_t numerator, uint32_t denominator) { + SET_RATIONAL(EXIF_IFD_EXIF, EXIF_TAG_Y_RESOLUTION, numerator, denominator); + return true; +} + +bool ExifUtilsImpl::generateApp1(const void* thumbnail_buffer, uint32_t size) { + destroyApp1(); + exif_data_->data = const_cast(static_cast(thumbnail_buffer)); + exif_data_->size = size; + // Save the result into |app1_buffer_|. + exif_data_save_data(exif_data_, &app1_buffer_, &app1_length_); + if (!app1_length_) { + ALOGE("%s: Allocate memory for app1_buffer_ failed", __FUNCTION__); + return false; + } + /* + * The JPEG segment size is 16 bits in spec. 
The size of APP1 segment should + * be smaller than 65533 because there are two bytes for segment size field. + */ + if (app1_length_ > 65533) { + destroyApp1(); + ALOGE("%s: The size of APP1 segment is too large", __FUNCTION__); + return false; + } + return true; +} + +const uint8_t* ExifUtilsImpl::getApp1Buffer() { + return app1_buffer_; +} + +unsigned int ExifUtilsImpl::getApp1Length() { + return app1_length_; +} + +bool ExifUtilsImpl::setExifVersion(const std::string& exif_version) { + SET_STRING(EXIF_IFD_EXIF, EXIF_TAG_EXIF_VERSION, EXIF_FORMAT_UNDEFINED, exif_version); + return true; +} + +bool ExifUtilsImpl::setMake(const std::string& make) { + SET_STRING(EXIF_IFD_0, EXIF_TAG_MAKE, EXIF_FORMAT_ASCII, make); + return true; +} + +bool ExifUtilsImpl::setModel(const std::string& model) { + SET_STRING(EXIF_IFD_0, EXIF_TAG_MODEL, EXIF_FORMAT_ASCII, model); + return true; +} + +void ExifUtilsImpl::reset() { + destroyApp1(); + if (exif_data_) { + /* + * Since we decided to ignore the original APP1, we are sure that there is + * no thumbnail allocated by libexif. |exif_data_->data| is actually + * allocated by JpegCompressor. sets |exif_data_->data| to nullptr to + * prevent exif_data_unref() destroy it incorrectly. + */ + exif_data_->data = nullptr; + exif_data_->size = 0; + exif_data_unref(exif_data_); + exif_data_ = nullptr; + } +} + +std::unique_ptr ExifUtilsImpl::addVariableLengthEntry(ExifIfd ifd, + ExifTag tag, + ExifFormat format, + uint64_t components, + unsigned int size) { + // Remove old entry if exists. + exif_content_remove_entry(exif_data_->ifd[ifd], + exif_content_get_entry(exif_data_->ifd[ifd], tag)); + ExifMem* mem = exif_mem_new_default(); + if (!mem) { + ALOGE("%s: Allocate memory for exif entry failed", __FUNCTION__); + return nullptr; + } + std::unique_ptr entry(exif_entry_new_mem(mem)); + if (!entry) { + ALOGE("%s: Allocate memory for exif entry failed", __FUNCTION__); + exif_mem_unref(mem); + return nullptr; + } + void* tmpBuffer = exif_mem_alloc(mem, size); + if (!tmpBuffer) { + ALOGE("%s: Allocate memory for exif entry failed", __FUNCTION__); + exif_mem_unref(mem); + return nullptr; + } + + entry->data = static_cast(tmpBuffer); + entry->tag = tag; + entry->format = format; + entry->components = components; + entry->size = size; + + exif_content_add_entry(exif_data_->ifd[ifd], entry.get()); + exif_mem_unref(mem); + + return entry; +} + +std::unique_ptr ExifUtilsImpl::addEntry(ExifIfd ifd, ExifTag tag) { + std::unique_ptr entry(exif_content_get_entry(exif_data_->ifd[ifd], tag)); + if (entry) { + // exif_content_get_entry() won't ref the entry, so we ref here. 
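// [Illustrative note, not part of the patch] The returned unique_ptr relies on the
// std::default_delete specialization for ExifEntry defined at the top of this file, which
// calls exif_entry_unref(); the extra reference taken below balances that unref so the
// entry owned by the ExifContent stays alive after the caller's unique_ptr is destroyed.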
+ exif_entry_ref(entry.get()); + return entry; + } + entry.reset(exif_entry_new()); + if (!entry) { + ALOGE("%s: Allocate memory for exif entry failed", __FUNCTION__); + return nullptr; + } + entry->tag = tag; + exif_content_add_entry(exif_data_->ifd[ifd], entry.get()); + exif_entry_initialize(entry.get(), tag); + return entry; +} + +bool ExifUtilsImpl::setShort(ExifIfd ifd, + ExifTag tag, + uint16_t value, + const std::string& msg) { + std::unique_ptr entry = addEntry(ifd, tag); + if (!entry) { + ALOGE("%s: Adding '%s' entry failed", __FUNCTION__, msg.c_str()); + return false; + } + exif_set_short(entry->data, EXIF_BYTE_ORDER_INTEL, value); + return true; +} + +bool ExifUtilsImpl::setLong(ExifIfd ifd, + ExifTag tag, + uint32_t value, + const std::string& msg) { + std::unique_ptr entry = addEntry(ifd, tag); + if (!entry) { + ALOGE("%s: Adding '%s' entry failed", __FUNCTION__, msg.c_str()); + return false; + } + exif_set_long(entry->data, EXIF_BYTE_ORDER_INTEL, value); + return true; +} + +bool ExifUtilsImpl::setRational(ExifIfd ifd, + ExifTag tag, + uint32_t numerator, + uint32_t denominator, + const std::string& msg) { + std::unique_ptr entry = addEntry(ifd, tag); + if (!entry) { + ALOGE("%s: Adding '%s' entry failed", __FUNCTION__, msg.c_str()); + return false; + } + exif_set_rational(entry->data, EXIF_BYTE_ORDER_INTEL, + {numerator, denominator}); + return true; +} + +bool ExifUtilsImpl::setSRational(ExifIfd ifd, + ExifTag tag, + int32_t numerator, + int32_t denominator, + const std::string& msg) { + std::unique_ptr entry = addEntry(ifd, tag); + if (!entry) { + ALOGE("%s: Adding '%s' entry failed", __FUNCTION__, msg.c_str()); + return false; + } + exif_set_srational(entry->data, EXIF_BYTE_ORDER_INTEL, + {numerator, denominator}); + return true; +} + +bool ExifUtilsImpl::setString(ExifIfd ifd, + ExifTag tag, + ExifFormat format, + const std::string& buffer, + const std::string& msg) { + size_t entry_size = buffer.length(); + // Since the exif format is undefined, NULL termination is not necessary. + if (format == EXIF_FORMAT_ASCII) { + entry_size++; + } + std::unique_ptr entry = + addVariableLengthEntry(ifd, tag, format, entry_size, entry_size); + if (!entry) { + ALOGE("%s: Adding '%s' entry failed", __FUNCTION__, msg.c_str()); + return false; + } + memcpy(entry->data, buffer.c_str(), entry_size); + return true; +} + +void ExifUtilsImpl::destroyApp1() { + /* + * Since there is no API to access ExifMem in ExifData->priv, we use free + * here, which is the default free function in libexif. See + * exif_data_save_data() for detail. + */ + free(app1_buffer_); + app1_buffer_ = nullptr; + app1_length_ = 0; +} + +bool ExifUtilsImpl::setFromMetadata(const CameraMetadata& metadata, + const size_t imageWidth, + const size_t imageHeight) { + // How precise the float-to-rational conversion for EXIF tags would be. 
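// [Worked example, not part of the patch] With a precision of 10000, a focal length the
// HAL reports as, say, 4.25f is written below as the EXIF rational 42500/10000; the same
// scheme is reused for the F number further down via kAperturePrecision.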
+ constexpr int kRationalPrecision = 10000; + if (!setImageWidth(imageWidth) || + !setImageHeight(imageHeight)) { + ALOGE("%s: setting image resolution failed.", __FUNCTION__); + return false; + } + + struct timespec tp; + struct tm time_info; + bool time_available = clock_gettime(CLOCK_REALTIME, &tp) != -1; + localtime_r(&tp.tv_sec, &time_info); + if (!setDateTime(time_info)) { + ALOGE("%s: setting data time failed.", __FUNCTION__); + return false; + } + + float focal_length; + camera_metadata_ro_entry entry = metadata.find(ANDROID_LENS_FOCAL_LENGTH); + if (entry.count) { + focal_length = entry.data.f[0]; + + if (!setFocalLength( + static_cast(focal_length * kRationalPrecision), + kRationalPrecision)) { + ALOGE("%s: setting focal length failed.", __FUNCTION__); + return false; + } + } else { + ALOGV("%s: Cannot find focal length in metadata.", __FUNCTION__); + } + + if (metadata.exists(ANDROID_JPEG_GPS_COORDINATES)) { + entry = metadata.find(ANDROID_JPEG_GPS_COORDINATES); + if (entry.count < 3) { + ALOGE("%s: Gps coordinates in metadata is not complete.", __FUNCTION__); + return false; + } + if (!setGpsLatitude(entry.data.d[0])) { + ALOGE("%s: setting gps latitude failed.", __FUNCTION__); + return false; + } + if (!setGpsLongitude(entry.data.d[1])) { + ALOGE("%s: setting gps longitude failed.", __FUNCTION__); + return false; + } + if (!setGpsAltitude(entry.data.d[2])) { + ALOGE("%s: setting gps altitude failed.", __FUNCTION__); + return false; + } + } + + if (metadata.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) { + entry = metadata.find(ANDROID_JPEG_GPS_PROCESSING_METHOD); + std::string method_str(reinterpret_cast(entry.data.u8)); + if (!setGpsProcessingMethod(method_str)) { + ALOGE("%s: setting gps processing method failed.", __FUNCTION__); + return false; + } + } + + if (time_available && metadata.exists(ANDROID_JPEG_GPS_TIMESTAMP)) { + entry = metadata.find(ANDROID_JPEG_GPS_TIMESTAMP); + time_t timestamp = static_cast(entry.data.i64[0]); + if (gmtime_r(×tamp, &time_info)) { + if (!setGpsTimestamp(time_info)) { + ALOGE("%s: setting gps timestamp failed.", __FUNCTION__); + return false; + } + } else { + ALOGE("%s: Time tranformation failed.", __FUNCTION__); + return false; + } + } + + if (metadata.exists(ANDROID_JPEG_ORIENTATION)) { + entry = metadata.find(ANDROID_JPEG_ORIENTATION); + if (!setOrientation(entry.data.i32[0])) { + ALOGE("%s: setting orientation failed.", __FUNCTION__); + return false; + } + } + + if (metadata.exists(ANDROID_SENSOR_EXPOSURE_TIME)) { + entry = metadata.find(ANDROID_SENSOR_EXPOSURE_TIME); + // int64_t of nanoseconds + if (!setExposureTime(entry.data.i64[0],1000000000u)) { + ALOGE("%s: setting exposure time failed.", __FUNCTION__); + return false; + } + } + + if (metadata.exists(ANDROID_LENS_APERTURE)) { + const int kAperturePrecision = 10000; + entry = metadata.find(ANDROID_LENS_APERTURE); + if (!setFNumber(entry.data.f[0] * kAperturePrecision, + kAperturePrecision)) { + ALOGE("%s: setting F number failed.", __FUNCTION__); + return false; + } + } + + if (metadata.exists(ANDROID_FLASH_INFO_AVAILABLE)) { + entry = metadata.find(ANDROID_FLASH_INFO_AVAILABLE); + if (entry.data.u8[0] == ANDROID_FLASH_INFO_AVAILABLE_FALSE) { + const uint32_t kNoFlashFunction = 0x20; + if (!setFlash(kNoFlashFunction)) { + ALOGE("%s: setting flash failed.", __FUNCTION__); + return false; + } + } else { + ALOGE("%s: Unsupported flash info: %d",__FUNCTION__, entry.data.u8[0]); + return false; + } + } + + if (metadata.exists(ANDROID_CONTROL_AWB_MODE)) { + entry = 
metadata.find(ANDROID_CONTROL_AWB_MODE); + if (entry.data.u8[0] == ANDROID_CONTROL_AWB_MODE_AUTO) { + const uint16_t kAutoWhiteBalance = 0; + if (!setWhiteBalance(kAutoWhiteBalance)) { + ALOGE("%s: setting white balance failed.", __FUNCTION__); + return false; + } + } else { + ALOGE("%s: Unsupported awb mode: %d", __FUNCTION__, entry.data.u8[0]); + return false; + } + } + + if (time_available) { + char str[4]; + if (snprintf(str, sizeof(str), "%03ld", tp.tv_nsec / 1000000) < 0) { + ALOGE("%s: Subsec is invalid: %ld", __FUNCTION__, tp.tv_nsec); + return false; + } + if (!setSubsecTime(std::string(str))) { + ALOGE("%s: setting subsec time failed.", __FUNCTION__); + return false; + } + } + + return true; +} + +} // namespace helper +} // namespace V1_0 +} // namespace common +} // namespace camera +} // namespace hardware +} // namespace android diff --git a/camera/common/1.0/default/HandleImporter.cpp b/camera/common/1.0/default/HandleImporter.cpp new file mode 100644 index 0000000..7fcf523 --- /dev/null +++ b/camera/common/1.0/default/HandleImporter.cpp @@ -0,0 +1,457 @@ +/* + * Copyright (C) 2017 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "HandleImporter" +#include "HandleImporter.h" + +#include +#include + +namespace android { +namespace hardware { +namespace camera { +namespace common { +namespace V1_0 { +namespace helper { + +using aidl::android::hardware::graphics::common::PlaneLayout; +using aidl::android::hardware::graphics::common::PlaneLayoutComponent; +using aidl::android::hardware::graphics::common::PlaneLayoutComponentType; +using MapperErrorV2 = android::hardware::graphics::mapper::V2_0::Error; +using MapperErrorV3 = android::hardware::graphics::mapper::V3_0::Error; +using MapperErrorV4 = android::hardware::graphics::mapper::V4_0::Error; +using IMapperV3 = android::hardware::graphics::mapper::V3_0::IMapper; +using IMapperV4 = android::hardware::graphics::mapper::V4_0::IMapper; + +HandleImporter::HandleImporter() : mInitialized(false) {} + +void HandleImporter::initializeLocked() { + if (mInitialized) { + return; + } + + mMapperV4 = IMapperV4::getService(); + if (mMapperV4 != nullptr) { + mInitialized = true; + return; + } + + mMapperV3 = IMapperV3::getService(); + if (mMapperV3 != nullptr) { + mInitialized = true; + return; + } + + mMapperV2 = IMapper::getService(); + if (mMapperV2 == nullptr) { + ALOGE("%s: cannnot acccess graphics mapper HAL!", __FUNCTION__); + return; + } + + mInitialized = true; + return; +} + +void HandleImporter::cleanup() { + mMapperV4.clear(); + mMapperV3.clear(); + mMapperV2.clear(); + mInitialized = false; +} + +template +bool HandleImporter::importBufferInternal(const sp mapper, buffer_handle_t& handle) { + E error; + buffer_handle_t importedHandle; + auto ret = mapper->importBuffer( + hidl_handle(handle), + [&](const auto& tmpError, const auto& tmpBufferHandle) { + error = tmpError; + importedHandle = static_cast(tmpBufferHandle); + }); + + if (!ret.isOk()) { + ALOGE("%s: mapper 
importBuffer failed: %s", + __FUNCTION__, ret.description().c_str()); + return false; + } + + if (error != E::NONE) { + return false; + } + + handle = importedHandle; + return true; +} + +template +YCbCrLayout HandleImporter::lockYCbCrInternal(const sp mapper, buffer_handle_t& buf, + uint64_t cpuUsage, const IMapper::Rect& accessRegion) { + hidl_handle acquireFenceHandle; + auto buffer = const_cast(buf); + YCbCrLayout layout = {}; + + typename M::Rect accessRegionCopy = {accessRegion.left, accessRegion.top, + accessRegion.width, accessRegion.height}; + mapper->lockYCbCr(buffer, cpuUsage, accessRegionCopy, acquireFenceHandle, + [&](const auto& tmpError, const auto& tmpLayout) { + if (tmpError == E::NONE) { + // Member by member copy from different versions of YCbCrLayout. + layout.y = tmpLayout.y; + layout.cb = tmpLayout.cb; + layout.cr = tmpLayout.cr; + layout.yStride = tmpLayout.yStride; + layout.cStride = tmpLayout.cStride; + layout.chromaStep = tmpLayout.chromaStep; + } else { + ALOGE("%s: failed to lockYCbCr error %d!", __FUNCTION__, tmpError); + } + }); + return layout; +} + +std::vector getPlaneLayouts(const sp mapper, buffer_handle_t& buf) { + auto buffer = const_cast(buf); + std::vector planeLayouts; + hidl_vec encodedPlaneLayouts; + mapper->get(buffer, gralloc4::MetadataType_PlaneLayouts, + [&](const auto& tmpError, const auto& tmpEncodedPlaneLayouts) { + if (tmpError == MapperErrorV4::NONE) { + encodedPlaneLayouts = tmpEncodedPlaneLayouts; + } else { + ALOGE("%s: failed to get plane layouts %d!", __FUNCTION__, tmpError); + } + }); + + gralloc4::decodePlaneLayouts(encodedPlaneLayouts, &planeLayouts); + + return planeLayouts; +} + +template <> +YCbCrLayout HandleImporter::lockYCbCrInternal( + const sp mapper, buffer_handle_t& buf, uint64_t cpuUsage, + const IMapper::Rect& accessRegion) { + hidl_handle acquireFenceHandle; + auto buffer = const_cast(buf); + YCbCrLayout layout = {}; + void* mapped = nullptr; + + typename IMapperV4::Rect accessRegionV4 = {accessRegion.left, accessRegion.top, + accessRegion.width, accessRegion.height}; + mapper->lock(buffer, cpuUsage, accessRegionV4, acquireFenceHandle, + [&](const auto& tmpError, const auto& tmpPtr) { + if (tmpError == MapperErrorV4::NONE) { + mapped = tmpPtr; + } else { + ALOGE("%s: failed to lock error %d!", __FUNCTION__, tmpError); + } + }); + + if (mapped == nullptr) { + return layout; + } + + std::vector planeLayouts = getPlaneLayouts(mapper, buf); + for (const auto& planeLayout : planeLayouts) { + for (const auto& planeLayoutComponent : planeLayout.components) { + const auto& type = planeLayoutComponent.type; + + if (!gralloc4::isStandardPlaneLayoutComponentType(type)) { + continue; + } + + uint8_t* data = reinterpret_cast(mapped); + data += planeLayout.offsetInBytes; + data += planeLayoutComponent.offsetInBits / 8; + + switch (static_cast(type.value)) { + case PlaneLayoutComponentType::Y: + layout.y = data; + layout.yStride = planeLayout.strideInBytes; + break; + case PlaneLayoutComponentType::CB: + layout.cb = data; + layout.cStride = planeLayout.strideInBytes; + layout.chromaStep = planeLayout.sampleIncrementInBits / 8; + break; + case PlaneLayoutComponentType::CR: + layout.cr = data; + layout.cStride = planeLayout.strideInBytes; + layout.chromaStep = planeLayout.sampleIncrementInBits / 8; + break; + default: + break; + } + } + } + + return layout; +} + +template +int HandleImporter::unlockInternal(const sp mapper, buffer_handle_t& buf) { + int releaseFence = -1; + auto buffer = const_cast(buf); + + mapper->unlock( + buffer, 
[&](const auto& tmpError, const auto& tmpReleaseFence) { + if (tmpError == E::NONE) { + auto fenceHandle = tmpReleaseFence.getNativeHandle(); + if (fenceHandle) { + if (fenceHandle->numInts != 0 || fenceHandle->numFds != 1) { + ALOGE("%s: bad release fence numInts %d numFds %d", + __FUNCTION__, fenceHandle->numInts, fenceHandle->numFds); + return; + } + releaseFence = dup(fenceHandle->data[0]); + if (releaseFence < 0) { + ALOGE("%s: bad release fence FD %d", + __FUNCTION__, releaseFence); + } + } + } else { + ALOGE("%s: failed to unlock error %d!", __FUNCTION__, tmpError); + } + }); + return releaseFence; +} + +// In IComposer, any buffer_handle_t is owned by the caller and we need to +// make a clone for hwcomposer2. We also need to translate empty handle +// to nullptr. This function does that, in-place. +bool HandleImporter::importBuffer(buffer_handle_t& handle) { + if (!handle->numFds && !handle->numInts) { + handle = nullptr; + return true; + } + + Mutex::Autolock lock(mLock); + if (!mInitialized) { + initializeLocked(); + } + + if (mMapperV4 != nullptr) { + return importBufferInternal(mMapperV4, handle); + } + + if (mMapperV3 != nullptr) { + return importBufferInternal(mMapperV3, handle); + } + + if (mMapperV2 != nullptr) { + return importBufferInternal(mMapperV2, handle); + } + + ALOGE("%s: mMapperV4, mMapperV3 and mMapperV2 are all null!", __FUNCTION__); + return false; +} + +void HandleImporter::freeBuffer(buffer_handle_t handle) { + if (!handle) { + return; + } + + Mutex::Autolock lock(mLock); + if (!mInitialized) { + initializeLocked(); + } + + if (mMapperV4 != nullptr) { + auto ret = mMapperV4->freeBuffer(const_cast(handle)); + if (!ret.isOk()) { + ALOGE("%s: mapper freeBuffer failed: %s", __FUNCTION__, ret.description().c_str()); + } + } else if (mMapperV3 != nullptr) { + auto ret = mMapperV3->freeBuffer(const_cast(handle)); + if (!ret.isOk()) { + ALOGE("%s: mapper freeBuffer failed: %s", + __FUNCTION__, ret.description().c_str()); + } + } else { + auto ret = mMapperV2->freeBuffer(const_cast(handle)); + if (!ret.isOk()) { + ALOGE("%s: mapper freeBuffer failed: %s", + __FUNCTION__, ret.description().c_str()); + } + } +} + +bool HandleImporter::importFence(const native_handle_t* handle, int& fd) const { + if (handle == nullptr || handle->numFds == 0) { + fd = -1; + } else if (handle->numFds == 1) { + fd = dup(handle->data[0]); + if (fd < 0) { + ALOGE("failed to dup fence fd %d", handle->data[0]); + return false; + } + } else { + ALOGE("invalid fence handle with %d file descriptors", + handle->numFds); + return false; + } + + return true; +} + +void HandleImporter::closeFence(int fd) const { + if (fd >= 0) { + close(fd); + } +} + +void* HandleImporter::lock( + buffer_handle_t& buf, uint64_t cpuUsage, size_t size) { + IMapper::Rect accessRegion{0, 0, static_cast(size), 1}; + return lock(buf, cpuUsage, accessRegion); +} + +void* HandleImporter::lock(buffer_handle_t& buf, uint64_t cpuUsage, + const IMapper::Rect& accessRegion) { + Mutex::Autolock lock(mLock); + + if (!mInitialized) { + initializeLocked(); + } + + void* ret = nullptr; + + if (mMapperV4 == nullptr && mMapperV3 == nullptr && mMapperV2 == nullptr) { + ALOGE("%s: mMapperV4, mMapperV3 and mMapperV2 are all null!", __FUNCTION__); + return ret; + } + + hidl_handle acquireFenceHandle; + auto buffer = const_cast(buf); + if (mMapperV4 != nullptr) { + IMapperV4::Rect accessRegionV4{accessRegion.left, accessRegion.top, accessRegion.width, + accessRegion.height}; + + mMapperV4->lock(buffer, cpuUsage, accessRegionV4, 
acquireFenceHandle, + [&](const auto& tmpError, const auto& tmpPtr) { + if (tmpError == MapperErrorV4::NONE) { + ret = tmpPtr; + } else { + ALOGE("%s: failed to lock error %d!", __FUNCTION__, tmpError); + } + }); + } else if (mMapperV3 != nullptr) { + IMapperV3::Rect accessRegionV3{accessRegion.left, accessRegion.top, accessRegion.width, + accessRegion.height}; + + mMapperV3->lock(buffer, cpuUsage, accessRegionV3, acquireFenceHandle, + [&](const auto& tmpError, const auto& tmpPtr, const auto& /*bytesPerPixel*/, + const auto& /*bytesPerStride*/) { + if (tmpError == MapperErrorV3::NONE) { + ret = tmpPtr; + } else { + ALOGE("%s: failed to lock error %d!", __FUNCTION__, tmpError); + } + }); + } else { + mMapperV2->lock(buffer, cpuUsage, accessRegion, acquireFenceHandle, + [&](const auto& tmpError, const auto& tmpPtr) { + if (tmpError == MapperErrorV2::NONE) { + ret = tmpPtr; + } else { + ALOGE("%s: failed to lock error %d!", __FUNCTION__, tmpError); + } + }); + } + + ALOGV("%s: ptr %p accessRegion.top: %d accessRegion.left: %d accessRegion.width: %d " + "accessRegion.height: %d", + __FUNCTION__, ret, accessRegion.top, accessRegion.left, accessRegion.width, + accessRegion.height); + return ret; +} + +YCbCrLayout HandleImporter::lockYCbCr( + buffer_handle_t& buf, uint64_t cpuUsage, + const IMapper::Rect& accessRegion) { + Mutex::Autolock lock(mLock); + + if (!mInitialized) { + initializeLocked(); + } + + if (mMapperV4 != nullptr) { + return lockYCbCrInternal(mMapperV4, buf, cpuUsage, accessRegion); + } + + if (mMapperV3 != nullptr) { + return lockYCbCrInternal( + mMapperV3, buf, cpuUsage, accessRegion); + } + + if (mMapperV2 != nullptr) { + return lockYCbCrInternal( + mMapperV2, buf, cpuUsage, accessRegion); + } + + ALOGE("%s: mMapperV4, mMapperV3 and mMapperV2 are all null!", __FUNCTION__); + return {}; +} + +status_t HandleImporter::getMonoPlanarStrideBytes(buffer_handle_t &buf, uint32_t *stride /*out*/) { + if (stride == nullptr) { + return BAD_VALUE; + } + + Mutex::Autolock lock(mLock); + + if (!mInitialized) { + initializeLocked(); + } + + if (mMapperV4 != nullptr) { + std::vector planeLayouts = getPlaneLayouts(mMapperV4, buf); + if (planeLayouts.size() != 1) { + ALOGE("%s: Unexpected number of planes %zu!", __FUNCTION__, planeLayouts.size()); + return BAD_VALUE; + } + + *stride = planeLayouts[0].strideInBytes; + } else { + ALOGE("%s: mMapperV4 is null! 
Query not supported!", __FUNCTION__); + return NO_INIT; + } + + return OK; +} + +int HandleImporter::unlock(buffer_handle_t& buf) { + if (mMapperV4 != nullptr) { + return unlockInternal(mMapperV4, buf); + } + if (mMapperV3 != nullptr) { + return unlockInternal(mMapperV3, buf); + } + if (mMapperV2 != nullptr) { + return unlockInternal(mMapperV2, buf); + } + + ALOGE("%s: mMapperV4, mMapperV3 and mMapperV2 are all null!", __FUNCTION__); + return -1; +} + +} // namespace helper +} // namespace V1_0 +} // namespace common +} // namespace camera +} // namespace hardware +} // namespace android diff --git a/camera/common/1.0/default/OWNERS b/camera/common/1.0/default/OWNERS new file mode 100644 index 0000000..f48a95c --- /dev/null +++ b/camera/common/1.0/default/OWNERS @@ -0,0 +1 @@ +include platform/frameworks/av:/camera/OWNERS diff --git a/camera/common/1.0/default/VendorTagDescriptor.cpp b/camera/common/1.0/default/VendorTagDescriptor.cpp new file mode 100644 index 0000000..d2bee85 --- /dev/null +++ b/camera/common/1.0/default/VendorTagDescriptor.cpp @@ -0,0 +1,538 @@ +/* + * Copyright (C) 2016 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "CamComm1.0-VTDesc" + +#include +#include +#include +#include +#include +#include +#include + +#include "VendorTagDescriptor.h" + +#include +#include + +namespace android { +namespace hardware { +namespace camera2 { +namespace params { + +VendorTagDescriptor::~VendorTagDescriptor() { + size_t len = mReverseMapping.size(); + for (size_t i = 0; i < len; ++i) { + delete mReverseMapping[i]; + } +} + +VendorTagDescriptor::VendorTagDescriptor() : + mTagCount(0), + mVendorOps() { +} + +VendorTagDescriptor::VendorTagDescriptor(const VendorTagDescriptor& src) { + copyFrom(src); +} + +VendorTagDescriptor& VendorTagDescriptor::operator=(const VendorTagDescriptor& rhs) { + copyFrom(rhs); + return *this; +} + +void VendorTagDescriptor::copyFrom(const VendorTagDescriptor& src) { + if (this == &src) return; + + size_t len = mReverseMapping.size(); + for (size_t i = 0; i < len; ++i) { + delete mReverseMapping[i]; + } + mReverseMapping.clear(); + + len = src.mReverseMapping.size(); + // Have to copy KeyedVectors inside mReverseMapping + for (size_t i = 0; i < len; ++i) { + KeyedVector* nameMapper = new KeyedVector(); + *nameMapper = *(src.mReverseMapping.valueAt(i)); + mReverseMapping.add(src.mReverseMapping.keyAt(i), nameMapper); + } + // Everything else is simple + mTagToNameMap = src.mTagToNameMap; + mTagToSectionMap = src.mTagToSectionMap; + mTagToTypeMap = src.mTagToTypeMap; + mSections = src.mSections; + mTagCount = src.mTagCount; + mVendorOps = src.mVendorOps; +} + +int VendorTagDescriptor::getTagCount() const { + size_t size = mTagToNameMap.size(); + if (size == 0) { + return VENDOR_TAG_COUNT_ERR; + } + return size; +} + +void VendorTagDescriptor::getTagArray(uint32_t* tagArray) const { + size_t size = mTagToNameMap.size(); + for (size_t i = 0; i < size; ++i) { + tagArray[i] = mTagToNameMap.keyAt(i); + 
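+        // Note: tagArray is caller-allocated; per the vendor_tag_ops contract it is
+        // expected to have room for at least getTagCount() entries.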
} +} + +const char* VendorTagDescriptor::getSectionName(uint32_t tag) const { + ssize_t index = mTagToSectionMap.indexOfKey(tag); + if (index < 0) { + return VENDOR_SECTION_NAME_ERR; + } + return mSections[mTagToSectionMap.valueAt(index)].string(); +} + +ssize_t VendorTagDescriptor::getSectionIndex(uint32_t tag) const { + return mTagToSectionMap.valueFor(tag); +} + +const char* VendorTagDescriptor::getTagName(uint32_t tag) const { + ssize_t index = mTagToNameMap.indexOfKey(tag); + if (index < 0) { + return VENDOR_TAG_NAME_ERR; + } + return mTagToNameMap.valueAt(index).string(); +} + +int VendorTagDescriptor::getTagType(uint32_t tag) const { + auto iter = mTagToTypeMap.find(tag); + if (iter == mTagToTypeMap.end()) { + return VENDOR_TAG_TYPE_ERR; + } + return iter->second; +} + +const SortedVector* VendorTagDescriptor::getAllSectionNames() const { + return &mSections; +} + +status_t VendorTagDescriptor::lookupTag(const String8& name, const String8& section, /*out*/uint32_t* tag) const { + ssize_t index = mReverseMapping.indexOfKey(section); + if (index < 0) { + ALOGE("%s: Section '%s' does not exist.", __FUNCTION__, section.string()); + return BAD_VALUE; + } + + ssize_t nameIndex = mReverseMapping[index]->indexOfKey(name); + if (nameIndex < 0) { + ALOGE("%s: Tag name '%s' does not exist.", __FUNCTION__, name.string()); + return BAD_VALUE; + } + + if (tag != NULL) { + *tag = mReverseMapping[index]->valueAt(nameIndex); + } + return OK; +} + +void VendorTagDescriptor::dump(int fd, int verbosity, int indentation) const { + + size_t size = mTagToNameMap.size(); + if (size == 0) { + dprintf(fd, "%*sDumping configured vendor tag descriptors: None set\n", + indentation, ""); + return; + } + + dprintf(fd, "%*sDumping configured vendor tag descriptors: %zu entries\n", + indentation, "", size); + for (size_t i = 0; i < size; ++i) { + uint32_t tag = mTagToNameMap.keyAt(i); + + if (verbosity < 1) { + dprintf(fd, "%*s0x%x\n", indentation + 2, "", tag); + continue; + } + String8 name = mTagToNameMap.valueAt(i); + uint32_t sectionId = mTagToSectionMap.valueFor(tag); + String8 sectionName = mSections[sectionId]; + int type = mTagToTypeMap.at(tag); + const char* typeName = (type >= 0 && type < NUM_TYPES) ? 
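+                // camera_metadata_type_names (from system/camera_metadata.h) is indexed by
+                // type and has NUM_TYPES entries; anything outside that range is "UNKNOWN".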
+ camera_metadata_type_names[type] : "UNKNOWN"; + dprintf(fd, "%*s0x%x (%s) with type %d (%s) defined in section %s\n", indentation + 2, + "", tag, name.string(), type, typeName, sectionName.string()); + } + +} + +int VendorTagDescriptorCache::getTagCount(metadata_vendor_id_t id) const { + int ret = 0; + auto desc = mVendorMap.find(id); + if (desc != mVendorMap.end()) { + ret = desc->second->getTagCount(); + } else { + ALOGE("%s: Vendor descriptor id is missing!", __func__); + } + + return ret; +} + +void VendorTagDescriptorCache::getTagArray(uint32_t* tagArray, metadata_vendor_id_t id) const { + auto desc = mVendorMap.find(id); + if (desc != mVendorMap.end()) { + desc->second->getTagArray(tagArray); + } else { + ALOGE("%s: Vendor descriptor id is missing!", __func__); + } +} + +const char* VendorTagDescriptorCache::getSectionName(uint32_t tag, metadata_vendor_id_t id) const { + const char* ret = nullptr; + auto desc = mVendorMap.find(id); + if (desc != mVendorMap.end()) { + ret = desc->second->getSectionName(tag); + } else { + ALOGE("%s: Vendor descriptor id is missing!", __func__); + } + + return ret; +} + +const char* VendorTagDescriptorCache::getTagName(uint32_t tag, metadata_vendor_id_t id) const { + const char* ret = nullptr; + auto desc = mVendorMap.find(id); + if (desc != mVendorMap.end()) { + ret = desc->second->getTagName(tag); + } else { + ALOGE("%s: Vendor descriptor id is missing!", __func__); + } + + return ret; +} + +int VendorTagDescriptorCache::getTagType(uint32_t tag, metadata_vendor_id_t id) const { + int ret = 0; + auto desc = mVendorMap.find(id); + if (desc != mVendorMap.end()) { + ret = desc->second->getTagType(tag); + } else { + ALOGE("%s: Vendor descriptor id is missing!", __func__); + } + + return ret; +} + +void VendorTagDescriptorCache::dump(int fd, int verbosity, int indentation) const { + for (const auto& desc : mVendorMap) { + desc.second->dump(fd, verbosity, indentation); + } +} + +int32_t VendorTagDescriptorCache::addVendorDescriptor( + metadata_vendor_id_t id, sp desc) { + auto entry = mVendorMap.find(id); + if (entry != mVendorMap.end()) { + ALOGE("%s: Vendor descriptor with same id already present!", __func__); + return BAD_VALUE; + } + + mVendorMap.emplace(id, desc); + return NO_ERROR; +} + +int32_t VendorTagDescriptorCache::getVendorTagDescriptor( + metadata_vendor_id_t id, + sp* desc /*out*/) { + auto entry = mVendorMap.find(id); + if (entry == mVendorMap.end()) { + return NAME_NOT_FOUND; + } + + *desc = entry->second; + + return NO_ERROR; +} +} // namespace params +} // namespace camera2 + +namespace camera { +namespace common { +namespace V1_0 { +namespace helper { + +extern "C" { + +static int vendor_tag_descriptor_get_tag_count(const vendor_tag_ops_t* v); +static void vendor_tag_descriptor_get_all_tags(const vendor_tag_ops_t* v, uint32_t* tagArray); +static const char* vendor_tag_descriptor_get_section_name(const vendor_tag_ops_t* v, uint32_t tag); +static const char* vendor_tag_descriptor_get_tag_name(const vendor_tag_ops_t* v, uint32_t tag); +static int vendor_tag_descriptor_get_tag_type(const vendor_tag_ops_t* v, uint32_t tag); + +static int vendor_tag_descriptor_cache_get_tag_count(metadata_vendor_id_t id); +static void vendor_tag_descriptor_cache_get_all_tags(uint32_t* tagArray, metadata_vendor_id_t id); +static const char* vendor_tag_descriptor_cache_get_section_name(uint32_t tag, + metadata_vendor_id_t id); +static const char* vendor_tag_descriptor_cache_get_tag_name(uint32_t tag, metadata_vendor_id_t id); +static int 
vendor_tag_descriptor_cache_get_tag_type(uint32_t tag, metadata_vendor_id_t id); +} /* extern "C" */ + +static Mutex sLock; +static sp sGlobalVendorTagDescriptor; +static sp sGlobalVendorTagDescriptorCache; + +status_t VendorTagDescriptor::createDescriptorFromOps(const vendor_tag_ops_t* vOps, + /*out*/ + sp& descriptor) { + if (vOps == NULL) { + ALOGE("%s: vendor_tag_ops argument was NULL.", __FUNCTION__); + return BAD_VALUE; + } + + int tagCount = vOps->get_tag_count(vOps); + if (tagCount < 0 || tagCount > INT32_MAX) { + ALOGE("%s: tag count %d from vendor ops is invalid.", __FUNCTION__, tagCount); + return BAD_VALUE; + } + + Vector tagArray; + LOG_ALWAYS_FATAL_IF(tagArray.resize(tagCount) != tagCount, + "%s: too many (%u) vendor tags defined.", __FUNCTION__, tagCount); + + vOps->get_all_tags(vOps, /*out*/tagArray.editArray()); + + sp desc = new VendorTagDescriptor(); + desc->mTagCount = tagCount; + + SortedVector sections; + KeyedVector tagToSectionMap; + + for (size_t i = 0; i < static_cast(tagCount); ++i) { + uint32_t tag = tagArray[i]; + if (tag < CAMERA_METADATA_VENDOR_TAG_BOUNDARY) { + ALOGE("%s: vendor tag %d not in vendor tag section.", __FUNCTION__, tag); + return BAD_VALUE; + } + const char *tagName = vOps->get_tag_name(vOps, tag); + if (tagName == NULL) { + ALOGE("%s: no tag name defined for vendor tag %d.", __FUNCTION__, tag); + return BAD_VALUE; + } + desc->mTagToNameMap.add(tag, String8(tagName)); + const char *sectionName = vOps->get_section_name(vOps, tag); + if (sectionName == NULL) { + ALOGE("%s: no section name defined for vendor tag %d.", __FUNCTION__, tag); + return BAD_VALUE; + } + + String8 sectionString(sectionName); + + sections.add(sectionString); + tagToSectionMap.add(tag, sectionString); + + int tagType = vOps->get_tag_type(vOps, tag); + if (tagType < 0 || tagType >= NUM_TYPES) { + ALOGE("%s: tag type %d from vendor ops does not exist.", __FUNCTION__, tagType); + return BAD_VALUE; + } + desc->mTagToTypeMap.insert(std::make_pair(tag, tagType)); + } + + desc->mSections = sections; + + for (size_t i = 0; i < static_cast(tagCount); ++i) { + uint32_t tag = tagArray[i]; + const String8& sectionString = tagToSectionMap.valueFor(tag); + + // Set up tag to section index map + ssize_t index = sections.indexOf(sectionString); + LOG_ALWAYS_FATAL_IF(index < 0, "index %zd must be non-negative", index); + desc->mTagToSectionMap.add(tag, static_cast(index)); + + // Set up reverse mapping + ssize_t reverseIndex = -1; + if ((reverseIndex = desc->mReverseMapping.indexOfKey(sectionString)) < 0) { + KeyedVector* nameMapper = new KeyedVector(); + reverseIndex = desc->mReverseMapping.add(sectionString, nameMapper); + } + desc->mReverseMapping[reverseIndex]->add(desc->mTagToNameMap.valueFor(tag), tag); + } + + descriptor = desc; + return OK; +} + +status_t VendorTagDescriptor::setAsGlobalVendorTagDescriptor(const sp& desc) { + status_t res = OK; + Mutex::Autolock al(sLock); + sGlobalVendorTagDescriptor = desc; + + vendor_tag_ops_t* opsPtr = NULL; + if (desc != NULL) { + opsPtr = &(desc->mVendorOps); + opsPtr->get_tag_count = vendor_tag_descriptor_get_tag_count; + opsPtr->get_all_tags = vendor_tag_descriptor_get_all_tags; + opsPtr->get_section_name = vendor_tag_descriptor_get_section_name; + opsPtr->get_tag_name = vendor_tag_descriptor_get_tag_name; + opsPtr->get_tag_type = vendor_tag_descriptor_get_tag_type; + } + if((res = set_camera_metadata_vendor_ops(opsPtr)) != OK) { + ALOGE("%s: Could not set vendor tag descriptor, received error %s (%d)." 
+ , __FUNCTION__, strerror(-res), res); + } + return res; +} + +void VendorTagDescriptor::clearGlobalVendorTagDescriptor() { + Mutex::Autolock al(sLock); + set_camera_metadata_vendor_ops(NULL); + sGlobalVendorTagDescriptor.clear(); +} + +sp VendorTagDescriptor::getGlobalVendorTagDescriptor() { + Mutex::Autolock al(sLock); + return sGlobalVendorTagDescriptor; +} + +status_t VendorTagDescriptorCache::setAsGlobalVendorTagCache( + const sp& cache) { + status_t res = OK; + Mutex::Autolock al(sLock); + sGlobalVendorTagDescriptorCache = cache; + + struct vendor_tag_cache_ops* opsPtr = NULL; + if (cache != NULL) { + opsPtr = &(cache->mVendorCacheOps); + opsPtr->get_tag_count = vendor_tag_descriptor_cache_get_tag_count; + opsPtr->get_all_tags = vendor_tag_descriptor_cache_get_all_tags; + opsPtr->get_section_name = vendor_tag_descriptor_cache_get_section_name; + opsPtr->get_tag_name = vendor_tag_descriptor_cache_get_tag_name; + opsPtr->get_tag_type = vendor_tag_descriptor_cache_get_tag_type; + } + if ((res = set_camera_metadata_vendor_cache_ops(opsPtr)) != OK) { + ALOGE("%s: Could not set vendor tag cache, received error %s (%d).", __FUNCTION__, + strerror(-res), res); + } + return res; +} + +void VendorTagDescriptorCache::clearGlobalVendorTagCache() { + Mutex::Autolock al(sLock); + set_camera_metadata_vendor_cache_ops(NULL); + sGlobalVendorTagDescriptorCache.clear(); +} + +sp VendorTagDescriptorCache::getGlobalVendorTagCache() { + Mutex::Autolock al(sLock); + return sGlobalVendorTagDescriptorCache; +} + +extern "C" { + +int vendor_tag_descriptor_get_tag_count(const vendor_tag_ops_t* /*v*/) { + Mutex::Autolock al(sLock); + if (sGlobalVendorTagDescriptor == NULL) { + ALOGE("%s: Vendor tag descriptor not initialized.", __FUNCTION__); + return VENDOR_TAG_COUNT_ERR; + } + return sGlobalVendorTagDescriptor->getTagCount(); +} + +void vendor_tag_descriptor_get_all_tags(const vendor_tag_ops_t* /*v*/, uint32_t* tagArray) { + Mutex::Autolock al(sLock); + if (sGlobalVendorTagDescriptor == NULL) { + ALOGE("%s: Vendor tag descriptor not initialized.", __FUNCTION__); + return; + } + sGlobalVendorTagDescriptor->getTagArray(tagArray); +} + +const char* vendor_tag_descriptor_get_section_name(const vendor_tag_ops_t* /*v*/, uint32_t tag) { + Mutex::Autolock al(sLock); + if (sGlobalVendorTagDescriptor == NULL) { + ALOGE("%s: Vendor tag descriptor not initialized.", __FUNCTION__); + return VENDOR_SECTION_NAME_ERR; + } + return sGlobalVendorTagDescriptor->getSectionName(tag); +} + +const char* vendor_tag_descriptor_get_tag_name(const vendor_tag_ops_t* /*v*/, uint32_t tag) { + Mutex::Autolock al(sLock); + if (sGlobalVendorTagDescriptor == NULL) { + ALOGE("%s: Vendor tag descriptor not initialized.", __FUNCTION__); + return VENDOR_TAG_NAME_ERR; + } + return sGlobalVendorTagDescriptor->getTagName(tag); +} + +int vendor_tag_descriptor_get_tag_type(const vendor_tag_ops_t* /*v*/, uint32_t tag) { + Mutex::Autolock al(sLock); + if (sGlobalVendorTagDescriptor == NULL) { + ALOGE("%s: Vendor tag descriptor not initialized.", __FUNCTION__); + return VENDOR_TAG_TYPE_ERR; + } + return sGlobalVendorTagDescriptor->getTagType(tag); +} + +int vendor_tag_descriptor_cache_get_tag_count(metadata_vendor_id_t id) { + Mutex::Autolock al(sLock); + if (sGlobalVendorTagDescriptorCache == NULL) { + ALOGE("%s: Vendor tag descriptor cache not initialized.", __FUNCTION__); + return VENDOR_TAG_COUNT_ERR; + } + return sGlobalVendorTagDescriptorCache->getTagCount(id); +} + +void vendor_tag_descriptor_cache_get_all_tags(uint32_t* tagArray, 
metadata_vendor_id_t id) { + Mutex::Autolock al(sLock); + if (sGlobalVendorTagDescriptorCache == NULL) { + ALOGE("%s: Vendor tag descriptor cache not initialized.", __FUNCTION__); + } + sGlobalVendorTagDescriptorCache->getTagArray(tagArray, id); +} + +const char* vendor_tag_descriptor_cache_get_section_name(uint32_t tag, metadata_vendor_id_t id) { + Mutex::Autolock al(sLock); + if (sGlobalVendorTagDescriptorCache == NULL) { + ALOGE("%s: Vendor tag descriptor cache not initialized.", __FUNCTION__); + return VENDOR_SECTION_NAME_ERR; + } + return sGlobalVendorTagDescriptorCache->getSectionName(tag, id); +} + +const char* vendor_tag_descriptor_cache_get_tag_name(uint32_t tag, metadata_vendor_id_t id) { + Mutex::Autolock al(sLock); + if (sGlobalVendorTagDescriptorCache == NULL) { + ALOGE("%s: Vendor tag descriptor cache not initialized.", __FUNCTION__); + return VENDOR_TAG_NAME_ERR; + } + return sGlobalVendorTagDescriptorCache->getTagName(tag, id); +} + +int vendor_tag_descriptor_cache_get_tag_type(uint32_t tag, metadata_vendor_id_t id) { + Mutex::Autolock al(sLock); + if (sGlobalVendorTagDescriptorCache == NULL) { + ALOGE("%s: Vendor tag descriptor cache not initialized.", __FUNCTION__); + return VENDOR_TAG_NAME_ERR; + } + return sGlobalVendorTagDescriptorCache->getTagType(tag, id); +} + +} /* extern "C" */ + +} // namespace helper +} // namespace V1_0 +} // namespace common +} // namespace camera +} // namespace hardware +} // namespace android diff --git a/camera/common/1.0/default/include/CameraMetadata.h b/camera/common/1.0/default/include/CameraMetadata.h new file mode 100644 index 0000000..d5e4d56 --- /dev/null +++ b/camera/common/1.0/default/include/CameraMetadata.h @@ -0,0 +1,230 @@ +/* + * Copyright (C) 2016 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef CAMERA_COMMON_1_0_CAMERAMETADATA_H +#define CAMERA_COMMON_1_0_CAMERAMETADATA_H + +#include "system/camera_metadata.h" + +#include +#include + +namespace android { +namespace hardware { +namespace camera { +namespace common { +namespace V1_0 { +namespace helper { + +class VendorTagDescriptor; + +/** + * A convenience wrapper around the C-based camera_metadata_t library. + */ +class CameraMetadata { + public: + /** Creates an empty object; best used when expecting to acquire contents + * from elsewhere */ + CameraMetadata(); + /** Creates an object with space for entryCapacity entries, with + * dataCapacity extra storage */ + CameraMetadata(size_t entryCapacity, size_t dataCapacity = 10); + + ~CameraMetadata(); + + /** Takes ownership of passed-in buffer */ + CameraMetadata(camera_metadata_t *buffer); + /** Clones the metadata */ + CameraMetadata(const CameraMetadata &other); + + /** + * Assignment clones metadata buffer. + */ + CameraMetadata &operator=(const CameraMetadata &other); + CameraMetadata &operator=(const camera_metadata_t *buffer); + + /** + * Get reference to the underlying metadata buffer. 
Ownership remains with + * the CameraMetadata object, but non-const CameraMetadata methods will not + * work until unlock() is called. Note that the lock has nothing to do with + * thread-safety, it simply prevents the camera_metadata_t pointer returned + * here from being accidentally invalidated by CameraMetadata operations. + */ + const camera_metadata_t* getAndLock() const; + + /** + * Unlock the CameraMetadata for use again. After this unlock, the pointer + * given from getAndLock() may no longer be used. The pointer passed out + * from getAndLock must be provided to guarantee that the right object is + * being unlocked. + */ + status_t unlock(const camera_metadata_t *buffer) const; + + /** + * Release a raw metadata buffer to the caller. After this call, + * CameraMetadata no longer references the buffer, and the caller takes + * responsibility for freeing the raw metadata buffer (using + * free_camera_metadata()), or for handing it to another CameraMetadata + * instance. + */ + camera_metadata_t* release(); + + /** + * Clear the metadata buffer and free all storage used by it + */ + void clear(); + + /** + * Acquire a raw metadata buffer from the caller. After this call, + * the caller no longer owns the raw buffer, and must not free or manipulate it. + * If CameraMetadata already contains metadata, it is freed. + */ + void acquire(camera_metadata_t* buffer); + + /** + * Acquires raw buffer from other CameraMetadata object. After the call, the argument + * object no longer has any metadata. + */ + void acquire(CameraMetadata &other); + + /** + * Append metadata from another CameraMetadata object. + */ + status_t append(const CameraMetadata &other); + + /** + * Append metadata from a raw camera_metadata buffer + */ + status_t append(const camera_metadata* other); + + /** + * Number of metadata entries. + */ + size_t entryCount() const; + + /** + * Is the buffer empty (no entires) + */ + bool isEmpty() const; + + /** + * Sort metadata buffer for faster find + */ + status_t sort(); + + /** + * Update metadata entry. Will create entry if it doesn't exist already, and + * will reallocate the buffer if insufficient space exists. Overloaded for + * the various types of valid data. + */ + status_t update(uint32_t tag, + const uint8_t *data, size_t data_count); + status_t update(uint32_t tag, + const int32_t *data, size_t data_count); + status_t update(uint32_t tag, + const float *data, size_t data_count); + status_t update(uint32_t tag, + const int64_t *data, size_t data_count); + status_t update(uint32_t tag, + const double *data, size_t data_count); + status_t update(uint32_t tag, + const camera_metadata_rational_t *data, size_t data_count); + status_t update(uint32_t tag, + const String8 &string); + status_t update(const camera_metadata_ro_entry &entry); + + + template + status_t update(uint32_t tag, Vector data) { + return update(tag, data.array(), data.size()); + } + + /** + * Check if a metadata entry exists for a given tag id + * + */ + bool exists(uint32_t tag) const; + + /** + * Get metadata entry by tag id + */ + camera_metadata_entry find(uint32_t tag); + + /** + * Get metadata entry by tag id, with no editing + */ + camera_metadata_ro_entry find(uint32_t tag) const; + + /** + * Delete metadata entry by tag + */ + status_t erase(uint32_t tag); + + /** + * Swap the underlying camera metadata between this and the other + * metadata object. + */ + void swap(CameraMetadata &other); + + /** + * Dump contents into FD for debugging. 
The verbosity levels are + * 0: Tag entry information only, no data values + * 1: Level 0 plus at most 16 data values per entry + * 2: All information + * + * The indentation parameter sets the number of spaces to add to the start + * each line of output. + */ + void dump(int fd, int verbosity = 1, int indentation = 0) const; + + /** + * Find tag id for a given tag name, also checking vendor tags if available. + * On success, returns OK and writes the tag id into tag. + * + * This is a slow method. + */ + static status_t getTagFromName(const char *name, + const VendorTagDescriptor* vTags, uint32_t *tag); + + private: + camera_metadata_t *mBuffer; + mutable bool mLocked; + + /** + * Check if tag has a given type + */ + status_t checkType(uint32_t tag, uint8_t expectedType); + + /** + * Base update entry method + */ + status_t updateImpl(uint32_t tag, const void *data, size_t data_count); + + /** + * Resize metadata buffer if needed by reallocating it and copying it over. + */ + status_t resizeIfNeeded(size_t extraEntries, size_t extraData); + +}; + +} // namespace helper +} // namespace V1_0 +} // namespace common +} // namespace camera +} // namespace hardware +} // namespace android + +#endif diff --git a/camera/common/1.0/default/include/CameraModule.h b/camera/common/1.0/default/include/CameraModule.h new file mode 100644 index 0000000..c89e934 --- /dev/null +++ b/camera/common/1.0/default/include/CameraModule.h @@ -0,0 +1,101 @@ +/* + * Copyright (C) 2016 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef CAMERA_COMMON_1_0_CAMERAMODULE_H +#define CAMERA_COMMON_1_0_CAMERAMODULE_H + +#include +#include + +#include +#include +#include +#include + +#include "CameraMetadata.h" + +namespace android { +namespace hardware { +namespace camera { +namespace common { +namespace V1_0 { +namespace helper { +/** + * A wrapper class for HAL camera module. + * + * This class wraps camera_module_t returned from HAL to provide a wrapped + * get_camera_info implementation which CameraService generates some + * camera characteristics keys defined in newer HAL version on an older HAL. 
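+ *
+ * Rough usage sketch (illustrative only; "rawModule" is a hypothetical
+ * camera_module_t* obtained by the provider, e.g. via hw_get_module()):
+ *
+ *   sp<CameraModule> module = new CameraModule(rawModule);
+ *   if (module->init() != OK) return;                // init() must be called first
+ *   int n = module->getNumberOfCameras();
+ *   for (int i = 0; i < n; i++) {
+ *       camera_info info;
+ *       module->getCameraInfo(i, &info);             // wrapped get_camera_info
+ *   }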
+ */ +class CameraModule : public RefBase { +public: + explicit CameraModule(camera_module_t *module); + virtual ~CameraModule(); + + // Must be called after construction + // Returns OK on success, NO_INIT on failure + int init(); + + int getCameraInfo(int cameraId, struct camera_info *info); + int getDeviceVersion(int cameraId); + int getNumberOfCameras(void); + int open(const char* id, struct hw_device_t** device); + bool isOpenLegacyDefined() const; + int openLegacy(const char* id, uint32_t halVersion, struct hw_device_t** device); + int setCallbacks(const camera_module_callbacks_t *callbacks); + bool isVendorTagDefined() const; + void getVendorTagOps(vendor_tag_ops_t* ops); + bool isSetTorchModeSupported() const; + int setTorchMode(const char* camera_id, bool enable); + uint16_t getModuleApiVersion() const; + const char* getModuleName() const; + uint16_t getHalApiVersion() const; + const char* getModuleAuthor() const; + // Only used by CameraModuleFixture native test. Do NOT use elsewhere. + void *getDso(); + // Only used by CameraProvider + void removeCamera(int cameraId); + int getPhysicalCameraInfo(int physicalCameraId, camera_metadata_t **physicalInfo); + int isStreamCombinationSupported(int cameraId, camera_stream_combination_t *streams); + void notifyDeviceStateChange(uint64_t deviceState); + + static bool isLogicalMultiCamera( + const common::V1_0::helper::CameraMetadata& metadata, + std::unordered_set* physicalCameraIds); + +private: + // Derive camera characteristics keys defined after HAL device version + static void deriveCameraCharacteristicsKeys(uint32_t deviceVersion, CameraMetadata &chars); + // Helper function to append available[request|result|chars]Keys + static void appendAvailableKeys(CameraMetadata &chars, + int32_t keyTag, const Vector& appendKeys); + status_t filterOpenErrorCode(status_t err); + camera_module_t *mModule; + int mNumberOfCameras; + KeyedVector mCameraInfoMap; + KeyedVector mDeviceVersionMap; + KeyedVector mPhysicalCameraInfoMap; + Mutex mCameraInfoLock; +}; + +} // namespace helper +} // namespace V1_0 +} // namespace common +} // namespace camera +} // namespace hardware +} // namespace android + +#endif diff --git a/camera/common/1.0/default/include/CameraParameters.h b/camera/common/1.0/default/include/CameraParameters.h new file mode 100644 index 0000000..e4ff6f2 --- /dev/null +++ b/camera/common/1.0/default/include/CameraParameters.h @@ -0,0 +1,709 @@ +/* + * Copyright (C) 2008 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef ANDROID_HARDWARE_CAMERA_PARAMETERS_H +#define ANDROID_HARDWARE_CAMERA_PARAMETERS_H + +#include +#include + +namespace android { +namespace hardware { +namespace camera { +namespace common { +namespace V1_0 { +namespace helper { + +struct Size { + int width; + int height; + + Size() { + width = 0; + height = 0; + } + + Size(int w, int h) { + width = w; + height = h; + } +}; + +class CameraParameters +{ +public: + CameraParameters(); + CameraParameters(const String8 ¶ms) { unflatten(params); } + ~CameraParameters(); + + String8 flatten() const; + void unflatten(const String8 ¶ms); + + void set(const char *key, const char *value); + void set(const char *key, int value); + void setFloat(const char *key, float value); + const char *get(const char *key) const; + int getInt(const char *key) const; + float getFloat(const char *key) const; + + void remove(const char *key); + + void setPreviewSize(int width, int height); + void getPreviewSize(int *width, int *height) const; + void getSupportedPreviewSizes(Vector &sizes) const; + + // Set the dimensions in pixels to the given width and height + // for video frames. The given width and height must be one + // of the supported dimensions returned from + // getSupportedVideoSizes(). Must not be called if + // getSupportedVideoSizes() returns an empty Vector of Size. + void setVideoSize(int width, int height); + // Retrieve the current dimensions (width and height) + // in pixels for video frames, which must be one of the + // supported dimensions returned from getSupportedVideoSizes(). + // Must not be called if getSupportedVideoSizes() returns an + // empty Vector of Size. + void getVideoSize(int *width, int *height) const; + // Retrieve a Vector of supported dimensions (width and height) + // in pixels for video frames. If sizes returned from the method + // is empty, the camera does not support calls to setVideoSize() + // or getVideoSize(). In adddition, it also indicates that + // the camera only has a single output, and does not have + // separate output for video frames and preview frame. + void getSupportedVideoSizes(Vector &sizes) const; + // Retrieve the preferred preview size (width and height) in pixels + // for video recording. The given width and height must be one of + // supported preview sizes returned from getSupportedPreviewSizes(). + // Must not be called if getSupportedVideoSizes() returns an empty + // Vector of Size. If getSupportedVideoSizes() returns an empty + // Vector of Size, the width and height returned from this method + // is invalid, and is "-1x-1". + void getPreferredPreviewSizeForVideo(int *width, int *height) const; + + void setPreviewFrameRate(int fps); + int getPreviewFrameRate() const; + void getPreviewFpsRange(int *min_fps, int *max_fps) const; + void setPreviewFormat(const char *format); + const char *getPreviewFormat() const; + void setPictureSize(int width, int height); + void getPictureSize(int *width, int *height) const; + void getSupportedPictureSizes(Vector &sizes) const; + void setPictureFormat(const char *format); + const char *getPictureFormat() const; + + void dump() const; + status_t dump(int fd, const Vector& args) const; + + /** + * Returns a Vector containing the supported preview formats + * as enums given in graphics.h. + */ + void getSupportedPreviewFormats(Vector& formats) const; + + // Returns true if no keys are present + bool isEmpty() const; + + // Parameter keys to communicate between camera application and driver. 
+ // The access (read/write, read only, or write only) is viewed from the + // perspective of applications, not driver. + + // Preview frame size in pixels (width x height). + // Example value: "480x320". Read/Write. + static const char KEY_PREVIEW_SIZE[]; + // Supported preview frame sizes in pixels. + // Example value: "800x600,480x320". Read only. + static const char KEY_SUPPORTED_PREVIEW_SIZES[]; + // The current minimum and maximum preview fps. This controls the rate of + // preview frames received (CAMERA_MSG_PREVIEW_FRAME). The minimum and + // maximum fps must be one of the elements from + // KEY_SUPPORTED_PREVIEW_FPS_RANGE parameter. + // Example value: "10500,26623" + static const char KEY_PREVIEW_FPS_RANGE[]; + // The supported preview fps (frame-per-second) ranges. Each range contains + // a minimum fps and maximum fps. If minimum fps equals to maximum fps, the + // camera outputs frames in fixed frame rate. If not, the camera outputs + // frames in auto frame rate. The actual frame rate fluctuates between the + // minimum and the maximum. The list has at least one element. The list is + // sorted from small to large (first by maximum fps and then minimum fps). + // Example value: "(10500,26623),(15000,26623),(30000,30000)" + static const char KEY_SUPPORTED_PREVIEW_FPS_RANGE[]; + // The image format for preview frames. See CAMERA_MSG_PREVIEW_FRAME in + // frameworks/av/include/camera/Camera.h. The default is + // PIXEL_FORMAT_YUV420SP. Example value: "yuv420sp" or PIXEL_FORMAT_XXX + // constants. Read/write. + static const char KEY_PREVIEW_FORMAT[]; + // Supported image formats for preview frames. + // Example value: "yuv420sp,yuv422i-yuyv". Read only. + static const char KEY_SUPPORTED_PREVIEW_FORMATS[]; + // Number of preview frames per second. This is the target frame rate. The + // actual frame rate depends on the driver. + // Example value: "15". Read/write. + static const char KEY_PREVIEW_FRAME_RATE[]; + // Supported number of preview frames per second. + // Example value: "24,15,10". Read. + static const char KEY_SUPPORTED_PREVIEW_FRAME_RATES[]; + // The dimensions for captured pictures in pixels (width x height). + // Example value: "1024x768". Read/write. + static const char KEY_PICTURE_SIZE[]; + // Supported dimensions for captured pictures in pixels. + // Example value: "2048x1536,1024x768". Read only. + static const char KEY_SUPPORTED_PICTURE_SIZES[]; + // The image format for captured pictures. See CAMERA_MSG_COMPRESSED_IMAGE + // in frameworks/base/include/camera/Camera.h. + // Example value: "jpeg" or PIXEL_FORMAT_XXX constants. Read/write. + static const char KEY_PICTURE_FORMAT[]; + // Supported image formats for captured pictures. + // Example value: "jpeg,rgb565". Read only. + static const char KEY_SUPPORTED_PICTURE_FORMATS[]; + // The width (in pixels) of EXIF thumbnail in Jpeg picture. + // Example value: "512". Read/write. + static const char KEY_JPEG_THUMBNAIL_WIDTH[]; + // The height (in pixels) of EXIF thumbnail in Jpeg picture. + // Example value: "384". Read/write. + static const char KEY_JPEG_THUMBNAIL_HEIGHT[]; + // Supported EXIF thumbnail sizes (width x height). 0x0 means not thumbnail + // in EXIF. + // Example value: "512x384,320x240,0x0". Read only. + static const char KEY_SUPPORTED_JPEG_THUMBNAIL_SIZES[]; + // The quality of the EXIF thumbnail in Jpeg picture. The range is 1 to 100, + // with 100 being the best. + // Example value: "90". Read/write. + static const char KEY_JPEG_THUMBNAIL_QUALITY[]; + // Jpeg quality of captured picture. 
The range is 1 to 100, with 100 being + // the best. + // Example value: "90". Read/write. + static const char KEY_JPEG_QUALITY[]; + // The rotation angle in degrees relative to the orientation of the camera. + // This affects the pictures returned from CAMERA_MSG_COMPRESSED_IMAGE. The + // camera driver may set orientation in the EXIF header without rotating the + // picture. Or the driver may rotate the picture and the EXIF thumbnail. If + // the Jpeg picture is rotated, the orientation in the EXIF header will be + // missing or 1 (row #0 is top and column #0 is left side). + // + // Note that the JPEG pictures of front-facing cameras are not mirrored + // as in preview display. + // + // For example, suppose the natural orientation of the device is portrait. + // The device is rotated 270 degrees clockwise, so the device orientation is + // 270. Suppose a back-facing camera sensor is mounted in landscape and the + // top side of the camera sensor is aligned with the right edge of the + // display in natural orientation. So the camera orientation is 90. The + // rotation should be set to 0 (270 + 90). + // + // Example value: "0" or "90" or "180" or "270". Write only. + static const char KEY_ROTATION[]; + // GPS latitude coordinate. GPSLatitude and GPSLatitudeRef will be stored in + // JPEG EXIF header. + // Example value: "25.032146" or "-33.462809". Write only. + static const char KEY_GPS_LATITUDE[]; + // GPS longitude coordinate. GPSLongitude and GPSLongitudeRef will be stored + // in JPEG EXIF header. + // Example value: "121.564448" or "-70.660286". Write only. + static const char KEY_GPS_LONGITUDE[]; + // GPS altitude. GPSAltitude and GPSAltitudeRef will be stored in JPEG EXIF + // header. + // Example value: "21.0" or "-5". Write only. + static const char KEY_GPS_ALTITUDE[]; + // GPS timestamp (UTC in seconds since January 1, 1970). This should be + // stored in JPEG EXIF header. + // Example value: "1251192757". Write only. + static const char KEY_GPS_TIMESTAMP[]; + // GPS Processing Method + // Example value: "GPS" or "NETWORK". Write only. + static const char KEY_GPS_PROCESSING_METHOD[]; + // Current white balance setting. + // Example value: "auto" or WHITE_BALANCE_XXX constants. Read/write. + static const char KEY_WHITE_BALANCE[]; + // Supported white balance settings. + // Example value: "auto,incandescent,daylight". Read only. + static const char KEY_SUPPORTED_WHITE_BALANCE[]; + // Current color effect setting. + // Example value: "none" or EFFECT_XXX constants. Read/write. + static const char KEY_EFFECT[]; + // Supported color effect settings. + // Example value: "none,mono,sepia". Read only. + static const char KEY_SUPPORTED_EFFECTS[]; + // Current antibanding setting. + // Example value: "auto" or ANTIBANDING_XXX constants. Read/write. + static const char KEY_ANTIBANDING[]; + // Supported antibanding settings. + // Example value: "auto,50hz,60hz,off". Read only. + static const char KEY_SUPPORTED_ANTIBANDING[]; + // Current scene mode. + // Example value: "auto" or SCENE_MODE_XXX constants. Read/write. + static const char KEY_SCENE_MODE[]; + // Supported scene mode settings. + // Example value: "auto,night,fireworks". Read only. + static const char KEY_SUPPORTED_SCENE_MODES[]; + // Current flash mode. + // Example value: "auto" or FLASH_MODE_XXX constants. Read/write. + static const char KEY_FLASH_MODE[]; + // Supported flash modes. + // Example value: "auto,on,off". Read only. + static const char KEY_SUPPORTED_FLASH_MODES[]; + // Current focus mode. 
This will not be empty. Applications should call + // CameraHardwareInterface.autoFocus to start the focus if focus mode is + // FOCUS_MODE_AUTO or FOCUS_MODE_MACRO. + // Example value: "auto" or FOCUS_MODE_XXX constants. Read/write. + static const char KEY_FOCUS_MODE[]; + // Supported focus modes. + // Example value: "auto,macro,fixed". Read only. + static const char KEY_SUPPORTED_FOCUS_MODES[]; + // The maximum number of focus areas supported. This is the maximum length + // of KEY_FOCUS_AREAS. + // Example value: "0" or "2". Read only. + static const char KEY_MAX_NUM_FOCUS_AREAS[]; + // Current focus areas. + // + // Before accessing this parameter, apps should check + // KEY_MAX_NUM_FOCUS_AREAS first to know the maximum number of focus areas + // first. If the value is 0, focus area is not supported. + // + // Each focus area is a five-element int array. The first four elements are + // the rectangle of the area (left, top, right, bottom). The direction is + // relative to the sensor orientation, that is, what the sensor sees. The + // direction is not affected by the rotation or mirroring of + // CAMERA_CMD_SET_DISPLAY_ORIENTATION. Coordinates range from -1000 to 1000. + // (-1000,-1000) is the upper left point. (1000, 1000) is the lower right + // point. The width and height of focus areas cannot be 0 or negative. + // + // The fifth element is the weight. Values for weight must range from 1 to + // 1000. The weight should be interpreted as a per-pixel weight - all + // pixels in the area have the specified weight. This means a small area + // with the same weight as a larger area will have less influence on the + // focusing than the larger area. Focus areas can partially overlap and the + // driver will add the weights in the overlap region. + // + // A special case of single focus area (0,0,0,0,0) means driver to decide + // the focus area. For example, the driver may use more signals to decide + // focus areas and change them dynamically. Apps can set (0,0,0,0,0) if they + // want the driver to decide focus areas. + // + // Focus areas are relative to the current field of view (KEY_ZOOM). No + // matter what the zoom level is, (-1000,-1000) represents the top of the + // currently visible camera frame. The focus area cannot be set to be + // outside the current field of view, even when using zoom. + // + // Focus area only has effect if the current focus mode is FOCUS_MODE_AUTO, + // FOCUS_MODE_MACRO, FOCUS_MODE_CONTINUOUS_VIDEO, or + // FOCUS_MODE_CONTINUOUS_PICTURE. + // Example value: "(-10,-10,0,0,300),(0,0,10,10,700)". Read/write. + static const char KEY_FOCUS_AREAS[]; + // Focal length in millimeter. + // Example value: "4.31". Read only. + static const char KEY_FOCAL_LENGTH[]; + // Horizontal angle of view in degrees. + // Example value: "54.8". Read only. + static const char KEY_HORIZONTAL_VIEW_ANGLE[]; + // Vertical angle of view in degrees. + // Example value: "42.5". Read only. + static const char KEY_VERTICAL_VIEW_ANGLE[]; + // Exposure compensation index. 0 means exposure is not adjusted. + // Example value: "-5" or "5". Read/write. + static const char KEY_EXPOSURE_COMPENSATION[]; + // The maximum exposure compensation index (>=0). + // Example value: "6". Read only. + static const char KEY_MAX_EXPOSURE_COMPENSATION[]; + // The minimum exposure compensation index (<=0). + // Example value: "-6". Read only. + static const char KEY_MIN_EXPOSURE_COMPENSATION[]; + // The exposure compensation step. Exposure compensation index multiply by + // step eqals to EV. 
Ex: if exposure compensation index is -6 and step is + // 0.3333, EV is -2. + // Example value: "0.333333333" or "0.5". Read only. + static const char KEY_EXPOSURE_COMPENSATION_STEP[]; + // The state of the auto-exposure lock. "true" means that + // auto-exposure is locked to its current value and will not + // change. "false" means the auto-exposure routine is free to + // change exposure values. If auto-exposure is already locked, + // setting this to true again has no effect (the driver will not + // recalculate exposure values). Changing exposure compensation + // settings will still affect the exposure settings while + // auto-exposure is locked. Stopping preview or taking a still + // image will not change the lock. In conjunction with + // exposure compensation, this allows for capturing multi-exposure + // brackets with known relative exposure values. Locking + // auto-exposure after open but before the first call to + // startPreview may result in severely over- or under-exposed + // images. The driver will not change the AE lock after + // auto-focus completes. + static const char KEY_AUTO_EXPOSURE_LOCK[]; + // Whether locking the auto-exposure is supported. "true" means it is, and + // "false" or this key not existing means it is not supported. + static const char KEY_AUTO_EXPOSURE_LOCK_SUPPORTED[]; + // The state of the auto-white balance lock. "true" means that + // auto-white balance is locked to its current value and will not + // change. "false" means the auto-white balance routine is free to + // change white balance values. If auto-white balance is already + // locked, setting this to true again has no effect (the driver + // will not recalculate white balance values). Stopping preview or + // taking a still image will not change the lock. In conjunction + // with exposure compensation, this allows for capturing + // multi-exposure brackets with fixed white balance. Locking + // auto-white balance after open but before the first call to + // startPreview may result in severely incorrect color. The + // driver will not change the AWB lock after auto-focus + // completes. + static const char KEY_AUTO_WHITEBALANCE_LOCK[]; + // Whether locking the auto-white balance is supported. "true" + // means it is, and "false" or this key not existing means it is + // not supported. + static const char KEY_AUTO_WHITEBALANCE_LOCK_SUPPORTED[]; + + // The maximum number of metering areas supported. This is the maximum + // length of KEY_METERING_AREAS. + // Example value: "0" or "2". Read only. + static const char KEY_MAX_NUM_METERING_AREAS[]; + // Current metering areas. Camera driver uses these areas to decide + // exposure. + // + // Before accessing this parameter, apps should check + // KEY_MAX_NUM_METERING_AREAS first to know the maximum number of metering + // areas first. If the value is 0, metering area is not supported. + // + // Each metering area is a rectangle with specified weight. The direction is + // relative to the sensor orientation, that is, what the sensor sees. The + // direction is not affected by the rotation or mirroring of + // CAMERA_CMD_SET_DISPLAY_ORIENTATION. Coordinates of the rectangle range + // from -1000 to 1000. (-1000, -1000) is the upper left point. (1000, 1000) + // is the lower right point. The width and height of metering areas cannot + // be 0 or negative. + // + // The fifth element is the weight. Values for weight must range from 1 to + // 1000. 
The weight should be interpreted as a per-pixel weight - all + // pixels in the area have the specified weight. This means a small area + // with the same weight as a larger area will have less influence on the + // metering than the larger area. Metering areas can partially overlap and + // the driver will add the weights in the overlap region. + // + // A special case of all-zero single metering area means driver to decide + // the metering area. For example, the driver may use more signals to decide + // metering areas and change them dynamically. Apps can set all-zero if they + // want the driver to decide metering areas. + // + // Metering areas are relative to the current field of view (KEY_ZOOM). + // No matter what the zoom level is, (-1000,-1000) represents the top of the + // currently visible camera frame. The metering area cannot be set to be + // outside the current field of view, even when using zoom. + // + // No matter what metering areas are, the final exposure are compensated + // by KEY_EXPOSURE_COMPENSATION. + // Example value: "(-10,-10,0,0,300),(0,0,10,10,700)". Read/write. + static const char KEY_METERING_AREAS[]; + // Current zoom value. + // Example value: "0" or "6". Read/write. + static const char KEY_ZOOM[]; + // Maximum zoom value. + // Example value: "6". Read only. + static const char KEY_MAX_ZOOM[]; + // The zoom ratios of all zoom values. The zoom ratio is in 1/100 + // increments. Ex: a zoom of 3.2x is returned as 320. The number of list + // elements is KEY_MAX_ZOOM + 1. The first element is always 100. The last + // element is the zoom ratio of zoom value KEY_MAX_ZOOM. + // Example value: "100,150,200,250,300,350,400". Read only. + static const char KEY_ZOOM_RATIOS[]; + // Whether zoom is supported. Zoom is supported if the value is "true". Zoom + // is not supported if the value is not "true" or the key does not exist. + // Example value: "true". Read only. + static const char KEY_ZOOM_SUPPORTED[]; + // Whether if smooth zoom is supported. Smooth zoom is supported if the + // value is "true". It is not supported if the value is not "true" or the + // key does not exist. + // See CAMERA_CMD_START_SMOOTH_ZOOM, CAMERA_CMD_STOP_SMOOTH_ZOOM, and + // CAMERA_MSG_ZOOM in frameworks/base/include/camera/Camera.h. + // Example value: "true". Read only. + static const char KEY_SMOOTH_ZOOM_SUPPORTED[]; + + // The distances (in meters) from the camera to where an object appears to + // be in focus. The object is sharpest at the optimal focus distance. The + // depth of field is the far focus distance minus near focus distance. + // + // Focus distances may change after starting auto focus, canceling auto + // focus, or starting the preview. Applications can read this anytime to get + // the latest focus distances. If the focus mode is FOCUS_MODE_CONTINUOUS, + // focus distances may change from time to time. + // + // This is intended to estimate the distance between the camera and the + // subject. After autofocus, the subject distance may be within near and far + // focus distance. However, the precision depends on the camera hardware, + // autofocus algorithm, the focus area, and the scene. The error can be + // large and it should be only used as a reference. + // + // Far focus distance > optimal focus distance > near focus distance. If + // the far focus distance is infinity, the value should be "Infinity" (case + // sensitive). The format is three float values separated by commas. The + // first is near focus distance. The second is optimal focus distance. 
The + // third is far focus distance. + // Example value: "0.95,1.9,Infinity" or "0.049,0.05,0.051". Read only. + static const char KEY_FOCUS_DISTANCES[]; + + // The current dimensions in pixels (width x height) for video frames. + // The width and height must be one of the supported sizes retrieved + // via KEY_SUPPORTED_VIDEO_SIZES. + // Example value: "1280x720". Read/write. + static const char KEY_VIDEO_SIZE[]; + // A list of the supported dimensions in pixels (width x height) + // for video frames. See CAMERA_MSG_VIDEO_FRAME for details in + // frameworks/base/include/camera/Camera.h. + // Example: "176x144,1280x720". Read only. + static const char KEY_SUPPORTED_VIDEO_SIZES[]; + + // The maximum number of detected faces supported by hardware face + // detection. If the value is 0, hardware face detection is not supported. + // Example: "5". Read only + static const char KEY_MAX_NUM_DETECTED_FACES_HW[]; + + // The maximum number of detected faces supported by software face + // detection. If the value is 0, software face detection is not supported. + // Example: "5". Read only + static const char KEY_MAX_NUM_DETECTED_FACES_SW[]; + + // Preferred preview frame size in pixels for video recording. + // The width and height must be one of the supported sizes retrieved + // via KEY_SUPPORTED_PREVIEW_SIZES. This key can be used only when + // getSupportedVideoSizes() does not return an empty Vector of Size. + // Camcorder applications are recommended to set the preview size + // to a value that is not larger than the preferred preview size. + // In other words, the product of the width and height of the + // preview size should not be larger than that of the preferred + // preview size. In addition, we recommend to choos a preview size + // that has the same aspect ratio as the resolution of video to be + // recorded. + // Example value: "800x600". Read only. + static const char KEY_PREFERRED_PREVIEW_SIZE_FOR_VIDEO[]; + + // The image format for video frames. See CAMERA_MSG_VIDEO_FRAME in + // frameworks/base/include/camera/Camera.h. + // Example value: "yuv420sp" or PIXEL_FORMAT_XXX constants. Read only. + static const char KEY_VIDEO_FRAME_FORMAT[]; + + // Sets the hint of the recording mode. If this is true, MediaRecorder.start + // may be faster or has less glitches. This should be called before starting + // the preview for the best result. But it is allowed to change the hint + // while the preview is active. The default value is false. + // + // The apps can still call Camera.takePicture when the hint is true. The + // apps can call MediaRecorder.start when the hint is false. But the + // performance may be worse. + // Example value: "true" or "false". Read/write. + static const char KEY_RECORDING_HINT[]; + + // Returns true if video snapshot is supported. That is, applications + // can call Camera.takePicture during recording. Applications do not need to + // call Camera.startPreview after taking a picture. The preview will be + // still active. Other than that, taking a picture during recording is + // identical to taking a picture normally. All settings and methods related + // to takePicture work identically. Ex: KEY_PICTURE_SIZE, + // KEY_SUPPORTED_PICTURE_SIZES, KEY_JPEG_QUALITY, KEY_ROTATION, and etc. + // The picture will have an EXIF header. FLASH_MODE_AUTO and FLASH_MODE_ON + // also still work, but the video will record the flash. + // + // Applications can set shutter callback as null to avoid the shutter + // sound. 
It is also recommended to set raw picture and post view callbacks + // to null to avoid the interrupt of preview display. + // + // Field-of-view of the recorded video may be different from that of the + // captured pictures. + // Example value: "true" or "false". Read only. + static const char KEY_VIDEO_SNAPSHOT_SUPPORTED[]; + + // The state of the video stabilization. If set to true, both the + // preview stream and the recorded video stream are stabilized by + // the camera. Only valid to set if KEY_VIDEO_STABILIZATION_SUPPORTED is + // set to true. + // + // The value of this key can be changed any time the camera is + // open. If preview or recording is active, it is acceptable for + // there to be a slight video glitch when video stabilization is + // toggled on and off. + // + // This only stabilizes video streams (between-frames stabilization), and + // has no effect on still image capture. + static const char KEY_VIDEO_STABILIZATION[]; + + // Returns true if video stabilization is supported. That is, applications + // can set KEY_VIDEO_STABILIZATION to true and have a stabilized preview + // stream and record stabilized videos. + static const char KEY_VIDEO_STABILIZATION_SUPPORTED[]; + + // Supported modes for special effects with light. + // Example values: "lowlight,hdr". + static const char KEY_LIGHTFX[]; + + // Value for KEY_ZOOM_SUPPORTED or KEY_SMOOTH_ZOOM_SUPPORTED. + static const char TRUE[]; + static const char FALSE[]; + + // Value for KEY_FOCUS_DISTANCES. + static const char FOCUS_DISTANCE_INFINITY[]; + + // Values for white balance settings. + static const char WHITE_BALANCE_AUTO[]; + static const char WHITE_BALANCE_INCANDESCENT[]; + static const char WHITE_BALANCE_FLUORESCENT[]; + static const char WHITE_BALANCE_WARM_FLUORESCENT[]; + static const char WHITE_BALANCE_DAYLIGHT[]; + static const char WHITE_BALANCE_CLOUDY_DAYLIGHT[]; + static const char WHITE_BALANCE_TWILIGHT[]; + static const char WHITE_BALANCE_SHADE[]; + + // Values for effect settings. + static const char EFFECT_NONE[]; + static const char EFFECT_MONO[]; + static const char EFFECT_NEGATIVE[]; + static const char EFFECT_SOLARIZE[]; + static const char EFFECT_SEPIA[]; + static const char EFFECT_POSTERIZE[]; + static const char EFFECT_WHITEBOARD[]; + static const char EFFECT_BLACKBOARD[]; + static const char EFFECT_AQUA[]; + + // Values for antibanding settings. + static const char ANTIBANDING_AUTO[]; + static const char ANTIBANDING_50HZ[]; + static const char ANTIBANDING_60HZ[]; + static const char ANTIBANDING_OFF[]; + + // Values for flash mode settings. + // Flash will not be fired. + static const char FLASH_MODE_OFF[]; + // Flash will be fired automatically when required. The flash may be fired + // during preview, auto-focus, or snapshot depending on the driver. + static const char FLASH_MODE_AUTO[]; + // Flash will always be fired during snapshot. The flash may also be + // fired during preview or auto-focus depending on the driver. + static const char FLASH_MODE_ON[]; + // Flash will be fired in red-eye reduction mode. + static const char FLASH_MODE_RED_EYE[]; + // Constant emission of light during preview, auto-focus and snapshot. + // This can also be used for video recording. + static const char FLASH_MODE_TORCH[]; + + // Values for scene mode settings. 
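+    // Illustrative sketch only: given a CameraParameters instance "params"
+    // (e.g. populated via unflatten()), a caller might pick one of the scene
+    // modes declared below after checking the supported list:
+    //
+    //   const char* supported = params.get(KEY_SUPPORTED_SCENE_MODES);
+    //   if (supported != nullptr && strstr(supported, SCENE_MODE_HDR) != nullptr) {
+    //       params.set(KEY_SCENE_MODE, SCENE_MODE_HDR);
+    //   }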
+ static const char SCENE_MODE_AUTO[]; + static const char SCENE_MODE_ACTION[]; + static const char SCENE_MODE_PORTRAIT[]; + static const char SCENE_MODE_LANDSCAPE[]; + static const char SCENE_MODE_NIGHT[]; + static const char SCENE_MODE_NIGHT_PORTRAIT[]; + static const char SCENE_MODE_THEATRE[]; + static const char SCENE_MODE_BEACH[]; + static const char SCENE_MODE_SNOW[]; + static const char SCENE_MODE_SUNSET[]; + static const char SCENE_MODE_STEADYPHOTO[]; + static const char SCENE_MODE_FIREWORKS[]; + static const char SCENE_MODE_SPORTS[]; + static const char SCENE_MODE_PARTY[]; + static const char SCENE_MODE_CANDLELIGHT[]; + // Applications are looking for a barcode. Camera driver will be optimized + // for barcode reading. + static const char SCENE_MODE_BARCODE[]; + // A high-dynamic range mode. In this mode, the HAL module will use a + // capture strategy that extends the dynamic range of the captured + // image in some fashion. Only the final image is returned. + static const char SCENE_MODE_HDR[]; + + // Pixel color formats for KEY_PREVIEW_FORMAT, KEY_PICTURE_FORMAT, + // and KEY_VIDEO_FRAME_FORMAT + static const char PIXEL_FORMAT_YUV422SP[]; + static const char PIXEL_FORMAT_YUV420SP[]; // NV21 + static const char PIXEL_FORMAT_YUV422I[]; // YUY2 + static const char PIXEL_FORMAT_YUV420P[]; // YV12 + static const char PIXEL_FORMAT_RGB565[]; + static const char PIXEL_FORMAT_RGBA8888[]; + static const char PIXEL_FORMAT_JPEG[]; + // Raw bayer format used for images, which is 10 bit precision samples + // stored in 16 bit words. The filter pattern is RGGB. + static const char PIXEL_FORMAT_BAYER_RGGB[]; + // Pixel format is not known to the framework + static const char PIXEL_FORMAT_ANDROID_OPAQUE[]; + + // Values for focus mode settings. + // Auto-focus mode. Applications should call + // CameraHardwareInterface.autoFocus to start the focus in this mode. + static const char FOCUS_MODE_AUTO[]; + // Focus is set at infinity. Applications should not call + // CameraHardwareInterface.autoFocus in this mode. + static const char FOCUS_MODE_INFINITY[]; + // Macro (close-up) focus mode. Applications should call + // CameraHardwareInterface.autoFocus to start the focus in this mode. + static const char FOCUS_MODE_MACRO[]; + // Focus is fixed. The camera is always in this mode if the focus is not + // adjustable. If the camera has auto-focus, this mode can fix the + // focus, which is usually at hyperfocal distance. Applications should + // not call CameraHardwareInterface.autoFocus in this mode. + static const char FOCUS_MODE_FIXED[]; + // Extended depth of field (EDOF). Focusing is done digitally and + // continuously. Applications should not call + // CameraHardwareInterface.autoFocus in this mode. + static const char FOCUS_MODE_EDOF[]; + // Continuous auto focus mode intended for video recording. The camera + // continuously tries to focus. This is the best choice for video + // recording because the focus changes smoothly . Applications still can + // call CameraHardwareInterface.takePicture in this mode but the subject may + // not be in focus. Auto focus starts when the parameter is set. + // + // Applications can call CameraHardwareInterface.autoFocus in this mode. The + // focus callback will immediately return with a boolean that indicates + // whether the focus is sharp or not. The focus position is locked after + // autoFocus call. If applications want to resume the continuous focus, + // cancelAutoFocus must be called. 
Restarting the preview will not resume + // the continuous autofocus. To stop continuous focus, applications should + // change the focus mode to other modes. + static const char FOCUS_MODE_CONTINUOUS_VIDEO[]; + // Continuous auto focus mode intended for taking pictures. The camera + // continuously tries to focus. The speed of focus change is more aggressive + // than FOCUS_MODE_CONTINUOUS_VIDEO. Auto focus starts when the parameter is + // set. + // + // Applications can call CameraHardwareInterface.autoFocus in this mode. If + // the autofocus is in the middle of scanning, the focus callback will + // return when it completes. If the autofocus is not scanning, focus + // callback will immediately return with a boolean that indicates whether + // the focus is sharp or not. The apps can then decide if they want to take + // a picture immediately or to change the focus mode to auto, and run a full + // autofocus cycle. The focus position is locked after autoFocus call. If + // applications want to resume the continuous focus, cancelAutoFocus must be + // called. Restarting the preview will not resume the continuous autofocus. + // To stop continuous focus, applications should change the focus mode to + // other modes. + static const char FOCUS_MODE_CONTINUOUS_PICTURE[]; + + // Values for light special effects + // Low-light enhancement mode + static const char LIGHTFX_LOWLIGHT[]; + // High-dynamic range mode + static const char LIGHTFX_HDR[]; + + /** + * Returns the supported preview format as an enum given in graphics.h + * corresponding to the format given in the input string, or -1 if no such + * conversion exists. + */ + static int previewFormatToEnum(const char* format); + +private: + DefaultKeyedVector<String8,String8> mMap; +}; + +}; +}; +}; +}; +}; +}; // namespace + +#endif diff --git a/camera/common/1.0/default/include/Exif.h b/camera/common/1.0/default/include/Exif.h new file mode 100644 index 0000000..dc31679 --- /dev/null +++ b/camera/common/1.0/default/include/Exif.h @@ -0,0 +1,256 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_HARDWARE_INTERFACES_CAMERA_COMMON_1_0_EXIF_H +#define ANDROID_HARDWARE_INTERFACES_CAMERA_COMMON_1_0_EXIF_H + +#include "CameraMetadata.h" + +namespace android { +namespace hardware { +namespace camera { +namespace common { +namespace V1_0 { +namespace helper { + + +// This is based on the original ChromeOS ARC implementation of a V4L2 HAL + +// ExifUtils can generate an APP1 segment with the tags set by the caller. ExifUtils can +// also add a thumbnail in the APP1 segment if thumbnail size is specified. +// ExifUtils can be reused with different images by calling initialize(). +// +// Example of using this class: +// std::unique_ptr<ExifUtils> utils(ExifUtils::Create()); +// utils->initialize(); +// ... +// // Call ExifUtils functions to set Exif tags. +// ...
+// utils->GenerateApp1(thumbnail_buffer, thumbnail_size); +// unsigned int app1Length = utils->GetApp1Length(); +// uint8_t* app1Buffer = new uint8_t[app1Length]; +// memcpy(app1Buffer, utils->GetApp1Buffer(), app1Length); +class ExifUtils { + + public: + virtual ~ExifUtils(); + + static ExifUtils* create(); + + // Initialize() can be called multiple times. The setting of Exif tags will be + // cleared. + virtual bool initialize() = 0; + + // Set all known fields from a metadata structure + virtual bool setFromMetadata(const CameraMetadata& metadata, + const size_t imageWidth, + const size_t imageHeight) = 0; + + // Sets the len aperture. + // Returns false if memory allocation fails. + virtual bool setAperture(uint32_t numerator, uint32_t denominator) = 0; + + // Sets the value of brightness. + // Returns false if memory allocation fails. + virtual bool setBrightness(int32_t numerator, int32_t denominator) = 0; + + // Sets the color space. + // Returns false if memory allocation fails. + virtual bool setColorSpace(uint16_t color_space) = 0; + + // Sets the information to compressed data. + // Returns false if memory allocation fails. + virtual bool setComponentsConfiguration(const std::string& components_configuration) = 0; + + // Sets the compression scheme used for the image data. + // Returns false if memory allocation fails. + virtual bool setCompression(uint16_t compression) = 0; + + // Sets image contrast. + // Returns false if memory allocation fails. + virtual bool setContrast(uint16_t contrast) = 0; + + // Sets the date and time of image last modified. It takes local time. The + // name of the tag is DateTime in IFD0. + // Returns false if memory allocation fails. + virtual bool setDateTime(const struct tm& t) = 0; + + // Sets the image description. + // Returns false if memory allocation fails. + virtual bool setDescription(const std::string& description) = 0; + + // Sets the digital zoom ratio. If the numerator is 0, it means digital zoom + // was not used. + // Returns false if memory allocation fails. + virtual bool setDigitalZoomRatio(uint32_t numerator, uint32_t denominator) = 0; + + // Sets the exposure bias. + // Returns false if memory allocation fails. + virtual bool setExposureBias(int32_t numerator, int32_t denominator) = 0; + + // Sets the exposure mode set when the image was shot. + // Returns false if memory allocation fails. + virtual bool setExposureMode(uint16_t exposure_mode) = 0; + + // Sets the program used by the camera to set exposure when the picture is + // taken. + // Returns false if memory allocation fails. + virtual bool setExposureProgram(uint16_t exposure_program) = 0; + + // Sets the exposure time, given in seconds. + // Returns false if memory allocation fails. + virtual bool setExposureTime(uint32_t numerator, uint32_t denominator) = 0; + + // Sets the status of flash. + // Returns false if memory allocation fails. + virtual bool setFlash(uint16_t flash) = 0; + + // Sets the F number. + // Returns false if memory allocation fails. + virtual bool setFNumber(uint32_t numerator, uint32_t denominator) = 0; + + // Sets the focal length of lens used to take the image in millimeters. + // Returns false if memory allocation fails. + virtual bool setFocalLength(uint32_t numerator, uint32_t denominator) = 0; + + // Sets the degree of overall image gain adjustment. + // Returns false if memory allocation fails. + virtual bool setGainControl(uint16_t gain_control) = 0; + + // Sets the altitude in meters. + // Returns false if memory allocation fails. 
+ virtual bool setGpsAltitude(double altitude) = 0; + + // Sets the latitude with degrees minutes seconds format. + // Returns false if memory allocation fails. + virtual bool setGpsLatitude(double latitude) = 0; + + // Sets the longitude with degrees minutes seconds format. + // Returns false if memory allocation fails. + virtual bool setGpsLongitude(double longitude) = 0; + + // Sets GPS processing method. + // Returns false if memory allocation fails. + virtual bool setGpsProcessingMethod(const std::string& method) = 0; + + // Sets GPS date stamp and time stamp (atomic clock). It takes UTC time. + // Returns false if memory allocation fails. + virtual bool setGpsTimestamp(const struct tm& t) = 0; + + // Sets the height (number of rows) of main image. + // Returns false if memory allocation fails. + virtual bool setImageHeight(uint32_t length) = 0; + + // Sets the width (number of columns) of main image. + // Returns false if memory allocation fails. + virtual bool setImageWidth(uint32_t width) = 0; + + // Sets the ISO speed. + // Returns false if memory allocation fails. + virtual bool setIsoSpeedRating(uint16_t iso_speed_ratings) = 0; + + // Sets the kind of light source. + // Returns false if memory allocation fails. + virtual bool setLightSource(uint16_t light_source) = 0; + + // Sets the smallest F number of the lens. + // Returns false if memory allocation fails. + virtual bool setMaxAperture(uint32_t numerator, uint32_t denominator) = 0; + + // Sets the metering mode. + // Returns false if memory allocation fails. + virtual bool setMeteringMode(uint16_t metering_mode) = 0; + + // Sets image orientation. + // Returns false if memory allocation fails. + virtual bool setOrientation(uint16_t orientation) = 0; + + // Sets the unit for measuring XResolution and YResolution. + // Returns false if memory allocation fails. + virtual bool setResolutionUnit(uint16_t resolution_unit) = 0; + + // Sets image saturation. + // Returns false if memory allocation fails. + virtual bool setSaturation(uint16_t saturation) = 0; + + // Sets the type of scene that was shot. + // Returns false if memory allocation fails. + virtual bool setSceneCaptureType(uint16_t type) = 0; + + // Sets image sharpness. + // Returns false if memory allocation fails. + virtual bool setSharpness(uint16_t sharpness) = 0; + + // Sets the shutter speed. + // Returns false if memory allocation fails. + virtual bool setShutterSpeed(int32_t numerator, int32_t denominator) = 0; + + // Sets the distance to the subject, given in meters. + // Returns false if memory allocation fails. + virtual bool setSubjectDistance(uint32_t numerator, uint32_t denominator) = 0; + + // Sets the fractions of seconds for the tag. + // Returns false if memory allocation fails. + virtual bool setSubsecTime(const std::string& subsec_time) = 0; + + // Sets the white balance mode set when the image was shot. + // Returns false if memory allocation fails. + virtual bool setWhiteBalance(uint16_t white_balance) = 0; + + // Sets the number of pixels per resolution unit in the image width. + // Returns false if memory allocation fails. + virtual bool setXResolution(uint32_t numerator, uint32_t denominator) = 0; + + // Sets the position of chrominance components in relation to the luminance + // component. + // Returns false if memory allocation fails. + virtual bool setYCbCrPositioning(uint16_t ycbcr_positioning) = 0; + + // Sets the number of pixels per resolution unit in the image length. + // Returns false if memory allocation fails. 
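Tying these setters to the flow documented at the top of this header (create, initialize, set tags, generateApp1, copy the APP1 buffer), a rough sketch might look as follows; the metadata source, the illustrative make/model strings, and the surrounding buffer handling are assumptions, not part of this header.

#include <cstdint>
#include <cstring>
#include <memory>
#include <vector>
#include "Exif.h"

using ::android::hardware::camera::common::V1_0::helper::CameraMetadata;
using ::android::hardware::camera::common::V1_0::helper::ExifUtils;

// Sketch only: build an APP1 segment from capture metadata plus a thumbnail
// produced elsewhere. Returns an empty vector on failure.
std::vector<uint8_t> makeApp1(const CameraMetadata& meta, size_t width, size_t height,
                              const void* thumb, uint32_t thumbSize) {
    std::unique_ptr<ExifUtils> utils(ExifUtils::create());
    if (utils == nullptr || !utils->initialize()) return {};

    utils->setFromMetadata(meta, width, height);  // bulk-populate known tags
    utils->setMake("ExampleVendor");              // illustrative values
    utils->setModel("ExampleModel");

    if (!utils->generateApp1(thumb, thumbSize)) return {};

    std::vector<uint8_t> app1(utils->getApp1Length());
    std::memcpy(app1.data(), utils->getApp1Buffer(), app1.size());
    return app1;
}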
+ virtual bool setYResolution(uint32_t numerator, uint32_t denominator) = 0; + + // Sets the manufacturer of camera. + // Returns false if memory allocation fails. + virtual bool setMake(const std::string& make) = 0; + + // Sets the model number of camera. + // Returns false if memory allocation fails. + virtual bool setModel(const std::string& model) = 0; + + // Generates APP1 segment. + // Returns false if generating APP1 segment fails. + virtual bool generateApp1(const void* thumbnail_buffer, uint32_t size) = 0; + + // Gets buffer of APP1 segment. This method must be called only after calling + // GenerateAPP1(). + virtual const uint8_t* getApp1Buffer() = 0; + + // Gets length of APP1 segment. This method must be called only after calling + // GenerateAPP1(). + virtual unsigned int getApp1Length() = 0; +}; + + +} // namespace helper +} // namespace V1_0 +} // namespace common +} // namespace camera +} // namespace hardware +} // namespace android + + +#endif // ANDROID_HARDWARE_INTERFACES_CAMERA_COMMON_1_0_EXIF_H diff --git a/camera/common/1.0/default/include/HandleImporter.h b/camera/common/1.0/default/include/HandleImporter.h new file mode 100644 index 0000000..e404439 --- /dev/null +++ b/camera/common/1.0/default/include/HandleImporter.h @@ -0,0 +1,90 @@ +/* + * Copyright (C) 2017 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef CAMERA_COMMON_1_0_HANDLEIMPORTED_H +#define CAMERA_COMMON_1_0_HANDLEIMPORTED_H + +#include +#include +#include +#include +#include + +using android::hardware::graphics::mapper::V2_0::IMapper; +using android::hardware::graphics::mapper::V2_0::YCbCrLayout; + +namespace android { +namespace hardware { +namespace camera { +namespace common { +namespace V1_0 { +namespace helper { + +// Borrowed from graphics HAL. Use this until gralloc mapper HAL is working +class HandleImporter { +public: + HandleImporter(); + + // In IComposer, any buffer_handle_t is owned by the caller and we need to + // make a clone for hwcomposer2. We also need to translate empty handle + // to nullptr. This function does that, in-place. + bool importBuffer(buffer_handle_t& handle); + void freeBuffer(buffer_handle_t handle); + bool importFence(const native_handle_t* handle, int& fd) const; + void closeFence(int fd) const; + + // Locks 1-D buffer. Assumes caller has waited for acquire fences. + void* lock(buffer_handle_t& buf, uint64_t cpuUsage, size_t size); + + // Locks 2-D buffer. Assumes caller has waited for acquire fences. + void* lock(buffer_handle_t& buf, uint64_t cpuUsage, const IMapper::Rect& accessRegion); + + // Assumes caller has waited for acquire fences. + YCbCrLayout lockYCbCr(buffer_handle_t& buf, uint64_t cpuUsage, + const IMapper::Rect& accessRegion); + + // Query the stride of the first plane in bytes. 
+ status_t getMonoPlanarStrideBytes(buffer_handle_t& buf, uint32_t* stride /*out*/); + + int unlock(buffer_handle_t& buf); // returns release fence + +private: + void initializeLocked(); + void cleanup(); + + template + bool importBufferInternal(const sp mapper, buffer_handle_t& handle); + template + YCbCrLayout lockYCbCrInternal(const sp mapper, buffer_handle_t& buf, uint64_t cpuUsage, + const IMapper::Rect& accessRegion); + template + int unlockInternal(const sp mapper, buffer_handle_t& buf); + + Mutex mLock; + bool mInitialized; + sp mMapperV2; + sp mMapperV3; + sp mMapperV4; +}; + +} // namespace helper +} // namespace V1_0 +} // namespace common +} // namespace camera +} // namespace hardware +} // namespace android + +#endif // CAMERA_COMMON_1_0_HANDLEIMPORTED_H diff --git a/camera/common/1.0/default/include/VendorTagDescriptor.h b/camera/common/1.0/default/include/VendorTagDescriptor.h new file mode 100644 index 0000000..0f54db5 --- /dev/null +++ b/camera/common/1.0/default/include/VendorTagDescriptor.h @@ -0,0 +1,242 @@ +/* + * Copyright (C) 2016 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef CAMERA_COMMON_1_0_VENDORTAGDESCRIPTOR_H +#define CAMERA_COMMON_1_0_VENDORTAGDESCRIPTOR_H + +#include +#include +#include +#include +#include + +#include +#include + +namespace android { +namespace hardware { +namespace camera2 { +namespace params { + +/** + * VendorTagDescriptor objects are containers for the vendor tag + * definitions provided, and are typically used to pass the vendor tag + * information enumerated by the HAL to clients of the camera service. + */ +class VendorTagDescriptor { + public: + virtual ~VendorTagDescriptor(); + + VendorTagDescriptor(); + VendorTagDescriptor(const VendorTagDescriptor& src); + VendorTagDescriptor& operator=(const VendorTagDescriptor& rhs); + + void copyFrom(const VendorTagDescriptor& src); + + /** + * The following 'get*' methods implement the corresponding + * functions defined in + * system/media/camera/include/system/camera_vendor_tags.h + */ + + // Returns the number of vendor tags defined. + int getTagCount() const; + + // Returns an array containing the id's of vendor tags defined. + void getTagArray(uint32_t* tagArray) const; + + // Returns the section name string for a given vendor tag id. + const char* getSectionName(uint32_t tag) const; + + // Returns the index in section vectors returned in getAllSectionNames() + // for a given vendor tag id. -1 if input tag does not exist. + ssize_t getSectionIndex(uint32_t tag) const; + + // Returns the tag name string for a given vendor tag id. + const char* getTagName(uint32_t tag) const; + + // Returns the tag type for a given vendor tag id. + int getTagType(uint32_t tag) const; + + /** + * Convenience method to get a vector containing all vendor tag + * sections, or an empty vector if none are defined. + * The pointer is valid for the lifetime of the VendorTagDescriptor, + * or until copyFrom is invoked. 
+ */ + const SortedVector* getAllSectionNames() const; + + /** + * Lookup the tag id for a given tag name and section. + * + * Returns OK on success, or a negative error code. + */ + status_t lookupTag(const String8& name, const String8& section, /*out*/uint32_t* tag) const; + + /** + * Dump the currently configured vendor tags to a file descriptor. + */ + void dump(int fd, int verbosity, int indentation) const; + + protected: + KeyedVector*> mReverseMapping; + KeyedVector mTagToNameMap; + KeyedVector mTagToSectionMap; // Value is offset in mSections + + std::unordered_map mTagToTypeMap; + SortedVector mSections; + // must be int32_t to be compatible with Parcel::writeInt32 + int32_t mTagCount; + + vendor_tag_ops mVendorOps; +}; +} /* namespace params */ +} /* namespace camera2 */ + +namespace camera { +namespace common { +namespace V1_0 { +namespace helper { + +/** + * This version of VendorTagDescriptor must be stored in Android sp<>, and adds support for using it + * as a global tag descriptor. + * + * It's a child class of the basic hardware::camera2::params::VendorTagDescriptor since basic + * Parcelable objects cannot require being kept in an sp<> and still work with auto-generated AIDL + * interface implementations. + */ +class VendorTagDescriptor : + public ::android::hardware::camera2::params::VendorTagDescriptor, + public LightRefBase { + + public: + + /** + * Create a VendorTagDescriptor object from the given vendor_tag_ops_t + * struct. + * + * Returns OK on success, or a negative error code. + */ + static status_t createDescriptorFromOps(const vendor_tag_ops_t* vOps, + /*out*/ + sp& descriptor); + + /** + * Sets the global vendor tag descriptor to use for this process. + * Camera metadata operations that access vendor tags will use the + * vendor tag definitions set this way. + * + * Returns OK on success, or a negative error code. + */ + static status_t setAsGlobalVendorTagDescriptor(const sp& desc); + + /** + * Returns the global vendor tag descriptor used by this process. + * This will contain NULL if no vendor tags are defined. + */ + static sp getGlobalVendorTagDescriptor(); + + /** + * Clears the global vendor tag descriptor used by this process. + */ + static void clearGlobalVendorTagDescriptor(); + +}; + +} /* namespace helper */ +} /* namespace V1_0 */ +} /* namespace common */ +} /* namespace camera */ + +namespace camera2 { +namespace params { + +class VendorTagDescriptorCache { + public: + typedef android::hardware::camera::common::V1_0::helper::VendorTagDescriptor + VendorTagDescriptor; + VendorTagDescriptorCache(){}; + int32_t addVendorDescriptor(metadata_vendor_id_t id, sp desc); + + int32_t getVendorTagDescriptor(metadata_vendor_id_t id, sp* desc /*out*/); + + // Returns the number of vendor tags defined. + int getTagCount(metadata_vendor_id_t id) const; + + // Returns an array containing the id's of vendor tags defined. + void getTagArray(uint32_t* tagArray, metadata_vendor_id_t id) const; + + // Returns the section name string for a given vendor tag id. + const char* getSectionName(uint32_t tag, metadata_vendor_id_t id) const; + + // Returns the tag name string for a given vendor tag id. + const char* getTagName(uint32_t tag, metadata_vendor_id_t id) const; + + // Returns the tag type for a given vendor tag id. + int getTagType(uint32_t tag, metadata_vendor_id_t id) const; + + /** + * Dump the currently configured vendor tags to a file descriptor. 
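As an illustration of the enumeration accessors declared above, a holder of a populated descriptor (for example one built with createDescriptorFromOps()) could walk every vendor tag roughly like this; the logging and the function name are illustrative only.

#include <cstdint>
#include <cstdio>
#include <vector>
#include "VendorTagDescriptor.h"

using ::android::hardware::camera::common::V1_0::helper::VendorTagDescriptor;

// Sketch only: print "section.tagName (id) type" for every vendor tag held by
// the descriptor, using getTagCount()/getTagArray() and the per-tag getters.
void dumpVendorTags(const VendorTagDescriptor& desc) {
    int count = desc.getTagCount();
    if (count <= 0) return;

    std::vector<uint32_t> tags(static_cast<size_t>(count));
    desc.getTagArray(tags.data());

    for (uint32_t tag : tags) {
        std::printf("%s.%s (0x%x) type=%d\n",
                    desc.getSectionName(tag), desc.getTagName(tag),
                    tag, desc.getTagType(tag));
    }
}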
+ */ + void dump(int fd, int verbosity, int indentation) const; + + protected: + std::unordered_map> mVendorMap; + struct vendor_tag_cache_ops mVendorCacheOps; +}; + +} /* namespace params */ +} /* namespace camera2 */ + +namespace camera { +namespace common { +namespace V1_0 { +namespace helper { + +class VendorTagDescriptorCache + : public ::android::hardware::camera2::params::VendorTagDescriptorCache, + public LightRefBase { + public: + /** + * Sets the global vendor tag descriptor cache to use for this process. + * Camera metadata operations that access vendor tags will use the + * vendor tag definitions set this way. + * + * Returns OK on success, or a negative error code. + */ + static status_t setAsGlobalVendorTagCache(const sp& cache); + + /** + * Returns the global vendor tag cache used by this process. + * This will contain NULL if no vendor tags are defined. + */ + static sp getGlobalVendorTagCache(); + + /** + * Clears the global vendor tag cache used by this process. + */ + static void clearGlobalVendorTagCache(); +}; + +} // namespace helper +} // namespace V1_0 +} // namespace common +} // namespace camera +} // namespace hardware +} // namespace android + +#endif /* CAMERA_COMMON_1_0_VENDORTAGDESCRIPTOR_H */ diff --git a/camera/common/1.0/types.hal b/camera/common/1.0/types.hal new file mode 100644 index 0000000..0393107 --- /dev/null +++ b/camera/common/1.0/types.hal @@ -0,0 +1,413 @@ +/* + * Copyright (C) 2016 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package android.hardware.camera.common@1.0; + +/** + * Common enumeration and structure definitions for all HALs under + * android.hardware.camera + */ + +/** + * Status codes for camera HAL method calls. + * + */ +enum Status : uint32_t { + /** + * Method call succeeded + */ + OK = 0, + + /** + * One of the arguments to the method call is invalid. For example, + * the camera ID is unknown. + */ + ILLEGAL_ARGUMENT = 1, + + /** + * The specified camera device is already in use + */ + CAMERA_IN_USE = 2, + + /** + * The HAL cannot support more simultaneous cameras in use. + */ + MAX_CAMERAS_IN_USE = 3, + + /** + * This HAL does not support this method. + */ + METHOD_NOT_SUPPORTED = 4, + + /** + * The specified camera device does not support this operation. + */ + OPERATION_NOT_SUPPORTED = 5, + + /** + * This camera device is no longer connected or otherwise available for use + */ + CAMERA_DISCONNECTED = 6, + + /** + * The HAL has encountered an internal error and cannot complete the + * request. + */ + INTERNAL_ERROR = 7 +}; + +/** + * Possible states that the flash unit on a closed camera device can be set to + * via the ICameraProvider::setTorchMode() method. + */ +enum TorchMode : uint32_t { + OFF = 0, // Turn off the flash + ON = 1 // Turn on the flash to torch mode +}; + +/** + * Camera metadata type - duplicated from VNDK libcamera_metadata for vendor tag + * definitions. 
+ */ +enum CameraMetadataType : uint32_t { + // Unsigned 8-bit integer (uint8_t) + BYTE = 0, + // Signed 32-bit integer (int32_t) + INT32 = 1, + // 32-bit float (float) + FLOAT = 2, + // Signed 64-bit integer (int64_t) + INT64 = 3, + // 64-bit float (double) + DOUBLE = 4, + // A 64-bit fraction (camera_metadata_rational_t) + RATIONAL = 5 +}; + +/** + * A single vendor-unique metadata tag. + * The full name of the tag is . + */ +struct VendorTag { + uint32_t tagId; // Tag identifier, must be >= TagBoundaryId::VENDOR + string tagName; // Name of tag, not including section name + CameraMetadataType tagType; +}; + +/** + * A set of related vendor tags. + */ +struct VendorTagSection { + string sectionName; // Section name; must be namespaced within vendor's name + vec tags; // List of tags in this section +}; + +enum TagBoundaryId : uint32_t { + AOSP = 0x0, // First valid tag id for android-defined tags + VENDOR = 0x80000000u // First valid tag id for vendor extension tags +}; + +/** + * CameraDeviceStatus + * + * The current status of a camera device, as sent by a camera provider HAL + * through the ICameraProviderCallback::cameraDeviceStatusChange() call. + * + * At startup, the camera service must assume all internal camera devices listed + * by ICameraProvider::getCameraIdList() are in the PRESENT state. The provider + * must invoke ICameraProviderCallback::cameraDeviceStatusChange to inform the + * service of any initially NOT_PRESENT internal devices, and of any PRESENT + * external camera devices, as soon as the camera service has called + * ICameraProvider::setCallback(). + * + * Allowed state transitions: + * PRESENT -> NOT_PRESENT + * NOT_PRESENT -> ENUMERATING + * NOT_PRESENT -> PRESENT + * ENUMERATING -> PRESENT + * ENUMERATING -> NOT_PRESENT + */ +enum CameraDeviceStatus : uint32_t { + /** + * The camera device is not currently connected, and trying to reference it + * in provider method calls must return status code ILLEGAL_ARGUMENT. + * + */ + NOT_PRESENT = 0, + + /** + * The camera device is connected, and opening it is possible, as long as + * sufficient resources are available. + * + * By default, the framework must assume all devices returned by + * ICameraProvider::getCameraIdList() are in this state. + */ + PRESENT = 1, + + /** + * The camera device is connected, but it is undergoing enumeration and + * startup, and so opening the device must return CAMERA_IN_USE. + * + * Attempting to call ICameraProvider::getCameraCharacteristics() must + * succeed, however. + */ + ENUMERATING = 2, + +}; + +/** + * TorchModeStatus: + * + * The current status of the torch mode on a given camera device, sent by a + * camera provider HAL via the ICameraProviderCallback::TorchModeStatusChange() + * call. + * + * The torch mode status of a camera device is applicable only when the camera + * device is present. The camera service must not call + * ICameraProvider::setTorchMode() to turn on torch mode of a camera device if + * the camera device is not present. At camera service startup time, the + * framework must assume torch modes are in the AVAILABLE_OFF state if the + * camera device is present and the camera characteristics entry + * android.flash.info.available is reported as true via + * ICameraProvider::getCameraCharacteristics() call. The same is assumed for + * external camera devices when they are initially connected. + * + * The camera service requires the following behaviors from the camera provider + * HAL when a camera device's status changes: + * + * 1. 
A previously-disconnected camera device becomes connected. After + * ICameraProviderCallback::CameraDeviceStatusChange() is invoked to inform + * the camera service that the camera device is present, the framework must + * assume the camera device's torch mode is in AVAILABLE_OFF state if it + * has a flash unit. The camera provider HAL does not need to invoke + * ICameraProviderCallback::TorchModeStatusChange() unless the flash unit + * is unavailable to use by ICameraProvider::setTorchMode(). + * + * 2. A previously-connected camera becomes disconnected. After + * ICameraProviderCallback::CameraDeviceStatusChange() is invoked to inform + * the camera service that the camera device is not present, the framework + * must not call ICameraProvider::setTorchMode() for the disconnected camera + * device until it is connected again. The camera provider HAL does not + * need to invoke ICameraProviderCallback::TorchModeStatusChange() + * separately to inform that the flash unit has become NOT_AVAILABLE. + * + * 3. openCameraDevice() or openCameraDeviceVersion() is called to open a + * camera device. The camera provider HAL must invoke + * ICameraProviderCallback::TorchModeStatusChange() for all flash units + * that have entered NOT_AVAILABLE state and can not be turned on by + * calling ICameraProvider::setTorchMode() due to this open() call. + * openCameraDevice() must not trigger AVAILABLE_OFF before NOT_AVAILABLE + * for all flash units that have become unavailable. + * + * 4. ICameraDevice.close() is called to close a camera device. The camera + * provider HAL must call ICameraProviderCallback::torchModeStatusChange() + * for all flash units that have now entered the AVAILABLE_OFF state and + * can be turned on by calling ICameraProvider::setTorchMode() again because + * of sufficient new camera resources being freed up by this close() call. + * + * Note that the camera service calling ICameraProvider::setTorchMode() + * successfully must trigger AVAILABLE_OFF or AVAILABLE_ON callback for the + * given camera device. Additionally it must trigger AVAILABLE_OFF callbacks + * for other previously-on torch modes if HAL cannot keep multiple devices' + * flashlights on simultaneously. + */ +enum TorchModeStatus : uint32_t { + /** + * The flash unit is no longer available and the torch mode can not be + * turned on by calling setTorchMode(). If the torch mode was AVAILABLE_ON, + * the flashlight must be turned off by the provider HAL before the provider + * HAL calls torchModeStatusChange(). + */ + NOT_AVAILABLE = 0, + + /** + * A torch mode has become off and is available to be turned on via + * ICameraProvider::setTorchMode(). This may happen in the following + * cases: + * 1. After the resources to turn on the torch mode have become available. + * 2. After ICameraProvider::setTorchMode() is called to turn off the torch + * mode. + * 3. After the camera service turned on the torch mode for some other + * camera device and the provider HAL had to turn off the torch modes + * of other camera device(s) that were previously on, due to lack of + * resources to keep them all on. + */ + AVAILABLE_OFF = 1, + + /** + * A torch mode has become on and is available to be turned off via + * ICameraProvider::setTorchMode(). This can happen only after + * ICameraProvider::setTorchMode() has been called to turn on the torch mode. 
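A sketch of rule 3 above from the provider side: when open() claims the flash hardware, every other flash unit that can no longer be driven through setTorchMode() is reported as NOT_AVAILABLE. The callback type, the device list, and the helper name are assumptions for illustration; the real provider implementations live under camera/provider/ in this patch.

#include <string>
#include <vector>
#include <android/hardware/camera/common/1.0/types.h>
#include <android/hardware/camera/provider/2.4/ICameraProviderCallback.h>

using ::android::sp;
using ::android::hardware::camera::common::V1_0::TorchModeStatus;
using ::android::hardware::camera::provider::V2_4::ICameraProviderCallback;

// Sketch only: called after a device has been opened and its flash resources
// are no longer available to setTorchMode().
void notifyTorchUnavailable(const sp<ICameraProviderCallback>& cb,
                            const std::vector<std::string>& devicesWithFlash,
                            const std::string& openedDeviceName) {
    for (const auto& name : devicesWithFlash) {
        if (name == openedDeviceName) continue;
        auto ret = cb->torchModeStatusChange(name, TorchModeStatus::NOT_AVAILABLE);
        if (!ret.isOk()) {
            // Transport error; a real provider would log and continue.
        }
    }
}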
+ */ + AVAILABLE_ON = 2, + +}; + +/** + * CameraResourceCost: + * + * Structure defining the abstract resource cost of opening a camera device, + * and any usage conflicts between multiple camera devices. + * + * Obtainable via ICameraDevice::getResourceCost() + */ +struct CameraResourceCost { + /** + * The total resource "cost" of using this camera, represented as an integer + * value in the range [0, 100] where 100 represents total usage of the + * shared resource that is the limiting bottleneck of the camera subsystem. + * This may be a very rough estimate, and is used as a hint to the camera + * service to determine when to disallow multiple applications from + * simultaneously opening different cameras advertised by the camera + * service. + * + * The camera service must be able to simultaneously open and use any + * combination of camera devices exposed by the HAL where the sum of + * the resource costs of these cameras is <= 100. For determining cost, + * each camera device must be assumed to be configured and operating at + * the maximally resource-consuming framerate and stream size settings + * available in the configuration settings exposed for that device through + * the camera metadata. + * + * The camera service may still attempt to simultaneously open combinations + * of camera devices with a total resource cost > 100. This may succeed or + * fail. If this succeeds, combinations of configurations that are not + * supported due to resource constraints from having multiple open devices + * must fail during the configure calls. If the total resource cost is <= + * 100, open and configure must never fail for any stream configuration + * settings or other device capabilities that would normally succeed for a + * device when it is the only open camera device. + * + * This field may be used to determine whether background applications are + * allowed to use this camera device while other applications are using + * other camera devices. Note: multiple applications must never be allowed + * by the camera service to simultaneously open the same camera device. + * + * Example use cases: + * + * Ex. 1: Camera Device 0 = Back Camera + * Camera Device 1 = Front Camera + * - Using both camera devices causes a large framerate slowdown due to + * limited ISP bandwidth. + * + * Configuration: + * + * Camera Device 0 - resourceCost = 51 + * conflicting_devices = empty + * Camera Device 1 - resourceCost = 51 + * conflicting_devices = empty + * + * Result: + * + * Since the sum of the resource costs is > 100, if a higher-priority + * application has either device open, no lower-priority applications must + * be allowed by the camera service to open either device. If a + * lower-priority application is using a device that a higher-priority + * application subsequently attempts to open, the lower-priority application must be + * forced to disconnect the device. + * + * If the highest-priority application chooses, it may still attempt to + * open both devices (since these devices are not listed as conflicting in + * the conflicting_devices fields), but usage of these devices may fail in + * the open or configure calls. + * + * Ex. 2: Camera Device 0 = Left Back Camera + * Camera Device 1 = Right Back Camera + * Camera Device 2 = Combined stereo camera using both right and left + * back camera sensors used by devices 0, and 1 + * Camera Device 3 = Front Camera + * - Due to hardware constraints, up to two cameras may be open at
once. The combined stereo camera may never be used at the same time + * as either of the two back camera devices (device 0, 1), and typically + * requires too much bandwidth to use at the same time as the front + * camera (device 3). + * + * Configuration: + * + * Camera Device 0 - resourceCost = 50 + * conflicting_devices = { 2 } + * Camera Device 1 - resourceCost = 50 + * conflicting_devices = { 2 } + * Camera Device 2 - resourceCost = 100 + * conflicting_devices = { 0, 1 } + * Camera Device 3 - resourceCost = 50 + * conflicting_devices = empty + * + * Result: + * + * Based on the conflicting_devices fields, the camera service guarantees + * that the following sets of open devices must never be allowed: { 1, 2 + * }, { 0, 2 }. + * + * Based on the resourceCost fields, if a high-priority foreground + * application is using camera device 0, a background application would be + * allowed to open camera device 1 or 3 (but would be forced to disconnect + * it again if the foreground application opened another device). + * + * The highest priority application may still attempt to simultaneously + * open devices 0, 2, and 3, but the HAL may fail in open or configure + * calls for this combination. + * + * Ex. 3: Camera Device 0 = Back Camera + * Camera Device 1 = Front Camera + * Camera Device 2 = Low-power Front Camera that uses the same sensor + * as device 1, but only exposes image stream + * resolutions that can be used in low-power mode + * - Using both front cameras (device 1, 2) at the same time is impossible + * due to a shared physical sensor. Using the back and "high-power" front + * camera (device 1) may be impossible for some stream configurations due + * to hardware limitations, but the "low-power" front camera option may + * always be used as it has special dedicated hardware. + * + * Configuration: + * + * Camera Device 0 - resourceCost = 100 + * conflicting_devices = empty + * Camera Device 1 - resourceCost = 100 + * conflicting_devices = { 2 } + * Camera Device 2 - resourceCost = 0 + * conflicting_devices = { 1 } + * Result: + * + * Based on the conflicting_devices fields, the camera service guarantees + * that the following sets of open devices must never be allowed: + * { 1, 2 }. + * + * Based on the resourceCost fields, only the highest priority application + * may attempt to open both device 0 and 1 at the same time. If a + * higher-priority application is not using device 1 or 2, a low-priority + * background application may open device 2 (but must be forced to + * disconnect it if a higher-priority application subsequently opens + * device 1 or 2). + */ + uint32_t resourceCost; + + /** + * An array of camera device IDs indicating other devices that cannot be + * simultaneously opened while this camera device is in use. + * + * This field is intended to be used to indicate that this camera device + * is a composite of several other camera devices, or otherwise has + * hardware dependencies that prohibit simultaneous usage. If there are no + * dependencies, an empty list may be returned to indicate this. + * + * The camera service must never simultaneously open any of the devices + * in this list while this camera device is open.
+ * + */ + vec<string> conflictingDevices; + +}; diff --git a/camera/common/README.md b/camera/common/README.md new file mode 100644 index 0000000..c177ad8 --- /dev/null +++ b/camera/common/README.md @@ -0,0 +1,21 @@ +## Camera common HAL definitions ## +--- + +## Overview: ## + +The camera.common namespace is used by the Android camera HALs for common +enumeration and structure definitions. + +This includes standard status codes returned by most camera HAL methods. + +More complete information about the Android camera HAL and subsystem can be found at +[source.android.com](http://source.android.com/devices/camera/index.html). + +## Version history: ## + +## types.hal: ## + +### @1.0: + +Common enum and struct definitions for all camera HAL interfaces. Does not +define any interfaces of its own. diff --git a/camera/device/1.0/ICameraDevice.hal b/camera/device/1.0/ICameraDevice.hal new file mode 100644 index 0000000..d4aa8cc --- /dev/null +++ b/camera/device/1.0/ICameraDevice.hal @@ -0,0 +1,419 @@ +/* + * Copyright (C) 2016 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package android.hardware.camera.device@1.0; + +import android.hardware.camera.common@1.0::types; + import ICameraDeviceCallback; +import ICameraDevicePreviewCallback; + +/** + * Camera device HAL, legacy version + * + * DEPRECATED. New devices are strongly recommended to use Camera HAL v3.2 or + * newer. + * + * Supports the android.hardware.Camera API, and the android.hardware.camera2 + * API in LEGACY mode only. + * + * Will be removed in the Android P release. + */ +interface ICameraDevice { + + /** + * Get camera device resource cost information. + * + * This method may be called at any time, including before open() + * + * @return status Status code for the operation, one of: + * OK: + * On success. + * INTERNAL_ERROR: + * An unexpected internal camera HAL error occurred, and the + * resource cost is not available. + * CAMERA_DISCONNECTED: + * An external camera device has been disconnected, and is no longer + * available. This camera device interface is now stale, and a new + * instance must be acquired if the device is reconnected. All + * subsequent calls on this interface must return + * CAMERA_DISCONNECTED. + * @return resourceCost + * The resources required to open this camera device, or unspecified + * values if status is not OK. + */ + getResourceCost() generates (Status status, CameraResourceCost resourceCost); + + /** + * Get basic camera information. + * + * This method may be called at any time, including before open() + * + * @return status Status code for the operation, one of: + * OK: + * On success. + * INTERNAL_ERROR: + * An unexpected internal camera HAL error occurred, and the + * camera information is not available. + * CAMERA_DISCONNECTED: + * An external camera device has been disconnected, and is no longer + * available. This camera device interface is now stale, and a new + * instance must be acquired if the device is reconnected.
All + * subsequent calls on this interface must return + * CAMERA_DISCONNECTED. + * @return info Basic information about this camera device, or unspecified + * values if status is not OK. + */ + getCameraInfo() generates (Status status, CameraInfo info); + + /** + * setTorchMode: + * + * Turn on or off the torch mode of the flash unit associated with a given + * camera ID. If the operation is successful, HAL must notify the framework + * torch state by invoking + * ICameraProviderCallback::torchModeStatusChange() with the new state. + * + * The camera device has a higher priority accessing the flash unit. When + * there are any resource conflicts, such as when open() is called to fully + * activate a camera device, the provider must notify the framework through + * ICameraProviderCallback::torchModeStatusChange() that the torch mode has + * been turned off and the torch mode state has become + * TORCH_MODE_STATUS_NOT_AVAILABLE. When resources to turn on torch mode + * become available again, the provider must notify the framework through + * ICameraProviderCallback::torchModeStatusChange() that the torch mode + * state has become TORCH_MODE_STATUS_AVAILABLE_OFF for set_torch_mode() to + * be called. + * + * When the framework calls setTorchMode() to turn on the torch mode of a + * flash unit, if HAL cannot keep multiple torch modes on simultaneously, + * HAL must turn off the torch mode that was turned on by + * a previous setTorchMode() call and notify the framework that the torch + * mode state of that flash unit has become TORCH_MODE_STATUS_AVAILABLE_OFF. + * + * @param torchMode The new mode to set the device flash unit to. + * + * @return status Status code for the operation, one of: + * OK: + * On a successful change to the torch state. + * INTERNAL_ERROR: + * The flash unit cannot be operated due to an unexpected internal + * error. + * ILLEGAL_ARGUMENT: + * The camera ID is unknown. + * CAMERA_IN_USE: + * This camera device has been opened, so the torch cannot be + * controlled until it is closed. + * MAX_CAMERAS_IN_USE: + * Due to other camera devices being open, or due to other + * resource constraints, the torch cannot be controlled currently. + * METHOD_NOT_SUPPORTED: + * This provider does not support direct operation of flashlight + * torch mode. The framework must open the camera device and turn + * the torch on through the device interface. + * OPERATION_NOT_SUPPORTED: + * This camera device does not have a flash unit. This must + * be returned if and only if parameter key flash-mode-values is not present. + * CAMERA_DISCONNECTED: + * An external camera device has been disconnected, and is no longer + * available. This camera device interface is now stale, and a new + * instance must be acquired if the device is reconnected. All + * subsequent calls on this interface must return + * CAMERA_DISCONNECTED. + * + */ + setTorchMode(TorchMode mode) generates (Status status); + + /** + * Dump state of the camera hardware. + * + * This must be callable at any time, whether the device is open or not. + * + * @param fd A native handle with one valid file descriptor. The descriptor + * must be able to be used with dprintf() or equivalent to dump the + * state of this camera device into the camera service dumpsys output. + * + * @return status The status code for this operation. + */ + dumpState(handle fd) generates (Status status); + + /** + * Open the camera device for active use. 
+ * + * All methods besides getResourceCost(), getCameraInfo(), setTorchMode(), + * and dump() must not be called unless open() has been called successfully, + * and close() has not yet been called. + * + * @param callback Interface to invoke by the HAL for device callbacks. + * @return status Status code for the operation, one of: + * OK: + * On a successful open of the camera device. + * INTERNAL_ERROR: + * The camera device cannot be opened due to an internal + * error. + * ILLEGAL_ARGUMENT: + * The callback handle is invalid (for example, it is null). + * CAMERA_IN_USE: + * This camera device is already open. + * MAX_CAMERAS_IN_USE: + * The maximum number of camera devices that can be + * opened concurrently has already been reached. + * CAMERA_DISCONNECTED: + * This external camera device has been disconnected, and is no + * longer available. This interface is now stale, and a new instance + * must be acquired if the device is reconnected. All subsequent + * calls on this interface must return CAMERA_DISCONNECTED. + */ + open(ICameraDeviceCallback callback) generates (Status status); + + + /***** + * All methods below this point must only be called between a successful + * open() call and a close() call. + */ + + /** Set the callback interface through which preview frames are sent */ + setPreviewWindow(ICameraDevicePreviewCallback window) + generates (Status status); + + /** + * Enable a message, or set of messages. + * + * @param msgType The bitfield of messages to enable. + */ + enableMsgType(FrameCallbackFlags msgType); + + /** + * Disable a message, or a set of messages. + * + * Once it has received a call to disableMsgType(CAMERA_MSG_VIDEO_FRAME), the camera + * HAL must not rely on its client to call releaseRecordingFrame() to + * release video recording frames sent out by the camera HAL before and + * after the disableMsgType(CAMERA_MSG_VIDEO_FRAME) call. Camera HAL + * clients must not modify/access any video recording frame after calling + * disableMsgType(CAMERA_MSG_VIDEO_FRAME). + * + * @param msgType The bitfield of messages to disable. + */ + disableMsgType(FrameCallbackFlags msgType); + + /** + * Query whether a message, or a set of messages, is enabled. Note that + * this operates as an AND: if any of the messages queried are off, this + * must return false. + * + * @param msgType The bitfield of messages to query. + * @return enabled Whether all the specified flags are enabled. + */ + msgTypeEnabled(FrameCallbackFlags msgType) generates (bool enabled); + + /** + * Start preview mode. + * + * @return status The status code for this operation. + */ + startPreview() generates (Status status); + + /** + * Stop a previously started preview. + */ + stopPreview(); + + /** + * Returns true if preview is enabled. + * + * @return enabled Whether preview is currently enabled. + */ + previewEnabled() generates (bool enabled); + + /** + * Request the camera HAL to store meta data or real YUV data in the video + * buffers sent out via CAMERA_MSG_VIDEO_FRAME for a recording session. If + * it is not called, the default camera HAL behavior is to store real YUV + * data in the video buffers. + * + * This method must be called before startRecording() in order to be + * effective. + * + * If meta data is stored in the video buffers, it is up to the receiver of + * the video buffers to interpret the contents and to find the actual frame + * data with the help of the meta data in the buffer. How this is done is + * outside of the scope of this method.
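The AND semantics spelled out for msgTypeEnabled() reduce to a mask comparison; a self-contained sketch follows, where the struct and member names are illustrative rather than the actual HAL implementation.

#include <cstdint>

// Sketch only: msgTypeEnabled() is true only when every queried flag is set.
struct MessageState {
    uint32_t enabled = 0;

    void enableMsgType(uint32_t msgType)  { enabled |= msgType; }
    void disableMsgType(uint32_t msgType) { enabled &= ~msgType; }
    bool msgTypeEnabled(uint32_t msgType) const {
        return (enabled & msgType) == msgType;
    }
};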
+ * + * Some camera HALs may not support storing meta data in the video buffers, + * but all camera HALs must support storing real YUV data in the video + * buffers. If the camera HAL does not support storing the meta data in the + * video buffers when it is requested to do do, INVALID_OPERATION must be + * returned. It is very useful for the camera HAL to pass meta data rather + * than the actual frame data directly to the video encoder, since the + * amount of the uncompressed frame data can be very large if video size is + * large. + * + * @param enable Set to true to instruct the camera HAL to store meta data + * in the video buffers; false to instruct the camera HAL to store real + * YUV data in the video buffers. + * + * @return status OK on success. + */ + storeMetaDataInBuffers(bool enable) generates (Status status); + + /** + * Start record mode. + * + * When a record image is available, a CAMERA_MSG_VIDEO_FRAME message is + * sent with the corresponding frame. Every record frame must be released by + * a camera HAL client via releaseRecordingFrame() before the client calls + * disableMsgType(CAMERA_MSG_VIDEO_FRAME). After the client calls + * disableMsgType(CAMERA_MSG_VIDEO_FRAME), it is the camera HAL's + * responsibility to manage the life-cycle of the video recording frames, + * and the client must not modify/access any video recording frames. + * + * @return status The status code for the operation. + */ + startRecording() generates (Status status); + + /** + * Stop a previously started recording. + */ + stopRecording(); + + /** + * Returns true if recording is enabled. + * + * @return enabled True if recording is currently active. + */ + recordingEnabled() generates (bool enabled); + + /** + * Release a record frame previously returned by CAMERA_MSG_VIDEO_FRAME in + * dataCallbackTimestamp. + * + * It is camera HAL client's responsibility to release video recording + * frames sent out by the camera HAL before the camera HAL receives a call + * to disableMsgType(CAMERA_MSG_VIDEO_FRAME). After it receives the call to + * disableMsgType(CAMERA_MSG_VIDEO_FRAME), it is the camera HAL's + * responsibility to manage the life-cycle of the video recording frames. + * + * @param memId The memory buffer to release a recording frame from. + * @param bufferIndex The specific buffer index to return to the HAL. + */ + releaseRecordingFrame(MemoryId memId, uint32_t bufferIndex); + + /** + * Release a record frame previously returned by CAMERA_MSG_VIDEO_FRAME in + * handleCallbackTimestamp. + * + * It is camera HAL client's responsibility to release video recording + * frames sent out by the camera HAL before the camera HAL receives a call + * to disableMsgType(CAMERA_MSG_VIDEO_FRAME). After it receives the call to + * disableMsgType(CAMERA_MSG_VIDEO_FRAME), it is the camera HAL's + * responsibility to manage the life-cycle of the video recording frames. + * + * @param memId The memory buffer to release a recording frame from. + * @param bufferIndex The specific buffer index to return to the HAL. + * @param frame The handle for a released video frame + */ + releaseRecordingFrameHandle(MemoryId memId, uint32_t bufferIndex, handle frame); + + /** + * Release a batch of record frames previously returned by CAMERA_MSG_VIDEO_FRAME + * in handleCallbackTimestampBatch. + * + * It is camera HAL client's responsibility to release video recording + * frames sent out by the camera HAL before the camera HAL receives a call + * to disableMsgType(CAMERA_MSG_VIDEO_FRAME). 
After it receives the call to + * disableMsgType(CAMERA_MSG_VIDEO_FRAME), it is the camera HAL's + * responsibility to manage the life-cycle of the video recording frames. + * + * @param batch A batch of recording frames to be released by camera HAL. + */ + releaseRecordingFrameHandleBatch(vec batch); + + /** + * Start auto focus. + * + * The notification callback routine is called with + * CAMERA_MSG_FOCUS once when focusing is complete. autoFocus() can be + * called again after that if another auto focus is needed. + * + * @return status The status code for this operation. + */ + autoFocus() generates (Status status); + + /** + * Cancels auto-focus function. + * + * If the auto-focus is still in progress, this function must cancel + * it. Whether the auto-focus is in progress or not, this function must + * return the focus position to the default. If the camera does not support + * auto-focus, this is a no-op. + * + * @return status The status code for this operation. + */ + cancelAutoFocus() generates (Status status); + + /** + * Take a picture. + * + * @return status The status code for this operation. + */ + takePicture() generates (Status status); + + /** + * Cancel a picture that was started with takePicture. Calling this method + * when no picture is being taken is a no-op. + * + * @return status The status code for this operation. + */ + cancelPicture() generates (Status status); + + /** + * Set the camera parameters. + * + * @param params The parameter string, consisting of + * '=; ...;='. + * @return status The status code for this operation: + * OK: Parameter update was successful + * ILLEGAL_ARGUMENT: At least one parameter was invalid or not supported + * + */ + setParameters(string params) generates (Status status); + + /** + * Retrieve the camera parameters. + */ + getParameters() generates (string parms); + + /** + * Send command to camera driver. + * The meaning of the arguments is defined by the value of cmd, documented + * in the CommandType definition. + * + * @param cmd The command to invoke. + * @param arg1 The first argument for the command, if needed. + * @param arg2 The second argument for the command, if needed. + * + * @return status The status code for this operation. + */ + sendCommand(CommandType cmd, int32_t arg1, int32_t arg2) + generates (Status status); + + /** + * Release the hardware resources owned by this object, shutting down the + * camera device. + */ + close(); + +}; diff --git a/camera/device/1.0/ICameraDeviceCallback.hal b/camera/device/1.0/ICameraDeviceCallback.hal new file mode 100644 index 0000000..4e2e719 --- /dev/null +++ b/camera/device/1.0/ICameraDeviceCallback.hal @@ -0,0 +1,113 @@ +/* + * Copyright (C) 2016 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package android.hardware.camera.device@1.0; + +interface ICameraDeviceCallback { + + /** + * Notify the camera service of a particular event occurring + * The meaning of each parameter is defined by the value of msgType, and + * documented in the definition of NotifyCallbackMsg. + * + * @param msgType The type of the event. + * @param ext1 The first parameter for the event, if needed. + * @param ext2 The second parameter for the event, if needed. + */ + notifyCallback(NotifyCallbackMsg msgType, int32_t ext1, int32_t ext2); + + /** + * Define a memory buffer from the provided handle and size, and return a + * unique identifier for the HAL to use to reference it with. + * + * @param descriptor A native handle that must have exactly one file + * descriptor in it; the file descriptor must be memory mappable to + * bufferSize * bufferCount bytes. + * @param bufferSize The number of bytes a single buffer consists of. + * @param bufferCount The number of contiguous buffers that the descriptor + * contains. + * + * @return memId A positive integer identifier for this memory buffer, for + * use with data callbacks and unregistering memory. 0 must be returned + * in case of error, such as if the descriptor does not contain exactly + * one FD. + */ + registerMemory(handle descriptor, uint32_t bufferSize, uint32_t bufferCount) + generates (MemoryId memId); + + /** + * Unregister a previously registered memory buffer + */ + unregisterMemory(MemoryId memId); + + /** + * Send a buffer of image data to the camera service + * + * @param msgType The kind of image buffer data this call represents. + * @param data A memory handle to the buffer containing the data. + * @param bufferIndex The offset into the memory handle where the buffer + * starts. + * + */ + dataCallback(DataCallbackMsg msgType, MemoryId data, uint32_t bufferIndex, + CameraFrameMetadata metadata); + + /** + * Send a buffer of image data to the camera service, with a timestamp + * + * @param msgType The kind of image buffer data this call represents. + * @param data A memory handle to the buffer containing the data. + * @param bufferIndex The offset into the memory handle where the buffer + * starts. + * @param timestamp The time this buffer was captured by the camera, in + * nanoseconds. + * + */ + dataCallbackTimestamp(DataCallbackMsg msgType, MemoryId data, uint32_t bufferIndex, + int64_t timestamp); + + /** + * Send a buffer of image data to the camera service, with a timestamp + * + * @param msgType The kind of image buffer data this call represents. + * @param handle The handle of image buffer data this call represents. + * @param data A memory handle to the buffer containing the data. + * @param bufferIndex The offset into the memory handle where the buffer + * starts. + * @param timestamp The time this buffer was captured by the camera, in + * nanoseconds. + * + */ + handleCallbackTimestamp(DataCallbackMsg msgType, handle frameData, MemoryId data, + uint32_t bufferIndex, int64_t timestamp); + + /** + * Send a batch of image data buffer to the camera service, with timestamps + * + * This callback can be used to send multiple frames to camera framework in one callback, which + * reduce number of callbacks in performance intensive use cases, such as high speed video + * recording. The HAL must not mix use of this method with handleCallbackTimestamp in one + * recording session (between startRecording and stopRecording) + * + * @param msgType The kind of image buffer data this call represents. + * @param batch a vector messages. 
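registerMemory() above carries two checkable requirements: the descriptor must hold exactly one file descriptor, and that descriptor must be mappable to bufferSize * bufferCount bytes, with 0 reserved as the error return. A minimal service-side sketch of that validation follows; SimpleHandle is a simplified stand-in for native_handle_t, the function name is hypothetical, and the id-to-mapping table is left to the caller.

    // Hypothetical service-side validation for registerMemory(): the descriptor must
    // carry exactly one file descriptor, mappable to bufferSize * bufferCount bytes;
    // 0 is the reserved error value for the returned MemoryId.
    #include <cstddef>
    #include <cstdint>
    #include <sys/mman.h>

    struct SimpleHandle {      // simplified stand-in for native_handle_t
        int numFds;
        int numInts;
        int data[1];           // data[0] holds the FD when numFds == 1
    };

    // On success returns a non-zero id and stores the mapping in *mappedOut; the
    // caller is expected to keep an id -> (address, size) table for
    // unregisterMemory() and for the data callbacks that reference the id.
    uint32_t registerMemorySketch(const SimpleHandle* descriptor,
                                  uint32_t bufferSize, uint32_t bufferCount,
                                  void** mappedOut) {
        static uint32_t nextId = 1;
        if (descriptor == nullptr || descriptor->numFds != 1) return 0;

        const size_t totalSize = static_cast<size_t>(bufferSize) * bufferCount;
        void* addr = mmap(nullptr, totalSize, PROT_READ | PROT_WRITE, MAP_SHARED,
                          descriptor->data[0], 0);
        if (addr == MAP_FAILED) return 0;

        *mappedOut = addr;
        return nextId++;
    }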
Each message contains a image buffer and a timestamp. The + * messages must be ordered in time from lower index to higher index, so that timestamp of + * i-th message is always smaller than i+1-th message. + * + */ + handleCallbackTimestampBatch(DataCallbackMsg msgType, vec batch); + +}; diff --git a/camera/device/1.0/ICameraDevicePreviewCallback.hal b/camera/device/1.0/ICameraDevicePreviewCallback.hal new file mode 100644 index 0000000..5421981 --- /dev/null +++ b/camera/device/1.0/ICameraDevicePreviewCallback.hal @@ -0,0 +1,120 @@ +/* + * Copyright (C) 2016 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package android.hardware.camera.device@1.0; + +import android.hardware.camera.common@1.0::types; +import android.hardware.graphics.common@1.0::types; + +/** + * Camera device HAL@1.0 preview stream operation interface. + */ +interface ICameraDevicePreviewCallback { + + /** + * Acquire a buffer to write a preview buffer into. + * + * @return status The status code for this operation. If not OK, then + * buffer and stride must not be used. + * @return bufferId A unique ID for the returned buffer. + * @return buffer A handle to the buffer to write into. Must be non-null if the bufferId has not + * been seen by HAL before. Must be null if the bufferId is seen before. HAL must keep track + * of the bufferId to actual buffer handle mapping. + * @return stride The stride between two rows of pixels in this buffer. + */ + dequeueBuffer() generates (Status status, uint64_t bufferId, handle buffer, uint32_t stride); + + /** + * Send a filled preview buffer to its consumer. + * + * @param bufferId The bufferId of the preview buffer + * @return status The status code for this operation. + */ + enqueueBuffer(uint64_t bufferId) generates (Status status); + + /** + * Return a preview buffer unfilled. This buffer must not be sent on to the + * preview consumer as a valid buffer, but may be reused as if it were + * empty. + * + * @param bufferId The bufferId of the preview buffer + * @return status The status code for this operation. + */ + cancelBuffer(uint64_t bufferId) generates (Status status); + + /** + * Set the number of preview buffers needed by the HAL. + * + * @param count The maximum number of preview buffers to allocate. + * @return status The status code for this operation. + */ + setBufferCount(uint32_t count) generates (Status status); + + /** + * Set the dimensions and format of future preview buffers. + * + * The next buffer that is dequeued must match the requested size and + * format. + * + * @return Status The status code for this operation. + */ + setBuffersGeometry(uint32_t w, uint32_t h, + android.hardware.graphics.common@1.0::PixelFormat format) + generates (Status status); + + /** + * Set the valid region of image data for the next buffer(s) to be enqueued. + * + * @return Status The status code for this operation. 
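dequeueBuffer() above defines a caching contract: the consumer returns a non-null buffer handle only the first time a given bufferId is seen, so the HAL side must remember the bufferId-to-handle mapping itself and drop it when buffers are re-allocated. A standalone sketch of that cache, with BufferHandle as a stand-in for buffer_handle_t and no real gralloc import:

    // Sketch of the dequeueBuffer() caching contract; BufferHandle stands in for
    // buffer_handle_t and no gralloc import is performed here.
    #include <cstdint>
    #include <unordered_map>

    using BufferHandle = const void*;

    class PreviewBufferCache {
      public:
        // 'handleOrNull' is non-null only the first time 'bufferId' is seen.
        BufferHandle onDequeue(uint64_t bufferId, BufferHandle handleOrNull) {
            auto it = mCache.find(bufferId);
            if (it == mCache.end()) {
                // New id: the consumer must have sent the real handle; remember it.
                mCache.emplace(bufferId, handleOrNull);
                return handleOrNull;
            }
            // Known id: the consumer sent null, so reuse the cached handle.
            return it->second;
        }

        // Call when buffers are expected to be re-allocated, e.g. after
        // setBufferCount() or setBuffersGeometry().
        void clear() { mCache.clear(); }

      private:
        std::unordered_map<uint64_t, BufferHandle> mCache;
    };

In a real implementation the cache would also import and later free the gralloc handles; the sketch only shows the id bookkeeping.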
+ */ + setCrop(int32_t left, int32_t top, int32_t right, int32_t bottom) + generates (Status status); + + /** + * Set the producer usage flags for the next buffer(s) to be enqueued. + * + * @return Status The status code for this operation. + */ + setUsage(BufferUsage usage) generates (Status status); + + /** + * Set the expected buffering mode for the preview output. + */ + setSwapInterval(int32_t interval) generates (Status status); + + /** + * Get the minimum number of buffers the preview consumer endpoint needs + * to hold for correct operation. + * + * @return Status The status code for this operation. + * @return count The number of buffers the consumer has requested. + */ + getMinUndequeuedBufferCount() generates (Status status, uint32_t count); + + /** + * Set the timestamp for the next buffer to enqueue + * + * Timestamps are measured in nanoseconds, and must be comparable + * and monotonically increasing between two frames in the same + * preview stream. They do not need to be comparable between + * consecutive or parallel preview streams, cameras, or app runs. + * + * @param timestamp The timestamp to set for future buffers. + * @return Status The status code for this operation. + */ + setTimestamp(int64_t timestamp) generates (Status status); + +}; diff --git a/camera/device/1.0/default/Android.bp b/camera/device/1.0/default/Android.bp new file mode 100644 index 0000000..63d154b --- /dev/null +++ b/camera/device/1.0/default/Android.bp @@ -0,0 +1,34 @@ +cc_library_shared { + name: "vendor.camera.device@1.0-impl", + defaults: ["hidl_defaults"], + proprietary: true, + srcs: [ + "CameraDevice.cpp", + ], + shared_libs: [ + "libhidlbase", + "libhidlmemory", + "libutils", + "android.hardware.camera.device@1.0", + "android.hardware.camera.common@1.0", + "android.hardware.graphics.allocator@2.0", + "android.hardware.graphics.mapper@2.0", + "android.hardware.graphics.mapper@3.0", + "android.hardware.graphics.mapper@4.0", + "android.hardware.graphics.common@1.0", + "android.hidl.allocator@1.0", + "android.hidl.memory@1.0", + "libcutils", + "liblog", + "libgralloctypes", + "libhardware", + "libcamera_metadata", + ], + static_libs: [ + "android.vendor.hardware.camera.common@1.0-helper", + ], + header_libs: [ + "media_plugin_headers", + ], + export_include_dirs: ["."], +} diff --git a/camera/device/1.0/default/CameraDevice.cpp b/camera/device/1.0/default/CameraDevice.cpp new file mode 100644 index 0000000..80733d1 --- /dev/null +++ b/camera/device/1.0/default/CameraDevice.cpp @@ -0,0 +1,1027 @@ +/* + * Copyright (C) 2017 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#define LOG_TAG "CamDev@1.0-impl" + +#include + +#include +#include +#include +#include +#include + +#include // For VideoNativeHandleMetadata +#include "CameraDevice_1_0.h" + +namespace android { +namespace hardware { +namespace camera { +namespace device { +namespace V1_0 { +namespace implementation { + +using ::android::hardware::graphics::common::V1_0::BufferUsage; +using ::android::hardware::graphics::common::V1_0::PixelFormat; + +HandleImporter CameraDevice::sHandleImporter; + +Status CameraDevice::getHidlStatus(const int& status) { + switch (status) { + case 0: return Status::OK; + case -ENOSYS: return Status::OPERATION_NOT_SUPPORTED; + case -EBUSY : return Status::CAMERA_IN_USE; + case -EUSERS: return Status::MAX_CAMERAS_IN_USE; + case -ENODEV: return Status::INTERNAL_ERROR; + case -EINVAL: return Status::ILLEGAL_ARGUMENT; + default: + ALOGE("%s: unknown HAL status code %d", __FUNCTION__, status); + return Status::INTERNAL_ERROR; + } +} + +status_t CameraDevice::getStatusT(const Status& s) { + switch(s) { + case Status::OK: + return OK; + case Status::ILLEGAL_ARGUMENT: + return BAD_VALUE; + case Status::CAMERA_IN_USE: + return -EBUSY; + case Status::MAX_CAMERAS_IN_USE: + return -EUSERS; + case Status::METHOD_NOT_SUPPORTED: + return UNKNOWN_TRANSACTION; + case Status::OPERATION_NOT_SUPPORTED: + return INVALID_OPERATION; + case Status::CAMERA_DISCONNECTED: + return DEAD_OBJECT; + case Status::INTERNAL_ERROR: + return INVALID_OPERATION; + } + ALOGW("Unexpected HAL status code %d", s); + return INVALID_OPERATION; +} + +Status CameraDevice::initStatus() const { + Mutex::Autolock _l(mLock); + Status status = Status::OK; + if (mInitFail) { + status = Status::INTERNAL_ERROR; + } else if (mDisconnected) { + status = Status::CAMERA_DISCONNECTED; + } + return status; +} + +CameraDevice::CameraDevice( + sp module, const std::string& cameraId, + const SortedVector>& cameraDeviceNames) : + mModule(module), + mCameraId(cameraId), + mDisconnected(false), + mCameraDeviceNames(cameraDeviceNames) { + mCameraIdInt = atoi(mCameraId.c_str()); + // Should not reach here as provider also validate ID + if (mCameraIdInt < 0 || mCameraIdInt >= module->getNumberOfCameras()) { + ALOGE("%s: Invalid camera id: %s", __FUNCTION__, mCameraId.c_str()); + mInitFail = true; + } + + mDeviceVersion = mModule->getDeviceVersion(mCameraIdInt); + if (mDeviceVersion != CAMERA_DEVICE_API_VERSION_1_0 && !mModule->isOpenLegacyDefined()) { + ALOGI("%s: Camera id %s does not support HAL1.0", + __FUNCTION__, mCameraId.c_str()); + mInitFail = true; + } + + mAshmemAllocator = IAllocator::getService("ashmem"); + if (mAshmemAllocator == nullptr) { + ALOGI("%s: cannot get ashmemAllocator", __FUNCTION__); + mInitFail = true; + } +} + +CameraDevice::~CameraDevice() { + Mutex::Autolock _l(mLock); + if (mDevice != nullptr) { + ALOGW("%s: camera %s is deleted while open", __FUNCTION__, mCameraId.c_str()); + closeLocked(); + } + mHalPreviewWindow.cleanUpCirculatingBuffers(); +} + + +void CameraDevice::setConnectionStatus(bool connected) { + Mutex::Autolock _l(mLock); + mDisconnected = !connected; + if (mDevice == nullptr) { + return; + } + if (!connected) { + ALOGW("%s: camera %s is disconneted. 
Closing", __FUNCTION__, mCameraId.c_str()); + closeLocked(); + } + return; +} + +void CameraDevice::CameraPreviewWindow::cleanUpCirculatingBuffers() { + Mutex::Autolock _l(mLock); + for (auto pair : mCirculatingBuffers) { + sHandleImporter.freeBuffer(pair.second); + } + mCirculatingBuffers.clear(); + mBufferIdMap.clear(); +} + +int CameraDevice::sDequeueBuffer(struct preview_stream_ops* w, + buffer_handle_t** buffer, int *stride) { + CameraPreviewWindow* object = static_cast(w); + if (object->mPreviewCallback == nullptr) { + ALOGE("%s: camera HAL calling preview ops while there is no preview window!", __FUNCTION__); + return INVALID_OPERATION; + } + + if (buffer == nullptr || stride == nullptr) { + ALOGE("%s: buffer (%p) and stride (%p) must not be null!", __FUNCTION__, buffer, stride); + return BAD_VALUE; + } + + Status s; + object->mPreviewCallback->dequeueBuffer( + [&](auto status, uint64_t bufferId, const auto& buf, uint32_t strd) { + s = status; + if (s == Status::OK) { + Mutex::Autolock _l(object->mLock); + if (object->mCirculatingBuffers.count(bufferId) == 0) { + buffer_handle_t importedBuf = buf.getNativeHandle(); + sHandleImporter.importBuffer(importedBuf); + if (importedBuf == nullptr) { + ALOGE("%s: preview buffer import failed!", __FUNCTION__); + s = Status::INTERNAL_ERROR; + return; + } else { + object->mCirculatingBuffers[bufferId] = importedBuf; + object->mBufferIdMap[&(object->mCirculatingBuffers[bufferId])] = bufferId; + } + } + *buffer = &(object->mCirculatingBuffers[bufferId]); + *stride = strd; + } + }); + return getStatusT(s); +} + +int CameraDevice::sLockBuffer(struct preview_stream_ops*, buffer_handle_t*) { + return 0; +} + +int CameraDevice::sEnqueueBuffer(struct preview_stream_ops* w, buffer_handle_t* buffer) { + CameraPreviewWindow* object = static_cast(w); + if (object->mPreviewCallback == nullptr) { + ALOGE("%s: camera HAL calling preview ops while there is no preview window!", __FUNCTION__); + return INVALID_OPERATION; + } + uint64_t bufferId = object->mBufferIdMap.at(buffer); + return getStatusT(object->mPreviewCallback->enqueueBuffer(bufferId)); +} + +int CameraDevice::sCancelBuffer(struct preview_stream_ops* w, buffer_handle_t* buffer) { + CameraPreviewWindow* object = static_cast(w); + if (object->mPreviewCallback == nullptr) { + ALOGE("%s: camera HAL calling preview ops while there is no preview window!", __FUNCTION__); + return INVALID_OPERATION; + } + uint64_t bufferId = object->mBufferIdMap.at(buffer); + return getStatusT(object->mPreviewCallback->cancelBuffer(bufferId)); +} + +int CameraDevice::sSetBufferCount(struct preview_stream_ops* w, int count) { + CameraPreviewWindow* object = static_cast(w); + if (object->mPreviewCallback == nullptr) { + ALOGE("%s: camera HAL calling preview ops while there is no preview window!", __FUNCTION__); + return INVALID_OPERATION; + } + + object->cleanUpCirculatingBuffers(); + return getStatusT(object->mPreviewCallback->setBufferCount(count)); +} + +int CameraDevice::sSetBuffersGeometry(struct preview_stream_ops* w, + int width, int height, int format) { + CameraPreviewWindow* object = static_cast(w); + if (object->mPreviewCallback == nullptr) { + ALOGE("%s: camera HAL calling preview ops while there is no preview window!", __FUNCTION__); + return INVALID_OPERATION; + } + + object->cleanUpCirculatingBuffers(); + return getStatusT( + object->mPreviewCallback->setBuffersGeometry(width, height, (PixelFormat) format)); +} + +int CameraDevice::sSetCrop(struct preview_stream_ops *w, + int left, int top, int right, int 
bottom) { + CameraPreviewWindow* object = static_cast(w); + if (object->mPreviewCallback == nullptr) { + ALOGE("%s: camera HAL calling preview ops while there is no preview window!", __FUNCTION__); + return INVALID_OPERATION; + } + + return getStatusT(object->mPreviewCallback->setCrop(left, top, right, bottom)); +} + +int CameraDevice::sSetTimestamp(struct preview_stream_ops *w, int64_t timestamp) { + CameraPreviewWindow* object = static_cast(w); + if (object->mPreviewCallback == nullptr) { + ALOGE("%s: camera HAL calling preview ops while there is no preview window!", __FUNCTION__); + return INVALID_OPERATION; + } + + return getStatusT(object->mPreviewCallback->setTimestamp(timestamp)); +} + +int CameraDevice::sSetUsage(struct preview_stream_ops* w, int usage) { + CameraPreviewWindow* object = static_cast(w); + if (object->mPreviewCallback == nullptr) { + ALOGE("%s: camera HAL calling preview ops while there is no preview window!", __FUNCTION__); + return INVALID_OPERATION; + } + + object->cleanUpCirculatingBuffers(); + return getStatusT(object->mPreviewCallback->setUsage((BufferUsage)usage)); +} + +int CameraDevice::sSetSwapInterval(struct preview_stream_ops *w, int interval) { + CameraPreviewWindow* object = static_cast(w); + if (object->mPreviewCallback == nullptr) { + ALOGE("%s: camera HAL calling preview ops while there is no preview window!", __FUNCTION__); + return INVALID_OPERATION; + } + + return getStatusT(object->mPreviewCallback->setSwapInterval(interval)); +} + +int CameraDevice::sGetMinUndequeuedBufferCount( + const struct preview_stream_ops *w, + int *count) { + const CameraPreviewWindow* object = static_cast(w); + if (object->mPreviewCallback == nullptr) { + ALOGE("%s: camera HAL calling preview ops while there is no preview window!", __FUNCTION__); + return INVALID_OPERATION; + } + if (count == nullptr) { + ALOGE("%s: count is null!", __FUNCTION__); + return BAD_VALUE; + } + + Status s; + object->mPreviewCallback->getMinUndequeuedBufferCount( + [&](auto status, uint32_t cnt) { + s = status; + if (s == Status::OK) { + *count = cnt; + } + }); + return getStatusT(s); +} + +CameraDevice::CameraHeapMemory::CameraHeapMemory( + int fd, size_t buf_size, uint_t num_buffers) : + mBufSize(buf_size), + mNumBufs(num_buffers) { + mHidlHandle = native_handle_create(1,0); + mHidlHandle->data[0] = fcntl(fd, F_DUPFD_CLOEXEC, 0); + const size_t pagesize = getpagesize(); + size_t size = ((buf_size * num_buffers + pagesize-1) & ~(pagesize-1)); + mHidlHeap = hidl_memory("ashmem", mHidlHandle, size); + commonInitialization(); +} + +CameraDevice::CameraHeapMemory::CameraHeapMemory( + sp ashmemAllocator, + size_t buf_size, uint_t num_buffers) : + mBufSize(buf_size), + mNumBufs(num_buffers) { + const size_t pagesize = getpagesize(); + size_t size = ((buf_size * num_buffers + pagesize-1) & ~(pagesize-1)); + ashmemAllocator->allocate(size, + [&](bool success, const hidl_memory& mem) { + if (!success) { + ALOGE("%s: allocating ashmem of %zu bytes failed!", + __FUNCTION__, buf_size * num_buffers); + return; + } + mHidlHandle = native_handle_clone(mem.handle()); + mHidlHeap = hidl_memory("ashmem", mHidlHandle, size); + }); + + commonInitialization(); +} + +void CameraDevice::CameraHeapMemory::commonInitialization() { + mHidlHeapMemory = mapMemory(mHidlHeap); + if (mHidlHeapMemory == nullptr) { + ALOGE("%s: memory map failed!", __FUNCTION__); + native_handle_close(mHidlHandle); // close FD for the shared memory + native_handle_delete(mHidlHandle); + mHidlHeap = hidl_memory(); + mHidlHandle = nullptr; + 
return; + } + mHidlHeapMemData = mHidlHeapMemory->getPointer(); + handle.data = mHidlHeapMemData; + handle.size = mBufSize * mNumBufs; + handle.handle = this; + handle.release = sPutMemory; +} + +CameraDevice::CameraHeapMemory::~CameraHeapMemory() { + if (mHidlHeapMemory != nullptr) { + mHidlHeapMemData = nullptr; + mHidlHeapMemory.clear(); // The destructor will trigger munmap + } + + if (mHidlHandle) { + native_handle_close(mHidlHandle); // close FD for the shared memory + native_handle_delete(mHidlHandle); + } +} + +// shared memory methods +camera_memory_t* CameraDevice::sGetMemory(int fd, size_t buf_size, uint_t num_bufs, void *user) { + ALOGV("%s", __FUNCTION__); + CameraDevice* object = static_cast(user); + if (object->mDeviceCallback == nullptr) { + ALOGE("%s: camera HAL request memory while camera is not opened!", __FUNCTION__); + return nullptr; + } + + CameraHeapMemory* mem; + if (fd < 0) { + mem = new CameraHeapMemory(object->mAshmemAllocator, buf_size, num_bufs); + } else { + mem = new CameraHeapMemory(fd, buf_size, num_bufs); + } + mem->incStrong(mem); + hidl_handle hidlHandle = mem->mHidlHandle; + MemoryId id = object->mDeviceCallback->registerMemory(hidlHandle, buf_size, num_bufs); + mem->handle.mId = id; + + { + Mutex::Autolock _l(object->mMemoryMapLock); + if (object->mMemoryMap.count(id) != 0) { + ALOGE("%s: duplicate MemoryId %d returned by client!", __FUNCTION__, id); + } + object->mMemoryMap[id] = mem; + } + mem->handle.mDevice = object; + return &mem->handle; +} + +void CameraDevice::sPutMemory(camera_memory_t *data) { + if (!data) + return; + + CameraHeapMemory* mem = static_cast(data->handle); + CameraDevice* device = mem->handle.mDevice; + if (device == nullptr) { + ALOGE("%s: camera HAL return memory for a null device!", __FUNCTION__); + return; + } + if (device->mDeviceCallback == nullptr) { + ALOGE("%s: camera HAL return memory while camera is not opened!", __FUNCTION__); + return; + } + device->mDeviceCallback->unregisterMemory(mem->handle.mId); + { + Mutex::Autolock _l(device->mMemoryMapLock); + device->mMemoryMap.erase(mem->handle.mId); + } + mem->decStrong(mem); +} + +// Callback forwarding methods +void CameraDevice::sNotifyCb(int32_t msg_type, int32_t ext1, int32_t ext2, void *user) { + ALOGV("%s", __FUNCTION__); + CameraDevice* object = static_cast(user); + if (object->mDeviceCallback != nullptr) { + object->mDeviceCallback->notifyCallback((NotifyCallbackMsg) msg_type, ext1, ext2); + } +} + +void CameraDevice::sDataCb(int32_t msg_type, const camera_memory_t *data, unsigned int index, + camera_frame_metadata_t *metadata, void *user) { + ALOGV("%s", __FUNCTION__); + CameraDevice* object = static_cast(user); + sp mem(static_cast(data->handle)); + if (index >= mem->mNumBufs) { + ALOGE("%s: invalid buffer index %d, max allowed is %d", __FUNCTION__, + index, mem->mNumBufs); + return; + } + if (object->mDeviceCallback != nullptr) { + CameraFrameMetadata hidlMetadata; + if (metadata) { + hidlMetadata.faces.resize(metadata->number_of_faces); + for (size_t i = 0; i < hidlMetadata.faces.size(); i++) { + hidlMetadata.faces[i].score = metadata->faces[i].score; + hidlMetadata.faces[i].id = metadata->faces[i].id; + for (int k = 0; k < 4; k++) { + hidlMetadata.faces[i].rect[k] = metadata->faces[i].rect[k]; + } + for (int k = 0; k < 2; k++) { + hidlMetadata.faces[i].leftEye[k] = metadata->faces[i].left_eye[k]; + } + for (int k = 0; k < 2; k++) { + hidlMetadata.faces[i].rightEye[k] = metadata->faces[i].right_eye[k]; + } + for (int k = 0; k < 2; k++) { + 
hidlMetadata.faces[i].mouth[k] = metadata->faces[i].mouth[k]; + } + } + } + CameraHeapMemory* mem = static_cast(data->handle); + object->mDeviceCallback->dataCallback( + (DataCallbackMsg) msg_type, mem->handle.mId, index, hidlMetadata); + } +} + +void CameraDevice::handleCallbackTimestamp( + nsecs_t timestamp, int32_t msg_type, + MemoryId memId , unsigned index, native_handle_t* handle) { + uint32_t batchSize = 0; + { + Mutex::Autolock _l(mBatchLock); + batchSize = mBatchSize; + } + + if (batchSize == 0) { // non-batch mode + mDeviceCallback->handleCallbackTimestamp( + (DataCallbackMsg) msg_type, handle, memId, index, timestamp); + } else { // batch mode + Mutex::Autolock _l(mBatchLock); + size_t inflightSize = mInflightBatch.size(); + if (inflightSize == 0) { + mBatchMsgType = msg_type; + } else if (mBatchMsgType != msg_type) { + ALOGE("%s: msg_type change (from %d to %d) is not supported!", + __FUNCTION__, mBatchMsgType, msg_type); + return; + } + mInflightBatch.push_back({handle, memId, index, timestamp}); + + // Send batched frames to camera framework + if (mInflightBatch.size() >= batchSize) { + mDeviceCallback->handleCallbackTimestampBatch( + (DataCallbackMsg) mBatchMsgType, mInflightBatch); + mInflightBatch.clear(); + } + } +} + +void CameraDevice::sDataCbTimestamp(nsecs_t timestamp, int32_t msg_type, + const camera_memory_t *data, unsigned index, void *user) { + ALOGV("%s", __FUNCTION__); + CameraDevice* object = static_cast(user); + // Start refcounting the heap object from here on. When the clients + // drop all references, it will be destroyed (as well as the enclosed + // MemoryHeapBase. + sp mem(static_cast(data->handle)); + if (index >= mem->mNumBufs) { + ALOGE("%s: invalid buffer index %d, max allowed is %d", __FUNCTION__, + index, mem->mNumBufs); + return; + } + + native_handle_t* handle = nullptr; + if (object->mMetadataMode) { + if (mem->mBufSize == sizeof(VideoNativeHandleMetadata)) { + VideoNativeHandleMetadata* md = (VideoNativeHandleMetadata*) + ((uint8_t*) mem->mHidlHeapMemData + index * mem->mBufSize); + if (md->eType == kMetadataBufferTypeNativeHandleSource) { + handle = md->pHandle; + } + } + } + + if (object->mDeviceCallback != nullptr) { + if (handle == nullptr) { + object->mDeviceCallback->dataCallbackTimestamp( + (DataCallbackMsg) msg_type, mem->handle.mId, index, timestamp); + } else { + object->handleCallbackTimestamp(timestamp, msg_type, mem->handle.mId, index, handle); + } + } +} + +void CameraDevice::initHalPreviewWindow() +{ + mHalPreviewWindow.cancel_buffer = sCancelBuffer; + mHalPreviewWindow.lock_buffer = sLockBuffer; + mHalPreviewWindow.dequeue_buffer = sDequeueBuffer; + mHalPreviewWindow.enqueue_buffer = sEnqueueBuffer; + mHalPreviewWindow.set_buffer_count = sSetBufferCount; + mHalPreviewWindow.set_buffers_geometry = sSetBuffersGeometry; + mHalPreviewWindow.set_crop = sSetCrop; + mHalPreviewWindow.set_timestamp = sSetTimestamp; + mHalPreviewWindow.set_usage = sSetUsage; + mHalPreviewWindow.set_swap_interval = sSetSwapInterval; + + mHalPreviewWindow.get_min_undequeued_buffer_count = + sGetMinUndequeuedBufferCount; +} + +// Methods from ::android::hardware::camera::device::V1_0::ICameraDevice follow. 
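handleCallbackTimestamp() above switches between immediate delivery and batching depending on the configured batch size. The same policy can be shown in isolation: frames accumulate until the batch size is reached and are then delivered oldest-first in a single call, mirroring handleCallbackTimestampBatch(). The sketch below uses invented FrameMsg/FrameBatcher names, a std::function in place of the HIDL callback, and omits the msg_type consistency check.

    // Standalone sketch of the batching policy: with batch size 0 frames are
    // delivered immediately; otherwise they accumulate and are flushed oldest-first
    // in one call, as handleCallbackTimestampBatch() does above.
    #include <cstdint>
    #include <functional>
    #include <utility>
    #include <vector>

    struct FrameMsg { int64_t timestampNs; uint32_t memId; uint32_t bufferIndex; };

    class FrameBatcher {
      public:
        FrameBatcher(uint32_t batchSize,
                     std::function<void(const std::vector<FrameMsg>&)> deliver)
            : mBatchSize(batchSize), mDeliver(std::move(deliver)) {}

        void onFrame(const FrameMsg& msg) {
            if (mBatchSize == 0) {          // non-batch mode
                mDeliver({msg});
                return;
            }
            mPending.push_back(msg);        // timestamps arrive in increasing order
            if (mPending.size() >= mBatchSize) {
                mDeliver(mPending);         // one callback for the whole batch
                mPending.clear();
            }
        }

      private:
        const uint32_t mBatchSize;
        std::function<void(const std::vector<FrameMsg>&)> mDeliver;
        std::vector<FrameMsg> mPending;
    };

Delivering a whole batch in one transaction is what reduces binder traffic in high-speed recording, which is the motivation given for handleCallbackTimestampBatch().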
+Return CameraDevice::getResourceCost(getResourceCost_cb _hidl_cb) { + Status status = initStatus(); + CameraResourceCost resCost; + if (status == Status::OK) { + int cost = 100; + std::vector conflicting_devices; + struct camera_info info; + + // If using post-2.4 module version, query the cost + conflicting devices from the HAL + if (mModule->getModuleApiVersion() >= CAMERA_MODULE_API_VERSION_2_4) { + int ret = mModule->getCameraInfo(mCameraIdInt, &info); + if (ret == OK) { + cost = info.resource_cost; + for (size_t i = 0; i < info.conflicting_devices_length; i++) { + std::string cameraId(info.conflicting_devices[i]); + for (const auto& pair : mCameraDeviceNames) { + if (cameraId == pair.first) { + conflicting_devices.push_back(pair.second); + } + } + } + } else { + status = Status::INTERNAL_ERROR; + } + } + + if (status == Status::OK) { + resCost.resourceCost = cost; + resCost.conflictingDevices.resize(conflicting_devices.size()); + for (size_t i = 0; i < conflicting_devices.size(); i++) { + resCost.conflictingDevices[i] = conflicting_devices[i]; + ALOGV("CamDevice %s is conflicting with camDevice %s", + mCameraId.c_str(), resCost.conflictingDevices[i].c_str()); + } + } + } + _hidl_cb(status, resCost); + return Void(); +} + +Return CameraDevice::getCameraInfo(getCameraInfo_cb _hidl_cb) { + Status status = initStatus(); + CameraInfo cameraInfo; + if (status == Status::OK) { + struct camera_info info; + int ret = mModule->getCameraInfo(mCameraIdInt, &info); + if (ret == OK) { + cameraInfo.facing = (CameraFacing) info.facing; + // Device 1.0 does not support external camera facing. + // The closest approximation would be front camera. + if (cameraInfo.facing == CameraFacing::EXTERNAL) { + cameraInfo.facing = CameraFacing::FRONT; + } + cameraInfo.orientation = info.orientation; + } else { + ALOGE("%s: get camera info failed!", __FUNCTION__); + status = Status::INTERNAL_ERROR; + } + } + _hidl_cb(status, cameraInfo); + return Void(); +} + +Return CameraDevice::setTorchMode(TorchMode mode) { + if (!mModule->isSetTorchModeSupported()) { + return Status::METHOD_NOT_SUPPORTED; + } + + Status status = initStatus(); + if (status == Status::OK) { + bool enable = (mode == TorchMode::ON) ? true : false; + status = getHidlStatus(mModule->setTorchMode(mCameraId.c_str(), enable)); + } + return status; +} + +Return CameraDevice::dumpState(const hidl_handle& handle) { + Mutex::Autolock _l(mLock); + if (handle.getNativeHandle() == nullptr) { + ALOGE("%s: handle must not be null", __FUNCTION__); + return Status::ILLEGAL_ARGUMENT; + } + if (handle->numFds != 1 || handle->numInts != 0) { + ALOGE("%s: handle must contain 1 FD and 0 integers! Got %d FDs and %d ints", + __FUNCTION__, handle->numFds, handle->numInts); + return Status::ILLEGAL_ARGUMENT; + } + int fd = handle->data[0]; + + if (mDevice != nullptr) { + if (mDevice->ops->dump) { // It's fine if the HAL doesn't implement dump() + return getHidlStatus(mDevice->ops->dump(mDevice, fd)); + } + } + return Status::OK; +} + +Return CameraDevice::open(const sp& callback) { + ALOGI("Opening camera %s", mCameraId.c_str()); + Mutex::Autolock _l(mLock); + + camera_info info; + status_t res = mModule->getCameraInfo(mCameraIdInt, &info); + if (res != OK) { + ALOGE("Could not get camera info: %s: %d", mCameraId.c_str(), res); + return getHidlStatus(res); + } + + int rc = OK; + if (mModule->getModuleApiVersion() >= CAMERA_MODULE_API_VERSION_2_3 && + info.device_version > CAMERA_DEVICE_API_VERSION_1_0) { + // Open higher version camera device as HAL1.0 device. 
+ rc = mModule->openLegacy(mCameraId.c_str(), + CAMERA_DEVICE_API_VERSION_1_0, + (hw_device_t **)&mDevice); + } else { + rc = mModule->open(mCameraId.c_str(), (hw_device_t **)&mDevice); + } + if (rc != OK) { + mDevice = nullptr; + ALOGE("Could not open camera %s: %d", mCameraId.c_str(), rc); + return getHidlStatus(rc); + } + + initHalPreviewWindow(); + mDeviceCallback = callback; + + if (mDevice->ops->set_callbacks) { + mDevice->ops->set_callbacks(mDevice, + sNotifyCb, sDataCb, sDataCbTimestamp, sGetMemory, this); + } + + return getHidlStatus(rc); +} + +Return CameraDevice::setPreviewWindow(const sp& window) { + ALOGV("%s(%s)", __FUNCTION__, mCameraId.c_str()); + Mutex::Autolock _l(mLock); + if (!mDevice) { + ALOGE("%s called while camera is not opened", __FUNCTION__); + return Status::OPERATION_NOT_SUPPORTED; + } + + mHalPreviewWindow.mPreviewCallback = window; + if (mDevice->ops->set_preview_window) { + return getHidlStatus(mDevice->ops->set_preview_window(mDevice, + (window == nullptr) ? nullptr : &mHalPreviewWindow)); + } + return Status::INTERNAL_ERROR; // HAL should provide set_preview_window +} + +Return CameraDevice::enableMsgType(uint32_t msgType) { + ALOGV("%s(%s)", __FUNCTION__, mCameraId.c_str()); + Mutex::Autolock _l(mLock); + if (!mDevice) { + ALOGE("%s called while camera is not opened", __FUNCTION__); + return Void(); + } + if (mDevice->ops->enable_msg_type) { + mDevice->ops->enable_msg_type(mDevice, msgType); + } + return Void(); +} + +Return CameraDevice::disableMsgType(uint32_t msgType) { + ALOGV("%s(%s)", __FUNCTION__, mCameraId.c_str()); + Mutex::Autolock _l(mLock); + if (!mDevice) { + ALOGE("%s called while camera is not opened", __FUNCTION__); + return Void(); + } + if (mDevice->ops->disable_msg_type) { + mDevice->ops->disable_msg_type(mDevice, msgType); + } + return Void(); +} + +Return CameraDevice::msgTypeEnabled(uint32_t msgType) { + ALOGV("%s(%s)", __FUNCTION__, mCameraId.c_str()); + Mutex::Autolock _l(mLock); + if (!mDevice) { + ALOGE("%s called while camera is not opened", __FUNCTION__); + return false; + } + if (mDevice->ops->msg_type_enabled) { + return mDevice->ops->msg_type_enabled(mDevice, msgType); + } + return false; +} + +Return CameraDevice::startPreview() { + ALOGV("%s(%s)", __FUNCTION__, mCameraId.c_str()); + Mutex::Autolock _l(mLock); + if (!mDevice) { + ALOGE("%s called while camera is not opened", __FUNCTION__); + return Status::OPERATION_NOT_SUPPORTED; + } + if (mDevice->ops->start_preview) { + return getHidlStatus(mDevice->ops->start_preview(mDevice)); + } + return Status::INTERNAL_ERROR; // HAL should provide start_preview +} + +Return CameraDevice::stopPreview() { + ALOGV("%s(%s)", __FUNCTION__, mCameraId.c_str()); + Mutex::Autolock _l(mLock); + if (!mDevice) { + ALOGE("%s called while camera is not opened", __FUNCTION__); + return Void(); + } + if (mDevice->ops->stop_preview) { + mDevice->ops->stop_preview(mDevice); + } + return Void(); +} + +Return CameraDevice::previewEnabled() { + ALOGV("%s(%s)", __FUNCTION__, mCameraId.c_str()); + Mutex::Autolock _l(mLock); + if (!mDevice) { + ALOGE("%s called while camera is not opened", __FUNCTION__); + return false; + } + if (mDevice->ops->preview_enabled) { + return mDevice->ops->preview_enabled(mDevice); + } + return false; +} + +Return CameraDevice::storeMetaDataInBuffers(bool enable) { + ALOGV("%s(%s)", __FUNCTION__, mCameraId.c_str()); + Mutex::Autolock _l(mLock); + if (!mDevice) { + ALOGE("%s called while camera is not opened", __FUNCTION__); + return Status::OPERATION_NOT_SUPPORTED; + } + if 
(mDevice->ops->store_meta_data_in_buffers) { + status_t s = mDevice->ops->store_meta_data_in_buffers(mDevice, enable); + if (s == OK && enable) { + mMetadataMode = true; + } + return getHidlStatus(s); + } + return enable ? Status::ILLEGAL_ARGUMENT : Status::OK; +} + +Return CameraDevice::startRecording() { + ALOGV("%s(%s)", __FUNCTION__, mCameraId.c_str()); + Mutex::Autolock _l(mLock); + if (!mDevice) { + ALOGE("%s called while camera is not opened", __FUNCTION__); + return Status::OPERATION_NOT_SUPPORTED; + } + if (mDevice->ops->start_recording) { + return getHidlStatus(mDevice->ops->start_recording(mDevice)); + } + return Status::ILLEGAL_ARGUMENT; +} + +Return CameraDevice::stopRecording() { + ALOGV("%s(%s)", __FUNCTION__, mCameraId.c_str()); + Mutex::Autolock _l(mLock); + if (!mDevice) { + ALOGE("%s called while camera is not opened", __FUNCTION__); + return Void(); + } + if (mDevice->ops->stop_recording) { + mDevice->ops->stop_recording(mDevice); + } + return Void(); +} + +Return CameraDevice::recordingEnabled() { + ALOGV("%s(%s)", __FUNCTION__, mCameraId.c_str()); + Mutex::Autolock _l(mLock); + if (!mDevice) { + ALOGE("%s called while camera is not opened", __FUNCTION__); + return false; + } + if (mDevice->ops->recording_enabled) { + return mDevice->ops->recording_enabled(mDevice); + } + return false; +} + +void CameraDevice::releaseRecordingFrameLocked( + uint32_t memId, uint32_t bufferIndex, const native_handle_t* handle) { + if (!mDevice) { + ALOGE("%s called while camera is not opened", __FUNCTION__); + return; + } + if (mDevice->ops->release_recording_frame) { + CameraHeapMemory* camMemory; + { + Mutex::Autolock _l(mMemoryMapLock); + auto it = mMemoryMap.find(memId); + if (it == mMemoryMap.end() || it->second == nullptr) { + ALOGE("%s unknown memoryId %d", __FUNCTION__, memId); + return; + } + camMemory = it->second; + } + if (bufferIndex >= camMemory->mNumBufs) { + ALOGE("%s: bufferIndex %d exceeds number of buffers %d", + __FUNCTION__, bufferIndex, camMemory->mNumBufs); + return; + } + void *data = ((uint8_t *) camMemory->mHidlHeapMemData) + bufferIndex * camMemory->mBufSize; + if (handle) { + VideoNativeHandleMetadata* md = (VideoNativeHandleMetadata*) data; + if (md->eType == kMetadataBufferTypeNativeHandleSource) { + // Input handle will be closed by HIDL transport later, so clone it + // HAL implementation is responsible to close/delete the clone + native_handle_t* clone = native_handle_clone(handle); + if (!clone) { + ALOGE("%s: failed to clone buffer %p", __FUNCTION__, handle); + return; + } + md->pHandle = clone; + } else { + ALOGE("%s:Malform VideoNativeHandleMetadata at memId %d, bufferId %d", + __FUNCTION__, memId, bufferIndex); + return; + } + } + mDevice->ops->release_recording_frame(mDevice, data); + } +} + +Return CameraDevice::releaseRecordingFrame(uint32_t memId, uint32_t bufferIndex) { + ALOGV("%s(%s)", __FUNCTION__, mCameraId.c_str()); + Mutex::Autolock _l(mLock); + releaseRecordingFrameLocked(memId, bufferIndex, nullptr); + return Void(); +} + +Return CameraDevice::releaseRecordingFrameHandle( + uint32_t memId, uint32_t bufferIndex, const hidl_handle& frame) { + ALOGV("%s(%s)", __FUNCTION__, mCameraId.c_str()); + Mutex::Autolock _l(mLock); + releaseRecordingFrameLocked( + memId, bufferIndex, frame.getNativeHandle()); + return Void(); +} + +Return CameraDevice::releaseRecordingFrameHandleBatch( + const hidl_vec& msgs) { + ALOGV("%s(%s)", __FUNCTION__, mCameraId.c_str()); + Mutex::Autolock _l(mLock); + for (auto& msg : msgs) { + releaseRecordingFrameLocked( + 
msg.data, msg.bufferIndex, msg.frameData.getNativeHandle()); + } + return Void(); +} + +Return CameraDevice::autoFocus() { + ALOGV("%s(%s)", __FUNCTION__, mCameraId.c_str()); + Mutex::Autolock _l(mLock); + if (!mDevice) { + ALOGE("%s called while camera is not opened", __FUNCTION__); + return Status::OPERATION_NOT_SUPPORTED; + } + if (mDevice->ops->auto_focus) { + return getHidlStatus(mDevice->ops->auto_focus(mDevice)); + } + return Status::ILLEGAL_ARGUMENT; +} + +Return CameraDevice::cancelAutoFocus() { + ALOGV("%s(%s)", __FUNCTION__, mCameraId.c_str()); + Mutex::Autolock _l(mLock); + if (!mDevice) { + ALOGE("%s called while camera is not opened", __FUNCTION__); + return Status::OPERATION_NOT_SUPPORTED; + } + if (mDevice->ops->cancel_auto_focus) { + return getHidlStatus(mDevice->ops->cancel_auto_focus(mDevice)); + } + return Status::ILLEGAL_ARGUMENT; +} + +Return CameraDevice::takePicture() { + ALOGV("%s(%s)", __FUNCTION__, mCameraId.c_str()); + Mutex::Autolock _l(mLock); + if (!mDevice) { + ALOGE("%s called while camera is not opened", __FUNCTION__); + return Status::OPERATION_NOT_SUPPORTED; + } + if (mDevice->ops->take_picture) { + return getHidlStatus(mDevice->ops->take_picture(mDevice)); + } + return Status::ILLEGAL_ARGUMENT; +} + +Return CameraDevice::cancelPicture() { + ALOGV("%s(%s)", __FUNCTION__, mCameraId.c_str()); + Mutex::Autolock _l(mLock); + if (!mDevice) { + ALOGE("%s called while camera is not opened", __FUNCTION__); + return Status::OPERATION_NOT_SUPPORTED; + } + if (mDevice->ops->cancel_picture) { + return getHidlStatus(mDevice->ops->cancel_picture(mDevice)); + } + return Status::ILLEGAL_ARGUMENT; +} + +Return CameraDevice::setParameters(const hidl_string& params) { + ALOGV("%s(%s)", __FUNCTION__, mCameraId.c_str()); + Mutex::Autolock _l(mLock); + if (!mDevice) { + ALOGE("%s called while camera is not opened", __FUNCTION__); + return Status::OPERATION_NOT_SUPPORTED; + } + if (mDevice->ops->set_parameters) { + return getHidlStatus(mDevice->ops->set_parameters(mDevice, params.c_str())); + } + return Status::ILLEGAL_ARGUMENT; +} + +Return CameraDevice::getParameters(getParameters_cb _hidl_cb) { + ALOGV("%s(%s)", __FUNCTION__, mCameraId.c_str()); + Mutex::Autolock _l(mLock); + hidl_string outStr; + if (!mDevice) { + ALOGE("%s called while camera is not opened", __FUNCTION__); + _hidl_cb(outStr); + return Void(); + } + if (mDevice->ops->get_parameters) { + char *temp = mDevice->ops->get_parameters(mDevice); + outStr = temp; + if (mDevice->ops->put_parameters) { + mDevice->ops->put_parameters(mDevice, temp); + } else { + free(temp); + } + } + _hidl_cb(outStr); + return Void(); +} + +Return CameraDevice::sendCommand(CommandType cmd, int32_t arg1, int32_t arg2) { + ALOGV("%s(%s)", __FUNCTION__, mCameraId.c_str()); + Mutex::Autolock _l(mLock); + if (!mDevice) { + ALOGE("%s called while camera is not opened", __FUNCTION__); + return Status::OPERATION_NOT_SUPPORTED; + } + if (mDevice->ops->send_command) { + return getHidlStatus(mDevice->ops->send_command(mDevice, (int32_t) cmd, arg1, arg2)); + } + return Status::ILLEGAL_ARGUMENT; +} + +Return CameraDevice::close() { + Mutex::Autolock _l(mLock); + closeLocked(); + return Void(); +} + +void CameraDevice::closeLocked() { + ALOGI("Closing camera %s", mCameraId.c_str()); + if(mDevice) { + int rc = mDevice->common.close(&mDevice->common); + if (rc != OK) { + ALOGE("Could not close camera %s: %d", mCameraId.c_str(), rc); + } + mDevice = nullptr; + } +} + +} // namespace implementation +} // namespace V1_0 +} // namespace device +} // 
namespace camera +} // namespace hardware +} // namespace android diff --git a/camera/device/1.0/default/CameraDevice_1_0.h b/camera/device/1.0/default/CameraDevice_1_0.h new file mode 100644 index 0000000..2c980f0 --- /dev/null +++ b/camera/device/1.0/default/CameraDevice_1_0.h @@ -0,0 +1,237 @@ +/* + * Copyright (C) 2017 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_HARDWARE_CAMERA_DEVICE_V1_0_CAMERADEVICE_H +#define ANDROID_HARDWARE_CAMERA_DEVICE_V1_0_CAMERADEVICE_H + +#include +#include "utils/Mutex.h" +#include "utils/SortedVector.h" +#include "CameraModule.h" +#include "HandleImporter.h" + +#include +#include +#include +#include +#include + +namespace android { +namespace hardware { +namespace camera { +namespace device { +namespace V1_0 { +namespace implementation { + +using ::android::hardware::camera::common::V1_0::CameraResourceCost; +using ::android::hardware::camera::common::V1_0::Status; +using ::android::hardware::camera::common::V1_0::TorchMode; +using ::android::hardware::camera::common::V1_0::helper::CameraModule; +using ::android::hardware::camera::common::V1_0::helper::HandleImporter; +using ::android::hardware::camera::device::V1_0::CameraInfo; +using ::android::hardware::camera::device::V1_0::CommandType; +using ::android::hardware::camera::device::V1_0::ICameraDevice; +using ::android::hardware::camera::device::V1_0::ICameraDeviceCallback; +using ::android::hardware::camera::device::V1_0::ICameraDevicePreviewCallback; +using ::android::hardware::camera::device::V1_0::MemoryId; +using ::android::hidl::allocator::V1_0::IAllocator; +using ::android::hidl::base::V1_0::IBase; +using ::android::hidl::memory::V1_0::IMemory; +using ::android::hardware::hidl_array; +using ::android::hardware::hidl_memory; +using ::android::hardware::hidl_string; +using ::android::hardware::hidl_vec; +using ::android::hardware::Return; +using ::android::hardware::Void; +using ::android::sp; + +struct CameraDevice : public ICameraDevice { + + // Called by provider HAL. Provider HAL must ensure the uniqueness of + // CameraDevice object per cameraId, or there could be multiple CameraDevice + // trying to access the same physical camera. + // Also, provider will have to keep track of all CameraDevice objects in + // order to notify CameraDevice when the underlying camera is detached + CameraDevice(sp module, + const std::string& cameraId, + const SortedVector>& cameraDeviceNames); + ~CameraDevice(); + + // Caller must use this method to check if CameraDevice ctor failed + bool isInitFailed() { return mInitFail; } + // Used by provider HAL to signal external camera disconnected + void setConnectionStatus(bool connected); + + // Methods from ::android::hardware::camera::device::V1_0::ICameraDevice follow. 
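The constructor contract spelled out above (one CameraDevice per camera id, and a wrapper whose constructor failed must be detected with isInitFailed() and discarded) is the provider's responsibility. The following is a hypothetical registry illustrating that contract; DeviceStub stands in for CameraDevice and no real CameraModule is involved.

    // Hypothetical provider-side registry: at most one device wrapper per camera id,
    // and a wrapper whose constructor failed is dropped immediately.
    #include <map>
    #include <memory>
    #include <string>

    struct DeviceStub {                       // stand-in for CameraDevice
        explicit DeviceStub(const std::string& id) : initFailed(id.empty()) {}
        bool isInitFailed() const { return initFailed; }
        bool initFailed;
    };

    class DeviceRegistry {
      public:
        std::shared_ptr<DeviceStub> getOrCreate(const std::string& cameraId) {
            auto it = mDevices.find(cameraId);
            if (it != mDevices.end()) return it->second;  // one wrapper per id

            auto dev = std::make_shared<DeviceStub>(cameraId);
            if (dev->isInitFailed()) return nullptr;      // ctor failure: don't cache
            mDevices[cameraId] = dev;
            return dev;
        }

        // Mirrors notifying/removing the wrapper when an external camera detaches
        // (compare setConnectionStatus(false) above).
        void remove(const std::string& cameraId) { mDevices.erase(cameraId); }

      private:
        std::map<std::string, std::shared_ptr<DeviceStub>> mDevices;
    };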
+ Return getResourceCost(getResourceCost_cb _hidl_cb) override; + Return getCameraInfo(getCameraInfo_cb _hidl_cb) override; + Return setTorchMode(TorchMode mode) override; + Return dumpState(const hidl_handle& fd) override; + Return open(const sp& callback) override; + Return setPreviewWindow(const sp& window) override; + Return enableMsgType(uint32_t msgType) override; + Return disableMsgType(uint32_t msgType) override; + Return msgTypeEnabled(uint32_t msgType) override; + Return startPreview() override; + Return stopPreview() override; + Return previewEnabled() override; + Return storeMetaDataInBuffers(bool enable) override; + Return startRecording() override; + Return stopRecording() override; + Return recordingEnabled() override; + Return releaseRecordingFrame(uint32_t memId, uint32_t bufferIndex) override; + Return releaseRecordingFrameHandle( + uint32_t memId, uint32_t bufferIndex, const hidl_handle& frame) override; + Return releaseRecordingFrameHandleBatch( + const hidl_vec&) override; + Return autoFocus() override; + Return cancelAutoFocus() override; + Return takePicture() override; + Return cancelPicture() override; + Return setParameters(const hidl_string& params) override; + Return getParameters(getParameters_cb _hidl_cb) override; + Return sendCommand(CommandType cmd, int32_t arg1, int32_t arg2) override; + Return close() override; + +private: + struct CameraMemory : public camera_memory_t { + MemoryId mId; + CameraDevice* mDevice; + }; + + class CameraHeapMemory : public RefBase { + public: + CameraHeapMemory(int fd, size_t buf_size, uint_t num_buffers = 1); + explicit CameraHeapMemory( + sp ashmemAllocator, size_t buf_size, uint_t num_buffers = 1); + void commonInitialization(); + virtual ~CameraHeapMemory(); + + size_t mBufSize; + uint_t mNumBufs; + + // Shared memory related members + hidl_memory mHidlHeap; + native_handle_t* mHidlHandle; // contains one shared memory FD + void* mHidlHeapMemData; + sp mHidlHeapMemory; // munmap happens in ~IMemory() + + CameraMemory handle; + }; + sp mAshmemAllocator; + + const sp mModule; + const std::string mCameraId; + // const after ctor + int mCameraIdInt; + int mDeviceVersion; + + camera_device_t* mDevice = nullptr; + + void initHalPreviewWindow(); + struct CameraPreviewWindow : public preview_stream_ops { + // Called when we expect buffer will be re-allocated + void cleanUpCirculatingBuffers(); + + Mutex mLock; + sp mPreviewCallback = nullptr; + std::unordered_map mCirculatingBuffers; + std::unordered_map mBufferIdMap; + } mHalPreviewWindow; + + // gating access to mDevice, mInitFail, mDisconnected + mutable Mutex mLock; + + bool mInitFail = false; + // Set by provider (when external camera is connected/disconnected) + bool mDisconnected; + + static HandleImporter sHandleImporter; + + const SortedVector>& mCameraDeviceNames; + + sp mDeviceCallback = nullptr; + + mutable Mutex mMemoryMapLock; // gating access to mMemoryMap + // must not hold mLock after this lock is acquired + std::unordered_map mMemoryMap; + + bool mMetadataMode = false; + + mutable Mutex mBatchLock; + // Start of protection scope for mBatchLock + uint32_t mBatchSize = 0; // 0 for non-batch mode, set to other value to start batching + int32_t mBatchMsgType; // Maybe only allow DataCallbackMsg::VIDEO_FRAME? 
+ std::vector mInflightBatch; + // End of protection scope for mBatchLock + + void handleCallbackTimestamp( + nsecs_t timestamp, int32_t msg_type, + MemoryId memId , unsigned index, native_handle_t* handle); + void releaseRecordingFrameLocked(uint32_t memId, uint32_t bufferIndex, const native_handle_t*); + + // shared memory methods + static camera_memory_t* sGetMemory(int fd, size_t buf_size, uint_t num_bufs, void *user); + static void sPutMemory(camera_memory_t *data); + + // Device callback forwarding methods + static void sNotifyCb(int32_t msg_type, int32_t ext1, int32_t ext2, void *user); + static void sDataCb(int32_t msg_type, const camera_memory_t *data, unsigned int index, + camera_frame_metadata_t *metadata, void *user); + static void sDataCbTimestamp(nsecs_t timestamp, int32_t msg_type, + const camera_memory_t *data, unsigned index, void *user); + + // Preview window callback forwarding methods + static int sDequeueBuffer(struct preview_stream_ops* w, + buffer_handle_t** buffer, int *stride); + + static int sLockBuffer(struct preview_stream_ops* w, buffer_handle_t* buffer); + + static int sEnqueueBuffer(struct preview_stream_ops* w, buffer_handle_t* buffer); + + static int sCancelBuffer(struct preview_stream_ops* w, buffer_handle_t* buffer); + + static int sSetBufferCount(struct preview_stream_ops* w, int count); + + static int sSetBuffersGeometry(struct preview_stream_ops* w, + int width, int height, int format); + + static int sSetCrop(struct preview_stream_ops *w, int left, int top, int right, int bottom); + + static int sSetTimestamp(struct preview_stream_ops *w, int64_t timestamp); + + static int sSetUsage(struct preview_stream_ops* w, int usage); + + static int sSetSwapInterval(struct preview_stream_ops *w, int interval); + + static int sGetMinUndequeuedBufferCount(const struct preview_stream_ops *w, int *count); + + // convert conventional HAL status to HIDL Status + static Status getHidlStatus(const int&); + static status_t getStatusT(const Status& s); + + Status initStatus() const; + void closeLocked(); +}; + +} // namespace implementation +} // namespace V1_0 +} // namespace device +} // namespace camera +} // namespace hardware +} // namespace android + +#endif // ANDROID_HARDWARE_CAMERA_DEVICE_V1_0_CAMERADEVICE_H diff --git a/camera/device/1.0/default/OWNERS b/camera/device/1.0/default/OWNERS new file mode 100644 index 0000000..f48a95c --- /dev/null +++ b/camera/device/1.0/default/OWNERS @@ -0,0 +1 @@ +include platform/frameworks/av:/camera/OWNERS diff --git a/camera/device/1.0/types.hal b/camera/device/1.0/types.hal new file mode 100644 index 0000000..ce5205e --- /dev/null +++ b/camera/device/1.0/types.hal @@ -0,0 +1,289 @@ +/* + * Copyright (C) 2016 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package android.hardware.camera.device@1.0; + +enum CameraFacing : uint32_t { + /** The facing of the camera is opposite to that of the screen. */ + BACK = 0, + /** The facing of the camera is the same as that of the screen. 
*/ + FRONT = 1, + /** + * The facing of the camera is not fixed relative to the screen. + * The cameras with this facing are external cameras, e.g. USB cameras. + */ + EXTERNAL = 2 +}; + +/** + * Basic information about a camera device, always accessible via + * ICameraDevice::getCameraInfo(). + */ +struct CameraInfo { + /** + * The direction that this device faces. + */ + CameraFacing facing; + + /** + * The orientation of the camera image. The value is the angle that the + * camera image needs to be rotated clockwise so it shows correctly on the + * display in its natural orientation. It must be 0, 90, 180, or 270. + * + * For example, suppose a device has a naturally tall screen. The + * back-facing camera sensor is mounted in landscape. You are looking at the + * screen. If the top side of the camera sensor is aligned with the right + * edge of the screen in natural orientation, the value must be 90. If the + * top side of a front-facing camera sensor is aligned with the right of the + * screen, the value must be 270. + * + * An external camera device must leave this set to 0. + * + */ + uint32_t orientation; + +}; + +/** + * Message types for ICameraDevice@1.0::enableMsgType()/disableMsgType() + * + * A set of bit masks for specifying how the received preview frames are + * handled before the previewCallback() call. + * + * The least significant 3 bits of an "int" value are used for this purpose: + * + * ..... 0 0 0 + * ^ ^ ^ + * | | |---------> determine whether the callback is enabled or not + * | |-----------> determine whether the callback is one-shot or not + * |-------------> determine whether the frame is copied out or not + * + * WARNING: When a frame is sent directly without copying, it is the frame + * receiver's responsiblity to make sure that the frame data won't get + * corrupted by subsequent preview frames filled by the camera. This flag is + * recommended only when copying out data brings significant performance price + * and the handling/processing of the received frame data is always faster than + * the preview frame rate so that data corruption won't occur. + * + * For instance, + * 1. 0x00 disables the callback. In this case, copy out and one shot bits + * are ignored. + * 2. 0x01 enables a callback without copying out the received frames. A + * typical use case is the Camcorder application to avoid making costly + * frame copies. + * 3. 0x05 is enabling a callback with frame copied out repeatedly. A typical + * use case is the Camera application. + * 4. 0x07 is enabling a callback with frame copied out only once. A typical + * use case is the Barcode scanner application. + */ +enum FrameCallbackFlag : uint32_t { + ENABLE_MASK = 0x01, + ONE_SHOT_MASK = 0x02, + COPY_OUT_MASK = 0x04, + /** Typical use cases */ + NOOP = 0x00, + CAMCORDER = 0x01, + CAMERA = 0x05, + BARCODE_SCANNER = 0x07 +}; + +typedef bitfield FrameCallbackFlags; + +/** + * Subset of commands in /system/core/include/system/camera.h relevant for + * ICameraDevice@1.0::sendCommand() + */ +enum CommandType : uint32_t { + START_SMOOTH_ZOOM = 1, + STOP_SMOOTH_ZOOM = 2, + + /** + * Start the face detection. This must be called only after preview is + * started. The camera must notify the listener of CAMERA_MSG_FACE and the + * detected faces in the preview frame. The detected faces may be the same + * as the previous ones. Apps must call CAMERA_CMD_STOP_FACE_DETECTION to + * stop the face detection. 
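The FrameCallbackFlag bit layout above (bit 0 enable, bit 1 one-shot, bit 2 copy-out) is what makes 0x01, 0x05, and 0x07 correspond to the camcorder, camera, and barcode-scanner use cases. A small self-contained check of that decomposition; only the mask values come from the enum, the helper names are invented.

    // Decoding the FrameCallbackFlag bits documented above.
    #include <cassert>
    #include <cstdint>

    constexpr uint32_t kEnableMask  = 0x01;
    constexpr uint32_t kOneShotMask = 0x02;
    constexpr uint32_t kCopyOutMask = 0x04;

    bool enabled(uint32_t flags) { return (flags & kEnableMask) != 0; }
    bool oneShot(uint32_t flags) { return (flags & kOneShotMask) != 0; }
    bool copyOut(uint32_t flags) { return (flags & kCopyOutMask) != 0; }

    int main() {
        const uint32_t camcorder = 0x01, camera = 0x05, barcode = 0x07;
        assert(enabled(camcorder) && !copyOut(camcorder));  // frames shared, not copied
        assert(enabled(camera) && copyOut(camera) && !oneShot(camera));  // repeated copies
        assert(oneShot(barcode) && copyOut(barcode));       // single copied-out frame
        assert(!enabled(0x00));                             // NOOP disables the callback
        return 0;
    }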
This method is supported if CameraParameters + * KEY_MAX_NUM_HW_DETECTED_FACES or KEY_MAX_NUM_SW_DETECTED_FACES is bigger + * than 0. Hardware and software face detection must not be running at the + * same time. If the face detection has started, apps must not send this + * again. + * + * In hardware face detection mode, CameraParameters KEY_WHITE_BALANCE, + * KEY_FOCUS_AREAS and KEY_METERING_AREAS have no effect. + * + * arg1 is the face detection type. It can be CAMERA_FACE_DETECTION_HW or + * CAMERA_FACE_DETECTION_SW. If the type of face detection requested is not + * supported, the HAL must return BAD_VALUE. + */ + START_FACE_DETECTION = 6, + + /** + * Stop the face detection. + */ + STOP_FACE_DETECTION = 7, + + /** + * Enable/disable focus move callback (CAMERA_MSG_FOCUS_MOVE). Passing + * arg1 = 0 must disable, while passing arg1 = 1 must enable the callback. + */ + ENABLE_FOCUS_MOVE_MSG = 8, + + /** + * Configure an explicit format to use for video recording metadata mode. + * This can be used to switch the format from the + * default IMPLEMENTATION_DEFINED gralloc format to some other + * device-supported format, and the default dataspace from the BT_709 color + * space to some other device-supported dataspace. arg1 is the HAL pixel + * format, and arg2 is the HAL dataSpace. This command returns + * INVALID_OPERATION error if it is sent after video recording is started, + * or the command is not supported at all. + * + * If the gralloc format is set to a format other than + * IMPLEMENTATION_DEFINED, then HALv3 devices must use gralloc usage flags + * of SW_READ_OFTEN. + */ + SET_VIDEO_FORMAT = 11 +}; + +/** + * Message types for ICameraDevice1Callback::notifyCallback() + */ +enum NotifyCallbackMsg : uint32_t { + ERROR = 0x0001, + SHUTTER = 0x0002, + FOCUS = 0x0004, + ZOOM = 0x0008, + // Notify on autofocus start and stop. This is useful in continuous + // autofocus - FOCUS_MODE_CONTINUOUS_VIDEO and FOCUS_MODE_CONTINUOUS_PICTURE. + FOCUS_MOVE = 0x0800 +}; + +/** + * Message types for ICameraDevice1Callback::dataCallback() and + * ICameraDevice1Callback::dataCallbackTimestamp() + */ +enum DataCallbackMsg : uint32_t { + PREVIEW_FRAME = 0x0010, + VIDEO_FRAME = 0x0020, + POSTVIEW_FRAME = 0x0040, + RAW_IMAGE = 0x0080, + COMPRESSED_IMAGE = 0x0100, + RAW_IMAGE_NOTIFY = 0x0200, + // Preview frame metadata. This can be combined with + // CAMERA_MSG_PREVIEW_FRAME in dataCallback. For example, the apps can + // request FRAME and METADATA. Or the apps can request only FRAME or only + // METADATA. + PREVIEW_METADATA = 0x0400 +}; + +/** + * Information for a single detected face. + */ + struct CameraFace { + /** + * Bounds of the face [left, top, right, bottom]. (-1000, -1000) represents + * the top-left of the camera field of view, and (1000, 1000) represents the + * bottom-right of the field of view. The width and height cannot be 0 or + * negative. This is supported by both hardware and software face detection. + * + * The direction is relative to the sensor orientation, that is, what the + * sensor sees. The direction is not affected by the rotation or mirroring + * of CAMERA_CMD_SET_DISPLAY_ORIENTATION. + */ + int32_t[4] rect; + + /** + * The confidence level of the face. The range is 1 to 100. 100 is the + * highest confidence. This is supported by both hardware and software + * face detection. + */ + int32_t score; + + /** + * An unique id per face while the face is visible to the tracker. If + * the face leaves the field-of-view and comes back, it will get a new + * id. 
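The NotifyCallbackMsg and DataCallbackMsg values above are bit masks, so a client composes them with bitwise OR when calling enableMsgType()/disableMsgType() and tests them the same way in its callbacks. A minimal sketch of that bookkeeping, using only the PREVIEW_FRAME, VIDEO_FRAME, and PREVIEW_METADATA values copied from the enum; the MsgTypeState class is illustrative, not part of the HAL.

    // Sketch of enable/disable bookkeeping for the message-type bit masks above.
    #include <cassert>
    #include <cstdint>

    constexpr uint32_t kMsgPreviewFrame    = 0x0010;
    constexpr uint32_t kMsgVideoFrame      = 0x0020;
    constexpr uint32_t kMsgPreviewMetadata = 0x0400;

    class MsgTypeState {
      public:
        void enable(uint32_t msgType)  { mEnabled |= msgType;  }
        void disable(uint32_t msgType) { mEnabled &= ~msgType; }
        bool isEnabled(uint32_t msgType) const { return (mEnabled & msgType) == msgType; }
      private:
        uint32_t mEnabled = 0;
    };

    int main() {
        MsgTypeState state;
        // Ask for preview frames together with their face-detection metadata.
        state.enable(kMsgPreviewFrame | kMsgPreviewMetadata);
        assert(state.isEnabled(kMsgPreviewFrame));
        assert(!state.isEnabled(kMsgVideoFrame));
        state.disable(kMsgPreviewFrame);
        assert(!state.isEnabled(kMsgPreviewFrame | kMsgPreviewMetadata));
        return 0;
    }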
If the value is 0, id is not supported. + */ + int32_t id; + + /** + * The coordinates of the center of the left eye. The range is -1000 to + * 1000. -2000, -2000 if this is not supported. + */ + int32_t[2] leftEye; + + /** + * The coordinates of the center of the right eye. The range is -1000 to + * 1000. -2000, -2000 if this is not supported. + */ + int32_t[2] rightEye; + + /** + * The coordinates of the center of the mouth. The range is -1000 to 1000. + * -2000, -2000 if this is not supported. + */ + int32_t[2] mouth; + +}; + +/** + * The metadata of the frame data, such as face detection result. + */ +struct CameraFrameMetadata { + /** + * A vector of the detected faces. + */ + vec faces; +}; + +/** + * A simple integer handle to use to reference a particular memory buffer + * between the HAL and the framework. + */ +typedef uint32_t MemoryId; + +/* + * Struct containing arguments of ICameraDeviceCallback::handleCallbackTimestamp. + * Used to send a batch of messages in ICameraDeviceCallback::handleCallbackTimestampBatch. + */ +struct HandleTimestampMessage { + // The handle of image buffer data. + handle frameData; + + // A memory handle to the buffer containing the data + MemoryId data; + + // The offset into the memory handle where the buffer starts. + uint32_t bufferIndex; + + // The time this buffer was captured by the camera, in nanoseconds + int64_t timestamp; +}; + +/* + * Struct containing arguments of ICameraDevice::releaseRecordingFrameHandle. + * Used by camera framework to send a batch of recording frames back to camera HAL. + */ +struct VideoFrameMessage { + // The handle of image buffer data. + handle frameData; + + // A memory handle to the buffer containing the data + MemoryId data; + + // The offset into the memory handle where the buffer starts. + uint32_t bufferIndex; +}; diff --git a/camera/device/3.2/ICameraDevice.hal b/camera/device/3.2/ICameraDevice.hal new file mode 100644 index 0000000..5236bb1 --- /dev/null +++ b/camera/device/3.2/ICameraDevice.hal @@ -0,0 +1,201 @@ +/* + * Copyright (C) 2016 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package android.hardware.camera.device@3.2; + +import android.hardware.camera.common@1.0::types; +import ICameraDeviceSession; +import ICameraDeviceCallback; + +/** + * Camera device HAL, first modern version + * + * Supports the android.hardware.Camera API, and the android.hardware.camera2 + * API at LIMITED or better hardware level. + * + */ +interface ICameraDevice { + + /** + * Get camera device resource cost information. + * + * @return status Status code for the operation, one of: + * OK: + * On success + * INTERNAL_ERROR: + * An unexpected internal camera HAL error occurred, and the + * resource cost is not available. + * CAMERA_DISCONNECTED: + * An external camera device has been disconnected, and is no longer + * available. This camera device interface is now stale, and a new + * instance must be acquired if the device is reconnected. 
All + * subsequent calls on this interface must return + * CAMERA_DISCONNECTED. + * @return resourceCost + * The resources required to open this camera device, or unspecified + * values if status is not OK. + */ + getResourceCost() generates (Status status, CameraResourceCost resourceCost); + + /** + * getCameraCharacteristics: + * + * Return the static camera information for this camera device. This + * information may not change between consecutive calls. + * + * When an external camera is disconnected, its camera id becomes + * invalid. Calling this method with this invalid camera id must result in + * ILLEGAL_ARGUMENT; this may happen even before the device status callback + * is invoked by the HAL. + * + * @return status Status code for the operation, one of: + * OK: + * On a successful open of the camera device. + * INTERNAL_ERROR: + * The camera device cannot be opened due to an internal + * error. + * CAMERA_DISCONNECTED: + * An external camera device has been disconnected, and is no longer + * available. This camera device interface is now stale, and a new + * instance must be acquired if the device is reconnected. All + * subsequent calls on this interface must return + * CAMERA_DISCONNECTED. + * + * @return cameraCharacteristics + * The static metadata for this camera device, or an empty metadata + * structure if status is not OK. + * + */ + getCameraCharacteristics() generates + (Status status, CameraMetadata cameraCharacteristics); + + /** + * setTorchMode: + * + * Turn on or off the torch mode of the flash unit associated with this + * camera device. If the operation is successful, HAL must notify the + * framework torch state by invoking + * ICameraProviderCallback::torchModeStatusChange() with the new state. + * + * An active camera session has a higher priority accessing the flash + * unit. When there are any resource conflicts, such as when open() is + * called to fully activate a camera device, the provider must notify the + * framework through ICameraProviderCallback::torchModeStatusChange() that + * the torch mode has been turned off and the torch mode state has become + * TORCH_MODE_STATUS_NOT_AVAILABLE. When resources to turn on torch mode + * become available again, the provider must notify the framework through + * ICameraProviderCallback::torchModeStatusChange() that the torch mode + * state has become TORCH_MODE_STATUS_AVAILABLE_OFF for set_torch_mode() to + * be called. + * + * When the client calls setTorchMode() to turn on the torch mode of a flash + * unit, if the HAL cannot keep multiple torch modes on simultaneously, the + * HAL must turn off the torch mode(s) that were turned on by previous + * setTorchMode() calls and notify the framework that the torch mode state + * of those flash unit(s) has become TORCH_MODE_STATUS_AVAILABLE_OFF. + * + * @param torchMode The new mode to set the device flash unit to. + * + * @return status Status code for the operation, one of: + * OK: + * On a successful change to the torch state + * INTERNAL_ERROR: + * The flash unit cannot be operated due to an unexpected internal + * error. + * ILLEGAL_ARGUMENT: + * The camera ID is unknown. + * CAMERA_IN_USE: + * This camera device has been opened, so the torch cannot be + * controlled until it is closed. + * MAX_CAMERAS_IN_USE: + * Due to other camera devices being open, or due to other + * resource constraints, the torch cannot be controlled currently. + * METHOD_NOT_SUPPORTED: + * This provider does not support direct operation of flashlight + * torch mode. 
The framework must open the camera device and turn + * the torch on through the device interface. + * OPERATION_NOT_SUPPORTED: + * This camera device does not have a flash unit. This can + * be returned if and only if android.flash.info.available is + * false. + * CAMERA_DISCONNECTED: + * An external camera device has been disconnected, and is no longer + * available. This camera device interface is now stale, and a new + * instance must be acquired if the device is reconnected. All + * subsequent calls on this interface must return + * CAMERA_DISCONNECTED. + * + */ + setTorchMode(TorchMode mode) generates (Status status); + + /** + * open: + * + * Power on and initialize this camera device for active use, returning a + * session handle for active operations. + * + * @param callback Interface to invoke by the HAL for device asynchronous + * events. For HALs newer than version 3.2, HAL must use castFrom + * method to check the exact version of callback sent by camera service. + * + * @return status Status code for the operation, one of: + * OK: + * On a successful open of the camera device. + * INTERNAL_ERROR: + * The camera device cannot be opened due to an internal + * error. + * ILLEGAL_ARGUMENT: + * The callbacks handle is invalid (for example, it is null). + * CAMERA_IN_USE: + * This camera device is already open. + * MAX_CAMERAS_IN_USE: + * The maximal number of camera devices that can be + * opened concurrently were opened already. + * CAMERA_DISCONNECTED: + * This external camera device has been disconnected, and is no + * longer available. This interface is now stale, and a new instance + * must be acquired if the device is reconnected. All subsequent + * calls on this interface must return CAMERA_DISCONNECTED. + * @return session The interface to the newly-opened camera session, + * or null if status is not OK. + */ + open(ICameraDeviceCallback callback) generates + (Status status, ICameraDeviceSession session); + + /** + * dumpState: + * + * Print out debugging state for the camera device. This may be called by + * the framework when the camera service is asked for a debug dump, which + * happens when using the dumpsys tool, or when capturing a bugreport. + * + * The passed-in file descriptor can be used to write debugging text using + * dprintf() or write(). The text must be in ASCII encoding only. + * + * In case this camera device has been disconnected, the dump must not fail, + * but may simply print out 'Device disconnected' or equivalent. + * + * Performance requirements: + * + * This must be a non-blocking call. The HAL should return from this call + * in 1ms, must return from this call in 10ms. This call must avoid + * deadlocks, as it may be called at any point during camera operation. + * Any synchronization primitives used (such as mutex locks or semaphores) + * must be acquired with a timeout. + */ + dumpState(handle fd); + +}; diff --git a/camera/device/3.2/ICameraDeviceCallback.hal b/camera/device/3.2/ICameraDeviceCallback.hal new file mode 100644 index 0000000..206a649 --- /dev/null +++ b/camera/device/3.2/ICameraDeviceCallback.hal @@ -0,0 +1,154 @@ +/* + * Copyright (C) 2016 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package android.hardware.camera.device@3.2; + +import android.hardware.camera.common@1.0::types; + +/** + * + * Callback methods for the HAL to call into the framework. + * + * These methods are used to return metadata and image buffers for a completed + * or failed captures, and to notify the framework of asynchronous events such + * as errors. + * + * The framework must not call back into the HAL from within these callbacks, + * and these calls must not block for extended periods. + * + */ +interface ICameraDeviceCallback { + + /** + * processCaptureResult: + * + * Send results from one or more completed or partially completed captures + * to the framework. + * processCaptureResult() may be invoked multiple times by the HAL in + * response to a single capture request. This allows, for example, the + * metadata and low-resolution buffers to be returned in one call, and + * post-processed JPEG buffers in a later call, once it is available. Each + * call must include the frame number of the request it is returning + * metadata or buffers for. Only one call to processCaptureResult + * may be made at a time by the HAL although the calls may come from + * different threads in the HAL. + * + * A component (buffer or metadata) of the complete result may only be + * included in one process_capture_result call. A buffer for each stream, + * and the result metadata, must be returned by the HAL for each request in + * one of the processCaptureResult calls, even in case of errors producing + * some of the output. A call to processCaptureResult() with neither + * output buffers or result metadata is not allowed. + * + * The order of returning metadata and buffers for a single result does not + * matter, but buffers for a given stream must be returned in FIFO order. So + * the buffer for request 5 for stream A must always be returned before the + * buffer for request 6 for stream A. This also applies to the result + * metadata; the metadata for request 5 must be returned before the metadata + * for request 6. + * + * However, different streams are independent of each other, so it is + * acceptable and expected that the buffer for request 5 for stream A may be + * returned after the buffer for request 6 for stream B is. And it is + * acceptable that the result metadata for request 6 for stream B is + * returned before the buffer for request 5 for stream A is. If multiple + * capture results are included in a single call, camera framework must + * process results sequentially from lower index to higher index, as if + * these results were sent to camera framework one by one, from lower index + * to higher index. + * + * The HAL retains ownership of result structure, which only needs to be + * valid to access during this call. + * + * The output buffers do not need to be filled yet; the framework must wait + * on the stream buffer release sync fence before reading the buffer + * data. Therefore, this method should be called by the HAL as soon as + * possible, even if some or all of the output buffers are still in + * being filled. 
The HAL must include valid release sync fences into each + * output_buffers stream buffer entry, or -1 if that stream buffer is + * already filled. + * + * If the result buffer cannot be constructed for a request, the HAL must + * return an empty metadata buffer, but still provide the output buffers and + * their sync fences. In addition, notify() must be called with an + * ERROR_RESULT message. + * + * If an output buffer cannot be filled, its status field must be set to + * STATUS_ERROR. In this case, notify() isn't required to be called with + * an ERROR_BUFFER message. The framework will simply treat the notify() + * call with ERROR_BUFFER as a no-op, and derive whether and when to notify + * the application of buffer loss based on the buffer status and whether or not + * the entire capture has failed. + * + * If the entire capture has failed, then this method still needs to be + * called to return the output buffers to the framework. All the buffer + * statuses must be STATUS_ERROR, and the result metadata must be an + * empty buffer. In addition, notify() must be called with a ERROR_REQUEST + * message. In this case, individual ERROR_RESULT/ERROR_BUFFER messages + * must not be sent. Note that valid partial results are still allowed + * as long as the final result metadata fails to be generated. + * + * Performance requirements: + * + * This is a non-blocking call. The framework must handle each CaptureResult + * within 5ms. + * + * The pipeline latency (see S7 for definition) should be less than or equal to + * 4 frame intervals, and must be less than or equal to 8 frame intervals. + * + */ + processCaptureResult(vec results); + + /** + * notify: + * + * Asynchronous notification callback from the HAL, fired for various + * reasons. Only for information independent of frame capture, or that + * require specific timing. Multiple messages may be sent in one call; a + * message with a higher index must be considered to have occurred after a + * message with a lower index. + * + * Multiple threads may call notify() simultaneously. + * + * Buffers delivered to the framework must not be dispatched to the + * application layer until a start of exposure timestamp (or input image's + * start of exposure timestamp for a reprocess request) has been received + * via a SHUTTER notify() call. It is highly recommended to dispatch this + * call as early as possible. + * + * The SHUTTER notify calls for requests with android.control.enableZsl + * set to TRUE and ANDROID_CONTROL_CAPTURE_INTENT == STILL_CAPTURE may be + * out-of-order compared to SHUTTER notify for other kinds of requests + * (including regular, reprocess, or zero-shutter-lag requests with + * different capture intents). + * + * As a result, the capture results of zero-shutter-lag requests with + * ANDROID_CONTROL_CAPTURE_INTENT == STILL_CAPTURE may be out-of-order + * compared to capture results for other kinds of requests. + * + * Different SHUTTER notify calls for zero-shutter-lag requests with + * ANDROID_CONTROL_CAPTURE_INTENT == STILL_CAPTURE must be in order between + * them, as is for other kinds of requests. SHUTTER notify calls for + * zero-shutter-lag requests with non STILL_CAPTURE intent must be in order + * with SHUTTER notify calls for regular requests. + * ------------------------------------------------------------------------ + * Performance requirements: + * + * This is a non-blocking call. The framework must handle each message in 5ms. 
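As a rough framework-side sketch of the buffer-gating rule described above (buffers are held back until the SHUTTER start-of-exposure timestamp for their frame has arrived), the following self-contained example uses simplified stand-in types rather than the real HIDL NotifyMsg/CaptureResult structures:

```cpp
// Simplified stand-ins for the HIDL NotifyMsg / CaptureResult payloads (illustrative only).
#include <cstdint>
#include <iostream>
#include <map>
#include <vector>

struct PendingFrame {
    bool shutterReceived = false;
    int64_t timestampNs = 0;          // start of exposure reported by SHUTTER
    std::vector<int> readyBuffers;    // buffers already returned via processCaptureResult()
};

class ShutterGate {
  public:
    // HAL reported notify(SHUTTER) for this frame.
    void onShutter(uint32_t frameNumber, int64_t timestampNs) {
        PendingFrame& f = mFrames[frameNumber];
        f.shutterReceived = true;
        f.timestampNs = timestampNs;
        dispatchIfReady(frameNumber);
    }

    // HAL returned a buffer for this frame via processCaptureResult().
    void onBufferReady(uint32_t frameNumber, int bufferId) {
        PendingFrame& f = mFrames[frameNumber];
        f.readyBuffers.push_back(bufferId);
        dispatchIfReady(frameNumber);
    }

  private:
    void dispatchIfReady(uint32_t frameNumber) {
        PendingFrame& f = mFrames[frameNumber];
        if (!f.shutterReceived) {
            return;  // hold buffers until the SHUTTER timestamp has arrived
        }
        for (int id : f.readyBuffers) {
            std::cout << "frame " << frameNumber << ": dispatch buffer " << id
                      << " (exposure start " << f.timestampNs << " ns)\n";
        }
        f.readyBuffers.clear();
    }

    std::map<uint32_t, PendingFrame> mFrames;
};

int main() {
    ShutterGate gate;
    gate.onBufferReady(5, 0);      // arrives before SHUTTER: held
    gate.onShutter(5, 1000000);    // SHUTTER arrives: buffer 0 is dispatched
    gate.onBufferReady(5, 1);      // arrives after SHUTTER: dispatched immediately
    return 0;
}
```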
+ */ + notify(vec msgs); + +}; diff --git a/camera/device/3.2/ICameraDeviceSession.hal b/camera/device/3.2/ICameraDeviceSession.hal new file mode 100644 index 0000000..278be5d --- /dev/null +++ b/camera/device/3.2/ICameraDeviceSession.hal @@ -0,0 +1,407 @@ +/* + * Copyright (C) 2016 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package android.hardware.camera.device@3.2; + +import android.hardware.camera.common@1.0::types; + +/** + * Camera device active session interface. + * + * Obtained via ICameraDevice::open(), this interface contains the methods to + * configure and request captures from an active camera device. + * + */ +interface ICameraDeviceSession { + + /** + * constructDefaultRequestSettings: + * + * Create capture settings for standard camera use cases. + * + * The device must return a settings buffer that is configured to meet the + * requested use case, which must be one of the CAMERA3_TEMPLATE_* + * enums. All request control fields must be included. + * + * Performance requirements: + * + * This must be a non-blocking call. The HAL should return from this call + * in 1ms, and must return from this call in 5ms. + * + * Return values: + * @return status Status code for the operation, one of: + * OK: + * On a successful construction of default settings. + * INTERNAL_ERROR: + * An unexpected internal error occurred, and the default settings + * are not available. + * ILLEGAL_ARGUMENT: + * The camera HAL does not support the input template type + * CAMERA_DISCONNECTED: + * An external camera device has been disconnected, and is no longer + * available. This camera device interface is now stale, and a new + * instance must be acquired if the device is reconnected. All + * subsequent calls on this interface must return + * CAMERA_DISCONNECTED. + * @return template The default capture request settings for the requested + * use case, or an empty metadata structure if status is not OK. + * + */ + constructDefaultRequestSettings(RequestTemplate type) generates + (Status status, CameraMetadata requestTemplate); + + /** + * configureStreams: + * + * Reset the HAL camera device processing pipeline and set up new input and + * output streams. This call replaces any existing stream configuration with + * the streams defined in the streamList. This method must be called at + * least once before a request is submitted with processCaptureRequest(). + * + * The streamList must contain at least one output-capable stream, and may + * not contain more than one input-capable stream. + * + * The streamList may contain streams that are also in the currently-active + * set of streams (from the previous call to configureStreams()). These + * streams must already have valid values for usage, maxBuffers, and the + * private pointer. + * + * If the HAL needs to change the stream configuration for an existing + * stream due to the new configuration, it may rewrite the values of usage + * and/or maxBuffers during the configure call. 
+ * + * The framework must detect such a change, and may then reallocate the + * stream buffers before using buffers from that stream in a request. + * + * If a currently-active stream is not included in streamList, the HAL may + * safely remove any references to that stream. It must not be reused in a + * later configureStreams() call by the framework, and all the gralloc + * buffers for it must be freed after the configureStreams() call returns. + * + * If the stream is new, the client must set the consumer usage flags in + * requestedConfiguration. Upon return, the HAL device must set producerUsage, + * maxBuffers, and other fields in the configureStreams() return values. These + * fields are then used by the framework and the platform gralloc module to + * allocate the gralloc buffers for each stream. + * + * Newly allocated buffers may be included in a capture request at any time + * by the framework. Once a gralloc buffer is returned to the framework + * with processCaptureResult (and its respective releaseFence has been + * signaled) the framework may free or reuse it at any time. + * + * ------------------------------------------------------------------------ + * + * Preconditions: + * + * The framework must only call this method when no captures are being + * processed. That is, all results have been returned to the framework, and + * all in-flight input and output buffers have been returned and their + * release sync fences have been signaled by the HAL. The framework must not + * submit new requests for capture while the configureStreams() call is + * underway. + * + * Postconditions: + * + * The HAL device must configure itself to provide maximum possible output + * frame rate given the sizes and formats of the output streams, as + * documented in the camera device's static metadata. + * + * Performance requirements: + * + * This call is expected to be heavyweight and possibly take several hundred + * milliseconds to complete, since it may require resetting and + * reconfiguring the image sensor and the camera processing pipeline. + * Nevertheless, the HAL device should attempt to minimize the + * reconfiguration delay to minimize the user-visible pauses during + * application operational mode changes (such as switching from still + * capture to video recording). + * + * The HAL should return from this call in 500ms, and must return from this + * call in 1000ms. + * + * @return Status Status code for the operation, one of: + * OK: + * On successful stream configuration. + * INTERNAL_ERROR: + * If there has been a fatal error and the device is no longer + * operational. Only close() can be called successfully by the + * framework after this error is returned. + * ILLEGAL_ARGUMENT: + * If the requested stream configuration is invalid. Some examples + * of invalid stream configurations include: + * - Including more than 1 INPUT stream + * - Not including any OUTPUT streams + * - Including streams with unsupported formats, or an unsupported + * size for that format. + * - Including too many output streams of a certain format. + * - Unsupported rotation configuration + * - Stream sizes/formats don't satisfy the + * StreamConfigurationMode requirements for non-NORMAL mode, or + * the requested operation_mode is not supported by the HAL. 
+ * - Unsupported usage flag + * The camera service cannot filter out all possible illegal stream + * configurations, since some devices may support more simultaneous + * streams or larger stream resolutions than the minimum required + * for a given camera device hardware level. The HAL must return an + * ILLEGAL_ARGUMENT for any unsupported stream set, and then be + * ready to accept a future valid stream configuration in a later + * configureStreams call. + * @return finalConfiguration The stream parameters desired by the HAL for + * each stream, including maximum buffers, the usage flags, and the + * override format. + * + */ + configureStreams(StreamConfiguration requestedConfiguration) + generates (Status status, + HalStreamConfiguration halConfiguration); + + /** + * processCaptureRequest: + * + * Send a list of capture requests to the HAL. The HAL must not return from + * this call until it is ready to accept the next set of requests to + * process. Only one call to processCaptureRequest() must be made at a time + * by the framework, and the calls must all be from the same thread. The + * next call to processCaptureRequest() must be made as soon as a new + * request and its associated buffers are available. In a normal preview + * scenario, this means the function is generally called again by the + * framework almost instantly. If more than one request is provided by the + * client, the HAL must process the requests in order of lowest index to + * highest index. + * + * The cachesToRemove argument contains a list of buffer caches (see + * StreamBuffer document for more information on buffer cache) to be removed + * by camera HAL. Camera HAL must remove these cache entries whether or not + * this method returns OK. + * + * The actual request processing is asynchronous, with the results of + * capture being returned by the HAL through the processCaptureResult() + * call. This call requires the result metadata to be available, but output + * buffers may simply provide sync fences to wait on. Multiple requests are + * expected to be in flight at once, to maintain full output frame rate. + * + * The framework retains ownership of the request structure. It is only + * guaranteed to be valid during this call. The HAL device must make copies + * of the information it needs to retain for the capture processing. The HAL + * is responsible for waiting on and closing the buffers' fences and + * returning the buffer handles to the framework. + * + * The HAL must write the file descriptor for the input buffer's release + * sync fence into input_buffer->release_fence, if input_buffer is not + * valid. If the HAL returns -1 for the input buffer release sync fence, the + * framework is free to immediately reuse the input buffer. Otherwise, the + * framework must wait on the sync fence before refilling and reusing the + * input buffer. + * + * The input/output buffers provided by the framework in each request + * may be brand new (having never before seen by the HAL). + * + * ------------------------------------------------------------------------ + * Performance considerations: + * + * Handling a new buffer should be extremely lightweight and there must be + * no frame rate degradation or frame jitter introduced. + * + * This call must return fast enough to ensure that the requested frame + * rate can be sustained, especially for streaming cases (post-processing + * quality settings set to FAST). 
The HAL should return this call in 1 + * frame interval, and must return from this call in 4 frame intervals. + * + * @return status Status code for the operation, one of: + * OK: + * On a successful start to processing the capture request + * ILLEGAL_ARGUMENT: + * If the input is malformed (the settings are empty when not + * allowed, there are 0 output buffers, etc) and capture processing + * cannot start. Failures during request processing must be + * handled by calling ICameraDeviceCallback::notify(). In case of + * this error, the framework retains responsibility for the + * stream buffers' fences and the buffer handles; the HAL must not + * close the fences or return these buffers with + * ICameraDeviceCallback::processCaptureResult(). + * INTERNAL_ERROR: + * If the camera device has encountered a serious error. After this + * error is returned, only the close() method can be successfully + * called by the framework. + * @return numRequestProcessed Number of requests successfully processed by + * camera HAL. When status is OK, this must be equal to the size of + * requests. When the call fails, this number is the number of requests + * that HAL processed successfully before HAL runs into an error. + * + */ + processCaptureRequest(vec requests, + vec cachesToRemove) + generates (Status status, uint32_t numRequestProcessed); + + /** + * getCaptureRequestMetadataQueue: + * + * Retrieves the queue used along with processCaptureRequest. If + * client decides to use fast message queue to pass request metadata, + * it must: + * - Call getCaptureRequestMetadataQueue to retrieve the fast message queue; + * - In each of the requests sent in processCaptureRequest, set + * fmqSettingsSize field of CaptureRequest to be the size to read from the + * fast message queue; leave settings field of CaptureRequest empty. + * + * @return queue the queue that client writes request metadata to. + */ + getCaptureRequestMetadataQueue() generates (fmq_sync queue); + + /** + * getCaptureResultMetadataQueue: + * + * Retrieves the queue used along with + * ICameraDeviceCallback.processCaptureResult. + * + * Clients to ICameraDeviceSession must: + * - Call getCaptureRequestMetadataQueue to retrieve the fast message queue; + * - In implementation of ICameraDeviceCallback, test whether + * .fmqResultSize field is zero. + * - If .fmqResultSize != 0, read result metadata from the fast message + * queue; + * - otherwise, read result metadata in CaptureResult.result. + * + * @return queue the queue that implementation writes result metadata to. + */ + getCaptureResultMetadataQueue() generates (fmq_sync queue); + + /** + * flush: + * + * Flush all currently in-process captures and all buffers in the pipeline + * on the given device. Generally, this method is used to dump all state as + * quickly as possible in order to prepare for a configure_streams() call. + * + * No buffers are required to be successfully returned, so every buffer + * held at the time of flush() (whether successfully filled or not) may be + * returned with CAMERA3_BUFFER_STATUS_ERROR. Note the HAL is still allowed + * to return valid (CAMERA3_BUFFER_STATUS_OK) buffers during this call, + * provided they are successfully filled. + * + * All requests currently in the HAL are expected to be returned as soon as + * possible. Not-in-process requests must return errors immediately. Any + * interruptible hardware blocks must be stopped, and any uninterruptible + * blocks must be waited on. 
+ * + * flush() may be called concurrently to processCaptureRequest(), with the + * expectation that processCaptureRequest returns quickly and the + * request submitted in that processCaptureRequest call is treated like + * all other in-flight requests. Due to concurrency issues, it is possible + * that from the HAL's point of view, a processCaptureRequest() call may + * be started after flush has been invoked but has not returned yet. If such + * a call happens before flush() returns, the HAL must treat the new + * capture request like other in-flight pending requests (see #4 below). + * + * More specifically, the HAL must follow below requirements for various + * cases: + * + * 1. For captures that are too late for the HAL to cancel/stop, and must be + * completed normally by the HAL; i.e. the HAL can send shutter/notify + * and processCaptureResult and buffers as normal. + * + * 2. For pending requests that have not done any processing, the HAL must + * call notify CAMERA3_MSG_ERROR_REQUEST, and return all the output + * buffers with processCaptureResult in the error state + * (CAMERA3_BUFFER_STATUS_ERROR). The HAL must not place the release + * fence into an error state, instead, the release fences must be set to + * the acquire fences passed by the framework, or -1 if they have been + * waited on by the HAL already. This is also the path to follow for any + * captures for which the HAL already called notify() with + * CAMERA3_MSG_SHUTTER but won't be producing any metadata/valid buffers + * for. After CAMERA3_MSG_ERROR_REQUEST, for a given frame, only + * processCaptureResults with buffers in CAMERA3_BUFFER_STATUS_ERROR + * are allowed. No further notifys or processCaptureResult with + * non-empty metadata is allowed. + * + * 3. For partially completed pending requests that do not have all the + * output buffers or perhaps missing metadata, the HAL must follow + * below: + * + * 3.1. Call notify with CAMERA3_MSG_ERROR_RESULT if some of the expected + * result metadata (i.e. one or more partial metadata) won't be + * available for the capture. + * + * 3.2. Call notify with CAMERA3_MSG_ERROR_BUFFER for every buffer that + * won't be produced for the capture. + * + * 3.3. Call notify with CAMERA3_MSG_SHUTTER with the capture timestamp + * before any buffers/metadata are returned with + * processCaptureResult. + * + * 3.4. For captures that will produce some results, the HAL must not + * call CAMERA3_MSG_ERROR_REQUEST, since that indicates complete + * failure. + * + * 3.5. Valid buffers/metadata must be passed to the framework as + * normal. + * + * 3.6. Failed buffers must be returned to the framework as described + * for case 2. But failed buffers do not have to follow the strict + * ordering valid buffers do, and may be out-of-order with respect + * to valid buffers. For example, if buffers A, B, C, D, E are sent, + * D and E are failed, then A, E, B, D, C is an acceptable return + * order. + * + * 3.7. For fully-missing metadata, calling CAMERA3_MSG_ERROR_RESULT is + * sufficient, no need to call processCaptureResult with empty + * metadata or equivalent. + * + * 4. If a flush() is invoked while a processCaptureRequest() invocation + * is active, that process call must return as soon as possible. In + * addition, if a processCaptureRequest() call is made after flush() + * has been invoked but before flush() has returned, the capture request + * provided by the late processCaptureRequest call must be treated + * like a pending request in case #2 above. 
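A minimal sketch of case #2 above, with hypothetical helper names (PendingRequest, abortPendingRequest) standing in for the HAL's real bookkeeping: the capture is failed with an ERROR_REQUEST notification, and every buffer is returned in the error state with its release fence set to the unconsumed acquire fence (or -1 if already waited on).

```cpp
// Illustrative sketch with hypothetical types; not the structures defined by this HAL.
#include <cstdint>
#include <cstdio>
#include <functional>
#include <vector>

struct AbortedBuffer {
    int streamId;
    int acquireFence;   // fence handed in by the framework, or -1 if already waited on
    int releaseFence;   // filled in by the HAL when the buffer is returned
    bool error;         // corresponds to returning the buffer with status ERROR
};

struct PendingRequest {
    uint32_t frameNumber = 0;
    std::vector<AbortedBuffer> buffers;
};

void abortPendingRequest(PendingRequest& req,
                         const std::function<void(uint32_t)>& notifyErrorRequest,
                         const std::function<void(const PendingRequest&)>& returnBuffers) {
    // 1. Fail the whole capture with an ERROR_REQUEST notification.
    notifyErrorRequest(req.frameNumber);

    // 2. Return every output buffer in the error state. The release fence is set to the
    //    unconsumed acquire fence, or -1 if the HAL already waited on it; the fence itself
    //    is never put into an error state.
    for (AbortedBuffer& buf : req.buffers) {
        buf.error = true;
        buf.releaseFence = buf.acquireFence;
        buf.acquireFence = -1;
    }

    // 3. Send the buffers back with empty result metadata.
    returnBuffers(req);
}

int main() {
    PendingRequest req;
    req.frameNumber = 42;
    req.buffers.push_back({/*streamId=*/0, /*acquireFence=*/-1, /*releaseFence=*/-1, /*error=*/false});

    abortPendingRequest(
            req,
            [](uint32_t frame) { std::printf("notify ERROR_REQUEST for frame %u\n", frame); },
            [](const PendingRequest& r) {
                std::printf("return %zu buffer(s) of frame %u in error state\n",
                            r.buffers.size(), r.frameNumber);
            });
    return 0;
}
```

Cases #1 and #3 follow the same shape, differing only in which notifications, metadata, and buffers are still delivered normally.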
+ * + * flush() must only return when there are no more outstanding buffers or + * requests left in the HAL. The framework may call configure_streams (as + * the HAL state is now quiesced) or may issue new requests. + * + * Note that it's sufficient to only support fully-succeeded and + * fully-failed result cases. However, it is highly desirable to support + * the partial failure cases as well, as it could help improve the flush + * call overall performance. + * + * Performance requirements: + * + * The HAL should return from this call in 100ms, and must return from this + * call in 1000ms. And this call must not be blocked longer than pipeline + * latency (see S7 for definition). + * + * @return status Status code for the operation, one of: + * OK: + * On a successful flush of the camera HAL. + * INTERNAL_ERROR: + * If the camera device has encountered a serious error. After this + * error is returned, only the close() method can be successfully + * called by the framework. + */ + flush() generates (Status status); + + /** + * close: + * + * Shut down the camera device. + * + * After this call, all calls to this session instance must return + * INTERNAL_ERROR. + * + * This method must always succeed, even if the device has encountered a + * serious error. + */ + close(); +}; diff --git a/camera/device/3.2/default/Android.bp b/camera/device/3.2/default/Android.bp new file mode 100644 index 0000000..dc4f950 --- /dev/null +++ b/camera/device/3.2/default/Android.bp @@ -0,0 +1,32 @@ +cc_library_shared { + name: "vendor.camera.device@3.2-impl", + defaults: ["hidl_defaults"], + proprietary: true, + srcs: [ + "CameraDevice.cpp", + "CameraDeviceSession.cpp", + "convert.cpp", + ], + shared_libs: [ + "libhidlbase", + "libutils", + "libcutils", + "android.hardware.camera.device@3.2", + "android.hardware.camera.provider@2.4", + "android.hardware.graphics.mapper@2.0", + "android.hardware.graphics.mapper@3.0", + "android.hardware.graphics.mapper@4.0", + "liblog", + "libgralloctypes", + "libhardware", + "libcamera_metadata", + "libfmq", + ], + static_libs: [ + "android.vendor.hardware.camera.common@1.0-helper", + ], + export_include_dirs: ["."], + export_shared_lib_headers: [ + "libfmq", + ], +} diff --git a/camera/device/3.2/default/CameraDevice.cpp b/camera/device/3.2/default/CameraDevice.cpp new file mode 100644 index 0000000..4f85b58 --- /dev/null +++ b/camera/device/3.2/default/CameraDevice.cpp @@ -0,0 +1,317 @@ +/* + * Copyright (C) 2016 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#define LOG_TAG "CamDev@3.2-impl" +#include + +#include +#include +#include "CameraDevice_3_2.h" +#include + +namespace android { +namespace hardware { +namespace camera { +namespace device { +namespace V3_2 { +namespace implementation { + +using ::android::hardware::camera::common::V1_0::Status; + +CameraDevice::CameraDevice( + sp module, const std::string& cameraId, + const SortedVector>& cameraDeviceNames) : + mModule(module), + mCameraId(cameraId), + mDisconnected(false), + mCameraDeviceNames(cameraDeviceNames) { + mCameraIdInt = atoi(mCameraId.c_str()); + // Should not reach here as provider also validate ID + if (mCameraIdInt < 0) { + ALOGE("%s: Invalid camera id: %s", __FUNCTION__, mCameraId.c_str()); + mInitFail = true; + } else if (mCameraIdInt >= mModule->getNumberOfCameras()) { + ALOGI("%s: Adding a new camera id: %s", __FUNCTION__, mCameraId.c_str()); + } + + mDeviceVersion = mModule->getDeviceVersion(mCameraIdInt); + if (mDeviceVersion < CAMERA_DEVICE_API_VERSION_3_2) { + ALOGE("%s: Camera id %s does not support HAL3.2+", + __FUNCTION__, mCameraId.c_str()); + mInitFail = true; + } +} + +CameraDevice::~CameraDevice() {} + +Status CameraDevice::initStatus() const { + Mutex::Autolock _l(mLock); + Status status = Status::OK; + if (mInitFail) { + status = Status::INTERNAL_ERROR; + } else if (mDisconnected) { + status = Status::CAMERA_DISCONNECTED; + } + return status; +} + +void CameraDevice::setConnectionStatus(bool connected) { + Mutex::Autolock _l(mLock); + mDisconnected = !connected; + if (mSession == nullptr) { + return; + } + sp session = mSession.promote(); + if (session == nullptr) { + return; + } + // Only notify active session disconnect events. + // Users will need to re-open camera after disconnect event + if (!connected) { + session->disconnect(); + } + return; +} + +Status CameraDevice::getHidlStatus(int status) { + switch (status) { + case 0: return Status::OK; + case -ENOSYS: return Status::OPERATION_NOT_SUPPORTED; + case -EBUSY : return Status::CAMERA_IN_USE; + case -EUSERS: return Status::MAX_CAMERAS_IN_USE; + case -ENODEV: return Status::INTERNAL_ERROR; + case -EINVAL: return Status::ILLEGAL_ARGUMENT; + default: + ALOGE("%s: unknown HAL status code %d", __FUNCTION__, status); + return Status::INTERNAL_ERROR; + } +} + +// Methods from ::android::hardware::camera::device::V3_2::ICameraDevice follow. 
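The getResourceCost() implementation that follows translates the legacy module's conflicting camera IDs into fully qualified device instance names. A self-contained sketch of just that lookup, with hypothetical names and none of the HIDL plumbing, looks roughly like this:

```cpp
// Standalone sketch of the ID-to-name translation performed by getResourceCost() below.
#include <iostream>
#include <string>
#include <utility>
#include <vector>

// Pairs of {legacy camera id, fully qualified device instance name}.
using DeviceNames = std::vector<std::pair<std::string, std::string>>;

std::vector<std::string> resolveConflictingDevices(const std::vector<std::string>& conflictingIds,
                                                   const DeviceNames& cameraDeviceNames) {
    std::vector<std::string> resolved;
    for (const std::string& id : conflictingIds) {
        for (const auto& pair : cameraDeviceNames) {
            if (id == pair.first) {
                resolved.push_back(pair.second);  // report the instance name, not the raw id
            }
        }
    }
    return resolved;
}

int main() {
    DeviceNames names = {{"0", "device@3.2/legacy/0"}, {"1", "device@3.2/legacy/1"}};
    for (const std::string& name : resolveConflictingDevices({"1"}, names)) {
        std::cout << name << "\n";  // prints device@3.2/legacy/1
    }
    return 0;
}
```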
+Return CameraDevice::getResourceCost(ICameraDevice::getResourceCost_cb _hidl_cb) { + Status status = initStatus(); + CameraResourceCost resCost; + if (status == Status::OK) { + int cost = 100; + std::vector conflicting_devices; + struct camera_info info; + + // If using post-2.4 module version, query the cost + conflicting devices from the HAL + if (mModule->getModuleApiVersion() >= CAMERA_MODULE_API_VERSION_2_4) { + int ret = mModule->getCameraInfo(mCameraIdInt, &info); + if (ret == OK) { + cost = info.resource_cost; + for (size_t i = 0; i < info.conflicting_devices_length; i++) { + std::string cameraId(info.conflicting_devices[i]); + for (const auto& pair : mCameraDeviceNames) { + if (cameraId == pair.first) { + conflicting_devices.push_back(pair.second); + } + } + } + } else { + status = Status::INTERNAL_ERROR; + } + } + + if (status == Status::OK) { + resCost.resourceCost = cost; + resCost.conflictingDevices.resize(conflicting_devices.size()); + for (size_t i = 0; i < conflicting_devices.size(); i++) { + resCost.conflictingDevices[i] = conflicting_devices[i]; + ALOGV("CamDevice %s is conflicting with camDevice %s", + mCameraId.c_str(), resCost.conflictingDevices[i].c_str()); + } + } + } + _hidl_cb(status, resCost); + return Void(); +} + +Return CameraDevice::getCameraCharacteristics( + ICameraDevice::getCameraCharacteristics_cb _hidl_cb) { + Status status = initStatus(); + CameraMetadata cameraCharacteristics; + if (status == Status::OK) { + //Module 2.1+ codepath. + struct camera_info info; + int ret = mModule->getCameraInfo(mCameraIdInt, &info); + if (ret == OK) { + convertToHidl(info.static_camera_characteristics, &cameraCharacteristics); + } else { + ALOGE("%s: get camera info failed!", __FUNCTION__); + status = Status::INTERNAL_ERROR; + } + } + _hidl_cb(status, cameraCharacteristics); + return Void(); +} + +Return CameraDevice::setTorchMode(TorchMode mode) { + if (!mModule->isSetTorchModeSupported()) { + return Status::METHOD_NOT_SUPPORTED; + } + + Status status = initStatus(); + if (status == Status::OK) { + bool enable = (mode == TorchMode::ON) ? true : false; + status = getHidlStatus(mModule->setTorchMode(mCameraId.c_str(), enable)); + } + return status; +} + +Return CameraDevice::open(const sp& callback, + ICameraDevice::open_cb _hidl_cb) { + Status status = initStatus(); + sp session = nullptr; + + if (callback == nullptr) { + ALOGE("%s: cannot open camera %s. callback is null!", + __FUNCTION__, mCameraId.c_str()); + _hidl_cb(Status::ILLEGAL_ARGUMENT, nullptr); + return Void(); + } + + if (status != Status::OK) { + // Provider will never pass initFailed device to client, so + // this must be a disconnected camera + ALOGE("%s: cannot open camera %s. 
camera is disconnected!", + __FUNCTION__, mCameraId.c_str()); + _hidl_cb(Status::CAMERA_DISCONNECTED, nullptr); + return Void(); + } else { + mLock.lock(); + + ALOGV("%s: Initializing device for camera %d", __FUNCTION__, mCameraIdInt); + session = mSession.promote(); + if (session != nullptr && !session->isClosed()) { + ALOGE("%s: cannot open an already opened camera!", __FUNCTION__); + mLock.unlock(); + _hidl_cb(Status::CAMERA_IN_USE, nullptr); + return Void(); + } + + /** Open HAL device */ + status_t res; + camera3_device_t *device; + + ATRACE_BEGIN("camera3->open"); + res = mModule->open(mCameraId.c_str(), + reinterpret_cast(&device)); + ATRACE_END(); + + if (res != OK) { + ALOGE("%s: cannot open camera %s!", __FUNCTION__, mCameraId.c_str()); + mLock.unlock(); + _hidl_cb(getHidlStatus(res), nullptr); + return Void(); + } + + /** Cross-check device version */ + if (device->common.version < CAMERA_DEVICE_API_VERSION_3_2) { + ALOGE("%s: Could not open camera: " + "Camera device should be at least %x, reports %x instead", + __FUNCTION__, + CAMERA_DEVICE_API_VERSION_3_2, + device->common.version); + device->common.close(&device->common); + mLock.unlock(); + _hidl_cb(Status::ILLEGAL_ARGUMENT, nullptr); + return Void(); + } + + struct camera_info info; + res = mModule->getCameraInfo(mCameraIdInt, &info); + if (res != OK) { + ALOGE("%s: Could not open camera: getCameraInfo failed", __FUNCTION__); + device->common.close(&device->common); + mLock.unlock(); + _hidl_cb(Status::ILLEGAL_ARGUMENT, nullptr); + return Void(); + } + + session = createSession( + device, info.static_camera_characteristics, callback); + if (session == nullptr) { + ALOGE("%s: camera device session allocation failed", __FUNCTION__); + mLock.unlock(); + _hidl_cb(Status::INTERNAL_ERROR, nullptr); + return Void(); + } + if (session->isInitFailed()) { + ALOGE("%s: camera device session init failed", __FUNCTION__); + session = nullptr; + mLock.unlock(); + _hidl_cb(Status::INTERNAL_ERROR, nullptr); + return Void(); + } + mSession = session; + + IF_ALOGV() { + session->getInterface()->interfaceChain([]( + ::android::hardware::hidl_vec<::android::hardware::hidl_string> interfaceChain) { + ALOGV("Session interface chain:"); + for (const auto& iface : interfaceChain) { + ALOGV(" %s", iface.c_str()); + } + }); + } + mLock.unlock(); + } + _hidl_cb(status, session->getInterface()); + return Void(); +} + +Return CameraDevice::dumpState(const ::android::hardware::hidl_handle& handle) { + Mutex::Autolock _l(mLock); + if (handle.getNativeHandle() == nullptr) { + ALOGE("%s: handle must not be null", __FUNCTION__); + return Void(); + } + if (handle->numFds != 1 || handle->numInts != 0) { + ALOGE("%s: handle must contain 1 FD and 0 integers! Got %d FDs and %d ints", + __FUNCTION__, handle->numFds, handle->numInts); + return Void(); + } + int fd = handle->data[0]; + if (mSession == nullptr) { + dprintf(fd, "No active camera device session instance\n"); + return Void(); + } + sp session = mSession.promote(); + if (session == nullptr) { + dprintf(fd, "No active camera device session instance\n"); + return Void(); + } + // Call into active session to dump states + session->dumpState(handle); + return Void(); +} + +sp CameraDevice::createSession(camera3_device_t* device, + const camera_metadata_t* deviceInfo, + const sp& callback) { + return new CameraDeviceSession(device, deviceInfo, callback); +} + +// End of methods from ::android::hardware::camera::device::V3_2::ICameraDevice. 
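The dumpState() contract enforced above (the handle must carry exactly one writable file descriptor and no trailing integers, and the output is plain ASCII debug text) can be exercised in isolation. This is an illustrative sketch, not code from this patch; DumpHandle is a simplified stand-in for native_handle_t.

```cpp
// DumpHandle is a simplified stand-in for the native_handle_t passed to dumpState().
#include <stdio.h>
#include <unistd.h>
#include <vector>

struct DumpHandle {
    std::vector<int> fds;   // native_handle_t::data[0 .. numFds)
    std::vector<int> ints;  // trailing integers; must be empty for dumpState()
};

// Mirrors the validation above: exactly one FD, zero integers, ASCII text only.
bool dumpTo(const DumpHandle& handle, const char* stateText) {
    if (handle.fds.size() != 1 || !handle.ints.empty()) {
        fprintf(stderr, "dump handle must contain 1 FD and 0 integers\n");
        return false;
    }
    dprintf(handle.fds[0], "%s\n", stateText);
    return true;
}

int main() {
    DumpHandle handle{{STDOUT_FILENO}, {}};
    dumpTo(handle, "No active camera device session instance");
    return 0;
}
```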
+ +} // namespace implementation +} // namespace V3_2 +} // namespace device +} // namespace camera +} // namespace hardware +} // namespace android diff --git a/camera/device/3.2/default/CameraDeviceSession.cpp b/camera/device/3.2/default/CameraDeviceSession.cpp new file mode 100644 index 0000000..769991c --- /dev/null +++ b/camera/device/3.2/default/CameraDeviceSession.cpp @@ -0,0 +1,1637 @@ +/* + * Copyright (C) 2016 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "CamDevSession@3.2-impl" +#include + +#include +#include +#include +#include +#include +#include "CameraDeviceSession.h" + +namespace android { +namespace hardware { +namespace camera { +namespace device { +namespace V3_2 { +namespace implementation { + +// Size of request metadata fast message queue. Change to 0 to always use hwbinder buffer. +static constexpr int32_t CAMERA_REQUEST_METADATA_QUEUE_SIZE = 1 << 20 /* 1MB */; +// Size of result metadata fast message queue. Change to 0 to always use hwbinder buffer. +static constexpr int32_t CAMERA_RESULT_METADATA_QUEUE_SIZE = 1 << 20 /* 1MB */; + +// Metadata sent by HAL will be replaced by a compact copy +// if their (total size >= compact size + METADATA_SHRINK_ABS_THRESHOLD && +// total_size >= compact size * METADATA_SHRINK_REL_THRESHOLD) +// Heuristically picked by size of one page +static constexpr int METADATA_SHRINK_ABS_THRESHOLD = 4096; +static constexpr int METADATA_SHRINK_REL_THRESHOLD = 2; + +HandleImporter CameraDeviceSession::sHandleImporter; +buffer_handle_t CameraDeviceSession::sEmptyBuffer = nullptr; + +const int CameraDeviceSession::ResultBatcher::NOT_BATCHED; + +CameraDeviceSession::CameraDeviceSession( + camera3_device_t* device, + const camera_metadata_t* deviceInfo, + const sp& callback) : + camera3_callback_ops({&sProcessCaptureResult, &sNotify, nullptr, nullptr}), + mDevice(device), + mDeviceVersion(device->common.version), + mFreeBufEarly(shouldFreeBufEarly()), + mIsAELockAvailable(false), + mDerivePostRawSensKey(false), + mNumPartialResults(1), + mResultBatcher(callback) { + mDeviceInfo = deviceInfo; + camera_metadata_entry partialResultsCount = + mDeviceInfo.find(ANDROID_REQUEST_PARTIAL_RESULT_COUNT); + if (partialResultsCount.count > 0) { + mNumPartialResults = partialResultsCount.data.i32[0]; + } + mResultBatcher.setNumPartialResults(mNumPartialResults); + + camera_metadata_entry aeLockAvailableEntry = mDeviceInfo.find( + ANDROID_CONTROL_AE_LOCK_AVAILABLE); + if (aeLockAvailableEntry.count > 0) { + mIsAELockAvailable = (aeLockAvailableEntry.data.u8[0] == + ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE); + } + + // Determine whether we need to derive sensitivity boost values for older devices. 
+ // If post-RAW sensitivity boost range is listed, so should post-raw sensitivity control + // be listed (as the default value 100) + if (mDeviceInfo.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE)) { + mDerivePostRawSensKey = true; + } + + mInitFail = initialize(); +} + +bool CameraDeviceSession::initialize() { + /** Initialize device with callback functions */ + ATRACE_BEGIN("camera3->initialize"); + status_t res = mDevice->ops->initialize(mDevice, this); + ATRACE_END(); + + if (res != OK) { + ALOGE("%s: Unable to initialize HAL device: %s (%d)", + __FUNCTION__, strerror(-res), res); + mDevice->common.close(&mDevice->common); + mClosed = true; + return true; + } + + // "ro.camera" properties are no longer supported on vendor side. + // Support a fall back for the fmq size override that uses "ro.vendor.camera" + // properties. + int32_t reqFMQSize = property_get_int32("ro.vendor.camera.req.fmq.size", /*default*/-1); + if (reqFMQSize < 0) { + reqFMQSize = property_get_int32("ro.camera.req.fmq.size", /*default*/-1); + if (reqFMQSize < 0) { + reqFMQSize = CAMERA_REQUEST_METADATA_QUEUE_SIZE; + } else { + ALOGV("%s: request FMQ size overridden to %d", __FUNCTION__, reqFMQSize); + } + } else { + ALOGV("%s: request FMQ size overridden to %d via fallback property", __FUNCTION__, + reqFMQSize); + } + + mRequestMetadataQueue = std::make_unique( + static_cast(reqFMQSize), + false /* non blocking */); + if (!mRequestMetadataQueue->isValid()) { + ALOGE("%s: invalid request fmq", __FUNCTION__); + return true; + } + + // "ro.camera" properties are no longer supported on vendor side. + // Support a fall back for the fmq size override that uses "ro.vendor.camera" + // properties. + int32_t resFMQSize = property_get_int32("ro.vendor.camera.res.fmq.size", /*default*/-1); + if (resFMQSize < 0) { + resFMQSize = property_get_int32("ro.camera.res.fmq.size", /*default*/-1); + if (resFMQSize < 0) { + resFMQSize = CAMERA_RESULT_METADATA_QUEUE_SIZE; + } else { + ALOGV("%s: result FMQ size overridden to %d", __FUNCTION__, resFMQSize); + } + } else { + ALOGV("%s: result FMQ size overridden to %d via fallback property", __FUNCTION__, + resFMQSize); + } + + mResultMetadataQueue = std::make_shared( + static_cast(resFMQSize), + false /* non blocking */); + if (!mResultMetadataQueue->isValid()) { + ALOGE("%s: invalid result fmq", __FUNCTION__); + return true; + } + mResultBatcher.setResultMetadataQueue(mResultMetadataQueue); + + return false; +} + +bool CameraDeviceSession::shouldFreeBufEarly() { + return property_get_bool("ro.vendor.camera.free_buf_early", 0) == 1; +} + +CameraDeviceSession::~CameraDeviceSession() { + if (!isClosed()) { + ALOGE("CameraDeviceSession deleted before close!"); + close(); + } +} + +bool CameraDeviceSession::isClosed() { + Mutex::Autolock _l(mStateLock); + return mClosed; +} + +Status CameraDeviceSession::initStatus() const { + Mutex::Autolock _l(mStateLock); + Status status = Status::OK; + if (mInitFail) { + status = Status::INTERNAL_ERROR; + } else if (mDisconnected) { + status = Status::CAMERA_DISCONNECTED; + } else if (mClosed) { + status = Status::INTERNAL_ERROR; + } + return status; +} + +void CameraDeviceSession::disconnect() { + Mutex::Autolock _l(mStateLock); + mDisconnected = true; + ALOGW("%s: Camera device is disconnected. 
Closing.", __FUNCTION__); + if (!mClosed) { + mDevice->common.close(&mDevice->common); + mClosed = true; + } +} + +void CameraDeviceSession::dumpState(const native_handle_t* fd) { + if (!isClosed()) { + mDevice->ops->dump(mDevice, fd->data[0]); + } +} + +/** + * For devices <= CAMERA_DEVICE_API_VERSION_3_2, AE_PRECAPTURE_TRIGGER_CANCEL is not supported so + * we need to override AE_PRECAPTURE_TRIGGER_CANCEL to AE_PRECAPTURE_TRIGGER_IDLE and AE_LOCK_OFF + * to AE_LOCK_ON to start cancelling AE precapture. If AE lock is not available, it still overrides + * AE_PRECAPTURE_TRIGGER_CANCEL to AE_PRECAPTURE_TRIGGER_IDLE but doesn't add AE_LOCK_ON to the + * request. + */ +bool CameraDeviceSession::handleAePrecaptureCancelRequestLocked( + const camera3_capture_request_t &halRequest, + ::android::hardware::camera::common::V1_0::helper::CameraMetadata *settings /*out*/, + AETriggerCancelOverride *override /*out*/) { + if ((mDeviceVersion > CAMERA_DEVICE_API_VERSION_3_2) || + (nullptr == halRequest.settings) || (nullptr == settings) || + (0 == get_camera_metadata_entry_count(halRequest.settings))) { + return false; + } + + settings->clear(); + settings->append(halRequest.settings); + camera_metadata_entry_t aePrecaptureTrigger = + settings->find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER); + if (aePrecaptureTrigger.count > 0 && + aePrecaptureTrigger.data.u8[0] == + ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL) { + // Always override CANCEL to IDLE + uint8_t aePrecaptureTrigger = + ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE; + settings->update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, + &aePrecaptureTrigger, 1); + *override = { false, ANDROID_CONTROL_AE_LOCK_OFF, + true, ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL }; + + if (mIsAELockAvailable == true) { + camera_metadata_entry_t aeLock = settings->find( + ANDROID_CONTROL_AE_LOCK); + if (aeLock.count == 0 || aeLock.data.u8[0] == + ANDROID_CONTROL_AE_LOCK_OFF) { + uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_ON; + settings->update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1); + override->applyAeLock = true; + override->aeLock = ANDROID_CONTROL_AE_LOCK_OFF; + } + } + + return true; + } + + return false; +} + +/** + * Override result metadata for cancelling AE precapture trigger applied in + * handleAePrecaptureCancelRequestLocked(). 
+ */ +void CameraDeviceSession::overrideResultForPrecaptureCancelLocked( + const AETriggerCancelOverride &aeTriggerCancelOverride, + ::android::hardware::camera::common::V1_0::helper::CameraMetadata *settings /*out*/) { + if (aeTriggerCancelOverride.applyAeLock) { + // Only devices <= v3.2 should have this override + assert(mDeviceVersion <= CAMERA_DEVICE_API_VERSION_3_2); + settings->update(ANDROID_CONTROL_AE_LOCK, &aeTriggerCancelOverride.aeLock, 1); + } + + if (aeTriggerCancelOverride.applyAePrecaptureTrigger) { + // Only devices <= v3.2 should have this override + assert(mDeviceVersion <= CAMERA_DEVICE_API_VERSION_3_2); + settings->update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, + &aeTriggerCancelOverride.aePrecaptureTrigger, 1); + } +} + +Status CameraDeviceSession::importBuffer(int32_t streamId, + uint64_t bufId, buffer_handle_t buf, + /*out*/buffer_handle_t** outBufPtr, + bool allowEmptyBuf) { + + if (buf == nullptr && bufId == BUFFER_ID_NO_BUFFER) { + if (allowEmptyBuf) { + *outBufPtr = &sEmptyBuffer; + return Status::OK; + } else { + ALOGE("%s: bufferId %" PRIu64 " has null buffer handle!", __FUNCTION__, bufId); + return Status::ILLEGAL_ARGUMENT; + } + } + + Mutex::Autolock _l(mInflightLock); + CirculatingBuffers& cbs = mCirculatingBuffers[streamId]; + if (cbs.count(bufId) == 0) { + // Register a newly seen buffer + buffer_handle_t importedBuf = buf; + sHandleImporter.importBuffer(importedBuf); + if (importedBuf == nullptr) { + ALOGE("%s: output buffer for stream %d is invalid!", __FUNCTION__, streamId); + return Status::INTERNAL_ERROR; + } else { + cbs[bufId] = importedBuf; + } + } + *outBufPtr = &cbs[bufId]; + return Status::OK; +} + +Status CameraDeviceSession::importRequest( + const CaptureRequest& request, + hidl_vec& allBufPtrs, + hidl_vec& allFences) { + return importRequestImpl(request, allBufPtrs, allFences); +} + +Status CameraDeviceSession::importRequestImpl( + const CaptureRequest& request, + hidl_vec& allBufPtrs, + hidl_vec& allFences, + bool allowEmptyBuf) { + bool hasInputBuf = (request.inputBuffer.streamId != -1 && + request.inputBuffer.bufferId != 0); + size_t numOutputBufs = request.outputBuffers.size(); + size_t numBufs = numOutputBufs + (hasInputBuf ? 1 : 0); + // Validate all I/O buffers + hidl_vec allBufs; + hidl_vec allBufIds; + allBufs.resize(numBufs); + allBufIds.resize(numBufs); + allBufPtrs.resize(numBufs); + allFences.resize(numBufs); + std::vector streamIds(numBufs); + + for (size_t i = 0; i < numOutputBufs; i++) { + allBufs[i] = request.outputBuffers[i].buffer.getNativeHandle(); + allBufIds[i] = request.outputBuffers[i].bufferId; + allBufPtrs[i] = &allBufs[i]; + streamIds[i] = request.outputBuffers[i].streamId; + } + if (hasInputBuf) { + allBufs[numOutputBufs] = request.inputBuffer.buffer.getNativeHandle(); + allBufIds[numOutputBufs] = request.inputBuffer.bufferId; + allBufPtrs[numOutputBufs] = &allBufs[numOutputBufs]; + streamIds[numOutputBufs] = request.inputBuffer.streamId; + } + + for (size_t i = 0; i < numBufs; i++) { + Status st = importBuffer( + streamIds[i], allBufIds[i], allBufs[i], &allBufPtrs[i], + // Disallow empty buf for input stream, otherwise follow + // the allowEmptyBuf argument. + (hasInputBuf && i == numOutputBufs) ? false : allowEmptyBuf); + if (st != Status::OK) { + // Detailed error logs printed in importBuffer + return st; + } + } + + // All buffers are imported. 
Now validate output buffer acquire fences + for (size_t i = 0; i < numOutputBufs; i++) { + if (!sHandleImporter.importFence( + request.outputBuffers[i].acquireFence, allFences[i])) { + ALOGE("%s: output buffer %zu acquire fence is invalid", __FUNCTION__, i); + cleanupInflightFences(allFences, i); + return Status::INTERNAL_ERROR; + } + } + + // Validate input buffer acquire fences + if (hasInputBuf) { + if (!sHandleImporter.importFence( + request.inputBuffer.acquireFence, allFences[numOutputBufs])) { + ALOGE("%s: input buffer acquire fence is invalid", __FUNCTION__); + cleanupInflightFences(allFences, numOutputBufs); + return Status::INTERNAL_ERROR; + } + } + return Status::OK; +} + +void CameraDeviceSession::cleanupInflightFences( + hidl_vec& allFences, size_t numFences) { + for (size_t j = 0; j < numFences; j++) { + sHandleImporter.closeFence(allFences[j]); + } +} + +CameraDeviceSession::ResultBatcher::ResultBatcher( + const sp& callback) : mCallback(callback) {}; + +bool CameraDeviceSession::ResultBatcher::InflightBatch::allDelivered() const { + if (!mShutterDelivered) return false; + + if (mPartialResultProgress < mNumPartialResults) { + return false; + } + + for (const auto& pair : mBatchBufs) { + if (!pair.second.mDelivered) { + return false; + } + } + return true; +} + +void CameraDeviceSession::ResultBatcher::setNumPartialResults(uint32_t n) { + Mutex::Autolock _l(mLock); + mNumPartialResults = n; +} + +void CameraDeviceSession::ResultBatcher::setBatchedStreams( + const std::vector& streamsToBatch) { + Mutex::Autolock _l(mLock); + mStreamsToBatch = streamsToBatch; +} + +void CameraDeviceSession::ResultBatcher::setResultMetadataQueue( + std::shared_ptr q) { + Mutex::Autolock _l(mLock); + mResultMetadataQueue = q; +} + +void CameraDeviceSession::ResultBatcher::registerBatch(uint32_t frameNumber, uint32_t batchSize) { + auto batch = std::make_shared(); + batch->mFirstFrame = frameNumber; + batch->mBatchSize = batchSize; + batch->mLastFrame = batch->mFirstFrame + batch->mBatchSize - 1; + batch->mNumPartialResults = mNumPartialResults; + for (int id : mStreamsToBatch) { + batch->mBatchBufs.emplace(id, batch->mBatchSize); + } + Mutex::Autolock _l(mLock); + mInflightBatches.push_back(batch); +} + +std::pair> +CameraDeviceSession::ResultBatcher::getBatch( + uint32_t frameNumber) { + Mutex::Autolock _l(mLock); + int numBatches = mInflightBatches.size(); + if (numBatches == 0) { + return std::make_pair(NOT_BATCHED, nullptr); + } + uint32_t frameMin = mInflightBatches[0]->mFirstFrame; + uint32_t frameMax = mInflightBatches[numBatches - 1]->mLastFrame; + if (frameNumber < frameMin || frameNumber > frameMax) { + return std::make_pair(NOT_BATCHED, nullptr); + } + for (int i = 0; i < numBatches; i++) { + if (frameNumber >= mInflightBatches[i]->mFirstFrame && + frameNumber <= mInflightBatches[i]->mLastFrame) { + return std::make_pair(i, mInflightBatches[i]); + } + } + return std::make_pair(NOT_BATCHED, nullptr); +} + +void CameraDeviceSession::ResultBatcher::checkAndRemoveFirstBatch() { + Mutex::Autolock _l(mLock); + if (mInflightBatches.size() > 0) { + std::shared_ptr batch = mInflightBatches[0]; + bool shouldRemove = false; + { + Mutex::Autolock _l(batch->mLock); + if (batch->allDelivered()) { + batch->mRemoved = true; + shouldRemove = true; + } + } + if (shouldRemove) { + mInflightBatches.pop_front(); + } + } +} + +void CameraDeviceSession::ResultBatcher::sendBatchShutterCbsLocked( + std::shared_ptr batch) { + if (batch->mShutterDelivered) { + ALOGW("%s: batch shutter callback already sent!", 
__FUNCTION__); + return; + } + + auto ret = mCallback->notify(batch->mShutterMsgs); + if (!ret.isOk()) { + ALOGE("%s: notify shutter transaction failed: %s", + __FUNCTION__, ret.description().c_str()); + } + batch->mShutterDelivered = true; + batch->mShutterMsgs.clear(); +} + +void CameraDeviceSession::ResultBatcher::freeReleaseFences(hidl_vec& results) { + for (auto& result : results) { + if (result.inputBuffer.releaseFence.getNativeHandle() != nullptr) { + native_handle_t* handle = const_cast( + result.inputBuffer.releaseFence.getNativeHandle()); + native_handle_close(handle); + native_handle_delete(handle); + } + for (auto& buf : result.outputBuffers) { + if (buf.releaseFence.getNativeHandle() != nullptr) { + native_handle_t* handle = const_cast( + buf.releaseFence.getNativeHandle()); + native_handle_close(handle); + native_handle_delete(handle); + } + } + } + return; +} + +void CameraDeviceSession::ResultBatcher::moveStreamBuffer(StreamBuffer&& src, StreamBuffer& dst) { + // Only dealing with releaseFence here. Assume buffer/acquireFence are null + const native_handle_t* handle = src.releaseFence.getNativeHandle(); + src.releaseFence = nullptr; + dst = src; + dst.releaseFence = handle; + if (handle != dst.releaseFence.getNativeHandle()) { + ALOGE("%s: native handle cloned!", __FUNCTION__); + } +} + +void CameraDeviceSession::ResultBatcher::pushStreamBuffer( + StreamBuffer&& src, std::vector& dst) { + // Only dealing with releaseFence here. Assume buffer/acquireFence are null + const native_handle_t* handle = src.releaseFence.getNativeHandle(); + src.releaseFence = nullptr; + dst.push_back(src); + dst.back().releaseFence = handle; + if (handle != dst.back().releaseFence.getNativeHandle()) { + ALOGE("%s: native handle cloned!", __FUNCTION__); + } +} + +void CameraDeviceSession::ResultBatcher::sendBatchBuffersLocked( + std::shared_ptr batch) { + sendBatchBuffersLocked(batch, mStreamsToBatch); +} + +void CameraDeviceSession::ResultBatcher::sendBatchBuffersLocked( + std::shared_ptr batch, const std::vector& streams) { + size_t batchSize = 0; + for (int streamId : streams) { + auto it = batch->mBatchBufs.find(streamId); + if (it != batch->mBatchBufs.end()) { + InflightBatch::BufferBatch& bb = it->second; + if (bb.mDelivered) { + continue; + } + if (bb.mBuffers.size() > batchSize) { + batchSize = bb.mBuffers.size(); + } + } else { + ALOGE("%s: stream ID %d is not batched!", __FUNCTION__, streamId); + return; + } + } + + if (batchSize == 0) { + ALOGW("%s: there is no buffer to be delivered for this batch.", __FUNCTION__); + for (int streamId : streams) { + auto it = batch->mBatchBufs.find(streamId); + if (it == batch->mBatchBufs.end()) { + ALOGE("%s: cannot find stream %d in batched buffers!", __FUNCTION__, streamId); + return; + } + InflightBatch::BufferBatch& bb = it->second; + bb.mDelivered = true; + } + return; + } + + hidl_vec results; + results.resize(batchSize); + for (size_t i = 0; i < batchSize; i++) { + results[i].frameNumber = batch->mFirstFrame + i; + results[i].fmqResultSize = 0; + results[i].partialResult = 0; // 0 for buffer only results + results[i].inputBuffer.streamId = -1; + results[i].inputBuffer.bufferId = 0; + results[i].inputBuffer.buffer = nullptr; + std::vector outBufs; + outBufs.reserve(streams.size()); + for (int streamId : streams) { + auto it = batch->mBatchBufs.find(streamId); + if (it == batch->mBatchBufs.end()) { + ALOGE("%s: cannot find stream %d in batched buffers!", __FUNCTION__, streamId); + return; + } + InflightBatch::BufferBatch& bb = it->second; + if 
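+            /*
+             * A minimal sketch (illustrative only, not part of the original change) of why the
+             * moveStreamBuffer()/pushStreamBuffer() helpers above detach the fence first:
+             * assigning one hidl_handle to another via operator= clones the wrapped
+             * native_handle_t, which would duplicate the release fence FD. Assuming `src` and
+             * `dst` are StreamBuffers and only releaseFence is non-null:
+             *
+             *     const native_handle_t* fence = src.releaseFence.getNativeHandle();
+             *     src.releaseFence = nullptr;  // detach so the copy below cannot clone it
+             *     dst = src;                   // copy the remaining fields
+             *     dst.releaseFence = fence;    // re-wrap the original handle; no clone expected
+             */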
(bb.mDelivered) { + continue; + } + if (i < bb.mBuffers.size()) { + pushStreamBuffer(std::move(bb.mBuffers[i]), outBufs); + } + } + results[i].outputBuffers.resize(outBufs.size()); + for (size_t j = 0; j < outBufs.size(); j++) { + moveStreamBuffer(std::move(outBufs[j]), results[i].outputBuffers[j]); + } + } + invokeProcessCaptureResultCallback(results, /* tryWriteFmq */false); + freeReleaseFences(results); + for (int streamId : streams) { + auto it = batch->mBatchBufs.find(streamId); + if (it == batch->mBatchBufs.end()) { + ALOGE("%s: cannot find stream %d in batched buffers!", __FUNCTION__, streamId); + return; + } + InflightBatch::BufferBatch& bb = it->second; + bb.mDelivered = true; + bb.mBuffers.clear(); + } +} + +void CameraDeviceSession::ResultBatcher::sendBatchMetadataLocked( + std::shared_ptr batch, uint32_t lastPartialResultIdx) { + if (lastPartialResultIdx <= batch->mPartialResultProgress) { + // Result has been delivered. Return + ALOGW("%s: partial result %u has been delivered", __FUNCTION__, lastPartialResultIdx); + return; + } + + std::vector results; + std::vector toBeRemovedIdxes; + for (auto& pair : batch->mResultMds) { + uint32_t partialIdx = pair.first; + if (partialIdx > lastPartialResultIdx) { + continue; + } + toBeRemovedIdxes.push_back(partialIdx); + InflightBatch::MetadataBatch& mb = pair.second; + for (const auto& p : mb.mMds) { + CaptureResult result; + result.frameNumber = p.first; + result.result = std::move(p.second); + result.fmqResultSize = 0; + result.inputBuffer.streamId = -1; + result.inputBuffer.bufferId = 0; + result.inputBuffer.buffer = nullptr; + result.partialResult = partialIdx; + results.push_back(std::move(result)); + } + mb.mMds.clear(); + } + hidl_vec hResults; + hResults.setToExternal(results.data(), results.size()); + invokeProcessCaptureResultCallback(hResults, /* tryWriteFmq */true); + batch->mPartialResultProgress = lastPartialResultIdx; + for (uint32_t partialIdx : toBeRemovedIdxes) { + batch->mResultMds.erase(partialIdx); + } +} + +void CameraDeviceSession::ResultBatcher::notifySingleMsg(NotifyMsg& msg) { + auto ret = mCallback->notify({msg}); + if (!ret.isOk()) { + ALOGE("%s: notify transaction failed: %s", + __FUNCTION__, ret.description().c_str()); + } + return; +} + +void CameraDeviceSession::ResultBatcher::notify(NotifyMsg& msg) { + uint32_t frameNumber; + if (CC_LIKELY(msg.type == MsgType::SHUTTER)) { + frameNumber = msg.msg.shutter.frameNumber; + } else { + frameNumber = msg.msg.error.frameNumber; + } + + auto pair = getBatch(frameNumber); + int batchIdx = pair.first; + if (batchIdx == NOT_BATCHED) { + notifySingleMsg(msg); + return; + } + + // When error happened, stop batching for all batches earlier + if (CC_UNLIKELY(msg.type == MsgType::ERROR)) { + Mutex::Autolock _l(mLock); + for (int i = 0; i <= batchIdx; i++) { + // Send batched data up + std::shared_ptr batch = mInflightBatches[0]; + { + Mutex::Autolock _l(batch->mLock); + sendBatchShutterCbsLocked(batch); + sendBatchBuffersLocked(batch); + sendBatchMetadataLocked(batch, mNumPartialResults); + if (!batch->allDelivered()) { + ALOGE("%s: error: some batch data not sent back to framework!", + __FUNCTION__); + } + batch->mRemoved = true; + } + mInflightBatches.pop_front(); + } + // Send the error up + notifySingleMsg(msg); + return; + } + // Queue shutter callbacks for future delivery + std::shared_ptr batch = pair.second; + { + Mutex::Autolock _l(batch->mLock); + // Check if the batch is removed (mostly by notify error) before lock was acquired + if (batch->mRemoved) { + // 
Fall back to non-batch path + notifySingleMsg(msg); + return; + } + + batch->mShutterMsgs.push_back(msg); + if (frameNumber == batch->mLastFrame) { + sendBatchShutterCbsLocked(batch); + } + } // end of batch lock scope + + // see if the batch is complete + if (frameNumber == batch->mLastFrame) { + checkAndRemoveFirstBatch(); + } +} + +void CameraDeviceSession::ResultBatcher::invokeProcessCaptureResultCallback( + hidl_vec &results, bool tryWriteFmq) { + if (mProcessCaptureResultLock.tryLock() != OK) { + ALOGV("%s: previous call is not finished! waiting 1s...", __FUNCTION__); + if (mProcessCaptureResultLock.timedLock(1000000000 /* 1s */) != OK) { + ALOGE("%s: cannot acquire lock in 1s, cannot proceed", + __FUNCTION__); + return; + } + } + if (tryWriteFmq && mResultMetadataQueue->availableToWrite() > 0) { + for (CaptureResult &result : results) { + if (result.result.size() > 0) { + if (mResultMetadataQueue->write(result.result.data(), result.result.size())) { + result.fmqResultSize = result.result.size(); + result.result.resize(0); + } else { + ALOGW("%s: couldn't utilize fmq, fall back to hwbinder, result size: %zu," + "shared message queue available size: %zu", + __FUNCTION__, result.result.size(), + mResultMetadataQueue->availableToWrite()); + result.fmqResultSize = 0; + } + } + } + } + auto ret = mCallback->processCaptureResult(results); + if (!ret.isOk()) { + ALOGE("%s: processCaptureResult transaction failed: %s", + __FUNCTION__, ret.description().c_str()); + } + mProcessCaptureResultLock.unlock(); +} + +void CameraDeviceSession::ResultBatcher::processOneCaptureResult(CaptureResult& result) { + hidl_vec results; + results.resize(1); + results[0] = std::move(result); + invokeProcessCaptureResultCallback(results, /* tryWriteFmq */true); + freeReleaseFences(results); + return; +} + +void CameraDeviceSession::ResultBatcher::processCaptureResult(CaptureResult& result) { + auto pair = getBatch(result.frameNumber); + int batchIdx = pair.first; + if (batchIdx == NOT_BATCHED) { + processOneCaptureResult(result); + return; + } + std::shared_ptr batch = pair.second; + { + Mutex::Autolock _l(batch->mLock); + // Check if the batch is removed (mostly by notify error) before lock was acquired + if (batch->mRemoved) { + // Fall back to non-batch path + processOneCaptureResult(result); + return; + } + + // queue metadata + if (result.result.size() != 0) { + // Save a copy of metadata + batch->mResultMds[result.partialResult].mMds.push_back( + std::make_pair(result.frameNumber, result.result)); + } + + // queue buffer + std::vector filledStreams; + std::vector nonBatchedBuffers; + for (auto& buffer : result.outputBuffers) { + auto it = batch->mBatchBufs.find(buffer.streamId); + if (it != batch->mBatchBufs.end()) { + InflightBatch::BufferBatch& bb = it->second; + auto id = buffer.streamId; + pushStreamBuffer(std::move(buffer), bb.mBuffers); + filledStreams.push_back(id); + } else { + pushStreamBuffer(std::move(buffer), nonBatchedBuffers); + } + } + + // send non-batched buffers up + if (nonBatchedBuffers.size() > 0 || result.inputBuffer.streamId != -1) { + CaptureResult nonBatchedResult; + nonBatchedResult.frameNumber = result.frameNumber; + nonBatchedResult.fmqResultSize = 0; + nonBatchedResult.outputBuffers.resize(nonBatchedBuffers.size()); + for (size_t i = 0; i < nonBatchedBuffers.size(); i++) { + moveStreamBuffer( + std::move(nonBatchedBuffers[i]), nonBatchedResult.outputBuffers[i]); + } + moveStreamBuffer(std::move(result.inputBuffer), nonBatchedResult.inputBuffer); + nonBatchedResult.partialResult 
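+            /*
+             * For reference: invokeProcessCaptureResultCallback() above prefers the result
+             * metadata FMQ over hwbinder. When the metadata fits, it is written to the queue and
+             * result.fmqResultSize is set to the byte count while result.result is cleared. An
+             * illustrative framework-side sketch of consuming such a result (queue name
+             * hypothetical):
+             *
+             *     std::vector<uint8_t> meta(result.fmqResultSize);
+             *     if (result.fmqResultSize > 0 &&
+             *             resultQueue->read(meta.data(), result.fmqResultSize)) {
+             *         // meta now holds the serialized camera_metadata_t for this frame
+             *     } else {
+             *         // fall back to the metadata carried inline in result.result
+             *     }
+             */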
= 0; // 0 for buffer only results + processOneCaptureResult(nonBatchedResult); + } + + if (result.frameNumber == batch->mLastFrame) { + // Send data up + if (result.partialResult > 0) { + sendBatchMetadataLocked(batch, result.partialResult); + } + // send buffer up + if (filledStreams.size() > 0) { + sendBatchBuffersLocked(batch, filledStreams); + } + } + } // end of batch lock scope + + // see if the batch is complete + if (result.frameNumber == batch->mLastFrame) { + checkAndRemoveFirstBatch(); + } +} + +// Methods from ::android::hardware::camera::device::V3_2::ICameraDeviceSession follow. +Return CameraDeviceSession::constructDefaultRequestSettings( + RequestTemplate type, ICameraDeviceSession::constructDefaultRequestSettings_cb _hidl_cb) { + CameraMetadata outMetadata; + Status status = constructDefaultRequestSettingsRaw( (int) type, &outMetadata); + _hidl_cb(status, outMetadata); + return Void(); +} + +Status CameraDeviceSession::constructDefaultRequestSettingsRaw(int type, CameraMetadata *outMetadata) { + Status status = initStatus(); + const camera_metadata_t *rawRequest; + if (status == Status::OK) { + ATRACE_BEGIN("camera3->construct_default_request_settings"); + rawRequest = mDevice->ops->construct_default_request_settings(mDevice, (int) type); + ATRACE_END(); + if (rawRequest == nullptr) { + ALOGI("%s: template %d is not supported on this camera device", + __FUNCTION__, type); + status = Status::ILLEGAL_ARGUMENT; + } else { + mOverridenRequest.clear(); + mOverridenRequest.append(rawRequest); + // Derive some new keys for backward compatibility + if (mDerivePostRawSensKey && !mOverridenRequest.exists( + ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) { + int32_t defaultBoost[1] = {100}; + mOverridenRequest.update( + ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, + defaultBoost, 1); + } + const camera_metadata_t *metaBuffer = + mOverridenRequest.getAndLock(); + convertToHidl(metaBuffer, outMetadata); + mOverridenRequest.unlock(metaBuffer); + } + } + return status; +} + +/** + * Map Android N dataspace definitions back to Android M definitions, for + * use with HALv3.3 or older. + * + * Only map where correspondences exist, and otherwise preserve the value. 
+ */ +android_dataspace CameraDeviceSession::mapToLegacyDataspace( + android_dataspace dataSpace) const { + if (mDeviceVersion <= CAMERA_DEVICE_API_VERSION_3_3) { + switch (dataSpace) { + case HAL_DATASPACE_V0_SRGB_LINEAR: + return HAL_DATASPACE_SRGB_LINEAR; + case HAL_DATASPACE_V0_SRGB: + return HAL_DATASPACE_SRGB; + case HAL_DATASPACE_V0_JFIF: + return HAL_DATASPACE_JFIF; + case HAL_DATASPACE_V0_BT601_625: + return HAL_DATASPACE_BT601_625; + case HAL_DATASPACE_V0_BT601_525: + return HAL_DATASPACE_BT601_525; + case HAL_DATASPACE_V0_BT709: + return HAL_DATASPACE_BT709; + default: + return dataSpace; + } + } + + return dataSpace; +} + +bool CameraDeviceSession::preProcessConfigurationLocked( + const StreamConfiguration& requestedConfiguration, + camera3_stream_configuration_t *stream_list /*out*/, + hidl_vec *streams /*out*/) { + + if ((stream_list == nullptr) || (streams == nullptr)) { + return false; + } + + stream_list->operation_mode = (uint32_t) requestedConfiguration.operationMode; + stream_list->num_streams = requestedConfiguration.streams.size(); + streams->resize(stream_list->num_streams); + stream_list->streams = streams->data(); + + for (uint32_t i = 0; i < stream_list->num_streams; i++) { + int id = requestedConfiguration.streams[i].id; + + if (mStreamMap.count(id) == 0) { + Camera3Stream stream; + convertFromHidl(requestedConfiguration.streams[i], &stream); + mStreamMap[id] = stream; + mStreamMap[id].data_space = mapToLegacyDataspace( + mStreamMap[id].data_space); + mCirculatingBuffers.emplace(stream.mId, CirculatingBuffers{}); + } else { + // width/height/format must not change, but usage/rotation might need to change + if (mStreamMap[id].stream_type != + (int) requestedConfiguration.streams[i].streamType || + mStreamMap[id].width != requestedConfiguration.streams[i].width || + mStreamMap[id].height != requestedConfiguration.streams[i].height || + mStreamMap[id].format != (int) requestedConfiguration.streams[i].format || + mStreamMap[id].data_space != + mapToLegacyDataspace( static_cast ( + requestedConfiguration.streams[i].dataSpace))) { + ALOGE("%s: stream %d configuration changed!", __FUNCTION__, id); + return false; + } + mStreamMap[id].rotation = (int) requestedConfiguration.streams[i].rotation; + mStreamMap[id].usage = (uint32_t) requestedConfiguration.streams[i].usage; + } + (*streams)[i] = &mStreamMap[id]; + } + + if (mFreeBufEarly) { + // Remove buffers of deleted streams + for(auto it = mStreamMap.begin(); it != mStreamMap.end(); it++) { + int id = it->first; + bool found = false; + for (const auto& stream : requestedConfiguration.streams) { + if (id == stream.id) { + found = true; + break; + } + } + if (!found) { + // Unmap all buffers of deleted stream + cleanupBuffersLocked(id); + } + } + } + + return true; +} + +void CameraDeviceSession::postProcessConfigurationLocked( + const StreamConfiguration& requestedConfiguration) { + // delete unused streams, note we do this after adding new streams to ensure new stream + // will not have the same address as deleted stream, and HAL has a chance to reference + // the to be deleted stream in configure_streams call + for(auto it = mStreamMap.begin(); it != mStreamMap.end();) { + int id = it->first; + bool found = false; + for (const auto& stream : requestedConfiguration.streams) { + if (id == stream.id) { + found = true; + break; + } + } + if (!found) { + // Unmap all buffers of deleted stream + // in case the configuration call succeeds and HAL + // is able to release the corresponding resources too. 
+ if (!mFreeBufEarly) { + cleanupBuffersLocked(id); + } + it = mStreamMap.erase(it); + } else { + ++it; + } + } + + // Track video streams + mVideoStreamIds.clear(); + for (const auto& stream : requestedConfiguration.streams) { + if (stream.streamType == StreamType::OUTPUT && + stream.usage & + graphics::common::V1_0::BufferUsage::VIDEO_ENCODER) { + mVideoStreamIds.push_back(stream.id); + } + } + mResultBatcher.setBatchedStreams(mVideoStreamIds); +} + + +void CameraDeviceSession::postProcessConfigurationFailureLocked( + const StreamConfiguration& requestedConfiguration) { + if (mFreeBufEarly) { + // Re-build the buf cache entry for deleted streams + for(auto it = mStreamMap.begin(); it != mStreamMap.end(); it++) { + int id = it->first; + bool found = false; + for (const auto& stream : requestedConfiguration.streams) { + if (id == stream.id) { + found = true; + break; + } + } + if (!found) { + mCirculatingBuffers.emplace(id, CirculatingBuffers{}); + } + } + } +} + +Return CameraDeviceSession::configureStreams( + const StreamConfiguration& requestedConfiguration, + ICameraDeviceSession::configureStreams_cb _hidl_cb) { + Status status = initStatus(); + HalStreamConfiguration outStreams; + + // hold the inflight lock for entire configureStreams scope since there must not be any + // inflight request/results during stream configuration. + Mutex::Autolock _l(mInflightLock); + if (!mInflightBuffers.empty()) { + ALOGE("%s: trying to configureStreams while there are still %zu inflight buffers!", + __FUNCTION__, mInflightBuffers.size()); + _hidl_cb(Status::INTERNAL_ERROR, outStreams); + return Void(); + } + + if (!mInflightAETriggerOverrides.empty()) { + ALOGE("%s: trying to configureStreams while there are still %zu inflight" + " trigger overrides!", __FUNCTION__, + mInflightAETriggerOverrides.size()); + _hidl_cb(Status::INTERNAL_ERROR, outStreams); + return Void(); + } + + if (!mInflightRawBoostPresent.empty()) { + ALOGE("%s: trying to configureStreams while there are still %zu inflight" + " boost overrides!", __FUNCTION__, + mInflightRawBoostPresent.size()); + _hidl_cb(Status::INTERNAL_ERROR, outStreams); + return Void(); + } + + if (status != Status::OK) { + _hidl_cb(status, outStreams); + return Void(); + } + + camera3_stream_configuration_t stream_list{}; + hidl_vec streams; + if (!preProcessConfigurationLocked(requestedConfiguration, &stream_list, &streams)) { + _hidl_cb(Status::INTERNAL_ERROR, outStreams); + return Void(); + } + + ATRACE_BEGIN("camera3->configure_streams"); + status_t ret = mDevice->ops->configure_streams(mDevice, &stream_list); + ATRACE_END(); + + // In case Hal returns error most likely it was not able to release + // the corresponding resources of the deleted streams. 
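+    /*
+     * For context, a sketch (field values are illustrative assumptions, not from this patch) of
+     * the kind of StreamConfiguration a client passes into configureStreams() for a single
+     * preview stream:
+     *
+     *     Stream preview{};
+     *     preview.id = 0;
+     *     preview.streamType = StreamType::OUTPUT;
+     *     preview.width = 1920;
+     *     preview.height = 1080;
+     *     preview.format = PixelFormat::IMPLEMENTATION_DEFINED;
+     *     preview.usage = static_cast<decltype(preview.usage)>(
+     *             graphics::common::V1_0::BufferUsage::GPU_TEXTURE);
+     *     preview.dataSpace = static_cast<decltype(preview.dataSpace)>(Dataspace::UNKNOWN);
+     *     preview.rotation = StreamRotation::ROTATION_0;
+     *
+     *     StreamConfiguration config{};
+     *     config.streams = { preview };
+     *     config.operationMode = StreamConfigurationMode::NORMAL_MODE;
+     */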
+ if (ret == OK) { + postProcessConfigurationLocked(requestedConfiguration); + } else { + postProcessConfigurationFailureLocked(requestedConfiguration); + } + + if (ret == -EINVAL) { + status = Status::ILLEGAL_ARGUMENT; + } else if (ret != OK) { + status = Status::INTERNAL_ERROR; + } else { + convertToHidl(stream_list, &outStreams); + mFirstRequest = true; + } + + _hidl_cb(status, outStreams); + return Void(); +} + +// Needs to get called after acquiring 'mInflightLock' +void CameraDeviceSession::cleanupBuffersLocked(int id) { + for (auto& pair : mCirculatingBuffers.at(id)) { + sHandleImporter.freeBuffer(pair.second); + } + mCirculatingBuffers[id].clear(); + mCirculatingBuffers.erase(id); +} + +void CameraDeviceSession::updateBufferCaches(const hidl_vec& cachesToRemove) { + Mutex::Autolock _l(mInflightLock); + for (auto& cache : cachesToRemove) { + auto cbsIt = mCirculatingBuffers.find(cache.streamId); + if (cbsIt == mCirculatingBuffers.end()) { + // The stream could have been removed + continue; + } + CirculatingBuffers& cbs = cbsIt->second; + auto it = cbs.find(cache.bufferId); + if (it != cbs.end()) { + sHandleImporter.freeBuffer(it->second); + cbs.erase(it); + } else { + ALOGE("%s: stream %d buffer %" PRIu64 " is not cached", + __FUNCTION__, cache.streamId, cache.bufferId); + } + } +} + +Return CameraDeviceSession::getCaptureRequestMetadataQueue( + ICameraDeviceSession::getCaptureRequestMetadataQueue_cb _hidl_cb) { + _hidl_cb(*mRequestMetadataQueue->getDesc()); + return Void(); +} + +Return CameraDeviceSession::getCaptureResultMetadataQueue( + ICameraDeviceSession::getCaptureResultMetadataQueue_cb _hidl_cb) { + _hidl_cb(*mResultMetadataQueue->getDesc()); + return Void(); +} + +Return CameraDeviceSession::processCaptureRequest( + const hidl_vec& requests, + const hidl_vec& cachesToRemove, + ICameraDeviceSession::processCaptureRequest_cb _hidl_cb) { + updateBufferCaches(cachesToRemove); + + uint32_t numRequestProcessed = 0; + Status s = Status::OK; + for (size_t i = 0; i < requests.size(); i++, numRequestProcessed++) { + s = processOneCaptureRequest(requests[i]); + if (s != Status::OK) { + break; + } + } + + if (s == Status::OK && requests.size() > 1) { + mResultBatcher.registerBatch(requests[0].frameNumber, requests.size()); + } + + _hidl_cb(s, numRequestProcessed); + return Void(); +} + +Status CameraDeviceSession::processOneCaptureRequest(const CaptureRequest& request) { + Status status = initStatus(); + if (status != Status::OK) { + ALOGE("%s: camera init failed or disconnected", __FUNCTION__); + return status; + } + + camera3_capture_request_t halRequest; + halRequest.frame_number = request.frameNumber; + + bool converted = true; + CameraMetadata settingsFmq; // settings from FMQ + if (request.fmqSettingsSize > 0) { + // non-blocking read; client must write metadata before calling + // processOneCaptureRequest + settingsFmq.resize(request.fmqSettingsSize); + bool read = mRequestMetadataQueue->read(settingsFmq.data(), request.fmqSettingsSize); + if (read) { + converted = convertFromHidl(settingsFmq, &halRequest.settings); + } else { + ALOGE("%s: capture request settings metadata couldn't be read from fmq!", __FUNCTION__); + converted = false; + } + } else { + converted = convertFromHidl(request.settings, &halRequest.settings); + } + + if (!converted) { + ALOGE("%s: capture request settings metadata is corrupt!", __FUNCTION__); + return Status::ILLEGAL_ARGUMENT; + } + + if (mFirstRequest && halRequest.settings == nullptr) { + ALOGE("%s: capture request settings must not be null for 
first request!", + __FUNCTION__); + return Status::ILLEGAL_ARGUMENT; + } + + hidl_vec allBufPtrs; + hidl_vec allFences; + bool hasInputBuf = (request.inputBuffer.streamId != -1 && + request.inputBuffer.bufferId != 0); + size_t numOutputBufs = request.outputBuffers.size(); + size_t numBufs = numOutputBufs + (hasInputBuf ? 1 : 0); + + if (numOutputBufs == 0) { + ALOGE("%s: capture request must have at least one output buffer!", __FUNCTION__); + return Status::ILLEGAL_ARGUMENT; + } + + status = importRequest(request, allBufPtrs, allFences); + if (status != Status::OK) { + return status; + } + + hidl_vec outHalBufs; + outHalBufs.resize(numOutputBufs); + bool aeCancelTriggerNeeded = false; + ::android::hardware::camera::common::V1_0::helper::CameraMetadata settingsOverride; + { + Mutex::Autolock _l(mInflightLock); + if (hasInputBuf) { + auto key = std::make_pair(request.inputBuffer.streamId, request.frameNumber); + auto& bufCache = mInflightBuffers[key] = camera3_stream_buffer_t{}; + convertFromHidl( + allBufPtrs[numOutputBufs], request.inputBuffer.status, + &mStreamMap[request.inputBuffer.streamId], allFences[numOutputBufs], + &bufCache); + halRequest.input_buffer = &bufCache; + } else { + halRequest.input_buffer = nullptr; + } + + halRequest.num_output_buffers = numOutputBufs; + for (size_t i = 0; i < numOutputBufs; i++) { + auto key = std::make_pair(request.outputBuffers[i].streamId, request.frameNumber); + auto& bufCache = mInflightBuffers[key] = camera3_stream_buffer_t{}; + convertFromHidl( + allBufPtrs[i], request.outputBuffers[i].status, + &mStreamMap[request.outputBuffers[i].streamId], allFences[i], + &bufCache); + outHalBufs[i] = bufCache; + } + halRequest.output_buffers = outHalBufs.data(); + + AETriggerCancelOverride triggerOverride; + aeCancelTriggerNeeded = handleAePrecaptureCancelRequestLocked( + halRequest, &settingsOverride /*out*/, &triggerOverride/*out*/); + if (aeCancelTriggerNeeded) { + mInflightAETriggerOverrides[halRequest.frame_number] = + triggerOverride; + halRequest.settings = settingsOverride.getAndLock(); + } + } + halRequest.num_physcam_settings = 0; + + ATRACE_ASYNC_BEGIN("frame capture", request.frameNumber); + ATRACE_BEGIN("camera3->process_capture_request"); + status_t ret = mDevice->ops->process_capture_request(mDevice, &halRequest); + ATRACE_END(); + if (aeCancelTriggerNeeded) { + settingsOverride.unlock(halRequest.settings); + } + if (ret != OK) { + Mutex::Autolock _l(mInflightLock); + ALOGE("%s: HAL process_capture_request call failed!", __FUNCTION__); + + cleanupInflightFences(allFences, numBufs); + if (hasInputBuf) { + auto key = std::make_pair(request.inputBuffer.streamId, request.frameNumber); + mInflightBuffers.erase(key); + } + for (size_t i = 0; i < numOutputBufs; i++) { + auto key = std::make_pair(request.outputBuffers[i].streamId, request.frameNumber); + mInflightBuffers.erase(key); + } + if (aeCancelTriggerNeeded) { + mInflightAETriggerOverrides.erase(request.frameNumber); + } + return Status::INTERNAL_ERROR; + } + + mFirstRequest = false; + return Status::OK; +} + +Return CameraDeviceSession::flush() { + Status status = initStatus(); + if (status == Status::OK) { + // Flush is always supported on device 3.1 or later + status_t ret = mDevice->ops->flush(mDevice); + if (ret != OK) { + status = Status::INTERNAL_ERROR; + } + } + return status; +} + +Return CameraDeviceSession::close() { + Mutex::Autolock _l(mStateLock); + if (!mClosed) { + { + Mutex::Autolock _l(mInflightLock); + if (!mInflightBuffers.empty()) { + ALOGE("%s: trying to close while 
there are still %zu inflight buffers!", + __FUNCTION__, mInflightBuffers.size()); + } + if (!mInflightAETriggerOverrides.empty()) { + ALOGE("%s: trying to close while there are still %zu inflight " + "trigger overrides!", __FUNCTION__, + mInflightAETriggerOverrides.size()); + } + if (!mInflightRawBoostPresent.empty()) { + ALOGE("%s: trying to close while there are still %zu inflight " + " RAW boost overrides!", __FUNCTION__, + mInflightRawBoostPresent.size()); + } + + } + + ATRACE_BEGIN("camera3->close"); + mDevice->common.close(&mDevice->common); + ATRACE_END(); + + // free all imported buffers + Mutex::Autolock _l(mInflightLock); + for(auto& pair : mCirculatingBuffers) { + CirculatingBuffers& buffers = pair.second; + for (auto& p2 : buffers) { + sHandleImporter.freeBuffer(p2.second); + } + buffers.clear(); + } + mCirculatingBuffers.clear(); + + mClosed = true; + } + return Void(); +} + +uint64_t CameraDeviceSession::getCapResultBufferId(const buffer_handle_t&, int) { + // No need to fill in bufferId by default + return BUFFER_ID_NO_BUFFER; +} + +status_t CameraDeviceSession::constructCaptureResult(CaptureResult& result, + const camera3_capture_result *hal_result) { + uint32_t frameNumber = hal_result->frame_number; + bool hasInputBuf = (hal_result->input_buffer != nullptr); + size_t numOutputBufs = hal_result->num_output_buffers; + size_t numBufs = numOutputBufs + (hasInputBuf ? 1 : 0); + if (numBufs > 0) { + Mutex::Autolock _l(mInflightLock); + if (hasInputBuf) { + int streamId = static_cast(hal_result->input_buffer->stream)->mId; + // validate if buffer is inflight + auto key = std::make_pair(streamId, frameNumber); + if (mInflightBuffers.count(key) != 1) { + ALOGE("%s: input buffer for stream %d frame %d is not inflight!", + __FUNCTION__, streamId, frameNumber); + return -EINVAL; + } + } + + for (size_t i = 0; i < numOutputBufs; i++) { + int streamId = static_cast(hal_result->output_buffers[i].stream)->mId; + // validate if buffer is inflight + auto key = std::make_pair(streamId, frameNumber); + if (mInflightBuffers.count(key) != 1) { + ALOGE("%s: output buffer for stream %d frame %d is not inflight!", + __FUNCTION__, streamId, frameNumber); + return -EINVAL; + } + } + } + // We don't need to validate/import fences here since we will be passing them to camera service + // within the scope of this function + result.frameNumber = frameNumber; + result.fmqResultSize = 0; + result.partialResult = hal_result->partial_result; + convertToHidl(hal_result->result, &result.result); + if (nullptr != hal_result->result) { + bool resultOverriden = false; + Mutex::Autolock _l(mInflightLock); + + // Derive some new keys for backward compatibility + if (mDerivePostRawSensKey) { + camera_metadata_ro_entry entry; + if (find_camera_metadata_ro_entry(hal_result->result, + ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &entry) == 0) { + mInflightRawBoostPresent[frameNumber] = true; + } else { + auto entry = mInflightRawBoostPresent.find(frameNumber); + if (mInflightRawBoostPresent.end() == entry) { + mInflightRawBoostPresent[frameNumber] = false; + } + } + + if ((hal_result->partial_result == mNumPartialResults)) { + if (!mInflightRawBoostPresent[frameNumber]) { + if (!resultOverriden) { + mOverridenResult.clear(); + mOverridenResult.append(hal_result->result); + resultOverriden = true; + } + int32_t defaultBoost[1] = {100}; + mOverridenResult.update( + ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, + defaultBoost, 1); + } + + mInflightRawBoostPresent.erase(frameNumber); + } + } + + auto entry = 
mInflightAETriggerOverrides.find(frameNumber); + if (mInflightAETriggerOverrides.end() != entry) { + if (!resultOverriden) { + mOverridenResult.clear(); + mOverridenResult.append(hal_result->result); + resultOverriden = true; + } + overrideResultForPrecaptureCancelLocked(entry->second, + &mOverridenResult); + if (hal_result->partial_result == mNumPartialResults) { + mInflightAETriggerOverrides.erase(frameNumber); + } + } + + if (resultOverriden) { + const camera_metadata_t *metaBuffer = + mOverridenResult.getAndLock(); + convertToHidl(metaBuffer, &result.result); + mOverridenResult.unlock(metaBuffer); + } + } + if (hasInputBuf) { + result.inputBuffer.streamId = + static_cast(hal_result->input_buffer->stream)->mId; + result.inputBuffer.buffer = nullptr; + result.inputBuffer.status = (BufferStatus) hal_result->input_buffer->status; + // skip acquire fence since it's no use to camera service + if (hal_result->input_buffer->release_fence != -1) { + native_handle_t* handle = native_handle_create(/*numFds*/1, /*numInts*/0); + handle->data[0] = hal_result->input_buffer->release_fence; + result.inputBuffer.releaseFence = handle; + } else { + result.inputBuffer.releaseFence = nullptr; + } + } else { + result.inputBuffer.streamId = -1; + } + + result.outputBuffers.resize(numOutputBufs); + for (size_t i = 0; i < numOutputBufs; i++) { + result.outputBuffers[i].streamId = + static_cast(hal_result->output_buffers[i].stream)->mId; + result.outputBuffers[i].buffer = nullptr; + if (hal_result->output_buffers[i].buffer != nullptr) { + result.outputBuffers[i].bufferId = getCapResultBufferId( + *(hal_result->output_buffers[i].buffer), + result.outputBuffers[i].streamId); + } else { + result.outputBuffers[i].bufferId = 0; + } + + result.outputBuffers[i].status = (BufferStatus) hal_result->output_buffers[i].status; + // skip acquire fence since it's of no use to camera service + if (hal_result->output_buffers[i].release_fence != -1) { + native_handle_t* handle = native_handle_create(/*numFds*/1, /*numInts*/0); + handle->data[0] = hal_result->output_buffers[i].release_fence; + result.outputBuffers[i].releaseFence = handle; + } else { + result.outputBuffers[i].releaseFence = nullptr; + } + } + + // Free inflight record/fences. + // Do this before call back to camera service because camera service might jump to + // configure_streams right after the processCaptureResult call so we need to finish + // updating inflight queues first + if (numBufs > 0) { + Mutex::Autolock _l(mInflightLock); + if (hasInputBuf) { + int streamId = static_cast(hal_result->input_buffer->stream)->mId; + auto key = std::make_pair(streamId, frameNumber); + mInflightBuffers.erase(key); + } + + for (size_t i = 0; i < numOutputBufs; i++) { + int streamId = static_cast(hal_result->output_buffers[i].stream)->mId; + auto key = std::make_pair(streamId, frameNumber); + mInflightBuffers.erase(key); + } + + if (mInflightBuffers.empty()) { + ALOGV("%s: inflight buffer queue is now empty!", __FUNCTION__); + } + } + return OK; +} + +// Static helper method to copy/shrink capture result metadata sent by HAL +void CameraDeviceSession::sShrinkCaptureResult( + camera3_capture_result* dst, const camera3_capture_result* src, + std::vector<::android::hardware::camera::common::V1_0::helper::CameraMetadata>* mds, + std::vector* physCamMdArray, + bool handlePhysCam) { + *dst = *src; + // Reserve maximum number of entries to avoid metadata re-allocation. + mds->reserve(1 + (handlePhysCam ? 
src->num_physcam_metadata : 0)); + if (sShouldShrink(src->result)) { + mds->emplace_back(sCreateCompactCopy(src->result)); + dst->result = mds->back().getAndLock(); + } + + if (handlePhysCam) { + // First determine if we need to create new camera_metadata_t* array + bool needShrink = false; + for (uint32_t i = 0; i < src->num_physcam_metadata; i++) { + if (sShouldShrink(src->physcam_metadata[i])) { + needShrink = true; + } + } + + if (!needShrink) return; + + physCamMdArray->reserve(src->num_physcam_metadata); + dst->physcam_metadata = physCamMdArray->data(); + for (uint32_t i = 0; i < src->num_physcam_metadata; i++) { + if (sShouldShrink(src->physcam_metadata[i])) { + mds->emplace_back(sCreateCompactCopy(src->physcam_metadata[i])); + dst->physcam_metadata[i] = mds->back().getAndLock(); + } else { + dst->physcam_metadata[i] = src->physcam_metadata[i]; + } + } + } +} + +bool CameraDeviceSession::sShouldShrink(const camera_metadata_t* md) { + size_t compactSize = get_camera_metadata_compact_size(md); + size_t totalSize = get_camera_metadata_size(md); + if (totalSize >= compactSize + METADATA_SHRINK_ABS_THRESHOLD && + totalSize >= compactSize * METADATA_SHRINK_REL_THRESHOLD) { + ALOGV("Camera metadata should be shrunk from %zu to %zu", totalSize, compactSize); + return true; + } + return false; +} + +camera_metadata_t* CameraDeviceSession::sCreateCompactCopy(const camera_metadata_t* src) { + size_t compactSize = get_camera_metadata_compact_size(src); + void* buffer = calloc(1, compactSize); + if (buffer == nullptr) { + ALOGE("%s: Allocating %zu bytes failed", __FUNCTION__, compactSize); + } + return copy_camera_metadata(buffer, compactSize, src); +} + +/** + * Static callback forwarding methods from HAL to instance + */ +void CameraDeviceSession::sProcessCaptureResult( + const camera3_callback_ops *cb, + const camera3_capture_result *hal_result) { + CameraDeviceSession *d = + const_cast(static_cast(cb)); + + CaptureResult result = {}; + camera3_capture_result shadowResult; + bool handlePhysCam = (d->mDeviceVersion >= CAMERA_DEVICE_API_VERSION_3_5); + std::vector<::android::hardware::camera::common::V1_0::helper::CameraMetadata> compactMds; + std::vector physCamMdArray; + sShrinkCaptureResult(&shadowResult, hal_result, &compactMds, &physCamMdArray, handlePhysCam); + + status_t ret = d->constructCaptureResult(result, &shadowResult); + if (ret == OK) { + d->mResultBatcher.processCaptureResult(result); + } +} + +void CameraDeviceSession::sNotify( + const camera3_callback_ops *cb, + const camera3_notify_msg *msg) { + CameraDeviceSession *d = + const_cast(static_cast(cb)); + NotifyMsg hidlMsg; + convertToHidl(msg, &hidlMsg); + + if (hidlMsg.type == (MsgType) CAMERA3_MSG_ERROR && + hidlMsg.msg.error.errorStreamId != -1) { + if (d->mStreamMap.count(hidlMsg.msg.error.errorStreamId) != 1) { + ALOGE("%s: unknown stream ID %d reports an error!", + __FUNCTION__, hidlMsg.msg.error.errorStreamId); + return; + } + } + + if (static_cast(hidlMsg.type) == CAMERA3_MSG_ERROR) { + switch (hidlMsg.msg.error.errorCode) { + case ErrorCode::ERROR_DEVICE: + case ErrorCode::ERROR_REQUEST: + case ErrorCode::ERROR_RESULT: { + Mutex::Autolock _l(d->mInflightLock); + auto entry = d->mInflightAETriggerOverrides.find( + hidlMsg.msg.error.frameNumber); + if (d->mInflightAETriggerOverrides.end() != entry) { + d->mInflightAETriggerOverrides.erase( + hidlMsg.msg.error.frameNumber); + } + + auto boostEntry = d->mInflightRawBoostPresent.find( + hidlMsg.msg.error.frameNumber); + if (d->mInflightRawBoostPresent.end() != boostEntry) { 
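+                /*
+                 * A worked example of the sShouldShrink() test above (threshold values are
+                 * illustrative assumptions, e.g. a 64 KiB absolute threshold and a 2x relative
+                 * threshold): a result whose camera_metadata_t occupies 900 KiB but compacts to
+                 * 12 KiB satisfies both 900 >= 12 + 64 and 900 >= 2 * 12, so a compact copy is
+                 * made before forwarding; a 20 KiB buffer with the same 12 KiB compact size
+                 * fails the absolute test and is forwarded as-is.
+                 */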
+ d->mInflightRawBoostPresent.erase( + hidlMsg.msg.error.frameNumber); + } + + } + break; + case ErrorCode::ERROR_BUFFER: + default: + break; + } + + } + + d->mResultBatcher.notify(hidlMsg); +} + +} // namespace implementation +} // namespace V3_2 +} // namespace device +} // namespace camera +} // namespace hardware +} // namespace android diff --git a/camera/device/3.2/default/CameraDeviceSession.h b/camera/device/3.2/default/CameraDeviceSession.h new file mode 100644 index 0000000..a96c245 --- /dev/null +++ b/camera/device/3.2/default/CameraDeviceSession.h @@ -0,0 +1,421 @@ +/* + * Copyright (C) 2016 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_HARDWARE_CAMERA_DEVICE_V3_2_CAMERADEVICE3SESSION_H +#define ANDROID_HARDWARE_CAMERA_DEVICE_V3_2_CAMERADEVICE3SESSION_H + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include "CameraMetadata.h" +#include "HandleImporter.h" +#include "hardware/camera3.h" +#include "hardware/camera_common.h" +#include "utils/Mutex.h" + +namespace android { +namespace hardware { +namespace camera { +namespace device { +namespace V3_2 { +namespace implementation { + +using ::android::hardware::camera::device::V3_2::CaptureRequest; +using ::android::hardware::camera::device::V3_2::HalStreamConfiguration; +using ::android::hardware::camera::device::V3_2::StreamConfiguration; +using ::android::hardware::camera::device::V3_2::ICameraDeviceSession; +using ::android::hardware::camera::common::V1_0::Status; +using ::android::hardware::camera::common::V1_0::helper::HandleImporter; +using ::android::hardware::kSynchronizedReadWrite; +using ::android::hardware::MessageQueue; +using ::android::hardware::MQDescriptorSync; +using ::android::hardware::Return; +using ::android::hardware::Void; +using ::android::hardware::hidl_vec; +using ::android::hardware::hidl_string; +using ::android::sp; +using ::android::Mutex; + +struct Camera3Stream; + +/** + * Function pointer types with C calling convention to + * use for HAL callback functions. 
+ */ +extern "C" { + typedef void (callbacks_process_capture_result_t)( + const struct camera3_callback_ops *, + const camera3_capture_result_t *); + + typedef void (callbacks_notify_t)( + const struct camera3_callback_ops *, + const camera3_notify_msg_t *); +} + +struct CameraDeviceSession : public virtual RefBase, protected camera3_callback_ops { + + CameraDeviceSession(camera3_device_t*, + const camera_metadata_t* deviceInfo, + const sp&); + virtual ~CameraDeviceSession(); + // Call by CameraDevice to dump active device states + void dumpState(const native_handle_t* fd); + // Caller must use this method to check if CameraDeviceSession ctor failed + bool isInitFailed() { return mInitFail; } + // Used by CameraDevice to signal external camera disconnected + void disconnect(); + bool isClosed(); + + // Retrieve the HIDL interface, split into its own class to avoid inheritance issues when + // dealing with minor version revs and simultaneous implementation and interface inheritance + virtual sp getInterface() { + return new TrampolineSessionInterface_3_2(this); + } + +protected: + + // Methods from ::android::hardware::camera::device::V3_2::ICameraDeviceSession follow + + Return constructDefaultRequestSettings( + RequestTemplate type, + ICameraDeviceSession::constructDefaultRequestSettings_cb _hidl_cb); + Return configureStreams( + const StreamConfiguration& requestedConfiguration, + ICameraDeviceSession::configureStreams_cb _hidl_cb); + Return getCaptureRequestMetadataQueue( + ICameraDeviceSession::getCaptureRequestMetadataQueue_cb _hidl_cb); + Return getCaptureResultMetadataQueue( + ICameraDeviceSession::getCaptureResultMetadataQueue_cb _hidl_cb); + Return processCaptureRequest( + const hidl_vec& requests, + const hidl_vec& cachesToRemove, + ICameraDeviceSession::processCaptureRequest_cb _hidl_cb); + Return flush(); + Return close(); + + // Helper methods + Status constructDefaultRequestSettingsRaw(int type, CameraMetadata *outMetadata); + + bool preProcessConfigurationLocked(const StreamConfiguration& requestedConfiguration, + camera3_stream_configuration_t *stream_list /*out*/, + hidl_vec *streams /*out*/); + void postProcessConfigurationLocked(const StreamConfiguration& requestedConfiguration); + + void postProcessConfigurationFailureLocked(const StreamConfiguration& requestedConfiguration); + +protected: + + // protecting mClosed/mDisconnected/mInitFail + mutable Mutex mStateLock; + // device is closed either + // - closed by user + // - init failed + // - camera disconnected + bool mClosed = false; + + // Set by CameraDevice (when external camera is disconnected) + bool mDisconnected = false; + + struct AETriggerCancelOverride { + bool applyAeLock; + uint8_t aeLock; + bool applyAePrecaptureTrigger; + uint8_t aePrecaptureTrigger; + }; + + camera3_device_t* mDevice; + const uint32_t mDeviceVersion; + const bool mFreeBufEarly; + bool mIsAELockAvailable; + bool mDerivePostRawSensKey; + uint32_t mNumPartialResults; + // Stream ID -> Camera3Stream cache + std::map mStreamMap; + + mutable Mutex mInflightLock; // protecting mInflightBuffers and mCirculatingBuffers + // (streamID, frameNumber) -> inflight buffer cache + std::map, camera3_stream_buffer_t> mInflightBuffers; + + // (frameNumber, AETriggerOverride) -> inflight request AETriggerOverrides + std::map mInflightAETriggerOverrides; + ::android::hardware::camera::common::V1_0::helper::CameraMetadata mOverridenResult; + std::map mInflightRawBoostPresent; + ::android::hardware::camera::common::V1_0::helper::CameraMetadata 
mOverridenRequest;
+
+    static const uint64_t BUFFER_ID_NO_BUFFER = 0;
+    // buffers currently circulating between HAL and camera service
+    // key: bufferId sent via HIDL interface
+    // value: imported buffer_handle_t
+    // Buffer will be imported during process_capture_request and will be freed
+    // when its stream is deleted or camera device session is closed
+    typedef std::unordered_map<uint64_t, buffer_handle_t> CirculatingBuffers;
+    // Stream ID -> circulating buffers map
+    std::map<int, CirculatingBuffers> mCirculatingBuffers;
+
+    static HandleImporter sHandleImporter;
+    static buffer_handle_t sEmptyBuffer;
+
+    bool mInitFail;
+    bool mFirstRequest = false;
+
+    common::V1_0::helper::CameraMetadata mDeviceInfo;
+
+    using RequestMetadataQueue = MessageQueue<uint8_t, kSynchronizedReadWrite>;
+    std::unique_ptr<RequestMetadataQueue> mRequestMetadataQueue;
+    using ResultMetadataQueue = MessageQueue<uint8_t, kSynchronizedReadWrite>;
+    std::shared_ptr<ResultMetadataQueue> mResultMetadataQueue;
+
+    class ResultBatcher {
+    public:
+        ResultBatcher(const sp<ICameraDeviceCallback>& callback);
+        void setNumPartialResults(uint32_t n);
+        void setBatchedStreams(const std::vector<int>& streamsToBatch);
+        void setResultMetadataQueue(std::shared_ptr<ResultMetadataQueue> q);
+
+        void registerBatch(uint32_t frameNumber, uint32_t batchSize);
+        void notify(NotifyMsg& msg);
+        void processCaptureResult(CaptureResult& result);
+
+    protected:
+        struct InflightBatch {
+            // Protect access to entire struct. Acquire this lock before read/write any data or
+            // calling any methods. processCaptureResult and notify will compete for this lock
+            // HIDL IPCs might be issued while the lock is held
+            Mutex mLock;
+
+            bool allDelivered() const;
+
+            uint32_t mFirstFrame;
+            uint32_t mLastFrame;
+            uint32_t mBatchSize;
+
+            bool mShutterDelivered = false;
+            std::vector<NotifyMsg> mShutterMsgs;
+
+            struct BufferBatch {
+                BufferBatch(uint32_t batchSize) {
+                    mBuffers.reserve(batchSize);
+                }
+                bool mDelivered = false;
+                // This currently assumes every batched request will output to the batched stream
+                // and since HAL must always send buffers in order, no frameNumber tracking is
+                // needed
+                std::vector<StreamBuffer> mBuffers;
+            };
+            // Stream ID -> VideoBatch
+            std::unordered_map<int, BufferBatch> mBatchBufs;
+
+            struct MetadataBatch {
+                // (frameNumber, metadata)
+                std::vector<std::pair<uint32_t, CameraMetadata>> mMds;
+            };
+            // Partial result IDs that have been delivered to framework
+            uint32_t mNumPartialResults;
+            uint32_t mPartialResultProgress = 0;
+            // partialResult -> MetadataBatch
+            std::map<uint32_t, MetadataBatch> mResultMds;
+
+            // Set to true when batch is removed from mInflightBatches
+            // processCaptureResult and notify must check this flag after acquiring mLock to make
+            // sure this batch isn't removed while waiting for mLock
+            bool mRemoved = false;
+        };
+
+
+        // Get the batch index and pointer to InflightBatch (nullptr if the frame is not batched)
+        // Caller must acquire the InflightBatch::mLock before accessing the InflightBatch
+        // It's possible that the InflightBatch is removed from mInflightBatches before the
+        // InflightBatch::mLock is acquired (most likely caused by an error notification), so
+        // caller must check InflightBatch::mRemoved flag after the lock is acquired.
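+        /*
+         * Illustration of the bookkeeping above (numbers are hypothetical): a call to
+         * registerBatch(100, 4) creates an InflightBatch with mFirstFrame = 100,
+         * mBatchSize = 4 and mLastFrame = 103, plus one BufferBatch per stream in
+         * mStreamsToBatch; shutter messages, batched-stream buffers and metadata for frames
+         * 100-103 are then held and sent together once frame 103 (or an error) arrives.
+         */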
+ // This method will hold ResultBatcher::mLock briefly + std::pair> getBatch(uint32_t frameNumber); + + static const int NOT_BATCHED = -1; + + // move/push function avoids "hidl_handle& operator=(hidl_handle&)", which clones native + // handle + void moveStreamBuffer(StreamBuffer&& src, StreamBuffer& dst); + void pushStreamBuffer(StreamBuffer&& src, std::vector& dst); + + void sendBatchMetadataLocked( + std::shared_ptr batch, uint32_t lastPartialResultIdx); + + // Check if the first batch in mInflightBatches is ready to be removed, and remove it if so + // This method will hold ResultBatcher::mLock briefly + void checkAndRemoveFirstBatch(); + + // The following sendXXXX methods must be called while the InflightBatch::mLock is locked + // HIDL IPC methods will be called during these methods. + void sendBatchShutterCbsLocked(std::shared_ptr batch); + // send buffers for all batched streams + void sendBatchBuffersLocked(std::shared_ptr batch); + // send buffers for specified streams + void sendBatchBuffersLocked( + std::shared_ptr batch, const std::vector& streams); + // End of sendXXXX methods + + // helper methods + void freeReleaseFences(hidl_vec&); + void notifySingleMsg(NotifyMsg& msg); + void processOneCaptureResult(CaptureResult& result); + void invokeProcessCaptureResultCallback(hidl_vec &results, bool tryWriteFmq); + + // Protect access to mInflightBatches, mNumPartialResults and mStreamsToBatch + // processCaptureRequest, processCaptureResult, notify will compete for this lock + // Do NOT issue HIDL IPCs while holding this lock (except when HAL reports error) + mutable Mutex mLock; + std::deque> mInflightBatches; + uint32_t mNumPartialResults; + std::vector mStreamsToBatch; + const sp mCallback; + std::shared_ptr mResultMetadataQueue; + + // Protect against invokeProcessCaptureResultCallback() + Mutex mProcessCaptureResultLock; + + } mResultBatcher; + + std::vector mVideoStreamIds; + + bool initialize(); + + static bool shouldFreeBufEarly(); + + Status initStatus() const; + + // Validate and import request's input buffer and acquire fence + virtual Status importRequest( + const CaptureRequest& request, + hidl_vec& allBufPtrs, + hidl_vec& allFences); + + Status importRequestImpl( + const CaptureRequest& request, + hidl_vec& allBufPtrs, + hidl_vec& allFences, + // Optional argument for ICameraDeviceSession@3.5 impl + bool allowEmptyBuf = false); + + Status importBuffer(int32_t streamId, + uint64_t bufId, buffer_handle_t buf, + /*out*/buffer_handle_t** outBufPtr, + bool allowEmptyBuf); + + static void cleanupInflightFences( + hidl_vec& allFences, size_t numFences); + + void cleanupBuffersLocked(int id); + + void updateBufferCaches(const hidl_vec& cachesToRemove); + + android_dataspace mapToLegacyDataspace( + android_dataspace dataSpace) const; + + bool handleAePrecaptureCancelRequestLocked( + const camera3_capture_request_t &halRequest, + android::hardware::camera::common::V1_0::helper::CameraMetadata *settings /*out*/, + AETriggerCancelOverride *override /*out*/); + + void overrideResultForPrecaptureCancelLocked( + const AETriggerCancelOverride &aeTriggerCancelOverride, + ::android::hardware::camera::common::V1_0::helper::CameraMetadata *settings /*out*/); + + Status processOneCaptureRequest(const CaptureRequest& request); + /** + * Static callback forwarding methods from HAL to instance + */ + static callbacks_process_capture_result_t sProcessCaptureResult; + static callbacks_notify_t sNotify; + + // By default camera service uses frameNumber/streamId pair to retrieve the buffer 
that + // was sent to HAL. Override this implementation if HAL is using buffers from buffer management + // APIs to send output buffer. + virtual uint64_t getCapResultBufferId(const buffer_handle_t& buf, int streamId); + + status_t constructCaptureResult(CaptureResult& result, + const camera3_capture_result *hal_result); + + // Static helper method to copy/shrink capture result metadata sent by HAL + // Temporarily allocated metadata copy will be hold in mds + static void sShrinkCaptureResult( + camera3_capture_result* dst, const camera3_capture_result* src, + std::vector<::android::hardware::camera::common::V1_0::helper::CameraMetadata>* mds, + std::vector* physCamMdArray, + bool handlePhysCam); + static bool sShouldShrink(const camera_metadata_t* md); + static camera_metadata_t* sCreateCompactCopy(const camera_metadata_t* src); + +private: + + struct TrampolineSessionInterface_3_2 : public ICameraDeviceSession { + TrampolineSessionInterface_3_2(sp parent) : + mParent(parent) {} + + virtual Return constructDefaultRequestSettings( + V3_2::RequestTemplate type, + V3_2::ICameraDeviceSession::constructDefaultRequestSettings_cb _hidl_cb) override { + return mParent->constructDefaultRequestSettings(type, _hidl_cb); + } + + virtual Return configureStreams( + const V3_2::StreamConfiguration& requestedConfiguration, + V3_2::ICameraDeviceSession::configureStreams_cb _hidl_cb) override { + return mParent->configureStreams(requestedConfiguration, _hidl_cb); + } + + virtual Return processCaptureRequest(const hidl_vec& requests, + const hidl_vec& cachesToRemove, + V3_2::ICameraDeviceSession::processCaptureRequest_cb _hidl_cb) override { + return mParent->processCaptureRequest(requests, cachesToRemove, _hidl_cb); + } + + virtual Return getCaptureRequestMetadataQueue( + V3_2::ICameraDeviceSession::getCaptureRequestMetadataQueue_cb _hidl_cb) override { + return mParent->getCaptureRequestMetadataQueue(_hidl_cb); + } + + virtual Return getCaptureResultMetadataQueue( + V3_2::ICameraDeviceSession::getCaptureResultMetadataQueue_cb _hidl_cb) override { + return mParent->getCaptureResultMetadataQueue(_hidl_cb); + } + + virtual Return flush() override { + return mParent->flush(); + } + + virtual Return close() override { + return mParent->close(); + } + + private: + sp mParent; + }; +}; + +} // namespace implementation +} // namespace V3_2 +} // namespace device +} // namespace camera +} // namespace hardware +} // namespace android + +#endif // ANDROID_HARDWARE_CAMERA_DEVICE_V3_2_CAMERADEVICE3SESSION_H diff --git a/camera/device/3.2/default/CameraDevice_3_2.h b/camera/device/3.2/default/CameraDevice_3_2.h new file mode 100644 index 0000000..f474533 --- /dev/null +++ b/camera/device/3.2/default/CameraDevice_3_2.h @@ -0,0 +1,158 @@ +/* + * Copyright (C) 2016 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef ANDROID_HARDWARE_CAMERA_DEVICE_V3_2_CAMERADEVICE_H +#define ANDROID_HARDWARE_CAMERA_DEVICE_V3_2_CAMERADEVICE_H + +#include "utils/Mutex.h" +#include "CameraModule.h" +#include "CameraMetadata.h" +#include "CameraDeviceSession.h" + +#include +#include +#include + +namespace android { +namespace hardware { +namespace camera { +namespace device { +namespace V3_2 { +namespace implementation { + +using ::android::hardware::camera::device::V3_2::RequestTemplate; +using ::android::hardware::camera::device::V3_2::ICameraDevice; +using ::android::hardware::camera::device::V3_2::ICameraDeviceCallback; +using ::android::hardware::camera::device::V3_2::ICameraDeviceSession; +using ::android::hardware::camera::common::V1_0::CameraResourceCost; +using ::android::hardware::camera::common::V1_0::Status; +using ::android::hardware::camera::common::V1_0::TorchMode; +using ::android::hardware::camera::common::V1_0::helper::CameraModule; +using ::android::hardware::Return; +using ::android::hardware::Void; +using ::android::hardware::hidl_vec; +using ::android::hardware::hidl_string; +using ::android::sp; +using ::android::Mutex; + +/* + * The camera device HAL implementation is opened lazily (via the open call) + */ +struct CameraDevice : public virtual RefBase { + // Called by provider HAL. Provider HAL must ensure the uniqueness of + // CameraDevice object per cameraId, or there could be multiple CameraDevice + // trying to access the same physical camera. + // Also, provider will have to keep track of all CameraDevice objects in + // order to notify CameraDevice when the underlying camera is detached + CameraDevice(sp module, + const std::string& cameraId, + const SortedVector>& cameraDeviceNames); + virtual ~CameraDevice(); + + // Retrieve the HIDL interface, split into its own class to avoid inheritance issues when + // dealing with minor version revs and simultaneous implementation and interface inheritance + virtual sp getInterface() { + return new TrampolineDeviceInterface_3_2(this); + } + + // Caller must use this method to check if CameraDevice ctor failed + bool isInitFailed() { return mInitFail; } + // Used by provider HAL to signal external camera disconnected + void setConnectionStatus(bool connected); + + /* Methods from ::android::hardware::camera::device::V3_2::ICameraDevice follow. 
*/ + // The following method can be called without opening the actual camera device + Return getResourceCost(ICameraDevice::getResourceCost_cb _hidl_cb); + Return getCameraCharacteristics(ICameraDevice::getCameraCharacteristics_cb _hidl_cb); + Return setTorchMode(TorchMode mode); + + // Open the device HAL and also return a default capture session + Return open(const sp& callback, ICameraDevice::open_cb _hidl_cb); + + + // Forward the dump call to the opened session, or do nothing + Return dumpState(const ::android::hardware::hidl_handle& fd); + /* End of Methods from ::android::hardware::camera::device::V3_2::ICameraDevice */ + +protected: + + // Overridden by child implementations for returning different versions of CameraDeviceSession + virtual sp createSession(camera3_device_t*, + const camera_metadata_t* deviceInfo, + const sp&); + + const sp mModule; + const std::string mCameraId; + // const after ctor + int mCameraIdInt; + int mDeviceVersion; + bool mInitFail = false; + // Set by provider (when external camera is connected/disconnected) + bool mDisconnected; + wp mSession = nullptr; + + const SortedVector>& mCameraDeviceNames; + + // gating access to mSession and mDisconnected + mutable Mutex mLock; + + // convert conventional HAL status to HIDL Status + static Status getHidlStatus(int); + + Status initStatus() const; + +private: + struct TrampolineDeviceInterface_3_2 : public ICameraDevice { + TrampolineDeviceInterface_3_2(sp parent) : + mParent(parent) {} + + virtual Return getResourceCost(V3_2::ICameraDevice::getResourceCost_cb _hidl_cb) + override { + return mParent->getResourceCost(_hidl_cb); + } + + virtual Return getCameraCharacteristics( + V3_2::ICameraDevice::getCameraCharacteristics_cb _hidl_cb) override { + return mParent->getCameraCharacteristics(_hidl_cb); + } + + virtual Return setTorchMode(TorchMode mode) override { + return mParent->setTorchMode(mode); + } + + virtual Return open(const sp& callback, + V3_2::ICameraDevice::open_cb _hidl_cb) override { + return mParent->open(callback, _hidl_cb); + } + + virtual Return dumpState(const hidl_handle& fd) override { + return mParent->dumpState(fd); + } + + private: + sp mParent; + }; + +}; + +} // namespace implementation +} // namespace V3_2 +} // namespace device +} // namespace camera +} // namespace hardware +} // namespace android + +#endif // ANDROID_HARDWARE_CAMERA_DEVICE_V3_2_CAMERADEVICE_H diff --git a/camera/device/3.2/default/OWNERS b/camera/device/3.2/default/OWNERS new file mode 100644 index 0000000..f48a95c --- /dev/null +++ b/camera/device/3.2/default/OWNERS @@ -0,0 +1 @@ +include platform/frameworks/av:/camera/OWNERS diff --git a/camera/device/3.2/default/convert.cpp b/camera/device/3.2/default/convert.cpp new file mode 100644 index 0000000..06ad7e9 --- /dev/null +++ b/camera/device/3.2/default/convert.cpp @@ -0,0 +1,142 @@ +/* + * Copyright (C) 2016 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#define LOG_TAG "android.hardware.camera.device@3.2-convert-impl" +#include + +#include "include/convert.h" + +namespace android { +namespace hardware { +namespace camera { +namespace device { +namespace V3_2 { +namespace implementation { + +using ::android::hardware::graphics::common::V1_0::Dataspace; +using ::android::hardware::graphics::common::V1_0::PixelFormat; +using ::android::hardware::camera::device::V3_2::BufferUsageFlags; + +bool convertFromHidl(const CameraMetadata &src, const camera_metadata_t** dst) { + if (src.size() == 0) { + // Special case for null metadata + *dst = nullptr; + return true; + } + + const uint8_t* data = src.data(); + // check that the size of CameraMetadata match underlying camera_metadata_t + if (get_camera_metadata_size((camera_metadata_t*)data) != src.size()) { + ALOGE("%s: input CameraMetadata is corrupt!", __FUNCTION__); + return false; + } + *dst = (camera_metadata_t*) data; + return true; +} + +// Note: existing data in dst will be gone. Caller still owns the memory of src +void convertToHidl(const camera_metadata_t *src, CameraMetadata* dst) { + if (src == nullptr) { + return; + } + size_t size = get_camera_metadata_size(src); + dst->setToExternal((uint8_t *) src, size); + return; +} + +void convertFromHidl(const Stream &src, Camera3Stream* dst) { + dst->mId = src.id; + dst->stream_type = (int) src.streamType; + dst->width = src.width; + dst->height = src.height; + dst->format = (int) src.format; + dst->data_space = (android_dataspace_t) src.dataSpace; + dst->rotation = (int) src.rotation; + dst->usage = (uint32_t) src.usage; + // Fields to be filled by HAL (max_buffers, priv) are initialized to 0 + dst->max_buffers = 0; + dst->priv = 0; + return; +} + +void convertToHidl(const Camera3Stream* src, HalStream* dst) { + dst->id = src->mId; + dst->overrideFormat = (PixelFormat) src->format; + dst->maxBuffers = src->max_buffers; + if (src->stream_type == CAMERA3_STREAM_OUTPUT) { + dst->consumerUsage = (BufferUsageFlags)0; + dst->producerUsage = (BufferUsageFlags)src->usage; + } else if (src->stream_type == CAMERA3_STREAM_INPUT) { + dst->producerUsage = (BufferUsageFlags)0; + dst->consumerUsage = (BufferUsageFlags)src->usage; + } else { + //Should not reach here per current HIDL spec, but we might end up adding + // bi-directional stream to HIDL. + ALOGW("%s: Stream type %d is not currently supported!", + __FUNCTION__, src->stream_type); + } +} + +void convertToHidl(const camera3_stream_configuration_t& src, HalStreamConfiguration* dst) { + dst->streams.resize(src.num_streams); + for (uint32_t i = 0; i < src.num_streams; i++) { + convertToHidl(static_cast(src.streams[i]), &dst->streams[i]); + } + return; +} + +void convertFromHidl( + buffer_handle_t* bufPtr, BufferStatus status, camera3_stream_t* stream, int acquireFence, + camera3_stream_buffer_t* dst) { + dst->stream = stream; + dst->buffer = bufPtr; + dst->status = (int) status; + dst->acquire_fence = acquireFence; + dst->release_fence = -1; // meant for HAL to fill in +} + +void convertToHidl(const camera3_notify_msg* src, NotifyMsg* dst) { + dst->type = (MsgType) src->type; + switch (src->type) { + case CAMERA3_MSG_ERROR: + { + // The camera3_stream_t* must be the same as what wrapper HAL passed to conventional + // HAL, or the ID lookup will return garbage. 
Caller should validate the ID here is + // indeed one of active stream IDs + Camera3Stream* stream = static_cast( + src->message.error.error_stream); + dst->msg.error.frameNumber = src->message.error.frame_number; + dst->msg.error.errorStreamId = (stream != nullptr) ? stream->mId : -1; + dst->msg.error.errorCode = (ErrorCode) src->message.error.error_code; + } + break; + case CAMERA3_MSG_SHUTTER: + dst->msg.shutter.frameNumber = src->message.shutter.frame_number; + dst->msg.shutter.timestamp = src->message.shutter.timestamp; + break; + default: + ALOGE("%s: HIDL type converion failed. Unknown msg type 0x%x", + __FUNCTION__, src->type); + } + return; +} + +} // namespace implementation +} // namespace V3_2 +} // namespace device +} // namespace camera +} // namespace hardware +} // namespace android diff --git a/camera/device/3.2/default/include/convert.h b/camera/device/3.2/default/include/convert.h new file mode 100644 index 0000000..96891f0 --- /dev/null +++ b/camera/device/3.2/default/include/convert.h @@ -0,0 +1,66 @@ +/* + * Copyright (C) 2016 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef HARDWARE_INTERFACES_CAMERA_DEVICE_V3_2_DEFAULT_INCLUDE_CONVERT_H_ + +#define HARDWARE_INTERFACES_CAMERA_DEVICE_V3_2_DEFAULT_INCLUDE_CONVERT_H_ + +#include + + +#include +#include +#include "hardware/camera3.h" + +namespace android { +namespace hardware { +namespace camera { +namespace device { +namespace V3_2 { +namespace implementation { + +// The camera3_stream_t sent to conventional HAL. Added mId fields to enable stream ID lookup +// fromt a downcasted camera3_stream +struct Camera3Stream : public camera3_stream { + int mId; +}; + +// *dst will point to the data owned by src, but src still owns the data after this call returns. +bool convertFromHidl(const CameraMetadata &src, const camera_metadata_t** dst); +void convertToHidl(const camera_metadata_t* src, CameraMetadata* dst); + +void convertFromHidl(const Stream &src, Camera3Stream* dst); +void convertToHidl(const Camera3Stream* src, HalStream* dst); + +void convertFromHidl( + buffer_handle_t*, BufferStatus, camera3_stream_t*, int acquireFence, // inputs + camera3_stream_buffer_t* dst); + +void convertToHidl(const camera3_stream_configuration_t& src, HalStreamConfiguration* dst); + +// The camera3_stream_t* in src must be the same as what wrapper HAL passed to conventional +// HAL, or the ID lookup will return garbage. 
Caller should validate the ID in ErrorMsg is +// indeed one of active stream IDs +void convertToHidl(const camera3_notify_msg* src, NotifyMsg* dst); + +} // namespace implementation +} // namespace V3_2 +} // namespace device +} // namespace camera +} // namespace hardware +} // namespace android + +#endif // HARDWARE_INTERFACES_CAMERA_DEVICE_V3_2_DEFAULT_INCLUDE_CONVERT_H_ diff --git a/camera/device/3.2/types.hal b/camera/device/3.2/types.hal new file mode 100644 index 0000000..276e92a --- /dev/null +++ b/camera/device/3.2/types.hal @@ -0,0 +1,998 @@ +/* + * Copyright (C) 2016 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package android.hardware.camera.device@3.2; + +import android.hardware.graphics.common@1.0::types; + +typedef vec CameraMetadata; +typedef bitfield BufferUsageFlags; +typedef bitfield DataspaceFlags; + +/** + * StreamType: + * + * The type of the camera stream, which defines whether the camera HAL device is + * the producer or the consumer for that stream, and how the buffers of the + * stream relate to the other streams. + */ +enum StreamType : uint32_t { + /** + * This stream is an output stream; the camera HAL device must fill buffers + * from this stream with newly captured or reprocessed image data. + */ + OUTPUT = 0, + + /** + * This stream is an input stream; the camera HAL device must read buffers + * from this stream and send them through the camera processing pipeline, + * as if the buffer was a newly captured image from the imager. + * + * The pixel format for input stream can be any format reported by + * android.scaler.availableInputOutputFormatsMap. The pixel format of the + * output stream that is used to produce the reprocessing data may be any + * format reported by android.scaler.availableStreamConfigurations. The + * supported input/output stream combinations depends the camera device + * capabilities, see android.scaler.availableInputOutputFormatsMap for + * stream map details. + * + * This kind of stream is generally used to reprocess data into higher + * quality images (that otherwise would cause a frame rate performance + * loss), or to do off-line reprocessing. + * + * The typical use cases are OPAQUE (typically ZSL) and YUV reprocessing, + * see S8.2, S8.3 and S10 for more details. + */ + INPUT = 1 + +}; + +/** + * StreamRotation: + * + * The required counterclockwise rotation of camera stream. + */ +enum StreamRotation : uint32_t { + /** No rotation */ + ROTATION_0 = 0, + + /** Rotate by 90 degree counterclockwise */ + ROTATION_90 = 1, + + /** Rotate by 180 degree counterclockwise */ + ROTATION_180 = 2, + + /** Rotate by 270 degree counterclockwise */ + ROTATION_270 = 3 + +}; + +/** + * StreamConfigurationMode: + * + * This defines the general operation mode for the HAL (for a given stream + * configuration) where modes besides NORMAL have different semantics, and + * usually limit the generality of the API in exchange for higher performance in + * some particular area. 
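+ *
+ * As an illustrative sketch only (not part of this interface), a session
+ * implementation might consult this mode when sizing its stream buffer
+ * queues; the helper name and the counts below are hypothetical:
+ *
+ *   uint32_t pickMaxBuffers(StreamConfigurationMode mode) {
+ *       // constrained high speed capture batches frames, so keep a deeper queue
+ *       return mode == StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE ? 8 : 4;
+ *   }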
+ */ +enum StreamConfigurationMode : uint32_t { + /** + * Normal stream configuration operation mode. This is the default camera + * operation mode, where all semantics of HAL APIs and metadata controls + * apply. + */ + NORMAL_MODE = 0, + + /** + * Special constrained high speed operation mode for devices that can not + * support high speed output in NORMAL mode. All streams in this + * configuration are operating at high speed mode and have different + * characteristics and limitations to achieve high speed output. The NORMAL + * mode can still be used for high speed output if the HAL can support high + * speed output while satisfying all the semantics of HAL APIs and metadata + * controls. It is recommended for the HAL to support high speed output in + * NORMAL mode (by advertising the high speed FPS ranges in + * android.control.aeAvailableTargetFpsRanges) if possible. + * + * This mode has below limitations/requirements: + * + * 1. The HAL must support up to 2 streams with sizes reported by + * android.control.availableHighSpeedVideoConfigurations. + * 2. In this mode, the HAL is expected to output up to 120fps or + * higher. This mode must support the targeted FPS range and size + * configurations reported by + * android.control.availableHighSpeedVideoConfigurations. + * 3. The HAL must support IMPLEMENTATION_DEFINED output + * stream format. + * 4. To achieve efficient high speed streaming, the HAL may have to + * aggregate multiple frames together and send to camera device for + * processing where the request controls are same for all the frames in + * this batch (batch mode). The HAL must support max batch size and the + * max batch size requirements defined by + * android.control.availableHighSpeedVideoConfigurations. + * 5. In this mode, the HAL must override aeMode, awbMode, and afMode to + * ON, ON, and CONTINUOUS_VIDEO, respectively. All post-processing + * block mode controls must be overridden to be FAST. Therefore, no + * manual control of capture and post-processing parameters is + * possible. All other controls operate the same as when + * android.control.mode == AUTO. This means that all other + * android.control.* fields must continue to work, such as + * + * android.control.aeTargetFpsRange + * android.control.aeExposureCompensation + * android.control.aeLock + * android.control.awbLock + * android.control.effectMode + * android.control.aeRegions + * android.control.afRegions + * android.control.awbRegions + * android.control.afTrigger + * android.control.aePrecaptureTrigger + * + * Outside of android.control.*, the following controls must work: + * + * android.flash.mode (TORCH mode only, automatic flash for still + * capture must not work since aeMode is ON) + * android.lens.opticalStabilizationMode (if it is supported) + * android.scaler.cropRegion + * android.statistics.faceDetectMode (if it is supported) + * 6. To reduce the amount of data passed across process boundaries at + * high frame rate, within one batch, camera framework only propagates + * the last shutter notify and the last capture results (including partial + * results and final result) to the app. The shutter notifies and capture + * results for the other requests in the batch are derived by + * the camera framework. As a result, the HAL can return empty metadata + * except for the last result in the batch. 
+ * + * For more details about high speed stream requirements, see + * android.control.availableHighSpeedVideoConfigurations and + * CONSTRAINED_HIGH_SPEED_VIDEO capability defined in + * android.request.availableCapabilities. + * + * This mode only needs to be supported by HALs that include + * CONSTRAINED_HIGH_SPEED_VIDEO in the android.request.availableCapabilities + * static metadata. + */ + CONSTRAINED_HIGH_SPEED_MODE = 1, + + /** + * A set of vendor-defined operating modes, for custom default camera + * application features that can't be implemented in the fully flexible fashion + * required for NORMAL_MODE. + */ + VENDOR_MODE_0 = 0x8000, + VENDOR_MODE_1, + VENDOR_MODE_2, + VENDOR_MODE_3, + VENDOR_MODE_4, + VENDOR_MODE_5, + VENDOR_MODE_6, + VENDOR_MODE_7 +}; + +/** + * Stream: + * + * A descriptor for a single camera input or output stream. A stream is defined + * by the framework by its buffer resolution and format, and additionally by the + * HAL with the gralloc usage flags and the maximum in-flight buffer count. + * + * If a configureStreams() call returns a non-fatal error, all active streams + * remain valid as if configureStreams() had not been called. + * + */ +struct Stream { + /** + * Stream ID - a nonnegative integer identifier for a stream. + * + * The identical stream ID must reference the same stream, with the same + * width/height/format, across consecutive calls to configureStreams. + * + * If previously-used stream ID is not used in a new call to + * configureStreams, then that stream is no longer active. Such a stream ID + * may be reused in a future configureStreams with a new + * width/height/format. + * + */ + int32_t id; + + /** + * The type of the stream (input vs output, etc). + */ + StreamType streamType; + + /** + * The width in pixels of the buffers in this stream + */ + uint32_t width; + + /** + * The height in pixels of the buffers in this stream + */ + uint32_t height; + + /** + * The pixel format for the buffers in this stream. + * + * If IMPLEMENTATION_DEFINED is used, then the platform + * gralloc module must select a format based on the usage flags provided by + * the camera device and the other endpoint of the stream. + * + */ + android.hardware.graphics.common@1.0::PixelFormat format; + + /** + * The gralloc usage flags for this stream, as needed by the consumer of + * the stream. + * + * The usage flags from the producer and the consumer must be combined + * together and then passed to the platform gralloc HAL module for + * allocating the gralloc buffers for each stream. + * + * The HAL may use these consumer flags to decide stream configuration. For + * streamType INPUT, the value of this field is always 0. For all streams + * passed via configureStreams(), the HAL must set its own + * additional usage flags in its output HalStreamConfiguration. + * + * The usage flag for an output stream may be bitwise combination of usage + * flags for multiple consumers, for the purpose of sharing one camera + * stream between those consumers. The HAL must fail configureStreams call + * with ILLEGAL_ARGUMENT if the combined flags cannot be supported due to + * imcompatible buffer format, dataSpace, or other hardware limitations. + */ + BufferUsageFlags usage; + + /** + * A field that describes the contents of the buffer. The format and buffer + * dimensions define the memory layout and structure of the stream buffers, + * while dataSpace defines the meaning of the data within the buffer. 
+ * + * For most formats, dataSpace defines the color space of the image data. + * In addition, for some formats, dataSpace indicates whether image- or + * depth-based data is requested. See + * android.hardware.graphics.common@1.0::types for details of formats and + * valid dataSpace values for each format. + * + * The HAL must use this dataSpace to configure the stream to the correct + * colorspace, or to select between color and depth outputs if + * supported. The dataspace values are set using the V0 dataspace + * definitions. + */ + DataspaceFlags dataSpace; + + /** + * The required output rotation of the stream. + * + * This must be inspected by HAL along with stream width and height. For + * example, if the rotation is 90 degree and the stream width and height is + * 720 and 1280 respectively, camera service must supply buffers of size + * 720x1280, and HAL must capture a 1280x720 image and rotate the image by + * 90 degree counterclockwise. The rotation field must be ignored when the + * stream type is input. + * + * The HAL must inspect this field during stream configuration and return + * IllegalArgument if HAL cannot perform such rotation. HAL must always + * support ROTATION_0, so a configureStreams() call must not fail for + * unsupported rotation if rotation field of all streams is ROTATION_0. + * + */ + StreamRotation rotation; + +}; + +/** + * StreamConfiguration: + * + * A structure of stream definitions, used by configureStreams(). This + * structure defines all the output streams and the reprocessing input + * stream for the current camera use case. + */ +struct StreamConfiguration { + /** + * An array of camera stream pointers, defining the input/output + * configuration for the camera HAL device. + * + * At most one input-capable stream may be defined. + * At least one output-capable stream must be defined. + */ + vec streams; + + /** + * The operation mode of streams in this configuration. The HAL can use this + * mode as an indicator to set the stream property (e.g., + * HalStream::maxBuffers) appropriately. For example, if the + * configuration is + * CONSTRAINED_HIGH_SPEED_MODE, the HAL may + * want to set aside more buffers for batch mode operation (see + * android.control.availableHighSpeedVideoConfigurations for batch mode + * definition). + * + */ + StreamConfigurationMode operationMode; + +}; + +/** + * HalStream: + * + * The camera HAL's response to each requested stream configuration. + * + * The HAL may specify the desired format, maximum buffers, and + * usage flags for each stream. + * + */ +struct HalStream { + /** + * Stream ID - a nonnegative integer identifier for a stream. + * + * The ID must be one of the stream IDs passed into configureStreams. + */ + int32_t id; + + /** + * An override pixel format for the buffers in this stream. + * + * The HAL must respect the requested format in Stream unless it is + * IMPLEMENTATION_DEFINED, in which case the override format here must be + * used by the client instead, for this stream. This allows cross-platform + * HALs to use a standard format since IMPLEMENTATION_DEFINED formats often + * require device-specific information. In all other cases, the + * overrideFormat must match the requested format. + * + * When HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED is used, then the platform + * gralloc module must select a format based on the usage flags provided by + * the camera device and the other endpoint of the stream. 
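+ *
+ * A minimal sketch of this override rule, for illustration only (the helper
+ * name and the YCBCR_420_888 choice are hypothetical, not mandated here):
+ *
+ *   PixelFormat pickOverrideFormat(PixelFormat requested) {
+ *       // only IMPLEMENTATION_DEFINED may be replaced; all other formats are echoed back
+ *       return requested == PixelFormat::IMPLEMENTATION_DEFINED
+ *               ? PixelFormat::YCBCR_420_888 : requested;
+ *   }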
+ */ + android.hardware.graphics.common@1.0::PixelFormat overrideFormat; + + /** + * The gralloc usage flags for this stream, as needed by the HAL. + * + * For output streams, these are the HAL's producer usage flags. For input + * streams, these are the HAL's consumer usage flags. The usage flags from + * the producer and the consumer must be combined together and then passed + * to the platform graphics allocator HAL for allocating the gralloc buffers + * for each stream. + * + * If the stream's type is INPUT, then producerUsage must be 0, and + * consumerUsage must be set. For other types, producerUsage must be set, + * and consumerUsage must be 0. + */ + BufferUsageFlags producerUsage; + BufferUsageFlags consumerUsage; + + /** + * The maximum number of buffers the HAL device may need to have dequeued at + * the same time. The HAL device may not have more buffers in-flight from + * this stream than this value. + */ + uint32_t maxBuffers; + +}; + +/** + * HalStreamConfiguration: + * + * A structure of stream definitions, returned by configureStreams(). This + * structure defines the HAL's desired parameters for each stream. + * + * All streams that were defined in the input to configureStreams() must have a + * corresponding entry in this structure when returned by configureStreams(). + */ +struct HalStreamConfiguration { + vec streams; +}; + +/** + * BufferStatus: + * + * The current status of a single stream buffer. + */ +enum BufferStatus : uint32_t { + /** + * The buffer is in a normal state, and can be used after waiting on its + * sync fence. + */ + OK = 0, + + /** + * The buffer does not contain valid data, and the data in it must not be + * used. The sync fence must still be waited on before reusing the buffer. + */ + ERROR = 1 +}; + +/** + * StreamBuffer: + * + * A single buffer from a camera3 stream. It includes a handle to its parent + * stream, the handle to the gralloc buffer itself, and sync fences + * + * The buffer does not specify whether it is to be used for input or output; + * that is determined by its parent stream type and how the buffer is passed to + * the HAL device. + */ +struct StreamBuffer { + /** + * The ID of the stream this buffer is associated with. -1 indicates an + * invalid (empty) StreamBuffer, in which case buffer must also point to + * null and bufferId must be 0. + */ + int32_t streamId; + + /** + * The unique ID of the buffer within this StreamBuffer. 0 indicates this + * StreamBuffer contains no buffer. + * For StreamBuffers sent to the HAL in a CaptureRequest, this ID uniquely + * identifies a buffer. When a buffer is sent to HAL for the first time, + * both bufferId and buffer handle must be filled. HAL must keep track of + * the mapping between bufferId and corresponding buffer until the + * corresponding stream is removed from stream configuration or until camera + * device session is closed. After the first time a buffer is introduced to + * HAL, in the future camera service must refer to the same buffer using + * only bufferId, and keep the buffer handle null. + */ + uint64_t bufferId; + + /** + * The graphics buffer handle to the buffer. + * + * For StreamBuffers sent to the HAL in a CaptureRequest, if the bufferId + * is not seen by the HAL before, this buffer handle is guaranteed to be a + * valid handle to a graphics buffer, with dimensions and format matching + * that of the stream. 
If the bufferId has been sent to the HAL before, this + * buffer handle must be null and HAL must look up the actual buffer handle + * to use from its own bufferId to buffer handle map. + * + * For StreamBuffers returned in a CaptureResult, this must be null, since + * the handle to the buffer is already known to the client (since the client + * sent it in the matching CaptureRequest), and the handle can be identified + * by the combination of frame number and stream ID. + */ + handle buffer; + + /** + * Current state of the buffer. The framework must not pass buffers to the + * HAL that are in an error state. In case a buffer could not be filled by + * the HAL, it must have its status set to ERROR when returned to the + * framework with processCaptureResult(). + */ + BufferStatus status; + + /** + * The acquire sync fence for this buffer. The HAL must wait on this fence + * fd before attempting to read from or write to this buffer. + * + * In a buffer included in a CaptureRequest, the client may set this to null + * to indicate that no waiting is necessary for this buffer. + * + * When the HAL returns an input or output buffer to the framework with + * processCaptureResult(), the acquireFence must be set to null. If the HAL + * never waits on the acquireFence due to an error in filling or reading a + * buffer, when calling processCaptureResult() the HAL must set the + * releaseFence of the buffer to be the acquireFence passed to it by the + * client. This allows the client to wait on the fence before reusing the + * buffer. + */ + handle acquireFence; + + /** + * The release sync fence for this buffer. The HAL must set this to a valid + * fence fd when returning the input buffer or output buffers to the client + * in a CaptureResult, or set it to null to indicate that no waiting is + * required for this buffer. + * + * The client must set this to be null for all buffers included in a + * processCaptureRequest call. + * + * After signaling the releaseFence for this buffer, the HAL + * must not make any further attempts to access this buffer as the + * ownership has been fully transferred back to the client. + * + * If this is null, then the ownership of this buffer is transferred back + * immediately upon the call of processCaptureResult. + */ + handle releaseFence; + +}; + +/** + * CameraBlob: + * + * Transport header for camera blob types; generally compressed JPEG buffers in + * output streams. + * + * To capture JPEG images, a stream is created using the pixel format + * HAL_PIXEL_FORMAT_BLOB and dataspace HAL_DATASPACE_V0_JFIF. The buffer size + * for the stream is calculated by the framework, based on the static metadata + * field android.jpeg.maxSize. Since compressed JPEG images are of variable + * size, the HAL needs to include the final size of the compressed image using + * this structure inside the output stream buffer. The camera blob ID field must + * be set to CameraBlobId::JPEG. + * + * The transport header must be at the end of the JPEG output stream + * buffer. That means the jpegBlobId must start at byte[buffer_size - + * sizeof(CameraBlob)], where the buffer_size is the size of gralloc + * buffer. Any HAL using this transport header must account for it in + * android.jpeg.maxSize. The JPEG data itself starts at the beginning of the + * buffer and must be blobSize bytes long. 
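+ *
+ * A minimal sketch of appending the trailer after JPEG encoding, for
+ * illustration only ('mapped', 'bufferSize' and 'jpegSize' are hypothetical
+ * names for the mapped gralloc buffer, its allocated size, and the encoded
+ * image size):
+ *
+ *   CameraBlob blob{CameraBlobId::JPEG, static_cast<uint32_t>(jpegSize)};
+ *   // the transport header must occupy the last sizeof(CameraBlob) bytes of the buffer
+ *   memcpy(mapped + bufferSize - sizeof(CameraBlob), &blob, sizeof(CameraBlob));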
+ */ +enum CameraBlobId : uint16_t { + JPEG = 0x00FF, +}; + +struct CameraBlob { + CameraBlobId blobId; + + uint32_t blobSize; +}; + +/** + * MsgType: + * + * Indicates the type of message sent, which specifies which member of the + * message union is valid. + * + */ +enum MsgType : uint32_t { + /** + * An error has occurred. NotifyMsg::Message::Error contains the + * error information. + */ + ERROR = 1, + + /** + * The exposure of a given request or processing a reprocess request has + * begun. NotifyMsg::Message::Shutter contains the information + * the capture. + */ + SHUTTER = 2 +}; + +/** + * Defined error codes for MsgType::ERROR + */ +enum ErrorCode : uint32_t { + /** + * A serious failure occured. No further frames or buffer streams must + * be produced by the device. Device must be treated as closed. The + * client must reopen the device to use it again. The frameNumber field + * is unused. + */ + ERROR_DEVICE = 1, + + /** + * An error has occurred in processing a request. No output (metadata or + * buffers) must be produced for this request. The frameNumber field + * specifies which request has been dropped. Subsequent requests are + * unaffected, and the device remains operational. + */ + ERROR_REQUEST = 2, + + /** + * An error has occurred in producing an output result metadata buffer + * for a request, but output stream buffers for it must still be + * available. Subsequent requests are unaffected, and the device remains + * operational. The frameNumber field specifies the request for which + * result metadata won't be available. + */ + ERROR_RESULT = 3, + + /** + * An error has occurred in placing an output buffer into a stream for a + * request. The frame metadata and other buffers may still be + * available. Subsequent requests are unaffected, and the device remains + * operational. The frameNumber field specifies the request for which the + * buffer was dropped, and errorStreamId indicates the stream + * that dropped the frame. + */ + ERROR_BUFFER = 4, +}; + +/** + * ErrorMsg: + * + * Message contents for MsgType::ERROR + */ +struct ErrorMsg { + /** + * Frame number of the request the error applies to. 0 if the frame number + * isn't applicable to the error. + */ + uint32_t frameNumber; + + /** + * Pointer to the stream that had a failure. -1 if the stream isn't + * applicable to the error. + */ + int32_t errorStreamId; + + /** + * The code for this error. + */ + ErrorCode errorCode; + +}; + +/** + * ShutterMsg: + * + * Message contents for MsgType::SHUTTER + */ +struct ShutterMsg { + /** + * Frame number of the request that has begun exposure or reprocessing. + */ + uint32_t frameNumber; + + /** + * Timestamp for the start of capture. For a reprocess request, this must + * be input image's start of capture. This must match the capture result + * metadata's sensor exposure start timestamp. + */ + uint64_t timestamp; + +}; + +/** + * NotifyMsg: + * + * The message structure sent to ICameraDevice3Callback::notify() + */ +struct NotifyMsg { + /** + * The message type. + */ + MsgType type; + + union Message { + /** + * Error message contents. Valid if type is MsgType::ERROR + */ + ErrorMsg error; + + /** + * Shutter message contents. Valid if type is MsgType::SHUTTER + */ + ShutterMsg shutter; + } msg; + +}; + +/** + * RequestTemplate: + * + * Available template types for + * ICameraDevice::constructDefaultRequestSettings() + */ +enum RequestTemplate : uint32_t { + /** + * Standard camera preview operation with 3A on auto. 
+ */ + PREVIEW = 1, + + /** + * Standard camera high-quality still capture with 3A and flash on auto. + */ + STILL_CAPTURE = 2, + + /** + * Standard video recording plus preview with 3A on auto, torch off. + */ + VIDEO_RECORD = 3, + + /** + * High-quality still capture while recording video. Applications typically + * include preview, video record, and full-resolution YUV or JPEG streams in + * request. Must not cause stuttering on video stream. 3A on auto. + */ + VIDEO_SNAPSHOT = 4, + + /** + * Zero-shutter-lag mode. Application typically request preview and + * full-resolution data for each frame, and reprocess it to JPEG when a + * still image is requested by user. Settings must provide highest-quality + * full-resolution images without compromising preview frame rate. 3A on + * auto. + */ + ZERO_SHUTTER_LAG = 5, + + /** + * A basic template for direct application control of capture + * parameters. All automatic control is disabled (auto-exposure, auto-white + * balance, auto-focus), and post-processing parameters are set to preview + * quality. The manual capture parameters (exposure, sensitivity, etc.) + * are set to reasonable defaults, but may be overridden by the + * application depending on the intended use case. + */ + MANUAL = 6, + + /** + * First value for vendor-defined request templates + */ + VENDOR_TEMPLATE_START = 0x40000000, + +}; + +/** + * CaptureRequest: + * + * A single request for image capture/buffer reprocessing, sent to the Camera + * HAL device by the framework in processCaptureRequest(). + * + * The request contains the settings to be used for this capture, and the set of + * output buffers to write the resulting image data in. It may optionally + * contain an input buffer, in which case the request is for reprocessing that + * input buffer instead of capturing a new image with the camera sensor. The + * capture is identified by the frameNumber. + * + * In response, the camera HAL device must send a CaptureResult + * structure asynchronously to the framework, using the processCaptureResult() + * callback. + */ +struct CaptureRequest { + /** + * The frame number is an incrementing integer set by the framework to + * uniquely identify this capture. It needs to be returned in the result + * call, and is also used to identify the request in asynchronous + * notifications sent to ICameraDevice3Callback::notify(). + */ + uint32_t frameNumber; + + /** + * If non-zero, read settings from request queue instead + * (see ICameraDeviceSession.getCaptureRequestMetadataQueue). + * If zero, read settings from .settings field. + */ + uint64_t fmqSettingsSize; + + /** + * If fmqSettingsSize is zero, + * the settings buffer contains the capture and processing parameters for + * the request. As a special case, an empty settings buffer indicates that + * the settings are identical to the most-recently submitted capture + * request. A empty buffer cannot be used as the first submitted request + * after a configureStreams() call. + * + * This field must be used if fmqSettingsSize is zero. It must not be used + * if fmqSettingsSize is non-zero. + */ + CameraMetadata settings; + + /** + * The input stream buffer to use for this request, if any. + * + * An invalid inputBuffer is signified by a null inputBuffer::buffer, in + * which case the value of all other members of inputBuffer must be ignored. + * + * If inputBuffer is invalid, then the request is for a new capture from the + * imager. 
If inputBuffer is valid, the request is for reprocessing the + * image contained in inputBuffer, and the HAL must release the inputBuffer + * back to the client in a subsequent processCaptureResult call. + * + * The HAL is required to wait on the acquire sync fence of the input buffer + * before accessing it. + * + */ + StreamBuffer inputBuffer; + + /** + * An array of at least 1 stream buffers, to be filled with image + * data from this capture/reprocess. The HAL must wait on the acquire fences + * of each stream buffer before writing to them. + * + * The HAL takes ownership of the handles in outputBuffers; the client + * must not access them until they are returned in a CaptureResult. + * + * Any or all of the buffers included here may be brand new in this + * request (having never before seen by the HAL). + */ + vec outputBuffers; + +}; + +/** + * CaptureResult: + * + * The result of a single capture/reprocess by the camera HAL device. This is + * sent to the framework asynchronously with processCaptureResult(), in + * response to a single capture request sent to the HAL with + * processCaptureRequest(). Multiple processCaptureResult() calls may be + * performed by the HAL for each request. + * + * Each call, all with the same frame + * number, may contain some subset of the output buffers, and/or the result + * metadata. + * + * The result structure contains the output metadata from this capture, and the + * set of output buffers that have been/will be filled for this capture. Each + * output buffer may come with a release sync fence that the framework must wait + * on before reading, in case the buffer has not yet been filled by the HAL. + * + * The metadata may be provided multiple times for a single frame number. The + * framework must accumulate together the final result set by combining each + * partial result together into the total result set. + * + * If an input buffer is given in a request, the HAL must return it in one of + * the processCaptureResult calls, and the call may be to just return the + * input buffer, without metadata and output buffers; the sync fences must be + * handled the same way they are done for output buffers. + * + * Performance considerations: + * + * Applications receive these partial results immediately, so sending partial + * results is a highly recommended performance optimization to avoid the total + * pipeline latency before sending the results for what is known very early on + * in the pipeline. + * + * A typical use case might be calculating the AF state halfway through the + * pipeline; by sending the state back to the framework immediately, we get a + * 50% performance increase and perceived responsiveness of the auto-focus. + * + */ +struct CaptureResult { + /** + * The frame number is an incrementing integer set by the framework in the + * submitted request to uniquely identify this capture. It is also used to + * identify the request in asynchronous notifications sent to + * ICameraDevice3Callback::notify(). + */ + uint32_t frameNumber; + + /** + * If non-zero, read result from result queue instead + * (see ICameraDeviceSession.getCaptureResultMetadataQueue). + * If zero, read result from .result field. + */ + uint64_t fmqResultSize; + + /** + * The result metadata for this capture. This contains information about the + * final capture parameters, the state of the capture and post-processing + * hardware, the state of the 3A algorithms, if enabled, and the output of + * any enabled statistics units. 
+ * + * If there was an error producing the result metadata, result must be an + * empty metadata buffer, and notify() must be called with + * ErrorCode::ERROR_RESULT. + * + * Multiple calls to processCaptureResult() with a given frameNumber + * may include (partial) result metadata. + * + * Partial metadata submitted must not include any metadata key returned + * in a previous partial result for a given frame. Each new partial result + * for that frame must also set a distinct partialResult value. + * + * If notify has been called with ErrorCode::ERROR_RESULT, all further + * partial results for that frame are ignored by the framework. + */ + CameraMetadata result; + + /** + * The completed output stream buffers for this capture. + * + * They may not yet be filled at the time the HAL calls + * processCaptureResult(); the framework must wait on the release sync + * fences provided by the HAL before reading the buffers. + * + * The StreamBuffer::buffer handle must be null for all returned buffers; + * the client must cache the handle and look it up via the combination of + * frame number and stream ID. + * + * The number of output buffers returned must be less than or equal to the + * matching capture request's count. If this is less than the buffer count + * in the capture request, at least one more call to processCaptureResult + * with the same frameNumber must be made, to return the remaining output + * buffers to the framework. This may only be zero if the structure includes + * valid result metadata or an input buffer is returned in this result. + * + * The HAL must set the stream buffer's release sync fence to a valid sync + * fd, or to null if the buffer has already been filled. + * + * If the HAL encounters an error while processing the buffer, and the + * buffer is not filled, the buffer's status field must be set to ERROR. If + * the HAL did not wait on the acquire fence before encountering the error, + * the acquire fence must be copied into the release fence, to allow the + * framework to wait on the fence before reusing the buffer. + * + * The acquire fence must be set to null for all output buffers. + * + * This vector may be empty; if so, at least one other processCaptureResult + * call must be made (or have been made) by the HAL to provide the filled + * output buffers. + * + * When processCaptureResult is called with a new buffer for a frame, + * all previous frames' buffers for that corresponding stream must have been + * already delivered (the fences need not have yet been signaled). + * + * Buffers for a frame may be sent to framework before the corresponding + * SHUTTER-notify call is made by the HAL. + * + * Performance considerations: + * + * Buffers delivered to the framework are not dispatched to the + * application layer until a start of exposure timestamp has been received + * via a SHUTTER notify() call. It is highly recommended to + * dispatch that call as early as possible. + */ + vec outputBuffers; + + /** + * The handle for the input stream buffer for this capture, if any. + * + * It may not yet be consumed at the time the HAL calls + * processCaptureResult(); the framework must wait on the release sync fence + * provided by the HAL before reusing the buffer. + * + * The HAL must handle the sync fences the same way they are done for + * outputBuffers. + * + * Only one input buffer is allowed to be sent per request. Similarly to + * output buffers, the ordering of returned input buffers must be + * maintained by the HAL. 
+ * + * Performance considerations: + * + * The input buffer should be returned as early as possible. If the HAL + * supports sync fences, it can call processCaptureResult to hand it back + * with sync fences being set appropriately. If the sync fences are not + * supported, the buffer can only be returned when it is consumed, which + * may take long time; the HAL may choose to copy this input buffer to make + * the buffer return sooner. + */ + StreamBuffer inputBuffer; + + /** + * In order to take advantage of partial results, the HAL must set the + * static metadata android.request.partialResultCount to the number of + * partial results it sends for each frame. + * + * Each new capture result with a partial result must set + * this field to a distinct inclusive value between + * 1 and android.request.partialResultCount. + * + * HALs not wishing to take advantage of this feature must not + * set an android.request.partialResultCount or partial_result to a value + * other than 1. + * + * This value must be set to 0 when a capture result contains buffers only + * and no metadata. + */ + uint32_t partialResult; + +}; + +/** + * BufferCache: + * + * A list of cached bufferIds associated with a certain stream. + * Buffers are passed between camera service and camera HAL via bufferId except + * the first time a new buffer is being passed to HAL in CaptureRequest. Camera + * service and camera HAL therefore need to maintain a cached map of bufferId + * and corresponing native handle. + * + */ +struct BufferCache { + /** + * The ID of the stream this list is associated with. + */ + int32_t streamId; + + /** + * A cached buffer ID associated with streamId. + */ + uint64_t bufferId; +}; diff --git a/camera/device/3.3/ICameraDeviceSession.hal b/camera/device/3.3/ICameraDeviceSession.hal new file mode 100644 index 0000000..764392f --- /dev/null +++ b/camera/device/3.3/ICameraDeviceSession.hal @@ -0,0 +1,50 @@ +/* + * Copyright (C) 2017 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package android.hardware.camera.device@3.3; + +import android.hardware.camera.common@1.0::Status; +import android.hardware.camera.device@3.2::ICameraDeviceSession; +import android.hardware.camera.device@3.2::StreamConfiguration; + +/** + * Camera device active session interface. + * + * Obtained via ICameraDevice::open(), this interface contains the methods to + * configure and request captures from an active camera device. + * + */ +interface ICameraDeviceSession extends @3.2::ICameraDeviceSession { + + /** + * configureStreams_3_3: + * + * Identical to @3.2::ICameraDeviceSession.configureStreams, except that: + * + * - The output HalStreamConfiguration now contains an overrideDataspace + * field, to be used by the HAL to select a different dataspace for some + * use cases when dealing with the IMPLEMENTATION_DEFINED pixel format. + * + * Clients may invoke either this method or + * @3.2::ICameraDeviceSession.configureStreams() for stream configuration. 
+ * This method is recommended for clients to use since it provides more + * flexibility. + */ + configureStreams_3_3(StreamConfiguration requestedConfiguration) + generates (Status status, + @3.3::HalStreamConfiguration halConfiguration); + +}; diff --git a/camera/device/3.3/default/Android.bp b/camera/device/3.3/default/Android.bp new file mode 100644 index 0000000..1c7eed6 --- /dev/null +++ b/camera/device/3.3/default/Android.bp @@ -0,0 +1,35 @@ + +cc_library_shared { + name: "vendor.camera.device@3.3-impl", + defaults: ["hidl_defaults"], + proprietary: true, + srcs: [ + "CameraDevice.cpp", + "CameraDeviceSession.cpp", + "convert.cpp", + ], + shared_libs: [ + "libhidlbase", + "libutils", + "libcutils", + "vendor.camera.device@3.2-impl", + "android.hardware.camera.device@3.2", + "android.hardware.camera.device@3.3", + "android.hardware.camera.provider@2.4", + "android.hardware.graphics.mapper@2.0", + "android.hardware.graphics.mapper@3.0", + "android.hardware.graphics.mapper@4.0", + "liblog", + "libgralloctypes", + "libhardware", + "libcamera_metadata", + "libfmq", + ], + static_libs: [ + "android.vendor.hardware.camera.common@1.0-helper", + ], + export_include_dirs: ["."], + export_shared_lib_headers: [ + "libfmq", + ], +} diff --git a/camera/device/3.3/default/CameraDevice.cpp b/camera/device/3.3/default/CameraDevice.cpp new file mode 100644 index 0000000..b4d279e --- /dev/null +++ b/camera/device/3.3/default/CameraDevice.cpp @@ -0,0 +1,67 @@ +/* + * Copyright (C) 2017 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "CamDev@3.3-impl" +#include + +#include +#include +#include "CameraDevice_3_3.h" +#include + +namespace android { +namespace hardware { +namespace camera { +namespace device { +namespace V3_3 { +namespace implementation { + +using ::android::hardware::camera::common::V1_0::Status; +using namespace ::android::hardware::camera::device; + +CameraDevice::CameraDevice( + sp module, const std::string& cameraId, + const SortedVector>& cameraDeviceNames) : + V3_2::implementation::CameraDevice(module, cameraId, cameraDeviceNames) { +} + +CameraDevice::~CameraDevice() { +} + +sp CameraDevice::createSession(camera3_device_t* device, + const camera_metadata_t* deviceInfo, + const sp& callback) { + sp session = new CameraDeviceSession(device, deviceInfo, callback); + IF_ALOGV() { + session->getInterface()->interfaceChain([]( + ::android::hardware::hidl_vec<::android::hardware::hidl_string> interfaceChain) { + ALOGV("Session interface chain:"); + for (const auto& iface : interfaceChain) { + ALOGV(" %s", iface.c_str()); + } + }); + } + return session; +} + +// End of methods from ::android::hardware::camera::device::V3_2::ICameraDevice. 
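+
+// Note: each minor-version CameraDevice only needs to override createSession() to
+// return its own CameraDeviceSession subclass; the remaining ICameraDevice entry
+// points are inherited from the V3_2 implementation and exposed through its
+// trampoline interface.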
+ +} // namespace implementation +} // namespace V3_3 +} // namespace device +} // namespace camera +} // namespace hardware +} // namespace android diff --git a/camera/device/3.3/default/CameraDeviceSession.cpp b/camera/device/3.3/default/CameraDeviceSession.cpp new file mode 100644 index 0000000..60174fb --- /dev/null +++ b/camera/device/3.3/default/CameraDeviceSession.cpp @@ -0,0 +1,117 @@ +/* + * Copyright (C) 2017 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "CamDevSession@3.3-impl" +#include + +#include +#include +#include +#include +#include "CameraDeviceSession.h" + +namespace android { +namespace hardware { +namespace camera { +namespace device { +namespace V3_3 { +namespace implementation { + +CameraDeviceSession::CameraDeviceSession( + camera3_device_t* device, + const camera_metadata_t* deviceInfo, + const sp& callback) : + V3_2::implementation::CameraDeviceSession(device, deviceInfo, callback) { +} + +CameraDeviceSession::~CameraDeviceSession() { +} + +Return CameraDeviceSession::configureStreams_3_3( + const StreamConfiguration& requestedConfiguration, + ICameraDeviceSession::configureStreams_3_3_cb _hidl_cb) { + Status status = initStatus(); + HalStreamConfiguration outStreams; + + // hold the inflight lock for entire configureStreams scope since there must not be any + // inflight request/results during stream configuration. + Mutex::Autolock _l(mInflightLock); + if (!mInflightBuffers.empty()) { + ALOGE("%s: trying to configureStreams while there are still %zu inflight buffers!", + __FUNCTION__, mInflightBuffers.size()); + _hidl_cb(Status::INTERNAL_ERROR, outStreams); + return Void(); + } + + if (!mInflightAETriggerOverrides.empty()) { + ALOGE("%s: trying to configureStreams while there are still %zu inflight" + " trigger overrides!", __FUNCTION__, + mInflightAETriggerOverrides.size()); + _hidl_cb(Status::INTERNAL_ERROR, outStreams); + return Void(); + } + + if (!mInflightRawBoostPresent.empty()) { + ALOGE("%s: trying to configureStreams while there are still %zu inflight" + " boost overrides!", __FUNCTION__, + mInflightRawBoostPresent.size()); + _hidl_cb(Status::INTERNAL_ERROR, outStreams); + return Void(); + } + + if (status != Status::OK) { + _hidl_cb(status, outStreams); + return Void(); + } + + camera3_stream_configuration_t stream_list{}; + hidl_vec streams; + if (!preProcessConfigurationLocked(requestedConfiguration, &stream_list, &streams)) { + _hidl_cb(Status::INTERNAL_ERROR, outStreams); + return Void(); + } + + ATRACE_BEGIN("camera3->configure_streams"); + status_t ret = mDevice->ops->configure_streams(mDevice, &stream_list); + ATRACE_END(); + + // In case Hal returns error most likely it was not able to release + // the corresponding resources of the deleted streams. 
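+    // On success, commit the session-side stream bookkeeping; on failure, roll it
+    // back. The conventional return code is then mapped to a HIDL Status below:
+    // -EINVAL becomes ILLEGAL_ARGUMENT and any other error becomes INTERNAL_ERROR.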
+ if (ret == OK) { + postProcessConfigurationLocked(requestedConfiguration); + } else { + postProcessConfigurationFailureLocked(requestedConfiguration); + } + + if (ret == -EINVAL) { + status = Status::ILLEGAL_ARGUMENT; + } else if (ret != OK) { + status = Status::INTERNAL_ERROR; + } else { + convertToHidl(stream_list, &outStreams); + mFirstRequest = true; + } + + _hidl_cb(status, outStreams); + return Void(); +} + +} // namespace implementation +} // namespace V3_3 +} // namespace device +} // namespace camera +} // namespace hardware +} // namespace android diff --git a/camera/device/3.3/default/CameraDeviceSession.h b/camera/device/3.3/default/CameraDeviceSession.h new file mode 100644 index 0000000..dd52b35 --- /dev/null +++ b/camera/device/3.3/default/CameraDeviceSession.h @@ -0,0 +1,138 @@ +/* + * Copyright (C) 2017 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_HARDWARE_CAMERA_DEVICE_V3_3_CAMERADEVICE3SESSION_H +#define ANDROID_HARDWARE_CAMERA_DEVICE_V3_3_CAMERADEVICE3SESSION_H + +#include +#include +#include <../../3.2/default/CameraDeviceSession.h> +#include +#include +#include +#include +#include +#include +#include +#include "CameraMetadata.h" +#include "HandleImporter.h" +#include "hardware/camera3.h" +#include "hardware/camera_common.h" +#include "utils/Mutex.h" + +namespace android { +namespace hardware { +namespace camera { +namespace device { +namespace V3_3 { +namespace implementation { + +using namespace ::android::hardware::camera::device; +using ::android::hardware::camera::device::V3_2::CaptureRequest; +using ::android::hardware::camera::device::V3_2::StreamConfiguration; +using ::android::hardware::camera::device::V3_3::HalStreamConfiguration; +using ::android::hardware::camera::device::V3_3::ICameraDeviceSession; +using ::android::hardware::camera::common::V1_0::Status; +using ::android::hardware::camera::common::V1_0::helper::HandleImporter; +using ::android::hardware::kSynchronizedReadWrite; +using ::android::hardware::MessageQueue; +using ::android::hardware::MQDescriptorSync; +using ::android::hardware::Return; +using ::android::hardware::Void; +using ::android::hardware::hidl_vec; +using ::android::hardware::hidl_string; +using ::android::sp; +using ::android::Mutex; + +struct CameraDeviceSession : public V3_2::implementation::CameraDeviceSession { + + CameraDeviceSession(camera3_device_t*, + const camera_metadata_t* deviceInfo, + const sp&); + virtual ~CameraDeviceSession(); + + virtual sp getInterface() override { + return new TrampolineSessionInterface_3_3(this); + } + +protected: + // Methods from v3.2 and earlier will trampoline to inherited implementation + + // New methods for v3.3 + + Return configureStreams_3_3( + const StreamConfiguration& requestedConfiguration, + ICameraDeviceSession::configureStreams_3_3_cb _hidl_cb); +private: + + struct TrampolineSessionInterface_3_3 : public ICameraDeviceSession { + TrampolineSessionInterface_3_3(sp parent) : + mParent(parent) {} + + virtual Return 
constructDefaultRequestSettings( + V3_2::RequestTemplate type, + V3_3::ICameraDeviceSession::constructDefaultRequestSettings_cb _hidl_cb) override { + return mParent->constructDefaultRequestSettings(type, _hidl_cb); + } + + virtual Return configureStreams( + const V3_2::StreamConfiguration& requestedConfiguration, + V3_3::ICameraDeviceSession::configureStreams_cb _hidl_cb) override { + return mParent->configureStreams(requestedConfiguration, _hidl_cb); + } + + virtual Return processCaptureRequest(const hidl_vec& requests, + const hidl_vec& cachesToRemove, + V3_3::ICameraDeviceSession::processCaptureRequest_cb _hidl_cb) override { + return mParent->processCaptureRequest(requests, cachesToRemove, _hidl_cb); + } + + virtual Return getCaptureRequestMetadataQueue( + V3_3::ICameraDeviceSession::getCaptureRequestMetadataQueue_cb _hidl_cb) override { + return mParent->getCaptureRequestMetadataQueue(_hidl_cb); + } + + virtual Return getCaptureResultMetadataQueue( + V3_3::ICameraDeviceSession::getCaptureResultMetadataQueue_cb _hidl_cb) override { + return mParent->getCaptureResultMetadataQueue(_hidl_cb); + } + + virtual Return flush() override { + return mParent->flush(); + } + + virtual Return close() override { + return mParent->close(); + } + + virtual Return configureStreams_3_3( + const StreamConfiguration& requestedConfiguration, configureStreams_3_3_cb _hidl_cb) override { + return mParent->configureStreams_3_3(requestedConfiguration, _hidl_cb); + } + + private: + sp mParent; + }; +}; + +} // namespace implementation +} // namespace V3_3 +} // namespace device +} // namespace camera +} // namespace hardware +} // namespace android + +#endif // ANDROID_HARDWARE_CAMERA_DEVICE_V3_3_CAMERADEVICE3SESSION_H diff --git a/camera/device/3.3/default/CameraDevice_3_3.h b/camera/device/3.3/default/CameraDevice_3_3.h new file mode 100644 index 0000000..18b3fe8 --- /dev/null +++ b/camera/device/3.3/default/CameraDevice_3_3.h @@ -0,0 +1,75 @@ +/* + * Copyright (C) 2017 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_HARDWARE_CAMERA_DEVICE_V3_3_CAMERADEVICE_H +#define ANDROID_HARDWARE_CAMERA_DEVICE_V3_3_CAMERADEVICE_H + +#include "utils/Mutex.h" +#include "CameraModule.h" +#include "CameraMetadata.h" +#include "CameraDeviceSession.h" +#include <../../3.2/default/CameraDevice_3_2.h> + +#include +#include +#include + +namespace android { +namespace hardware { +namespace camera { +namespace device { +namespace V3_3 { +namespace implementation { + +using namespace ::android::hardware::camera::device; +using ::android::hardware::camera::common::V1_0::helper::CameraModule; +using ::android::hardware::Return; +using ::android::hardware::Void; +using ::android::hardware::hidl_vec; +using ::android::hardware::hidl_string; +using ::android::sp; + +/* + * The camera device HAL implementation is opened lazily (via the open call) + */ +struct CameraDevice : public V3_2::implementation::CameraDevice { + + // Called by provider HAL. 
+ // Provider HAL must ensure the uniqueness of CameraDevice object per cameraId, or there could + // be multiple CameraDevice trying to access the same physical camera. Also, provider will have + // to keep track of all CameraDevice objects in order to notify CameraDevice when the underlying + // camera is detached. + // Delegates nearly all work to CameraDevice_3_2 + CameraDevice(sp module, + const std::string& cameraId, + const SortedVector>& cameraDeviceNames); + ~CameraDevice(); + +protected: + virtual sp createSession(camera3_device_t*, + const camera_metadata_t* deviceInfo, + const sp&) override; + +}; + +} // namespace implementation +} // namespace V3_3 +} // namespace device +} // namespace camera +} // namespace hardware +} // namespace android + +#endif // ANDROID_HARDWARE_CAMERA_DEVICE_V3_3_CAMERADEVICE_H diff --git a/camera/device/3.3/default/OWNERS b/camera/device/3.3/default/OWNERS new file mode 100644 index 0000000..f48a95c --- /dev/null +++ b/camera/device/3.3/default/OWNERS @@ -0,0 +1 @@ +include platform/frameworks/av:/camera/OWNERS diff --git a/camera/device/3.3/default/convert.cpp b/camera/device/3.3/default/convert.cpp new file mode 100644 index 0000000..dae190b --- /dev/null +++ b/camera/device/3.3/default/convert.cpp @@ -0,0 +1,65 @@ +/* + * Copyright (C) 2017 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "android.hardware.camera.device@3.3-convert-impl" +#include + +#include "include/convert.h" + +namespace android { +namespace hardware { +namespace camera { +namespace device { +namespace V3_3 { +namespace implementation { + +using ::android::hardware::graphics::common::V1_0::Dataspace; +using ::android::hardware::graphics::common::V1_0::PixelFormat; +using ::android::hardware::camera::device::V3_2::BufferUsageFlags; + +void convertToHidl(const Camera3Stream* src, HalStream* dst) { + dst->overrideDataSpace = src->data_space; + dst->v3_2.id = src->mId; + dst->v3_2.overrideFormat = (PixelFormat) src->format; + dst->v3_2.maxBuffers = src->max_buffers; + if (src->stream_type == CAMERA3_STREAM_OUTPUT) { + dst->v3_2.consumerUsage = (BufferUsageFlags)0; + dst->v3_2.producerUsage = (BufferUsageFlags)src->usage; + } else if (src->stream_type == CAMERA3_STREAM_INPUT) { + dst->v3_2.producerUsage = (BufferUsageFlags)0; + dst->v3_2.consumerUsage = (BufferUsageFlags)src->usage; + } else { + //Should not reach here per current HIDL spec, but we might end up adding + // bi-directional stream to HIDL. 
+ ALOGW("%s: Stream type %d is not currently supported!", + __FUNCTION__, src->stream_type); + } +} + +void convertToHidl(const camera3_stream_configuration_t& src, HalStreamConfiguration* dst) { + dst->streams.resize(src.num_streams); + for (uint32_t i = 0; i < src.num_streams; i++) { + convertToHidl(static_cast(src.streams[i]), &dst->streams[i]); + } + return; +} + +} // namespace implementation +} // namespace V3_3 +} // namespace device +} // namespace camera +} // namespace hardware +} // namespace android diff --git a/camera/device/3.3/default/include/convert.h b/camera/device/3.3/default/include/convert.h new file mode 100644 index 0000000..23bb797 --- /dev/null +++ b/camera/device/3.3/default/include/convert.h @@ -0,0 +1,49 @@ +/* + * Copyright (C) 2017 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef HARDWARE_INTERFACES_CAMERA_DEVICE_V3_3_DEFAULT_INCLUDE_CONVERT_H_ + +#define HARDWARE_INTERFACES_CAMERA_DEVICE_V3_3_DEFAULT_INCLUDE_CONVERT_H_ + +#include + + +#include +#include +#include "hardware/camera3.h" +#include "../../3.2/default/include/convert.h" + +namespace android { +namespace hardware { +namespace camera { +namespace device { +namespace V3_3 { +namespace implementation { + +using ::android::hardware::camera::device::V3_2::implementation::Camera3Stream; + +void convertToHidl(const Camera3Stream* src, HalStream* dst); + +void convertToHidl(const camera3_stream_configuration_t& src, HalStreamConfiguration* dst); + +} // namespace implementation +} // namespace V3_3 +} // namespace device +} // namespace camera +} // namespace hardware +} // namespace android + +#endif // HARDWARE_INTERFACES_CAMERA_DEVICE_V3_3_DEFAULT_INCLUDE_CONVERT_H_ diff --git a/camera/device/3.3/types.hal b/camera/device/3.3/types.hal new file mode 100644 index 0000000..b4ad702 --- /dev/null +++ b/camera/device/3.3/types.hal @@ -0,0 +1,57 @@ +/* + * Copyright (C) 2017 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package android.hardware.camera.device@3.3; + +import android.hardware.camera.device@3.2::DataspaceFlags; +import android.hardware.camera.device@3.2::HalStream; + +/** + * HalStream: + * + * The camera HAL's response to each requested stream configuration. + * + * This version extends the @3.2 HalStream with the overrideDataspace + * field + */ +struct HalStream { + /** + * The definition of HalStream from the prior version. 
+ */ + @3.2::HalStream v3_2; + + /** + * An override dataSpace for the buffers in this stream. + * + * The HAL must respect the requested dataSpace in Stream unless it is + * IMPLEMENTATION_DEFINED, in which case the override dataSpace here must be + * used by the client instead, for this stream. This allows cross-platform + * HALs to use a specific dataSpace since IMPLEMENTATION_DEFINED formats often + * require device-specific information for correct selection. In all other cases, the + * overrideFormat must match the requested format. + */ + DataspaceFlags overrideDataSpace; +}; + +/** + * HalStreamConfiguration: + * + * Identical to @3.2::HalStreamConfiguration, except that it contains @3.3::HalStream entries. + * + */ +struct HalStreamConfiguration { + vec streams; +}; diff --git a/camera/device/3.4/ICameraDeviceCallback.hal b/camera/device/3.4/ICameraDeviceCallback.hal new file mode 100644 index 0000000..8ce8d4b --- /dev/null +++ b/camera/device/3.4/ICameraDeviceCallback.hal @@ -0,0 +1,43 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package android.hardware.camera.device@3.4; + +import @3.2::ICameraDeviceCallback; + +/** + * + * Callback methods for the HAL to call into the framework. + * + * These methods are used to return metadata and image buffers for a completed + * or failed captures, and to notify the framework of asynchronous events such + * as errors. + * + * The framework must not call back into the HAL from within these callbacks, + * and these calls must not block for extended periods. + * + */ +interface ICameraDeviceCallback extends @3.2::ICameraDeviceCallback { + /** + * processCaptureResult_3_4: + * + * Identical to @3.2::ICameraDeviceCallback.processCaptureResult, except + * that it takes a list of @3.4::CaptureResult, which could contain + * physical camera metadata for logical multi-camera. + * + */ + processCaptureResult_3_4(vec<@3.4::CaptureResult> results); +}; diff --git a/camera/device/3.4/ICameraDeviceSession.hal b/camera/device/3.4/ICameraDeviceSession.hal new file mode 100644 index 0000000..e1663e6 --- /dev/null +++ b/camera/device/3.4/ICameraDeviceSession.hal @@ -0,0 +1,109 @@ +/* + * Copyright (C) 2017-2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
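// [Illustrative sketch, not part of the patch] The @3.3::HalStream documented above
// only wraps the @3.2 struct and adds overrideDataSpace. In the generated C++ types,
// a HAL-side fill helper (simplified from the convert.cpp earlier in this patch) could
// look roughly like the following; the function name and the 'legacyDataSpace'
// parameter are assumptions for illustration only.
// Assumes: #include <android/hardware/camera/device/3.3/types.h>

namespace example {

using HalStream32 = ::android::hardware::camera::device::V3_2::HalStream;
using HalStream33 = ::android::hardware::camera::device::V3_3::HalStream;

// Copy the unchanged @3.2 fields and report the dataspace the HAL actually selected;
// clients fall back to this value when the requested stream left it implementation-defined.
inline void fillHalStream_3_3(const HalStream32& base, uint32_t legacyDataSpace,
                              HalStream33* out) {
    out->v3_2 = base;
    out->overrideDataSpace = legacyDataSpace;
}

}  // namespace example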
+ */ + +package android.hardware.camera.device@3.4; + +import android.hardware.camera.common@1.0::Status; +import @3.2::CameraMetadata; +import @3.3::ICameraDeviceSession; +import @3.3::HalStreamConfiguration; +import @3.2::BufferCache; + +/** + * Camera device active session interface. + * + * Obtained via ICameraDevice::open(), this interface contains the methods to + * configure and request captures from an active camera device. + */ +interface ICameraDeviceSession extends @3.3::ICameraDeviceSession { + + /** + * configureStreams_3_4: + * + * Identical to @3.3::ICameraDeviceSession.configureStreams, except that: + * + * - The requested configuration includes session parameters. + * + * @return Status Status code for the operation, one of: + * OK: + * On successful stream configuration. + * INTERNAL_ERROR: + * If there has been a fatal error and the device is no longer + * operational. Only close() can be called successfully by the + * framework after this error is returned. + * ILLEGAL_ARGUMENT: + * If the requested stream configuration is invalid. Some examples + * of invalid stream configurations include: + * - Including more than 1 INPUT stream + * - Not including any OUTPUT streams + * - Including streams with unsupported formats, or an unsupported + * size for that format. + * - Including too many output streams of a certain format. + * - Unsupported rotation configuration + * - Stream sizes/formats don't satisfy the + * StreamConfigurationMode requirements + * for non-NORMAL mode, or the requested operation_mode is not + * supported by the HAL. + * - Unsupported usage flag + * The camera service cannot filter out all possible illegal stream + * configurations, since some devices may support more simultaneous + * streams or larger stream resolutions than the minimum required + * for a given camera device hardware level. The HAL must return an + * ILLEGAL_ARGUMENT for any unsupported stream set, and then be + * ready to accept a future valid stream configuration in a later + * configureStreams call. + * @return halConfiguration The stream parameters desired by the HAL for + * each stream, including maximum buffers, the usage flags, and the + * override format. + */ + configureStreams_3_4(@3.4::StreamConfiguration requestedConfiguration) + generates (Status status, + @3.4::HalStreamConfiguration halConfiguration); + + /** + * processCaptureRequest_3_4: + * + * Identical to @3.2::ICameraDeviceSession.processCaptureRequest, except that: + * + * - The capture request can include individual settings for physical camera devices + * backing a logical multi-camera. + * + * @return status Status code for the operation, one of: + * OK: + * On a successful start to processing the capture request + * ILLEGAL_ARGUMENT: + * If the input is malformed (the settings are empty when not + * allowed, the physical camera settings are invalid, there are 0 + * output buffers, etc) and capture processing + * cannot start. Failures during request processing must be + * handled by calling ICameraDeviceCallback::notify(). In case of + * this error, the framework retains responsibility for the + * stream buffers' fences and the buffer handles; the HAL must not + * close the fences or return these buffers with + * ICameraDeviceCallback::processCaptureResult(). + * INTERNAL_ERROR: + * If the camera device has encountered a serious error. After this + * error is returned, only the close() method can be successfully + * called by the framework. 
+ * @return numRequestProcessed Number of requests successfully processed by + * camera HAL. When status is OK, this must be equal to the size of + * requests. When the call fails, this number is the number of requests + * that HAL processed successfully before HAL runs into an error. + * + */ + processCaptureRequest_3_4(vec requests, vec cachesToRemove) + generates (Status status, uint32_t numRequestProcessed); +}; diff --git a/camera/device/3.4/default/Android.bp b/camera/device/3.4/default/Android.bp new file mode 100644 index 0000000..df6ec65 --- /dev/null +++ b/camera/device/3.4/default/Android.bp @@ -0,0 +1,108 @@ +// +// Copyright (C) 2017 The Android Open Source Project +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// + +cc_library_headers { + name: "vendor.camera.device@3.4-impl_headers", + vendor: true, + export_include_dirs: ["include/device_v3_4_impl"], +} + +cc_library_headers { + name: "vendor.camera.device@3.4-external-impl_headers", + vendor: true, + export_include_dirs: ["include/ext_device_v3_4_impl"], +} + +cc_library_shared { + name: "vendor.camera.device@3.4-impl", + defaults: ["hidl_defaults"], + proprietary: true, + vendor: true, + srcs: [ + "CameraDevice.cpp", + "CameraDeviceSession.cpp", + "convert.cpp", + ], + shared_libs: [ + "libhidlbase", + "libutils", + "libcutils", + "vendor.camera.device@3.2-impl", + "vendor.camera.device@3.3-impl", + "android.hardware.camera.device@3.2", + "android.hardware.camera.device@3.3", + "android.hardware.camera.device@3.4", + "android.hardware.camera.provider@2.4", + "android.hardware.graphics.mapper@2.0", + "android.hardware.graphics.mapper@3.0", + "android.hardware.graphics.mapper@4.0", + "liblog", + "libgralloctypes", + "libhardware", + "libcamera_metadata", + "libfmq", + ], + static_libs: [ + "android.vendor.hardware.camera.common@1.0-helper", + ], + local_include_dirs: ["include/device_v3_4_impl"], + export_shared_lib_headers: [ + "libfmq", + ], +} + +cc_library_shared { + name: "vendor.camera.device@3.4-external-impl", + defaults: ["hidl_defaults"], + proprietary: true, + vendor: true, + srcs: [ + "ExternalCameraDevice.cpp", + "ExternalCameraDeviceSession.cpp", + "ExternalCameraUtils.cpp", + ], + shared_libs: [ + "libhidlbase", + "libutils", + "libcutils", + "vendor.camera.device@3.2-impl", + "vendor.camera.device@3.3-impl", + "android.hardware.camera.device@3.2", + "android.hardware.camera.device@3.3", + "android.hardware.camera.device@3.4", + "android.hardware.camera.provider@2.4", + "android.hardware.graphics.mapper@2.0", + "android.hardware.graphics.mapper@3.0", + "android.hardware.graphics.mapper@4.0", + "liblog", + "libgralloctypes", + "libhardware", + "libcamera_metadata", + "libfmq", + "libsync", + "libyuv", + "libjpeg", + "libexif", + "libtinyxml2", + ], + static_libs: [ + "android.vendor.hardware.camera.common@1.0-helper", + ], + local_include_dirs: ["include/ext_device_v3_4_impl"], + export_shared_lib_headers: [ + "libfmq", + ], +} diff --git a/camera/device/3.4/default/CameraDevice.cpp 
b/camera/device/3.4/default/CameraDevice.cpp new file mode 100644 index 0000000..bc443de --- /dev/null +++ b/camera/device/3.4/default/CameraDevice.cpp @@ -0,0 +1,67 @@ +/* + * Copyright (C) 2017 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "CamDev@3.4-impl" +#include + +#include +#include +#include "CameraDevice_3_4.h" +#include + +namespace android { +namespace hardware { +namespace camera { +namespace device { +namespace V3_4 { +namespace implementation { + +using ::android::hardware::camera::common::V1_0::Status; +using namespace ::android::hardware::camera::device; + +CameraDevice::CameraDevice( + sp module, const std::string& cameraId, + const SortedVector>& cameraDeviceNames) : + V3_2::implementation::CameraDevice(module, cameraId, cameraDeviceNames) { +} + +CameraDevice::~CameraDevice() { +} + +sp CameraDevice::createSession(camera3_device_t* device, + const camera_metadata_t* deviceInfo, + const sp& callback) { + sp session = new CameraDeviceSession(device, deviceInfo, callback); + IF_ALOGV() { + session->getInterface()->interfaceChain([]( + ::android::hardware::hidl_vec<::android::hardware::hidl_string> interfaceChain) { + ALOGV("Session interface chain:"); + for (const auto& iface : interfaceChain) { + ALOGV(" %s", iface.c_str()); + } + }); + } + return session; +} + +// End of methods from ::android::hardware::camera::device::V3_2::ICameraDevice. + +} // namespace implementation +} // namespace V3_4 +} // namespace device +} // namespace camera +} // namespace hardware +} // namespace android diff --git a/camera/device/3.4/default/CameraDeviceSession.cpp b/camera/device/3.4/default/CameraDeviceSession.cpp new file mode 100644 index 0000000..3f088a3 --- /dev/null +++ b/camera/device/3.4/default/CameraDeviceSession.cpp @@ -0,0 +1,773 @@ +/* + * Copyright (C) 2017-2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
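// [Illustrative sketch, not part of the patch] The @3.4 session API documented in
// ICameraDeviceSession.hal above is driven by the camera service roughly as sketched
// below. 'session', 'streams' and 'sessionParams' are hypothetical placeholders, and
// error handling is reduced to the ILLEGAL_ARGUMENT / OK cases described in the
// interface documentation.

void exampleConfigure_3_4(
        const ::android::sp<::android::hardware::camera::device::V3_4::ICameraDeviceSession>& session,
        const ::android::hardware::hidl_vec<::android::hardware::camera::device::V3_4::Stream>& streams,
        const ::android::hardware::camera::device::V3_2::CameraMetadata& sessionParams) {
    namespace V3_2 = ::android::hardware::camera::device::V3_2;
    namespace V3_4 = ::android::hardware::camera::device::V3_4;
    using ::android::hardware::camera::common::V1_0::Status;

    V3_4::StreamConfiguration config;
    config.streams = streams;
    config.operationMode = V3_2::StreamConfigurationMode::NORMAL_MODE;
    config.sessionParams = sessionParams;  // settings applied once at configure time

    session->configureStreams_3_4(config,
        [](Status status, const V3_4::HalStreamConfiguration& halConfig) {
            if (status == Status::ILLEGAL_ARGUMENT) {
                // Unsupported stream set: the session stays usable and a later,
                // valid configureStreams call is expected to succeed.
            } else if (status == Status::OK) {
                // e.g. halConfig.streams[0].v3_3.v3_2.maxBuffers tells the client
                // how many buffers to allocate for that stream.
            }
        });
}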
+ */ + +#define LOG_TAG "CamDevSession@3.4-impl" +#include + +#include +#include +#include +#include +#include "CameraDeviceSession.h" +#include "CameraModule.h" + +namespace android { +namespace hardware { +namespace camera { +namespace device { +namespace V3_4 { +namespace implementation { + +using ::android::hardware::camera::common::V1_0::helper::CameraModule; + +CameraDeviceSession::CameraDeviceSession( + camera3_device_t* device, + const camera_metadata_t* deviceInfo, + const sp& callback) : + V3_3::implementation::CameraDeviceSession(device, deviceInfo, callback), + mResultBatcher_3_4(callback) { + + mHasCallback_3_4 = false; + + auto castResult = ICameraDeviceCallback::castFrom(callback); + if (castResult.isOk()) { + sp callback3_4 = castResult; + if (callback3_4 != nullptr) { + process_capture_result = sProcessCaptureResult_3_4; + notify = sNotify_3_4; + mHasCallback_3_4 = true; + if (!mInitFail) { + mResultBatcher_3_4.setResultMetadataQueue(mResultMetadataQueue); + } + } + } + + mResultBatcher_3_4.setNumPartialResults(mNumPartialResults); + + // Parse and store current logical camera's physical ids. + (void)CameraModule::isLogicalMultiCamera(mDeviceInfo, &mPhysicalCameraIds); + +} + +CameraDeviceSession::~CameraDeviceSession() { +} + +Return CameraDeviceSession::configureStreams_3_4( + const StreamConfiguration& requestedConfiguration, + ICameraDeviceSession::configureStreams_3_4_cb _hidl_cb) { + configureStreams_3_4_Impl(requestedConfiguration, _hidl_cb); + return Void(); +} + +void CameraDeviceSession::configureStreams_3_4_Impl( + const StreamConfiguration& requestedConfiguration, + ICameraDeviceSession::configureStreams_3_4_cb _hidl_cb, + uint32_t streamConfigCounter, bool useOverriddenFields) { + Status status = initStatus(); + HalStreamConfiguration outStreams; + + // If callback is 3.2, make sure no physical stream is configured + if (!mHasCallback_3_4) { + for (size_t i = 0; i < requestedConfiguration.streams.size(); i++) { + if (requestedConfiguration.streams[i].physicalCameraId.size() > 0) { + ALOGE("%s: trying to configureStreams with physical camera id with V3.2 callback", + __FUNCTION__); + _hidl_cb(Status::INTERNAL_ERROR, outStreams); + return; + } + } + } + + // hold the inflight lock for entire configureStreams scope since there must not be any + // inflight request/results during stream configuration. 
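    // The inflight lock taken below also guards the mInflightBuffers,
    // mInflightAETriggerOverrides and mInflightRawBoostPresent checks that follow,
    // so configuration only proceeds once every outstanding request has drained.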
+ Mutex::Autolock _l(mInflightLock); + if (!mInflightBuffers.empty()) { + ALOGE("%s: trying to configureStreams while there are still %zu inflight buffers!", + __FUNCTION__, mInflightBuffers.size()); + _hidl_cb(Status::INTERNAL_ERROR, outStreams); + return; + } + + if (!mInflightAETriggerOverrides.empty()) { + ALOGE("%s: trying to configureStreams while there are still %zu inflight" + " trigger overrides!", __FUNCTION__, + mInflightAETriggerOverrides.size()); + _hidl_cb(Status::INTERNAL_ERROR, outStreams); + return; + } + + if (!mInflightRawBoostPresent.empty()) { + ALOGE("%s: trying to configureStreams while there are still %zu inflight" + " boost overrides!", __FUNCTION__, + mInflightRawBoostPresent.size()); + _hidl_cb(Status::INTERNAL_ERROR, outStreams); + return; + } + + if (status != Status::OK) { + _hidl_cb(status, outStreams); + return; + } + + const camera_metadata_t *paramBuffer = nullptr; + if (0 < requestedConfiguration.sessionParams.size()) { + V3_2::implementation::convertFromHidl(requestedConfiguration.sessionParams, ¶mBuffer); + } + + camera3_stream_configuration_t stream_list{}; + // Block reading mStreamConfigCounter until configureStream returns + Mutex::Autolock _sccl(mStreamConfigCounterLock); + mStreamConfigCounter = streamConfigCounter; + hidl_vec streams; + stream_list.session_parameters = paramBuffer; + if (!preProcessConfigurationLocked_3_4(requestedConfiguration, + useOverriddenFields, &stream_list, &streams)) { + _hidl_cb(Status::INTERNAL_ERROR, outStreams); + return; + } + + ATRACE_BEGIN("camera3->configure_streams"); + status_t ret = mDevice->ops->configure_streams(mDevice, &stream_list); + ATRACE_END(); + + // In case Hal returns error most likely it was not able to release + // the corresponding resources of the deleted streams. + if (ret == OK) { + postProcessConfigurationLocked_3_4(requestedConfiguration); + } else { + postProcessConfigurationFailureLocked_3_4(requestedConfiguration); + } + + if (ret == -EINVAL) { + status = Status::ILLEGAL_ARGUMENT; + } else if (ret != OK) { + status = Status::INTERNAL_ERROR; + } else { + V3_4::implementation::convertToHidl(stream_list, &outStreams); + mFirstRequest = true; + } + + _hidl_cb(status, outStreams); + return; +} + +bool CameraDeviceSession::preProcessConfigurationLocked_3_4( + const StreamConfiguration& requestedConfiguration, bool useOverriddenFields, + camera3_stream_configuration_t *stream_list /*out*/, + hidl_vec *streams /*out*/) { + + if ((stream_list == nullptr) || (streams == nullptr)) { + return false; + } + + stream_list->operation_mode = (uint32_t) requestedConfiguration.operationMode; + stream_list->num_streams = requestedConfiguration.streams.size(); + streams->resize(stream_list->num_streams); + stream_list->streams = streams->data(); + + for (uint32_t i = 0; i < stream_list->num_streams; i++) { + int id = requestedConfiguration.streams[i].v3_2.id; + + if (mStreamMap.count(id) == 0) { + Camera3Stream stream; + convertFromHidl(requestedConfiguration.streams[i], &stream); + mStreamMap[id] = stream; + mPhysicalCameraIdMap[id] = requestedConfiguration.streams[i].physicalCameraId; + mStreamMap[id].data_space = mapToLegacyDataspace( + mStreamMap[id].data_space); + mCirculatingBuffers.emplace(stream.mId, CirculatingBuffers{}); + } else { + // width/height/format must not change, but usage/rotation might need to change. + // format and data_space may change. 
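            // Stream type, width, height and physical camera id are treated as
            // immutable and rejected if they change. Format and dataSpace are only
            // validated when useOverriddenFields is set; otherwise they are refreshed
            // below, together with rotation and usage.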
+ if (mStreamMap[id].stream_type != + (int) requestedConfiguration.streams[i].v3_2.streamType || + mStreamMap[id].width != requestedConfiguration.streams[i].v3_2.width || + mStreamMap[id].height != requestedConfiguration.streams[i].v3_2.height || + mPhysicalCameraIdMap[id] != requestedConfiguration.streams[i].physicalCameraId) { + ALOGE("%s: stream %d configuration changed!", __FUNCTION__, id); + return false; + } + if (useOverriddenFields) { + android_dataspace_t requestedDataSpace = + mapToLegacyDataspace(static_cast( + requestedConfiguration.streams[i].v3_2.dataSpace)); + if (mStreamMap[id].format != (int) requestedConfiguration.streams[i].v3_2.format || + mStreamMap[id].data_space != requestedDataSpace) { + ALOGE("%s: stream %d configuration changed!", __FUNCTION__, id); + return false; + } + } else { + mStreamMap[id].format = + (int) requestedConfiguration.streams[i].v3_2.format; + mStreamMap[id].data_space = (android_dataspace_t) + requestedConfiguration.streams[i].v3_2.dataSpace; + } + mStreamMap[id].rotation = (int) requestedConfiguration.streams[i].v3_2.rotation; + mStreamMap[id].usage = (uint32_t) requestedConfiguration.streams[i].v3_2.usage; + } + // It is possible for the entry in 'mStreamMap' to get initialized by an older + // HIDL API. Make sure that the physical id is always initialized when using + // a more recent API call. + mStreamMap[id].physical_camera_id = mPhysicalCameraIdMap[id].c_str(); + + (*streams)[i] = &mStreamMap[id]; + } + + if (mFreeBufEarly) { + // Remove buffers of deleted streams + for(auto it = mStreamMap.begin(); it != mStreamMap.end(); it++) { + int id = it->first; + bool found = false; + for (const auto& stream : requestedConfiguration.streams) { + if (id == stream.v3_2.id) { + found = true; + break; + } + } + if (!found) { + // Unmap all buffers of deleted stream + cleanupBuffersLocked(id); + } + } + } + return true; +} + +void CameraDeviceSession::postProcessConfigurationLocked_3_4( + const StreamConfiguration& requestedConfiguration) { + // delete unused streams, note we do this after adding new streams to ensure new stream + // will not have the same address as deleted stream, and HAL has a chance to reference + // the to be deleted stream in configure_streams call + for(auto it = mStreamMap.begin(); it != mStreamMap.end();) { + int id = it->first; + bool found = false; + for (const auto& stream : requestedConfiguration.streams) { + if (id == stream.v3_2.id) { + found = true; + break; + } + } + if (!found) { + // Unmap all buffers of deleted stream + // in case the configuration call succeeds and HAL + // is able to release the corresponding resources too. 
+ if (!mFreeBufEarly) { + cleanupBuffersLocked(id); + } + it = mStreamMap.erase(it); + } else { + ++it; + } + } + + // Track video streams + mVideoStreamIds.clear(); + for (const auto& stream : requestedConfiguration.streams) { + if (stream.v3_2.streamType == StreamType::OUTPUT && + stream.v3_2.usage & + graphics::common::V1_0::BufferUsage::VIDEO_ENCODER) { + mVideoStreamIds.push_back(stream.v3_2.id); + } + } + mResultBatcher_3_4.setBatchedStreams(mVideoStreamIds); +} + +void CameraDeviceSession::postProcessConfigurationFailureLocked_3_4( + const StreamConfiguration& requestedConfiguration) { + if (mFreeBufEarly) { + // Re-build the buf cache entry for deleted streams + for(auto it = mStreamMap.begin(); it != mStreamMap.end(); it++) { + int id = it->first; + bool found = false; + for (const auto& stream : requestedConfiguration.streams) { + if (id == stream.v3_2.id) { + found = true; + break; + } + } + if (!found) { + mCirculatingBuffers.emplace(id, CirculatingBuffers{}); + } + } + } +} + +Return CameraDeviceSession::processCaptureRequest_3_4( + const hidl_vec& requests, + const hidl_vec& cachesToRemove, + ICameraDeviceSession::processCaptureRequest_3_4_cb _hidl_cb) { + updateBufferCaches(cachesToRemove); + + uint32_t numRequestProcessed = 0; + Status s = Status::OK; + for (size_t i = 0; i < requests.size(); i++, numRequestProcessed++) { + s = processOneCaptureRequest_3_4(requests[i]); + if (s != Status::OK) { + break; + } + } + + if (s == Status::OK && requests.size() > 1) { + mResultBatcher_3_4.registerBatch(requests[0].v3_2.frameNumber, requests.size()); + } + + _hidl_cb(s, numRequestProcessed); + return Void(); +} + +Status CameraDeviceSession::processOneCaptureRequest_3_4(const V3_4::CaptureRequest& request) { + Status status = initStatus(); + if (status != Status::OK) { + ALOGE("%s: camera init failed or disconnected", __FUNCTION__); + return status; + } + // If callback is 3.2, make sure there are no physical settings. 
+ if (!mHasCallback_3_4) { + if (request.physicalCameraSettings.size() > 0) { + ALOGE("%s: trying to call processCaptureRequest_3_4 with physical camera id " + "and V3.2 callback", __FUNCTION__); + return Status::INTERNAL_ERROR; + } + } + + camera3_capture_request_t halRequest; + halRequest.frame_number = request.v3_2.frameNumber; + + bool converted = true; + V3_2::CameraMetadata settingsFmq; // settings from FMQ + if (request.v3_2.fmqSettingsSize > 0) { + // non-blocking read; client must write metadata before calling + // processOneCaptureRequest + settingsFmq.resize(request.v3_2.fmqSettingsSize); + bool read = mRequestMetadataQueue->read(settingsFmq.data(), request.v3_2.fmqSettingsSize); + if (read) { + converted = V3_2::implementation::convertFromHidl(settingsFmq, &halRequest.settings); + } else { + ALOGE("%s: capture request settings metadata couldn't be read from fmq!", __FUNCTION__); + converted = false; + } + } else { + converted = V3_2::implementation::convertFromHidl(request.v3_2.settings, + &halRequest.settings); + } + + if (!converted) { + ALOGE("%s: capture request settings metadata is corrupt!", __FUNCTION__); + return Status::ILLEGAL_ARGUMENT; + } + + if (mFirstRequest && halRequest.settings == nullptr) { + ALOGE("%s: capture request settings must not be null for first request!", + __FUNCTION__); + return Status::ILLEGAL_ARGUMENT; + } + + hidl_vec allBufPtrs; + hidl_vec allFences; + bool hasInputBuf = (request.v3_2.inputBuffer.streamId != -1 && + request.v3_2.inputBuffer.bufferId != 0); + size_t numOutputBufs = request.v3_2.outputBuffers.size(); + size_t numBufs = numOutputBufs + (hasInputBuf ? 1 : 0); + + if (numOutputBufs == 0) { + ALOGE("%s: capture request must have at least one output buffer!", __FUNCTION__); + return Status::ILLEGAL_ARGUMENT; + } + + status = importRequest(request.v3_2, allBufPtrs, allFences); + if (status != Status::OK) { + return status; + } + + hidl_vec outHalBufs; + outHalBufs.resize(numOutputBufs); + bool aeCancelTriggerNeeded = false; + ::android::hardware::camera::common::V1_0::helper::CameraMetadata settingsOverride; + { + Mutex::Autolock _l(mInflightLock); + if (hasInputBuf) { + auto streamId = request.v3_2.inputBuffer.streamId; + auto key = std::make_pair(request.v3_2.inputBuffer.streamId, request.v3_2.frameNumber); + auto& bufCache = mInflightBuffers[key] = camera3_stream_buffer_t{}; + convertFromHidl( + allBufPtrs[numOutputBufs], request.v3_2.inputBuffer.status, + &mStreamMap[request.v3_2.inputBuffer.streamId], allFences[numOutputBufs], + &bufCache); + bufCache.stream->physical_camera_id = mPhysicalCameraIdMap[streamId].c_str(); + halRequest.input_buffer = &bufCache; + } else { + halRequest.input_buffer = nullptr; + } + + halRequest.num_output_buffers = numOutputBufs; + for (size_t i = 0; i < numOutputBufs; i++) { + auto streamId = request.v3_2.outputBuffers[i].streamId; + auto key = std::make_pair(streamId, request.v3_2.frameNumber); + auto& bufCache = mInflightBuffers[key] = camera3_stream_buffer_t{}; + convertFromHidl( + allBufPtrs[i], request.v3_2.outputBuffers[i].status, + &mStreamMap[streamId], allFences[i], + &bufCache); + bufCache.stream->physical_camera_id = mPhysicalCameraIdMap[streamId].c_str(); + outHalBufs[i] = bufCache; + } + halRequest.output_buffers = outHalBufs.data(); + + AETriggerCancelOverride triggerOverride; + aeCancelTriggerNeeded = handleAePrecaptureCancelRequestLocked( + halRequest, &settingsOverride /*out*/, &triggerOverride/*out*/); + if (aeCancelTriggerNeeded) { + 
mInflightAETriggerOverrides[halRequest.frame_number] = + triggerOverride; + halRequest.settings = settingsOverride.getAndLock(); + } + } + + std::vector physicalCameraIds; + std::vector physicalCameraSettings; + std::vector physicalFmq; + size_t settingsCount = request.physicalCameraSettings.size(); + if (settingsCount > 0) { + physicalCameraIds.reserve(settingsCount); + physicalCameraSettings.reserve(settingsCount); + physicalFmq.reserve(settingsCount); + + for (size_t i = 0; i < settingsCount; i++) { + uint64_t settingsSize = request.physicalCameraSettings[i].fmqSettingsSize; + const camera_metadata_t *settings = nullptr; + if (settingsSize > 0) { + physicalFmq.push_back(V3_2::CameraMetadata(settingsSize)); + bool read = mRequestMetadataQueue->read(physicalFmq[i].data(), settingsSize); + if (read) { + converted = V3_2::implementation::convertFromHidl(physicalFmq[i], &settings); + physicalCameraSettings.push_back(settings); + } else { + ALOGE("%s: physical camera settings metadata couldn't be read from fmq!", + __FUNCTION__); + converted = false; + } + } else { + converted = V3_2::implementation::convertFromHidl( + request.physicalCameraSettings[i].settings, &settings); + physicalCameraSettings.push_back(settings); + } + + if (!converted) { + ALOGE("%s: physical camera settings metadata is corrupt!", __FUNCTION__); + return Status::ILLEGAL_ARGUMENT; + } + + if (mFirstRequest && settings == nullptr) { + ALOGE("%s: Individual request settings must not be null for first request!", + __FUNCTION__); + return Status::ILLEGAL_ARGUMENT; + } + + physicalCameraIds.push_back(request.physicalCameraSettings[i].physicalCameraId.c_str()); + } + } + halRequest.num_physcam_settings = settingsCount; + halRequest.physcam_id = physicalCameraIds.data(); + halRequest.physcam_settings = physicalCameraSettings.data(); + + ATRACE_ASYNC_BEGIN("frame capture", request.v3_2.frameNumber); + ATRACE_BEGIN("camera3->process_capture_request"); + status_t ret = mDevice->ops->process_capture_request(mDevice, &halRequest); + ATRACE_END(); + if (aeCancelTriggerNeeded) { + settingsOverride.unlock(halRequest.settings); + } + if (ret != OK) { + Mutex::Autolock _l(mInflightLock); + ALOGE("%s: HAL process_capture_request call failed!", __FUNCTION__); + + cleanupInflightFences(allFences, numBufs); + if (hasInputBuf) { + auto key = std::make_pair(request.v3_2.inputBuffer.streamId, request.v3_2.frameNumber); + mInflightBuffers.erase(key); + } + for (size_t i = 0; i < numOutputBufs; i++) { + auto key = std::make_pair(request.v3_2.outputBuffers[i].streamId, + request.v3_2.frameNumber); + mInflightBuffers.erase(key); + } + if (aeCancelTriggerNeeded) { + mInflightAETriggerOverrides.erase(request.v3_2.frameNumber); + } + + if (ret == BAD_VALUE) { + return Status::ILLEGAL_ARGUMENT; + } else { + return Status::INTERNAL_ERROR; + } + } + + mFirstRequest = false; + return Status::OK; +} + +/** + * Static callback forwarding methods from HAL to instance + */ +void CameraDeviceSession::sProcessCaptureResult_3_4( + const camera3_callback_ops *cb, + const camera3_capture_result *hal_result) { + CameraDeviceSession *d = + const_cast(static_cast(cb)); + + CaptureResult result = {}; + camera3_capture_result shadowResult; + bool handlePhysCam = (d->mDeviceVersion >= CAMERA_DEVICE_API_VERSION_3_5); + std::vector<::android::hardware::camera::common::V1_0::helper::CameraMetadata> compactMds; + std::vector physCamMdArray; + sShrinkCaptureResult(&shadowResult, hal_result, &compactMds, &physCamMdArray, handlePhysCam); + + status_t ret = 
d->constructCaptureResult(result.v3_2, &shadowResult); + if (ret != OK) { + return; + } + + if (handlePhysCam) { + if (shadowResult.num_physcam_metadata > d->mPhysicalCameraIds.size()) { + ALOGE("%s: Fatal: Invalid num_physcam_metadata %u", __FUNCTION__, + shadowResult.num_physcam_metadata); + return; + } + result.physicalCameraMetadata.resize(shadowResult.num_physcam_metadata); + for (uint32_t i = 0; i < shadowResult.num_physcam_metadata; i++) { + std::string physicalId = shadowResult.physcam_ids[i]; + if (d->mPhysicalCameraIds.find(physicalId) == d->mPhysicalCameraIds.end()) { + ALOGE("%s: Fatal: Invalid physcam_ids[%u]: %s", __FUNCTION__, + i, shadowResult.physcam_ids[i]); + return; + } + V3_2::CameraMetadata physicalMetadata; + V3_2::implementation::convertToHidl( + shadowResult.physcam_metadata[i], &physicalMetadata); + PhysicalCameraMetadata physicalCameraMetadata = { + .fmqMetadataSize = 0, + .physicalCameraId = physicalId, + .metadata = physicalMetadata }; + result.physicalCameraMetadata[i] = physicalCameraMetadata; + } + } + d->mResultBatcher_3_4.processCaptureResult_3_4(result); +} + +void CameraDeviceSession::sNotify_3_4( + const camera3_callback_ops *cb, + const camera3_notify_msg *msg) { + CameraDeviceSession *d = + const_cast(static_cast(cb)); + V3_2::NotifyMsg hidlMsg; + V3_2::implementation::convertToHidl(msg, &hidlMsg); + + if (hidlMsg.type == (V3_2::MsgType) CAMERA3_MSG_ERROR && + hidlMsg.msg.error.errorStreamId != -1) { + if (d->mStreamMap.count(hidlMsg.msg.error.errorStreamId) != 1) { + ALOGE("%s: unknown stream ID %d reports an error!", + __FUNCTION__, hidlMsg.msg.error.errorStreamId); + return; + } + } + + if (static_cast(hidlMsg.type) == CAMERA3_MSG_ERROR) { + switch (hidlMsg.msg.error.errorCode) { + case V3_2::ErrorCode::ERROR_DEVICE: + case V3_2::ErrorCode::ERROR_REQUEST: + case V3_2::ErrorCode::ERROR_RESULT: { + Mutex::Autolock _l(d->mInflightLock); + auto entry = d->mInflightAETriggerOverrides.find( + hidlMsg.msg.error.frameNumber); + if (d->mInflightAETriggerOverrides.end() != entry) { + d->mInflightAETriggerOverrides.erase( + hidlMsg.msg.error.frameNumber); + } + + auto boostEntry = d->mInflightRawBoostPresent.find( + hidlMsg.msg.error.frameNumber); + if (d->mInflightRawBoostPresent.end() != boostEntry) { + d->mInflightRawBoostPresent.erase( + hidlMsg.msg.error.frameNumber); + } + + } + break; + case V3_2::ErrorCode::ERROR_BUFFER: + default: + break; + } + + } + + d->mResultBatcher_3_4.notify(hidlMsg); +} + +CameraDeviceSession::ResultBatcher_3_4::ResultBatcher_3_4( + const sp& callback) : + V3_3::implementation::CameraDeviceSession::ResultBatcher(callback) { + auto castResult = ICameraDeviceCallback::castFrom(callback); + if (castResult.isOk()) { + mCallback_3_4 = castResult; + } +} + +void CameraDeviceSession::ResultBatcher_3_4::processCaptureResult_3_4(CaptureResult& result) { + auto pair = getBatch(result.v3_2.frameNumber); + int batchIdx = pair.first; + if (batchIdx == NOT_BATCHED) { + processOneCaptureResult_3_4(result); + return; + } + std::shared_ptr batch = pair.second; + { + Mutex::Autolock _l(batch->mLock); + // Check if the batch is removed (mostly by notify error) before lock was acquired + if (batch->mRemoved) { + // Fall back to non-batch path + processOneCaptureResult_3_4(result); + return; + } + + // queue metadata + if (result.v3_2.result.size() != 0) { + // Save a copy of metadata + batch->mResultMds[result.v3_2.partialResult].mMds.push_back( + std::make_pair(result.v3_2.frameNumber, result.v3_2.result)); + } + + // queue buffer + std::vector 
filledStreams; + std::vector nonBatchedBuffers; + for (auto& buffer : result.v3_2.outputBuffers) { + auto it = batch->mBatchBufs.find(buffer.streamId); + if (it != batch->mBatchBufs.end()) { + InflightBatch::BufferBatch& bb = it->second; + auto id = buffer.streamId; + pushStreamBuffer(std::move(buffer), bb.mBuffers); + filledStreams.push_back(id); + } else { + pushStreamBuffer(std::move(buffer), nonBatchedBuffers); + } + } + + // send non-batched buffers up + if (nonBatchedBuffers.size() > 0 || result.v3_2.inputBuffer.streamId != -1) { + CaptureResult nonBatchedResult; + nonBatchedResult.v3_2.frameNumber = result.v3_2.frameNumber; + nonBatchedResult.v3_2.fmqResultSize = 0; + nonBatchedResult.v3_2.outputBuffers.resize(nonBatchedBuffers.size()); + for (size_t i = 0; i < nonBatchedBuffers.size(); i++) { + moveStreamBuffer( + std::move(nonBatchedBuffers[i]), nonBatchedResult.v3_2.outputBuffers[i]); + } + moveStreamBuffer(std::move(result.v3_2.inputBuffer), nonBatchedResult.v3_2.inputBuffer); + nonBatchedResult.v3_2.partialResult = 0; // 0 for buffer only results + processOneCaptureResult_3_4(nonBatchedResult); + } + + if (result.v3_2.frameNumber == batch->mLastFrame) { + // Send data up + if (result.v3_2.partialResult > 0) { + sendBatchMetadataLocked(batch, result.v3_2.partialResult); + } + // send buffer up + if (filledStreams.size() > 0) { + sendBatchBuffersLocked(batch, filledStreams); + } + } + } // end of batch lock scope + + // see if the batch is complete + if (result.v3_2.frameNumber == batch->mLastFrame) { + checkAndRemoveFirstBatch(); + } +} + +void CameraDeviceSession::ResultBatcher_3_4::processOneCaptureResult_3_4(CaptureResult& result) { + hidl_vec results; + results.resize(1); + results[0] = std::move(result); + invokeProcessCaptureResultCallback_3_4(results, /* tryWriteFmq */true); + freeReleaseFences_3_4(results); + return; +} + +void CameraDeviceSession::ResultBatcher_3_4::invokeProcessCaptureResultCallback_3_4( + hidl_vec &results, bool tryWriteFmq) { + if (mProcessCaptureResultLock.tryLock() != OK) { + ALOGV("%s: previous call is not finished! 
waiting 1s...", __FUNCTION__); + if (mProcessCaptureResultLock.timedLock(1000000000 /* 1s */) != OK) { + ALOGE("%s: cannot acquire lock in 1s, cannot proceed", + __FUNCTION__); + return; + } + } + if (tryWriteFmq && mResultMetadataQueue->availableToWrite() > 0) { + for (CaptureResult &result : results) { + if (result.v3_2.result.size() > 0) { + if (mResultMetadataQueue->write(result.v3_2.result.data(), + result.v3_2.result.size())) { + result.v3_2.fmqResultSize = result.v3_2.result.size(); + result.v3_2.result.resize(0); + } else { + ALOGW("%s: couldn't utilize fmq, fall back to hwbinder", __FUNCTION__); + result.v3_2.fmqResultSize = 0; + } + } + + for (auto& onePhysMetadata : result.physicalCameraMetadata) { + if (mResultMetadataQueue->write(onePhysMetadata.metadata.data(), + onePhysMetadata.metadata.size())) { + onePhysMetadata.fmqMetadataSize = onePhysMetadata.metadata.size(); + onePhysMetadata.metadata.resize(0); + } else { + ALOGW("%s: couldn't utilize fmq, fall back to hwbinder", __FUNCTION__); + onePhysMetadata.fmqMetadataSize = 0; + } + } + } + } + mCallback_3_4->processCaptureResult_3_4(results); + mProcessCaptureResultLock.unlock(); +} + +void CameraDeviceSession::ResultBatcher_3_4::freeReleaseFences_3_4(hidl_vec& results) { + for (auto& result : results) { + if (result.v3_2.inputBuffer.releaseFence.getNativeHandle() != nullptr) { + native_handle_t* handle = const_cast( + result.v3_2.inputBuffer.releaseFence.getNativeHandle()); + native_handle_close(handle); + native_handle_delete(handle); + } + for (auto& buf : result.v3_2.outputBuffers) { + if (buf.releaseFence.getNativeHandle() != nullptr) { + native_handle_t* handle = const_cast( + buf.releaseFence.getNativeHandle()); + native_handle_close(handle); + native_handle_delete(handle); + } + } + } + return; +} + +} // namespace implementation +} // namespace V3_4 +} // namespace device +} // namespace camera +} // namespace hardware +} // namespace android diff --git a/camera/device/3.4/default/ExternalCameraDevice.cpp b/camera/device/3.4/default/ExternalCameraDevice.cpp new file mode 100644 index 0000000..311c688 --- /dev/null +++ b/camera/device/3.4/default/ExternalCameraDevice.cpp @@ -0,0 +1,1029 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#define LOG_TAG "ExtCamDev@3.4" +//#define LOG_NDEBUG 0 +#include + +#include +#include +#include +#include +#include "android-base/macros.h" +#include "CameraMetadata.h" +#include "../../3.2/default/include/convert.h" +#include "ExternalCameraDevice_3_4.h" + +namespace android { +namespace hardware { +namespace camera { +namespace device { +namespace V3_4 { +namespace implementation { + +namespace { +// Only support MJPEG for now as it seems to be the one supports higher fps +// Other formats to consider in the future: +// * V4L2_PIX_FMT_YVU420 (== YV12) +// * V4L2_PIX_FMT_YVYU (YVYU: can be converted to YV12 or other YUV420_888 formats) +const std::array kSupportedFourCCs{ + {V4L2_PIX_FMT_MJPEG, V4L2_PIX_FMT_Z16}}; // double braces required in C++11 + +constexpr int MAX_RETRY = 5; // Allow retry v4l2 open failures a few times. +constexpr int OPEN_RETRY_SLEEP_US = 100000; // 100ms * MAX_RETRY = 0.5 seconds + +} // anonymous namespace + +const std::regex kDevicePathRE("/dev/video([0-9]+)"); + +ExternalCameraDevice::ExternalCameraDevice( + const std::string& devicePath, const ExternalCameraConfig& cfg) : + mCameraId("-1"), + mDevicePath(devicePath), + mCfg(cfg) { + std::smatch sm; + if (std::regex_match(mDevicePath, sm, kDevicePathRE)) { + mCameraId = std::to_string(mCfg.cameraIdOffset + std::stoi(sm[1])); + } else { + ALOGE("%s: device path match failed for %s", __FUNCTION__, mDevicePath.c_str()); + } +} + +ExternalCameraDevice::~ExternalCameraDevice() {} + +bool ExternalCameraDevice::isInitFailed() { + Mutex::Autolock _l(mLock); + return isInitFailedLocked(); +} + +bool ExternalCameraDevice::isInitFailedLocked() { + if (!mInitialized) { + status_t ret = initCameraCharacteristics(); + if (ret != OK) { + ALOGE("%s: init camera characteristics failed: errorno %d", __FUNCTION__, ret); + mInitFailed = true; + } + mInitialized = true; + } + return mInitFailed; +} + +Return ExternalCameraDevice::getResourceCost( + ICameraDevice::getResourceCost_cb _hidl_cb) { + CameraResourceCost resCost; + resCost.resourceCost = 100; + _hidl_cb(Status::OK, resCost); + return Void(); +} + +Return ExternalCameraDevice::getCameraCharacteristics( + ICameraDevice::getCameraCharacteristics_cb _hidl_cb) { + Mutex::Autolock _l(mLock); + V3_2::CameraMetadata hidlChars; + + if (isInitFailedLocked()) { + _hidl_cb(Status::INTERNAL_ERROR, hidlChars); + return Void(); + } + + const camera_metadata_t* rawMetadata = mCameraCharacteristics.getAndLock(); + V3_2::implementation::convertToHidl(rawMetadata, &hidlChars); + _hidl_cb(Status::OK, hidlChars); + mCameraCharacteristics.unlock(rawMetadata); + return Void(); +} + +Return ExternalCameraDevice::setTorchMode(TorchMode) { + return Status::OPERATION_NOT_SUPPORTED; +} + +Return ExternalCameraDevice::open( + const sp& callback, ICameraDevice::open_cb _hidl_cb) { + Status status = Status::OK; + sp session = nullptr; + + if (callback == nullptr) { + ALOGE("%s: cannot open camera %s. callback is null!", + __FUNCTION__, mCameraId.c_str()); + _hidl_cb(Status::ILLEGAL_ARGUMENT, nullptr); + return Void(); + } + + if (isInitFailed()) { + ALOGE("%s: cannot open camera %s. 
camera init failed!", + __FUNCTION__, mCameraId.c_str()); + _hidl_cb(Status::INTERNAL_ERROR, nullptr); + return Void(); + } + + mLock.lock(); + + ALOGV("%s: Initializing device for camera %s", __FUNCTION__, mCameraId.c_str()); + session = mSession.promote(); + if (session != nullptr && !session->isClosed()) { + ALOGE("%s: cannot open an already opened camera!", __FUNCTION__); + mLock.unlock(); + _hidl_cb(Status::CAMERA_IN_USE, nullptr); + return Void(); + } + + unique_fd fd(::open(mDevicePath.c_str(), O_RDWR)); + if (fd.get() < 0) { + int numAttempt = 0; + do { + ALOGW("%s: v4l2 device %s open failed, wait 33ms and try again", + __FUNCTION__, mDevicePath.c_str()); + usleep(OPEN_RETRY_SLEEP_US); // sleep and try again + fd.reset(::open(mDevicePath.c_str(), O_RDWR)); + numAttempt++; + } while (fd.get() < 0 && numAttempt <= MAX_RETRY); + + if (fd.get() < 0) { + ALOGE("%s: v4l2 device open %s failed: %s", + __FUNCTION__, mDevicePath.c_str(), strerror(errno)); + mLock.unlock(); + _hidl_cb(Status::INTERNAL_ERROR, nullptr); + return Void(); + } + } + + session = createSession( + callback, mCfg, mSupportedFormats, mCroppingType, + mCameraCharacteristics, mCameraId, std::move(fd)); + if (session == nullptr) { + ALOGE("%s: camera device session allocation failed", __FUNCTION__); + mLock.unlock(); + _hidl_cb(Status::INTERNAL_ERROR, nullptr); + return Void(); + } + if (session->isInitFailed()) { + ALOGE("%s: camera device session init failed", __FUNCTION__); + session = nullptr; + mLock.unlock(); + _hidl_cb(Status::INTERNAL_ERROR, nullptr); + return Void(); + } + mSession = session; + + mLock.unlock(); + + _hidl_cb(status, session->getInterface()); + return Void(); +} + +Return ExternalCameraDevice::dumpState(const ::android::hardware::hidl_handle& handle) { + Mutex::Autolock _l(mLock); + if (handle.getNativeHandle() == nullptr) { + ALOGE("%s: handle must not be null", __FUNCTION__); + return Void(); + } + if (handle->numFds != 1 || handle->numInts != 0) { + ALOGE("%s: handle must contain 1 FD and 0 integers! 
Got %d FDs and %d ints", + __FUNCTION__, handle->numFds, handle->numInts); + return Void(); + } + int fd = handle->data[0]; + if (mSession == nullptr) { + dprintf(fd, "No active camera device session instance\n"); + return Void(); + } + auto session = mSession.promote(); + if (session == nullptr) { + dprintf(fd, "No active camera device session instance\n"); + return Void(); + } + // Call into active session to dump states + session->dumpState(handle); + return Void(); +} + + +status_t ExternalCameraDevice::initCameraCharacteristics() { + if (mCameraCharacteristics.isEmpty()) { + // init camera characteristics + unique_fd fd(::open(mDevicePath.c_str(), O_RDWR)); + if (fd.get() < 0) { + ALOGE("%s: v4l2 device open %s failed", __FUNCTION__, mDevicePath.c_str()); + return DEAD_OBJECT; + } + + status_t ret; + ret = initDefaultCharsKeys(&mCameraCharacteristics); + if (ret != OK) { + ALOGE("%s: init default characteristics key failed: errorno %d", __FUNCTION__, ret); + mCameraCharacteristics.clear(); + return ret; + } + + ret = initCameraControlsCharsKeys(fd.get(), &mCameraCharacteristics); + if (ret != OK) { + ALOGE("%s: init camera control characteristics key failed: errorno %d", __FUNCTION__, ret); + mCameraCharacteristics.clear(); + return ret; + } + + ret = initOutputCharsKeys(fd.get(), &mCameraCharacteristics); + if (ret != OK) { + ALOGE("%s: init output characteristics key failed: errorno %d", __FUNCTION__, ret); + mCameraCharacteristics.clear(); + return ret; + } + + ret = initAvailableCapabilities(&mCameraCharacteristics); + if (ret != OK) { + ALOGE("%s: init available capabilities key failed: errorno %d", __FUNCTION__, ret); + mCameraCharacteristics.clear(); + return ret; + } + } + return OK; +} + +#define ARRAY_SIZE(a) (sizeof(a) / sizeof(a[0])) +#define UPDATE(tag, data, size) \ +do { \ + if (metadata->update((tag), (data), (size))) { \ + ALOGE("Update " #tag " failed!"); \ + return -EINVAL; \ + } \ +} while (0) + +status_t ExternalCameraDevice::initAvailableCapabilities( + ::android::hardware::camera::common::V1_0::helper::CameraMetadata* metadata) { + + if (mSupportedFormats.empty()) { + ALOGE("%s: Supported formats list is empty", __FUNCTION__); + return UNKNOWN_ERROR; + } + + bool hasDepth = false; + bool hasColor = false; + for (const auto& fmt : mSupportedFormats) { + switch (fmt.fourcc) { + case V4L2_PIX_FMT_Z16: hasDepth = true; break; + case V4L2_PIX_FMT_MJPEG: hasColor = true; break; + default: ALOGW("%s: Unsupported format found", __FUNCTION__); + } + } + + std::vector availableCapabilities; + if (hasDepth) { + availableCapabilities.push_back(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_DEPTH_OUTPUT); + } + if (hasColor) { + availableCapabilities.push_back(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE); + } + if(!availableCapabilities.empty()) { + UPDATE(ANDROID_REQUEST_AVAILABLE_CAPABILITIES, availableCapabilities.data(), + availableCapabilities.size()); + } + + return OK; +} + +status_t ExternalCameraDevice::initDefaultCharsKeys( + ::android::hardware::camera::common::V1_0::helper::CameraMetadata* metadata) { + const uint8_t hardware_level = ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_EXTERNAL; + UPDATE(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL, &hardware_level, 1); + + // android.colorCorrection + const uint8_t availableAberrationModes[] = { + ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF}; + UPDATE(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES, + availableAberrationModes, ARRAY_SIZE(availableAberrationModes)); + + // android.control + const uint8_t antibandingMode = 
+ ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO; + UPDATE(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES, + &antibandingMode, 1); + + const int32_t controlMaxRegions[] = {/*AE*/ 0, /*AWB*/ 0, /*AF*/ 0}; + UPDATE(ANDROID_CONTROL_MAX_REGIONS, controlMaxRegions, + ARRAY_SIZE(controlMaxRegions)); + + const uint8_t videoStabilizationMode = + ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF; + UPDATE(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES, + &videoStabilizationMode, 1); + + const uint8_t awbAvailableMode = ANDROID_CONTROL_AWB_MODE_AUTO; + UPDATE(ANDROID_CONTROL_AWB_AVAILABLE_MODES, &awbAvailableMode, 1); + + const uint8_t aeAvailableMode = ANDROID_CONTROL_AE_MODE_ON; + UPDATE(ANDROID_CONTROL_AE_AVAILABLE_MODES, &aeAvailableMode, 1); + + const uint8_t availableFffect = ANDROID_CONTROL_EFFECT_MODE_OFF; + UPDATE(ANDROID_CONTROL_AVAILABLE_EFFECTS, &availableFffect, 1); + + const uint8_t controlAvailableModes[] = {ANDROID_CONTROL_MODE_OFF, + ANDROID_CONTROL_MODE_AUTO}; + UPDATE(ANDROID_CONTROL_AVAILABLE_MODES, controlAvailableModes, + ARRAY_SIZE(controlAvailableModes)); + + // android.edge + const uint8_t edgeMode = ANDROID_EDGE_MODE_OFF; + UPDATE(ANDROID_EDGE_AVAILABLE_EDGE_MODES, &edgeMode, 1); + + // android.flash + const uint8_t flashInfo = ANDROID_FLASH_INFO_AVAILABLE_FALSE; + UPDATE(ANDROID_FLASH_INFO_AVAILABLE, &flashInfo, 1); + + // android.hotPixel + const uint8_t hotPixelMode = ANDROID_HOT_PIXEL_MODE_OFF; + UPDATE(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES, &hotPixelMode, 1); + + // android.jpeg + const int32_t jpegAvailableThumbnailSizes[] = {0, 0, + 176, 144, + 240, 144, + 256, 144, + 240, 160, + 256, 154, + 240, 180}; + UPDATE(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES, jpegAvailableThumbnailSizes, + ARRAY_SIZE(jpegAvailableThumbnailSizes)); + + const int32_t jpegMaxSize = mCfg.maxJpegBufSize; + UPDATE(ANDROID_JPEG_MAX_SIZE, &jpegMaxSize, 1); + + // android.lens + const uint8_t focusDistanceCalibration = + ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED; + UPDATE(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION, &focusDistanceCalibration, 1); + + const uint8_t opticalStabilizationMode = + ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF; + UPDATE(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION, + &opticalStabilizationMode, 1); + + const uint8_t facing = ANDROID_LENS_FACING_EXTERNAL; + UPDATE(ANDROID_LENS_FACING, &facing, 1); + + // android.noiseReduction + const uint8_t noiseReductionMode = ANDROID_NOISE_REDUCTION_MODE_OFF; + UPDATE(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES, + &noiseReductionMode, 1); + UPDATE(ANDROID_NOISE_REDUCTION_MODE, &noiseReductionMode, 1); + + const int32_t partialResultCount = 1; + UPDATE(ANDROID_REQUEST_PARTIAL_RESULT_COUNT, &partialResultCount, 1); + + // This means pipeline latency of X frame intervals. The maximum number is 4. + const uint8_t requestPipelineMaxDepth = 4; + UPDATE(ANDROID_REQUEST_PIPELINE_MAX_DEPTH, &requestPipelineMaxDepth, 1); + + // Three numbers represent the maximum numbers of different types of output + // streams simultaneously. The types are raw sensor, processed (but not + // stalling), and processed (but stalling). For usb limited mode, raw sensor + // is not supported. Stalling stream is JPEG. Non-stalling streams are + // YUV_420_888 or YV12. 
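As an aside, a condensed sketch of how that stream budget relates to the three-entry ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS value that follows. It is loosely modeled on the stall-stream check in configureStreams_3_4 in ExternalCameraDeviceSession.cpp later in this patch; the helper name and the processed-stream count here are illustrative assumptions, not code from this patch.

// Illustrative only: BLOB (JPEG) streams count against kMaxStallStream, all other
// output formats count against kMaxProcessedStream, and RAW is never advertised.
static bool fitsExternalCameraStreamBudget(const hidl_vec<V3_2::Stream>& streams) {
    int numStall = 0;
    int numProcessed = 0;
    for (const auto& s : streams) {
        if (s.format == PixelFormat::BLOB) {
            numStall++;       // stalling JPEG stream
        } else {
            numProcessed++;   // YUV_420_888 / implementation-defined, non-stalling
        }
    }
    return numStall <= ExternalCameraDeviceSession::kMaxStallStream &&
           numProcessed <= ExternalCameraDeviceSession::kMaxProcessedStream;
}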
+ const int32_t requestMaxNumOutputStreams[] = { + /*RAW*/0, + /*Processed*/ExternalCameraDeviceSession::kMaxProcessedStream, + /*Stall*/ExternalCameraDeviceSession::kMaxStallStream}; + UPDATE(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, requestMaxNumOutputStreams, + ARRAY_SIZE(requestMaxNumOutputStreams)); + + // Limited mode doesn't support reprocessing. + const int32_t requestMaxNumInputStreams = 0; + UPDATE(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS, &requestMaxNumInputStreams, + 1); + + // android.scaler + // TODO: b/72263447 V4L2_CID_ZOOM_* + const float scalerAvailableMaxDigitalZoom[] = {1}; + UPDATE(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM, + scalerAvailableMaxDigitalZoom, + ARRAY_SIZE(scalerAvailableMaxDigitalZoom)); + + const uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY; + UPDATE(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1); + + const int32_t testPatternModes[] = {ANDROID_SENSOR_TEST_PATTERN_MODE_OFF, + ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR}; + UPDATE(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES, testPatternModes, + ARRAY_SIZE(testPatternModes)); + + const uint8_t timestampSource = ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN; + UPDATE(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE, ×tampSource, 1); + + // Orientation is a bit odd for external camera, but consider it as the orientation + // between the external camera sensor (which is usually landscape) and the device's + // natural display orientation. For devices with natural landscape display (ex: tablet/TV), the + // orientation should be 0. For devices with natural portrait display (phone), the orientation + // should be 270. + const int32_t orientation = mCfg.orientation; + UPDATE(ANDROID_SENSOR_ORIENTATION, &orientation, 1); + + // android.shading + const uint8_t availabeMode = ANDROID_SHADING_MODE_OFF; + UPDATE(ANDROID_SHADING_AVAILABLE_MODES, &availabeMode, 1); + + // android.statistics + const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF; + UPDATE(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES, &faceDetectMode, + 1); + + const int32_t maxFaceCount = 0; + UPDATE(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, &maxFaceCount, 1); + + const uint8_t availableHotpixelMode = + ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF; + UPDATE(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES, + &availableHotpixelMode, 1); + + const uint8_t lensShadingMapMode = + ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF; + UPDATE(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES, + &lensShadingMapMode, 1); + + // android.sync + const int32_t maxLatency = ANDROID_SYNC_MAX_LATENCY_UNKNOWN; + UPDATE(ANDROID_SYNC_MAX_LATENCY, &maxLatency, 1); + + /* Other sensor/RAW realted keys: + * android.sensor.info.colorFilterArrangement -> no need if we don't do RAW + * android.sensor.info.physicalSize -> not available + * android.sensor.info.whiteLevel -> not available/not needed + * android.sensor.info.lensShadingApplied -> not needed + * android.sensor.info.preCorrectionActiveArraySize -> not available/not needed + * android.sensor.blackLevelPattern -> not available/not needed + */ + + const int32_t availableRequestKeys[] = { + ANDROID_COLOR_CORRECTION_ABERRATION_MODE, + ANDROID_CONTROL_AE_ANTIBANDING_MODE, + ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, + ANDROID_CONTROL_AE_LOCK, + ANDROID_CONTROL_AE_MODE, + ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, + ANDROID_CONTROL_AE_TARGET_FPS_RANGE, + ANDROID_CONTROL_AF_MODE, + ANDROID_CONTROL_AF_TRIGGER, + ANDROID_CONTROL_AWB_LOCK, + ANDROID_CONTROL_AWB_MODE, + ANDROID_CONTROL_CAPTURE_INTENT, + 
ANDROID_CONTROL_EFFECT_MODE, + ANDROID_CONTROL_MODE, + ANDROID_CONTROL_SCENE_MODE, + ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, + ANDROID_FLASH_MODE, + ANDROID_JPEG_ORIENTATION, + ANDROID_JPEG_QUALITY, + ANDROID_JPEG_THUMBNAIL_QUALITY, + ANDROID_JPEG_THUMBNAIL_SIZE, + ANDROID_LENS_OPTICAL_STABILIZATION_MODE, + ANDROID_NOISE_REDUCTION_MODE, + ANDROID_SCALER_CROP_REGION, + ANDROID_SENSOR_TEST_PATTERN_MODE, + ANDROID_STATISTICS_FACE_DETECT_MODE, + ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE}; + UPDATE(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, availableRequestKeys, + ARRAY_SIZE(availableRequestKeys)); + + const int32_t availableResultKeys[] = { + ANDROID_COLOR_CORRECTION_ABERRATION_MODE, + ANDROID_CONTROL_AE_ANTIBANDING_MODE, + ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, + ANDROID_CONTROL_AE_LOCK, + ANDROID_CONTROL_AE_MODE, + ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, + ANDROID_CONTROL_AE_STATE, + ANDROID_CONTROL_AE_TARGET_FPS_RANGE, + ANDROID_CONTROL_AF_MODE, + ANDROID_CONTROL_AF_STATE, + ANDROID_CONTROL_AF_TRIGGER, + ANDROID_CONTROL_AWB_LOCK, + ANDROID_CONTROL_AWB_MODE, + ANDROID_CONTROL_AWB_STATE, + ANDROID_CONTROL_CAPTURE_INTENT, + ANDROID_CONTROL_EFFECT_MODE, + ANDROID_CONTROL_MODE, + ANDROID_CONTROL_SCENE_MODE, + ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, + ANDROID_FLASH_MODE, + ANDROID_FLASH_STATE, + ANDROID_JPEG_ORIENTATION, + ANDROID_JPEG_QUALITY, + ANDROID_JPEG_THUMBNAIL_QUALITY, + ANDROID_JPEG_THUMBNAIL_SIZE, + ANDROID_LENS_OPTICAL_STABILIZATION_MODE, + ANDROID_NOISE_REDUCTION_MODE, + ANDROID_REQUEST_PIPELINE_DEPTH, + ANDROID_SCALER_CROP_REGION, + ANDROID_SENSOR_TIMESTAMP, + ANDROID_STATISTICS_FACE_DETECT_MODE, + ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, + ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, + ANDROID_STATISTICS_SCENE_FLICKER}; + UPDATE(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS, availableResultKeys, + ARRAY_SIZE(availableResultKeys)); + + UPDATE(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, + AVAILABLE_CHARACTERISTICS_KEYS_3_4.data(), + AVAILABLE_CHARACTERISTICS_KEYS_3_4.size()); + + return OK; +} + +status_t ExternalCameraDevice::initCameraControlsCharsKeys(int, + ::android::hardware::camera::common::V1_0::helper::CameraMetadata* metadata) { + /** + * android.sensor.info.sensitivityRange -> V4L2_CID_ISO_SENSITIVITY + * android.sensor.info.exposureTimeRange -> V4L2_CID_EXPOSURE_ABSOLUTE + * android.sensor.info.maxFrameDuration -> TBD + * android.lens.info.minimumFocusDistance -> V4L2_CID_FOCUS_ABSOLUTE + * android.lens.info.hyperfocalDistance + * android.lens.info.availableFocalLengths -> not available? + */ + + // android.control + // No AE compensation support for now. + // TODO: V4L2_CID_EXPOSURE_BIAS + const int32_t controlAeCompensationRange[] = {0, 0}; + UPDATE(ANDROID_CONTROL_AE_COMPENSATION_RANGE, controlAeCompensationRange, + ARRAY_SIZE(controlAeCompensationRange)); + const camera_metadata_rational_t controlAeCompensationStep[] = {{0, 1}}; + UPDATE(ANDROID_CONTROL_AE_COMPENSATION_STEP, controlAeCompensationStep, + ARRAY_SIZE(controlAeCompensationStep)); + + + // TODO: Check V4L2_CID_AUTO_FOCUS_*. 
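A minimal sketch of how the V4L2_CID_* TODOs above and below could be probed before a real range is advertised. The helper is hypothetical and not part of this patch; it only assumes the standard VIDIOC_QUERYCTRL ioctl from <linux/videodev2.h>.

// Hypothetical helper: query a V4L2 control's range so a future revision could
// report, for example, a real AE compensation range instead of the hard-coded {0, 0}.
static bool queryV4l2ControlRange(int fd, uint32_t ctrlId,
                                  int32_t* outMin, int32_t* outMax, int32_t* outStep) {
    struct v4l2_queryctrl ctrl;
    memset(&ctrl, 0, sizeof(ctrl));
    ctrl.id = ctrlId;
    if (TEMP_FAILURE_RETRY(ioctl(fd, VIDIOC_QUERYCTRL, &ctrl)) != 0 ||
            (ctrl.flags & V4L2_CTRL_FLAG_DISABLED)) {
        return false;  // control not exposed by this webcam
    }
    *outMin = ctrl.minimum;
    *outMax = ctrl.maximum;
    *outStep = ctrl.step;
    return true;
}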
+ const uint8_t afAvailableModes[] = {ANDROID_CONTROL_AF_MODE_AUTO, + ANDROID_CONTROL_AF_MODE_OFF}; + UPDATE(ANDROID_CONTROL_AF_AVAILABLE_MODES, afAvailableModes, + ARRAY_SIZE(afAvailableModes)); + + // TODO: V4L2_CID_SCENE_MODE + const uint8_t availableSceneMode = ANDROID_CONTROL_SCENE_MODE_DISABLED; + UPDATE(ANDROID_CONTROL_AVAILABLE_SCENE_MODES, &availableSceneMode, 1); + + // TODO: V4L2_CID_3A_LOCK + const uint8_t aeLockAvailable = ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE; + UPDATE(ANDROID_CONTROL_AE_LOCK_AVAILABLE, &aeLockAvailable, 1); + const uint8_t awbLockAvailable = ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE; + UPDATE(ANDROID_CONTROL_AWB_LOCK_AVAILABLE, &awbLockAvailable, 1); + + // TODO: V4L2_CID_ZOOM_* + const float scalerAvailableMaxDigitalZoom[] = {1}; + UPDATE(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM, + scalerAvailableMaxDigitalZoom, + ARRAY_SIZE(scalerAvailableMaxDigitalZoom)); + + return OK; +} + +template +status_t ExternalCameraDevice::initOutputCharskeysByFormat( + ::android::hardware::camera::common::V1_0::helper::CameraMetadata* metadata, + uint32_t fourcc, const std::array& halFormats, + int streamConfigTag, int streamConfiguration, int minFrameDuration, int stallDuration) { + if (mSupportedFormats.empty()) { + ALOGE("%s: Init supported format list failed", __FUNCTION__); + return UNKNOWN_ERROR; + } + + std::vector streamConfigurations; + std::vector minFrameDurations; + std::vector stallDurations; + + for (const auto& supportedFormat : mSupportedFormats) { + if (supportedFormat.fourcc != fourcc) { + // Skip 4CCs not meant for the halFormats + continue; + } + for (const auto& format : halFormats) { + streamConfigurations.push_back(format); + streamConfigurations.push_back(supportedFormat.width); + streamConfigurations.push_back(supportedFormat.height); + streamConfigurations.push_back(streamConfigTag); + } + + int64_t minFrameDuration = std::numeric_limits::max(); + for (const auto& fr : supportedFormat.frameRates) { + // 1000000000LL < (2^32 - 1) and + // fr.durationNumerator is uint32_t, so no overflow here + int64_t frameDuration = 1000000000LL * fr.durationNumerator / + fr.durationDenominator; + if (frameDuration < minFrameDuration) { + minFrameDuration = frameDuration; + } + } + + for (const auto& format : halFormats) { + minFrameDurations.push_back(format); + minFrameDurations.push_back(supportedFormat.width); + minFrameDurations.push_back(supportedFormat.height); + minFrameDurations.push_back(minFrameDuration); + } + + // The stall duration is 0 for non-jpeg formats. For JPEG format, stall + // duration can be 0 if JPEG is small. Here we choose 1 sec for JPEG. + // TODO: b/72261675. Maybe set this dynamically + for (const auto& format : halFormats) { + const int64_t NS_TO_SECOND = 1000000000; + int64_t stall_duration = + (format == HAL_PIXEL_FORMAT_BLOB) ? 
NS_TO_SECOND : 0; + stallDurations.push_back(format); + stallDurations.push_back(supportedFormat.width); + stallDurations.push_back(supportedFormat.height); + stallDurations.push_back(stall_duration); + } + } + + UPDATE(streamConfiguration, streamConfigurations.data(), streamConfigurations.size()); + + UPDATE(minFrameDuration, minFrameDurations.data(), minFrameDurations.size()); + + UPDATE(stallDuration, stallDurations.data(), stallDurations.size()); + + return true; +} + +bool ExternalCameraDevice::calculateMinFps( + ::android::hardware::camera::common::V1_0::helper::CameraMetadata* metadata) { + std::set framerates; + int32_t minFps = std::numeric_limits::max(); + + for (const auto& supportedFormat : mSupportedFormats) { + for (const auto& fr : supportedFormat.frameRates) { + int32_t frameRateInt = static_cast(fr.getDouble()); + if (minFps > frameRateInt) { + minFps = frameRateInt; + } + framerates.insert(frameRateInt); + } + } + + std::vector fpsRanges; + // FPS ranges + for (const auto& framerate : framerates) { + // Empirical: webcams often have close to 2x fps error and cannot support fixed fps range + fpsRanges.push_back(framerate / 2); + fpsRanges.push_back(framerate); + } + minFps /= 2; + int64_t maxFrameDuration = 1000000000LL / minFps; + + UPDATE(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES, fpsRanges.data(), + fpsRanges.size()); + + UPDATE(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION, &maxFrameDuration, 1); + + return true; +} + +status_t ExternalCameraDevice::initOutputCharsKeys( + int fd, ::android::hardware::camera::common::V1_0::helper::CameraMetadata* metadata) { + initSupportedFormatsLocked(fd); + if (mSupportedFormats.empty()) { + ALOGE("%s: Init supported format list failed", __FUNCTION__); + return UNKNOWN_ERROR; + } + + bool hasDepth = false; + bool hasColor = false; + + // For V4L2_PIX_FMT_Z16 + std::array halDepthFormats{{HAL_PIXEL_FORMAT_Y16}}; + // For V4L2_PIX_FMT_MJPEG + std::array halFormats{{HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888, + HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED}}; + + for (const auto& supportedFormat : mSupportedFormats) { + switch (supportedFormat.fourcc) { + case V4L2_PIX_FMT_Z16: + hasDepth = true; + break; + case V4L2_PIX_FMT_MJPEG: + hasColor = true; + break; + default: + ALOGW("%s: format %c%c%c%c is not supported!", __FUNCTION__, + supportedFormat.fourcc & 0xFF, (supportedFormat.fourcc >> 8) & 0xFF, + (supportedFormat.fourcc >> 16) & 0xFF, (supportedFormat.fourcc >> 24) & 0xFF); + } + } + + if (hasDepth) { + initOutputCharskeysByFormat(metadata, V4L2_PIX_FMT_Z16, halDepthFormats, + ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_OUTPUT, + ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS, + ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS, + ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS); + } + if (hasColor) { + initOutputCharskeysByFormat(metadata, V4L2_PIX_FMT_MJPEG, halFormats, + ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT, + ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, + ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, + ANDROID_SCALER_AVAILABLE_STALL_DURATIONS); + } + + calculateMinFps(metadata); + + SupportedV4L2Format maximumFormat {.width = 0, .height = 0}; + for (const auto& supportedFormat : mSupportedFormats) { + if (supportedFormat.width >= maximumFormat.width && + supportedFormat.height >= maximumFormat.height) { + maximumFormat = supportedFormat; + } + } + int32_t activeArraySize[] = {0, 0, + static_cast(maximumFormat.width), + static_cast(maximumFormat.height)}; + 
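To make the halving heuristic in calculateMinFps() above concrete, here is a worked example with assumed numbers rather than code from this patch.

// A webcam whose only discrete frame rate is 30 fps ends up advertising the AE
// target range [15, 30] (lower bound halved because webcams rarely hold a fixed
// rate), and the max frame duration is derived from the halved minimum:
constexpr int32_t kFps = 30;
constexpr int32_t kAdvertisedFpsRange[2] = {kFps / 2, kFps};        // {15, 30}
constexpr int64_t kMaxFrameDurationNs = 1000000000LL / (kFps / 2);  // ~66.7 ms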
UPDATE(ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE, + activeArraySize, ARRAY_SIZE(activeArraySize)); + UPDATE(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE, activeArraySize, + ARRAY_SIZE(activeArraySize)); + + int32_t pixelArraySize[] = {static_cast(maximumFormat.width), + static_cast(maximumFormat.height)}; + UPDATE(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE, pixelArraySize, + ARRAY_SIZE(pixelArraySize)); + return OK; +} + +#undef ARRAY_SIZE +#undef UPDATE + +void ExternalCameraDevice::getFrameRateList( + int fd, double fpsUpperBound, SupportedV4L2Format* format) { + format->frameRates.clear(); + + v4l2_frmivalenum frameInterval{ + .index = 0, + .pixel_format = format->fourcc, + .width = format->width, + .height = format->height, + }; + + for (frameInterval.index = 0; + TEMP_FAILURE_RETRY(ioctl(fd, VIDIOC_ENUM_FRAMEINTERVALS, &frameInterval)) == 0; + ++frameInterval.index) { + if (frameInterval.type == V4L2_FRMIVAL_TYPE_DISCRETE) { + if (frameInterval.discrete.numerator != 0) { + SupportedV4L2Format::FrameRate fr = { + frameInterval.discrete.numerator, + frameInterval.discrete.denominator}; + double framerate = fr.getDouble(); + if (framerate > fpsUpperBound) { + continue; + } + ALOGV("index:%d, format:%c%c%c%c, w %d, h %d, framerate %f", + frameInterval.index, + frameInterval.pixel_format & 0xFF, + (frameInterval.pixel_format >> 8) & 0xFF, + (frameInterval.pixel_format >> 16) & 0xFF, + (frameInterval.pixel_format >> 24) & 0xFF, + frameInterval.width, frameInterval.height, framerate); + format->frameRates.push_back(fr); + } + } + } + + if (format->frameRates.empty()) { + ALOGE("%s: failed to get supported frame rates for format:%c%c%c%c w %d h %d", + __FUNCTION__, + frameInterval.pixel_format & 0xFF, + (frameInterval.pixel_format >> 8) & 0xFF, + (frameInterval.pixel_format >> 16) & 0xFF, + (frameInterval.pixel_format >> 24) & 0xFF, + frameInterval.width, frameInterval.height); + } +} + +void ExternalCameraDevice::trimSupportedFormats( + CroppingType cropType, + /*inout*/std::vector* pFmts) { + std::vector& sortedFmts = *pFmts; + if (cropType == VERTICAL) { + std::sort(sortedFmts.begin(), sortedFmts.end(), + [](const SupportedV4L2Format& a, const SupportedV4L2Format& b) -> bool { + if (a.width == b.width) { + return a.height < b.height; + } + return a.width < b.width; + }); + } else { + std::sort(sortedFmts.begin(), sortedFmts.end(), + [](const SupportedV4L2Format& a, const SupportedV4L2Format& b) -> bool { + if (a.height == b.height) { + return a.width < b.width; + } + return a.height < b.height; + }); + } + + if (sortedFmts.size() == 0) { + ALOGE("%s: input format list is empty!", __FUNCTION__); + return; + } + + const auto& maxSize = sortedFmts[sortedFmts.size() - 1]; + float maxSizeAr = ASPECT_RATIO(maxSize); + + // Remove formats that has aspect ratio not croppable from largest size + std::vector out; + for (const auto& fmt : sortedFmts) { + float ar = ASPECT_RATIO(fmt); + if (isAspectRatioClose(ar, maxSizeAr)) { + out.push_back(fmt); + } else if (cropType == HORIZONTAL && ar < maxSizeAr) { + out.push_back(fmt); + } else if (cropType == VERTICAL && ar > maxSizeAr) { + out.push_back(fmt); + } else { + ALOGV("%s: size (%d,%d) is removed due to unable to crop %s from (%d,%d)", + __FUNCTION__, fmt.width, fmt.height, + cropType == VERTICAL ? 
"vertically" : "horizontally", + maxSize.width, maxSize.height); + } + } + sortedFmts = out; +} + +std::vector ExternalCameraDevice::getCandidateSupportedFormatsLocked( + int fd, CroppingType cropType, + const std::vector& fpsLimits, + const std::vector& depthFpsLimits, + const Size& minStreamSize, + bool depthEnabled) { + std::vector outFmts; + struct v4l2_fmtdesc fmtdesc { + .index = 0, + .type = V4L2_BUF_TYPE_VIDEO_CAPTURE}; + int ret = 0; + while (ret == 0) { + ret = TEMP_FAILURE_RETRY(ioctl(fd, VIDIOC_ENUM_FMT, &fmtdesc)); + ALOGV("index:%d,ret:%d, format:%c%c%c%c", fmtdesc.index, ret, + fmtdesc.pixelformat & 0xFF, + (fmtdesc.pixelformat >> 8) & 0xFF, + (fmtdesc.pixelformat >> 16) & 0xFF, + (fmtdesc.pixelformat >> 24) & 0xFF); + if (ret == 0 && !(fmtdesc.flags & V4L2_FMT_FLAG_EMULATED)) { + auto it = std::find ( + kSupportedFourCCs.begin(), kSupportedFourCCs.end(), fmtdesc.pixelformat); + if (it != kSupportedFourCCs.end()) { + // Found supported format + v4l2_frmsizeenum frameSize { + .index = 0, + .pixel_format = fmtdesc.pixelformat}; + for (; TEMP_FAILURE_RETRY(ioctl(fd, VIDIOC_ENUM_FRAMESIZES, &frameSize)) == 0; + ++frameSize.index) { + if (frameSize.type == V4L2_FRMSIZE_TYPE_DISCRETE) { + ALOGV("index:%d, format:%c%c%c%c, w %d, h %d", frameSize.index, + fmtdesc.pixelformat & 0xFF, + (fmtdesc.pixelformat >> 8) & 0xFF, + (fmtdesc.pixelformat >> 16) & 0xFF, + (fmtdesc.pixelformat >> 24) & 0xFF, + frameSize.discrete.width, frameSize.discrete.height); + // Disregard h > w formats so all aspect ratio (h/w) <= 1.0 + // This will simplify the crop/scaling logic down the road + if (frameSize.discrete.height > frameSize.discrete.width) { + continue; + } + // Discard all formats which is smaller than minStreamSize + if (frameSize.discrete.width < minStreamSize.width + || frameSize.discrete.height < minStreamSize.height) { + continue; + } + SupportedV4L2Format format { + .width = frameSize.discrete.width, + .height = frameSize.discrete.height, + .fourcc = fmtdesc.pixelformat + }; + + if (format.fourcc == V4L2_PIX_FMT_Z16 && depthEnabled) { + updateFpsBounds(fd, cropType, depthFpsLimits, format, outFmts); + } else { + updateFpsBounds(fd, cropType, fpsLimits, format, outFmts); + } + } + } + } + } + fmtdesc.index++; + } + trimSupportedFormats(cropType, &outFmts); + return outFmts; +} + +void ExternalCameraDevice::updateFpsBounds( + int fd, CroppingType cropType, + const std::vector& fpsLimits, SupportedV4L2Format format, + std::vector& outFmts) { + double fpsUpperBound = -1.0; + for (const auto& limit : fpsLimits) { + if (cropType == VERTICAL) { + if (format.width <= limit.size.width) { + fpsUpperBound = limit.fpsUpperBound; + break; + } + } else { // HORIZONTAL + if (format.height <= limit.size.height) { + fpsUpperBound = limit.fpsUpperBound; + break; + } + } + } + if (fpsUpperBound < 0.f) { + return; + } + + getFrameRateList(fd, fpsUpperBound, &format); + if (!format.frameRates.empty()) { + outFmts.push_back(format); + } +} + +void ExternalCameraDevice::initSupportedFormatsLocked(int fd) { + std::vector horizontalFmts = getCandidateSupportedFormatsLocked( + fd, HORIZONTAL, mCfg.fpsLimits, mCfg.depthFpsLimits, mCfg.minStreamSize, mCfg.depthEnabled); + std::vector verticalFmts = getCandidateSupportedFormatsLocked( + fd, VERTICAL, mCfg.fpsLimits, mCfg.depthFpsLimits, mCfg.minStreamSize, mCfg.depthEnabled); + + size_t horiSize = horizontalFmts.size(); + size_t vertSize = verticalFmts.size(); + + if (horiSize == 0 && vertSize == 0) { + ALOGE("%s: cannot find suitable cropping type!", __FUNCTION__); 
+ return; + } + + if (horiSize == 0) { + mSupportedFormats = verticalFmts; + mCroppingType = VERTICAL; + return; + } else if (vertSize == 0) { + mSupportedFormats = horizontalFmts; + mCroppingType = HORIZONTAL; + return; + } + + const auto& maxHoriSize = horizontalFmts[horizontalFmts.size() - 1]; + const auto& maxVertSize = verticalFmts[verticalFmts.size() - 1]; + + // Try to keep largest possible output size + // When they are the same or ambiguous, pick the one support more sizes + if (maxHoriSize.width == maxVertSize.width && + maxHoriSize.height == maxVertSize.height) { + if (horiSize > vertSize) { + mSupportedFormats = horizontalFmts; + mCroppingType = HORIZONTAL; + } else { + mSupportedFormats = verticalFmts; + mCroppingType = VERTICAL; + } + } else if (maxHoriSize.width >= maxVertSize.width && + maxHoriSize.height >= maxVertSize.height) { + mSupportedFormats = horizontalFmts; + mCroppingType = HORIZONTAL; + } else if (maxHoriSize.width <= maxVertSize.width && + maxHoriSize.height <= maxVertSize.height) { + mSupportedFormats = verticalFmts; + mCroppingType = VERTICAL; + } else { + if (horiSize > vertSize) { + mSupportedFormats = horizontalFmts; + mCroppingType = HORIZONTAL; + } else { + mSupportedFormats = verticalFmts; + mCroppingType = VERTICAL; + } + } +} + +sp ExternalCameraDevice::createSession( + const sp& cb, + const ExternalCameraConfig& cfg, + const std::vector& sortedFormats, + const CroppingType& croppingType, + const common::V1_0::helper::CameraMetadata& chars, + const std::string& cameraId, + unique_fd v4l2Fd) { + return new ExternalCameraDeviceSession( + cb, cfg, sortedFormats, croppingType, chars, cameraId, std::move(v4l2Fd)); +} + +} // namespace implementation +} // namespace V3_4 +} // namespace device +} // namespace camera +} // namespace hardware +} // namespace android + diff --git a/camera/device/3.4/default/ExternalCameraDeviceSession.cpp b/camera/device/3.4/default/ExternalCameraDeviceSession.cpp new file mode 100644 index 0000000..ca7186b --- /dev/null +++ b/camera/device/3.4/default/ExternalCameraDeviceSession.cpp @@ -0,0 +1,2652 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#define LOG_TAG "ExtCamDevSsn@3.4" +//#define LOG_NDEBUG 0 +#define ATRACE_TAG ATRACE_TAG_CAMERA +#include + +#include +#include "ExternalCameraDeviceSession.h" + +#include "android-base/macros.h" +#include +#include +#include +#include + +#define HAVE_JPEG // required for libyuv.h to export MJPEG decode APIs +#include + +#include + + +namespace android { +namespace hardware { +namespace camera { +namespace device { +namespace V3_4 { +namespace implementation { + +namespace { +// Size of request/result metadata fast message queue. Change to 0 to always use hwbinder buffer. +static constexpr size_t kMetadataMsgQueueSize = 1 << 18 /* 256kB */; + +const int kBadFramesAfterStreamOn = 1; // drop x frames after streamOn to get rid of some initial + // bad frames. 
TODO: develop a better bad frame detection + // method +constexpr int MAX_RETRY = 15; // Allow retry some ioctl failures a few times to account for some + // webcam showing temporarily ioctl failures. +constexpr int IOCTL_RETRY_SLEEP_US = 33000; // 33ms * MAX_RETRY = 0.5 seconds + +// Constants for tryLock during dumpstate +static constexpr int kDumpLockRetries = 50; +static constexpr int kDumpLockSleep = 60000; + +bool tryLock(Mutex& mutex) +{ + bool locked = false; + for (int i = 0; i < kDumpLockRetries; ++i) { + if (mutex.tryLock() == NO_ERROR) { + locked = true; + break; + } + usleep(kDumpLockSleep); + } + return locked; +} + +bool tryLock(std::mutex& mutex) +{ + bool locked = false; + for (int i = 0; i < kDumpLockRetries; ++i) { + if (mutex.try_lock()) { + locked = true; + break; + } + usleep(kDumpLockSleep); + } + return locked; +} + +} // Anonymous namespace + +// Static instances +const int ExternalCameraDeviceSession::kMaxProcessedStream; +const int ExternalCameraDeviceSession::kMaxStallStream; +HandleImporter ExternalCameraDeviceSession::sHandleImporter; + +ExternalCameraDeviceSession::ExternalCameraDeviceSession( + const sp& callback, + const ExternalCameraConfig& cfg, + const std::vector& sortedFormats, + const CroppingType& croppingType, + const common::V1_0::helper::CameraMetadata& chars, + const std::string& cameraId, + unique_fd v4l2Fd) : + mCallback(callback), + mCfg(cfg), + mCameraCharacteristics(chars), + mSupportedFormats(sortedFormats), + mCroppingType(croppingType), + mCameraId(cameraId), + mV4l2Fd(std::move(v4l2Fd)), + mMaxThumbResolution(getMaxThumbResolution()), + mMaxJpegResolution(getMaxJpegResolution()) {} + +bool ExternalCameraDeviceSession::initialize() { + if (mV4l2Fd.get() < 0) { + ALOGE("%s: invalid v4l2 device fd %d!", __FUNCTION__, mV4l2Fd.get()); + return true; + } + + struct v4l2_capability capability; + int ret = ioctl(mV4l2Fd.get(), VIDIOC_QUERYCAP, &capability); + std::string make, model; + if (ret < 0) { + ALOGW("%s v4l2 QUERYCAP failed", __FUNCTION__); + mExifMake = "Generic UVC webcam"; + mExifModel = "Generic UVC webcam"; + } else { + // capability.card is UTF-8 encoded + char card[32]; + int j = 0; + for (int i = 0; i < 32; i++) { + if (capability.card[i] < 128) { + card[j++] = capability.card[i]; + } + if (capability.card[i] == '\0') { + break; + } + } + if (j == 0 || card[j - 1] != '\0') { + mExifMake = "Generic UVC webcam"; + mExifModel = "Generic UVC webcam"; + } else { + mExifMake = card; + mExifModel = card; + } + } + + initOutputThread(); + if (mOutputThread == nullptr) { + ALOGE("%s: init OutputThread failed!", __FUNCTION__); + return true; + } + mOutputThread->setExifMakeModel(mExifMake, mExifModel); + + status_t status = initDefaultRequests(); + if (status != OK) { + ALOGE("%s: init default requests failed!", __FUNCTION__); + return true; + } + + mRequestMetadataQueue = std::make_unique( + kMetadataMsgQueueSize, false /* non blocking */); + if (!mRequestMetadataQueue->isValid()) { + ALOGE("%s: invalid request fmq", __FUNCTION__); + return true; + } + mResultMetadataQueue = std::make_shared( + kMetadataMsgQueueSize, false /* non blocking */); + if (!mResultMetadataQueue->isValid()) { + ALOGE("%s: invalid result fmq", __FUNCTION__); + return true; + } + + // TODO: check is PRIORITY_DISPLAY enough? 
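As a side note, a minimal sketch of how MAX_RETRY and IOCTL_RETRY_SLEEP_US above are intended to be used around flaky webcam ioctls. The wrapper itself (name and simplified signature) is an assumption for illustration, not a function defined in this patch.

// Illustrative retry wrapper: retry a transiently failing ioctl for up to roughly
// MAX_RETRY * IOCTL_RETRY_SLEEP_US (about half a second) before giving up.
static int ioctlWithRetry(int fd, unsigned long request, void* arg) {
    int ret = -1;
    for (int attempt = 0; attempt < MAX_RETRY; attempt++) {
        ret = TEMP_FAILURE_RETRY(ioctl(fd, request, arg));
        if (ret == 0) {
            break;  // success, stop retrying
        }
        usleep(IOCTL_RETRY_SLEEP_US);
    }
    return ret;
}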
+ mOutputThread->run("ExtCamOut", PRIORITY_DISPLAY); + return false; +} + +bool ExternalCameraDeviceSession::isInitFailed() { + Mutex::Autolock _l(mLock); + if (!mInitialized) { + mInitFail = initialize(); + mInitialized = true; + } + return mInitFail; +} + +void ExternalCameraDeviceSession::initOutputThread() { + mOutputThread = new OutputThread(this, mCroppingType, mCameraCharacteristics); +} + +void ExternalCameraDeviceSession::closeOutputThread() { + closeOutputThreadImpl(); +} + +void ExternalCameraDeviceSession::closeOutputThreadImpl() { + if (mOutputThread) { + mOutputThread->flush(); + mOutputThread->requestExit(); + mOutputThread->join(); + mOutputThread.clear(); + } +} + +Status ExternalCameraDeviceSession::initStatus() const { + Mutex::Autolock _l(mLock); + Status status = Status::OK; + if (mInitFail || mClosed) { + ALOGI("%s: sesssion initFailed %d closed %d", __FUNCTION__, mInitFail, mClosed); + status = Status::INTERNAL_ERROR; + } + return status; +} + +ExternalCameraDeviceSession::~ExternalCameraDeviceSession() { + if (!isClosed()) { + ALOGE("ExternalCameraDeviceSession deleted before close!"); + close(/*callerIsDtor*/true); + } +} + + +void ExternalCameraDeviceSession::dumpState(const native_handle_t* handle) { + if (handle->numFds != 1 || handle->numInts != 0) { + ALOGE("%s: handle must contain 1 FD and 0 integers! Got %d FDs and %d ints", + __FUNCTION__, handle->numFds, handle->numInts); + return; + } + int fd = handle->data[0]; + + bool intfLocked = tryLock(mInterfaceLock); + if (!intfLocked) { + dprintf(fd, "!! ExternalCameraDeviceSession interface may be deadlocked !!\n"); + } + + if (isClosed()) { + dprintf(fd, "External camera %s is closed\n", mCameraId.c_str()); + return; + } + + bool streaming = false; + size_t v4L2BufferCount = 0; + SupportedV4L2Format streamingFmt; + { + bool sessionLocked = tryLock(mLock); + if (!sessionLocked) { + dprintf(fd, "!! ExternalCameraDeviceSession mLock may be deadlocked !!\n"); + } + streaming = mV4l2Streaming; + streamingFmt = mV4l2StreamingFmt; + v4L2BufferCount = mV4L2BufferCount; + + if (sessionLocked) { + mLock.unlock(); + } + } + + std::unordered_set inflightFrames; + { + bool iffLocked = tryLock(mInflightFramesLock); + if (!iffLocked) { + dprintf(fd, + "!! ExternalCameraDeviceSession mInflightFramesLock may be deadlocked !!\n"); + } + inflightFrames = mInflightFrames; + if (iffLocked) { + mInflightFramesLock.unlock(); + } + } + + dprintf(fd, "External camera %s V4L2 FD %d, cropping type %s, %s\n", + mCameraId.c_str(), mV4l2Fd.get(), + (mCroppingType == VERTICAL) ? "vertical" : "horizontal", + streaming ? 
"streaming" : "not streaming"); + if (streaming) { + // TODO: dump fps later + dprintf(fd, "Current V4L2 format %c%c%c%c %dx%d @ %ffps\n", + streamingFmt.fourcc & 0xFF, + (streamingFmt.fourcc >> 8) & 0xFF, + (streamingFmt.fourcc >> 16) & 0xFF, + (streamingFmt.fourcc >> 24) & 0xFF, + streamingFmt.width, streamingFmt.height, + mV4l2StreamingFps); + + size_t numDequeuedV4l2Buffers = 0; + { + std::lock_guard lk(mV4l2BufferLock); + numDequeuedV4l2Buffers = mNumDequeuedV4l2Buffers; + } + dprintf(fd, "V4L2 buffer queue size %zu, dequeued %zu\n", + v4L2BufferCount, numDequeuedV4l2Buffers); + } + + dprintf(fd, "In-flight frames (not sorted):"); + for (const auto& frameNumber : inflightFrames) { + dprintf(fd, "%d, ", frameNumber); + } + dprintf(fd, "\n"); + mOutputThread->dump(fd); + dprintf(fd, "\n"); + + if (intfLocked) { + mInterfaceLock.unlock(); + } + + return; +} + +Return ExternalCameraDeviceSession::constructDefaultRequestSettings( + V3_2::RequestTemplate type, + V3_2::ICameraDeviceSession::constructDefaultRequestSettings_cb _hidl_cb) { + V3_2::CameraMetadata outMetadata; + Status status = constructDefaultRequestSettingsRaw( + static_cast(type), &outMetadata); + _hidl_cb(status, outMetadata); + return Void(); +} + +Status ExternalCameraDeviceSession::constructDefaultRequestSettingsRaw(RequestTemplate type, + V3_2::CameraMetadata *outMetadata) { + CameraMetadata emptyMd; + Status status = initStatus(); + if (status != Status::OK) { + return status; + } + + switch (type) { + case RequestTemplate::PREVIEW: + case RequestTemplate::STILL_CAPTURE: + case RequestTemplate::VIDEO_RECORD: + case RequestTemplate::VIDEO_SNAPSHOT: { + *outMetadata = mDefaultRequests[type]; + break; + } + case RequestTemplate::MANUAL: + case RequestTemplate::ZERO_SHUTTER_LAG: + // Don't support MANUAL, ZSL templates + status = Status::ILLEGAL_ARGUMENT; + break; + default: + ALOGE("%s: unknown request template type %d", __FUNCTION__, static_cast(type)); + status = Status::ILLEGAL_ARGUMENT; + break; + } + return status; +} + +Return ExternalCameraDeviceSession::configureStreams( + const V3_2::StreamConfiguration& streams, + ICameraDeviceSession::configureStreams_cb _hidl_cb) { + V3_2::HalStreamConfiguration outStreams; + V3_3::HalStreamConfiguration outStreams_v33; + Mutex::Autolock _il(mInterfaceLock); + + Status status = configureStreams(streams, &outStreams_v33); + size_t size = outStreams_v33.streams.size(); + outStreams.streams.resize(size); + for (size_t i = 0; i < size; i++) { + outStreams.streams[i] = outStreams_v33.streams[i].v3_2; + } + _hidl_cb(status, outStreams); + return Void(); +} + +Return ExternalCameraDeviceSession::configureStreams_3_3( + const V3_2::StreamConfiguration& streams, + ICameraDeviceSession::configureStreams_3_3_cb _hidl_cb) { + V3_3::HalStreamConfiguration outStreams; + Mutex::Autolock _il(mInterfaceLock); + + Status status = configureStreams(streams, &outStreams); + _hidl_cb(status, outStreams); + return Void(); +} + +Return ExternalCameraDeviceSession::configureStreams_3_4( + const V3_4::StreamConfiguration& requestedConfiguration, + ICameraDeviceSession::configureStreams_3_4_cb _hidl_cb) { + V3_2::StreamConfiguration config_v32; + V3_3::HalStreamConfiguration outStreams_v33; + V3_4::HalStreamConfiguration outStreams; + Mutex::Autolock _il(mInterfaceLock); + + config_v32.operationMode = requestedConfiguration.operationMode; + config_v32.streams.resize(requestedConfiguration.streams.size()); + uint32_t blobBufferSize = 0; + int numStallStream = 0; + for (size_t i = 0; i < 
config_v32.streams.size(); i++) { + config_v32.streams[i] = requestedConfiguration.streams[i].v3_2; + if (config_v32.streams[i].format == PixelFormat::BLOB) { + blobBufferSize = requestedConfiguration.streams[i].bufferSize; + numStallStream++; + } + } + + // Fail early if there are multiple BLOB streams + if (numStallStream > kMaxStallStream) { + ALOGE("%s: too many stall streams (expect <= %d, got %d)", __FUNCTION__, + kMaxStallStream, numStallStream); + _hidl_cb(Status::ILLEGAL_ARGUMENT, outStreams); + return Void(); + } + + Status status = configureStreams(config_v32, &outStreams_v33, blobBufferSize); + + outStreams.streams.resize(outStreams_v33.streams.size()); + for (size_t i = 0; i < outStreams.streams.size(); i++) { + outStreams.streams[i].v3_3 = outStreams_v33.streams[i]; + } + _hidl_cb(status, outStreams); + return Void(); +} + +Return ExternalCameraDeviceSession::getCaptureRequestMetadataQueue( + ICameraDeviceSession::getCaptureRequestMetadataQueue_cb _hidl_cb) { + Mutex::Autolock _il(mInterfaceLock); + _hidl_cb(*mRequestMetadataQueue->getDesc()); + return Void(); +} + +Return ExternalCameraDeviceSession::getCaptureResultMetadataQueue( + ICameraDeviceSession::getCaptureResultMetadataQueue_cb _hidl_cb) { + Mutex::Autolock _il(mInterfaceLock); + _hidl_cb(*mResultMetadataQueue->getDesc()); + return Void(); +} + +Return ExternalCameraDeviceSession::processCaptureRequest( + const hidl_vec& requests, + const hidl_vec& cachesToRemove, + ICameraDeviceSession::processCaptureRequest_cb _hidl_cb) { + Mutex::Autolock _il(mInterfaceLock); + updateBufferCaches(cachesToRemove); + + uint32_t numRequestProcessed = 0; + Status s = Status::OK; + for (size_t i = 0; i < requests.size(); i++, numRequestProcessed++) { + s = processOneCaptureRequest(requests[i]); + if (s != Status::OK) { + break; + } + } + + _hidl_cb(s, numRequestProcessed); + return Void(); +} + +Return ExternalCameraDeviceSession::processCaptureRequest_3_4( + const hidl_vec& requests, + const hidl_vec& cachesToRemove, + ICameraDeviceSession::processCaptureRequest_3_4_cb _hidl_cb) { + Mutex::Autolock _il(mInterfaceLock); + updateBufferCaches(cachesToRemove); + + uint32_t numRequestProcessed = 0; + Status s = Status::OK; + for (size_t i = 0; i < requests.size(); i++, numRequestProcessed++) { + s = processOneCaptureRequest(requests[i].v3_2); + if (s != Status::OK) { + break; + } + } + + _hidl_cb(s, numRequestProcessed); + return Void(); +} + +Return ExternalCameraDeviceSession::flush() { + ATRACE_CALL(); + Mutex::Autolock _il(mInterfaceLock); + Status status = initStatus(); + if (status != Status::OK) { + return status; + } + mOutputThread->flush(); + return Status::OK; +} + +Return ExternalCameraDeviceSession::close(bool callerIsDtor) { + Mutex::Autolock _il(mInterfaceLock); + bool closed = isClosed(); + if (!closed) { + if (callerIsDtor) { + closeOutputThreadImpl(); + } else { + closeOutputThread(); + } + + Mutex::Autolock _l(mLock); + // free all buffers + { + Mutex::Autolock _l(mCbsLock); + for(auto pair : mStreamMap) { + cleanupBuffersLocked(/*Stream ID*/pair.first); + } + } + v4l2StreamOffLocked(); + ALOGV("%s: closing V4L2 camera FD %d", __FUNCTION__, mV4l2Fd.get()); + mV4l2Fd.reset(); + mClosed = true; + } + return Void(); +} + +Status ExternalCameraDeviceSession::importRequestLocked( + const CaptureRequest& request, + hidl_vec& allBufPtrs, + hidl_vec& allFences) { + return importRequestLockedImpl(request, allBufPtrs, allFences); +} + +Status ExternalCameraDeviceSession::importBuffer(int32_t streamId, + uint64_t bufId, 
buffer_handle_t buf, + /*out*/buffer_handle_t** outBufPtr, + bool allowEmptyBuf) { + Mutex::Autolock _l(mCbsLock); + return importBufferLocked(streamId, bufId, buf, outBufPtr, allowEmptyBuf); +} + +Status ExternalCameraDeviceSession::importBufferLocked(int32_t streamId, + uint64_t bufId, buffer_handle_t buf, + /*out*/buffer_handle_t** outBufPtr, + bool allowEmptyBuf) { + return importBufferImpl( + mCirculatingBuffers, sHandleImporter, streamId, + bufId, buf, outBufPtr, allowEmptyBuf); +} + +Status ExternalCameraDeviceSession::importRequestLockedImpl( + const CaptureRequest& request, + hidl_vec& allBufPtrs, + hidl_vec& allFences, + bool allowEmptyBuf) { + size_t numOutputBufs = request.outputBuffers.size(); + size_t numBufs = numOutputBufs; + // Validate all I/O buffers + hidl_vec allBufs; + hidl_vec allBufIds; + allBufs.resize(numBufs); + allBufIds.resize(numBufs); + allBufPtrs.resize(numBufs); + allFences.resize(numBufs); + std::vector streamIds(numBufs); + + for (size_t i = 0; i < numOutputBufs; i++) { + allBufs[i] = request.outputBuffers[i].buffer.getNativeHandle(); + allBufIds[i] = request.outputBuffers[i].bufferId; + allBufPtrs[i] = &allBufs[i]; + streamIds[i] = request.outputBuffers[i].streamId; + } + + { + Mutex::Autolock _l(mCbsLock); + for (size_t i = 0; i < numBufs; i++) { + Status st = importBufferLocked( + streamIds[i], allBufIds[i], allBufs[i], &allBufPtrs[i], + allowEmptyBuf); + if (st != Status::OK) { + // Detailed error logs printed in importBuffer + return st; + } + } + } + + // All buffers are imported. Now validate output buffer acquire fences + for (size_t i = 0; i < numOutputBufs; i++) { + if (!sHandleImporter.importFence( + request.outputBuffers[i].acquireFence, allFences[i])) { + ALOGE("%s: output buffer %zu acquire fence is invalid", __FUNCTION__, i); + cleanupInflightFences(allFences, i); + return Status::INTERNAL_ERROR; + } + } + return Status::OK; +} + +void ExternalCameraDeviceSession::cleanupInflightFences( + hidl_vec& allFences, size_t numFences) { + for (size_t j = 0; j < numFences; j++) { + sHandleImporter.closeFence(allFences[j]); + } +} + +int ExternalCameraDeviceSession::waitForV4L2BufferReturnLocked(std::unique_lock& lk) { + ATRACE_CALL(); + std::chrono::seconds timeout = std::chrono::seconds(kBufferWaitTimeoutSec); + mLock.unlock(); + auto st = mV4L2BufferReturned.wait_for(lk, timeout); + // Here we introduce a order where mV4l2BufferLock is acquired before mLock, while + // the normal lock acquisition order is reversed. This is fine because in most of + // cases we are protected by mInterfaceLock. 
The only thread that can cause deadlock + // is the OutputThread, where we do need to make sure we don't acquire mLock then + // mV4l2BufferLock + mLock.lock(); + if (st == std::cv_status::timeout) { + ALOGE("%s: wait for V4L2 buffer return timeout!", __FUNCTION__); + return -1; + } + return 0; +} + +Status ExternalCameraDeviceSession::processOneCaptureRequest(const CaptureRequest& request) { + ATRACE_CALL(); + Status status = initStatus(); + if (status != Status::OK) { + return status; + } + + if (request.inputBuffer.streamId != -1) { + ALOGE("%s: external camera does not support reprocessing!", __FUNCTION__); + return Status::ILLEGAL_ARGUMENT; + } + + Mutex::Autolock _l(mLock); + if (!mV4l2Streaming) { + ALOGE("%s: cannot process request in streamOff state!", __FUNCTION__); + return Status::INTERNAL_ERROR; + } + + const camera_metadata_t *rawSettings = nullptr; + bool converted = true; + CameraMetadata settingsFmq; // settings from FMQ + if (request.fmqSettingsSize > 0) { + // non-blocking read; client must write metadata before calling + // processOneCaptureRequest + settingsFmq.resize(request.fmqSettingsSize); + bool read = mRequestMetadataQueue->read(settingsFmq.data(), request.fmqSettingsSize); + if (read) { + converted = V3_2::implementation::convertFromHidl(settingsFmq, &rawSettings); + } else { + ALOGE("%s: capture request settings metadata couldn't be read from fmq!", __FUNCTION__); + converted = false; + } + } else { + converted = V3_2::implementation::convertFromHidl(request.settings, &rawSettings); + } + + if (converted && rawSettings != nullptr) { + mLatestReqSetting = rawSettings; + } + + if (!converted) { + ALOGE("%s: capture request settings metadata is corrupt!", __FUNCTION__); + return Status::ILLEGAL_ARGUMENT; + } + + if (mFirstRequest && rawSettings == nullptr) { + ALOGE("%s: capture request settings must not be null for first request!", + __FUNCTION__); + return Status::ILLEGAL_ARGUMENT; + } + + hidl_vec allBufPtrs; + hidl_vec allFences; + size_t numOutputBufs = request.outputBuffers.size(); + + if (numOutputBufs == 0) { + ALOGE("%s: capture request must have at least one output buffer!", __FUNCTION__); + return Status::ILLEGAL_ARGUMENT; + } + + camera_metadata_entry fpsRange = mLatestReqSetting.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE); + if (fpsRange.count == 2) { + double requestFpsMax = fpsRange.data.i32[1]; + double closestFps = 0.0; + double fpsError = 1000.0; + bool fpsSupported = false; + for (const auto& fr : mV4l2StreamingFmt.frameRates) { + double f = fr.getDouble(); + if (std::fabs(requestFpsMax - f) < 1.0) { + fpsSupported = true; + break; + } + if (std::fabs(requestFpsMax - f) < fpsError) { + fpsError = std::fabs(requestFpsMax - f); + closestFps = f; + } + } + if (!fpsSupported) { + /* This can happen in a few scenarios: + * 1. The application is sending a FPS range not supported by the configured outputs. + * 2. The application is sending a valid FPS range for all cofigured outputs, but + * the selected V4L2 size can only run at slower speed. This should be very rare + * though: for this to happen a sensor needs to support at least 3 different aspect + * ratio outputs, and when (at least) two outputs are both not the main aspect ratio + * of the webcam, a third size that's larger might be picked and runs into this + * issue. + */ + ALOGW("%s: cannot reach fps %d! 
Will do %f instead", + __FUNCTION__, fpsRange.data.i32[1], closestFps); + requestFpsMax = closestFps; + } + + if (requestFpsMax != mV4l2StreamingFps) { + { + std::unique_lock lk(mV4l2BufferLock); + while (mNumDequeuedV4l2Buffers != 0) { + // Wait until pipeline is idle before reconfigure stream + int waitRet = waitForV4L2BufferReturnLocked(lk); + if (waitRet != 0) { + ALOGE("%s: wait for pipeline idle failed!", __FUNCTION__); + return Status::INTERNAL_ERROR; + } + } + } + configureV4l2StreamLocked(mV4l2StreamingFmt, requestFpsMax); + } + } + + status = importRequestLocked(request, allBufPtrs, allFences); + if (status != Status::OK) { + return status; + } + + nsecs_t shutterTs = 0; + sp frameIn = dequeueV4l2FrameLocked(&shutterTs); + if ( frameIn == nullptr) { + ALOGE("%s: V4L2 deque frame failed!", __FUNCTION__); + return Status::INTERNAL_ERROR; + } + + std::shared_ptr halReq = std::make_shared(); + halReq->frameNumber = request.frameNumber; + halReq->setting = mLatestReqSetting; + halReq->frameIn = frameIn; + halReq->shutterTs = shutterTs; + halReq->buffers.resize(numOutputBufs); + for (size_t i = 0; i < numOutputBufs; i++) { + HalStreamBuffer& halBuf = halReq->buffers[i]; + int streamId = halBuf.streamId = request.outputBuffers[i].streamId; + halBuf.bufferId = request.outputBuffers[i].bufferId; + const Stream& stream = mStreamMap[streamId]; + halBuf.width = stream.width; + halBuf.height = stream.height; + halBuf.format = stream.format; + halBuf.usage = stream.usage; + halBuf.bufPtr = allBufPtrs[i]; + halBuf.acquireFence = allFences[i]; + halBuf.fenceTimeout = false; + } + { + std::lock_guard lk(mInflightFramesLock); + mInflightFrames.insert(halReq->frameNumber); + } + // Send request to OutputThread for the rest of processing + mOutputThread->submitRequest(halReq); + mFirstRequest = false; + return Status::OK; +} + +void ExternalCameraDeviceSession::notifyShutter(uint32_t frameNumber, nsecs_t shutterTs) { + NotifyMsg msg; + msg.type = MsgType::SHUTTER; + msg.msg.shutter.frameNumber = frameNumber; + msg.msg.shutter.timestamp = shutterTs; + mCallback->notify({msg}); +} + +void ExternalCameraDeviceSession::notifyError( + uint32_t frameNumber, int32_t streamId, ErrorCode ec) { + NotifyMsg msg; + msg.type = MsgType::ERROR; + msg.msg.error.frameNumber = frameNumber; + msg.msg.error.errorStreamId = streamId; + msg.msg.error.errorCode = ec; + mCallback->notify({msg}); +} + +//TODO: refactor with processCaptureResult +Status ExternalCameraDeviceSession::processCaptureRequestError( + const std::shared_ptr& req, + /*out*/std::vector* outMsgs, + /*out*/std::vector* outResults) { + ATRACE_CALL(); + // Return V4L2 buffer to V4L2 buffer queue + sp v4l2Frame = + static_cast(req->frameIn.get()); + enqueueV4l2Frame(v4l2Frame); + + if (outMsgs == nullptr) { + notifyShutter(req->frameNumber, req->shutterTs); + notifyError(/*frameNum*/req->frameNumber, /*stream*/-1, ErrorCode::ERROR_REQUEST); + } else { + NotifyMsg shutter; + shutter.type = MsgType::SHUTTER; + shutter.msg.shutter.frameNumber = req->frameNumber; + shutter.msg.shutter.timestamp = req->shutterTs; + + NotifyMsg error; + error.type = MsgType::ERROR; + error.msg.error.frameNumber = req->frameNumber; + error.msg.error.errorStreamId = -1; + error.msg.error.errorCode = ErrorCode::ERROR_REQUEST; + outMsgs->push_back(shutter); + outMsgs->push_back(error); + } + + // Fill output buffers + hidl_vec results; + results.resize(1); + CaptureResult& result = results[0]; + result.frameNumber = req->frameNumber; + result.partialResult = 1; + 
result.inputBuffer.streamId = -1; + result.outputBuffers.resize(req->buffers.size()); + for (size_t i = 0; i < req->buffers.size(); i++) { + result.outputBuffers[i].streamId = req->buffers[i].streamId; + result.outputBuffers[i].bufferId = req->buffers[i].bufferId; + result.outputBuffers[i].status = BufferStatus::ERROR; + if (req->buffers[i].acquireFence >= 0) { + native_handle_t* handle = native_handle_create(/*numFds*/1, /*numInts*/0); + handle->data[0] = req->buffers[i].acquireFence; + result.outputBuffers[i].releaseFence.setTo(handle, /*shouldOwn*/false); + } + } + + // update inflight records + { + std::lock_guard lk(mInflightFramesLock); + mInflightFrames.erase(req->frameNumber); + } + + if (outResults == nullptr) { + // Callback into framework + invokeProcessCaptureResultCallback(results, /* tryWriteFmq */true); + freeReleaseFences(results); + } else { + outResults->push_back(result); + } + return Status::OK; +} + +Status ExternalCameraDeviceSession::processCaptureResult(std::shared_ptr& req) { + ATRACE_CALL(); + // Return V4L2 buffer to V4L2 buffer queue + sp v4l2Frame = + static_cast(req->frameIn.get()); + enqueueV4l2Frame(v4l2Frame); + + // NotifyShutter + notifyShutter(req->frameNumber, req->shutterTs); + + // Fill output buffers + hidl_vec results; + results.resize(1); + CaptureResult& result = results[0]; + result.frameNumber = req->frameNumber; + result.partialResult = 1; + result.inputBuffer.streamId = -1; + result.outputBuffers.resize(req->buffers.size()); + for (size_t i = 0; i < req->buffers.size(); i++) { + result.outputBuffers[i].streamId = req->buffers[i].streamId; + result.outputBuffers[i].bufferId = req->buffers[i].bufferId; + if (req->buffers[i].fenceTimeout) { + result.outputBuffers[i].status = BufferStatus::ERROR; + if (req->buffers[i].acquireFence >= 0) { + native_handle_t* handle = native_handle_create(/*numFds*/1, /*numInts*/0); + handle->data[0] = req->buffers[i].acquireFence; + result.outputBuffers[i].releaseFence.setTo(handle, /*shouldOwn*/false); + } + notifyError(req->frameNumber, req->buffers[i].streamId, ErrorCode::ERROR_BUFFER); + } else { + result.outputBuffers[i].status = BufferStatus::OK; + // TODO: refactor + if (req->buffers[i].acquireFence >= 0) { + native_handle_t* handle = native_handle_create(/*numFds*/1, /*numInts*/0); + handle->data[0] = req->buffers[i].acquireFence; + result.outputBuffers[i].releaseFence.setTo(handle, /*shouldOwn*/false); + } + } + } + + // Fill capture result metadata + fillCaptureResult(req->setting, req->shutterTs); + const camera_metadata_t *rawResult = req->setting.getAndLock(); + V3_2::implementation::convertToHidl(rawResult, &result.result); + req->setting.unlock(rawResult); + + // update inflight records + { + std::lock_guard lk(mInflightFramesLock); + mInflightFrames.erase(req->frameNumber); + } + + // Callback into framework + invokeProcessCaptureResultCallback(results, /* tryWriteFmq */true); + freeReleaseFences(results); + return Status::OK; +} + +void ExternalCameraDeviceSession::invokeProcessCaptureResultCallback( + hidl_vec &results, bool tryWriteFmq) { + if (mProcessCaptureResultLock.tryLock() != OK) { + const nsecs_t NS_TO_SECOND = 1000000000; + ALOGV("%s: previous call is not finished! 
waiting 1s...", __FUNCTION__); + if (mProcessCaptureResultLock.timedLock(/* 1s */NS_TO_SECOND) != OK) { + ALOGE("%s: cannot acquire lock in 1s, cannot proceed", + __FUNCTION__); + return; + } + } + if (tryWriteFmq && mResultMetadataQueue->availableToWrite() > 0) { + for (CaptureResult &result : results) { + if (result.result.size() > 0) { + if (mResultMetadataQueue->write(result.result.data(), result.result.size())) { + result.fmqResultSize = result.result.size(); + result.result.resize(0); + } else { + ALOGW("%s: couldn't utilize fmq, fall back to hwbinder", __FUNCTION__); + result.fmqResultSize = 0; + } + } else { + result.fmqResultSize = 0; + } + } + } + auto status = mCallback->processCaptureResult(results); + if (!status.isOk()) { + ALOGE("%s: processCaptureResult ERROR : %s", __FUNCTION__, + status.description().c_str()); + } + + mProcessCaptureResultLock.unlock(); +} + +ExternalCameraDeviceSession::OutputThread::OutputThread( + wp parent, CroppingType ct, + const common::V1_0::helper::CameraMetadata& chars) : + mParent(parent), mCroppingType(ct), mCameraCharacteristics(chars) {} + +ExternalCameraDeviceSession::OutputThread::~OutputThread() {} + +void ExternalCameraDeviceSession::OutputThread::setExifMakeModel( + const std::string& make, const std::string& model) { + mExifMake = make; + mExifModel = model; +} + +int ExternalCameraDeviceSession::OutputThread::cropAndScaleLocked( + sp& in, const Size& outSz, YCbCrLayout* out) { + Size inSz = {in->mWidth, in->mHeight}; + + int ret; + if (inSz == outSz) { + ret = in->getLayout(out); + if (ret != 0) { + ALOGE("%s: failed to get input image layout", __FUNCTION__); + return ret; + } + return ret; + } + + // Cropping to output aspect ratio + IMapper::Rect inputCrop; + ret = getCropRect(mCroppingType, inSz, outSz, &inputCrop); + if (ret != 0) { + ALOGE("%s: failed to compute crop rect for output size %dx%d", + __FUNCTION__, outSz.width, outSz.height); + return ret; + } + + YCbCrLayout croppedLayout; + ret = in->getCroppedLayout(inputCrop, &croppedLayout); + if (ret != 0) { + ALOGE("%s: failed to crop input image %dx%d to output size %dx%d", + __FUNCTION__, inSz.width, inSz.height, outSz.width, outSz.height); + return ret; + } + + if ((mCroppingType == VERTICAL && inSz.width == outSz.width) || + (mCroppingType == HORIZONTAL && inSz.height == outSz.height)) { + // No scale is needed + *out = croppedLayout; + return 0; + } + + auto it = mScaledYu12Frames.find(outSz); + sp scaledYu12Buf; + if (it != mScaledYu12Frames.end()) { + scaledYu12Buf = it->second; + } else { + it = mIntermediateBuffers.find(outSz); + if (it == mIntermediateBuffers.end()) { + ALOGE("%s: failed to find intermediate buffer size %dx%d", + __FUNCTION__, outSz.width, outSz.height); + return -1; + } + scaledYu12Buf = it->second; + } + // Scale + YCbCrLayout outLayout; + ret = scaledYu12Buf->getLayout(&outLayout); + if (ret != 0) { + ALOGE("%s: failed to get output buffer layout", __FUNCTION__); + return ret; + } + + ret = libyuv::I420Scale( + static_cast(croppedLayout.y), + croppedLayout.yStride, + static_cast(croppedLayout.cb), + croppedLayout.cStride, + static_cast(croppedLayout.cr), + croppedLayout.cStride, + inputCrop.width, + inputCrop.height, + static_cast(outLayout.y), + outLayout.yStride, + static_cast(outLayout.cb), + outLayout.cStride, + static_cast(outLayout.cr), + outLayout.cStride, + outSz.width, + outSz.height, + // TODO: b/72261744 see if we can use better filter without losing too much perf + libyuv::FilterMode::kFilterNone); + + if (ret != 0) { + ALOGE("%s: 
failed to scale buffer from %dx%d to %dx%d. Ret %d", + __FUNCTION__, inputCrop.width, inputCrop.height, + outSz.width, outSz.height, ret); + return ret; + } + + *out = outLayout; + mScaledYu12Frames.insert({outSz, scaledYu12Buf}); + return 0; +} + + +int ExternalCameraDeviceSession::OutputThread::cropAndScaleThumbLocked( + sp& in, const Size &outSz, YCbCrLayout* out) { + Size inSz {in->mWidth, in->mHeight}; + + if ((outSz.width * outSz.height) > + (mYu12ThumbFrame->mWidth * mYu12ThumbFrame->mHeight)) { + ALOGE("%s: Requested thumbnail size too big (%d,%d) > (%d,%d)", + __FUNCTION__, outSz.width, outSz.height, + mYu12ThumbFrame->mWidth, mYu12ThumbFrame->mHeight); + return -1; + } + + int ret; + + /* This will crop-and-zoom the input YUV frame to the thumbnail size + * Based on the following logic: + * 1) Square pixels come in, square pixels come out, therefore single + * scale factor is computed to either make input bigger or smaller + * depending on if we are upscaling or downscaling + * 2) That single scale factor would either make height too tall or width + * too wide so we need to crop the input either horizontally or vertically + * but not both + */ + + /* Convert the input and output dimensions into floats for ease of math */ + float fWin = static_cast(inSz.width); + float fHin = static_cast(inSz.height); + float fWout = static_cast(outSz.width); + float fHout = static_cast(outSz.height); + + /* Compute the one scale factor from (1) above, it will be the smaller of + * the two possibilities. */ + float scaleFactor = std::min( fHin / fHout, fWin / fWout ); + + /* Since we are crop-and-zooming (as opposed to letter/pillar boxing) we can + * simply multiply the output by our scaleFactor to get the cropped input + * size. Note that at least one of {fWcrop, fHcrop} is going to wind up + * being {fWin, fHin} respectively because fHout or fWout cancels out the + * scaleFactor calculation above. 
+ * + * Specifically: + * if ( fHin / fHout ) < ( fWin / fWout ) we crop the sides off + * input, in which case + * scaleFactor = fHin / fHout + * fWcrop = fHin / fHout * fWout + * fHcrop = fHin + * + * Note that fWcrop <= fWin ( because ( fHin / fHout ) * fWout < fWin, which + * is just the inequality above with both sides multiplied by fWout + * + * on the other hand if ( fWin / fWout ) < ( fHin / fHout) we crop the top + * and the bottom off of input, and + * scaleFactor = fWin / fWout + * fWcrop = fWin + * fHCrop = fWin / fWout * fHout + */ + float fWcrop = scaleFactor * fWout; + float fHcrop = scaleFactor * fHout; + + /* Convert to integer and truncate to an even number */ + Size cropSz = { 2*static_cast(fWcrop/2.0f), + 2*static_cast(fHcrop/2.0f) }; + + /* Convert to a centered rectange with even top/left */ + IMapper::Rect inputCrop { + 2*static_cast((inSz.width - cropSz.width)/4), + 2*static_cast((inSz.height - cropSz.height)/4), + static_cast(cropSz.width), + static_cast(cropSz.height) }; + + if ((inputCrop.top < 0) || + (inputCrop.top >= static_cast(inSz.height)) || + (inputCrop.left < 0) || + (inputCrop.left >= static_cast(inSz.width)) || + (inputCrop.width <= 0) || + (inputCrop.width + inputCrop.left > static_cast(inSz.width)) || + (inputCrop.height <= 0) || + (inputCrop.height + inputCrop.top > static_cast(inSz.height))) + { + ALOGE("%s: came up with really wrong crop rectangle",__FUNCTION__); + ALOGE("%s: input layout %dx%d to for output size %dx%d", + __FUNCTION__, inSz.width, inSz.height, outSz.width, outSz.height); + ALOGE("%s: computed input crop +%d,+%d %dx%d", + __FUNCTION__, inputCrop.left, inputCrop.top, + inputCrop.width, inputCrop.height); + return -1; + } + + YCbCrLayout inputLayout; + ret = in->getCroppedLayout(inputCrop, &inputLayout); + if (ret != 0) { + ALOGE("%s: failed to crop input layout %dx%d to for output size %dx%d", + __FUNCTION__, inSz.width, inSz.height, outSz.width, outSz.height); + ALOGE("%s: computed input crop +%d,+%d %dx%d", + __FUNCTION__, inputCrop.left, inputCrop.top, + inputCrop.width, inputCrop.height); + return ret; + } + ALOGV("%s: crop input layout %dx%d to for output size %dx%d", + __FUNCTION__, inSz.width, inSz.height, outSz.width, outSz.height); + ALOGV("%s: computed input crop +%d,+%d %dx%d", + __FUNCTION__, inputCrop.left, inputCrop.top, + inputCrop.width, inputCrop.height); + + + // Scale + YCbCrLayout outFullLayout; + + ret = mYu12ThumbFrame->getLayout(&outFullLayout); + if (ret != 0) { + ALOGE("%s: failed to get output buffer layout", __FUNCTION__); + return ret; + } + + + ret = libyuv::I420Scale( + static_cast(inputLayout.y), + inputLayout.yStride, + static_cast(inputLayout.cb), + inputLayout.cStride, + static_cast(inputLayout.cr), + inputLayout.cStride, + inputCrop.width, + inputCrop.height, + static_cast(outFullLayout.y), + outFullLayout.yStride, + static_cast(outFullLayout.cb), + outFullLayout.cStride, + static_cast(outFullLayout.cr), + outFullLayout.cStride, + outSz.width, + outSz.height, + libyuv::FilterMode::kFilterNone); + + if (ret != 0) { + ALOGE("%s: failed to scale buffer from %dx%d to %dx%d. Ret %d", + __FUNCTION__, inputCrop.width, inputCrop.height, + outSz.width, outSz.height, ret); + return ret; + } + + *out = outFullLayout; + return 0; +} + +/* + * TODO: There needs to be a mechanism to discover allocated buffer size + * in the HAL. 
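+ *
+ * Illustrative example of the computation done in getJpegBufferSize() below
+ * (numbers are made up, not taken from any real device): with
+ * kMinJpegBufferSize = 256KB + sizeof(CameraBlob), ANDROID_JPEG_MAX_SIZE = 3MB
+ * and a max JPEG resolution of 1920x1080, a 1280x720 request is scaled by
+ * (1280*720)/(1920*1080), roughly 0.44, and ends up with about 1.5MB.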
+ * + * This is very fragile because it is duplicated computation from: + * frameworks/av/services/camera/libcameraservice/device3/Camera3Device.cpp + * + */ + +/* This assumes mSupportedFormats have all been declared as supporting + * HAL_PIXEL_FORMAT_BLOB to the framework */ +Size ExternalCameraDeviceSession::getMaxJpegResolution() const { + Size ret { 0, 0 }; + for(auto & fmt : mSupportedFormats) { + if(fmt.width * fmt.height > ret.width * ret.height) { + ret = Size { fmt.width, fmt.height }; + } + } + return ret; +} + +Size ExternalCameraDeviceSession::getMaxThumbResolution() const { + return getMaxThumbnailResolution(mCameraCharacteristics); +} + +ssize_t ExternalCameraDeviceSession::getJpegBufferSize( + uint32_t width, uint32_t height) const { + // Constant from camera3.h + const ssize_t kMinJpegBufferSize = 256 * 1024 + sizeof(CameraBlob); + // Get max jpeg size (area-wise). + if (mMaxJpegResolution.width == 0) { + ALOGE("%s: Do not have a single supported JPEG stream", + __FUNCTION__); + return BAD_VALUE; + } + + // Get max jpeg buffer size + ssize_t maxJpegBufferSize = 0; + camera_metadata_ro_entry jpegBufMaxSize = + mCameraCharacteristics.find(ANDROID_JPEG_MAX_SIZE); + if (jpegBufMaxSize.count == 0) { + ALOGE("%s: Can't find maximum JPEG size in static metadata!", + __FUNCTION__); + return BAD_VALUE; + } + maxJpegBufferSize = jpegBufMaxSize.data.i32[0]; + + if (maxJpegBufferSize <= kMinJpegBufferSize) { + ALOGE("%s: ANDROID_JPEG_MAX_SIZE (%zd) <= kMinJpegBufferSize (%zd)", + __FUNCTION__, maxJpegBufferSize, kMinJpegBufferSize); + return BAD_VALUE; + } + + // Calculate final jpeg buffer size for the given resolution. + float scaleFactor = ((float) (width * height)) / + (mMaxJpegResolution.width * mMaxJpegResolution.height); + ssize_t jpegBufferSize = scaleFactor * (maxJpegBufferSize - kMinJpegBufferSize) + + kMinJpegBufferSize; + if (jpegBufferSize > maxJpegBufferSize) { + jpegBufferSize = maxJpegBufferSize; + } + + return jpegBufferSize; +} + +int ExternalCameraDeviceSession::OutputThread::createJpegLocked( + HalStreamBuffer &halBuf, + const common::V1_0::helper::CameraMetadata& setting) +{ + ATRACE_CALL(); + int ret; + auto lfail = [&](auto... 
args) { + ALOGE(args...); + + return 1; + }; + auto parent = mParent.promote(); + if (parent == nullptr) { + ALOGE("%s: session has been disconnected!", __FUNCTION__); + return 1; + } + + ALOGV("%s: HAL buffer sid: %d bid: %" PRIu64 " w: %u h: %u", + __FUNCTION__, halBuf.streamId, static_cast(halBuf.bufferId), + halBuf.width, halBuf.height); + ALOGV("%s: HAL buffer fmt: %x usage: %" PRIx64 " ptr: %p", + __FUNCTION__, halBuf.format, static_cast(halBuf.usage), + halBuf.bufPtr); + ALOGV("%s: YV12 buffer %d x %d", + __FUNCTION__, + mYu12Frame->mWidth, mYu12Frame->mHeight); + + int jpegQuality, thumbQuality; + Size thumbSize; + bool outputThumbnail = true; + + if (setting.exists(ANDROID_JPEG_QUALITY)) { + camera_metadata_ro_entry entry = + setting.find(ANDROID_JPEG_QUALITY); + jpegQuality = entry.data.u8[0]; + } else { + return lfail("%s: ANDROID_JPEG_QUALITY not set",__FUNCTION__); + } + + if (setting.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) { + camera_metadata_ro_entry entry = + setting.find(ANDROID_JPEG_THUMBNAIL_QUALITY); + thumbQuality = entry.data.u8[0]; + } else { + return lfail( + "%s: ANDROID_JPEG_THUMBNAIL_QUALITY not set", + __FUNCTION__); + } + + if (setting.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) { + camera_metadata_ro_entry entry = + setting.find(ANDROID_JPEG_THUMBNAIL_SIZE); + thumbSize = Size { static_cast(entry.data.i32[0]), + static_cast(entry.data.i32[1]) + }; + if (thumbSize.width == 0 && thumbSize.height == 0) { + outputThumbnail = false; + } + } else { + return lfail( + "%s: ANDROID_JPEG_THUMBNAIL_SIZE not set", __FUNCTION__); + } + + /* Cropped and scaled YU12 buffer for main and thumbnail */ + YCbCrLayout yu12Main; + Size jpegSize { halBuf.width, halBuf.height }; + + /* Compute temporary buffer sizes accounting for the following: + * thumbnail can't exceed APP1 size of 64K + * main image needs to hold APP1, headers, and at most a poorly + * compressed image */ + const ssize_t maxThumbCodeSize = 64 * 1024; + const ssize_t maxJpegCodeSize = mBlobBufferSize == 0 ? + parent->getJpegBufferSize(jpegSize.width, jpegSize.height) : + mBlobBufferSize; + + /* Check that getJpegBufferSize did not return an error */ + if (maxJpegCodeSize < 0) { + return lfail( + "%s: getJpegBufferSize returned %zd",__FUNCTION__,maxJpegCodeSize); + } + + + /* Hold actual thumbnail and main image code sizes */ + size_t thumbCodeSize = 0, jpegCodeSize = 0; + /* Temporary thumbnail code buffer */ + std::vector thumbCode(outputThumbnail ? 
maxThumbCodeSize : 0); + + YCbCrLayout yu12Thumb; + if (outputThumbnail) { + ret = cropAndScaleThumbLocked(mYu12Frame, thumbSize, &yu12Thumb); + + if (ret != 0) { + return lfail( + "%s: crop and scale thumbnail failed!", __FUNCTION__); + } + } + + /* Scale and crop main jpeg */ + ret = cropAndScaleLocked(mYu12Frame, jpegSize, &yu12Main); + + if (ret != 0) { + return lfail("%s: crop and scale main failed!", __FUNCTION__); + } + + /* Encode the thumbnail image */ + if (outputThumbnail) { + ret = encodeJpegYU12(thumbSize, yu12Thumb, + thumbQuality, 0, 0, + &thumbCode[0], maxThumbCodeSize, thumbCodeSize); + + if (ret != 0) { + return lfail("%s: thumbnail encodeJpegYU12 failed with %d",__FUNCTION__, ret); + } + } + + /* Combine camera characteristics with request settings to form EXIF + * metadata */ + common::V1_0::helper::CameraMetadata meta(mCameraCharacteristics); + meta.append(setting); + + /* Generate EXIF object */ + std::unique_ptr utils(ExifUtils::create()); + /* Make sure it's initialized */ + utils->initialize(); + + utils->setFromMetadata(meta, jpegSize.width, jpegSize.height); + utils->setMake(mExifMake); + utils->setModel(mExifModel); + + ret = utils->generateApp1(outputThumbnail ? &thumbCode[0] : 0, thumbCodeSize); + + if (!ret) { + return lfail("%s: generating APP1 failed", __FUNCTION__); + } + + /* Get internal buffer */ + size_t exifDataSize = utils->getApp1Length(); + const uint8_t* exifData = utils->getApp1Buffer(); + + /* Lock the HAL jpeg code buffer */ + void *bufPtr = sHandleImporter.lock( + *(halBuf.bufPtr), halBuf.usage, maxJpegCodeSize); + + if (!bufPtr) { + return lfail("%s: could not lock %zu bytes", __FUNCTION__, maxJpegCodeSize); + } + + /* Encode the main jpeg image */ + ret = encodeJpegYU12(jpegSize, yu12Main, + jpegQuality, exifData, exifDataSize, + bufPtr, maxJpegCodeSize, jpegCodeSize); + + /* TODO: Not sure this belongs here, maybe better to pass jpegCodeSize out + * and do this when returning buffer to parent */ + CameraBlob blob { CameraBlobId::JPEG, static_cast(jpegCodeSize) }; + void *blobDst = + reinterpret_cast(reinterpret_cast(bufPtr) + + maxJpegCodeSize - + sizeof(CameraBlob)); + memcpy(blobDst, &blob, sizeof(CameraBlob)); + + /* Unlock the HAL jpeg code buffer */ + int relFence = sHandleImporter.unlock(*(halBuf.bufPtr)); + if (relFence >= 0) { + halBuf.acquireFence = relFence; + } + + /* Check if our JPEG actually succeeded */ + if (ret != 0) { + return lfail( + "%s: encodeJpegYU12 failed with %d",__FUNCTION__, ret); + } + + ALOGV("%s: encoded JPEG (ret:%d) with Q:%d max size: %zu", + __FUNCTION__, ret, jpegQuality, maxJpegCodeSize); + + return 0; +} + +bool ExternalCameraDeviceSession::OutputThread::threadLoop() { + std::shared_ptr req; + auto parent = mParent.promote(); + if (parent == nullptr) { + ALOGE("%s: session has been disconnected!", __FUNCTION__); + return false; + } + + // TODO: maybe we need to setup a sensor thread to dq/enq v4l frames + // regularly to prevent v4l buffer queue filled with stale buffers + // when app doesn't program a preveiw request + waitForNextRequest(&req); + if (req == nullptr) { + // No new request, wait again + return true; + } + + auto onDeviceError = [&](auto... 
args) { + ALOGE(args...); + parent->notifyError( + req->frameNumber, /*stream*/-1, ErrorCode::ERROR_DEVICE); + signalRequestDone(); + return false; + }; + + if (req->frameIn->mFourcc != V4L2_PIX_FMT_MJPEG && req->frameIn->mFourcc != V4L2_PIX_FMT_Z16) { + return onDeviceError("%s: do not support V4L2 format %c%c%c%c", __FUNCTION__, + req->frameIn->mFourcc & 0xFF, + (req->frameIn->mFourcc >> 8) & 0xFF, + (req->frameIn->mFourcc >> 16) & 0xFF, + (req->frameIn->mFourcc >> 24) & 0xFF); + } + + int res = requestBufferStart(req->buffers); + if (res != 0) { + ALOGE("%s: send BufferRequest failed! res %d", __FUNCTION__, res); + return onDeviceError("%s: failed to send buffer request!", __FUNCTION__); + } + + std::unique_lock lk(mBufferLock); + // Convert input V4L2 frame to YU12 of the same size + // TODO: see if we can save some computation by converting to YV12 here + uint8_t* inData; + size_t inDataSize; + if (req->frameIn->getData(&inData, &inDataSize) != 0) { + lk.unlock(); + return onDeviceError("%s: V4L2 buffer map failed", __FUNCTION__); + } + + // Process camera mute state + auto testPatternMode = req->setting.find(ANDROID_SENSOR_TEST_PATTERN_MODE); + if (testPatternMode.count == 1) { + if (mCameraMuted != (testPatternMode.data.u8[0] != ANDROID_SENSOR_TEST_PATTERN_MODE_OFF)) { + mCameraMuted = !mCameraMuted; + // Get solid color for test pattern, if any was set + if (testPatternMode.data.u8[0] == ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR) { + auto entry = req->setting.find(ANDROID_SENSOR_TEST_PATTERN_DATA); + if (entry.count == 4) { + // Update the mute frame if the pattern color has changed + if (memcmp(entry.data.i32, mTestPatternData, sizeof(mTestPatternData)) != 0) { + memcpy(mTestPatternData, entry.data.i32, sizeof(mTestPatternData)); + // Fill the mute frame with the solid color, use only 8 MSB of RGGB as RGB + for (int i = 0; i < mMuteTestPatternFrame.size(); i += 3) { + mMuteTestPatternFrame[i] = entry.data.i32[0] >> 24; + mMuteTestPatternFrame[i + 1] = entry.data.i32[1] >> 24; + mMuteTestPatternFrame[i + 2] = entry.data.i32[3] >> 24; + } + } + } + } + } + } + + // TODO: in some special case maybe we can decode jpg directly to gralloc output? + if (req->frameIn->mFourcc == V4L2_PIX_FMT_MJPEG) { + ATRACE_BEGIN("MJPGtoI420"); + int res = 0; + if (mCameraMuted) { + res = libyuv::ConvertToI420( + mMuteTestPatternFrame.data(), mMuteTestPatternFrame.size(), + static_cast(mYu12FrameLayout.y), mYu12FrameLayout.yStride, + static_cast(mYu12FrameLayout.cb), mYu12FrameLayout.cStride, + static_cast(mYu12FrameLayout.cr), mYu12FrameLayout.cStride, 0, 0, + mYu12Frame->mWidth, mYu12Frame->mHeight, mYu12Frame->mWidth, + mYu12Frame->mHeight, libyuv::kRotate0, libyuv::FOURCC_RAW); + } else { + res = libyuv::MJPGToI420( + inData, inDataSize, static_cast(mYu12FrameLayout.y), + mYu12FrameLayout.yStride, static_cast(mYu12FrameLayout.cb), + mYu12FrameLayout.cStride, static_cast(mYu12FrameLayout.cr), + mYu12FrameLayout.cStride, mYu12Frame->mWidth, mYu12Frame->mHeight, + mYu12Frame->mWidth, mYu12Frame->mHeight); + } + ATRACE_END(); + + if (res != 0) { + // For some webcam, the first few V4L2 frames might be malformed... + ALOGE("%s: Convert V4L2 frame to YU12 failed! 
res %d", __FUNCTION__, res); + lk.unlock(); + Status st = parent->processCaptureRequestError(req); + if (st != Status::OK) { + return onDeviceError("%s: failed to process capture request error!", __FUNCTION__); + } + signalRequestDone(); + return true; + } + } + + ATRACE_BEGIN("Wait for BufferRequest done"); + res = waitForBufferRequestDone(&req->buffers); + ATRACE_END(); + + if (res != 0) { + ALOGE("%s: wait for BufferRequest done failed! res %d", __FUNCTION__, res); + lk.unlock(); + return onDeviceError("%s: failed to process buffer request error!", __FUNCTION__); + } + + ALOGV("%s processing new request", __FUNCTION__); + const int kSyncWaitTimeoutMs = 500; + for (auto& halBuf : req->buffers) { + if (*(halBuf.bufPtr) == nullptr) { + ALOGW("%s: buffer for stream %d missing", __FUNCTION__, halBuf.streamId); + halBuf.fenceTimeout = true; + } else if (halBuf.acquireFence >= 0) { + int ret = sync_wait(halBuf.acquireFence, kSyncWaitTimeoutMs); + if (ret) { + halBuf.fenceTimeout = true; + } else { + ::close(halBuf.acquireFence); + halBuf.acquireFence = -1; + } + } + + if (halBuf.fenceTimeout) { + continue; + } + + // Gralloc lockYCbCr the buffer + switch (halBuf.format) { + case PixelFormat::BLOB: { + int ret = createJpegLocked(halBuf, req->setting); + + if(ret != 0) { + lk.unlock(); + return onDeviceError("%s: createJpegLocked failed with %d", + __FUNCTION__, ret); + } + } break; + case PixelFormat::Y16: { + void* outLayout = sHandleImporter.lock(*(halBuf.bufPtr), halBuf.usage, inDataSize); + + std::memcpy(outLayout, inData, inDataSize); + + int relFence = sHandleImporter.unlock(*(halBuf.bufPtr)); + if (relFence >= 0) { + halBuf.acquireFence = relFence; + } + } break; + case PixelFormat::YCBCR_420_888: + case PixelFormat::YV12: { + IMapper::Rect outRect {0, 0, + static_cast(halBuf.width), + static_cast(halBuf.height)}; + YCbCrLayout outLayout = sHandleImporter.lockYCbCr( + *(halBuf.bufPtr), halBuf.usage, outRect); + ALOGV("%s: outLayout y %p cb %p cr %p y_str %d c_str %d c_step %d", + __FUNCTION__, outLayout.y, outLayout.cb, outLayout.cr, + outLayout.yStride, outLayout.cStride, outLayout.chromaStep); + + // Convert to output buffer size/format + uint32_t outputFourcc = getFourCcFromLayout(outLayout); + ALOGV("%s: converting to format %c%c%c%c", __FUNCTION__, + outputFourcc & 0xFF, + (outputFourcc >> 8) & 0xFF, + (outputFourcc >> 16) & 0xFF, + (outputFourcc >> 24) & 0xFF); + + YCbCrLayout cropAndScaled; + ATRACE_BEGIN("cropAndScaleLocked"); + int ret = cropAndScaleLocked( + mYu12Frame, + Size { halBuf.width, halBuf.height }, + &cropAndScaled); + ATRACE_END(); + if (ret != 0) { + lk.unlock(); + return onDeviceError("%s: crop and scale failed!", __FUNCTION__); + } + + Size sz {halBuf.width, halBuf.height}; + ATRACE_BEGIN("formatConvert"); + ret = formatConvert(cropAndScaled, outLayout, sz, outputFourcc); + ATRACE_END(); + if (ret != 0) { + lk.unlock(); + return onDeviceError("%s: format coversion failed!", __FUNCTION__); + } + int relFence = sHandleImporter.unlock(*(halBuf.bufPtr)); + if (relFence >= 0) { + halBuf.acquireFence = relFence; + } + } break; + default: + lk.unlock(); + return onDeviceError("%s: unknown output format %x", __FUNCTION__, halBuf.format); + } + } // for each buffer + mScaledYu12Frames.clear(); + + // Don't hold the lock while calling back to parent + lk.unlock(); + Status st = parent->processCaptureResult(req); + if (st != Status::OK) { + return onDeviceError("%s: failed to process capture result!", __FUNCTION__); + } + signalRequestDone(); + return true; +} + +Status 
ExternalCameraDeviceSession::OutputThread::allocateIntermediateBuffers( + const Size& v4lSize, const Size& thumbSize, + const hidl_vec& streams, + uint32_t blobBufferSize) { + std::lock_guard lk(mBufferLock); + if (mScaledYu12Frames.size() != 0) { + ALOGE("%s: intermediate buffer pool has %zu inflight buffers! (expect 0)", + __FUNCTION__, mScaledYu12Frames.size()); + return Status::INTERNAL_ERROR; + } + + // Allocating intermediate YU12 frame + if (mYu12Frame == nullptr || mYu12Frame->mWidth != v4lSize.width || + mYu12Frame->mHeight != v4lSize.height) { + mYu12Frame.clear(); + mYu12Frame = new AllocatedFrame(v4lSize.width, v4lSize.height); + int ret = mYu12Frame->allocate(&mYu12FrameLayout); + if (ret != 0) { + ALOGE("%s: allocating YU12 frame failed!", __FUNCTION__); + return Status::INTERNAL_ERROR; + } + } + + // Allocating intermediate YU12 thumbnail frame + if (mYu12ThumbFrame == nullptr || + mYu12ThumbFrame->mWidth != thumbSize.width || + mYu12ThumbFrame->mHeight != thumbSize.height) { + mYu12ThumbFrame.clear(); + mYu12ThumbFrame = new AllocatedFrame(thumbSize.width, thumbSize.height); + int ret = mYu12ThumbFrame->allocate(&mYu12ThumbFrameLayout); + if (ret != 0) { + ALOGE("%s: allocating YU12 thumb frame failed!", __FUNCTION__); + return Status::INTERNAL_ERROR; + } + } + + // Allocating scaled buffers + for (const auto& stream : streams) { + Size sz = {stream.width, stream.height}; + if (sz == v4lSize) { + continue; // Don't need an intermediate buffer same size as v4lBuffer + } + if (mIntermediateBuffers.count(sz) == 0) { + // Create new intermediate buffer + sp buf = new AllocatedFrame(stream.width, stream.height); + int ret = buf->allocate(); + if (ret != 0) { + ALOGE("%s: allocating intermediate YU12 frame %dx%d failed!", + __FUNCTION__, stream.width, stream.height); + return Status::INTERNAL_ERROR; + } + mIntermediateBuffers[sz] = buf; + } + } + + // Remove unconfigured buffers + auto it = mIntermediateBuffers.begin(); + while (it != mIntermediateBuffers.end()) { + bool configured = false; + auto sz = it->first; + for (const auto& stream : streams) { + if (stream.width == sz.width && stream.height == sz.height) { + configured = true; + break; + } + } + if (configured) { + it++; + } else { + it = mIntermediateBuffers.erase(it); + } + } + + // Allocate mute test pattern frame + mMuteTestPatternFrame.resize(mYu12Frame->mWidth * mYu12Frame->mHeight * 3); + + mBlobBufferSize = blobBufferSize; + return Status::OK; +} + +void ExternalCameraDeviceSession::OutputThread::clearIntermediateBuffers() { + std::lock_guard lk(mBufferLock); + mYu12Frame.clear(); + mYu12ThumbFrame.clear(); + mIntermediateBuffers.clear(); + mMuteTestPatternFrame.clear(); + mBlobBufferSize = 0; +} + +Status ExternalCameraDeviceSession::OutputThread::submitRequest( + const std::shared_ptr& req) { + std::unique_lock lk(mRequestListLock); + mRequestList.push_back(req); + lk.unlock(); + mRequestCond.notify_one(); + return Status::OK; +} + +void ExternalCameraDeviceSession::OutputThread::flush() { + ATRACE_CALL(); + auto parent = mParent.promote(); + if (parent == nullptr) { + ALOGE("%s: session has been disconnected!", __FUNCTION__); + return; + } + + std::unique_lock lk(mRequestListLock); + std::list> reqs = std::move(mRequestList); + mRequestList.clear(); + if (mProcessingRequest) { + std::chrono::seconds timeout = std::chrono::seconds(kFlushWaitTimeoutSec); + auto st = mRequestDoneCond.wait_for(lk, timeout); + if (st == std::cv_status::timeout) { + ALOGE("%s: wait for inflight request finish timeout!", 
__FUNCTION__); + } + } + + ALOGV("%s: flusing inflight requests", __FUNCTION__); + lk.unlock(); + for (const auto& req : reqs) { + parent->processCaptureRequestError(req); + } +} + +std::list> +ExternalCameraDeviceSession::OutputThread::switchToOffline() { + ATRACE_CALL(); + std::list> emptyList; + auto parent = mParent.promote(); + if (parent == nullptr) { + ALOGE("%s: session has been disconnected!", __FUNCTION__); + return emptyList; + } + + std::unique_lock lk(mRequestListLock); + std::list> reqs = std::move(mRequestList); + mRequestList.clear(); + if (mProcessingRequest) { + std::chrono::seconds timeout = std::chrono::seconds(kFlushWaitTimeoutSec); + auto st = mRequestDoneCond.wait_for(lk, timeout); + if (st == std::cv_status::timeout) { + ALOGE("%s: wait for inflight request finish timeout!", __FUNCTION__); + } + } + lk.unlock(); + clearIntermediateBuffers(); + ALOGV("%s: returning %zu request for offline processing", __FUNCTION__, reqs.size()); + return reqs; +} + +void ExternalCameraDeviceSession::OutputThread::waitForNextRequest( + std::shared_ptr* out) { + ATRACE_CALL(); + if (out == nullptr) { + ALOGE("%s: out is null", __FUNCTION__); + return; + } + + std::unique_lock lk(mRequestListLock); + int waitTimes = 0; + while (mRequestList.empty()) { + if (exitPending()) { + return; + } + std::chrono::milliseconds timeout = std::chrono::milliseconds(kReqWaitTimeoutMs); + auto st = mRequestCond.wait_for(lk, timeout); + if (st == std::cv_status::timeout) { + waitTimes++; + if (waitTimes == kReqWaitTimesMax) { + // no new request, return + return; + } + } + } + *out = mRequestList.front(); + mRequestList.pop_front(); + mProcessingRequest = true; + mProcessingFrameNumer = (*out)->frameNumber; +} + +void ExternalCameraDeviceSession::OutputThread::signalRequestDone() { + std::unique_lock lk(mRequestListLock); + mProcessingRequest = false; + mProcessingFrameNumer = 0; + lk.unlock(); + mRequestDoneCond.notify_one(); +} + +void ExternalCameraDeviceSession::OutputThread::dump(int fd) { + std::lock_guard lk(mRequestListLock); + if (mProcessingRequest) { + dprintf(fd, "OutputThread processing frame %d\n", mProcessingFrameNumer); + } else { + dprintf(fd, "OutputThread not processing any frames\n"); + } + dprintf(fd, "OutputThread request list contains frame: "); + for (const auto& req : mRequestList) { + dprintf(fd, "%d, ", req->frameNumber); + } + dprintf(fd, "\n"); +} + +void ExternalCameraDeviceSession::cleanupBuffersLocked(int id) { + for (auto& pair : mCirculatingBuffers.at(id)) { + sHandleImporter.freeBuffer(pair.second); + } + mCirculatingBuffers[id].clear(); + mCirculatingBuffers.erase(id); +} + +void ExternalCameraDeviceSession::updateBufferCaches(const hidl_vec& cachesToRemove) { + Mutex::Autolock _l(mCbsLock); + for (auto& cache : cachesToRemove) { + auto cbsIt = mCirculatingBuffers.find(cache.streamId); + if (cbsIt == mCirculatingBuffers.end()) { + // The stream could have been removed + continue; + } + CirculatingBuffers& cbs = cbsIt->second; + auto it = cbs.find(cache.bufferId); + if (it != cbs.end()) { + sHandleImporter.freeBuffer(it->second); + cbs.erase(it); + } else { + ALOGE("%s: stream %d buffer %" PRIu64 " is not cached", + __FUNCTION__, cache.streamId, cache.bufferId); + } + } +} + +bool ExternalCameraDeviceSession::isSupported(const Stream& stream, + const std::vector& supportedFormats, + const ExternalCameraConfig& devCfg) { + int32_t ds = static_cast(stream.dataSpace); + PixelFormat fmt = stream.format; + uint32_t width = stream.width; + uint32_t height = stream.height; + 
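+    // Overview of the checks below, with an illustrative example (the sizes
+    // here are made up, not taken from any device): a 1280x720 OUTPUT stream
+    // with ROTATION_0 in YCBCR_420_888 is accepted as long as some entry in
+    // supportedFormats advertises exactly 1280x720. BLOB additionally needs
+    // the V0_JFIF dataspace, and Y16 needs depth enabled plus a DEPTH
+    // dataspace; anything else is rejected.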
// TODO: check usage flags + + if (stream.streamType != StreamType::OUTPUT) { + ALOGE("%s: does not support non-output stream type", __FUNCTION__); + return false; + } + + if (stream.rotation != StreamRotation::ROTATION_0) { + ALOGE("%s: does not support stream rotation", __FUNCTION__); + return false; + } + + switch (fmt) { + case PixelFormat::BLOB: + if (ds != static_cast(Dataspace::V0_JFIF)) { + ALOGI("%s: BLOB format does not support dataSpace %x", __FUNCTION__, ds); + return false; + } + break; + case PixelFormat::IMPLEMENTATION_DEFINED: + case PixelFormat::YCBCR_420_888: + case PixelFormat::YV12: + // TODO: check what dataspace we can support here. + // intentional no-ops. + break; + case PixelFormat::Y16: + if (!devCfg.depthEnabled) { + ALOGI("%s: Depth is not Enabled", __FUNCTION__); + return false; + } + if (!(ds & Dataspace::DEPTH)) { + ALOGI("%s: Y16 supports only dataSpace DEPTH", __FUNCTION__); + return false; + } + break; + default: + ALOGI("%s: does not support format %x", __FUNCTION__, fmt); + return false; + } + + // Assume we can convert any V4L2 format to any of supported output format for now, i.e, + // ignoring v4l2Fmt.fourcc for now. Might need more subtle check if we support more v4l format + // in the futrue. + for (const auto& v4l2Fmt : supportedFormats) { + if (width == v4l2Fmt.width && height == v4l2Fmt.height) { + return true; + } + } + ALOGI("%s: resolution %dx%d is not supported", __FUNCTION__, width, height); + return false; +} + +int ExternalCameraDeviceSession::v4l2StreamOffLocked() { + if (!mV4l2Streaming) { + return OK; + } + + { + std::lock_guard lk(mV4l2BufferLock); + if (mNumDequeuedV4l2Buffers != 0) { + ALOGE("%s: there are %zu inflight V4L buffers", + __FUNCTION__, mNumDequeuedV4l2Buffers); + return -1; + } + } + mV4L2BufferCount = 0; + + // VIDIOC_STREAMOFF + v4l2_buf_type capture_type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + if (TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_STREAMOFF, &capture_type)) < 0) { + ALOGE("%s: STREAMOFF failed: %s", __FUNCTION__, strerror(errno)); + return -errno; + } + + // VIDIOC_REQBUFS: clear buffers + v4l2_requestbuffers req_buffers{}; + req_buffers.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + req_buffers.memory = V4L2_MEMORY_MMAP; + req_buffers.count = 0; + if (TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_REQBUFS, &req_buffers)) < 0) { + ALOGE("%s: REQBUFS failed: %s", __FUNCTION__, strerror(errno)); + return -errno; + } + + mV4l2Streaming = false; + return OK; +} + +int ExternalCameraDeviceSession::setV4l2FpsLocked(double fps) { + // VIDIOC_G_PARM/VIDIOC_S_PARM: set fps + v4l2_streamparm streamparm = { .type = V4L2_BUF_TYPE_VIDEO_CAPTURE }; + // The following line checks that the driver knows about framerate get/set. + int ret = TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_G_PARM, &streamparm)); + if (ret != 0) { + if (errno == -EINVAL) { + ALOGW("%s: device does not support VIDIOC_G_PARM", __FUNCTION__); + } + return -errno; + } + // Now check if the device is able to accept a capture framerate set. + if (!(streamparm.parm.capture.capability & V4L2_CAP_TIMEPERFRAME)) { + ALOGW("%s: device does not support V4L2_CAP_TIMEPERFRAME", __FUNCTION__); + return -EINVAL; + } + + // fps is float, approximate by a fraction. 
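+    // Worked example (illustrative fps value): requesting fps = 29.97 is sent
+    // to the driver as numerator = 10000 and denominator of roughly 299700,
+    // i.e. timeperframe is the exact fraction 10000/299700 s, about 33.4 ms
+    // per frame, which is how V4L2 expects frame intervals to be expressed.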
+ const int kFrameRatePrecision = 10000; + streamparm.parm.capture.timeperframe.numerator = kFrameRatePrecision; + streamparm.parm.capture.timeperframe.denominator = + (fps * kFrameRatePrecision); + + if (TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_S_PARM, &streamparm)) < 0) { + ALOGE("%s: failed to set framerate to %f: %s", __FUNCTION__, fps, strerror(errno)); + return -1; + } + + double retFps = streamparm.parm.capture.timeperframe.denominator / + static_cast(streamparm.parm.capture.timeperframe.numerator); + if (std::fabs(fps - retFps) > 1.0) { + ALOGE("%s: expect fps %f, got %f instead", __FUNCTION__, fps, retFps); + return -1; + } + mV4l2StreamingFps = fps; + return 0; +} + +int ExternalCameraDeviceSession::configureV4l2StreamLocked( + const SupportedV4L2Format& v4l2Fmt, double requestFps) { + ATRACE_CALL(); + int ret = v4l2StreamOffLocked(); + if (ret != OK) { + ALOGE("%s: stop v4l2 streaming failed: ret %d", __FUNCTION__, ret); + return ret; + } + + // VIDIOC_S_FMT w/h/fmt + v4l2_format fmt; + fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + fmt.fmt.pix.width = v4l2Fmt.width; + fmt.fmt.pix.height = v4l2Fmt.height; + fmt.fmt.pix.pixelformat = v4l2Fmt.fourcc; + ret = TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_S_FMT, &fmt)); + if (ret < 0) { + int numAttempt = 0; + while (ret < 0) { + ALOGW("%s: VIDIOC_S_FMT failed, wait 33ms and try again", __FUNCTION__); + usleep(IOCTL_RETRY_SLEEP_US); // sleep and try again + ret = TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_S_FMT, &fmt)); + if (numAttempt == MAX_RETRY) { + break; + } + numAttempt++; + } + if (ret < 0) { + ALOGE("%s: S_FMT ioctl failed: %s", __FUNCTION__, strerror(errno)); + return -errno; + } + } + + if (v4l2Fmt.width != fmt.fmt.pix.width || v4l2Fmt.height != fmt.fmt.pix.height || + v4l2Fmt.fourcc != fmt.fmt.pix.pixelformat) { + ALOGE("%s: S_FMT expect %c%c%c%c %dx%d, got %c%c%c%c %dx%d instead!", __FUNCTION__, + v4l2Fmt.fourcc & 0xFF, + (v4l2Fmt.fourcc >> 8) & 0xFF, + (v4l2Fmt.fourcc >> 16) & 0xFF, + (v4l2Fmt.fourcc >> 24) & 0xFF, + v4l2Fmt.width, v4l2Fmt.height, + fmt.fmt.pix.pixelformat & 0xFF, + (fmt.fmt.pix.pixelformat >> 8) & 0xFF, + (fmt.fmt.pix.pixelformat >> 16) & 0xFF, + (fmt.fmt.pix.pixelformat >> 24) & 0xFF, + fmt.fmt.pix.width, fmt.fmt.pix.height); + return -EINVAL; + } + uint32_t bufferSize = fmt.fmt.pix.sizeimage; + ALOGI("%s: V4L2 buffer size is %d", __FUNCTION__, bufferSize); + uint32_t expectedMaxBufferSize = kMaxBytesPerPixel * fmt.fmt.pix.width * fmt.fmt.pix.height; + if ((bufferSize == 0) || (bufferSize > expectedMaxBufferSize)) { + ALOGE("%s: V4L2 buffer size: %u looks invalid. Expected maximum size: %u", __FUNCTION__, + bufferSize, expectedMaxBufferSize); + return -EINVAL; + } + mMaxV4L2BufferSize = bufferSize; + + const double kDefaultFps = 30.0; + double fps = 1000.0; + if (requestFps != 0.0) { + fps = requestFps; + } else { + double maxFps = -1.0; + // Try to pick the slowest fps that is at least 30 + for (const auto& fr : v4l2Fmt.frameRates) { + double f = fr.getDouble(); + if (maxFps < f) { + maxFps = f; + } + if (f >= kDefaultFps && f < fps) { + fps = f; + } + } + if (fps == 1000.0) { + fps = maxFps; + } + } + + int fpsRet = setV4l2FpsLocked(fps); + if (fpsRet != 0 && fpsRet != -EINVAL) { + ALOGE("%s: set fps failed: %s", __FUNCTION__, strerror(fpsRet)); + return fpsRet; + } + + uint32_t v4lBufferCount = (fps >= kDefaultFps) ? 
+ mCfg.numVideoBuffers : mCfg.numStillBuffers; + // VIDIOC_REQBUFS: create buffers + v4l2_requestbuffers req_buffers{}; + req_buffers.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + req_buffers.memory = V4L2_MEMORY_MMAP; + req_buffers.count = v4lBufferCount; + if (TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_REQBUFS, &req_buffers)) < 0) { + ALOGE("%s: VIDIOC_REQBUFS failed: %s", __FUNCTION__, strerror(errno)); + return -errno; + } + + // Driver can indeed return more buffer if it needs more to operate + if (req_buffers.count < v4lBufferCount) { + ALOGE("%s: VIDIOC_REQBUFS expected %d buffers, got %d instead", + __FUNCTION__, v4lBufferCount, req_buffers.count); + return NO_MEMORY; + } + + // VIDIOC_QUERYBUF: get buffer offset in the V4L2 fd + // VIDIOC_QBUF: send buffer to driver + mV4L2BufferCount = req_buffers.count; + for (uint32_t i = 0; i < req_buffers.count; i++) { + v4l2_buffer buffer = { + .index = i, .type = V4L2_BUF_TYPE_VIDEO_CAPTURE, .memory = V4L2_MEMORY_MMAP}; + + if (TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_QUERYBUF, &buffer)) < 0) { + ALOGE("%s: QUERYBUF %d failed: %s", __FUNCTION__, i, strerror(errno)); + return -errno; + } + + if (TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_QBUF, &buffer)) < 0) { + ALOGE("%s: QBUF %d failed: %s", __FUNCTION__, i, strerror(errno)); + return -errno; + } + } + + // VIDIOC_STREAMON: start streaming + v4l2_buf_type capture_type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + ret = TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_STREAMON, &capture_type)); + if (ret < 0) { + int numAttempt = 0; + while (ret < 0) { + ALOGW("%s: VIDIOC_STREAMON failed, wait 33ms and try again", __FUNCTION__); + usleep(IOCTL_RETRY_SLEEP_US); // sleep 100 ms and try again + ret = TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_STREAMON, &capture_type)); + if (numAttempt == MAX_RETRY) { + break; + } + numAttempt++; + } + if (ret < 0) { + ALOGE("%s: VIDIOC_STREAMON ioctl failed: %s", __FUNCTION__, strerror(errno)); + return -errno; + } + } + + // Swallow first few frames after streamOn to account for bad frames from some devices + for (int i = 0; i < kBadFramesAfterStreamOn; i++) { + v4l2_buffer buffer{}; + buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + buffer.memory = V4L2_MEMORY_MMAP; + if (TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_DQBUF, &buffer)) < 0) { + ALOGE("%s: DQBUF fails: %s", __FUNCTION__, strerror(errno)); + return -errno; + } + + if (TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_QBUF, &buffer)) < 0) { + ALOGE("%s: QBUF index %d fails: %s", __FUNCTION__, buffer.index, strerror(errno)); + return -errno; + } + } + + ALOGI("%s: start V4L2 streaming %dx%d@%ffps", + __FUNCTION__, v4l2Fmt.width, v4l2Fmt.height, fps); + mV4l2StreamingFmt = v4l2Fmt; + mV4l2Streaming = true; + return OK; +} + +sp ExternalCameraDeviceSession::dequeueV4l2FrameLocked(/*out*/nsecs_t* shutterTs) { + ATRACE_CALL(); + sp ret = nullptr; + + if (shutterTs == nullptr) { + ALOGE("%s: shutterTs must not be null!", __FUNCTION__); + return ret; + } + + { + std::unique_lock lk(mV4l2BufferLock); + if (mNumDequeuedV4l2Buffers == mV4L2BufferCount) { + int waitRet = waitForV4L2BufferReturnLocked(lk); + if (waitRet != 0) { + return ret; + } + } + } + + ATRACE_BEGIN("VIDIOC_DQBUF"); + v4l2_buffer buffer{}; + buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + buffer.memory = V4L2_MEMORY_MMAP; + if (TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_DQBUF, &buffer)) < 0) { + ALOGE("%s: DQBUF fails: %s", __FUNCTION__, strerror(errno)); + return ret; + } + ATRACE_END(); + + if (buffer.index >= mV4L2BufferCount) { + ALOGE("%s: 
Invalid buffer id: %d", __FUNCTION__, buffer.index); + return ret; + } + + if (buffer.flags & V4L2_BUF_FLAG_ERROR) { + ALOGE("%s: v4l2 buf error! buf flag 0x%x", __FUNCTION__, buffer.flags); + // TODO: try to dequeue again + } + + if (buffer.bytesused > mMaxV4L2BufferSize) { + ALOGE("%s: v4l2 buffer bytes used: %u maximum %u", __FUNCTION__, buffer.bytesused, + mMaxV4L2BufferSize); + return ret; + } + + if (buffer.flags & V4L2_BUF_FLAG_TIMESTAMP_MONOTONIC) { + // Ideally we should also check for V4L2_BUF_FLAG_TSTAMP_SRC_SOE, but + // even V4L2_BUF_FLAG_TSTAMP_SRC_EOF is better than capture a timestamp now + *shutterTs = static_cast(buffer.timestamp.tv_sec)*1000000000LL + + buffer.timestamp.tv_usec * 1000LL; + } else { + *shutterTs = systemTime(SYSTEM_TIME_MONOTONIC); + } + + { + std::lock_guard lk(mV4l2BufferLock); + mNumDequeuedV4l2Buffers++; + } + return new V4L2Frame( + mV4l2StreamingFmt.width, mV4l2StreamingFmt.height, mV4l2StreamingFmt.fourcc, + buffer.index, mV4l2Fd.get(), buffer.bytesused, buffer.m.offset); +} + +void ExternalCameraDeviceSession::enqueueV4l2Frame(const sp& frame) { + ATRACE_CALL(); + frame->unmap(); + ATRACE_BEGIN("VIDIOC_QBUF"); + v4l2_buffer buffer{}; + buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + buffer.memory = V4L2_MEMORY_MMAP; + buffer.index = frame->mBufferIndex; + if (TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_QBUF, &buffer)) < 0) { + ALOGE("%s: QBUF index %d fails: %s", __FUNCTION__, + frame->mBufferIndex, strerror(errno)); + return; + } + ATRACE_END(); + + { + std::lock_guard lk(mV4l2BufferLock); + mNumDequeuedV4l2Buffers--; + } + mV4L2BufferReturned.notify_one(); +} + +Status ExternalCameraDeviceSession::isStreamCombinationSupported( + const V3_2::StreamConfiguration& config, + const std::vector& supportedFormats, + const ExternalCameraConfig& devCfg) { + if (config.operationMode != StreamConfigurationMode::NORMAL_MODE) { + ALOGE("%s: unsupported operation mode: %d", __FUNCTION__, config.operationMode); + return Status::ILLEGAL_ARGUMENT; + } + + if (config.streams.size() == 0) { + ALOGE("%s: cannot configure zero stream", __FUNCTION__); + return Status::ILLEGAL_ARGUMENT; + } + + int numProcessedStream = 0; + int numStallStream = 0; + for (const auto& stream : config.streams) { + // Check if the format/width/height combo is supported + if (!isSupported(stream, supportedFormats, devCfg)) { + return Status::ILLEGAL_ARGUMENT; + } + if (stream.format == PixelFormat::BLOB) { + numStallStream++; + } else { + numProcessedStream++; + } + } + + if (numProcessedStream > kMaxProcessedStream) { + ALOGE("%s: too many processed streams (expect <= %d, got %d)", __FUNCTION__, + kMaxProcessedStream, numProcessedStream); + return Status::ILLEGAL_ARGUMENT; + } + + if (numStallStream > kMaxStallStream) { + ALOGE("%s: too many stall streams (expect <= %d, got %d)", __FUNCTION__, + kMaxStallStream, numStallStream); + return Status::ILLEGAL_ARGUMENT; + } + + return Status::OK; +} + +Status ExternalCameraDeviceSession::configureStreams( + const V3_2::StreamConfiguration& config, + V3_3::HalStreamConfiguration* out, + uint32_t blobBufferSize) { + ATRACE_CALL(); + + Status status = isStreamCombinationSupported(config, mSupportedFormats, mCfg); + if (status != Status::OK) { + return status; + } + + status = initStatus(); + if (status != Status::OK) { + return status; + } + + + { + std::lock_guard lk(mInflightFramesLock); + if (!mInflightFrames.empty()) { + ALOGE("%s: trying to configureStreams while there are still %zu inflight frames!", + __FUNCTION__, mInflightFrames.size()); + 
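+            // Reconfiguring now would stop the V4L2 stream and reallocate the
+            // intermediate buffers that the in-flight requests still reference
+            // (see configureV4l2StreamLocked/allocateIntermediateBuffers below),
+            // so refuse rather than race with the output thread.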
return Status::INTERNAL_ERROR; + } + } + + Mutex::Autolock _l(mLock); + { + Mutex::Autolock _l(mCbsLock); + // Add new streams + for (const auto& stream : config.streams) { + if (mStreamMap.count(stream.id) == 0) { + mStreamMap[stream.id] = stream; + mCirculatingBuffers.emplace(stream.id, CirculatingBuffers{}); + } + } + + // Cleanup removed streams + for(auto it = mStreamMap.begin(); it != mStreamMap.end();) { + int id = it->first; + bool found = false; + for (const auto& stream : config.streams) { + if (id == stream.id) { + found = true; + break; + } + } + if (!found) { + // Unmap all buffers of deleted stream + cleanupBuffersLocked(id); + it = mStreamMap.erase(it); + } else { + ++it; + } + } + } + + // Now select a V4L2 format to produce all output streams + float desiredAr = (mCroppingType == VERTICAL) ? kMaxAspectRatio : kMinAspectRatio; + uint32_t maxDim = 0; + for (const auto& stream : config.streams) { + float aspectRatio = ASPECT_RATIO(stream); + ALOGI("%s: request stream %dx%d", __FUNCTION__, stream.width, stream.height); + if ((mCroppingType == VERTICAL && aspectRatio < desiredAr) || + (mCroppingType == HORIZONTAL && aspectRatio > desiredAr)) { + desiredAr = aspectRatio; + } + + // The dimension that's not cropped + uint32_t dim = (mCroppingType == VERTICAL) ? stream.width : stream.height; + if (dim > maxDim) { + maxDim = dim; + } + } + // Find the smallest format that matches the desired aspect ratio and is wide/high enough + SupportedV4L2Format v4l2Fmt {.width = 0, .height = 0}; + for (const auto& fmt : mSupportedFormats) { + uint32_t dim = (mCroppingType == VERTICAL) ? fmt.width : fmt.height; + if (dim >= maxDim) { + float aspectRatio = ASPECT_RATIO(fmt); + if (isAspectRatioClose(aspectRatio, desiredAr)) { + v4l2Fmt = fmt; + // since mSupportedFormats is sorted by width then height, the first matching fmt + // will be the smallest one with matching aspect ratio + break; + } + } + } + if (v4l2Fmt.width == 0) { + // Cannot find exact good aspect ratio candidate, try to find a close one + for (const auto& fmt : mSupportedFormats) { + uint32_t dim = (mCroppingType == VERTICAL) ? fmt.width : fmt.height; + if (dim >= maxDim) { + float aspectRatio = ASPECT_RATIO(fmt); + if ((mCroppingType == VERTICAL && aspectRatio < desiredAr) || + (mCroppingType == HORIZONTAL && aspectRatio > desiredAr)) { + v4l2Fmt = fmt; + break; + } + } + } + } + + if (v4l2Fmt.width == 0) { + ALOGE("%s: unable to find a resolution matching (%s at least %d, aspect ratio %f)" + , __FUNCTION__, (mCroppingType == VERTICAL) ? 
"width" : "height", + maxDim, desiredAr); + return Status::ILLEGAL_ARGUMENT; + } + + if (configureV4l2StreamLocked(v4l2Fmt) != 0) { + ALOGE("V4L configuration failed!, format:%c%c%c%c, w %d, h %d", + v4l2Fmt.fourcc & 0xFF, + (v4l2Fmt.fourcc >> 8) & 0xFF, + (v4l2Fmt.fourcc >> 16) & 0xFF, + (v4l2Fmt.fourcc >> 24) & 0xFF, + v4l2Fmt.width, v4l2Fmt.height); + return Status::INTERNAL_ERROR; + } + + Size v4lSize = {v4l2Fmt.width, v4l2Fmt.height}; + Size thumbSize { 0, 0 }; + camera_metadata_ro_entry entry = + mCameraCharacteristics.find(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES); + for(uint32_t i = 0; i < entry.count; i += 2) { + Size sz { static_cast(entry.data.i32[i]), + static_cast(entry.data.i32[i+1]) }; + if(sz.width * sz.height > thumbSize.width * thumbSize.height) { + thumbSize = sz; + } + } + + if (thumbSize.width * thumbSize.height == 0) { + ALOGE("%s: non-zero thumbnail size not available", __FUNCTION__); + return Status::INTERNAL_ERROR; + } + + mBlobBufferSize = blobBufferSize; + status = mOutputThread->allocateIntermediateBuffers(v4lSize, + mMaxThumbResolution, config.streams, blobBufferSize); + if (status != Status::OK) { + ALOGE("%s: allocating intermediate buffers failed!", __FUNCTION__); + return status; + } + + out->streams.resize(config.streams.size()); + for (size_t i = 0; i < config.streams.size(); i++) { + out->streams[i].overrideDataSpace = config.streams[i].dataSpace; + out->streams[i].v3_2.id = config.streams[i].id; + // TODO: double check should we add those CAMERA flags + mStreamMap[config.streams[i].id].usage = + out->streams[i].v3_2.producerUsage = config.streams[i].usage | + BufferUsage::CPU_WRITE_OFTEN | + BufferUsage::CAMERA_OUTPUT; + out->streams[i].v3_2.consumerUsage = 0; + out->streams[i].v3_2.maxBuffers = mV4L2BufferCount; + + switch (config.streams[i].format) { + case PixelFormat::BLOB: + case PixelFormat::YCBCR_420_888: + case PixelFormat::YV12: // Used by SurfaceTexture + case PixelFormat::Y16: + // No override + out->streams[i].v3_2.overrideFormat = config.streams[i].format; + break; + case PixelFormat::IMPLEMENTATION_DEFINED: + // Override based on VIDEO or not + out->streams[i].v3_2.overrideFormat = + (config.streams[i].usage & BufferUsage::VIDEO_ENCODER) ? 
+ PixelFormat::YCBCR_420_888 : PixelFormat::YV12; + // Save overridden formt in mStreamMap + mStreamMap[config.streams[i].id].format = out->streams[i].v3_2.overrideFormat; + break; + default: + ALOGE("%s: unsupported format 0x%x", __FUNCTION__, config.streams[i].format); + return Status::ILLEGAL_ARGUMENT; + } + } + + mFirstRequest = true; + return Status::OK; +} + +bool ExternalCameraDeviceSession::isClosed() { + Mutex::Autolock _l(mLock); + return mClosed; +} + +#define ARRAY_SIZE(a) (sizeof(a) / sizeof(a[0])) +#define UPDATE(md, tag, data, size) \ +do { \ + if ((md).update((tag), (data), (size))) { \ + ALOGE("Update " #tag " failed!"); \ + return BAD_VALUE; \ + } \ +} while (0) + +status_t ExternalCameraDeviceSession::initDefaultRequests() { + ::android::hardware::camera::common::V1_0::helper::CameraMetadata md; + + const uint8_t aberrationMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF; + UPDATE(md, ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &aberrationMode, 1); + + const int32_t exposureCompensation = 0; + UPDATE(md, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &exposureCompensation, 1); + + const uint8_t videoStabilizationMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF; + UPDATE(md, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &videoStabilizationMode, 1); + + const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO; + UPDATE(md, ANDROID_CONTROL_AWB_MODE, &awbMode, 1); + + const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON; + UPDATE(md, ANDROID_CONTROL_AE_MODE, &aeMode, 1); + + const uint8_t aePrecaptureTrigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE; + UPDATE(md, ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &aePrecaptureTrigger, 1); + + const uint8_t afMode = ANDROID_CONTROL_AF_MODE_AUTO; + UPDATE(md, ANDROID_CONTROL_AF_MODE, &afMode, 1); + + const uint8_t afTrigger = ANDROID_CONTROL_AF_TRIGGER_IDLE; + UPDATE(md, ANDROID_CONTROL_AF_TRIGGER, &afTrigger, 1); + + const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_DISABLED; + UPDATE(md, ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1); + + const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF; + UPDATE(md, ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1); + + const uint8_t flashMode = ANDROID_FLASH_MODE_OFF; + UPDATE(md, ANDROID_FLASH_MODE, &flashMode, 1); + + const int32_t thumbnailSize[] = {240, 180}; + UPDATE(md, ANDROID_JPEG_THUMBNAIL_SIZE, thumbnailSize, 2); + + const uint8_t jpegQuality = 90; + UPDATE(md, ANDROID_JPEG_QUALITY, &jpegQuality, 1); + UPDATE(md, ANDROID_JPEG_THUMBNAIL_QUALITY, &jpegQuality, 1); + + const int32_t jpegOrientation = 0; + UPDATE(md, ANDROID_JPEG_ORIENTATION, &jpegOrientation, 1); + + const uint8_t oisMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF; + UPDATE(md, ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &oisMode, 1); + + const uint8_t nrMode = ANDROID_NOISE_REDUCTION_MODE_OFF; + UPDATE(md, ANDROID_NOISE_REDUCTION_MODE, &nrMode, 1); + + const int32_t testPatternModes = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF; + UPDATE(md, ANDROID_SENSOR_TEST_PATTERN_MODE, &testPatternModes, 1); + + const uint8_t fdMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF; + UPDATE(md, ANDROID_STATISTICS_FACE_DETECT_MODE, &fdMode, 1); + + const uint8_t hotpixelMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF; + UPDATE(md, ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotpixelMode, 1); + + bool support30Fps = false; + int32_t maxFps = std::numeric_limits::min(); + for (const auto& supportedFormat : mSupportedFormats) { + for (const auto& fr : supportedFormat.frameRates) { + int32_t framerateInt = static_cast(fr.getDouble()); + if (maxFps < framerateInt) 
{ + maxFps = framerateInt; + } + if (framerateInt == 30) { + support30Fps = true; + break; + } + } + if (support30Fps) { + break; + } + } + int32_t defaultFramerate = support30Fps ? 30 : maxFps; + int32_t defaultFpsRange[] = {defaultFramerate / 2, defaultFramerate}; + UPDATE(md, ANDROID_CONTROL_AE_TARGET_FPS_RANGE, defaultFpsRange, ARRAY_SIZE(defaultFpsRange)); + + uint8_t antibandingMode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO; + UPDATE(md, ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibandingMode, 1); + + const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO; + UPDATE(md, ANDROID_CONTROL_MODE, &controlMode, 1); + + auto requestTemplates = hidl_enum_range(); + for (RequestTemplate type : requestTemplates) { + ::android::hardware::camera::common::V1_0::helper::CameraMetadata mdCopy = md; + uint8_t intent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW; + switch (type) { + case RequestTemplate::PREVIEW: + intent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW; + break; + case RequestTemplate::STILL_CAPTURE: + intent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE; + break; + case RequestTemplate::VIDEO_RECORD: + intent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD; + break; + case RequestTemplate::VIDEO_SNAPSHOT: + intent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT; + break; + default: + ALOGV("%s: unsupported RequestTemplate type %d", __FUNCTION__, type); + continue; + } + UPDATE(mdCopy, ANDROID_CONTROL_CAPTURE_INTENT, &intent, 1); + + camera_metadata_t* rawMd = mdCopy.release(); + CameraMetadata hidlMd; + hidlMd.setToExternal( + (uint8_t*) rawMd, get_camera_metadata_size(rawMd)); + mDefaultRequests[type] = hidlMd; + free_camera_metadata(rawMd); + } + + return OK; +} + +status_t ExternalCameraDeviceSession::fillCaptureResult( + common::V1_0::helper::CameraMetadata &md, nsecs_t timestamp) { + bool afTrigger = false; + { + std::lock_guard lk(mAfTriggerLock); + afTrigger = mAfTrigger; + if (md.exists(ANDROID_CONTROL_AF_TRIGGER)) { + camera_metadata_entry entry = md.find(ANDROID_CONTROL_AF_TRIGGER); + if (entry.data.u8[0] == ANDROID_CONTROL_AF_TRIGGER_START) { + mAfTrigger = afTrigger = true; + } else if (entry.data.u8[0] == ANDROID_CONTROL_AF_TRIGGER_CANCEL) { + mAfTrigger = afTrigger = false; + } + } + } + + // For USB camera, the USB camera handles everything and we don't have control + // over AF. We only simply fake the AF metadata based on the request + // received here. + uint8_t afState; + if (afTrigger) { + afState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED; + } else { + afState = ANDROID_CONTROL_AF_STATE_INACTIVE; + } + UPDATE(md, ANDROID_CONTROL_AF_STATE, &afState, 1); + + camera_metadata_ro_entry activeArraySize = + mCameraCharacteristics.find(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE); + + return fillCaptureResultCommon(md, timestamp, activeArraySize); +} + +#undef ARRAY_SIZE +#undef UPDATE + +} // namespace implementation +} // namespace V3_4 +} // namespace device +} // namespace camera +} // namespace hardware +} // namespace android diff --git a/camera/device/3.4/default/ExternalCameraUtils.cpp b/camera/device/3.4/default/ExternalCameraUtils.cpp new file mode 100644 index 0000000..8f4626c --- /dev/null +++ b/camera/device/3.4/default/ExternalCameraUtils.cpp @@ -0,0 +1,900 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#define LOG_TAG "ExtCamUtils@3.4" +//#define LOG_NDEBUG 0 +#include + +#include +#include +#include +#include + +#define HAVE_JPEG // required for libyuv.h to export MJPEG decode APIs +#include + +#include + +#include "ExternalCameraUtils.h" + +namespace { + +buffer_handle_t sEmptyBuffer = nullptr; + +} // Anonymous namespace + +namespace android { +namespace hardware { +namespace camera { +namespace device { +namespace V3_4 { +namespace implementation { + +Frame::Frame(uint32_t width, uint32_t height, uint32_t fourcc) : + mWidth(width), mHeight(height), mFourcc(fourcc) {} + +V4L2Frame::V4L2Frame( + uint32_t w, uint32_t h, uint32_t fourcc, + int bufIdx, int fd, uint32_t dataSize, uint64_t offset) : + Frame(w, h, fourcc), + mBufferIndex(bufIdx), mFd(fd), mDataSize(dataSize), mOffset(offset) {} + +int V4L2Frame::map(uint8_t** data, size_t* dataSize) { + if (data == nullptr || dataSize == nullptr) { + ALOGI("%s: V4L2 buffer map bad argument: data %p, dataSize %p", + __FUNCTION__, data, dataSize); + return -EINVAL; + } + + std::lock_guard lk(mLock); + if (!mMapped) { + void* addr = mmap(NULL, mDataSize, PROT_READ, MAP_SHARED, mFd, mOffset); + if (addr == MAP_FAILED) { + ALOGE("%s: V4L2 buffer map failed: %s", __FUNCTION__, strerror(errno)); + return -EINVAL; + } + mData = static_cast(addr); + mMapped = true; + } + *data = mData; + *dataSize = mDataSize; + ALOGV("%s: V4L map FD %d, data %p size %zu", __FUNCTION__, mFd, mData, mDataSize); + return 0; +} + +int V4L2Frame::unmap() { + std::lock_guard lk(mLock); + if (mMapped) { + ALOGV("%s: V4L unmap data %p size %zu", __FUNCTION__, mData, mDataSize); + if (munmap(mData, mDataSize) != 0) { + ALOGE("%s: V4L2 buffer unmap failed: %s", __FUNCTION__, strerror(errno)); + return -EINVAL; + } + mMapped = false; + } + return 0; +} + +V4L2Frame::~V4L2Frame() { + unmap(); +} + +int V4L2Frame::getData(uint8_t** outData, size_t* dataSize) { + return map(outData, dataSize); +} + +AllocatedFrame::AllocatedFrame( + uint32_t w, uint32_t h) : + Frame(w, h, V4L2_PIX_FMT_YUV420) {}; + +AllocatedFrame::~AllocatedFrame() {} + +int AllocatedFrame::allocate(YCbCrLayout* out) { + std::lock_guard lk(mLock); + if ((mWidth % 2) || (mHeight % 2)) { + ALOGE("%s: bad dimension %dx%d (not multiple of 2)", __FUNCTION__, mWidth, mHeight); + return -EINVAL; + } + + uint32_t dataSize = mWidth * mHeight * 3 / 2; // YUV420 + if (mData.size() != dataSize) { + mData.resize(dataSize); + } + + if (out != nullptr) { + out->y = mData.data(); + out->yStride = mWidth; + uint8_t* cbStart = mData.data() + mWidth * mHeight; + uint8_t* crStart = cbStart + mWidth * mHeight / 4; + out->cb = cbStart; + out->cr = crStart; + out->cStride = mWidth / 2; + out->chromaStep = 1; + } + return 0; +} + +int AllocatedFrame::getData(uint8_t** outData, size_t* dataSize) { + YCbCrLayout layout; + int ret = allocate(&layout); + if (ret != 0) { + return ret; + } + *outData = mData.data(); + *dataSize = mData.size(); + return 0; +} + +int AllocatedFrame::getLayout(YCbCrLayout* out) { + IMapper::Rect noCrop = {0, 0, + static_cast(mWidth), + static_cast(mHeight)}; + return 
getCroppedLayout(noCrop, out); +} + +int AllocatedFrame::getCroppedLayout(const IMapper::Rect& rect, YCbCrLayout* out) { + if (out == nullptr) { + ALOGE("%s: null out", __FUNCTION__); + return -1; + } + + std::lock_guard lk(mLock); + if ((rect.left + rect.width) > static_cast(mWidth) || + (rect.top + rect.height) > static_cast(mHeight) || + (rect.left % 2) || (rect.top % 2) || (rect.width % 2) || (rect.height % 2)) { + ALOGE("%s: bad rect left %d top %d w %d h %d", __FUNCTION__, + rect.left, rect.top, rect.width, rect.height); + return -1; + } + + out->y = mData.data() + mWidth * rect.top + rect.left; + out->yStride = mWidth; + uint8_t* cbStart = mData.data() + mWidth * mHeight; + uint8_t* crStart = cbStart + mWidth * mHeight / 4; + out->cb = cbStart + mWidth * rect.top / 4 + rect.left / 2; + out->cr = crStart + mWidth * rect.top / 4 + rect.left / 2; + out->cStride = mWidth / 2; + out->chromaStep = 1; + return 0; +} + +bool isAspectRatioClose(float ar1, float ar2) { + const float kAspectRatioMatchThres = 0.025f; // This threshold is good enough to distinguish + // 4:3/16:9/20:9 + // 1.33 / 1.78 / 2 + return (std::abs(ar1 - ar2) < kAspectRatioMatchThres); +} + +double SupportedV4L2Format::FrameRate::getDouble() const { + return durationDenominator / static_cast(durationNumerator); +} + +::android::hardware::camera::common::V1_0::Status importBufferImpl( + /*inout*/std::map& circulatingBuffers, + /*inout*/HandleImporter& handleImporter, + int32_t streamId, + uint64_t bufId, buffer_handle_t buf, + /*out*/buffer_handle_t** outBufPtr, + bool allowEmptyBuf) { + using ::android::hardware::camera::common::V1_0::Status; + if (buf == nullptr && bufId == BUFFER_ID_NO_BUFFER) { + if (allowEmptyBuf) { + *outBufPtr = &sEmptyBuffer; + return Status::OK; + } else { + ALOGE("%s: bufferId %" PRIu64 " has null buffer handle!", __FUNCTION__, bufId); + return Status::ILLEGAL_ARGUMENT; + } + } + + CirculatingBuffers& cbs = circulatingBuffers[streamId]; + if (cbs.count(bufId) == 0) { + if (buf == nullptr) { + ALOGE("%s: bufferId %" PRIu64 " has null buffer handle!", __FUNCTION__, bufId); + return Status::ILLEGAL_ARGUMENT; + } + // Register a newly seen buffer + buffer_handle_t importedBuf = buf; + handleImporter.importBuffer(importedBuf); + if (importedBuf == nullptr) { + ALOGE("%s: output buffer for stream %d is invalid!", __FUNCTION__, streamId); + return Status::INTERNAL_ERROR; + } else { + cbs[bufId] = importedBuf; + } + } + *outBufPtr = &cbs[bufId]; + return Status::OK; +} + +uint32_t getFourCcFromLayout(const YCbCrLayout& layout) { + intptr_t cb = reinterpret_cast(layout.cb); + intptr_t cr = reinterpret_cast(layout.cr); + if (std::abs(cb - cr) == 1 && layout.chromaStep == 2) { + // Interleaved format + if (layout.cb > layout.cr) { + return V4L2_PIX_FMT_NV21; + } else { + return V4L2_PIX_FMT_NV12; + } + } else if (layout.chromaStep == 1) { + // Planar format + if (layout.cb > layout.cr) { + return V4L2_PIX_FMT_YVU420; // YV12 + } else { + return V4L2_PIX_FMT_YUV420; // YU12 + } + } else { + return FLEX_YUV_GENERIC; + } +} + +int getCropRect( + CroppingType ct, const Size& inSize, const Size& outSize, IMapper::Rect* out) { + if (out == nullptr) { + ALOGE("%s: out is null", __FUNCTION__); + return -1; + } + + uint32_t inW = inSize.width; + uint32_t inH = inSize.height; + uint32_t outW = outSize.width; + uint32_t outH = outSize.height; + + // Handle special case where aspect ratio is close to input but scaled + // dimension is slightly larger than input + float arIn = ASPECT_RATIO(inSize); + float arOut = 
ASPECT_RATIO(outSize); + if (isAspectRatioClose(arIn, arOut)) { + out->left = 0; + out->top = 0; + out->width = inW; + out->height = inH; + return 0; + } + + if (ct == VERTICAL) { + uint64_t scaledOutH = static_cast(outH) * inW / outW; + if (scaledOutH > inH) { + ALOGE("%s: Output size %dx%d cannot be vertically cropped from input size %dx%d", + __FUNCTION__, outW, outH, inW, inH); + return -1; + } + scaledOutH = scaledOutH & ~0x1; // make it multiple of 2 + + out->left = 0; + out->top = ((inH - scaledOutH) / 2) & ~0x1; + out->width = inW; + out->height = static_cast(scaledOutH); + ALOGV("%s: crop %dx%d to %dx%d: top %d, scaledH %d", + __FUNCTION__, inW, inH, outW, outH, out->top, static_cast(scaledOutH)); + } else { + uint64_t scaledOutW = static_cast(outW) * inH / outH; + if (scaledOutW > inW) { + ALOGE("%s: Output size %dx%d cannot be horizontally cropped from input size %dx%d", + __FUNCTION__, outW, outH, inW, inH); + return -1; + } + scaledOutW = scaledOutW & ~0x1; // make it multiple of 2 + + out->left = ((inW - scaledOutW) / 2) & ~0x1; + out->top = 0; + out->width = static_cast(scaledOutW); + out->height = inH; + ALOGV("%s: crop %dx%d to %dx%d: top %d, scaledW %d", + __FUNCTION__, inW, inH, outW, outH, out->top, static_cast(scaledOutW)); + } + + return 0; +} + +int formatConvert( + const YCbCrLayout& in, const YCbCrLayout& out, Size sz, uint32_t format) { + int ret = 0; + switch (format) { + case V4L2_PIX_FMT_NV21: + ret = libyuv::I420ToNV21( + static_cast(in.y), + in.yStride, + static_cast(in.cb), + in.cStride, + static_cast(in.cr), + in.cStride, + static_cast(out.y), + out.yStride, + static_cast(out.cr), + out.cStride, + sz.width, + sz.height); + if (ret != 0) { + ALOGE("%s: convert to NV21 buffer failed! ret %d", + __FUNCTION__, ret); + return ret; + } + break; + case V4L2_PIX_FMT_NV12: + ret = libyuv::I420ToNV12( + static_cast(in.y), + in.yStride, + static_cast(in.cb), + in.cStride, + static_cast(in.cr), + in.cStride, + static_cast(out.y), + out.yStride, + static_cast(out.cb), + out.cStride, + sz.width, + sz.height); + if (ret != 0) { + ALOGE("%s: convert to NV12 buffer failed! ret %d", + __FUNCTION__, ret); + return ret; + } + break; + case V4L2_PIX_FMT_YVU420: // YV12 + case V4L2_PIX_FMT_YUV420: // YU12 + // TODO: maybe we can speed up here by somehow save this copy? + ret = libyuv::I420Copy( + static_cast(in.y), + in.yStride, + static_cast(in.cb), + in.cStride, + static_cast(in.cr), + in.cStride, + static_cast(out.y), + out.yStride, + static_cast(out.cb), + out.cStride, + static_cast(out.cr), + out.cStride, + sz.width, + sz.height); + if (ret != 0) { + ALOGE("%s: copy to YV12 or YU12 buffer failed! ret %d", + __FUNCTION__, ret); + return ret; + } + break; + case FLEX_YUV_GENERIC: + // TODO: b/72261744 write to arbitrary flexible YUV layout. Slow. + ALOGE("%s: unsupported flexible yuv layout" + " y %p cb %p cr %p y_str %d c_str %d c_step %d", + __FUNCTION__, out.y, out.cb, out.cr, + out.yStride, out.cStride, out.chromaStep); + return -1; + default: + ALOGE("%s: unknown YUV format 0x%x!", __FUNCTION__, format); + return -1; + } + return 0; +} + +int encodeJpegYU12( + const Size & inSz, const YCbCrLayout& inLayout, + int jpegQuality, const void *app1Buffer, size_t app1Size, + void *out, const size_t maxOutSize, size_t &actualCodeSize) +{ + /* libjpeg is a C library so we use C-style "inheritance" by + * putting libjpeg's jpeg_destination_mgr first in our custom + * struct. 
This allows us to cast jpeg_destination_mgr* to + * CustomJpegDestMgr* when we get it passed to us in a callback */ + struct CustomJpegDestMgr { + struct jpeg_destination_mgr mgr; + JOCTET *mBuffer; + size_t mBufferSize; + size_t mEncodedSize; + bool mSuccess; + } dmgr; + + jpeg_compress_struct cinfo = {}; + jpeg_error_mgr jerr; + + /* Initialize error handling with standard callbacks, but + * then override output_message (to print to ALOG) and + * error_exit to set a flag and print a message instead + * of killing the whole process */ + cinfo.err = jpeg_std_error(&jerr); + + cinfo.err->output_message = [](j_common_ptr cinfo) { + char buffer[JMSG_LENGTH_MAX]; + + /* Create the message */ + (*cinfo->err->format_message)(cinfo, buffer); + ALOGE("libjpeg error: %s", buffer); + }; + cinfo.err->error_exit = [](j_common_ptr cinfo) { + (*cinfo->err->output_message)(cinfo); + if(cinfo->client_data) { + auto & dmgr = + *reinterpret_cast(cinfo->client_data); + dmgr.mSuccess = false; + } + }; + /* Now that we initialized some callbacks, let's create our compressor */ + jpeg_create_compress(&cinfo); + + /* Initialize our destination manager */ + dmgr.mBuffer = static_cast(out); + dmgr.mBufferSize = maxOutSize; + dmgr.mEncodedSize = 0; + dmgr.mSuccess = true; + cinfo.client_data = static_cast(&dmgr); + + /* These lambdas become C-style function pointers and as per C++11 spec + * may not capture anything */ + dmgr.mgr.init_destination = [](j_compress_ptr cinfo) { + auto & dmgr = reinterpret_cast(*cinfo->dest); + dmgr.mgr.next_output_byte = dmgr.mBuffer; + dmgr.mgr.free_in_buffer = dmgr.mBufferSize; + ALOGV("%s:%d jpeg start: %p [%zu]", + __FUNCTION__, __LINE__, dmgr.mBuffer, dmgr.mBufferSize); + }; + + dmgr.mgr.empty_output_buffer = [](j_compress_ptr cinfo __unused) { + ALOGV("%s:%d Out of buffer", __FUNCTION__, __LINE__); + return 0; + }; + + dmgr.mgr.term_destination = [](j_compress_ptr cinfo) { + auto & dmgr = reinterpret_cast(*cinfo->dest); + dmgr.mEncodedSize = dmgr.mBufferSize - dmgr.mgr.free_in_buffer; + ALOGV("%s:%d Done with jpeg: %zu", __FUNCTION__, __LINE__, dmgr.mEncodedSize); + }; + cinfo.dest = reinterpret_cast(&dmgr); + + /* We are going to be using JPEG in raw data mode, so we are passing + * straight subsampled planar YCbCr and it will not touch our pixel + * data or do any scaling or anything */ + cinfo.image_width = inSz.width; + cinfo.image_height = inSz.height; + cinfo.input_components = 3; + cinfo.in_color_space = JCS_YCbCr; + + /* Initialize defaults and then override what we want */ + jpeg_set_defaults(&cinfo); + + jpeg_set_quality(&cinfo, jpegQuality, 1); + jpeg_set_colorspace(&cinfo, JCS_YCbCr); + cinfo.raw_data_in = 1; + cinfo.dct_method = JDCT_IFAST; + + /* Configure sampling factors. The sampling factor is JPEG subsampling 420 + * because the source format is YUV420. Note that libjpeg sampling factors + * are... a little weird. Sampling of Y=2,U=1,V=1 means there is 1 U and + * 1 V value for each 2 Y values */ + cinfo.comp_info[0].h_samp_factor = 2; + cinfo.comp_info[0].v_samp_factor = 2; + cinfo.comp_info[1].h_samp_factor = 1; + cinfo.comp_info[1].v_samp_factor = 1; + cinfo.comp_info[2].h_samp_factor = 1; + cinfo.comp_info[2].v_samp_factor = 1; + + /* Let's not hardcode YUV420 in 6 places... 
5 was enough */ + int maxVSampFactor = std::max( { + cinfo.comp_info[0].v_samp_factor, + cinfo.comp_info[1].v_samp_factor, + cinfo.comp_info[2].v_samp_factor + }); + int cVSubSampling = cinfo.comp_info[0].v_samp_factor / + cinfo.comp_info[1].v_samp_factor; + + /* Start the compressor */ + jpeg_start_compress(&cinfo, TRUE); + + /* Compute our macroblock height, so we can pad our input to be vertically + * macroblock aligned. + * TODO: Does it need to be horizontally MCU aligned too? */ + + size_t mcuV = DCTSIZE*maxVSampFactor; + size_t paddedHeight = mcuV * ((inSz.height + mcuV - 1) / mcuV); + + /* libjpeg uses arrays of row pointers, which makes it really easy to pad + * data vertically (unfortunately doesn't help horizontally) */ + std::vector yLines (paddedHeight); + std::vector cbLines(paddedHeight/cVSubSampling); + std::vector crLines(paddedHeight/cVSubSampling); + + uint8_t *py = static_cast(inLayout.y); + uint8_t *pcr = static_cast(inLayout.cr); + uint8_t *pcb = static_cast(inLayout.cb); + + for(uint32_t i = 0; i < paddedHeight; i++) + { + /* Once we are in the padding territory we still point to the last line + * effectively replicating it several times ~ CLAMP_TO_EDGE */ + int li = std::min(i, inSz.height - 1); + yLines[i] = static_cast(py + li * inLayout.yStride); + if(i < paddedHeight / cVSubSampling) + { + li = std::min(i, (inSz.height - 1) / cVSubSampling); + crLines[i] = static_cast(pcr + li * inLayout.cStride); + cbLines[i] = static_cast(pcb + li * inLayout.cStride); + } + } + + /* If APP1 data was passed in, use it */ + if(app1Buffer && app1Size) + { + jpeg_write_marker(&cinfo, JPEG_APP0 + 1, + static_cast(app1Buffer), app1Size); + } + + /* While we still have padded height left to go, keep giving it one + * macroblock at a time. */ + while (cinfo.next_scanline < cinfo.image_height) { + const uint32_t batchSize = DCTSIZE * maxVSampFactor; + const uint32_t nl = cinfo.next_scanline; + JSAMPARRAY planes[3]{ &yLines[nl], + &cbLines[nl/cVSubSampling], + &crLines[nl/cVSubSampling] }; + + uint32_t done = jpeg_write_raw_data(&cinfo, planes, batchSize); + + if (done != batchSize) { + ALOGE("%s: compressed %u lines, expected %u (total %u/%u)", + __FUNCTION__, done, batchSize, cinfo.next_scanline, + cinfo.image_height); + return -1; + } + } + + /* This will flush everything */ + jpeg_finish_compress(&cinfo); + + /* Grab the actual code size and set it */ + actualCodeSize = dmgr.mEncodedSize; + + return 0; +} + +Size getMaxThumbnailResolution(const common::V1_0::helper::CameraMetadata& chars) { + Size thumbSize { 0, 0 }; + camera_metadata_ro_entry entry = + chars.find(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES); + for(uint32_t i = 0; i < entry.count; i += 2) { + Size sz { static_cast(entry.data.i32[i]), + static_cast(entry.data.i32[i+1]) }; + if(sz.width * sz.height > thumbSize.width * thumbSize.height) { + thumbSize = sz; + } + } + + if (thumbSize.width * thumbSize.height == 0) { + ALOGW("%s: non-zero thumbnail size not available", __FUNCTION__); + } + + return thumbSize; +} + +void freeReleaseFences(hidl_vec& results) { + for (auto& result : results) { + if (result.inputBuffer.releaseFence.getNativeHandle() != nullptr) { + native_handle_t* handle = const_cast( + result.inputBuffer.releaseFence.getNativeHandle()); + native_handle_close(handle); + native_handle_delete(handle); + } + for (auto& buf : result.outputBuffers) { + if (buf.releaseFence.getNativeHandle() != nullptr) { + native_handle_t* handle = const_cast( + buf.releaseFence.getNativeHandle()); + native_handle_close(handle); + 
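+                // native_handle_close() closes the fence fds held by the
+                // handle; native_handle_delete() below then frees the
+                // native_handle_t itself. Both calls are needed to avoid
+                // leaking fds or memory.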
native_handle_delete(handle); + } + } + } + return; +} + +#define ARRAY_SIZE(a) (sizeof(a) / sizeof(a[0])) +#define UPDATE(md, tag, data, size) \ +do { \ + if ((md).update((tag), (data), (size))) { \ + ALOGE("Update " #tag " failed!"); \ + return BAD_VALUE; \ + } \ +} while (0) + +status_t fillCaptureResultCommon( + common::V1_0::helper::CameraMetadata &md, nsecs_t timestamp, + camera_metadata_ro_entry& activeArraySize) { + if (activeArraySize.count < 4) { + ALOGE("%s: cannot find active array size!", __FUNCTION__); + return -EINVAL; + } + // android.control + // For USB camera, we don't know the AE state. Set the state to converged to + // indicate the frame should be good to use. Then apps don't have to wait the + // AE state. + const uint8_t aeState = ANDROID_CONTROL_AE_STATE_CONVERGED; + UPDATE(md, ANDROID_CONTROL_AE_STATE, &aeState, 1); + + const uint8_t ae_lock = ANDROID_CONTROL_AE_LOCK_OFF; + UPDATE(md, ANDROID_CONTROL_AE_LOCK, &ae_lock, 1); + + // Set AWB state to converged to indicate the frame should be good to use. + const uint8_t awbState = ANDROID_CONTROL_AWB_STATE_CONVERGED; + UPDATE(md, ANDROID_CONTROL_AWB_STATE, &awbState, 1); + + const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF; + UPDATE(md, ANDROID_CONTROL_AWB_LOCK, &awbLock, 1); + + const uint8_t flashState = ANDROID_FLASH_STATE_UNAVAILABLE; + UPDATE(md, ANDROID_FLASH_STATE, &flashState, 1); + + // This means pipeline latency of X frame intervals. The maximum number is 4. + const uint8_t requestPipelineMaxDepth = 4; + UPDATE(md, ANDROID_REQUEST_PIPELINE_DEPTH, &requestPipelineMaxDepth, 1); + + // android.scaler + const int32_t crop_region[] = { + activeArraySize.data.i32[0], activeArraySize.data.i32[1], + activeArraySize.data.i32[2], activeArraySize.data.i32[3], + }; + UPDATE(md, ANDROID_SCALER_CROP_REGION, crop_region, ARRAY_SIZE(crop_region)); + + // android.sensor + UPDATE(md, ANDROID_SENSOR_TIMESTAMP, ×tamp, 1); + + // android.statistics + const uint8_t lensShadingMapMode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF; + UPDATE(md, ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &lensShadingMapMode, 1); + + const uint8_t sceneFlicker = ANDROID_STATISTICS_SCENE_FLICKER_NONE; + UPDATE(md, ANDROID_STATISTICS_SCENE_FLICKER, &sceneFlicker, 1); + + return OK; +} + +#undef ARRAY_SIZE +#undef UPDATE + +} // namespace implementation +} // namespace V3_4 + +namespace V3_6 { +namespace implementation { + +AllocatedV4L2Frame::AllocatedV4L2Frame(sp frameIn) : + Frame(frameIn->mWidth, frameIn->mHeight, frameIn->mFourcc) { + uint8_t* dataIn; + size_t dataSize; + if (frameIn->getData(&dataIn, &dataSize) != 0) { + ALOGE("%s: map input V4L2 frame failed!", __FUNCTION__); + return; + } + + mData.resize(dataSize); + std::memcpy(mData.data(), dataIn, dataSize); +} + +int AllocatedV4L2Frame::getData(uint8_t** outData, size_t* dataSize) { + if (outData == nullptr || dataSize == nullptr) { + ALOGE("%s: outData(%p)/dataSize(%p) must not be null", __FUNCTION__, outData, dataSize); + return -1; + } + + *outData = mData.data(); + *dataSize = mData.size(); + return 0; +} + +AllocatedV4L2Frame::~AllocatedV4L2Frame() {} + +} // namespace implementation +} // namespace V3_6 +} // namespace device + + +namespace external { +namespace common { + +namespace { + const int kDefaultCameraIdOffset = 100; + const int kDefaultJpegBufSize = 5 << 20; // 5MB + const int kDefaultNumVideoBuffer = 4; + const int kDefaultNumStillBuffer = 2; + const int kDefaultOrientation = 0; // suitable for natural landscape displays like tablet/TV + // For phone devices 270 is 
better +} // anonymous namespace + +const char* ExternalCameraConfig::kDefaultCfgPath = "/vendor/etc/external_camera_config.xml"; + +ExternalCameraConfig ExternalCameraConfig::loadFromCfg(const char* cfgPath) { + using namespace tinyxml2; + ExternalCameraConfig ret; + + XMLDocument configXml; + XMLError err = configXml.LoadFile(cfgPath); + if (err != XML_SUCCESS) { + ALOGE("%s: Unable to load external camera config file '%s'. Error: %s", + __FUNCTION__, cfgPath, XMLDocument::ErrorIDToName(err)); + return ret; + } else { + ALOGI("%s: load external camera config succeed!", __FUNCTION__); + } + + XMLElement *extCam = configXml.FirstChildElement("ExternalCamera"); + if (extCam == nullptr) { + ALOGI("%s: no external camera config specified", __FUNCTION__); + return ret; + } + + XMLElement *providerCfg = extCam->FirstChildElement("Provider"); + if (providerCfg == nullptr) { + ALOGI("%s: no external camera provider config specified", __FUNCTION__); + return ret; + } + + XMLElement *cameraIdOffset = providerCfg->FirstChildElement("CameraIdOffset"); + if (cameraIdOffset != nullptr) { + ret.cameraIdOffset = std::atoi(cameraIdOffset->GetText()); + } + + XMLElement *ignore = providerCfg->FirstChildElement("ignore"); + if (ignore == nullptr) { + ALOGI("%s: no internal ignored device specified", __FUNCTION__); + return ret; + } + + XMLElement *id = ignore->FirstChildElement("id"); + while (id != nullptr) { + const char* text = id->GetText(); + if (text != nullptr) { + ret.mInternalDevices.insert(text); + ALOGI("%s: device %s will be ignored by external camera provider", + __FUNCTION__, text); + } + id = id->NextSiblingElement("id"); + } + + XMLElement *deviceCfg = extCam->FirstChildElement("Device"); + if (deviceCfg == nullptr) { + ALOGI("%s: no external camera device config specified", __FUNCTION__); + return ret; + } + + XMLElement *jpegBufSz = deviceCfg->FirstChildElement("MaxJpegBufferSize"); + if (jpegBufSz == nullptr) { + ALOGI("%s: no max jpeg buffer size specified", __FUNCTION__); + } else { + ret.maxJpegBufSize = jpegBufSz->UnsignedAttribute("bytes", /*Default*/kDefaultJpegBufSize); + } + + XMLElement *numVideoBuf = deviceCfg->FirstChildElement("NumVideoBuffers"); + if (numVideoBuf == nullptr) { + ALOGI("%s: no num video buffers specified", __FUNCTION__); + } else { + ret.numVideoBuffers = + numVideoBuf->UnsignedAttribute("count", /*Default*/kDefaultNumVideoBuffer); + } + + XMLElement *numStillBuf = deviceCfg->FirstChildElement("NumStillBuffers"); + if (numStillBuf == nullptr) { + ALOGI("%s: no num still buffers specified", __FUNCTION__); + } else { + ret.numStillBuffers = + numStillBuf->UnsignedAttribute("count", /*Default*/kDefaultNumStillBuffer); + } + + XMLElement *fpsList = deviceCfg->FirstChildElement("FpsList"); + if (fpsList == nullptr) { + ALOGI("%s: no fps list specified", __FUNCTION__); + } else { + if (!updateFpsList(fpsList, ret.fpsLimits)) { + return ret; + } + } + + XMLElement *depth = deviceCfg->FirstChildElement("Depth16Supported"); + if (depth == nullptr) { + ret.depthEnabled = false; + ALOGI("%s: depth output is not enabled", __FUNCTION__); + } else { + ret.depthEnabled = depth->BoolAttribute("enabled", false); + } + + if(ret.depthEnabled) { + XMLElement *depthFpsList = deviceCfg->FirstChildElement("DepthFpsList"); + if (depthFpsList == nullptr) { + ALOGW("%s: no depth fps list specified", __FUNCTION__); + } else { + if(!updateFpsList(depthFpsList, ret.depthFpsLimits)) { + return ret; + } + } + } + + XMLElement *minStreamSize = 
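+    /* A minimal config sketch accepted by this parser (all values below are
+     * examples only):
+     *
+     *   <ExternalCamera>
+     *     <Provider>
+     *       <CameraIdOffset>100</CameraIdOffset>
+     *       <ignore><id>0</id><id>1</id></ignore>
+     *     </Provider>
+     *     <Device>
+     *       <MaxJpegBufferSize bytes="5242880"/>
+     *       <NumVideoBuffers count="4"/>
+     *       <NumStillBuffers count="2"/>
+     *       <FpsList>
+     *         <Limit width="640" height="480" fpsBound="30.0"/>
+     *         <Limit width="1920" height="1080" fpsBound="5.0"/>
+     *       </FpsList>
+     *       <MinimumStreamSize width="176" height="144"/>
+     *       <Orientation degree="0"/>
+     *     </Device>
+     *   </ExternalCamera>
+     */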
deviceCfg->FirstChildElement("MinimumStreamSize"); + if (minStreamSize == nullptr) { + ALOGI("%s: no minimum stream size specified", __FUNCTION__); + } else { + ret.minStreamSize = { + minStreamSize->UnsignedAttribute("width", /*Default*/0), + minStreamSize->UnsignedAttribute("height", /*Default*/0)}; + } + + XMLElement *orientation = deviceCfg->FirstChildElement("Orientation"); + if (orientation == nullptr) { + ALOGI("%s: no sensor orientation specified", __FUNCTION__); + } else { + ret.orientation = orientation->IntAttribute("degree", /*Default*/kDefaultOrientation); + } + + ALOGI("%s: external camera cfg loaded: maxJpgBufSize %d," + " num video buffers %d, num still buffers %d, orientation %d", + __FUNCTION__, ret.maxJpegBufSize, + ret.numVideoBuffers, ret.numStillBuffers, ret.orientation); + for (const auto& limit : ret.fpsLimits) { + ALOGI("%s: fpsLimitList: %dx%d@%f", __FUNCTION__, + limit.size.width, limit.size.height, limit.fpsUpperBound); + } + for (const auto& limit : ret.depthFpsLimits) { + ALOGI("%s: depthFpsLimitList: %dx%d@%f", __FUNCTION__, limit.size.width, limit.size.height, + limit.fpsUpperBound); + } + ALOGI("%s: minStreamSize: %dx%d" , __FUNCTION__, + ret.minStreamSize.width, ret.minStreamSize.height); + return ret; +} + +bool ExternalCameraConfig::updateFpsList(tinyxml2::XMLElement* fpsList, + std::vector& fpsLimits) { + using namespace tinyxml2; + std::vector limits; + XMLElement* row = fpsList->FirstChildElement("Limit"); + while (row != nullptr) { + FpsLimitation prevLimit{{0, 0}, 1000.0}; + FpsLimitation limit; + limit.size = {row->UnsignedAttribute("width", /*Default*/ 0), + row->UnsignedAttribute("height", /*Default*/ 0)}; + limit.fpsUpperBound = row->DoubleAttribute("fpsBound", /*Default*/ 1000.0); + if (limit.size.width <= prevLimit.size.width || + limit.size.height <= prevLimit.size.height || + limit.fpsUpperBound >= prevLimit.fpsUpperBound) { + ALOGE( + "%s: FPS limit list must have increasing size and decreasing fps!" 
+ " Prev %dx%d@%f, Current %dx%d@%f", + __FUNCTION__, prevLimit.size.width, prevLimit.size.height, prevLimit.fpsUpperBound, + limit.size.width, limit.size.height, limit.fpsUpperBound); + return false; + } + limits.push_back(limit); + row = row->NextSiblingElement("Limit"); + } + fpsLimits = limits; + return true; +} + +ExternalCameraConfig::ExternalCameraConfig() : + cameraIdOffset(kDefaultCameraIdOffset), + maxJpegBufSize(kDefaultJpegBufSize), + numVideoBuffers(kDefaultNumVideoBuffer), + numStillBuffers(kDefaultNumStillBuffer), + depthEnabled(false), + orientation(kDefaultOrientation) { + fpsLimits.push_back({/*Size*/{ 640, 480}, /*FPS upper bound*/30.0}); + fpsLimits.push_back({/*Size*/{1280, 720}, /*FPS upper bound*/7.5}); + fpsLimits.push_back({/*Size*/{1920, 1080}, /*FPS upper bound*/5.0}); + minStreamSize = {0, 0}; +} + + +} // namespace common +} // namespace external +} // namespace camera +} // namespace hardware +} // namespace android diff --git a/camera/device/3.4/default/OWNERS b/camera/device/3.4/default/OWNERS new file mode 100644 index 0000000..f48a95c --- /dev/null +++ b/camera/device/3.4/default/OWNERS @@ -0,0 +1 @@ +include platform/frameworks/av:/camera/OWNERS diff --git a/camera/device/3.4/default/convert.cpp b/camera/device/3.4/default/convert.cpp new file mode 100644 index 0000000..f12230c --- /dev/null +++ b/camera/device/3.4/default/convert.cpp @@ -0,0 +1,59 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#define LOG_TAG "android.hardware.camera.device@3.4-convert-impl" +#include + +#include +#include "include/convert.h" + +namespace android { +namespace hardware { +namespace camera { +namespace device { +namespace V3_4 { +namespace implementation { + +using ::android::hardware::graphics::common::V1_0::Dataspace; +using ::android::hardware::graphics::common::V1_0::PixelFormat; +using ::android::hardware::camera::device::V3_2::BufferUsageFlags; + +void convertToHidl(const Camera3Stream* src, HalStream* dst) { + V3_3::implementation::convertToHidl(src, &dst->v3_3); + dst->physicalCameraId = src->physical_camera_id; +} + +void convertToHidl(const camera3_stream_configuration_t& src, HalStreamConfiguration* dst) { + dst->streams.resize(src.num_streams); + for (uint32_t i = 0; i < src.num_streams; i++) { + convertToHidl(static_cast(src.streams[i]), &dst->streams[i]); + } + return; +} + +void convertFromHidl(const Stream &src, Camera3Stream* dst) { + V3_2::implementation::convertFromHidl(src.v3_2, dst); + // Initialize physical_camera_id + dst->physical_camera_id = nullptr; + return; +} + +} // namespace implementation +} // namespace V3_4 +} // namespace device +} // namespace camera +} // namespace hardware +} // namespace android diff --git a/camera/device/3.4/default/include/convert.h b/camera/device/3.4/default/include/convert.h new file mode 100644 index 0000000..e8e3951 --- /dev/null +++ b/camera/device/3.4/default/include/convert.h @@ -0,0 +1,46 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef HARDWARE_INTERFACES_CAMERA_DEVICE_V3_4_DEFAULT_INCLUDE_CONVERT_H_ +#define HARDWARE_INTERFACES_CAMERA_DEVICE_V3_4_DEFAULT_INCLUDE_CONVERT_H_ + +#include +#include "hardware/camera3.h" +#include "../../3.3/default/include/convert.h" + +namespace android { +namespace hardware { +namespace camera { +namespace device { +namespace V3_4 { +namespace implementation { + +using ::android::hardware::camera::device::V3_2::implementation::Camera3Stream; + +void convertToHidl(const Camera3Stream* src, HalStream* dst); + +void convertToHidl(const camera3_stream_configuration_t& src, HalStreamConfiguration* dst); + +void convertFromHidl(const Stream &src, Camera3Stream* dst); + +} // namespace implementation +} // namespace V3_4 +} // namespace device +} // namespace camera +} // namespace hardware +} // namespace android + +#endif // HARDWARE_INTERFACES_CAMERA_DEVICE_V3_4_DEFAULT_INCLUDE_CONVERT_H_ diff --git a/camera/device/3.4/default/include/device_v3_4_impl/CameraDeviceSession.h b/camera/device/3.4/default/include/device_v3_4_impl/CameraDeviceSession.h new file mode 100644 index 0000000..280c4be --- /dev/null +++ b/camera/device/3.4/default/include/device_v3_4_impl/CameraDeviceSession.h @@ -0,0 +1,202 @@ +/* + * Copyright (C) 2017-2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_HARDWARE_CAMERA_DEVICE_V3_4_CAMERADEVICE3SESSION_H +#define ANDROID_HARDWARE_CAMERA_DEVICE_V3_4_CAMERADEVICE3SESSION_H + +#include +#include +#include +#include <../../3.3/default/CameraDeviceSession.h> +#include <../../3.3/default/include/convert.h> +#include +#include +#include +#include +#include +#include +#include +#include "CameraMetadata.h" +#include "HandleImporter.h" +#include "hardware/camera3.h" +#include "hardware/camera_common.h" +#include "utils/Mutex.h" + +namespace android { +namespace hardware { +namespace camera { +namespace device { +namespace V3_4 { +namespace implementation { + +using namespace ::android::hardware::camera::device; +using ::android::hardware::camera::device::V3_2::CaptureRequest; +using ::android::hardware::camera::device::V3_2::StreamType; +using ::android::hardware::camera::device::V3_4::StreamConfiguration; +using ::android::hardware::camera::device::V3_4::HalStreamConfiguration; +using ::android::hardware::camera::device::V3_4::ICameraDeviceSession; +using ::android::hardware::camera::device::V3_4::ICameraDeviceCallback; +using ::android::hardware::camera::common::V1_0::Status; +using ::android::hardware::camera::common::V1_0::helper::HandleImporter; +using ::android::hardware::kSynchronizedReadWrite; +using ::android::hardware::MessageQueue; +using ::android::hardware::MQDescriptorSync; +using ::android::hardware::Return; +using ::android::hardware::Void; +using ::android::hardware::hidl_vec; +using ::android::hardware::hidl_string; +using ::android::sp; +using ::android::Mutex; + +struct CameraDeviceSession : public V3_3::implementation::CameraDeviceSession { + + CameraDeviceSession(camera3_device_t*, + const camera_metadata_t* deviceInfo, + const sp&); + virtual ~CameraDeviceSession(); + + virtual sp getInterface() override { + return new TrampolineSessionInterface_3_4(this); + } + +protected: + // Methods from v3.3 and earlier will trampoline to inherited implementation + + Return configureStreams_3_4( + const StreamConfiguration& requestedConfiguration, + ICameraDeviceSession::configureStreams_3_4_cb _hidl_cb); + + bool preProcessConfigurationLocked_3_4( + const StreamConfiguration& requestedConfiguration, bool useOverriddenFields, + camera3_stream_configuration_t *stream_list /*out*/, + hidl_vec *streams /*out*/); + void postProcessConfigurationLocked_3_4(const StreamConfiguration& requestedConfiguration); + void postProcessConfigurationFailureLocked_3_4( + const StreamConfiguration& requestedConfiguration); + + void configureStreams_3_4_Impl( + const StreamConfiguration& requestedConfiguration, + ICameraDeviceSession::configureStreams_3_4_cb _hidl_cb, + // Optional argument for ICameraDeviceSession@3.5 impl + uint32_t streamConfigCounter = 0, bool useOverriddenFields = true); + + Return processCaptureRequest_3_4( + const hidl_vec& requests, + const hidl_vec& cachesToRemove, + ICameraDeviceSession::processCaptureRequest_3_4_cb _hidl_cb); + Status processOneCaptureRequest_3_4(const V3_4::CaptureRequest& request); + + std::map mPhysicalCameraIdMap; + + static 
V3_2::implementation::callbacks_process_capture_result_t sProcessCaptureResult_3_4; + static V3_2::implementation::callbacks_notify_t sNotify_3_4; + + class ResultBatcher_3_4 : public V3_3::implementation::CameraDeviceSession::ResultBatcher { + public: + ResultBatcher_3_4(const sp& callback); + void processCaptureResult_3_4(CaptureResult& result); + private: + void freeReleaseFences_3_4(hidl_vec&); + void processOneCaptureResult_3_4(CaptureResult& result); + void invokeProcessCaptureResultCallback_3_4(hidl_vec &results, + bool tryWriteFmq); + + sp mCallback_3_4; + } mResultBatcher_3_4; + + // Whether this camera device session is created with version 3.4 callback. + bool mHasCallback_3_4; + + // Physical camera ids for the logical multi-camera. Empty if this + // is not a logical multi-camera. + std::unordered_set mPhysicalCameraIds; + + Mutex mStreamConfigCounterLock; + uint32_t mStreamConfigCounter = 1; + +private: + + struct TrampolineSessionInterface_3_4 : public ICameraDeviceSession { + TrampolineSessionInterface_3_4(sp parent) : + mParent(parent) {} + + virtual Return constructDefaultRequestSettings( + V3_2::RequestTemplate type, + V3_3::ICameraDeviceSession::constructDefaultRequestSettings_cb _hidl_cb) override { + return mParent->constructDefaultRequestSettings(type, _hidl_cb); + } + + virtual Return configureStreams( + const V3_2::StreamConfiguration& requestedConfiguration, + V3_3::ICameraDeviceSession::configureStreams_cb _hidl_cb) override { + return mParent->configureStreams(requestedConfiguration, _hidl_cb); + } + + virtual Return processCaptureRequest_3_4(const hidl_vec& requests, + const hidl_vec& cachesToRemove, + ICameraDeviceSession::processCaptureRequest_3_4_cb _hidl_cb) override { + return mParent->processCaptureRequest_3_4(requests, cachesToRemove, _hidl_cb); + } + + virtual Return processCaptureRequest(const hidl_vec& requests, + const hidl_vec& cachesToRemove, + V3_3::ICameraDeviceSession::processCaptureRequest_cb _hidl_cb) override { + return mParent->processCaptureRequest(requests, cachesToRemove, _hidl_cb); + } + + virtual Return getCaptureRequestMetadataQueue( + V3_3::ICameraDeviceSession::getCaptureRequestMetadataQueue_cb _hidl_cb) override { + return mParent->getCaptureRequestMetadataQueue(_hidl_cb); + } + + virtual Return getCaptureResultMetadataQueue( + V3_3::ICameraDeviceSession::getCaptureResultMetadataQueue_cb _hidl_cb) override { + return mParent->getCaptureResultMetadataQueue(_hidl_cb); + } + + virtual Return flush() override { + return mParent->flush(); + } + + virtual Return close() override { + return mParent->close(); + } + + virtual Return configureStreams_3_3( + const V3_2::StreamConfiguration& requestedConfiguration, + configureStreams_3_3_cb _hidl_cb) override { + return mParent->configureStreams_3_3(requestedConfiguration, _hidl_cb); + } + + virtual Return configureStreams_3_4( + const StreamConfiguration& requestedConfiguration, + configureStreams_3_4_cb _hidl_cb) override { + return mParent->configureStreams_3_4(requestedConfiguration, _hidl_cb); + } + + private: + sp mParent; + }; +}; + +} // namespace implementation +} // namespace V3_4 +} // namespace device +} // namespace camera +} // namespace hardware +} // namespace android + +#endif // ANDROID_HARDWARE_CAMERA_DEVICE_V3_4_CAMERADEVICE3SESSION_H diff --git a/camera/device/3.4/default/include/device_v3_4_impl/CameraDevice_3_4.h b/camera/device/3.4/default/include/device_v3_4_impl/CameraDevice_3_4.h new file mode 100644 index 0000000..95ee20e --- /dev/null +++ 
b/camera/device/3.4/default/include/device_v3_4_impl/CameraDevice_3_4.h @@ -0,0 +1,75 @@ +/* + * Copyright (C) 2017 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_HARDWARE_CAMERA_DEVICE_V3_4_CAMERADEVICE_H +#define ANDROID_HARDWARE_CAMERA_DEVICE_V3_4_CAMERADEVICE_H + +#include "utils/Mutex.h" +#include "CameraModule.h" +#include "CameraMetadata.h" +#include "CameraDeviceSession.h" +#include <../../3.2/default/CameraDevice_3_2.h> + +#include +#include +#include + +namespace android { +namespace hardware { +namespace camera { +namespace device { +namespace V3_4 { +namespace implementation { + +using namespace ::android::hardware::camera::device; +using ::android::hardware::camera::common::V1_0::helper::CameraModule; +using ::android::hardware::Return; +using ::android::hardware::Void; +using ::android::hardware::hidl_vec; +using ::android::hardware::hidl_string; +using ::android::sp; + +/* + * The camera device HAL implementation is opened lazily (via the open call) + */ +struct CameraDevice : public V3_2::implementation::CameraDevice { + + // Called by provider HAL. + // Provider HAL must ensure the uniqueness of CameraDevice object per cameraId, or there could + // be multiple CameraDevice trying to access the same physical camera. Also, provider will have + // to keep track of all CameraDevice objects in order to notify CameraDevice when the underlying + // camera is detached. + // Delegates nearly all work to CameraDevice_3_2 + CameraDevice(sp module, + const std::string& cameraId, + const SortedVector>& cameraDeviceNames); + ~CameraDevice(); + +protected: + virtual sp createSession(camera3_device_t*, + const camera_metadata_t* deviceInfo, + const sp&) override; + +}; + +} // namespace implementation +} // namespace V3_4 +} // namespace device +} // namespace camera +} // namespace hardware +} // namespace android + +#endif // ANDROID_HARDWARE_CAMERA_DEVICE_V3_4_CAMERADEVICE_H diff --git a/camera/device/3.4/default/include/ext_device_v3_4_impl/ExternalCameraDeviceSession.h b/camera/device/3.4/default/include/ext_device_v3_4_impl/ExternalCameraDeviceSession.h new file mode 100644 index 0000000..184c16e --- /dev/null +++ b/camera/device/3.4/default/include/ext_device_v3_4_impl/ExternalCameraDeviceSession.h @@ -0,0 +1,465 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef ANDROID_HARDWARE_CAMERA_DEVICE_V3_4_EXTCAMERADEVICESESSION_H +#define ANDROID_HARDWARE_CAMERA_DEVICE_V3_4_EXTCAMERADEVICESESSION_H + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include "CameraMetadata.h" +#include "HandleImporter.h" +#include "Exif.h" +#include "utils/KeyedVector.h" +#include "utils/Mutex.h" +#include "utils/Thread.h" +#include "android-base/unique_fd.h" +#include "ExternalCameraUtils.h" + +namespace android { +namespace hardware { +namespace camera { +namespace device { +namespace V3_4 { +namespace implementation { + +using ::android::hardware::camera::device::V3_2::BufferCache; +using ::android::hardware::camera::device::V3_2::BufferStatus; +using ::android::hardware::camera::device::V3_2::CameraMetadata; +using ::android::hardware::camera::device::V3_2::CaptureRequest; +using ::android::hardware::camera::device::V3_2::CaptureResult; +using ::android::hardware::camera::device::V3_2::ErrorCode; +using ::android::hardware::camera::device::V3_2::ICameraDeviceCallback; +using ::android::hardware::camera::device::V3_2::MsgType; +using ::android::hardware::camera::device::V3_2::NotifyMsg; +using ::android::hardware::camera::device::V3_2::RequestTemplate; +using ::android::hardware::camera::device::V3_2::Stream; +using ::android::hardware::camera::device::V3_4::StreamConfiguration; +using ::android::hardware::camera::device::V3_2::StreamConfigurationMode; +using ::android::hardware::camera::device::V3_2::StreamRotation; +using ::android::hardware::camera::device::V3_2::StreamType; +using ::android::hardware::camera::device::V3_2::DataspaceFlags; +using ::android::hardware::camera::device::V3_2::CameraBlob; +using ::android::hardware::camera::device::V3_2::CameraBlobId; +using ::android::hardware::camera::device::V3_4::HalStreamConfiguration; +using ::android::hardware::camera::device::V3_4::ICameraDeviceSession; +using ::android::hardware::camera::common::V1_0::Status; +using ::android::hardware::camera::common::V1_0::helper::HandleImporter; +using ::android::hardware::camera::common::V1_0::helper::ExifUtils; +using ::android::hardware::camera::external::common::ExternalCameraConfig; +using ::android::hardware::camera::external::common::Size; +using ::android::hardware::camera::external::common::SizeHasher; +using ::android::hardware::graphics::common::V1_0::BufferUsage; +using ::android::hardware::graphics::common::V1_0::Dataspace; +using ::android::hardware::graphics::common::V1_0::PixelFormat; +using ::android::hardware::kSynchronizedReadWrite; +using ::android::hardware::MessageQueue; +using ::android::hardware::MQDescriptorSync; +using ::android::hardware::Return; +using ::android::hardware::Void; +using ::android::hardware::hidl_vec; +using ::android::hardware::hidl_string; +using ::android::sp; +using ::android::Mutex; +using ::android::base::unique_fd; + +struct ExternalCameraDeviceSession : public virtual RefBase, + public virtual OutputThreadInterface { + + ExternalCameraDeviceSession(const sp&, + const ExternalCameraConfig& cfg, + const std::vector& sortedFormats, + const CroppingType& croppingType, + const common::V1_0::helper::CameraMetadata& chars, + const std::string& cameraId, + unique_fd v4l2Fd); + virtual ~ExternalCameraDeviceSession(); + // Call by CameraDevice to dump active device states + void dumpState(const native_handle_t*); + // Caller must use this method to check if CameraDeviceSession ctor failed + bool isInitFailed(); + bool isClosed(); + + // Retrieve the HIDL 
interface, split into its own class to avoid inheritance issues when + // dealing with minor version revs and simultaneous implementation and interface inheritance + virtual sp getInterface() { + return new TrampolineSessionInterface_3_4(this); + } + + static const int kMaxProcessedStream = 2; + static const int kMaxStallStream = 1; + static const uint32_t kMaxBytesPerPixel = 2; + + class OutputThread : public android::Thread { + public: + OutputThread(wp parent, CroppingType, + const common::V1_0::helper::CameraMetadata&); + virtual ~OutputThread(); + + Status allocateIntermediateBuffers( + const Size& v4lSize, const Size& thumbSize, + const hidl_vec& streams, + uint32_t blobBufferSize); + Status submitRequest(const std::shared_ptr&); + void flush(); + void dump(int fd); + virtual bool threadLoop() override; + + void setExifMakeModel(const std::string& make, const std::string& model); + + // The remaining request list is returned for offline processing + std::list> switchToOffline(); + + protected: + // Methods to request output buffer in parallel + // No-op for device@3.4. Implemented in device@3.5 + virtual int requestBufferStart(const std::vector&) { return 0; } + virtual int waitForBufferRequestDone( + /*out*/std::vector*) { return 0; } + + static const int kFlushWaitTimeoutSec = 3; // 3 sec + static const int kReqWaitTimeoutMs = 33; // 33ms + static const int kReqWaitTimesMax = 90; // 33ms * 90 ~= 3 sec + + void waitForNextRequest(std::shared_ptr* out); + void signalRequestDone(); + + int cropAndScaleLocked( + sp& in, const Size& outSize, + YCbCrLayout* out); + + int cropAndScaleThumbLocked( + sp& in, const Size& outSize, + YCbCrLayout* out); + + int createJpegLocked(HalStreamBuffer &halBuf, + const common::V1_0::helper::CameraMetadata& settings); + + void clearIntermediateBuffers(); + + const wp mParent; + const CroppingType mCroppingType; + const common::V1_0::helper::CameraMetadata mCameraCharacteristics; + + mutable std::mutex mRequestListLock; // Protect acccess to mRequestList, + // mProcessingRequest and mProcessingFrameNumer + std::condition_variable mRequestCond; // signaled when a new request is submitted + std::condition_variable mRequestDoneCond; // signaled when a request is done processing + std::list> mRequestList; + bool mProcessingRequest = false; + uint32_t mProcessingFrameNumer = 0; + + // V4L2 frameIn + // (MJPG decode)-> mYu12Frame + // (Scale)-> mScaledYu12Frames + // (Format convert) -> output gralloc frames + mutable std::mutex mBufferLock; // Protect access to intermediate buffers + sp mYu12Frame; + sp mYu12ThumbFrame; + std::unordered_map, SizeHasher> mIntermediateBuffers; + std::unordered_map, SizeHasher> mScaledYu12Frames; + YCbCrLayout mYu12FrameLayout; + YCbCrLayout mYu12ThumbFrameLayout; + std::vector mMuteTestPatternFrame; + uint32_t mTestPatternData[4] = {0, 0, 0, 0}; + bool mCameraMuted = false; + uint32_t mBlobBufferSize = 0; // 0 -> HAL derive buffer size, else: use given size + + std::string mExifMake; + std::string mExifModel; + }; + +protected: + + // Methods from ::android::hardware::camera::device::V3_2::ICameraDeviceSession follow + + Return constructDefaultRequestSettings( + RequestTemplate, + ICameraDeviceSession::constructDefaultRequestSettings_cb _hidl_cb); + + Return configureStreams( + const V3_2::StreamConfiguration&, + ICameraDeviceSession::configureStreams_cb); + + Return getCaptureRequestMetadataQueue( + ICameraDeviceSession::getCaptureRequestMetadataQueue_cb); + + Return getCaptureResultMetadataQueue( + 
ICameraDeviceSession::getCaptureResultMetadataQueue_cb); + + Return processCaptureRequest( + const hidl_vec&, + const hidl_vec&, + ICameraDeviceSession::processCaptureRequest_cb); + + Return flush(); + Return close(bool callerIsDtor = false); + + Return configureStreams_3_3( + const V3_2::StreamConfiguration&, + ICameraDeviceSession::configureStreams_3_3_cb); + + Return configureStreams_3_4( + const V3_4::StreamConfiguration& requestedConfiguration, + ICameraDeviceSession::configureStreams_3_4_cb _hidl_cb); + + Return processCaptureRequest_3_4( + const hidl_vec& requests, + const hidl_vec& cachesToRemove, + ICameraDeviceSession::processCaptureRequest_3_4_cb _hidl_cb); + +protected: + // Methods from OutputThreadInterface + virtual Status importBuffer(int32_t streamId, + uint64_t bufId, buffer_handle_t buf, + /*out*/buffer_handle_t** outBufPtr, + bool allowEmptyBuf) override; + + virtual Status processCaptureResult(std::shared_ptr&) override; + + virtual Status processCaptureRequestError(const std::shared_ptr&, + /*out*/std::vector* msgs = nullptr, + /*out*/std::vector* results = nullptr) override; + + virtual ssize_t getJpegBufferSize(uint32_t width, uint32_t height) const override; + + virtual void notifyError(uint32_t frameNumber, int32_t streamId, ErrorCode ec) override; + // End of OutputThreadInterface methods + + Status constructDefaultRequestSettingsRaw(RequestTemplate type, + V3_2::CameraMetadata *outMetadata); + + bool initialize(); + // To init/close different version of output thread + virtual void initOutputThread(); + virtual void closeOutputThread(); + void closeOutputThreadImpl(); + + Status initStatus() const; + status_t initDefaultRequests(); + status_t fillCaptureResult(common::V1_0::helper::CameraMetadata& md, nsecs_t timestamp); + Status configureStreams(const V3_2::StreamConfiguration&, + V3_3::HalStreamConfiguration* out, + // Only filled by configureStreams_3_4, and only one blob stream supported + uint32_t blobBufferSize = 0); + // fps = 0.0 means default, which is + // slowest fps that is at least 30, or fastest fps if 30 is not supported + int configureV4l2StreamLocked(const SupportedV4L2Format& fmt, double fps = 0.0); + int v4l2StreamOffLocked(); + int setV4l2FpsLocked(double fps); + static Status isStreamCombinationSupported(const V3_2::StreamConfiguration& config, + const std::vector& supportedFormats, + const ExternalCameraConfig& devCfg); + + // TODO: change to unique_ptr for better tracking + sp dequeueV4l2FrameLocked(/*out*/nsecs_t* shutterTs); // Called with mLock hold + void enqueueV4l2Frame(const sp&); + + // Check if input Stream is one of supported stream setting on this device + static bool isSupported(const Stream& stream, + const std::vector& supportedFormats, + const ExternalCameraConfig& cfg); + + // Validate and import request's output buffers and acquire fence + virtual Status importRequestLocked( + const CaptureRequest& request, + hidl_vec& allBufPtrs, + hidl_vec& allFences); + + Status importRequestLockedImpl( + const CaptureRequest& request, + hidl_vec& allBufPtrs, + hidl_vec& allFences, + // Optional argument for ICameraDeviceSession@3.5 impl + bool allowEmptyBuf = false); + + Status importBufferLocked(int32_t streamId, + uint64_t bufId, buffer_handle_t buf, + /*out*/buffer_handle_t** outBufPtr, + bool allowEmptyBuf); + + static void cleanupInflightFences( + hidl_vec& allFences, size_t numFences); + void cleanupBuffersLocked(int id); + void updateBufferCaches(const hidl_vec& cachesToRemove); + + Status processOneCaptureRequest(const 
CaptureRequest& request); + + void notifyShutter(uint32_t frameNumber, nsecs_t shutterTs); + void invokeProcessCaptureResultCallback( + hidl_vec &results, bool tryWriteFmq); + + Size getMaxJpegResolution() const; + Size getMaxThumbResolution() const; + + int waitForV4L2BufferReturnLocked(std::unique_lock& lk); + + // Protect (most of) HIDL interface methods from synchronized-entering + mutable Mutex mInterfaceLock; + + mutable Mutex mLock; // Protect all private members except otherwise noted + const sp mCallback; + const ExternalCameraConfig& mCfg; + const common::V1_0::helper::CameraMetadata mCameraCharacteristics; + const std::vector mSupportedFormats; + const CroppingType mCroppingType; + const std::string mCameraId; + + // Not protected by mLock, this is almost a const. + // Setup in constructor, reset in close() after OutputThread is joined + unique_fd mV4l2Fd; + + // device is closed either + // - closed by user + // - init failed + // - camera disconnected + bool mClosed = false; + bool mInitialized = false; + bool mInitFail = false; + bool mFirstRequest = false; + common::V1_0::helper::CameraMetadata mLatestReqSetting; + + bool mV4l2Streaming = false; + SupportedV4L2Format mV4l2StreamingFmt; + double mV4l2StreamingFps = 0.0; + size_t mV4L2BufferCount = 0; + + static const int kBufferWaitTimeoutSec = 3; // TODO: handle long exposure (or not allowing) + std::mutex mV4l2BufferLock; // protect the buffer count and condition below + std::condition_variable mV4L2BufferReturned; + size_t mNumDequeuedV4l2Buffers = 0; + uint32_t mMaxV4L2BufferSize = 0; + + // Not protected by mLock (but might be used when mLock is locked) + sp mOutputThread; + + // Stream ID -> Camera3Stream cache + std::unordered_map mStreamMap; + + std::mutex mInflightFramesLock; // protect mInflightFrames + std::unordered_set mInflightFrames; + + // Stream ID -> circulating buffers map + std::map mCirculatingBuffers; + // Protect mCirculatingBuffers, must not lock mLock after acquiring this lock + mutable Mutex mCbsLock; + + std::mutex mAfTriggerLock; // protect mAfTrigger + bool mAfTrigger = false; + + uint32_t mBlobBufferSize = 0; + + static HandleImporter sHandleImporter; + + /* Beginning of members not changed after initialize() */ + using RequestMetadataQueue = MessageQueue; + std::unique_ptr mRequestMetadataQueue; + using ResultMetadataQueue = MessageQueue; + std::shared_ptr mResultMetadataQueue; + + // Protect against invokeProcessCaptureResultCallback() + Mutex mProcessCaptureResultLock; + + std::unordered_map mDefaultRequests; + + const Size mMaxThumbResolution; + const Size mMaxJpegResolution; + + std::string mExifMake; + std::string mExifModel; + /* End of members not changed after initialize() */ + +private: + + struct TrampolineSessionInterface_3_4 : public ICameraDeviceSession { + TrampolineSessionInterface_3_4(sp parent) : + mParent(parent) {} + + virtual Return constructDefaultRequestSettings( + RequestTemplate type, + V3_3::ICameraDeviceSession::constructDefaultRequestSettings_cb _hidl_cb) override { + return mParent->constructDefaultRequestSettings(type, _hidl_cb); + } + + virtual Return configureStreams( + const V3_2::StreamConfiguration& requestedConfiguration, + V3_3::ICameraDeviceSession::configureStreams_cb _hidl_cb) override { + return mParent->configureStreams(requestedConfiguration, _hidl_cb); + } + + virtual Return processCaptureRequest(const hidl_vec& requests, + const hidl_vec& cachesToRemove, + V3_3::ICameraDeviceSession::processCaptureRequest_cb _hidl_cb) override { + return 
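+            // Each trampoline override forwards unchanged to the owning
+            // session (mParent); the wrapper only exists so the session can
+            // expose the @3.4 interface without multiple interface
+            // inheritance across minor HIDL versions.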
mParent->processCaptureRequest(requests, cachesToRemove, _hidl_cb); + } + + virtual Return getCaptureRequestMetadataQueue( + V3_3::ICameraDeviceSession::getCaptureRequestMetadataQueue_cb _hidl_cb) override { + return mParent->getCaptureRequestMetadataQueue(_hidl_cb); + } + + virtual Return getCaptureResultMetadataQueue( + V3_3::ICameraDeviceSession::getCaptureResultMetadataQueue_cb _hidl_cb) override { + return mParent->getCaptureResultMetadataQueue(_hidl_cb); + } + + virtual Return flush() override { + return mParent->flush(); + } + + virtual Return close() override { + return mParent->close(); + } + + virtual Return configureStreams_3_3( + const V3_2::StreamConfiguration& requestedConfiguration, + configureStreams_3_3_cb _hidl_cb) override { + return mParent->configureStreams_3_3(requestedConfiguration, _hidl_cb); + } + + virtual Return configureStreams_3_4( + const V3_4::StreamConfiguration& requestedConfiguration, + configureStreams_3_4_cb _hidl_cb) override { + return mParent->configureStreams_3_4(requestedConfiguration, _hidl_cb); + } + + virtual Return processCaptureRequest_3_4(const hidl_vec& requests, + const hidl_vec& cachesToRemove, + ICameraDeviceSession::processCaptureRequest_3_4_cb _hidl_cb) override { + return mParent->processCaptureRequest_3_4(requests, cachesToRemove, _hidl_cb); + } + + private: + sp mParent; + }; +}; + +} // namespace implementation +} // namespace V3_4 +} // namespace device +} // namespace camera +} // namespace hardware +} // namespace android + +#endif // ANDROID_HARDWARE_CAMERA_DEVICE_V3_4_EXTCAMERADEVICESESSION_H diff --git a/camera/device/3.4/default/include/ext_device_v3_4_impl/ExternalCameraDevice_3_4.h b/camera/device/3.4/default/include/ext_device_v3_4_impl/ExternalCameraDevice_3_4.h new file mode 100644 index 0000000..88726f4 --- /dev/null +++ b/camera/device/3.4/default/include/ext_device_v3_4_impl/ExternalCameraDevice_3_4.h @@ -0,0 +1,247 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef ANDROID_HARDWARE_CAMERA_DEVICE_V3_4_EXTCAMERADEVICE_H +#define ANDROID_HARDWARE_CAMERA_DEVICE_V3_4_EXTCAMERADEVICE_H + +#include "utils/Mutex.h" +#include "CameraMetadata.h" + +#include +#include +#include +#include "ExternalCameraDeviceSession.h" + +#include + +namespace android { +namespace hardware { +namespace camera { +namespace device { +namespace V3_4 { +namespace implementation { + +using namespace ::android::hardware::camera::device; +using ::android::hardware::camera::device::V3_2::ICameraDevice; +using ::android::hardware::camera::device::V3_2::ICameraDeviceCallback; +using ::android::hardware::camera::common::V1_0::CameraResourceCost; +using ::android::hardware::camera::common::V1_0::TorchMode; +using ::android::hardware::camera::common::V1_0::Status; +using ::android::hardware::camera::external::common::ExternalCameraConfig; +using ::android::hardware::camera::external::common::Size; +using ::android::hardware::Return; +using ::android::hardware::Void; +using ::android::hardware::hidl_vec; +using ::android::hardware::hidl_string; +using ::android::sp; + +/* + * The camera device HAL implementation is opened lazily (via the open call) + */ +struct ExternalCameraDevice : public virtual RefBase { + + // Called by external camera provider HAL. + // Provider HAL must ensure the uniqueness of CameraDevice object per cameraId, or there could + // be multiple CameraDevice trying to access the same physical camera. Also, provider will have + // to keep track of all CameraDevice objects in order to notify CameraDevice when the underlying + // camera is detached. + ExternalCameraDevice(const std::string& cameraId, const ExternalCameraConfig& cfg); + virtual ~ExternalCameraDevice(); + + // Retrieve the HIDL interface, split into its own class to avoid inheritance issues when + // dealing with minor version revs and simultaneous implementation and interface inheritance + virtual sp getInterface() { + return new TrampolineDeviceInterface_3_4(this); + } + + // Caller must use this method to check if CameraDevice ctor failed + bool isInitFailed(); + bool isInitFailedLocked(); + + /* Methods from ::android::hardware::camera::device::V3_2::ICameraDevice follow. */ + // The following method can be called without opening the actual camera device + Return getResourceCost(ICameraDevice::getResourceCost_cb _hidl_cb); + + Return getCameraCharacteristics( + ICameraDevice::getCameraCharacteristics_cb _hidl_cb); + + Return setTorchMode(TorchMode); + + // Open the device HAL and also return a default capture session + Return open(const sp&, ICameraDevice::open_cb); + + // Forward the dump call to the opened session, or do nothing + Return dumpState(const ::android::hardware::hidl_handle&); + /* End of Methods from ::android::hardware::camera::device::V3_2::ICameraDevice */ + +protected: + // Overridden by child implementations for returning different versions of + // ExternalCameraDeviceSession + virtual sp createSession( + const sp&, + const ExternalCameraConfig& cfg, + const std::vector& sortedFormats, + const CroppingType& croppingType, + const common::V1_0::helper::CameraMetadata& chars, + const std::string& cameraId, + unique_fd v4l2Fd); + + // Init supported w/h/format/fps in mSupportedFormats. Caller still owns fd + void initSupportedFormatsLocked(int fd); + + // Calls into virtual member function. 
Do not use it in constructor + status_t initCameraCharacteristics(); + // Init available capabilities keys + virtual status_t initAvailableCapabilities( + ::android::hardware::camera::common::V1_0::helper::CameraMetadata*); + // Init non-device dependent keys + virtual status_t initDefaultCharsKeys( + ::android::hardware::camera::common::V1_0::helper::CameraMetadata*); + // Init camera control chars keys. Caller still owns fd + status_t initCameraControlsCharsKeys(int fd, + ::android::hardware::camera::common::V1_0::helper::CameraMetadata*); + // Init camera output configuration related keys. Caller still owns fd + status_t initOutputCharsKeys(int fd, + ::android::hardware::camera::common::V1_0::helper::CameraMetadata*); + + // Helper function for initOutputCharskeys + template + status_t initOutputCharskeysByFormat( + ::android::hardware::camera::common::V1_0::helper::CameraMetadata*, + uint32_t fourcc, const std::array& formats, + int scaler_stream_config_tag, + int stream_configuration, int min_frame_duration, int stall_duration); + + bool calculateMinFps(::android::hardware::camera::common::V1_0::helper::CameraMetadata*); + + static void getFrameRateList(int fd, double fpsUpperBound, SupportedV4L2Format* format); + + static void updateFpsBounds(int fd, CroppingType cropType, + const std::vector& fpsLimits, + SupportedV4L2Format format, + std::vector& outFmts); + + // Get candidate supported formats list of input cropping type. + static std::vector getCandidateSupportedFormatsLocked( + int fd, CroppingType cropType, + const std::vector& fpsLimits, + const std::vector& depthFpsLimits, + const Size& minStreamSize, + bool depthEnabled); + // Trim supported format list by the cropping type. Also sort output formats by width/height + static void trimSupportedFormats(CroppingType cropType, + /*inout*/std::vector* pFmts); + + Mutex mLock; + bool mInitialized = false; + bool mInitFailed = false; + std::string mCameraId; + std::string mDevicePath; + const ExternalCameraConfig& mCfg; + std::vector mSupportedFormats; + CroppingType mCroppingType; + + wp mSession = nullptr; + + ::android::hardware::camera::common::V1_0::helper::CameraMetadata mCameraCharacteristics; + + const std::vector AVAILABLE_CHARACTERISTICS_KEYS_3_4 = { + ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES, + ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES, + ANDROID_CONTROL_AE_AVAILABLE_MODES, + ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES, + ANDROID_CONTROL_AE_COMPENSATION_RANGE, + ANDROID_CONTROL_AE_COMPENSATION_STEP, + ANDROID_CONTROL_AE_LOCK_AVAILABLE, + ANDROID_CONTROL_AF_AVAILABLE_MODES, + ANDROID_CONTROL_AVAILABLE_EFFECTS, + ANDROID_CONTROL_AVAILABLE_MODES, + ANDROID_CONTROL_AVAILABLE_SCENE_MODES, + ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES, + ANDROID_CONTROL_AWB_AVAILABLE_MODES, + ANDROID_CONTROL_AWB_LOCK_AVAILABLE, + ANDROID_CONTROL_MAX_REGIONS, + ANDROID_FLASH_INFO_AVAILABLE, + ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL, + ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES, + ANDROID_LENS_FACING, + ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION, + ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION, + ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES, + ANDROID_REQUEST_AVAILABLE_CAPABILITIES, + ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS, + ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, + ANDROID_REQUEST_PARTIAL_RESULT_COUNT, + ANDROID_REQUEST_PIPELINE_MAX_DEPTH, + ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM, + ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, + ANDROID_SCALER_CROPPING_TYPE, + ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE, + 
ANDROID_SENSOR_INFO_MAX_FRAME_DURATION, + ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE, + ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE, + ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE, + ANDROID_SENSOR_ORIENTATION, + ANDROID_SHADING_AVAILABLE_MODES, + ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES, + ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES, + ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES, + ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, + ANDROID_SYNC_MAX_LATENCY}; + +private: + + struct TrampolineDeviceInterface_3_4 : public ICameraDevice { + TrampolineDeviceInterface_3_4(sp parent) : + mParent(parent) {} + + virtual Return getResourceCost(V3_2::ICameraDevice::getResourceCost_cb _hidl_cb) + override { + return mParent->getResourceCost(_hidl_cb); + } + + virtual Return getCameraCharacteristics( + V3_2::ICameraDevice::getCameraCharacteristics_cb _hidl_cb) override { + return mParent->getCameraCharacteristics(_hidl_cb); + } + + virtual Return setTorchMode(TorchMode mode) override { + return mParent->setTorchMode(mode); + } + + virtual Return open(const sp& callback, + V3_2::ICameraDevice::open_cb _hidl_cb) override { + return mParent->open(callback, _hidl_cb); + } + + virtual Return dumpState(const hidl_handle& fd) override { + return mParent->dumpState(fd); + } + + private: + sp mParent; + }; + +}; + +} // namespace implementation +} // namespace V3_4 +} // namespace device +} // namespace camera +} // namespace hardware +} // namespace android + +#endif // ANDROID_HARDWARE_CAMERA_DEVICE_V3_4_EXTCAMERADEVICE_H diff --git a/camera/device/3.4/default/include/ext_device_v3_4_impl/ExternalCameraUtils.h b/camera/device/3.4/default/include/ext_device_v3_4_impl/ExternalCameraUtils.h new file mode 100644 index 0000000..b354406 --- /dev/null +++ b/camera/device/3.4/default/include/ext_device_v3_4_impl/ExternalCameraUtils.h @@ -0,0 +1,303 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef ANDROID_HARDWARE_CAMERA_DEVICE_V3_4_EXTCAMUTIL_H +#define ANDROID_HARDWARE_CAMERA_DEVICE_V3_4_EXTCAMUTIL_H + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include "tinyxml2.h" // XML parsing +#include "utils/LightRefBase.h" +#include "utils/Timers.h" +#include +#include + + +using ::android::hardware::graphics::mapper::V2_0::IMapper; +using ::android::hardware::graphics::mapper::V2_0::YCbCrLayout; +using ::android::hardware::camera::common::V1_0::helper::HandleImporter; +using ::android::hardware::camera::common::V1_0::Status; +using ::android::hardware::camera::device::V3_2::ErrorCode; + +namespace android { +namespace hardware { +namespace camera { + +namespace external { +namespace common { + +struct Size { + uint32_t width; + uint32_t height; + + bool operator==(const Size& other) const { + return (width == other.width && height == other.height); + } +}; + +struct SizeHasher { + size_t operator()(const Size& sz) const { + size_t result = 1; + result = 31 * result + sz.width; + result = 31 * result + sz.height; + return result; + } +}; + +struct ExternalCameraConfig { + static const char* kDefaultCfgPath; + static ExternalCameraConfig loadFromCfg(const char* cfgPath = kDefaultCfgPath); + + // CameraId base offset for numerical representation + uint32_t cameraIdOffset; + + // List of internal V4L2 video nodes external camera HAL must ignore. + std::unordered_set mInternalDevices; + + // Maximal size of a JPEG buffer, in bytes + uint32_t maxJpegBufSize; + + // Maximum Size that can sustain 30fps streaming + Size maxVideoSize; + + // Size of v4l2 buffer queue when streaming <= kMaxVideoSize + uint32_t numVideoBuffers; + + // Size of v4l2 buffer queue when streaming > kMaxVideoSize + uint32_t numStillBuffers; + + // Indication that the device connected supports depth output + bool depthEnabled; + + struct FpsLimitation { + Size size; + double fpsUpperBound; + }; + std::vector fpsLimits; + std::vector depthFpsLimits; + + // Minimum output stream size + Size minStreamSize; + + // The value of android.sensor.orientation + int32_t orientation; + +private: + ExternalCameraConfig(); + static bool updateFpsList(tinyxml2::XMLElement* fpsList, std::vector& fpsLimits); +}; + +} // common +} // external + +namespace device { +namespace V3_4 { +namespace implementation { + +struct SupportedV4L2Format { + uint32_t width; + uint32_t height; + uint32_t fourcc; + // All supported frame rate for this w/h/fourcc combination + struct FrameRate { + uint32_t durationNumerator; // frame duration numerator. Ex: 1 + uint32_t durationDenominator; // frame duration denominator. Ex: 30 + double getDouble() const; // FrameRate in double. 
Ex: 30.0 + }; + std::vector frameRates; +}; + +// A Base class with basic information about a frame +struct Frame : public VirtualLightRefBase { +public: + Frame(uint32_t width, uint32_t height, uint32_t fourcc); + const uint32_t mWidth; + const uint32_t mHeight; + const uint32_t mFourcc; + + // getData might involve map/allocation + virtual int getData(uint8_t** outData, size_t* dataSize) = 0; +}; + +// A class provide access to a dequeued V4L2 frame buffer (mostly in MJPG format) +// Also contains necessary information to enqueue the buffer back to V4L2 buffer queue +class V4L2Frame : public Frame { +public: + V4L2Frame(uint32_t w, uint32_t h, uint32_t fourcc, int bufIdx, int fd, + uint32_t dataSize, uint64_t offset); + ~V4L2Frame() override; + + virtual int getData(uint8_t** outData, size_t* dataSize) override; + + const int mBufferIndex; // for later enqueue + int map(uint8_t** data, size_t* dataSize); + int unmap(); +private: + std::mutex mLock; + const int mFd; // used for mmap but doesn't claim ownership + const size_t mDataSize; + const uint64_t mOffset; // used for mmap + uint8_t* mData = nullptr; + bool mMapped = false; +}; + +// A RAII class representing a CPU allocated YUV frame used as intermeidate buffers +// when generating output images. +class AllocatedFrame : public Frame { +public: + AllocatedFrame(uint32_t w, uint32_t h); // only support V4L2_PIX_FMT_YUV420 for now + ~AllocatedFrame() override; + + virtual int getData(uint8_t** outData, size_t* dataSize) override; + + int allocate(YCbCrLayout* out = nullptr); + int getLayout(YCbCrLayout* out); + int getCroppedLayout(const IMapper::Rect&, YCbCrLayout* out); // return non-zero for bad input +private: + std::mutex mLock; + std::vector mData; +}; + +enum CroppingType { + HORIZONTAL = 0, + VERTICAL = 1 +}; + +// Aspect ratio is defined as width/height here and ExternalCameraDevice +// will guarantee all supported sizes has width >= height (so aspect ratio >= 1.0) +#define ASPECT_RATIO(sz) (static_cast((sz).width) / (sz).height) +const float kMaxAspectRatio = std::numeric_limits::max(); +const float kMinAspectRatio = 1.f; + +bool isAspectRatioClose(float ar1, float ar2); + +struct HalStreamBuffer { + int32_t streamId; + uint64_t bufferId; + uint32_t width; + uint32_t height; + ::android::hardware::graphics::common::V1_0::PixelFormat format; + ::android::hardware::camera::device::V3_2::BufferUsageFlags usage; + buffer_handle_t* bufPtr; + int acquireFence; + bool fenceTimeout; +}; + +struct HalRequest { + uint32_t frameNumber; + common::V1_0::helper::CameraMetadata setting; + sp frameIn; + nsecs_t shutterTs; + std::vector buffers; +}; + +static const uint64_t BUFFER_ID_NO_BUFFER = 0; + +// buffers currently circulating between HAL and camera service +// key: bufferId sent via HIDL interface +// value: imported buffer_handle_t +// Buffer will be imported during processCaptureRequest (or requestStreamBuffer +// in the case of HAL buffer manager is enabled) and will be freed +// when the stream is deleted or camera device session is closed +typedef std::unordered_map CirculatingBuffers; + +::android::hardware::camera::common::V1_0::Status importBufferImpl( + /*inout*/std::map& circulatingBuffers, + /*inout*/HandleImporter& handleImporter, + int32_t streamId, + uint64_t bufId, buffer_handle_t buf, + /*out*/buffer_handle_t** outBufPtr, + bool allowEmptyBuf); + +static const uint32_t FLEX_YUV_GENERIC = static_cast('F') | + static_cast('L') << 8 | static_cast('E') << 16 | + static_cast('X') << 24; + +// returns FLEX_YUV_GENERIC for 
formats other than YV12/YU12/NV12/NV21 +uint32_t getFourCcFromLayout(const YCbCrLayout&); + +using ::android::hardware::camera::external::common::Size; +int getCropRect(CroppingType ct, const Size& inSize, + const Size& outSize, IMapper::Rect* out); + +int formatConvert(const YCbCrLayout& in, const YCbCrLayout& out, Size sz, uint32_t format); + +int encodeJpegYU12(const Size &inSz, + const YCbCrLayout& inLayout, int jpegQuality, + const void *app1Buffer, size_t app1Size, + void *out, size_t maxOutSize, + size_t &actualCodeSize); + +Size getMaxThumbnailResolution(const common::V1_0::helper::CameraMetadata&); + +void freeReleaseFences(hidl_vec&); + +status_t fillCaptureResultCommon(common::V1_0::helper::CameraMetadata& md, nsecs_t timestamp, + camera_metadata_ro_entry& activeArraySize); + +// Interface for OutputThread calling back to parent +struct OutputThreadInterface : public virtual RefBase { + virtual ::android::hardware::camera::common::V1_0::Status importBuffer( + int32_t streamId, uint64_t bufId, buffer_handle_t buf, + /*out*/buffer_handle_t** outBufPtr, bool allowEmptyBuf) = 0; + + virtual void notifyError(uint32_t frameNumber, int32_t streamId, ErrorCode ec) = 0; + + // Callbacks are fired within the method if msgs/results are nullptr. + // Otherwise the callbacks will be returned and caller is responsible to + // fire the callback later + virtual ::android::hardware::camera::common::V1_0::Status processCaptureRequestError( + const std::shared_ptr&, + /*out*/std::vector* msgs = nullptr, + /*out*/std::vector* results = nullptr) = 0; + + virtual ::android::hardware::camera::common::V1_0::Status processCaptureResult( + std::shared_ptr&) = 0; + + virtual ssize_t getJpegBufferSize(uint32_t width, uint32_t height) const = 0; +}; + +} // namespace implementation +} // namespace V3_4 + +namespace V3_6 { +namespace implementation { + +// A CPU copy of a mapped V4L2Frame. Will map the input V4L2 frame. +class AllocatedV4L2Frame : public V3_4::implementation::Frame { +public: + AllocatedV4L2Frame(sp frameIn); + ~AllocatedV4L2Frame() override; + virtual int getData(uint8_t** outData, size_t* dataSize) override; +private: + std::vector mData; +}; + +} // namespace implementation +} // namespace V3_6 +} // namespace device +} // namespace camera +} // namespace hardware +} // namespace android + +#endif // ANDROID_HARDWARE_CAMERA_DEVICE_V3_4_EXTCAMUTIL_H diff --git a/camera/device/3.4/types.hal b/camera/device/3.4/types.hal new file mode 100644 index 0000000..85b3f7d --- /dev/null +++ b/camera/device/3.4/types.hal @@ -0,0 +1,355 @@ +/* + * Copyright (C) 2017 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package android.hardware.camera.device@3.4; + +import @3.2::RequestTemplate; +import @3.2::StreamConfigurationMode; +import @3.2::Stream; +import @3.3::HalStream; +import @3.2::CameraMetadata; +import @3.2::CaptureRequest; +import @3.2::CaptureResult; + +/** + * Stream: + * + * A descriptor for a single camera input or output stream. 
A stream is defined + * by the framework by its buffer resolution and format, and additionally by the + * HAL with the gralloc usage flags and the maximum in-flight buffer count. + * + * This version extends the @3.2 Stream with the physicalCameraId and bufferSize field. + */ +struct Stream { + /** + * The definition of Stream from the prior version + */ + @3.2::Stream v3_2; + + /** + * The physical camera id this stream belongs to. + * + * If the camera device is not a logical multi camera, or if the camera is a logical + * multi camera but the stream is not a physical output stream, this field will point to a + * 0-length string. + * + * A logical multi camera is a camera device backed by multiple physical cameras that + * are also exposed to the application. And for a logical multi camera, a physical output + * stream is an output stream specifically requested on an underlying physical camera. + * + * A logical camera is a camera device backed by multiple physical camera + * devices. And a physical stream is a stream specifically requested on a + * underlying physical camera device. + * + * For an input stream, this field is guaranteed to be a 0-length string. + * + * When not empty, this field is the field of one of the full-qualified device + * instance names returned by getCameraIdList(). + */ + string physicalCameraId; + + /** + * The size of a buffer from this Stream, in bytes. + * + * For non PixelFormat::BLOB formats, this entry must be 0 and HAL should use + * android.hardware.graphics.mapper lockYCbCr API to get buffer layout. + * + * For BLOB format with dataSpace Dataspace::DEPTH, this must be zero and and HAL must + * determine the buffer size based on ANDROID_DEPTH_MAX_DEPTH_SAMPLES. + * + * For BLOB format with dataSpace Dataspace::JFIF, this must be non-zero and represent the + * maximal size HAL can lock using android.hardware.graphics.mapper lock API. + * + */ + uint32_t bufferSize; +}; + +/** + * StreamConfiguration: + * + * Identical to @3.2::StreamConfiguration, except that it contains session + * parameters, and the streams vector contains @3.4::Stream. + */ +struct StreamConfiguration { + /** + * An array of camera stream pointers, defining the input/output + * configuration for the camera HAL device. + */ + vec streams; + + /** + * The definition of operation mode from prior version. + * + */ + StreamConfigurationMode operationMode; + + /** + * Session wide camera parameters. + * + * The session parameters contain the initial values of any request keys that were + * made available via ANDROID_REQUEST_AVAILABLE_SESSION_KEYS. The Hal implementation + * can advertise any settings that can potentially introduce unexpected delays when + * their value changes during active process requests. Typical examples are + * parameters that trigger time-consuming HW re-configurations or internal camera + * pipeline updates. The field is optional, clients can choose to ignore it and avoid + * including any initial settings. If parameters are present, then hal must examine + * their values and configure the internal camera pipeline accordingly. + */ + CameraMetadata sessionParams; +}; + +/** + * HalStream: + * + * The camera HAL's response to each requested stream configuration. + * + * This version extends the @3.3 HalStream with the physicalCameraId + * field + */ +struct HalStream { + /** + * The definition of HalStream from the prior version. + */ + @3.3::HalStream v3_3; + + /** + * The physical camera id the current Hal stream belongs to. 
+ * + * If current camera device isn't a logical camera, or the Hal stream isn't + * from a physical camera of the logical camera, this must be an empty + * string. + * + * A logical camera is a camera device backed by multiple physical camera + * devices. + * + * When not empty, this field is the field of one of the full-qualified device + * instance names returned by getCameraIdList(). + */ + string physicalCameraId; +}; + +/** + * HalStreamConfiguration: + * + * Identical to @3.3::HalStreamConfiguration, except that it contains @3.4::HalStream entries. + * + */ +struct HalStreamConfiguration { + vec streams; +}; + +/** + * PhysicalCameraSetting: + * + * Individual camera settings for logical camera backed by multiple physical devices. + * Clients are allowed to pass separate settings for each physical device that has + * corresponding configured HalStream and the respective stream id is present in the + * output buffers of the capture request. + */ +struct PhysicalCameraSetting { + /** + * If non-zero, read settings from request queue instead + * (see ICameraDeviceSession.getCaptureRequestMetadataQueue). + * If zero, read settings from .settings field. + * + * The v3_2 settings metadata is read first from the FMQ, followed by + * the physical cameras' settings metadata starting from index 0. + */ + uint64_t fmqSettingsSize; + + /** + * Contains the physical device camera id. Any settings passed by client here + * should be applied for this physical device. In case the physical id is invalid or + * it is not present among the last configured streams, Hal should fail the process + * request and return Status::ILLEGAL_ARGUMENT. + */ + string physicalCameraId; + + /** + * If fmqSettingsSize is zero, the settings buffer contains the capture and + * processing parameters for the physical device with id 'physicalCameraId'. + * As a special case, an empty settings buffer indicates that the + * settings are identical to the most-recently submitted capture request. + * An empty buffer cannot be used as the first submitted request after + * a configureStreams() call. + * + * This field must be used if fmqSettingsSize is zero. It must not be used + * if fmqSettingsSize is non-zero. + */ + CameraMetadata settings; +}; + +/** + * CaptureRequest: + * + * A single request for image capture/buffer reprocessing, sent to the Camera + * HAL device by the framework in processCaptureRequest(). + * + * The request contains the settings to be used for this capture, and the set of + * output buffers to write the resulting image data in. It may optionally + * contain an input buffer, in which case the request is for reprocessing that + * input buffer instead of capturing a new image with the camera sensor. The + * capture is identified by the frameNumber. + * + * In response, the camera HAL device must send a CaptureResult + * structure asynchronously to the framework, using the processCaptureResult() + * callback. + * + * Identical to @3.2::CaptureRequest, except that it contains + * @3.4::physCamSettings vector. + * + * With 3.4 CaptureRequest, there can be multiple sources of metadata settings. + * The @3.2::CaptureRequest v3_2 and each of the PhysicalCameraSetting in + * physicalCameraSettings can contain settings, and each buffer may have + * metadata settings from a different source. 
+ * + * For both @3.2::CaptureRequest and PhysicalCameraSetting, the settings can be + * passed from framework to HAL using either hwbinder or FMQ; both of the + * structs have the fields fmqSettingsSize and settings to pass the metadata. + * When metadata settings are passed using hwbinder, fmqSettingsSize == 0 and + * settings field contains the metadata for the HAL to read. When metadata + * settings comes from FMQ, fmqSettingsSize > 0 and HAL reads metadata from FMQ. + * For the purposes of selecting which settings to use, it does not matter + * whether it comes from hwbinder or FMQ. When the below specifications say that + * v3_2 has settings or a PhysicalCameraSetting has settings, it could refer to + * either hwbinder or FMQ, whichever is specified in the struct. + * + * Below is the logic that the HAL must follow for applying the metadata + * settings when it receives a CaptureRequest request in + * processCaptureRequest_3_4. Note that HAL must be capable of storing both the + * request.v3_2 settings and the PhysicalCameraSetting settings for each + * physical device. + * - Case 1 - request.v3_2 has settings, request.physicalCameraSettings vector + * is empty: + * - Store the request.v3_2 settings, overwriting the previously stored + * request.v3_2 settings and clearing all previously stored physical device + * settings. + * - Apply the settings from the request.v3_2 to all buffers. + * - Case 2 - request.v3_2 has settings, request.physicalCameraSettings vector + * is not empty: + * - Store the request.v3_2 settings, overwriting the previously stored + * request.v3_2 settings. + * - Each PhysicalCameraSetting in request.physicalCameraSettings must have + * settings; if not, return error. + * - For each PhysicalCameraSetting in request.physicalCameraSettings, store + * the settings, overwriting the previously stored settings for this + * physical camera; apply these settings to the buffers belonging to the + * stream for this device. + * - If there are any stored physical camera settings which do not correspond + * to one of the PhysicalCameraSetting in this request, clear them. + * - Apply the request.v3_2 settings to all buffers belonging to streams not + * covered by one of the PhysicalCameraSetting in this request. + * - Case 3 - request.v3_2 does not have settings, + * request.physicalCameraSettings vector is empty: + * - Clear all previously stored physical device settings. + * - Apply the stored request.v3_2 settings to all buffers. If there is no + * stored request.v3_2 settings, return error. + * - Case 4 - request.v3_2 does not have settings, + * request.physicalCameraSettings vector is not empty: + * - If request.physicalCameraSettings does not have the same set of physical + * cameras as the stored physical camera settings, return error. + * - Each PhysicalCameraSetting in request.physicalCameraSettings must not + * have settings; if any do have settings, return error. + * - For each PhysicalCameraSetting in request.physicalCameraSettings, apply + * the previously stored settings for this physical camera to the buffers + * belonging to the stream for this device. + * - Apply the stored request.v3_2 settings to all buffers belonging to + * streams not covered by one of the PhysicalCameraSetting in this request. + * If there is no stored request.v3_2 settings, return error. + * + * For the first request received by the HAL, only Case 1 and Case 2 are + * allowed. + */ +struct CaptureRequest { + /** + * The definition of CaptureRequest from prior version. 
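
[Editorial note] The four cases above reduce to a small decision procedure. Below is a minimal C++ sketch of that selection logic, for illustration only: the Settings, PhysCamSetting, Request and StoredState types are simplified stand-ins invented for this sketch (the real HAL operates on camera_metadata buffers that may arrive over FMQ), and the per-buffer application of the chosen settings is omitted.

#include <cstdint>
#include <map>
#include <optional>
#include <string>
#include <vector>

// Simplified stand-ins for the HIDL types; invented for this sketch.
struct Settings {
    std::vector<uint8_t> data;
    bool empty() const { return data.empty(); }
};
struct PhysCamSetting {
    std::string physicalCameraId;
    Settings settings;
};
struct Request {
    Settings v3_2Settings;                     // empty() == "v3_2 carries no settings"
    std::vector<PhysCamSetting> physSettings;  // may be empty
};
struct StoredState {
    std::optional<Settings> v3_2;              // last stored logical settings
    std::map<std::string, Settings> physical;  // last stored per-physical-camera settings
};

// Updates the stored state per Cases 1-4; returns false for the error
// conditions spelled out above.
bool selectSettings(const Request& req, StoredState& state) {
    const bool hasV32 = !req.v3_2Settings.empty();
    const bool hasPhys = !req.physSettings.empty();

    if (hasV32) {
        state.v3_2 = req.v3_2Settings;              // Cases 1 & 2: store logical settings
        if (!hasPhys) {                             // Case 1
            state.physical.clear();
            return true;
        }
        std::map<std::string, Settings> updated;    // Case 2
        for (const auto& p : req.physSettings) {
            if (p.settings.empty()) return false;   // every entry must carry settings
            updated[p.physicalCameraId] = p.settings;
        }
        state.physical = std::move(updated);        // stale per-camera settings are dropped
        return true;
    }

    if (!hasPhys) {                                 // Case 3
        state.physical.clear();
        return state.v3_2.has_value();              // error if nothing stored yet
    }

    // Case 4: same set of physical cameras as stored, and no entry carries settings.
    if (req.physSettings.size() != state.physical.size()) return false;
    for (const auto& p : req.physSettings) {
        if (!p.settings.empty()) return false;
        if (state.physical.count(p.physicalCameraId) == 0) return false;
    }
    return state.v3_2.has_value();
}

Note that the first request accepted by a session must take the hasV32 branch, matching the "only Case 1 and Case 2" restriction above.
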
+ */ + @3.2::CaptureRequest v3_2; + + /** + * A vector containing individual camera settings for logical camera backed by multiple physical + * devices. In case the vector is empty, Hal should use the settings field in 'v3_2'. The + * individual settings should only be honored for physical devices that have respective Hal + * stream. Physical devices that have a corresponding Hal stream but don't have attached + * settings here should use the settings field in 'v3_2'. + * If any of the physical settings in the array are applied on one or more devices, then the + * visual effect on any Hal streams attached to the logical camera is undefined. + */ + vec physicalCameraSettings; +}; + +/** + * PhysicalCameraMetadata: + * + * Individual camera metadata for a physical camera as part of a logical + * multi-camera. Camera HAL should return one such metadata for each physical + * camera being requested on. + */ +struct PhysicalCameraMetadata { + /** + * If non-zero, read metadata from result metadata queue instead + * (see ICameraDeviceSession.getCaptureResultMetadataQueue). + * If zero, read metadata from .metadata field. + * + * The v3_2 CaptureResult metadata is read first from the FMQ, followed by + * the physical cameras' metadata starting from index 0. + */ + uint64_t fmqMetadataSize; + + /** + * Contains the physical device camera id. As long as the corresponding + * processCaptureRequest requests on a particular physical camera stream, + * the metadata for that physical camera should be generated for the capture + * result. */ + string physicalCameraId; + + /** + * If fmqMetadataSize is zero, the metadata buffer contains the metadata + * for the physical device with physicalCameraId. + */ + CameraMetadata metadata; +}; + +/** + * CaptureResult: + * + * Identical to @3.2::CaptureResult, except that it contains a list of + * physical camera metadata. + * + * Physical camera metadata needs to be generated if and only if a + * request is pending on a stream from that physical camera. For example, + * if the processCaptureRequest call doesn't request on physical camera + * streams, the physicalCameraMetadata field of the CaptureResult being returned + * should be an 0-size vector. If the processCaptureRequest call requests on + * streams from one of the physical camera, the physicalCameraMetadata field + * should contain one metadata describing the capture from that physical camera. + * + * For a CaptureResult that contains physical camera metadata, its + * partialResult field must be android.request.partialResultCount. In other + * words, the physicalCameraMetadata must only be contained in a final capture + * result. + */ +struct CaptureResult { + /** + * The definition of CaptureResult from the prior version. + */ + @3.2::CaptureResult v3_2; + + /** + * The physical metadata for logical multi-camera. + */ + vec physicalCameraMetadata; +}; diff --git a/camera/device/3.5/ICameraDevice.hal b/camera/device/3.5/ICameraDevice.hal new file mode 100644 index 0000000..492105c --- /dev/null +++ b/camera/device/3.5/ICameraDevice.hal @@ -0,0 +1,119 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package android.hardware.camera.device@3.5; + +import android.hardware.camera.common@1.0::Status; +import @3.2::CameraMetadata; +import @3.2::ICameraDevice; +import @3.4::StreamConfiguration; + +/** + * Camera device interface + * + * Supports the android.hardware.Camera API, and the android.hardware.camera2 + * API at LIMITED or better hardware level. + * + */ +interface ICameraDevice extends @3.2::ICameraDevice { + + /** + * getPhysicalCameraCharacteristics: + * + * Return the static camera information for a physical camera ID backing + * this logical camera device. This information may not change between consecutive calls. + * + * Note that HAL must support this function for physical camera IDs that are + * not exposed via ICameraProvider::getCameraIdList(). Calling + * getCameraDeviceInterface_V3_x() on these camera IDs must return ILLEGAL_ARGUMENT. + * + * The characteristics of all cameras returned by + * ICameraProvider::getCameraIdList() must be queried via + * getCameraCharacteristics(). Calling getPhysicalCameraCharacteristics() on + * those cameras must return ILLEGAL_ARGUMENT. + * + * @param physicalCameraId The physical camera id parsed from the logical + * camera's ANDROID_LOGICAL_MULTI_CAMERA_PHYSICAL_IDS static metadata + * key. The framework assumes that this ID is just the part of fully + * qualified camera device name "device@.//". And + * the physical camera must be of the same version and type as the parent + * logical camera device. + * + * @return status Status code for the operation, one of: + * OK: + * On a successful query of the physical camera device characteristics + * INTERNAL_ERROR: + * The camera device cannot be opened due to an internal + * error. + * CAMERA_DISCONNECTED: + * An external camera device has been disconnected, and is no longer + * available. This camera device interface is now stale, and a new + * instance must be acquired if the device is reconnected. All + * subsequent calls on this interface must return + * CAMERA_DISCONNECTED. + * ILLEGAL_ARGUMENT: + * If the physicalCameraId is not a valid physical camera Id outside + * of ICameraProvider::getCameraIdList(). + * + * @return cameraCharacteristics + * The static metadata for this logical camera device's physical device, or an empty + * metadata structure if status is not OK. + * + */ + getPhysicalCameraCharacteristics(string physicalCameraId) + generates (Status status, CameraMetadata cameraCharacteristics); + + + /** + * isStreamCombinationSupported: + * + * Check for device support of specific camera stream combination. + * + * The streamList must contain at least one output-capable stream, and may + * not contain more than one input-capable stream. + * In contrast to regular stream configuration the framework does not create + * or initialize any actual streams. This means that Hal must not use or + * consider the stream "id" value. + * + * ------------------------------------------------------------------------ + * + * Preconditions: + * + * The framework can call this method at any time before, during and + * after active session configuration. 
This means that calls must not + * impact the performance of pending camera requests in any way. In + * particular there must not be any glitches or delays during normal + * camera streaming. + * + * Performance requirements: + * This call is expected to be significantly faster than stream + * configuration. In general HW and SW camera settings must not be + * changed and there must not be a user-visible impact on camera performance. + * + * @return Status Status code for the operation, one of: + * OK: + * On successful stream combination query. + * METHOD_NOT_SUPPORTED: + * The camera device does not support stream combination query. + * INTERNAL_ERROR: + * The stream combination query cannot complete due to internal + * error. + * @return true in case the stream combination is supported, false otherwise. + * + */ + isStreamCombinationSupported(@3.4::StreamConfiguration streams) + generates (Status status, bool queryStatus); +}; diff --git a/camera/device/3.5/ICameraDeviceCallback.hal b/camera/device/3.5/ICameraDeviceCallback.hal new file mode 100644 index 0000000..aa4ad22 --- /dev/null +++ b/camera/device/3.5/ICameraDeviceCallback.hal @@ -0,0 +1,70 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package android.hardware.camera.device@3.5; + +import @3.2::StreamBuffer; +import @3.4::ICameraDeviceCallback; + +/** + * Callback methods for the HAL to call into the framework. + */ +interface ICameraDeviceCallback extends @3.4::ICameraDeviceCallback { + + /** + * requestStreamBuffers: + * + * Synchronous callback for HAL to ask for output buffers from camera service. + * + * This call may be serialized in camera service so it is strongly + * recommended to only call this method from one thread. + * + * When camera device advertises + * (CameraMetadataEnumAndroidInfoSupportedBufferManagementVersion == + * ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION_HIDL_DEVICE_3_5), HAL + * can use this method to request buffers from camera service. + * + * @return status Status code for the operation, one of: + * OK: all requested buffers are returned + * FAILED_PARTIAL: some streams failed while some succeeds. Check + * individual StreamBufferRet for details. + * FAILED_CONFIGURING: the request failed because camera servicve is + * performing configureStreams and no buffers are returned. + * FAILED_UNKNOWN: the request failed for unknown reason and no buffers + * are returned. + * + * Performance requirements: + * This is a blocking call that takes more time with more buffers requested. + * HAL must not request large amount of buffers on a latency critical code + * path. It is highly recommended to use a dedicated thread to perform + * all requestStreamBuffers calls, and adjust the thread priority and/or + * timing of making the call in order for buffers to arrive before HAL is + * ready to fill the buffer. 
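
[Editorial note] As a rough illustration of the threading guidance above, here is a sketch of a HAL issuing one batch of buffer requests from a dedicated thread. It assumes the generated @3.5 proxy headers from an AOSP tree; the batching around it is invented for the example and buffer import/bookkeeping is omitted, so this is not a drop-in implementation.

// Sketch only: issue one batch of buffer requests from a dedicated thread so
// latency-critical request paths never block on this synchronous call.
#include <android/hardware/camera/device/3.5/ICameraDeviceCallback.h>
#include <utils/StrongPointer.h>

using ::android::sp;
using ::android::hardware::hidl_vec;
using namespace ::android::hardware::camera::device::V3_5;

void requestOneBatch(const sp<ICameraDeviceCallback>& cb,
                     const hidl_vec<BufferRequest>& reqs) {
    BufferRequestStatus status = BufferRequestStatus::FAILED_UNKNOWN;
    hidl_vec<StreamBufferRet> rets;
    auto err = cb->requestStreamBuffers(reqs,
            [&](BufferRequestStatus s, const hidl_vec<StreamBufferRet>& r) {
                status = s;
                rets = r;
            });
    if (!err.isOk()) {
        return;  // transport error: treat as FAILED_UNKNOWN, nothing was returned
    }
    if (status == BufferRequestStatus::OK ||
        status == BufferRequestStatus::FAILED_PARTIAL) {
        // Even on OK/FAILED_PARTIAL each StreamBufferRet must be inspected:
        // its safe_union holds either buffers or a StreamBufferRequestError.
        for (const auto& ret : rets) {
            (void) ret.streamId;  // import and track the returned buffers here
        }
    }
}
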
+ */ + requestStreamBuffers(vec bufReqs) + generates (BufferRequestStatus st, vec buffers); + + /** + * returnStreamBuffers: + * + * Synchronous callback for HAL to return output buffers to camera service. + * + * If this method is called during a configureStreams call, it must be blocked + * until camera service finishes the ongoing configureStreams call. + */ + returnStreamBuffers(vec buffers); + +}; diff --git a/camera/device/3.5/ICameraDeviceSession.hal b/camera/device/3.5/ICameraDeviceSession.hal new file mode 100644 index 0000000..c868e1e --- /dev/null +++ b/camera/device/3.5/ICameraDeviceSession.hal @@ -0,0 +1,154 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package android.hardware.camera.device@3.5; + +import android.hardware.camera.common@1.0::Status; +import @3.2::CameraMetadata; +import @3.4::ICameraDeviceSession; +import @3.4::HalStreamConfiguration; + +/** + * Camera device active session interface. + * + * Obtained via ICameraDevice::open(), this interface contains the methods to + * configure and request captures from an active camera device. + */ +interface ICameraDeviceSession extends @3.4::ICameraDeviceSession { + + /** + * configureStreams_3_5: + * + * Identical to @3.4::ICameraDeviceSession.configureStreams, except that: + * + * - a streamConfigCounter counter is provided to check for race condition + * between configureStreams_3_5 and signalStreamFlush call. + * - In case the HAL overrides dataspace or format for + * IMPLEMENTATION_DEFINED pixel format, camera framework must use original + * dataspace and format in subsequent configureStreams_3_5 calls for the same + * stream. HAL is allowed to change the overriding behavior of format or + * dataspace for reconfiguration of the same stream depending on the stream + * combination. + * + * @return status Status code for the operation, one of: + * OK: + * On successful stream configuration. + * INTERNAL_ERROR: + * If there has been a fatal error and the device is no longer + * operational. Only close() can be called successfully by the + * framework after this error is returned. + * ILLEGAL_ARGUMENT: + * If the requested stream configuration is invalid. Some examples + * of invalid stream configurations include: + * - Including more than 1 INPUT stream + * - Not including any OUTPUT streams + * - Including streams with unsupported formats, or an unsupported + * size for that format. + * - Including too many output streams of a certain format. + * - Unsupported rotation configuration + * - Stream sizes/formats don't satisfy the + * StreamConfigurationMode requirements + * for non-NORMAL mode, or the requested operation_mode is not + * supported by the HAL. + * - Unsupported usage flag + * The camera service cannot filter out all possible illegal stream + * configurations, since some devices may support more simultaneous + * streams or larger stream resolutions than the minimum required + * for a given camera device hardware level. 
The HAL must return an + * ILLEGAL_ARGUMENT for any unsupported stream set, and then be + * ready to accept a future valid stream configuration in a later + * configureStreams call. + * @return halConfiguration The stream parameters desired by the HAL for + * each stream, including maximum buffers, the usage flags, and the + * override format. + */ + configureStreams_3_5(@3.5::StreamConfiguration requestedConfiguration) + generates (Status status, + @3.4::HalStreamConfiguration halConfiguration); + + + /** + * signalStreamFlush: + * + * Signaling HAL camera service is about to perform configureStreams_3_5 and + * HAL must return all buffers of designated streams. HAL must finish + * inflight requests normally and return all buffers that belongs to the + * designated streams through processCaptureResult or returnStreamBuffer + * API in a timely manner, or camera service will run into a fatal error. + * + * Note that this call serves as an optional hint and camera service may + * skip sending this call if all buffers are already returned. + * + * @param streamIds The ID of streams camera service need all of its + * buffers returned. + * + * @param streamConfigCounter Note that due to concurrency nature, it is + * possible the signalStreamFlush call arrives later than the + * corresponding configureStreams_3_5 call, HAL must check + * streamConfigCounter for such race condition. If the counter is less + * than the counter in the last configureStreams_3_5 call HAL last + * received, the call is stale and HAL should just return this call. + */ + oneway signalStreamFlush( + vec streamIds, + uint32_t streamConfigCounter + ); + + /** + * isReconfigurationRequired: + * + * Check whether complete stream reconfiguration is required for possible new session + * parameter values. + * + * This method must be called by the camera framework in case the client changes + * the value of any advertised session parameters. Depending on the specific values + * the HAL can decide whether a complete stream reconfiguration is required. In case + * the HAL returns false, the camera framework must skip the internal reconfiguration. + * In case Hal returns true, the framework must reconfigure the streams and pass the + * new session parameter values accordingly. + * This call may be done by the framework some time before the request with new parameters + * is submitted to the HAL, and the request may be cancelled before it ever gets submitted. + * Therefore, the HAL must not use this query as an indication to change its behavior in any + * way. + * ------------------------------------------------------------------------ + * + * Preconditions: + * + * The framework can call this method at any time after active + * session configuration. There must be no impact on the performance of + * pending camera requests in any way. In particular there must not be + * any glitches or delays during normal camera streaming. + * + * Performance requirements: + * HW and SW camera settings must not be changed and there must not be + * a user-visible impact on camera performance. + * + * @param oldSessionParams Before session parameters, usually the current session parameters. + * @param newSessionParams The new session parameters which may be set by client. + * + * @return Status Status code for the operation, one of: + * OK: + * On successful reconfiguration required query. + * METHOD_NOT_SUPPORTED: + * The camera device does not support the reconfiguration query. 
+ * INTERNAL_ERROR: + * The reconfiguration query cannot complete due to internal + * error. + * @return true in case the stream reconfiguration is required, false otherwise. + */ + isReconfigurationRequired(CameraMetadata oldSessionParams, CameraMetadata newSessionParams) + generates(Status status, bool reconfigurationNeeded); +}; diff --git a/camera/device/3.5/default/Android.bp b/camera/device/3.5/default/Android.bp new file mode 100644 index 0000000..7c1128f --- /dev/null +++ b/camera/device/3.5/default/Android.bp @@ -0,0 +1,107 @@ +// +// Copyright (C) 2018 The Android Open Source Project +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// + + +cc_library_headers { + name: "vendor.camera.device@3.5-impl_headers", + vendor: true, + export_include_dirs: ["include/device_v3_5_impl"], +} + +cc_library_headers { + name: "vendor.camera.device@3.5-external-impl_headers", + vendor: true, + export_include_dirs: ["include/ext_device_v3_5_impl"], +} + +cc_library_shared { + name: "vendor.camera.device@3.5-impl", + defaults: ["hidl_defaults"], + proprietary: true, + vendor: true, + srcs: [ + "CameraDevice.cpp", + "CameraDeviceSession.cpp", + ], + shared_libs: [ + "libhidlbase", + "libutils", + "libcutils", + "vendor.camera.device@3.2-impl", + "vendor.camera.device@3.3-impl", + "vendor.camera.device@3.4-impl", + "android.hardware.camera.device@3.2", + "android.hardware.camera.device@3.3", + "android.hardware.camera.device@3.4", + "android.hardware.camera.device@3.5", + "android.hardware.camera.provider@2.4", + "android.hardware.graphics.mapper@2.0", + "android.hardware.graphics.mapper@3.0", + "android.hardware.graphics.mapper@4.0", + "liblog", + "libgralloctypes", + "libhardware", + "libcamera_metadata", + ], + static_libs: [ + "android.vendor.hardware.camera.common@1.0-helper", + ], + local_include_dirs: ["include/device_v3_5_impl"], +} + +cc_library_shared { + name: "vendor.camera.device@3.5-external-impl", + defaults: ["hidl_defaults"], + proprietary: true, + vendor: true, + srcs: [ + "ExternalCameraDevice.cpp", + "ExternalCameraDeviceSession.cpp", + ], + shared_libs: [ + "libhidlbase", + "libutils", + "libcutils", + "vendor.camera.device@3.2-impl", + "vendor.camera.device@3.3-impl", + "vendor.camera.device@3.4-external-impl", + "android.hardware.camera.device@3.2", + "android.hardware.camera.device@3.3", + "android.hardware.camera.device@3.4", + "android.hardware.camera.device@3.5", + "android.hardware.camera.provider@2.4", + "android.hardware.graphics.mapper@2.0", + "android.hardware.graphics.mapper@3.0", + "android.hardware.graphics.mapper@4.0", + "liblog", + "libgralloctypes", + "libhardware", + "libcamera_metadata", + "libfmq", + "libsync", + "libyuv", + "libjpeg", + "libexif", + "libtinyxml2", + ], + static_libs: [ + "android.vendor.hardware.camera.common@1.0-helper", + ], + local_include_dirs: ["include/ext_device_v3_5_impl"], + export_shared_lib_headers: [ + "libfmq", + ], +} diff --git a/camera/device/3.5/default/CameraDevice.cpp b/camera/device/3.5/default/CameraDevice.cpp new 
file mode 100644 index 0000000..cffda4e --- /dev/null +++ b/camera/device/3.5/default/CameraDevice.cpp @@ -0,0 +1,157 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "CamDev@3.5-impl" +#include + +#include "CameraModule.h" +#include "CameraDevice_3_5.h" + +namespace android { +namespace hardware { +namespace camera { +namespace device { +namespace V3_5 { +namespace implementation { + +using namespace ::android::hardware::camera::device; +using ::android::hardware::camera::common::V1_0::Status; +using ::android::hardware::camera::device::V3_2::CameraMetadata; + +CameraDevice::CameraDevice(sp module, const std::string& cameraId, + const SortedVector>& cameraDeviceNames) : + V3_4::implementation::CameraDevice(module, cameraId, cameraDeviceNames) { +} + +CameraDevice::~CameraDevice() { +} + +sp CameraDevice::createSession(camera3_device_t* device, + const camera_metadata_t* deviceInfo, + const sp& callback) { + sp session = new CameraDeviceSession(device, deviceInfo, callback); + IF_ALOGV() { + session->getInterface()->interfaceChain([]( + ::android::hardware::hidl_vec<::android::hardware::hidl_string> interfaceChain) { + ALOGV("Session interface chain:"); + for (auto iface : interfaceChain) { + ALOGV(" %s", iface.c_str()); + } + }); + } + return session; +} + +Return CameraDevice::getPhysicalCameraCharacteristics(const hidl_string& physicalCameraId, + V3_5::ICameraDevice::getPhysicalCameraCharacteristics_cb _hidl_cb) { + Status status = initStatus(); + CameraMetadata cameraCharacteristics; + if (status == Status::OK) { + // Require module 2.5+ version. 
+ if (mModule->getModuleApiVersion() < CAMERA_MODULE_API_VERSION_2_5) { + ALOGE("%s: get_physical_camera_info must be called on camera module 2.5 or newer", + __FUNCTION__); + status = Status::INTERNAL_ERROR; + } else { + char *end; + errno = 0; + long id = strtol(physicalCameraId.c_str(), &end, 0); + if (id > INT_MAX || (errno == ERANGE && id == LONG_MAX) || + id < INT_MIN || (errno == ERANGE && id == LONG_MIN) || + *end != '\0') { + ALOGE("%s: Invalid physicalCameraId %s", __FUNCTION__, physicalCameraId.c_str()); + status = Status::ILLEGAL_ARGUMENT; + } else { + camera_metadata_t *physicalInfo = nullptr; + int ret = mModule->getPhysicalCameraInfo((int)id, &physicalInfo); + if (ret == OK) { + V3_2::implementation::convertToHidl(physicalInfo, &cameraCharacteristics); + } else if (ret == -EINVAL) { + ALOGE("%s: %s is not a valid physical camera Id outside of getCameraIdList()", + __FUNCTION__, physicalCameraId.c_str()); + status = Status::ILLEGAL_ARGUMENT; + } else { + ALOGE("%s: Failed to get physical camera %s info: %s (%d)!", __FUNCTION__, + physicalCameraId.c_str(), strerror(-ret), ret); + status = Status::INTERNAL_ERROR; + } + } + } + } + _hidl_cb(status, cameraCharacteristics); + return Void(); +} + +Return CameraDevice::isStreamCombinationSupported(const V3_4::StreamConfiguration& streams, + V3_5::ICameraDevice::isStreamCombinationSupported_cb _hidl_cb) { + Status status; + bool streamsSupported = false; + + // Require module 2.5+ version. + if (mModule->getModuleApiVersion() < CAMERA_MODULE_API_VERSION_2_5) { + ALOGE("%s: is_stream_combination_supported must be called on camera module 2.5 or "\ + "newer", __FUNCTION__); + status = Status::INTERNAL_ERROR; + } else { + camera_stream_combination_t streamComb{}; + streamComb.operation_mode = static_cast (streams.operationMode); + streamComb.num_streams = streams.streams.size(); + camera_stream_t *streamBuffer = new camera_stream_t[streamComb.num_streams]; + + size_t i = 0; + for (const auto &it : streams.streams) { + streamBuffer[i].stream_type = static_cast (it.v3_2.streamType); + streamBuffer[i].width = it.v3_2.width; + streamBuffer[i].height = it.v3_2.height; + streamBuffer[i].format = static_cast (it.v3_2.format); + streamBuffer[i].data_space = static_cast (it.v3_2.dataSpace); + streamBuffer[i].usage = static_cast (it.v3_2.usage); + streamBuffer[i].physical_camera_id = it.physicalCameraId.c_str(); + streamBuffer[i++].rotation = static_cast (it.v3_2.rotation); + } + streamComb.streams = streamBuffer; + auto res = mModule->isStreamCombinationSupported(mCameraIdInt, &streamComb); + switch (res) { + case NO_ERROR: + streamsSupported = true; + status = Status::OK; + break; + case BAD_VALUE: + status = Status::OK; + break; + case INVALID_OPERATION: + status = Status::METHOD_NOT_SUPPORTED; + break; + default: + ALOGE("%s: Unexpected error: %d", __FUNCTION__, res); + status = Status::INTERNAL_ERROR; + }; + delete [] streamBuffer; + } + + _hidl_cb(status, streamsSupported); + return Void(); +} + +// End of methods from ::android::hardware::camera::device::V3_2::ICameraDevice. 
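
[Editorial note] For reference, a caller-side sketch of exercising isStreamCombinationSupported() as implemented above, through the generated @3.5 proxy. The stream values and the device handle are invented for the example, and error handling is reduced to logging; it is not part of this patch.

// Sketch only: how a test or the framework might query the entry point above.
#define LOG_TAG "StreamComboQuerySketch"
#include <android/hardware/camera/device/3.5/ICameraDevice.h>
#include <log/log.h>
#include <utils/StrongPointer.h>

using ::android::sp;
using ::android::hardware::camera::common::V1_0::Status;
namespace V3_2 = ::android::hardware::camera::device::V3_2;
namespace V3_4 = ::android::hardware::camera::device::V3_4;
namespace V3_5 = ::android::hardware::camera::device::V3_5;

void queryCombination(const sp<V3_5::ICameraDevice>& device) {
    V3_4::StreamConfiguration config;
    config.operationMode = V3_2::StreamConfigurationMode::NORMAL_MODE;
    config.streams.resize(1);
    config.streams[0].v3_2.id = 0;  // ignored by this query per the .hal contract
    config.streams[0].v3_2.streamType = V3_2::StreamType::OUTPUT;
    config.streams[0].v3_2.width = 1920;
    config.streams[0].v3_2.height = 1080;
    config.streams[0].v3_2.format =
            ::android::hardware::graphics::common::V1_0::PixelFormat::IMPLEMENTATION_DEFINED;

    auto ret = device->isStreamCombinationSupported(config,
            [](Status s, bool supported) {
                ALOGI("stream combination query: status %d, supported %d",
                      static_cast<int>(s), supported);
            });
    if (!ret.isOk()) {
        ALOGE("Transaction error: %s", ret.description().c_str());
    }
}
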
+ +} // namespace implementation +} // namespace V3_5 +} // namespace device +} // namespace camera +} // namespace hardware +} // namespace android + diff --git a/camera/device/3.5/default/CameraDeviceSession.cpp b/camera/device/3.5/default/CameraDeviceSession.cpp new file mode 100644 index 0000000..44d067d --- /dev/null +++ b/camera/device/3.5/default/CameraDeviceSession.cpp @@ -0,0 +1,407 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "CamDevSession@3.5-impl" +#define ATRACE_TAG ATRACE_TAG_CAMERA +#include + +#include +#include +#include "CameraDeviceSession.h" + +namespace android { +namespace hardware { +namespace camera { +namespace device { +namespace V3_5 { +namespace implementation { + +CameraDeviceSession::CameraDeviceSession( + camera3_device_t* device, + const camera_metadata_t* deviceInfo, + const sp& callback) : + V3_4::implementation::CameraDeviceSession(device, deviceInfo, callback) { + + mCallback_3_5 = nullptr; + + auto castResult = ICameraDeviceCallback::castFrom(callback); + if (castResult.isOk()) { + sp callback3_5 = castResult; + if (callback3_5 != nullptr) { + mCallback_3_5 = callback3_5; + } + } + + if (mCallback_3_5 != nullptr) { + camera_metadata_entry bufMgrVersion = mDeviceInfo.find( + ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION); + if (bufMgrVersion.count > 0) { + mSupportBufMgr = (bufMgrVersion.data.u8[0] == + ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION_HIDL_DEVICE_3_5); + if (mSupportBufMgr) { + request_stream_buffers = sRequestStreamBuffers; + return_stream_buffers = sReturnStreamBuffers; + } + } + } +} + +CameraDeviceSession::~CameraDeviceSession() { +} + +Return CameraDeviceSession::configureStreams_3_5( + const StreamConfiguration& requestedConfiguration, + ICameraDeviceSession::configureStreams_3_5_cb _hidl_cb) { + configureStreams_3_4_Impl(requestedConfiguration.v3_4, _hidl_cb, + requestedConfiguration.streamConfigCounter, false /*useOverriddenFields*/); + return Void(); +} + +Return CameraDeviceSession::signalStreamFlush( + const hidl_vec& streamIds, uint32_t streamConfigCounter) { + if (mDevice->ops->signal_stream_flush == nullptr) { + return Void(); + } + + uint32_t currentCounter = 0; + { + Mutex::Autolock _l(mStreamConfigCounterLock); + currentCounter = mStreamConfigCounter; + } + + if (streamConfigCounter < currentCounter) { + ALOGV("%s: streamConfigCounter %d is stale (current %d), skipping signal_stream_flush call", + __FUNCTION__, streamConfigCounter, mStreamConfigCounter); + return Void(); + } + + std::vector streams(streamIds.size()); + { + Mutex::Autolock _l(mInflightLock); + for (size_t i = 0; i < streamIds.size(); i++) { + int32_t id = streamIds[i]; + if (mStreamMap.count(id) == 0) { + ALOGE("%s: unknown streamId %d", __FUNCTION__, id); + return Void(); + } + streams[i] = &mStreamMap[id]; + } + } + + mDevice->ops->signal_stream_flush(mDevice, streams.size(), streams.data()); + return Void(); +} + +Status 
CameraDeviceSession::importRequest( + const CaptureRequest& request, + hidl_vec& allBufPtrs, + hidl_vec& allFences) { + if (mSupportBufMgr) { + return importRequestImpl(request, allBufPtrs, allFences, /*allowEmptyBuf*/ true); + } + return importRequestImpl(request, allBufPtrs, allFences, /*allowEmptyBuf*/ false); +} + +void CameraDeviceSession::pushBufferId( + const buffer_handle_t& buf, uint64_t bufferId, int streamId) { + std::lock_guard lock(mBufferIdMapLock); + + // emplace will return existing entry if there is one. + auto pair = mBufferIdMaps.emplace(streamId, BufferIdMap{}); + BufferIdMap& bIdMap = pair.first->second; + bIdMap[buf] = bufferId; +} + +uint64_t CameraDeviceSession::popBufferId( + const buffer_handle_t& buf, int streamId) { + std::lock_guard lock(mBufferIdMapLock); + + auto streamIt = mBufferIdMaps.find(streamId); + if (streamIt == mBufferIdMaps.end()) { + return BUFFER_ID_NO_BUFFER; + } + BufferIdMap& bIdMap = streamIt->second; + auto it = bIdMap.find(buf); + if (it == bIdMap.end()) { + return BUFFER_ID_NO_BUFFER; + } + uint64_t bufId = it->second; + bIdMap.erase(it); + if (bIdMap.empty()) { + mBufferIdMaps.erase(streamIt); + } + return bufId; +} + +uint64_t CameraDeviceSession::getCapResultBufferId(const buffer_handle_t& buf, int streamId) { + if (mSupportBufMgr) { + return popBufferId(buf, streamId); + } + return BUFFER_ID_NO_BUFFER; +} + +Camera3Stream* CameraDeviceSession::getStreamPointer(int32_t streamId) { + Mutex::Autolock _l(mInflightLock); + if (mStreamMap.count(streamId) == 0) { + ALOGE("%s: unknown streamId %d", __FUNCTION__, streamId); + return nullptr; + } + return &mStreamMap[streamId]; +} + +void CameraDeviceSession::cleanupInflightBufferFences( + std::vector& fences, std::vector>& bufs) { + hidl_vec hFences = fences; + cleanupInflightFences(hFences, fences.size()); + for (auto& p : bufs) { + popBufferId(p.first, p.second); + } +} + +camera3_buffer_request_status_t CameraDeviceSession::requestStreamBuffers( + uint32_t num_buffer_reqs, + const camera3_buffer_request_t *buffer_reqs, + /*out*/uint32_t *num_returned_buf_reqs, + /*out*/camera3_stream_buffer_ret_t *returned_buf_reqs) { + ATRACE_CALL(); + *num_returned_buf_reqs = 0; + hidl_vec hBufReqs(num_buffer_reqs); + for (size_t i = 0; i < num_buffer_reqs; i++) { + hBufReqs[i].streamId = + static_cast(buffer_reqs[i].stream)->mId; + hBufReqs[i].numBuffersRequested = buffer_reqs[i].num_buffers_requested; + } + + ATRACE_BEGIN("HIDL requestStreamBuffers"); + BufferRequestStatus status; + hidl_vec bufRets; + auto err = mCallback_3_5->requestStreamBuffers(hBufReqs, + [&status, &bufRets] + (BufferRequestStatus s, const hidl_vec& rets) { + status = s; + bufRets = std::move(rets); + }); + if (!err.isOk()) { + ALOGE("%s: Transaction error: %s", __FUNCTION__, err.description().c_str()); + return CAMERA3_BUF_REQ_FAILED_UNKNOWN; + } + ATRACE_END(); + + switch (status) { + case BufferRequestStatus::FAILED_CONFIGURING: + return CAMERA3_BUF_REQ_FAILED_CONFIGURING; + case BufferRequestStatus::FAILED_ILLEGAL_ARGUMENTS: + return CAMERA3_BUF_REQ_FAILED_ILLEGAL_ARGUMENTS; + default: + break; // Other status Handled by following code + } + + if (status != BufferRequestStatus::OK && status != BufferRequestStatus::FAILED_PARTIAL && + status != BufferRequestStatus::FAILED_UNKNOWN) { + ALOGE("%s: unknown buffer request error code %d", __FUNCTION__, status); + return CAMERA3_BUF_REQ_FAILED_UNKNOWN; + } + + // Only OK, FAILED_PARTIAL and FAILED_UNKNOWN reaches here + if (bufRets.size() != num_buffer_reqs) { + ALOGE("%s: expect %d 
buffer requests returned, only got %zu", + __FUNCTION__, num_buffer_reqs, bufRets.size()); + return CAMERA3_BUF_REQ_FAILED_UNKNOWN; + } + + *num_returned_buf_reqs = num_buffer_reqs; + for (size_t i = 0; i < num_buffer_reqs; i++) { + // maybe we can query all streams in one call to avoid frequent locking device here? + Camera3Stream* stream = getStreamPointer(bufRets[i].streamId); + if (stream == nullptr) { + ALOGE("%s: unknown streamId %d", __FUNCTION__, bufRets[i].streamId); + return CAMERA3_BUF_REQ_FAILED_UNKNOWN; + } + returned_buf_reqs[i].stream = stream; + } + + // Handle failed streams + for (size_t i = 0; i < num_buffer_reqs; i++) { + if (bufRets[i].val.getDiscriminator() == StreamBuffersVal::hidl_discriminator::error) { + returned_buf_reqs[i].num_output_buffers = 0; + switch (bufRets[i].val.error()) { + case StreamBufferRequestError::NO_BUFFER_AVAILABLE: + returned_buf_reqs[i].status = CAMERA3_PS_BUF_REQ_NO_BUFFER_AVAILABLE; + break; + case StreamBufferRequestError::MAX_BUFFER_EXCEEDED: + returned_buf_reqs[i].status = CAMERA3_PS_BUF_REQ_MAX_BUFFER_EXCEEDED; + break; + case StreamBufferRequestError::STREAM_DISCONNECTED: + returned_buf_reqs[i].status = CAMERA3_PS_BUF_REQ_STREAM_DISCONNECTED; + break; + case StreamBufferRequestError::UNKNOWN_ERROR: + returned_buf_reqs[i].status = CAMERA3_PS_BUF_REQ_UNKNOWN_ERROR; + break; + default: + ALOGE("%s: Unknown StreamBufferRequestError %d", + __FUNCTION__, bufRets[i].val.error()); + return CAMERA3_BUF_REQ_FAILED_UNKNOWN; + } + } + } + + if (status == BufferRequestStatus::FAILED_UNKNOWN) { + return CAMERA3_BUF_REQ_FAILED_UNKNOWN; + } + + // Only BufferRequestStatus::OK and BufferRequestStatus::FAILED_PARTIAL reaches here + std::vector importedFences; + std::vector> importedBuffers; + for (size_t i = 0; i < num_buffer_reqs; i++) { + if (bufRets[i].val.getDiscriminator() != + StreamBuffersVal::hidl_discriminator::buffers) { + continue; + } + int streamId = bufRets[i].streamId; + const hidl_vec& hBufs = bufRets[i].val.buffers(); + camera3_stream_buffer_t* outBufs = returned_buf_reqs[i].output_buffers; + returned_buf_reqs[i].num_output_buffers = hBufs.size(); + for (size_t b = 0; b < hBufs.size(); b++) { + const StreamBuffer& hBuf = hBufs[b]; + camera3_stream_buffer_t& outBuf = outBufs[b]; + // maybe add importBuffers API to avoid frequent locking device? + Status s = importBuffer(streamId, + hBuf.bufferId, hBuf.buffer.getNativeHandle(), + /*out*/&(outBuf.buffer), + /*allowEmptyBuf*/false); + // Buffer import should never fail - restart HAL since something is very wrong. + LOG_ALWAYS_FATAL_IF(s != Status::OK, + "%s: import stream %d bufferId %" PRIu64 " failed!", + __FUNCTION__, streamId, hBuf.bufferId); + + pushBufferId(*(outBuf.buffer), hBuf.bufferId, streamId); + importedBuffers.push_back(std::make_pair(*(outBuf.buffer), streamId)); + + bool succ = sHandleImporter.importFence(hBuf.acquireFence, outBuf.acquire_fence); + // Fence import should never fail - restart HAL since something is very wrong. + LOG_ALWAYS_FATAL_IF(!succ, + "%s: stream %d bufferId %" PRIu64 "acquire fence is invalid", + __FUNCTION__, streamId, hBuf.bufferId); + importedFences.push_back(outBuf.acquire_fence); + outBuf.stream = returned_buf_reqs[i].stream; + outBuf.status = CAMERA3_BUFFER_STATUS_OK; + outBuf.release_fence = -1; + } + returned_buf_reqs[i].status = CAMERA3_PS_BUF_REQ_OK; + } + + return (status == BufferRequestStatus::OK) ? 
+ CAMERA3_BUF_REQ_OK : CAMERA3_BUF_REQ_FAILED_PARTIAL; +} + +void CameraDeviceSession::returnStreamBuffers( + uint32_t num_buffers, + const camera3_stream_buffer_t* const* buffers) { + ATRACE_CALL(); + hidl_vec hBufs(num_buffers); + + for (size_t i = 0; i < num_buffers; i++) { + hBufs[i].streamId = + static_cast(buffers[i]->stream)->mId; + hBufs[i].buffer = nullptr; // use bufferId + hBufs[i].bufferId = popBufferId(*(buffers[i]->buffer), hBufs[i].streamId); + if (hBufs[i].bufferId == BUFFER_ID_NO_BUFFER) { + ALOGE("%s: unknown buffer is returned to stream %d", + __FUNCTION__, hBufs[i].streamId); + } + // ERROR since the buffer is not for application to consume + hBufs[i].status = BufferStatus::ERROR; + // skip acquire fence since it's of no use to camera service + if (buffers[i]->release_fence != -1) { + native_handle_t* handle = native_handle_create(/*numFds*/1, /*numInts*/0); + handle->data[0] = buffers[i]->release_fence; + hBufs[i].releaseFence.setTo(handle, /*shouldOwn*/true); + } + } + + mCallback_3_5->returnStreamBuffers(hBufs); + return; +} + +/** + * Static callback forwarding methods from HAL to instance + */ +camera3_buffer_request_status_t CameraDeviceSession::sRequestStreamBuffers( + const struct camera3_callback_ops *cb, + uint32_t num_buffer_reqs, + const camera3_buffer_request_t *buffer_reqs, + /*out*/uint32_t *num_returned_buf_reqs, + /*out*/camera3_stream_buffer_ret_t *returned_buf_reqs) { + CameraDeviceSession *d = + const_cast(static_cast(cb)); + + if (num_buffer_reqs == 0 || buffer_reqs == nullptr || num_returned_buf_reqs == nullptr || + returned_buf_reqs == nullptr) { + ALOGE("%s: bad argument: numBufReq %d, bufReqs %p, numRetBufReq %p, retBufReqs %p", + __FUNCTION__, num_buffer_reqs, buffer_reqs, + num_returned_buf_reqs, returned_buf_reqs); + return CAMERA3_BUF_REQ_FAILED_ILLEGAL_ARGUMENTS; + } + + return d->requestStreamBuffers(num_buffer_reqs, buffer_reqs, + num_returned_buf_reqs, returned_buf_reqs); +} + +void CameraDeviceSession::sReturnStreamBuffers( + const struct camera3_callback_ops *cb, + uint32_t num_buffers, + const camera3_stream_buffer_t* const* buffers) { + CameraDeviceSession *d = + const_cast(static_cast(cb)); + + d->returnStreamBuffers(num_buffers, buffers); +} + +Return CameraDeviceSession::isReconfigurationRequired( + const V3_2::CameraMetadata& oldSessionParams, const V3_2::CameraMetadata& newSessionParams, + ICameraDeviceSession::isReconfigurationRequired_cb _hidl_cb) { + if (mDevice->ops->is_reconfiguration_required != nullptr) { + const camera_metadata_t *oldParams, *newParams; + V3_2::implementation::convertFromHidl(oldSessionParams, &oldParams); + V3_2::implementation::convertFromHidl(newSessionParams, &newParams); + auto ret = mDevice->ops->is_reconfiguration_required(mDevice, oldParams, newParams); + switch (ret) { + case 0: + _hidl_cb(Status::OK, true); + break; + case -EINVAL: + _hidl_cb(Status::OK, false); + break; + case -ENOSYS: + _hidl_cb(Status::METHOD_NOT_SUPPORTED, true); + break; + default: + _hidl_cb(Status::INTERNAL_ERROR, true); + break; + }; + } else { + _hidl_cb(Status::METHOD_NOT_SUPPORTED, true); + } + + return Void(); +} + +} // namespace implementation +} // namespace V3_5 +} // namespace device +} // namespace camera +} // namespace hardware +} // namespace android diff --git a/camera/device/3.5/default/ExternalCameraDevice.cpp b/camera/device/3.5/default/ExternalCameraDevice.cpp new file mode 100644 index 0000000..d0de1a4 --- /dev/null +++ b/camera/device/3.5/default/ExternalCameraDevice.cpp @@ -0,0 +1,118 @@ +/* + * 
Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "ExtCamDev@3.5" +//#define LOG_NDEBUG 0 +#include + +#include "ExternalCameraDevice_3_5.h" + +namespace android { +namespace hardware { +namespace camera { +namespace device { +namespace V3_5 { +namespace implementation { + +ExternalCameraDevice::ExternalCameraDevice( + const std::string& cameraId, const ExternalCameraConfig& cfg) : + V3_4::implementation::ExternalCameraDevice(cameraId, cfg) {} + +ExternalCameraDevice::~ExternalCameraDevice() {} + +Return ExternalCameraDevice::getPhysicalCameraCharacteristics(const hidl_string&, + V3_5::ICameraDevice::getPhysicalCameraCharacteristics_cb _hidl_cb) { + CameraMetadata cameraCharacteristics; + // External camera HAL doesn't support physical camera functions + _hidl_cb(Status::ILLEGAL_ARGUMENT, cameraCharacteristics); + return Void(); +} + +sp ExternalCameraDevice::createSession( + const sp& cb, + const ExternalCameraConfig& cfg, + const std::vector& sortedFormats, + const CroppingType& croppingType, + const common::V1_0::helper::CameraMetadata& chars, + const std::string& cameraId, + unique_fd v4l2Fd) { + return new ExternalCameraDeviceSession( + cb, cfg, sortedFormats, croppingType, chars, cameraId, std::move(v4l2Fd)); +} + +#define UPDATE(tag, data, size) \ +do { \ + if (metadata->update((tag), (data), (size))) { \ + ALOGE("Update " #tag " failed!"); \ + return -EINVAL; \ + } \ +} while (0) + +status_t ExternalCameraDevice::initDefaultCharsKeys( + ::android::hardware::camera::common::V1_0::helper::CameraMetadata* metadata) { + status_t res = + V3_4::implementation::ExternalCameraDevice::initDefaultCharsKeys(metadata); + + if (res != OK) { + return res; + } + + const uint8_t bufMgrVer = ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION_HIDL_DEVICE_3_5; + UPDATE(ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION, &bufMgrVer, 1); + + std::vector availableCharacteristicsKeys = AVAILABLE_CHARACTERISTICS_KEYS_3_4; + availableCharacteristicsKeys.reserve(availableCharacteristicsKeys.size() + + EXTRA_CHARACTERISTICS_KEYS_3_5.size()); + for (const auto& key : EXTRA_CHARACTERISTICS_KEYS_3_5) { + availableCharacteristicsKeys.push_back(key); + } + UPDATE(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, + availableCharacteristicsKeys.data(), + availableCharacteristicsKeys.size()); + + return OK; +} + +Return ExternalCameraDevice::isStreamCombinationSupported( + const V3_4::StreamConfiguration& streams, + V3_5::ICameraDevice::isStreamCombinationSupported_cb _hidl_cb) { + + if (isInitFailed()) { + ALOGE("%s: camera %s. 
camera init failed!", __FUNCTION__, mCameraId.c_str()); + _hidl_cb(Status::INTERNAL_ERROR, false); + return Void(); + } + + hidl_vec streamsV3_2(streams.streams.size()); + size_t i = 0; + for (const auto& it : streams.streams) { + streamsV3_2[i++] = it.v3_2; + } + V3_2::StreamConfiguration streamConfig = {streamsV3_2, streams.operationMode}; + auto status = ExternalCameraDeviceSession::isStreamCombinationSupported(streamConfig, + mSupportedFormats, mCfg); + _hidl_cb(Status::OK, Status::OK == status); + return Void(); +} +#undef UPDATE + +} // namespace implementation +} // namespace V3_5 +} // namespace device +} // namespace camera +} // namespace hardware +} // namespace android + diff --git a/camera/device/3.5/default/ExternalCameraDeviceSession.cpp b/camera/device/3.5/default/ExternalCameraDeviceSession.cpp new file mode 100644 index 0000000..287ac32 --- /dev/null +++ b/camera/device/3.5/default/ExternalCameraDeviceSession.cpp @@ -0,0 +1,314 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "ExtCamDevSsn@3.5" +#include + +#include +#include "ExternalCameraDeviceSession.h" + +namespace android { +namespace hardware { +namespace camera { +namespace device { +namespace V3_5 { +namespace implementation { + +ExternalCameraDeviceSession::ExternalCameraDeviceSession( + const sp& callback, + const ExternalCameraConfig& cfg, + const std::vector& sortedFormats, + const CroppingType& croppingType, + const common::V1_0::helper::CameraMetadata& chars, + const std::string& cameraId, + unique_fd v4l2Fd) : + V3_4::implementation::ExternalCameraDeviceSession( + callback, cfg, sortedFormats, croppingType, chars, cameraId, std::move(v4l2Fd)) { + + mCallback_3_5 = nullptr; + + auto castResult = V3_5::ICameraDeviceCallback::castFrom(callback); + if (castResult.isOk()) { + sp callback3_5 = castResult; + if (callback3_5 != nullptr) { + mCallback_3_5 = callback3_5; + } + } + + if (mCallback_3_5 != nullptr) { + mSupportBufMgr = true; + } +} + +ExternalCameraDeviceSession::~ExternalCameraDeviceSession() { + closeOutputThreadImpl(); +} + +Return ExternalCameraDeviceSession::configureStreams_3_5( + const StreamConfiguration& requestedConfiguration, + ICameraDeviceSession::configureStreams_3_5_cb _hidl_cb) { + return configureStreams_3_4(requestedConfiguration.v3_4, _hidl_cb); +} + +Return ExternalCameraDeviceSession::signalStreamFlush( + const hidl_vec& /*streamIds*/, uint32_t /*streamConfigCounter*/) { + return Void(); +} + +Status ExternalCameraDeviceSession::importRequestLocked( + const CaptureRequest& request, + hidl_vec& allBufPtrs, + hidl_vec& allFences) { + if (mSupportBufMgr) { + return importRequestLockedImpl(request, allBufPtrs, allFences, /*allowEmptyBuf*/ true); + } + return importRequestLockedImpl(request, allBufPtrs, allFences, /*allowEmptyBuf*/ false); +} + + +ExternalCameraDeviceSession::BufferRequestThread::BufferRequestThread( + wp parent, + sp callbacks) : + mParent(parent), + mCallbacks(callbacks) {} + 
+int ExternalCameraDeviceSession::BufferRequestThread::requestBufferStart( + const std::vector& bufReqs) { + if (bufReqs.empty()) { + ALOGE("%s: bufReqs is empty!", __FUNCTION__); + return -1; + } + + { + std::lock_guard lk(mLock); + if (mRequestingBuffer) { + ALOGE("%s: BufferRequestThread does not support more than one concurrent request!", + __FUNCTION__); + return -1; + } + + mBufferReqs = bufReqs; + mRequestingBuffer = true; + } + mRequestCond.notify_one(); + return 0; +} + +int ExternalCameraDeviceSession::BufferRequestThread::waitForBufferRequestDone( + std::vector* outBufReq) { + std::unique_lock lk(mLock); + if (!mRequestingBuffer) { + ALOGE("%s: no pending buffer request!", __FUNCTION__); + return -1; + } + + if (mPendingReturnBufferReqs.empty()) { + std::chrono::milliseconds timeout = std::chrono::milliseconds(kReqProcTimeoutMs); + auto st = mRequestDoneCond.wait_for(lk, timeout); + if (st == std::cv_status::timeout) { + ALOGE("%s: wait for buffer request finish timeout!", __FUNCTION__); + return -1; + } + } + mRequestingBuffer = false; + *outBufReq = std::move(mPendingReturnBufferReqs); + mPendingReturnBufferReqs.clear(); + return 0; +} + +void ExternalCameraDeviceSession::BufferRequestThread::waitForNextRequest() { + ATRACE_CALL(); + std::unique_lock lk(mLock); + int waitTimes = 0; + while (mBufferReqs.empty()) { + if (exitPending()) { + return; + } + std::chrono::milliseconds timeout = std::chrono::milliseconds(kReqWaitTimeoutMs); + auto st = mRequestCond.wait_for(lk, timeout); + if (st == std::cv_status::timeout) { + waitTimes++; + if (waitTimes == kReqWaitTimesWarn) { + // BufferRequestThread just wait forever for new buffer request + // But it will print some periodic warning indicating it's waiting + ALOGV("%s: still waiting for new buffer request", __FUNCTION__); + waitTimes = 0; + } + } + } + + // Fill in hidl BufferRequest + mHalBufferReqs.resize(mBufferReqs.size()); + for (size_t i = 0; i < mHalBufferReqs.size(); i++) { + mHalBufferReqs[i].streamId = mBufferReqs[i].streamId; + mHalBufferReqs[i].numBuffersRequested = 1; + } +} + +bool ExternalCameraDeviceSession::BufferRequestThread::threadLoop() { + waitForNextRequest(); + if (exitPending()) { + return false; + } + + ATRACE_BEGIN("HIDL requestStreamBuffers"); + BufferRequestStatus status; + hidl_vec bufRets; + auto err = mCallbacks->requestStreamBuffers(mHalBufferReqs, + [&status, &bufRets] + (BufferRequestStatus s, const hidl_vec& rets) { + status = s; + bufRets = std::move(rets); + }); + ATRACE_END(); + if (!err.isOk()) { + ALOGE("%s: Transaction error: %s", __FUNCTION__, err.description().c_str()); + return false; + } + + std::unique_lock lk(mLock); + if (status == BufferRequestStatus::OK || status == BufferRequestStatus::FAILED_PARTIAL) { + if (bufRets.size() != mHalBufferReqs.size()) { + ALOGE("%s: expect %zu buffer requests returned, only got %zu", + __FUNCTION__, mHalBufferReqs.size(), bufRets.size()); + return false; + } + + auto parent = mParent.promote(); + if (parent == nullptr) { + ALOGE("%s: session has been disconnected!", __FUNCTION__); + return false; + } + + hidl_vec importedFences; + importedFences.resize(bufRets.size()); + for (size_t i = 0; i < bufRets.size(); i++) { + int streamId = bufRets[i].streamId; + switch (bufRets[i].val.getDiscriminator()) { + case StreamBuffersVal::hidl_discriminator::error: + continue; + case StreamBuffersVal::hidl_discriminator::buffers: { + const hidl_vec& hBufs = bufRets[i].val.buffers(); + if (hBufs.size() != 1) { + ALOGE("%s: expect 1 buffer returned, got %zu!", 
__FUNCTION__, hBufs.size()); + return false; + } + const V3_2::StreamBuffer& hBuf = hBufs[0]; + + mBufferReqs[i].bufferId = hBuf.bufferId; + // TODO: create a batch import API so we don't need to lock/unlock mCbsLock + // repeatedly? + lk.unlock(); + Status s = parent->importBuffer(streamId, + hBuf.bufferId, hBuf.buffer.getNativeHandle(), + /*out*/&mBufferReqs[i].bufPtr, + /*allowEmptyBuf*/false); + lk.lock(); + + if (s != Status::OK) { + ALOGE("%s: stream %d import buffer failed!", __FUNCTION__, streamId); + cleanupInflightFences(importedFences, i - 1); + return false; + } + if (!sHandleImporter.importFence(hBuf.acquireFence, + mBufferReqs[i].acquireFence)) { + ALOGE("%s: stream %d import fence failed!", __FUNCTION__, streamId); + cleanupInflightFences(importedFences, i - 1); + return false; + } + importedFences[i] = mBufferReqs[i].acquireFence; + } + break; + default: + ALOGE("%s: unkown StreamBuffersVal discrimator!", __FUNCTION__); + return false; + } + } + } else { + ALOGE("%s: requestStreamBuffers call failed!", __FUNCTION__); + } + + mPendingReturnBufferReqs = std::move(mBufferReqs); + mBufferReqs.clear(); + + lk.unlock(); + mRequestDoneCond.notify_one(); + return true; +} + +void ExternalCameraDeviceSession::initOutputThread() { + if (mSupportBufMgr) { + mBufferRequestThread = new BufferRequestThread(this, mCallback_3_5); + mBufferRequestThread->run("ExtCamBufReq", PRIORITY_DISPLAY); + } + mOutputThread = new OutputThread( + this, mCroppingType, mCameraCharacteristics, mBufferRequestThread); +} + +void ExternalCameraDeviceSession::closeOutputThreadImpl() { + if (mBufferRequestThread) { + mBufferRequestThread->requestExit(); + mBufferRequestThread->join(); + mBufferRequestThread.clear(); + } +} + +void ExternalCameraDeviceSession::closeOutputThread() { + closeOutputThreadImpl(); + V3_4::implementation::ExternalCameraDeviceSession::closeOutputThread(); +} + +ExternalCameraDeviceSession::OutputThread::OutputThread( + wp parent, + CroppingType ct, + const common::V1_0::helper::CameraMetadata& chars, + sp bufReqThread) : + V3_4::implementation::ExternalCameraDeviceSession::OutputThread(parent, ct, chars), + mBufferRequestThread(bufReqThread) {} + +ExternalCameraDeviceSession::OutputThread::~OutputThread() {} + +int ExternalCameraDeviceSession::OutputThread::requestBufferStart( + const std::vector& bufs) { + if (mBufferRequestThread != nullptr) { + return mBufferRequestThread->requestBufferStart(bufs); + } + return 0; +} + +int ExternalCameraDeviceSession::OutputThread::waitForBufferRequestDone( + /*out*/std::vector* outBufs) { + if (mBufferRequestThread != nullptr) { + return mBufferRequestThread->waitForBufferRequestDone(outBufs); + } + return 0; +} + +Return ExternalCameraDeviceSession::isReconfigurationRequired( + const V3_2::CameraMetadata& /*oldSessionParams*/, + const V3_2::CameraMetadata& /*newSessionParams*/, + ICameraDeviceSession::isReconfigurationRequired_cb _hidl_cb) { + //Stub implementation + _hidl_cb(Status::OK, true); + return Void(); +} + +} // namespace implementation +} // namespace V3_5 +} // namespace device +} // namespace camera +} // namespace hardware +} // namespace android diff --git a/camera/device/3.5/default/OWNERS b/camera/device/3.5/default/OWNERS new file mode 100644 index 0000000..f48a95c --- /dev/null +++ b/camera/device/3.5/default/OWNERS @@ -0,0 +1 @@ +include platform/frameworks/av:/camera/OWNERS diff --git a/camera/device/3.5/default/include/device_v3_5_impl/CameraDeviceSession.h 
b/camera/device/3.5/default/include/device_v3_5_impl/CameraDeviceSession.h new file mode 100644 index 0000000..87d616c --- /dev/null +++ b/camera/device/3.5/default/include/device_v3_5_impl/CameraDeviceSession.h @@ -0,0 +1,261 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_HARDWARE_CAMERA_DEVICE_V3_5_CAMERADEVICE3SESSION_H +#define ANDROID_HARDWARE_CAMERA_DEVICE_V3_5_CAMERADEVICE3SESSION_H + +#include +#include +#include +#include <../../3.4/default/include/device_v3_4_impl/CameraDeviceSession.h> +#include + +namespace android { +namespace hardware { +namespace camera { +namespace device { +namespace V3_5 { +namespace implementation { + +using namespace ::android::hardware::camera::device; +using ::android::hardware::camera::device::V3_2::BufferStatus; +using ::android::hardware::camera::device::V3_2::CaptureRequest; +using ::android::hardware::camera::device::V3_2::StreamBuffer; +using ::android::hardware::camera::device::V3_5::StreamConfiguration; +using ::android::hardware::camera::device::V3_4::HalStreamConfiguration; +using ::android::hardware::camera::device::V3_5::ICameraDeviceSession; +using ::android::hardware::camera::device::V3_5::ICameraDeviceCallback; +using ::android::hardware::camera::device::V3_2::implementation::Camera3Stream; +using ::android::hardware::camera::common::V1_0::Status; +using ::android::hardware::camera::common::V1_0::helper::HandleImporter; +using ::android::hardware::Return; +using ::android::hardware::Void; +using ::android::hardware::hidl_vec; +using ::android::hardware::hidl_string; +using ::android::sp; +using ::android::Mutex; + + +/** + * Function pointer types with C calling convention to + * use for HAL callback functions. 
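+ *
+ * These typedefs are intended to line up with the request_stream_buffers /
+ * return_stream_buffers entries of camera3_callback_ops (a sketch of the intended
+ * wiring; the exact field names come from camera3.h and are assumed here):
+ *
+ *     camera3_callback_ops_t ops = {};
+ *     ops.request_stream_buffers = CameraDeviceSession::sRequestStreamBuffers;
+ *     ops.return_stream_buffers  = CameraDeviceSession::sReturnStreamBuffers;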
+ */ +extern "C" { + typedef camera3_buffer_request_status_t (callbacks_request_stream_buffer_t)( + const struct camera3_callback_ops *, + uint32_t num_buffer_reqs, + const camera3_buffer_request_t *buffer_reqs, + /*out*/uint32_t *num_returned_buf_reqs, + /*out*/camera3_stream_buffer_ret_t *returned_buf_reqs); + + typedef void (callbacks_return_stream_buffer_t)( + const struct camera3_callback_ops *, + uint32_t num_buffers, + const camera3_stream_buffer_t* const* buffers); +} + +struct CameraDeviceSession : public V3_4::implementation::CameraDeviceSession { + + CameraDeviceSession(camera3_device_t*, + const camera_metadata_t* deviceInfo, + const sp&); + virtual ~CameraDeviceSession(); + + virtual sp getInterface() override { + return new TrampolineSessionInterface_3_5(this); + } + +protected: + // Methods from v3.4 and earlier will trampoline to inherited implementation + Return configureStreams_3_5( + const StreamConfiguration& requestedConfiguration, + ICameraDeviceSession::configureStreams_3_5_cb _hidl_cb); + + Return signalStreamFlush( + const hidl_vec& streamIds, + uint32_t streamConfigCounter); + + virtual Status importRequest( + const CaptureRequest& request, + hidl_vec& allBufPtrs, + hidl_vec& allFences) override; + + Return isReconfigurationRequired(const V3_2::CameraMetadata& oldSessionParams, + const V3_2::CameraMetadata& newSessionParams, + ICameraDeviceSession::isReconfigurationRequired_cb _hidl_cb); + /** + * Static callback forwarding methods from HAL to instance + */ + static callbacks_request_stream_buffer_t sRequestStreamBuffers; + static callbacks_return_stream_buffer_t sReturnStreamBuffers; + + camera3_buffer_request_status_t requestStreamBuffers( + uint32_t num_buffer_reqs, + const camera3_buffer_request_t *buffer_reqs, + /*out*/uint32_t *num_returned_buf_reqs, + /*out*/camera3_stream_buffer_ret_t *returned_buf_reqs); + + void returnStreamBuffers( + uint32_t num_buffers, + const camera3_stream_buffer_t* const* buffers); + + struct BufferHasher { + size_t operator()(const buffer_handle_t& buf) const { + if (buf == nullptr) + return 0; + + size_t result = 1; + result = 31 * result + buf->numFds; + for (int i = 0; i < buf->numFds; i++) { + result = 31 * result + buf->data[i]; + } + return result; + } + }; + + struct BufferComparator { + bool operator()(const buffer_handle_t& buf1, const buffer_handle_t& buf2) const { + if (buf1->numFds == buf2->numFds) { + for (int i = 0; i < buf1->numFds; i++) { + if (buf1->data[i] != buf2->data[i]) { + return false; + } + } + return true; + } + return false; + } + }; + + Camera3Stream* getStreamPointer(int32_t streamId); + + // Register buffer to mBufferIdMaps so we can find corresponding bufferId + // when the buffer is returned to camera service + void pushBufferId(const buffer_handle_t& buf, uint64_t bufferId, int streamId); + + // Method to pop buffer's bufferId from mBufferIdMaps + // BUFFER_ID_NO_BUFFER is returned if no matching buffer is found + uint64_t popBufferId(const buffer_handle_t& buf, int streamId); + + // Method to cleanup imported buffer/fences if requestStreamBuffers fails half way + void cleanupInflightBufferFences( + std::vector& fences, std::vector>& bufs); + + // Overrides the default constructCaptureResult behavior for buffer management APIs + virtual uint64_t getCapResultBufferId(const buffer_handle_t& buf, int streamId) override; + + std::mutex mBufferIdMapLock; // protecting mBufferIdMaps and mNextBufferId + typedef std::unordered_map BufferIdMap; + // stream ID -> per stream buffer ID map for buffers coming 
from requestStreamBuffers API + // Entries are created during requestStreamBuffers when a stream first request a buffer, and + // deleted in returnStreamBuffers/processCaptureResult* when all buffers are returned + std::unordered_map mBufferIdMaps; + + sp mCallback_3_5; + bool mSupportBufMgr; + +private: + + struct TrampolineSessionInterface_3_5 : public ICameraDeviceSession { + TrampolineSessionInterface_3_5(sp parent) : + mParent(parent) {} + + virtual Return constructDefaultRequestSettings( + V3_2::RequestTemplate type, + V3_3::ICameraDeviceSession::constructDefaultRequestSettings_cb _hidl_cb) override { + return mParent->constructDefaultRequestSettings(type, _hidl_cb); + } + + virtual Return configureStreams( + const V3_2::StreamConfiguration& requestedConfiguration, + V3_3::ICameraDeviceSession::configureStreams_cb _hidl_cb) override { + return mParent->configureStreams(requestedConfiguration, _hidl_cb); + } + + virtual Return processCaptureRequest_3_4( + const hidl_vec& requests, + const hidl_vec& cachesToRemove, + ICameraDeviceSession::processCaptureRequest_3_4_cb _hidl_cb) override { + return mParent->processCaptureRequest_3_4(requests, cachesToRemove, _hidl_cb); + } + + virtual Return processCaptureRequest(const hidl_vec& requests, + const hidl_vec& cachesToRemove, + V3_3::ICameraDeviceSession::processCaptureRequest_cb _hidl_cb) override { + return mParent->processCaptureRequest(requests, cachesToRemove, _hidl_cb); + } + + virtual Return getCaptureRequestMetadataQueue( + V3_3::ICameraDeviceSession::getCaptureRequestMetadataQueue_cb _hidl_cb) override { + return mParent->getCaptureRequestMetadataQueue(_hidl_cb); + } + + virtual Return getCaptureResultMetadataQueue( + V3_3::ICameraDeviceSession::getCaptureResultMetadataQueue_cb _hidl_cb) override { + return mParent->getCaptureResultMetadataQueue(_hidl_cb); + } + + virtual Return flush() override { + return mParent->flush(); + } + + virtual Return close() override { + return mParent->close(); + } + + virtual Return configureStreams_3_3( + const V3_2::StreamConfiguration& requestedConfiguration, + configureStreams_3_3_cb _hidl_cb) override { + return mParent->configureStreams_3_3(requestedConfiguration, _hidl_cb); + } + + virtual Return configureStreams_3_4( + const V3_4::StreamConfiguration& requestedConfiguration, + configureStreams_3_4_cb _hidl_cb) override { + return mParent->configureStreams_3_4(requestedConfiguration, _hidl_cb); + } + + virtual Return configureStreams_3_5( + const StreamConfiguration& requestedConfiguration, + configureStreams_3_5_cb _hidl_cb) override { + return mParent->configureStreams_3_5(requestedConfiguration, _hidl_cb); + } + + virtual Return signalStreamFlush( + const hidl_vec& requests, + uint32_t streamConfigCounter) override { + return mParent->signalStreamFlush(requests, streamConfigCounter); + } + + virtual Return isReconfigurationRequired(const V3_2::CameraMetadata& oldSessionParams, + const V3_2::CameraMetadata& newSessionParams, + ICameraDeviceSession::isReconfigurationRequired_cb _hidl_cb) override { + return mParent->isReconfigurationRequired(oldSessionParams, newSessionParams, _hidl_cb); + } + private: + sp mParent; + }; +}; + +} // namespace implementation +} // namespace V3_5 +} // namespace device +} // namespace camera +} // namespace hardware +} // namespace android + +#endif // ANDROID_HARDWARE_CAMERA_DEVICE_V3_5_CAMERADEVICE3SESSION_H diff --git a/camera/device/3.5/default/include/device_v3_5_impl/CameraDevice_3_5.h b/camera/device/3.5/default/include/device_v3_5_impl/CameraDevice_3_5.h 
new file mode 100644 index 0000000..76c8cf8 --- /dev/null +++ b/camera/device/3.5/default/include/device_v3_5_impl/CameraDevice_3_5.h @@ -0,0 +1,122 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_HARDWARE_CAMERA_DEVICE_V3_5_CAMERADEVICE_H +#define ANDROID_HARDWARE_CAMERA_DEVICE_V3_5_CAMERADEVICE_H + +#include "CameraDeviceSession.h" +#include <../../../../3.4/default/include/device_v3_4_impl/CameraDevice_3_4.h> + +#include + +namespace android { +namespace hardware { +namespace camera { +namespace device { +namespace V3_5 { +namespace implementation { + +using namespace ::android::hardware::camera::device; + +using ::android::hardware::camera::common::V1_0::helper::CameraModule; +using ::android::hardware::Return; +using ::android::hardware::Void; +using ::android::hardware::hidl_string; +using ::android::hardware::camera::common::V1_0::TorchMode; +using ::android::hardware::camera::common::V1_0::helper::CameraModule; +using ::android::hardware::camera::common::V1_0::Status; +using ::android::sp; + +struct CameraDevice : public V3_4::implementation::CameraDevice { + // Called by provider HAL. + // Provider HAL must ensure the uniqueness of CameraDevice object per cameraId, or there could + // be multiple CameraDevice trying to access the same physical camera. Also, provider will have + // to keep track of all CameraDevice objects in order to notify CameraDevice when the underlying + // camera is detached. 
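+ // (Illustrative only, not part of this interface: a provider can satisfy both
+ // requirements with a map keyed by camera id, e.g.
+ //     std::map<std::string, wp<CameraDevice>> mCameraDevices;
+ // checking for an existing entry before constructing a new device, and walking
+ // the map to notify devices on hot-unplug.)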
+ // Delegates nearly all work to CameraDevice_3_4 + CameraDevice(sp module, + const std::string& cameraId, + const SortedVector>& cameraDeviceNames); + virtual ~CameraDevice(); + + virtual sp getInterface() override { + return new TrampolineDeviceInterface_3_5(this); + } + +protected: + virtual sp createSession(camera3_device_t*, + const camera_metadata_t* deviceInfo, + const sp&) override; + + Return getPhysicalCameraCharacteristics(const hidl_string& physicalCameraId, + V3_5::ICameraDevice::getPhysicalCameraCharacteristics_cb _hidl_cb); + + Return isStreamCombinationSupported( + const V3_4::StreamConfiguration& streams, + V3_5::ICameraDevice::isStreamCombinationSupported_cb _hidl_cb); + +private: + struct TrampolineDeviceInterface_3_5 : public ICameraDevice { + TrampolineDeviceInterface_3_5(sp parent) : + mParent(parent) {} + + virtual Return getResourceCost(V3_2::ICameraDevice::getResourceCost_cb _hidl_cb) + override { + return mParent->getResourceCost(_hidl_cb); + } + + virtual Return getCameraCharacteristics( + V3_2::ICameraDevice::getCameraCharacteristics_cb _hidl_cb) override { + return mParent->getCameraCharacteristics(_hidl_cb); + } + + virtual Return setTorchMode(TorchMode mode) override { + return mParent->setTorchMode(mode); + } + + virtual Return open(const sp& callback, + V3_2::ICameraDevice::open_cb _hidl_cb) override { + return mParent->open(callback, _hidl_cb); + } + + virtual Return dumpState(const hidl_handle& fd) override { + return mParent->dumpState(fd); + } + + virtual Return getPhysicalCameraCharacteristics(const hidl_string& physicalCameraId, + V3_5::ICameraDevice::getPhysicalCameraCharacteristics_cb _hidl_cb) override { + return mParent->getPhysicalCameraCharacteristics(physicalCameraId, _hidl_cb); + } + + virtual Return isStreamCombinationSupported( + const V3_4::StreamConfiguration& streams, + V3_5::ICameraDevice::isStreamCombinationSupported_cb _hidl_cb) override { + return mParent->isStreamCombinationSupported(streams, _hidl_cb); + } + + private: + sp mParent; + }; +}; + +} // namespace implementation +} // namespace V3_5 +} // namespace device +} // namespace camera +} // namespace hardware +} // namespace android + +#endif // ANDROID_HARDWARE_CAMERA_DEVICE_V3_5_CAMERADEVICE_H diff --git a/camera/device/3.5/default/include/ext_device_v3_5_impl/ExternalCameraDeviceSession.h b/camera/device/3.5/default/include/ext_device_v3_5_impl/ExternalCameraDeviceSession.h new file mode 100644 index 0000000..e89ef45 --- /dev/null +++ b/camera/device/3.5/default/include/ext_device_v3_5_impl/ExternalCameraDeviceSession.h @@ -0,0 +1,275 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef ANDROID_HARDWARE_CAMERA_DEVICE_V3_5_EXTCAMERADEVICESESSION_H +#define ANDROID_HARDWARE_CAMERA_DEVICE_V3_5_EXTCAMERADEVICESESSION_H + +#include +#include +#include <../../3.4/default/include/ext_device_v3_4_impl/ExternalCameraDeviceSession.h> + +namespace android { +namespace hardware { +namespace camera { +namespace device { +namespace V3_5 { +namespace implementation { + +using ::android::hardware::camera::device::V3_2::BufferCache; +using ::android::hardware::camera::device::V3_5::BufferRequest; +using ::android::hardware::camera::device::V3_5::BufferRequestStatus; +using ::android::hardware::camera::device::V3_2::BufferStatus; +using ::android::hardware::camera::device::V3_2::CameraMetadata; +using ::android::hardware::camera::device::V3_2::CaptureRequest; +using ::android::hardware::camera::device::V3_2::CaptureResult; +using ::android::hardware::camera::device::V3_2::ErrorCode; +using ::android::hardware::camera::device::V3_5::ICameraDeviceCallback; +using ::android::hardware::camera::device::V3_2::MsgType; +using ::android::hardware::camera::device::V3_2::NotifyMsg; +using ::android::hardware::camera::device::V3_2::RequestTemplate; +using ::android::hardware::camera::device::V3_2::Stream; +using ::android::hardware::camera::device::V3_5::StreamConfiguration; +using ::android::hardware::camera::device::V3_2::StreamConfigurationMode; +using ::android::hardware::camera::device::V3_2::StreamRotation; +using ::android::hardware::camera::device::V3_2::StreamType; +using ::android::hardware::camera::device::V3_2::DataspaceFlags; +using ::android::hardware::camera::device::V3_2::CameraBlob; +using ::android::hardware::camera::device::V3_2::CameraBlobId; +using ::android::hardware::camera::device::V3_4::HalStreamConfiguration; +using ::android::hardware::camera::device::V3_5::ICameraDeviceSession; +using ::android::hardware::camera::common::V1_0::Status; +using ::android::hardware::camera::common::V1_0::helper::HandleImporter; +using ::android::hardware::camera::common::V1_0::helper::ExifUtils; +using ::android::hardware::camera::external::common::ExternalCameraConfig; +using ::android::hardware::camera::external::common::Size; +using ::android::hardware::camera::external::common::SizeHasher; +using ::android::hardware::graphics::common::V1_0::BufferUsage; +using ::android::hardware::graphics::common::V1_0::Dataspace; +using ::android::hardware::graphics::common::V1_0::PixelFormat; +using ::android::hardware::kSynchronizedReadWrite; +using ::android::hardware::MessageQueue; +using ::android::hardware::MQDescriptorSync; +using ::android::hardware::Return; +using ::android::hardware::Void; +using ::android::hardware::hidl_vec; +using ::android::hardware::hidl_string; +using ::android::sp; +using ::android::Mutex; +using ::android::base::unique_fd; + +using ::android::hardware::camera::device::V3_4::implementation::SupportedV4L2Format; +using ::android::hardware::camera::device::V3_4::implementation::CroppingType; +using ::android::hardware::camera::device::V3_4::implementation::HalStreamBuffer; + +struct ExternalCameraDeviceSession : public V3_4::implementation::ExternalCameraDeviceSession { + + ExternalCameraDeviceSession(const sp&, + const ExternalCameraConfig& cfg, + const std::vector& sortedFormats, + const CroppingType& croppingType, + const common::V1_0::helper::CameraMetadata& chars, + const std::string& cameraId, + unique_fd v4l2Fd); + virtual ~ExternalCameraDeviceSession(); + + // Retrieve the HIDL interface, split into its own class to avoid inheritance issues when + 
// dealing with minor version revs and simultaneous implementation and interface inheritance + virtual sp getInterface() override { + return new TrampolineSessionInterface_3_5(this); + } + + static Status isStreamCombinationSupported(const V3_2::StreamConfiguration& config, + const std::vector& supportedFormats, + const ExternalCameraConfig& devCfg) { + return V3_4::implementation::ExternalCameraDeviceSession::isStreamCombinationSupported( + config, supportedFormats, devCfg); + } + + class BufferRequestThread : public android::Thread { + public: + BufferRequestThread( + wp parent, + sp callbacks); + + int requestBufferStart(const std::vector&); + int waitForBufferRequestDone( + /*out*/std::vector*); + + virtual bool threadLoop() override; + + private: + void waitForNextRequest(); + + const wp mParent; + const sp mCallbacks; + + std::mutex mLock; + bool mRequestingBuffer = false; + + std::vector mBufferReqs; + std::vector mPendingReturnBufferReqs; + // mHalBufferReqs is not under mLock protection during the HIDL transaction + hidl_vec mHalBufferReqs; + + // request buffers takes much less time in steady state, but can take much longer + // when requesting 1st buffer from a stream. + // TODO: consider a separate timeout for new vs. steady state? + // TODO: or make sure framework is warming up the pipeline during configure new stream? + static const int kReqProcTimeoutMs = 66; + + static const int kReqWaitTimeoutMs = 33; + static const int kReqWaitTimesWarn = 90; // 33ms * 90 ~= 3 sec + std::condition_variable mRequestCond; // signaled when a new buffer request incoming + std::condition_variable mRequestDoneCond; // signaled when a request is done + }; + + class OutputThread : + public V3_4::implementation::ExternalCameraDeviceSession::OutputThread { + public: + // TODO: pass buffer request thread to OutputThread ctor + OutputThread(wp parent, CroppingType, + const common::V1_0::helper::CameraMetadata&, + sp bufReqThread); + virtual ~OutputThread(); + + protected: + // Methods to request output buffer in parallel + virtual int requestBufferStart(const std::vector&) override; + virtual int waitForBufferRequestDone( + /*out*/std::vector*) override; + + const sp mBufferRequestThread; + }; + +protected: + // Methods from v3.4 and earlier will trampoline to inherited implementation + Return configureStreams_3_5( + const StreamConfiguration& requestedConfiguration, + ICameraDeviceSession::configureStreams_3_5_cb _hidl_cb); + + Return signalStreamFlush( + const hidl_vec& requests, + uint32_t streamConfigCounter); + + Return isReconfigurationRequired(const V3_2::CameraMetadata& oldSessionParams, + const V3_2::CameraMetadata& newSessionParams, + ICameraDeviceSession::isReconfigurationRequired_cb _hidl_cb); + + virtual void initOutputThread() override; + virtual void closeOutputThread() override; + void closeOutputThreadImpl(); + + virtual Status importRequestLocked( + const CaptureRequest& request, + hidl_vec& allBufPtrs, + hidl_vec& allFences) override; + + sp mBufferRequestThread; + + sp mCallback_3_5; + bool mSupportBufMgr; + +private: + + struct TrampolineSessionInterface_3_5 : public ICameraDeviceSession { + TrampolineSessionInterface_3_5(sp parent) : + mParent(parent) {} + + virtual Return constructDefaultRequestSettings( + RequestTemplate type, + V3_3::ICameraDeviceSession::constructDefaultRequestSettings_cb _hidl_cb) override { + return mParent->constructDefaultRequestSettings(type, _hidl_cb); + } + + virtual Return configureStreams( + const V3_2::StreamConfiguration& requestedConfiguration, + 
V3_3::ICameraDeviceSession::configureStreams_cb _hidl_cb) override { + return mParent->configureStreams(requestedConfiguration, _hidl_cb); + } + + virtual Return processCaptureRequest(const hidl_vec& requests, + const hidl_vec& cachesToRemove, + V3_3::ICameraDeviceSession::processCaptureRequest_cb _hidl_cb) override { + return mParent->processCaptureRequest(requests, cachesToRemove, _hidl_cb); + } + + virtual Return getCaptureRequestMetadataQueue( + V3_3::ICameraDeviceSession::getCaptureRequestMetadataQueue_cb _hidl_cb) override { + return mParent->getCaptureRequestMetadataQueue(_hidl_cb); + } + + virtual Return getCaptureResultMetadataQueue( + V3_3::ICameraDeviceSession::getCaptureResultMetadataQueue_cb _hidl_cb) override { + return mParent->getCaptureResultMetadataQueue(_hidl_cb); + } + + virtual Return flush() override { + return mParent->flush(); + } + + virtual Return close() override { + return mParent->close(); + } + + virtual Return configureStreams_3_3( + const V3_2::StreamConfiguration& requestedConfiguration, + configureStreams_3_3_cb _hidl_cb) override { + return mParent->configureStreams_3_3(requestedConfiguration, _hidl_cb); + } + + virtual Return configureStreams_3_4( + const V3_4::StreamConfiguration& requestedConfiguration, + configureStreams_3_4_cb _hidl_cb) override { + return mParent->configureStreams_3_4(requestedConfiguration, _hidl_cb); + } + + virtual Return processCaptureRequest_3_4(const hidl_vec& requests, + const hidl_vec& cachesToRemove, + ICameraDeviceSession::processCaptureRequest_3_4_cb _hidl_cb) override { + return mParent->processCaptureRequest_3_4(requests, cachesToRemove, _hidl_cb); + } + + virtual Return configureStreams_3_5( + const StreamConfiguration& requestedConfiguration, + configureStreams_3_5_cb _hidl_cb) override { + return mParent->configureStreams_3_5(requestedConfiguration, _hidl_cb); + } + + virtual Return signalStreamFlush( + const hidl_vec& requests, + uint32_t streamConfigCounter) override { + return mParent->signalStreamFlush(requests, streamConfigCounter); + } + + virtual Return isReconfigurationRequired(const V3_2::CameraMetadata& oldSessionParams, + const V3_2::CameraMetadata& newSessionParams, + ICameraDeviceSession::isReconfigurationRequired_cb _hidl_cb) override { + return mParent->isReconfigurationRequired(oldSessionParams, newSessionParams, _hidl_cb); + } + + private: + sp mParent; + }; +}; + +} // namespace implementation +} // namespace V3_5 +} // namespace device +} // namespace camera +} // namespace hardware +} // namespace android + +#endif // ANDROID_HARDWARE_CAMERA_DEVICE_V3_5_EXTCAMERADEVICESESSION_H diff --git a/camera/device/3.5/default/include/ext_device_v3_5_impl/ExternalCameraDevice_3_5.h b/camera/device/3.5/default/include/ext_device_v3_5_impl/ExternalCameraDevice_3_5.h new file mode 100644 index 0000000..b73490c --- /dev/null +++ b/camera/device/3.5/default/include/ext_device_v3_5_impl/ExternalCameraDevice_3_5.h @@ -0,0 +1,142 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_HARDWARE_CAMERA_DEVICE_V3_5_EXTCAMERADEVICE_H +#define ANDROID_HARDWARE_CAMERA_DEVICE_V3_5_EXTCAMERADEVICE_H + +#include "utils/Mutex.h" +#include "CameraMetadata.h" + +#include +#include +#include +#include +#include "ExternalCameraDeviceSession.h" +#include <../../../../3.4/default/include/ext_device_v3_4_impl/ExternalCameraDevice_3_4.h> + +namespace android { +namespace hardware { +namespace camera { +namespace device { +namespace V3_5 { +namespace implementation { + +using namespace ::android::hardware::camera::device; +using ::android::hardware::camera::device::V3_5::ICameraDevice; +using ::android::hardware::camera::common::V1_0::CameraResourceCost; +using ::android::hardware::camera::common::V1_0::TorchMode; +using ::android::hardware::camera::common::V1_0::Status; +using ::android::hardware::camera::external::common::ExternalCameraConfig; +using ::android::hardware::camera::external::common::Size; +using ::android::hardware::Return; +using ::android::hardware::Void; +using ::android::hardware::hidl_vec; +using ::android::hardware::hidl_string; +using ::android::sp; + +/* + * The camera device HAL implementation is opened lazily (via the open call) + */ +struct ExternalCameraDevice : public V3_4::implementation::ExternalCameraDevice { + + // Called by external camera provider HAL. + // Provider HAL must ensure the uniqueness of CameraDevice object per cameraId, or there could + // be multiple CameraDevice trying to access the same physical camera. Also, provider will have + // to keep track of all CameraDevice objects in order to notify CameraDevice when the underlying + // camera is detached. + ExternalCameraDevice(const std::string& cameraId, const ExternalCameraConfig& cfg); + virtual ~ExternalCameraDevice(); + + virtual sp getInterface() override { + return new TrampolineDeviceInterface_3_5(this); + } + + Return getPhysicalCameraCharacteristics(const hidl_string& physicalCameraId, + V3_5::ICameraDevice::getPhysicalCameraCharacteristics_cb _hidl_cb); + + Return isStreamCombinationSupported( + const V3_4::StreamConfiguration& streams, + V3_5::ICameraDevice::isStreamCombinationSupported_cb _hidl_cb); + +protected: + virtual sp createSession( + const sp&, + const ExternalCameraConfig& cfg, + const std::vector& sortedFormats, + const CroppingType& croppingType, + const common::V1_0::helper::CameraMetadata& chars, + const std::string& cameraId, + unique_fd v4l2Fd) override; + + virtual status_t initDefaultCharsKeys( + ::android::hardware::camera::common::V1_0::helper::CameraMetadata*) override; + + const std::vector EXTRA_CHARACTERISTICS_KEYS_3_5 = { + ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION + }; + +private: + struct TrampolineDeviceInterface_3_5 : public ICameraDevice { + TrampolineDeviceInterface_3_5(sp parent) : + mParent(parent) {} + + virtual Return getResourceCost(V3_2::ICameraDevice::getResourceCost_cb _hidl_cb) + override { + return mParent->getResourceCost(_hidl_cb); + } + + virtual Return getCameraCharacteristics( + V3_2::ICameraDevice::getCameraCharacteristics_cb _hidl_cb) override { + return mParent->getCameraCharacteristics(_hidl_cb); + } + + virtual Return setTorchMode(TorchMode mode) override { + return mParent->setTorchMode(mode); + } + + virtual Return open(const sp& callback, + V3_2::ICameraDevice::open_cb _hidl_cb) override { + return mParent->open(callback, _hidl_cb); + } + + virtual Return dumpState(const hidl_handle& fd) 
override { + return mParent->dumpState(fd); + } + + virtual Return getPhysicalCameraCharacteristics(const hidl_string& physicalCameraId, + V3_5::ICameraDevice::getPhysicalCameraCharacteristics_cb _hidl_cb) override { + return mParent->getPhysicalCameraCharacteristics(physicalCameraId, _hidl_cb); + } + + virtual Return isStreamCombinationSupported( + const V3_4::StreamConfiguration& streams, + V3_5::ICameraDevice::isStreamCombinationSupported_cb _hidl_cb) override { + return mParent->isStreamCombinationSupported(streams, _hidl_cb); + } + + private: + sp mParent; + }; +}; + +} // namespace implementation +} // namespace V3_5 +} // namespace device +} // namespace camera +} // namespace hardware +} // namespace android + +#endif // ANDROID_HARDWARE_CAMERA_DEVICE_V3_5_EXTCAMERADEVICE_H diff --git a/camera/device/3.5/types.hal b/camera/device/3.5/types.hal new file mode 100644 index 0000000..38493b4 --- /dev/null +++ b/camera/device/3.5/types.hal @@ -0,0 +1,177 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package android.hardware.camera.device@3.5; + +import @3.2::StreamBuffer; +import @3.4::StreamConfiguration; +import @3.2::CameraBlobId; + +/** + * If the result metadata cannot be produced for a physical camera device part of a logical + * multi-camera, then HAL must invoke the notification callback and pass a message with ERROR_RESULT + * code and errorStreamId that contains the stream id associated with that physical device. Such + * callback must be made before the final processCaptureResult() call for the corresponding request. + * The behavior during absent result metadata remains unchanged for a logical or a non-logical + * camera device and the errorStreamId must be set to -1. + */ + +/** + * StreamConfiguration: + * + * Identical to @3.4::StreamConfiguration, except that it contains streamConfigCounter + */ +struct StreamConfiguration { + @3.4::StreamConfiguration v3_4; + + /** + * An incrementing counter used for HAL to keep track of the stream + * configuration and the paired oneway signalStreamFlush call. When the + * counter in signalStreamFlush call is less than the counter here, that + * signalStreamFlush call is stale. + */ + uint32_t streamConfigCounter; +}; + +enum StreamBufferRequestError : uint32_t { + /** + * Get buffer failed due to timeout waiting for an available buffer. This is + * likely due to the client application holding too many buffers, or the + * system is under memory pressure. + * This is not a fatal error. HAL may try to request buffer for this stream + * later. If HAL cannot get a buffer for certain capture request in time + * due to this error, HAL can send an ERROR_REQUEST to camera service and + * drop processing that request. + */ + NO_BUFFER_AVAILABLE = 1, + + /** + * Get buffer failed due to HAL has reached its maxBuffer count. This is not + * a fatal error. 
HAL may try to request buffer for this stream again after + * it returns at least one buffer of that stream to camera service. + */ + MAX_BUFFER_EXCEEDED = 2, + + /** + * Get buffer failed due to the stream is disconnected by client + * application, has been removed, or not recognized by camera service. + * This means application is no longer interested in this stream. + * Requesting buffer for this stream must never succeed after this error is + * returned. HAL must safely return all buffers of this stream after + * getting this error. If HAL gets another capture request later targeting + * a disconnected stream, HAL must send an ERROR_REQUEST to camera service + * and drop processing that request. + */ + STREAM_DISCONNECTED = 3, + + /** + * Get buffer failed for unknown reasons. This is a fatal error and HAL must + * send ERROR_DEVICE to camera service and be ready to be closed. + */ + UNKNOWN_ERROR = 4 +}; + +/** + * Per-stream return value for requestStreamBuffers. + * For each stream, either an StreamBufferRequestError error code, or all + * requested buffers for this stream is returned, so buffers.size() must be + * equal to BufferRequest::numBuffersRequested of corresponding stream. + */ +safe_union StreamBuffersVal { + StreamBufferRequestError error; + vec<@3.2::StreamBuffer> buffers; +}; + +struct StreamBufferRet { + int32_t streamId; + StreamBuffersVal val; +}; + +enum BufferRequestStatus : uint32_t { + /** + * Method call succeeded and all requested buffers are returned. + */ + OK = 0, + + /** + * Method call failed for some streams. Check per stream status for each + * returned StreamBufferRet. + */ + FAILED_PARTIAL = 1, + + /** + * Method call failed for all streams and no buffers are returned at all. + * Camera service is about to or is performing configureStreams. HAL must + * wait until next configureStreams call is finished before requesting + * buffers again. + */ + FAILED_CONFIGURING = 2, + + /** + * Method call failed for all streams and no buffers are returned at all. + * Failure due to bad BufferRequest input, eg: unknown streamId or repeated + * streamId. + */ + FAILED_ILLEGAL_ARGUMENTS = 3, + + /** + * Method call failed for all streams and no buffers are returned at all. + * Failure due to unknown reason, or all streams has individual failing + * reason. For the latter case, check per stream status for each returned + * StreamBufferRet. + */ + FAILED_UNKNOWN = 4, +}; + +struct BufferRequest { + int32_t streamId; + uint32_t numBuffersRequested; +}; + +/** + * CameraBlob: + * + * Identical to @3.2::CameraBlob, except that it also supports transport of JPEG + * APP segments blob, which contains JPEG APP1 to APPn (Application Marker) + * segments as specified in JEITA CP-3451. + * + * To capture a JPEG APP segments blob, a stream is created using the pixel format + * HAL_PIXEL_FORMAT_BLOB and dataspace HAL_DATASPACE_JPEG_APP_SEGMENTS. The buffer + * size for the stream is calculated by the framework, based on the static + * metadata field android.heic.maxAppSegmentsCount, and is assigned to both + * @3.2::Stream::width and @3.4::Stream::bufferSize. Camera framework sets + * @3.2::Stream::height to 1. + * + * Similar to JPEG image, the JPEG APP segment images can be of variable size, + * so the HAL needs to include the final size of all APP segments using this + * structure inside the output stream buffer. The camera blob ID field must be + * set to CameraBlobId::JPEG_APP_SEGMENTS. 
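 *
 * For example (a C++-side sketch only; the buffer names are illustrative), after
 * the HAL has written appSegmentsSize bytes of APP segment data into a mapped
 * output buffer of size bufferSize, it appends the transport header at the very
 * end of the buffer, as spelled out in the next paragraph:
 *
 *     CameraBlob blob{CameraBlobId::JPEG_APP_SEGMENTS, appSegmentsSize};
 *     memcpy(bufData + bufferSize - sizeof(CameraBlob), &blob, sizeof(CameraBlob));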
+ * + * The transport header must be at the end of the JPEG APP segments output stream + * buffer. That means the blobId must start at byte[buffer_size - + * sizeof(CameraBlob)], where the buffer_size is the size of gralloc + * buffer. The JPEG APP segments data itself starts at the beginning of the + * buffer and must be blobSize bytes long. + */ +enum CameraBlobId : @3.2::CameraBlobId { + JPEG_APP_SEGMENTS = 0x100, +}; + +struct CameraBlob { + CameraBlobId blobId; + uint32_t blobSize; +}; + diff --git a/camera/device/3.6/ICameraDevice.hal b/camera/device/3.6/ICameraDevice.hal new file mode 100644 index 0000000..e859606 --- /dev/null +++ b/camera/device/3.6/ICameraDevice.hal @@ -0,0 +1,31 @@ +/* + * Copyright (C) 2020 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package android.hardware.camera.device@3.6; + +import @3.5::ICameraDevice; + +/** + * Camera device interface + * + * Supports the android.hardware.Camera API, and the android.hardware.camera2 + * API at LIMITED or better hardware level. + * + * ICameraDevice.open() must return @3.2::ICameraDeviceSession or + * @3.5::ICameraDeviceSession or @3.6::ICameraDeviceSession. + */ +interface ICameraDevice extends @3.5::ICameraDevice { +}; diff --git a/camera/device/3.6/ICameraDeviceSession.hal b/camera/device/3.6/ICameraDeviceSession.hal new file mode 100644 index 0000000..00ebcc3 --- /dev/null +++ b/camera/device/3.6/ICameraDeviceSession.hal @@ -0,0 +1,132 @@ +/* + * Copyright (C) 2019 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package android.hardware.camera.device@3.6; + +import android.hardware.camera.common@1.0::Status; +import @3.5::ICameraDeviceSession; +import @3.5::StreamConfiguration; +import ICameraOfflineSession; + +/** + * Camera device active session interface. + * + * Obtained via ICameraDevice::open(), this interface contains the methods to + * configure and request captures from an active camera device. + */ +interface ICameraDeviceSession extends @3.5::ICameraDeviceSession { + /** + * configureStreams_3_6: + * + * Identical to @3.5::ICameraDeviceSession.configureStreams, except that: + * + * - a boolean supportOffline is added to HalStreamConfiguration to indicate + * if this stream can be switched to offline mode later. + * + * @return status Status code for the operation, one of: + * OK: + * On successful stream configuration. 
+ * INTERNAL_ERROR: + * If there has been a fatal error and the device is no longer + * operational. Only close() can be called successfully by the + * framework after this error is returned. + * ILLEGAL_ARGUMENT: + * If the requested stream configuration is invalid. Some examples + * of invalid stream configurations include: + * - Including more than 1 INPUT stream + * - Not including any OUTPUT streams + * - Including streams with unsupported formats, or an unsupported + * size for that format. + * - Including too many output streams of a certain format. + * - Unsupported rotation configuration + * - Stream sizes/formats don't satisfy the + * StreamConfigurationMode requirements + * for non-NORMAL mode, or the requested operation_mode is not + * supported by the HAL. + * - Unsupported usage flag + * The camera service cannot filter out all possible illegal stream + * configurations, since some devices may support more simultaneous + * streams or larger stream resolutions than the minimum required + * for a given camera device hardware level. The HAL must return an + * ILLEGAL_ARGUMENT for any unsupported stream set, and then be + * ready to accept a future valid stream configuration in a later + * configureStreams call. + * @return halConfiguration The stream parameters desired by the HAL for + * each stream, including maximum buffers, the usage flags, and the + * override format. + */ + configureStreams_3_6(@3.5::StreamConfiguration requestedConfiguration) + generates (Status status, HalStreamConfiguration halConfiguration); + + /** + * switchToOffline: + * + * Switch the current running session from actively streaming mode to the + * offline mode. See ICameraOfflineSession for more details. + * + * The streamsToKeep argument contains list of streams IDs where application + * still needs its output. For all streams application does not need anymore, + * camera HAL can send ERROR_BUFFER to speed up the transition, or even send + * ERROR_REQUEST if all output targets of a request is not needed. By the + * time this call returns, camera HAL must have returned all buffers coming + * from streams no longer needed and have erased buffer caches of such streams. + * + * For all requests that are going to be transferred to offline session, + * the ICameraDeviceSession is responsible to capture all input buffers from + * the image sensor before the switchToOffline call returns. Before + * switchToOffline returns, camera HAL must have completed all requests not + * switching to offline mode, and collected information on what streams and + * requests are going to continue in the offline session, in the + * offlineSessionInfo output argument. + * + * If there are no requests qualified to be transferred to offline session, + * the camera HAL must return a null ICameraOfflineSession object with OK + * status. In this scenario, the camera HAL still must flush all inflight + * requests and unconfigure all streams before returning this call. + * + * After switchToOffline returns, the ICameraDeviceSession must be back to + * unconfigured state as if it is just created and no streams are configured. + * Also, camera HAL must not call any methods in ICameraDeviceCallback since + * all unfinished requests are now transferred to the offline session. + * After the call returns, camera service may then call close to close + * the camera device, or call configureStream* again to reconfigure the + * camera and then send new capture requests with processCaptureRequest. 
In + * the latter case, it is legitimate for camera HAL to call methods in + * ICameraDeviceCallback again in response to the newly submitted capture + * requests. + * + * @return status Status code for the operation, one of: + * OK: + * On switching to offline session and unconfiguring streams + * successfully. + * ILLEGAL_ARGUMENT: + * If camera does not support offline mode in any one of streams + * in streamsToKeep argument. Note that the camera HAL must report + * if a stream supports offline mode in HalStreamConfiguration + * output of configureStreams_3_6 method. If all streams in + * streamsToKeep argument support offline mode, then the camera HAL + * must not return this error. + * + * + * @return offlineSessionInfo Information on what streams and requests will + * be transferred to offline session to continue processing. + * + * @return offlineSession The offline session object camera service will use + * to interact with. + */ + switchToOffline(vec streamsToKeep) generates (Status status, + CameraOfflineSessionInfo offlineSessionInfo, ICameraOfflineSession offlineSession); +}; diff --git a/camera/device/3.6/ICameraOfflineSession.hal b/camera/device/3.6/ICameraOfflineSession.hal new file mode 100644 index 0000000..03cea64 --- /dev/null +++ b/camera/device/3.6/ICameraOfflineSession.hal @@ -0,0 +1,80 @@ +/* + * Copyright (C) 2019 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package android.hardware.camera.device@3.6; + +import @3.5::ICameraDeviceCallback; + +/** + * Camera device offline session interface. + * + * Obtained via ICameraDeviceSession::switchToOffline(), this interface contains + * the methods and callback interfaces that define how camera service interacts + * with an offline session. + * + * An offline session contains some unfinished capture requests that were submitted + * to the parent ICameraDeviceSession before calling switchToOffline, and is + * responsible for delivering these capture results back to camera service regardless + * of whether the parent camera device is still opened or not. An offline session must + * not have access to the camera device's image sensor. During switchToOffline + * call, camera HAL must capture all necessary frames from the image sensor that + * is needed for completing the requests offline later. + */ +interface ICameraOfflineSession { + /** + * Set the callbacks for offline session to communicate with camera service. + * + * Offline session is responsible to store all callbacks the camera HAL + * generated after the return of ICameraDeviceSession::switchToOffline, and + * send them to camera service once this method is called. + * + * Camera service must not call this method more than once, so these + * callbacks can be assumed to be constant after the first setCallback call. + */ + setCallback(ICameraDeviceCallback cb); + + /** + * getCaptureResultMetadataQueue: + * + * Retrieves the queue used along with + * ICameraDeviceCallback#processCaptureResult. 
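+     *
+     * Note: in this default external camera implementation the offline session
+     * allocates its own result metadata queue; it is not shared with the
+     * result queue of the parent ICameraDeviceSession.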
+ * + * Clients to ICameraOfflineSession must: + * - Call getCaptureRequestMetadataQueue to retrieve the fast message queue; + * - In implementation of ICameraDeviceCallback, test whether + * .fmqResultSize field is zero. + * - If .fmqResultSize != 0, read result metadata from the fast message + * queue; + * - otherwise, read result metadata in CaptureResult.result. + * + * @return queue the queue that implementation writes result metadata to. + */ + getCaptureResultMetadataQueue() generates (fmq_sync queue); + + /** + * Close the offline session and release all resources. + * + * Camera service may call this method before or after the offline session + * has finished all requests it needs to handle. If there are still unfinished + * requests when close is called, camera HAL must send ERROR_REQUEST for + * all unfinished requests and return all buffers via + * ICameraDeviceCallback#processCaptureResult or + * ICameraDeviceCallback#returnStreamBuffers. + * Also, all buffer caches maintained by the offline session must be erased + * before the close call returns. + */ + close(); +}; diff --git a/camera/device/3.6/default/Android.bp b/camera/device/3.6/default/Android.bp new file mode 100644 index 0000000..d5eb273 --- /dev/null +++ b/camera/device/3.6/default/Android.bp @@ -0,0 +1,68 @@ +// +// Copyright (C) 2020 The Android Open Source Project +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// + +cc_library_headers { + name: "vendor.camera.device@3.6-external-impl_headers", + vendor: true, + export_include_dirs: ["include/ext_device_v3_6_impl"], +} + +cc_library_shared { + name: "vendor.camera.device@3.6-external-impl", + defaults: ["hidl_defaults"], + proprietary: true, + vendor: true, + srcs: [ + "ExternalCameraDevice.cpp", + "ExternalCameraDeviceSession.cpp", + "ExternalCameraOfflineSession.cpp", + ], + shared_libs: [ + "libhidlbase", + "libutils", + "libcutils", + "vendor.camera.device@3.2-impl", + "vendor.camera.device@3.3-impl", + "vendor.camera.device@3.4-external-impl", + "vendor.camera.device@3.5-external-impl", + "android.hardware.camera.device@3.2", + "android.hardware.camera.device@3.3", + "android.hardware.camera.device@3.4", + "android.hardware.camera.device@3.5", + "android.hardware.camera.device@3.6", + "android.hardware.camera.provider@2.4", + "android.hardware.graphics.mapper@2.0", + "android.hardware.graphics.mapper@3.0", + "android.hardware.graphics.mapper@4.0", + "liblog", + "libgralloctypes", + "libhardware", + "libcamera_metadata", + "libfmq", + "libsync", + "libyuv", + "libjpeg", + "libexif", + "libtinyxml2", + ], + static_libs: [ + "android.vendor.hardware.camera.common@1.0-helper", + ], + local_include_dirs: ["include/ext_device_v3_6_impl"], + export_shared_lib_headers: [ + "libfmq", + ], +} diff --git a/camera/device/3.6/default/ExternalCameraDevice.cpp b/camera/device/3.6/default/ExternalCameraDevice.cpp new file mode 100644 index 0000000..244c7dd --- /dev/null +++ b/camera/device/3.6/default/ExternalCameraDevice.cpp @@ -0,0 +1,91 @@ +/* + * Copyright (C) 2020 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#define LOG_TAG "ExtCamDev@3.6" +//#define LOG_NDEBUG 0 +#include + +#include "ExternalCameraDevice_3_6.h" + +namespace android { +namespace hardware { +namespace camera { +namespace device { +namespace V3_6 { +namespace implementation { + +ExternalCameraDevice::ExternalCameraDevice( + const std::string& cameraId, const ExternalCameraConfig& cfg) : + V3_5::implementation::ExternalCameraDevice(cameraId, cfg) {} + +ExternalCameraDevice::~ExternalCameraDevice() {} + +sp ExternalCameraDevice::createSession( + const sp& cb, + const ExternalCameraConfig& cfg, + const std::vector& sortedFormats, + const CroppingType& croppingType, + const common::V1_0::helper::CameraMetadata& chars, + const std::string& cameraId, + unique_fd v4l2Fd) { + return new ExternalCameraDeviceSession( + cb, cfg, sortedFormats, croppingType, chars, cameraId, std::move(v4l2Fd)); +} + +#define UPDATE(tag, data, size) \ +do { \ + if (metadata->update((tag), (data), (size))) { \ + ALOGE("Update " #tag " failed!"); \ + return -EINVAL; \ + } \ +} while (0) + +status_t ExternalCameraDevice::initAvailableCapabilities( + ::android::hardware::camera::common::V1_0::helper::CameraMetadata* metadata) { + status_t res = + V3_4::implementation::ExternalCameraDevice::initAvailableCapabilities(metadata); + + if (res != OK) { + return res; + } + + camera_metadata_entry caps = metadata->find(ANDROID_REQUEST_AVAILABLE_CAPABILITIES); + std::vector availableCapabilities; + + for (size_t i = 0; i < caps.count; i++) { + uint8_t capability = caps.data.u8[i]; + availableCapabilities.push_back(capability); + } + + // Add OFFLINE_PROCESSING capability to device 3.6 + availableCapabilities.push_back(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_OFFLINE_PROCESSING); + + UPDATE(ANDROID_REQUEST_AVAILABLE_CAPABILITIES, + availableCapabilities.data(), + availableCapabilities.size()); + + return OK; +} + +#undef UPDATE + +} // namespace implementation +} // namespace V3_6 +} // namespace device +} // namespace camera +} // namespace hardware +} // namespace android + diff --git a/camera/device/3.6/default/ExternalCameraDeviceSession.cpp b/camera/device/3.6/default/ExternalCameraDeviceSession.cpp new file mode 100644 index 0000000..8fd8e58 --- /dev/null +++ b/camera/device/3.6/default/ExternalCameraDeviceSession.cpp @@ -0,0 +1,360 @@ +/* + * Copyright (C) 2020 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#define LOG_TAG "ExtCamDevSsn@3.6" +#include + +#include +#include "ExternalCameraDeviceSession.h" + +namespace android { +namespace hardware { +namespace camera { +namespace device { +namespace V3_6 { +namespace implementation { + +ExternalCameraDeviceSession::ExternalCameraDeviceSession( + const sp& callback, + const ExternalCameraConfig& cfg, + const std::vector& sortedFormats, + const CroppingType& croppingType, + const common::V1_0::helper::CameraMetadata& chars, + const std::string& cameraId, + unique_fd v4l2Fd) : + V3_5::implementation::ExternalCameraDeviceSession( + callback, cfg, sortedFormats, croppingType, chars, cameraId, std::move(v4l2Fd)) { +} + +ExternalCameraDeviceSession::~ExternalCameraDeviceSession() {} + + +Return ExternalCameraDeviceSession::configureStreams_3_6( + const StreamConfiguration& requestedConfiguration, + ICameraDeviceSession::configureStreams_3_6_cb _hidl_cb) { + V3_2::StreamConfiguration config_v32; + V3_3::HalStreamConfiguration outStreams_v33; + V3_6::HalStreamConfiguration outStreams; + const V3_4::StreamConfiguration& requestedConfiguration_3_4 = requestedConfiguration.v3_4; + Mutex::Autolock _il(mInterfaceLock); + + config_v32.operationMode = requestedConfiguration_3_4.operationMode; + config_v32.streams.resize(requestedConfiguration_3_4.streams.size()); + uint32_t blobBufferSize = 0; + int numStallStream = 0; + for (size_t i = 0; i < config_v32.streams.size(); i++) { + config_v32.streams[i] = requestedConfiguration_3_4.streams[i].v3_2; + if (config_v32.streams[i].format == PixelFormat::BLOB) { + blobBufferSize = requestedConfiguration_3_4.streams[i].bufferSize; + numStallStream++; + } + } + + // Fail early if there are multiple BLOB streams + if (numStallStream > kMaxStallStream) { + ALOGE("%s: too many stall streams (expect <= %d, got %d)", __FUNCTION__, + kMaxStallStream, numStallStream); + _hidl_cb(Status::ILLEGAL_ARGUMENT, outStreams); + return Void(); + } + + Status status = configureStreams(config_v32, &outStreams_v33, blobBufferSize); + + fillOutputStream3_6(outStreams_v33, &outStreams); + + _hidl_cb(status, outStreams); + return Void(); +} + +Return ExternalCameraDeviceSession::switchToOffline( + const hidl_vec& streamsToKeep, + ICameraDeviceSession::switchToOffline_cb _hidl_cb) { + std::vector msgs; + std::vector results; + CameraOfflineSessionInfo info; + sp session; + + Status st = switchToOffline(streamsToKeep, &msgs, &results, &info, &session); + + mCallback->notify(msgs); + hidl_vec hidlResults(std::move(results)); + invokeProcessCaptureResultCallback(hidlResults, /* tryWriteFmq */true); + V3_4::implementation::freeReleaseFences(hidlResults); + + _hidl_cb(st, info, session); + return Void(); +} + +void ExternalCameraDeviceSession::fillOutputStream3_6( + const V3_3::HalStreamConfiguration& outStreams_v33, + /*out*/V3_6::HalStreamConfiguration* outStreams_v36) { + if (outStreams_v36 == nullptr) { + ALOGE("%s: outStreams_v36 must not be null!", __FUNCTION__); + return; + } + Mutex::Autolock _l(mLock); + outStreams_v36->streams.resize(outStreams_v33.streams.size()); + for (size_t i = 0; i < outStreams_v36->streams.size(); i++) { + outStreams_v36->streams[i].v3_4.v3_3 = outStreams_v33.streams[i]; + outStreams_v36->streams[i].supportOffline = + supportOfflineLocked(outStreams_v33.streams[i].v3_2.id); + } +} + +bool ExternalCameraDeviceSession::supportOfflineLocked(int32_t streamId) { + const Stream& stream = mStreamMap[streamId]; + if (stream.format == PixelFormat::BLOB && + stream.dataSpace == static_cast(Dataspace::V0_JFIF)) { + 
return true; + } + // TODO: support YUV output stream? + return false; +} + +bool ExternalCameraDeviceSession::canDropRequest(const hidl_vec& offlineStreams, + std::shared_ptr halReq) { + for (const auto& buffer : halReq->buffers) { + for (auto offlineStreamId : offlineStreams) { + if (buffer.streamId == offlineStreamId) { + return false; + } + } + } + // Only drop a request completely if it has no offline output + return true; +} + +void ExternalCameraDeviceSession::fillOfflineSessionInfo(const hidl_vec& offlineStreams, + std::deque>& offlineReqs, + const std::map& circulatingBuffers, + /*out*/CameraOfflineSessionInfo* info) { + if (info == nullptr) { + ALOGE("%s: output info must not be null!", __FUNCTION__); + return; + } + + info->offlineStreams.resize(offlineStreams.size()); + info->offlineRequests.resize(offlineReqs.size()); + + // Fill in offline reqs and count outstanding buffers + for (size_t i = 0; i < offlineReqs.size(); i++) { + info->offlineRequests[i].frameNumber = offlineReqs[i]->frameNumber; + info->offlineRequests[i].pendingStreams.resize(offlineReqs[i]->buffers.size()); + for (size_t bIdx = 0; bIdx < offlineReqs[i]->buffers.size(); bIdx++) { + int32_t streamId = offlineReqs[i]->buffers[bIdx].streamId; + info->offlineRequests[i].pendingStreams[bIdx] = streamId; + } + } + + for (size_t i = 0; i < offlineStreams.size(); i++) { + int32_t streamId = offlineStreams[i]; + info->offlineStreams[i].id = streamId; + // outstanding buffers are 0 since we are doing hal buffer management and + // offline session will ask for those buffers later + info->offlineStreams[i].numOutstandingBuffers = 0; + const CirculatingBuffers& bufIdMap = circulatingBuffers.at(streamId); + info->offlineStreams[i].circulatingBufferIds.resize(bufIdMap.size()); + size_t bIdx = 0; + for (const auto& pair : bufIdMap) { + // Fill in bufferId + info->offlineStreams[i].circulatingBufferIds[bIdx++] = pair.first; + } + + } +} + +Status ExternalCameraDeviceSession::switchToOffline(const hidl_vec& offlineStreams, + /*out*/std::vector* msgs, + /*out*/std::vector* results, + /*out*/CameraOfflineSessionInfo* info, + /*out*/sp* session) { + ATRACE_CALL(); + if (offlineStreams.size() > 1) { + ALOGE("%s: more than one offline stream is not supported", __FUNCTION__); + return Status::ILLEGAL_ARGUMENT; + } + + if (msgs == nullptr || results == nullptr || info == nullptr || session == nullptr) { + ALOGE("%s: output arguments (%p, %p, %p, %p) must not be null", __FUNCTION__, + msgs, results, info, session); + return Status::ILLEGAL_ARGUMENT; + } + + msgs->clear(); + results->clear(); + + Mutex::Autolock _il(mInterfaceLock); + Status status = initStatus(); + if (status != Status::OK) { + return status; + } + + Mutex::Autolock _l(mLock); + for (auto streamId : offlineStreams) { + if (!supportOfflineLocked(streamId)) { + return Status::ILLEGAL_ARGUMENT; + } + } + + // pause output thread and get all remaining inflight requests + auto remainingReqs = mOutputThread->switchToOffline(); + std::vector> halReqs; + + // Send out buffer/request error for remaining requests and filter requests + // to be handled in offline mode + for (auto& halReq : remainingReqs) { + bool dropReq = canDropRequest(offlineStreams, halReq); + if (dropReq) { + // Request is dropped completely. 
Just send request error and + // there is no need to send the request to offline session + processCaptureRequestError(halReq, msgs, results); + continue; + } + + // All requests reach here must have at least one offline stream output + NotifyMsg shutter; + shutter.type = MsgType::SHUTTER; + shutter.msg.shutter.frameNumber = halReq->frameNumber; + shutter.msg.shutter.timestamp = halReq->shutterTs; + msgs->push_back(shutter); + + std::vector offlineBuffers; + for (const auto& buffer : halReq->buffers) { + bool dropBuffer = true; + for (auto offlineStreamId : offlineStreams) { + if (buffer.streamId == offlineStreamId) { + dropBuffer = false; + break; + } + } + if (dropBuffer) { + NotifyMsg error; + error.type = MsgType::ERROR; + error.msg.error.frameNumber = halReq->frameNumber; + error.msg.error.errorStreamId = buffer.streamId; + error.msg.error.errorCode = ErrorCode::ERROR_BUFFER; + msgs->push_back(error); + + CaptureResult result; + result.frameNumber = halReq->frameNumber; + result.partialResult = 0; // buffer only result + result.inputBuffer.streamId = -1; + result.outputBuffers.resize(1); + result.outputBuffers[0].streamId = buffer.streamId; + result.outputBuffers[0].bufferId = buffer.bufferId; + result.outputBuffers[0].status = BufferStatus::ERROR; + if (buffer.acquireFence >= 0) { + native_handle_t* handle = native_handle_create(/*numFds*/1, /*numInts*/0); + handle->data[0] = buffer.acquireFence; + result.outputBuffers[0].releaseFence.setTo(handle, /*shouldOwn*/false); + } + results->push_back(result); + } else { + offlineBuffers.push_back(buffer); + } + } + halReq->buffers = offlineBuffers; + halReqs.push_back(halReq); + } + + // convert hal requests to offline request + std::deque> offlineReqs(halReqs.size()); + size_t i = 0; + for (auto& v4lReq : halReqs) { + offlineReqs[i] = std::make_shared(); + offlineReqs[i]->frameNumber = v4lReq->frameNumber; + offlineReqs[i]->setting = v4lReq->setting; + offlineReqs[i]->shutterTs = v4lReq->shutterTs; + offlineReqs[i]->buffers = v4lReq->buffers; + sp v4l2Frame = + static_cast(v4lReq->frameIn.get()); + offlineReqs[i]->frameIn = new AllocatedV4L2Frame(v4l2Frame); + i++; + // enqueue V4L2 frame + enqueueV4l2Frame(v4l2Frame); + } + + // Collect buffer caches/streams + hidl_vec streamInfos; + streamInfos.resize(offlineStreams.size()); + std::map circulatingBuffers; + { + Mutex::Autolock _l(mCbsLock); + size_t idx = 0; + for(auto streamId : offlineStreams) { + circulatingBuffers[streamId] = mCirculatingBuffers.at(streamId); + mCirculatingBuffers.erase(streamId); + streamInfos[idx++] = mStreamMap.at(streamId); + mStreamMap.erase(streamId); + } + } + + fillOfflineSessionInfo(offlineStreams, offlineReqs, circulatingBuffers, info); + + // create the offline session object + bool afTrigger; + { + std::lock_guard lk(mAfTriggerLock); + afTrigger = mAfTrigger; + } + sp sessionImpl = new ExternalCameraOfflineSession( + mCroppingType, mCameraCharacteristics, mCameraId, + mExifMake, mExifModel, mBlobBufferSize, afTrigger, + streamInfos, offlineReqs, circulatingBuffers); + + bool initFailed = sessionImpl->initialize(); + if (initFailed) { + ALOGE("%s: offline session initialize failed!", __FUNCTION__); + return Status::INTERNAL_ERROR; + } + + // cleanup stream and buffer caches + { + Mutex::Autolock _l(mCbsLock); + for(auto pair : mStreamMap) { + cleanupBuffersLocked(/*Stream ID*/pair.first); + } + mCirculatingBuffers.clear(); + } + mStreamMap.clear(); + + // update inflight records + { + std::lock_guard lk(mInflightFramesLock); + mInflightFrames.clear(); + } + 
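+    // From this point on the offline session owns all remaining inflight
+    // requests, so this session must not deliver any further callbacks for
+    // them (see the switchToOffline contract in @3.6::ICameraDeviceSession).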
+ // stop v4l2 streaming + if (v4l2StreamOffLocked() !=0) { + ALOGE("%s: stop V4L2 streaming failed!", __FUNCTION__); + return Status::INTERNAL_ERROR; + } + + // No need to return session if there is no offline requests left + if (offlineReqs.size() != 0) { + *session = sessionImpl->getInterface(); + } else { + *session = nullptr; + } + return Status::OK; +} + +} // namespace implementation +} // namespace V3_6 +} // namespace device +} // namespace camera +} // namespace hardware +} // namespace android diff --git a/camera/device/3.6/default/ExternalCameraOfflineSession.cpp b/camera/device/3.6/default/ExternalCameraOfflineSession.cpp new file mode 100644 index 0000000..e606fda --- /dev/null +++ b/camera/device/3.6/default/ExternalCameraOfflineSession.cpp @@ -0,0 +1,554 @@ +/* + * Copyright (C) 2020 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "ExtCamOfflnSsn@3.6" +#define ATRACE_TAG ATRACE_TAG_CAMERA +#include + +#include +#include + +#define HAVE_JPEG // required for libyuv.h to export MJPEG decode APIs +#include + +#include +#include "ExternalCameraOfflineSession.h" + +namespace { + +// Size of request/result metadata fast message queue. Change to 0 to always use hwbinder buffer. 
+static constexpr size_t kMetadataMsgQueueSize = 1 << 18 /* 256kB */; + +} // anonymous namespace + +namespace android { +namespace hardware { +namespace camera { +namespace device { +namespace V3_6 { +namespace implementation { + +// static instance +HandleImporter ExternalCameraOfflineSession::sHandleImporter; + +using V3_5::implementation::ExternalCameraDeviceSession; + +ExternalCameraOfflineSession::ExternalCameraOfflineSession( + const CroppingType& croppingType, + const common::V1_0::helper::CameraMetadata& chars, + const std::string& cameraId, + const std::string& exifMake, + const std::string& exifModel, + const uint32_t blobBufferSize, + const bool afTrigger, + const hidl_vec& offlineStreams, + std::deque>& offlineReqs, + const std::map& circulatingBuffers) : + mCroppingType(croppingType), mChars(chars), mCameraId(cameraId), + mExifMake(exifMake), mExifModel(exifModel), mBlobBufferSize(blobBufferSize), + mAfTrigger(afTrigger), mOfflineStreams(offlineStreams), mOfflineReqs(offlineReqs), + mCirculatingBuffers(circulatingBuffers) {} + +ExternalCameraOfflineSession::~ExternalCameraOfflineSession() { + close(); +} + +bool ExternalCameraOfflineSession::initialize() { + mResultMetadataQueue = std::make_shared( + kMetadataMsgQueueSize, false /* non blocking */); + if (!mResultMetadataQueue->isValid()) { + ALOGE("%s: invalid result fmq", __FUNCTION__); + return true; + } + return false; +} + +void ExternalCameraOfflineSession::initOutputThread() { + if (mOutputThread != nullptr) { + ALOGE("%s: OutputThread already exist!", __FUNCTION__); + return; + } + + mBufferRequestThread = new ExternalCameraDeviceSession::BufferRequestThread( + this, mCallback); + mBufferRequestThread->run("ExtCamBufReq", PRIORITY_DISPLAY); + + mOutputThread = new OutputThread(this, mCroppingType, mChars, + mBufferRequestThread, mOfflineReqs); + + mOutputThread->setExifMakeModel(mExifMake, mExifModel); + + Size inputSize = { mOfflineReqs[0]->frameIn->mWidth, mOfflineReqs[0]->frameIn->mHeight}; + Size maxThumbSize = V3_4::implementation::getMaxThumbnailResolution(mChars); + mOutputThread->allocateIntermediateBuffers( + inputSize, maxThumbSize, mOfflineStreams, mBlobBufferSize); + + mOutputThread->run("ExtCamOfflnOut", PRIORITY_DISPLAY); +} + +bool ExternalCameraOfflineSession::OutputThread::threadLoop() { + auto parent = mParent.promote(); + if (parent == nullptr) { + ALOGE("%s: session has been disconnected!", __FUNCTION__); + return false; + } + + if (mOfflineReqs.empty()) { + ALOGI("%s: all offline requests are processed. Stopping.", __FUNCTION__); + return false; + } + + std::shared_ptr req = mOfflineReqs.front(); + mOfflineReqs.pop_front(); + + auto onDeviceError = [&](auto... args) { + ALOGE(args...); + parent->notifyError( + req->frameNumber, /*stream*/-1, ErrorCode::ERROR_DEVICE); + signalRequestDone(); + return false; + }; + + if (req->frameIn->mFourcc != V4L2_PIX_FMT_MJPEG && req->frameIn->mFourcc != V4L2_PIX_FMT_Z16) { + return onDeviceError("%s: do not support V4L2 format %c%c%c%c", __FUNCTION__, + req->frameIn->mFourcc & 0xFF, + (req->frameIn->mFourcc >> 8) & 0xFF, + (req->frameIn->mFourcc >> 16) & 0xFF, + (req->frameIn->mFourcc >> 24) & 0xFF); + } + + int res = requestBufferStart(req->buffers); + if (res != 0) { + ALOGE("%s: send BufferRequest failed! 
res %d", __FUNCTION__, res); + return onDeviceError("%s: failed to send buffer request!", __FUNCTION__); + } + + std::unique_lock lk(mBufferLock); + // Convert input V4L2 frame to YU12 of the same size + // TODO: see if we can save some computation by converting to YV12 here + uint8_t* inData; + size_t inDataSize; + if (req->frameIn->getData(&inData, &inDataSize) != 0) { + lk.unlock(); + return onDeviceError("%s: V4L2 buffer map failed", __FUNCTION__); + } + + // TODO: in some special case maybe we can decode jpg directly to gralloc output? + if (req->frameIn->mFourcc == V4L2_PIX_FMT_MJPEG) { + ATRACE_BEGIN("MJPGtoI420"); + int res = libyuv::MJPGToI420( + inData, inDataSize, static_cast(mYu12FrameLayout.y), mYu12FrameLayout.yStride, + static_cast(mYu12FrameLayout.cb), mYu12FrameLayout.cStride, + static_cast(mYu12FrameLayout.cr), mYu12FrameLayout.cStride, + mYu12Frame->mWidth, mYu12Frame->mHeight, mYu12Frame->mWidth, mYu12Frame->mHeight); + ATRACE_END(); + + if (res != 0) { + // For some webcam, the first few V4L2 frames might be malformed... + ALOGE("%s: Convert V4L2 frame to YU12 failed! res %d", __FUNCTION__, res); + lk.unlock(); + Status st = parent->processCaptureRequestError(req); + if (st != Status::OK) { + return onDeviceError("%s: failed to process capture request error!", __FUNCTION__); + } + signalRequestDone(); + return true; + } + } + + ATRACE_BEGIN("Wait for BufferRequest done"); + res = waitForBufferRequestDone(&req->buffers); + ATRACE_END(); + + if (res != 0) { + ALOGE("%s: wait for BufferRequest done failed! res %d", __FUNCTION__, res); + lk.unlock(); + return onDeviceError("%s: failed to process buffer request error!", __FUNCTION__); + } + + ALOGV("%s processing new request", __FUNCTION__); + const int kSyncWaitTimeoutMs = 500; + for (auto& halBuf : req->buffers) { + if (*(halBuf.bufPtr) == nullptr) { + ALOGW("%s: buffer for stream %d missing", __FUNCTION__, halBuf.streamId); + halBuf.fenceTimeout = true; + } else if (halBuf.acquireFence >= 0) { + int ret = sync_wait(halBuf.acquireFence, kSyncWaitTimeoutMs); + if (ret) { + halBuf.fenceTimeout = true; + } else { + ::close(halBuf.acquireFence); + halBuf.acquireFence = -1; + } + } + + if (halBuf.fenceTimeout) { + continue; + } + + // Gralloc lockYCbCr the buffer + switch (halBuf.format) { + case PixelFormat::BLOB: { + int ret = createJpegLocked(halBuf, req->setting); + + if(ret != 0) { + lk.unlock(); + return onDeviceError("%s: createJpegLocked failed with %d", + __FUNCTION__, ret); + } + } break; + case PixelFormat::Y16: { + void* outLayout = sHandleImporter.lock(*(halBuf.bufPtr), halBuf.usage, inDataSize); + + std::memcpy(outLayout, inData, inDataSize); + + int relFence = sHandleImporter.unlock(*(halBuf.bufPtr)); + if (relFence >= 0) { + halBuf.acquireFence = relFence; + } + } break; + case PixelFormat::YCBCR_420_888: + case PixelFormat::YV12: { + IMapper::Rect outRect {0, 0, + static_cast(halBuf.width), + static_cast(halBuf.height)}; + YCbCrLayout outLayout = sHandleImporter.lockYCbCr( + *(halBuf.bufPtr), halBuf.usage, outRect); + ALOGV("%s: outLayout y %p cb %p cr %p y_str %d c_str %d c_step %d", + __FUNCTION__, outLayout.y, outLayout.cb, outLayout.cr, + outLayout.yStride, outLayout.cStride, outLayout.chromaStep); + + // Convert to output buffer size/format + uint32_t outputFourcc = V3_4::implementation::getFourCcFromLayout(outLayout); + ALOGV("%s: converting to format %c%c%c%c", __FUNCTION__, + outputFourcc & 0xFF, + (outputFourcc >> 8) & 0xFF, + (outputFourcc >> 16) & 0xFF, + (outputFourcc >> 24) & 0xFF); + + 
YCbCrLayout cropAndScaled; + ATRACE_BEGIN("cropAndScaleLocked"); + int ret = cropAndScaleLocked( + mYu12Frame, + Size { halBuf.width, halBuf.height }, + &cropAndScaled); + ATRACE_END(); + if (ret != 0) { + lk.unlock(); + return onDeviceError("%s: crop and scale failed!", __FUNCTION__); + } + + Size sz {halBuf.width, halBuf.height}; + ATRACE_BEGIN("formatConvert"); + ret = V3_4::implementation::formatConvert(cropAndScaled, outLayout, sz, outputFourcc); + ATRACE_END(); + if (ret != 0) { + lk.unlock(); + return onDeviceError("%s: format coversion failed!", __FUNCTION__); + } + int relFence = sHandleImporter.unlock(*(halBuf.bufPtr)); + if (relFence >= 0) { + halBuf.acquireFence = relFence; + } + } break; + default: + lk.unlock(); + return onDeviceError("%s: unknown output format %x", __FUNCTION__, halBuf.format); + } + } // for each buffer + mScaledYu12Frames.clear(); + + // Don't hold the lock while calling back to parent + lk.unlock(); + Status st = parent->processCaptureResult(req); + if (st != Status::OK) { + return onDeviceError("%s: failed to process capture result!", __FUNCTION__); + } + signalRequestDone(); + return true; +} + +Status ExternalCameraOfflineSession::importBuffer(int32_t streamId, + uint64_t bufId, buffer_handle_t buf, + /*out*/buffer_handle_t** outBufPtr, + bool allowEmptyBuf) { + Mutex::Autolock _l(mCbsLock); + return V3_4::implementation::importBufferImpl( + mCirculatingBuffers, sHandleImporter, streamId, + bufId, buf, outBufPtr, allowEmptyBuf); + return Status::OK; +}; + +#define UPDATE(md, tag, data, size) \ +do { \ + if ((md).update((tag), (data), (size))) { \ + ALOGE("Update " #tag " failed!"); \ + return BAD_VALUE; \ + } \ +} while (0) + +status_t ExternalCameraOfflineSession::fillCaptureResult( + common::V1_0::helper::CameraMetadata &md, nsecs_t timestamp) { + bool afTrigger = false; + { + std::lock_guard lk(mAfTriggerLock); + afTrigger = mAfTrigger; + if (md.exists(ANDROID_CONTROL_AF_TRIGGER)) { + camera_metadata_entry entry = md.find(ANDROID_CONTROL_AF_TRIGGER); + if (entry.data.u8[0] == ANDROID_CONTROL_AF_TRIGGER_START) { + mAfTrigger = afTrigger = true; + } else if (entry.data.u8[0] == ANDROID_CONTROL_AF_TRIGGER_CANCEL) { + mAfTrigger = afTrigger = false; + } + } + } + + // For USB camera, the USB camera handles everything and we don't have control + // over AF. We only simply fake the AF metadata based on the request + // received here. 
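+    // Report FOCUSED_LOCKED while an AF trigger is outstanding and INACTIVE
+    // otherwise.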
+ uint8_t afState; + if (afTrigger) { + afState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED; + } else { + afState = ANDROID_CONTROL_AF_STATE_INACTIVE; + } + UPDATE(md, ANDROID_CONTROL_AF_STATE, &afState, 1); + + camera_metadata_ro_entry activeArraySize = + mChars.find(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE); + + return V3_4::implementation::fillCaptureResultCommon(md, timestamp, activeArraySize); +} + +#undef UPDATE + +Status ExternalCameraOfflineSession::processCaptureResult(std::shared_ptr& req) { + ATRACE_CALL(); + // Fill output buffers + hidl_vec results; + results.resize(1); + CaptureResult& result = results[0]; + result.frameNumber = req->frameNumber; + result.partialResult = 1; + result.inputBuffer.streamId = -1; + result.outputBuffers.resize(req->buffers.size()); + for (size_t i = 0; i < req->buffers.size(); i++) { + result.outputBuffers[i].streamId = req->buffers[i].streamId; + result.outputBuffers[i].bufferId = req->buffers[i].bufferId; + if (req->buffers[i].fenceTimeout) { + result.outputBuffers[i].status = BufferStatus::ERROR; + if (req->buffers[i].acquireFence >= 0) { + native_handle_t* handle = native_handle_create(/*numFds*/1, /*numInts*/0); + handle->data[0] = req->buffers[i].acquireFence; + result.outputBuffers[i].releaseFence.setTo(handle, /*shouldOwn*/false); + } + notifyError(req->frameNumber, req->buffers[i].streamId, ErrorCode::ERROR_BUFFER); + } else { + result.outputBuffers[i].status = BufferStatus::OK; + // TODO: refactor + if (req->buffers[i].acquireFence >= 0) { + native_handle_t* handle = native_handle_create(/*numFds*/1, /*numInts*/0); + handle->data[0] = req->buffers[i].acquireFence; + result.outputBuffers[i].releaseFence.setTo(handle, /*shouldOwn*/false); + } + } + } + + // Fill capture result metadata + fillCaptureResult(req->setting, req->shutterTs); + const camera_metadata_t *rawResult = req->setting.getAndLock(); + V3_2::implementation::convertToHidl(rawResult, &result.result); + req->setting.unlock(rawResult); + + // Callback into framework + invokeProcessCaptureResultCallback(results, /* tryWriteFmq */true); + V3_4::implementation::freeReleaseFences(results); + return Status::OK; +}; + +void ExternalCameraOfflineSession::invokeProcessCaptureResultCallback( + hidl_vec &results, bool tryWriteFmq) { + if (mProcessCaptureResultLock.tryLock() != OK) { + const nsecs_t NS_TO_SECOND = 1000000000; + ALOGV("%s: previous call is not finished! 
waiting 1s...", __FUNCTION__); + if (mProcessCaptureResultLock.timedLock(/* 1s */NS_TO_SECOND) != OK) { + ALOGE("%s: cannot acquire lock in 1s, cannot proceed", + __FUNCTION__); + return; + } + } + if (tryWriteFmq && mResultMetadataQueue->availableToWrite() > 0) { + for (CaptureResult &result : results) { + if (result.result.size() > 0) { + if (mResultMetadataQueue->write(result.result.data(), result.result.size())) { + result.fmqResultSize = result.result.size(); + result.result.resize(0); + } else { + ALOGW("%s: couldn't utilize fmq, fall back to hwbinder", __FUNCTION__); + result.fmqResultSize = 0; + } + } else { + result.fmqResultSize = 0; + } + } + } + auto status = mCallback->processCaptureResult(results); + if (!status.isOk()) { + ALOGE("%s: processCaptureResult ERROR : %s", __FUNCTION__, + status.description().c_str()); + } + + mProcessCaptureResultLock.unlock(); +} + +Status ExternalCameraOfflineSession::processCaptureRequestError( + const std::shared_ptr& req, + /*out*/std::vector* outMsgs, + /*out*/std::vector* outResults) { + ATRACE_CALL(); + + if (outMsgs == nullptr) { + notifyError(/*frameNum*/req->frameNumber, /*stream*/-1, ErrorCode::ERROR_REQUEST); + } else { + NotifyMsg shutter; + shutter.type = MsgType::SHUTTER; + shutter.msg.shutter.frameNumber = req->frameNumber; + shutter.msg.shutter.timestamp = req->shutterTs; + + NotifyMsg error; + error.type = MsgType::ERROR; + error.msg.error.frameNumber = req->frameNumber; + error.msg.error.errorStreamId = -1; + error.msg.error.errorCode = ErrorCode::ERROR_REQUEST; + outMsgs->push_back(shutter); + outMsgs->push_back(error); + } + + // Fill output buffers + hidl_vec results; + results.resize(1); + CaptureResult& result = results[0]; + result.frameNumber = req->frameNumber; + result.partialResult = 1; + result.inputBuffer.streamId = -1; + result.outputBuffers.resize(req->buffers.size()); + for (size_t i = 0; i < req->buffers.size(); i++) { + result.outputBuffers[i].streamId = req->buffers[i].streamId; + result.outputBuffers[i].bufferId = req->buffers[i].bufferId; + result.outputBuffers[i].status = BufferStatus::ERROR; + if (req->buffers[i].acquireFence >= 0) { + native_handle_t* handle = native_handle_create(/*numFds*/1, /*numInts*/0); + handle->data[0] = req->buffers[i].acquireFence; + result.outputBuffers[i].releaseFence.setTo(handle, /*shouldOwn*/false); + } + } + + if (outResults == nullptr) { + // Callback into framework + invokeProcessCaptureResultCallback(results, /* tryWriteFmq */true); + V3_4::implementation::freeReleaseFences(results); + } else { + outResults->push_back(result); + } + return Status::OK; +}; + +ssize_t ExternalCameraOfflineSession::getJpegBufferSize( + uint32_t /*width*/, uint32_t /*height*/) const { + // Empty implementation here as the jpeg buffer size is passed in by ctor + return 0; +}; + +void ExternalCameraOfflineSession::notifyError(uint32_t frameNumber, int32_t streamId, ErrorCode ec) { + NotifyMsg msg; + msg.type = MsgType::ERROR; + msg.msg.error.frameNumber = frameNumber; + msg.msg.error.errorStreamId = streamId; + msg.msg.error.errorCode = ec; + mCallback->notify({msg}); +}; + +Return ExternalCameraOfflineSession::setCallback(const sp& cb) { + Mutex::Autolock _il(mInterfaceLock); + if (mCallback != nullptr && cb != nullptr) { + ALOGE("%s: callback must not be set twice!", __FUNCTION__); + return Void(); + } + mCallback = cb; + + initOutputThread(); + + if (mOutputThread == nullptr) { + ALOGE("%s: init OutputThread failed!", __FUNCTION__); + } + return Void(); +} + +Return 
ExternalCameraOfflineSession::getCaptureResultMetadataQueue( + V3_3::ICameraDeviceSession::getCaptureResultMetadataQueue_cb _hidl_cb) { + Mutex::Autolock _il(mInterfaceLock); + _hidl_cb(*mResultMetadataQueue->getDesc()); + return Void(); +} + +void ExternalCameraOfflineSession::cleanupBuffersLocked(int id) { + for (auto& pair : mCirculatingBuffers.at(id)) { + sHandleImporter.freeBuffer(pair.second); + } + mCirculatingBuffers[id].clear(); + mCirculatingBuffers.erase(id); +} + +Return ExternalCameraOfflineSession::close() { + Mutex::Autolock _il(mInterfaceLock); + { + Mutex::Autolock _l(mLock); + if (mClosed) { + ALOGW("%s: offline session already closed!", __FUNCTION__); + return Void(); + } + } + if (mBufferRequestThread) { + mBufferRequestThread->requestExit(); + mBufferRequestThread->join(); + mBufferRequestThread.clear(); + } + if (mOutputThread) { + mOutputThread->flush(); + mOutputThread->requestExit(); + mOutputThread->join(); + mOutputThread.clear(); + } + + Mutex::Autolock _l(mLock); + // free all buffers + { + Mutex::Autolock _cbl(mCbsLock); + for(auto stream : mOfflineStreams) { + cleanupBuffersLocked(stream.id); + } + } + mCallback.clear(); + mClosed = true; + return Void(); +} + +} // namespace implementation +} // namespace V3_6 +} // namespace device +} // namespace camera +} // namespace hardware +} // namespace android diff --git a/camera/device/3.6/default/OWNERS b/camera/device/3.6/default/OWNERS new file mode 100644 index 0000000..f48a95c --- /dev/null +++ b/camera/device/3.6/default/OWNERS @@ -0,0 +1 @@ +include platform/frameworks/av:/camera/OWNERS diff --git a/camera/device/3.6/default/include/ext_device_v3_6_impl/ExternalCameraDeviceSession.h b/camera/device/3.6/default/include/ext_device_v3_6_impl/ExternalCameraDeviceSession.h new file mode 100644 index 0000000..db0d9a5 --- /dev/null +++ b/camera/device/3.6/default/include/ext_device_v3_6_impl/ExternalCameraDeviceSession.h @@ -0,0 +1,208 @@ +/* + * Copyright (C) 2020 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef ANDROID_HARDWARE_CAMERA_DEVICE_V3_6_EXTCAMERADEVICESESSION_H +#define ANDROID_HARDWARE_CAMERA_DEVICE_V3_6_EXTCAMERADEVICESESSION_H + +#include +#include +#include <../../3.5/default/include/ext_device_v3_5_impl/ExternalCameraDeviceSession.h> +#include "ExternalCameraOfflineSession.h" + +namespace android { +namespace hardware { +namespace camera { +namespace device { +namespace V3_6 { +namespace implementation { + +using ::android::hardware::camera::device::V3_2::BufferCache; +using ::android::hardware::camera::device::V3_2::CameraMetadata; +using ::android::hardware::camera::device::V3_2::CaptureRequest; +using ::android::hardware::camera::device::V3_2::CaptureResult; +using ::android::hardware::camera::device::V3_5::ICameraDeviceCallback; +using ::android::hardware::camera::device::V3_2::RequestTemplate; +using ::android::hardware::camera::device::V3_2::Stream; +using ::android::hardware::camera::device::V3_5::StreamConfiguration; +using ::android::hardware::camera::device::V3_6::ICameraDeviceSession; +using ::android::hardware::camera::device::V3_6::ICameraOfflineSession; +using ::android::hardware::camera::common::V1_0::Status; +using ::android::hardware::camera::external::common::ExternalCameraConfig; +using ::android::hardware::graphics::common::V1_0::PixelFormat; +using ::android::hardware::MQDescriptorSync; +using ::android::hardware::Return; +using ::android::hardware::Void; +using ::android::hardware::hidl_vec; +using ::android::hardware::hidl_string; +using ::android::sp; +using ::android::Mutex; +using ::android::base::unique_fd; + +using ::android::hardware::camera::device::V3_4::implementation::SupportedV4L2Format; +using ::android::hardware::camera::device::V3_4::implementation::CroppingType; + +struct ExternalCameraDeviceSession : public V3_5::implementation::ExternalCameraDeviceSession { + + ExternalCameraDeviceSession(const sp&, + const ExternalCameraConfig& cfg, + const std::vector& sortedFormats, + const CroppingType& croppingType, + const common::V1_0::helper::CameraMetadata& chars, + const std::string& cameraId, + unique_fd v4l2Fd); + virtual ~ExternalCameraDeviceSession(); + + // Retrieve the HIDL interface, split into its own class to avoid inheritance issues when + // dealing with minor version revs and simultaneous implementation and interface inheritance + virtual sp getInterface() override { + return new TrampolineSessionInterface_3_6(this); + } + +protected: + // Methods from v3.5 and earlier will trampoline to inherited implementation + Return configureStreams_3_6( + const StreamConfiguration& requestedConfiguration, + ICameraDeviceSession::configureStreams_3_6_cb _hidl_cb); + + Return switchToOffline( + const hidl_vec& streamsToKeep, + ICameraDeviceSession::switchToOffline_cb _hidl_cb); + + void fillOutputStream3_6(const V3_3::HalStreamConfiguration& outStreams_v33, + /*out*/V3_6::HalStreamConfiguration* outStreams_v36); + bool supportOfflineLocked(int32_t streamId); + + // Main body of switchToOffline. 
This method does not invoke any callbacks + // but instead returns the necessary callbacks in output arguments so callers + // can callback later without holding any locks + Status switchToOffline(const hidl_vec& offlineStreams, + /*out*/std::vector* msgs, + /*out*/std::vector* results, + /*out*/CameraOfflineSessionInfo* info, + /*out*/sp* session); + + // Whether a request can be completely dropped when switching to offline + bool canDropRequest(const hidl_vec& offlineStreams, + std::shared_ptr halReq); + + void fillOfflineSessionInfo(const hidl_vec& offlineStreams, + std::deque>& offlineReqs, + const std::map& circulatingBuffers, + /*out*/CameraOfflineSessionInfo* info); + +private: + + struct TrampolineSessionInterface_3_6 : public ICameraDeviceSession { + TrampolineSessionInterface_3_6(sp parent) : + mParent(parent) {} + + virtual Return constructDefaultRequestSettings( + RequestTemplate type, + V3_3::ICameraDeviceSession::constructDefaultRequestSettings_cb _hidl_cb) override { + return mParent->constructDefaultRequestSettings(type, _hidl_cb); + } + + virtual Return configureStreams( + const V3_2::StreamConfiguration& requestedConfiguration, + V3_3::ICameraDeviceSession::configureStreams_cb _hidl_cb) override { + return mParent->configureStreams(requestedConfiguration, _hidl_cb); + } + + virtual Return processCaptureRequest(const hidl_vec& requests, + const hidl_vec& cachesToRemove, + V3_3::ICameraDeviceSession::processCaptureRequest_cb _hidl_cb) override { + return mParent->processCaptureRequest(requests, cachesToRemove, _hidl_cb); + } + + virtual Return getCaptureRequestMetadataQueue( + V3_3::ICameraDeviceSession::getCaptureRequestMetadataQueue_cb _hidl_cb) override { + return mParent->getCaptureRequestMetadataQueue(_hidl_cb); + } + + virtual Return getCaptureResultMetadataQueue( + V3_3::ICameraDeviceSession::getCaptureResultMetadataQueue_cb _hidl_cb) override { + return mParent->getCaptureResultMetadataQueue(_hidl_cb); + } + + virtual Return flush() override { + return mParent->flush(); + } + + virtual Return close() override { + return mParent->close(); + } + + virtual Return configureStreams_3_3( + const V3_2::StreamConfiguration& requestedConfiguration, + configureStreams_3_3_cb _hidl_cb) override { + return mParent->configureStreams_3_3(requestedConfiguration, _hidl_cb); + } + + virtual Return configureStreams_3_4( + const V3_4::StreamConfiguration& requestedConfiguration, + configureStreams_3_4_cb _hidl_cb) override { + return mParent->configureStreams_3_4(requestedConfiguration, _hidl_cb); + } + + virtual Return processCaptureRequest_3_4(const hidl_vec& requests, + const hidl_vec& cachesToRemove, + ICameraDeviceSession::processCaptureRequest_3_4_cb _hidl_cb) override { + return mParent->processCaptureRequest_3_4(requests, cachesToRemove, _hidl_cb); + } + + virtual Return configureStreams_3_5( + const StreamConfiguration& requestedConfiguration, + configureStreams_3_5_cb _hidl_cb) override { + return mParent->configureStreams_3_5(requestedConfiguration, _hidl_cb); + } + + virtual Return signalStreamFlush( + const hidl_vec& requests, + uint32_t streamConfigCounter) override { + return mParent->signalStreamFlush(requests, streamConfigCounter); + } + + virtual Return isReconfigurationRequired(const V3_2::CameraMetadata& oldSessionParams, + const V3_2::CameraMetadata& newSessionParams, + ICameraDeviceSession::isReconfigurationRequired_cb _hidl_cb) override { + return mParent->isReconfigurationRequired(oldSessionParams, newSessionParams, _hidl_cb); + } + + virtual Return 
configureStreams_3_6( + const StreamConfiguration& requestedConfiguration, + configureStreams_3_6_cb _hidl_cb) override { + return mParent->configureStreams_3_6(requestedConfiguration, _hidl_cb); + } + + virtual Return switchToOffline( + const hidl_vec& streamsToKeep, + switchToOffline_cb _hidl_cb) override { + return mParent->switchToOffline(streamsToKeep, _hidl_cb); + } + + private: + sp mParent; + }; +}; + +} // namespace implementation +} // namespace V3_6 +} // namespace device +} // namespace camera +} // namespace hardware +} // namespace android + +#endif // ANDROID_HARDWARE_CAMERA_DEVICE_V3_6_EXTCAMERADEVICESESSION_H diff --git a/camera/device/3.6/default/include/ext_device_v3_6_impl/ExternalCameraDevice_3_6.h b/camera/device/3.6/default/include/ext_device_v3_6_impl/ExternalCameraDevice_3_6.h new file mode 100644 index 0000000..020bec4 --- /dev/null +++ b/camera/device/3.6/default/include/ext_device_v3_6_impl/ExternalCameraDevice_3_6.h @@ -0,0 +1,126 @@ +/* + * Copyright (C) 2020 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_HARDWARE_CAMERA_DEVICE_V3_6_EXTCAMERADEVICE_H +#define ANDROID_HARDWARE_CAMERA_DEVICE_V3_6_EXTCAMERADEVICE_H + +#include + +#include "ExternalCameraDeviceSession.h" +#include <../../../../3.5/default/include/ext_device_v3_5_impl/ExternalCameraDevice_3_5.h> + +namespace android { +namespace hardware { +namespace camera { +namespace device { +namespace V3_6 { +namespace implementation { + +using namespace ::android::hardware::camera::device; +using ::android::hardware::camera::device::V3_6::ICameraDevice; +using ::android::hardware::camera::common::V1_0::CameraResourceCost; +using ::android::hardware::camera::common::V1_0::TorchMode; +using ::android::hardware::camera::common::V1_0::Status; +using ::android::hardware::camera::external::common::ExternalCameraConfig; +using ::android::hardware::camera::external::common::Size; +using ::android::hardware::Return; +using ::android::hardware::Void; +using ::android::hardware::hidl_vec; +using ::android::hardware::hidl_string; +using ::android::sp; + +/* + * The camera device HAL implementation is opened lazily (via the open call) + */ +struct ExternalCameraDevice : public V3_5::implementation::ExternalCameraDevice { + + // Called by external camera provider HAL. + // Provider HAL must ensure the uniqueness of CameraDevice object per cameraId, or there could + // be multiple CameraDevice trying to access the same physical camera. Also, provider will have + // to keep track of all CameraDevice objects in order to notify CameraDevice when the underlying + // camera is detached. 
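+    //
+    // Compared to the @3.5 device, this class mainly overrides session
+    // creation and adds the OFFLINE_PROCESSING capability (see
+    // ExternalCameraDevice.cpp); remaining behavior is inherited from
+    // V3_5::implementation::ExternalCameraDevice.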
+ ExternalCameraDevice(const std::string& cameraId, const ExternalCameraConfig& cfg); + virtual ~ExternalCameraDevice(); + + virtual sp getInterface() override { + return new TrampolineDeviceInterface_3_6(this); + } + +protected: + virtual sp createSession( + const sp&, + const ExternalCameraConfig& cfg, + const std::vector& sortedFormats, + const CroppingType& croppingType, + const common::V1_0::helper::CameraMetadata& chars, + const std::string& cameraId, + unique_fd v4l2Fd) override; + + virtual status_t initAvailableCapabilities( + ::android::hardware::camera::common::V1_0::helper::CameraMetadata*) override; + +private: + struct TrampolineDeviceInterface_3_6 : public ICameraDevice { + TrampolineDeviceInterface_3_6(sp parent) : + mParent(parent) {} + + virtual Return getResourceCost(V3_2::ICameraDevice::getResourceCost_cb _hidl_cb) + override { + return mParent->getResourceCost(_hidl_cb); + } + + virtual Return getCameraCharacteristics( + V3_2::ICameraDevice::getCameraCharacteristics_cb _hidl_cb) override { + return mParent->getCameraCharacteristics(_hidl_cb); + } + + virtual Return setTorchMode(TorchMode mode) override { + return mParent->setTorchMode(mode); + } + + virtual Return open(const sp& callback, + V3_2::ICameraDevice::open_cb _hidl_cb) override { + return mParent->open(callback, _hidl_cb); + } + + virtual Return dumpState(const hidl_handle& fd) override { + return mParent->dumpState(fd); + } + + virtual Return getPhysicalCameraCharacteristics(const hidl_string& physicalCameraId, + V3_5::ICameraDevice::getPhysicalCameraCharacteristics_cb _hidl_cb) override { + return mParent->getPhysicalCameraCharacteristics(physicalCameraId, _hidl_cb); + } + + virtual Return isStreamCombinationSupported( + const V3_4::StreamConfiguration& streams, + V3_5::ICameraDevice::isStreamCombinationSupported_cb _hidl_cb) override { + return mParent->isStreamCombinationSupported(streams, _hidl_cb); + } + + private: + sp mParent; + }; +}; + +} // namespace implementation +} // namespace V3_6 +} // namespace device +} // namespace camera +} // namespace hardware +} // namespace android + +#endif // ANDROID_HARDWARE_CAMERA_DEVICE_V3_6_EXTCAMERADEVICE_H diff --git a/camera/device/3.6/default/include/ext_device_v3_6_impl/ExternalCameraOfflineSession.h b/camera/device/3.6/default/include/ext_device_v3_6_impl/ExternalCameraOfflineSession.h new file mode 100644 index 0000000..230b67c --- /dev/null +++ b/camera/device/3.6/default/include/ext_device_v3_6_impl/ExternalCameraOfflineSession.h @@ -0,0 +1,232 @@ +/* + * Copyright (C) 2020 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef ANDROID_HARDWARE_CAMERA_DEVICE_V3_6_EXTCAMERAOFFLINESESSION_H +#define ANDROID_HARDWARE_CAMERA_DEVICE_V3_6_EXTCAMERAOFFLINESESSION_H + +#include +#include +#include +#include +#include +#include +#include +#include <../../3.4/default/include/ext_device_v3_4_impl/ExternalCameraUtils.h> +#include <../../3.5/default/include/ext_device_v3_5_impl/ExternalCameraDeviceSession.h> +#include +#include +#include + +namespace android { +namespace hardware { +namespace camera { +namespace device { +namespace V3_6 { +namespace implementation { + +using ::android::hardware::camera::device::V3_2::BufferCache; +using ::android::hardware::camera::device::V3_5::BufferRequest; +using ::android::hardware::camera::device::V3_5::BufferRequestStatus; +using ::android::hardware::camera::device::V3_2::BufferStatus; +using ::android::hardware::camera::device::V3_2::CameraMetadata; +using ::android::hardware::camera::device::V3_2::CaptureRequest; +using ::android::hardware::camera::device::V3_2::CaptureResult; +using ::android::hardware::camera::device::V3_2::ErrorCode; +using ::android::hardware::camera::device::V3_5::ICameraDeviceCallback; +using ::android::hardware::camera::device::V3_2::MsgType; +using ::android::hardware::camera::device::V3_2::NotifyMsg; +using ::android::hardware::camera::device::V3_2::RequestTemplate; +using ::android::hardware::camera::device::V3_2::Stream; +using ::android::hardware::camera::device::V3_5::StreamConfiguration; +using ::android::hardware::camera::device::V3_2::StreamConfigurationMode; +using ::android::hardware::camera::device::V3_2::StreamRotation; +using ::android::hardware::camera::device::V3_2::StreamType; +using ::android::hardware::camera::device::V3_2::DataspaceFlags; +using ::android::hardware::camera::device::V3_2::CameraBlob; +using ::android::hardware::camera::device::V3_2::CameraBlobId; +using ::android::hardware::camera::device::V3_4::HalStreamConfiguration; +using ::android::hardware::camera::device::V3_6::ICameraOfflineSession; +using ::android::hardware::camera::common::V1_0::Status; +using ::android::hardware::camera::common::V1_0::helper::HandleImporter; +using ::android::hardware::camera::common::V1_0::helper::ExifUtils; +using ::android::hardware::camera::external::common::ExternalCameraConfig; +using ::android::hardware::camera::external::common::Size; +using ::android::hardware::camera::external::common::SizeHasher; +using ::android::hardware::graphics::common::V1_0::BufferUsage; +using ::android::hardware::graphics::common::V1_0::Dataspace; +using ::android::hardware::graphics::common::V1_0::PixelFormat; +using ::android::hardware::kSynchronizedReadWrite; +using ::android::hardware::MessageQueue; +using ::android::hardware::MQDescriptorSync; +using ::android::hardware::Return; +using ::android::hardware::Void; +using ::android::hardware::hidl_vec; +using ::android::hardware::hidl_string; +using ::android::sp; +using ::android::Mutex; +using ::android::base::unique_fd; + +using ::android::hardware::camera::device::V3_4::implementation::SupportedV4L2Format; +using ::android::hardware::camera::device::V3_4::implementation::CroppingType; +using ::android::hardware::camera::device::V3_4::implementation::CirculatingBuffers; +using ::android::hardware::camera::device::V3_4::implementation::HalRequest; +using ::android::hardware::camera::device::V3_4::implementation::OutputThreadInterface; + +struct ExternalCameraOfflineSession : public virtual RefBase, + public virtual OutputThreadInterface { + + ExternalCameraOfflineSession( + const 
CroppingType& croppingType, + const common::V1_0::helper::CameraMetadata& chars, + const std::string& cameraId, + const std::string& exifMake, + const std::string& exifModel, + uint32_t blobBufferSize, + bool afTrigger, + const hidl_vec& offlineStreams, + std::deque>& offlineReqs, + const std::map& circulatingBuffers); + + bool initialize(); + + virtual ~ExternalCameraOfflineSession(); + + // Retrieve the HIDL interface, split into its own class to avoid inheritance issues when + // dealing with minor version revs and simultaneous implementation and interface inheritance + virtual sp getInterface() { + return new TrampolineSessionInterface_3_6(this); + } + +protected: + + // Methods from OutputThreadInterface + virtual Status importBuffer(int32_t streamId, + uint64_t bufId, buffer_handle_t buf, + /*out*/buffer_handle_t** outBufPtr, + bool allowEmptyBuf) override; + + virtual Status processCaptureResult(std::shared_ptr&) override; + + virtual Status processCaptureRequestError(const std::shared_ptr&, + /*out*/std::vector* msgs = nullptr, + /*out*/std::vector* results = nullptr) override; + + virtual ssize_t getJpegBufferSize(uint32_t width, uint32_t height) const override; + + virtual void notifyError(uint32_t frameNumber, int32_t streamId, ErrorCode ec) override; + // End of OutputThreadInterface methods + + class OutputThread : public V3_5::implementation::ExternalCameraDeviceSession::OutputThread { + public: + OutputThread( + wp parent, CroppingType ct, + const common::V1_0::helper::CameraMetadata& chars, + sp bufReqThread, + std::deque>& offlineReqs) : + V3_5::implementation::ExternalCameraDeviceSession::OutputThread( + parent, ct, chars, bufReqThread), + mOfflineReqs(offlineReqs) {} + + virtual bool threadLoop() override; + + protected: + std::deque> mOfflineReqs; + }; // OutputThread + + + Return setCallback(const sp& cb); + + Return getCaptureResultMetadataQueue( + V3_3::ICameraDeviceSession::getCaptureResultMetadataQueue_cb _hidl_cb); + + Return close(); + + void initOutputThread(); + + void invokeProcessCaptureResultCallback( + hidl_vec &results, bool tryWriteFmq); + + status_t fillCaptureResult(common::V1_0::helper::CameraMetadata& md, nsecs_t timestamp); + + void cleanupBuffersLocked(int id); + + // Protect (most of) HIDL interface methods from synchronized-entering + mutable Mutex mInterfaceLock; + + mutable Mutex mLock; // Protect all data members except otherwise noted + + bool mClosed = false; + const CroppingType mCroppingType; + const common::V1_0::helper::CameraMetadata mChars; + const std::string mCameraId; + const std::string mExifMake; + const std::string mExifModel; + const uint32_t mBlobBufferSize; + + std::mutex mAfTriggerLock; // protect mAfTrigger + bool mAfTrigger; + + const hidl_vec mOfflineStreams; + std::deque> mOfflineReqs; + + // Protect mCirculatingBuffers, must not lock mLock after acquiring this lock + mutable Mutex mCbsLock; + std::map mCirculatingBuffers; + + static HandleImporter sHandleImporter; + + using ResultMetadataQueue = MessageQueue; + std::shared_ptr mResultMetadataQueue; + + // Protect against invokeProcessCaptureResultCallback() + Mutex mProcessCaptureResultLock; + + sp mCallback; + + sp mBufferRequestThread; + sp mOutputThread; +private: + + struct TrampolineSessionInterface_3_6 : public ICameraOfflineSession { + TrampolineSessionInterface_3_6(sp parent) : + mParent(parent) {} + + virtual Return setCallback(const sp& cb) override { + return mParent->setCallback(cb); + } + + virtual Return getCaptureResultMetadataQueue( + 
V3_3::ICameraDeviceSession::getCaptureResultMetadataQueue_cb _hidl_cb) override { + return mParent->getCaptureResultMetadataQueue(_hidl_cb); + } + + virtual Return close() override { + return mParent->close(); + } + + private: + sp mParent; + }; +}; + +} // namespace implementation +} // namespace V3_6 +} // namespace device +} // namespace camera +} // namespace hardware +} // namespace android + +#endif // ANDROID_HARDWARE_CAMERA_DEVICE_V3_6_EXTCAMERAOFFLINESESSION_H diff --git a/camera/device/3.6/types.hal b/camera/device/3.6/types.hal new file mode 100644 index 0000000..f4c50ed --- /dev/null +++ b/camera/device/3.6/types.hal @@ -0,0 +1,149 @@ +/* + * Copyright (C) 2019 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package android.hardware.camera.device@3.6; + +import @3.2::BufferCache; +import @3.4::HalStream; + +/** + * OfflineRequest: + * + * Information about a capture request being switched to offline mode via the + * ICameraDeviceSession#switchToOffline method. + * + */ +struct OfflineRequest { + /** + * Must match a inflight CaptureRequest sent by camera service + */ + uint32_t frameNumber; + + /** + * Stream IDs for outputs that will be returned via ICameraDeviceCallback. + * The stream ID must be within one of offline stream listed in + * CameraOfflineSessionInfo. + * Camera service will validate these pending buffers are matching camera + * service's record to make sure no buffers are leaked during the + * switchToOffline call. + */ + vec pendingStreams; +}; + +/** + * OfflineStream: + * + * Information about a stream being switched to offline mode via the + * ICameraDeviceSession#switchToOffline method. + * + */ +struct OfflineStream { + /** + * IDs of a stream to be transferred to offline session. + * + * For devices that do not support HAL buffer management, this must be + * one of stream ID listed in streamsToKeep argument of the + * switchToOffline call. + * For devices that support HAL buffer management, this could be any stream + * that was configured right before calling switchToOffline. + */ + int32_t id; + + /** + * Number of outstanding buffers that will be returned via offline session + */ + uint32_t numOutstandingBuffers; + + /** + * Buffer ID of buffers currently cached between camera service and this + * stream, which may or may not be owned by the camera HAL right now. + * See StreamBuffer#bufferId for more details. + */ + vec circulatingBufferIds; +}; + +/** + * CameraOfflineSessionInfo: + * + * Information about pending outputs that's being transferred to an offline + * session from an active session using the + * ICameraDeviceSession#switchToOffline method. + * + */ +struct CameraOfflineSessionInfo { + /** + * Information on what streams will be preserved in offline session. + * Streams not listed here will be removed by camera service after + * switchToOffline call returns. 
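 *
 * Editor's note (illustrative sketch, not part of the upstream comment): a
 * HAL implementing switchToOffline would fill this struct roughly as below,
 * assuming a single offline stream with two buffers still in flight; the
 * variable names are hypothetical.
 *
 *   V3_6::CameraOfflineSessionInfo info;
 *   info.offlineStreams.resize(1);
 *   info.offlineStreams[0].id = offlineStreamId;
 *   info.offlineStreams[0].numOutstandingBuffers = 2;
 *   info.offlineStreams[0].circulatingBufferIds = {bufferId0, bufferId1};
 *   info.offlineRequests.resize(1);
 *   info.offlineRequests[0].frameNumber = inflightFrameNumber;
 *   info.offlineRequests[0].pendingStreams = {offlineStreamId};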
+ */ + vec offlineStreams; + + /** + * Information for requests that will be handled by offline session + * Camera service will validate this matches what camera service has on + * record. + */ + vec offlineRequests; +}; + +/** + * HalStream: + * + * The camera HAL's response to each requested stream configuration. + * + * This version extends the @3.4 HalStream with the physicalCameraId + * field + */ +struct HalStream { + /** + * The definition of HalStream from the prior version. + */ + @3.4::HalStream v3_4; + + /** + * Whether this stream can be switch to offline mode. + * + * For devices that does not support the OFFLINE_PROCESSING capability, this + * fields will always be false. + * + * For backward compatible camera devices that support the + * OFFLINE_PROCESSING capability: any input stream and any output stream + * that can be output of the input stream must set this field to true. Also + * any stream of YUV420_888 format or JPEG format, with CPU_READ usage flag, + * must set this field to true. + * + * For depth only camera devices that support the OFFLINE_PROCESSING + * capability: any DEPTH16 output stream must set this field to true. + * + * All other streams are up to camera HAL to advertise support or not, + * though it is not recommended to list support for streams with + * hardware composer or video encoder usage flags as these streams tend + * to be targeted continuously and can lead to long latency when trying to + * switch to offline. + * + */ + bool supportOffline; +}; + +/** + * HalStreamConfiguration: + * + * Identical to @3.4::HalStreamConfiguration, except that it contains @3.6::HalStream entries. + * + */ +struct HalStreamConfiguration { + vec streams; +}; diff --git a/camera/device/3.7/ICameraDevice.hal b/camera/device/3.7/ICameraDevice.hal new file mode 100644 index 0000000..9bc2083 --- /dev/null +++ b/camera/device/3.7/ICameraDevice.hal @@ -0,0 +1,43 @@ +/* + * Copyright (C) 2021 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package android.hardware.camera.device@3.7; + +import android.hardware.camera.common@1.0::Status; +import @3.6::ICameraDevice; + +/** + * Camera device interface + * + * Supports the android.hardware.Camera API, and the android.hardware.camera2 + * API at LIMITED or better hardware level. + * + * ICameraDevice.open() must return @3.2::ICameraDeviceSession, + * @3.5::ICameraDeviceSession, @3.6::ICameraDeviceSession, or + * @3.7::ICameraDeviceSession. + */ +interface ICameraDevice extends @3.6::ICameraDevice { + /** + * isStreamCombinationSupported_3_7: + * + * Identical to @3.5::ICameraDevice.isStreamCombinationSupported, except + * that it takes a @3.7::StreamConfiguration parameter, which could contain + * information about multi-resolution input and output streams. 
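 *
 * Editor's sketch (an assumption for illustration, not upstream text): through
 * the HIDL-generated C++ proxy, where 'device' is a hypothetical
 * sp<V3_7::ICameraDevice>, the call takes the shape
 *
 *   V3_7::StreamConfiguration config;   // streams, operationMode, etc.
 *   device->isStreamCombinationSupported_3_7(config,
 *           [](auto status, bool supported) {
 *               // 'supported' is only meaningful when status == Status::OK
 *           });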
+ * + */ + isStreamCombinationSupported_3_7(StreamConfiguration streams) + generates (Status status, bool queryStatus); +}; diff --git a/camera/device/3.7/ICameraDeviceSession.hal b/camera/device/3.7/ICameraDeviceSession.hal new file mode 100644 index 0000000..fb5c7fa --- /dev/null +++ b/camera/device/3.7/ICameraDeviceSession.hal @@ -0,0 +1,124 @@ +/* + * Copyright (C) 2021 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package android.hardware.camera.device@3.7; + +import android.hardware.camera.common@1.0::Status; +import @3.2::BufferCache; +import @3.5::StreamConfiguration; +import @3.6::ICameraDeviceSession; +import @3.6::HalStreamConfiguration; + +/** + * Camera device active session interface. + * + * Obtained via ICameraDevice::open(), this interface contains the methods to + * configure and request captures from an active camera device. + */ +interface ICameraDeviceSession extends @3.6::ICameraDeviceSession { + /** + * configureStreams_3_7: + * + * Identical to @3.6::ICameraDeviceSession.configureStreams_3_6, except that: + * + * - The requestedConfiguration allows the camera framework to configure + * stream groups. + * - For requested configurations of streams within the same group, the + * corresponding halConfiguration must have the same usage flags and + * maxBuffers. + * - Within a CaptureRequest, the application is guaranteed not to request + * more than one streams within the same stream group. When one of the + * stream within a stream group is requested, the camera HAL can either + * produce output on that stream, or any other stream within the same + * stream group. + * - The requestedConfiguration allows the camera framework to indicate that + * input images of different sizes may be submitted within capture + * requests. + * + * @return status Status code for the operation, one of: + * OK: + * On successful stream configuration. + * INTERNAL_ERROR: + * If there has been a fatal error and the device is no longer + * operational. Only close() can be called successfully by the + * framework after this error is returned. + * ILLEGAL_ARGUMENT: + * If the requested stream configuration is invalid. Some examples + * of invalid stream configurations include: + * - Including more than 1 INPUT stream + * - Not including any OUTPUT streams + * - Including streams with unsupported formats, or an unsupported + * size for that format. + * - Including too many output streams of a certain format. + * - Unsupported rotation configuration + * - Stream sizes/formats don't satisfy the + * StreamConfigurationMode requirements + * for non-NORMAL mode, or the requested operation_mode is not + * supported by the HAL. + * - Unsupported usage flag + * - Unsupported stream groupIds, or unsupported multi-resolution + * input stream. 
+ * The camera service cannot filter out all possible illegal stream + * configurations, since some devices may support more simultaneous + * streams or larger stream resolutions than the minimum required + * for a given camera device hardware level. The HAL must return an + * ILLEGAL_ARGUMENT for any unsupported stream set, and then be + * ready to accept a future valid stream configuration in a later + * configureStreams call. + * @return halConfiguration The stream parameters desired by the HAL for + * each stream, including maximum buffers, the usage flags, and the + * override format. + */ + configureStreams_3_7(StreamConfiguration requestedConfiguration) + generates (Status status, @3.6::HalStreamConfiguration halConfiguration); + + /** + * processCaptureRequest_3_7: + * + * Identical to @3.4::ICameraDeviceSession.processCaptureRequest, except that: + * + * - The capture request can include width and height of the input buffer for + * a reprocessing request. + * + * @return status Status code for the operation, one of: + * OK: + * On a successful start to processing the capture request + * ILLEGAL_ARGUMENT: + * If the input is malformed (the settings are empty when not + * allowed, the physical camera settings are invalid, there are 0 + * output buffers, etc) and capture processing + * cannot start. Failures during request processing must be + * handled by calling ICameraDeviceCallback::notify(). In case of + * this error, the framework retains responsibility for the + * stream buffers' fences and the buffer handles; the HAL must not + * close the fences or return these buffers with + * ICameraDeviceCallback::processCaptureResult(). + * In case of multi-resolution input image, this error must be returned + * if the caller passes in a CaptureRequest with an invalid + * [inputWith, inputHeight]. + * INTERNAL_ERROR: + * If the camera device has encountered a serious error. After this + * error is returned, only the close() method can be successfully + * called by the framework. + * @return numRequestProcessed Number of requests successfully processed by + * camera HAL. When status is OK, this must be equal to the size of + * requests. When the call fails, this number is the number of requests + * that HAL processed successfully before HAL runs into an error. + * + */ + processCaptureRequest_3_7(vec requests, vec cachesToRemove) + generates (Status status, uint32_t numRequestProcessed); +}; diff --git a/camera/device/3.7/ICameraInjectionSession.hal b/camera/device/3.7/ICameraInjectionSession.hal new file mode 100644 index 0000000..f5797c3 --- /dev/null +++ b/camera/device/3.7/ICameraInjectionSession.hal @@ -0,0 +1,85 @@ +/* + * Copyright (C) 2021 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package android.hardware.camera.device@3.7; + +import android.hardware.camera.common@1.0::Status; +import @3.2::BufferCache; +import @3.2::CameraMetadata; +import @3.5::StreamConfiguration; +import @3.6::HalStreamConfiguration; +import @3.7::ICameraDeviceSession; + +/** + * Injection Camera device active session interface. + * + * When an external camera is injected to replace the internal camera session, the + * injection session will be established in camera framework, and then + * configureInjectionStreams() will be called to ask the external camera to + * configure itself to match the stream configuration of the internal camera. + * + * Camera framework is responsible to close the injection session once the client + * switch back to internal camera streaming. + * + * If the external camera cannot support the configuration ILLEGAL_ARGUMENT will + * be returned. + */ +interface ICameraInjectionSession extends @3.7::ICameraDeviceSession { + /** + * configureInjectionStreams: + * + * Identical to @3.7::ICameraDeviceSession.configureStreams_3_7, except that: + * + * @param requestedConfiguration + * The current stream configuration of the internal camera session and + * the injection camera must follow the configuration without overriding + * any part of it. + * @param characteristics + * The characteristics of internal camera contains a list of keys so that + * the stream continuity can be maintained after the external camera is + * injected. + * + * @return status Status code for the operation, one of: + * OK: + * On successful stream configuration. + * INTERNAL_ERROR: + * If there has been a fatal error and the device is no longer + * operational. Only close() can be called successfully by the + * framework after this error is returned. + * ILLEGAL_ARGUMENT: + * If the requested stream configuration is invalid. Some examples + * of invalid stream configurations include: + * - Not including any OUTPUT streams + * - Including streams with unsupported formats, or an unsupported + * size for that format. + * - Including too many output streams of a certain format. + * - Unsupported rotation configuration + * - Stream sizes/formats don't satisfy the + * StreamConfigurationMode requirements + * for non-NORMAL mode, or the requested operation_mode is not + * supported by the HAL. + * - Unsupported usage flag + * The camera service cannot filter out all possible illegal stream + * configurations, since some devices may support more simultaneous + * streams or larger stream resolutions than the minimum required + * for a given camera device hardware level. The HAL must return an + * ILLEGAL_ARGUMENT for any unsupported stream set, and then be + * ready to accept a future valid stream configuration in a later + * configureInjectionStreams call. + */ + configureInjectionStreams(StreamConfiguration requestedConfiguration, + CameraMetadata characteristics) generates (Status status); +}; diff --git a/camera/device/3.7/types.hal b/camera/device/3.7/types.hal new file mode 100644 index 0000000..55aceb8 --- /dev/null +++ b/camera/device/3.7/types.hal @@ -0,0 +1,146 @@ +/* + * Copyright (C) 2021 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package android.hardware.camera.device@3.7; + +import @3.2::CameraMetadata; +import @3.2::StreamConfigurationMode; +import @3.4::CaptureRequest; +import @3.4::Stream; + +import android.hardware.camera.metadata@3.6::CameraMetadataEnumAndroidSensorPixelMode; + +/** + * Stream: + * + * A descriptor for a single camera input or output stream. A stream is defined + * by the framework by its buffer resolution and format, and additionally by the + * HAL with the gralloc usage flags and the maximum in-flight buffer count. + * + * This version extends the @3.4 Stream with the multi-resolution output surface + * group Id field. + */ +struct Stream { + /** + * The definition of Stream from the prior version. + */ + @3.4::Stream v3_4; + + /** + * The surface group id used for multi-resolution output streams. + * + * This works similar to the surfaceGroupId of OutputConfiguration in the + * public API, with the exception that this is for multi-resolution image + * reader and is used by the camera HAL to choose a target stream within + * the same group to which images are written. All streams in the same group + * will have the same image format, data space, and usage flag. + * + * The framework must only call processCaptureRequest on at most one of the + * streams within a surface group. Depending on current active physical + * camera backing the logical multi-camera, or the pixel mode the camera is + * running in, the HAL can choose to request and return a buffer from any + * stream within the same group. -1 means that this stream is an input + * stream, or is an output stream which doesn't belong to any group. + * + * Streams with the same non-negative group id must have the same format and + * usage flag. + */ + int32_t groupId; + + /** + * The sensor pixel modes used by this stream. This can assist the camera + * HAL in decision making about stream combination support. + * If this is empty, the HAL must assume that this stream will only be used + * with ANDROID_SENSOR_PIXEL_MODE set to ANDROID_SENSOR_PIXEL_MODE_DEFAULT. + */ + vec sensorPixelModesUsed; +}; + +/** + * StreamConfiguration: + * + * Identical to @3.5::StreamConfiguration, except that the streams + * vector contains @3.7::Stream. + */ +struct StreamConfiguration { + /** + * An array of camera stream pointers, defining the input/output + * configuration for the camera HAL device. + */ + vec streams; + + /** + * The definition of operation mode from prior version. + * + */ + @3.2::StreamConfigurationMode operationMode; + + /** + * The definition of session parameters from prior version. + */ + @3.2::CameraMetadata sessionParams; + + /** + * The definition of stream configuration counter from prior version. + */ + uint32_t streamConfigCounter; + + /** + * If an input stream is configured, whether the input stream is expected to + * receive variable resolution images. + * + * This flag can only be set to true if the camera device supports + * multi-resolution input streams by advertising input stream configurations in + * physicalCameraMultiResolutionStreamConfigurations in its physical cameras' + * characteristics. 
+ * + * When this flag is set to true, the input stream's width and height can be + * any one of the supported multi-resolution input stream sizes. + */ + bool multiResolutionInputImage; +}; + +/** + * CaptureRequest: + * + * This version extends 3.4::CaptureRequest with the input buffer's width and + * height. + */ +struct CaptureRequest { + /** + * The definition of CaptureRequest from the prior version. + */ + @3.4::CaptureRequest v3_4; + + /** + * The width and height of the input buffer for this capture request. + * + * These fields will be [0, 0] if no input buffer exists in the capture + * request. + * + * If the stream configuration contains an input stream and has the + * multiResolutionInputImage flag set to true, the camera client may submit a + * reprocessing request with input buffer size different than the + * configured input stream size. In that case, the inputWith and inputHeight + * fields will be the actual size of the input image. + * + * If the stream configuration contains an input stream and the + * multiResolutionInputImage flag is false, the inputWidth and inputHeight must + * match the input stream size. + */ + uint32_t inputWidth; + uint32_t inputHeight; +}; diff --git a/camera/device/README.md b/camera/device/README.md new file mode 100644 index 0000000..3709cb8 --- /dev/null +++ b/camera/device/README.md @@ -0,0 +1,97 @@ +## Camera Device HAL ## +--- + +## Overview: ## + +The camera.device HAL interface is used by the Android camera service to operate +individual camera devices. Instances of camera.device HAL interface can be obtained +via one of the ICameraProvider::getCameraDeviceInterface_V_x() methods, where N +is the major version of the camera device interface. + +Obtaining the device interface does not turn on the respective camera device; +each camera device interface has an actual open() method to begin an active +camera session. Without invoking open(), the interface can be used for querying +camera static information. + +More complete information about the Android camera HAL and subsystem can be found at +[source.android.com](http://source.android.com/devices/camera/index.html). + +## Version history: ## + +### ICameraDevice.hal@1.0: + +HIDL version of the legacy camera device HAL. Intended as a shim for devices +needing to use the deprecated pre-HIDL camera device HAL v1.0. + +May be used in HIDL passthrough mode for devices upgrading to the Android O +release; must be used in binderized mode for devices launching in the O release. + +It is strongly recommended to not use this interface for new devices, as new +devices may not use this interface starting with the Android P release, and all +support for ICameraDevice@1.0 will be removed with the Android R release. + +This HAL interface version only allows support at the LEGACY level for the +android.hardware.camera2 API. + +Added in Android 8.0. + +Subsidiary HALs: + +#### ICameraDevice1PreviewCallback.hal@1.0: + +Callback interface for obtaining, filling, and returning graphics buffers for +preview operation with the ICameraDevice@1.0 inteface. + +#### ICameraDevice1Callback.hal@1.0: + +Callback interface for sending events and data buffers from the HAL to the +camera service. + +### ICameraDevice.hal@3.2: + +HIDL version of the baseline camera device HAL, required for LIMITED or FULL +operation through the android.hardware.camera2 API. + +The main HAL contains methods for static queries about the device, similar to +the HALv3-specific sections of the legacy camera module HAL. 
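For illustration only (an editor's sketch against the generated C++ proxies,
with namespaces abbreviated; the instance and device names are examples), a
client typically reaches a device through the provider and only later opens it:

    sp<ICameraProvider> provider = ICameraProvider::getService("legacy/0");
    provider->getCameraDeviceInterface_V3_x("device@3.2/legacy/0",
            [](auto status, const sp<device::V3_2::ICameraDevice>& dev) {
                if (status != Status::OK || dev == nullptr) return;
                // Static queries (characteristics, resource cost) work here;
                // the camera hardware is still powered off.
            });
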
Simply obtaining an +instance of the camera device interface does not turn on the camera device. + +May be used in passthrough mode for devices upgrading to the Android O release; +must be used in binderized mode for all new devices launching with Android O or +later. + +The open() method actually opens the camera device for use, returning a Session +interface for operating the active camera. It takes a Callback interface as an +argument. + +Added in Android 8.0. + +Subsidiary HALs: + +#### ICameraDevice3Session.hal@3.2: + +Closely matches the features and operation of the pre-HIDL camera device HAL +v3.2, containing methods for configuring an active camera device and submitting +capture requests to it. + +#### ICameraDevice3Callback.hal@3.2: + +Callback interface for sending completed captures and other asynchronous events +from tehe HAL to the client. + +### ICameraDevice.hal@3.3: + +A minor revision to the ICameraDevice.hal@3.2. + + - Adds support for overriding the output dataspace of a stream, which was + supported in the legacy camera HAL. + +Added in Android 8.1. + +### ICameraDevice.hal@3.4: + +A minor revision to the ICameraDevice.hal@3.3. + + - Adds support for session parameters during stream configuration. + +Added in Android 9 diff --git a/camera/provider/2.4/ICameraProvider.hal b/camera/provider/2.4/ICameraProvider.hal new file mode 100644 index 0000000..105629d --- /dev/null +++ b/camera/provider/2.4/ICameraProvider.hal @@ -0,0 +1,191 @@ +/* + * Copyright (C) 2016 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package android.hardware.camera.provider@2.4; + +import ICameraProviderCallback; +import android.hardware.camera.common@1.0::types; +import android.hardware.camera.device@1.0::ICameraDevice; +import android.hardware.camera.device@3.2::ICameraDevice; + +/** + * Camera provider HAL, which enumerates the available individual camera devices + * known to the provider, and provides updates about changes to device status, + * such as connection, disconnection, or torch mode enable/disable. + * + * The provider is responsible for generating a list of camera device service + * names that can then be opened via the hardware service manager. + * + * Multiple camera provider HALs may be present in a single system. + * For discovery, the service names, and process names, must be of the form + * "android.hardware.camera.provider@.//" + * where + * - / is the provider HAL HIDL version, + * - is the type of devices this provider knows about, such as + * "internal", "legacy", "external", "remote" etc. The camera framework + * must not differentiate or chage its behavior based on the specific type. + * - is a non-negative integer starting from 0 to disambiguate + * between multiple HALs of the same type. + * + * The "legacy" type is only used for passthrough legacy HAL mode, and must + * not be used by a standalone binderized HAL. 
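 *
 * (Editor's illustration; the angle-bracket placeholders in the surrounding
 * text were lost in formatting, so concrete examples are given here.) A
 * provider service instance may be named, for example,
 * "android.hardware.camera.provider@2.4/legacy/0" or
 * "android.hardware.camera.provider@2.4/external/0", and the devices it
 * enumerates may be named "device@3.2/legacy/0" or "device@3.4/external/101"
 * (the latter form is what ExternalCameraProviderImpl_2_4 in this patch
 * constructs in addExternalCamera()).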
+ * + * The device instance names enumerated by the provider in getCameraIdList() or + * ICameraProviderCallback::cameraDeviceStatusChange() must be of the form + * "device@.//" where + * / is the HIDL version of the interface. is either a small + * incrementing integer for "internal" device types, with 0 being the main + * back-facing camera and 1 being the main front-facing camera, if they exist. + * Or, for external devices, a unique serial number (if possible) that can be + * used to identify the device reliably when it is disconnected and reconnected. + * + * Multiple providers must not enumerate the same device ID. + */ +interface ICameraProvider { + + /** + * setCallback: + * + * Provide a callback interface to the HAL provider to inform framework of + * asynchronous camera events. The framework must call this function once + * during camera service startup, before any other calls to the provider + * (note that in case the camera service restarts, this method must be + * invoked again during its startup). + * + * @param callback + * A non-null callback interface to invoke when camera events occur. + * @return status + * Status code for the operation, one of: + * OK: + * On success + * INTERNAL_ERROR: + * An unexpected internal error occurred while setting the callbacks + * ILLEGAL_ARGUMENT: + * The callback argument is invalid (for example, null). + * + */ + setCallback(ICameraProviderCallback callback) generates (Status status); + + /** + * getVendorTags: + * + * Retrieve all vendor tags supported by devices discoverable through this + * provider. The tags are grouped into sections. + * + * @return status + * Status code for the operation, one of: + * OK: + * On success + * INTERNAL_ERROR: + * An unexpected internal error occurred while setting the callbacks + * @return sections + * The supported vendor tag sections; empty if there are no supported + * vendor tags, or status is not OK. + * + */ + getVendorTags() generates (Status status, vec sections); + + /** + * getCameraIdList: + * + * Returns the list of internal camera device interfaces known to this + * camera provider. These devices can then be accessed via the hardware + * service manager. + * + * External camera devices (camera facing EXTERNAL) must be reported through + * the device status change callback, not in this list. Only devices with + * facing BACK or FRONT must be listed here. + * + * @return status Status code for the operation, one of: + * OK: + * On a succesful generation of camera ID list + * INTERNAL_ERROR: + * A camera ID list cannot be created. This may be due to + * a failure to initialize the camera subsystem, for example. + * @return cameraDeviceNames The vector of internal camera device + * names known to this provider. + */ + getCameraIdList() + generates (Status status, vec cameraDeviceNames); + + /** + * isSetTorchModeSupported: + * + * Returns if the camera devices known to this camera provider support + * setTorchMode API or not. If the provider does not support setTorchMode + * API, calling to setTorchMode will return METHOD_NOT_SUPPORTED. + * + * Note that not every camera device has a flash unit, so even this API + * returns true, setTorchMode call might still fail due to the camera device + * does not have a flash unit. In such case, the returned status will be + * OPERATION_NOT_SUPPORTED. + * + * @return status Status code for the operation, one of: + * OK: + * On a succesful call + * INTERNAL_ERROR: + * Torch API support cannot be queried. 
This may be due to + * a failure to initialize the camera subsystem, for example. + * @return support Whether the camera devices known to this provider + * supports setTorchMode API or not. Devices launched with SDK + * level 29 or higher must return true. + * + */ + isSetTorchModeSupported() generates (Status status, bool support); + + /** + * getCameraDeviceInterface_VN_x: + * + * Return a android.hardware.camera.device@N.x/ICameraDevice interface for + * the requested device name. This does not power on the camera device, but + * simply acquires the interface for querying the device static information, + * or to additionally open the device for active use. + * + * A separate method is required for each major revision of the camera device + * HAL interface, since they are not compatible with each other. + * + * Valid device names for this provider can be obtained via either + * getCameraIdList(), or via availability callbacks from + * ICameraProviderCallback::cameraDeviceStatusChange(). + * + * The returned interface must be of the highest defined minor version for + * the major version; it's the responsibility of the HAL client to ensure + * they do not use methods/etc that are not valid for the actual minor + * version of the device. + * + * @param cameraDeviceName the name of the device to get an interface to. + * @return status Status code for the operation, one of: + * OK: + * On a succesful generation of camera ID list + * ILLEGAL_ARGUMENT: + * This device name is unknown, or has been disconnected + * OPERATION_NOT_SUPPORTED: + * The specified device does not support this major version of the + * HAL interface. + * INTERNAL_ERROR: + * A camera interface cannot be returned due to an unexpected + * internal error. + * @return device The inteface to this camera device, or null in case of + * error. + */ + getCameraDeviceInterface_V1_x(string cameraDeviceName) generates + (Status status, + android.hardware.camera.device@1.0::ICameraDevice device); + getCameraDeviceInterface_V3_x(string cameraDeviceName) generates + (Status status, + android.hardware.camera.device@3.2::ICameraDevice device); + +}; diff --git a/camera/provider/2.4/ICameraProviderCallback.hal b/camera/provider/2.4/ICameraProviderCallback.hal new file mode 100644 index 0000000..8822305 --- /dev/null +++ b/camera/provider/2.4/ICameraProviderCallback.hal @@ -0,0 +1,68 @@ +/* + * Copyright (C) 2016 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package android.hardware.camera.provider@2.4; + +import android.hardware.camera.common@1.0::types; + +/** + * Callback functions for a camera provider HAL to use to inform the camera + * service of changes to the camera subsystem. + */ +interface ICameraProviderCallback { + + /** + * cameraDeviceStatusChange: + * + * Callback to the camera service to indicate that the state of a specific + * camera device has changed. 
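 *
 * For example (editor's note, mirroring ExternalCameraProviderImpl_2_4 later
 * in this patch), when a USB camera is plugged in the provider invokes
 * roughly
 *
 *   callback->cameraDeviceStatusChange("device@3.4/external/101",
 *                                      CameraDeviceStatus::PRESENT);
 *
 * and the matching NOT_PRESENT call when the device is removed; the id 101
 * here is hypothetical.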
+ * + * On camera service startup, when ICameraProvider::setCallback is invoked, + * the camera service must assume that all internal camera devices are in + * the CAMERA_DEVICE_STATUS_PRESENT state. + * + * The provider must call this method to inform the camera service of any + * initially NOT_PRESENT devices, and of any external camera devices that + * are already present, as soon as the callbacks are available through + * setCallback. + * + * @param cameraDeviceName The name of the camera device that has a + * new status. + * @param newStatus The new status that device is in. + * + */ + cameraDeviceStatusChange(string cameraDeviceName, + CameraDeviceStatus newStatus); + + /** + * torchModeStatusChange: + * + * Callback to the camera service to indicate that the state of the torch + * mode of the flash unit associated with a specific camera device has + * changed. At provider registration time, the camera service must assume + * the torch modes are in the TORCH_MODE_STATUS_AVAILABLE_OFF state if + * android.flash.info.available is reported as true via the + * ICameraDevice::getCameraCharacteristics call. + * + * @param cameraDeviceName The name of the camera device that has a + * new status. + * @param newStatus The new status that device is in. + * + */ + torchModeStatusChange(string cameraDeviceName, + TorchModeStatus newStatus); + +}; diff --git a/camera/provider/2.4/default/Android.bp b/camera/provider/2.4/default/Android.bp new file mode 100644 index 0000000..8ca0008 --- /dev/null +++ b/camera/provider/2.4/default/Android.bp @@ -0,0 +1,232 @@ + +cc_library_shared { + name: "android.vendor.hardware.camera.provider@2.4-legacy", + defaults: ["hidl_defaults"], + proprietary: true, + srcs: ["LegacyCameraProviderImpl_2_4.cpp"], + shared_libs: [ + "android.hardware.camera.common@1.0", + "android.hardware.camera.device@1.0", + "android.hardware.camera.device@3.2", + "android.hardware.camera.device@3.3", + "android.hardware.camera.device@3.4", + "android.hardware.camera.device@3.5", + "android.hardware.camera.provider@2.4", + "android.hardware.graphics.mapper@2.0", + "android.hardware.graphics.mapper@3.0", + "android.hardware.graphics.mapper@4.0", + "android.hidl.allocator@1.0", + "android.hidl.memory@1.0", + "vendor.camera.device@1.0-impl", + "vendor.camera.device@3.2-impl", + "vendor.camera.device@3.3-impl", + "vendor.camera.device@3.4-impl", + "vendor.camera.device@3.5-impl", + "libcamera_metadata", + "libcutils", + "libhardware", + "libhidlbase", + "liblog", + "libutils", + ], + static_libs: [ + "android.vendor.hardware.camera.common@1.0-helper", + ], + header_libs: [ + "vendor.camera.device@3.4-impl_headers", + "vendor.camera.device@3.5-impl_headers", + ], + export_include_dirs: ["."], +} + +cc_library_shared { + name: "android.vendor.hardware.camera.provider@2.4-external", + proprietary: true, + srcs: ["ExternalCameraProviderImpl_2_4.cpp"], + shared_libs: [ + "android.hardware.camera.common@1.0", + "android.hardware.camera.device@1.0", + "android.hardware.camera.device@3.2", + "android.hardware.camera.device@3.3", + "android.hardware.camera.device@3.4", + "android.hardware.camera.device@3.5", + "android.hardware.camera.device@3.6", + "android.hardware.camera.provider@2.4", + "android.hardware.graphics.mapper@2.0", + "android.hardware.graphics.mapper@3.0", + "android.hardware.graphics.mapper@4.0", + "android.hidl.allocator@1.0", + "android.hidl.memory@1.0", + "vendor.camera.device@3.3-impl", + "vendor.camera.device@3.4-external-impl", + "vendor.camera.device@3.4-impl", + 
"vendor.camera.device@3.5-external-impl", + "vendor.camera.device@3.5-impl", + "vendor.camera.device@3.6-external-impl", + "libcamera_metadata", + "libcutils", + "libhardware", + "libhidlbase", + "liblog", + "libtinyxml2", + "libutils", + ], + static_libs: [ + "android.vendor.hardware.camera.common@1.0-helper", + ], + header_libs: [ + "vendor.camera.device@3.4-external-impl_headers", + "vendor.camera.device@3.5-external-impl_headers", + "vendor.camera.device@3.6-external-impl_headers", + ], + export_include_dirs: ["."], +} + +cc_library_shared { + name: "android.hardware.camera.provider@2.4-impl-intel", + defaults: ["hidl_defaults"], + proprietary: true, + relative_install_path: "hw", + srcs: ["CameraProvider_2_4.cpp"], + shared_libs: [ + "android.hardware.camera.common@1.0", + "android.hardware.camera.device@1.0", + "android.hardware.camera.device@3.2", + "android.hardware.camera.device@3.3", + "android.hardware.camera.device@3.4", + "android.hardware.camera.device@3.5", + "android.hardware.camera.provider@2.4", + "android.vendor.hardware.camera.provider@2.4-external", + "android.vendor.hardware.camera.provider@2.4-legacy", + "android.hardware.graphics.mapper@2.0", + "android.hardware.graphics.mapper@3.0", + "android.hardware.graphics.mapper@4.0", + "android.hidl.allocator@1.0", + "android.hidl.memory@1.0", + "vendor.camera.device@1.0-impl", + "vendor.camera.device@3.2-impl", + "vendor.camera.device@3.3-impl", + "vendor.camera.device@3.4-external-impl", + "vendor.camera.device@3.4-impl", + "vendor.camera.device@3.5-external-impl", + "vendor.camera.device@3.5-impl", + "libcamera_metadata", + "libcutils", + "libhardware", + "libhidlbase", + "liblog", + "libtinyxml2", + "libutils", + ], + header_libs: [ + "vendor.camera.device@3.4-external-impl_headers", + "vendor.camera.device@3.4-impl_headers", + "vendor.camera.device@3.5-external-impl_headers", + "vendor.camera.device@3.5-impl_headers", + ], + static_libs: [ + "android.vendor.hardware.camera.common@1.0-helper", + ], + export_include_dirs: ["."], +} + +cc_defaults { + name: "vendor_camera_service_defaults", + defaults: ["hidl_defaults"], + proprietary: true, + relative_install_path: "hw", + srcs: ["service.cpp"], + shared_libs: [ + "android.hardware.camera.common@1.0", + "android.hardware.camera.device@1.0", + "android.hardware.camera.device@3.2", + "android.hardware.camera.device@3.3", + "android.hardware.camera.device@3.4", + "android.hardware.camera.device@3.5", + "android.hardware.camera.provider@2.4", + "android.hardware.graphics.mapper@2.0", + "android.hardware.graphics.mapper@3.0", + "android.hardware.graphics.mapper@4.0", + "android.hidl.allocator@1.0", + "android.hidl.memory@1.0", + "libbinder", + "libcamera_metadata", + "libcutils", + "libhardware", + "libhidlbase", + "liblog", + "libutils", + ], + static_libs: [ + "android.vendor.hardware.camera.common@1.0-helper", + ], + header_libs: [ + "vendor.camera.device@3.4-external-impl_headers", + "vendor.camera.device@3.4-impl_headers", + "vendor.camera.device@3.5-external-impl_headers", + "vendor.camera.device@3.5-impl_headers", + ], +} + +cc_binary { + name: "android.vendor.hardware.camera.provider@2.4-service", + defaults: ["vendor_camera_service_defaults"], + compile_multilib: "32", + init_rc: ["android.vendor.hardware.camera.provider@2.4-service.rc"], +} + +cc_binary { + name: "android.vendor.hardware.camera.provider@2.4-service_64", + defaults: ["vendor_camera_service_defaults"], + compile_multilib: "64", + init_rc: ["android.vendor.hardware.camera.provider@2.4-service_64.rc"], 
+} + +cc_binary { + name: "android.vendor.hardware.camera.provider@2.4-service-lazy", + overrides: ["android.hardware.camera.provider@2.4-service"], + defaults: ["vendor_camera_service_defaults"], + compile_multilib: "32", + init_rc: ["android.vendor.hardware.camera.provider@2.4-service-lazy.rc"], + cflags: ["-DLAZY_SERVICE"], +} + +cc_binary { + name: "android.vendor.hardware.camera.provider@2.4-service-lazy_64", + overrides: ["android.hardware.camera.provider@2.4-service_64"], + defaults: ["vendor_camera_service_defaults"], + compile_multilib: "64", + init_rc: ["android.vendor.hardware.camera.provider@2.4-service-lazy_64.rc"], + cflags: ["-DLAZY_SERVICE"], +} + +cc_binary { + name: "android.vendor.hardware.camera.provider@2.4-external-service", + defaults: ["hidl_defaults"], + proprietary: true, + relative_install_path: "hw", + srcs: ["external-service.cpp"], + compile_multilib: "32", + init_rc: ["android.vendor.hardware.camera.provider@2.4-external-service.rc"], + shared_libs: [ + "android.hardware.camera.common@1.0", + "android.hardware.camera.device@1.0", + "android.hardware.camera.device@3.2", + "android.hardware.camera.device@3.3", + "android.hardware.camera.device@3.4", + "android.hardware.camera.device@3.5", + "android.hardware.camera.provider@2.4", + "android.hardware.camera.provider@2.4-impl-intel", + "libbinder", + "libhidlbase", + "liblog", + "libtinyxml2", + "libutils", + ], + header_libs: [ + "vendor.camera.device@3.4-external-impl_headers", + "vendor.camera.device@3.4-impl_headers", + "vendor.camera.device@3.5-external-impl_headers", + "vendor.camera.device@3.5-impl_headers", + ], +} diff --git a/camera/provider/2.4/default/CameraProvider_2_4.cpp b/camera/provider/2.4/default/CameraProvider_2_4.cpp new file mode 100644 index 0000000..15fc702 --- /dev/null +++ b/camera/provider/2.4/default/CameraProvider_2_4.cpp @@ -0,0 +1,69 @@ +/* + * Copyright 2019 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "CameraProvider_2_4.h" +#include "LegacyCameraProviderImpl_2_4.h" +#include "ExternalCameraProviderImpl_2_4.h" + +const char *kLegacyProviderName = "legacy/0"; +const char *kExternalProviderName = "external/0"; + +namespace android { +namespace hardware { +namespace camera { +namespace provider { +namespace V2_4 { +namespace implementation { + +using android::hardware::camera::provider::V2_4::ICameraProvider; + +extern "C" ICameraProvider* HIDL_FETCH_ICameraProvider(const char* name); + +template +CameraProvider* getProviderImpl() { + CameraProvider *provider = new CameraProvider(); + if (provider == nullptr) { + ALOGE("%s: cannot allocate camera provider!", __FUNCTION__); + return nullptr; + } + if (provider->isInitFailed()) { + ALOGE("%s: camera provider init failed!", __FUNCTION__); + delete provider; + return nullptr; + } + return provider; +} + +ICameraProvider* HIDL_FETCH_ICameraProvider(const char* name) { + using namespace android::hardware::camera::provider::V2_4::implementation; + ICameraProvider* provider = nullptr; + if (strcmp(name, kLegacyProviderName) == 0) { + provider = getProviderImpl(); + } else if (strcmp(name, kExternalProviderName) == 0) { + provider = getProviderImpl(); + } else { + ALOGE("%s: unknown instance name: %s", __FUNCTION__, name); + } + + return provider; +} + +} // namespace implementation +} // namespace V2_4 +} // namespace provider +} // namespace camera +} // namespace hardware +} // namespace android diff --git a/camera/provider/2.4/default/CameraProvider_2_4.h b/camera/provider/2.4/default/CameraProvider_2_4.h new file mode 100644 index 0000000..d2e5b94 --- /dev/null +++ b/camera/provider/2.4/default/CameraProvider_2_4.h @@ -0,0 +1,86 @@ +/* + * Copyright (C) 2016 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_HARDWARE_CAMERA_PROVIDER_V2_4_CAMERAPROVIDER_H +#define ANDROID_HARDWARE_CAMERA_PROVIDER_V2_4_CAMERAPROVIDER_H + +#include +#include +#include + +namespace android { +namespace hardware { +namespace camera { +namespace provider { +namespace V2_4 { +namespace implementation { + +using ::android::hardware::camera::common::V1_0::Status; +using ::android::hardware::camera::provider::V2_4::ICameraProvider; +using ::android::hardware::camera::provider::V2_4::ICameraProviderCallback; +using ::android::hardware::Return; +using ::android::hardware::hidl_string; +using ::android::sp; + +template +struct CameraProvider : public ICameraProvider { + CameraProvider() : impl() {} + ~CameraProvider() {} + + // Caller must use this method to check if CameraProvider ctor failed + bool isInitFailed() { return impl.isInitFailed(); } + + // Methods from ::android::hardware::camera::provider::V2_4::ICameraProvider follow. 
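    // (Editor's note, illustrative only.) This wrapper is what the passthrough
    // entry point in CameraProvider_2_4.cpp hands back, e.g. conceptually:
    //
    //   ICameraProvider* legacy   = HIDL_FETCH_ICameraProvider("legacy/0");
    //   ICameraProvider* external = HIDL_FETCH_ICameraProvider("external/0");
    //
    // Each ICameraProvider method below simply forwards to the wrapped impl.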
+ Return setCallback(const sp& callback) override { + return impl.setCallback(callback); + } + + Return getVendorTags(getVendorTags_cb _hidl_cb) override { + return impl.getVendorTags(_hidl_cb); + } + + Return getCameraIdList(getCameraIdList_cb _hidl_cb) override { + return impl.getCameraIdList(_hidl_cb); + } + + Return isSetTorchModeSupported(isSetTorchModeSupported_cb _hidl_cb) override { + return impl.isSetTorchModeSupported(_hidl_cb); + } + + Return getCameraDeviceInterface_V1_x( + const hidl_string& cameraDeviceName, + getCameraDeviceInterface_V1_x_cb _hidl_cb) override { + return impl.getCameraDeviceInterface_V1_x(cameraDeviceName, _hidl_cb); + } + + Return getCameraDeviceInterface_V3_x( + const hidl_string& cameraDeviceName, + getCameraDeviceInterface_V3_x_cb _hidl_cb) override { + return impl.getCameraDeviceInterface_V3_x(cameraDeviceName, _hidl_cb); + } + +private: + IMPL impl; +}; + +} // namespace implementation +} // namespace V2_4 +} // namespace provider +} // namespace camera +} // namespace hardware +} // namespace android + +#endif // ANDROID_HARDWARE_CAMERA_PROVIDER_V2_4_CAMERAPROVIDER_H diff --git a/camera/provider/2.4/default/ExternalCameraProviderImpl_2_4.cpp b/camera/provider/2.4/default/ExternalCameraProviderImpl_2_4.cpp new file mode 100644 index 0000000..04db7f3 --- /dev/null +++ b/camera/provider/2.4/default/ExternalCameraProviderImpl_2_4.cpp @@ -0,0 +1,373 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#define LOG_TAG "CamPrvdr@2.4-external" +//#define LOG_NDEBUG 0 +#include + +#include +#include +#include +#include +#include +#include +#include "ExternalCameraDevice_3_4.h" +#include "ExternalCameraDevice_3_5.h" +#include "ExternalCameraDevice_3_6.h" +#include "ExternalCameraProviderImpl_2_4.h" + +namespace android { +namespace hardware { +namespace camera { +namespace provider { +namespace V2_4 { +namespace implementation { + +template struct CameraProvider; + +namespace { +// "device@/external/" +const std::regex kDeviceNameRE("device@([0-9]+\\.[0-9]+)/external/(.+)"); +const int kMaxDevicePathLen = 256; +constexpr const char* kDevicePath = "/dev/"; +constexpr const char* kPrefix = "video"; +constexpr int kPrefixLen = std::char_traits::length(kPrefix); +constexpr int kDevicePrefixLen = std::char_traits::length(kDevicePath) + kPrefixLen; + +bool matchDeviceName(int cameraIdOffset, + const hidl_string& deviceName, std::string* deviceVersion, + std::string* cameraDevicePath) { + std::string deviceNameStd(deviceName.c_str()); + std::smatch sm; + if (std::regex_match(deviceNameStd, sm, kDeviceNameRE)) { + if (deviceVersion != nullptr) { + *deviceVersion = sm[1]; + } + if (cameraDevicePath != nullptr) { + *cameraDevicePath = "/dev/video" + std::to_string(std::stoi(sm[2]) - cameraIdOffset); + } + return true; + } + return false; +} + +} // anonymous namespace + +ExternalCameraProviderImpl_2_4::ExternalCameraProviderImpl_2_4() + : mCfg(ExternalCameraConfig::loadFromCfg()) { + mHotPlugThread = sp::make(this); + mHotPlugThread->run("ExtCamHotPlug", PRIORITY_BACKGROUND); + + mPreferredHal3MinorVersion = + property_get_int32("ro.vendor.camera.external.hal3TrebleMinorVersion", 4); + ALOGV("Preferred HAL 3 minor version is %d", mPreferredHal3MinorVersion); + switch(mPreferredHal3MinorVersion) { + case 4: + case 5: + case 6: + // OK + break; + default: + ALOGW("Unknown minor camera device HAL version %d in property " + "'camera.external.hal3TrebleMinorVersion', defaulting to 4", + mPreferredHal3MinorVersion); + mPreferredHal3MinorVersion = 4; + } +} + +ExternalCameraProviderImpl_2_4::~ExternalCameraProviderImpl_2_4() { + mHotPlugThread->requestExit(); +} + + +Return ExternalCameraProviderImpl_2_4::setCallback( + const sp& callback) { + { + Mutex::Autolock _l(mLock); + mCallbacks = callback; + } + if (mCallbacks == nullptr) { + return Status::OK; + } + // Send a callback for all devices to initialize + { + for (const auto& pair : mCameraStatusMap) { + mCallbacks->cameraDeviceStatusChange(pair.first, pair.second); + } + } + + return Status::OK; +} + +Return ExternalCameraProviderImpl_2_4::getVendorTags( + ICameraProvider::getVendorTags_cb _hidl_cb) { + // No vendor tag support for USB camera + hidl_vec zeroSections; + _hidl_cb(Status::OK, zeroSections); + return Void(); +} + +Return ExternalCameraProviderImpl_2_4::getCameraIdList( + ICameraProvider::getCameraIdList_cb _hidl_cb) { + // External camera HAL always report 0 camera, and extra cameras + // are just reported via cameraDeviceStatusChange callbacks + hidl_vec hidlDeviceNameList; + _hidl_cb(Status::OK, hidlDeviceNameList); + return Void(); +} + +Return ExternalCameraProviderImpl_2_4::isSetTorchModeSupported( + ICameraProvider::isSetTorchModeSupported_cb _hidl_cb) { + // setTorchMode API is supported, though right now no external camera device + // has a flash unit. 
+ _hidl_cb (Status::OK, true); + return Void(); +} + +Return ExternalCameraProviderImpl_2_4::getCameraDeviceInterface_V1_x( + const hidl_string&, + ICameraProvider::getCameraDeviceInterface_V1_x_cb _hidl_cb) { + // External Camera HAL does not support HAL1 + _hidl_cb(Status::OPERATION_NOT_SUPPORTED, nullptr); + return Void(); +} + +Return ExternalCameraProviderImpl_2_4::getCameraDeviceInterface_V3_x( + const hidl_string& cameraDeviceName, + ICameraProvider::getCameraDeviceInterface_V3_x_cb _hidl_cb) { + + std::string cameraDevicePath, deviceVersion; + bool match = matchDeviceName(mCfg.cameraIdOffset, cameraDeviceName, + &deviceVersion, &cameraDevicePath); + if (!match) { + _hidl_cb(Status::ILLEGAL_ARGUMENT, nullptr); + return Void(); + } + + if (mCameraStatusMap.count(cameraDeviceName) == 0 || + mCameraStatusMap[cameraDeviceName] != CameraDeviceStatus::PRESENT) { + _hidl_cb(Status::ILLEGAL_ARGUMENT, nullptr); + return Void(); + } + + sp deviceImpl; + switch (mPreferredHal3MinorVersion) { + case 4: { + ALOGV("Constructing v3.4 external camera device"); + deviceImpl = new device::V3_4::implementation::ExternalCameraDevice( + cameraDevicePath, mCfg); + break; + } + case 5: { + ALOGV("Constructing v3.5 external camera device"); + deviceImpl = new device::V3_5::implementation::ExternalCameraDevice( + cameraDevicePath, mCfg); + break; + } + case 6: { + ALOGV("Constructing v3.6 external camera device"); + deviceImpl = new device::V3_6::implementation::ExternalCameraDevice( + cameraDevicePath, mCfg); + break; + } + default: + ALOGE("%s: Unknown HAL minor version %d!", __FUNCTION__, mPreferredHal3MinorVersion); + _hidl_cb(Status::INTERNAL_ERROR, nullptr); + return Void(); + } + + if (deviceImpl == nullptr || deviceImpl->isInitFailed()) { + ALOGE("%s: camera device %s init failed!", __FUNCTION__, cameraDevicePath.c_str()); + _hidl_cb(Status::INTERNAL_ERROR, nullptr); + return Void(); + } + + IF_ALOGV() { + deviceImpl->getInterface()->interfaceChain([]( + ::android::hardware::hidl_vec<::android::hardware::hidl_string> interfaceChain) { + ALOGV("Device interface chain:"); + for (auto iface : interfaceChain) { + ALOGV(" %s", iface.c_str()); + } + }); + } + + _hidl_cb (Status::OK, deviceImpl->getInterface()); + + return Void(); +} + +void ExternalCameraProviderImpl_2_4::addExternalCamera(const char* devName) { + ALOGI("ExtCam: adding %s to External Camera HAL!", devName); + Mutex::Autolock _l(mLock); + std::string deviceName; + std::string cameraId = std::to_string(mCfg.cameraIdOffset + + std::atoi(devName + kDevicePrefixLen)); + if (mPreferredHal3MinorVersion == 6) { + deviceName = std::string("device@3.6/external/") + cameraId; + } else if (mPreferredHal3MinorVersion == 5) { + deviceName = std::string("device@3.5/external/") + cameraId; + } else { + deviceName = std::string("device@3.4/external/") + cameraId; + } + mCameraStatusMap[deviceName] = CameraDeviceStatus::PRESENT; + if (mCallbacks != nullptr) { + mCallbacks->cameraDeviceStatusChange(deviceName, CameraDeviceStatus::PRESENT); + } +} + +void ExternalCameraProviderImpl_2_4::deviceAdded(const char* devName) { + { + base::unique_fd fd(::open(devName, O_RDWR)); + if (fd.get() < 0) { + ALOGE("%s open v4l2 device %s failed:%s", __FUNCTION__, devName, strerror(errno)); + return; + } + + struct v4l2_capability capability; + int ret = ioctl(fd.get(), VIDIOC_QUERYCAP, &capability); + if (ret < 0) { + ALOGE("%s v4l2 QUERYCAP %s failed", __FUNCTION__, devName); + return; + } + + if (!(capability.device_caps & V4L2_CAP_VIDEO_CAPTURE)) { + ALOGW("%s device 
%s does not support VIDEO_CAPTURE", __FUNCTION__, devName); + return; + } + } + // See if we can initialize ExternalCameraDevice correctly + sp deviceImpl = + new device::V3_4::implementation::ExternalCameraDevice(devName, mCfg); + if (deviceImpl == nullptr || deviceImpl->isInitFailed()) { + ALOGW("%s: Attempt to init camera device %s failed!", __FUNCTION__, devName); + return; + } + deviceImpl.clear(); + + addExternalCamera(devName); + return; +} + +void ExternalCameraProviderImpl_2_4::deviceRemoved(const char* devName) { + Mutex::Autolock _l(mLock); + std::string deviceName; + std::string cameraId = std::to_string(mCfg.cameraIdOffset + + std::atoi(devName + kDevicePrefixLen)); + if (mPreferredHal3MinorVersion == 6) { + deviceName = std::string("device@3.6/external/") + cameraId; + } else if (mPreferredHal3MinorVersion == 5) { + deviceName = std::string("device@3.5/external/") + cameraId; + } else { + deviceName = std::string("device@3.4/external/") + cameraId; + } + if (mCameraStatusMap.find(deviceName) != mCameraStatusMap.end()) { + mCameraStatusMap.erase(deviceName); + if (mCallbacks != nullptr) { + mCallbacks->cameraDeviceStatusChange(deviceName, CameraDeviceStatus::NOT_PRESENT); + } + } else { + ALOGE("%s: cannot find camera device %s", __FUNCTION__, devName); + } +} + +ExternalCameraProviderImpl_2_4::HotplugThread::HotplugThread( + ExternalCameraProviderImpl_2_4* parent) : + Thread(/*canCallJava*/false), + mParent(parent), + mInternalDevices(parent->mCfg.mInternalDevices) {} + +ExternalCameraProviderImpl_2_4::HotplugThread::~HotplugThread() {} + +bool ExternalCameraProviderImpl_2_4::HotplugThread::threadLoop() { + // Find existing /dev/video* devices + DIR* devdir = opendir(kDevicePath); + if(devdir == 0) { + ALOGE("%s: cannot open %s! Exiting threadloop", __FUNCTION__, kDevicePath); + return false; + } + + struct dirent* de; + while ((de = readdir(devdir)) != 0) { + // Find external v4l devices that's existing before we start watching and add them + if (!strncmp(kPrefix, de->d_name, kPrefixLen)) { + // TODO: This might reject some valid devices. Ex: internal is 33 and a device named 3 + // is added. + std::string deviceId(de->d_name + kPrefixLen); + if (mInternalDevices.count(deviceId) == 0) { + ALOGV("Non-internal v4l device %s found", de->d_name); + char v4l2DevicePath[kMaxDevicePathLen]; + snprintf(v4l2DevicePath, kMaxDevicePathLen, + "%s%s", kDevicePath, de->d_name); + mParent->deviceAdded(v4l2DevicePath); + } + } + } + closedir(devdir); + + // Watch new video devices + mINotifyFD = inotify_init(); + if (mINotifyFD < 0) { + ALOGE("%s: inotify init failed! Exiting threadloop", __FUNCTION__); + return true; + } + + mWd = inotify_add_watch(mINotifyFD, kDevicePath, IN_CREATE | IN_DELETE); + if (mWd < 0) { + ALOGE("%s: inotify add watch failed! 
Exiting threadloop", __FUNCTION__); + return true; + } + + ALOGI("%s start monitoring new V4L2 devices", __FUNCTION__); + + bool done = false; + char eventBuf[512]; + while (!done) { + int offset = 0; + int ret = read(mINotifyFD, eventBuf, sizeof(eventBuf)); + if (ret >= (int)sizeof(struct inotify_event)) { + while (offset < ret) { + struct inotify_event* event = (struct inotify_event*)&eventBuf[offset]; + if (event->wd == mWd) { + if (!strncmp(kPrefix, event->name, kPrefixLen)) { + std::string deviceId(event->name + kPrefixLen); + if (mInternalDevices.count(deviceId) == 0) { + char v4l2DevicePath[kMaxDevicePathLen]; + snprintf(v4l2DevicePath, kMaxDevicePathLen, + "%s%s", kDevicePath, event->name); + if (event->mask & IN_CREATE) { + mParent->deviceAdded(v4l2DevicePath); + } + if (event->mask & IN_DELETE) { + mParent->deviceRemoved(v4l2DevicePath); + } + } + } + } + offset += sizeof(struct inotify_event) + event->len; + } + } + } + + return true; +} + +} // namespace implementation +} // namespace V2_4 +} // namespace provider +} // namespace camera +} // namespace hardware +} // namespace android diff --git a/camera/provider/2.4/default/ExternalCameraProviderImpl_2_4.h b/camera/provider/2.4/default/ExternalCameraProviderImpl_2_4.h new file mode 100644 index 0000000..f1d8003 --- /dev/null +++ b/camera/provider/2.4/default/ExternalCameraProviderImpl_2_4.h @@ -0,0 +1,118 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_HARDWARE_CAMERA_PROVIDER_V2_4_EXTCAMERAPROVIDER_H +#define ANDROID_HARDWARE_CAMERA_PROVIDER_V2_4_EXTCAMERAPROVIDER_H + +#include +#include +#include +#include +#include +#include +#include +#include "ExternalCameraUtils.h" + +#include "CameraProvider_2_4.h" + +namespace android { +namespace hardware { +namespace camera { +namespace provider { +namespace V2_4 { +namespace implementation { + +using ::android::hardware::camera::common::V1_0::CameraDeviceStatus; +using ::android::hardware::camera::common::V1_0::Status; +using ::android::hardware::camera::common::V1_0::VendorTagSection; +using ::android::hardware::camera::external::common::ExternalCameraConfig; +using ::android::hardware::camera::provider::V2_4::ICameraProvider; +using ::android::hardware::camera::provider::V2_4::ICameraProviderCallback; +using ::android::hardware::Return; +using ::android::hardware::Void; +using ::android::hardware::hidl_vec; +using ::android::hardware::hidl_string; +using ::android::sp; +using ::android::Mutex; + +/** + * The implementation of external webcam CameraProvider 2.4, separated + * from the HIDL interface layer to allow for implementation reuse by later + * provider versions. + * + * This camera provider supports standard UVC webcameras via the Linux V4L2 + * UVC driver. 
+ */ +struct ExternalCameraProviderImpl_2_4 { + ExternalCameraProviderImpl_2_4(); + ~ExternalCameraProviderImpl_2_4(); + + // Caller must use this method to check if CameraProvider ctor failed + bool isInitFailed() { return false;} + + // Methods from ::android::hardware::camera::provider::V2_4::ICameraProvider follow. + Return setCallback(const sp& callback); + Return getVendorTags(ICameraProvider::getVendorTags_cb _hidl_cb); + Return getCameraIdList(ICameraProvider::getCameraIdList_cb _hidl_cb); + Return isSetTorchModeSupported(ICameraProvider::isSetTorchModeSupported_cb _hidl_cb); + Return getCameraDeviceInterface_V1_x( + const hidl_string&, + ICameraProvider::getCameraDeviceInterface_V1_x_cb); + Return getCameraDeviceInterface_V3_x( + const hidl_string&, + ICameraProvider::getCameraDeviceInterface_V3_x_cb); + +private: + + void addExternalCamera(const char* devName); + + void deviceAdded(const char* devName); + + void deviceRemoved(const char* devName); + + class HotplugThread : public android::Thread { + public: + HotplugThread(ExternalCameraProviderImpl_2_4* parent); + ~HotplugThread(); + + virtual bool threadLoop() override; + + private: + ExternalCameraProviderImpl_2_4* mParent = nullptr; + const std::unordered_set mInternalDevices; + + int mINotifyFD = -1; + int mWd = -1; + }; + + Mutex mLock; + sp mCallbacks = nullptr; + std::unordered_map mCameraStatusMap; // camera id -> status + const ExternalCameraConfig mCfg; + sp mHotPlugThread; + int mPreferredHal3MinorVersion; +}; + + + +} // namespace implementation +} // namespace V2_4 +} // namespace provider +} // namespace camera +} // namespace hardware +} // namespace android + +#endif // ANDROID_HARDWARE_CAMERA_PROVIDER_V2_4_EXTCAMERAPROVIDER_H diff --git a/camera/provider/2.4/default/LegacyCameraProviderImpl_2_4.cpp b/camera/provider/2.4/default/LegacyCameraProviderImpl_2_4.cpp new file mode 100644 index 0000000..69318c7 --- /dev/null +++ b/camera/provider/2.4/default/LegacyCameraProviderImpl_2_4.cpp @@ -0,0 +1,658 @@ +/* + * Copyright (C) 2016 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
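Editor's note: the explicit CameraProvider template instantiation and the class comment above suggest that the HIDL-facing wrapper declared in CameraProvider_2_4.h is templated over an implementation type such as ExternalCameraProviderImpl_2_4, which is what lets later provider versions reuse the same implementation. Below is only a schematic sketch of that delegation idea; ProviderWrapper and FakeImpl are made-up names, not the real classes.

    // Schematic wrapper/impl split; names and signatures are simplified.
    #include <utility>

    template <typename Impl>
    struct ProviderWrapper {
        // Surface the implementation's failure flag to the HIDL layer.
        bool isInitFailed() { return mImpl.isInitFailed(); }

        // Each HIDL method is a one-line forward to the wrapped implementation.
        template <typename... Args>
        auto setCallback(Args&&... args) {
            return mImpl.setCallback(std::forward<Args>(args)...);
        }

      private:
        Impl mImpl;
    };

    // Minimal stand-in implementation so the sketch compiles on its own.
    struct FakeImpl {
        bool isInitFailed() { return false; }
        int setCallback(int cb) { return cb; }
    };

    int main() {
        ProviderWrapper<FakeImpl> provider;
        return provider.isInitFailed() ? 1 : provider.setCallback(0);
    }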
+ */ + +#define LOG_TAG "CamPrvdr@2.4-legacy" +//#define LOG_NDEBUG 0 +#include + +#include "LegacyCameraProviderImpl_2_4.h" +#include "CameraDevice_1_0.h" +#include "CameraDevice_3_3.h" +#include "CameraDevice_3_4.h" +#include "CameraDevice_3_5.h" +#include "CameraProvider_2_4.h" +#include +#include +#include +#include + +namespace android { +namespace hardware { +namespace camera { +namespace provider { +namespace V2_4 { +namespace implementation { + +template struct CameraProvider; + +namespace { +// "device@/legacy/" +const std::regex kDeviceNameRE("device@([0-9]+\\.[0-9]+)/legacy/(.+)"); +const char *kHAL3_4 = "3.4"; +const char *kHAL3_5 = "3.5"; +const int kMaxCameraDeviceNameLen = 128; +const int kMaxCameraIdLen = 16; + +bool matchDeviceName(const hidl_string& deviceName, std::string* deviceVersion, + std::string* cameraId) { + std::string deviceNameStd(deviceName.c_str()); + std::smatch sm; + if (std::regex_match(deviceNameStd, sm, kDeviceNameRE)) { + if (deviceVersion != nullptr) { + *deviceVersion = sm[1]; + } + if (cameraId != nullptr) { + *cameraId = sm[2]; + } + return true; + } + return false; +} + +} // anonymous namespace + +using ::android::hardware::camera::common::V1_0::CameraMetadataType; +using ::android::hardware::camera::common::V1_0::Status; + +void LegacyCameraProviderImpl_2_4::addDeviceNames(int camera_id, CameraDeviceStatus status, bool cam_new) +{ + char cameraId[kMaxCameraIdLen]; + snprintf(cameraId, sizeof(cameraId), "%d", camera_id); + std::string cameraIdStr(cameraId); + + mCameraIds.add(cameraIdStr); + + // initialize mCameraDeviceNames and mOpenLegacySupported + mOpenLegacySupported[cameraIdStr] = false; + int deviceVersion = mModule->getDeviceVersion(camera_id); + auto deviceNamePair = std::make_pair(cameraIdStr, + getHidlDeviceName(cameraIdStr, deviceVersion)); + mCameraDeviceNames.add(deviceNamePair); + if (cam_new) { + mCallbacks->cameraDeviceStatusChange(deviceNamePair.second, status); + } + if (deviceVersion >= CAMERA_DEVICE_API_VERSION_3_2 && + mModule->isOpenLegacyDefined()) { + // try open_legacy to see if it actually works + struct hw_device_t* halDev = nullptr; + int ret = mModule->openLegacy(cameraId, CAMERA_DEVICE_API_VERSION_1_0, &halDev); + if (ret == 0) { + mOpenLegacySupported[cameraIdStr] = true; + halDev->close(halDev); + deviceNamePair = std::make_pair(cameraIdStr, + getHidlDeviceName(cameraIdStr, CAMERA_DEVICE_API_VERSION_1_0)); + mCameraDeviceNames.add(deviceNamePair); + if (cam_new) { + mCallbacks->cameraDeviceStatusChange(deviceNamePair.second, status); + } + } else if (ret == -EBUSY || ret == -EUSERS) { + // Looks like this provider instance is not initialized during + // system startup and there are other camera users already. + // Not a good sign but not fatal. 
+ ALOGW("%s: open_legacy try failed!", __FUNCTION__); + } + } +} + +void LegacyCameraProviderImpl_2_4::removeDeviceNames(int camera_id) +{ + std::string cameraIdStr = std::to_string(camera_id); + + mCameraIds.remove(cameraIdStr); + + int deviceVersion = mModule->getDeviceVersion(camera_id); + auto deviceNamePair = std::make_pair(cameraIdStr, + getHidlDeviceName(cameraIdStr, deviceVersion)); + mCameraDeviceNames.remove(deviceNamePair); + mCallbacks->cameraDeviceStatusChange(deviceNamePair.second, CameraDeviceStatus::NOT_PRESENT); + if (deviceVersion >= CAMERA_DEVICE_API_VERSION_3_2 && + mModule->isOpenLegacyDefined() && mOpenLegacySupported[cameraIdStr]) { + + deviceNamePair = std::make_pair(cameraIdStr, + getHidlDeviceName(cameraIdStr, CAMERA_DEVICE_API_VERSION_1_0)); + mCameraDeviceNames.remove(deviceNamePair); + mCallbacks->cameraDeviceStatusChange(deviceNamePair.second, + CameraDeviceStatus::NOT_PRESENT); + } + + mModule->removeCamera(camera_id); +} + +/** + * static callback forwarding methods from HAL to instance + */ +void LegacyCameraProviderImpl_2_4::sCameraDeviceStatusChange( + const struct camera_module_callbacks* callbacks, + int camera_id, + int new_status) { + LegacyCameraProviderImpl_2_4* cp = const_cast( + static_cast(callbacks)); + if (cp == nullptr) { + ALOGE("%s: callback ops is null", __FUNCTION__); + return; + } + + Mutex::Autolock _l(cp->mCbLock); + char cameraId[kMaxCameraIdLen]; + snprintf(cameraId, sizeof(cameraId), "%d", camera_id); + std::string cameraIdStr(cameraId); + cp->mCameraStatusMap[cameraIdStr] = (camera_device_status_t) new_status; + + if (cp->mCallbacks == nullptr) { + // For camera connected before mCallbacks is set, the corresponding + // addDeviceNames() would be called later in setCallbacks(). + return; + } + + bool found = false; + CameraDeviceStatus status = (CameraDeviceStatus)new_status; + for (auto const& deviceNamePair : cp->mCameraDeviceNames) { + if (cameraIdStr.compare(deviceNamePair.first) == 0) { + cp->mCallbacks->cameraDeviceStatusChange(deviceNamePair.second, status); + found = true; + } + } + + switch (status) { + case CameraDeviceStatus::PRESENT: + case CameraDeviceStatus::ENUMERATING: + if (!found) { + cp->addDeviceNames(camera_id, status, true); + } + break; + case CameraDeviceStatus::NOT_PRESENT: + if (found) { + cp->removeDeviceNames(camera_id); + } + } +} + +void LegacyCameraProviderImpl_2_4::sTorchModeStatusChange( + const struct camera_module_callbacks* callbacks, + const char* camera_id, + int new_status) { + LegacyCameraProviderImpl_2_4* cp = const_cast( + static_cast(callbacks)); + + if (cp == nullptr) { + ALOGE("%s: callback ops is null", __FUNCTION__); + return; + } + + Mutex::Autolock _l(cp->mCbLock); + if (cp->mCallbacks != nullptr) { + std::string cameraIdStr(camera_id); + TorchModeStatus status = (TorchModeStatus) new_status; + for (auto const& deviceNamePair : cp->mCameraDeviceNames) { + if (cameraIdStr.compare(deviceNamePair.first) == 0) { + cp->mCallbacks->torchModeStatusChange( + deviceNamePair.second, status); + } + } + } +} + +Status LegacyCameraProviderImpl_2_4::getHidlStatus(int status) { + switch (status) { + case 0: return Status::OK; + case -ENODEV: return Status::INTERNAL_ERROR; + case -EINVAL: return Status::ILLEGAL_ARGUMENT; + default: + ALOGE("%s: unknown HAL status code %d", __FUNCTION__, status); + return Status::INTERNAL_ERROR; + } +} + +std::string LegacyCameraProviderImpl_2_4::getLegacyCameraId(const hidl_string& deviceName) { + std::string cameraId; + matchDeviceName(deviceName, nullptr, 
&cameraId); + return cameraId; +} + +std::string LegacyCameraProviderImpl_2_4::getHidlDeviceName( + std::string cameraId, int deviceVersion) { + // Maybe consider create a version check method and SortedVec to speed up? + if (deviceVersion != CAMERA_DEVICE_API_VERSION_1_0 && + deviceVersion != CAMERA_DEVICE_API_VERSION_3_2 && + deviceVersion != CAMERA_DEVICE_API_VERSION_3_3 && + deviceVersion != CAMERA_DEVICE_API_VERSION_3_4 && + deviceVersion != CAMERA_DEVICE_API_VERSION_3_5 && + deviceVersion != CAMERA_DEVICE_API_VERSION_3_6) { + return hidl_string(""); + } + + // Supported combinations: + // CAMERA_DEVICE_API_VERSION_1_0 -> ICameraDevice@1.0 + // CAMERA_DEVICE_API_VERSION_3_[2-4] -> ICameraDevice@[3.2|3.3] + // CAMERA_DEVICE_API_VERSION_3_5 + CAMERA_MODULE_API_VERSION_2_4 -> ICameraDevice@3.4 + // CAMERA_DEVICE_API_VERSION_3_[5-6] + CAMERA_MODULE_API_VERSION_2_5 -> ICameraDevice@3.5 + bool isV1 = deviceVersion == CAMERA_DEVICE_API_VERSION_1_0; + int versionMajor = isV1 ? 1 : 3; + int versionMinor = isV1 ? 0 : mPreferredHal3MinorVersion; + if (deviceVersion == CAMERA_DEVICE_API_VERSION_3_5) { + if (mModule->getModuleApiVersion() == CAMERA_MODULE_API_VERSION_2_5) { + versionMinor = 5; + } else { + versionMinor = 4; + } + } else if (deviceVersion == CAMERA_DEVICE_API_VERSION_3_6) { + versionMinor = 5; + } + char deviceName[kMaxCameraDeviceNameLen]; + snprintf(deviceName, sizeof(deviceName), "device@%d.%d/legacy/%s", + versionMajor, versionMinor, cameraId.c_str()); + return deviceName; +} + +LegacyCameraProviderImpl_2_4::LegacyCameraProviderImpl_2_4() : + camera_module_callbacks_t({sCameraDeviceStatusChange, + sTorchModeStatusChange}) { + mInitFailed = initialize(); +} + +LegacyCameraProviderImpl_2_4::~LegacyCameraProviderImpl_2_4() {} + +bool LegacyCameraProviderImpl_2_4::initialize() { + camera_module_t *rawModule; + int err = hw_get_module(CAMERA_HARDWARE_MODULE_ID, + (const hw_module_t **)&rawModule); + if (err < 0) { + ALOGE("Could not load camera HAL module: %d (%s)", err, strerror(-err)); + return true; + } + + mModule = new CameraModule(rawModule); + err = mModule->init(); + if (err != OK) { + ALOGE("Could not initialize camera HAL module: %d (%s)", err, strerror(-err)); + mModule.clear(); + return true; + } + ALOGI("Loaded \"%s\" camera module", mModule->getModuleName()); + + // Setup vendor tags here so HAL can setup vendor keys in camera characteristics + VendorTagDescriptor::clearGlobalVendorTagDescriptor(); + if (!setUpVendorTags()) { + ALOGE("%s: Vendor tag setup failed, will not be available.", __FUNCTION__); + } + + // Setup callback now because we are going to try openLegacy next + err = mModule->setCallbacks(this); + if (err != OK) { + ALOGE("Could not set camera module callback: %d (%s)", err, strerror(-err)); + mModule.clear(); + return true; + } + + mPreferredHal3MinorVersion = + property_get_int32("ro.vendor.camera.wrapper.hal3TrebleMinorVersion", 3); + ALOGV("Preferred HAL 3 minor version is %d", mPreferredHal3MinorVersion); + switch(mPreferredHal3MinorVersion) { + case 2: + case 3: + // OK + break; + default: + ALOGW("Unknown minor camera device HAL version %d in property " + "'camera.wrapper.hal3TrebleMinorVersion', defaulting to 3", + mPreferredHal3MinorVersion); + mPreferredHal3MinorVersion = 3; + } + + mNumberOfLegacyCameras = mModule->getNumberOfCameras(); + for (int i = 0; i < mNumberOfLegacyCameras; i++) { + struct camera_info info; + auto rc = mModule->getCameraInfo(i, &info); + if (rc != NO_ERROR) { + ALOGE("%s: Camera info query failed!", __func__); + 
mModule.clear(); + return true; + } + + if (checkCameraVersion(i, info) != OK) { + ALOGE("%s: Camera version check failed!", __func__); + mModule.clear(); + return true; + } + + char cameraId[kMaxCameraIdLen]; + snprintf(cameraId, sizeof(cameraId), "%d", i); + std::string cameraIdStr(cameraId); + mCameraStatusMap[cameraIdStr] = CAMERA_DEVICE_STATUS_PRESENT; + + addDeviceNames(i); + } + + return false; // mInitFailed +} + +/** + * Check that the device HAL version is still in supported. + */ +int LegacyCameraProviderImpl_2_4::checkCameraVersion(int id, camera_info info) { + if (mModule == nullptr) { + return NO_INIT; + } + + // device_version undefined in CAMERA_MODULE_API_VERSION_1_0, + // All CAMERA_MODULE_API_VERSION_1_0 devices are backward-compatible + uint16_t moduleVersion = mModule->getModuleApiVersion(); + if (moduleVersion >= CAMERA_MODULE_API_VERSION_2_0) { + // Verify the device version is in the supported range + switch (info.device_version) { + case CAMERA_DEVICE_API_VERSION_1_0: + case CAMERA_DEVICE_API_VERSION_3_2: + case CAMERA_DEVICE_API_VERSION_3_3: + case CAMERA_DEVICE_API_VERSION_3_4: + case CAMERA_DEVICE_API_VERSION_3_5: + // in support + break; + case CAMERA_DEVICE_API_VERSION_3_6: + /** + * ICameraDevice@3.5 contains APIs from both + * CAMERA_DEVICE_API_VERSION_3_6 and CAMERA_MODULE_API_VERSION_2_5 + * so we require HALs to uprev both for simplified supported combinations. + * HAL can still opt in individual new APIs indepedently. + */ + if (moduleVersion < CAMERA_MODULE_API_VERSION_2_5) { + ALOGE("%s: Device %d has unsupported version combination:" + "HAL version %x and module version %x", + __FUNCTION__, id, info.device_version, moduleVersion); + return NO_INIT; + } + break; + case CAMERA_DEVICE_API_VERSION_2_0: + case CAMERA_DEVICE_API_VERSION_2_1: + case CAMERA_DEVICE_API_VERSION_3_0: + case CAMERA_DEVICE_API_VERSION_3_1: + // no longer supported + default: + ALOGE("%s: Device %d has HAL version %x, which is not supported", + __FUNCTION__, id, info.device_version); + return NO_INIT; + } + } + + return OK; +} + +bool LegacyCameraProviderImpl_2_4::setUpVendorTags() { + ATRACE_CALL(); + vendor_tag_ops_t vOps = vendor_tag_ops_t(); + + // Check if vendor operations have been implemented + if (!mModule->isVendorTagDefined()) { + ALOGI("%s: No vendor tags defined for this device.", __FUNCTION__); + return true; + } + + mModule->getVendorTagOps(&vOps); + + // Ensure all vendor operations are present + if (vOps.get_tag_count == nullptr || vOps.get_all_tags == nullptr || + vOps.get_section_name == nullptr || vOps.get_tag_name == nullptr || + vOps.get_tag_type == nullptr) { + ALOGE("%s: Vendor tag operations not fully defined. Ignoring definitions." + , __FUNCTION__); + return false; + } + + // Read all vendor tag definitions into a descriptor + sp desc; + status_t res; + if ((res = VendorTagDescriptor::createDescriptorFromOps(&vOps, /*out*/desc)) + != OK) { + ALOGE("%s: Could not generate descriptor from vendor tag operations," + "received error %s (%d). 
Camera clients will not be able to use" + "vendor tags", __FUNCTION__, strerror(res), res); + return false; + } + + // Set the global descriptor to use with camera metadata + VendorTagDescriptor::setAsGlobalVendorTagDescriptor(desc); + const SortedVector* sectionNames = desc->getAllSectionNames(); + size_t numSections = sectionNames->size(); + std::vector> tagsBySection(numSections); + int tagCount = desc->getTagCount(); + std::vector tags(tagCount); + desc->getTagArray(tags.data()); + for (int i = 0; i < tagCount; i++) { + VendorTag vt; + vt.tagId = tags[i]; + vt.tagName = desc->getTagName(tags[i]); + vt.tagType = (CameraMetadataType) desc->getTagType(tags[i]); + ssize_t sectionIdx = desc->getSectionIndex(tags[i]); + tagsBySection[sectionIdx].push_back(vt); + } + mVendorTagSections.resize(numSections); + for (size_t s = 0; s < numSections; s++) { + mVendorTagSections[s].sectionName = (*sectionNames)[s].c_str(); + mVendorTagSections[s].tags = tagsBySection[s]; + } + return true; +} + +// Methods from ::android::hardware::camera::provider::V2_4::ICameraProvider follow. +Return LegacyCameraProviderImpl_2_4::setCallback( + const sp& callback) { + Mutex::Autolock _l(mCbLock); + mCallbacks = callback; + if (mCallbacks == nullptr) { + return Status::OK; + } + // Add and report all presenting external cameras. + for (auto const& statusPair : mCameraStatusMap) { + int id = std::stoi(statusPair.first); + auto status = static_cast(statusPair.second); + if (id >= mNumberOfLegacyCameras && status != CameraDeviceStatus::NOT_PRESENT) { + addDeviceNames(id, status, true); + } + } + + return Status::OK; +} + +Return LegacyCameraProviderImpl_2_4::getVendorTags( + ICameraProvider::getVendorTags_cb _hidl_cb) { + _hidl_cb(Status::OK, mVendorTagSections); + return Void(); +} + +Return LegacyCameraProviderImpl_2_4::getCameraIdList( + ICameraProvider::getCameraIdList_cb _hidl_cb) { + std::vector deviceNameList; + for (auto const& deviceNamePair : mCameraDeviceNames) { + if (std::stoi(deviceNamePair.first) >= mNumberOfLegacyCameras) { + // External camera devices must be reported through the device status change callback, + // not in this list. 
+ continue; + } + if (mCameraStatusMap[deviceNamePair.first] == CAMERA_DEVICE_STATUS_PRESENT) { + deviceNameList.push_back(deviceNamePair.second); + } + } + hidl_vec hidlDeviceNameList(deviceNameList); + _hidl_cb(Status::OK, hidlDeviceNameList); + return Void(); +} + +Return LegacyCameraProviderImpl_2_4::isSetTorchModeSupported( + ICameraProvider::isSetTorchModeSupported_cb _hidl_cb) { + bool support = mModule->isSetTorchModeSupported(); + _hidl_cb (Status::OK, support); + return Void(); +} + +Return LegacyCameraProviderImpl_2_4::getCameraDeviceInterface_V1_x( + const hidl_string& cameraDeviceName, + ICameraProvider::getCameraDeviceInterface_V1_x_cb _hidl_cb) { + std::string cameraId, deviceVersion; + bool match = matchDeviceName(cameraDeviceName, &deviceVersion, &cameraId); + if (!match) { + _hidl_cb(Status::ILLEGAL_ARGUMENT, nullptr); + return Void(); + } + + std::string deviceName(cameraDeviceName.c_str()); + ssize_t index = mCameraDeviceNames.indexOf(std::make_pair(cameraId, deviceName)); + if (index == NAME_NOT_FOUND) { // Either an illegal name or a device version mismatch + Status status = Status::OK; + ssize_t idx = mCameraIds.indexOf(cameraId); + if (idx == NAME_NOT_FOUND) { + ALOGE("%s: cannot find camera %s!", __FUNCTION__, cameraId.c_str()); + status = Status::ILLEGAL_ARGUMENT; + } else { // invalid version + ALOGE("%s: camera device %s does not support version %s!", + __FUNCTION__, cameraId.c_str(), deviceVersion.c_str()); + status = Status::OPERATION_NOT_SUPPORTED; + } + _hidl_cb(status, nullptr); + return Void(); + } + + if (mCameraStatusMap.count(cameraId) == 0 || + mCameraStatusMap[cameraId] != CAMERA_DEVICE_STATUS_PRESENT) { + _hidl_cb(Status::ILLEGAL_ARGUMENT, nullptr); + return Void(); + } + + sp device = + new android::hardware::camera::device::V1_0::implementation::CameraDevice( + mModule, cameraId, mCameraDeviceNames); + + if (device == nullptr) { + ALOGE("%s: cannot allocate camera device for id %s", __FUNCTION__, cameraId.c_str()); + _hidl_cb(Status::INTERNAL_ERROR, nullptr); + return Void(); + } + + if (device->isInitFailed()) { + ALOGE("%s: camera device %s init failed!", __FUNCTION__, cameraId.c_str()); + device = nullptr; + _hidl_cb(Status::INTERNAL_ERROR, nullptr); + return Void(); + } + + _hidl_cb (Status::OK, device); + return Void(); +} + +Return LegacyCameraProviderImpl_2_4::getCameraDeviceInterface_V3_x( + const hidl_string& cameraDeviceName, + ICameraProvider::getCameraDeviceInterface_V3_x_cb _hidl_cb) { + std::string cameraId, deviceVersion; + bool match = matchDeviceName(cameraDeviceName, &deviceVersion, &cameraId); + if (!match) { + _hidl_cb(Status::ILLEGAL_ARGUMENT, nullptr); + return Void(); + } + + std::string deviceName(cameraDeviceName.c_str()); + ssize_t index = mCameraDeviceNames.indexOf(std::make_pair(cameraId, deviceName)); + if (index == NAME_NOT_FOUND) { // Either an illegal name or a device version mismatch + Status status = Status::OK; + ssize_t idx = mCameraIds.indexOf(cameraId); + if (idx == NAME_NOT_FOUND) { + ALOGE("%s: cannot find camera %s!", __FUNCTION__, cameraId.c_str()); + status = Status::ILLEGAL_ARGUMENT; + } else { // invalid version + ALOGE("%s: camera device %s does not support version %s!", + __FUNCTION__, cameraId.c_str(), deviceVersion.c_str()); + status = Status::OPERATION_NOT_SUPPORTED; + } + _hidl_cb(status, nullptr); + return Void(); + } + + if (mCameraStatusMap.count(cameraId) == 0 || + mCameraStatusMap[cameraId] != CAMERA_DEVICE_STATUS_PRESENT) { + _hidl_cb(Status::ILLEGAL_ARGUMENT, nullptr); + return Void(); + } + 
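Editor's note: setUpVendorTags() earlier in this file flattens the vendor tag descriptor into per-section lists before publishing them through getVendorTags(). A simplified sketch of just that grouping step is below; TagInfo/Section stand in for the HIDL VendorTag/VendorTagSection types.

    // Grouping a flat tag list into per-section buckets, as setUpVendorTags() does.
    #include <cstdint>
    #include <string>
    #include <vector>

    struct TagInfo { uint32_t id; std::string name; int type; size_t sectionIdx; };
    struct Section { std::string name; std::vector<TagInfo> tags; };

    std::vector<Section> groupBySection(const std::vector<std::string>& sectionNames,
                                        const std::vector<TagInfo>& allTags) {
        // Bucket every tag by the section it belongs to.
        std::vector<std::vector<TagInfo>> buckets(sectionNames.size());
        for (const auto& tag : allTags) {
            if (tag.sectionIdx < buckets.size()) {
                buckets[tag.sectionIdx].push_back(tag);
            }
        }
        // Emit one Section per section name, carrying its bucketed tags.
        std::vector<Section> sections(sectionNames.size());
        for (size_t s = 0; s < sectionNames.size(); s++) {
            sections[s].name = sectionNames[s];
            sections[s].tags = std::move(buckets[s]);
        }
        return sections;
    }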
+ sp deviceImpl; + + // ICameraDevice 3.4 or upper + if (deviceVersion >= kHAL3_4) { + ALOGV("Constructing v3.4+ camera device"); + if (deviceVersion == kHAL3_4) { + deviceImpl = new android::hardware::camera::device::V3_4::implementation::CameraDevice( + mModule, cameraId, mCameraDeviceNames); + } else if (deviceVersion == kHAL3_5) { + deviceImpl = new android::hardware::camera::device::V3_5::implementation::CameraDevice( + mModule, cameraId, mCameraDeviceNames); + } + if (deviceImpl == nullptr || deviceImpl->isInitFailed()) { + ALOGE("%s: camera device %s init failed!", __FUNCTION__, cameraId.c_str()); + _hidl_cb(Status::INTERNAL_ERROR, nullptr); + return Void(); + } + IF_ALOGV() { + deviceImpl->getInterface()->interfaceChain([]( + ::android::hardware::hidl_vec<::android::hardware::hidl_string> interfaceChain) { + ALOGV("Device interface chain:"); + for (auto iface : interfaceChain) { + ALOGV(" %s", iface.c_str()); + } + }); + } + _hidl_cb (Status::OK, deviceImpl->getInterface()); + return Void(); + } + + // ICameraDevice 3.2 and 3.3 + // Since some Treble HAL revisions can map to the same legacy HAL version(s), we default + // to the newest possible Treble HAL revision, but allow for override if needed via + // system property. + switch (mPreferredHal3MinorVersion) { + case 2: { // Map legacy camera device v3 HAL to Treble camera device HAL v3.2 + ALOGV("Constructing v3.2 camera device"); + deviceImpl = new android::hardware::camera::device::V3_2::implementation::CameraDevice( + mModule, cameraId, mCameraDeviceNames); + if (deviceImpl == nullptr || deviceImpl->isInitFailed()) { + ALOGE("%s: camera device %s init failed!", __FUNCTION__, cameraId.c_str()); + _hidl_cb(Status::INTERNAL_ERROR, nullptr); + return Void(); + } + break; + } + case 3: { // Map legacy camera device v3 HAL to Treble camera device HAL v3.3 + ALOGV("Constructing v3.3 camera device"); + deviceImpl = new android::hardware::camera::device::V3_3::implementation::CameraDevice( + mModule, cameraId, mCameraDeviceNames); + if (deviceImpl == nullptr || deviceImpl->isInitFailed()) { + ALOGE("%s: camera device %s init failed!", __FUNCTION__, cameraId.c_str()); + _hidl_cb(Status::INTERNAL_ERROR, nullptr); + return Void(); + } + break; + } + default: + ALOGE("%s: Unknown HAL minor version %d!", __FUNCTION__, mPreferredHal3MinorVersion); + _hidl_cb(Status::INTERNAL_ERROR, nullptr); + return Void(); + } + + _hidl_cb (Status::OK, deviceImpl->getInterface()); + return Void(); +} + +} // namespace implementation +} // namespace V2_4 +} // namespace provider +} // namespace camera +} // namespace hardware +} // namespace android diff --git a/camera/provider/2.4/default/LegacyCameraProviderImpl_2_4.h b/camera/provider/2.4/default/LegacyCameraProviderImpl_2_4.h new file mode 100644 index 0000000..b4914b3 --- /dev/null +++ b/camera/provider/2.4/default/LegacyCameraProviderImpl_2_4.h @@ -0,0 +1,134 @@ +/* + * Copyright (C) 2016 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
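Editor's note: both providers in this patch pick their Treble minor version from a vendor property and clamp unsupported values, as seen in initialize() and in the dispatch inside getCameraDeviceInterface_V3_x above. A condensed restatement of that selection, wrapped in a hypothetical free function; the property name, default, and supported set are taken from the legacy provider code above.

    #define LOG_TAG "CamPrvdrSketch"  // illustrative tag for this sketch only
    #include <cutils/properties.h>
    #include <log/log.h>

    static int preferredHal3MinorVersion() {
        int32_t v = property_get_int32("ro.vendor.camera.wrapper.hal3TrebleMinorVersion",
                                       /*default_value=*/3);
        switch (v) {
            case 2:
            case 3:
                return v;  // supported Treble minor versions for the legacy wrapper
            default:
                ALOGW("Unsupported HAL3 minor version %d, defaulting to 3", v);
                return 3;  // clamp anything else back to the default
        }
    }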
+ */ + +#ifndef ANDROID_HARDWARE_CAMERA_PROVIDER_V2_4_LEGACYCAMERAPROVIDER_H +#define ANDROID_HARDWARE_CAMERA_PROVIDER_V2_4_LEGACYCAMERAPROVIDER_H + +#include +#include "hardware/camera_common.h" +#include "utils/Mutex.h" +#include "utils/SortedVector.h" + +#include "CameraModule.h" +#include "VendorTagDescriptor.h" + +namespace android { +namespace hardware { +namespace camera { +namespace provider { +namespace V2_4 { +namespace implementation { + +using ::android::hardware::camera::common::V1_0::CameraDeviceStatus; +using ::android::hardware::camera::common::V1_0::Status; +using ::android::hardware::camera::common::V1_0::TorchModeStatus; +using ::android::hardware::camera::common::V1_0::VendorTag; +using ::android::hardware::camera::common::V1_0::VendorTagSection; +using ::android::hardware::camera::common::V1_0::helper::CameraModule; +using ::android::hardware::camera::common::V1_0::helper::VendorTagDescriptor; +using ::android::hardware::camera::provider::V2_4::ICameraProvider; +using ::android::hardware::camera::provider::V2_4::ICameraProviderCallback; +using ::android::hardware::Return; +using ::android::hardware::Void; +using ::android::hardware::hidl_vec; +using ::android::hardware::hidl_string; +using ::android::sp; +using ::android::Mutex; + +/** + * The implementation of legacy wrapper CameraProvider 2.4, separated + * from the HIDL interface layer to allow for implementation reuse by later + * provider versions. + * + * This implementation supports cameras implemented via the legacy libhardware + * camera HAL definitions. + */ +struct LegacyCameraProviderImpl_2_4 : public camera_module_callbacks_t { + LegacyCameraProviderImpl_2_4(); + ~LegacyCameraProviderImpl_2_4(); + + // Caller must use this method to check if CameraProvider ctor failed + bool isInitFailed() { return mInitFailed; } + + // Methods from ::android::hardware::camera::provider::V2_4::ICameraProvider follow. + Return setCallback(const sp& callback); + Return getVendorTags(ICameraProvider::getVendorTags_cb _hidl_cb); + Return getCameraIdList(ICameraProvider::getCameraIdList_cb _hidl_cb); + Return isSetTorchModeSupported(ICameraProvider::isSetTorchModeSupported_cb _hidl_cb); + Return getCameraDeviceInterface_V1_x( + const hidl_string& cameraDeviceName, + ICameraProvider::getCameraDeviceInterface_V1_x_cb _hidl_cb); + Return getCameraDeviceInterface_V3_x( + const hidl_string& cameraDeviceName, + ICameraProvider::getCameraDeviceInterface_V3_x_cb _hidl_cb); + +protected: + Mutex mCbLock; + sp mCallbacks = nullptr; + + sp mModule; + + int mNumberOfLegacyCameras; + std::map mCameraStatusMap; // camera id -> status + std::map mOpenLegacySupported; // camera id -> open_legacy HAL1.0 supported + SortedVector mCameraIds; // the "0"/"1" legacy camera Ids + // (cameraId string, hidl device name) pairs + SortedVector> mCameraDeviceNames; + + int mPreferredHal3MinorVersion; + + // Must be queried before using any APIs. 
+ // APIs will only work when this returns true + bool mInitFailed; + bool initialize(); + + hidl_vec mVendorTagSections; + bool setUpVendorTags(); + int checkCameraVersion(int id, camera_info info); + + // create HIDL device name from camera ID and legacy device version + std::string getHidlDeviceName(std::string cameraId, int deviceVersion); + + // extract legacy camera ID/device version from a HIDL device name + static std::string getLegacyCameraId(const hidl_string& deviceName); + + // convert conventional HAL status to HIDL Status + static Status getHidlStatus(int); + + // static callback forwarding methods + static void sCameraDeviceStatusChange( + const struct camera_module_callbacks* callbacks, + int camera_id, + int new_status); + static void sTorchModeStatusChange( + const struct camera_module_callbacks* callbacks, + const char* camera_id, + int new_status); + + void addDeviceNames(int camera_id, CameraDeviceStatus status = CameraDeviceStatus::PRESENT, + bool cam_new = false); + void removeDeviceNames(int camera_id); + +}; + +} // namespace implementation +} // namespace V2_4 +} // namespace provider +} // namespace camera +} // namespace hardware +} // namespace android + +#endif // ANDROID_HARDWARE_CAMERA_PROVIDER_V2_4_LEGACYCAMERAPROVIDER_H diff --git a/camera/provider/2.4/default/OWNERS b/camera/provider/2.4/default/OWNERS new file mode 100644 index 0000000..f48a95c --- /dev/null +++ b/camera/provider/2.4/default/OWNERS @@ -0,0 +1 @@ +include platform/frameworks/av:/camera/OWNERS diff --git a/camera/provider/2.4/default/android.vendor.hardware.camera.provider@2.4-external-service.rc b/camera/provider/2.4/default/android.vendor.hardware.camera.provider@2.4-external-service.rc new file mode 100644 index 0000000..52ade97 --- /dev/null +++ b/camera/provider/2.4/default/android.vendor.hardware.camera.provider@2.4-external-service.rc @@ -0,0 +1,8 @@ +service vendor.camera-provider-2-4-ext /vendor/bin/hw/android.hardware.camera.provider@2.4-external-service + interface android.hardware.camera.provider@2.4::ICameraProvider external/0 + class hal + user cameraserver + group audio camera input drmrpc usb + ioprio rt 4 + capabilities SYS_NICE + task_profiles CameraServiceCapacity MaxPerformance diff --git a/camera/provider/2.4/default/android.vendor.hardware.camera.provider@2.4-service-lazy.rc b/camera/provider/2.4/default/android.vendor.hardware.camera.provider@2.4-service-lazy.rc new file mode 100644 index 0000000..63ded90 --- /dev/null +++ b/camera/provider/2.4/default/android.vendor.hardware.camera.provider@2.4-service-lazy.rc @@ -0,0 +1,10 @@ +service vendor.camera-provider-2-4 /vendor/bin/hw/android.hardware.camera.provider@2.4-service-lazy + interface android.hardware.camera.provider@2.4::ICameraProvider legacy/0 + oneshot + disabled + class hal + user cameraserver + group audio camera input drmrpc + ioprio rt 4 + capabilities SYS_NICE + task_profiles CameraServiceCapacity MaxPerformance diff --git a/camera/provider/2.4/default/android.vendor.hardware.camera.provider@2.4-service-lazy_64.rc b/camera/provider/2.4/default/android.vendor.hardware.camera.provider@2.4-service-lazy_64.rc new file mode 100644 index 0000000..953d1af --- /dev/null +++ b/camera/provider/2.4/default/android.vendor.hardware.camera.provider@2.4-service-lazy_64.rc @@ -0,0 +1,10 @@ +service vendor.camera-provider-2-4 /vendor/bin/hw/android.hardware.camera.provider@2.4-service-lazy_64 + interface android.hardware.camera.provider@2.4::ICameraProvider legacy/0 + oneshot + disabled + class hal + user cameraserver + 
group audio camera input drmrpc + ioprio rt 4 + capabilities SYS_NICE + task_profiles CameraServiceCapacity MaxPerformance diff --git a/camera/provider/2.4/default/android.vendor.hardware.camera.provider@2.4-service.rc b/camera/provider/2.4/default/android.vendor.hardware.camera.provider@2.4-service.rc new file mode 100644 index 0000000..f7ac9f8 --- /dev/null +++ b/camera/provider/2.4/default/android.vendor.hardware.camera.provider@2.4-service.rc @@ -0,0 +1,8 @@ +service vendor.camera-provider-2-4 /vendor/bin/hw/android.hardware.camera.provider@2.4-service + interface android.hardware.camera.provider@2.4::ICameraProvider legacy/0 + class hal + user cameraserver + group audio camera input drmrpc + ioprio rt 4 + capabilities SYS_NICE + task_profiles CameraServiceCapacity MaxPerformance diff --git a/camera/provider/2.4/default/android.vendor.hardware.camera.provider@2.4-service_64.rc b/camera/provider/2.4/default/android.vendor.hardware.camera.provider@2.4-service_64.rc new file mode 100644 index 0000000..a32dd46 --- /dev/null +++ b/camera/provider/2.4/default/android.vendor.hardware.camera.provider@2.4-service_64.rc @@ -0,0 +1,8 @@ +service vendor.camera-provider-2-4 /vendor/bin/hw/android.hardware.camera.provider@2.4-service_64 + interface android.hardware.camera.provider@2.4::ICameraProvider legacy/0 + class hal + user cameraserver + group audio camera input drmrpc + ioprio rt 4 + capabilities SYS_NICE + task_profiles CameraServiceCapacity MaxPerformance diff --git a/camera/provider/2.4/default/external-service.cpp b/camera/provider/2.4/default/external-service.cpp new file mode 100644 index 0000000..f91aa59 --- /dev/null +++ b/camera/provider/2.4/default/external-service.cpp @@ -0,0 +1,34 @@ +/* + * Copyright 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "android.hardware.camera.provider@2.4-external-service" + +#include +#include + +#include + +using android::hardware::camera::provider::V2_4::ICameraProvider; +using android::hardware::defaultPassthroughServiceImplementation; + +int main() +{ + ALOGI("External camera provider service is starting."); + // The camera HAL may communicate to other vendor components via + // /dev/vndbinder + android::ProcessState::initWithDriver("/dev/vndbinder"); + return defaultPassthroughServiceImplementation("external/0", /*maxThreads*/ 6); +} diff --git a/camera/provider/2.4/default/service.cpp b/camera/provider/2.4/default/service.cpp new file mode 100644 index 0000000..0a4f787 --- /dev/null +++ b/camera/provider/2.4/default/service.cpp @@ -0,0 +1,67 @@ +/* + * Copyright 2017 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifdef LAZY_SERVICE +#define LOG_TAG "android.hardware.camera.provider@2.4-service-lazy" +#else +#define LOG_TAG "android.hardware.camera.provider@2.4-service" +#endif + +#include +#include +#include +#include +#include + +using android::status_t; +using android::hardware::defaultLazyPassthroughServiceImplementation; +using android::hardware::defaultPassthroughServiceImplementation; +using android::hardware::camera::provider::V2_4::ICameraProvider; + +#ifdef LAZY_SERVICE +const bool kLazyService = true; +#else +const bool kLazyService = false; +#endif + +int main() +{ + ALOGI("CameraProvider@2.4 legacy service is starting."); + // The camera HAL may communicate to other vendor components via + // /dev/vndbinder + android::ProcessState::initWithDriver("/dev/vndbinder"); + + // b/166675194 + if (property_get_bool("ro.vendor.camera.provider24.disable_mem_init", false)) { + if (mallopt(M_BIONIC_ZERO_INIT, 0) == 0) { + // Note - heap initialization is only present on devices with Scudo. + // Devices with jemalloc don't have heap-init, and thus the mallopt + // will fail. On these devices, you probably just want to remove the + // property. + ALOGE("Disabling heap initialization failed."); + } + } + + status_t status; + if (kLazyService) { + status = defaultLazyPassthroughServiceImplementation("legacy/0", + /*maxThreads*/ 6); + } else { + status = defaultPassthroughServiceImplementation("legacy/0", + /*maxThreads*/ 6); + } + return status; +} diff --git a/camera/provider/2.4/vts/OWNERS b/camera/provider/2.4/vts/OWNERS new file mode 100644 index 0000000..b8f6b04 --- /dev/null +++ b/camera/provider/2.4/vts/OWNERS @@ -0,0 +1,6 @@ +# Camera team +include platform/frameworks/av:/camera/OWNERS + +# VTS team +yim@google.com +zhuoyao@google.com diff --git a/camera/provider/2.4/vts/functional/Android.bp b/camera/provider/2.4/vts/functional/Android.bp new file mode 100644 index 0000000..805e130 --- /dev/null +++ b/camera/provider/2.4/vts/functional/Android.bp @@ -0,0 +1,55 @@ +// +// Copyright (C) 2016 The Android Open Source Project +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// + +cc_test { + name: "VtsHalVendorCameraProviderV2_4TargetTest", + defaults: ["VtsHalTargetTestDefaults"], + srcs: ["VtsHalCameraProviderV2_4TargetTest.cpp"], + + // TODO(b/64437680): Assume these are always available on the device. + shared_libs: [ + "libbinder", + "libcamera_metadata", + "libcutils", + "libfmq", + "libgui", + "libui", + ], + + // Statically link to libs not guaranteed to be present on the device. 
+ static_libs: [ + "android.hardware.camera.common@1.0", + "android.hardware.camera.common@1.0-helper", + "android.hardware.camera.device@1.0", + "android.hardware.camera.device@3.2", + "android.hardware.camera.device@3.3", + "android.hardware.camera.device@3.4", + "android.hardware.camera.device@3.5", + "android.hardware.camera.device@3.6", + "android.hardware.camera.device@3.7", + "android.hardware.camera.metadata@3.4", + "android.hardware.camera.provider@2.4", + "android.hardware.camera.provider@2.5", + "android.hardware.camera.provider@2.6", + "android.hardware.camera.provider@2.7", + "android.hardware.graphics.common@1.0", + "android.hidl.allocator@1.0", + "libgrallocusage", + "libhidlmemory", + "libgralloctypes", + ], + test_suites: ["general-tests", "vts"], +} diff --git a/camera/provider/2.4/vts/functional/AndroidTest.xml b/camera/provider/2.4/vts/functional/AndroidTest.xml new file mode 100644 index 0000000..3000c0e --- /dev/null +++ b/camera/provider/2.4/vts/functional/AndroidTest.xml @@ -0,0 +1,33 @@ + + + + diff --git a/camera/provider/2.4/vts/functional/VtsHalCameraProviderV2_4TargetTest.cpp b/camera/provider/2.4/vts/functional/VtsHalCameraProviderV2_4TargetTest.cpp new file mode 100644 index 0000000..052103d --- /dev/null +++ b/camera/provider/2.4/vts/functional/VtsHalCameraProviderV2_4TargetTest.cpp @@ -0,0 +1,8831 @@ +/* + * Copyright (C) 2016-2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
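Editor's note: the .rc files and service binaries above register the provider under instance names such as legacy/0 and external/0, and the VTS test that follows connects to them through the same HIDL entry point as any other client. A rough sketch of that client-side lookup; dumpLegacyCameraIds() is an illustrative helper, not part of this patch.

    #define LOG_TAG "CamClientSketch"  // illustrative tag for this sketch only
    #include <android/hardware/camera/provider/2.4/ICameraProvider.h>
    #include <log/log.h>

    using ::android::sp;
    using ::android::hardware::hidl_string;
    using ::android::hardware::hidl_vec;
    using ::android::hardware::camera::common::V1_0::Status;
    using ::android::hardware::camera::provider::V2_4::ICameraProvider;

    void dumpLegacyCameraIds() {
        // "legacy/0" matches the instance name declared in the .rc files above.
        sp<ICameraProvider> provider = ICameraProvider::getService("legacy/0");
        if (provider == nullptr) {
            ALOGE("camera provider legacy/0 is not registered");
            return;
        }
        auto ret = provider->getCameraIdList(
                [](Status status, const hidl_vec<hidl_string>& names) {
                    if (status != Status::OK) return;
                    for (const auto& name : names) {
                        ALOGI("found camera device: %s", name.c_str());  // e.g. device@3.3/legacy/0
                    }
                });
        if (!ret.isOk()) {
            ALOGE("getCameraIdList transaction failed: %s", ret.description().c_str());
        }
    }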
+ */ + +#define LOG_TAG "camera_hidl_hal_test" + +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include +#include + +using namespace ::android::hardware::camera::device; +using ::android::BufferItemConsumer; +using ::android::BufferQueue; +using ::android::GraphicBuffer; +using ::android::IGraphicBufferConsumer; +using ::android::IGraphicBufferProducer; +using ::android::sp; +using ::android::Surface; +using ::android::wp; +using ::android::hardware::hidl_bitfield; +using ::android::hardware::hidl_handle; +using ::android::hardware::hidl_string; +using ::android::hardware::hidl_vec; +using ::android::hardware::kSynchronizedReadWrite; +using ::android::hardware::MessageQueue; +using ::android::hardware::Return; +using ::android::hardware::Void; +using ::android::hardware::camera::common::V1_0::CameraDeviceStatus; +using ::android::hardware::camera::common::V1_0::Status; +using ::android::hardware::camera::common::V1_0::TorchMode; +using ::android::hardware::camera::common::V1_0::TorchModeStatus; +using ::android::hardware::camera::common::V1_0::helper::CameraParameters; +using ::android::hardware::camera::common::V1_0::helper::Size; +using ::android::hardware::camera::device::V1_0::CameraFacing; +using ::android::hardware::camera::device::V1_0::CameraFrameMetadata; +using ::android::hardware::camera::device::V1_0::CommandType; +using ::android::hardware::camera::device::V1_0::DataCallbackMsg; +using ::android::hardware::camera::device::V1_0::FrameCallbackFlag; +using ::android::hardware::camera::device::V1_0::HandleTimestampMessage; +using ::android::hardware::camera::device::V1_0::ICameraDevicePreviewCallback; +using ::android::hardware::camera::device::V1_0::NotifyCallbackMsg; +using ::android::hardware::camera::device::V3_2::BufferCache; +using ::android::hardware::camera::device::V3_2::BufferStatus; +using ::android::hardware::camera::device::V3_2::CameraMetadata; +using ::android::hardware::camera::device::V3_2::CaptureRequest; +using ::android::hardware::camera::device::V3_2::CaptureResult; +using ::android::hardware::camera::device::V3_2::ErrorCode; +using ::android::hardware::camera::device::V3_2::ErrorMsg; +using ::android::hardware::camera::device::V3_2::HalStreamConfiguration; +using ::android::hardware::camera::device::V3_2::ICameraDevice; +using ::android::hardware::camera::device::V3_2::ICameraDeviceSession; +using ::android::hardware::camera::device::V3_2::MsgType; +using ::android::hardware::camera::device::V3_2::NotifyMsg; +using ::android::hardware::camera::device::V3_2::RequestTemplate; +using ::android::hardware::camera::device::V3_2::StreamBuffer; +using ::android::hardware::camera::device::V3_2::StreamConfiguration; +using ::android::hardware::camera::device::V3_2::StreamConfigurationMode; +using ::android::hardware::camera::device::V3_2::StreamRotation; +using ::android::hardware::camera::device::V3_2::StreamType; +using ::android::hardware::camera::device::V3_4::PhysicalCameraMetadata; +using ::android::hardware::camera::metadata::V3_4:: + CameraMetadataEnumAndroidSensorInfoColorFilterArrangement; +using 
::android::hardware::camera::metadata::V3_4::CameraMetadataTag; +using ::android::hardware::camera::metadata::V3_6::CameraMetadataEnumAndroidSensorPixelMode; +using ::android::hardware::camera::provider::V2_4::ICameraProvider; +using ::android::hardware::camera::provider::V2_4::ICameraProviderCallback; +using ::android::hardware::camera::provider::V2_6::CameraIdAndStreamCombination; +using ::android::hardware::graphics::common::V1_0::BufferUsage; +using ::android::hardware::graphics::common::V1_0::Dataspace; +using ::android::hardware::graphics::common::V1_0::PixelFormat; +using ::android::hidl::allocator::V1_0::IAllocator; +using ::android::hidl::memory::V1_0::IMapper; +using ::android::hidl::memory::V1_0::IMemory; +using ResultMetadataQueue = MessageQueue; +using ::android::hidl::manager::V1_0::IServiceManager; + +using namespace ::android::hardware::camera; + +const uint32_t kMaxPreviewWidth = 1920; +const uint32_t kMaxPreviewHeight = 1080; +const uint32_t kMaxStillWidth = 2048; +const uint32_t kMaxStillHeight = 1536; +const uint32_t kMaxVideoWidth = 4096; +const uint32_t kMaxVideoHeight = 2160; +const int64_t kStreamBufferTimeoutSec = 3; +const int64_t kAutoFocusTimeoutSec = 5; +const int64_t kTorchTimeoutSec = 1; +const int64_t kEmptyFlushTimeoutMSec = 200; +const char kDumpOutput[] = "/dev/null"; +const uint32_t kBurstFrameCount = 10; +const int64_t kBufferReturnTimeoutSec = 1; + +struct AvailableStream { + int32_t width; + int32_t height; + int32_t format; +}; + +struct AvailableZSLInputOutput { + int32_t inputFormat; + int32_t outputFormat; +}; + +enum ReprocessType { + PRIV_REPROCESS, + YUV_REPROCESS, +}; + +enum SystemCameraKind { + /** + * These camera devices are visible to all apps and system components alike + */ + PUBLIC = 0, + + /** + * These camera devices are visible only to processes having the + * android.permission.SYSTEM_CAMERA permission. They are not exposed to 3P + * apps. + */ + SYSTEM_ONLY_CAMERA, + + /** + * These camera devices are visible only to HAL clients (that try to connect + * on a hwbinder thread). 
+ */ + HIDDEN_SECURE_CAMERA +}; + +namespace { + // "device@/legacy/" + const char *kDeviceNameRE = "device@([0-9]+\\.[0-9]+)/%s/(.+)"; + const int CAMERA_DEVICE_API_VERSION_3_7 = 0x307; + const int CAMERA_DEVICE_API_VERSION_3_6 = 0x306; + const int CAMERA_DEVICE_API_VERSION_3_5 = 0x305; + const int CAMERA_DEVICE_API_VERSION_3_4 = 0x304; + const int CAMERA_DEVICE_API_VERSION_3_3 = 0x303; + const int CAMERA_DEVICE_API_VERSION_3_2 = 0x302; + const int CAMERA_DEVICE_API_VERSION_1_0 = 0x100; + const char *kHAL3_7 = "3.7"; + const char *kHAL3_6 = "3.6"; + const char *kHAL3_5 = "3.5"; + const char *kHAL3_4 = "3.4"; + const char *kHAL3_3 = "3.3"; + const char *kHAL3_2 = "3.2"; + const char *kHAL1_0 = "1.0"; + + bool matchDeviceName(const hidl_string& deviceName, + const hidl_string &providerType, + std::string* deviceVersion, + std::string* cameraId) { + ::android::String8 pattern; + pattern.appendFormat(kDeviceNameRE, providerType.c_str()); + std::regex e(pattern.string()); + std::string deviceNameStd(deviceName.c_str()); + std::smatch sm; + if (std::regex_match(deviceNameStd, sm, e)) { + if (deviceVersion != nullptr) { + *deviceVersion = sm[1]; + } + if (cameraId != nullptr) { + *cameraId = sm[2]; + } + return true; + } + return false; + } + + int getCameraDeviceVersionAndId(const hidl_string& deviceName, + const hidl_string &providerType, std::string* id) { + std::string version; + bool match = matchDeviceName(deviceName, providerType, &version, id); + if (!match) { + return -1; + } + + if (version.compare(kHAL3_7) == 0) { + return CAMERA_DEVICE_API_VERSION_3_7; + } else if (version.compare(kHAL3_6) == 0) { + return CAMERA_DEVICE_API_VERSION_3_6; + } else if (version.compare(kHAL3_5) == 0) { + return CAMERA_DEVICE_API_VERSION_3_5; + } else if (version.compare(kHAL3_4) == 0) { + return CAMERA_DEVICE_API_VERSION_3_4; + } else if (version.compare(kHAL3_3) == 0) { + return CAMERA_DEVICE_API_VERSION_3_3; + } else if (version.compare(kHAL3_2) == 0) { + return CAMERA_DEVICE_API_VERSION_3_2; + } else if (version.compare(kHAL1_0) == 0) { + return CAMERA_DEVICE_API_VERSION_1_0; + } + return 0; + } + + int getCameraDeviceVersion(const hidl_string& deviceName, + const hidl_string &providerType) { + return getCameraDeviceVersionAndId(deviceName, providerType, nullptr); + } + + bool parseProviderName(const std::string& name, std::string *type /*out*/, + uint32_t *id /*out*/) { + if (!type || !id) { + ADD_FAILURE(); + return false; + } + + std::string::size_type slashIdx = name.find('/'); + if (slashIdx == std::string::npos || slashIdx == name.size() - 1) { + ADD_FAILURE() << "Provider name does not have / separator between type" + "and id"; + return false; + } + + std::string typeVal = name.substr(0, slashIdx); + + char *endPtr; + errno = 0; + long idVal = strtol(name.c_str() + slashIdx + 1, &endPtr, 10); + if (errno != 0) { + ADD_FAILURE() << "cannot parse provider id as an integer:" << + name.c_str() << strerror(errno) << errno; + return false; + } + if (endPtr != name.c_str() + name.size()) { + ADD_FAILURE() << "provider id has unexpected length " << name.c_str(); + return false; + } + if (idVal < 0) { + ADD_FAILURE() << "id is negative: " << name.c_str() << idVal; + return false; + } + + *type = typeVal; + *id = static_cast(idVal); + + return true; + } + + Status mapToStatus(::android::status_t s) { + switch(s) { + case ::android::OK: + return Status::OK ; + case ::android::BAD_VALUE: + return Status::ILLEGAL_ARGUMENT ; + case -EBUSY: + return Status::CAMERA_IN_USE; + case -EUSERS: + return 
Status::MAX_CAMERAS_IN_USE; + case ::android::UNKNOWN_TRANSACTION: + return Status::METHOD_NOT_SUPPORTED; + case ::android::INVALID_OPERATION: + return Status::OPERATION_NOT_SUPPORTED; + case ::android::DEAD_OBJECT: + return Status::CAMERA_DISCONNECTED; + } + ALOGW("Unexpected HAL status code %d", s); + return Status::OPERATION_NOT_SUPPORTED; + } + + void getFirstApiLevel(/*out*/int32_t* outApiLevel) { + int32_t firstApiLevel = property_get_int32("ro.product.first_api_level", /*default*/-1); + if (firstApiLevel < 0) { + firstApiLevel = property_get_int32("ro.build.version.sdk", /*default*/-1); + } + ASSERT_GT(firstApiLevel, 0); // first_api_level must exist + *outApiLevel = firstApiLevel; + return; + } +} + +struct BufferItemHander: public BufferItemConsumer::FrameAvailableListener { + BufferItemHander(wp consumer) : mConsumer(consumer) {} + + void onFrameAvailable(const android::BufferItem&) override { + sp consumer = mConsumer.promote(); + ASSERT_NE(nullptr, consumer.get()); + + android::BufferItem buffer; + ASSERT_EQ(android::OK, consumer->acquireBuffer(&buffer, 0)); + ASSERT_EQ(android::OK, consumer->releaseBuffer(buffer)); + } + + private: + wp mConsumer; +}; + +struct PreviewWindowCb : public ICameraDevicePreviewCallback { + PreviewWindowCb(sp anw) : mPreviewWidth(0), + mPreviewHeight(0), mFormat(0), mPreviewUsage(0), + mPreviewSwapInterval(-1), mCrop{-1, -1, -1, -1}, mAnw(anw) {} + + using dequeueBuffer_cb = + std::function; + Return dequeueBuffer(dequeueBuffer_cb _hidl_cb) override; + + Return enqueueBuffer(uint64_t bufferId) override; + + Return cancelBuffer(uint64_t bufferId) override; + + Return setBufferCount(uint32_t count) override; + + Return setBuffersGeometry(uint32_t w, + uint32_t h, PixelFormat format) override; + + Return setCrop(int32_t left, int32_t top, + int32_t right, int32_t bottom) override; + + Return setUsage(BufferUsage usage) override; + + Return setSwapInterval(int32_t interval) override; + + using getMinUndequeuedBufferCount_cb = + std::function; + Return getMinUndequeuedBufferCount( + getMinUndequeuedBufferCount_cb _hidl_cb) override; + + Return setTimestamp(int64_t timestamp) override; + + private: + struct BufferHasher { + size_t operator()(const buffer_handle_t& buf) const { + if (buf == nullptr) + return 0; + + size_t result = 1; + result = 31 * result + buf->numFds; + for (int i = 0; i < buf->numFds; i++) { + result = 31 * result + buf->data[i]; + } + return result; + } + }; + + struct BufferComparator { + bool operator()(const buffer_handle_t& buf1, + const buffer_handle_t& buf2) const { + if (buf1->numFds == buf2->numFds) { + for (int i = 0; i < buf1->numFds; i++) { + if (buf1->data[i] != buf2->data[i]) { + return false; + } + } + return true; + } + return false; + } + }; + + std::pair getBufferId(ANativeWindowBuffer* anb); + void cleanupCirculatingBuffers(); + + std::mutex mBufferIdMapLock; // protecting mBufferIdMap and mNextBufferId + typedef std::unordered_map BufferIdMap; + + BufferIdMap mBufferIdMap; // stream ID -> per stream buffer ID map + std::unordered_map mReversedBufMap; + uint64_t mNextBufferId = 1; + + uint32_t mPreviewWidth, mPreviewHeight; + int mFormat, mPreviewUsage; + int32_t mPreviewSwapInterval; + android_native_rect_t mCrop; + sp mAnw; //Native window reference +}; + +std::pair PreviewWindowCb::getBufferId( + ANativeWindowBuffer* anb) { + std::lock_guard lock(mBufferIdMapLock); + + buffer_handle_t& buf = anb->handle; + auto it = mBufferIdMap.find(buf); + if (it == mBufferIdMap.end()) { + uint64_t bufId = mNextBufferId++; + 
mBufferIdMap[buf] = bufId; + mReversedBufMap[bufId] = anb; + return std::make_pair(true, bufId); + } else { + return std::make_pair(false, it->second); + } +} + +void PreviewWindowCb::cleanupCirculatingBuffers() { + std::lock_guard lock(mBufferIdMapLock); + mBufferIdMap.clear(); + mReversedBufMap.clear(); +} + +Return PreviewWindowCb::dequeueBuffer(dequeueBuffer_cb _hidl_cb) { + ANativeWindowBuffer* anb; + auto rc = native_window_dequeue_buffer_and_wait(mAnw.get(), &anb); + uint64_t bufferId = 0; + uint32_t stride = 0; + hidl_handle buf = nullptr; + if (rc == ::android::OK) { + auto pair = getBufferId(anb); + buf = (pair.first) ? anb->handle : nullptr; + bufferId = pair.second; + stride = anb->stride; + } + + _hidl_cb(mapToStatus(rc), bufferId, buf, stride); + return Void(); +} + +Return PreviewWindowCb::enqueueBuffer(uint64_t bufferId) { + if (mReversedBufMap.count(bufferId) == 0) { + ALOGE("%s: bufferId %" PRIu64 " not found", __FUNCTION__, bufferId); + return Status::ILLEGAL_ARGUMENT; + } + return mapToStatus(mAnw->queueBuffer(mAnw.get(), + mReversedBufMap.at(bufferId), -1)); +} + +Return PreviewWindowCb::cancelBuffer(uint64_t bufferId) { + if (mReversedBufMap.count(bufferId) == 0) { + ALOGE("%s: bufferId %" PRIu64 " not found", __FUNCTION__, bufferId); + return Status::ILLEGAL_ARGUMENT; + } + return mapToStatus(mAnw->cancelBuffer(mAnw.get(), + mReversedBufMap.at(bufferId), -1)); +} + +Return PreviewWindowCb::setBufferCount(uint32_t count) { + if (mAnw.get() != nullptr) { + // WAR for b/27039775 + native_window_api_disconnect(mAnw.get(), NATIVE_WINDOW_API_CAMERA); + native_window_api_connect(mAnw.get(), NATIVE_WINDOW_API_CAMERA); + if (mPreviewWidth != 0) { + native_window_set_buffers_dimensions(mAnw.get(), + mPreviewWidth, mPreviewHeight); + native_window_set_buffers_format(mAnw.get(), mFormat); + } + if (mPreviewUsage != 0) { + native_window_set_usage(mAnw.get(), mPreviewUsage); + } + if (mPreviewSwapInterval >= 0) { + mAnw->setSwapInterval(mAnw.get(), mPreviewSwapInterval); + } + if (mCrop.left >= 0) { + native_window_set_crop(mAnw.get(), &(mCrop)); + } + } + + auto rc = native_window_set_buffer_count(mAnw.get(), count); + if (rc == ::android::OK) { + cleanupCirculatingBuffers(); + } + + return mapToStatus(rc); +} + +Return PreviewWindowCb::setBuffersGeometry(uint32_t w, uint32_t h, + PixelFormat format) { + auto rc = native_window_set_buffers_dimensions(mAnw.get(), w, h); + if (rc == ::android::OK) { + mPreviewWidth = w; + mPreviewHeight = h; + rc = native_window_set_buffers_format(mAnw.get(), + static_cast(format)); + if (rc == ::android::OK) { + mFormat = static_cast(format); + } + } + + return mapToStatus(rc); +} + +Return PreviewWindowCb::setCrop(int32_t left, int32_t top, + int32_t right, int32_t bottom) { + android_native_rect_t crop = { left, top, right, bottom }; + auto rc = native_window_set_crop(mAnw.get(), &crop); + if (rc == ::android::OK) { + mCrop = crop; + } + return mapToStatus(rc); +} + +Return PreviewWindowCb::setUsage(BufferUsage usage) { + auto rc = native_window_set_usage(mAnw.get(), static_cast(usage)); + if (rc == ::android::OK) { + mPreviewUsage = static_cast(usage); + } + return mapToStatus(rc); +} + +Return PreviewWindowCb::setSwapInterval(int32_t interval) { + auto rc = mAnw->setSwapInterval(mAnw.get(), interval); + if (rc == ::android::OK) { + mPreviewSwapInterval = interval; + } + return mapToStatus(rc); +} + +Return PreviewWindowCb::getMinUndequeuedBufferCount( + getMinUndequeuedBufferCount_cb _hidl_cb) { + int count = 0; + auto rc = 
mAnw->query(mAnw.get(), + NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, &count); + _hidl_cb(mapToStatus(rc), count); + return Void(); +} + +Return PreviewWindowCb::setTimestamp(int64_t timestamp) { + return mapToStatus(native_window_set_buffers_timestamp(mAnw.get(), + timestamp)); +} + +// The main test class for camera HIDL HAL. +class CameraHidlTest : public ::testing::TestWithParam { +public: + virtual void SetUp() override { + std::string service_name = GetParam(); + ALOGI("get service with name: %s", service_name.c_str()); + mProvider = ICameraProvider::getService(service_name); + + ASSERT_NE(mProvider, nullptr); + + uint32_t id; + ASSERT_TRUE(parseProviderName(service_name, &mProviderType, &id)); + + castProvider(mProvider, &mProvider2_5, &mProvider2_6, &mProvider2_7); + notifyDeviceState(provider::V2_5::DeviceState::NORMAL); + } + virtual void TearDown() override {} + + hidl_vec getCameraDeviceNames(sp provider, + bool addSecureOnly = false); + + bool isSecureOnly(sp provider, const hidl_string& name); + + std::map getCameraDeviceIdToNameMap(sp provider); + + hidl_vec> getConcurrentDeviceCombinations( + sp<::android::hardware::camera::provider::V2_6::ICameraProvider>&); + + struct EmptyDeviceCb : public V3_5::ICameraDeviceCallback { + virtual Return processCaptureResult( + const hidl_vec& /*results*/) override { + ALOGI("processCaptureResult callback"); + ADD_FAILURE(); // Empty callback should not reach here + return Void(); + } + + virtual Return processCaptureResult_3_4( + const hidl_vec& /*results*/) override { + ALOGI("processCaptureResult_3_4 callback"); + ADD_FAILURE(); // Empty callback should not reach here + return Void(); + } + + virtual Return notify(const hidl_vec& /*msgs*/) override { + ALOGI("notify callback"); + ADD_FAILURE(); // Empty callback should not reach here + return Void(); + } + + virtual Return requestStreamBuffers( + const hidl_vec&, + requestStreamBuffers_cb _hidl_cb) override { + ALOGI("requestStreamBuffers callback"); + // HAL might want to request buffer after configureStreams, but tests with EmptyDeviceCb + // doesn't actually need to send capture requests, so just return an error. 
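+            // Returning FAILED_UNKNOWN with an empty vector grants the HAL no
+            // buffers at all; a HAL that issues such a request anyway is
+            // expected to tolerate the failure, since these tests never submit
+            // capture requests through EmptyDeviceCb.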
+ hidl_vec emptyBufRets; + _hidl_cb(V3_5::BufferRequestStatus::FAILED_UNKNOWN, emptyBufRets); + return Void(); + } + + virtual Return returnStreamBuffers(const hidl_vec&) override { + ALOGI("returnStreamBuffers"); + ADD_FAILURE(); // Empty callback should not reach here + return Void(); + } + }; + + struct DeviceCb : public V3_5::ICameraDeviceCallback { + DeviceCb(CameraHidlTest *parent, int deviceVersion, const camera_metadata_t *staticMeta) : + mParent(parent), mDeviceVersion(deviceVersion) { + mStaticMetadata = staticMeta; + } + + Return processCaptureResult_3_4( + const hidl_vec& results) override; + Return processCaptureResult(const hidl_vec& results) override; + Return notify(const hidl_vec& msgs) override; + + Return requestStreamBuffers( + const hidl_vec& bufReqs, + requestStreamBuffers_cb _hidl_cb) override; + + Return returnStreamBuffers(const hidl_vec& buffers) override; + + void setCurrentStreamConfig(const hidl_vec& streams, + const hidl_vec& halStreams); + + void waitForBuffersReturned(); + + private: + bool processCaptureResultLocked(const CaptureResult& results, + hidl_vec physicalCameraMetadata); + + CameraHidlTest *mParent; // Parent object + int mDeviceVersion; + android::hardware::camera::common::V1_0::helper::CameraMetadata mStaticMetadata; + bool hasOutstandingBuffersLocked(); + + /* members for requestStreamBuffers() and returnStreamBuffers()*/ + std::mutex mLock; // protecting members below + bool mUseHalBufManager = false; + hidl_vec mStreams; + hidl_vec mHalStreams; + uint64_t mNextBufferId = 1; + using OutstandingBuffers = std::unordered_map; + // size == mStreams.size(). Tracking each streams outstanding buffers + std::vector mOutstandingBufferIds; + std::condition_variable mFlushedCondition; + }; + + struct TorchProviderCb : public ICameraProviderCallback { + TorchProviderCb(CameraHidlTest *parent) : mParent(parent) {} + virtual Return cameraDeviceStatusChange( + const hidl_string&, CameraDeviceStatus) override { + return Void(); + } + + virtual Return torchModeStatusChange( + const hidl_string&, TorchModeStatus newStatus) override { + std::lock_guard l(mParent->mTorchLock); + mParent->mTorchStatus = newStatus; + mParent->mTorchCond.notify_one(); + return Void(); + } + + private: + CameraHidlTest *mParent; // Parent object + }; + + struct Camera1DeviceCb : + public ::android::hardware::camera::device::V1_0::ICameraDeviceCallback { + Camera1DeviceCb(CameraHidlTest *parent) : mParent(parent) {} + + Return notifyCallback(NotifyCallbackMsg msgType, + int32_t ext1, int32_t ext2) override; + + Return registerMemory(const hidl_handle& descriptor, + uint32_t bufferSize, uint32_t bufferCount) override; + + Return unregisterMemory(uint32_t memId) override; + + Return dataCallback(DataCallbackMsg msgType, + uint32_t data, uint32_t bufferIndex, + const CameraFrameMetadata& metadata) override; + + Return dataCallbackTimestamp(DataCallbackMsg msgType, + uint32_t data, uint32_t bufferIndex, + int64_t timestamp) override; + + Return handleCallbackTimestamp(DataCallbackMsg msgType, + const hidl_handle& frameData,uint32_t data, + uint32_t bufferIndex, int64_t timestamp) override; + + Return handleCallbackTimestampBatch(DataCallbackMsg msgType, + const ::android::hardware::hidl_vec& batch) override; + + + private: + CameraHidlTest *mParent; // Parent object + }; + + void notifyDeviceState(::android::hardware::camera::provider::V2_5::DeviceState newState); + + void openCameraDevice(const std::string &name, sp provider, + sp<::android::hardware::camera::device::V1_0::ICameraDevice> 
*device /*out*/); + void setupPreviewWindow( + const sp<::android::hardware::camera::device::V1_0::ICameraDevice> &device, + sp *bufferItemConsumer /*out*/, + sp *bufferHandler /*out*/); + void stopPreviewAndClose( + const sp<::android::hardware::camera::device::V1_0::ICameraDevice> &device); + void startPreview( + const sp<::android::hardware::camera::device::V1_0::ICameraDevice> &device); + void enableMsgType(unsigned int msgType, + const sp<::android::hardware::camera::device::V1_0::ICameraDevice> &device); + void disableMsgType(unsigned int msgType, + const sp<::android::hardware::camera::device::V1_0::ICameraDevice> &device); + void getParameters( + const sp<::android::hardware::camera::device::V1_0::ICameraDevice> &device, + CameraParameters *cameraParams /*out*/); + void setParameters( + const sp<::android::hardware::camera::device::V1_0::ICameraDevice> &device, + const CameraParameters &cameraParams); + void allocateGraphicBuffer(uint32_t width, uint32_t height, uint64_t usage, + PixelFormat format, hidl_handle *buffer_handle /*out*/); + void waitForFrameLocked(DataCallbackMsg msgFrame, + std::unique_lock &l); + void openEmptyDeviceSession(const std::string &name, + sp provider, + sp *session /*out*/, + camera_metadata_t **staticMeta /*out*/, + ::android::sp *device = nullptr/*out*/); + void castProvider(const sp& provider, + sp* provider2_5 /*out*/, + sp* provider2_6 /*out*/, + sp* provider2_7 /*out*/); + void castSession(const sp &session, int32_t deviceVersion, + sp *session3_3 /*out*/, + sp *session3_4 /*out*/, + sp *session3_5 /*out*/, + sp *session3_6 /*out*/, + sp *session3_7 /*out*/); + void castInjectionSession( + const sp& session, + sp* injectionSession3_7 /*out*/); + void castDevice(const sp& device, int32_t deviceVersion, + sp* device3_5 /*out*/, + sp* device3_7 /*out*/); + void createStreamConfiguration( + const ::android::hardware::hidl_vec& streams3_2, + StreamConfigurationMode configMode, + ::android::hardware::camera::device::V3_2::StreamConfiguration* config3_2, + ::android::hardware::camera::device::V3_4::StreamConfiguration* config3_4, + ::android::hardware::camera::device::V3_5::StreamConfiguration* config3_5, + ::android::hardware::camera::device::V3_7::StreamConfiguration* config3_7, + uint32_t jpegBufferSize = 0); + + void configureOfflineStillStream(const std::string &name, int32_t deviceVersion, + sp provider, + const AvailableStream *threshold, + sp *session/*out*/, + V3_2::Stream *stream /*out*/, + device::V3_6::HalStreamConfiguration *halStreamConfig /*out*/, + bool *supportsPartialResults /*out*/, + uint32_t *partialResultCount /*out*/, + sp *outCb /*out*/, + uint32_t *jpegBufferSize /*out*/, + bool *useHalBufManager /*out*/); + void configureStreams3_7(const std::string& name, int32_t deviceVersion, + sp provider, PixelFormat format, + sp* session3_7 /*out*/, + V3_2::Stream* previewStream /*out*/, + device::V3_6::HalStreamConfiguration* halStreamConfig /*out*/, + bool* supportsPartialResults /*out*/, + uint32_t* partialResultCount /*out*/, bool* useHalBufManager /*out*/, + sp* outCb /*out*/, uint32_t streamConfigCounter, + bool maxResolution); + + void configurePreviewStreams3_4(const std::string &name, int32_t deviceVersion, + sp provider, + const AvailableStream *previewThreshold, + const std::unordered_set& physicalIds, + sp *session3_4 /*out*/, + sp *session3_5 /*out*/, + V3_2::Stream* previewStream /*out*/, + device::V3_4::HalStreamConfiguration *halStreamConfig /*out*/, + bool *supportsPartialResults /*out*/, + uint32_t *partialResultCount 
/*out*/, + bool *useHalBufManager /*out*/, + sp *cb /*out*/, + uint32_t streamConfigCounter = 0, + bool allowUnsupport = false); + void configurePreviewStream(const std::string &name, int32_t deviceVersion, + sp provider, + const AvailableStream *previewThreshold, + sp *session /*out*/, + V3_2::Stream *previewStream /*out*/, + HalStreamConfiguration *halStreamConfig /*out*/, + bool *supportsPartialResults /*out*/, + uint32_t *partialResultCount /*out*/, + bool *useHalBufManager /*out*/, + sp *cb /*out*/, + uint32_t streamConfigCounter = 0); + void configureSingleStream(const std::string& name, int32_t deviceVersion, + sp provider, + const AvailableStream* previewThreshold, uint64_t bufferUsage, + RequestTemplate reqTemplate, + sp* session /*out*/, + V3_2::Stream* previewStream /*out*/, + HalStreamConfiguration* halStreamConfig /*out*/, + bool* supportsPartialResults /*out*/, + uint32_t* partialResultCount /*out*/, bool* useHalBufManager /*out*/, + sp* cb /*out*/, uint32_t streamConfigCounter = 0); + + void verifyLogicalOrUltraHighResCameraMetadata( + const std::string& cameraName, + const ::android::sp<::android::hardware::camera::device::V3_2::ICameraDevice>& device, + const CameraMetadata& chars, int deviceVersion, + const hidl_vec& deviceNames); + void verifyCameraCharacteristics(Status status, const CameraMetadata& chars); + void verifyExtendedSceneModeCharacteristics(const camera_metadata_t* metadata); + void verifyZoomCharacteristics(const camera_metadata_t* metadata); + void verifyRecommendedConfigs(const CameraMetadata& metadata); + void verifyMonochromeCharacteristics(const CameraMetadata& chars, int deviceVersion); + void verifyMonochromeCameraResult( + const ::android::hardware::camera::common::V1_0::helper::CameraMetadata& metadata); + void verifyStreamCombination( + sp cameraDevice3_7, + const ::android::hardware::camera::device::V3_7::StreamConfiguration& config3_7, + sp cameraDevice3_5, + const ::android::hardware::camera::device::V3_4::StreamConfiguration& config3_4, + bool expectedStatus, bool expectStreamCombQuery); + void verifyLogicalCameraResult(const camera_metadata_t* staticMetadata, + const ::android::hardware::camera::common::V1_0::helper::CameraMetadata& resultMetadata); + + void verifyBuffersReturned(sp session, + int deviceVerison, int32_t streamId, sp cb, + uint32_t streamConfigCounter = 0); + + void verifyBuffersReturned(sp session, + hidl_vec streamIds, sp cb, + uint32_t streamConfigCounter = 0); + + void verifyBuffersReturned(sp session, + hidl_vec streamIds, sp cb, + uint32_t streamConfigCounter = 0); + + void verifySessionReconfigurationQuery(sp session3_5, + camera_metadata* oldSessionParams, camera_metadata* newSessionParams); + + void verifyRequestTemplate(const camera_metadata_t* metadata, RequestTemplate requestTemplate); + static void overrideRotateAndCrop(::android::hardware::hidl_vec *settings /*in/out*/); + + static bool isDepthOnly(const camera_metadata_t* staticMeta); + + static bool isUltraHighResolution(const camera_metadata_t* staticMeta); + + static Status getAvailableOutputStreams(const camera_metadata_t* staticMeta, + std::vector& outputStreams, + const AvailableStream* threshold = nullptr, + bool maxResolution = false); + + static Status getMaxOutputSizeForFormat(const camera_metadata_t* staticMeta, PixelFormat format, + Size* size, bool maxResolution = false); + + static Status getMandatoryConcurrentStreams(const camera_metadata_t* staticMeta, + std::vector* outputStreams); + + static Status getJpegBufferSize(camera_metadata_t 
*staticMeta, + uint32_t* outBufSize); + static Status isConstrainedModeAvailable(camera_metadata_t *staticMeta); + static Status isLogicalMultiCamera(const camera_metadata_t *staticMeta); + static Status isOfflineSessionSupported(const camera_metadata_t *staticMeta); + static Status getPhysicalCameraIds(const camera_metadata_t *staticMeta, + std::unordered_set *physicalIds/*out*/); + static Status getSupportedKeys(camera_metadata_t *staticMeta, + uint32_t tagId, std::unordered_set *requestIDs/*out*/); + static void fillOutputStreams(camera_metadata_ro_entry_t* entry, + std::vector& outputStreams, + const AvailableStream *threshold = nullptr, + const int32_t availableConfigOutputTag = 0u); + static void constructFilteredSettings(const sp& session, + const std::unordered_set& availableKeys, RequestTemplate reqTemplate, + android::hardware::camera::common::V1_0::helper::CameraMetadata* defaultSettings/*out*/, + android::hardware::camera::common::V1_0::helper::CameraMetadata* filteredSettings + /*out*/); + static Status pickConstrainedModeSize(camera_metadata_t *staticMeta, + AvailableStream &hfrStream); + static Status isZSLModeAvailable(const camera_metadata_t *staticMeta); + static Status isZSLModeAvailable(const camera_metadata_t *staticMeta, ReprocessType reprocType); + static Status getZSLInputOutputMap(camera_metadata_t *staticMeta, + std::vector &inputOutputMap); + static Status findLargestSize( + const std::vector &streamSizes, + int32_t format, AvailableStream &result); + static Status isAutoFocusModeAvailable( + CameraParameters &cameraParams, const char *mode) ; + static Status isMonochromeCamera(const camera_metadata_t *staticMeta); + static Status getSystemCameraKind(const camera_metadata_t* staticMeta, + SystemCameraKind* systemCameraKind); + static void getMultiResolutionStreamConfigurations( + camera_metadata_ro_entry* multiResStreamConfigs, + camera_metadata_ro_entry* streamConfigs, + camera_metadata_ro_entry* maxResolutionStreamConfigs, + const camera_metadata_t* staticMetadata); + void getPrivacyTestPatternModes( + const camera_metadata_t* staticMetadata, + std::unordered_set* privacyTestPatternModes/*out*/); + + static V3_2::DataspaceFlags getDataspace(PixelFormat format); + + void processCaptureRequestInternal(uint64_t bufferusage, RequestTemplate reqTemplate, + bool useSecureOnlyCameras); + + // Used by switchToOffline where a new result queue is created for offline reqs + void updateInflightResultQueue(std::shared_ptr resultQueue); + +protected: + + // In-flight queue for tracking completion of capture requests. + struct InFlightRequest { + // Set by notify() SHUTTER call. + nsecs_t shutterTimestamp; + + bool errorCodeValid; + ErrorCode errorCode; + + //Is partial result supported + bool usePartialResult; + + //Partial result count expected + uint32_t numPartialResults; + + // Message queue + std::shared_ptr resultQueue; + + // Set by process_capture_result call with valid metadata + bool haveResultMetadata; + + // Decremented by calls to process_capture_result with valid output + // and input buffers + ssize_t numBuffersLeft; + + // A 64bit integer to index the frame number associated with this result. + int64_t frameNumber; + + // The partial result count (index) for this capture result. + int32_t partialResultCount; + + // For buffer drop errors, the stream ID for the stream that lost a buffer. + // For physical sub-camera result errors, the Id of the physical stream + // for the physical sub-camera. + // Otherwise -1. 
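+        // (DeviceCb::notify() relies on this: an ERROR_RESULT that carries a
+        // stream ID is treated as a physical sub-camera result error and is
+        // matched against expectedPhysicalResults.)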
+ int32_t errorStreamId; + + // If this request has any input buffer + bool hasInputBuffer; + + // Result metadata + ::android::hardware::camera::common::V1_0::helper::CameraMetadata collectedResult; + + // Buffers are added by process_capture_result when output buffers + // return from HAL but framework. + ::android::Vector resultOutputBuffers; + + std::unordered_set expectedPhysicalResults; + + InFlightRequest() : + shutterTimestamp(0), + errorCodeValid(false), + errorCode(ErrorCode::ERROR_BUFFER), + usePartialResult(false), + numPartialResults(0), + resultQueue(nullptr), + haveResultMetadata(false), + numBuffersLeft(0), + frameNumber(0), + partialResultCount(0), + errorStreamId(-1), + hasInputBuffer(false), + collectedResult(1, 10) {} + + InFlightRequest(ssize_t numBuffers, bool hasInput, + bool partialResults, uint32_t partialCount, + std::shared_ptr queue = nullptr) : + shutterTimestamp(0), + errorCodeValid(false), + errorCode(ErrorCode::ERROR_BUFFER), + usePartialResult(partialResults), + numPartialResults(partialCount), + resultQueue(queue), + haveResultMetadata(false), + numBuffersLeft(numBuffers), + frameNumber(0), + partialResultCount(0), + errorStreamId(-1), + hasInputBuffer(hasInput), + collectedResult(1, 10) {} + + InFlightRequest(ssize_t numBuffers, bool hasInput, + bool partialResults, uint32_t partialCount, + const std::unordered_set& extraPhysicalResult, + std::shared_ptr queue = nullptr) : + shutterTimestamp(0), + errorCodeValid(false), + errorCode(ErrorCode::ERROR_BUFFER), + usePartialResult(partialResults), + numPartialResults(partialCount), + resultQueue(queue), + haveResultMetadata(false), + numBuffersLeft(numBuffers), + frameNumber(0), + partialResultCount(0), + errorStreamId(-1), + hasInputBuffer(hasInput), + collectedResult(1, 10), + expectedPhysicalResults(extraPhysicalResult) {} + }; + + // Map from frame number to the in-flight request state + typedef ::android::KeyedVector InFlightMap; + + std::mutex mLock; // Synchronize access to member variables + std::condition_variable mResultCondition; // Condition variable for incoming results + InFlightMap mInflightMap; // Map of all inflight requests + + DataCallbackMsg mDataMessageTypeReceived; // Most recent message type received through data callbacks + uint32_t mVideoBufferIndex; // Buffer index of the most recent video buffer + uint32_t mVideoData; // Buffer data of the most recent video buffer + hidl_handle mVideoNativeHandle; // Most recent video buffer native handle + NotifyCallbackMsg mNotifyMessage; // Current notification message + + std::mutex mTorchLock; // Synchronize access to torch status + std::condition_variable mTorchCond; // Condition variable for torch status + TorchModeStatus mTorchStatus; // Current torch status + + // Holds camera registered buffers + std::unordered_map > mMemoryPool; + + // Camera provider service + sp mProvider; + sp<::android::hardware::camera::provider::V2_5::ICameraProvider> mProvider2_5; + sp<::android::hardware::camera::provider::V2_6::ICameraProvider> mProvider2_6; + sp<::android::hardware::camera::provider::V2_7::ICameraProvider> mProvider2_7; + + // Camera provider type. 
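+    // Parsed from the service instance name in SetUp() (the part before the
+    // '/', e.g. "legacy" or "external") and substituted into kDeviceNameRE
+    // when matching "device@<major>.<minor>/<type>/<id>" device names.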
+ std::string mProviderType; +}; + +Return CameraHidlTest::Camera1DeviceCb::notifyCallback( + NotifyCallbackMsg msgType, int32_t ext1 __unused, + int32_t ext2 __unused) { + std::unique_lock l(mParent->mLock); + mParent->mNotifyMessage = msgType; + mParent->mResultCondition.notify_one(); + + return Void(); +} + +Return CameraHidlTest::Camera1DeviceCb::registerMemory( + const hidl_handle& descriptor, uint32_t bufferSize, + uint32_t bufferCount) { + if (descriptor->numFds != 1) { + ADD_FAILURE() << "camera memory descriptor has" + " numFds " << descriptor->numFds << " (expect 1)" ; + return 0; + } + if (descriptor->data[0] < 0) { + ADD_FAILURE() << "camera memory descriptor has" + " FD " << descriptor->data[0] << " (expect >= 0)"; + return 0; + } + + sp<::android::MemoryHeapBase> pool = new ::android::MemoryHeapBase( + descriptor->data[0], bufferSize*bufferCount, 0, 0); + mParent->mMemoryPool.emplace(pool->getHeapID(), pool); + + return pool->getHeapID(); +} + +Return CameraHidlTest::Camera1DeviceCb::unregisterMemory(uint32_t memId) { + if (mParent->mMemoryPool.count(memId) == 0) { + ALOGE("%s: memory pool ID %d not found", __FUNCTION__, memId); + ADD_FAILURE(); + return Void(); + } + + mParent->mMemoryPool.erase(memId); + return Void(); +} + +Return CameraHidlTest::Camera1DeviceCb::dataCallback( + DataCallbackMsg msgType __unused, uint32_t data __unused, + uint32_t bufferIndex __unused, + const CameraFrameMetadata& metadata __unused) { + std::unique_lock l(mParent->mLock); + mParent->mDataMessageTypeReceived = msgType; + mParent->mResultCondition.notify_one(); + + return Void(); +} + +Return CameraHidlTest::Camera1DeviceCb::dataCallbackTimestamp( + DataCallbackMsg msgType, uint32_t data, + uint32_t bufferIndex, int64_t timestamp __unused) { + std::unique_lock l(mParent->mLock); + mParent->mDataMessageTypeReceived = msgType; + mParent->mVideoBufferIndex = bufferIndex; + if (mParent->mMemoryPool.count(data) == 0) { + ADD_FAILURE() << "memory pool ID " << data << "not found"; + } + mParent->mVideoData = data; + mParent->mResultCondition.notify_one(); + + return Void(); +} + +Return CameraHidlTest::Camera1DeviceCb::handleCallbackTimestamp( + DataCallbackMsg msgType, const hidl_handle& frameData, + uint32_t data __unused, uint32_t bufferIndex, + int64_t timestamp __unused) { + std::unique_lock l(mParent->mLock); + mParent->mDataMessageTypeReceived = msgType; + mParent->mVideoBufferIndex = bufferIndex; + if (mParent->mMemoryPool.count(data) == 0) { + ADD_FAILURE() << "memory pool ID " << data << " not found"; + } + mParent->mVideoData = data; + mParent->mVideoNativeHandle = frameData; + mParent->mResultCondition.notify_one(); + + return Void(); +} + +Return CameraHidlTest::Camera1DeviceCb::handleCallbackTimestampBatch( + DataCallbackMsg msgType, + const hidl_vec& batch) { + std::unique_lock l(mParent->mLock); + for (auto& msg : batch) { + mParent->mDataMessageTypeReceived = msgType; + mParent->mVideoBufferIndex = msg.bufferIndex; + if (mParent->mMemoryPool.count(msg.data) == 0) { + ADD_FAILURE() << "memory pool ID " << msg.data << " not found"; + } + mParent->mVideoData = msg.data; + mParent->mVideoNativeHandle = msg.frameData; + mParent->mResultCondition.notify_one(); + } + return Void(); +} + +Return CameraHidlTest::DeviceCb::processCaptureResult_3_4( + const hidl_vec& results) { + + if (nullptr == mParent) { + return Void(); + } + + bool notify = false; + std::unique_lock l(mParent->mLock); + for (size_t i = 0 ; i < results.size(); i++) { + notify = processCaptureResultLocked(results[i].v3_2, 
results[i].physicalCameraMetadata); + } + + l.unlock(); + if (notify) { + mParent->mResultCondition.notify_one(); + } + + return Void(); +} + +Return CameraHidlTest::DeviceCb::processCaptureResult( + const hidl_vec& results) { + if (nullptr == mParent) { + return Void(); + } + + bool notify = false; + std::unique_lock l(mParent->mLock); + ::android::hardware::hidl_vec noPhysMetadata; + for (size_t i = 0 ; i < results.size(); i++) { + notify = processCaptureResultLocked(results[i], noPhysMetadata); + } + + l.unlock(); + if (notify) { + mParent->mResultCondition.notify_one(); + } + + return Void(); +} + +bool CameraHidlTest::DeviceCb::processCaptureResultLocked(const CaptureResult& results, + hidl_vec physicalCameraMetadata) { + bool notify = false; + uint32_t frameNumber = results.frameNumber; + + if ((results.result.size() == 0) && + (results.outputBuffers.size() == 0) && + (results.inputBuffer.buffer == nullptr) && + (results.fmqResultSize == 0)) { + ALOGE("%s: No result data provided by HAL for frame %d result count: %d", + __func__, frameNumber, (int) results.fmqResultSize); + ADD_FAILURE(); + return notify; + } + + ssize_t idx = mParent->mInflightMap.indexOfKey(frameNumber); + if (::android::NAME_NOT_FOUND == idx) { + ALOGE("%s: Unexpected frame number! received: %u", + __func__, frameNumber); + ADD_FAILURE(); + return notify; + } + + bool isPartialResult = false; + bool hasInputBufferInRequest = false; + InFlightRequest *request = mParent->mInflightMap.editValueAt(idx); + ::android::hardware::camera::device::V3_2::CameraMetadata resultMetadata; + size_t resultSize = 0; + if (results.fmqResultSize > 0) { + resultMetadata.resize(results.fmqResultSize); + if (request->resultQueue == nullptr) { + ADD_FAILURE(); + return notify; + } + if (!request->resultQueue->read(resultMetadata.data(), + results.fmqResultSize)) { + ALOGE("%s: Frame %d: Cannot read camera metadata from fmq," + "size = %" PRIu64, __func__, frameNumber, + results.fmqResultSize); + ADD_FAILURE(); + return notify; + } + + // Physical device results are only expected in the last/final + // partial result notification. 
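+        // In other words, when partial results are in use only the partial
+        // whose index equals numPartialResults may carry per-physical-camera
+        // metadata, so the size check below is skipped for intermediate
+        // partials.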
+ bool expectPhysicalResults = !(request->usePartialResult && + (results.partialResult < request->numPartialResults)); + if (expectPhysicalResults && + (physicalCameraMetadata.size() != request->expectedPhysicalResults.size())) { + ALOGE("%s: Frame %d: Returned physical metadata count %zu " + "must be equal to expected count %zu", __func__, frameNumber, + physicalCameraMetadata.size(), request->expectedPhysicalResults.size()); + ADD_FAILURE(); + return notify; + } + std::vector<::android::hardware::camera::device::V3_2::CameraMetadata> physResultMetadata; + physResultMetadata.resize(physicalCameraMetadata.size()); + for (size_t i = 0; i < physicalCameraMetadata.size(); i++) { + physResultMetadata[i].resize(physicalCameraMetadata[i].fmqMetadataSize); + if (!request->resultQueue->read(physResultMetadata[i].data(), + physicalCameraMetadata[i].fmqMetadataSize)) { + ALOGE("%s: Frame %d: Cannot read physical camera metadata from fmq," + "size = %" PRIu64, __func__, frameNumber, + physicalCameraMetadata[i].fmqMetadataSize); + ADD_FAILURE(); + return notify; + } + } + resultSize = resultMetadata.size(); + } else if (results.result.size() > 0) { + resultMetadata.setToExternal(const_cast( + results.result.data()), results.result.size()); + resultSize = resultMetadata.size(); + } + + if (!request->usePartialResult && (resultSize > 0) && + (results.partialResult != 1)) { + ALOGE("%s: Result is malformed for frame %d: partial_result %u " + "must be 1 if partial result is not supported", __func__, + frameNumber, results.partialResult); + ADD_FAILURE(); + return notify; + } + + if (results.partialResult != 0) { + request->partialResultCount = results.partialResult; + } + + // Check if this result carries only partial metadata + if (request->usePartialResult && (resultSize > 0)) { + if ((results.partialResult > request->numPartialResults) || + (results.partialResult < 1)) { + ALOGE("%s: Result is malformed for frame %d: partial_result %u" + " must be in the range of [1, %d] when metadata is " + "included in the result", __func__, frameNumber, + results.partialResult, request->numPartialResults); + ADD_FAILURE(); + return notify; + } + + // Verify no duplicate tags between partial results + const camera_metadata_t* partialMetadata = + reinterpret_cast(resultMetadata.data()); + const camera_metadata_t* collectedMetadata = request->collectedResult.getAndLock(); + camera_metadata_ro_entry_t searchEntry, foundEntry; + for (size_t i = 0; i < get_camera_metadata_entry_count(partialMetadata); i++) { + if (0 != get_camera_metadata_ro_entry(partialMetadata, i, &searchEntry)) { + ADD_FAILURE(); + request->collectedResult.unlock(collectedMetadata); + return notify; + } + if (-ENOENT != + find_camera_metadata_ro_entry(collectedMetadata, searchEntry.tag, &foundEntry)) { + ADD_FAILURE(); + request->collectedResult.unlock(collectedMetadata); + return notify; + } + } + request->collectedResult.unlock(collectedMetadata); + request->collectedResult.append(partialMetadata); + + isPartialResult = + (results.partialResult < request->numPartialResults); + } else if (resultSize > 0) { + request->collectedResult.append(reinterpret_cast( + resultMetadata.data())); + isPartialResult = false; + } + + hasInputBufferInRequest = request->hasInputBuffer; + + // Did we get the (final) result metadata for this capture? 
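+    // If so, mark the request as having its metadata, sort the accumulated
+    // result, and (for device@3.5 and newer) run the monochrome and
+    // logical-camera result checks against the static metadata that was
+    // handed to this callback at construction.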
+ if ((resultSize > 0) && !isPartialResult) { + if (request->haveResultMetadata) { + ALOGE("%s: Called multiple times with metadata for frame %d", + __func__, frameNumber); + ADD_FAILURE(); + return notify; + } + request->haveResultMetadata = true; + request->collectedResult.sort(); + + // Verify final result metadata + bool isAtLeast_3_5 = mDeviceVersion >= CAMERA_DEVICE_API_VERSION_3_5; + if (isAtLeast_3_5) { + auto staticMetadataBuffer = mStaticMetadata.getAndLock(); + bool isMonochrome = Status::OK == + CameraHidlTest::isMonochromeCamera(staticMetadataBuffer); + if (isMonochrome) { + mParent->verifyMonochromeCameraResult(request->collectedResult); + } + + // Verify logical camera result metadata + bool isLogicalCamera = + Status::OK == CameraHidlTest::isLogicalMultiCamera(staticMetadataBuffer); + if (isLogicalCamera) { + mParent->verifyLogicalCameraResult(staticMetadataBuffer, request->collectedResult); + } + mStaticMetadata.unlock(staticMetadataBuffer); + } + } + + uint32_t numBuffersReturned = results.outputBuffers.size(); + if (results.inputBuffer.buffer != nullptr) { + if (hasInputBufferInRequest) { + numBuffersReturned += 1; + } else { + ALOGW("%s: Input buffer should be NULL if there is no input" + " buffer sent in the request", __func__); + } + } + request->numBuffersLeft -= numBuffersReturned; + if (request->numBuffersLeft < 0) { + ALOGE("%s: Too many buffers returned for frame %d", __func__, + frameNumber); + ADD_FAILURE(); + return notify; + } + + request->resultOutputBuffers.appendArray(results.outputBuffers.data(), + results.outputBuffers.size()); + // If shutter event is received notify the pending threads. + if (request->shutterTimestamp != 0) { + notify = true; + } + + if (mUseHalBufManager) { + // Don't return buffers of bufId 0 (empty buffer) + std::vector buffers; + for (const auto& sb : results.outputBuffers) { + if (sb.bufferId != 0) { + buffers.push_back(sb); + } + } + returnStreamBuffers(buffers); + } + return notify; +} + +void CameraHidlTest::DeviceCb::setCurrentStreamConfig( + const hidl_vec& streams, const hidl_vec& halStreams) { + ASSERT_EQ(streams.size(), halStreams.size()); + ASSERT_NE(streams.size(), 0); + for (size_t i = 0; i < streams.size(); i++) { + ASSERT_EQ(streams[i].v3_2.id, halStreams[i].id); + } + std::lock_guard l(mLock); + mUseHalBufManager = true; + mStreams = streams; + mHalStreams = halStreams; + mOutstandingBufferIds.clear(); + for (size_t i = 0; i < streams.size(); i++) { + mOutstandingBufferIds.emplace_back(); + } +} + +bool CameraHidlTest::DeviceCb::hasOutstandingBuffersLocked() { + if (!mUseHalBufManager) { + return false; + } + for (const auto& outstandingBuffers : mOutstandingBufferIds) { + if (!outstandingBuffers.empty()) { + return true; + } + } + return false; +} + +void CameraHidlTest::DeviceCb::waitForBuffersReturned() { + std::unique_lock lk(mLock); + if (hasOutstandingBuffersLocked()) { + auto timeout = std::chrono::seconds(kBufferReturnTimeoutSec); + auto st = mFlushedCondition.wait_for(lk, timeout); + ASSERT_NE(std::cv_status::timeout, st); + } +} + +Return CameraHidlTest::DeviceCb::notify( + const hidl_vec& messages) { + std::lock_guard l(mParent->mLock); + + for (size_t i = 0; i < messages.size(); i++) { + switch(messages[i].type) { + case MsgType::ERROR: + if (ErrorCode::ERROR_DEVICE == messages[i].msg.error.errorCode) { + ALOGE("%s: Camera reported serious device error", + __func__); + ADD_FAILURE(); + } else { + ssize_t idx = mParent->mInflightMap.indexOfKey( + messages[i].msg.error.frameNumber); + if 
(::android::NAME_NOT_FOUND == idx) { + ALOGE("%s: Unexpected error frame number! received: %u", + __func__, messages[i].msg.error.frameNumber); + ADD_FAILURE(); + break; + } + InFlightRequest *r = mParent->mInflightMap.editValueAt(idx); + + if (ErrorCode::ERROR_RESULT == messages[i].msg.error.errorCode && + messages[i].msg.error.errorStreamId != -1) { + if (r->haveResultMetadata) { + ALOGE("%s: Camera must report physical camera result error before " + "the final capture result!", __func__); + ADD_FAILURE(); + } else { + for (size_t j = 0; j < mStreams.size(); j++) { + if (mStreams[j].v3_2.id == messages[i].msg.error.errorStreamId) { + hidl_string physicalCameraId = mStreams[j].physicalCameraId; + bool idExpected = r->expectedPhysicalResults.find( + physicalCameraId) != r->expectedPhysicalResults.end(); + if (!idExpected) { + ALOGE("%s: ERROR_RESULT's error stream's physicalCameraId " + "%s must be expected", __func__, + physicalCameraId.c_str()); + ADD_FAILURE(); + } else { + r->expectedPhysicalResults.erase(physicalCameraId); + } + break; + } + } + } + } else { + r->errorCodeValid = true; + r->errorCode = messages[i].msg.error.errorCode; + r->errorStreamId = messages[i].msg.error.errorStreamId; + } + } + break; + case MsgType::SHUTTER: + { + ssize_t idx = mParent->mInflightMap.indexOfKey(messages[i].msg.shutter.frameNumber); + if (::android::NAME_NOT_FOUND == idx) { + ALOGE("%s: Unexpected shutter frame number! received: %u", + __func__, messages[i].msg.shutter.frameNumber); + ADD_FAILURE(); + break; + } + InFlightRequest *r = mParent->mInflightMap.editValueAt(idx); + r->shutterTimestamp = messages[i].msg.shutter.timestamp; + } + break; + default: + ALOGE("%s: Unsupported notify message %d", __func__, + messages[i].type); + ADD_FAILURE(); + break; + } + } + + mParent->mResultCondition.notify_one(); + return Void(); +} + +Return CameraHidlTest::DeviceCb::requestStreamBuffers( + const hidl_vec& bufReqs, + requestStreamBuffers_cb _hidl_cb) { + using V3_5::BufferRequestStatus; + using V3_5::StreamBufferRet; + using V3_5::StreamBufferRequestError; + hidl_vec bufRets; + std::unique_lock l(mLock); + + if (!mUseHalBufManager) { + ALOGE("%s: Camera does not support HAL buffer management", __FUNCTION__); + ADD_FAILURE(); + _hidl_cb(BufferRequestStatus::FAILED_ILLEGAL_ARGUMENTS, bufRets); + return Void(); + } + + if (bufReqs.size() > mStreams.size()) { + ALOGE("%s: illegal buffer request: too many requests!", __FUNCTION__); + ADD_FAILURE(); + _hidl_cb(BufferRequestStatus::FAILED_ILLEGAL_ARGUMENTS, bufRets); + return Void(); + } + + std::vector indexes(bufReqs.size()); + for (size_t i = 0; i < bufReqs.size(); i++) { + bool found = false; + for (size_t idx = 0; idx < mStreams.size(); idx++) { + if (bufReqs[i].streamId == mStreams[idx].v3_2.id) { + found = true; + indexes[i] = idx; + break; + } + } + if (!found) { + ALOGE("%s: illegal buffer request: unknown streamId %d!", + __FUNCTION__, bufReqs[i].streamId); + ADD_FAILURE(); + _hidl_cb(BufferRequestStatus::FAILED_ILLEGAL_ARGUMENTS, bufRets); + return Void(); + } + } + + bool allStreamOk = true; + bool atLeastOneStreamOk = false; + bufRets.resize(bufReqs.size()); + for (size_t i = 0; i < bufReqs.size(); i++) { + int32_t idx = indexes[i]; + const auto& stream = mStreams[idx]; + const auto& halStream = mHalStreams[idx]; + const V3_5::BufferRequest& bufReq = bufReqs[i]; + if (mOutstandingBufferIds[idx].size() + bufReq.numBuffersRequested > halStream.maxBuffers) { + bufRets[i].streamId = stream.v3_2.id; + 
bufRets[i].val.error(StreamBufferRequestError::MAX_BUFFER_EXCEEDED); + allStreamOk = false; + continue; + } + + hidl_vec tmpRetBuffers(bufReq.numBuffersRequested); + for (size_t j = 0; j < bufReq.numBuffersRequested; j++) { + hidl_handle buffer_handle; + uint32_t w = stream.v3_2.width; + uint32_t h = stream.v3_2.height; + if (stream.v3_2.format == PixelFormat::BLOB) { + w = stream.bufferSize; + h = 1; + } + mParent->allocateGraphicBuffer(w, h, + android_convertGralloc1To0Usage( + halStream.producerUsage, halStream.consumerUsage), + halStream.overrideFormat, &buffer_handle); + + tmpRetBuffers[j] = {stream.v3_2.id, mNextBufferId, buffer_handle, BufferStatus::OK, + nullptr, nullptr}; + mOutstandingBufferIds[idx].insert(std::make_pair(mNextBufferId++, buffer_handle)); + } + atLeastOneStreamOk = true; + bufRets[i].streamId = stream.v3_2.id; + bufRets[i].val.buffers(std::move(tmpRetBuffers)); + } + + if (allStreamOk) { + _hidl_cb(BufferRequestStatus::OK, bufRets); + } else if (atLeastOneStreamOk) { + _hidl_cb(BufferRequestStatus::FAILED_PARTIAL, bufRets); + } else { + _hidl_cb(BufferRequestStatus::FAILED_UNKNOWN, bufRets); + } + + if (!hasOutstandingBuffersLocked()) { + l.unlock(); + mFlushedCondition.notify_one(); + } + return Void(); +} + +Return CameraHidlTest::DeviceCb::returnStreamBuffers( + const hidl_vec& buffers) { + if (!mUseHalBufManager) { + ALOGE("%s: Camera does not support HAL buffer management", __FUNCTION__); + ADD_FAILURE(); + } + + std::unique_lock l(mLock); + for (const auto& buf : buffers) { + bool found = false; + for (size_t idx = 0; idx < mOutstandingBufferIds.size(); idx++) { + if (mStreams[idx].v3_2.id == buf.streamId && + mOutstandingBufferIds[idx].count(buf.bufferId) == 1) { + mOutstandingBufferIds[idx].erase(buf.bufferId); + // TODO: check do we need to close/delete native handle or assume we have enough + // memory to run till the test finish? 
since we do not capture much requests (and + // most of time one buffer is sufficient) + found = true; + break; + } + } + if (found) { + continue; + } + ALOGE("%s: unknown buffer ID %" PRIu64, __FUNCTION__, buf.bufferId); + ADD_FAILURE(); + } + if (!hasOutstandingBuffersLocked()) { + l.unlock(); + mFlushedCondition.notify_one(); + } + return Void(); +} + +std::map CameraHidlTest::getCameraDeviceIdToNameMap( + sp provider) { + hidl_vec cameraDeviceNames = getCameraDeviceNames(provider); + std::map idToNameMap; + for (auto& name : cameraDeviceNames) { + std::string version, cameraId; + if (!matchDeviceName(name, mProviderType, &version, &cameraId)) { + ADD_FAILURE(); + } + idToNameMap.insert(std::make_pair(hidl_string(cameraId), name)); + } + return idToNameMap; +} + +hidl_vec CameraHidlTest::getCameraDeviceNames(sp provider, + bool addSecureOnly) { + std::vector cameraDeviceNames; + Return ret; + ret = provider->getCameraIdList( + [&](auto status, const auto& idList) { + ALOGI("getCameraIdList returns status:%d", (int)status); + for (size_t i = 0; i < idList.size(); i++) { + ALOGI("Camera Id[%zu] is %s", i, idList[i].c_str()); + } + ASSERT_EQ(Status::OK, status); + for (const auto& id : idList) { + cameraDeviceNames.push_back(id); + } + }); + if (!ret.isOk()) { + ADD_FAILURE(); + } + + // External camera devices are reported through cameraDeviceStatusChange + struct ProviderCb : public ICameraProviderCallback { + virtual Return cameraDeviceStatusChange( + const hidl_string& devName, + CameraDeviceStatus newStatus) override { + ALOGI("camera device status callback name %s, status %d", + devName.c_str(), (int) newStatus); + if (newStatus == CameraDeviceStatus::PRESENT) { + externalCameraDeviceNames.push_back(devName); + + } + return Void(); + } + + virtual Return torchModeStatusChange( + const hidl_string&, TorchModeStatus) override { + return Void(); + } + + std::vector externalCameraDeviceNames; + }; + sp cb = new ProviderCb; + auto status = mProvider->setCallback(cb); + + for (const auto& devName : cb->externalCameraDeviceNames) { + if (cameraDeviceNames.end() == std::find( + cameraDeviceNames.begin(), cameraDeviceNames.end(), devName)) { + cameraDeviceNames.push_back(devName); + } + } + + std::vector retList; + for (size_t i = 0; i < cameraDeviceNames.size(); i++) { + bool isSecureOnlyCamera = isSecureOnly(mProvider, cameraDeviceNames[i]); + if (addSecureOnly) { + if (isSecureOnlyCamera) { + retList.emplace_back(cameraDeviceNames[i]); + } + } else if (!isSecureOnlyCamera) { + retList.emplace_back(cameraDeviceNames[i]); + } + } + hidl_vec finalRetList = std::move(retList); + return finalRetList; +} + +bool CameraHidlTest::isSecureOnly(sp provider, const hidl_string& name) { + Return ret; + ::android::sp device3_x; + bool retVal = false; + if (getCameraDeviceVersion(name, mProviderType) == CAMERA_DEVICE_API_VERSION_1_0) { + return false; + } + ret = provider->getCameraDeviceInterface_V3_x(name, [&](auto status, const auto& device) { + ALOGI("getCameraDeviceInterface_V3_x returns status:%d", (int)status); + ASSERT_EQ(Status::OK, status); + ASSERT_NE(device, nullptr); + device3_x = device; + }); + if (!ret.isOk()) { + ADD_FAILURE() << "Failed to get camera device interface for " << name; + } + ret = device3_x->getCameraCharacteristics([&](Status s, CameraMetadata metadata) { + ASSERT_EQ(Status::OK, s); + camera_metadata_t* chars = (camera_metadata_t*)metadata.data(); + SystemCameraKind systemCameraKind = SystemCameraKind::PUBLIC; + Status status = getSystemCameraKind(chars, 
&systemCameraKind); + ASSERT_EQ(status, Status::OK); + if (systemCameraKind == SystemCameraKind::HIDDEN_SECURE_CAMERA) { + retVal = true; + } + }); + if (!ret.isOk()) { + ADD_FAILURE() << "Failed to get camera characteristics for device " << name; + } + return retVal; +} + +hidl_vec> CameraHidlTest::getConcurrentDeviceCombinations( + sp<::android::hardware::camera::provider::V2_6::ICameraProvider>& provider2_6) { + hidl_vec> combinations; + Return ret = provider2_6->getConcurrentStreamingCameraIds( + [&combinations](Status concurrentIdStatus, + const hidl_vec>& cameraDeviceIdCombinations) { + ASSERT_EQ(concurrentIdStatus, Status::OK); + combinations = cameraDeviceIdCombinations; + }); + if (!ret.isOk()) { + ADD_FAILURE(); + } + return combinations; +} + +// Test devices with first_api_level >= P does not advertise device@1.0 +TEST_P(CameraHidlTest, noHal1AfterP) { + constexpr int32_t HAL1_PHASE_OUT_API_LEVEL = 28; + int32_t firstApiLevel = 0; + getFirstApiLevel(&firstApiLevel); + + // all devices with first API level == 28 and <= 1GB of RAM must set low_ram + // and thus be allowed to continue using HAL1 + if ((firstApiLevel == HAL1_PHASE_OUT_API_LEVEL) && + (property_get_bool("ro.config.low_ram", /*default*/ false))) { + ALOGI("Hal1 allowed for low ram device"); + return; + } + + if (firstApiLevel >= HAL1_PHASE_OUT_API_LEVEL) { + hidl_vec cameraDeviceNames = getCameraDeviceNames(mProvider); + for (const auto& name : cameraDeviceNames) { + int deviceVersion = getCameraDeviceVersion(name, mProviderType); + ASSERT_NE(deviceVersion, 0); // Must be a valid device version + ASSERT_NE(deviceVersion, CAMERA_DEVICE_API_VERSION_1_0); // Must not be device@1.0 + } + } +} + +// Test if ICameraProvider::isTorchModeSupported returns Status::OK +// Also if first_api_level >= Q torch API must be supported. 
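+// (getFirstApiLevel() above falls back to ro.build.version.sdk when
+// ro.product.first_api_level is unset, so devices that first shipped before
+// Android Q are exempt from the mandatory-support assertion below.)
+// A single case can be run against one provider instance with a gtest filter,
+// e.g. (hypothetical invocation):
+//   VtsHalCameraProviderV2_4TargetTest --gtest_filter='*isTorchModeSupported*'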
+TEST_P(CameraHidlTest, isTorchModeSupported) { + constexpr int32_t API_LEVEL_Q = 29; + int32_t firstApiLevel = 0; + getFirstApiLevel(&firstApiLevel); + + Return ret; + ret = mProvider->isSetTorchModeSupported([&](auto status, bool support) { + ALOGI("isSetTorchModeSupported returns status:%d supported:%d", (int)status, support); + ASSERT_EQ(Status::OK, status); + if (firstApiLevel >= API_LEVEL_Q) { + ASSERT_EQ(true, support); + } + }); + ASSERT_TRUE(ret.isOk()); +} + +// TODO: consider removing this test if getCameraDeviceNames() has the same coverage +TEST_P(CameraHidlTest, getCameraIdList) { + Return ret; + ret = mProvider->getCameraIdList([&](auto status, const auto& idList) { + ALOGI("getCameraIdList returns status:%d", (int)status); + for (size_t i = 0; i < idList.size(); i++) { + ALOGI("Camera Id[%zu] is %s", i, idList[i].c_str()); + } + ASSERT_EQ(Status::OK, status); + }); + ASSERT_TRUE(ret.isOk()); +} + +// Test if ICameraProvider::getVendorTags returns Status::OK +TEST_P(CameraHidlTest, getVendorTags) { + Return ret; + ret = mProvider->getVendorTags([&](auto status, const auto& vendorTagSecs) { + ALOGI("getVendorTags returns status:%d numSections %zu", (int)status, vendorTagSecs.size()); + for (size_t i = 0; i < vendorTagSecs.size(); i++) { + ALOGI("Vendor tag section %zu name %s", i, vendorTagSecs[i].sectionName.c_str()); + for (size_t j = 0; j < vendorTagSecs[i].tags.size(); j++) { + const auto& tag = vendorTagSecs[i].tags[j]; + ALOGI("Vendor tag id %u name %s type %d", tag.tagId, tag.tagName.c_str(), + (int)tag.tagType); + } + } + ASSERT_EQ(Status::OK, status); + }); + ASSERT_TRUE(ret.isOk()); +} + +// Test if ICameraProvider::setCallback returns Status::OK +TEST_P(CameraHidlTest, setCallback) { + struct ProviderCb : public ICameraProviderCallback { + virtual Return cameraDeviceStatusChange( + const hidl_string& cameraDeviceName, + CameraDeviceStatus newStatus) override { + ALOGI("camera device status callback name %s, status %d", + cameraDeviceName.c_str(), (int) newStatus); + return Void(); + } + + virtual Return torchModeStatusChange( + const hidl_string& cameraDeviceName, + TorchModeStatus newStatus) override { + ALOGI("Torch mode status callback name %s, status %d", + cameraDeviceName.c_str(), (int) newStatus); + return Void(); + } + }; + + struct ProviderCb2_6 + : public ::android::hardware::camera::provider::V2_6::ICameraProviderCallback { + virtual Return cameraDeviceStatusChange(const hidl_string& cameraDeviceName, + CameraDeviceStatus newStatus) override { + ALOGI("camera device status callback name %s, status %d", cameraDeviceName.c_str(), + (int)newStatus); + return Void(); + } + + virtual Return torchModeStatusChange(const hidl_string& cameraDeviceName, + TorchModeStatus newStatus) override { + ALOGI("Torch mode status callback name %s, status %d", cameraDeviceName.c_str(), + (int)newStatus); + return Void(); + } + + virtual Return physicalCameraDeviceStatusChange( + const hidl_string& cameraDeviceName, const hidl_string& physicalCameraDeviceName, + CameraDeviceStatus newStatus) override { + ALOGI("physical camera device status callback name %s, physical camera name %s," + " status %d", + cameraDeviceName.c_str(), physicalCameraDeviceName.c_str(), (int)newStatus); + return Void(); + } + }; + + sp cb = new ProviderCb; + auto status = mProvider->setCallback(cb); + ASSERT_TRUE(status.isOk()); + ASSERT_EQ(Status::OK, status); + status = mProvider->setCallback(nullptr); + ASSERT_TRUE(status.isOk()); + ASSERT_EQ(Status::OK, status); + + if (mProvider2_6.get() != 
nullptr) { + sp cb = new ProviderCb2_6; + auto status = mProvider2_6->setCallback(cb); + ASSERT_TRUE(status.isOk()); + ASSERT_EQ(Status::OK, status); + status = mProvider2_6->setCallback(nullptr); + ASSERT_TRUE(status.isOk()); + ASSERT_EQ(Status::OK, status); + } +} + +// Test if ICameraProvider::getCameraDeviceInterface returns Status::OK and non-null device +TEST_P(CameraHidlTest, getCameraDeviceInterface) { + hidl_vec cameraDeviceNames = getCameraDeviceNames(mProvider); + + for (const auto& name : cameraDeviceNames) { + int deviceVersion = getCameraDeviceVersion(name, mProviderType); + switch (deviceVersion) { + case CAMERA_DEVICE_API_VERSION_3_7: + case CAMERA_DEVICE_API_VERSION_3_6: + case CAMERA_DEVICE_API_VERSION_3_5: + case CAMERA_DEVICE_API_VERSION_3_4: + case CAMERA_DEVICE_API_VERSION_3_3: + case CAMERA_DEVICE_API_VERSION_3_2: { + Return ret; + ret = mProvider->getCameraDeviceInterface_V3_x( + name, [&](auto status, const auto& device3_x) { + ALOGI("getCameraDeviceInterface_V3_x returns status:%d", (int)status); + ASSERT_EQ(Status::OK, status); + ASSERT_NE(device3_x, nullptr); + }); + ASSERT_TRUE(ret.isOk()); + } + break; + case CAMERA_DEVICE_API_VERSION_1_0: { + Return ret; + ret = mProvider->getCameraDeviceInterface_V1_x( + name, [&](auto status, const auto& device1) { + ALOGI("getCameraDeviceInterface_V1_x returns status:%d", (int)status); + ASSERT_EQ(Status::OK, status); + ASSERT_NE(device1, nullptr); + }); + ASSERT_TRUE(ret.isOk()); + } + break; + default: { + ALOGE("%s: Unsupported device version %d", __func__, deviceVersion); + ADD_FAILURE(); + } + break; + } + } +} + +// Verify that the device resource cost can be retrieved and the values are +// correct. +TEST_P(CameraHidlTest, getResourceCost) { + hidl_vec cameraDeviceNames = getCameraDeviceNames(mProvider); + + for (const auto& name : cameraDeviceNames) { + int deviceVersion = getCameraDeviceVersion(name, mProviderType); + switch (deviceVersion) { + case CAMERA_DEVICE_API_VERSION_3_7: + case CAMERA_DEVICE_API_VERSION_3_6: + case CAMERA_DEVICE_API_VERSION_3_5: + case CAMERA_DEVICE_API_VERSION_3_4: + case CAMERA_DEVICE_API_VERSION_3_3: + case CAMERA_DEVICE_API_VERSION_3_2: { + ::android::sp<::android::hardware::camera::device::V3_2::ICameraDevice> device3_x; + ALOGI("getResourceCost: Testing camera device %s", name.c_str()); + Return ret; + ret = mProvider->getCameraDeviceInterface_V3_x( + name, [&](auto status, const auto& device) { + ALOGI("getCameraDeviceInterface_V3_x returns status:%d", (int)status); + ASSERT_EQ(Status::OK, status); + ASSERT_NE(device, nullptr); + device3_x = device; + }); + ASSERT_TRUE(ret.isOk()); + + ret = device3_x->getResourceCost([&](auto status, const auto& resourceCost) { + ALOGI("getResourceCost returns status:%d", (int)status); + ASSERT_EQ(Status::OK, status); + ALOGI(" Resource cost is %d", resourceCost.resourceCost); + ASSERT_LE(resourceCost.resourceCost, 100u); + for (const auto& name : resourceCost.conflictingDevices) { + ALOGI(" Conflicting device: %s", name.c_str()); + } + }); + ASSERT_TRUE(ret.isOk()); + } + break; + case CAMERA_DEVICE_API_VERSION_1_0: { + ::android::sp<::android::hardware::camera::device::V1_0::ICameraDevice> device1; + ALOGI("getResourceCost: Testing camera device %s", name.c_str()); + Return ret; + ret = mProvider->getCameraDeviceInterface_V1_x( + name, [&](auto status, const auto& device) { + ALOGI("getCameraDeviceInterface_V1_x returns status:%d", (int)status); + ASSERT_EQ(Status::OK, status); + ASSERT_NE(device, nullptr); + device1 = device; + }); + 
ASSERT_TRUE(ret.isOk()); + + ret = device1->getResourceCost([&](auto status, const auto& resourceCost) { + ALOGI("getResourceCost returns status:%d", (int)status); + ASSERT_EQ(Status::OK, status); + ALOGI(" Resource cost is %d", resourceCost.resourceCost); + ASSERT_LE(resourceCost.resourceCost, 100u); + for (const auto& name : resourceCost.conflictingDevices) { + ALOGI(" Conflicting device: %s", name.c_str()); + } + }); + ASSERT_TRUE(ret.isOk()); + } + break; + default: { + ALOGE("%s: Unsupported device version %d", __func__, deviceVersion); + ADD_FAILURE(); + } + break; + } + } +} + +// Verify that the static camera info can be retrieved +// successfully. +TEST_P(CameraHidlTest, getCameraInfo) { + hidl_vec cameraDeviceNames = getCameraDeviceNames(mProvider); + + for (const auto& name : cameraDeviceNames) { + if (getCameraDeviceVersion(name, mProviderType) == CAMERA_DEVICE_API_VERSION_1_0) { + ::android::sp<::android::hardware::camera::device::V1_0::ICameraDevice> device1; + ALOGI("getCameraCharacteristics: Testing camera device %s", name.c_str()); + Return ret; + ret = mProvider->getCameraDeviceInterface_V1_x( + name, [&](auto status, const auto& device) { + ALOGI("getCameraDeviceInterface_V1_x returns status:%d", (int)status); + ASSERT_EQ(Status::OK, status); + ASSERT_NE(device, nullptr); + device1 = device; + }); + ASSERT_TRUE(ret.isOk()); + + ret = device1->getCameraInfo([&](auto status, const auto& info) { + ALOGI("getCameraInfo returns status:%d", (int)status); + ASSERT_EQ(Status::OK, status); + switch (info.orientation) { + case 0: + case 90: + case 180: + case 270: + // Expected cases + ALOGI("camera orientation: %d", info.orientation); + break; + default: + FAIL() << "Unexpected camera orientation:" << info.orientation; + } + switch (info.facing) { + case CameraFacing::BACK: + case CameraFacing::FRONT: + case CameraFacing::EXTERNAL: + // Expected cases + ALOGI("camera facing: %d", info.facing); + break; + default: + FAIL() << "Unexpected camera facing:" << static_cast(info.facing); + } + }); + ASSERT_TRUE(ret.isOk()); + } + } +} + +// Check whether preview window can be configured +TEST_P(CameraHidlTest, setPreviewWindow) { + hidl_vec cameraDeviceNames = getCameraDeviceNames(mProvider); + + for (const auto& name : cameraDeviceNames) { + if (getCameraDeviceVersion(name, mProviderType) == CAMERA_DEVICE_API_VERSION_1_0) { + sp<::android::hardware::camera::device::V1_0::ICameraDevice> device1; + openCameraDevice(name, mProvider, &device1 /*out*/); + ASSERT_NE(nullptr, device1.get()); + sp bufferItemConsumer; + sp bufferHandler; + setupPreviewWindow(device1, &bufferItemConsumer /*out*/, &bufferHandler /*out*/); + + Return ret; + ret = device1->close(); + ASSERT_TRUE(ret.isOk()); + } + } +} + +// Verify that setting preview window fails in case device is not open +TEST_P(CameraHidlTest, setPreviewWindowInvalid) { + hidl_vec cameraDeviceNames = getCameraDeviceNames(mProvider); + + for (const auto& name : cameraDeviceNames) { + if (getCameraDeviceVersion(name, mProviderType) == CAMERA_DEVICE_API_VERSION_1_0) { + ::android::sp<::android::hardware::camera::device::V1_0::ICameraDevice> device1; + ALOGI("getCameraCharacteristics: Testing camera device %s", name.c_str()); + Return ret; + ret = mProvider->getCameraDeviceInterface_V1_x( + name, [&](auto status, const auto& device) { + ALOGI("getCameraDeviceInterface_V1_x returns status:%d", (int)status); + ASSERT_EQ(Status::OK, status); + ASSERT_NE(device, nullptr); + device1 = device; + }); + ASSERT_TRUE(ret.isOk()); + + Return returnStatus = 
            device1->setPreviewWindow(nullptr);
+            ASSERT_TRUE(returnStatus.isOk());
+            ASSERT_EQ(Status::OPERATION_NOT_SUPPORTED, returnStatus);
+        }
+    }
+}
+
+// Start and stop preview checking whether it gets enabled in between.
+TEST_P(CameraHidlTest, startStopPreview) {
+    hidl_vec cameraDeviceNames = getCameraDeviceNames(mProvider);
+
+    for (const auto& name : cameraDeviceNames) {
+        if (getCameraDeviceVersion(name, mProviderType) == CAMERA_DEVICE_API_VERSION_1_0) {
+            sp<::android::hardware::camera::device::V1_0::ICameraDevice> device1;
+            openCameraDevice(name, mProvider, &device1 /*out*/);
+            ASSERT_NE(nullptr, device1.get());
+            sp bufferItemConsumer;
+            sp bufferHandler;
+            setupPreviewWindow(device1, &bufferItemConsumer /*out*/, &bufferHandler /*out*/);
+
+            startPreview(device1);
+
+            Return returnBoolStatus = device1->previewEnabled();
+            ASSERT_TRUE(returnBoolStatus.isOk());
+            ASSERT_TRUE(returnBoolStatus);
+
+            stopPreviewAndClose(device1);
+        }
+    }
+}
+
+// Start preview without active preview window. Preview should start as soon
+// as a valid active window gets configured.
+TEST_P(CameraHidlTest, startStopPreviewDelayed) {
+    hidl_vec cameraDeviceNames = getCameraDeviceNames(mProvider);
+
+    for (const auto& name : cameraDeviceNames) {
+        if (getCameraDeviceVersion(name, mProviderType) == CAMERA_DEVICE_API_VERSION_1_0) {
+            sp<::android::hardware::camera::device::V1_0::ICameraDevice> device1;
+            openCameraDevice(name, mProvider, &device1 /*out*/);
+            ASSERT_NE(nullptr, device1.get());
+
+            Return returnStatus = device1->setPreviewWindow(nullptr);
+            ASSERT_TRUE(returnStatus.isOk());
+            ASSERT_EQ(Status::OK, returnStatus);
+
+            startPreview(device1);
+
+            sp bufferItemConsumer;
+            sp bufferHandler;
+            setupPreviewWindow(device1, &bufferItemConsumer /*out*/, &bufferHandler /*out*/);
+
+            // Preview should get enabled now
+            Return returnBoolStatus = device1->previewEnabled();
+            ASSERT_TRUE(returnBoolStatus.isOk());
+            ASSERT_TRUE(returnBoolStatus);
+
+            stopPreviewAndClose(device1);
+        }
+    }
+}
+
+// Verify that image capture behaves as expected along with preview callbacks.
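+// Annotation (illustrative sketch, not part of the original change): the V1.0 callback tests
+// below all follow the same wait pattern - reset the shared state under mLock, issue the HAL
+// call, then block on a condition variable with a timeout so a callback that never arrives
+// fails the test instead of hanging it. A generic form of that loop, with placeholder names
+// only (the real tests use waitForFrameLocked()/mResultCondition), could look like this:
+template <typename State>
+static bool waitForStateChangeLocked(std::unique_lock<std::mutex>& lock,
+                                     std::condition_variable& cond, const State& current,
+                                     const State& expected, int timeoutSec) {
+    // The lock must already be held; wait_until() releases it while blocking so the
+    // HIDL callback thread can update 'current' and signal 'cond'.
+    auto deadline = std::chrono::system_clock::now() + std::chrono::seconds(timeoutSec);
+    while (current != expected) {
+        if (std::cv_status::timeout == cond.wait_until(lock, deadline)) {
+            return false;  // Caller is expected to FAIL()/ASSERT on a timeout.
+        }
+    }
+    return true;
+}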
+TEST_P(CameraHidlTest, takePicture) { + hidl_vec cameraDeviceNames = getCameraDeviceNames(mProvider); + + for (const auto& name : cameraDeviceNames) { + if (getCameraDeviceVersion(name, mProviderType) == CAMERA_DEVICE_API_VERSION_1_0) { + sp<::android::hardware::camera::device::V1_0::ICameraDevice> device1; + openCameraDevice(name, mProvider, &device1 /*out*/); + ASSERT_NE(nullptr, device1.get()); + sp bufferItemConsumer; + sp bufferHandler; + setupPreviewWindow(device1, &bufferItemConsumer /*out*/, &bufferHandler /*out*/); + + { + std::unique_lock l(mLock); + mDataMessageTypeReceived = DataCallbackMsg::RAW_IMAGE_NOTIFY; + } + + enableMsgType((unsigned int)DataCallbackMsg::PREVIEW_FRAME, device1); + startPreview(device1); + + { + std::unique_lock l(mLock); + waitForFrameLocked(DataCallbackMsg::PREVIEW_FRAME, l); + } + + disableMsgType((unsigned int)DataCallbackMsg::PREVIEW_FRAME, device1); + enableMsgType((unsigned int)DataCallbackMsg::COMPRESSED_IMAGE, device1); + + { + std::unique_lock l(mLock); + mDataMessageTypeReceived = DataCallbackMsg::RAW_IMAGE_NOTIFY; + } + + Return returnStatus = device1->takePicture(); + ASSERT_TRUE(returnStatus.isOk()); + ASSERT_EQ(Status::OK, returnStatus); + + { + std::unique_lock l(mLock); + waitForFrameLocked(DataCallbackMsg::COMPRESSED_IMAGE, l); + } + + disableMsgType((unsigned int)DataCallbackMsg::COMPRESSED_IMAGE, device1); + stopPreviewAndClose(device1); + } + } +} + +// Image capture should fail in case preview didn't get enabled first. +TEST_P(CameraHidlTest, takePictureFail) { + hidl_vec cameraDeviceNames = getCameraDeviceNames(mProvider); + + for (const auto& name : cameraDeviceNames) { + if (getCameraDeviceVersion(name, mProviderType) == CAMERA_DEVICE_API_VERSION_1_0) { + sp<::android::hardware::camera::device::V1_0::ICameraDevice> device1; + openCameraDevice(name, mProvider, &device1 /*out*/); + ASSERT_NE(nullptr, device1.get()); + + Return returnStatus = device1->takePicture(); + ASSERT_TRUE(returnStatus.isOk()); + ASSERT_NE(Status::OK, returnStatus); + + Return ret = device1->close(); + ASSERT_TRUE(ret.isOk()); + } + } +} + +// Verify that image capture can be cancelled. +TEST_P(CameraHidlTest, cancelPicture) { + hidl_vec cameraDeviceNames = getCameraDeviceNames(mProvider); + + for (const auto& name : cameraDeviceNames) { + if (getCameraDeviceVersion(name, mProviderType) == CAMERA_DEVICE_API_VERSION_1_0) { + sp<::android::hardware::camera::device::V1_0::ICameraDevice> device1; + openCameraDevice(name, mProvider, &device1 /*out*/); + ASSERT_NE(nullptr, device1.get()); + sp bufferItemConsumer; + sp bufferHandler; + setupPreviewWindow(device1, &bufferItemConsumer /*out*/, &bufferHandler /*out*/); + startPreview(device1); + + Return returnStatus = device1->takePicture(); + ASSERT_TRUE(returnStatus.isOk()); + ASSERT_EQ(Status::OK, returnStatus); + + returnStatus = device1->cancelPicture(); + ASSERT_TRUE(returnStatus.isOk()); + ASSERT_EQ(Status::OK, returnStatus); + + stopPreviewAndClose(device1); + } + } +} + +// Image capture cancel is a no-op when image capture is not running. 
+TEST_P(CameraHidlTest, cancelPictureNOP) { + hidl_vec cameraDeviceNames = getCameraDeviceNames(mProvider); + + for (const auto& name : cameraDeviceNames) { + if (getCameraDeviceVersion(name, mProviderType) == CAMERA_DEVICE_API_VERSION_1_0) { + sp<::android::hardware::camera::device::V1_0::ICameraDevice> device1; + openCameraDevice(name, mProvider, &device1 /*out*/); + ASSERT_NE(nullptr, device1.get()); + sp bufferItemConsumer; + sp bufferHandler; + setupPreviewWindow(device1, &bufferItemConsumer /*out*/, &bufferHandler /*out*/); + startPreview(device1); + + Return returnStatus = device1->cancelPicture(); + ASSERT_TRUE(returnStatus.isOk()); + ASSERT_EQ(Status::OK, returnStatus); + + stopPreviewAndClose(device1); + } + } +} + +// Test basic video recording. +TEST_P(CameraHidlTest, startStopRecording) { + hidl_vec cameraDeviceNames = getCameraDeviceNames(mProvider); + + for (const auto& name : cameraDeviceNames) { + if (getCameraDeviceVersion(name, mProviderType) == CAMERA_DEVICE_API_VERSION_1_0) { + sp<::android::hardware::camera::device::V1_0::ICameraDevice> device1; + openCameraDevice(name, mProvider, &device1 /*out*/); + ASSERT_NE(nullptr, device1.get()); + sp bufferItemConsumer; + sp bufferHandler; + setupPreviewWindow(device1, &bufferItemConsumer /*out*/, &bufferHandler /*out*/); + + { + std::unique_lock l(mLock); + mDataMessageTypeReceived = DataCallbackMsg::RAW_IMAGE_NOTIFY; + } + + enableMsgType((unsigned int)DataCallbackMsg::PREVIEW_FRAME, device1); + startPreview(device1); + + { + std::unique_lock l(mLock); + waitForFrameLocked(DataCallbackMsg::PREVIEW_FRAME, l); + mDataMessageTypeReceived = DataCallbackMsg::RAW_IMAGE_NOTIFY; + mVideoBufferIndex = UINT32_MAX; + } + + disableMsgType((unsigned int)DataCallbackMsg::PREVIEW_FRAME, device1); + + bool videoMetaEnabled = false; + Return returnStatus = device1->storeMetaDataInBuffers(true); + ASSERT_TRUE(returnStatus.isOk()); + // It is allowed for devices to not support this feature + ASSERT_TRUE((Status::OK == returnStatus) || + (Status::OPERATION_NOT_SUPPORTED == returnStatus)); + if (Status::OK == returnStatus) { + videoMetaEnabled = true; + } + + enableMsgType((unsigned int)DataCallbackMsg::VIDEO_FRAME, device1); + Return returnBoolStatus = device1->recordingEnabled(); + ASSERT_TRUE(returnBoolStatus.isOk()); + ASSERT_FALSE(returnBoolStatus); + + returnStatus = device1->startRecording(); + ASSERT_TRUE(returnStatus.isOk()); + ASSERT_EQ(Status::OK, returnStatus); + + { + std::unique_lock l(mLock); + waitForFrameLocked(DataCallbackMsg::VIDEO_FRAME, l); + ASSERT_NE(UINT32_MAX, mVideoBufferIndex); + disableMsgType((unsigned int)DataCallbackMsg::VIDEO_FRAME, device1); + } + + returnBoolStatus = device1->recordingEnabled(); + ASSERT_TRUE(returnBoolStatus.isOk()); + ASSERT_TRUE(returnBoolStatus); + + Return ret; + if (videoMetaEnabled) { + ret = device1->releaseRecordingFrameHandle(mVideoData, mVideoBufferIndex, + mVideoNativeHandle); + ASSERT_TRUE(ret.isOk()); + } else { + ret = device1->releaseRecordingFrame(mVideoData, mVideoBufferIndex); + ASSERT_TRUE(ret.isOk()); + } + + ret = device1->stopRecording(); + ASSERT_TRUE(ret.isOk()); + + stopPreviewAndClose(device1); + } + } +} + +// It shouldn't be possible to start recording without enabling preview first. 
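+// Annotation on the recording test above (not part of the original change): startStopRecording
+// exercises both frame-release paths - when storeMetaDataInBuffers(true) is accepted the frame
+// is returned through releaseRecordingFrameHandle(), otherwise through releaseRecordingFrame().
+// A HAL is allowed to answer OPERATION_NOT_SUPPORTED to the metadata mode, which is why the
+// test tolerates either status there.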
+TEST_P(CameraHidlTest, startRecordingFail) { + hidl_vec cameraDeviceNames = getCameraDeviceNames(mProvider); + + for (const auto& name : cameraDeviceNames) { + if (getCameraDeviceVersion(name, mProviderType) == CAMERA_DEVICE_API_VERSION_1_0) { + sp<::android::hardware::camera::device::V1_0::ICameraDevice> device1; + openCameraDevice(name, mProvider, &device1 /*out*/); + ASSERT_NE(nullptr, device1.get()); + + Return returnBoolStatus = device1->recordingEnabled(); + ASSERT_TRUE(returnBoolStatus.isOk()); + ASSERT_FALSE(returnBoolStatus); + + Return returnStatus = device1->startRecording(); + ASSERT_TRUE(returnStatus.isOk()); + ASSERT_NE(Status::OK, returnStatus); + + Return ret = device1->close(); + ASSERT_TRUE(ret.isOk()); + } + } +} + +// Check autofocus support if available. +TEST_P(CameraHidlTest, autoFocus) { + hidl_vec cameraDeviceNames = getCameraDeviceNames(mProvider); + std::vector focusModes = {CameraParameters::FOCUS_MODE_AUTO, + CameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE, + CameraParameters::FOCUS_MODE_CONTINUOUS_VIDEO}; + + for (const auto& name : cameraDeviceNames) { + if (getCameraDeviceVersion(name, mProviderType) == CAMERA_DEVICE_API_VERSION_1_0) { + sp<::android::hardware::camera::device::V1_0::ICameraDevice> device1; + openCameraDevice(name, mProvider, &device1 /*out*/); + ASSERT_NE(nullptr, device1.get()); + + CameraParameters cameraParams; + getParameters(device1, &cameraParams /*out*/); + + if (Status::OK != + isAutoFocusModeAvailable(cameraParams, CameraParameters::FOCUS_MODE_AUTO)) { + Return ret = device1->close(); + ASSERT_TRUE(ret.isOk()); + continue; + } + + sp bufferItemConsumer; + sp bufferHandler; + setupPreviewWindow(device1, &bufferItemConsumer /*out*/, &bufferHandler /*out*/); + startPreview(device1); + enableMsgType((unsigned int)NotifyCallbackMsg::FOCUS, device1); + + for (auto& iter : focusModes) { + if (Status::OK != isAutoFocusModeAvailable(cameraParams, iter)) { + continue; + } + + cameraParams.set(CameraParameters::KEY_FOCUS_MODE, iter); + setParameters(device1, cameraParams); + { + std::unique_lock l(mLock); + mNotifyMessage = NotifyCallbackMsg::ERROR; + } + + Return returnStatus = device1->autoFocus(); + ASSERT_TRUE(returnStatus.isOk()); + ASSERT_EQ(Status::OK, returnStatus); + + { + std::unique_lock l(mLock); + while (NotifyCallbackMsg::FOCUS != mNotifyMessage) { + auto timeout = std::chrono::system_clock::now() + + std::chrono::seconds(kAutoFocusTimeoutSec); + ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout)); + } + } + } + + disableMsgType((unsigned int)NotifyCallbackMsg::FOCUS, device1); + stopPreviewAndClose(device1); + } + } +} + +// In case autofocus is supported verify that it can be cancelled. +TEST_P(CameraHidlTest, cancelAutoFocus) { + hidl_vec cameraDeviceNames = getCameraDeviceNames(mProvider); + + for (const auto& name : cameraDeviceNames) { + if (getCameraDeviceVersion(name, mProviderType) == CAMERA_DEVICE_API_VERSION_1_0) { + sp<::android::hardware::camera::device::V1_0::ICameraDevice> device1; + openCameraDevice(name, mProvider, &device1 /*out*/); + ASSERT_NE(nullptr, device1.get()); + + CameraParameters cameraParams; + getParameters(device1, &cameraParams /*out*/); + + if (Status::OK != + isAutoFocusModeAvailable(cameraParams, CameraParameters::FOCUS_MODE_AUTO)) { + Return ret = device1->close(); + ASSERT_TRUE(ret.isOk()); + continue; + } + + // It should be fine to call before preview starts. 
+ ASSERT_EQ(Status::OK, device1->cancelAutoFocus()); + + sp bufferItemConsumer; + sp bufferHandler; + setupPreviewWindow(device1, &bufferItemConsumer /*out*/, &bufferHandler /*out*/); + startPreview(device1); + + // It should be fine to call after preview starts too. + Return returnStatus = device1->cancelAutoFocus(); + ASSERT_TRUE(returnStatus.isOk()); + ASSERT_EQ(Status::OK, returnStatus); + + returnStatus = device1->autoFocus(); + ASSERT_TRUE(returnStatus.isOk()); + ASSERT_EQ(Status::OK, returnStatus); + + returnStatus = device1->cancelAutoFocus(); + ASSERT_TRUE(returnStatus.isOk()); + ASSERT_EQ(Status::OK, returnStatus); + + stopPreviewAndClose(device1); + } + } +} + +// Check whether face detection is available and try to enable&disable. +TEST_P(CameraHidlTest, sendCommandFaceDetection) { + hidl_vec cameraDeviceNames = getCameraDeviceNames(mProvider); + + for (const auto& name : cameraDeviceNames) { + if (getCameraDeviceVersion(name, mProviderType) == CAMERA_DEVICE_API_VERSION_1_0) { + sp<::android::hardware::camera::device::V1_0::ICameraDevice> device1; + openCameraDevice(name, mProvider, &device1 /*out*/); + ASSERT_NE(nullptr, device1.get()); + + CameraParameters cameraParams; + getParameters(device1, &cameraParams /*out*/); + + int32_t hwFaces = cameraParams.getInt(CameraParameters::KEY_MAX_NUM_DETECTED_FACES_HW); + int32_t swFaces = cameraParams.getInt(CameraParameters::KEY_MAX_NUM_DETECTED_FACES_SW); + if ((0 >= hwFaces) && (0 >= swFaces)) { + Return ret = device1->close(); + ASSERT_TRUE(ret.isOk()); + continue; + } + + sp bufferItemConsumer; + sp bufferHandler; + setupPreviewWindow(device1, &bufferItemConsumer /*out*/, &bufferHandler /*out*/); + startPreview(device1); + + if (0 < hwFaces) { + Return returnStatus = device1->sendCommand( + CommandType::START_FACE_DETECTION, CAMERA_FACE_DETECTION_HW, 0); + ASSERT_TRUE(returnStatus.isOk()); + ASSERT_EQ(Status::OK, returnStatus); + // TODO(epeev) : Enable and check for face notifications + returnStatus = device1->sendCommand(CommandType::STOP_FACE_DETECTION, + CAMERA_FACE_DETECTION_HW, 0); + ASSERT_TRUE(returnStatus.isOk()); + ASSERT_EQ(Status::OK, returnStatus); + } + + if (0 < swFaces) { + Return returnStatus = device1->sendCommand( + CommandType::START_FACE_DETECTION, CAMERA_FACE_DETECTION_SW, 0); + ASSERT_TRUE(returnStatus.isOk()); + ASSERT_EQ(Status::OK, returnStatus); + // TODO(epeev) : Enable and check for face notifications + returnStatus = device1->sendCommand(CommandType::STOP_FACE_DETECTION, + CAMERA_FACE_DETECTION_SW, 0); + ASSERT_TRUE(returnStatus.isOk()); + ASSERT_EQ(Status::OK, returnStatus); + } + + stopPreviewAndClose(device1); + } + } +} + +// Check whether smooth zoom is available and try to enable&disable. +TEST_P(CameraHidlTest, sendCommandSmoothZoom) { + hidl_vec cameraDeviceNames = getCameraDeviceNames(mProvider); + + for (const auto& name : cameraDeviceNames) { + if (getCameraDeviceVersion(name, mProviderType) == CAMERA_DEVICE_API_VERSION_1_0) { + sp<::android::hardware::camera::device::V1_0::ICameraDevice> device1; + openCameraDevice(name, mProvider, &device1 /*out*/); + ASSERT_NE(nullptr, device1.get()); + + CameraParameters cameraParams; + getParameters(device1, &cameraParams /*out*/); + + const char* smoothZoomStr = + cameraParams.get(CameraParameters::KEY_SMOOTH_ZOOM_SUPPORTED); + bool smoothZoomSupported = + ((nullptr != smoothZoomStr) && (strcmp(smoothZoomStr, CameraParameters::TRUE) == 0)) + ? 
true + : false; + if (!smoothZoomSupported) { + Return ret = device1->close(); + ASSERT_TRUE(ret.isOk()); + continue; + } + + int32_t maxZoom = cameraParams.getInt(CameraParameters::KEY_MAX_ZOOM); + ASSERT_TRUE(0 < maxZoom); + + sp bufferItemConsumer; + sp bufferHandler; + setupPreviewWindow(device1, &bufferItemConsumer /*out*/, &bufferHandler /*out*/); + startPreview(device1); + setParameters(device1, cameraParams); + + Return returnStatus = + device1->sendCommand(CommandType::START_SMOOTH_ZOOM, maxZoom, 0); + ASSERT_TRUE(returnStatus.isOk()); + ASSERT_EQ(Status::OK, returnStatus); + // TODO(epeev) : Enable and check for face notifications + returnStatus = device1->sendCommand(CommandType::STOP_SMOOTH_ZOOM, 0, 0); + ASSERT_TRUE(returnStatus.isOk()); + ASSERT_EQ(Status::OK, returnStatus); + + stopPreviewAndClose(device1); + } + } +} + +// Basic correctness tests related to camera parameters. +TEST_P(CameraHidlTest, getSetParameters) { + hidl_vec cameraDeviceNames = getCameraDeviceNames(mProvider); + + for (const auto& name : cameraDeviceNames) { + if (getCameraDeviceVersion(name, mProviderType) == CAMERA_DEVICE_API_VERSION_1_0) { + sp<::android::hardware::camera::device::V1_0::ICameraDevice> device1; + openCameraDevice(name, mProvider, &device1 /*out*/); + ASSERT_NE(nullptr, device1.get()); + + CameraParameters cameraParams; + getParameters(device1, &cameraParams /*out*/); + + int32_t width, height; + cameraParams.getPictureSize(&width, &height); + ASSERT_TRUE((0 < width) && (0 < height)); + cameraParams.getPreviewSize(&width, &height); + ASSERT_TRUE((0 < width) && (0 < height)); + int32_t minFps, maxFps; + cameraParams.getPreviewFpsRange(&minFps, &maxFps); + ASSERT_TRUE((0 < minFps) && (0 < maxFps)); + ASSERT_NE(nullptr, cameraParams.getPreviewFormat()); + ASSERT_NE(nullptr, cameraParams.getPictureFormat()); + ASSERT_TRUE( + strcmp(CameraParameters::PIXEL_FORMAT_JPEG, cameraParams.getPictureFormat()) == 0); + + const char* flashMode = cameraParams.get(CameraParameters::KEY_FLASH_MODE); + ASSERT_TRUE((nullptr == flashMode) || + (strcmp(CameraParameters::FLASH_MODE_OFF, flashMode) == 0)); + + const char* wbMode = cameraParams.get(CameraParameters::KEY_WHITE_BALANCE); + ASSERT_TRUE((nullptr == wbMode) || + (strcmp(CameraParameters::WHITE_BALANCE_AUTO, wbMode) == 0)); + + const char* effect = cameraParams.get(CameraParameters::KEY_EFFECT); + ASSERT_TRUE((nullptr == effect) || + (strcmp(CameraParameters::EFFECT_NONE, effect) == 0)); + + ::android::Vector previewSizes; + cameraParams.getSupportedPreviewSizes(previewSizes); + ASSERT_FALSE(previewSizes.empty()); + ::android::Vector pictureSizes; + cameraParams.getSupportedPictureSizes(pictureSizes); + ASSERT_FALSE(pictureSizes.empty()); + const char* previewFormats = + cameraParams.get(CameraParameters::KEY_SUPPORTED_PREVIEW_FORMATS); + ASSERT_NE(nullptr, previewFormats); + ::android::String8 previewFormatsString(previewFormats); + ASSERT_TRUE(previewFormatsString.contains(CameraParameters::PIXEL_FORMAT_YUV420SP)); + ASSERT_NE(nullptr, cameraParams.get(CameraParameters::KEY_SUPPORTED_PICTURE_FORMATS)); + ASSERT_NE(nullptr, + cameraParams.get(CameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATES)); + const char* focusModes = cameraParams.get(CameraParameters::KEY_SUPPORTED_FOCUS_MODES); + ASSERT_NE(nullptr, focusModes); + ::android::String8 focusModesString(focusModes); + const char* focusMode = cameraParams.get(CameraParameters::KEY_FOCUS_MODE); + ASSERT_NE(nullptr, focusMode); + // Auto focus mode should be default + if 
(focusModesString.contains(CameraParameters::FOCUS_MODE_AUTO)) { + ASSERT_TRUE(strcmp(CameraParameters::FOCUS_MODE_AUTO, focusMode) == 0); + } + ASSERT_TRUE(0 < cameraParams.getInt(CameraParameters::KEY_FOCAL_LENGTH)); + int32_t horizontalViewAngle = + cameraParams.getInt(CameraParameters::KEY_HORIZONTAL_VIEW_ANGLE); + ASSERT_TRUE((0 < horizontalViewAngle) && (360 >= horizontalViewAngle)); + int32_t verticalViewAngle = + cameraParams.getInt(CameraParameters::KEY_VERTICAL_VIEW_ANGLE); + ASSERT_TRUE((0 < verticalViewAngle) && (360 >= verticalViewAngle)); + int32_t jpegQuality = cameraParams.getInt(CameraParameters::KEY_JPEG_QUALITY); + ASSERT_TRUE((1 <= jpegQuality) && (100 >= jpegQuality)); + int32_t jpegThumbQuality = + cameraParams.getInt(CameraParameters::KEY_JPEG_THUMBNAIL_QUALITY); + ASSERT_TRUE((1 <= jpegThumbQuality) && (100 >= jpegThumbQuality)); + + cameraParams.setPictureSize(pictureSizes[0].width, pictureSizes[0].height); + cameraParams.setPreviewSize(previewSizes[0].width, previewSizes[0].height); + + setParameters(device1, cameraParams); + getParameters(device1, &cameraParams /*out*/); + + cameraParams.getPictureSize(&width, &height); + ASSERT_TRUE((pictureSizes[0].width == width) && (pictureSizes[0].height == height)); + cameraParams.getPreviewSize(&width, &height); + ASSERT_TRUE((previewSizes[0].width == width) && (previewSizes[0].height == height)); + + Return ret = device1->close(); + ASSERT_TRUE(ret.isOk()); + } + } +} + +TEST_P(CameraHidlTest, systemCameraTest) { + hidl_vec cameraDeviceNames = getCameraDeviceNames(mProvider); + std::map> hiddenPhysicalIdToLogicalMap; + for (const auto& name : cameraDeviceNames) { + int deviceVersion = getCameraDeviceVersion(name, mProviderType); + switch (deviceVersion) { + case CAMERA_DEVICE_API_VERSION_3_7: + case CAMERA_DEVICE_API_VERSION_3_6: + case CAMERA_DEVICE_API_VERSION_3_5: + case CAMERA_DEVICE_API_VERSION_3_4: + case CAMERA_DEVICE_API_VERSION_3_3: + case CAMERA_DEVICE_API_VERSION_3_2: { + ::android::sp<::android::hardware::camera::device::V3_2::ICameraDevice> device3_x; + ALOGI("getCameraCharacteristics: Testing camera device %s", name.c_str()); + Return ret; + ret = mProvider->getCameraDeviceInterface_V3_x( + name, [&](auto status, const auto& device) { + ALOGI("getCameraDeviceInterface_V3_x returns status:%d", (int)status); + ASSERT_EQ(Status::OK, status); + ASSERT_NE(device, nullptr); + device3_x = device; + }); + ASSERT_TRUE(ret.isOk()); + + ret = device3_x->getCameraCharacteristics([&](auto status, const auto& chars) { + ASSERT_EQ(status, Status::OK); + const camera_metadata_t* staticMeta = + reinterpret_cast(chars.data()); + ASSERT_NE(staticMeta, nullptr); + Status rc = isLogicalMultiCamera(staticMeta); + ASSERT_TRUE(Status::OK == rc || Status::METHOD_NOT_SUPPORTED == rc); + if (Status::METHOD_NOT_SUPPORTED == rc) { + return; + } + std::unordered_set physicalIds; + ASSERT_EQ(Status::OK, getPhysicalCameraIds(staticMeta, &physicalIds)); + SystemCameraKind systemCameraKind = SystemCameraKind::PUBLIC; + rc = getSystemCameraKind(staticMeta, &systemCameraKind); + ASSERT_EQ(rc, Status::OK); + for (auto physicalId : physicalIds) { + bool isPublicId = false; + for (auto& deviceName : cameraDeviceNames) { + std::string publicVersion, publicId; + ASSERT_TRUE(::matchDeviceName(deviceName, mProviderType, &publicVersion, + &publicId)); + if (physicalId == publicId) { + isPublicId = true; + break; + } + } + // For hidden physical cameras, collect their associated logical cameras + // and store the system camera kind. 
+ if (!isPublicId) { + auto it = hiddenPhysicalIdToLogicalMap.find(physicalId); + if (it == hiddenPhysicalIdToLogicalMap.end()) { + hiddenPhysicalIdToLogicalMap.insert(std::make_pair( + physicalId, std::list(systemCameraKind))); + } else { + it->second.push_back(systemCameraKind); + } + } + } + }); + ASSERT_TRUE(ret.isOk()); + } break; + case CAMERA_DEVICE_API_VERSION_1_0: { + // Not applicable + } break; + default: { + ALOGE("%s: Unsupported device version %d", __func__, deviceVersion); + ADD_FAILURE(); + } break; + } + } + + // Check that the system camera kind of the logical cameras associated with + // each hidden physical camera is the same. + for (const auto& it : hiddenPhysicalIdToLogicalMap) { + SystemCameraKind neededSystemCameraKind = it.second.front(); + for (auto foundSystemCamera : it.second) { + ASSERT_EQ(neededSystemCameraKind, foundSystemCamera); + } + } +} + +// Verify that the static camera characteristics can be retrieved +// successfully. +TEST_P(CameraHidlTest, getCameraCharacteristics) { + hidl_vec cameraDeviceNames = getCameraDeviceNames(mProvider); + + for (const auto& name : cameraDeviceNames) { + int deviceVersion = getCameraDeviceVersion(name, mProviderType); + switch (deviceVersion) { + case CAMERA_DEVICE_API_VERSION_3_7: + case CAMERA_DEVICE_API_VERSION_3_6: + case CAMERA_DEVICE_API_VERSION_3_5: + case CAMERA_DEVICE_API_VERSION_3_4: + case CAMERA_DEVICE_API_VERSION_3_3: + case CAMERA_DEVICE_API_VERSION_3_2: { + ::android::sp<::android::hardware::camera::device::V3_2::ICameraDevice> device3_x; + ALOGI("getCameraCharacteristics: Testing camera device %s", name.c_str()); + Return ret; + ret = mProvider->getCameraDeviceInterface_V3_x( + name, [&](auto status, const auto& device) { + ALOGI("getCameraDeviceInterface_V3_x returns status:%d", (int)status); + ASSERT_EQ(Status::OK, status); + ASSERT_NE(device, nullptr); + device3_x = device; + }); + ASSERT_TRUE(ret.isOk()); + + ret = device3_x->getCameraCharacteristics([&](auto status, const auto& chars) { + verifyCameraCharacteristics(status, chars); + verifyMonochromeCharacteristics(chars, deviceVersion); + verifyRecommendedConfigs(chars); + verifyLogicalOrUltraHighResCameraMetadata(name, device3_x, chars, deviceVersion, + cameraDeviceNames); + }); + ASSERT_TRUE(ret.isOk()); + + //getPhysicalCameraCharacteristics will fail for publicly + //advertised camera IDs. + if (deviceVersion >= CAMERA_DEVICE_API_VERSION_3_5) { + auto castResult = device::V3_5::ICameraDevice::castFrom(device3_x); + ASSERT_TRUE(castResult.isOk()); + ::android::sp<::android::hardware::camera::device::V3_5::ICameraDevice> + device3_5 = castResult; + ASSERT_NE(device3_5, nullptr); + + std::string version, cameraId; + ASSERT_TRUE(::matchDeviceName(name, mProviderType, &version, &cameraId)); + Return ret = device3_5->getPhysicalCameraCharacteristics(cameraId, + [&](auto status, const auto& chars) { + ASSERT_TRUE(Status::ILLEGAL_ARGUMENT == status); + ASSERT_EQ(0, chars.size()); + }); + ASSERT_TRUE(ret.isOk()); + } + } + break; + case CAMERA_DEVICE_API_VERSION_1_0: { + //Not applicable + } + break; + default: { + ALOGE("%s: Unsupported device version %d", __func__, deviceVersion); + ADD_FAILURE(); + } + break; + } + } +} + +//In case it is supported verify that torch can be enabled. +//Check for corresponding toch callbacks as well. 
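+// Annotation (not part of the original change): two error levels are checked separately in all
+// of these tests - Return<>::isOk() only confirms that the HIDL transport call itself went
+// through, while the HAL-level result is the Status value (returned directly or delivered via
+// the synchronous result lambda). The torch test below additionally waits on mTorchCond for the
+// asynchronous torch-status notification that the registered TorchProviderCb stores into
+// mTorchStatus before asserting AVAILABLE_ON/AVAILABLE_OFF.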
+TEST_P(CameraHidlTest, setTorchMode) { + hidl_vec cameraDeviceNames = getCameraDeviceNames(mProvider); + bool torchControlSupported = false; + Return ret; + + ret = mProvider->isSetTorchModeSupported([&](auto status, bool support) { + ALOGI("isSetTorchModeSupported returns status:%d supported:%d", (int)status, support); + ASSERT_EQ(Status::OK, status); + torchControlSupported = support; + }); + + sp cb = new TorchProviderCb(this); + Return returnStatus = mProvider->setCallback(cb); + ASSERT_TRUE(returnStatus.isOk()); + ASSERT_EQ(Status::OK, returnStatus); + + for (const auto& name : cameraDeviceNames) { + int deviceVersion = getCameraDeviceVersion(name, mProviderType); + switch (deviceVersion) { + case CAMERA_DEVICE_API_VERSION_3_7: + case CAMERA_DEVICE_API_VERSION_3_6: + case CAMERA_DEVICE_API_VERSION_3_5: + case CAMERA_DEVICE_API_VERSION_3_4: + case CAMERA_DEVICE_API_VERSION_3_3: + case CAMERA_DEVICE_API_VERSION_3_2: { + ::android::sp<::android::hardware::camera::device::V3_2::ICameraDevice> device3_x; + ALOGI("setTorchMode: Testing camera device %s", name.c_str()); + ret = mProvider->getCameraDeviceInterface_V3_x( + name, [&](auto status, const auto& device) { + ALOGI("getCameraDeviceInterface_V3_x returns status:%d", (int)status); + ASSERT_EQ(Status::OK, status); + ASSERT_NE(device, nullptr); + device3_x = device; + }); + ASSERT_TRUE(ret.isOk()); + + mTorchStatus = TorchModeStatus::NOT_AVAILABLE; + returnStatus = device3_x->setTorchMode(TorchMode::ON); + ASSERT_TRUE(returnStatus.isOk()); + if (!torchControlSupported) { + ASSERT_EQ(Status::METHOD_NOT_SUPPORTED, returnStatus); + } else { + ASSERT_TRUE(returnStatus == Status::OK || + returnStatus == Status::OPERATION_NOT_SUPPORTED); + if (returnStatus == Status::OK) { + { + std::unique_lock l(mTorchLock); + while (TorchModeStatus::NOT_AVAILABLE == mTorchStatus) { + auto timeout = std::chrono::system_clock::now() + + std::chrono::seconds(kTorchTimeoutSec); + ASSERT_NE(std::cv_status::timeout, mTorchCond.wait_until(l, timeout)); + } + ASSERT_EQ(TorchModeStatus::AVAILABLE_ON, mTorchStatus); + mTorchStatus = TorchModeStatus::NOT_AVAILABLE; + } + + returnStatus = device3_x->setTorchMode(TorchMode::OFF); + ASSERT_TRUE(returnStatus.isOk()); + ASSERT_EQ(Status::OK, returnStatus); + + { + std::unique_lock l(mTorchLock); + while (TorchModeStatus::NOT_AVAILABLE == mTorchStatus) { + auto timeout = std::chrono::system_clock::now() + + std::chrono::seconds(kTorchTimeoutSec); + ASSERT_NE(std::cv_status::timeout, mTorchCond.wait_until(l, timeout)); + } + ASSERT_EQ(TorchModeStatus::AVAILABLE_OFF, mTorchStatus); + } + } + } + } + break; + case CAMERA_DEVICE_API_VERSION_1_0: { + ::android::sp<::android::hardware::camera::device::V1_0::ICameraDevice> device1; + ALOGI("dumpState: Testing camera device %s", name.c_str()); + ret = mProvider->getCameraDeviceInterface_V1_x( + name, [&](auto status, const auto& device) { + ALOGI("getCameraDeviceInterface_V1_x returns status:%d", (int)status); + ASSERT_EQ(Status::OK, status); + ASSERT_NE(device, nullptr); + device1 = device; + }); + ASSERT_TRUE(ret.isOk()); + + mTorchStatus = TorchModeStatus::NOT_AVAILABLE; + returnStatus = device1->setTorchMode(TorchMode::ON); + ASSERT_TRUE(returnStatus.isOk()); + if (!torchControlSupported) { + ASSERT_EQ(Status::METHOD_NOT_SUPPORTED, returnStatus); + } else { + ASSERT_TRUE(returnStatus == Status::OK || + returnStatus == Status::OPERATION_NOT_SUPPORTED); + if (returnStatus == Status::OK) { + { + std::unique_lock l(mTorchLock); + while (TorchModeStatus::NOT_AVAILABLE == 
mTorchStatus) { + auto timeout = std::chrono::system_clock::now() + + std::chrono::seconds(kTorchTimeoutSec); + ASSERT_NE(std::cv_status::timeout, mTorchCond.wait_until(l, + timeout)); + } + ASSERT_EQ(TorchModeStatus::AVAILABLE_ON, mTorchStatus); + mTorchStatus = TorchModeStatus::NOT_AVAILABLE; + } + + returnStatus = device1->setTorchMode(TorchMode::OFF); + ASSERT_TRUE(returnStatus.isOk()); + ASSERT_EQ(Status::OK, returnStatus); + + { + std::unique_lock l(mTorchLock); + while (TorchModeStatus::NOT_AVAILABLE == mTorchStatus) { + auto timeout = std::chrono::system_clock::now() + + std::chrono::seconds(kTorchTimeoutSec); + ASSERT_NE(std::cv_status::timeout, mTorchCond.wait_until(l, + timeout)); + } + ASSERT_EQ(TorchModeStatus::AVAILABLE_OFF, mTorchStatus); + } + } + } + ret = device1->close(); + ASSERT_TRUE(ret.isOk()); + } + break; + default: { + ALOGE("%s: Unsupported device version %d", __func__, deviceVersion); + ADD_FAILURE(); + } + break; + } + } + + returnStatus = mProvider->setCallback(nullptr); + ASSERT_TRUE(returnStatus.isOk()); + ASSERT_EQ(Status::OK, returnStatus); +} + +// Check dump functionality. +TEST_P(CameraHidlTest, dumpState) { + hidl_vec cameraDeviceNames = getCameraDeviceNames(mProvider); + Return ret; + + for (const auto& name : cameraDeviceNames) { + int deviceVersion = getCameraDeviceVersion(name, mProviderType); + switch (deviceVersion) { + case CAMERA_DEVICE_API_VERSION_3_7: + case CAMERA_DEVICE_API_VERSION_3_6: + case CAMERA_DEVICE_API_VERSION_3_5: + case CAMERA_DEVICE_API_VERSION_3_4: + case CAMERA_DEVICE_API_VERSION_3_3: + case CAMERA_DEVICE_API_VERSION_3_2: { + ::android::sp device3_x; + ALOGI("dumpState: Testing camera device %s", name.c_str()); + ret = mProvider->getCameraDeviceInterface_V3_x( + name, [&](auto status, const auto& device) { + ALOGI("getCameraDeviceInterface_V3_x returns status:%d", (int)status); + ASSERT_EQ(Status::OK, status); + ASSERT_NE(device, nullptr); + device3_x = device; + }); + ASSERT_TRUE(ret.isOk()); + + native_handle_t* raw_handle = native_handle_create(1, 0); + raw_handle->data[0] = open(kDumpOutput, O_RDWR); + ASSERT_GE(raw_handle->data[0], 0); + hidl_handle handle = raw_handle; + ret = device3_x->dumpState(handle); + ASSERT_TRUE(ret.isOk()); + close(raw_handle->data[0]); + native_handle_delete(raw_handle); + } + break; + case CAMERA_DEVICE_API_VERSION_1_0: { + ::android::sp<::android::hardware::camera::device::V1_0::ICameraDevice> device1; + ALOGI("dumpState: Testing camera device %s", name.c_str()); + ret = mProvider->getCameraDeviceInterface_V1_x( + name, [&](auto status, const auto& device) { + ALOGI("getCameraDeviceInterface_V1_x returns status:%d", (int)status); + ASSERT_EQ(Status::OK, status); + ASSERT_NE(device, nullptr); + device1 = device; + }); + ASSERT_TRUE(ret.isOk()); + + native_handle_t* raw_handle = native_handle_create(1, 0); + raw_handle->data[0] = open(kDumpOutput, O_RDWR); + ASSERT_GE(raw_handle->data[0], 0); + hidl_handle handle = raw_handle; + Return returnStatus = device1->dumpState(handle); + ASSERT_TRUE(returnStatus.isOk()); + ASSERT_EQ(Status::OK, returnStatus); + close(raw_handle->data[0]); + native_handle_delete(raw_handle); + } + break; + default: { + ALOGE("%s: Unsupported device version %d", __func__, deviceVersion); + ADD_FAILURE(); + } + break; + } + } +} + +// Open, dumpStates, then close +TEST_P(CameraHidlTest, openClose) { + hidl_vec cameraDeviceNames = getCameraDeviceNames(mProvider); + Return ret; + + for (const auto& name : cameraDeviceNames) { + int deviceVersion = 
getCameraDeviceVersion(name, mProviderType); + switch (deviceVersion) { + case CAMERA_DEVICE_API_VERSION_3_7: + case CAMERA_DEVICE_API_VERSION_3_6: + case CAMERA_DEVICE_API_VERSION_3_5: + case CAMERA_DEVICE_API_VERSION_3_4: + case CAMERA_DEVICE_API_VERSION_3_3: + case CAMERA_DEVICE_API_VERSION_3_2: { + ::android::sp<::android::hardware::camera::device::V3_2::ICameraDevice> device3_x; + ALOGI("openClose: Testing camera device %s", name.c_str()); + ret = mProvider->getCameraDeviceInterface_V3_x( + name, [&](auto status, const auto& device) { + ALOGI("getCameraDeviceInterface_V3_x returns status:%d", (int)status); + ASSERT_EQ(Status::OK, status); + ASSERT_NE(device, nullptr); + device3_x = device; + }); + ASSERT_TRUE(ret.isOk()); + + sp cb = new EmptyDeviceCb; + sp session; + ret = device3_x->open(cb, [&](auto status, const auto& newSession) { + ALOGI("device::open returns status:%d", (int)status); + ASSERT_EQ(Status::OK, status); + ASSERT_NE(newSession, nullptr); + session = newSession; + }); + ASSERT_TRUE(ret.isOk()); + // Ensure that a device labeling itself as 3.3/3.4 can have its session interface + // cast the 3.3/3.4 interface, and that lower versions can't be cast to it. + sp sessionV3_3; + sp sessionV3_4; + sp sessionV3_5; + sp sessionV3_6; + sp sessionV3_7; + castSession(session, deviceVersion, &sessionV3_3, + &sessionV3_4, &sessionV3_5, &sessionV3_6, + &sessionV3_7); + if (deviceVersion == CAMERA_DEVICE_API_VERSION_3_7) { + ASSERT_TRUE(sessionV3_7.get() != nullptr); + } else if (deviceVersion == CAMERA_DEVICE_API_VERSION_3_6) { + ASSERT_TRUE(sessionV3_6.get() != nullptr); + } else if (deviceVersion == CAMERA_DEVICE_API_VERSION_3_5) { + ASSERT_TRUE(sessionV3_5.get() != nullptr); + } else if (deviceVersion == CAMERA_DEVICE_API_VERSION_3_4) { + ASSERT_TRUE(sessionV3_4.get() != nullptr); + } else if (deviceVersion == CAMERA_DEVICE_API_VERSION_3_3) { + ASSERT_TRUE(sessionV3_3.get() != nullptr); + } else { //V3_2 + ASSERT_TRUE(sessionV3_3.get() == nullptr); + ASSERT_TRUE(sessionV3_4.get() == nullptr); + ASSERT_TRUE(sessionV3_5.get() == nullptr); + } + native_handle_t* raw_handle = native_handle_create(1, 0); + raw_handle->data[0] = open(kDumpOutput, O_RDWR); + ASSERT_GE(raw_handle->data[0], 0); + hidl_handle handle = raw_handle; + ret = device3_x->dumpState(handle); + ASSERT_TRUE(ret.isOk()); + close(raw_handle->data[0]); + native_handle_delete(raw_handle); + + ret = session->close(); + ASSERT_TRUE(ret.isOk()); + // TODO: test all session API calls return INTERNAL_ERROR after close + // TODO: keep a wp copy here and verify session cannot be promoted out of this scope + } + break; + case CAMERA_DEVICE_API_VERSION_1_0: { + sp<::android::hardware::camera::device::V1_0::ICameraDevice> device1; + openCameraDevice(name, mProvider, &device1 /*out*/); + ASSERT_NE(nullptr, device1.get()); + + native_handle_t* raw_handle = native_handle_create(1, 0); + raw_handle->data[0] = open(kDumpOutput, O_RDWR); + ASSERT_GE(raw_handle->data[0], 0); + hidl_handle handle = raw_handle; + Return returnStatus = device1->dumpState(handle); + ASSERT_TRUE(returnStatus.isOk()); + ASSERT_EQ(Status::OK, returnStatus); + close(raw_handle->data[0]); + native_handle_delete(raw_handle); + + ret = device1->close(); + ASSERT_TRUE(ret.isOk()); + } + break; + default: { + ALOGE("%s: Unsupported device version %d", __func__, deviceVersion); + ADD_FAILURE(); + } + break; + } + } +} + +// Check whether all common default request settings can be sucessfully +// constructed. 
+TEST_P(CameraHidlTest, constructDefaultRequestSettings) { + hidl_vec cameraDeviceNames = getCameraDeviceNames(mProvider); + + for (const auto& name : cameraDeviceNames) { + int deviceVersion = getCameraDeviceVersion(name, mProviderType); + switch (deviceVersion) { + case CAMERA_DEVICE_API_VERSION_3_7: + case CAMERA_DEVICE_API_VERSION_3_6: + case CAMERA_DEVICE_API_VERSION_3_5: + case CAMERA_DEVICE_API_VERSION_3_4: + case CAMERA_DEVICE_API_VERSION_3_3: + case CAMERA_DEVICE_API_VERSION_3_2: { + ::android::sp<::android::hardware::camera::device::V3_2::ICameraDevice> device3_x; + Return ret; + ALOGI("constructDefaultRequestSettings: Testing camera device %s", name.c_str()); + ret = mProvider->getCameraDeviceInterface_V3_x( + name, [&](auto status, const auto& device) { + ALOGI("getCameraDeviceInterface_V3_x returns status:%d", (int)status); + ASSERT_EQ(Status::OK, status); + ASSERT_NE(device, nullptr); + device3_x = device; + }); + ASSERT_TRUE(ret.isOk()); + + sp cb = new EmptyDeviceCb; + sp session; + ret = device3_x->open(cb, [&](auto status, const auto& newSession) { + ALOGI("device::open returns status:%d", (int)status); + ASSERT_EQ(Status::OK, status); + ASSERT_NE(newSession, nullptr); + session = newSession; + }); + ASSERT_TRUE(ret.isOk()); + + for (uint32_t t = (uint32_t)RequestTemplate::PREVIEW; + t <= (uint32_t)RequestTemplate::MANUAL; t++) { + RequestTemplate reqTemplate = (RequestTemplate)t; + ret = + session->constructDefaultRequestSettings( + reqTemplate, [&](auto status, const auto& req) { + ALOGI("constructDefaultRequestSettings returns status:%d", + (int)status); + if (reqTemplate == RequestTemplate::ZERO_SHUTTER_LAG || + reqTemplate == RequestTemplate::MANUAL) { + // optional templates + ASSERT_TRUE((status == Status::OK) || + (status == Status::ILLEGAL_ARGUMENT)); + } else { + ASSERT_EQ(Status::OK, status); + } + + if (status == Status::OK) { + const camera_metadata_t* metadata = + (camera_metadata_t*) req.data(); + size_t expectedSize = req.size(); + int result = validate_camera_metadata_structure( + metadata, &expectedSize); + ASSERT_TRUE((result == 0) || + (result == CAMERA_METADATA_VALIDATION_SHIFTED)); + verifyRequestTemplate(metadata, reqTemplate); + } else { + ASSERT_EQ(0u, req.size()); + } + }); + ASSERT_TRUE(ret.isOk()); + } + ret = session->close(); + ASSERT_TRUE(ret.isOk()); + } + break; + case CAMERA_DEVICE_API_VERSION_1_0: { + //Not applicable + } + break; + default: { + ALOGE("%s: Unsupported device version %d", __func__, deviceVersion); + ADD_FAILURE(); + } + break; + } + } +} + +// Verify that all supported stream formats and sizes can be configured +// successfully. 
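+// Annotation (illustrative, not part of the original change): the brace-initializers used for
+// V3_2::Stream in the configuration tests below fill the struct fields positionally, i.e.
+//
+//     V3_2::Stream s = {/*id*/ 0, /*streamType*/ StreamType::OUTPUT,
+//                       /*width*/ 640, /*height*/ 480,
+//                       /*format*/ static_cast<PixelFormat>(outputStream.format),
+//                       /*usage*/ GRALLOC1_CONSUMER_USAGE_HWCOMPOSER,
+//                       /*dataSpace*/ dataspaceFlag, /*rotation*/ StreamRotation::ROTATION_0};
+//
+// (field names as declared by the V3_2 Stream type). This is why the invalid-configuration test
+// later only needs to swap in zero/UINT32_MAX width, height, format or rotation values to
+// provoke ILLEGAL_ARGUMENT.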
+TEST_P(CameraHidlTest, configureStreamsAvailableOutputs) { + hidl_vec cameraDeviceNames = getCameraDeviceNames(mProvider); + std::vector outputStreams; + + for (const auto& name : cameraDeviceNames) { + int deviceVersion = getCameraDeviceVersion(name, mProviderType); + if (deviceVersion == CAMERA_DEVICE_API_VERSION_1_0) { + continue; + } else if (deviceVersion <= 0) { + ALOGE("%s: Unsupported device version %d", __func__, deviceVersion); + ADD_FAILURE(); + return; + } + + camera_metadata_t* staticMeta; + Return ret; + sp session; + sp session3_3; + sp session3_4; + sp session3_5; + sp session3_6; + sp session3_7; + sp cameraDevice; + sp cameraDevice3_5; + sp cameraDevice3_7; + openEmptyDeviceSession(name, mProvider, + &session /*out*/, &staticMeta /*out*/, &cameraDevice /*out*/); + castSession(session, deviceVersion, &session3_3, &session3_4, &session3_5, + &session3_6, &session3_7); + castDevice(cameraDevice, deviceVersion, &cameraDevice3_5, &cameraDevice3_7); + + outputStreams.clear(); + ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMeta, outputStreams)); + ASSERT_NE(0u, outputStreams.size()); + + uint32_t jpegBufferSize = 0; + ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize)); + ASSERT_NE(0u, jpegBufferSize); + + int32_t streamId = 0; + uint32_t streamConfigCounter = 0; + for (auto& it : outputStreams) { + V3_2::Stream stream3_2; + V3_2::DataspaceFlags dataspaceFlag = getDataspace(static_cast(it.format)); + stream3_2 = {streamId, + StreamType::OUTPUT, + static_cast(it.width), + static_cast(it.height), + static_cast(it.format), + GRALLOC1_CONSUMER_USAGE_HWCOMPOSER, + dataspaceFlag, + StreamRotation::ROTATION_0}; + ::android::hardware::hidl_vec streams3_2 = {stream3_2}; + ::android::hardware::camera::device::V3_7::StreamConfiguration config3_7; + ::android::hardware::camera::device::V3_5::StreamConfiguration config3_5; + ::android::hardware::camera::device::V3_4::StreamConfiguration config3_4; + ::android::hardware::camera::device::V3_2::StreamConfiguration config3_2; + createStreamConfiguration(streams3_2, StreamConfigurationMode::NORMAL_MODE, &config3_2, + &config3_4, &config3_5, &config3_7, jpegBufferSize); + + if (session3_5 != nullptr) { + bool expectStreamCombQuery = (isLogicalMultiCamera(staticMeta) == Status::OK); + verifyStreamCombination(cameraDevice3_7, config3_7, cameraDevice3_5, config3_4, + /*expectedStatus*/ true, expectStreamCombQuery); + } + + if (session3_7 != nullptr) { + config3_7.streamConfigCounter = streamConfigCounter++; + ret = session3_7->configureStreams_3_7( + config3_7, + [streamId](Status s, device::V3_6::HalStreamConfiguration halConfig) { + ASSERT_EQ(Status::OK, s); + ASSERT_EQ(1u, halConfig.streams.size()); + ASSERT_EQ(halConfig.streams[0].v3_4.v3_3.v3_2.id, streamId); + }); + } else if (session3_5 != nullptr) { + config3_5.streamConfigCounter = streamConfigCounter++; + ret = session3_5->configureStreams_3_5(config3_5, + [streamId](Status s, device::V3_4::HalStreamConfiguration halConfig) { + ASSERT_EQ(Status::OK, s); + ASSERT_EQ(1u, halConfig.streams.size()); + ASSERT_EQ(halConfig.streams[0].v3_3.v3_2.id, streamId); + }); + } else if (session3_4 != nullptr) { + ret = session3_4->configureStreams_3_4(config3_4, + [streamId](Status s, device::V3_4::HalStreamConfiguration halConfig) { + ASSERT_EQ(Status::OK, s); + ASSERT_EQ(1u, halConfig.streams.size()); + ASSERT_EQ(halConfig.streams[0].v3_3.v3_2.id, streamId); + }); + } else if (session3_3 != nullptr) { + ret = session3_3->configureStreams_3_3(config3_2, + [streamId](Status s, 
device::V3_3::HalStreamConfiguration halConfig) { + ASSERT_EQ(Status::OK, s); + ASSERT_EQ(1u, halConfig.streams.size()); + ASSERT_EQ(halConfig.streams[0].v3_2.id, streamId); + }); + } else { + ret = session->configureStreams(config3_2, + [streamId](Status s, HalStreamConfiguration halConfig) { + ASSERT_EQ(Status::OK, s); + ASSERT_EQ(1u, halConfig.streams.size()); + ASSERT_EQ(halConfig.streams[0].id, streamId); + }); + } + ASSERT_TRUE(ret.isOk()); + streamId++; + } + + free_camera_metadata(staticMeta); + ret = session->close(); + ASSERT_TRUE(ret.isOk()); + } +} + +// Verify that mandatory concurrent streams and outputs are supported. +TEST_P(CameraHidlTest, configureConcurrentStreamsAvailableOutputs) { + struct CameraTestInfo { + camera_metadata_t* staticMeta = nullptr; + sp session; + sp session3_3; + sp session3_4; + sp session3_5; + sp session3_6; + sp session3_7; + sp cameraDevice; + sp cameraDevice3_5; + sp cameraDevice3_7; + ::android::hardware::camera::device::V3_7::StreamConfiguration config3_7; + ::android::hardware::camera::device::V3_5::StreamConfiguration config3_5; + ::android::hardware::camera::device::V3_4::StreamConfiguration config3_4; + ::android::hardware::camera::device::V3_2::StreamConfiguration config3_2; + }; + if (mProvider2_6 == nullptr) { + // This test is provider@2.6 specific + ALOGW("%s provider not 2_6, skipping", __func__); + return; + } + + std::map idToNameMap = getCameraDeviceIdToNameMap(mProvider2_6); + hidl_vec> concurrentDeviceCombinations = + getConcurrentDeviceCombinations(mProvider2_6); + std::vector outputStreams; + for (const auto& cameraDeviceIds : concurrentDeviceCombinations) { + std::vector cameraIdsAndStreamCombinations; + std::vector cameraTestInfos; + size_t i = 0; + for (const auto& id : cameraDeviceIds) { + CameraTestInfo cti; + Return ret; + auto it = idToNameMap.find(id); + ASSERT_TRUE(idToNameMap.end() != it); + hidl_string name = it->second; + int deviceVersion = getCameraDeviceVersion(name, mProviderType); + if (deviceVersion == CAMERA_DEVICE_API_VERSION_1_0) { + continue; + } else if (deviceVersion <= 0) { + ALOGE("%s: Unsupported device version %d", __func__, deviceVersion); + ADD_FAILURE(); + return; + } + openEmptyDeviceSession(name, mProvider2_6, &cti.session /*out*/, + &cti.staticMeta /*out*/, &cti.cameraDevice /*out*/); + castSession(cti.session, deviceVersion, &cti.session3_3, &cti.session3_4, + &cti.session3_5, &cti.session3_6, &cti.session3_7); + castDevice(cti.cameraDevice, deviceVersion, &cti.cameraDevice3_5, &cti.cameraDevice3_7); + + outputStreams.clear(); + ASSERT_EQ(Status::OK, getMandatoryConcurrentStreams(cti.staticMeta, &outputStreams)); + ASSERT_NE(0u, outputStreams.size()); + + uint32_t jpegBufferSize = 0; + ASSERT_EQ(Status::OK, getJpegBufferSize(cti.staticMeta, &jpegBufferSize)); + ASSERT_NE(0u, jpegBufferSize); + + int32_t streamId = 0; + ::android::hardware::hidl_vec streams3_2(outputStreams.size()); + size_t j = 0; + for (const auto& it : outputStreams) { + V3_2::Stream stream3_2; + V3_2::DataspaceFlags dataspaceFlag = getDataspace( + static_cast(it.format)); + stream3_2 = {streamId++, + StreamType::OUTPUT, + static_cast(it.width), + static_cast(it.height), + static_cast(it.format), + GRALLOC1_CONSUMER_USAGE_HWCOMPOSER, + dataspaceFlag, + StreamRotation::ROTATION_0}; + streams3_2[j] = stream3_2; + j++; + } + + // Add the created stream configs to cameraIdsAndStreamCombinations + createStreamConfiguration(streams3_2, StreamConfigurationMode::NORMAL_MODE, + &cti.config3_2, &cti.config3_4, &cti.config3_5, + 
&cti.config3_7, jpegBufferSize); + + cti.config3_5.streamConfigCounter = outputStreams.size(); + CameraIdAndStreamCombination cameraIdAndStreamCombination; + cameraIdAndStreamCombination.cameraId = id; + cameraIdAndStreamCombination.streamConfiguration = cti.config3_4; + cameraIdsAndStreamCombinations.push_back(cameraIdAndStreamCombination); + i++; + cameraTestInfos.push_back(cti); + } + // Now verify that concurrent streams are supported + auto cb = [](Status s, bool supported) { + ASSERT_EQ(Status::OK, s); + ASSERT_EQ(supported, true); + }; + + auto ret = mProvider2_6->isConcurrentStreamCombinationSupported( + cameraIdsAndStreamCombinations, cb); + + // Test the stream can actually be configured + for (const auto& cti : cameraTestInfos) { + if (cti.session3_5 != nullptr) { + bool expectStreamCombQuery = (isLogicalMultiCamera(cti.staticMeta) == Status::OK); + verifyStreamCombination(cti.cameraDevice3_7, cti.config3_7, cti.cameraDevice3_5, + cti.config3_4, + /*expectedStatus*/ true, expectStreamCombQuery); + } + + if (cti.session3_7 != nullptr) { + ret = cti.session3_7->configureStreams_3_7( + cti.config3_7, + [&cti](Status s, device::V3_6::HalStreamConfiguration halConfig) { + ASSERT_EQ(Status::OK, s); + ASSERT_EQ(cti.config3_7.streams.size(), halConfig.streams.size()); + }); + } else if (cti.session3_5 != nullptr) { + ret = cti.session3_5->configureStreams_3_5( + cti.config3_5, + [&cti](Status s, device::V3_4::HalStreamConfiguration halConfig) { + ASSERT_EQ(Status::OK, s); + ASSERT_EQ(cti.config3_5.v3_4.streams.size(), halConfig.streams.size()); + }); + } else if (cti.session3_4 != nullptr) { + ret = cti.session3_4->configureStreams_3_4( + cti.config3_4, + [&cti](Status s, device::V3_4::HalStreamConfiguration halConfig) { + ASSERT_EQ(Status::OK, s); + ASSERT_EQ(cti.config3_4.streams.size(), halConfig.streams.size()); + }); + } else if (cti.session3_3 != nullptr) { + ret = cti.session3_3->configureStreams_3_3( + cti.config3_2, + [&cti](Status s, device::V3_3::HalStreamConfiguration halConfig) { + ASSERT_EQ(Status::OK, s); + ASSERT_EQ(cti.config3_2.streams.size(), halConfig.streams.size()); + }); + } else { + ret = cti.session->configureStreams( + cti.config3_2, [&cti](Status s, HalStreamConfiguration halConfig) { + ASSERT_EQ(Status::OK, s); + ASSERT_EQ(cti.config3_2.streams.size(), halConfig.streams.size()); + }); + } + ASSERT_TRUE(ret.isOk()); + } + + for (const auto& cti : cameraTestInfos) { + free_camera_metadata(cti.staticMeta); + ret = cti.session->close(); + ASSERT_TRUE(ret.isOk()); + } + } +} + +// Check for correct handling of invalid/incorrect configuration parameters. 
+TEST_P(CameraHidlTest, configureStreamsInvalidOutputs) { + hidl_vec cameraDeviceNames = getCameraDeviceNames(mProvider); + std::vector outputStreams; + + for (const auto& name : cameraDeviceNames) { + int deviceVersion = getCameraDeviceVersion(name, mProviderType); + if (deviceVersion == CAMERA_DEVICE_API_VERSION_1_0) { + continue; + } else if (deviceVersion <= 0) { + ALOGE("%s: Unsupported device version %d", __func__, deviceVersion); + ADD_FAILURE(); + return; + } + + camera_metadata_t* staticMeta; + Return ret; + sp session; + sp session3_3; + sp session3_4; + sp session3_5; + sp session3_6; + sp session3_7; + sp cameraDevice; + sp cameraDevice3_5; + sp cameraDevice3_7; + openEmptyDeviceSession(name, mProvider, &session /*out*/, &staticMeta /*out*/, + &cameraDevice /*out*/); + castSession(session, deviceVersion, &session3_3, &session3_4, &session3_5, + &session3_6, &session3_7); + castDevice(cameraDevice, deviceVersion, &cameraDevice3_5, &cameraDevice3_7); + + outputStreams.clear(); + ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMeta, outputStreams)); + ASSERT_NE(0u, outputStreams.size()); + + uint32_t jpegBufferSize = 0; + ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize)); + ASSERT_NE(0u, jpegBufferSize); + + int32_t streamId = 0; + V3_2::Stream stream3_2 = {streamId++, + StreamType::OUTPUT, + static_cast(0), + static_cast(0), + static_cast(outputStreams[0].format), + GRALLOC1_CONSUMER_USAGE_HWCOMPOSER, + 0, + StreamRotation::ROTATION_0}; + uint32_t streamConfigCounter = 0; + ::android::hardware::hidl_vec streams = {stream3_2}; + ::android::hardware::camera::device::V3_7::StreamConfiguration config3_7; + ::android::hardware::camera::device::V3_5::StreamConfiguration config3_5; + ::android::hardware::camera::device::V3_4::StreamConfiguration config3_4; + ::android::hardware::camera::device::V3_2::StreamConfiguration config3_2; + createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config3_2, + &config3_4, &config3_5, &config3_7, jpegBufferSize); + + if (session3_5 != nullptr) { + verifyStreamCombination(cameraDevice3_7, config3_7, cameraDevice3_5, config3_4, + /*expectedStatus*/ false, /*expectStreamCombQuery*/ false); + } + + if (session3_7 != nullptr) { + config3_7.streamConfigCounter = streamConfigCounter++; + ret = session3_7->configureStreams_3_7( + config3_7, [](Status s, device::V3_6::HalStreamConfiguration) { + ASSERT_TRUE((Status::ILLEGAL_ARGUMENT == s) || + (Status::INTERNAL_ERROR == s)); + }); + } else if (session3_5 != nullptr) { + config3_5.streamConfigCounter = streamConfigCounter++; + ret = session3_5->configureStreams_3_5(config3_5, + [](Status s, device::V3_4::HalStreamConfiguration) { + ASSERT_TRUE((Status::ILLEGAL_ARGUMENT == s) || + (Status::INTERNAL_ERROR == s)); + }); + } else if (session3_4 != nullptr) { + ret = session3_4->configureStreams_3_4(config3_4, + [](Status s, device::V3_4::HalStreamConfiguration) { + ASSERT_TRUE((Status::ILLEGAL_ARGUMENT == s) || + (Status::INTERNAL_ERROR == s)); + }); + } else if (session3_3 != nullptr) { + ret = session3_3->configureStreams_3_3(config3_2, + [](Status s, device::V3_3::HalStreamConfiguration) { + ASSERT_TRUE((Status::ILLEGAL_ARGUMENT == s) || + (Status::INTERNAL_ERROR == s)); + }); + } else { + ret = session->configureStreams(config3_2, + [](Status s, HalStreamConfiguration) { + ASSERT_TRUE((Status::ILLEGAL_ARGUMENT == s) || + (Status::INTERNAL_ERROR == s)); + }); + } + ASSERT_TRUE(ret.isOk()); + + stream3_2 = {streamId++, + StreamType::OUTPUT, + 
static_cast(UINT32_MAX), + static_cast(UINT32_MAX), + static_cast(outputStreams[0].format), + GRALLOC1_CONSUMER_USAGE_HWCOMPOSER, + 0, + StreamRotation::ROTATION_0}; + streams[0] = stream3_2; + createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config3_2, + &config3_4, &config3_5, &config3_7, jpegBufferSize); + if (session3_5 != nullptr) { + config3_5.streamConfigCounter = streamConfigCounter++; + ret = session3_5->configureStreams_3_5(config3_5, [](Status s, + device::V3_4::HalStreamConfiguration) { + ASSERT_EQ(Status::ILLEGAL_ARGUMENT, s); + }); + } else if(session3_4 != nullptr) { + ret = session3_4->configureStreams_3_4(config3_4, [](Status s, + device::V3_4::HalStreamConfiguration) { + ASSERT_EQ(Status::ILLEGAL_ARGUMENT, s); + }); + } else if(session3_3 != nullptr) { + ret = session3_3->configureStreams_3_3(config3_2, [](Status s, + device::V3_3::HalStreamConfiguration) { + ASSERT_EQ(Status::ILLEGAL_ARGUMENT, s); + }); + } else { + ret = session->configureStreams(config3_2, [](Status s, + HalStreamConfiguration) { + ASSERT_EQ(Status::ILLEGAL_ARGUMENT, s); + }); + } + ASSERT_TRUE(ret.isOk()); + + for (auto& it : outputStreams) { + stream3_2 = {streamId++, + StreamType::OUTPUT, + static_cast(it.width), + static_cast(it.height), + static_cast(UINT32_MAX), + GRALLOC1_CONSUMER_USAGE_HWCOMPOSER, + 0, + StreamRotation::ROTATION_0}; + streams[0] = stream3_2; + createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config3_2, + &config3_4, &config3_5, &config3_7, jpegBufferSize); + if (session3_5 != nullptr) { + config3_5.streamConfigCounter = streamConfigCounter++; + ret = session3_5->configureStreams_3_5(config3_5, + [](Status s, device::V3_4::HalStreamConfiguration) { + ASSERT_EQ(Status::ILLEGAL_ARGUMENT, s); + }); + } else if(session3_4 != nullptr) { + ret = session3_4->configureStreams_3_4(config3_4, + [](Status s, device::V3_4::HalStreamConfiguration) { + ASSERT_EQ(Status::ILLEGAL_ARGUMENT, s); + }); + } else if(session3_3 != nullptr) { + ret = session3_3->configureStreams_3_3(config3_2, + [](Status s, device::V3_3::HalStreamConfiguration) { + ASSERT_EQ(Status::ILLEGAL_ARGUMENT, s); + }); + } else { + ret = session->configureStreams(config3_2, + [](Status s, HalStreamConfiguration) { + ASSERT_EQ(Status::ILLEGAL_ARGUMENT, s); + }); + } + ASSERT_TRUE(ret.isOk()); + + stream3_2 = {streamId++, + StreamType::OUTPUT, + static_cast(it.width), + static_cast(it.height), + static_cast(it.format), + GRALLOC1_CONSUMER_USAGE_HWCOMPOSER, + 0, + static_cast(UINT32_MAX)}; + streams[0] = stream3_2; + createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config3_2, + &config3_4, &config3_5, &config3_7, jpegBufferSize); + if (session3_5 != nullptr) { + config3_5.streamConfigCounter = streamConfigCounter++; + ret = session3_5->configureStreams_3_5(config3_5, + [](Status s, device::V3_4::HalStreamConfiguration) { + ASSERT_EQ(Status::ILLEGAL_ARGUMENT, s); + }); + } else if(session3_4 != nullptr) { + ret = session3_4->configureStreams_3_4(config3_4, + [](Status s, device::V3_4::HalStreamConfiguration) { + ASSERT_EQ(Status::ILLEGAL_ARGUMENT, s); + }); + } else if(session3_3 != nullptr) { + ret = session3_3->configureStreams_3_3(config3_2, + [](Status s, device::V3_3::HalStreamConfiguration) { + ASSERT_EQ(Status::ILLEGAL_ARGUMENT, s); + }); + } else { + ret = session->configureStreams(config3_2, + [](Status s, HalStreamConfiguration) { + ASSERT_EQ(Status::ILLEGAL_ARGUMENT, s); + }); + } + ASSERT_TRUE(ret.isOk()); + } + + 
free_camera_metadata(staticMeta); + ret = session->close(); + ASSERT_TRUE(ret.isOk()); + } +} + +// Check whether all supported ZSL output stream combinations can be +// configured successfully. +TEST_P(CameraHidlTest, configureStreamsZSLInputOutputs) { + hidl_vec cameraDeviceNames = getCameraDeviceNames(mProvider); + std::vector inputStreams; + std::vector inputOutputMap; + + for (const auto& name : cameraDeviceNames) { + int deviceVersion = getCameraDeviceVersion(name, mProviderType); + if (deviceVersion == CAMERA_DEVICE_API_VERSION_1_0) { + continue; + } else if (deviceVersion <= 0) { + ALOGE("%s: Unsupported device version %d", __func__, deviceVersion); + ADD_FAILURE(); + return; + } + + camera_metadata_t* staticMeta; + Return ret; + sp session; + sp session3_3; + sp session3_4; + sp session3_5; + sp session3_6; + sp session3_7; + sp cameraDevice; + sp cameraDevice3_5; + sp cameraDevice3_7; + openEmptyDeviceSession(name, mProvider, &session /*out*/, &staticMeta /*out*/, + &cameraDevice /*out*/); + castSession(session, deviceVersion, &session3_3, &session3_4, &session3_5, + &session3_6, &session3_7); + castDevice(cameraDevice, deviceVersion, &cameraDevice3_5, &cameraDevice3_7); + + Status rc = isZSLModeAvailable(staticMeta); + if (Status::METHOD_NOT_SUPPORTED == rc) { + ret = session->close(); + ASSERT_TRUE(ret.isOk()); + continue; + } + ASSERT_EQ(Status::OK, rc); + + inputStreams.clear(); + ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMeta, inputStreams)); + ASSERT_NE(0u, inputStreams.size()); + + inputOutputMap.clear(); + ASSERT_EQ(Status::OK, getZSLInputOutputMap(staticMeta, inputOutputMap)); + ASSERT_NE(0u, inputOutputMap.size()); + + bool supportMonoY8 = false; + if (Status::OK == isMonochromeCamera(staticMeta)) { + for (auto& it : inputStreams) { + if (it.format == static_cast(PixelFormat::Y8)) { + supportMonoY8 = true; + break; + } + } + } + + uint32_t jpegBufferSize = 0; + ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize)); + ASSERT_NE(0u, jpegBufferSize); + + int32_t streamId = 0; + bool hasPrivToY8 = false, hasY8ToY8 = false, hasY8ToBlob = false; + uint32_t streamConfigCounter = 0; + for (auto& inputIter : inputOutputMap) { + AvailableStream input; + ASSERT_EQ(Status::OK, findLargestSize(inputStreams, inputIter.inputFormat, + input)); + ASSERT_NE(0u, inputStreams.size()); + + if (inputIter.inputFormat == static_cast(PixelFormat::IMPLEMENTATION_DEFINED) + && inputIter.outputFormat == static_cast(PixelFormat::Y8)) { + hasPrivToY8 = true; + } else if (inputIter.inputFormat == static_cast(PixelFormat::Y8)) { + if (inputIter.outputFormat == static_cast(PixelFormat::BLOB)) { + hasY8ToBlob = true; + } else if (inputIter.outputFormat == static_cast(PixelFormat::Y8)) { + hasY8ToY8 = true; + } + } + AvailableStream outputThreshold = {INT32_MAX, INT32_MAX, + inputIter.outputFormat}; + std::vector outputStreams; + ASSERT_EQ(Status::OK, + getAvailableOutputStreams(staticMeta, outputStreams, + &outputThreshold)); + for (auto& outputIter : outputStreams) { + V3_2::DataspaceFlags outputDataSpace = + getDataspace(static_cast(outputIter.format)); + V3_2::Stream zslStream = {streamId++, + StreamType::OUTPUT, + static_cast(input.width), + static_cast(input.height), + static_cast(input.format), + GRALLOC_USAGE_HW_CAMERA_ZSL, + 0, + StreamRotation::ROTATION_0}; + V3_2::Stream inputStream = {streamId++, + StreamType::INPUT, + static_cast(input.width), + static_cast(input.height), + static_cast(input.format), + 0, + 0, + StreamRotation::ROTATION_0}; + V3_2::Stream 
outputStream = {streamId++, + StreamType::OUTPUT, + static_cast(outputIter.width), + static_cast(outputIter.height), + static_cast(outputIter.format), + GRALLOC1_CONSUMER_USAGE_HWCOMPOSER, + outputDataSpace, + StreamRotation::ROTATION_0}; + + ::android::hardware::hidl_vec streams = {inputStream, zslStream, + outputStream}; + ::android::hardware::camera::device::V3_7::StreamConfiguration config3_7; + ::android::hardware::camera::device::V3_5::StreamConfiguration config3_5; + ::android::hardware::camera::device::V3_4::StreamConfiguration config3_4; + ::android::hardware::camera::device::V3_2::StreamConfiguration config3_2; + createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config3_2, + &config3_4, &config3_5, &config3_7, jpegBufferSize); + if (session3_5 != nullptr) { + verifyStreamCombination(cameraDevice3_7, config3_7, cameraDevice3_5, config3_4, + /*expectedStatus*/ true, + /*expectStreamCombQuery*/ false); + } + + if (session3_7 != nullptr) { + config3_7.streamConfigCounter = streamConfigCounter++; + ret = session3_7->configureStreams_3_7( + config3_7, + [](Status s, device::V3_6::HalStreamConfiguration halConfig) { + ASSERT_EQ(Status::OK, s); + ASSERT_EQ(3u, halConfig.streams.size()); + }); + } else if (session3_5 != nullptr) { + config3_5.streamConfigCounter = streamConfigCounter++; + ret = session3_5->configureStreams_3_5(config3_5, + [](Status s, device::V3_4::HalStreamConfiguration halConfig) { + ASSERT_EQ(Status::OK, s); + ASSERT_EQ(3u, halConfig.streams.size()); + }); + } else if (session3_4 != nullptr) { + ret = session3_4->configureStreams_3_4(config3_4, + [](Status s, device::V3_4::HalStreamConfiguration halConfig) { + ASSERT_EQ(Status::OK, s); + ASSERT_EQ(3u, halConfig.streams.size()); + }); + } else if (session3_3 != nullptr) { + ret = session3_3->configureStreams_3_3(config3_2, + [](Status s, device::V3_3::HalStreamConfiguration halConfig) { + ASSERT_EQ(Status::OK, s); + ASSERT_EQ(3u, halConfig.streams.size()); + }); + } else { + ret = session->configureStreams(config3_2, + [](Status s, HalStreamConfiguration halConfig) { + ASSERT_EQ(Status::OK, s); + ASSERT_EQ(3u, halConfig.streams.size()); + }); + } + ASSERT_TRUE(ret.isOk()); + } + } + + if (supportMonoY8) { + if (Status::OK == isZSLModeAvailable(staticMeta, PRIV_REPROCESS)) { + ASSERT_TRUE(hasPrivToY8); + } + if (Status::OK == isZSLModeAvailable(staticMeta, YUV_REPROCESS)) { + ASSERT_TRUE(hasY8ToY8); + ASSERT_TRUE(hasY8ToBlob); + } + } + + free_camera_metadata(staticMeta); + ret = session->close(); + ASSERT_TRUE(ret.isOk()); + } +} + +// Check whether session parameters are supported. If Hal support for them +// exist, then try to configure a preview stream using them. 
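+// Session parameters are the request keys that the HAL lists in
+// ANDROID_REQUEST_AVAILABLE_SESSION_KEYS. The test filters the default
+// PREVIEW template down to those keys and passes the result through
+// StreamConfiguration::sessionParams when configuring the stream.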
+TEST_P(CameraHidlTest, configureStreamsWithSessionParameters) { + hidl_vec cameraDeviceNames = getCameraDeviceNames(mProvider); + std::vector outputPreviewStreams; + AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight, + static_cast(PixelFormat::IMPLEMENTATION_DEFINED)}; + + for (const auto& name : cameraDeviceNames) { + int deviceVersion = getCameraDeviceVersion(name, mProviderType); + if (deviceVersion <= 0) { + ALOGE("%s: Unsupported device version %d", __func__, deviceVersion); + ADD_FAILURE(); + return; + } else if (deviceVersion < CAMERA_DEVICE_API_VERSION_3_4) { + continue; + } + + camera_metadata_t* staticMetaBuffer; + Return ret; + sp session; + sp session3_3; + sp session3_4; + sp session3_5; + sp session3_6; + sp session3_7; + openEmptyDeviceSession(name, mProvider, &session /*out*/, &staticMetaBuffer /*out*/); + castSession(session, deviceVersion, &session3_3, &session3_4, &session3_5, + &session3_6, &session3_7); + if (deviceVersion == CAMERA_DEVICE_API_VERSION_3_4) { + ASSERT_NE(session3_4, nullptr); + } else { + ASSERT_NE(session3_5, nullptr); + } + + std::unordered_set availableSessionKeys; + auto rc = getSupportedKeys(staticMetaBuffer, ANDROID_REQUEST_AVAILABLE_SESSION_KEYS, + &availableSessionKeys); + ASSERT_TRUE(Status::OK == rc); + if (availableSessionKeys.empty()) { + free_camera_metadata(staticMetaBuffer); + ret = session->close(); + ASSERT_TRUE(ret.isOk()); + continue; + } + + android::hardware::camera::common::V1_0::helper::CameraMetadata previewRequestSettings; + android::hardware::camera::common::V1_0::helper::CameraMetadata sessionParams, + modifiedSessionParams; + constructFilteredSettings(session, availableSessionKeys, RequestTemplate::PREVIEW, + &previewRequestSettings, &sessionParams); + if (sessionParams.isEmpty()) { + free_camera_metadata(staticMetaBuffer); + ret = session->close(); + ASSERT_TRUE(ret.isOk()); + continue; + } + + outputPreviewStreams.clear(); + + ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMetaBuffer, outputPreviewStreams, + &previewThreshold)); + ASSERT_NE(0u, outputPreviewStreams.size()); + + V3_4::Stream previewStream; + previewStream.v3_2 = {0, + StreamType::OUTPUT, + static_cast(outputPreviewStreams[0].width), + static_cast(outputPreviewStreams[0].height), + static_cast(outputPreviewStreams[0].format), + GRALLOC1_CONSUMER_USAGE_HWCOMPOSER, + 0, + StreamRotation::ROTATION_0}; + previewStream.bufferSize = 0; + ::android::hardware::hidl_vec streams = {previewStream}; + ::android::hardware::camera::device::V3_4::StreamConfiguration config; + ::android::hardware::camera::device::V3_5::StreamConfiguration config3_5; + ::android::hardware::camera::device::V3_7::StreamConfiguration config3_7; + config.streams = streams; + config.operationMode = StreamConfigurationMode::NORMAL_MODE; + modifiedSessionParams = sessionParams; + auto sessionParamsBuffer = sessionParams.release(); + config.sessionParams.setToExternal(reinterpret_cast (sessionParamsBuffer), + get_camera_metadata_size(sessionParamsBuffer)); + config3_5.v3_4 = config; + config3_5.streamConfigCounter = 0; + config3_7.streams = {{previewStream, -1, {ANDROID_SENSOR_PIXEL_MODE_DEFAULT}}}; + config3_7.operationMode = config.operationMode; + config3_7.sessionParams.setToExternal(reinterpret_cast(sessionParamsBuffer), + get_camera_metadata_size(sessionParamsBuffer)); + config3_7.streamConfigCounter = 0; + config3_7.multiResolutionInputImage = false; + + if (session3_5 != nullptr) { + bool newSessionParamsAvailable = false; + for (const auto& it : 
availableSessionKeys) { + if (modifiedSessionParams.exists(it)) { + modifiedSessionParams.erase(it); + newSessionParamsAvailable = true; + break; + } + } + if (newSessionParamsAvailable) { + auto modifiedSessionParamsBuffer = modifiedSessionParams.release(); + verifySessionReconfigurationQuery(session3_5, sessionParamsBuffer, + modifiedSessionParamsBuffer); + modifiedSessionParams.acquire(modifiedSessionParamsBuffer); + } + } + + if (session3_7 != nullptr) { + ret = session3_7->configureStreams_3_7( + config3_7, [](Status s, device::V3_6::HalStreamConfiguration halConfig) { + ASSERT_EQ(Status::OK, s); + ASSERT_EQ(1u, halConfig.streams.size()); + }); + } else if (session3_5 != nullptr) { + ret = session3_5->configureStreams_3_5(config3_5, + [](Status s, device::V3_4::HalStreamConfiguration halConfig) { + ASSERT_EQ(Status::OK, s); + ASSERT_EQ(1u, halConfig.streams.size()); + }); + } else { + ret = session3_4->configureStreams_3_4(config, + [](Status s, device::V3_4::HalStreamConfiguration halConfig) { + ASSERT_EQ(Status::OK, s); + ASSERT_EQ(1u, halConfig.streams.size()); + }); + } + sessionParams.acquire(sessionParamsBuffer); + ASSERT_TRUE(ret.isOk()); + + free_camera_metadata(staticMetaBuffer); + ret = session->close(); + ASSERT_TRUE(ret.isOk()); + } +} + +// Verify that all supported preview + still capture stream combinations +// can be configured successfully. +TEST_P(CameraHidlTest, configureStreamsPreviewStillOutputs) { + hidl_vec cameraDeviceNames = getCameraDeviceNames(mProvider); + std::vector outputBlobStreams; + std::vector outputPreviewStreams; + AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight, + static_cast(PixelFormat::IMPLEMENTATION_DEFINED)}; + AvailableStream blobThreshold = {INT32_MAX, INT32_MAX, + static_cast(PixelFormat::BLOB)}; + + for (const auto& name : cameraDeviceNames) { + int deviceVersion = getCameraDeviceVersion(name, mProviderType); + if (deviceVersion == CAMERA_DEVICE_API_VERSION_1_0) { + continue; + } else if (deviceVersion <= 0) { + ALOGE("%s: Unsupported device version %d", __func__, deviceVersion); + ADD_FAILURE(); + return; + } + + camera_metadata_t* staticMeta; + Return ret; + sp session; + sp session3_3; + sp session3_4; + sp session3_5; + sp session3_6; + sp session3_7; + sp cameraDevice; + sp cameraDevice3_5; + sp cameraDevice3_7; + openEmptyDeviceSession(name, mProvider, &session /*out*/, &staticMeta /*out*/, + &cameraDevice /*out*/); + castSession(session, deviceVersion, &session3_3, &session3_4, &session3_5, + &session3_6, &session3_7); + castDevice(cameraDevice, deviceVersion, &cameraDevice3_5, &cameraDevice3_7); + + // Check if camera support depth only + if (isDepthOnly(staticMeta)) { + free_camera_metadata(staticMeta); + ret = session->close(); + ASSERT_TRUE(ret.isOk()); + continue; + } + + outputBlobStreams.clear(); + ASSERT_EQ(Status::OK, + getAvailableOutputStreams(staticMeta, outputBlobStreams, + &blobThreshold)); + ASSERT_NE(0u, outputBlobStreams.size()); + + outputPreviewStreams.clear(); + ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMeta, outputPreviewStreams, + &previewThreshold)); + ASSERT_NE(0u, outputPreviewStreams.size()); + + uint32_t jpegBufferSize = 0; + ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize)); + ASSERT_NE(0u, jpegBufferSize); + + int32_t streamId = 0; + uint32_t streamConfigCounter = 0; + for (auto& blobIter : outputBlobStreams) { + for (auto& previewIter : outputPreviewStreams) { + V3_2::Stream previewStream = {streamId++, + StreamType::OUTPUT, + 
static_cast(previewIter.width), + static_cast(previewIter.height), + static_cast(previewIter.format), + GRALLOC1_CONSUMER_USAGE_HWCOMPOSER, + 0, + StreamRotation::ROTATION_0}; + V3_2::Stream blobStream = {streamId++, + StreamType::OUTPUT, + static_cast(blobIter.width), + static_cast(blobIter.height), + static_cast(blobIter.format), + GRALLOC1_CONSUMER_USAGE_CPU_READ, + static_cast(Dataspace::V0_JFIF), + StreamRotation::ROTATION_0}; + ::android::hardware::hidl_vec streams = {previewStream, + blobStream}; + ::android::hardware::camera::device::V3_7::StreamConfiguration config3_7; + ::android::hardware::camera::device::V3_5::StreamConfiguration config3_5; + ::android::hardware::camera::device::V3_4::StreamConfiguration config3_4; + ::android::hardware::camera::device::V3_2::StreamConfiguration config3_2; + createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config3_2, + &config3_4, &config3_5, &config3_7, jpegBufferSize); + if (session3_5 != nullptr) { + verifyStreamCombination(cameraDevice3_7, config3_7, cameraDevice3_5, config3_4, + /*expectedStatus*/ true, + /*expectStreamCombQuery*/ false); + } + + if (session3_7 != nullptr) { + config3_7.streamConfigCounter = streamConfigCounter++; + ret = session3_7->configureStreams_3_7( + config3_7, + [](Status s, device::V3_6::HalStreamConfiguration halConfig) { + ASSERT_EQ(Status::OK, s); + ASSERT_EQ(2u, halConfig.streams.size()); + }); + } else if (session3_5 != nullptr) { + config3_5.streamConfigCounter = streamConfigCounter++; + ret = session3_5->configureStreams_3_5(config3_5, + [](Status s, device::V3_4::HalStreamConfiguration halConfig) { + ASSERT_EQ(Status::OK, s); + ASSERT_EQ(2u, halConfig.streams.size()); + }); + } else if (session3_4 != nullptr) { + ret = session3_4->configureStreams_3_4(config3_4, + [](Status s, device::V3_4::HalStreamConfiguration halConfig) { + ASSERT_EQ(Status::OK, s); + ASSERT_EQ(2u, halConfig.streams.size()); + }); + } else if (session3_3 != nullptr) { + ret = session3_3->configureStreams_3_3(config3_2, + [](Status s, device::V3_3::HalStreamConfiguration halConfig) { + ASSERT_EQ(Status::OK, s); + ASSERT_EQ(2u, halConfig.streams.size()); + }); + } else { + ret = session->configureStreams(config3_2, + [](Status s, HalStreamConfiguration halConfig) { + ASSERT_EQ(Status::OK, s); + ASSERT_EQ(2u, halConfig.streams.size()); + }); + } + ASSERT_TRUE(ret.isOk()); + } + } + + free_camera_metadata(staticMeta); + ret = session->close(); + ASSERT_TRUE(ret.isOk()); + } +} + +// In case constrained mode is supported, test whether it can be +// configured. Additionally check for common invalid inputs when +// using this mode. 
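+// "Constrained mode" refers to CONSTRAINED_HIGH_SPEED_MODE configurations.
+// The invalid inputs exercised below are a 0x0 stream, a UINT32_MAX x
+// UINT32_MAX stream, and an undefined pixel format; the HAL must reject
+// each of them (ILLEGAL_ARGUMENT, or INTERNAL_ERROR for the 0x0 case).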
+TEST_P(CameraHidlTest, configureStreamsConstrainedOutputs) { + hidl_vec cameraDeviceNames = getCameraDeviceNames(mProvider); + + for (const auto& name : cameraDeviceNames) { + int deviceVersion = getCameraDeviceVersion(name, mProviderType); + if (deviceVersion == CAMERA_DEVICE_API_VERSION_1_0) { + continue; + } else if (deviceVersion <= 0) { + ALOGE("%s: Unsupported device version %d", __func__, deviceVersion); + ADD_FAILURE(); + return; + } + + camera_metadata_t* staticMeta; + Return ret; + sp session; + sp session3_3; + sp session3_4; + sp session3_5; + sp session3_6; + sp session3_7; + sp cameraDevice; + sp cameraDevice3_5; + sp cameraDevice3_7; + openEmptyDeviceSession(name, mProvider, &session /*out*/, &staticMeta /*out*/, + &cameraDevice /*out*/); + castSession(session, deviceVersion, &session3_3, &session3_4, &session3_5, + &session3_6, &session3_7); + castDevice(cameraDevice, deviceVersion, &cameraDevice3_5, &cameraDevice3_7); + + Status rc = isConstrainedModeAvailable(staticMeta); + if (Status::METHOD_NOT_SUPPORTED == rc) { + ret = session->close(); + ASSERT_TRUE(ret.isOk()); + continue; + } + ASSERT_EQ(Status::OK, rc); + + AvailableStream hfrStream; + rc = pickConstrainedModeSize(staticMeta, hfrStream); + ASSERT_EQ(Status::OK, rc); + + int32_t streamId = 0; + uint32_t streamConfigCounter = 0; + V3_2::Stream stream = {streamId, + StreamType::OUTPUT, + static_cast(hfrStream.width), + static_cast(hfrStream.height), + static_cast(hfrStream.format), + GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER, + 0, + StreamRotation::ROTATION_0}; + ::android::hardware::hidl_vec streams = {stream}; + ::android::hardware::camera::device::V3_7::StreamConfiguration config3_7; + ::android::hardware::camera::device::V3_5::StreamConfiguration config3_5; + ::android::hardware::camera::device::V3_4::StreamConfiguration config3_4; + ::android::hardware::camera::device::V3_2::StreamConfiguration config3_2; + createStreamConfiguration(streams, StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE, + &config3_2, &config3_4, &config3_5, &config3_7); + if (session3_5 != nullptr) { + verifyStreamCombination(cameraDevice3_7, config3_7, cameraDevice3_5, config3_4, + /*expectedStatus*/ true, /*expectStreamCombQuery*/ false); + } + + if (session3_7 != nullptr) { + config3_7.streamConfigCounter = streamConfigCounter++; + ret = session3_7->configureStreams_3_7( + config3_7, + [streamId](Status s, device::V3_6::HalStreamConfiguration halConfig) { + ASSERT_EQ(Status::OK, s); + ASSERT_EQ(1u, halConfig.streams.size()); + ASSERT_EQ(halConfig.streams[0].v3_4.v3_3.v3_2.id, streamId); + }); + } else if (session3_5 != nullptr) { + config3_5.streamConfigCounter = streamConfigCounter++; + ret = session3_5->configureStreams_3_5(config3_5, + [streamId](Status s, device::V3_4::HalStreamConfiguration halConfig) { + ASSERT_EQ(Status::OK, s); + ASSERT_EQ(1u, halConfig.streams.size()); + ASSERT_EQ(halConfig.streams[0].v3_3.v3_2.id, streamId); + }); + } else if (session3_4 != nullptr) { + ret = session3_4->configureStreams_3_4(config3_4, + [streamId](Status s, device::V3_4::HalStreamConfiguration halConfig) { + ASSERT_EQ(Status::OK, s); + ASSERT_EQ(1u, halConfig.streams.size()); + ASSERT_EQ(halConfig.streams[0].v3_3.v3_2.id, streamId); + }); + } else if (session3_3 != nullptr) { + ret = session3_3->configureStreams_3_3(config3_2, + [streamId](Status s, device::V3_3::HalStreamConfiguration halConfig) { + ASSERT_EQ(Status::OK, s); + ASSERT_EQ(1u, halConfig.streams.size()); + ASSERT_EQ(halConfig.streams[0].v3_2.id, streamId); + }); + } else { + ret 
= session->configureStreams(config3_2, + [streamId](Status s, HalStreamConfiguration halConfig) { + ASSERT_EQ(Status::OK, s); + ASSERT_EQ(1u, halConfig.streams.size()); + ASSERT_EQ(halConfig.streams[0].id, streamId); + }); + } + ASSERT_TRUE(ret.isOk()); + + stream = {streamId++, + StreamType::OUTPUT, + static_cast(0), + static_cast(0), + static_cast(hfrStream.format), + GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER, + 0, + StreamRotation::ROTATION_0}; + streams[0] = stream; + createStreamConfiguration(streams, StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE, + &config3_2, &config3_4, &config3_5, &config3_7); + if (session3_7 != nullptr) { + config3_7.streamConfigCounter = streamConfigCounter++; + ret = session3_7->configureStreams_3_7( + config3_7, [](Status s, device::V3_6::HalStreamConfiguration) { + ASSERT_TRUE((Status::ILLEGAL_ARGUMENT == s) || + (Status::INTERNAL_ERROR == s)); + }); + } else if (session3_5 != nullptr) { + config3_5.streamConfigCounter = streamConfigCounter++; + ret = session3_5->configureStreams_3_5(config3_5, + [](Status s, device::V3_4::HalStreamConfiguration) { + ASSERT_TRUE((Status::ILLEGAL_ARGUMENT == s) || + (Status::INTERNAL_ERROR == s)); + }); + } else if (session3_4 != nullptr) { + ret = session3_4->configureStreams_3_4(config3_4, + [](Status s, device::V3_4::HalStreamConfiguration) { + ASSERT_TRUE((Status::ILLEGAL_ARGUMENT == s) || + (Status::INTERNAL_ERROR == s)); + }); + } else if (session3_3 != nullptr) { + ret = session3_3->configureStreams_3_3(config3_2, + [](Status s, device::V3_3::HalStreamConfiguration) { + ASSERT_TRUE((Status::ILLEGAL_ARGUMENT == s) || + (Status::INTERNAL_ERROR == s)); + }); + } else { + ret = session->configureStreams(config3_2, + [](Status s, HalStreamConfiguration) { + ASSERT_TRUE((Status::ILLEGAL_ARGUMENT == s) || + (Status::INTERNAL_ERROR == s)); + }); + } + ASSERT_TRUE(ret.isOk()); + + stream = {streamId++, + StreamType::OUTPUT, + static_cast(UINT32_MAX), + static_cast(UINT32_MAX), + static_cast(hfrStream.format), + GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER, + 0, + StreamRotation::ROTATION_0}; + streams[0] = stream; + createStreamConfiguration(streams, StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE, + &config3_2, &config3_4, &config3_5, &config3_7); + if (session3_7 != nullptr) { + config3_7.streamConfigCounter = streamConfigCounter++; + ret = session3_7->configureStreams_3_7( + config3_7, [](Status s, device::V3_6::HalStreamConfiguration) { + ASSERT_EQ(Status::ILLEGAL_ARGUMENT, s); + }); + } else if (session3_5 != nullptr) { + config3_5.streamConfigCounter = streamConfigCounter++; + ret = session3_5->configureStreams_3_5(config3_5, + [](Status s, device::V3_4::HalStreamConfiguration) { + ASSERT_EQ(Status::ILLEGAL_ARGUMENT, s); + }); + } else if (session3_4 != nullptr) { + ret = session3_4->configureStreams_3_4(config3_4, + [](Status s, device::V3_4::HalStreamConfiguration) { + ASSERT_EQ(Status::ILLEGAL_ARGUMENT, s); + }); + } else if (session3_3 != nullptr) { + ret = session3_3->configureStreams_3_3(config3_2, + [](Status s, device::V3_3::HalStreamConfiguration) { + ASSERT_EQ(Status::ILLEGAL_ARGUMENT, s); + }); + } else { + ret = session->configureStreams(config3_2, + [](Status s, HalStreamConfiguration) { + ASSERT_EQ(Status::ILLEGAL_ARGUMENT, s); + }); + } + ASSERT_TRUE(ret.isOk()); + + stream = {streamId++, + StreamType::OUTPUT, + static_cast(hfrStream.width), + static_cast(hfrStream.height), + static_cast(UINT32_MAX), + GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER, + 0, + StreamRotation::ROTATION_0}; + streams[0] = stream; + 
createStreamConfiguration(streams, StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE, + &config3_2, &config3_4, &config3_5, &config3_7); + if (session3_7 != nullptr) { + config3_7.streamConfigCounter = streamConfigCounter++; + ret = session3_7->configureStreams_3_7( + config3_7, [](Status s, device::V3_6::HalStreamConfiguration) { + ASSERT_EQ(Status::ILLEGAL_ARGUMENT, s); + }); + } else if (session3_5 != nullptr) { + config3_5.streamConfigCounter = streamConfigCounter++; + ret = session3_5->configureStreams_3_5(config3_5, + [](Status s, device::V3_4::HalStreamConfiguration) { + ASSERT_EQ(Status::ILLEGAL_ARGUMENT, s); + }); + } else if (session3_4 != nullptr) { + ret = session3_4->configureStreams_3_4(config3_4, + [](Status s, device::V3_4::HalStreamConfiguration) { + ASSERT_EQ(Status::ILLEGAL_ARGUMENT, s); + }); + } else if (session3_3 != nullptr) { + ret = session3_3->configureStreams_3_3(config3_2, + [](Status s, device::V3_3::HalStreamConfiguration) { + ASSERT_EQ(Status::ILLEGAL_ARGUMENT, s); + }); + } else { + ret = session->configureStreams(config3_2, + [](Status s, HalStreamConfiguration) { + ASSERT_EQ(Status::ILLEGAL_ARGUMENT, s); + }); + } + ASSERT_TRUE(ret.isOk()); + + free_camera_metadata(staticMeta); + ret = session->close(); + ASSERT_TRUE(ret.isOk()); + } +} + +// Verify that all supported video + snapshot stream combinations can +// be configured successfully. +TEST_P(CameraHidlTest, configureStreamsVideoStillOutputs) { + hidl_vec cameraDeviceNames = getCameraDeviceNames(mProvider); + std::vector outputBlobStreams; + std::vector outputVideoStreams; + AvailableStream videoThreshold = {kMaxVideoWidth, kMaxVideoHeight, + static_cast(PixelFormat::IMPLEMENTATION_DEFINED)}; + AvailableStream blobThreshold = {kMaxVideoWidth, kMaxVideoHeight, + static_cast(PixelFormat::BLOB)}; + + for (const auto& name : cameraDeviceNames) { + int deviceVersion = getCameraDeviceVersion(name, mProviderType); + if (deviceVersion == CAMERA_DEVICE_API_VERSION_1_0) { + continue; + } else if (deviceVersion <= 0) { + ALOGE("%s: Unsupported device version %d", __func__, deviceVersion); + ADD_FAILURE(); + return; + } + + camera_metadata_t* staticMeta; + Return ret; + sp session; + sp session3_3; + sp session3_4; + sp session3_5; + sp session3_6; + sp session3_7; + sp cameraDevice; + sp cameraDevice3_5; + sp cameraDevice3_7; + openEmptyDeviceSession(name, mProvider, &session /*out*/, &staticMeta /*out*/, + &cameraDevice /*out*/); + castSession(session, deviceVersion, &session3_3, &session3_4, &session3_5, + &session3_6, &session3_7); + castDevice(cameraDevice, deviceVersion, &cameraDevice3_5, &cameraDevice3_7); + + // Check if camera support depth only + if (isDepthOnly(staticMeta)) { + free_camera_metadata(staticMeta); + ret = session->close(); + ASSERT_TRUE(ret.isOk()); + continue; + } + + outputBlobStreams.clear(); + ASSERT_EQ(Status::OK, + getAvailableOutputStreams(staticMeta, outputBlobStreams, + &blobThreshold)); + ASSERT_NE(0u, outputBlobStreams.size()); + + outputVideoStreams.clear(); + ASSERT_EQ(Status::OK, + getAvailableOutputStreams(staticMeta, outputVideoStreams, + &videoThreshold)); + ASSERT_NE(0u, outputVideoStreams.size()); + + uint32_t jpegBufferSize = 0; + ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize)); + ASSERT_NE(0u, jpegBufferSize); + + int32_t streamId = 0; + uint32_t streamConfigCounter = 0; + for (auto& blobIter : outputBlobStreams) { + for (auto& videoIter : outputVideoStreams) { + V3_2::Stream videoStream = {streamId++, + StreamType::OUTPUT, + 
static_cast(videoIter.width), + static_cast(videoIter.height), + static_cast(videoIter.format), + GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER, + 0, + StreamRotation::ROTATION_0}; + V3_2::Stream blobStream = {streamId++, + StreamType::OUTPUT, + static_cast(blobIter.width), + static_cast(blobIter.height), + static_cast(blobIter.format), + GRALLOC1_CONSUMER_USAGE_CPU_READ, + static_cast(Dataspace::V0_JFIF), + StreamRotation::ROTATION_0}; + ::android::hardware::hidl_vec streams = {videoStream, blobStream}; + ::android::hardware::camera::device::V3_7::StreamConfiguration config3_7; + ::android::hardware::camera::device::V3_5::StreamConfiguration config3_5; + ::android::hardware::camera::device::V3_4::StreamConfiguration config3_4; + ::android::hardware::camera::device::V3_2::StreamConfiguration config3_2; + createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config3_2, + &config3_4, &config3_5, &config3_7, jpegBufferSize); + if (session3_5 != nullptr) { + verifyStreamCombination(cameraDevice3_7, config3_7, cameraDevice3_5, config3_4, + /*expectedStatus*/ true, + /*expectStreamCombQuery*/ false); + } + + if (session3_7 != nullptr) { + config3_7.streamConfigCounter = streamConfigCounter++; + ret = session3_7->configureStreams_3_7( + config3_7, + [](Status s, device::V3_6::HalStreamConfiguration halConfig) { + ASSERT_EQ(Status::OK, s); + ASSERT_EQ(2u, halConfig.streams.size()); + }); + } else if (session3_5 != nullptr) { + config3_5.streamConfigCounter = streamConfigCounter++; + ret = session3_5->configureStreams_3_5(config3_5, + [](Status s, device::V3_4::HalStreamConfiguration halConfig) { + ASSERT_EQ(Status::OK, s); + ASSERT_EQ(2u, halConfig.streams.size()); + }); + } else if (session3_4 != nullptr) { + ret = session3_4->configureStreams_3_4(config3_4, + [](Status s, device::V3_4::HalStreamConfiguration halConfig) { + ASSERT_EQ(Status::OK, s); + ASSERT_EQ(2u, halConfig.streams.size()); + }); + } else if (session3_3 != nullptr) { + ret = session3_3->configureStreams_3_3(config3_2, + [](Status s, device::V3_3::HalStreamConfiguration halConfig) { + ASSERT_EQ(Status::OK, s); + ASSERT_EQ(2u, halConfig.streams.size()); + }); + } else { + ret = session->configureStreams(config3_2, + [](Status s, HalStreamConfiguration halConfig) { + ASSERT_EQ(Status::OK, s); + ASSERT_EQ(2u, halConfig.streams.size()); + }); + } + ASSERT_TRUE(ret.isOk()); + } + } + + free_camera_metadata(staticMeta); + ret = session->close(); + ASSERT_TRUE(ret.isOk()); + } +} + +// Generate and verify a camera capture request +TEST_P(CameraHidlTest, processCaptureRequestPreview) { + processCaptureRequestInternal(GRALLOC1_CONSUMER_USAGE_HWCOMPOSER, RequestTemplate::PREVIEW, + false /*secureOnlyCameras*/); +} + +// Generate and verify a secure camera capture request +TEST_P(CameraHidlTest, processSecureCaptureRequest) { + processCaptureRequestInternal(GRALLOC1_PRODUCER_USAGE_PROTECTED, RequestTemplate::STILL_CAPTURE, + true /*secureOnlyCameras*/); +} + +void CameraHidlTest::processCaptureRequestInternal(uint64_t bufferUsage, + RequestTemplate reqTemplate, + bool useSecureOnlyCameras) { + hidl_vec cameraDeviceNames = getCameraDeviceNames(mProvider, useSecureOnlyCameras); + AvailableStream streamThreshold = {kMaxPreviewWidth, kMaxPreviewHeight, + static_cast(PixelFormat::IMPLEMENTATION_DEFINED)}; + uint64_t bufferId = 1; + uint32_t frameNumber = 1; + ::android::hardware::hidl_vec settings; + + for (const auto& name : cameraDeviceNames) { + int deviceVersion = getCameraDeviceVersion(name, mProviderType); + if (deviceVersion 
== CAMERA_DEVICE_API_VERSION_1_0) { + continue; + } else if (deviceVersion <= 0) { + ALOGE("%s: Unsupported device version %d", __func__, deviceVersion); + ADD_FAILURE(); + return; + } + + V3_2::Stream testStream; + HalStreamConfiguration halStreamConfig; + sp session; + sp cb; + bool supportsPartialResults = false; + bool useHalBufManager = false; + uint32_t partialResultCount = 0; + configureSingleStream(name, deviceVersion, mProvider, &streamThreshold, bufferUsage, + reqTemplate, &session /*out*/, &testStream /*out*/, + &halStreamConfig /*out*/, &supportsPartialResults /*out*/, + &partialResultCount /*out*/, &useHalBufManager /*out*/, &cb /*out*/); + + std::shared_ptr resultQueue; + auto resultQueueRet = + session->getCaptureResultMetadataQueue( + [&resultQueue](const auto& descriptor) { + resultQueue = std::make_shared( + descriptor); + if (!resultQueue->isValid() || + resultQueue->availableToWrite() <= 0) { + ALOGE("%s: HAL returns empty result metadata fmq," + " not use it", __func__); + resultQueue = nullptr; + // Don't use the queue onwards. + } + }); + ASSERT_TRUE(resultQueueRet.isOk()); + + InFlightRequest inflightReq = {1, false, supportsPartialResults, + partialResultCount, resultQueue}; + + Return ret; + ret = session->constructDefaultRequestSettings(reqTemplate, + [&](auto status, const auto& req) { + ASSERT_EQ(Status::OK, status); + settings = req; + }); + ASSERT_TRUE(ret.isOk()); + overrideRotateAndCrop(&settings); + + hidl_handle buffer_handle; + StreamBuffer outputBuffer; + if (useHalBufManager) { + outputBuffer = {halStreamConfig.streams[0].id, + /*bufferId*/ 0, + buffer_handle, + BufferStatus::OK, + nullptr, + nullptr}; + } else { + allocateGraphicBuffer(testStream.width, testStream.height, + /* We don't look at halStreamConfig.streams[0].consumerUsage + * since that is 0 for output streams + */ + android_convertGralloc1To0Usage( + halStreamConfig.streams[0].producerUsage, bufferUsage), + halStreamConfig.streams[0].overrideFormat, &buffer_handle); + outputBuffer = {halStreamConfig.streams[0].id, + bufferId, + buffer_handle, + BufferStatus::OK, + nullptr, + nullptr}; + } + ::android::hardware::hidl_vec outputBuffers = {outputBuffer}; + StreamBuffer emptyInputBuffer = {-1, 0, nullptr, BufferStatus::ERROR, nullptr, + nullptr}; + CaptureRequest request = {frameNumber, 0 /* fmqSettingsSize */, settings, + emptyInputBuffer, outputBuffers}; + + { + std::unique_lock l(mLock); + mInflightMap.clear(); + mInflightMap.add(frameNumber, &inflightReq); + } + + Status status = Status::INTERNAL_ERROR; + uint32_t numRequestProcessed = 0; + hidl_vec cachesToRemove; + Return returnStatus = session->processCaptureRequest( + {request}, cachesToRemove, [&status, &numRequestProcessed](auto s, + uint32_t n) { + status = s; + numRequestProcessed = n; + }); + ASSERT_TRUE(returnStatus.isOk()); + ASSERT_EQ(Status::OK, status); + ASSERT_EQ(numRequestProcessed, 1u); + + { + std::unique_lock l(mLock); + while (!inflightReq.errorCodeValid && + ((0 < inflightReq.numBuffersLeft) || + (!inflightReq.haveResultMetadata))) { + auto timeout = std::chrono::system_clock::now() + + std::chrono::seconds(kStreamBufferTimeoutSec); + ASSERT_NE(std::cv_status::timeout, + mResultCondition.wait_until(l, timeout)); + } + + ASSERT_FALSE(inflightReq.errorCodeValid); + ASSERT_NE(inflightReq.resultOutputBuffers.size(), 0u); + ASSERT_EQ(testStream.id, inflightReq.resultOutputBuffers[0].streamId); + + request.frameNumber++; + // Empty settings should be supported after the first call + // for repeating requests. 
+ request.settings.setToExternal(nullptr, 0, true); + // The buffer has been registered to HAL by bufferId, so per + // API contract we should send a null handle for this buffer + request.outputBuffers[0].buffer = nullptr; + mInflightMap.clear(); + inflightReq = {1, false, supportsPartialResults, partialResultCount, + resultQueue}; + mInflightMap.add(request.frameNumber, &inflightReq); + } + + returnStatus = session->processCaptureRequest( + {request}, cachesToRemove, [&status, &numRequestProcessed](auto s, + uint32_t n) { + status = s; + numRequestProcessed = n; + }); + ASSERT_TRUE(returnStatus.isOk()); + ASSERT_EQ(Status::OK, status); + ASSERT_EQ(numRequestProcessed, 1u); + + { + std::unique_lock l(mLock); + while (!inflightReq.errorCodeValid && + ((0 < inflightReq.numBuffersLeft) || + (!inflightReq.haveResultMetadata))) { + auto timeout = std::chrono::system_clock::now() + + std::chrono::seconds(kStreamBufferTimeoutSec); + ASSERT_NE(std::cv_status::timeout, + mResultCondition.wait_until(l, timeout)); + } + + ASSERT_FALSE(inflightReq.errorCodeValid); + ASSERT_NE(inflightReq.resultOutputBuffers.size(), 0u); + ASSERT_EQ(testStream.id, inflightReq.resultOutputBuffers[0].streamId); + } + + if (useHalBufManager) { + verifyBuffersReturned(session, deviceVersion, testStream.id, cb); + } + + ret = session->close(); + ASSERT_TRUE(ret.isOk()); + } +} + +// Generate and verify a multi-camera capture request +TEST_P(CameraHidlTest, processMultiCaptureRequestPreview) { + hidl_vec cameraDeviceNames = getCameraDeviceNames(mProvider); + AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight, + static_cast(PixelFormat::YCBCR_420_888)}; + uint64_t bufferId = 1; + uint32_t frameNumber = 1; + ::android::hardware::hidl_vec settings; + ::android::hardware::hidl_vec emptySettings; + hidl_string invalidPhysicalId = "-1"; + + for (const auto& name : cameraDeviceNames) { + int deviceVersion = getCameraDeviceVersion(name, mProviderType); + if (deviceVersion < CAMERA_DEVICE_API_VERSION_3_5) { + continue; + } + std::string version, deviceId; + ASSERT_TRUE(::matchDeviceName(name, mProviderType, &version, &deviceId)); + camera_metadata_t* staticMeta; + Return ret; + sp session; + openEmptyDeviceSession(name, mProvider, &session /*out*/, &staticMeta /*out*/); + + Status rc = isLogicalMultiCamera(staticMeta); + if (Status::METHOD_NOT_SUPPORTED == rc) { + free_camera_metadata(staticMeta); + ret = session->close(); + ASSERT_TRUE(ret.isOk()); + continue; + } + std::unordered_set physicalIds; + rc = getPhysicalCameraIds(staticMeta, &physicalIds); + ASSERT_TRUE(Status::OK == rc); + ASSERT_TRUE(physicalIds.size() > 1); + + std::unordered_set physicalRequestKeyIDs; + rc = getSupportedKeys(staticMeta, + ANDROID_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS, &physicalRequestKeyIDs); + ASSERT_TRUE(Status::OK == rc); + if (physicalRequestKeyIDs.empty()) { + free_camera_metadata(staticMeta); + ret = session->close(); + ASSERT_TRUE(ret.isOk()); + // The logical camera doesn't support any individual physical requests. + continue; + } + + android::hardware::camera::common::V1_0::helper::CameraMetadata defaultPreviewSettings; + android::hardware::camera::common::V1_0::helper::CameraMetadata filteredSettings; + constructFilteredSettings(session, physicalRequestKeyIDs, RequestTemplate::PREVIEW, + &defaultPreviewSettings, &filteredSettings); + if (filteredSettings.isEmpty()) { + // No physical device settings in default request. 
+ free_camera_metadata(staticMeta); + ret = session->close(); + ASSERT_TRUE(ret.isOk()); + continue; + } + + const camera_metadata_t *settingsBuffer = defaultPreviewSettings.getAndLock(); + settings.setToExternal( + reinterpret_cast (const_cast (settingsBuffer)), + get_camera_metadata_size(settingsBuffer)); + overrideRotateAndCrop(&settings); + + free_camera_metadata(staticMeta); + ret = session->close(); + ASSERT_TRUE(ret.isOk()); + + // Leave only 2 physical devices in the id set. + auto it = physicalIds.begin(); + std::string physicalDeviceId = *it; it++; + physicalIds.erase(++it, physicalIds.end()); + ASSERT_EQ(physicalIds.size(), 2u); + + V3_4::HalStreamConfiguration halStreamConfig; + bool supportsPartialResults = false; + bool useHalBufManager = false; + uint32_t partialResultCount = 0; + V3_2::Stream previewStream; + sp session3_4; + sp session3_5; + sp cb; + configurePreviewStreams3_4(name, deviceVersion, mProvider, &previewThreshold, physicalIds, + &session3_4, &session3_5, &previewStream, &halStreamConfig /*out*/, + &supportsPartialResults /*out*/, &partialResultCount /*out*/, + &useHalBufManager /*out*/, &cb /*out*/, 0 /*streamConfigCounter*/, + true /*allowUnsupport*/); + if (session3_5 == nullptr) { + ret = session3_4->close(); + ASSERT_TRUE(ret.isOk()); + continue; + } + + std::shared_ptr resultQueue; + auto resultQueueRet = + session3_4->getCaptureResultMetadataQueue( + [&resultQueue](const auto& descriptor) { + resultQueue = std::make_shared( + descriptor); + if (!resultQueue->isValid() || + resultQueue->availableToWrite() <= 0) { + ALOGE("%s: HAL returns empty result metadata fmq," + " not use it", __func__); + resultQueue = nullptr; + // Don't use the queue onwards. + } + }); + ASSERT_TRUE(resultQueueRet.isOk()); + + InFlightRequest inflightReq = {static_cast (halStreamConfig.streams.size()), false, + supportsPartialResults, partialResultCount, physicalIds, resultQueue}; + + std::vector graphicBuffers; + graphicBuffers.reserve(halStreamConfig.streams.size()); + ::android::hardware::hidl_vec outputBuffers; + outputBuffers.resize(halStreamConfig.streams.size()); + size_t k = 0; + for (const auto& halStream : halStreamConfig.streams) { + hidl_handle buffer_handle; + if (useHalBufManager) { + outputBuffers[k] = {halStream.v3_3.v3_2.id, /*bufferId*/0, buffer_handle, + BufferStatus::OK, nullptr, nullptr}; + } else { + allocateGraphicBuffer(previewStream.width, previewStream.height, + android_convertGralloc1To0Usage(halStream.v3_3.v3_2.producerUsage, + halStream.v3_3.v3_2.consumerUsage), + halStream.v3_3.v3_2.overrideFormat, &buffer_handle); + graphicBuffers.push_back(buffer_handle); + outputBuffers[k] = {halStream.v3_3.v3_2.id, bufferId, buffer_handle, + BufferStatus::OK, nullptr, nullptr}; + bufferId++; + } + k++; + } + hidl_vec camSettings(1); + const camera_metadata_t *filteredSettingsBuffer = filteredSettings.getAndLock(); + camSettings[0].settings.setToExternal( + reinterpret_cast (const_cast ( + filteredSettingsBuffer)), + get_camera_metadata_size(filteredSettingsBuffer)); + overrideRotateAndCrop(&camSettings[0].settings); + camSettings[0].fmqSettingsSize = 0; + camSettings[0].physicalCameraId = physicalDeviceId; + + StreamBuffer emptyInputBuffer = {-1, 0, nullptr, BufferStatus::ERROR, nullptr, nullptr}; + V3_4::CaptureRequest request = {{frameNumber, 0 /* fmqSettingsSize */, settings, + emptyInputBuffer, outputBuffers}, camSettings}; + + { + std::unique_lock l(mLock); + mInflightMap.clear(); + mInflightMap.add(frameNumber, &inflightReq); + } + + Status stat = 
Status::INTERNAL_ERROR; + uint32_t numRequestProcessed = 0; + hidl_vec cachesToRemove; + Return returnStatus = session3_4->processCaptureRequest_3_4( + {request}, cachesToRemove, [&stat, &numRequestProcessed](auto s, uint32_t n) { + stat = s; + numRequestProcessed = n; + }); + ASSERT_TRUE(returnStatus.isOk()); + ASSERT_EQ(Status::OK, stat); + ASSERT_EQ(numRequestProcessed, 1u); + + { + std::unique_lock l(mLock); + while (!inflightReq.errorCodeValid && + ((0 < inflightReq.numBuffersLeft) || + (!inflightReq.haveResultMetadata))) { + auto timeout = std::chrono::system_clock::now() + + std::chrono::seconds(kStreamBufferTimeoutSec); + ASSERT_NE(std::cv_status::timeout, + mResultCondition.wait_until(l, timeout)); + } + + ASSERT_FALSE(inflightReq.errorCodeValid); + ASSERT_NE(inflightReq.resultOutputBuffers.size(), 0u); + + request.v3_2.frameNumber++; + // Empty settings should be supported after the first call + // for repeating requests. + request.v3_2.settings.setToExternal(nullptr, 0, true); + request.physicalCameraSettings[0].settings.setToExternal(nullptr, 0, true); + // The buffer has been registered to HAL by bufferId, so per + // API contract we should send a null handle for this buffer + request.v3_2.outputBuffers[0].buffer = nullptr; + mInflightMap.clear(); + inflightReq = {static_cast (physicalIds.size()), false, + supportsPartialResults, partialResultCount, physicalIds, resultQueue}; + mInflightMap.add(request.v3_2.frameNumber, &inflightReq); + } + + returnStatus = session3_4->processCaptureRequest_3_4( + {request}, cachesToRemove, [&stat, &numRequestProcessed](auto s, uint32_t n) { + stat = s; + numRequestProcessed = n; + }); + ASSERT_TRUE(returnStatus.isOk()); + ASSERT_EQ(Status::OK, stat); + ASSERT_EQ(numRequestProcessed, 1u); + + { + std::unique_lock l(mLock); + while (!inflightReq.errorCodeValid && + ((0 < inflightReq.numBuffersLeft) || + (!inflightReq.haveResultMetadata))) { + auto timeout = std::chrono::system_clock::now() + + std::chrono::seconds(kStreamBufferTimeoutSec); + ASSERT_NE(std::cv_status::timeout, + mResultCondition.wait_until(l, timeout)); + } + + ASSERT_FALSE(inflightReq.errorCodeValid); + ASSERT_NE(inflightReq.resultOutputBuffers.size(), 0u); + } + + // Invalid physical camera id should fail process requests + frameNumber++; + camSettings[0].physicalCameraId = invalidPhysicalId; + camSettings[0].settings = settings; + request = {{frameNumber, 0 /* fmqSettingsSize */, settings, + emptyInputBuffer, outputBuffers}, camSettings}; + returnStatus = session3_4->processCaptureRequest_3_4( + {request}, cachesToRemove, [&stat, &numRequestProcessed](auto s, uint32_t n) { + stat = s; + numRequestProcessed = n; + }); + ASSERT_TRUE(returnStatus.isOk()); + ASSERT_EQ(Status::ILLEGAL_ARGUMENT, stat); + + defaultPreviewSettings.unlock(settingsBuffer); + filteredSettings.unlock(filteredSettingsBuffer); + + if (useHalBufManager) { + hidl_vec streamIds(halStreamConfig.streams.size()); + for (size_t i = 0; i < streamIds.size(); i++) { + streamIds[i] = halStreamConfig.streams[i].v3_3.v3_2.id; + } + verifyBuffersReturned(session3_4, streamIds, cb); + } + + ret = session3_4->close(); + ASSERT_TRUE(ret.isOk()); + } +} + +// Generate and verify an ultra high resolution capture request +TEST_P(CameraHidlTest, processUltraHighResolutionRequest) { + hidl_vec cameraDeviceNames = getCameraDeviceNames(mProvider); + uint64_t bufferId = 1; + uint32_t frameNumber = 1; + ::android::hardware::hidl_vec settings; + + for (const auto& name : cameraDeviceNames) { + int deviceVersion = 
getCameraDeviceVersion(name, mProviderType); + if (deviceVersion < CAMERA_DEVICE_API_VERSION_3_7) { + continue; + } + std::string version, deviceId; + ASSERT_TRUE(::matchDeviceName(name, mProviderType, &version, &deviceId)); + camera_metadata_t* staticMeta; + Return ret; + sp session; + openEmptyDeviceSession(name, mProvider, &session, &staticMeta); + if (!isUltraHighResolution(staticMeta)) { + free_camera_metadata(staticMeta); + ret = session->close(); + ASSERT_TRUE(ret.isOk()); + continue; + } + android::hardware::camera::common::V1_0::helper::CameraMetadata defaultSettings; + ret = session->constructDefaultRequestSettings( + RequestTemplate::STILL_CAPTURE, + [&defaultSettings](auto status, const auto& req) mutable { + ASSERT_EQ(Status::OK, status); + + const camera_metadata_t* metadata = + reinterpret_cast(req.data()); + size_t expectedSize = req.size(); + int result = validate_camera_metadata_structure(metadata, &expectedSize); + ASSERT_TRUE((result == 0) || (result == CAMERA_METADATA_VALIDATION_SHIFTED)); + + size_t entryCount = get_camera_metadata_entry_count(metadata); + ASSERT_GT(entryCount, 0u); + defaultSettings = metadata; + }); + ASSERT_TRUE(ret.isOk()); + uint8_t sensorPixelMode = + static_cast(ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION); + ASSERT_EQ(::android::OK, + defaultSettings.update(ANDROID_SENSOR_PIXEL_MODE, &sensorPixelMode, 1)); + + const camera_metadata_t* settingsBuffer = defaultSettings.getAndLock(); + settings.setToExternal( + reinterpret_cast(const_cast(settingsBuffer)), + get_camera_metadata_size(settingsBuffer)); + overrideRotateAndCrop(&settings); + + free_camera_metadata(staticMeta); + ret = session->close(); + ASSERT_TRUE(ret.isOk()); + V3_6::HalStreamConfiguration halStreamConfig; + bool supportsPartialResults = false; + bool useHalBufManager = false; + uint32_t partialResultCount = 0; + V3_2::Stream previewStream; + sp session3_7; + sp cb; + std::list pixelFormats = {PixelFormat::YCBCR_420_888, PixelFormat::RAW16}; + for (PixelFormat format : pixelFormats) { + configureStreams3_7(name, deviceVersion, mProvider, format, &session3_7, &previewStream, + &halStreamConfig, &supportsPartialResults, &partialResultCount, + &useHalBufManager, &cb, 0, /*maxResolution*/ true); + ASSERT_NE(session3_7, nullptr); + + std::shared_ptr resultQueue; + auto resultQueueRet = session3_7->getCaptureResultMetadataQueue( + [&resultQueue](const auto& descriptor) { + resultQueue = std::make_shared(descriptor); + if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) { + ALOGE("%s: HAL returns empty result metadata fmq," + " not use it", + __func__); + resultQueue = nullptr; + // Don't use the queue onwards. 
+ } + }); + ASSERT_TRUE(resultQueueRet.isOk()); + + std::vector graphicBuffers; + graphicBuffers.reserve(halStreamConfig.streams.size()); + ::android::hardware::hidl_vec outputBuffers; + outputBuffers.resize(halStreamConfig.streams.size()); + InFlightRequest inflightReq = {static_cast(halStreamConfig.streams.size()), + false, + supportsPartialResults, + partialResultCount, + std::unordered_set(), + resultQueue}; + + size_t k = 0; + for (const auto& halStream : halStreamConfig.streams) { + hidl_handle buffer_handle; + if (useHalBufManager) { + outputBuffers[k] = {halStream.v3_4.v3_3.v3_2.id, + 0, + buffer_handle, + BufferStatus::OK, + nullptr, + nullptr}; + } else { + allocateGraphicBuffer( + previewStream.width, previewStream.height, + android_convertGralloc1To0Usage(halStream.v3_4.v3_3.v3_2.producerUsage, + halStream.v3_4.v3_3.v3_2.consumerUsage), + halStream.v3_4.v3_3.v3_2.overrideFormat, &buffer_handle); + graphicBuffers.push_back(buffer_handle); + outputBuffers[k] = {halStream.v3_4.v3_3.v3_2.id, + bufferId, + buffer_handle, + BufferStatus::OK, + nullptr, + nullptr}; + bufferId++; + } + k++; + } + + StreamBuffer emptyInputBuffer = {-1, 0, nullptr, BufferStatus::ERROR, nullptr, nullptr}; + V3_4::CaptureRequest request3_4; + request3_4.v3_2.frameNumber = frameNumber; + request3_4.v3_2.fmqSettingsSize = 0; + request3_4.v3_2.settings = settings; + request3_4.v3_2.inputBuffer = emptyInputBuffer; + request3_4.v3_2.outputBuffers = outputBuffers; + V3_7::CaptureRequest request3_7; + request3_7.v3_4 = request3_4; + request3_7.inputWidth = 0; + request3_7.inputHeight = 0; + + { + std::unique_lock l(mLock); + mInflightMap.clear(); + mInflightMap.add(frameNumber, &inflightReq); + } + + Status stat = Status::INTERNAL_ERROR; + uint32_t numRequestProcessed = 0; + hidl_vec cachesToRemove; + Return returnStatus = session3_7->processCaptureRequest_3_7( + {request3_7}, cachesToRemove, + [&stat, &numRequestProcessed](auto s, uint32_t n) { + stat = s; + numRequestProcessed = n; + }); + ASSERT_TRUE(returnStatus.isOk()); + ASSERT_EQ(Status::OK, stat); + ASSERT_EQ(numRequestProcessed, 1u); + + { + std::unique_lock l(mLock); + while (!inflightReq.errorCodeValid && + ((0 < inflightReq.numBuffersLeft) || (!inflightReq.haveResultMetadata))) { + auto timeout = std::chrono::system_clock::now() + + std::chrono::seconds(kStreamBufferTimeoutSec); + ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout)); + } + + ASSERT_FALSE(inflightReq.errorCodeValid); + ASSERT_NE(inflightReq.resultOutputBuffers.size(), 0u); + } + if (useHalBufManager) { + hidl_vec streamIds(halStreamConfig.streams.size()); + for (size_t i = 0; i < streamIds.size(); i++) { + streamIds[i] = halStreamConfig.streams[i].v3_4.v3_3.v3_2.id; + } + verifyBuffersReturned(session3_7, streamIds, cb); + } + + ret = session3_7->close(); + ASSERT_TRUE(ret.isOk()); + } + } +} + +// Generate and verify a burst containing alternating sensor sensitivity values +TEST_P(CameraHidlTest, processCaptureRequestBurstISO) { + hidl_vec cameraDeviceNames = getCameraDeviceNames(mProvider); + AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight, + static_cast(PixelFormat::IMPLEMENTATION_DEFINED)}; + uint64_t bufferId = 1; + uint32_t frameNumber = 1; + float isoTol = .03f; + ::android::hardware::hidl_vec settings; + + for (const auto& name : cameraDeviceNames) { + int deviceVersion = getCameraDeviceVersion(name, mProviderType); + if (deviceVersion == CAMERA_DEVICE_API_VERSION_1_0) { + continue; + } else if (deviceVersion <= 0) { + ALOGE("%s: 
Unsupported device version %d", __func__, deviceVersion); + ADD_FAILURE(); + return; + } + camera_metadata_t* staticMetaBuffer; + Return ret; + sp session; + openEmptyDeviceSession(name, mProvider, &session /*out*/, &staticMetaBuffer /*out*/); + ::android::hardware::camera::common::V1_0::helper::CameraMetadata staticMeta( + staticMetaBuffer); + + camera_metadata_entry_t hwLevel = staticMeta.find(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL); + ASSERT_TRUE(0 < hwLevel.count); + if (ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED == hwLevel.data.u8[0] || + ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_EXTERNAL == hwLevel.data.u8[0]) { + //Limited/External devices can skip this test + ret = session->close(); + ASSERT_TRUE(ret.isOk()); + continue; + } + + camera_metadata_entry_t isoRange = staticMeta.find(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE); + ASSERT_EQ(isoRange.count, 2u); + + ret = session->close(); + ASSERT_TRUE(ret.isOk()); + + bool supportsPartialResults = false; + bool useHalBufManager = false; + uint32_t partialResultCount = 0; + V3_2::Stream previewStream; + HalStreamConfiguration halStreamConfig; + sp cb; + configurePreviewStream(name, deviceVersion, mProvider, &previewThreshold, + &session /*out*/, &previewStream /*out*/, &halStreamConfig /*out*/, + &supportsPartialResults /*out*/, &partialResultCount /*out*/, + &useHalBufManager /*out*/, &cb /*out*/); + std::shared_ptr resultQueue; + + auto resultQueueRet = session->getCaptureResultMetadataQueue( + [&resultQueue](const auto& descriptor) { + resultQueue = std::make_shared(descriptor); + if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) { + ALOGE("%s: HAL returns empty result metadata fmq," + " not use it", __func__); + resultQueue = nullptr; + // Don't use the queue onwards. + } + }); + ASSERT_TRUE(resultQueueRet.isOk()); + ASSERT_NE(nullptr, resultQueue); + + ret = session->constructDefaultRequestSettings(RequestTemplate::PREVIEW, + [&](auto status, const auto& req) { + ASSERT_EQ(Status::OK, status); + settings = req; }); + ASSERT_TRUE(ret.isOk()); + + ::android::hardware::camera::common::V1_0::helper::CameraMetadata requestMeta; + StreamBuffer emptyInputBuffer = {-1, 0, nullptr, BufferStatus::ERROR, nullptr, nullptr}; + hidl_handle buffers[kBurstFrameCount]; + StreamBuffer outputBuffers[kBurstFrameCount]; + CaptureRequest requests[kBurstFrameCount]; + InFlightRequest inflightReqs[kBurstFrameCount]; + int32_t isoValues[kBurstFrameCount]; + hidl_vec requestSettings[kBurstFrameCount]; + for (uint32_t i = 0; i < kBurstFrameCount; i++) { + std::unique_lock l(mLock); + + isoValues[i] = ((i % 2) == 0) ? 
isoRange.data.i32[0] : isoRange.data.i32[1]; + if (useHalBufManager) { + outputBuffers[i] = {halStreamConfig.streams[0].id, /*bufferId*/0, + nullptr, BufferStatus::OK, nullptr, nullptr}; + } else { + allocateGraphicBuffer(previewStream.width, previewStream.height, + android_convertGralloc1To0Usage(halStreamConfig.streams[0].producerUsage, + halStreamConfig.streams[0].consumerUsage), + halStreamConfig.streams[0].overrideFormat, &buffers[i]); + outputBuffers[i] = {halStreamConfig.streams[0].id, bufferId + i, + buffers[i], BufferStatus::OK, nullptr, nullptr}; + } + + requestMeta.append(reinterpret_cast (settings.data())); + + // Disable all 3A routines + uint8_t mode = static_cast(ANDROID_CONTROL_MODE_OFF); + ASSERT_EQ(::android::OK, requestMeta.update(ANDROID_CONTROL_MODE, &mode, 1)); + ASSERT_EQ(::android::OK, requestMeta.update(ANDROID_SENSOR_SENSITIVITY, &isoValues[i], + 1)); + camera_metadata_t *metaBuffer = requestMeta.release(); + requestSettings[i].setToExternal(reinterpret_cast (metaBuffer), + get_camera_metadata_size(metaBuffer), true); + overrideRotateAndCrop(&requestSettings[i]); + + requests[i] = {frameNumber + i, 0 /* fmqSettingsSize */, requestSettings[i], + emptyInputBuffer, {outputBuffers[i]}}; + + inflightReqs[i] = {1, false, supportsPartialResults, partialResultCount, resultQueue}; + mInflightMap.add(frameNumber + i, &inflightReqs[i]); + } + + Status status = Status::INTERNAL_ERROR; + uint32_t numRequestProcessed = 0; + hidl_vec cachesToRemove; + hidl_vec burstRequest; + burstRequest.setToExternal(requests, kBurstFrameCount); + Return returnStatus = session->processCaptureRequest(burstRequest, cachesToRemove, + [&status, &numRequestProcessed] (auto s, uint32_t n) { + status = s; + numRequestProcessed = n; + }); + ASSERT_TRUE(returnStatus.isOk()); + ASSERT_EQ(Status::OK, status); + ASSERT_EQ(numRequestProcessed, kBurstFrameCount); + + for (size_t i = 0; i < kBurstFrameCount; i++) { + std::unique_lock l(mLock); + while (!inflightReqs[i].errorCodeValid && ((0 < inflightReqs[i].numBuffersLeft) || + (!inflightReqs[i].haveResultMetadata))) { + auto timeout = std::chrono::system_clock::now() + + std::chrono::seconds(kStreamBufferTimeoutSec); + ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout)); + } + + ASSERT_FALSE(inflightReqs[i].errorCodeValid); + ASSERT_NE(inflightReqs[i].resultOutputBuffers.size(), 0u); + ASSERT_EQ(previewStream.id, inflightReqs[i].resultOutputBuffers[0].streamId); + ASSERT_FALSE(inflightReqs[i].collectedResult.isEmpty()); + ASSERT_TRUE(inflightReqs[i].collectedResult.exists(ANDROID_SENSOR_SENSITIVITY)); + camera_metadata_entry_t isoResult = inflightReqs[i].collectedResult.find( + ANDROID_SENSOR_SENSITIVITY); + ASSERT_TRUE(std::abs(isoResult.data.i32[0] - isoValues[i]) <= + std::round(isoValues[i]*isoTol)); + } + + if (useHalBufManager) { + verifyBuffersReturned(session, deviceVersion, previewStream.id, cb); + } + ret = session->close(); + ASSERT_TRUE(ret.isOk()); + } +} + +// Test whether an incorrect capture request with missing settings will +// be reported correctly. 
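+// The settings buffer below is left uninitialized on purpose, so the HAL
+// must fail the request with ILLEGAL_ARGUMENT and report zero processed
+// requests.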
+TEST_P(CameraHidlTest, processCaptureRequestInvalidSinglePreview) { + hidl_vec cameraDeviceNames = getCameraDeviceNames(mProvider); + std::vector outputPreviewStreams; + AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight, + static_cast(PixelFormat::IMPLEMENTATION_DEFINED)}; + uint64_t bufferId = 1; + uint32_t frameNumber = 1; + ::android::hardware::hidl_vec settings; + + for (const auto& name : cameraDeviceNames) { + int deviceVersion = getCameraDeviceVersion(name, mProviderType); + if (deviceVersion == CAMERA_DEVICE_API_VERSION_1_0) { + continue; + } else if (deviceVersion <= 0) { + ALOGE("%s: Unsupported device version %d", __func__, deviceVersion); + ADD_FAILURE(); + return; + } + + V3_2::Stream previewStream; + HalStreamConfiguration halStreamConfig; + sp session; + sp cb; + bool supportsPartialResults = false; + bool useHalBufManager = false; + uint32_t partialResultCount = 0; + configurePreviewStream(name, deviceVersion, mProvider, &previewThreshold, &session /*out*/, + &previewStream /*out*/, &halStreamConfig /*out*/, + &supportsPartialResults /*out*/, + &partialResultCount /*out*/, &useHalBufManager /*out*/, &cb /*out*/); + + hidl_handle buffer_handle; + + if (useHalBufManager) { + bufferId = 0; + } else { + allocateGraphicBuffer(previewStream.width, previewStream.height, + android_convertGralloc1To0Usage(halStreamConfig.streams[0].producerUsage, + halStreamConfig.streams[0].consumerUsage), + halStreamConfig.streams[0].overrideFormat, &buffer_handle); + } + + StreamBuffer outputBuffer = {halStreamConfig.streams[0].id, + bufferId, + buffer_handle, + BufferStatus::OK, + nullptr, + nullptr}; + ::android::hardware::hidl_vec outputBuffers = {outputBuffer}; + StreamBuffer emptyInputBuffer = {-1, 0, nullptr, BufferStatus::ERROR, nullptr, + nullptr}; + CaptureRequest request = {frameNumber, 0 /* fmqSettingsSize */, settings, + emptyInputBuffer, outputBuffers}; + + // Settings were not correctly initialized, we should fail here + Status status = Status::OK; + uint32_t numRequestProcessed = 0; + hidl_vec cachesToRemove; + Return ret = session->processCaptureRequest( + {request}, cachesToRemove, [&status, &numRequestProcessed](auto s, + uint32_t n) { + status = s; + numRequestProcessed = n; + }); + ASSERT_TRUE(ret.isOk()); + ASSERT_EQ(Status::ILLEGAL_ARGUMENT, status); + ASSERT_EQ(numRequestProcessed, 0u); + + ret = session->close(); + ASSERT_TRUE(ret.isOk()); + } +} + +// Verify camera offline session behavior +TEST_P(CameraHidlTest, switchToOffline) { + hidl_vec cameraDeviceNames = getCameraDeviceNames(mProvider); + AvailableStream threshold = {kMaxStillWidth, kMaxStillHeight, + static_cast(PixelFormat::BLOB)}; + uint64_t bufferId = 1; + uint32_t frameNumber = 1; + ::android::hardware::hidl_vec settings; + + for (const auto& name : cameraDeviceNames) { + int deviceVersion = getCameraDeviceVersion(name, mProviderType); + if (deviceVersion == CAMERA_DEVICE_API_VERSION_1_0) { + continue; + } else if (deviceVersion <= 0) { + ALOGE("%s: Unsupported device version %d", __func__, deviceVersion); + ADD_FAILURE(); + return; + } + + camera_metadata_t* staticMetaBuffer; + { + Return ret; + sp session; + openEmptyDeviceSession(name, mProvider, &session /*out*/, &staticMetaBuffer /*out*/); + ::android::hardware::camera::common::V1_0::helper::CameraMetadata staticMeta( + staticMetaBuffer); + + if (isOfflineSessionSupported(staticMetaBuffer) != Status::OK) { + ret = session->close(); + ASSERT_TRUE(ret.isOk()); + continue; + } + ret = session->close(); + ASSERT_TRUE(ret.isOk()); + } + 
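+        // Offline switching is only exercised when the static metadata
+        // advertises support for it. A burst of still-capture requests is
+        // queued first so that switchToOffline() has in-flight work to
+        // hand over to the offline session.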
+ bool supportsPartialResults = false; + uint32_t partialResultCount = 0; + V3_2::Stream stream; + V3_6::HalStreamConfiguration halStreamConfig; + sp session; + sp cb; + uint32_t jpegBufferSize; + bool useHalBufManager; + configureOfflineStillStream(name, deviceVersion, mProvider, &threshold, + &session /*out*/, &stream /*out*/, &halStreamConfig /*out*/, + &supportsPartialResults /*out*/, &partialResultCount /*out*/, &cb /*out*/, + &jpegBufferSize /*out*/, &useHalBufManager /*out*/); + + auto ret = session->constructDefaultRequestSettings(RequestTemplate::STILL_CAPTURE, + [&](auto status, const auto& req) { + ASSERT_EQ(Status::OK, status); + settings = req; }); + ASSERT_TRUE(ret.isOk()); + + std::shared_ptr resultQueue; + auto resultQueueRet = + session->getCaptureResultMetadataQueue( + [&resultQueue](const auto& descriptor) { + resultQueue = std::make_shared( + descriptor); + if (!resultQueue->isValid() || + resultQueue->availableToWrite() <= 0) { + ALOGE("%s: HAL returns empty result metadata fmq," + " not use it", __func__); + resultQueue = nullptr; + // Don't use the queue onwards. + } + }); + ASSERT_TRUE(resultQueueRet.isOk()); + + ::android::hardware::camera::common::V1_0::helper::CameraMetadata requestMeta; + StreamBuffer emptyInputBuffer = {-1, 0, nullptr, BufferStatus::ERROR, nullptr, nullptr}; + hidl_handle buffers[kBurstFrameCount]; + StreamBuffer outputBuffers[kBurstFrameCount]; + CaptureRequest requests[kBurstFrameCount]; + InFlightRequest inflightReqs[kBurstFrameCount]; + hidl_vec requestSettings[kBurstFrameCount]; + auto halStreamConfig3_2 = halStreamConfig.streams[0].v3_4.v3_3.v3_2; + for (uint32_t i = 0; i < kBurstFrameCount; i++) { + std::unique_lock l(mLock); + + if (useHalBufManager) { + outputBuffers[i] = {halStreamConfig3_2.id, /*bufferId*/ 0, + buffers[i], BufferStatus::OK, nullptr, nullptr}; + } else { + // jpeg buffer (w,h) = (blobLen, 1) + allocateGraphicBuffer(jpegBufferSize, /*height*/1, + android_convertGralloc1To0Usage(halStreamConfig3_2.producerUsage, + halStreamConfig3_2.consumerUsage), + halStreamConfig3_2.overrideFormat, &buffers[i]); + outputBuffers[i] = {halStreamConfig3_2.id, bufferId + i, + buffers[i], BufferStatus::OK, nullptr, nullptr}; + } + + requestMeta.clear(); + requestMeta.append(reinterpret_cast (settings.data())); + + camera_metadata_t *metaBuffer = requestMeta.release(); + requestSettings[i].setToExternal(reinterpret_cast (metaBuffer), + get_camera_metadata_size(metaBuffer), true); + overrideRotateAndCrop(&requestSettings[i]); + + requests[i] = {frameNumber + i, 0 /* fmqSettingsSize */, requestSettings[i], + emptyInputBuffer, {outputBuffers[i]}}; + + inflightReqs[i] = {1, false, supportsPartialResults, partialResultCount, + resultQueue}; + mInflightMap.add(frameNumber + i, &inflightReqs[i]); + } + + Status status = Status::INTERNAL_ERROR; + uint32_t numRequestProcessed = 0; + hidl_vec cachesToRemove; + hidl_vec burstRequest; + burstRequest.setToExternal(requests, kBurstFrameCount); + Return returnStatus = session->processCaptureRequest(burstRequest, cachesToRemove, + [&status, &numRequestProcessed] (auto s, uint32_t n) { + status = s; + numRequestProcessed = n; + }); + ASSERT_TRUE(returnStatus.isOk()); + ASSERT_EQ(Status::OK, status); + ASSERT_EQ(numRequestProcessed, kBurstFrameCount); + + hidl_vec offlineStreamIds = {halStreamConfig3_2.id}; + V3_6::CameraOfflineSessionInfo offlineSessionInfo; + sp offlineSession; + returnStatus = session->switchToOffline(offlineStreamIds, + [&status, &offlineSessionInfo, &offlineSession] (auto stat, auto 
info, + auto offSession) { + status = stat; + offlineSessionInfo = info; + offlineSession = offSession; + }); + ASSERT_TRUE(returnStatus.isOk()); + + if (!halStreamConfig.streams[0].supportOffline) { + ASSERT_EQ(status, Status::ILLEGAL_ARGUMENT); + ret = session->close(); + ASSERT_TRUE(ret.isOk()); + continue; + } + + ASSERT_EQ(status, Status::OK); + // Hal might be unable to find any requests qualified for offline mode. + if (offlineSession == nullptr) { + ret = session->close(); + ASSERT_TRUE(ret.isOk()); + continue; + } + + ASSERT_EQ(offlineSessionInfo.offlineStreams.size(), 1u); + ASSERT_EQ(offlineSessionInfo.offlineStreams[0].id, halStreamConfig3_2.id); + ASSERT_NE(offlineSessionInfo.offlineRequests.size(), 0u); + + // close device session to make sure offline session does not rely on it + ret = session->close(); + ASSERT_TRUE(ret.isOk()); + + std::shared_ptr offlineResultQueue; + auto offlineResultQueueRet = + offlineSession->getCaptureResultMetadataQueue( + [&offlineResultQueue](const auto& descriptor) { + offlineResultQueue = std::make_shared( + descriptor); + if (!offlineResultQueue->isValid() || + offlineResultQueue->availableToWrite() <= 0) { + ALOGE("%s: offline session returns empty result metadata fmq," + " not use it", __func__); + offlineResultQueue = nullptr; + // Don't use the queue onwards. + } + }); + ASSERT_TRUE(offlineResultQueueRet.isOk()); + + updateInflightResultQueue(offlineResultQueue); + + ret = offlineSession->setCallback(cb); + ASSERT_TRUE(ret.isOk()); + + for (size_t i = 0; i < kBurstFrameCount; i++) { + std::unique_lock l(mLock); + while (!inflightReqs[i].errorCodeValid && ((0 < inflightReqs[i].numBuffersLeft) || + (!inflightReqs[i].haveResultMetadata))) { + auto timeout = std::chrono::system_clock::now() + + std::chrono::seconds(kStreamBufferTimeoutSec); + ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout)); + } + + ASSERT_FALSE(inflightReqs[i].errorCodeValid); + ASSERT_NE(inflightReqs[i].resultOutputBuffers.size(), 0u); + ASSERT_EQ(stream.id, inflightReqs[i].resultOutputBuffers[0].streamId); + ASSERT_FALSE(inflightReqs[i].collectedResult.isEmpty()); + } + + + ret = offlineSession->close(); + ASSERT_TRUE(ret.isOk()); + } +} + +// Check whether an invalid capture request with missing output buffers +// will be reported correctly. 
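+// The HAL must reject such a request with ILLEGAL_ARGUMENT and report zero processed
+// requests.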
+TEST_P(CameraHidlTest, processCaptureRequestInvalidBuffer) { + hidl_vec cameraDeviceNames = getCameraDeviceNames(mProvider); + std::vector outputBlobStreams; + AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight, + static_cast(PixelFormat::IMPLEMENTATION_DEFINED)}; + uint32_t frameNumber = 1; + ::android::hardware::hidl_vec settings; + + for (const auto& name : cameraDeviceNames) { + int deviceVersion = getCameraDeviceVersion(name, mProviderType); + if (deviceVersion == CAMERA_DEVICE_API_VERSION_1_0) { + continue; + } else if (deviceVersion <= 0) { + ALOGE("%s: Unsupported device version %d", __func__, deviceVersion); + ADD_FAILURE(); + return; + } + + V3_2::Stream previewStream; + HalStreamConfiguration halStreamConfig; + sp session; + sp cb; + bool supportsPartialResults = false; + bool useHalBufManager = false; + uint32_t partialResultCount = 0; + configurePreviewStream(name, deviceVersion, mProvider, &previewThreshold, &session /*out*/, + &previewStream /*out*/, &halStreamConfig /*out*/, + &supportsPartialResults /*out*/, + &partialResultCount /*out*/, &useHalBufManager /*out*/, &cb /*out*/); + + RequestTemplate reqTemplate = RequestTemplate::PREVIEW; + Return ret; + ret = session->constructDefaultRequestSettings(reqTemplate, + [&](auto status, const auto& req) { + ASSERT_EQ(Status::OK, status); + settings = req; + }); + ASSERT_TRUE(ret.isOk()); + overrideRotateAndCrop(&settings); + + ::android::hardware::hidl_vec emptyOutputBuffers; + StreamBuffer emptyInputBuffer = {-1, 0, nullptr, BufferStatus::ERROR, nullptr, + nullptr}; + CaptureRequest request = {frameNumber, 0 /* fmqSettingsSize */, settings, + emptyInputBuffer, emptyOutputBuffers}; + + // Output buffers are missing, we should fail here + Status status = Status::OK; + uint32_t numRequestProcessed = 0; + hidl_vec cachesToRemove; + ret = session->processCaptureRequest( + {request}, cachesToRemove, [&status, &numRequestProcessed](auto s, + uint32_t n) { + status = s; + numRequestProcessed = n; + }); + ASSERT_TRUE(ret.isOk()); + ASSERT_EQ(Status::ILLEGAL_ARGUMENT, status); + ASSERT_EQ(numRequestProcessed, 0u); + + ret = session->close(); + ASSERT_TRUE(ret.isOk()); + } +} + +// Generate, trigger and flush a preview request +TEST_P(CameraHidlTest, flushPreviewRequest) { + hidl_vec cameraDeviceNames = getCameraDeviceNames(mProvider); + std::vector outputPreviewStreams; + AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight, + static_cast(PixelFormat::IMPLEMENTATION_DEFINED)}; + uint64_t bufferId = 1; + uint32_t frameNumber = 1; + ::android::hardware::hidl_vec settings; + + for (const auto& name : cameraDeviceNames) { + int deviceVersion = getCameraDeviceVersion(name, mProviderType); + if (deviceVersion == CAMERA_DEVICE_API_VERSION_1_0) { + continue; + } else if (deviceVersion <= 0) { + ALOGE("%s: Unsupported device version %d", __func__, deviceVersion); + ADD_FAILURE(); + return; + } + + V3_2::Stream previewStream; + HalStreamConfiguration halStreamConfig; + sp session; + sp cb; + bool supportsPartialResults = false; + bool useHalBufManager = false; + uint32_t partialResultCount = 0; + configurePreviewStream(name, deviceVersion, mProvider, &previewThreshold, &session /*out*/, + &previewStream /*out*/, &halStreamConfig /*out*/, + &supportsPartialResults /*out*/, + &partialResultCount /*out*/, &useHalBufManager /*out*/, &cb /*out*/); + + std::shared_ptr resultQueue; + auto resultQueueRet = + session->getCaptureResultMetadataQueue( + [&resultQueue](const auto& descriptor) { + resultQueue = 
std::make_shared( + descriptor); + if (!resultQueue->isValid() || + resultQueue->availableToWrite() <= 0) { + ALOGE("%s: HAL returns empty result metadata fmq," + " not use it", __func__); + resultQueue = nullptr; + // Don't use the queue onwards. + } + }); + ASSERT_TRUE(resultQueueRet.isOk()); + + InFlightRequest inflightReq = {1, false, supportsPartialResults, + partialResultCount, resultQueue}; + RequestTemplate reqTemplate = RequestTemplate::PREVIEW; + Return ret; + ret = session->constructDefaultRequestSettings(reqTemplate, + [&](auto status, const auto& req) { + ASSERT_EQ(Status::OK, status); + settings = req; + }); + ASSERT_TRUE(ret.isOk()); + overrideRotateAndCrop(&settings); + + hidl_handle buffer_handle; + if (useHalBufManager) { + bufferId = 0; + } else { + allocateGraphicBuffer(previewStream.width, previewStream.height, + android_convertGralloc1To0Usage(halStreamConfig.streams[0].producerUsage, + halStreamConfig.streams[0].consumerUsage), + halStreamConfig.streams[0].overrideFormat, &buffer_handle); + } + + StreamBuffer outputBuffer = {halStreamConfig.streams[0].id, + bufferId, + buffer_handle, + BufferStatus::OK, + nullptr, + nullptr}; + ::android::hardware::hidl_vec outputBuffers = {outputBuffer}; + const StreamBuffer emptyInputBuffer = {-1, 0, nullptr, + BufferStatus::ERROR, nullptr, nullptr}; + CaptureRequest request = {frameNumber, 0 /* fmqSettingsSize */, settings, + emptyInputBuffer, outputBuffers}; + + { + std::unique_lock l(mLock); + mInflightMap.clear(); + mInflightMap.add(frameNumber, &inflightReq); + } + + Status status = Status::INTERNAL_ERROR; + uint32_t numRequestProcessed = 0; + hidl_vec cachesToRemove; + ret = session->processCaptureRequest( + {request}, cachesToRemove, [&status, &numRequestProcessed](auto s, + uint32_t n) { + status = s; + numRequestProcessed = n; + }); + + ASSERT_TRUE(ret.isOk()); + ASSERT_EQ(Status::OK, status); + ASSERT_EQ(numRequestProcessed, 1u); + // Flush before waiting for request to complete. + Return returnStatus = session->flush(); + ASSERT_TRUE(returnStatus.isOk()); + ASSERT_EQ(Status::OK, returnStatus); + + { + std::unique_lock l(mLock); + while (!inflightReq.errorCodeValid && + ((0 < inflightReq.numBuffersLeft) || + (!inflightReq.haveResultMetadata))) { + auto timeout = std::chrono::system_clock::now() + + std::chrono::seconds(kStreamBufferTimeoutSec); + ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, + timeout)); + } + + if (!inflightReq.errorCodeValid) { + ASSERT_NE(inflightReq.resultOutputBuffers.size(), 0u); + ASSERT_EQ(previewStream.id, inflightReq.resultOutputBuffers[0].streamId); + } else { + switch (inflightReq.errorCode) { + case ErrorCode::ERROR_REQUEST: + case ErrorCode::ERROR_RESULT: + case ErrorCode::ERROR_BUFFER: + // Expected + break; + case ErrorCode::ERROR_DEVICE: + default: + FAIL() << "Unexpected error:" + << static_cast(inflightReq.errorCode); + } + } + } + + if (useHalBufManager) { + verifyBuffersReturned(session, deviceVersion, previewStream.id, cb); + } + + ret = session->close(); + ASSERT_TRUE(ret.isOk()); + } +} + +// Verify that camera flushes correctly without any pending requests. 
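+// flush() on an idle session must return OK, and no result or error notification is
+// expected to arrive within kEmptyFlushTimeoutMSec.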
+TEST_P(CameraHidlTest, flushEmpty) { + hidl_vec cameraDeviceNames = getCameraDeviceNames(mProvider); + std::vector outputPreviewStreams; + AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight, + static_cast(PixelFormat::IMPLEMENTATION_DEFINED)}; + + for (const auto& name : cameraDeviceNames) { + int deviceVersion = getCameraDeviceVersion(name, mProviderType); + if (deviceVersion == CAMERA_DEVICE_API_VERSION_1_0) { + continue; + } else if (deviceVersion <= 0) { + ALOGE("%s: Unsupported device version %d", __func__, deviceVersion); + ADD_FAILURE(); + return; + } + + V3_2::Stream previewStream; + HalStreamConfiguration halStreamConfig; + sp session; + sp cb; + bool supportsPartialResults = false; + bool useHalBufManager = false; + uint32_t partialResultCount = 0; + configurePreviewStream(name, deviceVersion, mProvider, &previewThreshold, &session /*out*/, + &previewStream /*out*/, &halStreamConfig /*out*/, + &supportsPartialResults /*out*/, + &partialResultCount /*out*/, &useHalBufManager /*out*/, &cb /*out*/); + + Return returnStatus = session->flush(); + ASSERT_TRUE(returnStatus.isOk()); + ASSERT_EQ(Status::OK, returnStatus); + + { + std::unique_lock l(mLock); + auto timeout = std::chrono::system_clock::now() + + std::chrono::milliseconds(kEmptyFlushTimeoutMSec); + ASSERT_EQ(std::cv_status::timeout, mResultCondition.wait_until(l, timeout)); + } + + Return ret = session->close(); + ASSERT_TRUE(ret.isOk()); + } +} + +// Test camera provider@2.5 notify method +TEST_P(CameraHidlTest, providerDeviceStateNotification) { + + notifyDeviceState(provider::V2_5::DeviceState::BACK_COVERED); + notifyDeviceState(provider::V2_5::DeviceState::NORMAL); +} + +// Verify that all supported stream formats and sizes can be configured +// successfully for injection camera. 
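+// Injection sessions are only exposed by devices at CAMERA_DEVICE_API_VERSION_3_7 or
+// newer; devices without ICameraInjectionSession support are skipped.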
+TEST_P(CameraHidlTest, configureInjectionStreamsAvailableOutputs) { + hidl_vec cameraDeviceNames = getCameraDeviceNames(mProvider); + std::vector outputStreams; + + for (const auto& name : cameraDeviceNames) { + int deviceVersion = getCameraDeviceVersion(name, mProviderType); + if (deviceVersion <= 0) { + ALOGE("%s: Unsupported device version %d", __func__, deviceVersion); + ADD_FAILURE(); + return; + } else if (deviceVersion < CAMERA_DEVICE_API_VERSION_3_7) { + continue; + } + + camera_metadata_t* staticMetaBuffer; + Return ret; + Status s; + sp session; + sp injectionSession3_7; + openEmptyDeviceSession(name, mProvider, &session /*out*/, &staticMetaBuffer /*out*/); + castInjectionSession(session, &injectionSession3_7); + if (injectionSession3_7 == nullptr) { + ALOGW("%s: The provider %s doesn't support ICameraInjectionSession", __func__, + mProviderType.c_str()); + continue; + } + + ::android::hardware::camera::device::V3_2::CameraMetadata hidlChars = {}; + hidlChars.setToExternal( + reinterpret_cast(const_cast(staticMetaBuffer)), + get_camera_metadata_size(staticMetaBuffer)); + + outputStreams.clear(); + ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMetaBuffer, outputStreams)); + ASSERT_NE(0u, outputStreams.size()); + + uint32_t jpegBufferSize = 0; + ASSERT_EQ(Status::OK, getJpegBufferSize(staticMetaBuffer, &jpegBufferSize)); + ASSERT_NE(0u, jpegBufferSize); + + int32_t streamId = 0; + uint32_t streamConfigCounter = 0; + for (auto& it : outputStreams) { + V3_2::Stream stream3_2; + V3_2::DataspaceFlags dataspaceFlag = getDataspace(static_cast(it.format)); + stream3_2 = {streamId, + StreamType::OUTPUT, + static_cast(it.width), + static_cast(it.height), + static_cast(it.format), + GRALLOC1_CONSUMER_USAGE_HWCOMPOSER, + dataspaceFlag, + StreamRotation::ROTATION_0}; + ::android::hardware::hidl_vec streams3_2 = {stream3_2}; + ::android::hardware::camera::device::V3_7::StreamConfiguration config3_7; + ::android::hardware::camera::device::V3_5::StreamConfiguration config3_5; + ::android::hardware::camera::device::V3_4::StreamConfiguration config3_4; + ::android::hardware::camera::device::V3_2::StreamConfiguration config3_2; + createStreamConfiguration(streams3_2, StreamConfigurationMode::NORMAL_MODE, &config3_2, + &config3_4, &config3_5, &config3_7, jpegBufferSize); + + config3_7.streamConfigCounter = streamConfigCounter++; + s = injectionSession3_7->configureInjectionStreams(config3_7, hidlChars); + ASSERT_EQ(Status::OK, s); + streamId++; + } + + free_camera_metadata(staticMetaBuffer); + ret = session->close(); + ASSERT_TRUE(ret.isOk()); + } +} + +// Check for correct handling of invalid/incorrect configuration parameters for injection camera. 
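+// Zero-sized, oversized, unknown-format, and unknown-rotation streams must all be
+// rejected with ILLEGAL_ARGUMENT (INTERNAL_ERROR is also tolerated for the zero-sized
+// case).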
+TEST_P(CameraHidlTest, configureInjectionStreamsInvalidOutputs) { + hidl_vec cameraDeviceNames = getCameraDeviceNames(mProvider); + std::vector outputStreams; + + for (const auto& name : cameraDeviceNames) { + int deviceVersion = getCameraDeviceVersion(name, mProviderType); + if (deviceVersion <= 0) { + ALOGE("%s: Unsupported device version %d", __func__, deviceVersion); + ADD_FAILURE(); + return; + } else if (deviceVersion < CAMERA_DEVICE_API_VERSION_3_7) { + continue; + } + + camera_metadata_t* staticMetaBuffer; + Return ret; + Status s; + sp session; + sp injectionSession3_7; + openEmptyDeviceSession(name, mProvider, &session /*out*/, &staticMetaBuffer /*out*/); + castInjectionSession(session, &injectionSession3_7); + if (injectionSession3_7 == nullptr) { + ALOGW("%s: The provider %s doesn't support ICameraInjectionSession", __func__, + mProviderType.c_str()); + continue; + } + + ::android::hardware::camera::device::V3_2::CameraMetadata hidlChars = {}; + hidlChars.setToExternal( + reinterpret_cast(const_cast(staticMetaBuffer)), + get_camera_metadata_size(staticMetaBuffer)); + + outputStreams.clear(); + ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMetaBuffer, outputStreams)); + ASSERT_NE(0u, outputStreams.size()); + + uint32_t jpegBufferSize = 0; + ASSERT_EQ(Status::OK, getJpegBufferSize(staticMetaBuffer, &jpegBufferSize)); + ASSERT_NE(0u, jpegBufferSize); + + int32_t streamId = 0; + V3_2::Stream stream3_2 = {streamId++, + StreamType::OUTPUT, + static_cast(0), + static_cast(0), + static_cast(outputStreams[0].format), + GRALLOC1_CONSUMER_USAGE_HWCOMPOSER, + 0, + StreamRotation::ROTATION_0}; + uint32_t streamConfigCounter = 0; + ::android::hardware::hidl_vec streams = {stream3_2}; + ::android::hardware::camera::device::V3_7::StreamConfiguration config3_7; + ::android::hardware::camera::device::V3_5::StreamConfiguration config3_5; + ::android::hardware::camera::device::V3_4::StreamConfiguration config3_4; + ::android::hardware::camera::device::V3_2::StreamConfiguration config3_2; + createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config3_2, + &config3_4, &config3_5, &config3_7, jpegBufferSize); + + config3_7.streamConfigCounter = streamConfigCounter++; + s = injectionSession3_7->configureInjectionStreams(config3_7, hidlChars); + ASSERT_TRUE((Status::ILLEGAL_ARGUMENT == s) || (Status::INTERNAL_ERROR == s)); + + stream3_2 = {streamId++, + StreamType::OUTPUT, + static_cast(UINT32_MAX), + static_cast(UINT32_MAX), + static_cast(outputStreams[0].format), + GRALLOC1_CONSUMER_USAGE_HWCOMPOSER, + 0, + StreamRotation::ROTATION_0}; + streams[0] = stream3_2; + createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config3_2, + &config3_4, &config3_5, &config3_7, jpegBufferSize); + config3_7.streamConfigCounter = streamConfigCounter++; + s = injectionSession3_7->configureInjectionStreams(config3_7, hidlChars); + ASSERT_EQ(Status::ILLEGAL_ARGUMENT, s); + + for (auto& it : outputStreams) { + stream3_2 = {streamId++, + StreamType::OUTPUT, + static_cast(it.width), + static_cast(it.height), + static_cast(UINT32_MAX), + GRALLOC1_CONSUMER_USAGE_HWCOMPOSER, + 0, + StreamRotation::ROTATION_0}; + streams[0] = stream3_2; + createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config3_2, + &config3_4, &config3_5, &config3_7, jpegBufferSize); + config3_7.streamConfigCounter = streamConfigCounter++; + s = injectionSession3_7->configureInjectionStreams(config3_7, hidlChars); + ASSERT_EQ(Status::ILLEGAL_ARGUMENT, s); + + stream3_2 = {streamId++, 
+ StreamType::OUTPUT, + static_cast(it.width), + static_cast(it.height), + static_cast(it.format), + GRALLOC1_CONSUMER_USAGE_HWCOMPOSER, + 0, + static_cast(UINT32_MAX)}; + streams[0] = stream3_2; + createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config3_2, + &config3_4, &config3_5, &config3_7, jpegBufferSize); + config3_7.streamConfigCounter = streamConfigCounter++; + s = injectionSession3_7->configureInjectionStreams(config3_7, hidlChars); + ASSERT_EQ(Status::ILLEGAL_ARGUMENT, s); + } + + free_camera_metadata(staticMetaBuffer); + ret = session->close(); + ASSERT_TRUE(ret.isOk()); + } +} + +// Check whether session parameters are supported for injection camera. If Hal support for them +// exist, then try to configure a preview stream using them. +TEST_P(CameraHidlTest, configureInjectionStreamsWithSessionParameters) { + hidl_vec cameraDeviceNames = getCameraDeviceNames(mProvider); + std::vector outputPreviewStreams; + AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight, + static_cast(PixelFormat::IMPLEMENTATION_DEFINED)}; + + for (const auto& name : cameraDeviceNames) { + int deviceVersion = getCameraDeviceVersion(name, mProviderType); + if (deviceVersion <= 0) { + ALOGE("%s: Unsupported device version %d", __func__, deviceVersion); + ADD_FAILURE(); + return; + } else if (deviceVersion < CAMERA_DEVICE_API_VERSION_3_7) { + continue; + } + + camera_metadata_t* staticMetaBuffer; + Return ret; + Status s; + sp session; + sp injectionSession3_7; + openEmptyDeviceSession(name, mProvider, &session /*out*/, &staticMetaBuffer /*out*/); + castInjectionSession(session, &injectionSession3_7); + if (injectionSession3_7 == nullptr) { + ALOGW("%s: The provider %s doesn't support ICameraInjectionSession", __func__, + mProviderType.c_str()); + continue; + } + + ::android::hardware::camera::device::V3_2::CameraMetadata hidlChars = {}; + hidlChars.setToExternal( + reinterpret_cast(const_cast(staticMetaBuffer)), + get_camera_metadata_size(staticMetaBuffer)); + + std::unordered_set availableSessionKeys; + auto rc = getSupportedKeys(staticMetaBuffer, ANDROID_REQUEST_AVAILABLE_SESSION_KEYS, + &availableSessionKeys); + ASSERT_TRUE(Status::OK == rc); + if (availableSessionKeys.empty()) { + free_camera_metadata(staticMetaBuffer); + ret = session->close(); + ASSERT_TRUE(ret.isOk()); + continue; + } + + android::hardware::camera::common::V1_0::helper::CameraMetadata previewRequestSettings; + android::hardware::camera::common::V1_0::helper::CameraMetadata sessionParams, + modifiedSessionParams; + constructFilteredSettings(session, availableSessionKeys, RequestTemplate::PREVIEW, + &previewRequestSettings, &sessionParams); + if (sessionParams.isEmpty()) { + free_camera_metadata(staticMetaBuffer); + ret = session->close(); + ASSERT_TRUE(ret.isOk()); + continue; + } + + outputPreviewStreams.clear(); + + ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMetaBuffer, outputPreviewStreams, + &previewThreshold)); + ASSERT_NE(0u, outputPreviewStreams.size()); + + V3_4::Stream previewStream; + previewStream.v3_2 = {0, + StreamType::OUTPUT, + static_cast(outputPreviewStreams[0].width), + static_cast(outputPreviewStreams[0].height), + static_cast(outputPreviewStreams[0].format), + GRALLOC1_CONSUMER_USAGE_HWCOMPOSER, + 0, + StreamRotation::ROTATION_0}; + previewStream.bufferSize = 0; + ::android::hardware::hidl_vec streams = {previewStream}; + ::android::hardware::camera::device::V3_4::StreamConfiguration config; + ::android::hardware::camera::device::V3_5::StreamConfiguration 
config3_5; + ::android::hardware::camera::device::V3_7::StreamConfiguration config3_7; + config.streams = streams; + config.operationMode = StreamConfigurationMode::NORMAL_MODE; + modifiedSessionParams = sessionParams; + auto sessionParamsBuffer = sessionParams.release(); + config.sessionParams.setToExternal(reinterpret_cast(sessionParamsBuffer), + get_camera_metadata_size(sessionParamsBuffer)); + config3_5.v3_4 = config; + config3_5.streamConfigCounter = 0; + config3_7.streams = {{previewStream, -1, {ANDROID_SENSOR_PIXEL_MODE_DEFAULT}}}; + config3_7.operationMode = config.operationMode; + config3_7.sessionParams.setToExternal(reinterpret_cast(sessionParamsBuffer), + get_camera_metadata_size(sessionParamsBuffer)); + config3_7.streamConfigCounter = 0; + config3_7.multiResolutionInputImage = false; + + s = injectionSession3_7->configureInjectionStreams(config3_7, hidlChars); + sessionParams.acquire(sessionParamsBuffer); + ASSERT_EQ(Status::OK, s); + + free_camera_metadata(staticMetaBuffer); + ret = session->close(); + ASSERT_TRUE(ret.isOk()); + } +} + +// Retrieve all valid output stream resolutions from the camera +// static characteristics. +Status CameraHidlTest::getAvailableOutputStreams(const camera_metadata_t* staticMeta, + std::vector& outputStreams, + const AvailableStream* threshold, + bool maxResolution) { + AvailableStream depthPreviewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight, + static_cast(PixelFormat::Y16)}; + if (nullptr == staticMeta) { + return Status::ILLEGAL_ARGUMENT; + } + int scalerTag = maxResolution + ? ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION + : ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS; + int depthTag = maxResolution + ? ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION + : ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS; + + camera_metadata_ro_entry scalarEntry; + camera_metadata_ro_entry depthEntry; + int foundScalar = find_camera_metadata_ro_entry(staticMeta, scalerTag, &scalarEntry); + int foundDepth = find_camera_metadata_ro_entry(staticMeta, depthTag, &depthEntry); + if ((0 != foundScalar || (0 != (scalarEntry.count % 4))) && + (0 != foundDepth || (0 != (depthEntry.count % 4)))) { + return Status::ILLEGAL_ARGUMENT; + } + + if(foundScalar == 0 && (0 == (scalarEntry.count % 4))) { + fillOutputStreams(&scalarEntry, outputStreams, threshold, + ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT); + } + + if(foundDepth == 0 && (0 == (depthEntry.count % 4))) { + fillOutputStreams(&depthEntry, outputStreams, &depthPreviewThreshold, + ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_OUTPUT); + } + + return Status::OK; +} + +static Size getMinSize(Size a, Size b) { + if (a.width * a.height < b.width * b.height) { + return a; + } + return b; +} + +// TODO: Add more combinations +Status CameraHidlTest::getMandatoryConcurrentStreams(const camera_metadata_t* staticMeta, + std::vector* outputStreams) { + if (nullptr == staticMeta || nullptr == outputStreams) { + return Status::ILLEGAL_ARGUMENT; + } + + if (isDepthOnly(staticMeta)) { + Size y16MaxSize(640, 480); + Size maxAvailableY16Size; + getMaxOutputSizeForFormat(staticMeta, PixelFormat::Y16, &maxAvailableY16Size); + Size y16ChosenSize = getMinSize(y16MaxSize, maxAvailableY16Size); + AvailableStream y16Stream = {.width = y16ChosenSize.width, + .height = y16ChosenSize.height, + .format = static_cast(PixelFormat::Y16)}; + outputStreams->push_back(y16Stream); + return Status::OK; + } + + Size yuvMaxSize(1280, 720); + Size jpegMaxSize(1920, 1440); + Size 
maxAvailableYuvSize; + Size maxAvailableJpegSize; + getMaxOutputSizeForFormat(staticMeta, PixelFormat::YCBCR_420_888, &maxAvailableYuvSize); + getMaxOutputSizeForFormat(staticMeta, PixelFormat::BLOB, &maxAvailableJpegSize); + Size yuvChosenSize = getMinSize(yuvMaxSize, maxAvailableYuvSize); + Size jpegChosenSize = getMinSize(jpegMaxSize, maxAvailableJpegSize); + + AvailableStream yuvStream = {.width = yuvChosenSize.width, + .height = yuvChosenSize.height, + .format = static_cast(PixelFormat::YCBCR_420_888)}; + + AvailableStream jpegStream = {.width = jpegChosenSize.width, + .height = jpegChosenSize.height, + .format = static_cast(PixelFormat::BLOB)}; + outputStreams->push_back(yuvStream); + outputStreams->push_back(jpegStream); + + return Status::OK; +} + +Status CameraHidlTest::getMaxOutputSizeForFormat(const camera_metadata_t* staticMeta, + PixelFormat format, Size* size, + bool maxResolution) { + std::vector outputStreams; + if (size == nullptr || + getAvailableOutputStreams(staticMeta, outputStreams, + /*threshold*/ nullptr, maxResolution) != Status::OK) { + return Status::ILLEGAL_ARGUMENT; + } + Size maxSize; + bool found = false; + for (auto& outputStream : outputStreams) { + if (static_cast(format) == outputStream.format && + (outputStream.width * outputStream.height > maxSize.width * maxSize.height)) { + maxSize.width = outputStream.width; + maxSize.height = outputStream.height; + found = true; + } + } + if (!found) { + ALOGE("%s :chosen format %d not found", __FUNCTION__, static_cast(format)); + return Status::ILLEGAL_ARGUMENT; + } + *size = maxSize; + return Status::OK; +} + +void CameraHidlTest::fillOutputStreams(camera_metadata_ro_entry_t* entry, + std::vector& outputStreams, const AvailableStream* threshold, + const int32_t availableConfigOutputTag) { + for (size_t i = 0; i < entry->count; i+=4) { + if (availableConfigOutputTag == entry->data.i32[i + 3]) { + if(nullptr == threshold) { + AvailableStream s = {entry->data.i32[i+1], + entry->data.i32[i+2], entry->data.i32[i]}; + outputStreams.push_back(s); + } else { + if ((threshold->format == entry->data.i32[i]) && + (threshold->width >= entry->data.i32[i+1]) && + (threshold->height >= entry->data.i32[i+2])) { + AvailableStream s = {entry->data.i32[i+1], + entry->data.i32[i+2], threshold->format}; + outputStreams.push_back(s); + } + } + } + } +} + +// Get max jpeg buffer size in android.jpeg.maxSize +Status CameraHidlTest::getJpegBufferSize(camera_metadata_t *staticMeta, uint32_t* outBufSize) { + if (nullptr == staticMeta || nullptr == outBufSize) { + return Status::ILLEGAL_ARGUMENT; + } + + camera_metadata_ro_entry entry; + int rc = find_camera_metadata_ro_entry(staticMeta, + ANDROID_JPEG_MAX_SIZE, &entry); + if ((0 != rc) || (1 != entry.count)) { + return Status::ILLEGAL_ARGUMENT; + } + + *outBufSize = static_cast(entry.data.i32[0]); + return Status::OK; +} + +// Check if the camera device has logical multi-camera capability. 
+Status CameraHidlTest::isLogicalMultiCamera(const camera_metadata_t *staticMeta) {
+    Status ret = Status::METHOD_NOT_SUPPORTED;
+    if (nullptr == staticMeta) {
+        return Status::ILLEGAL_ARGUMENT;
+    }
+
+    camera_metadata_ro_entry entry;
+    int rc = find_camera_metadata_ro_entry(staticMeta,
+            ANDROID_REQUEST_AVAILABLE_CAPABILITIES, &entry);
+    if (0 != rc) {
+        return Status::ILLEGAL_ARGUMENT;
+    }
+
+    for (size_t i = 0; i < entry.count; i++) {
+        if (ANDROID_REQUEST_AVAILABLE_CAPABILITIES_LOGICAL_MULTI_CAMERA == entry.data.u8[i]) {
+            ret = Status::OK;
+            break;
+        }
+    }
+
+    return ret;
+}
+
+// Check if the camera device supports offline processing.
+Status CameraHidlTest::isOfflineSessionSupported(const camera_metadata_t *staticMeta) {
+    Status ret = Status::METHOD_NOT_SUPPORTED;
+    if (nullptr == staticMeta) {
+        return Status::ILLEGAL_ARGUMENT;
+    }
+
+    camera_metadata_ro_entry entry;
+    int rc = find_camera_metadata_ro_entry(staticMeta,
+            ANDROID_REQUEST_AVAILABLE_CAPABILITIES, &entry);
+    if (0 != rc) {
+        return Status::ILLEGAL_ARGUMENT;
+    }
+
+    for (size_t i = 0; i < entry.count; i++) {
+        if (ANDROID_REQUEST_AVAILABLE_CAPABILITIES_OFFLINE_PROCESSING == entry.data.u8[i]) {
+            ret = Status::OK;
+            break;
+        }
+    }
+
+    return ret;
+}
+
+// Generate a list of physical camera ids backing a logical multi-camera.
+Status CameraHidlTest::getPhysicalCameraIds(const camera_metadata_t *staticMeta,
+        std::unordered_set<std::string> *physicalIds) {
+    if ((nullptr == staticMeta) || (nullptr == physicalIds)) {
+        return Status::ILLEGAL_ARGUMENT;
+    }
+
+    camera_metadata_ro_entry entry;
+    int rc = find_camera_metadata_ro_entry(staticMeta, ANDROID_LOGICAL_MULTI_CAMERA_PHYSICAL_IDS,
+            &entry);
+    if (0 != rc) {
+        return Status::ILLEGAL_ARGUMENT;
+    }
+
+    const uint8_t* ids = entry.data.u8;
+    size_t start = 0;
+    for (size_t i = 0; i < entry.count; i++) {
+        if (ids[i] == '\0') {
+            if (start != i) {
+                std::string currentId(reinterpret_cast<const char *>(ids + start));
+                physicalIds->emplace(currentId);
+            }
+            start = i + 1;
+        }
+    }
+
+    return Status::OK;
+}
+
+// Generate a set of supported camera key ids.
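+// A missing tag is not treated as an error here: the key set is simply left empty and
+// Status::OK is returned.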
+Status CameraHidlTest::getSupportedKeys(camera_metadata_t *staticMeta, + uint32_t tagId, std::unordered_set *requestIDs) { + if ((nullptr == staticMeta) || (nullptr == requestIDs)) { + return Status::ILLEGAL_ARGUMENT; + } + + camera_metadata_ro_entry entry; + int rc = find_camera_metadata_ro_entry(staticMeta, tagId, &entry); + if ((0 != rc) || (entry.count == 0)) { + return Status::OK; + } + + requestIDs->insert(entry.data.i32, entry.data.i32 + entry.count); + + return Status::OK; +} + +void CameraHidlTest::constructFilteredSettings(const sp& session, + const std::unordered_set& availableKeys, RequestTemplate reqTemplate, + android::hardware::camera::common::V1_0::helper::CameraMetadata* defaultSettings, + android::hardware::camera::common::V1_0::helper::CameraMetadata* filteredSettings) { + ASSERT_NE(defaultSettings, nullptr); + ASSERT_NE(filteredSettings, nullptr); + + auto ret = session->constructDefaultRequestSettings(reqTemplate, + [&defaultSettings] (auto status, const auto& req) mutable { + ASSERT_EQ(Status::OK, status); + + const camera_metadata_t *metadata = reinterpret_cast ( + req.data()); + size_t expectedSize = req.size(); + int result = validate_camera_metadata_structure(metadata, &expectedSize); + ASSERT_TRUE((result == 0) || (result == CAMERA_METADATA_VALIDATION_SHIFTED)); + + size_t entryCount = get_camera_metadata_entry_count(metadata); + ASSERT_GT(entryCount, 0u); + *defaultSettings = metadata; + }); + ASSERT_TRUE(ret.isOk()); + const android::hardware::camera::common::V1_0::helper::CameraMetadata &constSettings = + *defaultSettings; + for (const auto& keyIt : availableKeys) { + camera_metadata_ro_entry entry = constSettings.find(keyIt); + if (entry.count > 0) { + filteredSettings->update(entry); + } + } +} + +// Check if constrained mode is supported by using the static +// camera characteristics. +Status CameraHidlTest::isConstrainedModeAvailable(camera_metadata_t *staticMeta) { + Status ret = Status::METHOD_NOT_SUPPORTED; + if (nullptr == staticMeta) { + return Status::ILLEGAL_ARGUMENT; + } + + camera_metadata_ro_entry entry; + int rc = find_camera_metadata_ro_entry(staticMeta, + ANDROID_REQUEST_AVAILABLE_CAPABILITIES, &entry); + if (0 != rc) { + return Status::ILLEGAL_ARGUMENT; + } + + for (size_t i = 0; i < entry.count; i++) { + if (ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO == + entry.data.u8[i]) { + ret = Status::OK; + break; + } + } + + return ret; +} + +// Pick the largest supported HFR mode from the static camera +// characteristics. +Status CameraHidlTest::pickConstrainedModeSize(camera_metadata_t *staticMeta, + AvailableStream &hfrStream) { + if (nullptr == staticMeta) { + return Status::ILLEGAL_ARGUMENT; + } + + camera_metadata_ro_entry entry; + int rc = find_camera_metadata_ro_entry(staticMeta, + ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS, &entry); + if (0 != rc) { + return Status::METHOD_NOT_SUPPORTED; + } else if (0 != (entry.count % 5)) { + return Status::ILLEGAL_ARGUMENT; + } + + hfrStream = {0, 0, + static_cast(PixelFormat::IMPLEMENTATION_DEFINED)}; + for (size_t i = 0; i < entry.count; i+=5) { + int32_t w = entry.data.i32[i]; + int32_t h = entry.data.i32[i+1]; + if ((hfrStream.width * hfrStream.height) < (w *h)) { + hfrStream.width = w; + hfrStream.height = h; + } + } + + return Status::OK; +} + +// Check whether ZSL is available using the static camera +// characteristics. 
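+// Either PRIVATE_REPROCESSING or YUV_REPROCESSING capability counts as ZSL support.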
+Status CameraHidlTest::isZSLModeAvailable(const camera_metadata_t *staticMeta) { + if (Status::OK == isZSLModeAvailable(staticMeta, PRIV_REPROCESS)) { + return Status::OK; + } else { + return isZSLModeAvailable(staticMeta, YUV_REPROCESS); + } +} + +Status CameraHidlTest::isZSLModeAvailable(const camera_metadata_t *staticMeta, + ReprocessType reprocType) { + + Status ret = Status::METHOD_NOT_SUPPORTED; + if (nullptr == staticMeta) { + return Status::ILLEGAL_ARGUMENT; + } + + camera_metadata_ro_entry entry; + int rc = find_camera_metadata_ro_entry(staticMeta, + ANDROID_REQUEST_AVAILABLE_CAPABILITIES, &entry); + if (0 != rc) { + return Status::ILLEGAL_ARGUMENT; + } + + for (size_t i = 0; i < entry.count; i++) { + if ((reprocType == PRIV_REPROCESS && + ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING == entry.data.u8[i]) || + (reprocType == YUV_REPROCESS && + ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING == entry.data.u8[i])) { + ret = Status::OK; + break; + } + } + + return ret; +} + +Status CameraHidlTest::getSystemCameraKind(const camera_metadata_t* staticMeta, + SystemCameraKind* systemCameraKind) { + Status ret = Status::OK; + if (nullptr == staticMeta || nullptr == systemCameraKind) { + return Status::ILLEGAL_ARGUMENT; + } + + camera_metadata_ro_entry entry; + int rc = find_camera_metadata_ro_entry(staticMeta, ANDROID_REQUEST_AVAILABLE_CAPABILITIES, + &entry); + if (0 != rc) { + return Status::ILLEGAL_ARGUMENT; + } + + if (entry.count == 1 && + entry.data.u8[0] == ANDROID_REQUEST_AVAILABLE_CAPABILITIES_SECURE_IMAGE_DATA) { + *systemCameraKind = SystemCameraKind::HIDDEN_SECURE_CAMERA; + return ret; + } + + // Go through the capabilities and check if it has + // ANDROID_REQUEST_AVAILABLE_CAPABILITIES_SYSTEM_CAMERA + for (size_t i = 0; i < entry.count; ++i) { + uint8_t capability = entry.data.u8[i]; + if (capability == ANDROID_REQUEST_AVAILABLE_CAPABILITIES_SYSTEM_CAMERA) { + *systemCameraKind = SystemCameraKind::SYSTEM_ONLY_CAMERA; + return ret; + } + } + *systemCameraKind = SystemCameraKind::PUBLIC; + return ret; +} + +void CameraHidlTest::getMultiResolutionStreamConfigurations( + camera_metadata_ro_entry* multiResStreamConfigs, camera_metadata_ro_entry* streamConfigs, + camera_metadata_ro_entry* maxResolutionStreamConfigs, + const camera_metadata_t* staticMetadata) { + ASSERT_NE(multiResStreamConfigs, nullptr); + ASSERT_NE(streamConfigs, nullptr); + ASSERT_NE(maxResolutionStreamConfigs, nullptr); + ASSERT_NE(staticMetadata, nullptr); + + int retcode = find_camera_metadata_ro_entry( + staticMetadata, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, streamConfigs); + ASSERT_TRUE(0 == retcode); + retcode = find_camera_metadata_ro_entry( + staticMetadata, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION, + maxResolutionStreamConfigs); + ASSERT_TRUE(-ENOENT == retcode || 0 == retcode); + retcode = find_camera_metadata_ro_entry( + staticMetadata, ANDROID_SCALER_PHYSICAL_CAMERA_MULTI_RESOLUTION_STREAM_CONFIGURATIONS, + multiResStreamConfigs); + ASSERT_TRUE(-ENOENT == retcode || 0 == retcode); +} + +void CameraHidlTest::getPrivacyTestPatternModes( + const camera_metadata_t* staticMetadata, + std::unordered_set* privacyTestPatternModes/*out*/) { + ASSERT_NE(staticMetadata, nullptr); + ASSERT_NE(privacyTestPatternModes, nullptr); + + camera_metadata_ro_entry entry; + int retcode = find_camera_metadata_ro_entry( + staticMetadata, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES, &entry); + ASSERT_TRUE(0 == retcode); + + for (auto i = 0; i < entry.count; i++) { + if 
(entry.data.i32[i] == ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR || + entry.data.i32[i] == ANDROID_SENSOR_TEST_PATTERN_MODE_BLACK) { + privacyTestPatternModes->insert(entry.data.i32[i]); + } + } +} + +// Select an appropriate dataspace given a specific pixel format. +V3_2::DataspaceFlags CameraHidlTest::getDataspace(PixelFormat format) { + switch (format) { + case PixelFormat::BLOB: + return static_cast(Dataspace::V0_JFIF); + case PixelFormat::Y16: + return static_cast(Dataspace::DEPTH); + case PixelFormat::RAW16: + case PixelFormat::RAW_OPAQUE: + case PixelFormat::RAW10: + case PixelFormat::RAW12: + return static_cast(Dataspace::ARBITRARY); + default: + return static_cast(Dataspace::UNKNOWN); + } +} + +// Check whether this is a monochrome camera using the static camera characteristics. +Status CameraHidlTest::isMonochromeCamera(const camera_metadata_t *staticMeta) { + Status ret = Status::METHOD_NOT_SUPPORTED; + if (nullptr == staticMeta) { + return Status::ILLEGAL_ARGUMENT; + } + + camera_metadata_ro_entry entry; + int rc = find_camera_metadata_ro_entry(staticMeta, + ANDROID_REQUEST_AVAILABLE_CAPABILITIES, &entry); + if (0 != rc) { + return Status::ILLEGAL_ARGUMENT; + } + + for (size_t i = 0; i < entry.count; i++) { + if (ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MONOCHROME == entry.data.u8[i]) { + ret = Status::OK; + break; + } + } + + return ret; +} + +// Retrieve the reprocess input-output format map from the static +// camera characteristics. +Status CameraHidlTest::getZSLInputOutputMap(camera_metadata_t *staticMeta, + std::vector &inputOutputMap) { + if (nullptr == staticMeta) { + return Status::ILLEGAL_ARGUMENT; + } + + camera_metadata_ro_entry entry; + int rc = find_camera_metadata_ro_entry(staticMeta, + ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP, &entry); + if ((0 != rc) || (0 >= entry.count)) { + return Status::ILLEGAL_ARGUMENT; + } + + const int32_t* contents = &entry.data.i32[0]; + for (size_t i = 0; i < entry.count; ) { + int32_t inputFormat = contents[i++]; + int32_t length = contents[i++]; + for (int32_t j = 0; j < length; j++) { + int32_t outputFormat = contents[i+j]; + AvailableZSLInputOutput zslEntry = {inputFormat, outputFormat}; + inputOutputMap.push_back(zslEntry); + } + i += length; + } + + return Status::OK; +} + +// Search for the largest stream size for a given format. +Status CameraHidlTest::findLargestSize( + const std::vector &streamSizes, int32_t format, + AvailableStream &result) { + result = {0, 0, 0}; + for (auto &iter : streamSizes) { + if (format == iter.format) { + if ((result.width * result.height) < (iter.width * iter.height)) { + result = iter; + } + } + } + + return (result.format == format) ? Status::OK : Status::ILLEGAL_ARGUMENT; +} + +// Check whether the camera device supports specific focus mode. 
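+// The requested mode is matched against the legacy KEY_SUPPORTED_FOCUS_MODES list
+// reported through CameraParameters (camera device 1.0 path).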
+Status CameraHidlTest::isAutoFocusModeAvailable( + CameraParameters &cameraParams, + const char *mode) { + ::android::String8 focusModes(cameraParams.get( + CameraParameters::KEY_SUPPORTED_FOCUS_MODES)); + if (focusModes.contains(mode)) { + return Status::OK; + } + + return Status::METHOD_NOT_SUPPORTED; +} + +void CameraHidlTest::createStreamConfiguration( + const ::android::hardware::hidl_vec& streams3_2, + StreamConfigurationMode configMode, + ::android::hardware::camera::device::V3_2::StreamConfiguration* config3_2 /*out*/, + ::android::hardware::camera::device::V3_4::StreamConfiguration* config3_4 /*out*/, + ::android::hardware::camera::device::V3_5::StreamConfiguration* config3_5 /*out*/, + ::android::hardware::camera::device::V3_7::StreamConfiguration* config3_7 /*out*/, + uint32_t jpegBufferSize) { + ASSERT_NE(nullptr, config3_2); + ASSERT_NE(nullptr, config3_4); + ASSERT_NE(nullptr, config3_5); + ASSERT_NE(nullptr, config3_7); + + ::android::hardware::hidl_vec streams3_4(streams3_2.size()); + ::android::hardware::hidl_vec streams3_7(streams3_2.size()); + size_t idx = 0; + for (auto& stream3_2 : streams3_2) { + V3_4::Stream stream; + stream.v3_2 = stream3_2; + stream.bufferSize = 0; + if (stream3_2.format == PixelFormat::BLOB && + stream3_2.dataSpace == static_cast(Dataspace::V0_JFIF)) { + stream.bufferSize = jpegBufferSize; + } + streams3_4[idx] = stream; + streams3_7[idx] = {stream, /*groupId*/ -1, {ANDROID_SENSOR_PIXEL_MODE_DEFAULT}}; + idx++; + } + // Caller is responsible to fill in non-zero config3_5->streamConfigCounter after this returns + *config3_7 = {streams3_7, configMode, {}, 0, false}; + *config3_5 = {{streams3_4, configMode, {}}, 0}; + *config3_4 = config3_5->v3_4; + *config3_2 = {streams3_2, configMode}; +} + +// Configure streams +void CameraHidlTest::configureStreams3_7( + const std::string& name, int32_t deviceVersion, sp provider, + PixelFormat format, sp* session3_7 /*out*/, + V3_2::Stream* previewStream /*out*/, + device::V3_6::HalStreamConfiguration* halStreamConfig /*out*/, + bool* supportsPartialResults /*out*/, uint32_t* partialResultCount /*out*/, + bool* useHalBufManager /*out*/, sp* outCb /*out*/, uint32_t streamConfigCounter, + bool maxResolution) { + ASSERT_NE(nullptr, session3_7); + ASSERT_NE(nullptr, halStreamConfig); + ASSERT_NE(nullptr, previewStream); + ASSERT_NE(nullptr, supportsPartialResults); + ASSERT_NE(nullptr, partialResultCount); + ASSERT_NE(nullptr, useHalBufManager); + ASSERT_NE(nullptr, outCb); + + std::vector outputStreams; + ::android::sp device3_x; + ALOGI("configureStreams: Testing camera device %s", name.c_str()); + Return ret; + ret = provider->getCameraDeviceInterface_V3_x(name, [&](auto status, const auto& device) { + ALOGI("getCameraDeviceInterface_V3_x returns status:%d", (int)status); + ASSERT_EQ(Status::OK, status); + ASSERT_NE(device, nullptr); + device3_x = device; + }); + ASSERT_TRUE(ret.isOk()); + + camera_metadata_t* staticMeta; + ret = device3_x->getCameraCharacteristics([&](Status s, CameraMetadata metadata) { + ASSERT_EQ(Status::OK, s); + staticMeta = + clone_camera_metadata(reinterpret_cast(metadata.data())); + ASSERT_NE(nullptr, staticMeta); + }); + ASSERT_TRUE(ret.isOk()); + + camera_metadata_ro_entry entry; + auto status = + find_camera_metadata_ro_entry(staticMeta, ANDROID_REQUEST_PARTIAL_RESULT_COUNT, &entry); + if ((0 == status) && (entry.count > 0)) { + *partialResultCount = entry.data.i32[0]; + *supportsPartialResults = (*partialResultCount > 1); + } + + sp cb = new DeviceCb(this, deviceVersion, 
staticMeta); + sp session; + ret = device3_x->open(cb, [&session](auto status, const auto& newSession) { + ALOGI("device::open returns status:%d", (int)status); + ASSERT_EQ(Status::OK, status); + ASSERT_NE(newSession, nullptr); + session = newSession; + }); + ASSERT_TRUE(ret.isOk()); + *outCb = cb; + + sp session3_3; + sp session3_4; + sp session3_5; + sp session3_6; + castSession(session, deviceVersion, &session3_3, &session3_4, &session3_5, &session3_6, + session3_7); + ASSERT_NE(nullptr, (*session3_7).get()); + + *useHalBufManager = false; + status = find_camera_metadata_ro_entry( + staticMeta, ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION, &entry); + if ((0 == status) && (entry.count == 1)) { + *useHalBufManager = (entry.data.u8[0] == + ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION_HIDL_DEVICE_3_5); + } + + outputStreams.clear(); + Size maxSize; + auto rc = getMaxOutputSizeForFormat(staticMeta, format, &maxSize, maxResolution); + ASSERT_EQ(Status::OK, rc); + free_camera_metadata(staticMeta); + + ::android::hardware::hidl_vec streams3_7(1); + streams3_7[0].groupId = -1; + streams3_7[0].sensorPixelModesUsed = { + CameraMetadataEnumAndroidSensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION}; + streams3_7[0].v3_4.bufferSize = 0; + streams3_7[0].v3_4.v3_2.id = 0; + streams3_7[0].v3_4.v3_2.streamType = StreamType::OUTPUT; + streams3_7[0].v3_4.v3_2.width = static_cast(maxSize.width); + streams3_7[0].v3_4.v3_2.height = static_cast(maxSize.height); + streams3_7[0].v3_4.v3_2.format = static_cast(format); + streams3_7[0].v3_4.v3_2.usage = GRALLOC1_CONSUMER_USAGE_CPU_READ; + streams3_7[0].v3_4.v3_2.dataSpace = 0; + streams3_7[0].v3_4.v3_2.rotation = StreamRotation::ROTATION_0; + + ::android::hardware::camera::device::V3_7::StreamConfiguration config3_7; + config3_7.streams = streams3_7; + config3_7.operationMode = StreamConfigurationMode::NORMAL_MODE; + config3_7.streamConfigCounter = streamConfigCounter; + config3_7.multiResolutionInputImage = false; + RequestTemplate reqTemplate = RequestTemplate::STILL_CAPTURE; + ret = (*session3_7) + ->constructDefaultRequestSettings(reqTemplate, + [&config3_7](auto status, const auto& req) { + ASSERT_EQ(Status::OK, status); + config3_7.sessionParams = req; + }); + ASSERT_TRUE(ret.isOk()); + + ASSERT_TRUE(deviceVersion >= CAMERA_DEVICE_API_VERSION_3_7); + sp cameraDevice3_5 = nullptr; + sp cameraDevice3_7 = nullptr; + castDevice(device3_x, deviceVersion, &cameraDevice3_5, &cameraDevice3_7); + ASSERT_NE(cameraDevice3_7, nullptr); + bool supported = false; + ret = cameraDevice3_7->isStreamCombinationSupported_3_7( + config3_7, [&supported](Status s, bool combStatus) { + ASSERT_TRUE((Status::OK == s) || (Status::METHOD_NOT_SUPPORTED == s)); + if (Status::OK == s) { + supported = combStatus; + } + }); + ASSERT_TRUE(ret.isOk()); + ASSERT_EQ(supported, true); + + if (*session3_7 != nullptr) { + ret = (*session3_7) + ->configureStreams_3_7( + config3_7, + [&](Status s, device::V3_6::HalStreamConfiguration halConfig) { + ASSERT_EQ(Status::OK, s); + *halStreamConfig = halConfig; + if (*useHalBufManager) { + hidl_vec streams(1); + hidl_vec halStreams(1); + streams[0] = streams3_7[0].v3_4; + halStreams[0] = halConfig.streams[0].v3_4.v3_3.v3_2; + cb->setCurrentStreamConfig(streams, halStreams); + } + }); + } + *previewStream = streams3_7[0].v3_4.v3_2; + ASSERT_TRUE(ret.isOk()); +} + +// Configure multiple preview streams using different physical ids. 
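+// One preview stream is created per physical camera id. When allowUnsupport is true and
+// the HAL reports the combination as unsupported, *session3_5 is reset to null and the
+// caller is expected to skip the configuration.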
+void CameraHidlTest::configurePreviewStreams3_4(const std::string &name, int32_t deviceVersion, + sp provider, + const AvailableStream *previewThreshold, + const std::unordered_set& physicalIds, + sp *session3_4 /*out*/, + sp *session3_5 /*out*/, + V3_2::Stream *previewStream /*out*/, + device::V3_4::HalStreamConfiguration *halStreamConfig /*out*/, + bool *supportsPartialResults /*out*/, + uint32_t *partialResultCount /*out*/, + bool *useHalBufManager /*out*/, + sp *outCb /*out*/, + uint32_t streamConfigCounter, + bool allowUnsupport) { + ASSERT_NE(nullptr, session3_4); + ASSERT_NE(nullptr, session3_5); + ASSERT_NE(nullptr, halStreamConfig); + ASSERT_NE(nullptr, previewStream); + ASSERT_NE(nullptr, supportsPartialResults); + ASSERT_NE(nullptr, partialResultCount); + ASSERT_NE(nullptr, useHalBufManager); + ASSERT_NE(nullptr, outCb); + ASSERT_FALSE(physicalIds.empty()); + + std::vector outputPreviewStreams; + ::android::sp device3_x; + ALOGI("configureStreams: Testing camera device %s", name.c_str()); + Return ret; + ret = provider->getCameraDeviceInterface_V3_x( + name, + [&](auto status, const auto& device) { + ALOGI("getCameraDeviceInterface_V3_x returns status:%d", + (int)status); + ASSERT_EQ(Status::OK, status); + ASSERT_NE(device, nullptr); + device3_x = device; + }); + ASSERT_TRUE(ret.isOk()); + + camera_metadata_t *staticMeta; + ret = device3_x->getCameraCharacteristics([&] (Status s, + CameraMetadata metadata) { + ASSERT_EQ(Status::OK, s); + staticMeta = clone_camera_metadata( + reinterpret_cast(metadata.data())); + ASSERT_NE(nullptr, staticMeta); + }); + ASSERT_TRUE(ret.isOk()); + + camera_metadata_ro_entry entry; + auto status = find_camera_metadata_ro_entry(staticMeta, + ANDROID_REQUEST_PARTIAL_RESULT_COUNT, &entry); + if ((0 == status) && (entry.count > 0)) { + *partialResultCount = entry.data.i32[0]; + *supportsPartialResults = (*partialResultCount > 1); + } + + sp cb = new DeviceCb(this, deviceVersion, staticMeta); + sp session; + ret = device3_x->open( + cb, + [&session](auto status, const auto& newSession) { + ALOGI("device::open returns status:%d", (int)status); + ASSERT_EQ(Status::OK, status); + ASSERT_NE(newSession, nullptr); + session = newSession; + }); + ASSERT_TRUE(ret.isOk()); + *outCb = cb; + + sp session3_3; + sp session3_6; + sp session3_7; + castSession(session, deviceVersion, &session3_3, session3_4, session3_5, + &session3_6, &session3_7); + ASSERT_NE(nullptr, (*session3_4).get()); + + *useHalBufManager = false; + status = find_camera_metadata_ro_entry(staticMeta, + ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION, &entry); + if ((0 == status) && (entry.count == 1)) { + *useHalBufManager = (entry.data.u8[0] == + ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION_HIDL_DEVICE_3_5); + } + + outputPreviewStreams.clear(); + auto rc = getAvailableOutputStreams(staticMeta, + outputPreviewStreams, previewThreshold); + free_camera_metadata(staticMeta); + ASSERT_EQ(Status::OK, rc); + ASSERT_FALSE(outputPreviewStreams.empty()); + + ::android::hardware::hidl_vec streams3_4(physicalIds.size()); + int32_t streamId = 0; + for (auto const& physicalId : physicalIds) { + V3_4::Stream stream3_4 = {{streamId, StreamType::OUTPUT, + static_cast (outputPreviewStreams[0].width), + static_cast (outputPreviewStreams[0].height), + static_cast (outputPreviewStreams[0].format), + GRALLOC1_CONSUMER_USAGE_HWCOMPOSER, 0, StreamRotation::ROTATION_0}, + physicalId.c_str(), /*bufferSize*/ 0}; + streams3_4[streamId++] = stream3_4; + } + + 
::android::hardware::camera::device::V3_4::StreamConfiguration config3_4; + ::android::hardware::camera::device::V3_5::StreamConfiguration config3_5; + config3_4 = {streams3_4, StreamConfigurationMode::NORMAL_MODE, {}}; + RequestTemplate reqTemplate = RequestTemplate::PREVIEW; + ret = (*session3_4)->constructDefaultRequestSettings(reqTemplate, + [&config3_4](auto status, const auto& req) { + ASSERT_EQ(Status::OK, status); + config3_4.sessionParams = req; + }); + ASSERT_TRUE(ret.isOk()); + + ASSERT_TRUE(!allowUnsupport || deviceVersion >= CAMERA_DEVICE_API_VERSION_3_5); + if (allowUnsupport) { + sp cameraDevice3_5; + sp cameraDevice3_7; + castDevice(device3_x, deviceVersion, &cameraDevice3_5, &cameraDevice3_7); + + bool supported = false; + ret = cameraDevice3_5->isStreamCombinationSupported(config3_4, + [&supported](Status s, bool combStatus) { + ASSERT_TRUE((Status::OK == s) || + (Status::METHOD_NOT_SUPPORTED == s)); + if (Status::OK == s) { + supported = combStatus; + } + }); + ASSERT_TRUE(ret.isOk()); + // If stream combination is not supported, return null session. + if (!supported) { + *session3_5 = nullptr; + return; + } + } + + if (*session3_5 != nullptr) { + config3_5.v3_4 = config3_4; + config3_5.streamConfigCounter = streamConfigCounter; + ret = (*session3_5)->configureStreams_3_5(config3_5, + [&] (Status s, device::V3_4::HalStreamConfiguration halConfig) { + ASSERT_EQ(Status::OK, s); + ASSERT_EQ(physicalIds.size(), halConfig.streams.size()); + *halStreamConfig = halConfig; + if (*useHalBufManager) { + hidl_vec streams(physicalIds.size()); + hidl_vec halStreams(physicalIds.size()); + for (size_t i = 0; i < physicalIds.size(); i++) { + streams[i] = streams3_4[i]; + halStreams[i] = halConfig.streams[i].v3_3.v3_2; + } + cb->setCurrentStreamConfig(streams, halStreams); + } + }); + } else { + ret = (*session3_4)->configureStreams_3_4(config3_4, + [&] (Status s, device::V3_4::HalStreamConfiguration halConfig) { + ASSERT_EQ(Status::OK, s); + ASSERT_EQ(physicalIds.size(), halConfig.streams.size()); + *halStreamConfig = halConfig; + }); + } + *previewStream = streams3_4[0].v3_2; + ASSERT_TRUE(ret.isOk()); +} + +// Configure preview stream with possible offline session support +void CameraHidlTest::configureOfflineStillStream(const std::string &name, + int32_t deviceVersion, + sp provider, + const AvailableStream *threshold, + sp *session/*out*/, + V3_2::Stream *stream /*out*/, + device::V3_6::HalStreamConfiguration *halStreamConfig /*out*/, + bool *supportsPartialResults /*out*/, + uint32_t *partialResultCount /*out*/, + sp *outCb /*out*/, + uint32_t *jpegBufferSize /*out*/, + bool *useHalBufManager /*out*/) { + ASSERT_NE(nullptr, session); + ASSERT_NE(nullptr, halStreamConfig); + ASSERT_NE(nullptr, stream); + ASSERT_NE(nullptr, supportsPartialResults); + ASSERT_NE(nullptr, partialResultCount); + ASSERT_NE(nullptr, outCb); + ASSERT_NE(nullptr, jpegBufferSize); + ASSERT_NE(nullptr, useHalBufManager); + + std::vector outputStreams; + ::android::sp cameraDevice; + ALOGI("configureStreams: Testing camera device %s", name.c_str()); + Return ret; + ret = provider->getCameraDeviceInterface_V3_x( + name, + [&cameraDevice](auto status, const auto& device) { + ALOGI("getCameraDeviceInterface_V3_x returns status:%d", + (int)status); + ASSERT_EQ(Status::OK, status); + ASSERT_NE(device, nullptr); + auto castResult = device::V3_6::ICameraDevice::castFrom(device); + ASSERT_TRUE(castResult.isOk()); + cameraDevice = castResult; + }); + ASSERT_TRUE(ret.isOk()); + + camera_metadata_t *staticMeta; + ret = 
cameraDevice->getCameraCharacteristics([&] (Status s, + CameraMetadata metadata) { + ASSERT_EQ(Status::OK, s); + staticMeta = clone_camera_metadata( + reinterpret_cast(metadata.data())); + ASSERT_NE(nullptr, staticMeta); + }); + ASSERT_TRUE(ret.isOk()); + + camera_metadata_ro_entry entry; + auto status = find_camera_metadata_ro_entry(staticMeta, + ANDROID_REQUEST_PARTIAL_RESULT_COUNT, &entry); + if ((0 == status) && (entry.count > 0)) { + *partialResultCount = entry.data.i32[0]; + *supportsPartialResults = (*partialResultCount > 1); + } + + *useHalBufManager = false; + status = find_camera_metadata_ro_entry(staticMeta, + ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION, &entry); + if ((0 == status) && (entry.count == 1)) { + *useHalBufManager = (entry.data.u8[0] == + ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION_HIDL_DEVICE_3_5); + } + + auto st = getJpegBufferSize(staticMeta, jpegBufferSize); + ASSERT_EQ(st, Status::OK); + + sp cb = new DeviceCb(this, deviceVersion, staticMeta); + ret = cameraDevice->open(cb, [&session](auto status, const auto& newSession) { + ALOGI("device::open returns status:%d", (int)status); + ASSERT_EQ(Status::OK, status); + ASSERT_NE(newSession, nullptr); + auto castResult = device::V3_6::ICameraDeviceSession::castFrom(newSession); + ASSERT_TRUE(castResult.isOk()); + *session = castResult; + }); + ASSERT_TRUE(ret.isOk()); + *outCb = cb; + + outputStreams.clear(); + auto rc = getAvailableOutputStreams(staticMeta, + outputStreams, threshold); + size_t idx = 0; + int currLargest = outputStreams[0].width * outputStreams[0].height; + for (size_t i = 0; i < outputStreams.size(); i++) { + int area = outputStreams[i].width * outputStreams[i].height; + if (area > currLargest) { + idx = i; + currLargest = area; + } + } + free_camera_metadata(staticMeta); + ASSERT_EQ(Status::OK, rc); + ASSERT_FALSE(outputStreams.empty()); + + V3_2::DataspaceFlags dataspaceFlag = getDataspace( + static_cast(outputStreams[idx].format)); + + ::android::hardware::hidl_vec streams3_4(/*size*/1); + V3_4::Stream stream3_4 = {{ 0 /*streamId*/, StreamType::OUTPUT, + static_cast (outputStreams[idx].width), + static_cast (outputStreams[idx].height), + static_cast (outputStreams[idx].format), + GRALLOC1_CONSUMER_USAGE_CPU_READ, dataspaceFlag, StreamRotation::ROTATION_0}, + nullptr /*physicalId*/, /*bufferSize*/ *jpegBufferSize}; + streams3_4[0] = stream3_4; + + ::android::hardware::camera::device::V3_4::StreamConfiguration config3_4; + ::android::hardware::camera::device::V3_5::StreamConfiguration config3_5; + config3_4 = {streams3_4, StreamConfigurationMode::NORMAL_MODE, {}}; + + config3_5.v3_4 = config3_4; + config3_5.streamConfigCounter = 0; + ret = (*session)->configureStreams_3_6(config3_5, + [&] (Status s, device::V3_6::HalStreamConfiguration halConfig) { + ASSERT_EQ(Status::OK, s); + *halStreamConfig = halConfig; + + if (*useHalBufManager) { + hidl_vec halStreams3_2(1); + halStreams3_2[0] = halConfig.streams[0].v3_4.v3_3.v3_2; + cb->setCurrentStreamConfig(streams3_4, halStreams3_2); + } + }); + *stream = streams3_4[0].v3_2; + ASSERT_TRUE(ret.isOk()); +} + +bool CameraHidlTest::isUltraHighResolution(const camera_metadata_t* staticMeta) { + camera_metadata_ro_entry scalarEntry; + int rc = find_camera_metadata_ro_entry(staticMeta, ANDROID_REQUEST_AVAILABLE_CAPABILITIES, + &scalarEntry); + if (rc == 0) { + for (uint32_t i = 0; i < scalarEntry.count; i++) { + if (scalarEntry.data.u8[i] == + ANDROID_REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR) { + return true; + } + } + } + return 
false; +} + +bool CameraHidlTest::isDepthOnly(const camera_metadata_t* staticMeta) { + camera_metadata_ro_entry scalarEntry; + camera_metadata_ro_entry depthEntry; + + int rc = find_camera_metadata_ro_entry( + staticMeta, ANDROID_REQUEST_AVAILABLE_CAPABILITIES, &scalarEntry); + if (rc == 0) { + for (uint32_t i = 0; i < scalarEntry.count; i++) { + if (scalarEntry.data.u8[i] == ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE) { + return false; + } + } + } + + for (uint32_t i = 0; i < scalarEntry.count; i++) { + if (scalarEntry.data.u8[i] == ANDROID_REQUEST_AVAILABLE_CAPABILITIES_DEPTH_OUTPUT) { + + rc = find_camera_metadata_ro_entry( + staticMeta, ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS, &depthEntry); + size_t i = 0; + if (rc == 0 && depthEntry.data.i32[i] == static_cast(PixelFormat::Y16)) { + // only Depth16 format is supported now + return true; + } + break; + } + } + + return false; +} + +void CameraHidlTest::updateInflightResultQueue(std::shared_ptr resultQueue) { + std::unique_lock l(mLock); + for (size_t i = 0; i < mInflightMap.size(); i++) { + auto& req = mInflightMap.editValueAt(i); + req->resultQueue = resultQueue; + } +} + +// Open a device session and configure a preview stream. +void CameraHidlTest::configurePreviewStream(const std::string &name, int32_t deviceVersion, + sp provider, + const AvailableStream *previewThreshold, + sp *session /*out*/, + V3_2::Stream *previewStream /*out*/, + HalStreamConfiguration *halStreamConfig /*out*/, + bool *supportsPartialResults /*out*/, + uint32_t *partialResultCount /*out*/, + bool *useHalBufManager /*out*/, + sp *outCb /*out*/, + uint32_t streamConfigCounter) { + configureSingleStream(name, deviceVersion, provider, previewThreshold, + GRALLOC1_CONSUMER_USAGE_HWCOMPOSER, RequestTemplate::PREVIEW, session, + previewStream, halStreamConfig, supportsPartialResults, + partialResultCount, useHalBufManager, outCb, streamConfigCounter); +} +// Open a device session and configure a preview stream. 
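+// configureSingleStream() picks the first output resolution that fits the given
+// threshold and dispatches to the newest configureStreams variant supported by the
+// session (3.7, 3.5, 3.4, 3.3, or the base 3.2 entry point).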
+void CameraHidlTest::configureSingleStream( + const std::string& name, int32_t deviceVersion, sp provider, + const AvailableStream* previewThreshold, uint64_t bufferUsage, RequestTemplate reqTemplate, + sp* session /*out*/, V3_2::Stream* previewStream /*out*/, + HalStreamConfiguration* halStreamConfig /*out*/, bool* supportsPartialResults /*out*/, + uint32_t* partialResultCount /*out*/, bool* useHalBufManager /*out*/, + sp* outCb /*out*/, uint32_t streamConfigCounter) { + ASSERT_NE(nullptr, session); + ASSERT_NE(nullptr, previewStream); + ASSERT_NE(nullptr, halStreamConfig); + ASSERT_NE(nullptr, supportsPartialResults); + ASSERT_NE(nullptr, partialResultCount); + ASSERT_NE(nullptr, useHalBufManager); + ASSERT_NE(nullptr, outCb); + + std::vector outputPreviewStreams; + ::android::sp device3_x; + ALOGI("configureStreams: Testing camera device %s", name.c_str()); + Return ret; + ret = provider->getCameraDeviceInterface_V3_x( + name, + [&](auto status, const auto& device) { + ALOGI("getCameraDeviceInterface_V3_x returns status:%d", + (int)status); + ASSERT_EQ(Status::OK, status); + ASSERT_NE(device, nullptr); + device3_x = device; + }); + ASSERT_TRUE(ret.isOk()); + + camera_metadata_t *staticMeta; + ret = device3_x->getCameraCharacteristics([&] (Status s, + CameraMetadata metadata) { + ASSERT_EQ(Status::OK, s); + staticMeta = clone_camera_metadata( + reinterpret_cast(metadata.data())); + ASSERT_NE(nullptr, staticMeta); + }); + ASSERT_TRUE(ret.isOk()); + + camera_metadata_ro_entry entry; + auto status = find_camera_metadata_ro_entry(staticMeta, + ANDROID_REQUEST_PARTIAL_RESULT_COUNT, &entry); + if ((0 == status) && (entry.count > 0)) { + *partialResultCount = entry.data.i32[0]; + *supportsPartialResults = (*partialResultCount > 1); + } + + sp cb = new DeviceCb(this, deviceVersion, staticMeta); + ret = device3_x->open( + cb, + [&](auto status, const auto& newSession) { + ALOGI("device::open returns status:%d", (int)status); + ASSERT_EQ(Status::OK, status); + ASSERT_NE(newSession, nullptr); + *session = newSession; + }); + ASSERT_TRUE(ret.isOk()); + *outCb = cb; + + sp session3_3; + sp session3_4; + sp session3_5; + sp session3_6; + sp session3_7; + castSession(*session, deviceVersion, &session3_3, &session3_4, &session3_5, + &session3_6, &session3_7); + + *useHalBufManager = false; + status = find_camera_metadata_ro_entry(staticMeta, + ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION, &entry); + if ((0 == status) && (entry.count == 1)) { + *useHalBufManager = (entry.data.u8[0] == + ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION_HIDL_DEVICE_3_5); + } + + outputPreviewStreams.clear(); + auto rc = getAvailableOutputStreams(staticMeta, + outputPreviewStreams, previewThreshold); + + uint32_t jpegBufferSize = 0; + ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize)); + ASSERT_NE(0u, jpegBufferSize); + + free_camera_metadata(staticMeta); + ASSERT_EQ(Status::OK, rc); + ASSERT_FALSE(outputPreviewStreams.empty()); + + V3_2::DataspaceFlags dataspaceFlag = 0; + switch (static_cast(outputPreviewStreams[0].format)) { + case PixelFormat::Y16: + dataspaceFlag = static_cast(Dataspace::DEPTH); + break; + default: + dataspaceFlag = static_cast(Dataspace::UNKNOWN); + } + + V3_2::Stream stream3_2 = {0, + StreamType::OUTPUT, + static_cast(outputPreviewStreams[0].width), + static_cast(outputPreviewStreams[0].height), + static_cast(outputPreviewStreams[0].format), + bufferUsage, + dataspaceFlag, + StreamRotation::ROTATION_0}; + ::android::hardware::hidl_vec streams3_2 = {stream3_2}; + 
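// The large if/else-if chain that follows always talks to the newest
// ICameraDeviceSession version the device could be cast to (3.7 first, then 3.5,
// 3.4, 3.3, and finally the base interface). A minimal sketch of that precedence
// rule with plain function objects standing in for the per-version
// configureStreams_3_x calls; the names here are illustrative only.
#include <functional>
#include <utility>
#include <vector>

// Candidates ordered newest-first: (interface was cast successfully, configure call).
using VersionedConfigure = std::pair<bool, std::function<void()>>;

static void configureWithNewestVersion(const std::vector<VersionedConfigure>& candidates) {
    for (const auto& [available, configure] : candidates) {
        if (available) {
            configure();  // Only the newest available interface is exercised.
            return;
        }
    }
}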
::android::hardware::camera::device::V3_2::StreamConfiguration config3_2; + ::android::hardware::camera::device::V3_4::StreamConfiguration config3_4; + ::android::hardware::camera::device::V3_5::StreamConfiguration config3_5; + ::android::hardware::camera::device::V3_7::StreamConfiguration config3_7; + createStreamConfiguration(streams3_2, StreamConfigurationMode::NORMAL_MODE, &config3_2, + &config3_4, &config3_5, &config3_7, jpegBufferSize); + if (session3_7 != nullptr) { + ret = session3_7->constructDefaultRequestSettings( + reqTemplate, [&config3_7](auto status, const auto& req) { + ASSERT_EQ(Status::OK, status); + config3_7.sessionParams = req; + }); + ASSERT_TRUE(ret.isOk()); + config3_7.streamConfigCounter = streamConfigCounter; + ret = session3_7->configureStreams_3_7( + config3_7, [&](Status s, device::V3_6::HalStreamConfiguration halConfig) { + ASSERT_EQ(Status::OK, s); + ASSERT_EQ(1u, halConfig.streams.size()); + halStreamConfig->streams.resize(1); + halStreamConfig->streams[0] = halConfig.streams[0].v3_4.v3_3.v3_2; + if (*useHalBufManager) { + hidl_vec streams(1); + hidl_vec halStreams(1); + streams[0] = config3_4.streams[0]; + halStreams[0] = halConfig.streams[0].v3_4.v3_3.v3_2; + cb->setCurrentStreamConfig(streams, halStreams); + } + }); + } else if (session3_5 != nullptr) { + ret = session3_5->constructDefaultRequestSettings(reqTemplate, + [&config3_5](auto status, const auto& req) { + ASSERT_EQ(Status::OK, status); + config3_5.v3_4.sessionParams = req; + }); + ASSERT_TRUE(ret.isOk()); + config3_5.streamConfigCounter = streamConfigCounter; + ret = session3_5->configureStreams_3_5(config3_5, + [&] (Status s, device::V3_4::HalStreamConfiguration halConfig) { + ASSERT_EQ(Status::OK, s); + ASSERT_EQ(1u, halConfig.streams.size()); + halStreamConfig->streams.resize(1); + halStreamConfig->streams[0] = halConfig.streams[0].v3_3.v3_2; + if (*useHalBufManager) { + hidl_vec streams(1); + hidl_vec halStreams(1); + streams[0] = config3_4.streams[0]; + halStreams[0] = halConfig.streams[0].v3_3.v3_2; + cb->setCurrentStreamConfig(streams, halStreams); + } + }); + } else if (session3_4 != nullptr) { + ret = session3_4->constructDefaultRequestSettings(reqTemplate, + [&config3_4](auto status, const auto& req) { + ASSERT_EQ(Status::OK, status); + config3_4.sessionParams = req; + }); + ASSERT_TRUE(ret.isOk()); + ret = session3_4->configureStreams_3_4(config3_4, + [&] (Status s, device::V3_4::HalStreamConfiguration halConfig) { + ASSERT_EQ(Status::OK, s); + ASSERT_EQ(1u, halConfig.streams.size()); + halStreamConfig->streams.resize(halConfig.streams.size()); + for (size_t i = 0; i < halConfig.streams.size(); i++) { + halStreamConfig->streams[i] = halConfig.streams[i].v3_3.v3_2; + } + }); + } else if (session3_3 != nullptr) { + ret = session3_3->configureStreams_3_3(config3_2, + [&] (Status s, device::V3_3::HalStreamConfiguration halConfig) { + ASSERT_EQ(Status::OK, s); + ASSERT_EQ(1u, halConfig.streams.size()); + halStreamConfig->streams.resize(halConfig.streams.size()); + for (size_t i = 0; i < halConfig.streams.size(); i++) { + halStreamConfig->streams[i] = halConfig.streams[i].v3_2; + } + }); + } else { + ret = (*session)->configureStreams(config3_2, + [&] (Status s, HalStreamConfiguration halConfig) { + ASSERT_EQ(Status::OK, s); + ASSERT_EQ(1u, halConfig.streams.size()); + *halStreamConfig = halConfig; + }); + } + *previewStream = stream3_2; + ASSERT_TRUE(ret.isOk()); +} + +void CameraHidlTest::castDevice(const sp& device, + int32_t deviceVersion, + sp* device3_5 /*out*/, + sp* device3_7 
/*out*/) { + ASSERT_NE(nullptr, device3_5); + ASSERT_NE(nullptr, device3_7); + + switch (deviceVersion) { + case CAMERA_DEVICE_API_VERSION_3_7: { + auto castResult = device::V3_7::ICameraDevice::castFrom(device); + ASSERT_TRUE(castResult.isOk()); + *device3_7 = castResult; + } + [[fallthrough]]; + case CAMERA_DEVICE_API_VERSION_3_5: { + auto castResult = device::V3_5::ICameraDevice::castFrom(device); + ASSERT_TRUE(castResult.isOk()); + *device3_5 = castResult; + break; + } + default: + // no-op + return; + } +} + +//Cast camera provider to corresponding version if available +void CameraHidlTest::castProvider(const sp& provider, + sp* provider2_5 /*out*/, + sp* provider2_6 /*out*/, + sp* provider2_7 /*out*/) { + ASSERT_NE(nullptr, provider2_5); + auto castResult2_5 = provider::V2_5::ICameraProvider::castFrom(provider); + if (castResult2_5.isOk()) { + *provider2_5 = castResult2_5; + } + + ASSERT_NE(nullptr, provider2_6); + auto castResult2_6 = provider::V2_6::ICameraProvider::castFrom(provider); + if (castResult2_6.isOk()) { + *provider2_6 = castResult2_6; + } + + ASSERT_NE(nullptr, provider2_7); + auto castResult2_7 = provider::V2_7::ICameraProvider::castFrom(provider); + if (castResult2_7.isOk()) { + *provider2_7 = castResult2_7; + } +} + +//Cast camera device session to corresponding version +void CameraHidlTest::castSession(const sp &session, int32_t deviceVersion, + sp *session3_3 /*out*/, + sp *session3_4 /*out*/, + sp *session3_5 /*out*/, + sp *session3_6 /*out*/, + sp *session3_7 /*out*/) { + ASSERT_NE(nullptr, session3_3); + ASSERT_NE(nullptr, session3_4); + ASSERT_NE(nullptr, session3_5); + ASSERT_NE(nullptr, session3_6); + ASSERT_NE(nullptr, session3_7); + + switch (deviceVersion) { + case CAMERA_DEVICE_API_VERSION_3_7: { + auto castResult = device::V3_7::ICameraDeviceSession::castFrom(session); + ASSERT_TRUE(castResult.isOk()); + *session3_7 = castResult; + } + [[fallthrough]]; + case CAMERA_DEVICE_API_VERSION_3_6: { + auto castResult = device::V3_6::ICameraDeviceSession::castFrom(session); + ASSERT_TRUE(castResult.isOk()); + *session3_6 = castResult; + } + [[fallthrough]]; + case CAMERA_DEVICE_API_VERSION_3_5: { + auto castResult = device::V3_5::ICameraDeviceSession::castFrom(session); + ASSERT_TRUE(castResult.isOk()); + *session3_5 = castResult; + } + [[fallthrough]]; + case CAMERA_DEVICE_API_VERSION_3_4: { + auto castResult = device::V3_4::ICameraDeviceSession::castFrom(session); + ASSERT_TRUE(castResult.isOk()); + *session3_4 = castResult; + } + [[fallthrough]]; + case CAMERA_DEVICE_API_VERSION_3_3: { + auto castResult = device::V3_3::ICameraDeviceSession::castFrom(session); + ASSERT_TRUE(castResult.isOk()); + *session3_3 = castResult; + break; + } + default: + //no-op + return; + } +} + +// Cast camera device session to injection session +void CameraHidlTest::castInjectionSession( + const sp& session, + sp* injectionSession3_7 /*out*/) { + ASSERT_NE(nullptr, injectionSession3_7); + + sp session3_7; + auto castResult = device::V3_7::ICameraDeviceSession::castFrom(session); + ASSERT_TRUE(castResult.isOk()); + session3_7 = castResult; + + auto castInjectionResult = device::V3_7::ICameraInjectionSession::castFrom(session3_7); + ASSERT_TRUE(castInjectionResult.isOk()); + *injectionSession3_7 = castInjectionResult; +} + +void CameraHidlTest::verifyStreamCombination( + sp cameraDevice3_7, + const ::android::hardware::camera::device::V3_7::StreamConfiguration& config3_7, + sp cameraDevice3_5, + const ::android::hardware::camera::device::V3_4::StreamConfiguration& config3_4, + bool 
expectedStatus, bool expectMethodSupported) { + if (cameraDevice3_7.get() != nullptr) { + auto ret = cameraDevice3_7->isStreamCombinationSupported_3_7( + config3_7, [expectedStatus, expectMethodSupported](Status s, bool combStatus) { + ASSERT_TRUE((Status::OK == s) || + (!expectMethodSupported && Status::METHOD_NOT_SUPPORTED == s)); + if (Status::OK == s) { + ASSERT_TRUE(combStatus == expectedStatus); + } + }); + ASSERT_TRUE(ret.isOk()); + } + + if (cameraDevice3_5.get() != nullptr) { + auto ret = cameraDevice3_5->isStreamCombinationSupported(config3_4, + [expectedStatus, expectMethodSupported] (Status s, bool combStatus) { + ASSERT_TRUE((Status::OK == s) || + (!expectMethodSupported && Status::METHOD_NOT_SUPPORTED == s)); + if (Status::OK == s) { + ASSERT_TRUE(combStatus == expectedStatus); + } + }); + ASSERT_TRUE(ret.isOk()); + } +} + +// Verify logical or ultra high resolution camera static metadata +void CameraHidlTest::verifyLogicalOrUltraHighResCameraMetadata( + const std::string& cameraName, + const ::android::sp<::android::hardware::camera::device::V3_2::ICameraDevice>& device, + const CameraMetadata& chars, int deviceVersion, const hidl_vec& deviceNames) { + const camera_metadata_t* metadata = (camera_metadata_t*)chars.data(); + ASSERT_NE(nullptr, metadata); + SystemCameraKind systemCameraKind = SystemCameraKind::PUBLIC; + Status rc = getSystemCameraKind(metadata, &systemCameraKind); + ASSERT_EQ(rc, Status::OK); + rc = isLogicalMultiCamera(metadata); + ASSERT_TRUE(Status::OK == rc || Status::METHOD_NOT_SUPPORTED == rc); + bool isMultiCamera = (Status::OK == rc); + bool isUltraHighResCamera = isUltraHighResolution(metadata); + if (!isMultiCamera && !isUltraHighResCamera) { + return; + } + + camera_metadata_ro_entry entry; + int retcode = find_camera_metadata_ro_entry(metadata, + ANDROID_CONTROL_ZOOM_RATIO_RANGE, &entry); + bool hasZoomRatioRange = (0 == retcode && entry.count == 2); + retcode = find_camera_metadata_ro_entry( + metadata, ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION, &entry); + bool hasHalBufferManager = + (0 == retcode && 1 == entry.count && + entry.data.i32[0] == ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION_HIDL_DEVICE_3_5); + retcode = find_camera_metadata_ro_entry( + metadata, ANDROID_SCALER_MULTI_RESOLUTION_STREAM_SUPPORTED, &entry); + bool multiResolutionStreamSupported = + (0 == retcode && 1 == entry.count && + entry.data.u8[0] == ANDROID_SCALER_MULTI_RESOLUTION_STREAM_SUPPORTED_TRUE); + if (multiResolutionStreamSupported) { + ASSERT_TRUE(hasHalBufferManager); + } + + std::string version, cameraId; + ASSERT_TRUE(::matchDeviceName(cameraName, mProviderType, &version, &cameraId)); + std::unordered_set physicalIds; + rc = getPhysicalCameraIds(metadata, &physicalIds); + ASSERT_TRUE(isUltraHighResCamera || Status::OK == rc); + for (auto physicalId : physicalIds) { + ASSERT_NE(physicalId, cameraId); + } + if (physicalIds.size() == 0) { + ASSERT_TRUE(isUltraHighResCamera && !isMultiCamera); + physicalIds.insert(cameraId); + } + + std::unordered_set physicalRequestKeyIDs; + rc = getSupportedKeys(const_cast(metadata), + ANDROID_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS, &physicalRequestKeyIDs); + ASSERT_TRUE(Status::OK == rc); + bool hasTestPatternPhysicalRequestKey = physicalRequestKeyIDs.find( + ANDROID_SENSOR_TEST_PATTERN_MODE) != physicalRequestKeyIDs.end(); + std::unordered_set privacyTestPatternModes; + getPrivacyTestPatternModes(metadata, &privacyTestPatternModes); + + // Map from image format to number of multi-resolution sizes for that format + 
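// The bookkeeping below boils down to: walk the (format, width, height, isInput)
// entries, count multi-resolution sizes per format, and later require at least one
// format to expose two or more sizes whenever multi-resolution streaming is
// advertised. A minimal sketch of that counting with a plain struct standing in
// for the metadata arrays (MultiResEntry is illustrative, not a HAL type).
#include <cstddef>
#include <cstdint>
#include <unordered_map>
#include <vector>

struct MultiResEntry {
    int32_t format;
    int32_t width;
    int32_t height;
    bool isInput;
};

static bool hasFormatWithMultipleSizes(const std::vector<MultiResEntry>& entries) {
    std::unordered_map<int32_t, size_t> outputCounts;
    std::unordered_map<int32_t, size_t> inputCounts;
    for (const auto& e : entries) {
        (e.isInput ? inputCounts : outputCounts)[e.format]++;
    }
    auto anyFormatHasTwoSizes = [](const std::unordered_map<int32_t, size_t>& counts) {
        for (const auto& kv : counts) {
            if (kv.second >= 2) return true;
        }
        return false;
    };
    return anyFormatHasTwoSizes(outputCounts) || anyFormatHasTwoSizes(inputCounts);
}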
std::unordered_map multiResOutputFormatCounterMap; + std::unordered_map multiResInputFormatCounterMap; + for (auto physicalId : physicalIds) { + bool isPublicId = false; + std::string fullPublicId; + SystemCameraKind physSystemCameraKind = SystemCameraKind::PUBLIC; + for (auto& deviceName : deviceNames) { + std::string publicVersion, publicId; + ASSERT_TRUE(::matchDeviceName(deviceName, mProviderType, &publicVersion, &publicId)); + if (physicalId == publicId) { + isPublicId = true; + fullPublicId = deviceName; + break; + } + } + + camera_metadata_ro_entry physicalMultiResStreamConfigs; + camera_metadata_ro_entry physicalStreamConfigs; + camera_metadata_ro_entry physicalMaxResolutionStreamConfigs; + bool isUltraHighRes = false; + std::unordered_set subCameraPrivacyTestPatterns; + if (isPublicId) { + ::android::sp<::android::hardware::camera::device::V3_2::ICameraDevice> subDevice; + Return ret; + ret = mProvider->getCameraDeviceInterface_V3_x( + fullPublicId, [&](auto status, const auto& device) { + ASSERT_EQ(Status::OK, status); + ASSERT_NE(device, nullptr); + subDevice = device; + }); + ASSERT_TRUE(ret.isOk()); + + ret = subDevice->getCameraCharacteristics([&](auto status, const auto& chars) { + ASSERT_EQ(Status::OK, status); + const camera_metadata_t* staticMetadata = + reinterpret_cast(chars.data()); + rc = getSystemCameraKind(staticMetadata, &physSystemCameraKind); + ASSERT_EQ(rc, Status::OK); + // Make sure that the system camera kind of a non-hidden + // physical cameras is the same as the logical camera associated + // with it. + ASSERT_EQ(physSystemCameraKind, systemCameraKind); + retcode = find_camera_metadata_ro_entry(staticMetadata, + ANDROID_CONTROL_ZOOM_RATIO_RANGE, &entry); + bool subCameraHasZoomRatioRange = (0 == retcode && entry.count == 2); + ASSERT_EQ(hasZoomRatioRange, subCameraHasZoomRatioRange); + + getMultiResolutionStreamConfigurations( + &physicalMultiResStreamConfigs, &physicalStreamConfigs, + &physicalMaxResolutionStreamConfigs, staticMetadata); + isUltraHighRes = isUltraHighResolution(staticMetadata); + + getPrivacyTestPatternModes(staticMetadata, &subCameraPrivacyTestPatterns); + }); + ASSERT_TRUE(ret.isOk()); + } else { + ASSERT_TRUE(deviceVersion >= CAMERA_DEVICE_API_VERSION_3_5); + auto castResult = device::V3_5::ICameraDevice::castFrom(device); + ASSERT_TRUE(castResult.isOk()); + ::android::sp<::android::hardware::camera::device::V3_5::ICameraDevice> device3_5 = + castResult; + ASSERT_NE(device3_5, nullptr); + + // Check camera characteristics for hidden camera id + Return ret = device3_5->getPhysicalCameraCharacteristics( + physicalId, [&](auto status, const auto& chars) { + verifyCameraCharacteristics(status, chars); + verifyMonochromeCharacteristics(chars, deviceVersion); + + auto staticMetadata = (const camera_metadata_t*)chars.data(); + retcode = find_camera_metadata_ro_entry( + staticMetadata, ANDROID_CONTROL_ZOOM_RATIO_RANGE, &entry); + bool subCameraHasZoomRatioRange = (0 == retcode && entry.count == 2); + ASSERT_EQ(hasZoomRatioRange, subCameraHasZoomRatioRange); + + getMultiResolutionStreamConfigurations( + &physicalMultiResStreamConfigs, &physicalStreamConfigs, + &physicalMaxResolutionStreamConfigs, staticMetadata); + isUltraHighRes = isUltraHighResolution(staticMetadata); + getPrivacyTestPatternModes(staticMetadata, &subCameraPrivacyTestPatterns); + }); + ASSERT_TRUE(ret.isOk()); + + // Check calling getCameraDeviceInterface_V3_x() on hidden camera id returns + // ILLEGAL_ARGUMENT. 
+ std::stringstream s; + s << "device@" << version << "/" << mProviderType << "/" << physicalId; + hidl_string fullPhysicalId(s.str()); + ret = mProvider->getCameraDeviceInterface_V3_x( + fullPhysicalId, [&](auto status, const auto& device3_x) { + ASSERT_EQ(Status::ILLEGAL_ARGUMENT, status); + ASSERT_EQ(device3_x, nullptr); + }); + ASSERT_TRUE(ret.isOk()); + } + + if (hasTestPatternPhysicalRequestKey) { + ASSERT_TRUE(privacyTestPatternModes == subCameraPrivacyTestPatterns); + } + + if (physicalMultiResStreamConfigs.count > 0) { + ASSERT_GE(deviceVersion, CAMERA_DEVICE_API_VERSION_3_7); + ASSERT_EQ(physicalMultiResStreamConfigs.count % 4, 0); + + // Each supported size must be max size for that format, + for (size_t i = 0; i < physicalMultiResStreamConfigs.count / 4; i++) { + int32_t multiResFormat = physicalMultiResStreamConfigs.data.i32[i * 4]; + int32_t multiResWidth = physicalMultiResStreamConfigs.data.i32[i * 4 + 1]; + int32_t multiResHeight = physicalMultiResStreamConfigs.data.i32[i * 4 + 2]; + int32_t multiResInput = physicalMultiResStreamConfigs.data.i32[i * 4 + 3]; + + // Check if the resolution is the max resolution in stream + // configuration map + bool supported = false; + bool isMaxSize = true; + for (size_t j = 0; j < physicalStreamConfigs.count / 4; j++) { + int32_t format = physicalStreamConfigs.data.i32[j * 4]; + int32_t width = physicalStreamConfigs.data.i32[j * 4 + 1]; + int32_t height = physicalStreamConfigs.data.i32[j * 4 + 2]; + int32_t input = physicalStreamConfigs.data.i32[j * 4 + 3]; + if (format == multiResFormat && input == multiResInput) { + if (width == multiResWidth && height == multiResHeight) { + supported = true; + } else if (width * height > multiResWidth * multiResHeight) { + isMaxSize = false; + } + } + } + // Check if the resolution is the max resolution in max + // resolution stream configuration map + bool supportedUltraHighRes = false; + bool isUltraHighResMaxSize = true; + for (size_t j = 0; j < physicalMaxResolutionStreamConfigs.count / 4; j++) { + int32_t format = physicalMaxResolutionStreamConfigs.data.i32[j * 4]; + int32_t width = physicalMaxResolutionStreamConfigs.data.i32[j * 4 + 1]; + int32_t height = physicalMaxResolutionStreamConfigs.data.i32[j * 4 + 2]; + int32_t input = physicalMaxResolutionStreamConfigs.data.i32[j * 4 + 3]; + if (format == multiResFormat && input == multiResInput) { + if (width == multiResWidth && height == multiResHeight) { + supportedUltraHighRes = true; + } else if (width * height > multiResWidth * multiResHeight) { + isUltraHighResMaxSize = false; + } + } + } + + if (isUltraHighRes) { + // For ultra high resolution camera, the configuration must + // be the maximum size in stream configuration map, or max + // resolution stream configuration map + ASSERT_TRUE((supported && isMaxSize) || + (supportedUltraHighRes && isUltraHighResMaxSize)); + } else { + // The configuration must be the maximum size in stream + // configuration map + ASSERT_TRUE(supported && isMaxSize); + ASSERT_FALSE(supportedUltraHighRes); + } + + // Increment the counter for the configuration's format. + auto& formatCounterMap = multiResInput ? 
multiResInputFormatCounterMap + : multiResOutputFormatCounterMap; + if (formatCounterMap.count(multiResFormat) == 0) { + formatCounterMap[multiResFormat] = 1; + } else { + formatCounterMap[multiResFormat]++; + } + } + + // There must be no duplicates + for (size_t i = 0; i < physicalMultiResStreamConfigs.count / 4 - 1; i++) { + for (size_t j = i + 1; j < physicalMultiResStreamConfigs.count / 4; j++) { + // Input/output doesn't match + if (physicalMultiResStreamConfigs.data.i32[i * 4 + 3] != + physicalMultiResStreamConfigs.data.i32[j * 4 + 3]) { + continue; + } + // Format doesn't match + if (physicalMultiResStreamConfigs.data.i32[i * 4] != + physicalMultiResStreamConfigs.data.i32[j * 4]) { + continue; + } + // Width doesn't match + if (physicalMultiResStreamConfigs.data.i32[i * 4 + 1] != + physicalMultiResStreamConfigs.data.i32[j * 4 + 1]) { + continue; + } + // Height doesn't match + if (physicalMultiResStreamConfigs.data.i32[i * 4 + 2] != + physicalMultiResStreamConfigs.data.i32[j * 4 + 2]) { + continue; + } + // input/output, format, width, and height all match + ADD_FAILURE(); + } + } + } + } + + // If a multi-resolution stream is supported, there must be at least one + // format with more than one resolutions + if (multiResolutionStreamSupported) { + size_t numMultiResFormats = 0; + for (const auto& [format, sizeCount] : multiResOutputFormatCounterMap) { + if (sizeCount >= 2) { + numMultiResFormats++; + } + } + for (const auto& [format, sizeCount] : multiResInputFormatCounterMap) { + if (sizeCount >= 2) { + numMultiResFormats++; + + // If multi-resolution reprocessing is supported, the logical + // camera or ultra-high resolution sensor camera must support + // the corresponding reprocessing capability. + if (format == static_cast(PixelFormat::IMPLEMENTATION_DEFINED)) { + ASSERT_EQ(isZSLModeAvailable(metadata, PRIV_REPROCESS), Status::OK); + } else if (format == static_cast(PixelFormat::YCBCR_420_888)) { + ASSERT_EQ(isZSLModeAvailable(metadata, YUV_REPROCESS), Status::OK); + } + } + } + ASSERT_GT(numMultiResFormats, 0); + } + + // Make sure ANDROID_LOGICAL_MULTI_CAMERA_ACTIVE_PHYSICAL_ID is available in + // result keys. + if (isMultiCamera && deviceVersion >= CAMERA_DEVICE_API_VERSION_3_5) { + retcode = find_camera_metadata_ro_entry(metadata, + ANDROID_REQUEST_AVAILABLE_RESULT_KEYS, &entry); + if ((0 == retcode) && (entry.count > 0)) { + ASSERT_NE(std::find(entry.data.i32, entry.data.i32 + entry.count, + static_cast( + CameraMetadataTag::ANDROID_LOGICAL_MULTI_CAMERA_ACTIVE_PHYSICAL_ID)), + entry.data.i32 + entry.count); + } else { + ADD_FAILURE() << "Get camera availableResultKeys failed!"; + } + } +} + +void CameraHidlTest::verifyCameraCharacteristics(Status status, const CameraMetadata& chars) { + ASSERT_EQ(Status::OK, status); + const camera_metadata_t* metadata = (camera_metadata_t*)chars.data(); + size_t expectedSize = chars.size(); + int result = validate_camera_metadata_structure(metadata, &expectedSize); + ASSERT_TRUE((result == 0) || (result == CAMERA_METADATA_VALIDATION_SHIFTED)); + size_t entryCount = get_camera_metadata_entry_count(metadata); + // TODO: we can do better than 0 here. Need to check how many required + // characteristics keys we've defined. 
+ ASSERT_GT(entryCount, 0u); + + camera_metadata_ro_entry entry; + int retcode = find_camera_metadata_ro_entry(metadata, + ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL, &entry); + if ((0 == retcode) && (entry.count > 0)) { + uint8_t hardwareLevel = entry.data.u8[0]; + ASSERT_TRUE( + hardwareLevel == ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED || + hardwareLevel == ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL || + hardwareLevel == ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3 || + hardwareLevel == ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_EXTERNAL); + } else { + ADD_FAILURE() << "Get camera hardware level failed!"; + } + + entry.count = 0; + retcode = find_camera_metadata_ro_entry(metadata, + ANDROID_REQUEST_CHARACTERISTIC_KEYS_NEEDING_PERMISSION, &entry); + if ((0 == retcode) || (entry.count > 0)) { + ADD_FAILURE() << "ANDROID_REQUEST_CHARACTERISTIC_KEYS_NEEDING_PERMISSION " + << " per API contract should never be set by Hal!"; + } + retcode = find_camera_metadata_ro_entry(metadata, + ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS, &entry); + if ((0 == retcode) || (entry.count > 0)) { + ADD_FAILURE() << "ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS" + << " per API contract should never be set by Hal!"; + } + retcode = find_camera_metadata_ro_entry(metadata, + ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_MIN_FRAME_DURATIONS, &entry); + if ((0 == retcode) || (entry.count > 0)) { + ADD_FAILURE() << "ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_MIN_FRAME_DURATIONS" + << " per API contract should never be set by Hal!"; + } + retcode = find_camera_metadata_ro_entry(metadata, + ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STALL_DURATIONS, &entry); + if ((0 == retcode) || (entry.count > 0)) { + ADD_FAILURE() << "ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STALL_DURATIONS" + << " per API contract should never be set by Hal!"; + } + + retcode = find_camera_metadata_ro_entry(metadata, + ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS, &entry); + if (0 == retcode || entry.count > 0) { + ADD_FAILURE() << "ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS " + << " per API contract should never be set by Hal!"; + } + + retcode = find_camera_metadata_ro_entry(metadata, + ANDROID_HEIC_AVAILABLE_HEIC_MIN_FRAME_DURATIONS, &entry); + if (0 == retcode || entry.count > 0) { + ADD_FAILURE() << "ANDROID_HEIC_AVAILABLE_HEIC_MIN_FRAME_DURATIONS " + << " per API contract should never be set by Hal!"; + } + + retcode = find_camera_metadata_ro_entry(metadata, + ANDROID_HEIC_AVAILABLE_HEIC_STALL_DURATIONS, &entry); + if (0 == retcode || entry.count > 0) { + ADD_FAILURE() << "ANDROID_HEIC_AVAILABLE_HEIC_STALL_DURATIONS " + << " per API contract should never be set by Hal!"; + } + + retcode = find_camera_metadata_ro_entry(metadata, + ANDROID_HEIC_INFO_SUPPORTED, &entry); + if (0 == retcode && entry.count > 0) { + retcode = find_camera_metadata_ro_entry(metadata, + ANDROID_HEIC_INFO_MAX_JPEG_APP_SEGMENTS_COUNT, &entry); + if (0 == retcode && entry.count > 0) { + uint8_t maxJpegAppSegmentsCount = entry.data.u8[0]; + ASSERT_TRUE(maxJpegAppSegmentsCount >= 1 && + maxJpegAppSegmentsCount <= 16); + } else { + ADD_FAILURE() << "Get Heic maxJpegAppSegmentsCount failed!"; + } + } + + retcode = find_camera_metadata_ro_entry(metadata, + ANDROID_LENS_POSE_REFERENCE, &entry); + if (0 == retcode && entry.count > 0) { + uint8_t poseReference = entry.data.u8[0]; + ASSERT_TRUE(poseReference <= ANDROID_LENS_POSE_REFERENCE_UNDEFINED && + poseReference >= ANDROID_LENS_POSE_REFERENCE_PRIMARY_CAMERA); + } + + retcode = 
find_camera_metadata_ro_entry(metadata, + ANDROID_INFO_DEVICE_STATE_ORIENTATIONS, &entry); + if (0 == retcode && entry.count > 0) { + ASSERT_TRUE((entry.count % 2) == 0); + uint64_t maxPublicState = ((uint64_t) provider::V2_5::DeviceState::FOLDED) << 1; + uint64_t vendorStateStart = 1UL << 31; // Reserved for vendor specific states + uint64_t stateMask = (1 << vendorStateStart) - 1; + stateMask &= ~((1 << maxPublicState) - 1); + for (int i = 0; i < entry.count; i += 2){ + ASSERT_TRUE((entry.data.i64[i] & stateMask) == 0); + ASSERT_TRUE((entry.data.i64[i+1] % 90) == 0); + } + } + + verifyExtendedSceneModeCharacteristics(metadata); + verifyZoomCharacteristics(metadata); +} + +void CameraHidlTest::verifyExtendedSceneModeCharacteristics(const camera_metadata_t* metadata) { + camera_metadata_ro_entry entry; + int retcode = 0; + + retcode = find_camera_metadata_ro_entry(metadata, ANDROID_CONTROL_AVAILABLE_MODES, &entry); + if ((0 == retcode) && (entry.count > 0)) { + for (auto i = 0; i < entry.count; i++) { + ASSERT_TRUE(entry.data.u8[i] >= ANDROID_CONTROL_MODE_OFF && + entry.data.u8[i] <= ANDROID_CONTROL_MODE_USE_EXTENDED_SCENE_MODE); + } + } else { + ADD_FAILURE() << "Get camera controlAvailableModes failed!"; + } + + // Check key availability in capabilities, request and result. + + retcode = find_camera_metadata_ro_entry(metadata, + ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, &entry); + bool hasExtendedSceneModeRequestKey = false; + if ((0 == retcode) && (entry.count > 0)) { + hasExtendedSceneModeRequestKey = + std::find(entry.data.i32, entry.data.i32 + entry.count, + ANDROID_CONTROL_EXTENDED_SCENE_MODE) != entry.data.i32 + entry.count; + } else { + ADD_FAILURE() << "Get camera availableRequestKeys failed!"; + } + + retcode = find_camera_metadata_ro_entry(metadata, + ANDROID_REQUEST_AVAILABLE_RESULT_KEYS, &entry); + bool hasExtendedSceneModeResultKey = false; + if ((0 == retcode) && (entry.count > 0)) { + hasExtendedSceneModeResultKey = + std::find(entry.data.i32, entry.data.i32 + entry.count, + ANDROID_CONTROL_EXTENDED_SCENE_MODE) != entry.data.i32 + entry.count; + } else { + ADD_FAILURE() << "Get camera availableResultKeys failed!"; + } + + retcode = find_camera_metadata_ro_entry(metadata, + ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, &entry); + bool hasExtendedSceneModeMaxSizesKey = false; + bool hasExtendedSceneModeZoomRatioRangesKey = false; + if ((0 == retcode) && (entry.count > 0)) { + hasExtendedSceneModeMaxSizesKey = + std::find(entry.data.i32, entry.data.i32 + entry.count, + ANDROID_CONTROL_AVAILABLE_EXTENDED_SCENE_MODE_MAX_SIZES) != + entry.data.i32 + entry.count; + hasExtendedSceneModeZoomRatioRangesKey = + std::find(entry.data.i32, entry.data.i32 + entry.count, + ANDROID_CONTROL_AVAILABLE_EXTENDED_SCENE_MODE_ZOOM_RATIO_RANGES) != + entry.data.i32 + entry.count; + } else { + ADD_FAILURE() << "Get camera availableCharacteristicsKeys failed!"; + } + + camera_metadata_ro_entry maxSizesEntry; + retcode = find_camera_metadata_ro_entry( + metadata, ANDROID_CONTROL_AVAILABLE_EXTENDED_SCENE_MODE_MAX_SIZES, &maxSizesEntry); + bool hasExtendedSceneModeMaxSizes = (0 == retcode && maxSizesEntry.count > 0); + + camera_metadata_ro_entry zoomRatioRangesEntry; + retcode = find_camera_metadata_ro_entry( + metadata, ANDROID_CONTROL_AVAILABLE_EXTENDED_SCENE_MODE_ZOOM_RATIO_RANGES, + &zoomRatioRangesEntry); + bool hasExtendedSceneModeZoomRatioRanges = (0 == retcode && zoomRatioRangesEntry.count > 0); + + // Extended scene mode keys must all be available, or all be unavailable. 
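// The rule stated above (and repeated for the zoom keys further down) is an
// all-or-nothing consistency check: either every related key is advertised or none
// of them is. A minimal sketch of that predicate; allOrNothing is an illustrative
// helper, not part of the test class.
#include <algorithm>
#include <initializer_list>

static bool allOrNothing(std::initializer_list<bool> present) {
    const bool any = std::any_of(present.begin(), present.end(), [](bool b) { return b; });
    const bool all = std::all_of(present.begin(), present.end(), [](bool b) { return b; });
    return !any || all;  // Nothing advertised, or the complete key set advertised.
}
// Usage mirroring the checks below, e.g.:
//   ASSERT_TRUE(allOrNothing({hasRequestKey, hasResultKey, hasMaxSizesKey, hasRangesKey}));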
+ bool noExtendedSceneMode = + !hasExtendedSceneModeRequestKey && !hasExtendedSceneModeResultKey && + !hasExtendedSceneModeMaxSizesKey && !hasExtendedSceneModeZoomRatioRangesKey && + !hasExtendedSceneModeMaxSizes && !hasExtendedSceneModeZoomRatioRanges; + if (noExtendedSceneMode) { + return; + } + bool hasExtendedSceneMode = hasExtendedSceneModeRequestKey && hasExtendedSceneModeResultKey && + hasExtendedSceneModeMaxSizesKey && + hasExtendedSceneModeZoomRatioRangesKey && + hasExtendedSceneModeMaxSizes && hasExtendedSceneModeZoomRatioRanges; + ASSERT_TRUE(hasExtendedSceneMode); + + // Must have DISABLED, and must have one of BOKEH_STILL_CAPTURE, BOKEH_CONTINUOUS, or a VENDOR + // mode. + ASSERT_TRUE((maxSizesEntry.count == 6 && zoomRatioRangesEntry.count == 2) || + (maxSizesEntry.count == 9 && zoomRatioRangesEntry.count == 4)); + bool hasDisabledMode = false; + bool hasBokehStillCaptureMode = false; + bool hasBokehContinuousMode = false; + bool hasVendorMode = false; + std::vector outputStreams; + ASSERT_EQ(Status::OK, getAvailableOutputStreams(metadata, outputStreams)); + for (int i = 0, j = 0; i < maxSizesEntry.count && j < zoomRatioRangesEntry.count; i += 3) { + int32_t mode = maxSizesEntry.data.i32[i]; + int32_t maxWidth = maxSizesEntry.data.i32[i+1]; + int32_t maxHeight = maxSizesEntry.data.i32[i+2]; + switch (mode) { + case ANDROID_CONTROL_EXTENDED_SCENE_MODE_DISABLED: + hasDisabledMode = true; + ASSERT_TRUE(maxWidth == 0 && maxHeight == 0); + break; + case ANDROID_CONTROL_EXTENDED_SCENE_MODE_BOKEH_STILL_CAPTURE: + hasBokehStillCaptureMode = true; + j += 2; + break; + case ANDROID_CONTROL_EXTENDED_SCENE_MODE_BOKEH_CONTINUOUS: + hasBokehContinuousMode = true; + j += 2; + break; + default: + if (mode < ANDROID_CONTROL_EXTENDED_SCENE_MODE_VENDOR_START) { + ADD_FAILURE() << "Invalid extended scene mode advertised: " << mode; + } else { + hasVendorMode = true; + j += 2; + } + break; + } + + if (mode != ANDROID_CONTROL_EXTENDED_SCENE_MODE_DISABLED) { + // Make sure size is supported. + bool sizeSupported = false; + for (const auto& stream : outputStreams) { + if ((stream.format == static_cast(PixelFormat::YCBCR_420_888) || + stream.format == static_cast(PixelFormat::IMPLEMENTATION_DEFINED)) + && stream.width == maxWidth && stream.height == maxHeight) { + sizeSupported = true; + break; + } + } + ASSERT_TRUE(sizeSupported); + + // Make sure zoom range is valid + float minZoomRatio = zoomRatioRangesEntry.data.f[0]; + float maxZoomRatio = zoomRatioRangesEntry.data.f[1]; + ASSERT_GT(minZoomRatio, 0.0f); + ASSERT_LE(minZoomRatio, maxZoomRatio); + } + } + ASSERT_TRUE(hasDisabledMode); + ASSERT_TRUE(hasBokehStillCaptureMode || hasBokehContinuousMode || hasVendorMode); +} + +void CameraHidlTest::verifyZoomCharacteristics(const camera_metadata_t* metadata) { + camera_metadata_ro_entry entry; + int retcode = 0; + + // Check key availability in capabilities, request and result. 
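// Beyond key availability, verifyZoomCharacteristics below enforces a few numeric
// invariants on ANDROID_CONTROL_ZOOM_RATIO_RANGE. A minimal sketch of those rules as
// a pure predicate; the 1e-5 tolerance mirrors the FLOATING_POINT_THRESHOLD used in
// the checks that follow, and isZoomRangeConsistent is illustrative only.
static bool isZoomRangeConsistent(float minZoomRatio, float maxZoomRatio,
                                  float maxDigitalZoom) {
    constexpr float kThreshold = 0.00001f;
    return minZoomRatio <= maxZoomRatio &&                 // range must be ordered
           minZoomRatio <= 1.0f &&                         // 1.0x reachable from below
           maxZoomRatio >= 1.0f &&                         // ...and from above
           maxDigitalZoom <= maxZoomRatio + kThreshold;    // digital zoom fits the range
}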
+ retcode = find_camera_metadata_ro_entry(metadata, + ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM, &entry); + float maxDigitalZoom = 1.0; + if ((0 == retcode) && (entry.count == 1)) { + maxDigitalZoom = entry.data.f[0]; + } else { + ADD_FAILURE() << "Get camera scalerAvailableMaxDigitalZoom failed!"; + } + + retcode = find_camera_metadata_ro_entry(metadata, + ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, &entry); + bool hasZoomRequestKey = false; + if ((0 == retcode) && (entry.count > 0)) { + hasZoomRequestKey = std::find(entry.data.i32, entry.data.i32+entry.count, + ANDROID_CONTROL_ZOOM_RATIO) != entry.data.i32+entry.count; + } else { + ADD_FAILURE() << "Get camera availableRequestKeys failed!"; + } + + retcode = find_camera_metadata_ro_entry(metadata, + ANDROID_REQUEST_AVAILABLE_RESULT_KEYS, &entry); + bool hasZoomResultKey = false; + if ((0 == retcode) && (entry.count > 0)) { + hasZoomResultKey = std::find(entry.data.i32, entry.data.i32+entry.count, + ANDROID_CONTROL_ZOOM_RATIO) != entry.data.i32+entry.count; + } else { + ADD_FAILURE() << "Get camera availableResultKeys failed!"; + } + + retcode = find_camera_metadata_ro_entry(metadata, + ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, &entry); + bool hasZoomCharacteristicsKey = false; + if ((0 == retcode) && (entry.count > 0)) { + hasZoomCharacteristicsKey = std::find(entry.data.i32, entry.data.i32+entry.count, + ANDROID_CONTROL_ZOOM_RATIO_RANGE) != entry.data.i32+entry.count; + } else { + ADD_FAILURE() << "Get camera availableCharacteristicsKeys failed!"; + } + + retcode = find_camera_metadata_ro_entry(metadata, + ANDROID_CONTROL_ZOOM_RATIO_RANGE, &entry); + bool hasZoomRatioRange = (0 == retcode && entry.count == 2); + + // Zoom keys must all be available, or all be unavailable. + bool noZoomRatio = !hasZoomRequestKey && !hasZoomResultKey && !hasZoomCharacteristicsKey && + !hasZoomRatioRange; + if (noZoomRatio) { + return; + } + bool hasZoomRatio = hasZoomRequestKey && hasZoomResultKey && hasZoomCharacteristicsKey && + hasZoomRatioRange; + ASSERT_TRUE(hasZoomRatio); + + float minZoomRatio = entry.data.f[0]; + float maxZoomRatio = entry.data.f[1]; + constexpr float FLOATING_POINT_THRESHOLD = 0.00001f; + if (maxDigitalZoom > maxZoomRatio + FLOATING_POINT_THRESHOLD) { + ADD_FAILURE() << "Maximum digital zoom " << maxDigitalZoom + << " is larger than maximum zoom ratio " << maxZoomRatio << " + threshold " + << FLOATING_POINT_THRESHOLD << "!"; + } + if (minZoomRatio > maxZoomRatio) { + ADD_FAILURE() << "Maximum zoom ratio is less than minimum zoom ratio!"; + } + if (minZoomRatio > 1.0f) { + ADD_FAILURE() << "Minimum zoom ratio is more than 1.0!"; + } + if (maxZoomRatio < 1.0f) { + ADD_FAILURE() << "Maximum zoom ratio is less than 1.0!"; + } + + // Make sure CROPPING_TYPE is CENTER_ONLY + retcode = find_camera_metadata_ro_entry(metadata, + ANDROID_SCALER_CROPPING_TYPE, &entry); + if ((0 == retcode) && (entry.count == 1)) { + int8_t croppingType = entry.data.u8[0]; + ASSERT_EQ(croppingType, ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY); + } else { + ADD_FAILURE() << "Get camera scalerCroppingType failed!"; + } +} + +void CameraHidlTest::verifyMonochromeCharacteristics(const CameraMetadata& chars, + int deviceVersion) { + const camera_metadata_t* metadata = (camera_metadata_t*)chars.data(); + Status rc = isMonochromeCamera(metadata); + if (Status::METHOD_NOT_SUPPORTED == rc) { + return; + } + ASSERT_EQ(Status::OK, rc); + + camera_metadata_ro_entry entry; + // Check capabilities + int retcode = find_camera_metadata_ro_entry(metadata, + 
ANDROID_REQUEST_AVAILABLE_CAPABILITIES, &entry); + if ((0 == retcode) && (entry.count > 0)) { + ASSERT_EQ(std::find(entry.data.u8, entry.data.u8 + entry.count, + ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING), + entry.data.u8 + entry.count); + if (deviceVersion < CAMERA_DEVICE_API_VERSION_3_5) { + ASSERT_EQ(std::find(entry.data.u8, entry.data.u8 + entry.count, + ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW), + entry.data.u8 + entry.count); + } + } + + if (deviceVersion >= CAMERA_DEVICE_API_VERSION_3_5) { + // Check Cfa + retcode = find_camera_metadata_ro_entry(metadata, + ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT, &entry); + if ((0 == retcode) && (entry.count == 1)) { + ASSERT_TRUE(entry.data.i32[0] == static_cast( + CameraMetadataEnumAndroidSensorInfoColorFilterArrangement::ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_MONO) + || entry.data.i32[0] == static_cast( + CameraMetadataEnumAndroidSensorInfoColorFilterArrangement::ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_NIR)); + } + + // Check availableRequestKeys + retcode = find_camera_metadata_ro_entry(metadata, + ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, &entry); + if ((0 == retcode) && (entry.count > 0)) { + for (size_t i = 0; i < entry.count; i++) { + ASSERT_NE(entry.data.i32[i], ANDROID_COLOR_CORRECTION_MODE); + ASSERT_NE(entry.data.i32[i], ANDROID_COLOR_CORRECTION_TRANSFORM); + ASSERT_NE(entry.data.i32[i], ANDROID_COLOR_CORRECTION_GAINS); + } + } else { + ADD_FAILURE() << "Get camera availableRequestKeys failed!"; + } + + // Check availableResultKeys + retcode = find_camera_metadata_ro_entry(metadata, + ANDROID_REQUEST_AVAILABLE_RESULT_KEYS, &entry); + if ((0 == retcode) && (entry.count > 0)) { + for (size_t i = 0; i < entry.count; i++) { + ASSERT_NE(entry.data.i32[i], ANDROID_SENSOR_GREEN_SPLIT); + ASSERT_NE(entry.data.i32[i], ANDROID_SENSOR_NEUTRAL_COLOR_POINT); + ASSERT_NE(entry.data.i32[i], ANDROID_COLOR_CORRECTION_MODE); + ASSERT_NE(entry.data.i32[i], ANDROID_COLOR_CORRECTION_TRANSFORM); + ASSERT_NE(entry.data.i32[i], ANDROID_COLOR_CORRECTION_GAINS); + } + } else { + ADD_FAILURE() << "Get camera availableResultKeys failed!"; + } + + // Check availableCharacteristicKeys + retcode = find_camera_metadata_ro_entry(metadata, + ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, &entry); + if ((0 == retcode) && (entry.count > 0)) { + for (size_t i = 0; i < entry.count; i++) { + ASSERT_NE(entry.data.i32[i], ANDROID_SENSOR_REFERENCE_ILLUMINANT1); + ASSERT_NE(entry.data.i32[i], ANDROID_SENSOR_REFERENCE_ILLUMINANT2); + ASSERT_NE(entry.data.i32[i], ANDROID_SENSOR_CALIBRATION_TRANSFORM1); + ASSERT_NE(entry.data.i32[i], ANDROID_SENSOR_CALIBRATION_TRANSFORM2); + ASSERT_NE(entry.data.i32[i], ANDROID_SENSOR_COLOR_TRANSFORM1); + ASSERT_NE(entry.data.i32[i], ANDROID_SENSOR_COLOR_TRANSFORM2); + ASSERT_NE(entry.data.i32[i], ANDROID_SENSOR_FORWARD_MATRIX1); + ASSERT_NE(entry.data.i32[i], ANDROID_SENSOR_FORWARD_MATRIX2); + } + } else { + ADD_FAILURE() << "Get camera availableResultKeys failed!"; + } + + // Check blackLevelPattern + retcode = find_camera_metadata_ro_entry(metadata, + ANDROID_SENSOR_BLACK_LEVEL_PATTERN, &entry); + if ((0 == retcode) && (entry.count > 0)) { + ASSERT_EQ(entry.count, 4); + for (size_t i = 1; i < entry.count; i++) { + ASSERT_EQ(entry.data.i32[i], entry.data.i32[0]); + } + } + } +} + +void CameraHidlTest::verifyMonochromeCameraResult( + const ::android::hardware::camera::common::V1_0::helper::CameraMetadata& metadata) { + camera_metadata_ro_entry entry; + + // Check tags that are not applicable for monochrome 
camera + ASSERT_FALSE(metadata.exists(ANDROID_SENSOR_GREEN_SPLIT)); + ASSERT_FALSE(metadata.exists(ANDROID_SENSOR_NEUTRAL_COLOR_POINT)); + ASSERT_FALSE(metadata.exists(ANDROID_COLOR_CORRECTION_MODE)); + ASSERT_FALSE(metadata.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)); + ASSERT_FALSE(metadata.exists(ANDROID_COLOR_CORRECTION_GAINS)); + + // Check dynamicBlackLevel + entry = metadata.find(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL); + if (entry.count > 0) { + ASSERT_EQ(entry.count, 4); + for (size_t i = 1; i < entry.count; i++) { + ASSERT_FLOAT_EQ(entry.data.f[i], entry.data.f[0]); + } + } + + // Check noiseProfile + entry = metadata.find(ANDROID_SENSOR_NOISE_PROFILE); + if (entry.count > 0) { + ASSERT_EQ(entry.count, 2); + } + + // Check lensShadingMap + entry = metadata.find(ANDROID_STATISTICS_LENS_SHADING_MAP); + if (entry.count > 0) { + ASSERT_EQ(entry.count % 4, 0); + for (size_t i = 0; i < entry.count/4; i++) { + ASSERT_FLOAT_EQ(entry.data.f[i*4+1], entry.data.f[i*4]); + ASSERT_FLOAT_EQ(entry.data.f[i*4+2], entry.data.f[i*4]); + ASSERT_FLOAT_EQ(entry.data.f[i*4+3], entry.data.f[i*4]); + } + } + + // Check tonemapCurve + camera_metadata_ro_entry curveRed = metadata.find(ANDROID_TONEMAP_CURVE_RED); + camera_metadata_ro_entry curveGreen = metadata.find(ANDROID_TONEMAP_CURVE_GREEN); + camera_metadata_ro_entry curveBlue = metadata.find(ANDROID_TONEMAP_CURVE_BLUE); + if (curveRed.count > 0 && curveGreen.count > 0 && curveBlue.count > 0) { + ASSERT_EQ(curveRed.count, curveGreen.count); + ASSERT_EQ(curveRed.count, curveBlue.count); + for (size_t i = 0; i < curveRed.count; i++) { + ASSERT_FLOAT_EQ(curveGreen.data.f[i], curveRed.data.f[i]); + ASSERT_FLOAT_EQ(curveBlue.data.f[i], curveRed.data.f[i]); + } + } +} + +void CameraHidlTest::verifyBuffersReturned( + sp session, + int deviceVersion, int32_t streamId, + sp cb, uint32_t streamConfigCounter) { + sp session3_3; + sp session3_4; + sp session3_5; + sp session3_6; + sp session3_7; + castSession(session, deviceVersion, &session3_3, &session3_4, &session3_5, + &session3_6, &session3_7); + ASSERT_NE(nullptr, session3_5.get()); + + hidl_vec streamIds(1); + streamIds[0] = streamId; + session3_5->signalStreamFlush(streamIds, /*streamConfigCounter*/streamConfigCounter); + cb->waitForBuffersReturned(); +} + +void CameraHidlTest::verifyBuffersReturned( + sp session3_4, + hidl_vec streamIds, sp cb, uint32_t streamConfigCounter) { + auto castResult = device::V3_5::ICameraDeviceSession::castFrom(session3_4); + ASSERT_TRUE(castResult.isOk()); + sp session3_5 = castResult; + ASSERT_NE(nullptr, session3_5.get()); + + session3_5->signalStreamFlush(streamIds, /*streamConfigCounter*/streamConfigCounter); + cb->waitForBuffersReturned(); +} + +void CameraHidlTest::verifyBuffersReturned(sp session3_7, + hidl_vec streamIds, sp cb, + uint32_t streamConfigCounter) { + session3_7->signalStreamFlush(streamIds, /*streamConfigCounter*/ streamConfigCounter); + cb->waitForBuffersReturned(); +} + +void CameraHidlTest::verifyLogicalCameraResult(const camera_metadata_t* staticMetadata, + const ::android::hardware::camera::common::V1_0::helper::CameraMetadata& resultMetadata) { + std::unordered_set physicalIds; + Status rc = getPhysicalCameraIds(staticMetadata, &physicalIds); + ASSERT_TRUE(Status::OK == rc); + ASSERT_TRUE(physicalIds.size() > 1); + + camera_metadata_ro_entry entry; + // Check mainPhysicalId + entry = resultMetadata.find(ANDROID_LOGICAL_MULTI_CAMERA_ACTIVE_PHYSICAL_ID); + if (entry.count > 0) { + std::string mainPhysicalId(reinterpret_cast(entry.data.u8)); + 
ASSERT_NE(physicalIds.find(mainPhysicalId), physicalIds.end()); + } else { + ADD_FAILURE() << "Get LOGICAL_MULTI_CAMERA_ACTIVE_PHYSICAL_ID failed!"; + } +} + +// Open a device session with empty callbacks and return static metadata. +void CameraHidlTest::openEmptyDeviceSession(const std::string &name, sp provider, + sp *session /*out*/, camera_metadata_t **staticMeta /*out*/, + ::android::sp *cameraDevice /*out*/) { + ASSERT_NE(nullptr, session); + ASSERT_NE(nullptr, staticMeta); + + ::android::sp device3_x; + ALOGI("configureStreams: Testing camera device %s", name.c_str()); + Return ret; + ret = provider->getCameraDeviceInterface_V3_x( + name, + [&](auto status, const auto& device) { + ALOGI("getCameraDeviceInterface_V3_x returns status:%d", + (int)status); + ASSERT_EQ(Status::OK, status); + ASSERT_NE(device, nullptr); + device3_x = device; + }); + ASSERT_TRUE(ret.isOk()); + if (cameraDevice != nullptr) { + *cameraDevice = device3_x; + } + + sp cb = new EmptyDeviceCb(); + ret = device3_x->open(cb, [&](auto status, const auto& newSession) { + ALOGI("device::open returns status:%d", (int)status); + ASSERT_EQ(Status::OK, status); + ASSERT_NE(newSession, nullptr); + *session = newSession; + }); + ASSERT_TRUE(ret.isOk()); + + ret = device3_x->getCameraCharacteristics([&] (Status s, + CameraMetadata metadata) { + ASSERT_EQ(Status::OK, s); + *staticMeta = clone_camera_metadata( + reinterpret_cast(metadata.data())); + ASSERT_NE(nullptr, *staticMeta); + }); + ASSERT_TRUE(ret.isOk()); +} + +void CameraHidlTest::notifyDeviceState(provider::V2_5::DeviceState newState) { + if (mProvider2_5.get() == nullptr) return; + + mProvider2_5->notifyDeviceStateChange( + static_cast>(newState)); +} + +// Open a particular camera device. +void CameraHidlTest::openCameraDevice(const std::string &name, + sp provider, + sp<::android::hardware::camera::device::V1_0::ICameraDevice> *device1 /*out*/) { + ASSERT_TRUE(nullptr != device1); + + Return ret; + ret = provider->getCameraDeviceInterface_V1_x( + name, + [&](auto status, const auto& device) { + ALOGI("getCameraDeviceInterface_V1_x returns status:%d", + (int)status); + ASSERT_EQ(Status::OK, status); + ASSERT_NE(device, nullptr); + *device1 = device; + }); + ASSERT_TRUE(ret.isOk()); + + sp deviceCb = new Camera1DeviceCb(this); + Return returnStatus = (*device1)->open(deviceCb); + ASSERT_TRUE(returnStatus.isOk()); + ASSERT_EQ(Status::OK, returnStatus); +} + +// Initialize and configure a preview window. +void CameraHidlTest::setupPreviewWindow( + const sp<::android::hardware::camera::device::V1_0::ICameraDevice> &device, + sp *bufferItemConsumer /*out*/, + sp *bufferHandler /*out*/) { + ASSERT_NE(nullptr, device.get()); + ASSERT_NE(nullptr, bufferItemConsumer); + ASSERT_NE(nullptr, bufferHandler); + + sp producer; + sp consumer; + BufferQueue::createBufferQueue(&producer, &consumer); + *bufferItemConsumer = new BufferItemConsumer(consumer, + GraphicBuffer::USAGE_HW_TEXTURE); //Use GLConsumer default usage flags + ASSERT_NE(nullptr, (*bufferItemConsumer).get()); + *bufferHandler = new BufferItemHander(*bufferItemConsumer); + ASSERT_NE(nullptr, (*bufferHandler).get()); + (*bufferItemConsumer)->setFrameAvailableListener(*bufferHandler); + sp surface = new Surface(producer); + sp previewCb = new PreviewWindowCb(surface); + + auto rc = device->setPreviewWindow(previewCb); + ASSERT_TRUE(rc.isOk()); + ASSERT_EQ(Status::OK, rc); +} + +// Stop camera preview and close camera. 
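// Several of the camera1 preview helpers below (waitForFrameLocked in particular)
// block on a condition variable with a fresh timeout on every wakeup instead of one
// absolute deadline. A minimal standalone sketch of that wait loop, with a plain
// bool standing in for the DataCallbackMsg bookkeeping; returning false here
// corresponds to the ASSERT_NE(std::cv_status::timeout, ...) in the test.
#include <chrono>
#include <condition_variable>
#include <mutex>

static bool waitForFlagLocked(const bool& flag, std::condition_variable& cond,
                              std::unique_lock<std::mutex>& lock,
                              std::chrono::seconds perWaitTimeout) {
    while (!flag) {
        auto deadline = std::chrono::system_clock::now() + perWaitTimeout;
        if (cond.wait_until(lock, deadline) == std::cv_status::timeout) {
            return false;  // Timed out waiting for the producer to set the flag.
        }
    }
    return true;
}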
+void CameraHidlTest::stopPreviewAndClose( + const sp<::android::hardware::camera::device::V1_0::ICameraDevice> &device) { + Return ret = device->stopPreview(); + ASSERT_TRUE(ret.isOk()); + + ret = device->close(); + ASSERT_TRUE(ret.isOk()); +} + +// Enable a specific camera message type. +void CameraHidlTest::enableMsgType(unsigned int msgType, + const sp<::android::hardware::camera::device::V1_0::ICameraDevice> &device) { + Return ret = device->enableMsgType(msgType); + ASSERT_TRUE(ret.isOk()); + + Return returnBoolStatus = device->msgTypeEnabled(msgType); + ASSERT_TRUE(returnBoolStatus.isOk()); + ASSERT_TRUE(returnBoolStatus); +} + +// Disable a specific camera message type. +void CameraHidlTest::disableMsgType(unsigned int msgType, + const sp<::android::hardware::camera::device::V1_0::ICameraDevice> &device) { + Return ret = device->disableMsgType(msgType); + ASSERT_TRUE(ret.isOk()); + + Return returnBoolStatus = device->msgTypeEnabled(msgType); + ASSERT_TRUE(returnBoolStatus.isOk()); + ASSERT_FALSE(returnBoolStatus); +} + +// Wait until a specific frame notification arrives. +void CameraHidlTest::waitForFrameLocked(DataCallbackMsg msgFrame, + std::unique_lock &l) { + while (msgFrame != mDataMessageTypeReceived) { + auto timeout = std::chrono::system_clock::now() + + std::chrono::seconds(kStreamBufferTimeoutSec); + ASSERT_NE(std::cv_status::timeout, + mResultCondition.wait_until(l, timeout)); + } +} + +// Start preview on a particular camera device +void CameraHidlTest::startPreview( + const sp<::android::hardware::camera::device::V1_0::ICameraDevice> &device) { + Return returnStatus = device->startPreview(); + ASSERT_TRUE(returnStatus.isOk()); + ASSERT_EQ(Status::OK, returnStatus); +} + +// Retrieve camera parameters. +void CameraHidlTest::getParameters( + const sp<::android::hardware::camera::device::V1_0::ICameraDevice> &device, + CameraParameters *cameraParams /*out*/) { + ASSERT_NE(nullptr, cameraParams); + + Return ret; + ret = device->getParameters([&] (const ::android::hardware::hidl_string& params) { + ASSERT_FALSE(params.empty()); + ::android::String8 paramString(params.c_str()); + (*cameraParams).unflatten(paramString); + }); + ASSERT_TRUE(ret.isOk()); +} + +// Set camera parameters. 
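// getParameters() above and setParameters() below move the legacy camera1 settings
// as a single flattened "key=value;key=value" string. A minimal sketch of that
// round trip, assuming the helper CameraParameters class shipped under
// camera/common/1.0/default in this patch; the include path is an assumption and
// may need adjusting to the build setup.
#include <CameraParameters.h>
#include <utils/String8.h>

using ::android::hardware::camera::common::V1_0::helper::CameraParameters;

static void roundTripParameters(const CameraParameters& params) {
    ::android::String8 flat = params.flatten();  // serialize to "key1=value1;key2=value2;..."
    CameraParameters parsed;
    parsed.unflatten(flat);                      // parse the string back into key/value pairs
    // parsed now holds the same key/value pairs that were flattened above.
}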
+void CameraHidlTest::setParameters( + const sp<::android::hardware::camera::device::V1_0::ICameraDevice> &device, + const CameraParameters &cameraParams) { + Return returnStatus = device->setParameters( + cameraParams.flatten().string()); + ASSERT_TRUE(returnStatus.isOk()); + ASSERT_EQ(Status::OK, returnStatus); +} + +void CameraHidlTest::allocateGraphicBuffer(uint32_t width, uint32_t height, uint64_t usage, + PixelFormat format, hidl_handle *buffer_handle /*out*/) { + ASSERT_NE(buffer_handle, nullptr); + + buffer_handle_t buffer; + uint32_t stride; + + android::status_t err = android::GraphicBufferAllocator::get().allocateRawHandle( + width, height, static_cast(format), 1u /*layerCount*/, usage, &buffer, &stride, + "VtsHalCameraProviderV2_4"); + ASSERT_EQ(err, android::NO_ERROR); + + buffer_handle->setTo(const_cast(buffer), true /*shouldOwn*/); +} + +void CameraHidlTest::verifyRecommendedConfigs(const CameraMetadata& chars) { + size_t CONFIG_ENTRY_SIZE = 5; + size_t CONFIG_ENTRY_TYPE_OFFSET = 3; + size_t CONFIG_ENTRY_BITFIELD_OFFSET = 4; + uint32_t maxPublicUsecase = + ANDROID_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS_PUBLIC_END; + uint32_t vendorUsecaseStart = + ANDROID_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS_VENDOR_START; + uint32_t usecaseMask = (1 << vendorUsecaseStart) - 1; + usecaseMask &= ~((1 << maxPublicUsecase) - 1); + + const camera_metadata_t* metadata = reinterpret_cast (chars.data()); + + camera_metadata_ro_entry recommendedConfigsEntry, recommendedDepthConfigsEntry, ioMapEntry; + recommendedConfigsEntry.count = recommendedDepthConfigsEntry.count = ioMapEntry.count = 0; + int retCode = find_camera_metadata_ro_entry(metadata, + ANDROID_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS, &recommendedConfigsEntry); + int depthRetCode = find_camera_metadata_ro_entry(metadata, + ANDROID_DEPTH_AVAILABLE_RECOMMENDED_DEPTH_STREAM_CONFIGURATIONS, + &recommendedDepthConfigsEntry); + int ioRetCode = find_camera_metadata_ro_entry(metadata, + ANDROID_SCALER_AVAILABLE_RECOMMENDED_INPUT_OUTPUT_FORMATS_MAP, &ioMapEntry); + if ((0 != retCode) && (0 != depthRetCode)) { + //In case both regular and depth recommended configurations are absent, + //I/O should be absent as well. 
+ ASSERT_NE(ioRetCode, 0); + return; + } + + camera_metadata_ro_entry availableKeysEntry; + retCode = find_camera_metadata_ro_entry(metadata, + ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, &availableKeysEntry); + ASSERT_TRUE((0 == retCode) && (availableKeysEntry.count > 0)); + std::vector availableKeys; + availableKeys.reserve(availableKeysEntry.count); + availableKeys.insert(availableKeys.end(), availableKeysEntry.data.i32, + availableKeysEntry.data.i32 + availableKeysEntry.count); + + if (recommendedConfigsEntry.count > 0) { + ASSERT_NE(std::find(availableKeys.begin(), availableKeys.end(), + ANDROID_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS), + availableKeys.end()); + ASSERT_EQ((recommendedConfigsEntry.count % CONFIG_ENTRY_SIZE), 0); + for (size_t i = 0; i < recommendedConfigsEntry.count; i += CONFIG_ENTRY_SIZE) { + int32_t entryType = + recommendedConfigsEntry.data.i32[i + CONFIG_ENTRY_TYPE_OFFSET]; + uint32_t bitfield = + recommendedConfigsEntry.data.i32[i + CONFIG_ENTRY_BITFIELD_OFFSET]; + ASSERT_TRUE((entryType == + ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT) || + (entryType == + ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT)); + ASSERT_TRUE((bitfield & usecaseMask) == 0); + } + } + + if (recommendedDepthConfigsEntry.count > 0) { + ASSERT_NE(std::find(availableKeys.begin(), availableKeys.end(), + ANDROID_DEPTH_AVAILABLE_RECOMMENDED_DEPTH_STREAM_CONFIGURATIONS), + availableKeys.end()); + ASSERT_EQ((recommendedDepthConfigsEntry.count % CONFIG_ENTRY_SIZE), 0); + for (size_t i = 0; i < recommendedDepthConfigsEntry.count; i += CONFIG_ENTRY_SIZE) { + int32_t entryType = + recommendedDepthConfigsEntry.data.i32[i + CONFIG_ENTRY_TYPE_OFFSET]; + uint32_t bitfield = + recommendedDepthConfigsEntry.data.i32[i + CONFIG_ENTRY_BITFIELD_OFFSET]; + ASSERT_TRUE((entryType == + ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT) || + (entryType == + ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT)); + ASSERT_TRUE((bitfield & usecaseMask) == 0); + } + + if (recommendedConfigsEntry.count == 0) { + //In case regular recommended configurations are absent but suggested depth + //configurations are present, I/O should be absent. 
+            ASSERT_NE(ioRetCode, 0);
+        }
+    }
+
+    if ((ioRetCode == 0) && (ioMapEntry.count > 0)) {
+        ASSERT_NE(std::find(availableKeys.begin(), availableKeys.end(),
+                        ANDROID_SCALER_AVAILABLE_RECOMMENDED_INPUT_OUTPUT_FORMATS_MAP),
+                availableKeys.end());
+        ASSERT_EQ(isZSLModeAvailable(metadata), Status::OK);
+    }
+}
+
+void CameraHidlTest::verifySessionReconfigurationQuery(
+        sp<device::V3_5::ICameraDeviceSession> session3_5, camera_metadata* oldSessionParams,
+        camera_metadata* newSessionParams) {
+    ASSERT_NE(nullptr, session3_5.get());
+    ASSERT_NE(nullptr, oldSessionParams);
+    ASSERT_NE(nullptr, newSessionParams);
+
+    android::hardware::hidl_vec<uint8_t> oldParams, newParams;
+    oldParams.setToExternal(reinterpret_cast<uint8_t*>(oldSessionParams),
+            get_camera_metadata_size(oldSessionParams));
+    newParams.setToExternal(reinterpret_cast<uint8_t*>(newSessionParams),
+            get_camera_metadata_size(newSessionParams));
+    android::hardware::camera::common::V1_0::Status callStatus;
+    auto hidlCb = [&callStatus] (android::hardware::camera::common::V1_0::Status s,
+            bool /*requiredFlag*/) {
+        callStatus = s;
+    };
+    auto ret = session3_5->isReconfigurationRequired(oldParams, newParams, hidlCb);
+    ASSERT_TRUE(ret.isOk());
+    switch (callStatus) {
+        case android::hardware::camera::common::V1_0::Status::OK:
+        case android::hardware::camera::common::V1_0::Status::METHOD_NOT_SUPPORTED:
+            break;
+        case android::hardware::camera::common::V1_0::Status::INTERNAL_ERROR:
+        default:
+            ADD_FAILURE() << "Query callback failed";
+    }
+}
+
+void CameraHidlTest::verifyRequestTemplate(const camera_metadata_t* metadata,
+                                           RequestTemplate requestTemplate) {
+    ASSERT_NE(nullptr, metadata);
+    size_t entryCount =
+            get_camera_metadata_entry_count(metadata);
+    ALOGI("template %u metadata entry count is %zu", (int32_t)requestTemplate, entryCount);
+    // TODO: we can do better than 0 here.
Need to check how many required + // request keys we've defined for each template + ASSERT_GT(entryCount, 0u); + + // Check zoomRatio + camera_metadata_ro_entry zoomRatioEntry; + int foundZoomRatio = find_camera_metadata_ro_entry(metadata, + ANDROID_CONTROL_ZOOM_RATIO, &zoomRatioEntry); + if (foundZoomRatio == 0) { + ASSERT_EQ(zoomRatioEntry.count, 1); + ASSERT_EQ(zoomRatioEntry.data.f[0], 1.0f); + } +} + +void CameraHidlTest::overrideRotateAndCrop( + ::android::hardware::hidl_vec *settings /*in/out*/) { + if (settings == nullptr) { + return; + } + + ::android::hardware::camera::common::V1_0::helper::CameraMetadata requestMeta; + requestMeta.append(reinterpret_cast (settings->data())); + auto entry = requestMeta.find(ANDROID_SCALER_ROTATE_AND_CROP); + if ((entry.count > 0) && (entry.data.u8[0] == ANDROID_SCALER_ROTATE_AND_CROP_AUTO)) { + uint8_t disableRotateAndCrop = ANDROID_SCALER_ROTATE_AND_CROP_NONE; + requestMeta.update(ANDROID_SCALER_ROTATE_AND_CROP, &disableRotateAndCrop, 1); + settings->releaseData(); + camera_metadata_t *metaBuffer = requestMeta.release(); + settings->setToExternal(reinterpret_cast (metaBuffer), + get_camera_metadata_size(metaBuffer), true); + } +} + +GTEST_ALLOW_UNINSTANTIATED_PARAMETERIZED_TEST(CameraHidlTest); +INSTANTIATE_TEST_SUITE_P( + PerInstance, CameraHidlTest, + testing::ValuesIn(android::hardware::getAllHalInstanceNames(ICameraProvider::descriptor)), + android::hardware::PrintInstanceNameToString); diff --git a/camera/provider/2.5/ICameraProvider.hal b/camera/provider/2.5/ICameraProvider.hal new file mode 100644 index 0000000..b4cda6a --- /dev/null +++ b/camera/provider/2.5/ICameraProvider.hal @@ -0,0 +1,68 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package android.hardware.camera.provider@2.5; + +import android.hardware.camera.common@1.0::Status; +import android.hardware.camera.provider@2.4::ICameraProvider; + +/** + * Camera provider HAL + * + * Version 2.5 adds support for the notifyDeviceStateChange method + */ +interface ICameraProvider extends @2.4::ICameraProvider { + + /** + * notifyDeviceStateChange: + * + * Notify the HAL provider that the state of the overall device has + * changed in some way that the HAL may want to know about. + * + * For example, a physical shutter may have been uncovered or covered, + * or a camera may have been covered or uncovered by an add-on keyboard + * or other accessory. + * + * The state is a bitfield of potential states, and some physical configurations + * could plausibly correspond to multiple different combinations of state bits. + * The HAL must ignore any state bits it is not actively using to determine + * the appropriate camera configuration. + * + * For example, on some devices the FOLDED state could mean that + * backward-facing cameras are covered by the fold, so FOLDED by itself implies + * BACK_COVERED. 
But other devices may support folding but not cover any cameras + * when folded, so for those FOLDED would not imply any of the other flags. + * Since these relationships are very device-specific, it is difficult to specify + * a comprehensive policy. But as a recommendation, it is suggested that if a flag + * necessarily implies other flags are set as well, then those flags should be set. + * So even though FOLDED would be enough to infer BACK_COVERED on some devices, the + * BACK_COVERED flag should also be set for clarity. + * + * This method may be invoked by the HAL client at any time. It must not + * cause any active camera device sessions to be closed, but may dynamically + * change which physical camera a logical multi-camera is using for its + * active and future output. + * + * The method must be invoked by the HAL client at least once before the + * client calls ICameraDevice::open on any camera device interfaces listed + * by this provider, to establish the initial device state. + * + * @param newState + * The new state of the device. + */ + notifyDeviceStateChange(bitfield newState); + +}; diff --git a/camera/provider/2.5/default/Android.bp b/camera/provider/2.5/default/Android.bp new file mode 100644 index 0000000..4e9f0bc --- /dev/null +++ b/camera/provider/2.5/default/Android.bp @@ -0,0 +1,191 @@ + +cc_library_shared { + name: "android.vendor.hardware.camera.provider@2.5-legacy", + proprietary: true, + srcs: ["LegacyCameraProviderImpl_2_5.cpp"], + shared_libs: [ + "android.hardware.camera.common@1.0", + "android.hardware.camera.device@1.0", + "android.hardware.camera.device@3.2", + "android.hardware.camera.device@3.3", + "android.hardware.camera.device@3.4", + "android.hardware.camera.device@3.5", + "android.hardware.camera.provider@2.4", + "android.hardware.camera.provider@2.4-legacy", + "android.hardware.camera.provider@2.5", + "android.hardware.graphics.mapper@2.0", + "android.hidl.allocator@1.0", + "android.hidl.memory@1.0", + "camera.device@1.0-impl", + "camera.device@3.2-impl", + "camera.device@3.3-impl", + "camera.device@3.4-impl", + "camera.device@3.5-impl", + "libcamera_metadata", + "libcutils", + "libhardware", + "libhidlbase", + "liblog", + "libutils", + ], + static_libs: [ + "android.hardware.camera.common@1.0-helper", + ], + header_libs: [ + "camera.device@3.4-impl_headers", + "camera.device@3.5-impl_headers", + ], + export_include_dirs: ["."], +} + +cc_library_shared { + name: "android.vendor.hardware.camera.provider@2.5-external", + proprietary: true, + srcs: ["ExternalCameraProviderImpl_2_5.cpp"], + shared_libs: [ + "android.hardware.camera.common@1.0", + "android.hardware.camera.device@1.0", + "android.hardware.camera.device@3.2", + "android.hardware.camera.device@3.3", + "android.hardware.camera.device@3.4", + "android.hardware.camera.device@3.5", + "android.hardware.camera.provider@2.4", + "android.hardware.camera.provider@2.4-external", + "android.hardware.camera.provider@2.5", + "android.hardware.graphics.mapper@2.0", + "android.hardware.graphics.mapper@3.0", + "android.hardware.graphics.mapper@4.0", + "android.hidl.allocator@1.0", + "android.hidl.memory@1.0", + "camera.device@3.3-impl", + "camera.device@3.4-external-impl", + "camera.device@3.4-impl", + "camera.device@3.5-external-impl", + "camera.device@3.5-impl", + "libcamera_metadata", + "libcutils", + "libhardware", + "libhidlbase", + "liblog", + "libtinyxml2", + "libutils", + ], + static_libs: [ + "android.hardware.camera.common@1.0-helper", + ], + header_libs: [ + 
"camera.device@3.4-external-impl_headers", + "camera.device@3.5-external-impl_headers", + "camera.device@3.6-external-impl_headers" + ], + export_include_dirs: ["."], +} + +cc_defaults { + name: "vendor_camera_service_2_5_defaults", + defaults: ["hidl_defaults"], + proprietary: true, + relative_install_path: "hw", + srcs: ["service.cpp"], + shared_libs: [ + "android.hardware.camera.common@1.0", + "android.hardware.camera.device@1.0", + "android.hardware.camera.device@3.2", + "android.hardware.camera.device@3.3", + "android.hardware.camera.device@3.4", + "android.hardware.camera.device@3.5", + "android.hardware.camera.provider@2.4", + "android.hardware.camera.provider@2.4-legacy", + "android.hardware.camera.provider@2.5", + "android.hardware.camera.provider@2.5-legacy", + "android.hardware.graphics.mapper@2.0", + "android.hidl.allocator@1.0", + "android.hidl.memory@1.0", + "libbinder", + "libcamera_metadata", + "libhardware", + "libhidlbase", + "liblog", + "libutils", + ], + static_libs: [ + "android.hardware.camera.common@1.0-helper", + ], + header_libs: [ + "camera.device@3.4-impl_headers", + "camera.device@3.5-impl_headers" + ], +} + +cc_binary { + name: "android.vendor.hardware.camera.provider@2.5-service", + defaults: ["vendor_camera_service_2_5_defaults"], + compile_multilib: "32", + init_rc: ["android.hardware.camera.provider@2.5-service.rc"], +} + +cc_binary { + name: "android.vendor.hardware.camera.provider@2.5-service_64", + defaults: ["vendor_camera_service_2_5_defaults"], + compile_multilib: "64", + init_rc: ["android.hardware.camera.provider@2.5-service_64.rc"], +} + +cc_binary { + name: "android.vendor.hardware.camera.provider@2.5-service-lazy", + overrides: ["android.hardware.camera.provider@2.5-service"], + defaults: ["vendor_camera_service_2_5_defaults"], + compile_multilib: "32", + init_rc: ["android.hardware.camera.provider@2.5-service-lazy.rc"], + cflags: ["-DLAZY_SERVICE"], +} + +cc_binary { + name: "android.vendor.hardware.camera.provider@2.5-service-lazy_64", + overrides: ["android.hardware.camera.provider@2.5-service_64"], + defaults: ["vendor_camera_service_2_5_defaults"], + compile_multilib: "64", + init_rc: ["android.hardware.camera.provider@2.5-service-lazy_64.rc"], + cflags: ["-DLAZY_SERVICE"], +} + +cc_binary { + name: "android.vendor.hardware.camera.provider@2.5-external-service", + defaults: ["hidl_defaults"], + proprietary: true, + relative_install_path: "hw", + srcs: ["external-service.cpp"], + compile_multilib: "32", + init_rc: ["android.hardware.camera.provider@2.5-external-service.rc"], + shared_libs: [ + "android.hardware.camera.common@1.0", + "android.hardware.camera.device@1.0", + "android.hardware.camera.device@3.2", + "android.hardware.camera.device@3.3", + "android.hardware.camera.device@3.4", + "android.hardware.camera.device@3.5", + "android.hardware.camera.provider@2.4", + "android.hardware.camera.provider@2.4-external", + "android.hardware.camera.provider@2.5", + "android.hardware.camera.provider@2.5-external", + "android.hardware.graphics.mapper@2.0", + "android.hardware.graphics.mapper@3.0", + "android.hardware.graphics.mapper@4.0", + "libbinder", + "libcamera_metadata", + "libhidlbase", + "liblog", + "libtinyxml2", + "libutils", + ], + static_libs: [ + "android.hardware.camera.common@1.0-helper", + ], + header_libs: [ + "camera.device@3.4-external-impl_headers", + "camera.device@3.4-impl_headers", + "camera.device@3.5-external-impl_headers", + "camera.device@3.5-impl_headers", + "camera.device@3.6-external-impl_headers", + ], +} diff --git 
a/camera/provider/2.5/default/CameraProvider_2_5.h b/camera/provider/2.5/default/CameraProvider_2_5.h new file mode 100644 index 0000000..d0f1dda --- /dev/null +++ b/camera/provider/2.5/default/CameraProvider_2_5.h @@ -0,0 +1,94 @@ +/* + * Copyright (C) 2019 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_HARDWARE_CAMERA_PROVIDER_V2_5_CAMERAPROVIDER_H +#define ANDROID_HARDWARE_CAMERA_PROVIDER_V2_5_CAMERAPROVIDER_H + +#include +#include +#include + +namespace android { +namespace hardware { +namespace camera { +namespace provider { +namespace V2_5 { +namespace implementation { + +using ::android::hardware::camera::common::V1_0::Status; +using ::android::hardware::camera::provider::V2_5::ICameraProvider; +using ::android::hardware::camera::provider::V2_4::ICameraProviderCallback; +using ::android::hardware::Return; +using ::android::hardware::hidl_string; +using ::android::sp; + +// Default recommended RPC thread count for camera provider implementations +const int HWBINDER_THREAD_COUNT = 6; + +template +struct CameraProvider : public ICameraProvider { + CameraProvider() : impl() {} + ~CameraProvider() {} + + // Caller must use this method to check if CameraProvider ctor failed + bool isInitFailed() { return impl.isInitFailed(); } + + // Methods from ::android::hardware::camera::provider::V2_4::ICameraProvider follow. + Return setCallback(const sp& callback) override { + return impl.setCallback(callback); + } + + Return getVendorTags(getVendorTags_cb _hidl_cb) override { + return impl.getVendorTags(_hidl_cb); + } + + Return getCameraIdList(getCameraIdList_cb _hidl_cb) override { + return impl.getCameraIdList(_hidl_cb); + } + + Return isSetTorchModeSupported(isSetTorchModeSupported_cb _hidl_cb) override { + return impl.isSetTorchModeSupported(_hidl_cb); + } + + Return getCameraDeviceInterface_V1_x( + const hidl_string& cameraDeviceName, + getCameraDeviceInterface_V1_x_cb _hidl_cb) override { + return impl.getCameraDeviceInterface_V1_x(cameraDeviceName, _hidl_cb); + } + + Return getCameraDeviceInterface_V3_x( + const hidl_string& cameraDeviceName, + getCameraDeviceInterface_V3_x_cb _hidl_cb) override { + return impl.getCameraDeviceInterface_V3_x(cameraDeviceName, _hidl_cb); + } + + // Methods from ::android::hardware::camera::provider::V2_5::ICameraProvider follow. 
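+ // This wrapper simply forwards the 2.5 device-state notification to the
+ // wrapped implementation; service binaries instantiate it as, for example,
+ // CameraProvider<LegacyCameraProviderImpl_2_5> (see service.cpp).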
+ Return<void> notifyDeviceStateChange(hardware::hidl_bitfield<DeviceState> newState) override { + return impl.notifyDeviceStateChange(newState); + } + +private: + IMPL impl; +}; + +} // namespace implementation +} // namespace V2_5 +} // namespace provider +} // namespace camera +} // namespace hardware +} // namespace android + +#endif // ANDROID_HARDWARE_CAMERA_PROVIDER_V2_5_CAMERAPROVIDER_H diff --git a/camera/provider/2.5/default/ExternalCameraProviderImpl_2_5.cpp b/camera/provider/2.5/default/ExternalCameraProviderImpl_2_5.cpp new file mode 100644 index 0000000..87851cd --- /dev/null +++ b/camera/provider/2.5/default/ExternalCameraProviderImpl_2_5.cpp @@ -0,0 +1,47 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "CamPrvdr@2.5-external" +//#define LOG_NDEBUG 0 +#include + +#include "ExternalCameraProviderImpl_2_5.h" + +namespace android { +namespace hardware { +namespace camera { +namespace provider { +namespace V2_5 { +namespace implementation { + +ExternalCameraProviderImpl_2_5::ExternalCameraProviderImpl_2_5() : + ExternalCameraProviderImpl_2_4() { +} + +ExternalCameraProviderImpl_2_5::~ExternalCameraProviderImpl_2_5() { +} + +Return<void> ExternalCameraProviderImpl_2_5::notifyDeviceStateChange( + hidl_bitfield<DeviceState> /*newState*/) { + return Void(); +} + +} // namespace implementation +} // namespace V2_5 +} // namespace provider +} // namespace camera +} // namespace hardware +} // namespace android diff --git a/camera/provider/2.5/default/ExternalCameraProviderImpl_2_5.h b/camera/provider/2.5/default/ExternalCameraProviderImpl_2_5.h new file mode 100644 index 0000000..eeaa7cc --- /dev/null +++ b/camera/provider/2.5/default/ExternalCameraProviderImpl_2_5.h @@ -0,0 +1,62 @@ +/* + * Copyright (C) 2019 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ + +#ifndef ANDROID_HARDWARE_CAMERA_PROVIDER_V2_5_EXTCAMERAPROVIDER_H +#define ANDROID_HARDWARE_CAMERA_PROVIDER_V2_5_EXTCAMERAPROVIDER_H + +#include + +#include +#include +#include + +namespace android { +namespace hardware { +namespace camera { +namespace provider { +namespace V2_5 { +namespace implementation { + +using namespace ::android::hardware::camera::provider; + +using ::android::hardware::camera::common::V1_0::CameraDeviceStatus; +using ::android::hardware::camera::common::V1_0::Status; +using ::android::hardware::camera::common::V1_0::VendorTagSection; +using ::android::hardware::camera::external::common::ExternalCameraConfig; +using ::android::hardware::camera::provider::V2_5::ICameraProvider; +using ::android::hardware::camera::provider::V2_4::ICameraProviderCallback; +using ::android::hardware::Return; +using ::android::hardware::Void; +using ::android::hardware::hidl_bitfield; + +struct ExternalCameraProviderImpl_2_5 : + public V2_4::implementation::ExternalCameraProviderImpl_2_4 { + ExternalCameraProviderImpl_2_5(); + ~ExternalCameraProviderImpl_2_5(); + + // Methods from ::android::hardware::camera::provider::V2_5::ICameraProvider follow. + Return notifyDeviceStateChange(hidl_bitfield newState); +private: +}; + +} // namespace implementation +} // namespace V2_5 +} // namespace provider +} // namespace camera +} // namespace hardware +} // namespace android + +#endif // ANDROID_HARDWARE_CAMERA_PROVIDER_V2_5_EXTCAMERAPROVIDER_H diff --git a/camera/provider/2.5/default/LegacyCameraProviderImpl_2_5.cpp b/camera/provider/2.5/default/LegacyCameraProviderImpl_2_5.cpp new file mode 100644 index 0000000..5233397 --- /dev/null +++ b/camera/provider/2.5/default/LegacyCameraProviderImpl_2_5.cpp @@ -0,0 +1,53 @@ +/* + * Copyright (C) 2019 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#define LOG_TAG "CamPrvdr@2.5-legacy" +//#define LOG_NDEBUG 0 +#include +#include + +#include "LegacyCameraProviderImpl_2_5.h" +#include "CameraProvider_2_5.h" + +namespace android { +namespace hardware { +namespace camera { +namespace provider { +namespace V2_5 { +namespace implementation { + +template struct CameraProvider; + +LegacyCameraProviderImpl_2_5::LegacyCameraProviderImpl_2_5() : + LegacyCameraProviderImpl_2_4() { +} + +LegacyCameraProviderImpl_2_5::~LegacyCameraProviderImpl_2_5() {} + +Return LegacyCameraProviderImpl_2_5::notifyDeviceStateChange( + hidl_bitfield newState) { + ALOGD("%s: New device state: 0x%" PRIx64, __FUNCTION__, newState); + uint64_t state = static_cast(newState); + mModule->notifyDeviceStateChange(state); + return Void(); +} + +} // namespace implementation +} // namespace V2_5 +} // namespace provider +} // namespace camera +} // namespace hardware +} // namespace android diff --git a/camera/provider/2.5/default/LegacyCameraProviderImpl_2_5.h b/camera/provider/2.5/default/LegacyCameraProviderImpl_2_5.h new file mode 100644 index 0000000..62dd97f --- /dev/null +++ b/camera/provider/2.5/default/LegacyCameraProviderImpl_2_5.h @@ -0,0 +1,62 @@ +/* + * Copyright (C) 2019 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_HARDWARE_CAMERA_PROVIDER_V2_5_LEGACYCAMERAPROVIDER_H +#define ANDROID_HARDWARE_CAMERA_PROVIDER_V2_5_LEGACYCAMERAPROVIDER_H + +#include + +#include +#include +#include + +namespace android { +namespace hardware { +namespace camera { +namespace provider { +namespace V2_5 { +namespace implementation { + +using namespace ::android::hardware::camera::provider; + +using ::android::hardware::camera::common::V1_0::CameraDeviceStatus; +using ::android::hardware::camera::common::V1_0::Status; +using ::android::hardware::camera::common::V1_0::TorchModeStatus; +using ::android::hardware::camera::common::V1_0::VendorTag; +using ::android::hardware::camera::common::V1_0::VendorTagSection; +using ::android::hardware::camera::common::V1_0::helper::CameraModule; +using ::android::hardware::camera::common::V1_0::helper::VendorTagDescriptor; +using ::android::hardware::camera::provider::V2_5::DeviceState; +using ::android::hardware::hidl_bitfield; +using ::android::hardware::Return; + +struct LegacyCameraProviderImpl_2_5 : public V2_4::implementation::LegacyCameraProviderImpl_2_4 { + LegacyCameraProviderImpl_2_5(); + ~LegacyCameraProviderImpl_2_5(); + + // Methods from ::android::hardware::camera::provider::V2_5::ICameraProvider follow. 
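+ // Forwards the new device state bits to the loaded camera HAL module via
+ // CameraModule::notifyDeviceStateChange(); see the implementation in
+ // LegacyCameraProviderImpl_2_5.cpp above.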
+ Return notifyDeviceStateChange(hidl_bitfield newState); +private: +}; + +} // namespace implementation +} // namespace V2_5 +} // namespace provider +} // namespace camera +} // namespace hardware +} // namespace android + +#endif // ANDROID_HARDWARE_CAMERA_PROVIDER_V2_5_CAMERAPROVIDER_H diff --git a/camera/provider/2.5/default/OWNERS b/camera/provider/2.5/default/OWNERS new file mode 100644 index 0000000..f48a95c --- /dev/null +++ b/camera/provider/2.5/default/OWNERS @@ -0,0 +1 @@ +include platform/frameworks/av:/camera/OWNERS diff --git a/camera/provider/2.5/default/android.hardware.camera.provider@2.5-external-service.rc b/camera/provider/2.5/default/android.hardware.camera.provider@2.5-external-service.rc new file mode 100644 index 0000000..b3b06b2 --- /dev/null +++ b/camera/provider/2.5/default/android.hardware.camera.provider@2.5-external-service.rc @@ -0,0 +1,9 @@ +service vendor.camera-provider-2-5-ext /vendor/bin/hw/android.hardware.camera.provider@2.5-external-service + interface android.hardware.camera.provider@2.5::ICameraProvider external/0 + interface android.hardware.camera.provider@2.4::ICameraProvider external/0 + class hal + user cameraserver + group audio camera input drmrpc usb + ioprio rt 4 + capabilities SYS_NICE + task_profiles CameraServiceCapacity MaxPerformance diff --git a/camera/provider/2.5/default/android.hardware.camera.provider@2.5-service-lazy.rc b/camera/provider/2.5/default/android.hardware.camera.provider@2.5-service-lazy.rc new file mode 100644 index 0000000..7c5e69b --- /dev/null +++ b/camera/provider/2.5/default/android.hardware.camera.provider@2.5-service-lazy.rc @@ -0,0 +1,11 @@ +service vendor.camera-provider-2-5 /vendor/bin/hw/android.hardware.camera.provider@2.5-service-lazy + interface android.hardware.camera.provider@2.5::ICameraProvider legacy/0 + interface android.hardware.camera.provider@2.4::ICameraProvider legacy/0 + oneshot + disabled + class hal + user cameraserver + group audio camera input drmrpc + ioprio rt 4 + capabilities SYS_NICE + task_profiles CameraServiceCapacity MaxPerformance diff --git a/camera/provider/2.5/default/android.hardware.camera.provider@2.5-service-lazy_64.rc b/camera/provider/2.5/default/android.hardware.camera.provider@2.5-service-lazy_64.rc new file mode 100644 index 0000000..49bca8f --- /dev/null +++ b/camera/provider/2.5/default/android.hardware.camera.provider@2.5-service-lazy_64.rc @@ -0,0 +1,11 @@ +service vendor.camera-provider-2-5 /vendor/bin/hw/android.hardware.camera.provider@2.5-service-lazy_64 + interface android.hardware.camera.provider@2.5::ICameraProvider legacy/0 + interface android.hardware.camera.provider@2.4::ICameraProvider legacy/0 + oneshot + disabled + class hal + user cameraserver + group audio camera input drmrpc + ioprio rt 4 + capabilities SYS_NICE + task_profiles CameraServiceCapacity MaxPerformance diff --git a/camera/provider/2.5/default/android.hardware.camera.provider@2.5-service.rc b/camera/provider/2.5/default/android.hardware.camera.provider@2.5-service.rc new file mode 100644 index 0000000..4bd1fb4 --- /dev/null +++ b/camera/provider/2.5/default/android.hardware.camera.provider@2.5-service.rc @@ -0,0 +1,9 @@ +service vendor.camera-provider-2-5 /vendor/bin/hw/android.hardware.camera.provider@2.5-service + interface android.hardware.camera.provider@2.5::ICameraProvider legacy/0 + interface android.hardware.camera.provider@2.4::ICameraProvider legacy/0 + class hal + user cameraserver + group audio camera input drmrpc + ioprio rt 4 + capabilities SYS_NICE + task_profiles 
CameraServiceCapacity MaxPerformance diff --git a/camera/provider/2.5/default/android.hardware.camera.provider@2.5-service_64.rc b/camera/provider/2.5/default/android.hardware.camera.provider@2.5-service_64.rc new file mode 100644 index 0000000..b444325 --- /dev/null +++ b/camera/provider/2.5/default/android.hardware.camera.provider@2.5-service_64.rc @@ -0,0 +1,9 @@ +service vendor.camera-provider-2-5 /vendor/bin/hw/android.hardware.camera.provider@2.5-service_64 + interface android.hardware.camera.provider@2.5::ICameraProvider legacy/0 + interface android.hardware.camera.provider@2.4::ICameraProvider legacy/0 + class hal + user cameraserver + group audio camera input drmrpc + ioprio rt 4 + capabilities SYS_NICE + task_profiles CameraServiceCapacity MaxPerformance diff --git a/camera/provider/2.5/default/external-service.cpp b/camera/provider/2.5/default/external-service.cpp new file mode 100644 index 0000000..8788916 --- /dev/null +++ b/camera/provider/2.5/default/external-service.cpp @@ -0,0 +1,46 @@ +/* + * Copyright 2019 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "android.hardware.camera.provider@2.5-external-service" + +#include +#include +#include + +#include "CameraProvider_2_5.h" +#include "ExternalCameraProviderImpl_2_5.h" + +using android::status_t; +using android::hardware::camera::provider::V2_5::ICameraProvider; + +int main() +{ + using namespace android::hardware::camera::provider::V2_5::implementation; + + ALOGI("CameraProvider@2.5 external webcam service is starting."); + + ::android::hardware::configureRpcThreadpool(/*threads*/ HWBINDER_THREAD_COUNT, /*willJoin*/ true); + + ::android::sp provider = new CameraProvider(); + + status_t status = provider->registerAsService("external/0"); + LOG_ALWAYS_FATAL_IF(status != android::OK, "Error while registering provider service: %d", + status); + + ::android::hardware::joinRpcThreadpool(); + + return 0; +} diff --git a/camera/provider/2.5/default/service.cpp b/camera/provider/2.5/default/service.cpp new file mode 100644 index 0000000..ec30cbc --- /dev/null +++ b/camera/provider/2.5/default/service.cpp @@ -0,0 +1,63 @@ +/* + * Copyright 2019 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifdef LAZY_SERVICE +#define LOG_TAG "android.hardware.camera.provider@2.5-service-lazy" +#else +#define LOG_TAG "android.hardware.camera.provider@2.5-service" +#endif + +#include +#include +#include +#include + +#include "CameraProvider_2_5.h" +#include "LegacyCameraProviderImpl_2_5.h" + +using android::status_t; +using android::hardware::camera::provider::V2_5::ICameraProvider; + +#ifdef LAZY_SERVICE +const bool kLazyService = true; +#else +const bool kLazyService = false; +#endif + +int main() +{ + using namespace android::hardware::camera::provider::V2_5::implementation; + + ALOGI("CameraProvider@2.5 legacy service is starting."); + + ::android::hardware::configureRpcThreadpool(/*threads*/ HWBINDER_THREAD_COUNT, /*willJoin*/ true); + + ::android::sp provider = new CameraProvider(); + + status_t status; + if (kLazyService) { + auto serviceRegistrar = ::android::hardware::LazyServiceRegistrar::getInstance(); + status = serviceRegistrar.registerService(provider, "legacy/0"); + } else { + status = provider->registerAsService("legacy/0"); + } + LOG_ALWAYS_FATAL_IF(status != android::OK, "Error while registering provider service: %d", + status); + + ::android::hardware::joinRpcThreadpool(); + + return 0; +} diff --git a/camera/provider/2.5/types.hal b/camera/provider/2.5/types.hal new file mode 100644 index 0000000..6a8ae83 --- /dev/null +++ b/camera/provider/2.5/types.hal @@ -0,0 +1,52 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package android.hardware.camera.provider@2.5; + +/** + * DeviceState: + * + * Possible physical states of the overall device, for use with + * ICameraProvider::notifyDeviceStateChange. + * + */ +enum DeviceState : uint64_t { + /** + * The device is in its normal physical configuration. This is the default if the + * device does not support multiple different states. + */ + NORMAL = 0, + + /** + * Camera device(s) facing backward are covered. + */ + BACK_COVERED = 1 << 0, + + /** + * Camera device(s) facing foward are covered. + */ + FRONT_COVERED = 1 << 1, + + /** + * The device is folded. If not set, the device is unfolded or does not + * support folding. + * + * The exact point when this status change happens during the folding + * operation is device-specific. + */ + FOLDED = 1 << 2, + +}; diff --git a/camera/provider/2.6/ICameraProvider.hal b/camera/provider/2.6/ICameraProvider.hal new file mode 100644 index 0000000..d720b26 --- /dev/null +++ b/camera/provider/2.6/ICameraProvider.hal @@ -0,0 +1,151 @@ +/* + * Copyright (C) 2020 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package android.hardware.camera.provider@2.6; + +import @2.5::ICameraProvider; +import android.hardware.camera.common@1.0::Status; +import android.hardware.camera.device@3.4::StreamConfiguration; + +/** + * Camera provider HAL + * + * @2.6::adds support for the getConcurrentStreamingCameraIds() and + * isConcurrentStreamCombinationSupported() + * @2.6::ICameraProviderCallback to receive physical camera availability + * callbacks for logical multi-cameras. + */ +interface ICameraProvider extends @2.5::ICameraProvider { + /** + * getConcurrentStreamingCameraIds + * + * Get a vector of combinations of camera device ids that are able to + * configure streams concurrently. Each camera device advertised in a + * combination MUST at the very least support the following streams while + * streaming concurrently with the other camera ids in the combination. + * + * Target 1 Target 2 + * ----------------------------------------------------- + * | Type | Size | Type | Size | + * ----------------------------------------------------- + * | YUV | s1440p | | + * ----------------------------------------------------- + * | JPEG | s1440p | | + * ----------------------------------------------------- + * | PRIV | s1440p | | + * ----------------------------------------------------- + * | YUV / PRIV | s720p | YUV / PRIV | s1440p | + * ----------------------------------------------------- + * | YUV / PRIV | s720p | JPEG | s1440p | + * ----------------------------------------------------- + * + * where: + * s720p - min (max output resolution for the given format, 1280 X 720) + * s1440p - min (max output resolution for the given format, 1920 X 1440) + * + * If a device has MONOCHROME capability (device's capabilities include + * ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MONOCHROME) and therefore supports Y8 + * outputs, stream combinations mentioned above, where YUV is substituted by + * Y8 must be also supported. + * + * Devices whose capabilities do not include + * ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE, must support + * at least a single Y16 stream, Dataspace::DEPTH with sVGA resolution, + * during concurrent operation. + * Where sVGA - min (max output resolution for the given format, 640 X 480) + * + * The camera framework must call this method whenever it gets a + * cameraDeviceStatusChange callback adding a new camera device or removing + * a camera device known to it. This is so that the camera framework can get new combinations + * of camera ids that can stream concurrently, that might have potentially appeared. + * + * For each combination (and their subsets) of camera device ids returned by + * getConcurrentStreamingCameraIds(): If only the mandatory combinations can + * be supported concurrently by each device, then the resource costs must + * sum up to > 100 for the concurrent set, to ensure arbitration between + * camera applications work as expected. 
Only if resources are sufficient + * to run a set of cameras at full capability (maximally + * resource-consuming framerate and stream size settings available in the + * configuration settings exposed through camera metadata), should the sum + * of resource costs for the combination be <= 100. + * + * For guaranteed concurrent camera operation, the camera framework must call + * ICameraDevice.open() on all devices (intended for concurrent operation), before configuring + * any streams on them. This gives the camera HAL process an opportunity to potentially + * distribute hardware resources better before stream configuration. + * + * Due to potential hardware constraints around internal switching of physical camera devices, + * a device's complete ZOOM_RATIO_RANGE(if supported), may not apply during concurrent + * operation. If ZOOM_RATIO is supported, camera HALs must ensure ZOOM_RATIO_RANGE of + * [1.0, ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM] is supported by that device, during + * concurrent operation. + * + * @return status Status code for the operation + * @return cameraIds a list of camera id combinations that support + * concurrent stream configurations with the minimum guarantees + * specified. + */ + getConcurrentStreamingCameraIds() generates (Status status, vec> cameraIds); + + /** + * isConcurrentStreamCombinationSupported: + * + * Check for device support of specific camera stream combinations while + * streaming concurrently with other devices. + * + * The per device streamList must contain at least one output-capable stream, and may + * not contain more than one input-capable stream. + * In contrast to regular stream configuration the framework does not create + * or initialize any actual streams. This means that Hal must not use or + * consider the stream "id" value. + * + * ------------------------------------------------------------------------ + * + * Preconditions: + * + * The framework can call this method at any time before, during and + * after active session configuration per device. This means that calls must not + * impact the performance of pending camera requests in any way. In + * particular there must not be any glitches or delays during normal + * camera streaming. + * + * The framework must not call this method with any combination of camera + * ids that is not a subset of the camera ids advertised by getConcurrentStreamingCameraIds of + * the same provider. + * + * Performance requirements: + * This call is expected to be significantly faster than stream + * configuration. In general HW and SW camera settings must not be + * changed and there must not be a user-visible impact on camera performance. + * + * @param configs a vector of camera ids and their corresponding stream + * configurations that need to be queried for support. + * + * @return status Status code for the operation, one of: + * OK: + * On successful stream combination query. + * METHOD_NOT_SUPPORTED: + * The camera provider does not support stream combination query. + * INTERNAL_ERROR: + * The stream combination query cannot complete due to internal + * error. + * @return true in case the stream combination is supported, false otherwise. 
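+ *
+ * Illustrative framework-side sketch (non-normative; assumes a provider
+ * handle named "provider" of type sp<ICameraProvider>):
+ *
+ *     hidl_vec<CameraIdAndStreamCombination> configs = ...; // one entry per camera id
+ *     provider->isConcurrentStreamCombinationSupported(configs,
+ *             [](Status s, bool supported) {
+ *                 // Status::OK with supported == true means the listed
+ *                 // cameras may be configured with these streams concurrently.
+ *             });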
+ * + * + */ + isConcurrentStreamCombinationSupported(vec configs) + generates (Status status, bool queryStatus); +}; diff --git a/camera/provider/2.6/ICameraProviderCallback.hal b/camera/provider/2.6/ICameraProviderCallback.hal new file mode 100644 index 0000000..42c1092 --- /dev/null +++ b/camera/provider/2.6/ICameraProviderCallback.hal @@ -0,0 +1,54 @@ +/* + * Copyright (C) 2020 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package android.hardware.camera.provider@2.6; + +import android.hardware.camera.common@1.0::types; +import android.hardware.camera.provider@2.4::ICameraProviderCallback; + +/** + * Callback functions for a camera provider HAL to use to inform the camera + * service of changes to the camera subsystem. + * + * Version 2.6 adds support for physical camera device status callback for + * multi-camera. + */ +interface ICameraProviderCallback extends @2.4::ICameraProviderCallback { + + /** + * cameraPhysicalDeviceStatusChange: + * + * Callback to the camera service to indicate that the state of a physical + * camera device of a logical multi-camera has changed. + * + * On camera service startup, when ICameraProvider::setCallback is invoked, + * the camera service must assume that all physical devices backing internal + * multi-camera devices are in the CAMERA_DEVICE_STATUS_PRESENT state. + * + * The provider must call this method to inform the camera service of any + * initially NOT_PRESENT physical devices, as soon as the callbacks are available + * through setCallback. + * + * @param cameraDeviceName The name of the logical multi-camera whose + * physical camera has a new status. + * @param physicalCameraDeviceName The name of the physical camera device + * that has a new status. + * @param newStatus The new status that device is in. + * + */ + physicalCameraDeviceStatusChange(string cameraDeviceName, + string physicalCameraDeviceName, CameraDeviceStatus newStatus); +}; diff --git a/camera/provider/2.6/types.hal b/camera/provider/2.6/types.hal new file mode 100644 index 0000000..24c62aa --- /dev/null +++ b/camera/provider/2.6/types.hal @@ -0,0 +1,30 @@ +/* + * Copyright (C) 2020 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package android.hardware.camera.provider@2.6; + +import android.hardware.camera.device@3.4::StreamConfiguration; + +/** + * CameraIdAndStreamCombination: + * Pairs the cameraId and the StreamConfiguration to be + * tested with other concurrent camera id and StreamConfigurations + */ +struct CameraIdAndStreamCombination { + string cameraId; + + @3.4::StreamConfiguration streamConfiguration; +}; diff --git a/camera/provider/2.7/ICameraProvider.hal b/camera/provider/2.7/ICameraProvider.hal new file mode 100644 index 0000000..c9d52ee --- /dev/null +++ b/camera/provider/2.7/ICameraProvider.hal @@ -0,0 +1,51 @@ +/* + * Copyright (C) 2021 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package android.hardware.camera.provider@2.7; + +import @2.6::ICameraProvider; +import android.hardware.camera.common@1.0::Status; + +/** + * Camera provider HAL + * + * Adds support for the isConcurrentStreamCombinationSupported() with + * ICameraDevice@3.7::StreamConfiguration. + */ +interface ICameraProvider extends @2.6::ICameraProvider { + /** + * isConcurrentStreamCombinationSupported_2_7: + * + * Identical to @2.6::isConcurrentStreamCombinationSupported except that + * this function takes a vector of @3.7::StreamConfiguration. + * + * @param configs a vector of camera ids and their corresponding stream + * configurations that need to be queried for support. + * + * @return status Status code for the operation, one of: + * OK: + * On successful stream combination query. + * METHOD_NOT_SUPPORTED: + * The camera provider does not support stream combination query. + * INTERNAL_ERROR: + * The stream combination query cannot complete due to internal + * error. + * @return true in case the stream combination is supported, false otherwise. + * + */ + isConcurrentStreamCombinationSupported_2_7(vec configs) + generates (Status status, bool queryStatus); +}; diff --git a/camera/provider/2.7/types.hal b/camera/provider/2.7/types.hal new file mode 100644 index 0000000..363e894 --- /dev/null +++ b/camera/provider/2.7/types.hal @@ -0,0 +1,30 @@ +/* + * Copyright (C) 2021 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package android.hardware.camera.provider@2.7; + +import android.hardware.camera.device@3.7::StreamConfiguration; + +/** + * CameraIdAndStreamCombination: + * + * This is identical to @2.6::CameraIdAndStreamCombination except that + * streamConfiguration is of version @3.7. 
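+ *
+ * For example, a client checking whether two cameras can stream concurrently
+ * would pass one CameraIdAndStreamCombination per camera id, each carrying
+ * the @3.7::StreamConfiguration it intends to configure on that camera.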
+ */ +struct CameraIdAndStreamCombination { + string cameraId; + @3.7::StreamConfiguration streamConfiguration; +}; diff --git a/camera/provider/README.md b/camera/provider/README.md new file mode 100644 index 0000000..0718fb1 --- /dev/null +++ b/camera/provider/README.md @@ -0,0 +1,37 @@ +## Camera Provider HAL ## +--- + +## Overview: ## + +The camera.provider HAL is used by the Android camera service to discover, +query, and open individual camera devices. + +It also allows for direct control of the flash unit of camera devices that have +one, for turning on/off torch mode. + +More complete information about the Android camera HAL and subsystem can be found at +[source.android.com](http://source.android.com/devices/camera/index.html). + +## Version history: ## + +## types.hal: ## + +### @0.0: + +Common enum and struct definitions for all camera HAL interfaces. Does not +define any interfaces of its own. + +## ICameraProvider.hal: ## + +### @2.4: + +First HIDL version of the camera provider HAL, closely matching the feature set +and operation of the pre-HIDL camera HAL module v2.4. + +## ICameraProviderCallback.hal: ## + +### @2.4: + +First HIDL version of the camera provider HAL callback interface, closely +matching the feature set and operation of the pre-HIDL camera HAL module +callbacks v2.4.
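+
+## Example usage: ##
+
+A minimal, illustrative sketch (not part of the HAL itself) of how a client
+such as the camera service might enumerate the devices published by a
+registered provider instance. The instance name "legacy/0" is taken from the
+service .rc files above; error handling is simplified:
+
+```cpp
+#define LOG_TAG "CameraProviderExample"
+#include <log/log.h>
+
+#include <android/hardware/camera/provider/2.4/ICameraProvider.h>
+
+using ::android::sp;
+using ::android::hardware::hidl_string;
+using ::android::hardware::hidl_vec;
+using ::android::hardware::camera::common::V1_0::Status;
+using ::android::hardware::camera::provider::V2_4::ICameraProvider;
+
+void listCameras() {
+    // Look up a registered provider instance over hwbinder.
+    sp<ICameraProvider> provider = ICameraProvider::getService("legacy/0");
+    if (provider == nullptr) return;
+
+    // Enumerate the camera device names published by this provider. Each name
+    // can then be passed to getCameraDeviceInterface_V3_x() to obtain an
+    // ICameraDevice handle and open a capture session.
+    auto ret = provider->getCameraIdList(
+            [](Status status, const hidl_vec<hidl_string>& names) {
+                if (status != Status::OK) return;
+                for (const auto& name : names) {
+                    ALOGI("Camera device: %s", name.c_str());
+                }
+            });
+    if (!ret.isOk()) {
+        ALOGE("getCameraIdList transaction failed: %s", ret.description().c_str());
+    }
+}
+```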