Create API for reporting color space support to Camera2 framework consumers.

Test: Ran CtsCameraTestCases.
Test: Ran VtsAidlHalCameraProvider_TargetTest.
Test: Created an app to test Display P3 camera capture, switching between color spaces
Bug: 238359088
Change-Id: I382dea5d13582817959001246240141e83d8172d
Austin Borger
2022-07-15 11:27:53 -07:00
parent c8074521fb
commit 4728fc48cc
17 changed files with 636 additions and 12 deletions

View File

@@ -15,9 +15,9 @@ aidl_interface {
imports: [
"android.hardware.common-V2",
"android.hardware.common.fmq-V1",
"android.hardware.camera.common",
"android.hardware.camera.metadata",
"android.hardware.graphics.common",
"android.hardware.camera.common-V1",
"android.hardware.camera.metadata-V2",
"android.hardware.graphics.common-V3",
],
backend: {
cpp: {

View File

@@ -48,4 +48,5 @@ parcelable Stream {
android.hardware.camera.metadata.SensorPixelMode[] sensorPixelModesUsed;
android.hardware.camera.metadata.RequestAvailableDynamicRangeProfilesMap dynamicRangeProfile;
android.hardware.camera.metadata.ScalerAvailableStreamUseCases useCase;
int colorSpace;
}

View File

@@ -98,14 +98,18 @@ parcelable Stream {
*
* For most formats, dataSpace defines the color space of the image data.
* In addition, for some formats, dataSpace indicates whether image- or
* depth-based data is requested. See
* android.hardware.graphics.common@1.0::types for details of formats and
* valid dataSpace values for each format.
* depth-based data is requested. For others, it merely describes an encoding
* scheme. See android.hardware.graphics.common@1.0::types for details of formats
* and valid dataSpace values for each format.
*
* The HAL must use this dataSpace to configure the stream to the correct
* colorspace, or to select between color and depth outputs if
* supported. The dataspace values are set using the V0 dataspace
* definitions.
*
* The color space implied by dataSpace should be overridden by colorSpace if
* the device supports the REQUEST_AVAILABLE_CAPABILITIES_COLOR_SPACE_PROFILES
* capability.
*/
android.hardware.graphics.common.Dataspace dataSpace;
@@ -222,4 +226,19 @@ parcelable Stream {
* DEFAULT.
*/
android.hardware.camera.metadata.ScalerAvailableStreamUseCases useCase;
/**
* The color space of the stream.
*
* A client may leave the color space unspecified. In this case, the value will be
* ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED, and the color space
* implied by dataSpace should be used instead.
*
* When specified, this field is the ultimate authority over the color space of the stream,
* regardless of dataSpace. The purpose of this field is to support specifying wide gamut
* color spaces for dataSpace values such as JFIF and HEIF.
*
* Possible values are the ordinals of the ColorSpace.Named enum in the public-facing API.
*/
int colorSpace;
}
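
A note on the precedence rule documented above: on the HAL side it reduces to a single check when a stream is configured. The sketch below is illustrative only and not part of this change; the helper name and call site are assumptions.

// Illustrative sketch (not part of this change): applying the colorSpace-over-
// dataSpace precedence documented in Stream.aidl above.
#include <aidl/android/hardware/camera/device/Stream.h>
#include <aidl/android/hardware/camera/metadata/RequestAvailableColorSpaceProfilesMap.h>

using ::aidl::android::hardware::camera::device::Stream;
using ::aidl::android::hardware::camera::metadata::RequestAvailableColorSpaceProfilesMap;

// Returns true and fills |out| when the client requested an explicit color space;
// returns false when the stream should keep the color space implied by dataSpace.
static bool getRequestedColorSpace(const Stream& stream,
                                   RequestAvailableColorSpaceProfilesMap* out) {
    auto requested = static_cast<RequestAvailableColorSpaceProfilesMap>(stream.colorSpace);
    if (requested == RequestAvailableColorSpaceProfilesMap::
                             ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED) {
        return false;  // unspecified: fall back to dataSpace
    }
    *out = requested;  // explicit request overrides dataSpace
    return true;
}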

View File

@@ -171,6 +171,7 @@ enum CameraMetadataTag {
ANDROID_REQUEST_CHARACTERISTIC_KEYS_NEEDING_PERMISSION = 786450,
ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP = 786451,
ANDROID_REQUEST_RECOMMENDED_TEN_BIT_DYNAMIC_RANGE_PROFILE = 786452,
ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP = 786453,
ANDROID_SCALER_CROP_REGION = 851968,
ANDROID_SCALER_AVAILABLE_FORMATS = 851969,
ANDROID_SCALER_AVAILABLE_JPEG_MIN_DURATIONS = 851970,

View File

@@ -58,4 +58,5 @@ enum RequestAvailableCapabilities {
ANDROID_REQUEST_AVAILABLE_CAPABILITIES_REMOSAIC_REPROCESSING = 17,
ANDROID_REQUEST_AVAILABLE_CAPABILITIES_DYNAMIC_RANGE_TEN_BIT = 18,
ANDROID_REQUEST_AVAILABLE_CAPABILITIES_STREAM_USE_CASE = 19,
ANDROID_REQUEST_AVAILABLE_CAPABILITIES_COLOR_SPACE_PROFILES = 20,
}

View File

@@ -0,0 +1,58 @@
/*
* Copyright (C) 2022 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Autogenerated from camera metadata definitions in
* /system/media/camera/docs/metadata_definitions.xml
* *** DO NOT EDIT BY HAND ***
*/
///////////////////////////////////////////////////////////////////////////////
// THIS FILE IS IMMUTABLE. DO NOT EDIT IN ANY CASE. //
///////////////////////////////////////////////////////////////////////////////
// This file is a snapshot of an AIDL file. Do not edit it manually. There are
// two cases:
// 1). this is a frozen version file - do not edit this in any case.
// 2). this is a 'current' file. If you make a backwards compatible change to
// the interface (from the latest frozen version), the build system will
// prompt you to update this file with `m <name>-update-api`.
//
// You must not make a backward incompatible change to any AIDL file built
// with the aidl_interface module type with versions property set. The module
// type is used to build AIDL files in a way that they can be used across
// independently updatable components of the system. If a device is shipped
// with such a backward incompatible change, it has a high risk of breaking
// later when a module using the interface is updated, e.g., Mainline modules.
package android.hardware.camera.metadata;
@Backing(type="long") @VintfStability
enum RequestAvailableColorSpaceProfilesMap {
ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED = -1,
ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_SRGB = 0,
ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_LINEAR_SRGB = 1,
ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_EXTENDED_SRGB = 2,
ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_LINEAR_EXTENDED_SRGB = 3,
ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_BT709 = 4,
ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_BT2020 = 5,
ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_DCI_P3 = 6,
ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_DISPLAY_P3 = 7,
ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_NTSC_1953 = 8,
ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_SMPTE_C = 9,
ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_ADOBE_RGB = 10,
ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_PRO_PHOTO_RGB = 11,
ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_ACES = 12,
ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_ACESCG = 13,
ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_CIE_XYZ = 14,
ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_CIE_LAB = 15,
}

View File

@@ -1029,6 +1029,12 @@ enum CameraMetadataTag {
* <p>Recommended 10-bit dynamic range profile.</p>
*/
ANDROID_REQUEST_RECOMMENDED_TEN_BIT_DYNAMIC_RANGE_PROFILE,
/**
* android.request.availableColorSpaceProfilesMap [static, enum[], ndk_public]
*
* <p>A list of all possible color space profiles supported by a camera device.</p>
*/
ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP,
/**
* android.scaler.cropRegion [dynamic, int32[], public]
*

View File

@@ -49,4 +49,5 @@ enum RequestAvailableCapabilities {
ANDROID_REQUEST_AVAILABLE_CAPABILITIES_REMOSAIC_REPROCESSING,
ANDROID_REQUEST_AVAILABLE_CAPABILITIES_DYNAMIC_RANGE_TEN_BIT,
ANDROID_REQUEST_AVAILABLE_CAPABILITIES_STREAM_USE_CASE,
ANDROID_REQUEST_AVAILABLE_CAPABILITIES_COLOR_SPACE_PROFILES,
}
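
For devices that advertise ANDROID_REQUEST_AVAILABLE_CAPABILITIES_COLOR_SPACE_PROFILES, the availableColorSpaceProfilesMap tag documented above is read by the VTS helpers later in this change as a flat int64 array of (colorSpace, imageFormat, dynamicRangeProfileBitmap) triples. The sketch below shows one way a HAL's static metadata might be populated; it is illustrative only, and the specific values are examples rather than requirements.

// Illustrative sketch (not part of this change): building the static entry as
// (colorSpace, imageFormat, dynamicRangeProfileBitmap) int64 triples, the layout
// parsed by getColorSpaceProfiles() further down. Values are examples only.
#include <aidl/android/hardware/camera/metadata/RequestAvailableColorSpaceProfilesMap.h>
#include <system/camera_metadata.h>
#include <system/camera_metadata_tags.h>

using ::aidl::android::hardware::camera::metadata::RequestAvailableColorSpaceProfilesMap;

static camera_metadata_t* buildColorSpaceProfilesEntry() {
    const int64_t kDisplayP3 = static_cast<int64_t>(RequestAvailableColorSpaceProfilesMap::
            ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_DISPLAY_P3);
    const int64_t kSrgb = static_cast<int64_t>(RequestAvailableColorSpaceProfilesMap::
            ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_SRGB);
    const int64_t kJpeg = 0x100;    // ImageFormat.JPEG (see halFormatToPublicFormat below)
    const int64_t kPrivate = 0x22;  // IMPLEMENTATION_DEFINED / ImageFormat.PRIVATE
    const int64_t triples[] = {
            kDisplayP3, kJpeg,    ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
            kSrgb,      kPrivate, ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
    };
    const size_t count = sizeof(triples) / sizeof(triples[0]);
    camera_metadata_t* meta = allocate_camera_metadata(
            /*entry_capacity*/ 1,
            calculate_camera_metadata_entry_data_size(TYPE_INT64, count));
    add_camera_metadata_entry(meta, ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP,
                              triples, count);
    return meta;
}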

View File

@@ -0,0 +1,49 @@
/*
* Copyright (C) 2022 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Autogenerated from camera metadata definitions in
* /system/media/camera/docs/metadata_definitions.xml
* *** DO NOT EDIT BY HAND ***
*/
package android.hardware.camera.metadata;
/**
* android.request.availableColorSpaceProfilesMap enumeration values
* @see ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP
*/
@VintfStability
@Backing(type="long")
enum RequestAvailableColorSpaceProfilesMap {
ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED = -1L,
ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_SRGB = 0L,
ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_LINEAR_SRGB = 1L,
ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_EXTENDED_SRGB = 2L,
ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_LINEAR_EXTENDED_SRGB = 3L,
ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_BT709 = 4L,
ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_BT2020 = 5L,
ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_DCI_P3 = 6L,
ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_DISPLAY_P3 = 7L,
ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_NTSC_1953 = 8L,
ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_SMPTE_C = 9L,
ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_ADOBE_RGB = 10L,
ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_PRO_PHOTO_RGB = 11L,
ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_ACES = 12L,
ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_ACESCG = 13L,
ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_CIE_XYZ = 14L,
ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_CIE_LAB = 15L,
}
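
As the Stream.aidl documentation above notes, these values are by design the ordinals of the public ColorSpace.Named enum (SRGB = 0 through CIE_LAB = 15, with -1 reserved for UNSPECIFIED); this is also why the VTS test below carries its own copy of that enum. A minimal conversion helper, shown here only as an illustration and not part of this change, could be a range check and a cast:

// Illustrative sketch (not part of this change): translating a Stream.colorSpace
// ordinal back into the metadata enum, rejecting UNSPECIFIED (-1) and anything
// outside the currently defined range.
#include <aidl/android/hardware/camera/metadata/RequestAvailableColorSpaceProfilesMap.h>

using ::aidl::android::hardware::camera::metadata::RequestAvailableColorSpaceProfilesMap;

static bool ordinalToColorSpaceProfile(int32_t ordinal,
                                       RequestAvailableColorSpaceProfilesMap* out) {
    constexpr int64_t kMin = static_cast<int64_t>(RequestAvailableColorSpaceProfilesMap::
            ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_SRGB);     // 0
    constexpr int64_t kMax = static_cast<int64_t>(RequestAvailableColorSpaceProfilesMap::
            ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_CIE_LAB);  // 15
    if (ordinal < kMin || ordinal > kMax) {
        return false;  // covers UNSPECIFIED (-1) and out-of-range ordinals
    }
    *out = static_cast<RequestAvailableColorSpaceProfilesMap>(ordinal);
    return true;
}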

View File

@@ -14,8 +14,8 @@ aidl_interface {
"android/hardware/camera/provider/*.aidl",
],
imports: [
"android.hardware.camera.device",
"android.hardware.camera.common",
"android.hardware.camera.device-V2",
"android.hardware.camera.common-V1",
],
stability: "vintf",
backend: {

View File

@@ -41,6 +41,7 @@ interface ICameraProvider {
void notifyDeviceStateChange(long deviceState);
android.hardware.camera.provider.ConcurrentCameraIdCombination[] getConcurrentCameraIds();
boolean isConcurrentStreamCombinationSupported(in android.hardware.camera.provider.CameraIdAndStreamCombination[] configs);
void placeholder();
const long DEVICE_STATE_NORMAL = 0;
const long DEVICE_STATE_BACK_COVERED = 1;
const long DEVICE_STATE_FRONT_COVERED = 2;

View File

@@ -304,4 +304,12 @@ interface ICameraProvider {
*
*/
boolean isConcurrentStreamCombinationSupported(in CameraIdAndStreamCombination[] configs);
/*
* Due to a bug in vintf regarding aidl changes that are contained to fields,
* we need a placeholder method that will be removed after this patch.
*
* TODO(b/237048744): Remove this once fixed.
*/
void placeholder();
}

View File

@@ -59,9 +59,9 @@ cc_test {
static_libs: [
"android.hardware.camera.common@1.0-helper",
"android.hardware.camera.common-V1-ndk",
"android.hardware.camera.device-V1-ndk",
"android.hardware.camera.metadata-V1-ndk",
"android.hardware.camera.provider-V1-ndk",
"android.hardware.camera.device-V2-ndk",
"android.hardware.camera.metadata-V2-ndk",
"android.hardware.camera.provider-V2-ndk",
"android.hidl.allocator@1.0",
"libgrallocusage",
"libhidlmemory",

View File

@@ -36,6 +36,7 @@ using ::aidl::android::hardware::camera::common::CameraResourceCost;
using ::aidl::android::hardware::camera::common::TorchModeStatus;
using ::aidl::android::hardware::camera::common::VendorTagSection;
using ::aidl::android::hardware::camera::device::ICameraDevice;
using ::aidl::android::hardware::camera::metadata::RequestAvailableColorSpaceProfilesMap;
using ::aidl::android::hardware::camera::metadata::RequestAvailableDynamicRangeProfilesMap;
using ::aidl::android::hardware::camera::metadata::SensorPixelMode;
using ::aidl::android::hardware::camera::provider::CameraIdAndStreamCombination;
@@ -2037,6 +2038,49 @@ TEST_P(CameraAidlTest, process10BitDynamicRangeRequest) {
}
}
TEST_P(CameraAidlTest, process8BitColorSpaceRequests) {
static int profiles[] = {
ColorSpaceNamed::BT709,
ColorSpaceNamed::DCI_P3,
ColorSpaceNamed::DISPLAY_P3,
ColorSpaceNamed::EXTENDED_SRGB,
ColorSpaceNamed::LINEAR_EXTENDED_SRGB,
ColorSpaceNamed::NTSC_1953,
ColorSpaceNamed::SMPTE_C,
ColorSpaceNamed::SRGB
};
for (int32_t i = 0; i < sizeof(profiles) / sizeof(profiles[0]); i++) {
processColorSpaceRequest(static_cast<RequestAvailableColorSpaceProfilesMap>(profiles[i]),
static_cast<RequestAvailableDynamicRangeProfilesMap>(
ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD));
}
}
TEST_P(CameraAidlTest, process10BitColorSpaceRequests) {
static const camera_metadata_enum_android_request_available_dynamic_range_profiles_map
dynamicRangeProfiles[] = {
ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HLG10,
ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10,
ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10_PLUS,
ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_REF,
ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_REF_PO,
ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_OEM,
ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_OEM_PO,
ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_REF,
ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_REF_PO,
ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_OEM,
ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_OEM_PO
};
// Process all dynamic range profiles with BT2020
for (int32_t i = 0; i < sizeof(dynamicRangeProfiles) / sizeof(dynamicRangeProfiles[0]); i++) {
processColorSpaceRequest(
static_cast<RequestAvailableColorSpaceProfilesMap>(ColorSpaceNamed::BT2020),
static_cast<RequestAvailableDynamicRangeProfilesMap>(dynamicRangeProfiles[i]));
}
}
// Generate and verify a burst containing alternating sensor sensitivity values
TEST_P(CameraAidlTest, processCaptureRequestBurstISO) {
std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);

View File

@@ -22,6 +22,7 @@
#include <HandleImporter.h>
#include <aidl/android/hardware/camera/device/ICameraDevice.h>
#include <aidl/android/hardware/camera/metadata/CameraMetadataTag.h>
#include <aidl/android/hardware/camera/metadata/RequestAvailableColorSpaceProfilesMap.h>
#include <aidl/android/hardware/camera/metadata/RequestAvailableDynamicRangeProfilesMap.h>
#include <aidl/android/hardware/camera/metadata/SensorInfoColorFilterArrangement.h>
#include <aidl/android/hardware/camera/metadata/SensorPixelMode.h>
@@ -43,6 +44,7 @@ using ::aidl::android::hardware::camera::common::TorchModeStatus;
using ::aidl::android::hardware::camera::device::CameraMetadata;
using ::aidl::android::hardware::camera::device::ICameraDevice;
using ::aidl::android::hardware::camera::metadata::CameraMetadataTag;
using ::aidl::android::hardware::camera::metadata::RequestAvailableColorSpaceProfilesMap;
using ::aidl::android::hardware::camera::metadata::RequestAvailableDynamicRangeProfilesMap;
using ::aidl::android::hardware::camera::metadata::SensorInfoColorFilterArrangement;
using ::aidl::android::hardware::camera::metadata::SensorPixelMode;
@@ -2789,6 +2791,168 @@ void CameraAidlTest::verify10BitMetadata(
}
}
bool CameraAidlTest::reportsColorSpaces(const camera_metadata_t* staticMeta) {
camera_metadata_ro_entry capabilityEntry;
int rc = find_camera_metadata_ro_entry(staticMeta, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
&capabilityEntry);
if (rc == 0) {
for (uint32_t i = 0; i < capabilityEntry.count; i++) {
if (capabilityEntry.data.u8[i] ==
ANDROID_REQUEST_AVAILABLE_CAPABILITIES_COLOR_SPACE_PROFILES) {
return true;
}
}
}
return false;
}
void CameraAidlTest::getColorSpaceProfiles(
const camera_metadata_t* staticMeta,
std::vector<RequestAvailableColorSpaceProfilesMap>* profiles) {
ASSERT_NE(nullptr, staticMeta);
ASSERT_NE(nullptr, profiles);
camera_metadata_ro_entry entry;
int rc = find_camera_metadata_ro_entry(
staticMeta, ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP, &entry);
ASSERT_EQ(rc, 0);
ASSERT_TRUE(entry.count > 0);
ASSERT_EQ(entry.count % 3, 0);
for (uint32_t i = 0; i < entry.count; i += 3) {
ASSERT_NE(entry.data.i64[i],
ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED);
if (std::find(profiles->begin(), profiles->end(),
static_cast<RequestAvailableColorSpaceProfilesMap>(entry.data.i64[i]))
== profiles->end()) {
profiles->emplace_back(
static_cast<RequestAvailableColorSpaceProfilesMap>(entry.data.i64[i]));
}
}
}
bool CameraAidlTest::isColorSpaceCompatibleWithDynamicRangeAndPixelFormat(
const camera_metadata_t* staticMeta,
RequestAvailableColorSpaceProfilesMap colorSpace,
RequestAvailableDynamicRangeProfilesMap dynamicRangeProfile,
aidl::android::hardware::graphics::common::PixelFormat pixelFormat) {
camera_metadata_ro_entry entry;
int rc = find_camera_metadata_ro_entry(
staticMeta, ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP, &entry);
if (rc == 0) {
for (uint32_t i = 0; i < entry.count; i += 3) {
RequestAvailableColorSpaceProfilesMap entryColorSpace =
static_cast<RequestAvailableColorSpaceProfilesMap>(entry.data.i64[i]);
int64_t dynamicRangeProfileI64 = static_cast<int64_t>(dynamicRangeProfile);
int32_t entryImageFormat = static_cast<int32_t>(entry.data.i64[i + 1]);
int32_t expectedImageFormat = halFormatToPublicFormat(pixelFormat);
if (entryColorSpace == colorSpace
&& (entry.data.i64[i + 2] & dynamicRangeProfileI64) != 0
&& entryImageFormat == expectedImageFormat) {
return true;
}
}
}
return false;
}
const char* CameraAidlTest::getColorSpaceProfileString(
RequestAvailableColorSpaceProfilesMap colorSpace) {
auto colorSpaceCast = static_cast<int>(colorSpace);
switch (colorSpaceCast) {
case ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED:
return "UNSPECIFIED";
case ColorSpaceNamed::SRGB:
return "SRGB";
case ColorSpaceNamed::LINEAR_SRGB:
return "LINEAR_SRGB";
case ColorSpaceNamed::EXTENDED_SRGB:
return "EXTENDED_SRGB";
case ColorSpaceNamed::LINEAR_EXTENDED_SRGB:
return "LINEAR_EXTENDED_SRGB";
case ColorSpaceNamed::BT709:
return "BT709";
case ColorSpaceNamed::BT2020:
return "BT2020";
case ColorSpaceNamed::DCI_P3:
return "DCI_P3";
case ColorSpaceNamed::DISPLAY_P3:
return "DISPLAY_P3";
case ColorSpaceNamed::NTSC_1953:
return "NTSC_1953";
case ColorSpaceNamed::SMPTE_C:
return "SMPTE_C";
case ColorSpaceNamed::ADOBE_RGB:
return "ADOBE_RGB";
case ColorSpaceNamed::PRO_PHOTO_RGB:
return "PRO_PHOTO_RGB";
case ColorSpaceNamed::ACES:
return "ACES";
case ColorSpaceNamed::ACESCG:
return "ACESCG";
case ColorSpaceNamed::CIE_XYZ:
return "CIE_XYZ";
case ColorSpaceNamed::CIE_LAB:
return "CIE_LAB";
default:
return "INVALID";
}
return "INVALID";
}
const char* CameraAidlTest::getDynamicRangeProfileString(
RequestAvailableDynamicRangeProfilesMap dynamicRangeProfile) {
auto dynamicRangeProfileCast =
static_cast<camera_metadata_enum_android_request_available_dynamic_range_profiles_map>
(dynamicRangeProfile);
switch (dynamicRangeProfileCast) {
case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD:
return "STANDARD";
case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HLG10:
return "HLG10";
case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10:
return "HDR10";
case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10_PLUS:
return "HDR10_PLUS";
case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_REF:
return "DOLBY_VISION_10B_HDR_REF";
case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_REF_PO:
return "DOLBY_VISION_10B_HDR_REF_P0";
case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_OEM:
return "DOLBY_VISION_10B_HDR_OEM";
case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_OEM_PO:
return "DOLBY_VISION_10B_HDR_OEM_P0";
case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_REF:
return "DOLBY_VISION_8B_HDR_REF";
case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_REF_PO:
return "DOLBY_VISION_8B_HDR_REF_P0";
case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_OEM:
return "DOLBY_VISION_8B_HDR_OEM";
case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_OEM_PO:
return "DOLBY_VISION_8B_HDR_OEM_P0";
default:
return "INVALID";
}
return "INVALID";
}
int32_t CameraAidlTest::halFormatToPublicFormat(
aidl::android::hardware::graphics::common::PixelFormat pixelFormat) {
// This is an incomplete mapping of pixel format to image format and assumes dataspaces
// (see getDataspace)
switch (pixelFormat) {
case PixelFormat::BLOB:
return 0x100; // ImageFormat.JPEG
case PixelFormat::Y16:
return 0x44363159; // ImageFormat.DEPTH16
default:
return static_cast<int32_t>(pixelFormat);
}
}
void CameraAidlTest::configurePreviewStream(
const std::string& name, const std::shared_ptr<ICameraProvider>& provider,
const AvailableStream* previewThreshold, std::shared_ptr<ICameraDeviceSession>* session,
@@ -2934,4 +3098,224 @@ void CameraAidlTest::updateInflightResultQueue(
for (auto& it : mInflightMap) {
it.second->resultQueue = resultQueue;
}
}
void CameraAidlTest::processColorSpaceRequest(
RequestAvailableColorSpaceProfilesMap colorSpace,
RequestAvailableDynamicRangeProfilesMap dynamicRangeProfile) {
std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider);
int64_t bufferId = 1;
CameraMetadata settings;
for (const auto& name : cameraDeviceNames) {
std::string version, deviceId;
ASSERT_TRUE(matchDeviceName(name, mProviderType, &version, &deviceId));
CameraMetadata meta;
std::shared_ptr<ICameraDevice> device;
openEmptyDeviceSession(name, mProvider, &mSession, &meta, &device);
camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data());
// Device does not report color spaces, skip.
if (!reportsColorSpaces(staticMeta)) {
ndk::ScopedAStatus ret = mSession->close();
mSession = nullptr;
ASSERT_TRUE(ret.isOk());
ALOGV("Camera %s does not report color spaces", name.c_str());
continue;
}
std::vector<RequestAvailableColorSpaceProfilesMap> profileList;
getColorSpaceProfiles(staticMeta, &profileList);
ASSERT_FALSE(profileList.empty());
// Device does not support color space / dynamic range profile, skip
if (std::find(profileList.begin(), profileList.end(), colorSpace)
== profileList.end() || !isColorSpaceCompatibleWithDynamicRangeAndPixelFormat(
staticMeta, colorSpace, dynamicRangeProfile,
PixelFormat::IMPLEMENTATION_DEFINED)) {
ndk::ScopedAStatus ret = mSession->close();
mSession = nullptr;
ASSERT_TRUE(ret.isOk());
ALOGV("Camera %s does not support color space %s with dynamic range profile %s and "
"pixel format %d", name.c_str(), getColorSpaceProfileString(colorSpace),
getDynamicRangeProfileString(dynamicRangeProfile),
PixelFormat::IMPLEMENTATION_DEFINED);
continue;
}
ALOGV("Camera %s supports color space %s with dynamic range profile %s and pixel format %d",
name.c_str(), getColorSpaceProfileString(colorSpace),
getDynamicRangeProfileString(dynamicRangeProfile),
PixelFormat::IMPLEMENTATION_DEFINED);
// If an HDR dynamic range profile is reported in the color space profile list,
// the device must also have the dynamic range profiles map capability and contain
// the dynamic range profile in the map.
if (dynamicRangeProfile != static_cast<RequestAvailableDynamicRangeProfilesMap>(
ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD)) {
ASSERT_TRUE(is10BitDynamicRangeCapable(staticMeta));
std::vector<RequestAvailableDynamicRangeProfilesMap> dynamicRangeProfiles;
get10BitDynamicRangeProfiles(staticMeta, &dynamicRangeProfiles);
ASSERT_FALSE(dynamicRangeProfiles.empty());
ASSERT_FALSE(std::find(dynamicRangeProfiles.begin(), dynamicRangeProfiles.end(),
dynamicRangeProfile) == dynamicRangeProfiles.end());
}
CameraMetadata req;
android::hardware::camera::common::V1_0::helper::CameraMetadata defaultSettings;
ndk::ScopedAStatus ret =
mSession->constructDefaultRequestSettings(RequestTemplate::PREVIEW, &req);
ASSERT_TRUE(ret.isOk());
const camera_metadata_t* metadata =
reinterpret_cast<const camera_metadata_t*>(req.metadata.data());
size_t expectedSize = req.metadata.size();
int result = validate_camera_metadata_structure(metadata, &expectedSize);
ASSERT_TRUE((result == 0) || (result == CAMERA_METADATA_VALIDATION_SHIFTED));
size_t entryCount = get_camera_metadata_entry_count(metadata);
ASSERT_GT(entryCount, 0u);
defaultSettings = metadata;
const camera_metadata_t* settingsBuffer = defaultSettings.getAndLock();
uint8_t* rawSettingsBuffer = (uint8_t*)settingsBuffer;
settings.metadata = std::vector(
rawSettingsBuffer, rawSettingsBuffer + get_camera_metadata_size(settingsBuffer));
overrideRotateAndCrop(&settings);
ret = mSession->close();
mSession = nullptr;
ASSERT_TRUE(ret.isOk());
std::vector<HalStream> halStreams;
bool supportsPartialResults = false;
bool useHalBufManager = false;
int32_t partialResultCount = 0;
Stream previewStream;
std::shared_ptr<DeviceCb> cb;
previewStream.usage =
static_cast<aidl::android::hardware::graphics::common::BufferUsage>(
GRALLOC1_CONSUMER_USAGE_HWCOMPOSER);
previewStream.dataSpace = getDataspace(PixelFormat::IMPLEMENTATION_DEFINED);
previewStream.colorSpace = static_cast<int32_t>(colorSpace);
configureStreams(name, mProvider, PixelFormat::IMPLEMENTATION_DEFINED, &mSession,
&previewStream, &halStreams, &supportsPartialResults,
&partialResultCount, &useHalBufManager, &cb, 0,
/*maxResolution*/ false, dynamicRangeProfile);
ASSERT_NE(mSession, nullptr);
::aidl::android::hardware::common::fmq::MQDescriptor<
int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite>
descriptor;
auto resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor);
ASSERT_TRUE(resultQueueRet.isOk());
std::shared_ptr<ResultMetadataQueue> resultQueue =
std::make_shared<ResultMetadataQueue>(descriptor);
if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) {
ALOGE("%s: HAL returns empty result metadata fmq, not use it", __func__);
resultQueue = nullptr;
// Don't use the queue onwards.
}
mInflightMap.clear();
// Stream as long as needed to fill the Hal inflight queue
std::vector<CaptureRequest> requests(halStreams[0].maxBuffers);
for (int32_t frameNumber = 0; frameNumber < requests.size(); frameNumber++) {
std::shared_ptr<InFlightRequest> inflightReq = std::make_shared<InFlightRequest>(
static_cast<ssize_t>(halStreams.size()), false, supportsPartialResults,
partialResultCount, std::unordered_set<std::string>(), resultQueue);
CaptureRequest& request = requests[frameNumber];
std::vector<StreamBuffer>& outputBuffers = request.outputBuffers;
outputBuffers.resize(halStreams.size());
size_t k = 0;
inflightReq->mOutstandingBufferIds.resize(halStreams.size());
std::vector<buffer_handle_t> graphicBuffers;
graphicBuffers.reserve(halStreams.size());
for (const auto& halStream : halStreams) {
buffer_handle_t buffer_handle;
if (useHalBufManager) {
outputBuffers[k] = {halStream.id, 0,
NativeHandle(), BufferStatus::OK,
NativeHandle(), NativeHandle()};
} else {
auto usage = android_convertGralloc1To0Usage(
static_cast<uint64_t>(halStream.producerUsage),
static_cast<uint64_t>(halStream.consumerUsage));
allocateGraphicBuffer(previewStream.width, previewStream.height, usage,
halStream.overrideFormat, &buffer_handle);
inflightReq->mOutstandingBufferIds[halStream.id][bufferId] = buffer_handle;
graphicBuffers.push_back(buffer_handle);
outputBuffers[k] = {halStream.id, bufferId,
android::makeToAidl(buffer_handle), BufferStatus::OK, NativeHandle(),
NativeHandle()};
bufferId++;
}
k++;
}
request.inputBuffer = {
-1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()};
request.frameNumber = frameNumber;
request.fmqSettingsSize = 0;
request.settings = settings;
request.inputWidth = 0;
request.inputHeight = 0;
{
std::unique_lock<std::mutex> l(mLock);
mInflightMap[frameNumber] = inflightReq;
}
}
int32_t numRequestProcessed = 0;
std::vector<BufferCache> cachesToRemove;
ndk::ScopedAStatus returnStatus =
mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed);
ASSERT_TRUE(returnStatus.isOk());
ASSERT_EQ(numRequestProcessed, requests.size());
returnStatus = mSession->repeatingRequestEnd(requests.size() - 1,
std::vector<int32_t> {halStreams[0].id});
ASSERT_TRUE(returnStatus.isOk());
for (int32_t frameNumber = 0; frameNumber < requests.size(); frameNumber++) {
const auto& inflightReq = mInflightMap[frameNumber];
std::unique_lock<std::mutex> l(mLock);
while (!inflightReq->errorCodeValid &&
((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) {
auto timeout = std::chrono::system_clock::now() +
std::chrono::seconds(kStreamBufferTimeoutSec);
ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout));
}
ASSERT_FALSE(inflightReq->errorCodeValid);
ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u);
if (dynamicRangeProfile != static_cast<RequestAvailableDynamicRangeProfilesMap>(
ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD)) {
verify10BitMetadata(mHandleImporter, *inflightReq, dynamicRangeProfile);
}
}
if (useHalBufManager) {
std::vector<int32_t> streamIds(halStreams.size());
for (size_t i = 0; i < streamIds.size(); i++) {
streamIds[i] = halStreams[i].id;
}
mSession->signalStreamFlush(streamIds, /*streamConfigCounter*/ 0);
cb->waitForBuffersReturned();
}
ret = mSession->close();
mSession = nullptr;
ASSERT_TRUE(ret.isOk());
}
}
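
Tying this back to the commit message: a Camera2 framework consumer (such as the Display P3 test app mentioned in the Test: lines) ultimately reaches the HAL through the same field the test sets via previewStream.colorSpace. A minimal client-side sketch, illustrative only and not part of this change:

// Illustrative sketch (not part of this change): requesting a Display P3 stream
// through the new colorSpace field. All other Stream fields follow the existing
// stream-configuration path; DISPLAY_P3 is the ColorSpace.Named ordinal (7).
#include <aidl/android/hardware/camera/device/Stream.h>
#include <aidl/android/hardware/camera/metadata/RequestAvailableColorSpaceProfilesMap.h>
#include <aidl/android/hardware/graphics/common/Dataspace.h>
#include <aidl/android/hardware/graphics/common/PixelFormat.h>

using ::aidl::android::hardware::camera::device::Stream;
using ::aidl::android::hardware::camera::metadata::RequestAvailableColorSpaceProfilesMap;
using ::aidl::android::hardware::graphics::common::Dataspace;
using ::aidl::android::hardware::graphics::common::PixelFormat;

static void requestDisplayP3Preview(Stream* stream) {
    stream->format = PixelFormat::IMPLEMENTATION_DEFINED;
    stream->dataSpace = Dataspace::UNKNOWN;  // the explicit colorSpace below takes precedence
    stream->colorSpace = static_cast<int32_t>(RequestAvailableColorSpaceProfilesMap::
            ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_DISPLAY_P3);
}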

View File

@@ -44,6 +44,8 @@
#include <aidl/android/hardware/camera/provider/ICameraProvider.h>
#include <aidl/android/hardware/camera/metadata/RequestAvailableColorSpaceProfilesMap.h>
#include <aidl/android/hardware/graphics/common/PixelFormat.h>
#include <gtest/gtest.h>
@@ -123,6 +125,26 @@ class CameraAidlTest : public ::testing::TestWithParam<std::string> {
YUV_REPROCESS,
};
// Copied from ColorSpace.java (see Named)
enum ColorSpaceNamed {
SRGB,
LINEAR_SRGB,
EXTENDED_SRGB,
LINEAR_EXTENDED_SRGB,
BT709,
BT2020,
DCI_P3,
DISPLAY_P3,
NTSC_1953,
SMPTE_C,
ADOBE_RGB,
PRO_PHOTO_RGB,
ACES,
ACESCG,
CIE_XYZ,
CIE_LAB
};
struct AvailableZSLInputOutput {
int32_t inputFormat;
int32_t outputFormat;
@@ -348,11 +370,40 @@ class CameraAidlTest : public ::testing::TestWithParam<std::string> {
std::vector<aidl::android::hardware::camera::metadata::
RequestAvailableDynamicRangeProfilesMap>* profiles);
static bool reportsColorSpaces(const camera_metadata_t* staticMeta);
static void getColorSpaceProfiles(
const camera_metadata_t* staticMeta,
std::vector<aidl::android::hardware::camera::metadata::
RequestAvailableColorSpaceProfilesMap>* profiles);
static bool isColorSpaceCompatibleWithDynamicRangeAndPixelFormat(
const camera_metadata_t* staticMeta,
aidl::android::hardware::camera::metadata::
RequestAvailableColorSpaceProfilesMap colorSpace,
aidl::android::hardware::camera::metadata::
RequestAvailableDynamicRangeProfilesMap dynamicRangeProfile,
aidl::android::hardware::graphics::common::PixelFormat pixelFormat);
static const char* getColorSpaceProfileString(aidl::android::hardware::camera::metadata::
RequestAvailableColorSpaceProfilesMap colorSpace);
static const char* getDynamicRangeProfileString(aidl::android::hardware::camera::metadata::
RequestAvailableDynamicRangeProfilesMap dynamicRangeProfile);
static int32_t halFormatToPublicFormat(
aidl::android::hardware::graphics::common::PixelFormat pixelFormat);
// Used by switchToOffline where a new result queue is created for offline reqs
void updateInflightResultQueue(const std::shared_ptr<ResultMetadataQueue>& resultQueue);
static Size getMinSize(Size a, Size b);
void processColorSpaceRequest(aidl::android::hardware::camera::metadata::
RequestAvailableColorSpaceProfilesMap colorSpace,
aidl::android::hardware::camera::metadata::
RequestAvailableDynamicRangeProfilesMap dynamicRangeProfile);
protected:
// In-flight queue for tracking completion of capture requests.
struct InFlightRequest {

View File

@@ -178,7 +178,7 @@
</hal>
<hal format="aidl" optional="true">
<name>android.hardware.camera.provider</name>
<version>1</version>
<version>1-2</version>
<interface>
<name>ICameraProvider</name>
<regex-instance>[^/]+/[0-9]+</regex-instance>