From 1903059507815cc89e9cf268f73d86b2bb3feef7 Mon Sep 17 00:00:00 2001 From: Yin-Chia Yeh Date: Thu, 19 Oct 2017 17:30:11 -0700 Subject: [PATCH] Camera: Add external camera provider Bug: 64874137 63873538 Change-Id: I4309874a7dedd3dd71d4bd0c2004d460421db679 --- camera/common/1.0/default/HandleImporter.cpp | 59 + .../1.0/default/include/HandleImporter.h | 9 +- camera/device/3.4/default/Android.bp | 45 +- .../3.4/default/ExternalCameraDevice.cpp | 793 +++++++ .../default/ExternalCameraDeviceSession.cpp | 1990 +++++++++++++++++ .../ExternalCameraDeviceSession.h | 441 ++++ .../ExternalCameraDevice_3_4.h | 110 + camera/provider/2.4/default/Android.bp | 27 +- .../provider/2.4/default/CameraProvider.cpp | 32 +- .../2.4/default/ExternalCameraProvider.cpp | 276 +++ .../2.4/default/ExternalCameraProvider.h | 102 + ...re.camera.provider@2.4-external-service.rc | 7 + .../provider/2.4/default/external-service.cpp | 34 + 13 files changed, 3909 insertions(+), 16 deletions(-) create mode 100644 camera/device/3.4/default/ExternalCameraDevice.cpp create mode 100644 camera/device/3.4/default/ExternalCameraDeviceSession.cpp create mode 100644 camera/device/3.4/default/include/ext_device_v3_4_impl/ExternalCameraDeviceSession.h create mode 100644 camera/device/3.4/default/include/ext_device_v3_4_impl/ExternalCameraDevice_3_4.h create mode 100644 camera/provider/2.4/default/ExternalCameraProvider.cpp create mode 100644 camera/provider/2.4/default/ExternalCameraProvider.h create mode 100644 camera/provider/2.4/default/android.hardware.camera.provider@2.4-external-service.rc create mode 100644 camera/provider/2.4/default/external-service.cpp diff --git a/camera/common/1.0/default/HandleImporter.cpp b/camera/common/1.0/default/HandleImporter.cpp index fd8b943dc3..e9741efa9a 100644 --- a/camera/common/1.0/default/HandleImporter.cpp +++ b/camera/common/1.0/default/HandleImporter.cpp @@ -134,6 +134,65 @@ void HandleImporter::closeFence(int fd) const { } } +YCbCrLayout 
HandleImporter::lockYCbCr( + buffer_handle_t& buf, uint64_t cpuUsage, + const IMapper::Rect& accessRegion) { + Mutex::Autolock lock(mLock); + YCbCrLayout layout = {}; + + if (!mInitialized) { + initializeLocked(); + } + + if (mMapper == nullptr) { + ALOGE("%s: mMapper is null!", __FUNCTION__); + return layout; + } + + hidl_handle acquireFenceHandle; + auto buffer = const_cast(buf); + mMapper->lockYCbCr(buffer, cpuUsage, accessRegion, acquireFenceHandle, + [&](const auto& tmpError, const auto& tmpLayout) { + if (tmpError == MapperError::NONE) { + layout = tmpLayout; + } else { + ALOGE("%s: failed to lockYCbCr error %d!", __FUNCTION__, tmpError); + } + }); + + ALOGV("%s: layout y %p cb %p cr %p y_str %d c_str %d c_step %d", + __FUNCTION__, layout.y, layout.cb, layout.cr, + layout.yStride, layout.cStride, layout.chromaStep); + return layout; +} + +int HandleImporter::unlock(buffer_handle_t& buf) { + int releaseFence = -1; + auto buffer = const_cast(buf); + mMapper->unlock( + buffer, [&](const auto& tmpError, const auto& tmpReleaseFence) { + if (tmpError == MapperError::NONE) { + auto fenceHandle = tmpReleaseFence.getNativeHandle(); + if (fenceHandle) { + if (fenceHandle->numInts != 0 || fenceHandle->numFds != 1) { + ALOGE("%s: bad release fence numInts %d numFds %d", + __FUNCTION__, fenceHandle->numInts, fenceHandle->numFds); + return; + } + releaseFence = dup(fenceHandle->data[0]); + if (releaseFence <= 0) { + ALOGE("%s: bad release fence FD %d", + __FUNCTION__, releaseFence); + } + } + } else { + ALOGE("%s: failed to unlock error %d!", __FUNCTION__, tmpError); + } + }); + + return releaseFence; +} + } // namespace helper } // namespace V1_0 } // namespace common diff --git a/camera/common/1.0/default/include/HandleImporter.h b/camera/common/1.0/default/include/HandleImporter.h index e47397c6a6..443362d3c2 100644 --- a/camera/common/1.0/default/include/HandleImporter.h +++ b/camera/common/1.0/default/include/HandleImporter.h @@ -22,6 +22,7 @@ #include using 
android::hardware::graphics::mapper::V2_0::IMapper; +using android::hardware::graphics::mapper::V2_0::YCbCrLayout; namespace android { namespace hardware { @@ -43,6 +44,12 @@ public: bool importFence(const native_handle_t* handle, int& fd) const; void closeFence(int fd) const; + // Assume caller has done waiting for acquire fences + YCbCrLayout lockYCbCr(buffer_handle_t& buf, uint64_t cpuUsage, + const IMapper::Rect& accessRegion); + + int unlock(buffer_handle_t& buf); // returns release fence + private: void initializeLocked(); void cleanup(); @@ -60,4 +67,4 @@ private: } // namespace hardware } // namespace android -#endif // CAMERA_COMMON_1_0_HANDLEIMPORTED_H \ No newline at end of file +#endif // CAMERA_COMMON_1_0_HANDLEIMPORTED_H diff --git a/camera/device/3.4/default/Android.bp b/camera/device/3.4/default/Android.bp index a0ab167366..61ac244339 100644 --- a/camera/device/3.4/default/Android.bp +++ b/camera/device/3.4/default/Android.bp @@ -17,7 +17,13 @@ cc_library_headers { name: "camera.device@3.4-impl_headers", vendor: true, - export_include_dirs: ["include/device_v3_4_impl"], + export_include_dirs: ["include/device_v3_4_impl"] +} + +cc_library_headers { + name: "camera.device@3.4-external-impl_headers", + vendor: true, + export_include_dirs: ["include/ext_device_v3_4_impl"] } cc_library_shared { @@ -55,3 +61,40 @@ cc_library_shared { "libfmq", ], } + +cc_library_shared { + name: "camera.device@3.4-external-impl", + defaults: ["hidl_defaults"], + proprietary: true, + vendor: true, + srcs: [ + "ExternalCameraDevice.cpp", + "ExternalCameraDeviceSession.cpp" + ], + shared_libs: [ + "libhidlbase", + "libhidltransport", + "libutils", + "libcutils", + "camera.device@3.2-impl", + "camera.device@3.3-impl", + "android.hardware.camera.device@3.2", + "android.hardware.camera.device@3.3", + "android.hardware.camera.device@3.4", + "android.hardware.camera.provider@2.4", + "android.hardware.graphics.mapper@2.0", + "liblog", + "libhardware", + "libcamera_metadata", + 
"libfmq", + "libsync", + "libyuv", + ], + static_libs: [ + "android.hardware.camera.common@1.0-helper", + ], + local_include_dirs: ["include/ext_device_v3_4_impl"], + export_shared_lib_headers: [ + "libfmq", + ], +} diff --git a/camera/device/3.4/default/ExternalCameraDevice.cpp b/camera/device/3.4/default/ExternalCameraDevice.cpp new file mode 100644 index 0000000000..4ad1768910 --- /dev/null +++ b/camera/device/3.4/default/ExternalCameraDevice.cpp @@ -0,0 +1,793 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#define LOG_TAG "ExtCamDev@3.4" +#define LOG_NDEBUG 0 +#include + +#include +#include +#include "android-base/macros.h" +#include "CameraMetadata.h" +#include "../../3.2/default/include/convert.h" +#include "ExternalCameraDevice_3_4.h" + + +namespace android { +namespace hardware { +namespace camera { +namespace device { +namespace V3_4 { +namespace implementation { + +namespace { +// Only support MJPEG for now as it seems to be the one supports higher fps +// Other formats to consider in the future: +// * V4L2_PIX_FMT_YVU420 (== YV12) +// * V4L2_PIX_FMT_YVYU (YVYU: can be converted to YV12 or other YUV420_888 formats) +const std::array kSupportedFourCCs {{ + V4L2_PIX_FMT_MJPEG +}}; // double braces required in C++11 + +// TODO: b/72261897 +// Define max size/fps this Android device can advertise (and streaming at reasonable speed) +// Also make sure that can be done without editing source code + +// TODO: b/72261675: make it dynamic since this affects memory usage +const int kMaxJpegSize = {13 * 1024 * 1024}; // 13MB +} // anonymous namespace + +ExternalCameraDevice::ExternalCameraDevice(const std::string& cameraId) : + mCameraId(cameraId) { + status_t ret = initCameraCharacteristics(); + if (ret != OK) { + ALOGE("%s: init camera characteristics failed: errorno %d", __FUNCTION__, ret); + mInitFailed = true; + } +} + +ExternalCameraDevice::~ExternalCameraDevice() {} + +bool ExternalCameraDevice::isInitFailed() { + return mInitFailed; +} + +Return ExternalCameraDevice::getResourceCost(getResourceCost_cb _hidl_cb) { + CameraResourceCost resCost; + resCost.resourceCost = 100; + _hidl_cb(Status::OK, resCost); + return Void(); +} + +Return ExternalCameraDevice::getCameraCharacteristics( + getCameraCharacteristics_cb _hidl_cb) { + Mutex::Autolock _l(mLock); + V3_2::CameraMetadata hidlChars; + + if (isInitFailed()) { + _hidl_cb(Status::INTERNAL_ERROR, hidlChars); + return Void(); + } + + const camera_metadata_t* rawMetadata = mCameraCharacteristics.getAndLock(); + 
V3_2::implementation::convertToHidl(rawMetadata, &hidlChars); + _hidl_cb(Status::OK, hidlChars); + mCameraCharacteristics.unlock(rawMetadata); + return Void(); +} + +Return ExternalCameraDevice::setTorchMode(TorchMode) { + return Status::METHOD_NOT_SUPPORTED; +} + +Return ExternalCameraDevice::open( + const sp& callback, open_cb _hidl_cb) { + Status status = Status::OK; + sp session = nullptr; + + if (callback == nullptr) { + ALOGE("%s: cannot open camera %s. callback is null!", + __FUNCTION__, mCameraId.c_str()); + _hidl_cb(Status::ILLEGAL_ARGUMENT, nullptr); + return Void(); + } + + if (isInitFailed()) { + ALOGE("%s: cannot open camera %s. camera init failed!", + __FUNCTION__, mCameraId.c_str()); + _hidl_cb(Status::INTERNAL_ERROR, nullptr); + return Void(); + } + + mLock.lock(); + + ALOGV("%s: Initializing device for camera %s", __FUNCTION__, mCameraId.c_str()); + session = mSession.promote(); + if (session != nullptr && !session->isClosed()) { + ALOGE("%s: cannot open an already opened camera!", __FUNCTION__); + mLock.unlock(); + _hidl_cb(Status::CAMERA_IN_USE, nullptr); + return Void(); + } + + unique_fd fd(::open(mCameraId.c_str(), O_RDWR)); + if (fd.get() < 0) { + ALOGE("%s: v4l2 device open %s failed: %s", + __FUNCTION__, mCameraId.c_str(), strerror(errno)); + mLock.unlock(); + _hidl_cb(Status::INTERNAL_ERROR, nullptr); + return Void(); + } + + session = new ExternalCameraDeviceSession( + callback, mSupportedFormats, mCameraCharacteristics, std::move(fd)); + if (session == nullptr) { + ALOGE("%s: camera device session allocation failed", __FUNCTION__); + mLock.unlock(); + _hidl_cb(Status::INTERNAL_ERROR, nullptr); + return Void(); + } + if (session->isInitFailed()) { + ALOGE("%s: camera device session init failed", __FUNCTION__); + session = nullptr; + mLock.unlock(); + _hidl_cb(Status::INTERNAL_ERROR, nullptr); + return Void(); + } + mSession = session; + + mLock.unlock(); + + _hidl_cb(status, session->getInterface()); + return Void(); +} + +Return 
ExternalCameraDevice::dumpState(const ::android::hardware::hidl_handle& handle) { + Mutex::Autolock _l(mLock); + if (handle.getNativeHandle() == nullptr) { + ALOGE("%s: handle must not be null", __FUNCTION__); + return Void(); + } + if (handle->numFds != 1 || handle->numInts != 0) { + ALOGE("%s: handle must contain 1 FD and 0 integers! Got %d FDs and %d ints", + __FUNCTION__, handle->numFds, handle->numInts); + return Void(); + } + int fd = handle->data[0]; + if (mSession == nullptr) { + dprintf(fd, "No active camera device session instance\n"); + return Void(); + } + auto session = mSession.promote(); + if (session == nullptr) { + dprintf(fd, "No active camera device session instance\n"); + return Void(); + } + // Call into active session to dump states + session->dumpState(handle); + return Void(); +} + + +status_t ExternalCameraDevice::initCameraCharacteristics() { + if (mCameraCharacteristics.isEmpty()) { + // init camera characteristics + unique_fd fd(::open(mCameraId.c_str(), O_RDWR)); + if (fd.get() < 0) { + ALOGE("%s: v4l2 device open %s failed", __FUNCTION__, mCameraId.c_str()); + return DEAD_OBJECT; + } + + status_t ret; + ret = initDefaultCharsKeys(&mCameraCharacteristics); + if (ret != OK) { + ALOGE("%s: init default characteristics key failed: errorno %d", __FUNCTION__, ret); + mCameraCharacteristics.clear(); + return ret; + } + + ret = initCameraControlsCharsKeys(fd.get(), &mCameraCharacteristics); + if (ret != OK) { + ALOGE("%s: init camera control characteristics key failed: errorno %d", __FUNCTION__, ret); + mCameraCharacteristics.clear(); + return ret; + } + + ret = initOutputCharsKeys(fd.get(), &mCameraCharacteristics); + if (ret != OK) { + ALOGE("%s: init output characteristics key failed: errorno %d", __FUNCTION__, ret); + mCameraCharacteristics.clear(); + return ret; + } + } + return OK; +} + +#define ARRAY_SIZE(a) (sizeof(a) / sizeof(a[0])) +#define UPDATE(tag, data, size) \ +do { \ + if (metadata->update((tag), (data), (size))) { \ + 
ALOGE("Update " #tag " failed!"); \ + return -EINVAL; \ + } \ +} while (0) + +status_t ExternalCameraDevice::initDefaultCharsKeys( + ::android::hardware::camera::common::V1_0::helper::CameraMetadata* metadata) { + // TODO: changed to HARDWARELEVEL_EXTERNAL later + const uint8_t hardware_level = ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED; + UPDATE(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL, &hardware_level, 1); + + // android.colorCorrection + const uint8_t availableAberrationModes[] = { + ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF}; + UPDATE(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES, + availableAberrationModes, ARRAY_SIZE(availableAberrationModes)); + + // android.control + const uint8_t antibandingMode = + ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO; + UPDATE(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES, + &antibandingMode, 1); + + const int32_t controlMaxRegions[] = {/*AE*/ 0, /*AWB*/ 0, /*AF*/ 0}; + UPDATE(ANDROID_CONTROL_MAX_REGIONS, controlMaxRegions, + ARRAY_SIZE(controlMaxRegions)); + + const uint8_t videoStabilizationMode = + ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF; + UPDATE(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES, + &videoStabilizationMode, 1); + + const uint8_t awbAvailableMode = ANDROID_CONTROL_AWB_MODE_AUTO; + UPDATE(ANDROID_CONTROL_AWB_AVAILABLE_MODES, &awbAvailableMode, 1); + + const uint8_t aeAvailableMode = ANDROID_CONTROL_AE_MODE_ON; + UPDATE(ANDROID_CONTROL_AE_AVAILABLE_MODES, &aeAvailableMode, 1); + + const uint8_t availableFffect = ANDROID_CONTROL_EFFECT_MODE_OFF; + UPDATE(ANDROID_CONTROL_AVAILABLE_EFFECTS, &availableFffect, 1); + + const uint8_t controlAvailableModes[] = {ANDROID_CONTROL_MODE_OFF, + ANDROID_CONTROL_MODE_AUTO}; + UPDATE(ANDROID_CONTROL_AVAILABLE_MODES, controlAvailableModes, + ARRAY_SIZE(controlAvailableModes)); + + // android.edge + const uint8_t edgeMode = ANDROID_EDGE_MODE_OFF; + UPDATE(ANDROID_EDGE_AVAILABLE_EDGE_MODES, &edgeMode, 1); + + // android.flash + const uint8_t flashInfo = 
ANDROID_FLASH_INFO_AVAILABLE_FALSE; + UPDATE(ANDROID_FLASH_INFO_AVAILABLE, &flashInfo, 1); + + // android.hotPixel + const uint8_t hotPixelMode = ANDROID_HOT_PIXEL_MODE_OFF; + UPDATE(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES, &hotPixelMode, 1); + + // android.jpeg + // TODO: b/72261675 See if we can provide thumbnail size for all jpeg aspect ratios + const int32_t jpegAvailableThumbnailSizes[] = {0, 0, 240, 180}; + UPDATE(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES, jpegAvailableThumbnailSizes, + ARRAY_SIZE(jpegAvailableThumbnailSizes)); + + const int32_t jpegMaxSize = kMaxJpegSize; + UPDATE(ANDROID_JPEG_MAX_SIZE, &jpegMaxSize, 1); + + const uint8_t jpegQuality = 90; + UPDATE(ANDROID_JPEG_QUALITY, &jpegQuality, 1); + UPDATE(ANDROID_JPEG_THUMBNAIL_QUALITY, &jpegQuality, 1); + + const int32_t jpegOrientation = 0; + UPDATE(ANDROID_JPEG_ORIENTATION, &jpegOrientation, 1); + + // android.lens + const uint8_t focusDistanceCalibration = + ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED; + UPDATE(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION, &focusDistanceCalibration, 1); + + const uint8_t opticalStabilizationMode = + ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF; + UPDATE(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION, + &opticalStabilizationMode, 1); + + const uint8_t facing = ANDROID_LENS_FACING_EXTERNAL; + UPDATE(ANDROID_LENS_FACING, &facing, 1); + + // android.noiseReduction + const uint8_t noiseReductionMode = ANDROID_NOISE_REDUCTION_MODE_OFF; + UPDATE(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES, + &noiseReductionMode, 1); + UPDATE(ANDROID_NOISE_REDUCTION_MODE, &noiseReductionMode, 1); + + // android.request + const uint8_t availableCapabilities[] = { + ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE}; + UPDATE(ANDROID_REQUEST_AVAILABLE_CAPABILITIES, availableCapabilities, + ARRAY_SIZE(availableCapabilities)); + + const int32_t partialResultCount = 1; + UPDATE(ANDROID_REQUEST_PARTIAL_RESULT_COUNT, &partialResultCount, 1); + + // This 
means pipeline latency of X frame intervals. The maximum number is 4. + const uint8_t requestPipelineMaxDepth = 4; + UPDATE(ANDROID_REQUEST_PIPELINE_MAX_DEPTH, &requestPipelineMaxDepth, 1); + UPDATE(ANDROID_REQUEST_PIPELINE_DEPTH, &requestPipelineMaxDepth, 1); + + // Three numbers represent the maximum numbers of different types of output + // streams simultaneously. The types are raw sensor, processed (but not + // stalling), and processed (but stalling). For usb limited mode, raw sensor + // is not supported. Stalling stream is JPEG. Non-stalling streams are + // YUV_420_888 or YV12. + const int32_t requestMaxNumOutputStreams[] = { + /*RAW*/0, + /*Processed*/ExternalCameraDeviceSession::kMaxProcessedStream, + /*Stall*/ExternalCameraDeviceSession::kMaxStallStream}; + UPDATE(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, requestMaxNumOutputStreams, + ARRAY_SIZE(requestMaxNumOutputStreams)); + + // Limited mode doesn't support reprocessing. + const int32_t requestMaxNumInputStreams = 0; + UPDATE(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS, &requestMaxNumInputStreams, + 1); + + // android.scaler + // TODO: b/72263447 V4L2_CID_ZOOM_* + const float scalerAvailableMaxDigitalZoom[] = {1}; + UPDATE(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM, + scalerAvailableMaxDigitalZoom, + ARRAY_SIZE(scalerAvailableMaxDigitalZoom)); + + const uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY; + UPDATE(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1); + + const int32_t testPatternModes[] = { + ANDROID_SENSOR_TEST_PATTERN_MODE_OFF}; + UPDATE(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES, testPatternModes, + ARRAY_SIZE(testPatternModes)); + UPDATE(ANDROID_SENSOR_TEST_PATTERN_MODE, &testPatternModes[0], 1); + + const uint8_t timestampSource = ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN; + UPDATE(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE, ×tampSource, 1); + + // Orientation probably isn't useful for external facing camera? 
+ const int32_t orientation = 0; + UPDATE(ANDROID_SENSOR_ORIENTATION, &orientation, 1); + + // android.shading + const uint8_t availabeMode = ANDROID_SHADING_MODE_OFF; + UPDATE(ANDROID_SHADING_AVAILABLE_MODES, &availabeMode, 1); + + // android.statistics + const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF; + UPDATE(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES, &faceDetectMode, + 1); + + const int32_t maxFaceCount = 0; + UPDATE(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, &maxFaceCount, 1); + + const uint8_t availableHotpixelMode = + ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF; + UPDATE(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES, + &availableHotpixelMode, 1); + + const uint8_t lensShadingMapMode = + ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF; + UPDATE(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES, + &lensShadingMapMode, 1); + + // android.sync + const int32_t maxLatency = ANDROID_SYNC_MAX_LATENCY_UNKNOWN; + UPDATE(ANDROID_SYNC_MAX_LATENCY, &maxLatency, 1); + + /* Other sensor/RAW realted keys: + * android.sensor.info.colorFilterArrangement -> no need if we don't do RAW + * android.sensor.info.physicalSize -> not available + * android.sensor.info.whiteLevel -> not available/not needed + * android.sensor.info.lensShadingApplied -> not needed + * android.sensor.info.preCorrectionActiveArraySize -> not available/not needed + * android.sensor.blackLevelPattern -> not available/not needed + */ + + const int32_t availableRequestKeys[] = { + ANDROID_COLOR_CORRECTION_ABERRATION_MODE, + ANDROID_CONTROL_AE_ANTIBANDING_MODE, + ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, + ANDROID_CONTROL_AE_LOCK, + ANDROID_CONTROL_AE_MODE, + ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, + ANDROID_CONTROL_AE_TARGET_FPS_RANGE, + ANDROID_CONTROL_AF_MODE, + ANDROID_CONTROL_AF_TRIGGER, + ANDROID_CONTROL_AWB_LOCK, + ANDROID_CONTROL_AWB_MODE, + ANDROID_CONTROL_CAPTURE_INTENT, + ANDROID_CONTROL_EFFECT_MODE, + ANDROID_CONTROL_MODE, + ANDROID_CONTROL_SCENE_MODE, + 
ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, + ANDROID_FLASH_MODE, + ANDROID_JPEG_ORIENTATION, + ANDROID_JPEG_QUALITY, + ANDROID_JPEG_THUMBNAIL_QUALITY, + ANDROID_JPEG_THUMBNAIL_SIZE, + ANDROID_LENS_OPTICAL_STABILIZATION_MODE, + ANDROID_NOISE_REDUCTION_MODE, + ANDROID_SCALER_CROP_REGION, + ANDROID_SENSOR_TEST_PATTERN_MODE, + ANDROID_STATISTICS_FACE_DETECT_MODE, + ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE}; + UPDATE(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, availableRequestKeys, + ARRAY_SIZE(availableRequestKeys)); + + const int32_t availableResultKeys[] = { + ANDROID_COLOR_CORRECTION_ABERRATION_MODE, + ANDROID_CONTROL_AE_ANTIBANDING_MODE, + ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, + ANDROID_CONTROL_AE_LOCK, + ANDROID_CONTROL_AE_MODE, + ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, + ANDROID_CONTROL_AE_STATE, + ANDROID_CONTROL_AE_TARGET_FPS_RANGE, + ANDROID_CONTROL_AF_MODE, + ANDROID_CONTROL_AF_STATE, + ANDROID_CONTROL_AF_TRIGGER, + ANDROID_CONTROL_AWB_LOCK, + ANDROID_CONTROL_AWB_MODE, + ANDROID_CONTROL_AWB_STATE, + ANDROID_CONTROL_CAPTURE_INTENT, + ANDROID_CONTROL_EFFECT_MODE, + ANDROID_CONTROL_MODE, + ANDROID_CONTROL_SCENE_MODE, + ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, + ANDROID_FLASH_MODE, + ANDROID_FLASH_STATE, + ANDROID_JPEG_ORIENTATION, + ANDROID_JPEG_QUALITY, + ANDROID_JPEG_THUMBNAIL_QUALITY, + ANDROID_JPEG_THUMBNAIL_SIZE, + ANDROID_LENS_OPTICAL_STABILIZATION_MODE, + ANDROID_NOISE_REDUCTION_MODE, + ANDROID_REQUEST_PIPELINE_DEPTH, + ANDROID_SCALER_CROP_REGION, + ANDROID_SENSOR_TIMESTAMP, + ANDROID_STATISTICS_FACE_DETECT_MODE, + ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, + ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, + ANDROID_STATISTICS_SCENE_FLICKER}; + UPDATE(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS, availableResultKeys, + ARRAY_SIZE(availableResultKeys)); + + const int32_t availableCharacteristicsKeys[] = { + ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES, + ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES, + ANDROID_CONTROL_AE_AVAILABLE_MODES, + 
ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES, + ANDROID_CONTROL_AE_COMPENSATION_RANGE, + ANDROID_CONTROL_AE_COMPENSATION_STEP, + ANDROID_CONTROL_AE_LOCK_AVAILABLE, + ANDROID_CONTROL_AF_AVAILABLE_MODES, + ANDROID_CONTROL_AVAILABLE_EFFECTS, + ANDROID_CONTROL_AVAILABLE_MODES, + ANDROID_CONTROL_AVAILABLE_SCENE_MODES, + ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES, + ANDROID_CONTROL_AWB_AVAILABLE_MODES, + ANDROID_CONTROL_AWB_LOCK_AVAILABLE, + ANDROID_CONTROL_MAX_REGIONS, + ANDROID_FLASH_INFO_AVAILABLE, + ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL, + ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES, + ANDROID_LENS_FACING, + ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION, + ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION, + ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES, + ANDROID_REQUEST_AVAILABLE_CAPABILITIES, + ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS, + ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, + ANDROID_REQUEST_PARTIAL_RESULT_COUNT, + ANDROID_REQUEST_PIPELINE_MAX_DEPTH, + ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM, + ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, + ANDROID_SCALER_CROPPING_TYPE, + ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE, + ANDROID_SENSOR_INFO_MAX_FRAME_DURATION, + ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE, + ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE, + ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE, + ANDROID_SENSOR_ORIENTATION, + ANDROID_SHADING_AVAILABLE_MODES, + ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES, + ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES, + ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES, + ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, + ANDROID_SYNC_MAX_LATENCY}; + UPDATE(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, + availableCharacteristicsKeys, + ARRAY_SIZE(availableCharacteristicsKeys)); + + return OK; +} + +status_t ExternalCameraDevice::initCameraControlsCharsKeys(int, + ::android::hardware::camera::common::V1_0::helper::CameraMetadata* metadata) { + /** + * android.sensor.info.sensitivityRange -> 
V4L2_CID_ISO_SENSITIVITY + * android.sensor.info.exposureTimeRange -> V4L2_CID_EXPOSURE_ABSOLUTE + * android.sensor.info.maxFrameDuration -> TBD + * android.lens.info.minimumFocusDistance -> V4L2_CID_FOCUS_ABSOLUTE + * android.lens.info.hyperfocalDistance + * android.lens.info.availableFocalLengths -> not available? + */ + + // android.control + // No AE compensation support for now. + // TODO: V4L2_CID_EXPOSURE_BIAS + const int32_t controlAeCompensationRange[] = {0, 0}; + UPDATE(ANDROID_CONTROL_AE_COMPENSATION_RANGE, controlAeCompensationRange, + ARRAY_SIZE(controlAeCompensationRange)); + const camera_metadata_rational_t controlAeCompensationStep[] = {{0, 1}}; + UPDATE(ANDROID_CONTROL_AE_COMPENSATION_STEP, controlAeCompensationStep, + ARRAY_SIZE(controlAeCompensationStep)); + + + // TODO: Check V4L2_CID_AUTO_FOCUS_*. + const uint8_t afAvailableModes[] = {ANDROID_CONTROL_AF_MODE_AUTO, + ANDROID_CONTROL_AF_MODE_OFF}; + UPDATE(ANDROID_CONTROL_AF_AVAILABLE_MODES, afAvailableModes, + ARRAY_SIZE(afAvailableModes)); + + // TODO: V4L2_CID_SCENE_MODE + const uint8_t availableSceneMode = ANDROID_CONTROL_SCENE_MODE_DISABLED; + UPDATE(ANDROID_CONTROL_AVAILABLE_SCENE_MODES, &availableSceneMode, 1); + + // TODO: V4L2_CID_3A_LOCK + const uint8_t aeLockAvailable = ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE; + UPDATE(ANDROID_CONTROL_AE_LOCK_AVAILABLE, &aeLockAvailable, 1); + const uint8_t awbLockAvailable = ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE; + UPDATE(ANDROID_CONTROL_AWB_LOCK_AVAILABLE, &awbLockAvailable, 1); + + // TODO: V4L2_CID_ZOOM_* + const float scalerAvailableMaxDigitalZoom[] = {1}; + UPDATE(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM, + scalerAvailableMaxDigitalZoom, + ARRAY_SIZE(scalerAvailableMaxDigitalZoom)); + + return OK; +} + +status_t ExternalCameraDevice::initOutputCharsKeys(int fd, + ::android::hardware::camera::common::V1_0::helper::CameraMetadata* metadata) { + initSupportedFormatsLocked(fd); + if (mSupportedFormats.empty()) { + ALOGE("%s: Init supported format 
list failed", __FUNCTION__); + return UNKNOWN_ERROR; + } + + std::vector streamConfigurations; + std::vector minFrameDurations; + std::vector stallDurations; + int64_t maxFrameDuration = 0; + int32_t maxFps = std::numeric_limits::min(); + int32_t minFps = std::numeric_limits::max(); + std::set framerates; + + std::array halFormats{{ + HAL_PIXEL_FORMAT_BLOB, + HAL_PIXEL_FORMAT_YCbCr_420_888, + HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED}}; + + for (const auto& supportedFormat : mSupportedFormats) { + for (const auto& format : halFormats) { + streamConfigurations.push_back(format); + streamConfigurations.push_back(supportedFormat.width); + streamConfigurations.push_back(supportedFormat.height); + streamConfigurations.push_back( + ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT); + } + + int64_t min_frame_duration = std::numeric_limits::max(); + for (const auto& frameRate : supportedFormat.frameRates) { + int64_t frame_duration = 1000000000LL / frameRate; + if (frame_duration < min_frame_duration) { + min_frame_duration = frame_duration; + } + if (frame_duration > maxFrameDuration) { + maxFrameDuration = frame_duration; + } + int32_t frameRateInt = static_cast(frameRate); + if (minFps > frameRateInt) { + minFps = frameRateInt; + } + if (maxFps < frameRateInt) { + maxFps = frameRateInt; + } + framerates.insert(frameRateInt); + } + + for (const auto& format : halFormats) { + minFrameDurations.push_back(format); + minFrameDurations.push_back(supportedFormat.width); + minFrameDurations.push_back(supportedFormat.height); + minFrameDurations.push_back(min_frame_duration); + } + + // The stall duration is 0 for non-jpeg formats. For JPEG format, stall + // duration can be 0 if JPEG is small. Here we choose 1 sec for JPEG. + // TODO: b/72261675. Maybe set this dynamically + for (const auto& format : halFormats) { + const int64_t NS_TO_SECOND = 1000000000; + int64_t stall_duration = + (format == HAL_PIXEL_FORMAT_BLOB) ? 
NS_TO_SECOND : 0; + stallDurations.push_back(format); + stallDurations.push_back(supportedFormat.width); + stallDurations.push_back(supportedFormat.height); + stallDurations.push_back(stall_duration); + } + } + + // The document in aeAvailableTargetFpsRanges section says the minFps should + // not be larger than 15. + // We cannot support fixed 30fps but Android requires (min, max) and + // (max, max) ranges. + // TODO: populate more, right now this does not support 30,30 if the device + // has higher than 30 fps modes + std::vector fpsRanges; + // Variable range + fpsRanges.push_back(minFps); + fpsRanges.push_back(maxFps); + // Fixed ranges + for (const auto& framerate : framerates) { + fpsRanges.push_back(framerate); + fpsRanges.push_back(framerate); + } + UPDATE(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES, fpsRanges.data(), + fpsRanges.size()); + + UPDATE(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, + streamConfigurations.data(), streamConfigurations.size()); + + UPDATE(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, + minFrameDurations.data(), minFrameDurations.size()); + + UPDATE(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS, stallDurations.data(), + stallDurations.size()); + + UPDATE(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION, &maxFrameDuration, 1); + + SupportedV4L2Format maximumFormat {.width = 0, .height = 0}; + for (const auto& supportedFormat : mSupportedFormats) { + if (supportedFormat.width >= maximumFormat.width && + supportedFormat.height >= maximumFormat.height) { + maximumFormat = supportedFormat; + } + } + int32_t activeArraySize[] = {0, 0, + static_cast(maximumFormat.width), + static_cast(maximumFormat.height)}; + UPDATE(ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE, + activeArraySize, ARRAY_SIZE(activeArraySize)); + UPDATE(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE, activeArraySize, + ARRAY_SIZE(activeArraySize)); + + int32_t pixelArraySize[] = {static_cast(maximumFormat.width), + static_cast(maximumFormat.height)}; + 
UPDATE(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE, pixelArraySize, + ARRAY_SIZE(pixelArraySize)); + return OK; +} + +#undef ARRAY_SIZE +#undef UPDATE + +void ExternalCameraDevice::getFrameRateList( + int fd, SupportedV4L2Format* format) { + format->frameRates.clear(); + + v4l2_frmivalenum frameInterval { + .pixel_format = format->fourcc, + .width = format->width, + .height = format->height, + .index = 0 + }; + + for (frameInterval.index = 0; + TEMP_FAILURE_RETRY(ioctl(fd, VIDIOC_ENUM_FRAMEINTERVALS, &frameInterval)) == 0; + ++frameInterval.index) { + if (frameInterval.type == V4L2_FRMIVAL_TYPE_DISCRETE) { + if (frameInterval.discrete.numerator != 0) { + float framerate = frameInterval.discrete.denominator / + static_cast(frameInterval.discrete.numerator); + ALOGV("index:%d, format:%c%c%c%c, w %d, h %d, framerate %f", + frameInterval.index, + frameInterval.pixel_format & 0xFF, + (frameInterval.pixel_format >> 8) & 0xFF, + (frameInterval.pixel_format >> 16) & 0xFF, + (frameInterval.pixel_format >> 24) & 0xFF, + frameInterval.width, frameInterval.height, framerate); + format->frameRates.push_back(framerate); + } + } + } + + if (format->frameRates.empty()) { + ALOGE("%s: failed to get supported frame rates for format:%c%c%c%c w %d h %d", + __FUNCTION__, + frameInterval.pixel_format & 0xFF, + (frameInterval.pixel_format >> 8) & 0xFF, + (frameInterval.pixel_format >> 16) & 0xFF, + (frameInterval.pixel_format >> 24) & 0xFF, + frameInterval.width, frameInterval.height); + } +} + +void ExternalCameraDevice::initSupportedFormatsLocked(int fd) { + struct v4l2_fmtdesc fmtdesc { + .index = 0, + .type = V4L2_BUF_TYPE_VIDEO_CAPTURE}; + int ret = 0; + while (ret == 0) { + ret = TEMP_FAILURE_RETRY(ioctl(fd, VIDIOC_ENUM_FMT, &fmtdesc)); + ALOGD("index:%d,ret:%d, format:%c%c%c%c", fmtdesc.index, ret, + fmtdesc.pixelformat & 0xFF, + (fmtdesc.pixelformat >> 8) & 0xFF, + (fmtdesc.pixelformat >> 16) & 0xFF, + (fmtdesc.pixelformat >> 24) & 0xFF); + if (ret == 0 && !(fmtdesc.flags & 
V4L2_FMT_FLAG_EMULATED)) { + auto it = std::find ( + kSupportedFourCCs.begin(), kSupportedFourCCs.end(), fmtdesc.pixelformat); + if (it != kSupportedFourCCs.end()) { + // Found supported format + v4l2_frmsizeenum frameSize { + .index = 0, + .pixel_format = fmtdesc.pixelformat}; + for (; TEMP_FAILURE_RETRY(ioctl(fd, VIDIOC_ENUM_FRAMESIZES, &frameSize)) == 0; + ++frameSize.index) { + if (frameSize.type == V4L2_FRMSIZE_TYPE_DISCRETE) { + ALOGD("index:%d, format:%c%c%c%c, w %d, h %d", frameSize.index, + fmtdesc.pixelformat & 0xFF, + (fmtdesc.pixelformat >> 8) & 0xFF, + (fmtdesc.pixelformat >> 16) & 0xFF, + (fmtdesc.pixelformat >> 24) & 0xFF, + frameSize.discrete.width, frameSize.discrete.height); + // Disregard h > w formats so all aspect ratio (h/w) <= 1.0 + // This will simplify the crop/scaling logic down the road + if (frameSize.discrete.height > frameSize.discrete.width) { + continue; + } + SupportedV4L2Format format { + .width = frameSize.discrete.width, + .height = frameSize.discrete.height, + .fourcc = fmtdesc.pixelformat + }; + getFrameRateList(fd, &format); + if (!format.frameRates.empty()) { + mSupportedFormats.push_back(format); + } + } + } + } + } + fmtdesc.index++; + } +} + +} // namespace implementation +} // namespace V3_4 +} // namespace device +} // namespace camera +} // namespace hardware +} // namespace android + diff --git a/camera/device/3.4/default/ExternalCameraDeviceSession.cpp b/camera/device/3.4/default/ExternalCameraDeviceSession.cpp new file mode 100644 index 0000000000..9589782c8d --- /dev/null +++ b/camera/device/3.4/default/ExternalCameraDeviceSession.cpp @@ -0,0 +1,1990 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#define LOG_TAG "ExtCamDevSsn@3.4" +//#define LOG_NDEBUG 0 +#include + +#include +#include "ExternalCameraDeviceSession.h" + +#include "android-base/macros.h" +#include "algorithm" +#include +#include +#include +#include + +#define HAVE_JPEG // required for libyuv.h to export MJPEG decode APIs +#include + +namespace android { +namespace hardware { +namespace camera { +namespace device { +namespace V3_4 { +namespace implementation { + +// Size of request/result metadata fast message queue. Change to 0 to always use hwbinder buffer. +static constexpr size_t kMetadataMsgQueueSize = 1 << 20 /* 1MB */; +const int ExternalCameraDeviceSession::kMaxProcessedStream; +const int ExternalCameraDeviceSession::kMaxStallStream; +const Size kMaxVideoSize = {1920, 1088}; // Maybe this should be programmable +const int kNumVideoBuffers = 4; // number of v4l2 buffers when streaming <= kMaxVideoSize +const int kNumStillBuffers = 2; // number of v4l2 buffers when streaming > kMaxVideoSize +const int kBadFramesAfterStreamOn = 1; // drop x frames after streamOn to get rid of some initial + // bad frames. 
TODO: develop a better bad frame detection + // method + +// Aspect ratio is defined as width/height here and ExternalCameraDevice +// will guarantee all supported sizes has width >= height (so aspect ratio >= 1.0) +#define ASPECT_RATIO(sz) (static_cast((sz).width) / (sz).height) +const float kMaxAspectRatio = std::numeric_limits::max(); +const float kMinAspectRatio = 1.f; + +HandleImporter ExternalCameraDeviceSession::sHandleImporter; + +bool isAspectRatioClose(float ar1, float ar2) { + const float kAspectRatioMatchThres = 0.01f; // This threshold is good enough to distinguish + // 4:3/16:9/20:9 + return (std::abs(ar1 - ar2) < kAspectRatioMatchThres); +} + +ExternalCameraDeviceSession::ExternalCameraDeviceSession( + const sp& callback, + const std::vector& supportedFormats, + const common::V1_0::helper::CameraMetadata& chars, + unique_fd v4l2Fd) : + mCallback(callback), + mCameraCharacteristics(chars), + mV4l2Fd(std::move(v4l2Fd)), + mSupportedFormats(sortFormats(supportedFormats)), + mCroppingType(initCroppingType(mSupportedFormats)), + mOutputThread(new OutputThread(this, mCroppingType)) { + mInitFail = initialize(); +} + +std::vector ExternalCameraDeviceSession::sortFormats( + const std::vector& inFmts) { + std::vector fmts = inFmts; + std::sort(fmts.begin(), fmts.end(), + [](const SupportedV4L2Format& a, const SupportedV4L2Format& b) -> bool { + if (a.width == b.width) { + return a.height < b.height; + } + return a.width < b.width; + }); + return fmts; +} + +CroppingType ExternalCameraDeviceSession::initCroppingType( + const std::vector& sortedFmts) { + const auto& maxSize = sortedFmts[sortedFmts.size() - 1]; + float maxSizeAr = ASPECT_RATIO(maxSize); + float minAr = kMinAspectRatio; + float maxAr = kMaxAspectRatio; + for (const auto& fmt : sortedFmts) { + float ar = ASPECT_RATIO(fmt); + if (ar < minAr) { + minAr = ar; + } + if (ar > maxAr) { + maxAr = ar; + } + } + + CroppingType ct = VERTICAL; + if (isAspectRatioClose(maxSizeAr, maxAr)) { + // Ex: 16:9 
sensor, cropping horizontally to get to 4:3 + ct = HORIZONTAL; + } else if (isAspectRatioClose(maxSizeAr, minAr)) { + // Ex: 4:3 sensor, cropping vertically to get to 16:9 + ct = VERTICAL; + } else { + ALOGI("%s: camera maxSizeAr %f is not close to minAr %f or maxAr %f", + __FUNCTION__, maxSizeAr, minAr, maxAr); + if ((maxSizeAr - minAr) < (maxAr - maxSizeAr)) { + ct = VERTICAL; + } else { + ct = HORIZONTAL; + } + } + ALOGI("%s: camera croppingType is %d", __FUNCTION__, ct); + return ct; +} + + +bool ExternalCameraDeviceSession::initialize() { + if (mV4l2Fd.get() < 0) { + ALOGE("%s: invalid v4l2 device fd %d!", __FUNCTION__, mV4l2Fd.get()); + return true; + } + + status_t status = initDefaultRequests(); + if (status != OK) { + ALOGE("%s: init default requests failed!", __FUNCTION__); + return true; + } + + mRequestMetadataQueue = std::make_unique( + kMetadataMsgQueueSize, false /* non blocking */); + if (!mRequestMetadataQueue->isValid()) { + ALOGE("%s: invalid request fmq", __FUNCTION__); + return true; + } + mResultMetadataQueue = std::make_shared( + kMetadataMsgQueueSize, false /* non blocking */); + if (!mResultMetadataQueue->isValid()) { + ALOGE("%s: invalid result fmq", __FUNCTION__); + return true; + } + + // TODO: check is PRIORITY_DISPLAY enough? 
+ mOutputThread->run("ExtCamOut", PRIORITY_DISPLAY); + return false; +} + +Status ExternalCameraDeviceSession::initStatus() const { + Mutex::Autolock _l(mLock); + Status status = Status::OK; + if (mInitFail || mClosed) { + ALOGI("%s: sesssion initFailed %d closed %d", __FUNCTION__, mInitFail, mClosed); + status = Status::INTERNAL_ERROR; + } + return status; +} + +ExternalCameraDeviceSession::~ExternalCameraDeviceSession() { + if (!isClosed()) { + ALOGE("ExternalCameraDeviceSession deleted before close!"); + close(); + } +} + +void ExternalCameraDeviceSession::dumpState(const native_handle_t*) { + // TODO: b/72261676 dump more runtime information +} + +Return ExternalCameraDeviceSession::constructDefaultRequestSettings( + RequestTemplate type, + ICameraDeviceSession::constructDefaultRequestSettings_cb _hidl_cb) { + CameraMetadata emptyMd; + Status status = initStatus(); + if (status != Status::OK) { + _hidl_cb(status, emptyMd); + return Void(); + } + + switch (type) { + case RequestTemplate::PREVIEW: + case RequestTemplate::STILL_CAPTURE: + case RequestTemplate::VIDEO_RECORD: + case RequestTemplate::VIDEO_SNAPSHOT: + _hidl_cb(Status::OK, mDefaultRequests[static_cast(type)]); + break; + case RequestTemplate::MANUAL: + case RequestTemplate::ZERO_SHUTTER_LAG: + // Don't support MANUAL or ZSL template + _hidl_cb(Status::ILLEGAL_ARGUMENT, emptyMd); + break; + default: + ALOGE("%s: unknown request template type %d", __FUNCTION__, static_cast(type)); + _hidl_cb(Status::ILLEGAL_ARGUMENT, emptyMd); + break; + } + return Void(); +} + +Return ExternalCameraDeviceSession::configureStreams( + const V3_2::StreamConfiguration& streams, + ICameraDeviceSession::configureStreams_cb _hidl_cb) { + V3_2::HalStreamConfiguration outStreams; + V3_3::HalStreamConfiguration outStreams_v33; + Mutex::Autolock _il(mInterfaceLock); + + Status status = configureStreams(streams, &outStreams_v33); + size_t size = outStreams_v33.streams.size(); + outStreams.streams.resize(size); + for (size_t i = 0; 
i < size; i++) { + outStreams.streams[i] = outStreams_v33.streams[i].v3_2; + } + _hidl_cb(status, outStreams); + return Void(); +} + +Return ExternalCameraDeviceSession::configureStreams_3_3( + const V3_2::StreamConfiguration& streams, + ICameraDeviceSession::configureStreams_3_3_cb _hidl_cb) { + V3_3::HalStreamConfiguration outStreams; + Mutex::Autolock _il(mInterfaceLock); + + Status status = configureStreams(streams, &outStreams); + _hidl_cb(status, outStreams); + return Void(); +} + +Return ExternalCameraDeviceSession::configureStreams_3_4( + const V3_4::StreamConfiguration& requestedConfiguration, + ICameraDeviceSession::configureStreams_3_4_cb _hidl_cb) { + V3_2::StreamConfiguration config_v32; + V3_3::HalStreamConfiguration outStreams_v33; + Mutex::Autolock _il(mInterfaceLock); + + config_v32.operationMode = requestedConfiguration.operationMode; + config_v32.streams.resize(requestedConfiguration.streams.size()); + for (size_t i = 0; i < config_v32.streams.size(); i++) { + config_v32.streams[i] = requestedConfiguration.streams[i].v3_2; + } + + // Ignore requestedConfiguration.sessionParams. 
External camera does not support it + Status status = configureStreams(config_v32, &outStreams_v33); + + V3_4::HalStreamConfiguration outStreams; + outStreams.streams.resize(outStreams_v33.streams.size()); + for (size_t i = 0; i < outStreams.streams.size(); i++) { + outStreams.streams[i].v3_3 = outStreams_v33.streams[i]; + } + _hidl_cb(status, outStreams); + return Void(); +} + +Return ExternalCameraDeviceSession::getCaptureRequestMetadataQueue( + ICameraDeviceSession::getCaptureRequestMetadataQueue_cb _hidl_cb) { + Mutex::Autolock _il(mInterfaceLock); + _hidl_cb(*mRequestMetadataQueue->getDesc()); + return Void(); +} + +Return ExternalCameraDeviceSession::getCaptureResultMetadataQueue( + ICameraDeviceSession::getCaptureResultMetadataQueue_cb _hidl_cb) { + Mutex::Autolock _il(mInterfaceLock); + _hidl_cb(*mResultMetadataQueue->getDesc()); + return Void(); +} + +Return ExternalCameraDeviceSession::processCaptureRequest( + const hidl_vec& requests, + const hidl_vec& cachesToRemove, + ICameraDeviceSession::processCaptureRequest_cb _hidl_cb) { + Mutex::Autolock _il(mInterfaceLock); + updateBufferCaches(cachesToRemove); + + uint32_t numRequestProcessed = 0; + Status s = Status::OK; + for (size_t i = 0; i < requests.size(); i++, numRequestProcessed++) { + s = processOneCaptureRequest(requests[i]); + if (s != Status::OK) { + break; + } + } + + _hidl_cb(s, numRequestProcessed); + return Void(); +} + +Return ExternalCameraDeviceSession::processCaptureRequest_3_4( + const hidl_vec& requests, + const hidl_vec& cachesToRemove, + ICameraDeviceSession::processCaptureRequest_3_4_cb _hidl_cb) { + Mutex::Autolock _il(mInterfaceLock); + updateBufferCaches(cachesToRemove); + + uint32_t numRequestProcessed = 0; + Status s = Status::OK; + for (size_t i = 0; i < requests.size(); i++, numRequestProcessed++) { + s = processOneCaptureRequest(requests[i].v3_2); + if (s != Status::OK) { + break; + } + } + + _hidl_cb(s, numRequestProcessed); + return Void(); +} + +Return 
ExternalCameraDeviceSession::flush() { + return Status::OK; +} + +Return ExternalCameraDeviceSession::close() { + Mutex::Autolock _il(mInterfaceLock); + Mutex::Autolock _l(mLock); + if (!mClosed) { + // TODO: b/72261676 Cleanup inflight buffers/V4L2 buffer queue + ALOGV("%s: closing V4L2 camera FD %d", __FUNCTION__, mV4l2Fd.get()); + mV4l2Fd.reset(); + mOutputThread->requestExit(); // TODO: join? + + // free all imported buffers + for(auto& pair : mCirculatingBuffers) { + CirculatingBuffers& buffers = pair.second; + for (auto& p2 : buffers) { + sHandleImporter.freeBuffer(p2.second); + } + } + + mClosed = true; + } + return Void(); +} + +Status ExternalCameraDeviceSession::importRequest( + const CaptureRequest& request, + hidl_vec& allBufPtrs, + hidl_vec& allFences) { + size_t numOutputBufs = request.outputBuffers.size(); + size_t numBufs = numOutputBufs; + // Validate all I/O buffers + hidl_vec allBufs; + hidl_vec allBufIds; + allBufs.resize(numBufs); + allBufIds.resize(numBufs); + allBufPtrs.resize(numBufs); + allFences.resize(numBufs); + std::vector streamIds(numBufs); + + for (size_t i = 0; i < numOutputBufs; i++) { + allBufs[i] = request.outputBuffers[i].buffer.getNativeHandle(); + allBufIds[i] = request.outputBuffers[i].bufferId; + allBufPtrs[i] = &allBufs[i]; + streamIds[i] = request.outputBuffers[i].streamId; + } + + for (size_t i = 0; i < numBufs; i++) { + buffer_handle_t buf = allBufs[i]; + uint64_t bufId = allBufIds[i]; + CirculatingBuffers& cbs = mCirculatingBuffers[streamIds[i]]; + if (cbs.count(bufId) == 0) { + if (buf == nullptr) { + ALOGE("%s: bufferId %" PRIu64 " has null buffer handle!", __FUNCTION__, bufId); + return Status::ILLEGAL_ARGUMENT; + } + // Register a newly seen buffer + buffer_handle_t importedBuf = buf; + sHandleImporter.importBuffer(importedBuf); + if (importedBuf == nullptr) { + ALOGE("%s: output buffer %zu is invalid!", __FUNCTION__, i); + return Status::INTERNAL_ERROR; + } else { + cbs[bufId] = importedBuf; + } + } + allBufPtrs[i] 
= &cbs[bufId]; + } + + // All buffers are imported. Now validate output buffer acquire fences + for (size_t i = 0; i < numOutputBufs; i++) { + if (!sHandleImporter.importFence( + request.outputBuffers[i].acquireFence, allFences[i])) { + ALOGE("%s: output buffer %zu acquire fence is invalid", __FUNCTION__, i); + cleanupInflightFences(allFences, i); + return Status::INTERNAL_ERROR; + } + } + return Status::OK; +} + +void ExternalCameraDeviceSession::cleanupInflightFences( + hidl_vec& allFences, size_t numFences) { + for (size_t j = 0; j < numFences; j++) { + sHandleImporter.closeFence(allFences[j]); + } +} + +Status ExternalCameraDeviceSession::processOneCaptureRequest(const CaptureRequest& request) { + Status status = initStatus(); + if (status != Status::OK) { + return status; + } + + if (request.inputBuffer.streamId != -1) { + ALOGE("%s: external camera does not support reprocessing!", __FUNCTION__); + return Status::ILLEGAL_ARGUMENT; + } + + Mutex::Autolock _l(mLock); + if (!mV4l2Streaming) { + ALOGE("%s: cannot process request in streamOff state!", __FUNCTION__); + return Status::INTERNAL_ERROR; + } + + const camera_metadata_t *rawSettings = nullptr; + bool converted = true; + CameraMetadata settingsFmq; // settings from FMQ + if (request.fmqSettingsSize > 0) { + // non-blocking read; client must write metadata before calling + // processOneCaptureRequest + settingsFmq.resize(request.fmqSettingsSize); + bool read = mRequestMetadataQueue->read(settingsFmq.data(), request.fmqSettingsSize); + if (read) { + converted = V3_2::implementation::convertFromHidl(settingsFmq, &rawSettings); + } else { + ALOGE("%s: capture request settings metadata couldn't be read from fmq!", __FUNCTION__); + converted = false; + } + } else { + converted = V3_2::implementation::convertFromHidl(request.settings, &rawSettings); + } + + if (converted && rawSettings != nullptr) { + mLatestReqSetting = rawSettings; + } + + if (!converted) { + ALOGE("%s: capture request settings metadata is 
corrupt!", __FUNCTION__); + return Status::ILLEGAL_ARGUMENT; + } + + if (mFirstRequest && rawSettings == nullptr) { + ALOGE("%s: capture request settings must not be null for first request!", + __FUNCTION__); + return Status::ILLEGAL_ARGUMENT; + } + + hidl_vec allBufPtrs; + hidl_vec allFences; + size_t numOutputBufs = request.outputBuffers.size(); + + if (numOutputBufs == 0) { + ALOGE("%s: capture request must have at least one output buffer!", __FUNCTION__); + return Status::ILLEGAL_ARGUMENT; + } + + status = importRequest(request, allBufPtrs, allFences); + if (status != Status::OK) { + return status; + } + + // TODO: program fps range per capture request here + // or limit the set of availableFpsRange + + sp frameIn = dequeueV4l2FrameLocked(); + if ( frameIn == nullptr) { + ALOGE("%s: V4L2 deque frame failed!", __FUNCTION__); + return Status::INTERNAL_ERROR; + } + // TODO: This can probably be replaced by use v4lbuffer timestamp + // if the device supports it + nsecs_t shutterTs = systemTime(SYSTEM_TIME_MONOTONIC); + + + // TODO: reduce object copy in this path + HalRequest halReq = { + .frameNumber = request.frameNumber, + .setting = mLatestReqSetting, + .frameIn = frameIn, + .shutterTs = shutterTs}; + halReq.buffers.resize(numOutputBufs); + for (size_t i = 0; i < numOutputBufs; i++) { + HalStreamBuffer& halBuf = halReq.buffers[i]; + int streamId = halBuf.streamId = request.outputBuffers[i].streamId; + halBuf.bufferId = request.outputBuffers[i].bufferId; + const Stream& stream = mStreamMap[streamId]; + halBuf.width = stream.width; + halBuf.height = stream.height; + halBuf.format = stream.format; + halBuf.usage = stream.usage; + halBuf.bufPtr = allBufPtrs[i]; + halBuf.acquireFence = allFences[i]; + halBuf.fenceTimeout = false; + } + mInflightFrames.insert(halReq.frameNumber); + // Send request to OutputThread for the rest of processing + mOutputThread->submitRequest(halReq); + mFirstRequest = false; + return Status::OK; +} + +void 
ExternalCameraDeviceSession::notifyShutter(uint32_t frameNumber, nsecs_t shutterTs) { + NotifyMsg msg; + msg.type = MsgType::SHUTTER; + msg.msg.shutter.frameNumber = frameNumber; + msg.msg.shutter.timestamp = shutterTs; + mCallback->notify({msg}); +} + +void ExternalCameraDeviceSession::notifyError( + uint32_t frameNumber, int32_t streamId, ErrorCode ec) { + NotifyMsg msg; + msg.type = MsgType::ERROR; + msg.msg.error.frameNumber = frameNumber; + msg.msg.error.errorStreamId = streamId; + msg.msg.error.errorCode = ec; + mCallback->notify({msg}); +} + +//TODO: refactor with processCaptureResult +Status ExternalCameraDeviceSession::processCaptureRequestError(HalRequest& req) { + // Return V4L2 buffer to V4L2 buffer queue + enqueueV4l2Frame(req.frameIn); + + // NotifyShutter + notifyShutter(req.frameNumber, req.shutterTs); + + notifyError(/*frameNum*/req.frameNumber, /*stream*/-1, ErrorCode::ERROR_REQUEST); + + // Fill output buffers + hidl_vec results; + results.resize(1); + CaptureResult& result = results[0]; + result.frameNumber = req.frameNumber; + result.partialResult = 1; + result.inputBuffer.streamId = -1; + result.outputBuffers.resize(req.buffers.size()); + for (size_t i = 0; i < req.buffers.size(); i++) { + result.outputBuffers[i].streamId = req.buffers[i].streamId; + result.outputBuffers[i].bufferId = req.buffers[i].bufferId; + result.outputBuffers[i].status = BufferStatus::ERROR; + if (req.buffers[i].acquireFence >= 0) { + native_handle_t* handle = native_handle_create(/*numFds*/1, /*numInts*/0); + handle->data[0] = req.buffers[i].acquireFence; + result.outputBuffers[i].releaseFence.setTo(handle, /*shouldOwn*/true); + } + } + + // update inflight records + { + Mutex::Autolock _l(mLock); + mInflightFrames.erase(req.frameNumber); + } + + // Callback into framework + invokeProcessCaptureResultCallback(results, /* tryWriteFmq */true); + freeReleaseFences(results); + return Status::OK; +} + +Status ExternalCameraDeviceSession::processCaptureResult(HalRequest& req) 
{ + // Return V4L2 buffer to V4L2 buffer queue + enqueueV4l2Frame(req.frameIn); + + // NotifyShutter + notifyShutter(req.frameNumber, req.shutterTs); + + // Fill output buffers + hidl_vec results; + results.resize(1); + CaptureResult& result = results[0]; + result.frameNumber = req.frameNumber; + result.partialResult = 1; + result.inputBuffer.streamId = -1; + result.outputBuffers.resize(req.buffers.size()); + for (size_t i = 0; i < req.buffers.size(); i++) { + result.outputBuffers[i].streamId = req.buffers[i].streamId; + result.outputBuffers[i].bufferId = req.buffers[i].bufferId; + if (req.buffers[i].fenceTimeout) { + result.outputBuffers[i].status = BufferStatus::ERROR; + native_handle_t* handle = native_handle_create(/*numFds*/1, /*numInts*/0); + handle->data[0] = req.buffers[i].acquireFence; + result.outputBuffers[i].releaseFence.setTo(handle, /*shouldOwn*/true); + notifyError(req.frameNumber, req.buffers[i].streamId, ErrorCode::ERROR_BUFFER); + } else { + result.outputBuffers[i].status = BufferStatus::OK; + // TODO: refactor + if (req.buffers[i].acquireFence > 0) { + native_handle_t* handle = native_handle_create(/*numFds*/1, /*numInts*/0); + handle->data[0] = req.buffers[i].acquireFence; + result.outputBuffers[i].releaseFence.setTo(handle, /*shouldOwn*/true); + } + } + } + + // Fill capture result metadata + fillCaptureResult(req.setting, req.shutterTs); + const camera_metadata_t *rawResult = req.setting.getAndLock(); + V3_2::implementation::convertToHidl(rawResult, &result.result); + req.setting.unlock(rawResult); + + // update inflight records + { + Mutex::Autolock _l(mLock); + mInflightFrames.erase(req.frameNumber); + } + + // Callback into framework + invokeProcessCaptureResultCallback(results, /* tryWriteFmq */true); + freeReleaseFences(results); + return Status::OK; +} + +void ExternalCameraDeviceSession::invokeProcessCaptureResultCallback( + hidl_vec &results, bool tryWriteFmq) { + if (mProcessCaptureResultLock.tryLock() != OK) { + const nsecs_t 
NS_TO_SECOND = 1000000000; + ALOGV("%s: previous call is not finished! waiting 1s...", __FUNCTION__); + if (mProcessCaptureResultLock.timedLock(/* 1s */NS_TO_SECOND) != OK) { + ALOGE("%s: cannot acquire lock in 1s, cannot proceed", + __FUNCTION__); + return; + } + } + if (tryWriteFmq && mResultMetadataQueue->availableToWrite() > 0) { + for (CaptureResult &result : results) { + if (result.result.size() > 0) { + if (mResultMetadataQueue->write(result.result.data(), result.result.size())) { + result.fmqResultSize = result.result.size(); + result.result.resize(0); + } else { + ALOGW("%s: couldn't utilize fmq, fall back to hwbinder", __FUNCTION__); + result.fmqResultSize = 0; + } + } else { + result.fmqResultSize = 0; + } + } + } + mCallback->processCaptureResult(results); + mProcessCaptureResultLock.unlock(); +} + +void ExternalCameraDeviceSession::freeReleaseFences(hidl_vec& results) { + for (auto& result : results) { + if (result.inputBuffer.releaseFence.getNativeHandle() != nullptr) { + native_handle_t* handle = const_cast( + result.inputBuffer.releaseFence.getNativeHandle()); + native_handle_close(handle); + native_handle_delete(handle); + } + for (auto& buf : result.outputBuffers) { + if (buf.releaseFence.getNativeHandle() != nullptr) { + native_handle_t* handle = const_cast( + buf.releaseFence.getNativeHandle()); + native_handle_close(handle); + native_handle_delete(handle); + } + } + } + return; +} + +ExternalCameraDeviceSession::OutputThread::OutputThread( + wp parent, + CroppingType ct) : mParent(parent), mCroppingType(ct) {} + +ExternalCameraDeviceSession::OutputThread::~OutputThread() {} + +uint32_t ExternalCameraDeviceSession::OutputThread::getFourCcFromLayout( + const YCbCrLayout& layout) { + intptr_t cb = reinterpret_cast(layout.cb); + intptr_t cr = reinterpret_cast(layout.cr); + if (std::abs(cb - cr) == 1 && layout.chromaStep == 2) { + // Interleaved format + if (layout.cb > layout.cr) { + return V4L2_PIX_FMT_NV21; + } else { + return V4L2_PIX_FMT_NV12; 
+ } + } else if (layout.chromaStep == 1) { + // Planar format + if (layout.cb > layout.cr) { + return V4L2_PIX_FMT_YVU420; // YV12 + } else { + return V4L2_PIX_FMT_YUV420; // YU12 + } + } else { + return FLEX_YUV_GENERIC; + } +} + +int ExternalCameraDeviceSession::OutputThread::getCropRect( + CroppingType ct, const Size& inSize, const Size& outSize, IMapper::Rect* out) { + if (out == nullptr) { + ALOGE("%s: out is null", __FUNCTION__); + return -1; + } + uint32_t inW = inSize.width; + uint32_t inH = inSize.height; + uint32_t outW = outSize.width; + uint32_t outH = outSize.height; + + if (ct == VERTICAL) { + uint64_t scaledOutH = static_cast(outH) * inW / outW; + if (scaledOutH > inH) { + ALOGE("%s: Output size %dx%d cannot be vertically cropped from input size %dx%d", + __FUNCTION__, outW, outH, inW, inH); + return -1; + } + scaledOutH = scaledOutH & ~0x1; // make it multiple of 2 + + out->left = 0; + out->top = ((inH - scaledOutH) / 2) & ~0x1; + out->width = inW; + out->height = static_cast(scaledOutH); + ALOGV("%s: crop %dx%d to %dx%d: top %d, scaledH %d", + __FUNCTION__, inW, inH, outW, outH, out->top, static_cast(scaledOutH)); + } else { + uint64_t scaledOutW = static_cast(outW) * inH / outH; + if (scaledOutW > inW) { + ALOGE("%s: Output size %dx%d cannot be horizontally cropped from input size %dx%d", + __FUNCTION__, outW, outH, inW, inH); + return -1; + } + scaledOutW = scaledOutW & ~0x1; // make it multiple of 2 + + out->left = ((inW - scaledOutW) / 2) & ~0x1; + out->top = 0; + out->width = static_cast(scaledOutW); + out->height = inH; + ALOGV("%s: crop %dx%d to %dx%d: top %d, scaledW %d", + __FUNCTION__, inW, inH, outW, outH, out->top, static_cast(scaledOutW)); + } + + return 0; +} + +int ExternalCameraDeviceSession::OutputThread::cropAndScaleLocked( + sp& in, const HalStreamBuffer& halBuf, YCbCrLayout* out) { + Size inSz = {in->mWidth, in->mHeight}; + Size outSz = {halBuf.width, halBuf.height}; + int ret; + if (inSz == outSz) { + ret = in->getLayout(out); 
+ if (ret != 0) { + ALOGE("%s: failed to get input image layout", __FUNCTION__); + return ret; + } + return ret; + } + + // Cropping to output aspect ratio + IMapper::Rect inputCrop; + ret = getCropRect(mCroppingType, inSz, outSz, &inputCrop); + if (ret != 0) { + ALOGE("%s: failed to compute crop rect for output size %dx%d", + __FUNCTION__, outSz.width, outSz.height); + return ret; + } + + YCbCrLayout croppedLayout; + ret = in->getCroppedLayout(inputCrop, &croppedLayout); + if (ret != 0) { + ALOGE("%s: failed to crop input image %dx%d to output size %dx%d", + __FUNCTION__, inSz.width, inSz.height, outSz.width, outSz.height); + return ret; + } + + if ((mCroppingType == VERTICAL && inSz.width == outSz.width) || + (mCroppingType == HORIZONTAL && inSz.height == outSz.height)) { + // No scale is needed + *out = croppedLayout; + return 0; + } + + auto it = mScaledYu12Frames.find(outSz); + sp scaledYu12Buf; + if (it != mScaledYu12Frames.end()) { + scaledYu12Buf = it->second; + } else { + it = mIntermediateBuffers.find(outSz); + if (it == mIntermediateBuffers.end()) { + ALOGE("%s: failed to find intermediate buffer size %dx%d", + __FUNCTION__, outSz.width, outSz.height); + return -1; + } + scaledYu12Buf = it->second; + } + // Scale + YCbCrLayout outLayout; + ret = scaledYu12Buf->getLayout(&outLayout); + if (ret != 0) { + ALOGE("%s: failed to get output buffer layout", __FUNCTION__); + return ret; + } + + ret = libyuv::I420Scale( + static_cast(croppedLayout.y), + croppedLayout.yStride, + static_cast(croppedLayout.cb), + croppedLayout.cStride, + static_cast(croppedLayout.cr), + croppedLayout.cStride, + inputCrop.width, + inputCrop.height, + static_cast(outLayout.y), + outLayout.yStride, + static_cast(outLayout.cb), + outLayout.cStride, + static_cast(outLayout.cr), + outLayout.cStride, + outSz.width, + outSz.height, + // TODO: b/72261744 see if we can use better filter without losing too much perf + libyuv::FilterMode::kFilterNone); + + if (ret != 0) { + ALOGE("%s: failed to 
scale buffer from %dx%d to %dx%d. Ret %d", + __FUNCTION__, inputCrop.width, inputCrop.height, + outSz.width, outSz.height, ret); + return ret; + } + + *out = outLayout; + mScaledYu12Frames.insert({outSz, scaledYu12Buf}); + return 0; +} + +int ExternalCameraDeviceSession::OutputThread::formatConvertLocked( + const YCbCrLayout& in, const YCbCrLayout& out, Size sz, uint32_t format) { + int ret = 0; + switch (format) { + case V4L2_PIX_FMT_NV21: + ret = libyuv::I420ToNV21( + static_cast(in.y), + in.yStride, + static_cast(in.cb), + in.cStride, + static_cast(in.cr), + in.cStride, + static_cast(out.y), + out.yStride, + static_cast(out.cr), + out.cStride, + sz.width, + sz.height); + if (ret != 0) { + ALOGE("%s: convert to NV21 buffer failed! ret %d", + __FUNCTION__, ret); + return ret; + } + break; + case V4L2_PIX_FMT_NV12: + ret = libyuv::I420ToNV12( + static_cast(in.y), + in.yStride, + static_cast(in.cb), + in.cStride, + static_cast(in.cr), + in.cStride, + static_cast(out.y), + out.yStride, + static_cast(out.cb), + out.cStride, + sz.width, + sz.height); + if (ret != 0) { + ALOGE("%s: convert to NV12 buffer failed! ret %d", + __FUNCTION__, ret); + return ret; + } + break; + case V4L2_PIX_FMT_YVU420: // YV12 + case V4L2_PIX_FMT_YUV420: // YU12 + // TODO: maybe we can speed up here by somehow save this copy? + ret = libyuv::I420Copy( + static_cast(in.y), + in.yStride, + static_cast(in.cb), + in.cStride, + static_cast(in.cr), + in.cStride, + static_cast(out.y), + out.yStride, + static_cast(out.cb), + out.cStride, + static_cast(out.cr), + out.cStride, + sz.width, + sz.height); + if (ret != 0) { + ALOGE("%s: copy to YV12 or YU12 buffer failed! ret %d", + __FUNCTION__, ret); + return ret; + } + break; + case FLEX_YUV_GENERIC: + // TODO: b/72261744 write to arbitrary flexible YUV layout. Slow. 
+ ALOGE("%s: unsupported flexible yuv layout" + " y %p cb %p cr %p y_str %d c_str %d c_step %d", + __FUNCTION__, out.y, out.cb, out.cr, + out.yStride, out.cStride, out.chromaStep); + return -1; + default: + ALOGE("%s: unknown YUV format 0x%x!", __FUNCTION__, format); + return -1; + } + return 0; +} + +bool ExternalCameraDeviceSession::OutputThread::threadLoop() { + HalRequest req; + auto parent = mParent.promote(); + if (parent == nullptr) { + ALOGE("%s: session has been disconnected!", __FUNCTION__); + return false; + } + + // TODO: maybe we need to setup a sensor thread to dq/enq v4l frames + // regularly to prevent v4l buffer queue filled with stale buffers + // when app doesn't program a preveiw request + waitForNextRequest(&req); + if (req.frameIn == nullptr) { + // No new request, wait again + return true; + } + + if (req.frameIn->mFourcc != V4L2_PIX_FMT_MJPEG) { + ALOGE("%s: do not support V4L2 format %c%c%c%c", __FUNCTION__, + req.frameIn->mFourcc & 0xFF, + (req.frameIn->mFourcc >> 8) & 0xFF, + (req.frameIn->mFourcc >> 16) & 0xFF, + (req.frameIn->mFourcc >> 24) & 0xFF); + parent->notifyError( + /*frameNum*/req.frameNumber, /*stream*/-1, ErrorCode::ERROR_DEVICE); + return false; + } + + std::unique_lock lk(mLock); + + // Convert input V4L2 frame to YU12 of the same size + // TODO: see if we can save some computation by converting to YV12 here + uint8_t* inData; + size_t inDataSize; + req.frameIn->map(&inData, &inDataSize); + // TODO: profile + // TODO: in some special case maybe we can decode jpg directly to gralloc output? + int res = libyuv::MJPGToI420( + inData, inDataSize, + static_cast(mYu12FrameLayout.y), + mYu12FrameLayout.yStride, + static_cast(mYu12FrameLayout.cb), + mYu12FrameLayout.cStride, + static_cast(mYu12FrameLayout.cr), + mYu12FrameLayout.cStride, + mYu12Frame->mWidth, mYu12Frame->mHeight, + mYu12Frame->mWidth, mYu12Frame->mHeight); + + if (res != 0) { + // For some webcam, the first few V4L2 frames might be malformed... 
+ ALOGE("%s: Convert V4L2 frame to YU12 failed! res %d", __FUNCTION__, res); + lk.unlock(); + Status st = parent->processCaptureRequestError(req); + if (st != Status::OK) { + ALOGE("%s: failed to process capture request error!", __FUNCTION__); + parent->notifyError( + /*frameNum*/req.frameNumber, /*stream*/-1, ErrorCode::ERROR_DEVICE); + return false; + } + return true; + } + + ALOGV("%s processing new request", __FUNCTION__); + const int kSyncWaitTimeoutMs = 500; + for (auto& halBuf : req.buffers) { + if (halBuf.acquireFence != -1) { + int ret = sync_wait(halBuf.acquireFence, kSyncWaitTimeoutMs); + if (ret) { + halBuf.fenceTimeout = true; + } else { + ::close(halBuf.acquireFence); + } + } + + if (halBuf.fenceTimeout) { + continue; + } + + // Gralloc lockYCbCr the buffer + switch (halBuf.format) { + case PixelFormat::BLOB: + // TODO: b/72261675 implement JPEG output path + break; + case PixelFormat::YCBCR_420_888: + case PixelFormat::YV12: { + IMapper::Rect outRect {0, 0, + static_cast(halBuf.width), + static_cast(halBuf.height)}; + YCbCrLayout outLayout = sHandleImporter.lockYCbCr( + *(halBuf.bufPtr), halBuf.usage, outRect); + ALOGV("%s: outLayout y %p cb %p cr %p y_str %d c_str %d c_step %d", + __FUNCTION__, outLayout.y, outLayout.cb, outLayout.cr, + outLayout.yStride, outLayout.cStride, outLayout.chromaStep); + + // Convert to output buffer size/format + uint32_t outputFourcc = getFourCcFromLayout(outLayout); + ALOGV("%s: converting to format %c%c%c%c", __FUNCTION__, + outputFourcc & 0xFF, + (outputFourcc >> 8) & 0xFF, + (outputFourcc >> 16) & 0xFF, + (outputFourcc >> 24) & 0xFF); + + YCbCrLayout cropAndScaled; + int ret = cropAndScaleLocked( + mYu12Frame, halBuf, &cropAndScaled); + if (ret != 0) { + ALOGE("%s: crop and scale failed!", __FUNCTION__); + lk.unlock(); + parent->notifyError( + /*frameNum*/req.frameNumber, /*stream*/-1, ErrorCode::ERROR_DEVICE); + return false; + } + + Size sz {halBuf.width, halBuf.height}; + ret = formatConvertLocked(cropAndScaled, 
outLayout, sz, outputFourcc); + if (ret != 0) { + ALOGE("%s: format coversion failed!", __FUNCTION__); + lk.unlock(); + parent->notifyError( + /*frameNum*/req.frameNumber, /*stream*/-1, ErrorCode::ERROR_DEVICE); + return false; + } + int relFence = sHandleImporter.unlock(*(halBuf.bufPtr)); + if (relFence > 0) { + halBuf.acquireFence = relFence; + } + } break; + default: + ALOGE("%s: unknown output format %x", __FUNCTION__, halBuf.format); + lk.unlock(); + parent->notifyError( + /*frameNum*/req.frameNumber, /*stream*/-1, ErrorCode::ERROR_DEVICE); + return false; + } + } // for each buffer + mScaledYu12Frames.clear(); + + // Don't hold the lock while calling back to parent + lk.unlock(); + Status st = parent->processCaptureResult(req); + if (st != Status::OK) { + ALOGE("%s: failed to process capture result!", __FUNCTION__); + parent->notifyError( + /*frameNum*/req.frameNumber, /*stream*/-1, ErrorCode::ERROR_DEVICE); + return false; + } + return true; +} + +Status ExternalCameraDeviceSession::OutputThread::allocateIntermediateBuffers( + const Size& v4lSize, const hidl_vec& streams) { + std::lock_guard lk(mLock); + if (mScaledYu12Frames.size() != 0) { + ALOGE("%s: intermediate buffer pool has %zu inflight buffers! 
(expect 0)", + __FUNCTION__, mScaledYu12Frames.size()); + return Status::INTERNAL_ERROR; + } + + // Allocating intermediate YU12 frame + if (mYu12Frame == nullptr || mYu12Frame->mWidth != v4lSize.width || + mYu12Frame->mHeight != v4lSize.height) { + mYu12Frame.clear(); + mYu12Frame = new AllocatedFrame(v4lSize.width, v4lSize.height); + int ret = mYu12Frame->allocate(&mYu12FrameLayout); + if (ret != 0) { + ALOGE("%s: allocating YU12 frame failed!", __FUNCTION__); + return Status::INTERNAL_ERROR; + } + } + + // Allocating scaled buffers + for (const auto& stream : streams) { + Size sz = {stream.width, stream.height}; + if (sz == v4lSize) { + continue; // Don't need an intermediate buffer same size as v4lBuffer + } + if (mIntermediateBuffers.count(sz) == 0) { + // Create new intermediate buffer + sp buf = new AllocatedFrame(stream.width, stream.height); + int ret = buf->allocate(); + if (ret != 0) { + ALOGE("%s: allocating intermediate YU12 frame %dx%d failed!", + __FUNCTION__, stream.width, stream.height); + return Status::INTERNAL_ERROR; + } + mIntermediateBuffers[sz] = buf; + } + } + + // Remove unconfigured buffers + auto it = mIntermediateBuffers.begin(); + while (it != mIntermediateBuffers.end()) { + bool configured = false; + auto sz = it->first; + for (const auto& stream : streams) { + if (stream.width == sz.width && stream.height == sz.height) { + configured = true; + break; + } + } + if (configured) { + it++; + } else { + it = mIntermediateBuffers.erase(it); + } + } + return Status::OK; +} + +Status ExternalCameraDeviceSession::OutputThread::submitRequest(const HalRequest& req) { + std::lock_guard lk(mLock); + // TODO: reduce object copy in this path + mRequestList.push_back(req); + mRequestCond.notify_one(); + return Status::OK; +} + +void ExternalCameraDeviceSession::OutputThread::flush() { + std::lock_guard lk(mLock); + // TODO: send buffer/request errors back to framework + mRequestList.clear(); +} + +void 
ExternalCameraDeviceSession::OutputThread::waitForNextRequest(HalRequest* out) { + if (out == nullptr) { + ALOGE("%s: out is null", __FUNCTION__); + return; + } + + std::unique_lock lk(mLock); + while (mRequestList.empty()) { + std::chrono::seconds timeout = std::chrono::seconds(kReqWaitTimeoutSec); + auto st = mRequestCond.wait_for(lk, timeout); + if (st == std::cv_status::timeout) { + // no new request, return + return; + } + } + *out = mRequestList.front(); + mRequestList.pop_front(); +} + +void ExternalCameraDeviceSession::cleanupBuffersLocked(int id) { + for (auto& pair : mCirculatingBuffers.at(id)) { + sHandleImporter.freeBuffer(pair.second); + } + mCirculatingBuffers[id].clear(); + mCirculatingBuffers.erase(id); +} + +void ExternalCameraDeviceSession::updateBufferCaches(const hidl_vec& cachesToRemove) { + Mutex::Autolock _l(mLock); + for (auto& cache : cachesToRemove) { + auto cbsIt = mCirculatingBuffers.find(cache.streamId); + if (cbsIt == mCirculatingBuffers.end()) { + // The stream could have been removed + continue; + } + CirculatingBuffers& cbs = cbsIt->second; + auto it = cbs.find(cache.bufferId); + if (it != cbs.end()) { + sHandleImporter.freeBuffer(it->second); + cbs.erase(it); + } else { + ALOGE("%s: stream %d buffer %" PRIu64 " is not cached", + __FUNCTION__, cache.streamId, cache.bufferId); + } + } +} + +bool ExternalCameraDeviceSession::isSupported(const Stream& stream) { + int32_t ds = static_cast(stream.dataSpace); + PixelFormat fmt = stream.format; + uint32_t width = stream.width; + uint32_t height = stream.height; + // TODO: check usage flags + + if (stream.streamType != StreamType::OUTPUT) { + ALOGE("%s: does not support non-output stream type", __FUNCTION__); + return false; + } + + if (stream.rotation != StreamRotation::ROTATION_0) { + ALOGE("%s: does not support stream rotation", __FUNCTION__); + return false; + } + + if (ds & Dataspace::DEPTH) { + ALOGI("%s: does not support depth output", __FUNCTION__); + return false; + } + + switch 
(fmt) { + case PixelFormat::BLOB: + if (ds != static_cast(Dataspace::V0_JFIF)) { + ALOGI("%s: BLOB format does not support dataSpace %x", __FUNCTION__, ds); + return false; + } + case PixelFormat::IMPLEMENTATION_DEFINED: + case PixelFormat::YCBCR_420_888: + case PixelFormat::YV12: + // TODO: check what dataspace we can support here. + // intentional no-ops. + break; + default: + ALOGI("%s: does not support format %x", __FUNCTION__, fmt); + return false; + } + + // Assume we can convert any V4L2 format to any of supported output format for now, i.e, + // ignoring v4l2Fmt.fourcc for now. Might need more subtle check if we support more v4l format + // in the futrue. + for (const auto& v4l2Fmt : mSupportedFormats) { + if (width == v4l2Fmt.width && height == v4l2Fmt.height) { + return true; + } + } + ALOGI("%s: resolution %dx%d is not supported", __FUNCTION__, width, height); + return false; +} + +int ExternalCameraDeviceSession::v4l2StreamOffLocked() { + if (!mV4l2Streaming) { + return OK; + } + + { + std::lock_guard lk(mV4l2BufferLock); + if (mNumDequeuedV4l2Buffers != 0) { + ALOGE("%s: there are %zu inflight V4L buffers", + __FUNCTION__, mNumDequeuedV4l2Buffers); + return -1; + } + } + mV4l2Buffers.clear(); // VIDIOC_REQBUFS will fail if FDs are not clear first + + // VIDIOC_STREAMOFF + v4l2_buf_type capture_type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + if (TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_STREAMOFF, &capture_type)) < 0) { + ALOGE("%s: STREAMOFF failed: %s", __FUNCTION__, strerror(errno)); + return -errno; + } + + // VIDIOC_REQBUFS: clear buffers + v4l2_requestbuffers req_buffers{}; + req_buffers.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + req_buffers.memory = V4L2_MEMORY_MMAP; + req_buffers.count = 0; + if (TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_REQBUFS, &req_buffers)) < 0) { + ALOGE("%s: REQBUFS failed: %s", __FUNCTION__, strerror(errno)); + return -errno; + } + + mV4l2Streaming = false; + return OK; +} + +int 
ExternalCameraDeviceSession::configureV4l2StreamLocked(const SupportedV4L2Format& v4l2Fmt) {
+    int ret = v4l2StreamOffLocked();
+    if (ret != OK) {
+        ALOGE("%s: stop v4l2 streaming failed: ret %d", __FUNCTION__, ret);
+        return ret;
+    }
+
+    // VIDIOC_S_FMT w/h/fmt
+    // BUGFIX: zero-initialize the struct. VIDIOC_S_FMT reads more of
+    // v4l2_pix_format than we set below (field, bytesperline, sizeimage,
+    // colorspace are all inputs the driver may honor); leaving them
+    // uninitialized makes the negotiated format nondeterministic.
+    // Zero means V4L2_FIELD_ANY / "driver decides", which is what we want.
+    v4l2_format fmt{};
+    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+    fmt.fmt.pix.width = v4l2Fmt.width;
+    fmt.fmt.pix.height = v4l2Fmt.height;
+    fmt.fmt.pix.pixelformat = v4l2Fmt.fourcc;
+    ret = TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_S_FMT, &fmt));
+    if (ret < 0) {
+        ALOGE("%s: S_FMT ioctl failed: %s", __FUNCTION__, strerror(errno));
+        return -errno;
+    }
+
+    // The driver is allowed to adjust the format; we only accept an exact match.
+    if (v4l2Fmt.width != fmt.fmt.pix.width || v4l2Fmt.height != fmt.fmt.pix.height ||
+            v4l2Fmt.fourcc != fmt.fmt.pix.pixelformat) {
+        ALOGE("%s: S_FMT expect %c%c%c%c %dx%d, got %c%c%c%c %dx%d instead!", __FUNCTION__,
+                v4l2Fmt.fourcc & 0xFF,
+                (v4l2Fmt.fourcc >> 8) & 0xFF,
+                (v4l2Fmt.fourcc >> 16) & 0xFF,
+                (v4l2Fmt.fourcc >> 24) & 0xFF,
+                v4l2Fmt.width, v4l2Fmt.height,
+                fmt.fmt.pix.pixelformat & 0xFF,
+                (fmt.fmt.pix.pixelformat >> 8) & 0xFF,
+                (fmt.fmt.pix.pixelformat >> 16) & 0xFF,
+                (fmt.fmt.pix.pixelformat >> 24) & 0xFF,
+                fmt.fmt.pix.width, fmt.fmt.pix.height);
+        return -EINVAL;
+    }
+    uint32_t bufferSize = fmt.fmt.pix.sizeimage;
+    ALOGI("%s: V4L2 buffer size is %d", __FUNCTION__, bufferSize);
+
+    float maxFps = -1.f;
+    float fps = 1000.f;
+    const float kDefaultFps = 30.f;
+    // Try to pick the slowest fps that is at least 30
+    for (const auto& f : v4l2Fmt.frameRates) {
+        if (maxFps < f) {
+            maxFps = f;
+        }
+        if (f >= kDefaultFps && f < fps) {
+            fps = f;
+        }
+    }
+    // No rate >= 30 supported; fall back to the fastest rate available.
+    if (fps == 1000.f) {
+        fps = maxFps;
+    }
+
+    // VIDIOC_G_PARM/VIDIOC_S_PARM: set fps
+    v4l2_streamparm streamparm = { .type = V4L2_BUF_TYPE_VIDEO_CAPTURE };
+    // The following line checks that the driver knows about framerate get/set.
+    if (TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_G_PARM, &streamparm)) >= 0) {
+        // Now check if the device is able to accept a capture framerate set.
+ if (streamparm.parm.capture.capability & V4L2_CAP_TIMEPERFRAME) { + // |frame_rate| is float, approximate by a fraction. + const int kFrameRatePrecision = 10000; + streamparm.parm.capture.timeperframe.numerator = kFrameRatePrecision; + streamparm.parm.capture.timeperframe.denominator = + (fps * kFrameRatePrecision); + + if (TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_S_PARM, &streamparm)) < 0) { + ALOGE("%s: failed to set framerate to %f", __FUNCTION__, fps); + return UNKNOWN_ERROR; + } + } + } + float retFps = streamparm.parm.capture.timeperframe.denominator / + streamparm.parm.capture.timeperframe.numerator; + if (std::fabs(fps - retFps) > std::numeric_limits::epsilon()) { + ALOGE("%s: expect fps %f, got %f instead", __FUNCTION__, fps, retFps); + return BAD_VALUE; + } + + uint32_t v4lBufferCount = (v4l2Fmt.width <= kMaxVideoSize.width && + v4l2Fmt.height <= kMaxVideoSize.height) ? kNumVideoBuffers : kNumStillBuffers; + // VIDIOC_REQBUFS: create buffers + v4l2_requestbuffers req_buffers{}; + req_buffers.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + req_buffers.memory = V4L2_MEMORY_MMAP; + req_buffers.count = v4lBufferCount; + if (TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_REQBUFS, &req_buffers)) < 0) { + ALOGE("%s: VIDIOC_REQBUFS failed: %s", __FUNCTION__, strerror(errno)); + return -errno; + } + + // Driver can indeed return more buffer if it needs more to operate + if (req_buffers.count < v4lBufferCount) { + ALOGE("%s: VIDIOC_REQBUFS expected %d buffers, got %d instead", + __FUNCTION__, v4lBufferCount, req_buffers.count); + return NO_MEMORY; + } + + // VIDIOC_EXPBUF: export buffers as FD + // VIDIOC_QBUF: send buffer to driver + mV4l2Buffers.resize(req_buffers.count); + for (uint32_t i = 0; i < req_buffers.count; i++) { + v4l2_exportbuffer expbuf {}; + expbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + expbuf.index = i; + if (TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_EXPBUF, &expbuf)) < 0) { + ALOGE("%s: EXPBUF %d failed: %s", __FUNCTION__, i, strerror(errno)); 
+ return -errno; + } + mV4l2Buffers[i].reset(expbuf.fd); + + v4l2_buffer buffer = { + .type = V4L2_BUF_TYPE_VIDEO_CAPTURE, + .index = i, + .memory = V4L2_MEMORY_MMAP}; + + if (TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_QBUF, &buffer)) < 0) { + ALOGE("%s: QBUF %d failed: %s", __FUNCTION__, i, strerror(errno)); + return -errno; + } + } + + // VIDIOC_STREAMON: start streaming + v4l2_buf_type capture_type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + if (TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_STREAMON, &capture_type)) < 0) { + ALOGE("%s: VIDIOC_STREAMON failed: %s", __FUNCTION__, strerror(errno)); + return -errno; + } + + // Swallow first few frames after streamOn to account for bad frames from some devices + for (int i = 0; i < kBadFramesAfterStreamOn; i++) { + v4l2_buffer buffer{}; + buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + buffer.memory = V4L2_MEMORY_MMAP; + if (TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_DQBUF, &buffer)) < 0) { + ALOGE("%s: DQBUF fails: %s", __FUNCTION__, strerror(errno)); + return -errno; + } + + if (TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_QBUF, &buffer)) < 0) { + ALOGE("%s: QBUF index %d fails: %s", __FUNCTION__, buffer.index, strerror(errno)); + return -errno; + } + } + + mV4l2StreamingFmt = v4l2Fmt; + mV4l2Streaming = true; + return OK; +} + +sp ExternalCameraDeviceSession::dequeueV4l2FrameLocked() { + sp ret = nullptr; + + { + std::unique_lock lk(mV4l2BufferLock); + if (mNumDequeuedV4l2Buffers == mV4l2Buffers.size()) { + std::chrono::seconds timeout = std::chrono::seconds(kBufferWaitTimeoutSec); + mLock.unlock(); + auto st = mV4L2BufferReturned.wait_for(lk, timeout); + mLock.lock(); + if (st == std::cv_status::timeout) { + ALOGE("%s: wait for V4L2 buffer return timeout!", __FUNCTION__); + return ret; + } + } + } + + v4l2_buffer buffer{}; + buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + buffer.memory = V4L2_MEMORY_MMAP; + if (TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_DQBUF, &buffer)) < 0) { + ALOGE("%s: DQBUF fails: %s", 
__FUNCTION__, strerror(errno)); + return ret; + } + + if (buffer.index >= mV4l2Buffers.size()) { + ALOGE("%s: Invalid buffer id: %d", __FUNCTION__, buffer.index); + return ret; + } + + if (buffer.flags & V4L2_BUF_FLAG_ERROR) { + ALOGE("%s: v4l2 buf error! buf flag 0x%x", __FUNCTION__, buffer.flags); + // TODO: try to dequeue again + } + + { + std::lock_guard lk(mV4l2BufferLock); + mNumDequeuedV4l2Buffers++; + } + return new V4L2Frame( + mV4l2StreamingFmt.width, mV4l2StreamingFmt.height, mV4l2StreamingFmt.fourcc, + buffer.index, mV4l2Buffers[buffer.index].get(), buffer.bytesused); +} + +void ExternalCameraDeviceSession::enqueueV4l2Frame(const sp& frame) { + Mutex::Autolock _l(mLock); + frame->unmap(); + v4l2_buffer buffer{}; + buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + buffer.memory = V4L2_MEMORY_MMAP; + buffer.index = frame->mBufferIndex; + if (TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_QBUF, &buffer)) < 0) { + ALOGE("%s: QBUF index %d fails: %s", __FUNCTION__, frame->mBufferIndex, strerror(errno)); + return; + } + + { + std::lock_guard lk(mV4l2BufferLock); + mNumDequeuedV4l2Buffers--; + mV4L2BufferReturned.notify_one(); + } +} + +Status ExternalCameraDeviceSession::configureStreams( + const V3_2::StreamConfiguration& config, V3_3::HalStreamConfiguration* out) { + if (config.operationMode != StreamConfigurationMode::NORMAL_MODE) { + ALOGE("%s: unsupported operation mode: %d", __FUNCTION__, config.operationMode); + return Status::ILLEGAL_ARGUMENT; + } + + if (config.streams.size() == 0) { + ALOGE("%s: cannot configure zero stream", __FUNCTION__); + return Status::ILLEGAL_ARGUMENT; + } + + int numProcessedStream = 0; + int numStallStream = 0; + for (const auto& stream : config.streams) { + // Check if the format/width/height combo is supported + if (!isSupported(stream)) { + return Status::ILLEGAL_ARGUMENT; + } + if (stream.format == PixelFormat::BLOB) { + numStallStream++; + } else { + numProcessedStream++; + } + } + + if (numProcessedStream > 
kMaxProcessedStream) { + ALOGE("%s: too many processed streams (expect <= %d, got %d)", __FUNCTION__, + kMaxProcessedStream, numProcessedStream); + return Status::ILLEGAL_ARGUMENT; + } + + if (numStallStream > kMaxStallStream) { + ALOGE("%s: too many stall streams (expect <= %d, got %d)", __FUNCTION__, + kMaxStallStream, numStallStream); + return Status::ILLEGAL_ARGUMENT; + } + + Status status = initStatus(); + if (status != Status::OK) { + return status; + } + + Mutex::Autolock _l(mLock); + if (!mInflightFrames.empty()) { + ALOGE("%s: trying to configureStreams while there are still %zu inflight frames!", + __FUNCTION__, mInflightFrames.size()); + return Status::INTERNAL_ERROR; + } + + // Add new streams + for (const auto& stream : config.streams) { + if (mStreamMap.count(stream.id) == 0) { + mStreamMap[stream.id] = stream; + mCirculatingBuffers.emplace(stream.id, CirculatingBuffers{}); + } + } + + // Cleanup removed streams + for(auto it = mStreamMap.begin(); it != mStreamMap.end();) { + int id = it->first; + bool found = false; + for (const auto& stream : config.streams) { + if (id == stream.id) { + found = true; + break; + } + } + if (!found) { + // Unmap all buffers of deleted stream + cleanupBuffersLocked(id); + it = mStreamMap.erase(it); + } else { + ++it; + } + } + + // Now select a V4L2 format to produce all output streams + float desiredAr = (mCroppingType == VERTICAL) ? kMaxAspectRatio : kMinAspectRatio; + uint32_t maxDim = 0; + for (const auto& stream : config.streams) { + float aspectRatio = ASPECT_RATIO(stream); + if ((mCroppingType == VERTICAL && aspectRatio < desiredAr) || + (mCroppingType == HORIZONTAL && aspectRatio > desiredAr)) { + desiredAr = aspectRatio; + } + + // The dimension that's not cropped + uint32_t dim = (mCroppingType == VERTICAL) ? 
stream.width : stream.height; + if (dim > maxDim) { + maxDim = dim; + } + } + // Find the smallest format that matches the desired aspect ratio and is wide/high enough + SupportedV4L2Format v4l2Fmt {.width = 0, .height = 0}; + for (const auto& fmt : mSupportedFormats) { + uint32_t dim = (mCroppingType == VERTICAL) ? fmt.width : fmt.height; + if (dim >= maxDim) { + float aspectRatio = ASPECT_RATIO(fmt); + if (isAspectRatioClose(aspectRatio, desiredAr)) { + v4l2Fmt = fmt; + // since mSupportedFormats is sorted by width then height, the first matching fmt + // will be the smallest one with matching aspect ratio + break; + } + } + } + if (v4l2Fmt.width == 0) { + // Cannot find exact good aspect ratio candidate, try to find a close one + for (const auto& fmt : mSupportedFormats) { + uint32_t dim = (mCroppingType == VERTICAL) ? fmt.width : fmt.height; + if (dim >= maxDim) { + float aspectRatio = ASPECT_RATIO(fmt); + if ((mCroppingType == VERTICAL && aspectRatio < desiredAr) || + (mCroppingType == HORIZONTAL && aspectRatio > desiredAr)) { + v4l2Fmt = fmt; + break; + } + } + } + } + + if (v4l2Fmt.width == 0) { + ALOGE("%s: unable to find a resolution matching (%s at least %d, aspect ratio %f)" + , __FUNCTION__, (mCroppingType == VERTICAL) ? 
"width" : "height", + maxDim, desiredAr); + return Status::ILLEGAL_ARGUMENT; + } + + if (configureV4l2StreamLocked(v4l2Fmt) != 0) { + ALOGE("V4L configuration failed!, format:%c%c%c%c, w %d, h %d", + v4l2Fmt.fourcc & 0xFF, + (v4l2Fmt.fourcc >> 8) & 0xFF, + (v4l2Fmt.fourcc >> 16) & 0xFF, + (v4l2Fmt.fourcc >> 24) & 0xFF, + v4l2Fmt.width, v4l2Fmt.height); + return Status::INTERNAL_ERROR; + } + + Size v4lSize = {v4l2Fmt.width, v4l2Fmt.height}; + status = mOutputThread->allocateIntermediateBuffers(v4lSize, config.streams); + if (status != Status::OK) { + ALOGE("%s: allocating intermediate buffers failed!", __FUNCTION__); + return status; + } + + out->streams.resize(config.streams.size()); + for (size_t i = 0; i < config.streams.size(); i++) { + out->streams[i].overrideDataSpace = config.streams[i].dataSpace; + out->streams[i].v3_2.id = config.streams[i].id; + // TODO: double check should we add those CAMERA flags + mStreamMap[config.streams[i].id].usage = + out->streams[i].v3_2.producerUsage = config.streams[i].usage | + BufferUsage::CPU_WRITE_OFTEN | + BufferUsage::CAMERA_OUTPUT; + out->streams[i].v3_2.consumerUsage = 0; + out->streams[i].v3_2.maxBuffers = mV4l2Buffers.size(); + + switch (config.streams[i].format) { + case PixelFormat::BLOB: + case PixelFormat::YCBCR_420_888: + // No override + out->streams[i].v3_2.overrideFormat = config.streams[i].format; + break; + case PixelFormat::IMPLEMENTATION_DEFINED: + // Override based on VIDEO or not + out->streams[i].v3_2.overrideFormat = + (config.streams[i].usage & BufferUsage::VIDEO_ENCODER) ? 
+ PixelFormat::YCBCR_420_888 : PixelFormat::YV12; + // Save overridden formt in mStreamMap + mStreamMap[config.streams[i].id].format = out->streams[i].v3_2.overrideFormat; + break; + default: + ALOGE("%s: unsupported format %x", __FUNCTION__, config.streams[i].format); + return Status::ILLEGAL_ARGUMENT; + } + } + + mFirstRequest = true; + return Status::OK; +} + +bool ExternalCameraDeviceSession::isClosed() { + Mutex::Autolock _l(mLock); + return mClosed; +} + +#define ARRAY_SIZE(a) (sizeof(a) / sizeof(a[0])) +#define UPDATE(md, tag, data, size) \ +do { \ + if ((md).update((tag), (data), (size))) { \ + ALOGE("Update " #tag " failed!"); \ + return BAD_VALUE; \ + } \ +} while (0) + +status_t ExternalCameraDeviceSession::initDefaultRequests() { + ::android::hardware::camera::common::V1_0::helper::CameraMetadata md; + + const uint8_t aberrationMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF; + UPDATE(md, ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &aberrationMode, 1); + + const int32_t exposureCompensation = 0; + UPDATE(md, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &exposureCompensation, 1); + + const uint8_t videoStabilizationMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF; + UPDATE(md, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &videoStabilizationMode, 1); + + const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO; + UPDATE(md, ANDROID_CONTROL_AWB_MODE, &awbMode, 1); + + const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON; + UPDATE(md, ANDROID_CONTROL_AE_MODE, &aeMode, 1); + + const uint8_t aePrecaptureTrigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE; + UPDATE(md, ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &aePrecaptureTrigger, 1); + + const uint8_t afMode = ANDROID_CONTROL_AF_MODE_AUTO; + UPDATE(md, ANDROID_CONTROL_AF_MODE, &afMode, 1); + + const uint8_t afTrigger = ANDROID_CONTROL_AF_TRIGGER_IDLE; + UPDATE(md, ANDROID_CONTROL_AF_TRIGGER, &afTrigger, 1); + + const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_DISABLED; + UPDATE(md, ANDROID_CONTROL_SCENE_MODE, 
&sceneMode, 1); + + const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF; + UPDATE(md, ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1); + + const uint8_t flashMode = ANDROID_FLASH_MODE_OFF; + UPDATE(md, ANDROID_FLASH_MODE, &flashMode, 1); + + const int32_t thumbnailSize[] = {240, 180}; + UPDATE(md, ANDROID_JPEG_THUMBNAIL_SIZE, thumbnailSize, 2); + + const uint8_t jpegQuality = 90; + UPDATE(md, ANDROID_JPEG_QUALITY, &jpegQuality, 1); + UPDATE(md, ANDROID_JPEG_THUMBNAIL_QUALITY, &jpegQuality, 1); + + const int32_t jpegOrientation = 0; + UPDATE(md, ANDROID_JPEG_ORIENTATION, &jpegOrientation, 1); + + const uint8_t oisMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF; + UPDATE(md, ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &oisMode, 1); + + const uint8_t nrMode = ANDROID_NOISE_REDUCTION_MODE_OFF; + UPDATE(md, ANDROID_NOISE_REDUCTION_MODE, &nrMode, 1); + + const uint8_t fdMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF; + UPDATE(md, ANDROID_STATISTICS_FACE_DETECT_MODE, &fdMode, 1); + + const uint8_t hotpixelMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF; + UPDATE(md, ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotpixelMode, 1); + + bool support30Fps = false; + int32_t maxFps = std::numeric_limits::min(); + for (const auto& supportedFormat : mSupportedFormats) { + for (const auto& frameRate : supportedFormat.frameRates) { + int32_t framerateInt = static_cast(frameRate); + if (maxFps < framerateInt) { + maxFps = framerateInt; + } + if (framerateInt == 30) { + support30Fps = true; + break; + } + } + if (support30Fps) { + break; + } + } + int32_t defaultFramerate = support30Fps ? 
30 : maxFps; + int32_t defaultFpsRange[] = {defaultFramerate, defaultFramerate}; + UPDATE(md, ANDROID_CONTROL_AE_TARGET_FPS_RANGE, defaultFpsRange, ARRAY_SIZE(defaultFpsRange)); + + uint8_t antibandingMode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO; + UPDATE(md, ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibandingMode, 1); + + const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO; + UPDATE(md, ANDROID_CONTROL_MODE, &controlMode, 1); + + for (int type = static_cast(RequestTemplate::PREVIEW); + type <= static_cast(RequestTemplate::VIDEO_SNAPSHOT); type++) { + ::android::hardware::camera::common::V1_0::helper::CameraMetadata mdCopy = md; + uint8_t intent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW; + switch (type) { + case static_cast(RequestTemplate::PREVIEW): + intent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW; + break; + case static_cast(RequestTemplate::STILL_CAPTURE): + intent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE; + break; + case static_cast(RequestTemplate::VIDEO_RECORD): + intent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD; + break; + case static_cast(RequestTemplate::VIDEO_SNAPSHOT): + intent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT; + break; + default: + ALOGE("%s: unknown template type %d", __FUNCTION__, type); + return BAD_VALUE; + } + UPDATE(mdCopy, ANDROID_CONTROL_CAPTURE_INTENT, &intent, 1); + + camera_metadata_t* rawMd = mdCopy.release(); + CameraMetadata hidlMd; + hidlMd.setToExternal( + (uint8_t*) rawMd, get_camera_metadata_size(rawMd)); + mDefaultRequests[type] = hidlMd; + free_camera_metadata(rawMd); + } + + return OK; +} + +status_t ExternalCameraDeviceSession::fillCaptureResult( + common::V1_0::helper::CameraMetadata &md, nsecs_t timestamp) { + // android.control + // For USB camera, we don't know the AE state. Set the state to converged to + // indicate the frame should be good to use. Then apps don't have to wait the + // AE state. 
+ const uint8_t aeState = ANDROID_CONTROL_AE_STATE_CONVERGED; + UPDATE(md, ANDROID_CONTROL_AE_STATE, &aeState, 1); + + const uint8_t ae_lock = ANDROID_CONTROL_AE_LOCK_OFF; + UPDATE(md, ANDROID_CONTROL_AE_LOCK, &ae_lock, 1); + + + // TODO: b/72261912 AF should stay LOCKED until cancel is seen + bool afTrigger = false; + if (md.exists(ANDROID_CONTROL_AF_TRIGGER)) { + camera_metadata_entry entry = md.find(ANDROID_CONTROL_AF_TRIGGER); + if (entry.data.u8[0] == ANDROID_CONTROL_AF_TRIGGER_START) { + afTrigger = true; + } else if (entry.data.u8[0] == ANDROID_CONTROL_AF_TRIGGER_CANCEL) { + afTrigger = false; + } + } + + // For USB camera, the USB camera handles everything and we don't have control + // over AF. We only simply fake the AF metadata based on the request + // received here. + uint8_t afState; + if (afTrigger) { + afState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED; + } else { + afState = ANDROID_CONTROL_AF_STATE_INACTIVE; + } + UPDATE(md, ANDROID_CONTROL_AF_STATE, &afState, 1); + + // Set AWB state to converged to indicate the frame should be good to use. 
+ const uint8_t awbState = ANDROID_CONTROL_AWB_STATE_CONVERGED; + UPDATE(md, ANDROID_CONTROL_AWB_STATE, &awbState, 1); + + const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF; + UPDATE(md, ANDROID_CONTROL_AWB_LOCK, &awbLock, 1); + + camera_metadata_ro_entry active_array_size = + mCameraCharacteristics.find(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE); + + if (active_array_size.count == 0) { + ALOGE("%s: cannot find active array size!", __FUNCTION__); + return -EINVAL; + } + + // android.scaler + const int32_t crop_region[] = { + active_array_size.data.i32[0], active_array_size.data.i32[1], + active_array_size.data.i32[2], active_array_size.data.i32[3], + }; + UPDATE(md, ANDROID_SCALER_CROP_REGION, crop_region, ARRAY_SIZE(crop_region)); + + // android.sensor + UPDATE(md, ANDROID_SENSOR_TIMESTAMP, ×tamp, 1); + + // android.statistics + const uint8_t lensShadingMapMode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF; + UPDATE(md, ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &lensShadingMapMode, 1); + + const uint8_t sceneFlicker = ANDROID_STATISTICS_SCENE_FLICKER_NONE; + UPDATE(md, ANDROID_STATISTICS_SCENE_FLICKER, &sceneFlicker, 1); + + return OK; +} + +#undef ARRAY_SIZE +#undef UPDATE + +V4L2Frame::V4L2Frame( + uint32_t w, uint32_t h, uint32_t fourcc, + int bufIdx, int fd, uint32_t dataSize) : + mWidth(w), mHeight(h), mFourcc(fourcc), + mBufferIndex(bufIdx), mFd(fd), mDataSize(dataSize) {} + +int V4L2Frame::map(uint8_t** data, size_t* dataSize) { + if (data == nullptr || dataSize == nullptr) { + ALOGI("%s: V4L2 buffer map bad argument: data %p, dataSize %p", + __FUNCTION__, data, dataSize); + return -EINVAL; + } + + Mutex::Autolock _l(mLock); + if (!mMapped) { + void* addr = mmap(NULL, mDataSize, PROT_READ, MAP_SHARED, mFd, 0); + if (addr == MAP_FAILED) { + ALOGE("%s: V4L2 buffer map failed: %s", __FUNCTION__, strerror(errno)); + return -EINVAL; + } + mData = static_cast(addr); + mMapped = true; + } + *data = mData; + *dataSize = mDataSize; + ALOGV("%s: V4L map FD %d, data 
%p size %zu", __FUNCTION__, mFd, mData, mDataSize); + return 0; +} + +int V4L2Frame::unmap() { + Mutex::Autolock _l(mLock); + if (mMapped) { + ALOGV("%s: V4L unmap data %p size %zu", __FUNCTION__, mData, mDataSize); + if (munmap(mData, mDataSize) != 0) { + ALOGE("%s: V4L2 buffer unmap failed: %s", __FUNCTION__, strerror(errno)); + return -EINVAL; + } + mMapped = false; + } + return 0; +} + +V4L2Frame::~V4L2Frame() { + unmap(); +} + +AllocatedFrame::AllocatedFrame( + uint32_t w, uint32_t h) : + mWidth(w), mHeight(h), mFourcc(V4L2_PIX_FMT_YUV420) {}; + +AllocatedFrame::~AllocatedFrame() {} + +int AllocatedFrame::allocate(YCbCrLayout* out) { + if ((mWidth % 2) || (mHeight % 2)) { + ALOGE("%s: bad dimension %dx%d (not multiple of 2)", __FUNCTION__, mWidth, mHeight); + return -EINVAL; + } + + uint32_t dataSize = mWidth * mHeight * 3 / 2; // YUV420 + if (mData.size() != dataSize) { + mData.resize(dataSize); + } + + if (out != nullptr) { + out->y = mData.data(); + out->yStride = mWidth; + uint8_t* cbStart = mData.data() + mWidth * mHeight; + uint8_t* crStart = cbStart + mWidth * mHeight / 4; + out->cb = cbStart; + out->cr = crStart; + out->cStride = mWidth / 2; + out->chromaStep = 1; + } + return 0; +} + +int AllocatedFrame::getLayout(YCbCrLayout* out) { + IMapper::Rect noCrop = {0, 0, + static_cast(mWidth), + static_cast(mHeight)}; + return getCroppedLayout(noCrop, out); +} + +int AllocatedFrame::getCroppedLayout(const IMapper::Rect& rect, YCbCrLayout* out) { + if (out == nullptr) { + ALOGE("%s: null out", __FUNCTION__); + return -1; + } + if ((rect.left + rect.width) > static_cast(mWidth) || + (rect.top + rect.height) > static_cast(mHeight) || + (rect.left % 2) || (rect.top % 2) || (rect.width % 2) || (rect.height % 2)) { + ALOGE("%s: bad rect left %d top %d w %d h %d", __FUNCTION__, + rect.left, rect.top, rect.width, rect.height); + return -1; + } + + out->y = mData.data() + mWidth * rect.top + rect.left; + out->yStride = mWidth; + uint8_t* cbStart = mData.data() + 
mWidth * mHeight; + uint8_t* crStart = cbStart + mWidth * mHeight / 4; + out->cb = cbStart + mWidth * rect.top / 4 + rect.left / 2; + out->cr = crStart + mWidth * rect.top / 4 + rect.left / 2; + out->cStride = mWidth / 2; + out->chromaStep = 1; + return 0; +} + +} // namespace implementation +} // namespace V3_4 +} // namespace device +} // namespace camera +} // namespace hardware +} // namespace android + diff --git a/camera/device/3.4/default/include/ext_device_v3_4_impl/ExternalCameraDeviceSession.h b/camera/device/3.4/default/include/ext_device_v3_4_impl/ExternalCameraDeviceSession.h new file mode 100644 index 0000000000..404dfe00b1 --- /dev/null +++ b/camera/device/3.4/default/include/ext_device_v3_4_impl/ExternalCameraDeviceSession.h @@ -0,0 +1,441 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef ANDROID_HARDWARE_CAMERA_DEVICE_V3_4_EXTCAMERADEVICE3SESSION_H +#define ANDROID_HARDWARE_CAMERA_DEVICE_V3_4_EXTCAMERADEVICE3SESSION_H + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include "CameraMetadata.h" +#include "HandleImporter.h" +#include "utils/KeyedVector.h" +#include "utils/Mutex.h" +#include "utils/Thread.h" +#include "android-base/unique_fd.h" + +namespace android { +namespace hardware { +namespace camera { +namespace device { +namespace V3_4 { +namespace implementation { + +using ::android::hardware::camera::device::V3_2::BufferCache; +using ::android::hardware::camera::device::V3_2::BufferStatus; +using ::android::hardware::camera::device::V3_2::CameraMetadata; +using ::android::hardware::camera::device::V3_2::CaptureRequest; +using ::android::hardware::camera::device::V3_2::CaptureResult; +using ::android::hardware::camera::device::V3_2::ErrorCode; +using ::android::hardware::camera::device::V3_2::ICameraDeviceCallback; +using ::android::hardware::camera::device::V3_2::MsgType; +using ::android::hardware::camera::device::V3_2::NotifyMsg; +using ::android::hardware::camera::device::V3_2::RequestTemplate; +using ::android::hardware::camera::device::V3_2::Stream; +using ::android::hardware::camera::device::V3_4::StreamConfiguration; +using ::android::hardware::camera::device::V3_2::StreamConfigurationMode; +using ::android::hardware::camera::device::V3_2::StreamRotation; +using ::android::hardware::camera::device::V3_2::StreamType; +using ::android::hardware::camera::device::V3_2::DataspaceFlags; +using ::android::hardware::camera::device::V3_4::HalStreamConfiguration; +using ::android::hardware::camera::device::V3_4::ICameraDeviceSession; +using ::android::hardware::camera::common::V1_0::Status; +using ::android::hardware::camera::common::V1_0::helper::HandleImporter; +using ::android::hardware::graphics::common::V1_0::BufferUsage; +using 
::android::hardware::graphics::common::V1_0::Dataspace; +using ::android::hardware::graphics::common::V1_0::PixelFormat; +using ::android::hardware::kSynchronizedReadWrite; +using ::android::hardware::MessageQueue; +using ::android::hardware::MQDescriptorSync; +using ::android::hardware::Return; +using ::android::hardware::Void; +using ::android::hardware::hidl_vec; +using ::android::hardware::hidl_string; +using ::android::sp; +using ::android::Mutex; +using ::android::base::unique_fd; + +// TODO: put V4L2 related structs into separate header? +struct SupportedV4L2Format { + uint32_t width; + uint32_t height; + uint32_t fourcc; + // All supported frame rate for this w/h/fourcc combination + std::vector frameRates; +}; + +// A class provide access to a dequeued V4L2 frame buffer (mostly in MJPG format) +// Also contains necessary information to enqueue the buffer back to V4L2 buffer queue +class V4L2Frame : public virtual VirtualLightRefBase { +public: + V4L2Frame(uint32_t w, uint32_t h, uint32_t fourcc, int bufIdx, int fd, uint32_t dataSize); + ~V4L2Frame() override; + const uint32_t mWidth; + const uint32_t mHeight; + const uint32_t mFourcc; + const int mBufferIndex; // for later enqueue + int map(uint8_t** data, size_t* dataSize); + int unmap(); +private: + Mutex mLock; + const int mFd; // used for mmap but doesn't claim ownership + const size_t mDataSize; + uint8_t* mData = nullptr; + bool mMapped = false; +}; + +// A RAII class representing a CPU allocated YUV frame used as intermeidate buffers +// when generating output images. +class AllocatedFrame : public virtual VirtualLightRefBase { +public: + AllocatedFrame(uint32_t w, uint32_t h); // TODO: use Size? 
+ ~AllocatedFrame() override; + const uint32_t mWidth; + const uint32_t mHeight; + const uint32_t mFourcc; // Only support YU12 format for now + int allocate(YCbCrLayout* out = nullptr); + int getLayout(YCbCrLayout* out); + int getCroppedLayout(const IMapper::Rect&, YCbCrLayout* out); // return non-zero for bad input +private: + Mutex mLock; + std::vector mData; +}; + +struct Size { + uint32_t width; + uint32_t height; + + bool operator==(const Size& other) const { + return (width == other.width && height == other.height); + } +}; + +struct SizeHasher { + size_t operator()(const Size& sz) const { + size_t result = 1; + result = 31 * result + sz.width; + result = 31 * result + sz.height; + return result; + } +}; + +enum CroppingType { + HORIZONTAL = 0, + VERTICAL = 1 +}; + +struct ExternalCameraDeviceSession : public virtual RefBase { + + ExternalCameraDeviceSession(const sp&, + const std::vector& supportedFormats, + const common::V1_0::helper::CameraMetadata& chars, + unique_fd v4l2Fd); + virtual ~ExternalCameraDeviceSession(); + // Call by CameraDevice to dump active device states + void dumpState(const native_handle_t*); + // Caller must use this method to check if CameraDeviceSession ctor failed + bool isInitFailed() { return mInitFail; } + bool isClosed(); + + // Retrieve the HIDL interface, split into its own class to avoid inheritance issues when + // dealing with minor version revs and simultaneous implementation and interface inheritance + virtual sp getInterface() { + return new TrampolineSessionInterface_3_4(this); + } + + static const int kMaxProcessedStream = 2; + static const int kMaxStallStream = 1; + +protected: + + // Methods from ::android::hardware::camera::device::V3_2::ICameraDeviceSession follow + + Return constructDefaultRequestSettings( + RequestTemplate, + ICameraDeviceSession::constructDefaultRequestSettings_cb _hidl_cb); + + Return configureStreams( + const V3_2::StreamConfiguration&, + ICameraDeviceSession::configureStreams_cb); + + 
Return getCaptureRequestMetadataQueue( + ICameraDeviceSession::getCaptureRequestMetadataQueue_cb); + + Return getCaptureResultMetadataQueue( + ICameraDeviceSession::getCaptureResultMetadataQueue_cb); + + Return processCaptureRequest( + const hidl_vec&, + const hidl_vec&, + ICameraDeviceSession::processCaptureRequest_cb); + + Return flush(); + Return close(); + + Return configureStreams_3_3( + const V3_2::StreamConfiguration&, + ICameraDeviceSession::configureStreams_3_3_cb); + + Return configureStreams_3_4( + const V3_4::StreamConfiguration& requestedConfiguration, + ICameraDeviceSession::configureStreams_3_4_cb _hidl_cb); + + Return processCaptureRequest_3_4( + const hidl_vec& requests, + const hidl_vec& cachesToRemove, + ICameraDeviceSession::processCaptureRequest_3_4_cb _hidl_cb); + +protected: + struct HalStreamBuffer { + int32_t streamId; + uint64_t bufferId; + uint32_t width; + uint32_t height; + PixelFormat format; + V3_2::BufferUsageFlags usage; + buffer_handle_t* bufPtr; + int acquireFence; + bool fenceTimeout; + }; + + struct HalRequest { + uint32_t frameNumber; + common::V1_0::helper::CameraMetadata setting; + sp frameIn; + nsecs_t shutterTs; + std::vector buffers; + }; + + static std::vector sortFormats( + const std::vector&); + static CroppingType initCroppingType(const std::vector&); + bool initialize(); + Status initStatus() const; + status_t initDefaultRequests(); + status_t fillCaptureResult(common::V1_0::helper::CameraMetadata& md, nsecs_t timestamp); + Status configureStreams(const V3_2::StreamConfiguration&, V3_3::HalStreamConfiguration* out); + int configureV4l2StreamLocked(const SupportedV4L2Format& fmt); + int v4l2StreamOffLocked(); + + // TODO: change to unique_ptr for better tracking + sp dequeueV4l2FrameLocked(); // Called with mLock hold + void enqueueV4l2Frame(const sp&); + + // Check if input Stream is one of supported stream setting on this device + bool isSupported(const Stream&); + + // Validate and import request's output buffers 
and acquire fence + Status importRequest( + const CaptureRequest& request, + hidl_vec& allBufPtrs, + hidl_vec& allFences); + static void cleanupInflightFences( + hidl_vec& allFences, size_t numFences); + void cleanupBuffersLocked(int id); + void updateBufferCaches(const hidl_vec& cachesToRemove); + + Status processOneCaptureRequest(const CaptureRequest& request); + + Status processCaptureResult(HalRequest&); + Status processCaptureRequestError(HalRequest&); + void notifyShutter(uint32_t frameNumber, nsecs_t shutterTs); + void notifyError(uint32_t frameNumber, int32_t streamId, ErrorCode ec); + void invokeProcessCaptureResultCallback( + hidl_vec &results, bool tryWriteFmq); + static void freeReleaseFences(hidl_vec&); + + class OutputThread : public android::Thread { + public: + OutputThread(wp parent, CroppingType); + ~OutputThread(); + + Status allocateIntermediateBuffers( + const Size& v4lSize, const hidl_vec& streams); + Status submitRequest(const HalRequest&); + void flush(); + virtual bool threadLoop() override; + + private: + static const uint32_t FLEX_YUV_GENERIC = static_cast('F') | + static_cast('L') << 8 | static_cast('E') << 16 | + static_cast('X') << 24; + // returns FLEX_YUV_GENERIC for formats other than YV12/YU12/NV12/NV21 + static uint32_t getFourCcFromLayout(const YCbCrLayout&); + static int getCropRect( + CroppingType ct, const Size& inSize, const Size& outSize, IMapper::Rect* out); + + static const int kReqWaitTimeoutSec = 3; + + void waitForNextRequest(HalRequest* out); + int cropAndScaleLocked( + sp& in, const HalStreamBuffer& halBuf, + YCbCrLayout* out); + + int formatConvertLocked(const YCbCrLayout& in, const YCbCrLayout& out, + Size sz, uint32_t format); + + mutable std::mutex mLock; + std::condition_variable mRequestCond; + wp mParent; + CroppingType mCroppingType; + std::list mRequestList; + // V4L2 frameIn + // (MJPG decode)-> mYu12Frame + // (Scale)-> mScaledYu12Frames + // (Format convert) -> output gralloc frames + sp mYu12Frame; + 
std::unordered_map, SizeHasher> mIntermediateBuffers; + std::unordered_map, SizeHasher> mScaledYu12Frames; + YCbCrLayout mYu12FrameLayout; + }; + + // Protect (most of) HIDL interface methods from synchronized-entering + mutable Mutex mInterfaceLock; + + mutable Mutex mLock; // Protect all private members except otherwise noted + const sp mCallback; + const common::V1_0::helper::CameraMetadata mCameraCharacteristics; + unique_fd mV4l2Fd; + // device is closed either + // - closed by user + // - init failed + // - camera disconnected + bool mClosed = false; + bool mInitFail = false; + bool mFirstRequest = false; + common::V1_0::helper::CameraMetadata mLatestReqSetting; + + bool mV4l2Streaming = false; + SupportedV4L2Format mV4l2StreamingFmt; + std::vector mV4l2Buffers; + + static const int kBufferWaitTimeoutSec = 3; // TODO: handle long exposure (or not allowing) + std::mutex mV4l2BufferLock; // protect the buffer count and condition below + std::condition_variable mV4L2BufferReturned; + size_t mNumDequeuedV4l2Buffers = 0; + + const std::vector mSupportedFormats; + const CroppingType mCroppingType; + sp mOutputThread; + + // Stream ID -> Camera3Stream cache + std::unordered_map mStreamMap; + std::unordered_set mInflightFrames; + + // buffers currently circulating between HAL and camera service + // key: bufferId sent via HIDL interface + // value: imported buffer_handle_t + // Buffer will be imported during processCaptureRequest and will be freed + // when the its stream is deleted or camera device session is closed + typedef std::unordered_map CirculatingBuffers; + // Stream ID -> circulating buffers map + std::map mCirculatingBuffers; + + static HandleImporter sHandleImporter; + + /* Beginning of members not changed after initialize() */ + using RequestMetadataQueue = MessageQueue; + std::unique_ptr mRequestMetadataQueue; + using ResultMetadataQueue = MessageQueue; + std::shared_ptr mResultMetadataQueue; + + // Protect against invokeProcessCaptureResultCallback() 
+ Mutex mProcessCaptureResultLock; + + std::unordered_map mDefaultRequests; + /* End of members not changed after initialize() */ + +private: + + struct TrampolineSessionInterface_3_4 : public ICameraDeviceSession { + TrampolineSessionInterface_3_4(sp parent) : + mParent(parent) {} + + virtual Return constructDefaultRequestSettings( + V3_2::RequestTemplate type, + V3_3::ICameraDeviceSession::constructDefaultRequestSettings_cb _hidl_cb) override { + return mParent->constructDefaultRequestSettings(type, _hidl_cb); + } + + virtual Return configureStreams( + const V3_2::StreamConfiguration& requestedConfiguration, + V3_3::ICameraDeviceSession::configureStreams_cb _hidl_cb) override { + return mParent->configureStreams(requestedConfiguration, _hidl_cb); + } + + virtual Return processCaptureRequest(const hidl_vec& requests, + const hidl_vec& cachesToRemove, + V3_3::ICameraDeviceSession::processCaptureRequest_cb _hidl_cb) override { + return mParent->processCaptureRequest(requests, cachesToRemove, _hidl_cb); + } + + virtual Return getCaptureRequestMetadataQueue( + V3_3::ICameraDeviceSession::getCaptureRequestMetadataQueue_cb _hidl_cb) override { + return mParent->getCaptureRequestMetadataQueue(_hidl_cb); + } + + virtual Return getCaptureResultMetadataQueue( + V3_3::ICameraDeviceSession::getCaptureResultMetadataQueue_cb _hidl_cb) override { + return mParent->getCaptureResultMetadataQueue(_hidl_cb); + } + + virtual Return flush() override { + return mParent->flush(); + } + + virtual Return close() override { + return mParent->close(); + } + + virtual Return configureStreams_3_3( + const V3_2::StreamConfiguration& requestedConfiguration, + configureStreams_3_3_cb _hidl_cb) override { + return mParent->configureStreams_3_3(requestedConfiguration, _hidl_cb); + } + + virtual Return configureStreams_3_4( + const V3_4::StreamConfiguration& requestedConfiguration, + configureStreams_3_4_cb _hidl_cb) override { + return mParent->configureStreams_3_4(requestedConfiguration, 
_hidl_cb); + } + + virtual Return processCaptureRequest_3_4(const hidl_vec& requests, + const hidl_vec& cachesToRemove, + ICameraDeviceSession::processCaptureRequest_3_4_cb _hidl_cb) override { + return mParent->processCaptureRequest_3_4(requests, cachesToRemove, _hidl_cb); + } + + private: + sp mParent; + }; +}; + +} // namespace implementation +} // namespace V3_4 +} // namespace device +} // namespace camera +} // namespace hardware +} // namespace android + +#endif // ANDROID_HARDWARE_CAMERA_DEVICE_V3_4_EXTCAMERADEVICE3SESSION_H diff --git a/camera/device/3.4/default/include/ext_device_v3_4_impl/ExternalCameraDevice_3_4.h b/camera/device/3.4/default/include/ext_device_v3_4_impl/ExternalCameraDevice_3_4.h new file mode 100644 index 0000000000..606375e269 --- /dev/null +++ b/camera/device/3.4/default/include/ext_device_v3_4_impl/ExternalCameraDevice_3_4.h @@ -0,0 +1,110 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef ANDROID_HARDWARE_CAMERA_DEVICE_V3_4_EXTCAMERADEVICE_H +#define ANDROID_HARDWARE_CAMERA_DEVICE_V3_4_EXTCAMERADEVICE_H + +#include "utils/Mutex.h" +#include "CameraMetadata.h" + +#include +#include +#include +#include "ExternalCameraDeviceSession.h" + +namespace android { +namespace hardware { +namespace camera { +namespace device { +namespace V3_4 { +namespace implementation { + +using namespace ::android::hardware::camera::device; +using ::android::hardware::camera::device::V3_2::ICameraDevice; +using ::android::hardware::camera::device::V3_2::ICameraDeviceCallback; +using ::android::hardware::camera::common::V1_0::CameraResourceCost; +using ::android::hardware::camera::common::V1_0::TorchMode; +using ::android::hardware::camera::common::V1_0::Status; +using ::android::hardware::Return; +using ::android::hardware::Void; +using ::android::hardware::hidl_vec; +using ::android::hardware::hidl_string; +using ::android::sp; + +/* + * The camera device HAL implementation is opened lazily (via the open call) + */ +struct ExternalCameraDevice : public ICameraDevice { + + // Called by external camera provider HAL. + // Provider HAL must ensure the uniqueness of CameraDevice object per cameraId, or there could + // be multiple CameraDevice trying to access the same physical camera. Also, provider will have + // to keep track of all CameraDevice objects in order to notify CameraDevice when the underlying + // camera is detached. + ExternalCameraDevice(const std::string& cameraId); + ~ExternalCameraDevice(); + + // Caller must use this method to check if CameraDevice ctor failed + bool isInitFailed(); + + /* Methods from ::android::hardware::camera::device::V3_2::ICameraDevice follow. 
*/ + // The following method can be called without opening the actual camera device + Return getResourceCost(getResourceCost_cb _hidl_cb) override; + + Return getCameraCharacteristics(getCameraCharacteristics_cb _hidl_cb) override; + + Return setTorchMode(TorchMode) override; + + // Open the device HAL and also return a default capture session + Return open(const sp&, open_cb) override; + + // Forward the dump call to the opened session, or do nothing + Return dumpState(const ::android::hardware::hidl_handle&) override; + /* End of Methods from ::android::hardware::camera::device::V3_2::ICameraDevice */ + +protected: + void getFrameRateList(int fd, SupportedV4L2Format* format); + // Init supported w/h/format/fps in mSupportedFormats. Caller still owns fd + void initSupportedFormatsLocked(int fd); + + status_t initCameraCharacteristics(); + // Init non-device dependent keys + status_t initDefaultCharsKeys(::android::hardware::camera::common::V1_0::helper::CameraMetadata*); + // Init camera control chars keys. Caller still owns fd + status_t initCameraControlsCharsKeys(int fd, + ::android::hardware::camera::common::V1_0::helper::CameraMetadata*); + // Init camera output configuration related keys. 
Caller still owns fd + status_t initOutputCharsKeys(int fd, + ::android::hardware::camera::common::V1_0::helper::CameraMetadata*); + + Mutex mLock; + bool mInitFailed = false; + std::string mCameraId; + std::vector mSupportedFormats; + + wp mSession = nullptr; + + ::android::hardware::camera::common::V1_0::helper::CameraMetadata mCameraCharacteristics; +}; + +} // namespace implementation +} // namespace V3_4 +} // namespace device +} // namespace camera +} // namespace hardware +} // namespace android + +#endif // ANDROID_HARDWARE_CAMERA_DEVICE_V3_4_EXTCAMERADEVICE_H diff --git a/camera/provider/2.4/default/Android.bp b/camera/provider/2.4/default/Android.bp index 99c3e92810..1f46b89625 100644 --- a/camera/provider/2.4/default/Android.bp +++ b/camera/provider/2.4/default/Android.bp @@ -3,7 +3,8 @@ cc_library_shared { defaults: ["hidl_defaults"], proprietary: true, relative_install_path: "hw", - srcs: ["CameraProvider.cpp"], + srcs: ["CameraProvider.cpp", + "ExternalCameraProvider.cpp"], shared_libs: [ "libhidlbase", "libhidltransport", @@ -17,6 +18,7 @@ cc_library_shared { "camera.device@3.2-impl", "camera.device@3.3-impl", "camera.device@3.4-impl", + "camera.device@3.4-external-impl", "android.hardware.camera.provider@2.4", "android.hardware.camera.common@1.0", "android.hardware.graphics.mapper@2.0", @@ -28,6 +30,7 @@ cc_library_shared { ], header_libs: [ "camera.device@3.4-impl_headers", + "camera.device@3.4-external-impl_headers" ], static_libs: [ "android.hardware.camera.common@1.0-helper", @@ -56,3 +59,25 @@ cc_binary { "android.hardware.camera.common@1.0", ], } + +cc_binary { + name: "android.hardware.camera.provider@2.4-external-service", + defaults: ["hidl_defaults"], + proprietary: true, + relative_install_path: "hw", + srcs: ["external-service.cpp"], + compile_multilib: "32", + init_rc: ["android.hardware.camera.provider@2.4-external-service.rc"], + shared_libs: [ + "libhidlbase", + "libhidltransport", + "libbinder", + "liblog", + "libutils", + 
"android.hardware.camera.device@1.0", + "android.hardware.camera.device@3.2", + "android.hardware.camera.device@3.3", + "android.hardware.camera.provider@2.4", + "android.hardware.camera.common@1.0", + ], +} diff --git a/camera/provider/2.4/default/CameraProvider.cpp b/camera/provider/2.4/default/CameraProvider.cpp index e9588a770c..2fb920cc4f 100644 --- a/camera/provider/2.4/default/CameraProvider.cpp +++ b/camera/provider/2.4/default/CameraProvider.cpp @@ -19,6 +19,7 @@ #include #include "CameraProvider.h" +#include "ExternalCameraProvider.h" #include "CameraDevice_1_0.h" #include "CameraDevice_3_3.h" #include "CameraDevice_3_4.h" @@ -36,6 +37,7 @@ namespace implementation { namespace { const char *kLegacyProviderName = "legacy/0"; +const char *kExternalProviderName = "external/0"; // "device@/legacy/" const std::regex kDeviceNameRE("device@([0-9]+\\.[0-9]+)/legacy/(.+)"); const char *kHAL3_2 = "3.2"; @@ -571,20 +573,24 @@ Return CameraProvider::getCameraDeviceInterface_V3_x( } ICameraProvider* HIDL_FETCH_ICameraProvider(const char* name) { - if (strcmp(name, kLegacyProviderName) != 0) { - return nullptr; + if (strcmp(name, kLegacyProviderName) == 0) { + CameraProvider* provider = new CameraProvider(); + if (provider == nullptr) { + ALOGE("%s: cannot allocate camera provider!", __FUNCTION__); + return nullptr; + } + if (provider->isInitFailed()) { + ALOGE("%s: camera provider init failed!", __FUNCTION__); + delete provider; + return nullptr; + } + return provider; + } else if (strcmp(name, kExternalProviderName) == 0) { + ExternalCameraProvider* provider = new ExternalCameraProvider(); + return provider; } - CameraProvider* provider = new CameraProvider(); - if (provider == nullptr) { - ALOGE("%s: cannot allocate camera provider!", __FUNCTION__); - return nullptr; - } - if (provider->isInitFailed()) { - ALOGE("%s: camera provider init failed!", __FUNCTION__); - delete provider; - return nullptr; - } - return provider; + ALOGE("%s: unknown instance name: %s", 
__FUNCTION__, name); + return nullptr; } } // namespace implementation diff --git a/camera/provider/2.4/default/ExternalCameraProvider.cpp b/camera/provider/2.4/default/ExternalCameraProvider.cpp new file mode 100644 index 0000000000..bb5c3369c8 --- /dev/null +++ b/camera/provider/2.4/default/ExternalCameraProvider.cpp @@ -0,0 +1,276 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "CamPvdr@2.4-external" +//#define LOG_NDEBUG 0 +#include + +#include +#include +#include +#include +#include "ExternalCameraProvider.h" +#include "ExternalCameraDevice_3_4.h" + +namespace android { +namespace hardware { +namespace camera { +namespace provider { +namespace V2_4 { +namespace implementation { + +namespace { +// "device@/external/" +const std::regex kDeviceNameRE("device@([0-9]+\\.[0-9]+)/external/(.+)"); +const int kMaxDevicePathLen = 256; +const char* kDevicePath = "/dev/"; + +bool matchDeviceName(const hidl_string& deviceName, std::string* deviceVersion, + std::string* cameraId) { + std::string deviceNameStd(deviceName.c_str()); + std::smatch sm; + if (std::regex_match(deviceNameStd, sm, kDeviceNameRE)) { + if (deviceVersion != nullptr) { + *deviceVersion = sm[1]; + } + if (cameraId != nullptr) { + *cameraId = sm[2]; + } + return true; + } + return false; +} + +} // anonymous namespace + +ExternalCameraProvider::ExternalCameraProvider() : mHotPlugThread(this) { + 
mHotPlugThread.run("ExtCamHotPlug", PRIORITY_BACKGROUND); +} + +ExternalCameraProvider::~ExternalCameraProvider() { + mHotPlugThread.requestExit(); +} + + +Return ExternalCameraProvider::setCallback( + const sp& callback) { + Mutex::Autolock _l(mLock); + mCallbacks = callback; + return Status::OK; +} + +Return ExternalCameraProvider::getVendorTags(getVendorTags_cb _hidl_cb) { + // No vendor tag support for USB camera + hidl_vec zeroSections; + _hidl_cb(Status::OK, zeroSections); + return Void(); +} + +Return ExternalCameraProvider::getCameraIdList(getCameraIdList_cb _hidl_cb) { + std::vector deviceNameList; + for (auto const& kvPair : mCameraStatusMap) { + if (kvPair.second == CameraDeviceStatus::PRESENT) { + deviceNameList.push_back(kvPair.first); + } + } + hidl_vec hidlDeviceNameList(deviceNameList); + ALOGV("ExtCam: number of cameras is %zu", deviceNameList.size()); + _hidl_cb(Status::OK, hidlDeviceNameList); + return Void(); +} + +Return ExternalCameraProvider::isSetTorchModeSupported( + isSetTorchModeSupported_cb _hidl_cb) { + // No torch mode support for USB camera + _hidl_cb (Status::OK, false); + return Void(); +} + +Return ExternalCameraProvider::getCameraDeviceInterface_V1_x( + const hidl_string&, + getCameraDeviceInterface_V1_x_cb _hidl_cb) { + // External Camera HAL does not support HAL1 + _hidl_cb(Status::OPERATION_NOT_SUPPORTED, nullptr); + return Void(); +} + +Return ExternalCameraProvider::getCameraDeviceInterface_V3_x( + const hidl_string& cameraDeviceName, + getCameraDeviceInterface_V3_x_cb _hidl_cb) { + + std::string cameraId, deviceVersion; + bool match = matchDeviceName(cameraDeviceName, &deviceVersion, &cameraId); + if (!match) { + _hidl_cb(Status::ILLEGAL_ARGUMENT, nullptr); + return Void(); + } + + if (mCameraStatusMap.count(cameraDeviceName) == 0 || + mCameraStatusMap[cameraDeviceName] != CameraDeviceStatus::PRESENT) { + _hidl_cb(Status::ILLEGAL_ARGUMENT, nullptr); + return Void(); + } + + ALOGV("Constructing v3.4 external camera device"); 
+ sp device; + sp deviceImpl = + new device::V3_4::implementation::ExternalCameraDevice( + cameraId); + if (deviceImpl == nullptr || deviceImpl->isInitFailed()) { + ALOGE("%s: camera device %s init failed!", __FUNCTION__, cameraId.c_str()); + device = nullptr; + _hidl_cb(Status::INTERNAL_ERROR, nullptr); + return Void(); + } + device = deviceImpl; + + _hidl_cb (Status::OK, device); + + return Void(); +} + +void ExternalCameraProvider::addExternalCamera(const char* devName) { + ALOGE("ExtCam: adding %s to External Camera HAL!", devName); + Mutex::Autolock _l(mLock); + std::string deviceName = std::string("device@3.4/external/") + devName; + mCameraStatusMap[deviceName] = CameraDeviceStatus::PRESENT; + if (mCallbacks != nullptr) { + mCallbacks->cameraDeviceStatusChange(deviceName, CameraDeviceStatus::PRESENT); + } +} + +void ExternalCameraProvider::deviceAdded(const char* devName) { + int fd = -1; + if ((fd = ::open(devName, O_RDWR)) < 0) { + ALOGE("%s open v4l2 device %s failed:%s", __FUNCTION__, devName, strerror(errno)); + return; + } + + do { + struct v4l2_capability capability; + int ret = ioctl(fd, VIDIOC_QUERYCAP, &capability); + if (ret < 0) { + ALOGE("%s v4l2 QUERYCAP %s failed", __FUNCTION__, devName); + break; + } + + if (!(capability.device_caps & V4L2_CAP_VIDEO_CAPTURE)) { + ALOGW("%s device %s does not support VIDEO_CAPTURE", __FUNCTION__, devName); + break; + } + + addExternalCamera(devName); + } while (0); + + close(fd); + return; +} + +void ExternalCameraProvider::deviceRemoved(const char* devName) { + Mutex::Autolock _l(mLock); + std::string deviceName = std::string("device@3.4/external/") + devName; + if (mCameraStatusMap.find(deviceName) != mCameraStatusMap.end()) { + mCameraStatusMap.erase(deviceName); + if (mCallbacks != nullptr) { + mCallbacks->cameraDeviceStatusChange(deviceName, CameraDeviceStatus::NOT_PRESENT); + } + } else { + ALOGE("%s: cannot find camera device %s", __FUNCTION__, devName); + } +} + 
+ExternalCameraProvider::HotplugThread::HotplugThread(ExternalCameraProvider* parent) : + Thread(/*canCallJava*/false), mParent(parent) {} + +ExternalCameraProvider::HotplugThread::~HotplugThread() {} + +bool ExternalCameraProvider::HotplugThread::threadLoop() { + // Find existing /dev/video* devices + DIR* devdir = opendir(kDevicePath); + if(devdir == 0) { + ALOGE("%s: cannot open %s! Exiting threadloop", __FUNCTION__, kDevicePath); + return false; + } + + struct dirent* de; + // This list is device dependent. TODO: b/72261897 allow setting it from setprop/device boot + std::string internalDevices = "0,1"; + while ((de = readdir(devdir)) != 0) { + // Find external v4l devices that's existing before we start watching and add them + if (!strncmp("video", de->d_name, 5)) { + // TODO: This might reject some valid devices. Ex: internal is 33 and a device named 3 + // is added. + if (internalDevices.find(de->d_name + 5) == std::string::npos) { + ALOGV("Non-internal v4l device %s found", de->d_name); + char v4l2DevicePath[kMaxDevicePathLen]; + snprintf(v4l2DevicePath, kMaxDevicePathLen, + "%s%s", kDevicePath, de->d_name); + mParent->deviceAdded(v4l2DevicePath); + } + } + } + closedir(devdir); + + // Watch new video devices + mINotifyFD = inotify_init(); + if (mINotifyFD < 0) { + ALOGE("%s: inotify init failed! Exiting threadloop", __FUNCTION__); + return true; + } + + mWd = inotify_add_watch(mINotifyFD, kDevicePath, IN_CREATE | IN_DELETE); + if (mWd < 0) { + ALOGE("%s: inotify add watch failed! 
Exiting threadloop", __FUNCTION__); + return true; + } + + ALOGI("%s start monitoring new V4L2 devices", __FUNCTION__); + + bool done = false; + char eventBuf[512]; + while (!done) { + int offset = 0; + int ret = read(mINotifyFD, eventBuf, sizeof(eventBuf)); + if (ret >= (int)sizeof(struct inotify_event)) { + while (offset < ret) { + struct inotify_event* event = (struct inotify_event*)&eventBuf[offset]; + if (event->wd == mWd) { + if (!strncmp("video", event->name, 5)) { + char v4l2DevicePath[kMaxDevicePathLen]; + snprintf(v4l2DevicePath, kMaxDevicePathLen, + "%s%s", kDevicePath, event->name); + if (event->mask & IN_CREATE) { + mParent->deviceAdded(v4l2DevicePath); + } + if (event->mask & IN_DELETE) { + mParent->deviceRemoved(v4l2DevicePath); + } + } + } + offset += sizeof(struct inotify_event) + event->len; + } + } + } + + return true; +} + +} // namespace implementation +} // namespace V2_4 +} // namespace provider +} // namespace camera +} // namespace hardware +} // namespace android diff --git a/camera/provider/2.4/default/ExternalCameraProvider.h b/camera/provider/2.4/default/ExternalCameraProvider.h new file mode 100644 index 0000000000..c7ed99e8ea --- /dev/null +++ b/camera/provider/2.4/default/ExternalCameraProvider.h @@ -0,0 +1,102 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef ANDROID_HARDWARE_CAMERA_PROVIDER_V2_4_EXTCAMERAPROVIDER_H +#define ANDROID_HARDWARE_CAMERA_PROVIDER_V2_4_EXTCAMERAPROVIDER_H + +#include +#include "utils/Mutex.h" +#include "utils/Thread.h" +#include +#include +#include + +namespace android { +namespace hardware { +namespace camera { +namespace provider { +namespace V2_4 { +namespace implementation { + +using ::android::hardware::camera::common::V1_0::CameraDeviceStatus; +using ::android::hardware::camera::common::V1_0::Status; +using ::android::hardware::camera::common::V1_0::VendorTagSection; +using ::android::hardware::camera::provider::V2_4::ICameraProvider; +using ::android::hardware::camera::provider::V2_4::ICameraProviderCallback; +using ::android::hardware::Return; +using ::android::hardware::Void; +using ::android::hardware::hidl_vec; +using ::android::hardware::hidl_string; +using ::android::sp; +using ::android::Mutex; + +struct ExternalCameraProvider : public ICameraProvider { + ExternalCameraProvider(); + ~ExternalCameraProvider(); + + // Methods from ::android::hardware::camera::provider::V2_4::ICameraProvider follow. 
+ Return setCallback(const sp& callback) override; + + Return getVendorTags(getVendorTags_cb _hidl_cb) override; + + Return getCameraIdList(getCameraIdList_cb _hidl_cb) override; + + Return isSetTorchModeSupported(isSetTorchModeSupported_cb _hidl_cb) override; + + Return getCameraDeviceInterface_V1_x( + const hidl_string&, + getCameraDeviceInterface_V1_x_cb) override; + Return getCameraDeviceInterface_V3_x( + const hidl_string&, + getCameraDeviceInterface_V3_x_cb) override; + +private: + + void addExternalCamera(const char* devName); + + void deviceAdded(const char* devName); + + void deviceRemoved(const char* devName); + + class HotplugThread : public android::Thread { + public: + HotplugThread(ExternalCameraProvider* parent); + ~HotplugThread(); + + virtual bool threadLoop() override; + + private: + ExternalCameraProvider* mParent = nullptr; + + int mINotifyFD = -1; + int mWd = -1; + } mHotPlugThread; + + Mutex mLock; + sp mCallbacks = nullptr; + std::unordered_map mCameraStatusMap; // camera id -> status +}; + + + +} // namespace implementation +} // namespace V2_4 +} // namespace provider +} // namespace camera +} // namespace hardware +} // namespace android + +#endif // ANDROID_HARDWARE_CAMERA_PROVIDER_V2_4_EXTCAMERAPROVIDER_H diff --git a/camera/provider/2.4/default/android.hardware.camera.provider@2.4-external-service.rc b/camera/provider/2.4/default/android.hardware.camera.provider@2.4-external-service.rc new file mode 100644 index 0000000000..acdb2007a5 --- /dev/null +++ b/camera/provider/2.4/default/android.hardware.camera.provider@2.4-external-service.rc @@ -0,0 +1,7 @@ +service vendor.camera-provider-2-4-ext /vendor/bin/hw/android.hardware.camera.provider@2.4-external-service + class hal + user cameraserver + group audio camera input drmrpc usb + ioprio rt 4 + capabilities SYS_NICE + writepid /dev/cpuset/camera-daemon/tasks /dev/stune/top-app/tasks diff --git a/camera/provider/2.4/default/external-service.cpp 
b/camera/provider/2.4/default/external-service.cpp new file mode 100644 index 0000000000..f91aa596c4 --- /dev/null +++ b/camera/provider/2.4/default/external-service.cpp @@ -0,0 +1,34 @@ +/* + * Copyright 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "android.hardware.camera.provider@2.4-external-service" + +#include +#include + +#include + +using android::hardware::camera::provider::V2_4::ICameraProvider; +using android::hardware::defaultPassthroughServiceImplementation; + +int main() +{ + ALOGI("External camera provider service is starting."); + // The camera HAL may communicate to other vendor components via + // /dev/vndbinder + android::ProcessState::initWithDriver("/dev/vndbinder"); + return defaultPassthroughServiceImplementation("external/0", /*maxThreads*/ 6); +}