camera: Add physical camera metadata in CaptureResult

- When a physical stream of a logical multi-camera is requested, the HAL needs
to generate capture result metadata for the corresponding physical cameras.
- If no physical stream is requested for the logical multi-camera, no
capture result metadata is required for the physical cameras.
- Batch physical and logical metadata within one capture_result call, as
sketched below.

Test: testLogicalCamera CTS test
Bug: 64691172
Change-Id: Id040620f3f0c350711d49341ab31ab88ecd94888
Author: Shuzhen Wang
Date:   2017-12-29 16:17:09 -08:00
Parent: 0d29cf99f7
Commit: 39cf8fd9fe
7 changed files with 451 additions and 52 deletions
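A minimal HAL-side sketch of the batching described above (illustrative only: the helper name, camera ids, and metadata pointers are hypothetical, and android.request.partialResultCount is assumed to be 1). It returns the logical metadata and the per-physical-camera metadata in a single process_capture_result() call, using the physcam_* fields of camera3_capture_result that this change consumes:

#include <hardware/camera3.h>

// Hypothetical helper: return the final result for one frame, carrying both the
// logical metadata and the metadata of two requested physical cameras ("2", "3").
void returnFinalResult(const camera3_callback_ops_t* ops,
                       uint32_t frameNumber,
                       const camera_metadata_t* logicalMeta,
                       const camera_metadata_t* physMeta2,
                       const camera_metadata_t* physMeta3) {
    const char* physcamIds[] = {"2", "3"};  // hypothetical physical camera ids
    const camera_metadata_t* physcamMeta[] = {physMeta2, physMeta3};

    camera3_capture_result_t result = {};
    result.frame_number = frameNumber;
    result.result = logicalMeta;          // logical (top-level) metadata
    result.partial_result = 1;            // final result (== partialResultCount)
    result.num_output_buffers = 0;        // buffers may be returned separately
    result.num_physcam_metadata = 2;      // one entry per requested physical camera
    result.physcam_ids = physcamIds;
    result.physcam_metadata = physcamMeta;

    // Physical and logical metadata are batched in one callback invocation.
    ops->process_capture_result(ops, &result);
}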


@@ -1198,26 +1198,19 @@ Return<void> CameraDeviceSession::close() {
return Void();
}
/**
* Static callback forwarding methods from HAL to instance
*/
void CameraDeviceSession::sProcessCaptureResult(
const camera3_callback_ops *cb,
const camera3_capture_result *hal_result) {
CameraDeviceSession *d =
const_cast<CameraDeviceSession*>(static_cast<const CameraDeviceSession*>(cb));
void CameraDeviceSession::constructCaptureResult(CaptureResult& result,
const camera3_capture_result *hal_result) {
uint32_t frameNumber = hal_result->frame_number;
bool hasInputBuf = (hal_result->input_buffer != nullptr);
size_t numOutputBufs = hal_result->num_output_buffers;
size_t numBufs = numOutputBufs + (hasInputBuf ? 1 : 0);
if (numBufs > 0) {
Mutex::Autolock _l(d->mInflightLock);
Mutex::Autolock _l(mInflightLock);
if (hasInputBuf) {
int streamId = static_cast<Camera3Stream*>(hal_result->input_buffer->stream)->mId;
// validate if buffer is inflight
auto key = std::make_pair(streamId, frameNumber);
if (d->mInflightBuffers.count(key) != 1) {
if (mInflightBuffers.count(key) != 1) {
ALOGE("%s: input buffer for stream %d frame %d is not inflight!",
__FUNCTION__, streamId, frameNumber);
return;
@@ -1228,7 +1221,7 @@ void CameraDeviceSession::sProcessCaptureResult(
int streamId = static_cast<Camera3Stream*>(hal_result->output_buffers[i].stream)->mId;
// validate if buffer is inflight
auto key = std::make_pair(streamId, frameNumber);
if (d->mInflightBuffers.count(key) != 1) {
if (mInflightBuffers.count(key) != 1) {
ALOGE("%s: output buffer for stream %d frame %d is not inflight!",
__FUNCTION__, streamId, frameNumber);
return;
@@ -1237,64 +1230,63 @@ void CameraDeviceSession::sProcessCaptureResult(
}
// We don't need to validate/import fences here since we will be passing them to camera service
// within the scope of this function
CaptureResult result;
result.frameNumber = frameNumber;
result.fmqResultSize = 0;
result.partialResult = hal_result->partial_result;
convertToHidl(hal_result->result, &result.result);
if (nullptr != hal_result->result) {
bool resultOverriden = false;
Mutex::Autolock _l(d->mInflightLock);
Mutex::Autolock _l(mInflightLock);
// Derive some new keys for backward compatibility
if (d->mDerivePostRawSensKey) {
if (mDerivePostRawSensKey) {
camera_metadata_ro_entry entry;
if (find_camera_metadata_ro_entry(hal_result->result,
ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &entry) == 0) {
d->mInflightRawBoostPresent[frameNumber] = true;
mInflightRawBoostPresent[frameNumber] = true;
} else {
auto entry = d->mInflightRawBoostPresent.find(frameNumber);
if (d->mInflightRawBoostPresent.end() == entry) {
d->mInflightRawBoostPresent[frameNumber] = false;
auto entry = mInflightRawBoostPresent.find(frameNumber);
if (mInflightRawBoostPresent.end() == entry) {
mInflightRawBoostPresent[frameNumber] = false;
}
}
if ((hal_result->partial_result == d->mNumPartialResults)) {
if (!d->mInflightRawBoostPresent[frameNumber]) {
if ((hal_result->partial_result == mNumPartialResults)) {
if (!mInflightRawBoostPresent[frameNumber]) {
if (!resultOverriden) {
d->mOverridenResult.clear();
d->mOverridenResult.append(hal_result->result);
mOverridenResult.clear();
mOverridenResult.append(hal_result->result);
resultOverriden = true;
}
int32_t defaultBoost[1] = {100};
d->mOverridenResult.update(
mOverridenResult.update(
ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
defaultBoost, 1);
}
d->mInflightRawBoostPresent.erase(frameNumber);
mInflightRawBoostPresent.erase(frameNumber);
}
}
auto entry = d->mInflightAETriggerOverrides.find(frameNumber);
if (d->mInflightAETriggerOverrides.end() != entry) {
auto entry = mInflightAETriggerOverrides.find(frameNumber);
if (mInflightAETriggerOverrides.end() != entry) {
if (!resultOverriden) {
d->mOverridenResult.clear();
d->mOverridenResult.append(hal_result->result);
mOverridenResult.clear();
mOverridenResult.append(hal_result->result);
resultOverriden = true;
}
d->overrideResultForPrecaptureCancelLocked(entry->second,
&d->mOverridenResult);
if (hal_result->partial_result == d->mNumPartialResults) {
d->mInflightAETriggerOverrides.erase(frameNumber);
overrideResultForPrecaptureCancelLocked(entry->second,
&mOverridenResult);
if (hal_result->partial_result == mNumPartialResults) {
mInflightAETriggerOverrides.erase(frameNumber);
}
}
if (resultOverriden) {
const camera_metadata_t *metaBuffer =
d->mOverridenResult.getAndLock();
mOverridenResult.getAndLock();
convertToHidl(metaBuffer, &result.result);
d->mOverridenResult.unlock(metaBuffer);
mOverridenResult.unlock(metaBuffer);
}
}
if (hasInputBuf) {
@@ -1335,24 +1327,38 @@ void CameraDeviceSession::sProcessCaptureResult(
// configure_streams right after the processCaptureResult call so we need to finish
// updating inflight queues first
if (numBufs > 0) {
Mutex::Autolock _l(d->mInflightLock);
Mutex::Autolock _l(mInflightLock);
if (hasInputBuf) {
int streamId = static_cast<Camera3Stream*>(hal_result->input_buffer->stream)->mId;
auto key = std::make_pair(streamId, frameNumber);
d->mInflightBuffers.erase(key);
mInflightBuffers.erase(key);
}
for (size_t i = 0; i < numOutputBufs; i++) {
int streamId = static_cast<Camera3Stream*>(hal_result->output_buffers[i].stream)->mId;
auto key = std::make_pair(streamId, frameNumber);
d->mInflightBuffers.erase(key);
mInflightBuffers.erase(key);
}
if (d->mInflightBuffers.empty()) {
if (mInflightBuffers.empty()) {
ALOGV("%s: inflight buffer queue is now empty!", __FUNCTION__);
}
}
}
/**
* Static callback forwarding methods from HAL to instance
*/
void CameraDeviceSession::sProcessCaptureResult(
const camera3_callback_ops *cb,
const camera3_capture_result *hal_result) {
CameraDeviceSession *d =
const_cast<CameraDeviceSession*>(static_cast<const CameraDeviceSession*>(cb));
CaptureResult result;
d->constructCaptureResult(result, hal_result);
d->mResultBatcher.processCaptureResult(result);
}


@@ -190,7 +190,7 @@ protected:
void notify(NotifyMsg& msg);
void processCaptureResult(CaptureResult& result);
private:
protected:
struct InflightBatch {
// Protect access to entire struct. Acquire this lock before read/write any data or
// calling any methods. processCaptureResult and notify will compete for this lock
@@ -235,7 +235,6 @@ protected:
bool mRemoved = false;
};
static const int NOT_BATCHED = -1;
// Get the batch index and pointer to InflightBatch (nullptr if the frame is not batched)
// Caller must acquire the InflightBatch::mLock before accessing the InflightBatch
@@ -245,6 +244,16 @@ protected:
// This method will hold ResultBatcher::mLock briefly
std::pair<int, std::shared_ptr<InflightBatch>> getBatch(uint32_t frameNumber);
static const int NOT_BATCHED = -1;
// move/push function avoids "hidl_handle& operator=(hidl_handle&)", which clones native
// handle
void moveStreamBuffer(StreamBuffer&& src, StreamBuffer& dst);
void pushStreamBuffer(StreamBuffer&& src, std::vector<StreamBuffer>& dst);
void sendBatchMetadataLocked(
std::shared_ptr<InflightBatch> batch, uint32_t lastPartialResultIdx);
// Check if the first batch in mInflightBatches is ready to be removed, and remove it if so
// This method will hold ResultBatcher::mLock briefly
void checkAndRemoveFirstBatch();
@@ -257,9 +266,7 @@ protected:
// send buffers for specified streams
void sendBatchBuffersLocked(
std::shared_ptr<InflightBatch> batch, const std::vector<int>& streams);
void sendBatchMetadataLocked(
std::shared_ptr<InflightBatch> batch, uint32_t lastPartialResultIdx);
// End of sendXXXX methods
// End of sendXXXX methods
// helper methods
void freeReleaseFences(hidl_vec<CaptureResult>&);
@@ -267,11 +274,6 @@ protected:
void processOneCaptureResult(CaptureResult& result);
void invokeProcessCaptureResultCallback(hidl_vec<CaptureResult> &results, bool tryWriteFmq);
// move/push function avoids "hidl_handle& operator=(hidl_handle&)", which clones native
// handle
void moveStreamBuffer(StreamBuffer&& src, StreamBuffer& dst);
void pushStreamBuffer(StreamBuffer&& src, std::vector<StreamBuffer>& dst);
// Protect access to mInflightBatches, mNumPartialResults and mStreamsToBatch
// processCaptureRequest, processCaptureResult, notify will compete for this lock
// Do NOT issue HIDL IPCs while holding this lock (except when HAL reports error)
@@ -325,6 +327,8 @@ protected:
static callbacks_process_capture_result_t sProcessCaptureResult;
static callbacks_notify_t sNotify;
void constructCaptureResult(CaptureResult& result,
const camera3_capture_result *hal_result);
private:
struct TrampolineSessionInterface_3_2 : public ICameraDeviceSession {


@@ -8,6 +8,7 @@ hidl_interface {
},
srcs: [
"types.hal",
"ICameraDeviceCallback.hal",
"ICameraDeviceSession.hal",
],
interfaces: [
@@ -19,8 +20,10 @@ hidl_interface {
],
types: [
"CaptureRequest",
"CaptureResult",
"HalStream",
"HalStreamConfiguration",
"PhysicalCameraMetadata",
"PhysicalCameraSetting",
"RequestTemplate",
"Stream",


@@ -0,0 +1,43 @@
/*
* Copyright (C) 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package android.hardware.camera.device@3.4;
import @3.2::ICameraDeviceCallback;
/**
*
* Callback methods for the HAL to call into the framework.
*
* These methods are used to return metadata and image buffers for completed
* or failed captures, and to notify the framework of asynchronous events such
* as errors.
*
* The framework must not call back into the HAL from within these callbacks,
* and these calls must not block for extended periods.
*
*/
interface ICameraDeviceCallback extends @3.2::ICameraDeviceCallback {
/**
* processCaptureResult_3_4:
*
* Identical to @3.2::ICameraDeviceCallback.processCaptureResult, except
* that it takes a list of @3.4::CaptureResult, which may contain
* physical camera metadata for a logical multi-camera.
*
*/
processCaptureResult_3_4(vec<@3.4::CaptureResult> results);
};
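For reference, a minimal sketch of a receiving-side implementation of this callback (the class name and the no-op 3.2 stubs are hypothetical; only the standard HIDL C++ binding of the interface above is assumed):

#define LOG_TAG "PhysicalMetadataExample"
#include <log/log.h>
#include <android/hardware/camera/device/3.4/ICameraDeviceCallback.h>

using ::android::hardware::Return;
using ::android::hardware::Void;
using ::android::hardware::hidl_vec;
namespace V3_2 = ::android::hardware::camera::device::V3_2;
namespace V3_4 = ::android::hardware::camera::device::V3_4;

struct TestDeviceCallback : public V3_4::ICameraDeviceCallback {
    // Inherited 3.2 entry points, stubbed out for brevity.
    Return<void> processCaptureResult(
            const hidl_vec<V3_2::CaptureResult>& /*results*/) override {
        return Void();
    }
    Return<void> notify(const hidl_vec<V3_2::NotifyMsg>& /*msgs*/) override {
        return Void();
    }

    // New in 3.4: each result may carry per-physical-camera metadata.
    Return<void> processCaptureResult_3_4(
            const hidl_vec<V3_4::CaptureResult>& results) override {
        for (const auto& result : results) {
            // The logical camera's metadata and buffers are in result.v3_2,
            // exactly as they would be with the 3.2 callback.
            for (const auto& phys : result.physicalCameraMetadata) {
                // One entry per physical camera that had a stream requested.
                // If phys.fmqMetadataSize != 0, the metadata must be read from
                // the result metadata queue instead of phys.metadata.
                ALOGV("frame %u: physical camera %s returned metadata",
                      result.v3_2.frameNumber, phys.physicalCameraId.c_str());
            }
        }
        return Void();
    }
};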


@@ -34,7 +34,23 @@ CameraDeviceSession::CameraDeviceSession(
camera3_device_t* device,
const camera_metadata_t* deviceInfo,
const sp<V3_2::ICameraDeviceCallback>& callback) :
V3_3::implementation::CameraDeviceSession(device, deviceInfo, callback) {
V3_3::implementation::CameraDeviceSession(device, deviceInfo, callback),
mResultBatcher_3_4(callback) {
mHasCallback_3_4 = false;
auto castResult = ICameraDeviceCallback::castFrom(callback);
if (castResult.isOk()) {
sp<ICameraDeviceCallback> callback3_4 = castResult;
if (callback3_4 != nullptr) {
process_capture_result = sProcessCaptureResult_3_4;
notify = sNotify_3_4;
mHasCallback_3_4 = true;
if (!mInitFail) {
mResultBatcher_3_4.setResultMetadataQueue(mResultMetadataQueue);
}
}
}
}
CameraDeviceSession::~CameraDeviceSession() {
@@ -54,6 +70,18 @@ Return<void> CameraDeviceSession::configureStreams_3_4(
Status status = initStatus();
HalStreamConfiguration outStreams;
// If callback is 3.2, make sure no physical stream is configured
if (!mHasCallback_3_4) {
for (size_t i = 0; i < requestedConfiguration.streams.size(); i++) {
if (requestedConfiguration.streams[i].physicalCameraId.size() > 0) {
ALOGE("%s: trying to configureStreams with physical camera id with V3.2 callback",
__FUNCTION__);
_hidl_cb(Status::INTERNAL_ERROR, outStreams);
return Void();
}
}
}
// hold the inflight lock for entire configureStreams scope since there must not be any
// inflight request/results during stream configuration.
Mutex::Autolock _l(mInflightLock);
@@ -205,7 +233,7 @@ void CameraDeviceSession::postProcessConfigurationLocked_3_4(
mVideoStreamIds.push_back(stream.v3_2.id);
}
}
mResultBatcher.setBatchedStreams(mVideoStreamIds);
mResultBatcher_3_4.setBatchedStreams(mVideoStreamIds);
}
Return<void> CameraDeviceSession::processCaptureRequest_3_4(
@@ -224,7 +252,7 @@ Return<void> CameraDeviceSession::processCaptureRequest_3_4(
}
if (s == Status::OK && requests.size() > 1) {
mResultBatcher.registerBatch(requests[0].v3_2.frameNumber, requests.size());
mResultBatcher_3_4.registerBatch(requests[0].v3_2.frameNumber, requests.size());
}
_hidl_cb(s, numRequestProcessed);
@@ -237,6 +265,14 @@ Status CameraDeviceSession::processOneCaptureRequest_3_4(const V3_4::CaptureRequ
ALOGE("%s: camera init failed or disconnected", __FUNCTION__);
return status;
}
// If callback is 3.2, make sure there are no physical settings.
if (!mHasCallback_3_4) {
if (request.physicalCameraSettings.size() > 0) {
ALOGE("%s: trying to call processCaptureRequest_3_4 with physical camera id "
"and V3.2 callback", __FUNCTION__);
return Status::INTERNAL_ERROR;
}
}
camera3_capture_request_t halRequest;
halRequest.frame_number = request.v3_2.frameNumber;
@@ -407,6 +443,228 @@ Status CameraDeviceSession::processOneCaptureRequest_3_4(const V3_4::CaptureRequ
return Status::OK;
}
/**
* Static callback forwarding methods from HAL to instance
*/
void CameraDeviceSession::sProcessCaptureResult_3_4(
const camera3_callback_ops *cb,
const camera3_capture_result *hal_result) {
CameraDeviceSession *d =
const_cast<CameraDeviceSession*>(static_cast<const CameraDeviceSession*>(cb));
CaptureResult result;
d->constructCaptureResult(result.v3_2, hal_result);
result.physicalCameraMetadata.resize(hal_result->num_physcam_metadata);
for (uint32_t i = 0; i < hal_result->num_physcam_metadata; i++) {
std::string physicalId = hal_result->physcam_ids[i];
V3_2::CameraMetadata physicalMetadata;
V3_2::implementation::convertToHidl(hal_result->physcam_metadata[i], &physicalMetadata);
PhysicalCameraMetadata physicalCameraMetadata = {
.fmqMetadataSize = 0,
.physicalCameraId = physicalId,
.metadata = physicalMetadata };
result.physicalCameraMetadata[i] = physicalCameraMetadata;
}
d->mResultBatcher_3_4.processCaptureResult_3_4(result);
}
void CameraDeviceSession::sNotify_3_4(
const camera3_callback_ops *cb,
const camera3_notify_msg *msg) {
CameraDeviceSession *d =
const_cast<CameraDeviceSession*>(static_cast<const CameraDeviceSession*>(cb));
V3_2::NotifyMsg hidlMsg;
V3_2::implementation::convertToHidl(msg, &hidlMsg);
if (hidlMsg.type == (V3_2::MsgType) CAMERA3_MSG_ERROR &&
hidlMsg.msg.error.errorStreamId != -1) {
if (d->mStreamMap.count(hidlMsg.msg.error.errorStreamId) != 1) {
ALOGE("%s: unknown stream ID %d reports an error!",
__FUNCTION__, hidlMsg.msg.error.errorStreamId);
return;
}
}
if (static_cast<camera3_msg_type_t>(hidlMsg.type) == CAMERA3_MSG_ERROR) {
switch (hidlMsg.msg.error.errorCode) {
case V3_2::ErrorCode::ERROR_DEVICE:
case V3_2::ErrorCode::ERROR_REQUEST:
case V3_2::ErrorCode::ERROR_RESULT: {
Mutex::Autolock _l(d->mInflightLock);
auto entry = d->mInflightAETriggerOverrides.find(
hidlMsg.msg.error.frameNumber);
if (d->mInflightAETriggerOverrides.end() != entry) {
d->mInflightAETriggerOverrides.erase(
hidlMsg.msg.error.frameNumber);
}
auto boostEntry = d->mInflightRawBoostPresent.find(
hidlMsg.msg.error.frameNumber);
if (d->mInflightRawBoostPresent.end() != boostEntry) {
d->mInflightRawBoostPresent.erase(
hidlMsg.msg.error.frameNumber);
}
}
break;
case V3_2::ErrorCode::ERROR_BUFFER:
default:
break;
}
}
d->mResultBatcher_3_4.notify(hidlMsg);
}
CameraDeviceSession::ResultBatcher_3_4::ResultBatcher_3_4(
const sp<V3_2::ICameraDeviceCallback>& callback) :
V3_3::implementation::CameraDeviceSession::ResultBatcher(callback) {
auto castResult = ICameraDeviceCallback::castFrom(callback);
if (castResult.isOk()) {
mCallback_3_4 = castResult;
}
}
void CameraDeviceSession::ResultBatcher_3_4::processCaptureResult_3_4(CaptureResult& result) {
auto pair = getBatch(result.v3_2.frameNumber);
int batchIdx = pair.first;
if (batchIdx == NOT_BATCHED) {
processOneCaptureResult_3_4(result);
return;
}
std::shared_ptr<InflightBatch> batch = pair.second;
{
Mutex::Autolock _l(batch->mLock);
// Check if the batch is removed (mostly by notify error) before lock was acquired
if (batch->mRemoved) {
// Fall back to non-batch path
processOneCaptureResult_3_4(result);
return;
}
// queue metadata
if (result.v3_2.result.size() != 0) {
// Save a copy of metadata
batch->mResultMds[result.v3_2.partialResult].mMds.push_back(
std::make_pair(result.v3_2.frameNumber, result.v3_2.result));
}
// queue buffer
std::vector<int> filledStreams;
std::vector<V3_2::StreamBuffer> nonBatchedBuffers;
for (auto& buffer : result.v3_2.outputBuffers) {
auto it = batch->mBatchBufs.find(buffer.streamId);
if (it != batch->mBatchBufs.end()) {
InflightBatch::BufferBatch& bb = it->second;
pushStreamBuffer(std::move(buffer), bb.mBuffers);
filledStreams.push_back(buffer.streamId);
} else {
pushStreamBuffer(std::move(buffer), nonBatchedBuffers);
}
}
// send non-batched buffers up
if (nonBatchedBuffers.size() > 0 || result.v3_2.inputBuffer.streamId != -1) {
CaptureResult nonBatchedResult;
nonBatchedResult.v3_2.frameNumber = result.v3_2.frameNumber;
nonBatchedResult.v3_2.fmqResultSize = 0;
nonBatchedResult.v3_2.outputBuffers.resize(nonBatchedBuffers.size());
for (size_t i = 0; i < nonBatchedBuffers.size(); i++) {
moveStreamBuffer(
std::move(nonBatchedBuffers[i]), nonBatchedResult.v3_2.outputBuffers[i]);
}
moveStreamBuffer(std::move(result.v3_2.inputBuffer), nonBatchedResult.v3_2.inputBuffer);
nonBatchedResult.v3_2.partialResult = 0; // 0 for buffer only results
processOneCaptureResult_3_4(nonBatchedResult);
}
if (result.v3_2.frameNumber == batch->mLastFrame) {
// Send data up
if (result.v3_2.partialResult > 0) {
sendBatchMetadataLocked(batch, result.v3_2.partialResult);
}
// send buffer up
if (filledStreams.size() > 0) {
sendBatchBuffersLocked(batch, filledStreams);
}
}
} // end of batch lock scope
// see if the batch is complete
if (result.v3_2.frameNumber == batch->mLastFrame) {
checkAndRemoveFirstBatch();
}
}
void CameraDeviceSession::ResultBatcher_3_4::processOneCaptureResult_3_4(CaptureResult& result) {
hidl_vec<CaptureResult> results;
results.resize(1);
results[0] = std::move(result);
invokeProcessCaptureResultCallback_3_4(results, /* tryWriteFmq */true);
freeReleaseFences_3_4(results);
return;
}
void CameraDeviceSession::ResultBatcher_3_4::invokeProcessCaptureResultCallback_3_4(
hidl_vec<CaptureResult> &results, bool tryWriteFmq) {
if (mProcessCaptureResultLock.tryLock() != OK) {
ALOGV("%s: previous call is not finished! waiting 1s...", __FUNCTION__);
if (mProcessCaptureResultLock.timedLock(1000000000 /* 1s */) != OK) {
ALOGE("%s: cannot acquire lock in 1s, cannot proceed",
__FUNCTION__);
return;
}
}
if (tryWriteFmq && mResultMetadataQueue->availableToWrite() > 0) {
for (CaptureResult &result : results) {
if (result.v3_2.result.size() > 0) {
if (mResultMetadataQueue->write(result.v3_2.result.data(),
result.v3_2.result.size())) {
result.v3_2.fmqResultSize = result.v3_2.result.size();
result.v3_2.result.resize(0);
} else {
ALOGW("%s: couldn't utilize fmq, fall back to hwbinder", __FUNCTION__);
result.v3_2.fmqResultSize = 0;
}
}
for (auto& onePhysMetadata : result.physicalCameraMetadata) {
if (mResultMetadataQueue->write(onePhysMetadata.metadata.data(),
onePhysMetadata.metadata.size())) {
onePhysMetadata.fmqMetadataSize = onePhysMetadata.metadata.size();
onePhysMetadata.metadata.resize(0);
} else {
ALOGW("%s: couldn't utilize fmq, fall back to hwbinder", __FUNCTION__);
onePhysMetadata.fmqMetadataSize = 0;
}
}
}
}
mCallback_3_4->processCaptureResult_3_4(results);
mProcessCaptureResultLock.unlock();
}
void CameraDeviceSession::ResultBatcher_3_4::freeReleaseFences_3_4(hidl_vec<CaptureResult>& results) {
for (auto& result : results) {
if (result.v3_2.inputBuffer.releaseFence.getNativeHandle() != nullptr) {
native_handle_t* handle = const_cast<native_handle_t*>(
result.v3_2.inputBuffer.releaseFence.getNativeHandle());
native_handle_close(handle);
native_handle_delete(handle);
}
for (auto& buf : result.v3_2.outputBuffers) {
if (buf.releaseFence.getNativeHandle() != nullptr) {
native_handle_t* handle = const_cast<native_handle_t*>(
buf.releaseFence.getNativeHandle());
native_handle_close(handle);
native_handle_delete(handle);
}
}
}
return;
}
} // namespace implementation
} // namespace V3_4
} // namespace device


@@ -19,6 +19,7 @@
#include <android/hardware/camera/device/3.2/ICameraDevice.h>
#include <android/hardware/camera/device/3.4/ICameraDeviceSession.h>
#include <android/hardware/camera/device/3.4/ICameraDeviceCallback.h>
#include <../../3.3/default/CameraDeviceSession.h>
#include <../../3.3/default/include/convert.h>
#include <fmq/MessageQueue.h>
@@ -46,6 +47,7 @@ using ::android::hardware::camera::device::V3_2::StreamType;
using ::android::hardware::camera::device::V3_4::StreamConfiguration;
using ::android::hardware::camera::device::V3_4::HalStreamConfiguration;
using ::android::hardware::camera::device::V3_4::ICameraDeviceSession;
using ::android::hardware::camera::device::V3_4::ICameraDeviceCallback;
using ::android::hardware::camera::common::V1_0::Status;
using ::android::hardware::camera::common::V1_0::helper::HandleImporter;
using ::android::hardware::kSynchronizedReadWrite;
@@ -94,6 +96,25 @@ protected:
Status processOneCaptureRequest_3_4(const V3_4::CaptureRequest& request);
std::map<int, std::string> mPhysicalCameraIdMap;
static V3_2::implementation::callbacks_process_capture_result_t sProcessCaptureResult_3_4;
static V3_2::implementation::callbacks_notify_t sNotify_3_4;
class ResultBatcher_3_4 : public V3_3::implementation::CameraDeviceSession::ResultBatcher {
public:
ResultBatcher_3_4(const sp<V3_2::ICameraDeviceCallback>& callback);
void processCaptureResult_3_4(CaptureResult& result);
private:
void freeReleaseFences_3_4(hidl_vec<CaptureResult>&);
void processOneCaptureResult_3_4(CaptureResult& result);
void invokeProcessCaptureResultCallback_3_4(hidl_vec<CaptureResult> &results,
bool tryWriteFmq);
sp<ICameraDeviceCallback> mCallback_3_4;
} mResultBatcher_3_4;
// Whether this camera device session is created with version 3.4 callback.
bool mHasCallback_3_4;
private:
struct TrampolineSessionInterface_3_4 : public ICameraDeviceSession {


@@ -22,6 +22,7 @@ import @3.2::Stream;
import @3.3::HalStream;
import @3.2::CameraMetadata;
import @3.2::CaptureRequest;
import @3.2::CaptureResult;
/**
* Stream:
@@ -226,3 +227,66 @@ struct CaptureRequest {
*/
vec<PhysicalCameraSetting> physicalCameraSettings;
};
/**
* PhysicalCameraMetadata:
*
* Individual camera metadata for a physical camera that is part of a logical
* multi-camera. The camera HAL should return one such entry for each physical
* camera that has a stream requested in the capture request.
*/
struct PhysicalCameraMetadata {
/**
* If non-zero, read metadata from result metadata queue instead
* (see ICameraDeviceSession.getCaptureResultMetadataQueue).
* If zero, read metadata from .metadata field.
*/
uint64_t fmqMetadataSize;
/**
* Contains the physical device camera id. Whenever the corresponding
* processCaptureRequest requests on a particular physical camera's stream,
* the metadata for that physical camera should be generated for the capture
* result.
*/
string physicalCameraId;
/**
* If fmqMetadataSize is zero, the metadata buffer contains the metadata
* for the physical device with physicalCameraId.
*
* The v3_2 CaptureResult metadata is read first from the FMQ, followed by
* the physical cameras' metadata starting from index 0.
*/
CameraMetadata metadata;
};
/**
* CaptureResult:
*
* Identical to @3.2::CaptureResult, except that it contains a list of
* physical camera metadata.
*
* Physical camera metadata needs to be generated if and only if a
* request is pending on a stream from that physical camera. For example,
* if the processCaptureRequest call doesn't request on any physical camera
* streams, the physicalCameraMetadata field of the returned CaptureResult
* should be a 0-size vector. If the processCaptureRequest call requests on
* streams from one of the physical cameras, the physicalCameraMetadata field
* should contain one metadata entry describing the capture from that physical
* camera.
*
* For a CaptureResult that contains physical camera metadata, its
* partialResult field must be android.request.partialResultCount. In other
* words, the physicalCameraMetadata must only be contained in a final capture
* result.
*/
struct CaptureResult {
/**
* The definition of CaptureResult from the prior version.
*/
@3.2::CaptureResult v3_2;
/**
* The physical metadata for logical multi-camera.
*/
vec<PhysicalCameraMetadata> physicalCameraMetadata;
};
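A minimal consumer-side sketch of the read order described above (the function name is hypothetical; the queue is the result metadata queue obtained from ICameraDeviceSession.getCaptureResultMetadataQueue). The v3_2 metadata is read from the FMQ first, followed by each physical camera's metadata in index order:

#include <android/hardware/camera/device/3.4/types.h>
#include <fmq/MessageQueue.h>
#include <vector>

using ::android::hardware::kSynchronizedReadWrite;
using ::android::hardware::MessageQueue;
using ::android::hardware::camera::device::V3_4::CaptureResult;

using ResultMetadataQueue = MessageQueue<uint8_t, kSynchronizedReadWrite>;

void consumeCaptureResult(ResultMetadataQueue& queue, const CaptureResult& result) {
    std::vector<uint8_t> blob;

    // 1. Logical (v3_2) metadata: from the FMQ if fmqResultSize != 0,
    //    otherwise inline in result.v3_2.result.
    if (result.v3_2.fmqResultSize > 0) {
        blob.resize(result.v3_2.fmqResultSize);
        if (!queue.read(blob.data(), blob.size())) {
            return;  // FMQ read failed
        }
    }

    // 2. Physical camera metadata, in index order, with the same FMQ-vs-inline
    //    rule per entry; phys.physicalCameraId identifies the physical camera.
    for (const auto& phys : result.physicalCameraMetadata) {
        if (phys.fmqMetadataSize > 0) {
            blob.resize(phys.fmqMetadataSize);
            if (!queue.read(blob.data(), blob.size())) {
                return;  // FMQ read failed
            }
        }
    }
}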