diff --git a/camera/device/3.4/default/ExternalCameraDeviceSession.cpp b/camera/device/3.4/default/ExternalCameraDeviceSession.cpp index 9ff0d74687..5f8674219c 100644 --- a/camera/device/3.4/default/ExternalCameraDeviceSession.cpp +++ b/camera/device/3.4/default/ExternalCameraDeviceSession.cpp @@ -81,8 +81,6 @@ bool tryLock(std::mutex& mutex) return locked; } -buffer_handle_t sEmptyBuffer = nullptr; - } // Anonymous namespace // Static instances @@ -119,8 +117,8 @@ bool ExternalCameraDeviceSession::initialize() { std::string make, model; if (ret < 0) { ALOGW("%s v4l2 QUERYCAP failed", __FUNCTION__); - make = "Generic UVC webcam"; - model = "Generic UVC webcam"; + mExifMake = "Generic UVC webcam"; + mExifModel = "Generic UVC webcam"; } else { // capability.card is UTF-8 encoded char card[32]; @@ -134,11 +132,11 @@ bool ExternalCameraDeviceSession::initialize() { } } if (j == 0 || card[j - 1] != '\0') { - make = "Generic UVC webcam"; - model = "Generic UVC webcam"; + mExifMake = "Generic UVC webcam"; + mExifModel = "Generic UVC webcam"; } else { - make = card; - model = card; + mExifMake = card; + mExifModel = card; } } @@ -147,7 +145,7 @@ bool ExternalCameraDeviceSession::initialize() { ALOGE("%s: init OutputThread failed!", __FUNCTION__); return true; } - mOutputThread->setExifMakeModel(make, model); + mOutputThread->setExifMakeModel(mExifMake, mExifModel); status_t status = initDefaultRequests(); if (status != OK) { @@ -161,7 +159,7 @@ bool ExternalCameraDeviceSession::initialize() { ALOGE("%s: invalid request fmq", __FUNCTION__); return true; } - mResultMetadataQueue = std::make_shared( + mResultMetadataQueue = std::make_shared( kMetadataMsgQueueSize, false /* non blocking */); if (!mResultMetadataQueue->isValid()) { ALOGE("%s: invalid result fmq", __FUNCTION__); @@ -183,7 +181,7 @@ bool ExternalCameraDeviceSession::isInitFailed() { } void ExternalCameraDeviceSession::initOutputThread() { - mOutputThread = new OutputThread(this, mCroppingType); + mOutputThread = 
new OutputThread(this, mCroppingType, mCameraCharacteristics); } void ExternalCameraDeviceSession::closeOutputThread() { @@ -518,35 +516,9 @@ Status ExternalCameraDeviceSession::importBufferLocked(int32_t streamId, uint64_t bufId, buffer_handle_t buf, /*out*/buffer_handle_t** outBufPtr, bool allowEmptyBuf) { - - if (buf == nullptr && bufId == BUFFER_ID_NO_BUFFER) { - if (allowEmptyBuf) { - *outBufPtr = &sEmptyBuffer; - return Status::OK; - } else { - ALOGE("%s: bufferId %" PRIu64 " has null buffer handle!", __FUNCTION__, bufId); - return Status::ILLEGAL_ARGUMENT; - } - } - - CirculatingBuffers& cbs = mCirculatingBuffers[streamId]; - if (cbs.count(bufId) == 0) { - if (buf == nullptr) { - ALOGE("%s: bufferId %" PRIu64 " has null buffer handle!", __FUNCTION__, bufId); - return Status::ILLEGAL_ARGUMENT; - } - // Register a newly seen buffer - buffer_handle_t importedBuf = buf; - sHandleImporter.importBuffer(importedBuf); - if (importedBuf == nullptr) { - ALOGE("%s: output buffer for stream %d is invalid!", __FUNCTION__, streamId); - return Status::INTERNAL_ERROR; - } else { - cbs[bufId] = importedBuf; - } - } - *outBufPtr = &cbs[bufId]; - return Status::OK; + return importBufferImpl( + mCirculatingBuffers, sHandleImporter, streamId, + bufId, buf, outBufPtr, allowEmptyBuf); } Status ExternalCameraDeviceSession::importRequestLockedImpl( @@ -791,15 +763,32 @@ void ExternalCameraDeviceSession::notifyError( //TODO: refactor with processCaptureResult Status ExternalCameraDeviceSession::processCaptureRequestError( - const std::shared_ptr& req) { + const std::shared_ptr& req, + /*out*/std::vector* outMsgs, + /*out*/std::vector* outResults) { ATRACE_CALL(); // Return V4L2 buffer to V4L2 buffer queue - enqueueV4l2Frame(req->frameIn); + sp v4l2Frame = + static_cast(req->frameIn.get()); + enqueueV4l2Frame(v4l2Frame); - // NotifyShutter - notifyShutter(req->frameNumber, req->shutterTs); + if (outMsgs == nullptr) { + notifyShutter(req->frameNumber, req->shutterTs); + 
notifyError(/*frameNum*/req->frameNumber, /*stream*/-1, ErrorCode::ERROR_REQUEST); + } else { + NotifyMsg shutter; + shutter.type = MsgType::SHUTTER; + shutter.msg.shutter.frameNumber = req->frameNumber; + shutter.msg.shutter.timestamp = req->shutterTs; - notifyError(/*frameNum*/req->frameNumber, /*stream*/-1, ErrorCode::ERROR_REQUEST); + NotifyMsg error; + error.type = MsgType::ERROR; + error.msg.error.frameNumber = req->frameNumber; + error.msg.error.errorStreamId = -1; + error.msg.error.errorCode = ErrorCode::ERROR_REQUEST; + outMsgs->push_back(shutter); + outMsgs->push_back(error); + } // Fill output buffers hidl_vec results; @@ -826,16 +815,22 @@ Status ExternalCameraDeviceSession::processCaptureRequestError( mInflightFrames.erase(req->frameNumber); } - // Callback into framework - invokeProcessCaptureResultCallback(results, /* tryWriteFmq */true); - freeReleaseFences(results); + if (outResults == nullptr) { + // Callback into framework + invokeProcessCaptureResultCallback(results, /* tryWriteFmq */true); + freeReleaseFences(results); + } else { + outResults->push_back(result); + } return Status::OK; } Status ExternalCameraDeviceSession::processCaptureResult(std::shared_ptr& req) { ATRACE_CALL(); // Return V4L2 buffer to V4L2 buffer queue - enqueueV4l2Frame(req->frameIn); + sp v4l2Frame = + static_cast(req->frameIn.get()); + enqueueV4l2Frame(v4l2Frame); // NotifyShutter notifyShutter(req->frameNumber, req->shutterTs); @@ -923,29 +918,10 @@ void ExternalCameraDeviceSession::invokeProcessCaptureResultCallback( mProcessCaptureResultLock.unlock(); } -void ExternalCameraDeviceSession::freeReleaseFences(hidl_vec& results) { - for (auto& result : results) { - if (result.inputBuffer.releaseFence.getNativeHandle() != nullptr) { - native_handle_t* handle = const_cast( - result.inputBuffer.releaseFence.getNativeHandle()); - native_handle_close(handle); - native_handle_delete(handle); - } - for (auto& buf : result.outputBuffers) { - if (buf.releaseFence.getNativeHandle() 
!= nullptr) { - native_handle_t* handle = const_cast( - buf.releaseFence.getNativeHandle()); - native_handle_close(handle); - native_handle_delete(handle); - } - } - } - return; -} - ExternalCameraDeviceSession::OutputThread::OutputThread( - wp parent, - CroppingType ct) : mParent(parent), mCroppingType(ct) {} + wp parent, CroppingType ct, + const common::V1_0::helper::CameraMetadata& chars) : + mParent(parent), mCroppingType(ct), mCameraCharacteristics(chars) {} ExternalCameraDeviceSession::OutputThread::~OutputThread() {} @@ -955,88 +931,6 @@ void ExternalCameraDeviceSession::OutputThread::setExifMakeModel( mExifModel = model; } -uint32_t ExternalCameraDeviceSession::OutputThread::getFourCcFromLayout( - const YCbCrLayout& layout) { - intptr_t cb = reinterpret_cast(layout.cb); - intptr_t cr = reinterpret_cast(layout.cr); - if (std::abs(cb - cr) == 1 && layout.chromaStep == 2) { - // Interleaved format - if (layout.cb > layout.cr) { - return V4L2_PIX_FMT_NV21; - } else { - return V4L2_PIX_FMT_NV12; - } - } else if (layout.chromaStep == 1) { - // Planar format - if (layout.cb > layout.cr) { - return V4L2_PIX_FMT_YVU420; // YV12 - } else { - return V4L2_PIX_FMT_YUV420; // YU12 - } - } else { - return FLEX_YUV_GENERIC; - } -} - -int ExternalCameraDeviceSession::OutputThread::getCropRect( - CroppingType ct, const Size& inSize, const Size& outSize, IMapper::Rect* out) { - if (out == nullptr) { - ALOGE("%s: out is null", __FUNCTION__); - return -1; - } - - uint32_t inW = inSize.width; - uint32_t inH = inSize.height; - uint32_t outW = outSize.width; - uint32_t outH = outSize.height; - - // Handle special case where aspect ratio is close to input but scaled - // dimension is slightly larger than input - float arIn = ASPECT_RATIO(inSize); - float arOut = ASPECT_RATIO(outSize); - if (isAspectRatioClose(arIn, arOut)) { - out->left = 0; - out->top = 0; - out->width = inW; - out->height = inH; - return 0; - } - - if (ct == VERTICAL) { - uint64_t scaledOutH = static_cast(outH) * 
inW / outW; - if (scaledOutH > inH) { - ALOGE("%s: Output size %dx%d cannot be vertically cropped from input size %dx%d", - __FUNCTION__, outW, outH, inW, inH); - return -1; - } - scaledOutH = scaledOutH & ~0x1; // make it multiple of 2 - - out->left = 0; - out->top = ((inH - scaledOutH) / 2) & ~0x1; - out->width = inW; - out->height = static_cast(scaledOutH); - ALOGV("%s: crop %dx%d to %dx%d: top %d, scaledH %d", - __FUNCTION__, inW, inH, outW, outH, out->top, static_cast(scaledOutH)); - } else { - uint64_t scaledOutW = static_cast(outW) * inH / outH; - if (scaledOutW > inW) { - ALOGE("%s: Output size %dx%d cannot be horizontally cropped from input size %dx%d", - __FUNCTION__, outW, outH, inW, inH); - return -1; - } - scaledOutW = scaledOutW & ~0x1; // make it multiple of 2 - - out->left = ((inW - scaledOutW) / 2) & ~0x1; - out->top = 0; - out->width = static_cast(scaledOutW); - out->height = inH; - ALOGV("%s: crop %dx%d to %dx%d: top %d, scaledW %d", - __FUNCTION__, inW, inH, outW, outH, out->top, static_cast(scaledOutW)); - } - - return 0; -} - int ExternalCameraDeviceSession::OutputThread::cropAndScaleLocked( sp& in, const Size& outSz, YCbCrLayout* out) { Size inSz = {in->mWidth, in->mHeight}; @@ -1274,265 +1168,6 @@ int ExternalCameraDeviceSession::OutputThread::cropAndScaleThumbLocked( return 0; } -int ExternalCameraDeviceSession::OutputThread::formatConvertLocked( - const YCbCrLayout& in, const YCbCrLayout& out, Size sz, uint32_t format) { - int ret = 0; - switch (format) { - case V4L2_PIX_FMT_NV21: - ret = libyuv::I420ToNV21( - static_cast(in.y), - in.yStride, - static_cast(in.cb), - in.cStride, - static_cast(in.cr), - in.cStride, - static_cast(out.y), - out.yStride, - static_cast(out.cr), - out.cStride, - sz.width, - sz.height); - if (ret != 0) { - ALOGE("%s: convert to NV21 buffer failed! 
ret %d", - __FUNCTION__, ret); - return ret; - } - break; - case V4L2_PIX_FMT_NV12: - ret = libyuv::I420ToNV12( - static_cast(in.y), - in.yStride, - static_cast(in.cb), - in.cStride, - static_cast(in.cr), - in.cStride, - static_cast(out.y), - out.yStride, - static_cast(out.cb), - out.cStride, - sz.width, - sz.height); - if (ret != 0) { - ALOGE("%s: convert to NV12 buffer failed! ret %d", - __FUNCTION__, ret); - return ret; - } - break; - case V4L2_PIX_FMT_YVU420: // YV12 - case V4L2_PIX_FMT_YUV420: // YU12 - // TODO: maybe we can speed up here by somehow save this copy? - ret = libyuv::I420Copy( - static_cast(in.y), - in.yStride, - static_cast(in.cb), - in.cStride, - static_cast(in.cr), - in.cStride, - static_cast(out.y), - out.yStride, - static_cast(out.cb), - out.cStride, - static_cast(out.cr), - out.cStride, - sz.width, - sz.height); - if (ret != 0) { - ALOGE("%s: copy to YV12 or YU12 buffer failed! ret %d", - __FUNCTION__, ret); - return ret; - } - break; - case FLEX_YUV_GENERIC: - // TODO: b/72261744 write to arbitrary flexible YUV layout. Slow. - ALOGE("%s: unsupported flexible yuv layout" - " y %p cb %p cr %p y_str %d c_str %d c_step %d", - __FUNCTION__, out.y, out.cb, out.cr, - out.yStride, out.cStride, out.chromaStep); - return -1; - default: - ALOGE("%s: unknown YUV format 0x%x!", __FUNCTION__, format); - return -1; - } - return 0; -} - -int ExternalCameraDeviceSession::OutputThread::encodeJpegYU12( - const Size & inSz, const YCbCrLayout& inLayout, - int jpegQuality, const void *app1Buffer, size_t app1Size, - void *out, const size_t maxOutSize, size_t &actualCodeSize) -{ - /* libjpeg is a C library so we use C-style "inheritance" by - * putting libjpeg's jpeg_destination_mgr first in our custom - * struct. 
This allows us to cast jpeg_destination_mgr* to - * CustomJpegDestMgr* when we get it passed to us in a callback */ - struct CustomJpegDestMgr { - struct jpeg_destination_mgr mgr; - JOCTET *mBuffer; - size_t mBufferSize; - size_t mEncodedSize; - bool mSuccess; - } dmgr; - - jpeg_compress_struct cinfo = {}; - jpeg_error_mgr jerr; - - /* Initialize error handling with standard callbacks, but - * then override output_message (to print to ALOG) and - * error_exit to set a flag and print a message instead - * of killing the whole process */ - cinfo.err = jpeg_std_error(&jerr); - - cinfo.err->output_message = [](j_common_ptr cinfo) { - char buffer[JMSG_LENGTH_MAX]; - - /* Create the message */ - (*cinfo->err->format_message)(cinfo, buffer); - ALOGE("libjpeg error: %s", buffer); - }; - cinfo.err->error_exit = [](j_common_ptr cinfo) { - (*cinfo->err->output_message)(cinfo); - if(cinfo->client_data) { - auto & dmgr = - *reinterpret_cast(cinfo->client_data); - dmgr.mSuccess = false; - } - }; - /* Now that we initialized some callbacks, let's create our compressor */ - jpeg_create_compress(&cinfo); - - /* Initialize our destination manager */ - dmgr.mBuffer = static_cast(out); - dmgr.mBufferSize = maxOutSize; - dmgr.mEncodedSize = 0; - dmgr.mSuccess = true; - cinfo.client_data = static_cast(&dmgr); - - /* These lambdas become C-style function pointers and as per C++11 spec - * may not capture anything */ - dmgr.mgr.init_destination = [](j_compress_ptr cinfo) { - auto & dmgr = reinterpret_cast(*cinfo->dest); - dmgr.mgr.next_output_byte = dmgr.mBuffer; - dmgr.mgr.free_in_buffer = dmgr.mBufferSize; - ALOGV("%s:%d jpeg start: %p [%zu]", - __FUNCTION__, __LINE__, dmgr.mBuffer, dmgr.mBufferSize); - }; - - dmgr.mgr.empty_output_buffer = [](j_compress_ptr cinfo __unused) { - ALOGV("%s:%d Out of buffer", __FUNCTION__, __LINE__); - return 0; - }; - - dmgr.mgr.term_destination = [](j_compress_ptr cinfo) { - auto & dmgr = reinterpret_cast(*cinfo->dest); - dmgr.mEncodedSize = 
dmgr.mBufferSize - dmgr.mgr.free_in_buffer; - ALOGV("%s:%d Done with jpeg: %zu", __FUNCTION__, __LINE__, dmgr.mEncodedSize); - }; - cinfo.dest = reinterpret_cast(&dmgr); - - /* We are going to be using JPEG in raw data mode, so we are passing - * straight subsampled planar YCbCr and it will not touch our pixel - * data or do any scaling or anything */ - cinfo.image_width = inSz.width; - cinfo.image_height = inSz.height; - cinfo.input_components = 3; - cinfo.in_color_space = JCS_YCbCr; - - /* Initialize defaults and then override what we want */ - jpeg_set_defaults(&cinfo); - - jpeg_set_quality(&cinfo, jpegQuality, 1); - jpeg_set_colorspace(&cinfo, JCS_YCbCr); - cinfo.raw_data_in = 1; - cinfo.dct_method = JDCT_IFAST; - - /* Configure sampling factors. The sampling factor is JPEG subsampling 420 - * because the source format is YUV420. Note that libjpeg sampling factors - * are... a little weird. Sampling of Y=2,U=1,V=1 means there is 1 U and - * 1 V value for each 2 Y values */ - cinfo.comp_info[0].h_samp_factor = 2; - cinfo.comp_info[0].v_samp_factor = 2; - cinfo.comp_info[1].h_samp_factor = 1; - cinfo.comp_info[1].v_samp_factor = 1; - cinfo.comp_info[2].h_samp_factor = 1; - cinfo.comp_info[2].v_samp_factor = 1; - - /* Let's not hardcode YUV420 in 6 places... 5 was enough */ - int maxVSampFactor = std::max( { - cinfo.comp_info[0].v_samp_factor, - cinfo.comp_info[1].v_samp_factor, - cinfo.comp_info[2].v_samp_factor - }); - int cVSubSampling = cinfo.comp_info[0].v_samp_factor / - cinfo.comp_info[1].v_samp_factor; - - /* Start the compressor */ - jpeg_start_compress(&cinfo, TRUE); - - /* Compute our macroblock height, so we can pad our input to be vertically - * macroblock aligned. - * TODO: Does it need to be horizontally MCU aligned too? 
*/ - - size_t mcuV = DCTSIZE*maxVSampFactor; - size_t paddedHeight = mcuV * ((inSz.height + mcuV - 1) / mcuV); - - /* libjpeg uses arrays of row pointers, which makes it really easy to pad - * data vertically (unfortunately doesn't help horizontally) */ - std::vector yLines (paddedHeight); - std::vector cbLines(paddedHeight/cVSubSampling); - std::vector crLines(paddedHeight/cVSubSampling); - - uint8_t *py = static_cast(inLayout.y); - uint8_t *pcr = static_cast(inLayout.cr); - uint8_t *pcb = static_cast(inLayout.cb); - - for(uint32_t i = 0; i < paddedHeight; i++) - { - /* Once we are in the padding territory we still point to the last line - * effectively replicating it several times ~ CLAMP_TO_EDGE */ - int li = std::min(i, inSz.height - 1); - yLines[i] = static_cast(py + li * inLayout.yStride); - if(i < paddedHeight / cVSubSampling) - { - crLines[i] = static_cast(pcr + li * inLayout.cStride); - cbLines[i] = static_cast(pcb + li * inLayout.cStride); - } - } - - /* If APP1 data was passed in, use it */ - if(app1Buffer && app1Size) - { - jpeg_write_marker(&cinfo, JPEG_APP0 + 1, - static_cast(app1Buffer), app1Size); - } - - /* While we still have padded height left to go, keep giving it one - * macroblock at a time. 
*/ - while (cinfo.next_scanline < cinfo.image_height) { - const uint32_t batchSize = DCTSIZE * maxVSampFactor; - const uint32_t nl = cinfo.next_scanline; - JSAMPARRAY planes[3]{ &yLines[nl], - &cbLines[nl/cVSubSampling], - &crLines[nl/cVSubSampling] }; - - uint32_t done = jpeg_write_raw_data(&cinfo, planes, batchSize); - - if (done != batchSize) { - ALOGE("%s: compressed %u lines, expected %u (total %u/%u)", - __FUNCTION__, done, batchSize, cinfo.next_scanline, - cinfo.image_height); - return -1; - } - } - - /* This will flush everything */ - jpeg_finish_compress(&cinfo); - - /* Grab the actual code size and set it */ - actualCodeSize = dmgr.mEncodedSize; - - return 0; -} - /* * TODO: There needs to be a mechanism to discover allocated buffer size * in the HAL. @@ -1555,25 +1190,9 @@ Size ExternalCameraDeviceSession::getMaxJpegResolution() const { } Size ExternalCameraDeviceSession::getMaxThumbResolution() const { - Size thumbSize { 0, 0 }; - camera_metadata_ro_entry entry = - mCameraCharacteristics.find(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES); - for(uint32_t i = 0; i < entry.count; i += 2) { - Size sz { static_cast(entry.data.i32[i]), - static_cast(entry.data.i32[i+1]) }; - if(sz.width * sz.height > thumbSize.width * thumbSize.height) { - thumbSize = sz; - } - } - - if (thumbSize.width * thumbSize.height == 0) { - ALOGW("%s: non-zero thumbnail size not available", __FUNCTION__); - } - - return thumbSize; + return getMaxThumbnailResolution(mCameraCharacteristics); } - ssize_t ExternalCameraDeviceSession::getJpegBufferSize( uint32_t width, uint32_t height) const { // Constant from camera3.h @@ -1616,7 +1235,7 @@ ssize_t ExternalCameraDeviceSession::getJpegBufferSize( int ExternalCameraDeviceSession::OutputThread::createJpegLocked( HalStreamBuffer &halBuf, - const std::shared_ptr& req) + const common::V1_0::helper::CameraMetadata& setting) { ATRACE_CALL(); int ret; @@ -1645,17 +1264,17 @@ int ExternalCameraDeviceSession::OutputThread::createJpegLocked( Size 
thumbSize; bool outputThumbnail = true; - if (req->setting.exists(ANDROID_JPEG_QUALITY)) { - camera_metadata_entry entry = - req->setting.find(ANDROID_JPEG_QUALITY); + if (setting.exists(ANDROID_JPEG_QUALITY)) { + camera_metadata_ro_entry entry = + setting.find(ANDROID_JPEG_QUALITY); jpegQuality = entry.data.u8[0]; } else { return lfail("%s: ANDROID_JPEG_QUALITY not set",__FUNCTION__); } - if (req->setting.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) { - camera_metadata_entry entry = - req->setting.find(ANDROID_JPEG_THUMBNAIL_QUALITY); + if (setting.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) { + camera_metadata_ro_entry entry = + setting.find(ANDROID_JPEG_THUMBNAIL_QUALITY); thumbQuality = entry.data.u8[0]; } else { return lfail( @@ -1663,9 +1282,9 @@ int ExternalCameraDeviceSession::OutputThread::createJpegLocked( __FUNCTION__); } - if (req->setting.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) { - camera_metadata_entry entry = - req->setting.find(ANDROID_JPEG_THUMBNAIL_SIZE); + if (setting.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) { + camera_metadata_ro_entry entry = + setting.find(ANDROID_JPEG_THUMBNAIL_SIZE); thumbSize = Size { static_cast(entry.data.i32[0]), static_cast(entry.data.i32[1]) }; @@ -1732,8 +1351,8 @@ int ExternalCameraDeviceSession::OutputThread::createJpegLocked( /* Combine camera characteristics with request settings to form EXIF * metadata */ - common::V1_0::helper::CameraMetadata meta(parent->mCameraCharacteristics); - meta.append(req->setting); + common::V1_0::helper::CameraMetadata meta(mCameraCharacteristics); + meta.append(setting); /* Generate EXIF object */ std::unique_ptr utils(ExifUtils::create()); @@ -1838,7 +1457,7 @@ bool ExternalCameraDeviceSession::OutputThread::threadLoop() { // TODO: see if we can save some computation by converting to YV12 here uint8_t* inData; size_t inDataSize; - if (req->frameIn->map(&inData, &inDataSize) != 0) { + if (req->frameIn->getData(&inData, &inDataSize) != 0) { lk.unlock(); return onDeviceError("%s: V4L2 buffer map 
failed", __FUNCTION__); } @@ -1899,7 +1518,7 @@ bool ExternalCameraDeviceSession::OutputThread::threadLoop() { // Gralloc lockYCbCr the buffer switch (halBuf.format) { case PixelFormat::BLOB: { - int ret = createJpegLocked(halBuf, req); + int ret = createJpegLocked(halBuf, req->setting); if(ret != 0) { lk.unlock(); @@ -1949,8 +1568,8 @@ bool ExternalCameraDeviceSession::OutputThread::threadLoop() { } Size sz {halBuf.width, halBuf.height}; - ATRACE_BEGIN("formatConvertLocked"); - ret = formatConvertLocked(cropAndScaled, outLayout, sz, outputFourcc); + ATRACE_BEGIN("formatConvert"); + ret = formatConvert(cropAndScaled, outLayout, sz, outputFourcc); ATRACE_END(); if (ret != 0) { lk.unlock(); @@ -2055,6 +1674,14 @@ Status ExternalCameraDeviceSession::OutputThread::allocateIntermediateBuffers( return Status::OK; } +void ExternalCameraDeviceSession::OutputThread::clearIntermediateBuffers() { + std::lock_guard lk(mBufferLock); + mYu12Frame.clear(); + mYu12ThumbFrame.clear(); + mIntermediateBuffers.clear(); + mBlobBufferSize = 0; +} + Status ExternalCameraDeviceSession::OutputThread::submitRequest( const std::shared_ptr& req) { std::unique_lock lk(mRequestListLock); @@ -2090,6 +1717,32 @@ void ExternalCameraDeviceSession::OutputThread::flush() { } } +std::list> +ExternalCameraDeviceSession::OutputThread::switchToOffline() { + ATRACE_CALL(); + std::list> emptyList; + auto parent = mParent.promote(); + if (parent == nullptr) { + ALOGE("%s: session has been disconnected!", __FUNCTION__); + return emptyList; + } + + std::unique_lock lk(mRequestListLock); + std::list> reqs = std::move(mRequestList); + mRequestList.clear(); + if (mProcessingRequest) { + std::chrono::seconds timeout = std::chrono::seconds(kFlushWaitTimeoutSec); + auto st = mRequestDoneCond.wait_for(lk, timeout); + if (st == std::cv_status::timeout) { + ALOGE("%s: wait for inflight request finish timeout!", __FUNCTION__); + } + } + lk.unlock(); + clearIntermediateBuffers(); + ALOGV("%s: returning %zu request for 
offline processing", __FUNCTION__, reqs.size()); + return reqs; +} + void ExternalCameraDeviceSession::OutputThread::waitForNextRequest( std::shared_ptr* out) { ATRACE_CALL(); @@ -2733,6 +2386,7 @@ Status ExternalCameraDeviceSession::configureStreams( return Status::INTERNAL_ERROR; } + mBlobBufferSize = blobBufferSize; status = mOutputThread->allocateIntermediateBuffers(v4lSize, mMaxThumbResolution, config.streams, blobBufferSize); if (status != Status::OK) { @@ -2916,16 +2570,6 @@ status_t ExternalCameraDeviceSession::initDefaultRequests() { status_t ExternalCameraDeviceSession::fillCaptureResult( common::V1_0::helper::CameraMetadata &md, nsecs_t timestamp) { - // android.control - // For USB camera, we don't know the AE state. Set the state to converged to - // indicate the frame should be good to use. Then apps don't have to wait the - // AE state. - const uint8_t aeState = ANDROID_CONTROL_AE_STATE_CONVERGED; - UPDATE(md, ANDROID_CONTROL_AE_STATE, &aeState, 1); - - const uint8_t ae_lock = ANDROID_CONTROL_AE_LOCK_OFF; - UPDATE(md, ANDROID_CONTROL_AE_LOCK, &ae_lock, 1); - bool afTrigger = false; { std::lock_guard lk(mAfTriggerLock); @@ -2951,46 +2595,10 @@ status_t ExternalCameraDeviceSession::fillCaptureResult( } UPDATE(md, ANDROID_CONTROL_AF_STATE, &afState, 1); - // Set AWB state to converged to indicate the frame should be good to use. 
- const uint8_t awbState = ANDROID_CONTROL_AWB_STATE_CONVERGED; - UPDATE(md, ANDROID_CONTROL_AWB_STATE, &awbState, 1); + camera_metadata_ro_entry activeArraySize = + mCameraCharacteristics.find(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE); - const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF; - UPDATE(md, ANDROID_CONTROL_AWB_LOCK, &awbLock, 1); - - camera_metadata_ro_entry active_array_size = - mCameraCharacteristics.find(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE); - - if (active_array_size.count == 0) { - ALOGE("%s: cannot find active array size!", __FUNCTION__); - return -EINVAL; - } - - const uint8_t flashState = ANDROID_FLASH_STATE_UNAVAILABLE; - UPDATE(md, ANDROID_FLASH_STATE, &flashState, 1); - - // This means pipeline latency of X frame intervals. The maximum number is 4. - const uint8_t requestPipelineMaxDepth = 4; - UPDATE(md, ANDROID_REQUEST_PIPELINE_DEPTH, &requestPipelineMaxDepth, 1); - - // android.scaler - const int32_t crop_region[] = { - active_array_size.data.i32[0], active_array_size.data.i32[1], - active_array_size.data.i32[2], active_array_size.data.i32[3], - }; - UPDATE(md, ANDROID_SCALER_CROP_REGION, crop_region, ARRAY_SIZE(crop_region)); - - // android.sensor - UPDATE(md, ANDROID_SENSOR_TIMESTAMP, ×tamp, 1); - - // android.statistics - const uint8_t lensShadingMapMode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF; - UPDATE(md, ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &lensShadingMapMode, 1); - - const uint8_t sceneFlicker = ANDROID_STATISTICS_SCENE_FLICKER_NONE; - UPDATE(md, ANDROID_STATISTICS_SCENE_FLICKER, &sceneFlicker, 1); - - return OK; + return fillCaptureResultCommon(md, timestamp, activeArraySize); } #undef ARRAY_SIZE diff --git a/camera/device/3.4/default/ExternalCameraUtils.cpp b/camera/device/3.4/default/ExternalCameraUtils.cpp index e25deff797..4a6381ea7b 100644 --- a/camera/device/3.4/default/ExternalCameraUtils.cpp +++ b/camera/device/3.4/default/ExternalCameraUtils.cpp @@ -18,10 +18,23 @@ #include #include +#include #include #include 
+ +#define HAVE_JPEG // required for libyuv.h to export MJPEG decode APIs +#include + +#include + #include "ExternalCameraUtils.h" +namespace { + +buffer_handle_t sEmptyBuffer = nullptr; + +} // Anonymous namespace + namespace android { namespace hardware { namespace camera { @@ -29,10 +42,13 @@ namespace device { namespace V3_4 { namespace implementation { +Frame::Frame(uint32_t width, uint32_t height, uint32_t fourcc) : + mWidth(width), mHeight(height), mFourcc(fourcc) {} + V4L2Frame::V4L2Frame( uint32_t w, uint32_t h, uint32_t fourcc, int bufIdx, int fd, uint32_t dataSize, uint64_t offset) : - mWidth(w), mHeight(h), mFourcc(fourcc), + Frame(w, h, fourcc), mBufferIndex(bufIdx), mFd(fd), mDataSize(dataSize), mOffset(offset) {} int V4L2Frame::map(uint8_t** data, size_t* dataSize) { @@ -75,9 +91,13 @@ V4L2Frame::~V4L2Frame() { unmap(); } +int V4L2Frame::getData(uint8_t** outData, size_t* dataSize) { + return map(outData, dataSize); +} + AllocatedFrame::AllocatedFrame( uint32_t w, uint32_t h) : - mWidth(w), mHeight(h), mFourcc(V4L2_PIX_FMT_YUV420) {}; + Frame(w, h, V4L2_PIX_FMT_YUV420) {}; AllocatedFrame::~AllocatedFrame() {} @@ -106,6 +126,17 @@ int AllocatedFrame::allocate(YCbCrLayout* out) { return 0; } +int AllocatedFrame::getData(uint8_t** outData, size_t* dataSize) { + YCbCrLayout layout; + int ret = allocate(&layout); + if (ret != 0) { + return ret; + } + *outData = mData.data(); + *dataSize = mData.size(); + return 0; +} + int AllocatedFrame::getLayout(YCbCrLayout* out) { IMapper::Rect noCrop = {0, 0, static_cast(mWidth), @@ -150,8 +181,520 @@ double SupportedV4L2Format::FrameRate::getDouble() const { return durationDenominator / static_cast(durationNumerator); } +::android::hardware::camera::common::V1_0::Status importBufferImpl( + /*inout*/std::map& circulatingBuffers, + /*inout*/HandleImporter& handleImporter, + int32_t streamId, + uint64_t bufId, buffer_handle_t buf, + /*out*/buffer_handle_t** outBufPtr, + bool allowEmptyBuf) { + using 
::android::hardware::camera::common::V1_0::Status; + if (buf == nullptr && bufId == BUFFER_ID_NO_BUFFER) { + if (allowEmptyBuf) { + *outBufPtr = &sEmptyBuffer; + return Status::OK; + } else { + ALOGE("%s: bufferId %" PRIu64 " has null buffer handle!", __FUNCTION__, bufId); + return Status::ILLEGAL_ARGUMENT; + } + } + + CirculatingBuffers& cbs = circulatingBuffers[streamId]; + if (cbs.count(bufId) == 0) { + if (buf == nullptr) { + ALOGE("%s: bufferId %" PRIu64 " has null buffer handle!", __FUNCTION__, bufId); + return Status::ILLEGAL_ARGUMENT; + } + // Register a newly seen buffer + buffer_handle_t importedBuf = buf; + handleImporter.importBuffer(importedBuf); + if (importedBuf == nullptr) { + ALOGE("%s: output buffer for stream %d is invalid!", __FUNCTION__, streamId); + return Status::INTERNAL_ERROR; + } else { + cbs[bufId] = importedBuf; + } + } + *outBufPtr = &cbs[bufId]; + return Status::OK; +} + +uint32_t getFourCcFromLayout(const YCbCrLayout& layout) { + intptr_t cb = reinterpret_cast(layout.cb); + intptr_t cr = reinterpret_cast(layout.cr); + if (std::abs(cb - cr) == 1 && layout.chromaStep == 2) { + // Interleaved format + if (layout.cb > layout.cr) { + return V4L2_PIX_FMT_NV21; + } else { + return V4L2_PIX_FMT_NV12; + } + } else if (layout.chromaStep == 1) { + // Planar format + if (layout.cb > layout.cr) { + return V4L2_PIX_FMT_YVU420; // YV12 + } else { + return V4L2_PIX_FMT_YUV420; // YU12 + } + } else { + return FLEX_YUV_GENERIC; + } +} + +int getCropRect( + CroppingType ct, const Size& inSize, const Size& outSize, IMapper::Rect* out) { + if (out == nullptr) { + ALOGE("%s: out is null", __FUNCTION__); + return -1; + } + + uint32_t inW = inSize.width; + uint32_t inH = inSize.height; + uint32_t outW = outSize.width; + uint32_t outH = outSize.height; + + // Handle special case where aspect ratio is close to input but scaled + // dimension is slightly larger than input + float arIn = ASPECT_RATIO(inSize); + float arOut = ASPECT_RATIO(outSize); + if 
(isAspectRatioClose(arIn, arOut)) { + out->left = 0; + out->top = 0; + out->width = inW; + out->height = inH; + return 0; + } + + if (ct == VERTICAL) { + uint64_t scaledOutH = static_cast(outH) * inW / outW; + if (scaledOutH > inH) { + ALOGE("%s: Output size %dx%d cannot be vertically cropped from input size %dx%d", + __FUNCTION__, outW, outH, inW, inH); + return -1; + } + scaledOutH = scaledOutH & ~0x1; // make it multiple of 2 + + out->left = 0; + out->top = ((inH - scaledOutH) / 2) & ~0x1; + out->width = inW; + out->height = static_cast(scaledOutH); + ALOGV("%s: crop %dx%d to %dx%d: top %d, scaledH %d", + __FUNCTION__, inW, inH, outW, outH, out->top, static_cast(scaledOutH)); + } else { + uint64_t scaledOutW = static_cast(outW) * inH / outH; + if (scaledOutW > inW) { + ALOGE("%s: Output size %dx%d cannot be horizontally cropped from input size %dx%d", + __FUNCTION__, outW, outH, inW, inH); + return -1; + } + scaledOutW = scaledOutW & ~0x1; // make it multiple of 2 + + out->left = ((inW - scaledOutW) / 2) & ~0x1; + out->top = 0; + out->width = static_cast(scaledOutW); + out->height = inH; + ALOGV("%s: crop %dx%d to %dx%d: top %d, scaledW %d", + __FUNCTION__, inW, inH, outW, outH, out->top, static_cast(scaledOutW)); + } + + return 0; +} + +int formatConvert( + const YCbCrLayout& in, const YCbCrLayout& out, Size sz, uint32_t format) { + int ret = 0; + switch (format) { + case V4L2_PIX_FMT_NV21: + ret = libyuv::I420ToNV21( + static_cast(in.y), + in.yStride, + static_cast(in.cb), + in.cStride, + static_cast(in.cr), + in.cStride, + static_cast(out.y), + out.yStride, + static_cast(out.cr), + out.cStride, + sz.width, + sz.height); + if (ret != 0) { + ALOGE("%s: convert to NV21 buffer failed! 
ret %d", + __FUNCTION__, ret); + return ret; + } + break; + case V4L2_PIX_FMT_NV12: + ret = libyuv::I420ToNV12( + static_cast(in.y), + in.yStride, + static_cast(in.cb), + in.cStride, + static_cast(in.cr), + in.cStride, + static_cast(out.y), + out.yStride, + static_cast(out.cb), + out.cStride, + sz.width, + sz.height); + if (ret != 0) { + ALOGE("%s: convert to NV12 buffer failed! ret %d", + __FUNCTION__, ret); + return ret; + } + break; + case V4L2_PIX_FMT_YVU420: // YV12 + case V4L2_PIX_FMT_YUV420: // YU12 + // TODO: maybe we can speed up here by somehow save this copy? + ret = libyuv::I420Copy( + static_cast(in.y), + in.yStride, + static_cast(in.cb), + in.cStride, + static_cast(in.cr), + in.cStride, + static_cast(out.y), + out.yStride, + static_cast(out.cb), + out.cStride, + static_cast(out.cr), + out.cStride, + sz.width, + sz.height); + if (ret != 0) { + ALOGE("%s: copy to YV12 or YU12 buffer failed! ret %d", + __FUNCTION__, ret); + return ret; + } + break; + case FLEX_YUV_GENERIC: + // TODO: b/72261744 write to arbitrary flexible YUV layout. Slow. + ALOGE("%s: unsupported flexible yuv layout" + " y %p cb %p cr %p y_str %d c_str %d c_step %d", + __FUNCTION__, out.y, out.cb, out.cr, + out.yStride, out.cStride, out.chromaStep); + return -1; + default: + ALOGE("%s: unknown YUV format 0x%x!", __FUNCTION__, format); + return -1; + } + return 0; +} + +int encodeJpegYU12( + const Size & inSz, const YCbCrLayout& inLayout, + int jpegQuality, const void *app1Buffer, size_t app1Size, + void *out, const size_t maxOutSize, size_t &actualCodeSize) +{ + /* libjpeg is a C library so we use C-style "inheritance" by + * putting libjpeg's jpeg_destination_mgr first in our custom + * struct. 
This allows us to cast jpeg_destination_mgr* to + * CustomJpegDestMgr* when we get it passed to us in a callback */ + struct CustomJpegDestMgr { + struct jpeg_destination_mgr mgr; + JOCTET *mBuffer; + size_t mBufferSize; + size_t mEncodedSize; + bool mSuccess; + } dmgr; + + jpeg_compress_struct cinfo = {}; + jpeg_error_mgr jerr; + + /* Initialize error handling with standard callbacks, but + * then override output_message (to print to ALOG) and + * error_exit to set a flag and print a message instead + * of killing the whole process */ + cinfo.err = jpeg_std_error(&jerr); + + cinfo.err->output_message = [](j_common_ptr cinfo) { + char buffer[JMSG_LENGTH_MAX]; + + /* Create the message */ + (*cinfo->err->format_message)(cinfo, buffer); + ALOGE("libjpeg error: %s", buffer); + }; + cinfo.err->error_exit = [](j_common_ptr cinfo) { + (*cinfo->err->output_message)(cinfo); + if(cinfo->client_data) { + auto & dmgr = + *reinterpret_cast(cinfo->client_data); + dmgr.mSuccess = false; + } + }; + /* Now that we initialized some callbacks, let's create our compressor */ + jpeg_create_compress(&cinfo); + + /* Initialize our destination manager */ + dmgr.mBuffer = static_cast(out); + dmgr.mBufferSize = maxOutSize; + dmgr.mEncodedSize = 0; + dmgr.mSuccess = true; + cinfo.client_data = static_cast(&dmgr); + + /* These lambdas become C-style function pointers and as per C++11 spec + * may not capture anything */ + dmgr.mgr.init_destination = [](j_compress_ptr cinfo) { + auto & dmgr = reinterpret_cast(*cinfo->dest); + dmgr.mgr.next_output_byte = dmgr.mBuffer; + dmgr.mgr.free_in_buffer = dmgr.mBufferSize; + ALOGV("%s:%d jpeg start: %p [%zu]", + __FUNCTION__, __LINE__, dmgr.mBuffer, dmgr.mBufferSize); + }; + + dmgr.mgr.empty_output_buffer = [](j_compress_ptr cinfo __unused) { + ALOGV("%s:%d Out of buffer", __FUNCTION__, __LINE__); + return 0; + }; + + dmgr.mgr.term_destination = [](j_compress_ptr cinfo) { + auto & dmgr = reinterpret_cast(*cinfo->dest); + dmgr.mEncodedSize = 
dmgr.mBufferSize - dmgr.mgr.free_in_buffer; + ALOGV("%s:%d Done with jpeg: %zu", __FUNCTION__, __LINE__, dmgr.mEncodedSize); + }; + cinfo.dest = reinterpret_cast(&dmgr); + + /* We are going to be using JPEG in raw data mode, so we are passing + * straight subsampled planar YCbCr and it will not touch our pixel + * data or do any scaling or anything */ + cinfo.image_width = inSz.width; + cinfo.image_height = inSz.height; + cinfo.input_components = 3; + cinfo.in_color_space = JCS_YCbCr; + + /* Initialize defaults and then override what we want */ + jpeg_set_defaults(&cinfo); + + jpeg_set_quality(&cinfo, jpegQuality, 1); + jpeg_set_colorspace(&cinfo, JCS_YCbCr); + cinfo.raw_data_in = 1; + cinfo.dct_method = JDCT_IFAST; + + /* Configure sampling factors. The sampling factor is JPEG subsampling 420 + * because the source format is YUV420. Note that libjpeg sampling factors + * are... a little weird. Sampling of Y=2,U=1,V=1 means there is 1 U and + * 1 V value for each 2 Y values */ + cinfo.comp_info[0].h_samp_factor = 2; + cinfo.comp_info[0].v_samp_factor = 2; + cinfo.comp_info[1].h_samp_factor = 1; + cinfo.comp_info[1].v_samp_factor = 1; + cinfo.comp_info[2].h_samp_factor = 1; + cinfo.comp_info[2].v_samp_factor = 1; + + /* Let's not hardcode YUV420 in 6 places... 5 was enough */ + int maxVSampFactor = std::max( { + cinfo.comp_info[0].v_samp_factor, + cinfo.comp_info[1].v_samp_factor, + cinfo.comp_info[2].v_samp_factor + }); + int cVSubSampling = cinfo.comp_info[0].v_samp_factor / + cinfo.comp_info[1].v_samp_factor; + + /* Start the compressor */ + jpeg_start_compress(&cinfo, TRUE); + + /* Compute our macroblock height, so we can pad our input to be vertically + * macroblock aligned. + * TODO: Does it need to be horizontally MCU aligned too? 
*/ + + size_t mcuV = DCTSIZE*maxVSampFactor; + size_t paddedHeight = mcuV * ((inSz.height + mcuV - 1) / mcuV); + + /* libjpeg uses arrays of row pointers, which makes it really easy to pad + * data vertically (unfortunately doesn't help horizontally) */ + std::vector yLines (paddedHeight); + std::vector cbLines(paddedHeight/cVSubSampling); + std::vector crLines(paddedHeight/cVSubSampling); + + uint8_t *py = static_cast(inLayout.y); + uint8_t *pcr = static_cast(inLayout.cr); + uint8_t *pcb = static_cast(inLayout.cb); + + for(uint32_t i = 0; i < paddedHeight; i++) + { + /* Once we are in the padding territory we still point to the last line + * effectively replicating it several times ~ CLAMP_TO_EDGE */ + int li = std::min(i, inSz.height - 1); + yLines[i] = static_cast(py + li * inLayout.yStride); + if(i < paddedHeight / cVSubSampling) + { + crLines[i] = static_cast(pcr + li * inLayout.cStride); + cbLines[i] = static_cast(pcb + li * inLayout.cStride); + } + } + + /* If APP1 data was passed in, use it */ + if(app1Buffer && app1Size) + { + jpeg_write_marker(&cinfo, JPEG_APP0 + 1, + static_cast(app1Buffer), app1Size); + } + + /* While we still have padded height left to go, keep giving it one + * macroblock at a time. 
*/ + while (cinfo.next_scanline < cinfo.image_height) { + const uint32_t batchSize = DCTSIZE * maxVSampFactor; + const uint32_t nl = cinfo.next_scanline; + JSAMPARRAY planes[3]{ &yLines[nl], + &cbLines[nl/cVSubSampling], + &crLines[nl/cVSubSampling] }; + + uint32_t done = jpeg_write_raw_data(&cinfo, planes, batchSize); + + if (done != batchSize) { + ALOGE("%s: compressed %u lines, expected %u (total %u/%u)", + __FUNCTION__, done, batchSize, cinfo.next_scanline, + cinfo.image_height); + return -1; + } + } + + /* This will flush everything */ + jpeg_finish_compress(&cinfo); + + /* Grab the actual code size and set it */ + actualCodeSize = dmgr.mEncodedSize; + + return 0; +} + +Size getMaxThumbnailResolution(const common::V1_0::helper::CameraMetadata& chars) { + Size thumbSize { 0, 0 }; + camera_metadata_ro_entry entry = + chars.find(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES); + for(uint32_t i = 0; i < entry.count; i += 2) { + Size sz { static_cast(entry.data.i32[i]), + static_cast(entry.data.i32[i+1]) }; + if(sz.width * sz.height > thumbSize.width * thumbSize.height) { + thumbSize = sz; + } + } + + if (thumbSize.width * thumbSize.height == 0) { + ALOGW("%s: non-zero thumbnail size not available", __FUNCTION__); + } + + return thumbSize; +} + +void freeReleaseFences(hidl_vec& results) { + for (auto& result : results) { + if (result.inputBuffer.releaseFence.getNativeHandle() != nullptr) { + native_handle_t* handle = const_cast( + result.inputBuffer.releaseFence.getNativeHandle()); + native_handle_close(handle); + native_handle_delete(handle); + } + for (auto& buf : result.outputBuffers) { + if (buf.releaseFence.getNativeHandle() != nullptr) { + native_handle_t* handle = const_cast( + buf.releaseFence.getNativeHandle()); + native_handle_close(handle); + native_handle_delete(handle); + } + } + } + return; +} + +#define ARRAY_SIZE(a) (sizeof(a) / sizeof(a[0])) +#define UPDATE(md, tag, data, size) \ +do { \ + if ((md).update((tag), (data), (size))) { \ + ALOGE("Update " #tag 
" failed!"); \ + return BAD_VALUE; \ + } \ +} while (0) + +status_t fillCaptureResultCommon( + common::V1_0::helper::CameraMetadata &md, nsecs_t timestamp, + camera_metadata_ro_entry& activeArraySize) { + if (activeArraySize.count < 4) { + ALOGE("%s: cannot find active array size!", __FUNCTION__); + return -EINVAL; + } + // android.control + // For USB camera, we don't know the AE state. Set the state to converged to + // indicate the frame should be good to use. Then apps don't have to wait the + // AE state. + const uint8_t aeState = ANDROID_CONTROL_AE_STATE_CONVERGED; + UPDATE(md, ANDROID_CONTROL_AE_STATE, &aeState, 1); + + const uint8_t ae_lock = ANDROID_CONTROL_AE_LOCK_OFF; + UPDATE(md, ANDROID_CONTROL_AE_LOCK, &ae_lock, 1); + + // Set AWB state to converged to indicate the frame should be good to use. + const uint8_t awbState = ANDROID_CONTROL_AWB_STATE_CONVERGED; + UPDATE(md, ANDROID_CONTROL_AWB_STATE, &awbState, 1); + + const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF; + UPDATE(md, ANDROID_CONTROL_AWB_LOCK, &awbLock, 1); + + const uint8_t flashState = ANDROID_FLASH_STATE_UNAVAILABLE; + UPDATE(md, ANDROID_FLASH_STATE, &flashState, 1); + + // This means pipeline latency of X frame intervals. The maximum number is 4. 
+ const uint8_t requestPipelineMaxDepth = 4; + UPDATE(md, ANDROID_REQUEST_PIPELINE_DEPTH, &requestPipelineMaxDepth, 1); + + // android.scaler + const int32_t crop_region[] = { + activeArraySize.data.i32[0], activeArraySize.data.i32[1], + activeArraySize.data.i32[2], activeArraySize.data.i32[3], + }; + UPDATE(md, ANDROID_SCALER_CROP_REGION, crop_region, ARRAY_SIZE(crop_region)); + + // android.sensor + UPDATE(md, ANDROID_SENSOR_TIMESTAMP, ×tamp, 1); + + // android.statistics + const uint8_t lensShadingMapMode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF; + UPDATE(md, ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &lensShadingMapMode, 1); + + const uint8_t sceneFlicker = ANDROID_STATISTICS_SCENE_FLICKER_NONE; + UPDATE(md, ANDROID_STATISTICS_SCENE_FLICKER, &sceneFlicker, 1); + + return OK; +} + +#undef ARRAY_SIZE +#undef UPDATE + } // namespace implementation } // namespace V3_4 + +namespace V3_6 { +namespace implementation { + +AllocatedV4L2Frame::AllocatedV4L2Frame(sp frameIn) : + Frame(frameIn->mWidth, frameIn->mHeight, frameIn->mFourcc) { + uint8_t* dataIn; + size_t dataSize; + if (frameIn->getData(&dataIn, &dataSize) != 0) { + ALOGE("%s: map input V4L2 frame failed!", __FUNCTION__); + return; + } + + mData.resize(dataSize); + std::memcpy(mData.data(), dataIn, dataSize); +} + +int AllocatedV4L2Frame::getData(uint8_t** outData, size_t* dataSize) { + if (outData == nullptr || dataSize == nullptr) { + ALOGE("%s: outData(%p)/dataSize(%p) must not be null", __FUNCTION__, outData, dataSize); + return -1; + } + + *outData = mData.data(); + *dataSize = mData.size(); + return 0; +} + +AllocatedV4L2Frame::~AllocatedV4L2Frame() {} + +} // namespace implementation +} // namespace V3_6 } // namespace device diff --git a/camera/device/3.4/default/include/ext_device_v3_4_impl/ExternalCameraDeviceSession.h b/camera/device/3.4/default/include/ext_device_v3_4_impl/ExternalCameraDeviceSession.h index 71b7c17dd6..ecab9cfa03 100644 --- 
a/camera/device/3.4/default/include/ext_device_v3_4_impl/ExternalCameraDeviceSession.h +++ b/camera/device/3.4/default/include/ext_device_v3_4_impl/ExternalCameraDeviceSession.h @@ -14,8 +14,8 @@ * limitations under the License. */ -#ifndef ANDROID_HARDWARE_CAMERA_DEVICE_V3_4_EXTCAMERADEVICE3SESSION_H -#define ANDROID_HARDWARE_CAMERA_DEVICE_V3_4_EXTCAMERADEVICE3SESSION_H +#ifndef ANDROID_HARDWARE_CAMERA_DEVICE_V3_4_EXTCAMERADEVICESESSION_H +#define ANDROID_HARDWARE_CAMERA_DEVICE_V3_4_EXTCAMERADEVICESESSION_H #include #include @@ -84,7 +84,8 @@ using ::android::sp; using ::android::Mutex; using ::android::base::unique_fd; -struct ExternalCameraDeviceSession : public virtual RefBase { +struct ExternalCameraDeviceSession : public virtual RefBase, + public virtual OutputThreadInterface { ExternalCameraDeviceSession(const sp&, const ExternalCameraConfig& cfg, @@ -110,6 +111,82 @@ struct ExternalCameraDeviceSession : public virtual RefBase { static const int kMaxStallStream = 1; static const uint32_t kMaxBytesPerPixel = 2; + class OutputThread : public android::Thread { + public: + OutputThread(wp parent, CroppingType, + const common::V1_0::helper::CameraMetadata&); + virtual ~OutputThread(); + + Status allocateIntermediateBuffers( + const Size& v4lSize, const Size& thumbSize, + const hidl_vec& streams, + uint32_t blobBufferSize); + Status submitRequest(const std::shared_ptr&); + void flush(); + void dump(int fd); + virtual bool threadLoop() override; + + void setExifMakeModel(const std::string& make, const std::string& model); + + // The remaining request list is returned for offline processing + std::list> switchToOffline(); + + protected: + // Methods to request output buffer in parallel + // No-op for device@3.4. 
Implemented in device@3.5 + virtual int requestBufferStart(const std::vector&) { return 0; } + virtual int waitForBufferRequestDone( + /*out*/std::vector*) { return 0; } + + static const int kFlushWaitTimeoutSec = 3; // 3 sec + static const int kReqWaitTimeoutMs = 33; // 33ms + static const int kReqWaitTimesMax = 90; // 33ms * 90 ~= 3 sec + + void waitForNextRequest(std::shared_ptr* out); + void signalRequestDone(); + + int cropAndScaleLocked( + sp& in, const Size& outSize, + YCbCrLayout* out); + + int cropAndScaleThumbLocked( + sp& in, const Size& outSize, + YCbCrLayout* out); + + int createJpegLocked(HalStreamBuffer &halBuf, + const common::V1_0::helper::CameraMetadata& settings); + + void clearIntermediateBuffers(); + + const wp mParent; + const CroppingType mCroppingType; + const common::V1_0::helper::CameraMetadata mCameraCharacteristics; + + mutable std::mutex mRequestListLock; // Protect acccess to mRequestList, + // mProcessingRequest and mProcessingFrameNumer + std::condition_variable mRequestCond; // signaled when a new request is submitted + std::condition_variable mRequestDoneCond; // signaled when a request is done processing + std::list> mRequestList; + bool mProcessingRequest = false; + uint32_t mProcessingFrameNumer = 0; + + // V4L2 frameIn + // (MJPG decode)-> mYu12Frame + // (Scale)-> mScaledYu12Frames + // (Format convert) -> output gralloc frames + mutable std::mutex mBufferLock; // Protect access to intermediate buffers + sp mYu12Frame; + sp mYu12ThumbFrame; + std::unordered_map, SizeHasher> mIntermediateBuffers; + std::unordered_map, SizeHasher> mScaledYu12Frames; + YCbCrLayout mYu12FrameLayout; + YCbCrLayout mYu12ThumbFrameLayout; + uint32_t mBlobBufferSize = 0; // 0 -> HAL derive buffer size, else: use given size + + std::string mExifMake; + std::string mExifModel; + }; + protected: // Methods from ::android::hardware::camera::device::V3_2::ICameraDeviceSession follow @@ -150,27 +227,22 @@ protected: 
ICameraDeviceSession::processCaptureRequest_3_4_cb _hidl_cb); protected: - struct HalStreamBuffer { - int32_t streamId; - uint64_t bufferId; - uint32_t width; - uint32_t height; - PixelFormat format; - V3_2::BufferUsageFlags usage; - buffer_handle_t* bufPtr; - int acquireFence; - bool fenceTimeout; - }; + // Methods from OutputThreadInterface + virtual Status importBuffer(int32_t streamId, + uint64_t bufId, buffer_handle_t buf, + /*out*/buffer_handle_t** outBufPtr, + bool allowEmptyBuf) override; - struct HalRequest { - uint32_t frameNumber; - common::V1_0::helper::CameraMetadata setting; - sp frameIn; - nsecs_t shutterTs; - std::vector buffers; - }; + virtual Status processCaptureResult(std::shared_ptr&) override; - static const uint64_t BUFFER_ID_NO_BUFFER = 0; + virtual Status processCaptureRequestError(const std::shared_ptr&, + /*out*/std::vector* msgs = nullptr, + /*out*/std::vector* results = nullptr) override; + + virtual ssize_t getJpegBufferSize(uint32_t width, uint32_t height) const override; + + virtual void notifyError(uint32_t frameNumber, int32_t streamId, ErrorCode ec) override; + // End of OutputThreadInterface methods Status constructDefaultRequestSettingsRaw(RequestTemplate type, V3_2::CameraMetadata *outMetadata); @@ -219,11 +291,6 @@ protected: // Optional argument for ICameraDeviceSession@3.5 impl bool allowEmptyBuf = false); - Status importBuffer(int32_t streamId, - uint64_t bufId, buffer_handle_t buf, - /*out*/buffer_handle_t** outBufPtr, - bool allowEmptyBuf); - Status importBufferLocked(int32_t streamId, uint64_t bufId, buffer_handle_t buf, /*out*/buffer_handle_t** outBufPtr, @@ -236,106 +303,15 @@ protected: Status processOneCaptureRequest(const CaptureRequest& request); - Status processCaptureResult(std::shared_ptr&); - Status processCaptureRequestError(const std::shared_ptr&); void notifyShutter(uint32_t frameNumber, nsecs_t shutterTs); - void notifyError(uint32_t frameNumber, int32_t streamId, ErrorCode ec); void 
invokeProcessCaptureResultCallback( hidl_vec &results, bool tryWriteFmq); - static void freeReleaseFences(hidl_vec&); Size getMaxJpegResolution() const; Size getMaxThumbResolution() const; - ssize_t getJpegBufferSize(uint32_t width, uint32_t height) const; - int waitForV4L2BufferReturnLocked(std::unique_lock& lk); - class OutputThread : public android::Thread { - public: - OutputThread(wp parent, CroppingType); - virtual ~OutputThread(); - - Status allocateIntermediateBuffers( - const Size& v4lSize, const Size& thumbSize, - const hidl_vec& streams, - uint32_t blobBufferSize); - Status submitRequest(const std::shared_ptr&); - void flush(); - void dump(int fd); - virtual bool threadLoop() override; - - void setExifMakeModel(const std::string& make, const std::string& model); - - protected: - // Methods to request output buffer in parallel - // No-op for device@3.4. Implemented in device@3.5 - virtual int requestBufferStart(const std::vector&) { return 0; } - virtual int waitForBufferRequestDone( - /*out*/std::vector*) { return 0; } - - static const uint32_t FLEX_YUV_GENERIC = static_cast('F') | - static_cast('L') << 8 | static_cast('E') << 16 | - static_cast('X') << 24; - // returns FLEX_YUV_GENERIC for formats other than YV12/YU12/NV12/NV21 - static uint32_t getFourCcFromLayout(const YCbCrLayout&); - static int getCropRect( - CroppingType ct, const Size& inSize, const Size& outSize, IMapper::Rect* out); - - static const int kFlushWaitTimeoutSec = 3; // 3 sec - static const int kReqWaitTimeoutMs = 33; // 33ms - static const int kReqWaitTimesMax = 90; // 33ms * 90 ~= 3 sec - - void waitForNextRequest(std::shared_ptr* out); - void signalRequestDone(); - - int cropAndScaleLocked( - sp& in, const Size& outSize, - YCbCrLayout* out); - - int cropAndScaleThumbLocked( - sp& in, const Size& outSize, - YCbCrLayout* out); - - int formatConvertLocked(const YCbCrLayout& in, const YCbCrLayout& out, - Size sz, uint32_t format); - - static int encodeJpegYU12(const Size &inSz, - 
const YCbCrLayout& inLayout, int jpegQuality, - const void *app1Buffer, size_t app1Size, - void *out, size_t maxOutSize, - size_t &actualCodeSize); - - int createJpegLocked(HalStreamBuffer &halBuf, const std::shared_ptr& req); - - const wp mParent; - const CroppingType mCroppingType; - - mutable std::mutex mRequestListLock; // Protect acccess to mRequestList, - // mProcessingRequest and mProcessingFrameNumer - std::condition_variable mRequestCond; // signaled when a new request is submitted - std::condition_variable mRequestDoneCond; // signaled when a request is done processing - std::list> mRequestList; - bool mProcessingRequest = false; - uint32_t mProcessingFrameNumer = 0; - - // V4L2 frameIn - // (MJPG decode)-> mYu12Frame - // (Scale)-> mScaledYu12Frames - // (Format convert) -> output gralloc frames - mutable std::mutex mBufferLock; // Protect access to intermediate buffers - sp mYu12Frame; - sp mYu12ThumbFrame; - std::unordered_map, SizeHasher> mIntermediateBuffers; - std::unordered_map, SizeHasher> mScaledYu12Frames; - YCbCrLayout mYu12FrameLayout; - YCbCrLayout mYu12ThumbFrameLayout; - uint32_t mBlobBufferSize = 0; // 0 -> HAL derive buffer size, else: use given size - - std::string mExifMake; - std::string mExifModel; - }; - // Protect (most of) HIDL interface methods from synchronized-entering mutable Mutex mInterfaceLock; @@ -381,12 +357,6 @@ protected: std::mutex mInflightFramesLock; // protect mInflightFrames std::unordered_set mInflightFrames; - // buffers currently circulating between HAL and camera service - // key: bufferId sent via HIDL interface - // value: imported buffer_handle_t - // Buffer will be imported during processCaptureRequest and will be freed - // when the its stream is deleted or camera device session is closed - typedef std::unordered_map CirculatingBuffers; // Stream ID -> circulating buffers map std::map mCirculatingBuffers; // Protect mCirculatingBuffers, must not lock mLock after acquiring this lock @@ -395,6 +365,8 @@ 
protected: std::mutex mAfTriggerLock; // protect mAfTrigger bool mAfTrigger = false; + uint32_t mBlobBufferSize = 0; + static HandleImporter sHandleImporter; /* Beginning of members not changed after initialize() */ @@ -410,6 +382,9 @@ protected: const Size mMaxThumbResolution; const Size mMaxJpegResolution; + + std::string mExifMake; + std::string mExifModel; /* End of members not changed after initialize() */ private: @@ -484,4 +459,4 @@ private: } // namespace hardware } // namespace android -#endif // ANDROID_HARDWARE_CAMERA_DEVICE_V3_4_EXTCAMERADEVICE3SESSION_H +#endif // ANDROID_HARDWARE_CAMERA_DEVICE_V3_4_EXTCAMERADEVICESESSION_H diff --git a/camera/device/3.4/default/include/ext_device_v3_4_impl/ExternalCameraUtils.h b/camera/device/3.4/default/include/ext_device_v3_4_impl/ExternalCameraUtils.h index 341c62218d..74f75eb246 100644 --- a/camera/device/3.4/default/include/ext_device_v3_4_impl/ExternalCameraUtils.h +++ b/camera/device/3.4/default/include/ext_device_v3_4_impl/ExternalCameraUtils.h @@ -17,16 +17,27 @@ #ifndef ANDROID_HARDWARE_CAMERA_DEVICE_V3_4_EXTCAMUTIL_H #define ANDROID_HARDWARE_CAMERA_DEVICE_V3_4_EXTCAMUTIL_H +#include +#include +#include #include #include #include +#include #include #include #include "tinyxml2.h" // XML parsing #include "utils/LightRefBase.h" +#include "utils/Timers.h" +#include +#include -using android::hardware::graphics::mapper::V2_0::IMapper; -using android::hardware::graphics::mapper::V2_0::YCbCrLayout; + +using ::android::hardware::graphics::mapper::V2_0::IMapper; +using ::android::hardware::graphics::mapper::V2_0::YCbCrLayout; +using ::android::hardware::camera::common::V1_0::helper::HandleImporter; +using ::android::hardware::camera::common::V1_0::Status; +using ::android::hardware::camera::device::V3_2::ErrorCode; namespace android { namespace hardware { @@ -113,16 +124,28 @@ struct SupportedV4L2Format { std::vector frameRates; }; +// A Base class with basic information about a frame +struct Frame : public 
VirtualLightRefBase { +public: + Frame(uint32_t width, uint32_t height, uint32_t fourcc); + const uint32_t mWidth; + const uint32_t mHeight; + const uint32_t mFourcc; + + // getData might involve map/allocation + virtual int getData(uint8_t** outData, size_t* dataSize) = 0; +}; + // A class provide access to a dequeued V4L2 frame buffer (mostly in MJPG format) // Also contains necessary information to enqueue the buffer back to V4L2 buffer queue -class V4L2Frame : public virtual VirtualLightRefBase { +class V4L2Frame : public Frame { public: V4L2Frame(uint32_t w, uint32_t h, uint32_t fourcc, int bufIdx, int fd, uint32_t dataSize, uint64_t offset); ~V4L2Frame() override; - const uint32_t mWidth; - const uint32_t mHeight; - const uint32_t mFourcc; + + virtual int getData(uint8_t** outData, size_t* dataSize) override; + const int mBufferIndex; // for later enqueue int map(uint8_t** data, size_t* dataSize); int unmap(); @@ -137,13 +160,13 @@ private: // A RAII class representing a CPU allocated YUV frame used as intermeidate buffers // when generating output images. -class AllocatedFrame : public virtual VirtualLightRefBase { +class AllocatedFrame : public Frame { public: - AllocatedFrame(uint32_t w, uint32_t h); // TODO: use Size? 
+ AllocatedFrame(uint32_t w, uint32_t h); // only support V4L2_PIX_FMT_YUV420 for now ~AllocatedFrame() override; - const uint32_t mWidth; - const uint32_t mHeight; - const uint32_t mFourcc; // Only support YU12 format for now + + virtual int getData(uint8_t** outData, size_t* dataSize) override; + int allocate(YCbCrLayout* out = nullptr); int getLayout(YCbCrLayout* out); int getCroppedLayout(const IMapper::Rect&, YCbCrLayout* out); // return non-zero for bad input @@ -165,8 +188,110 @@ const float kMinAspectRatio = 1.f; bool isAspectRatioClose(float ar1, float ar2); +struct HalStreamBuffer { + int32_t streamId; + uint64_t bufferId; + uint32_t width; + uint32_t height; + ::android::hardware::graphics::common::V1_0::PixelFormat format; + ::android::hardware::camera::device::V3_2::BufferUsageFlags usage; + buffer_handle_t* bufPtr; + int acquireFence; + bool fenceTimeout; +}; + +struct HalRequest { + uint32_t frameNumber; + common::V1_0::helper::CameraMetadata setting; + sp frameIn; + nsecs_t shutterTs; + std::vector buffers; +}; + +static const uint64_t BUFFER_ID_NO_BUFFER = 0; + +// buffers currently circulating between HAL and camera service +// key: bufferId sent via HIDL interface +// value: imported buffer_handle_t +// Buffer will be imported during processCaptureRequest (or requestStreamBuffer +// in the case of HAL buffer manager is enabled) and will be freed +// when the stream is deleted or camera device session is closed +typedef std::unordered_map CirculatingBuffers; + +::android::hardware::camera::common::V1_0::Status importBufferImpl( + /*inout*/std::map& circulatingBuffers, + /*inout*/HandleImporter& handleImporter, + int32_t streamId, + uint64_t bufId, buffer_handle_t buf, + /*out*/buffer_handle_t** outBufPtr, + bool allowEmptyBuf); + +static const uint32_t FLEX_YUV_GENERIC = static_cast('F') | + static_cast('L') << 8 | static_cast('E') << 16 | + static_cast('X') << 24; + +// returns FLEX_YUV_GENERIC for formats other than YV12/YU12/NV12/NV21 +uint32_t 
getFourCcFromLayout(const YCbCrLayout&); + +using ::android::hardware::camera::external::common::Size; +int getCropRect(CroppingType ct, const Size& inSize, + const Size& outSize, IMapper::Rect* out); + +int formatConvert(const YCbCrLayout& in, const YCbCrLayout& out, Size sz, uint32_t format); + +int encodeJpegYU12(const Size &inSz, + const YCbCrLayout& inLayout, int jpegQuality, + const void *app1Buffer, size_t app1Size, + void *out, size_t maxOutSize, + size_t &actualCodeSize); + +Size getMaxThumbnailResolution(const common::V1_0::helper::CameraMetadata&); + +void freeReleaseFences(hidl_vec&); + +status_t fillCaptureResultCommon(common::V1_0::helper::CameraMetadata& md, nsecs_t timestamp, + camera_metadata_ro_entry& activeArraySize); + +// Interface for OutputThread calling back to parent +struct OutputThreadInterface : public virtual RefBase { + virtual ::android::hardware::camera::common::V1_0::Status importBuffer( + int32_t streamId, uint64_t bufId, buffer_handle_t buf, + /*out*/buffer_handle_t** outBufPtr, bool allowEmptyBuf) = 0; + + virtual void notifyError(uint32_t frameNumber, int32_t streamId, ErrorCode ec) = 0; + + // Callbacks are fired within the method if msgs/results are nullptr. + // Otherwise the callbacks will be returned and caller is responsible to + // fire the callback later + virtual ::android::hardware::camera::common::V1_0::Status processCaptureRequestError( + const std::shared_ptr&, + /*out*/std::vector* msgs = nullptr, + /*out*/std::vector* results = nullptr) = 0; + + virtual ::android::hardware::camera::common::V1_0::Status processCaptureResult( + std::shared_ptr&) = 0; + + virtual ssize_t getJpegBufferSize(uint32_t width, uint32_t height) const = 0; +}; + } // namespace implementation } // namespace V3_4 + +namespace V3_6 { +namespace implementation { + +// A CPU copy of a mapped V4L2Frame. Will map the input V4L2 frame. 
+class AllocatedV4L2Frame : public V3_4::implementation::Frame { +public: + AllocatedV4L2Frame(sp frameIn); + ~AllocatedV4L2Frame() override; + virtual int getData(uint8_t** outData, size_t* dataSize) override; +private: + std::vector mData; +}; + +} // namespace implementation +} // namespace V3_6 } // namespace device } // namespace camera } // namespace hardware diff --git a/camera/device/3.5/default/ExternalCameraDeviceSession.cpp b/camera/device/3.5/default/ExternalCameraDeviceSession.cpp index 00c1d0de39..287ac324ec 100644 --- a/camera/device/3.5/default/ExternalCameraDeviceSession.cpp +++ b/camera/device/3.5/default/ExternalCameraDeviceSession.cpp @@ -80,7 +80,7 @@ Status ExternalCameraDeviceSession::importRequestLocked( ExternalCameraDeviceSession::BufferRequestThread::BufferRequestThread( - wp parent, + wp parent, sp callbacks) : mParent(parent), mCallbacks(callbacks) {} @@ -254,7 +254,8 @@ void ExternalCameraDeviceSession::initOutputThread() { mBufferRequestThread = new BufferRequestThread(this, mCallback_3_5); mBufferRequestThread->run("ExtCamBufReq", PRIORITY_DISPLAY); } - mOutputThread = new OutputThread(this, mCroppingType, mBufferRequestThread); + mOutputThread = new OutputThread( + this, mCroppingType, mCameraCharacteristics, mBufferRequestThread); } void ExternalCameraDeviceSession::closeOutputThreadImpl() { @@ -271,10 +272,11 @@ void ExternalCameraDeviceSession::closeOutputThread() { } ExternalCameraDeviceSession::OutputThread::OutputThread( - wp parent, + wp parent, CroppingType ct, + const common::V1_0::helper::CameraMetadata& chars, sp bufReqThread) : - V3_4::implementation::ExternalCameraDeviceSession::OutputThread(parent, ct), + V3_4::implementation::ExternalCameraDeviceSession::OutputThread(parent, ct, chars), mBufferRequestThread(bufReqThread) {} ExternalCameraDeviceSession::OutputThread::~OutputThread() {} diff --git a/camera/device/3.5/default/include/ext_device_v3_5_impl/ExternalCameraDeviceSession.h 
b/camera/device/3.5/default/include/ext_device_v3_5_impl/ExternalCameraDeviceSession.h index 281f93a13b..e89ef45f52 100644 --- a/camera/device/3.5/default/include/ext_device_v3_5_impl/ExternalCameraDeviceSession.h +++ b/camera/device/3.5/default/include/ext_device_v3_5_impl/ExternalCameraDeviceSession.h @@ -14,8 +14,8 @@ * limitations under the License. */ -#ifndef ANDROID_HARDWARE_CAMERA_DEVICE_V3_5_EXTCAMERADEVICE3SESSION_H -#define ANDROID_HARDWARE_CAMERA_DEVICE_V3_5_EXTCAMERADEVICE3SESSION_H +#ifndef ANDROID_HARDWARE_CAMERA_DEVICE_V3_5_EXTCAMERADEVICESESSION_H +#define ANDROID_HARDWARE_CAMERA_DEVICE_V3_5_EXTCAMERADEVICESESSION_H #include #include @@ -72,6 +72,7 @@ using ::android::base::unique_fd; using ::android::hardware::camera::device::V3_4::implementation::SupportedV4L2Format; using ::android::hardware::camera::device::V3_4::implementation::CroppingType; +using ::android::hardware::camera::device::V3_4::implementation::HalStreamBuffer; struct ExternalCameraDeviceSession : public V3_4::implementation::ExternalCameraDeviceSession { @@ -97,6 +98,62 @@ struct ExternalCameraDeviceSession : public V3_4::implementation::ExternalCamera config, supportedFormats, devCfg); } + class BufferRequestThread : public android::Thread { + public: + BufferRequestThread( + wp parent, + sp callbacks); + + int requestBufferStart(const std::vector&); + int waitForBufferRequestDone( + /*out*/std::vector*); + + virtual bool threadLoop() override; + + private: + void waitForNextRequest(); + + const wp mParent; + const sp mCallbacks; + + std::mutex mLock; + bool mRequestingBuffer = false; + + std::vector mBufferReqs; + std::vector mPendingReturnBufferReqs; + // mHalBufferReqs is not under mLock protection during the HIDL transaction + hidl_vec mHalBufferReqs; + + // request buffers takes much less time in steady state, but can take much longer + // when requesting 1st buffer from a stream. + // TODO: consider a separate timeout for new vs. steady state? 
+ // TODO: or make sure framework is warming up the pipeline during configure new stream? + static const int kReqProcTimeoutMs = 66; + + static const int kReqWaitTimeoutMs = 33; + static const int kReqWaitTimesWarn = 90; // 33ms * 90 ~= 3 sec + std::condition_variable mRequestCond; // signaled when a new buffer request incoming + std::condition_variable mRequestDoneCond; // signaled when a request is done + }; + + class OutputThread : + public V3_4::implementation::ExternalCameraDeviceSession::OutputThread { + public: + // TODO: pass buffer request thread to OutputThread ctor + OutputThread(wp parent, CroppingType, + const common::V1_0::helper::CameraMetadata&, + sp bufReqThread); + virtual ~OutputThread(); + + protected: + // Methods to request output buffer in parallel + virtual int requestBufferStart(const std::vector&) override; + virtual int waitForBufferRequestDone( + /*out*/std::vector*) override; + + const sp mBufferRequestThread; + }; + protected: // Methods from v3.4 and earlier will trampoline to inherited implementation Return configureStreams_3_5( @@ -120,63 +177,8 @@ protected: hidl_vec& allBufPtrs, hidl_vec& allFences) override; - class BufferRequestThread : public android::Thread { - public: - BufferRequestThread( - wp parent, - sp callbacks); - - int requestBufferStart(const std::vector&); - int waitForBufferRequestDone( - /*out*/std::vector*); - - virtual bool threadLoop() override; - - private: - void waitForNextRequest(); - - const wp mParent; - const sp mCallbacks; - - std::mutex mLock; - bool mRequestingBuffer = false; - - std::vector mBufferReqs; - std::vector mPendingReturnBufferReqs; - // mHalBufferReqs is not under mLock protection during the HIDL transaction - hidl_vec mHalBufferReqs; - - // request buffers takes much less time in steady state, but can take much longer - // when requesting 1st buffer from a stream. - // TODO: consider a separate timeout for new vs. steady state? 
- // TODO: or make sure framework is warming up the pipeline during configure new stream? - static const int kReqProcTimeoutMs = 66; - - static const int kReqWaitTimeoutMs = 33; - static const int kReqWaitTimesWarn = 90; // 33ms * 90 ~= 3 sec - std::condition_variable mRequestCond; // signaled when a new buffer request incoming - std::condition_variable mRequestDoneCond; // signaled when a request is done - }; - sp mBufferRequestThread; - class OutputThread : - public V3_4::implementation::ExternalCameraDeviceSession::OutputThread { - public: - // TODO: pass buffer request thread to OutputThread ctor - OutputThread(wp parent, CroppingType, - sp bufReqThread); - virtual ~OutputThread(); - - protected: - // Methods to request output buffer in parallel - virtual int requestBufferStart(const std::vector&) override; - virtual int waitForBufferRequestDone( - /*out*/std::vector*) override; - - const sp mBufferRequestThread; - }; - sp mCallback_3_5; bool mSupportBufMgr; @@ -270,4 +272,4 @@ private: } // namespace hardware } // namespace android -#endif // ANDROID_HARDWARE_CAMERA_DEVICE_V3_5_EXTCAMERADEVICE3SESSION_H +#endif // ANDROID_HARDWARE_CAMERA_DEVICE_V3_5_EXTCAMERADEVICESESSION_H diff --git a/camera/device/3.6/default/Android.bp b/camera/device/3.6/default/Android.bp index ce51185193..a2ddebdd24 100644 --- a/camera/device/3.6/default/Android.bp +++ b/camera/device/3.6/default/Android.bp @@ -28,6 +28,7 @@ cc_library_shared { srcs: [ "ExternalCameraDevice.cpp", "ExternalCameraDeviceSession.cpp", + "ExternalCameraOfflineSession.cpp", ], shared_libs: [ "libhidlbase", diff --git a/camera/device/3.6/default/ExternalCameraDeviceSession.cpp b/camera/device/3.6/default/ExternalCameraDeviceSession.cpp index e14ae992b8..0cc81bbe9e 100644 --- a/camera/device/3.6/default/ExternalCameraDeviceSession.cpp +++ b/camera/device/3.6/default/ExternalCameraDeviceSession.cpp @@ -73,12 +73,8 @@ Return ExternalCameraDeviceSession::configureStreams_3_6( Status status = 
configureStreams(config_v32, &outStreams_v33, blobBufferSize); - outStreams.streams.resize(outStreams_v33.streams.size()); - for (size_t i = 0; i < outStreams.streams.size(); i++) { - outStreams.streams[i].v3_4.v3_3 = outStreams_v33.streams[i]; - // TODO: implement it later - outStreams.streams[i].supportOffline = false; - } + fillOutputStream3_6(outStreams_v33, &outStreams); + _hidl_cb(status, outStreams); return Void(); } @@ -86,12 +82,273 @@ Return ExternalCameraDeviceSession::configureStreams_3_6( Return ExternalCameraDeviceSession::switchToOffline( const hidl_vec& streamsToKeep, ICameraDeviceSession::switchToOffline_cb _hidl_cb) { - // TODO: implement this - (void) streamsToKeep; - (void) _hidl_cb; + std::vector msgs; + std::vector results; + CameraOfflineSessionInfo info; + sp session; + + Status st = switchToOffline(streamsToKeep, &msgs, &results, &info, &session); + + mCallback->notify(msgs); + hidl_vec hidlResults(std::move(results)); + invokeProcessCaptureResultCallback(hidlResults, /* tryWriteFmq */true); + V3_4::implementation::freeReleaseFences(hidlResults); + + _hidl_cb(st, info, session); return Void(); } +void ExternalCameraDeviceSession::fillOutputStream3_6( + const V3_3::HalStreamConfiguration& outStreams_v33, + /*out*/V3_6::HalStreamConfiguration* outStreams_v36) { + if (outStreams_v36 == nullptr) { + ALOGE("%s: outStreams_v36 must not be null!", __FUNCTION__); + return; + } + Mutex::Autolock _l(mLock); + outStreams_v36->streams.resize(outStreams_v33.streams.size()); + for (size_t i = 0; i < outStreams_v36->streams.size(); i++) { + outStreams_v36->streams[i].v3_4.v3_3 = outStreams_v33.streams[i]; + outStreams_v36->streams[i].supportOffline = + supportOfflineLocked(outStreams_v33.streams[i].v3_2.id); + } +} + +bool ExternalCameraDeviceSession::supportOfflineLocked(int32_t streamId) { + const Stream& stream = mStreamMap[streamId]; + if (stream.format == PixelFormat::BLOB && + stream.dataSpace == static_cast(Dataspace::V0_JFIF)) { + return true; + } 
+ // TODO: support YUV output stream? + return false; +} + +bool ExternalCameraDeviceSession::canDropRequest(const hidl_vec& offlineStreams, + std::shared_ptr halReq) { + for (const auto& buffer : halReq->buffers) { + for (auto offlineStreamId : offlineStreams) { + if (buffer.streamId == offlineStreamId) { + return false; + } + } + } + // Only drop a request completely if it has no offline output + return true; +} + +void ExternalCameraDeviceSession::fillOfflineSessionInfo(const hidl_vec& offlineStreams, + std::deque>& offlineReqs, + const std::map& circulatingBuffers, + /*out*/CameraOfflineSessionInfo* info) { + if (info == nullptr) { + ALOGE("%s: output info must not be null!", __FUNCTION__); + return; + } + + info->offlineStreams.resize(offlineStreams.size()); + info->offlineRequests.resize(offlineReqs.size()); + + std::unordered_map outstandingBufs(offlineStreams.size()); + for (const auto streamId : offlineStreams) { + outstandingBufs.insert({streamId, 0}); + } + // Fill in offline reqs and count outstanding buffers + for (size_t i = 0; i < offlineReqs.size(); i++) { + info->offlineRequests[i].frameNumber = offlineReqs[i]->frameNumber; + info->offlineRequests[i].pendingStreams.resize(offlineReqs[i]->buffers.size()); + for (size_t bIdx = 0; bIdx < offlineReqs[i]->buffers.size(); bIdx++) { + int32_t streamId = offlineReqs[i]->buffers[bIdx].streamId; + info->offlineRequests[i].pendingStreams[bIdx] = streamId; + outstandingBufs[streamId]++; + } + } + + for (size_t i = 0; i < offlineStreams.size(); i++) { + int32_t streamId = offlineStreams[i]; + info->offlineStreams[i].id = streamId; + info->offlineStreams[i].numOutstandingBuffers = outstandingBufs[streamId]; + const CirculatingBuffers& bufIdMap = circulatingBuffers.at(streamId); + info->offlineStreams[i].circulatingBufferIds.resize(bufIdMap.size()); + size_t bIdx = 0; + for (const auto& pair : bufIdMap) { + // Fill in bufferId + info->offlineStreams[i].circulatingBufferIds[bIdx++] = pair.first; + } + + } +} + 
+Status ExternalCameraDeviceSession::switchToOffline(const hidl_vec& offlineStreams, + /*out*/std::vector* msgs, + /*out*/std::vector* results, + /*out*/CameraOfflineSessionInfo* info, + /*out*/sp* session) { + ATRACE_CALL(); + if (offlineStreams.size() > 1) { + ALOGE("%s: more than one offline stream is not supported", __FUNCTION__); + return Status::ILLEGAL_ARGUMENT; + } + + if (msgs == nullptr || results == nullptr || info == nullptr || session == nullptr) { + ALOGE("%s: output arguments (%p, %p, %p, %p) must not be null", __FUNCTION__, + msgs, results, info, session); + return Status::ILLEGAL_ARGUMENT; + } + + msgs->clear(); + results->clear(); + + Mutex::Autolock _il(mInterfaceLock); + Status status = initStatus(); + if (status != Status::OK) { + return status; + } + + Mutex::Autolock _l(mLock); + for (auto streamId : offlineStreams) { + if (!supportOfflineLocked(streamId)) { + return Status::ILLEGAL_ARGUMENT; + } + } + + // pause output thread and get all remaining inflight requests + auto remainingReqs = mOutputThread->switchToOffline(); + std::vector> halReqs; + + // Send out buffer/request error for remaining requests and filter requests + // to be handled in offline mode + for (auto& halReq : remainingReqs) { + bool dropReq = canDropRequest(offlineStreams, halReq); + if (dropReq) { + // Request is dropped completely. 
Just send request error and + // there is no need to send the request to offline session + processCaptureRequestError(halReq, msgs, results); + continue; + } + + // All requests reach here must have at least one offline stream output + NotifyMsg shutter; + shutter.type = MsgType::SHUTTER; + shutter.msg.shutter.frameNumber = halReq->frameNumber; + shutter.msg.shutter.timestamp = halReq->shutterTs; + msgs->push_back(shutter); + + std::vector offlineBuffers; + for (const auto& buffer : halReq->buffers) { + bool dropBuffer = true; + for (auto offlineStreamId : offlineStreams) { + if (buffer.streamId == offlineStreamId) { + dropBuffer = false; + break; + } + } + if (dropBuffer) { + NotifyMsg error; + error.type = MsgType::ERROR; + error.msg.error.frameNumber = halReq->frameNumber; + error.msg.error.errorStreamId = buffer.streamId; + error.msg.error.errorCode = ErrorCode::ERROR_BUFFER; + msgs->push_back(error); + + CaptureResult result; + result.frameNumber = halReq->frameNumber; + result.partialResult = 0; // buffer only result + result.inputBuffer.streamId = -1; + result.outputBuffers.resize(1); + result.outputBuffers[0].streamId = buffer.streamId; + result.outputBuffers[0].bufferId = buffer.bufferId; + result.outputBuffers[0].status = BufferStatus::ERROR; + if (buffer.acquireFence >= 0) { + native_handle_t* handle = native_handle_create(/*numFds*/1, /*numInts*/0); + handle->data[0] = buffer.acquireFence; + result.outputBuffers[0].releaseFence.setTo(handle, /*shouldOwn*/false); + } + results->push_back(result); + } else { + offlineBuffers.push_back(buffer); + } + } + halReq->buffers = offlineBuffers; + halReqs.push_back(halReq); + } + + // convert hal requests to offline request + std::deque> offlineReqs(halReqs.size()); + for (auto& v4lReq : halReqs) { + std::shared_ptr halReq = std::make_shared(); + halReq->frameNumber = v4lReq->frameNumber; + halReq->setting = v4lReq->setting; + halReq->shutterTs = v4lReq->shutterTs; + halReq->buffers = v4lReq->buffers; + sp 
v4l2Frame = + static_cast(v4lReq->frameIn.get()); + halReq->frameIn = new AllocatedV4L2Frame(v4l2Frame); + offlineReqs.push_back(halReq); + // enqueue V4L2 frame + enqueueV4l2Frame(v4l2Frame); + } + + // Collect buffer caches/streams + hidl_vec streamInfos; + streamInfos.resize(offlineStreams.size()); + std::map circulatingBuffers; + { + Mutex::Autolock _l(mCbsLock); + size_t idx = 0; + for(auto streamId : offlineStreams) { + circulatingBuffers[streamId] = mCirculatingBuffers.at(streamId); + mCirculatingBuffers.erase(streamId); + streamInfos[idx++] = mStreamMap.at(streamId); + mStreamMap.erase(streamId); + } + } + + fillOfflineSessionInfo(offlineStreams, offlineReqs, circulatingBuffers, info); + + // create the offline session object + bool afTrigger; + { + std::lock_guard lk(mAfTriggerLock); + afTrigger = mAfTrigger; + } + sp sessionImpl = new ExternalCameraOfflineSession( + mCroppingType, mCameraCharacteristics, mCameraId, + mExifMake, mExifModel, mBlobBufferSize, afTrigger, + streamInfos, offlineReqs, circulatingBuffers); + + bool initFailed = sessionImpl->initialize(); + if (initFailed) { + ALOGE("%s: offline session initialize failed!", __FUNCTION__); + return Status::INTERNAL_ERROR; + } + + // cleanup stream and buffer caches + { + Mutex::Autolock _l(mCbsLock); + for(auto pair : mStreamMap) { + cleanupBuffersLocked(/*Stream ID*/pair.first); + } + mCirculatingBuffers.clear(); + } + mStreamMap.clear(); + + // update inflight records + { + std::lock_guard lk(mInflightFramesLock); + mInflightFrames.clear(); + } + + // stop v4l2 streaming + if (v4l2StreamOffLocked() !=0) { + ALOGE("%s: stop V4L2 streaming failed!", __FUNCTION__); + return Status::INTERNAL_ERROR; + } + + *session = sessionImpl->getInterface(); + return Status::OK; +} + } // namespace implementation } // namespace V3_6 } // namespace device diff --git a/camera/device/3.6/default/ExternalCameraOfflineSession.cpp b/camera/device/3.6/default/ExternalCameraOfflineSession.cpp new file mode 100644 index 
0000000000..e606fda832 --- /dev/null +++ b/camera/device/3.6/default/ExternalCameraOfflineSession.cpp @@ -0,0 +1,554 @@ +/* + * Copyright (C) 2020 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "ExtCamOfflnSsn@3.6" +#define ATRACE_TAG ATRACE_TAG_CAMERA +#include + +#include +#include + +#define HAVE_JPEG // required for libyuv.h to export MJPEG decode APIs +#include + +#include +#include "ExternalCameraOfflineSession.h" + +namespace { + +// Size of request/result metadata fast message queue. Change to 0 to always use hwbinder buffer. 
+static constexpr size_t kMetadataMsgQueueSize = 1 << 18 /* 256kB */; + +} // anonymous namespace + +namespace android { +namespace hardware { +namespace camera { +namespace device { +namespace V3_6 { +namespace implementation { + +// static instance +HandleImporter ExternalCameraOfflineSession::sHandleImporter; + +using V3_5::implementation::ExternalCameraDeviceSession; + +ExternalCameraOfflineSession::ExternalCameraOfflineSession( + const CroppingType& croppingType, + const common::V1_0::helper::CameraMetadata& chars, + const std::string& cameraId, + const std::string& exifMake, + const std::string& exifModel, + const uint32_t blobBufferSize, + const bool afTrigger, + const hidl_vec& offlineStreams, + std::deque>& offlineReqs, + const std::map& circulatingBuffers) : + mCroppingType(croppingType), mChars(chars), mCameraId(cameraId), + mExifMake(exifMake), mExifModel(exifModel), mBlobBufferSize(blobBufferSize), + mAfTrigger(afTrigger), mOfflineStreams(offlineStreams), mOfflineReqs(offlineReqs), + mCirculatingBuffers(circulatingBuffers) {} + +ExternalCameraOfflineSession::~ExternalCameraOfflineSession() { + close(); +} + +bool ExternalCameraOfflineSession::initialize() { + mResultMetadataQueue = std::make_shared( + kMetadataMsgQueueSize, false /* non blocking */); + if (!mResultMetadataQueue->isValid()) { + ALOGE("%s: invalid result fmq", __FUNCTION__); + return true; + } + return false; +} + +void ExternalCameraOfflineSession::initOutputThread() { + if (mOutputThread != nullptr) { + ALOGE("%s: OutputThread already exist!", __FUNCTION__); + return; + } + + mBufferRequestThread = new ExternalCameraDeviceSession::BufferRequestThread( + this, mCallback); + mBufferRequestThread->run("ExtCamBufReq", PRIORITY_DISPLAY); + + mOutputThread = new OutputThread(this, mCroppingType, mChars, + mBufferRequestThread, mOfflineReqs); + + mOutputThread->setExifMakeModel(mExifMake, mExifModel); + + Size inputSize = { mOfflineReqs[0]->frameIn->mWidth, mOfflineReqs[0]->frameIn->mHeight}; 
+ Size maxThumbSize = V3_4::implementation::getMaxThumbnailResolution(mChars); + mOutputThread->allocateIntermediateBuffers( + inputSize, maxThumbSize, mOfflineStreams, mBlobBufferSize); + + mOutputThread->run("ExtCamOfflnOut", PRIORITY_DISPLAY); +} + +bool ExternalCameraOfflineSession::OutputThread::threadLoop() { + auto parent = mParent.promote(); + if (parent == nullptr) { + ALOGE("%s: session has been disconnected!", __FUNCTION__); + return false; + } + + if (mOfflineReqs.empty()) { + ALOGI("%s: all offline requests are processed. Stopping.", __FUNCTION__); + return false; + } + + std::shared_ptr req = mOfflineReqs.front(); + mOfflineReqs.pop_front(); + + auto onDeviceError = [&](auto... args) { + ALOGE(args...); + parent->notifyError( + req->frameNumber, /*stream*/-1, ErrorCode::ERROR_DEVICE); + signalRequestDone(); + return false; + }; + + if (req->frameIn->mFourcc != V4L2_PIX_FMT_MJPEG && req->frameIn->mFourcc != V4L2_PIX_FMT_Z16) { + return onDeviceError("%s: do not support V4L2 format %c%c%c%c", __FUNCTION__, + req->frameIn->mFourcc & 0xFF, + (req->frameIn->mFourcc >> 8) & 0xFF, + (req->frameIn->mFourcc >> 16) & 0xFF, + (req->frameIn->mFourcc >> 24) & 0xFF); + } + + int res = requestBufferStart(req->buffers); + if (res != 0) { + ALOGE("%s: send BufferRequest failed! res %d", __FUNCTION__, res); + return onDeviceError("%s: failed to send buffer request!", __FUNCTION__); + } + + std::unique_lock lk(mBufferLock); + // Convert input V4L2 frame to YU12 of the same size + // TODO: see if we can save some computation by converting to YV12 here + uint8_t* inData; + size_t inDataSize; + if (req->frameIn->getData(&inData, &inDataSize) != 0) { + lk.unlock(); + return onDeviceError("%s: V4L2 buffer map failed", __FUNCTION__); + } + + // TODO: in some special case maybe we can decode jpg directly to gralloc output? 
+ if (req->frameIn->mFourcc == V4L2_PIX_FMT_MJPEG) { + ATRACE_BEGIN("MJPGtoI420"); + int res = libyuv::MJPGToI420( + inData, inDataSize, static_cast(mYu12FrameLayout.y), mYu12FrameLayout.yStride, + static_cast(mYu12FrameLayout.cb), mYu12FrameLayout.cStride, + static_cast(mYu12FrameLayout.cr), mYu12FrameLayout.cStride, + mYu12Frame->mWidth, mYu12Frame->mHeight, mYu12Frame->mWidth, mYu12Frame->mHeight); + ATRACE_END(); + + if (res != 0) { + // For some webcam, the first few V4L2 frames might be malformed... + ALOGE("%s: Convert V4L2 frame to YU12 failed! res %d", __FUNCTION__, res); + lk.unlock(); + Status st = parent->processCaptureRequestError(req); + if (st != Status::OK) { + return onDeviceError("%s: failed to process capture request error!", __FUNCTION__); + } + signalRequestDone(); + return true; + } + } + + ATRACE_BEGIN("Wait for BufferRequest done"); + res = waitForBufferRequestDone(&req->buffers); + ATRACE_END(); + + if (res != 0) { + ALOGE("%s: wait for BufferRequest done failed! 
res %d", __FUNCTION__, res); + lk.unlock(); + return onDeviceError("%s: failed to process buffer request error!", __FUNCTION__); + } + + ALOGV("%s processing new request", __FUNCTION__); + const int kSyncWaitTimeoutMs = 500; + for (auto& halBuf : req->buffers) { + if (*(halBuf.bufPtr) == nullptr) { + ALOGW("%s: buffer for stream %d missing", __FUNCTION__, halBuf.streamId); + halBuf.fenceTimeout = true; + } else if (halBuf.acquireFence >= 0) { + int ret = sync_wait(halBuf.acquireFence, kSyncWaitTimeoutMs); + if (ret) { + halBuf.fenceTimeout = true; + } else { + ::close(halBuf.acquireFence); + halBuf.acquireFence = -1; + } + } + + if (halBuf.fenceTimeout) { + continue; + } + + // Gralloc lockYCbCr the buffer + switch (halBuf.format) { + case PixelFormat::BLOB: { + int ret = createJpegLocked(halBuf, req->setting); + + if(ret != 0) { + lk.unlock(); + return onDeviceError("%s: createJpegLocked failed with %d", + __FUNCTION__, ret); + } + } break; + case PixelFormat::Y16: { + void* outLayout = sHandleImporter.lock(*(halBuf.bufPtr), halBuf.usage, inDataSize); + + std::memcpy(outLayout, inData, inDataSize); + + int relFence = sHandleImporter.unlock(*(halBuf.bufPtr)); + if (relFence >= 0) { + halBuf.acquireFence = relFence; + } + } break; + case PixelFormat::YCBCR_420_888: + case PixelFormat::YV12: { + IMapper::Rect outRect {0, 0, + static_cast(halBuf.width), + static_cast(halBuf.height)}; + YCbCrLayout outLayout = sHandleImporter.lockYCbCr( + *(halBuf.bufPtr), halBuf.usage, outRect); + ALOGV("%s: outLayout y %p cb %p cr %p y_str %d c_str %d c_step %d", + __FUNCTION__, outLayout.y, outLayout.cb, outLayout.cr, + outLayout.yStride, outLayout.cStride, outLayout.chromaStep); + + // Convert to output buffer size/format + uint32_t outputFourcc = V3_4::implementation::getFourCcFromLayout(outLayout); + ALOGV("%s: converting to format %c%c%c%c", __FUNCTION__, + outputFourcc & 0xFF, + (outputFourcc >> 8) & 0xFF, + (outputFourcc >> 16) & 0xFF, + (outputFourcc >> 24) & 0xFF); + + 
YCbCrLayout cropAndScaled; + ATRACE_BEGIN("cropAndScaleLocked"); + int ret = cropAndScaleLocked( + mYu12Frame, + Size { halBuf.width, halBuf.height }, + &cropAndScaled); + ATRACE_END(); + if (ret != 0) { + lk.unlock(); + return onDeviceError("%s: crop and scale failed!", __FUNCTION__); + } + + Size sz {halBuf.width, halBuf.height}; + ATRACE_BEGIN("formatConvert"); + ret = V3_4::implementation::formatConvert(cropAndScaled, outLayout, sz, outputFourcc); + ATRACE_END(); + if (ret != 0) { + lk.unlock(); + return onDeviceError("%s: format coversion failed!", __FUNCTION__); + } + int relFence = sHandleImporter.unlock(*(halBuf.bufPtr)); + if (relFence >= 0) { + halBuf.acquireFence = relFence; + } + } break; + default: + lk.unlock(); + return onDeviceError("%s: unknown output format %x", __FUNCTION__, halBuf.format); + } + } // for each buffer + mScaledYu12Frames.clear(); + + // Don't hold the lock while calling back to parent + lk.unlock(); + Status st = parent->processCaptureResult(req); + if (st != Status::OK) { + return onDeviceError("%s: failed to process capture result!", __FUNCTION__); + } + signalRequestDone(); + return true; +} + +Status ExternalCameraOfflineSession::importBuffer(int32_t streamId, + uint64_t bufId, buffer_handle_t buf, + /*out*/buffer_handle_t** outBufPtr, + bool allowEmptyBuf) { + Mutex::Autolock _l(mCbsLock); + return V3_4::implementation::importBufferImpl( + mCirculatingBuffers, sHandleImporter, streamId, + bufId, buf, outBufPtr, allowEmptyBuf); + return Status::OK; +}; + +#define UPDATE(md, tag, data, size) \ +do { \ + if ((md).update((tag), (data), (size))) { \ + ALOGE("Update " #tag " failed!"); \ + return BAD_VALUE; \ + } \ +} while (0) + +status_t ExternalCameraOfflineSession::fillCaptureResult( + common::V1_0::helper::CameraMetadata &md, nsecs_t timestamp) { + bool afTrigger = false; + { + std::lock_guard lk(mAfTriggerLock); + afTrigger = mAfTrigger; + if (md.exists(ANDROID_CONTROL_AF_TRIGGER)) { + camera_metadata_entry entry = 
md.find(ANDROID_CONTROL_AF_TRIGGER); + if (entry.data.u8[0] == ANDROID_CONTROL_AF_TRIGGER_START) { + mAfTrigger = afTrigger = true; + } else if (entry.data.u8[0] == ANDROID_CONTROL_AF_TRIGGER_CANCEL) { + mAfTrigger = afTrigger = false; + } + } + } + + // For USB camera, the USB camera handles everything and we don't have control + // over AF. We only simply fake the AF metadata based on the request + // received here. + uint8_t afState; + if (afTrigger) { + afState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED; + } else { + afState = ANDROID_CONTROL_AF_STATE_INACTIVE; + } + UPDATE(md, ANDROID_CONTROL_AF_STATE, &afState, 1); + + camera_metadata_ro_entry activeArraySize = + mChars.find(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE); + + return V3_4::implementation::fillCaptureResultCommon(md, timestamp, activeArraySize); +} + +#undef UPDATE + +Status ExternalCameraOfflineSession::processCaptureResult(std::shared_ptr& req) { + ATRACE_CALL(); + // Fill output buffers + hidl_vec results; + results.resize(1); + CaptureResult& result = results[0]; + result.frameNumber = req->frameNumber; + result.partialResult = 1; + result.inputBuffer.streamId = -1; + result.outputBuffers.resize(req->buffers.size()); + for (size_t i = 0; i < req->buffers.size(); i++) { + result.outputBuffers[i].streamId = req->buffers[i].streamId; + result.outputBuffers[i].bufferId = req->buffers[i].bufferId; + if (req->buffers[i].fenceTimeout) { + result.outputBuffers[i].status = BufferStatus::ERROR; + if (req->buffers[i].acquireFence >= 0) { + native_handle_t* handle = native_handle_create(/*numFds*/1, /*numInts*/0); + handle->data[0] = req->buffers[i].acquireFence; + result.outputBuffers[i].releaseFence.setTo(handle, /*shouldOwn*/false); + } + notifyError(req->frameNumber, req->buffers[i].streamId, ErrorCode::ERROR_BUFFER); + } else { + result.outputBuffers[i].status = BufferStatus::OK; + // TODO: refactor + if (req->buffers[i].acquireFence >= 0) { + native_handle_t* handle = native_handle_create(/*numFds*/1, 
/*numInts*/0); + handle->data[0] = req->buffers[i].acquireFence; + result.outputBuffers[i].releaseFence.setTo(handle, /*shouldOwn*/false); + } + } + } + + // Fill capture result metadata + fillCaptureResult(req->setting, req->shutterTs); + const camera_metadata_t *rawResult = req->setting.getAndLock(); + V3_2::implementation::convertToHidl(rawResult, &result.result); + req->setting.unlock(rawResult); + + // Callback into framework + invokeProcessCaptureResultCallback(results, /* tryWriteFmq */true); + V3_4::implementation::freeReleaseFences(results); + return Status::OK; +}; + +void ExternalCameraOfflineSession::invokeProcessCaptureResultCallback( + hidl_vec &results, bool tryWriteFmq) { + if (mProcessCaptureResultLock.tryLock() != OK) { + const nsecs_t NS_TO_SECOND = 1000000000; + ALOGV("%s: previous call is not finished! waiting 1s...", __FUNCTION__); + if (mProcessCaptureResultLock.timedLock(/* 1s */NS_TO_SECOND) != OK) { + ALOGE("%s: cannot acquire lock in 1s, cannot proceed", + __FUNCTION__); + return; + } + } + if (tryWriteFmq && mResultMetadataQueue->availableToWrite() > 0) { + for (CaptureResult &result : results) { + if (result.result.size() > 0) { + if (mResultMetadataQueue->write(result.result.data(), result.result.size())) { + result.fmqResultSize = result.result.size(); + result.result.resize(0); + } else { + ALOGW("%s: couldn't utilize fmq, fall back to hwbinder", __FUNCTION__); + result.fmqResultSize = 0; + } + } else { + result.fmqResultSize = 0; + } + } + } + auto status = mCallback->processCaptureResult(results); + if (!status.isOk()) { + ALOGE("%s: processCaptureResult ERROR : %s", __FUNCTION__, + status.description().c_str()); + } + + mProcessCaptureResultLock.unlock(); +} + +Status ExternalCameraOfflineSession::processCaptureRequestError( + const std::shared_ptr& req, + /*out*/std::vector* outMsgs, + /*out*/std::vector* outResults) { + ATRACE_CALL(); + + if (outMsgs == nullptr) { + notifyError(/*frameNum*/req->frameNumber, /*stream*/-1, 
ErrorCode::ERROR_REQUEST); + } else { + NotifyMsg shutter; + shutter.type = MsgType::SHUTTER; + shutter.msg.shutter.frameNumber = req->frameNumber; + shutter.msg.shutter.timestamp = req->shutterTs; + + NotifyMsg error; + error.type = MsgType::ERROR; + error.msg.error.frameNumber = req->frameNumber; + error.msg.error.errorStreamId = -1; + error.msg.error.errorCode = ErrorCode::ERROR_REQUEST; + outMsgs->push_back(shutter); + outMsgs->push_back(error); + } + + // Fill output buffers + hidl_vec results; + results.resize(1); + CaptureResult& result = results[0]; + result.frameNumber = req->frameNumber; + result.partialResult = 1; + result.inputBuffer.streamId = -1; + result.outputBuffers.resize(req->buffers.size()); + for (size_t i = 0; i < req->buffers.size(); i++) { + result.outputBuffers[i].streamId = req->buffers[i].streamId; + result.outputBuffers[i].bufferId = req->buffers[i].bufferId; + result.outputBuffers[i].status = BufferStatus::ERROR; + if (req->buffers[i].acquireFence >= 0) { + native_handle_t* handle = native_handle_create(/*numFds*/1, /*numInts*/0); + handle->data[0] = req->buffers[i].acquireFence; + result.outputBuffers[i].releaseFence.setTo(handle, /*shouldOwn*/false); + } + } + + if (outResults == nullptr) { + // Callback into framework + invokeProcessCaptureResultCallback(results, /* tryWriteFmq */true); + V3_4::implementation::freeReleaseFences(results); + } else { + outResults->push_back(result); + } + return Status::OK; +}; + +ssize_t ExternalCameraOfflineSession::getJpegBufferSize( + uint32_t /*width*/, uint32_t /*height*/) const { + // Empty implementation here as the jpeg buffer size is passed in by ctor + return 0; +}; + +void ExternalCameraOfflineSession::notifyError(uint32_t frameNumber, int32_t streamId, ErrorCode ec) { + NotifyMsg msg; + msg.type = MsgType::ERROR; + msg.msg.error.frameNumber = frameNumber; + msg.msg.error.errorStreamId = streamId; + msg.msg.error.errorCode = ec; + mCallback->notify({msg}); +}; + +Return 
ExternalCameraOfflineSession::setCallback(const sp& cb) { + Mutex::Autolock _il(mInterfaceLock); + if (mCallback != nullptr && cb != nullptr) { + ALOGE("%s: callback must not be set twice!", __FUNCTION__); + return Void(); + } + mCallback = cb; + + initOutputThread(); + + if (mOutputThread == nullptr) { + ALOGE("%s: init OutputThread failed!", __FUNCTION__); + } + return Void(); +} + +Return ExternalCameraOfflineSession::getCaptureResultMetadataQueue( + V3_3::ICameraDeviceSession::getCaptureResultMetadataQueue_cb _hidl_cb) { + Mutex::Autolock _il(mInterfaceLock); + _hidl_cb(*mResultMetadataQueue->getDesc()); + return Void(); +} + +void ExternalCameraOfflineSession::cleanupBuffersLocked(int id) { + for (auto& pair : mCirculatingBuffers.at(id)) { + sHandleImporter.freeBuffer(pair.second); + } + mCirculatingBuffers[id].clear(); + mCirculatingBuffers.erase(id); +} + +Return ExternalCameraOfflineSession::close() { + Mutex::Autolock _il(mInterfaceLock); + { + Mutex::Autolock _l(mLock); + if (mClosed) { + ALOGW("%s: offline session already closed!", __FUNCTION__); + return Void(); + } + } + if (mBufferRequestThread) { + mBufferRequestThread->requestExit(); + mBufferRequestThread->join(); + mBufferRequestThread.clear(); + } + if (mOutputThread) { + mOutputThread->flush(); + mOutputThread->requestExit(); + mOutputThread->join(); + mOutputThread.clear(); + } + + Mutex::Autolock _l(mLock); + // free all buffers + { + Mutex::Autolock _cbl(mCbsLock); + for(auto stream : mOfflineStreams) { + cleanupBuffersLocked(stream.id); + } + } + mCallback.clear(); + mClosed = true; + return Void(); +} + +} // namespace implementation +} // namespace V3_6 +} // namespace device +} // namespace camera +} // namespace hardware +} // namespace android diff --git a/camera/device/3.6/default/include/ext_device_v3_6_impl/ExternalCameraDeviceSession.h b/camera/device/3.6/default/include/ext_device_v3_6_impl/ExternalCameraDeviceSession.h index 0e57c4c243..db0d9a548b 100644 --- 
a/camera/device/3.6/default/include/ext_device_v3_6_impl/ExternalCameraDeviceSession.h +++ b/camera/device/3.6/default/include/ext_device_v3_6_impl/ExternalCameraDeviceSession.h @@ -14,12 +14,13 @@ * limitations under the License. */ -#ifndef ANDROID_HARDWARE_CAMERA_DEVICE_V3_6_EXTCAMERADEVICE3SESSION_H -#define ANDROID_HARDWARE_CAMERA_DEVICE_V3_6_EXTCAMERADEVICE3SESSION_H +#ifndef ANDROID_HARDWARE_CAMERA_DEVICE_V3_6_EXTCAMERADEVICESESSION_H +#define ANDROID_HARDWARE_CAMERA_DEVICE_V3_6_EXTCAMERADEVICESESSION_H #include #include #include <../../3.5/default/include/ext_device_v3_5_impl/ExternalCameraDeviceSession.h> +#include "ExternalCameraOfflineSession.h" namespace android { namespace hardware { @@ -37,6 +38,7 @@ using ::android::hardware::camera::device::V3_2::RequestTemplate; using ::android::hardware::camera::device::V3_2::Stream; using ::android::hardware::camera::device::V3_5::StreamConfiguration; using ::android::hardware::camera::device::V3_6::ICameraDeviceSession; +using ::android::hardware::camera::device::V3_6::ICameraOfflineSession; using ::android::hardware::camera::common::V1_0::Status; using ::android::hardware::camera::external::common::ExternalCameraConfig; using ::android::hardware::graphics::common::V1_0::PixelFormat; @@ -69,13 +71,6 @@ struct ExternalCameraDeviceSession : public V3_5::implementation::ExternalCamera return new TrampolineSessionInterface_3_6(this); } - static Status isStreamCombinationSupported(const V3_2::StreamConfiguration& config, - const std::vector& supportedFormats, - const ExternalCameraConfig& devCfg) { - return V3_4::implementation::ExternalCameraDeviceSession::isStreamCombinationSupported( - config, supportedFormats, devCfg); - } - protected: // Methods from v3.5 and earlier will trampoline to inherited implementation Return configureStreams_3_6( @@ -86,6 +81,28 @@ protected: const hidl_vec& streamsToKeep, ICameraDeviceSession::switchToOffline_cb _hidl_cb); + void fillOutputStream3_6(const V3_3::HalStreamConfiguration& 
outStreams_v33, + /*out*/V3_6::HalStreamConfiguration* outStreams_v36); + bool supportOfflineLocked(int32_t streamId); + + // Main body of switchToOffline. This method does not invoke any callbacks + // but instead returns the necessary callbacks in output arguments so callers + // can callback later without holding any locks + Status switchToOffline(const hidl_vec& offlineStreams, + /*out*/std::vector* msgs, + /*out*/std::vector* results, + /*out*/CameraOfflineSessionInfo* info, + /*out*/sp* session); + + // Whether a request can be completely dropped when switching to offline + bool canDropRequest(const hidl_vec& offlineStreams, + std::shared_ptr halReq); + + void fillOfflineSessionInfo(const hidl_vec& offlineStreams, + std::deque>& offlineReqs, + const std::map& circulatingBuffers, + /*out*/CameraOfflineSessionInfo* info); + private: struct TrampolineSessionInterface_3_6 : public ICameraDeviceSession { @@ -188,4 +205,4 @@ private: } // namespace hardware } // namespace android -#endif // ANDROID_HARDWARE_CAMERA_DEVICE_V3_6_EXTCAMERADEVICE3SESSION_H +#endif // ANDROID_HARDWARE_CAMERA_DEVICE_V3_6_EXTCAMERADEVICESESSION_H diff --git a/camera/device/3.6/default/include/ext_device_v3_6_impl/ExternalCameraOfflineSession.h b/camera/device/3.6/default/include/ext_device_v3_6_impl/ExternalCameraOfflineSession.h new file mode 100644 index 0000000000..230b67c43c --- /dev/null +++ b/camera/device/3.6/default/include/ext_device_v3_6_impl/ExternalCameraOfflineSession.h @@ -0,0 +1,232 @@ +/* + * Copyright (C) 2020 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_HARDWARE_CAMERA_DEVICE_V3_6_EXTCAMERAOFFLINESESSION_H +#define ANDROID_HARDWARE_CAMERA_DEVICE_V3_6_EXTCAMERAOFFLINESESSION_H + +#include +#include +#include +#include +#include +#include +#include +#include <../../3.4/default/include/ext_device_v3_4_impl/ExternalCameraUtils.h> +#include <../../3.5/default/include/ext_device_v3_5_impl/ExternalCameraDeviceSession.h> +#include +#include +#include + +namespace android { +namespace hardware { +namespace camera { +namespace device { +namespace V3_6 { +namespace implementation { + +using ::android::hardware::camera::device::V3_2::BufferCache; +using ::android::hardware::camera::device::V3_5::BufferRequest; +using ::android::hardware::camera::device::V3_5::BufferRequestStatus; +using ::android::hardware::camera::device::V3_2::BufferStatus; +using ::android::hardware::camera::device::V3_2::CameraMetadata; +using ::android::hardware::camera::device::V3_2::CaptureRequest; +using ::android::hardware::camera::device::V3_2::CaptureResult; +using ::android::hardware::camera::device::V3_2::ErrorCode; +using ::android::hardware::camera::device::V3_5::ICameraDeviceCallback; +using ::android::hardware::camera::device::V3_2::MsgType; +using ::android::hardware::camera::device::V3_2::NotifyMsg; +using ::android::hardware::camera::device::V3_2::RequestTemplate; +using ::android::hardware::camera::device::V3_2::Stream; +using ::android::hardware::camera::device::V3_5::StreamConfiguration; +using ::android::hardware::camera::device::V3_2::StreamConfigurationMode; +using 
::android::hardware::camera::device::V3_2::StreamRotation; +using ::android::hardware::camera::device::V3_2::StreamType; +using ::android::hardware::camera::device::V3_2::DataspaceFlags; +using ::android::hardware::camera::device::V3_2::CameraBlob; +using ::android::hardware::camera::device::V3_2::CameraBlobId; +using ::android::hardware::camera::device::V3_4::HalStreamConfiguration; +using ::android::hardware::camera::device::V3_6::ICameraOfflineSession; +using ::android::hardware::camera::common::V1_0::Status; +using ::android::hardware::camera::common::V1_0::helper::HandleImporter; +using ::android::hardware::camera::common::V1_0::helper::ExifUtils; +using ::android::hardware::camera::external::common::ExternalCameraConfig; +using ::android::hardware::camera::external::common::Size; +using ::android::hardware::camera::external::common::SizeHasher; +using ::android::hardware::graphics::common::V1_0::BufferUsage; +using ::android::hardware::graphics::common::V1_0::Dataspace; +using ::android::hardware::graphics::common::V1_0::PixelFormat; +using ::android::hardware::kSynchronizedReadWrite; +using ::android::hardware::MessageQueue; +using ::android::hardware::MQDescriptorSync; +using ::android::hardware::Return; +using ::android::hardware::Void; +using ::android::hardware::hidl_vec; +using ::android::hardware::hidl_string; +using ::android::sp; +using ::android::Mutex; +using ::android::base::unique_fd; + +using ::android::hardware::camera::device::V3_4::implementation::SupportedV4L2Format; +using ::android::hardware::camera::device::V3_4::implementation::CroppingType; +using ::android::hardware::camera::device::V3_4::implementation::CirculatingBuffers; +using ::android::hardware::camera::device::V3_4::implementation::HalRequest; +using ::android::hardware::camera::device::V3_4::implementation::OutputThreadInterface; + +struct ExternalCameraOfflineSession : public virtual RefBase, + public virtual OutputThreadInterface { + + ExternalCameraOfflineSession( + const 
CroppingType& croppingType, + const common::V1_0::helper::CameraMetadata& chars, + const std::string& cameraId, + const std::string& exifMake, + const std::string& exifModel, + uint32_t blobBufferSize, + bool afTrigger, + const hidl_vec<Stream>& offlineStreams, + std::deque<std::shared_ptr<HalRequest>>& offlineReqs, + const std::map<int, CirculatingBuffers>& circulatingBuffers); + + bool initialize(); + + virtual ~ExternalCameraOfflineSession(); + + // Retrieve the HIDL interface, split into its own class to avoid inheritance issues when + // dealing with minor version revs and simultaneous implementation and interface inheritance + virtual sp<ICameraOfflineSession> getInterface() { + return new TrampolineSessionInterface_3_6(this); + } + +protected: + + // Methods from OutputThreadInterface + virtual Status importBuffer(int32_t streamId, + uint64_t bufId, buffer_handle_t buf, + /*out*/buffer_handle_t** outBufPtr, + bool allowEmptyBuf) override; + + virtual Status processCaptureResult(std::shared_ptr<HalRequest>&) override; + + virtual Status processCaptureRequestError(const std::shared_ptr<HalRequest>&, + /*out*/std::vector<NotifyMsg>* msgs = nullptr, + /*out*/std::vector<CaptureResult>* results = nullptr) override; + + virtual ssize_t getJpegBufferSize(uint32_t width, uint32_t height) const override; + + virtual void notifyError(uint32_t frameNumber, int32_t streamId, ErrorCode ec) override; + // End of OutputThreadInterface methods + + class OutputThread : public V3_5::implementation::ExternalCameraDeviceSession::OutputThread { + public: + OutputThread( + wp<OutputThreadInterface> parent, CroppingType ct, + const common::V1_0::helper::CameraMetadata& chars, + sp<V3_5::implementation::ExternalCameraDeviceSession::BufferRequestThread> bufReqThread, + std::deque<std::shared_ptr<HalRequest>>& offlineReqs) : + V3_5::implementation::ExternalCameraDeviceSession::OutputThread( + parent, ct, chars, bufReqThread), + mOfflineReqs(offlineReqs) {} + + virtual bool threadLoop() override; + + protected: + std::deque<std::shared_ptr<HalRequest>> mOfflineReqs; + }; // OutputThread + + + Return<void> setCallback(const sp<ICameraDeviceCallback>& cb); + + Return<void> getCaptureResultMetadataQueue( + V3_3::ICameraDeviceSession::getCaptureResultMetadataQueue_cb _hidl_cb); + + Return<void> close(); + 
void initOutputThread(); + + void invokeProcessCaptureResultCallback( + hidl_vec<CaptureResult> &results, bool tryWriteFmq); + + status_t fillCaptureResult(common::V1_0::helper::CameraMetadata& md, nsecs_t timestamp); + + void cleanupBuffersLocked(int id); + + // Protect (most of) HIDL interface methods from synchronized-entering + mutable Mutex mInterfaceLock; + + mutable Mutex mLock; // Protect all data members except otherwise noted + + bool mClosed = false; + const CroppingType mCroppingType; + const common::V1_0::helper::CameraMetadata mChars; + const std::string mCameraId; + const std::string mExifMake; + const std::string mExifModel; + const uint32_t mBlobBufferSize; + + std::mutex mAfTriggerLock; // protect mAfTrigger + bool mAfTrigger; + + const hidl_vec<Stream> mOfflineStreams; + std::deque<std::shared_ptr<HalRequest>> mOfflineReqs; + + // Protect mCirculatingBuffers, must not lock mLock after acquiring this lock + mutable Mutex mCbsLock; + std::map<int, CirculatingBuffers> mCirculatingBuffers; + + static HandleImporter sHandleImporter; + + using ResultMetadataQueue = MessageQueue<uint8_t, kSynchronizedReadWrite>; + std::shared_ptr<ResultMetadataQueue> mResultMetadataQueue; + + // Protect against invokeProcessCaptureResultCallback() + Mutex mProcessCaptureResultLock; + + sp<ICameraDeviceCallback> mCallback; + + sp<V3_5::implementation::ExternalCameraDeviceSession::BufferRequestThread> mBufferRequestThread; + sp<OutputThread> mOutputThread; +private: + + struct TrampolineSessionInterface_3_6 : public ICameraOfflineSession { + TrampolineSessionInterface_3_6(sp<ExternalCameraOfflineSession> parent) : + mParent(parent) {} + + virtual Return<void> setCallback(const sp<ICameraDeviceCallback>& cb) override { + return mParent->setCallback(cb); + } + + virtual Return<void> getCaptureResultMetadataQueue( + V3_3::ICameraDeviceSession::getCaptureResultMetadataQueue_cb _hidl_cb) override { + return mParent->getCaptureResultMetadataQueue(_hidl_cb); + } + + virtual Return<void> close() override { + return mParent->close(); + } + + private: + sp<ExternalCameraOfflineSession> mParent; + }; +}; + +} // namespace implementation +} // namespace V3_6 +} // namespace device +} // namespace camera +} // namespace hardware +} // namespace android + +#endif //
ANDROID_HARDWARE_CAMERA_DEVICE_V3_6_EXTCAMERAOFFLINESESSION_H diff --git a/camera/provider/2.5/default/Android.bp b/camera/provider/2.5/default/Android.bp index 4563362ddb..9ddf651440 100644 --- a/camera/provider/2.5/default/Android.bp +++ b/camera/provider/2.5/default/Android.bp @@ -52,6 +52,8 @@ cc_library_shared { "android.hardware.camera.provider@2.4-external", "android.hardware.camera.provider@2.5", "android.hardware.graphics.mapper@2.0", + "android.hardware.graphics.mapper@3.0", + "android.hardware.graphics.mapper@4.0", "android.hidl.allocator@1.0", "android.hidl.memory@1.0", "camera.device@3.3-impl", @@ -72,7 +74,8 @@ cc_library_shared { ], header_libs: [ "camera.device@3.4-external-impl_headers", - "camera.device@3.5-external-impl_headers" + "camera.device@3.5-external-impl_headers", + "camera.device@3.6-external-impl_headers" ], export_include_dirs: ["."], } @@ -165,7 +168,10 @@ cc_binary { "android.hardware.camera.provider@2.5", "android.hardware.camera.provider@2.5-external", "android.hardware.graphics.mapper@2.0", + "android.hardware.graphics.mapper@3.0", + "android.hardware.graphics.mapper@4.0", "libbinder", + "libcamera_metadata", "libhidlbase", "liblog", "libtinyxml2", @@ -179,5 +185,6 @@ cc_binary { "camera.device@3.4-impl_headers", "camera.device@3.5-external-impl_headers", "camera.device@3.5-impl_headers", + "camera.device@3.6-external-impl_headers", ], }