Merge "Camera: Check external camera v4l2 buffer size" into pi-dev

TreeHugger Robot
2018-04-04 09:41:36 +00:00
committed by Android (Google) Code Review
2 changed files with 15 additions and 0 deletions


@@ -2149,6 +2149,13 @@ int ExternalCameraDeviceSession::configureV4l2StreamLocked(
     }
     uint32_t bufferSize = fmt.fmt.pix.sizeimage;
     ALOGI("%s: V4L2 buffer size is %d", __FUNCTION__, bufferSize);
+    uint32_t expectedMaxBufferSize = kMaxBytesPerPixel * fmt.fmt.pix.width * fmt.fmt.pix.height;
+    if ((bufferSize == 0) || (bufferSize > expectedMaxBufferSize)) {
+        ALOGE("%s: V4L2 buffer size: %u looks invalid. Expected maximum size: %u", __FUNCTION__,
+                bufferSize, expectedMaxBufferSize);
+        return -EINVAL;
+    }
+    mMaxV4L2BufferSize = bufferSize;
     const double kDefaultFps = 30.0;
     double fps = 1000.0;
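
For context, the hunk above rejects a driver-reported sizeimage that is zero or larger than kMaxBytesPerPixel * width * height, presumably the worst case because the supported output formats use at most 2 bytes per pixel. Below is a minimal standalone sketch of the same bounds check, not the HAL code itself; the names validateV4l2BufferSize and kMaxBytesPerPixelSketch are illustrative.

#include <cerrno>
#include <cstdint>
#include <cstdio>

// Illustrative stand-in for the session's kMaxBytesPerPixel constant:
// the negotiated V4L2 formats are assumed to use at most 2 bytes per pixel.
static constexpr uint32_t kMaxBytesPerPixelSketch = 2;

// Returns 0 if the driver-reported sizeimage is plausible, -EINVAL otherwise.
static int validateV4l2BufferSize(uint32_t sizeImage, uint32_t width, uint32_t height) {
    const uint32_t expectedMax = kMaxBytesPerPixelSketch * width * height;
    if (sizeImage == 0 || sizeImage > expectedMax) {
        std::fprintf(stderr, "V4L2 buffer size %u looks invalid, expected maximum %u\n",
                sizeImage, expectedMax);
        return -EINVAL;
    }
    return 0;  // Caller would cache sizeImage as the per-buffer maximum.
}

int main() {
    // A 640x480 YUYV frame is 2 * 640 * 480 = 614400 bytes, so it passes;
    // a zero or oversized value is rejected.
    std::printf("%d\n", validateV4l2BufferSize(614400, 640, 480));   // 0
    std::printf("%d\n", validateV4l2BufferSize(0, 640, 480));        // -EINVAL
    std::printf("%d\n", validateV4l2BufferSize(2000000, 640, 480));  // -EINVAL
    return 0;
}

The check matters because fmt.fmt.pix.sizeimage comes straight from the device; caching a validated value (the new mMaxV4L2BufferSize member) gives the dequeue path a trusted upper bound.
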
@@ -2296,6 +2303,12 @@ sp<V4L2Frame> ExternalCameraDeviceSession::dequeueV4l2FrameLocked(/*out*/nsecs_t
         // TODO: try to dequeue again
     }
+    if (buffer.bytesused > mMaxV4L2BufferSize) {
+        ALOGE("%s: v4l2 buffer bytes used: %u maximum %u", __FUNCTION__, buffer.bytesused,
+                mMaxV4L2BufferSize);
+        return ret;
+    }
     if (buffer.flags & V4L2_BUF_FLAG_TIMESTAMP_MONOTONIC) {
         // Ideally we should also check for V4L2_BUF_FLAG_TSTAMP_SRC_SOE, but
         // even V4L2_BUF_FLAG_TSTAMP_SRC_EOF is better than capture a timestamp now
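
The second hunk adds the matching guard on the dequeue path: once VIDIOC_DQBUF succeeds, a frame whose bytesused exceeds the size validated at configure time is rejected via return ret before it is mapped and consumed. A rough standalone sketch of that guard follows, with an illustrative helper name and parameters rather than the HAL's actual signature.

#include <cstdint>
#include <cstdio>
#include <linux/videodev2.h>
#include <sys/ioctl.h>

// Dequeues one MMAP capture buffer from an already-streaming V4L2 fd and
// rejects it if the driver claims more bytes were filled than maxBufferSize,
// the value cached when the stream was configured.
static bool dequeueCheckedFrame(int v4l2Fd, uint32_t maxBufferSize, v4l2_buffer* outBuf) {
    v4l2_buffer buffer = {};
    buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    buffer.memory = V4L2_MEMORY_MMAP;
    if (ioctl(v4l2Fd, VIDIOC_DQBUF, &buffer) < 0) {
        std::perror("VIDIOC_DQBUF");
        return false;
    }
    if (buffer.bytesused > maxBufferSize) {
        // A misbehaving or corrupted device could otherwise trigger an
        // out-of-bounds read when the buffer is mapped and copied later.
        std::fprintf(stderr, "bytesused %u exceeds maximum %u\n",
                buffer.bytesused, maxBufferSize);
        return false;
    }
    *outBuf = buffer;
    return true;
}

In the HAL this bound is the new mMaxV4L2BufferSize member declared in the header changes below, written once in configureV4l2StreamLocked and read here.
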


@@ -108,6 +108,7 @@ struct ExternalCameraDeviceSession : public virtual RefBase {
     static const int kMaxProcessedStream = 2;
     static const int kMaxStallStream = 1;
+    static const uint32_t kMaxBytesPerPixel = 2;
 protected:
@@ -319,6 +320,7 @@ protected:
     std::mutex mV4l2BufferLock; // protect the buffer count and condition below
     std::condition_variable mV4L2BufferReturned;
     size_t mNumDequeuedV4l2Buffers = 0;
+    uint32_t mMaxV4L2BufferSize = 0;
     // Not protected by mLock (but might be used when mLock is locked)
     sp<OutputThread> mOutputThread;