EmulatedFakeCamera2: Add support for JPEG output, multiple streams

Required to support Camera.takePicture()

Bug: 6243944
Change-Id: I60d7a161a7037c25428eac5a6f9327aff47da584
Author: Eino-Ville Talvala
Date:   2012-06-12 17:23:52 -07:00
parent 2a2ada537a
commit e26e64f2de
8 changed files with 871 additions and 207 deletions
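
For orientation: takePicture() requires the framework to allocate a JPEG (BLOB) output stream against the HAL, which is what this change enables. A minimal caller-side sketch, assuming the camera2 HAL entry points of this era (hardware/camera2.h); the helper name and exact parameter order are assumptions, not framework code:

#include <hardware/camera2.h>
#include <system/graphics.h>

// Illustrative helper: allocate a JPEG (BLOB) stream the way the framework
// does when preparing takePicture(). Parameter order follows hardware/camera2.h
// as understood at the time of this commit (treat it as an assumption).
static int setupJpegStream(camera2_device_t *dev,
        const camera2_stream_ops_t *streamOps,
        uint32_t width, uint32_t height, uint32_t *streamId) {
    uint32_t formatActual, usage, maxBuffers;
    // With this change the fake camera accepts HAL_PIXEL_FORMAT_BLOB here,
    // alongside one raw stream and up to three processed streams.
    return dev->ops->allocate_stream(dev, width, height,
            HAL_PIXEL_FORMAT_BLOB, streamOps,
            streamId, &formatActual, &usage, &maxBuffers);
}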

Android.mk

@@ -60,7 +60,8 @@ LOCAL_SRC_FILES := \
 	EmulatedFakeCamera2.cpp \
 	EmulatedQemuCamera2.cpp \
 	fake-pipeline2/Scene.cpp \
-	fake-pipeline2/Sensor.cpp
+	fake-pipeline2/Sensor.cpp \
+	fake-pipeline2/JpegCompressor.cpp
 
 ifeq ($(TARGET_PRODUCT),vbox_x86)

EmulatedFakeCamera2.cpp

@@ -30,8 +30,10 @@
 namespace android {
 
-const uint32_t EmulatedFakeCamera2::kAvailableFormats[3] = {
+const uint32_t EmulatedFakeCamera2::kAvailableFormats[5] = {
         HAL_PIXEL_FORMAT_RAW_SENSOR,
+        HAL_PIXEL_FORMAT_BLOB,
+        HAL_PIXEL_FORMAT_RGBA_8888,
         HAL_PIXEL_FORMAT_YV12,
         HAL_PIXEL_FORMAT_YCrCb_420_SP
 };
@@ -101,6 +103,11 @@ status_t EmulatedFakeCamera2::Initialize() {
     }
     if (res != OK) return res;
 
+    mNextStreamId = 0;
+    mRawStreamCount = 0;
+    mProcessedStreamCount = 0;
+    mJpegStreamCount = 0;
+
     return NO_ERROR;
 }
@@ -114,10 +121,10 @@ status_t EmulatedFakeCamera2::connectCamera(hw_device_t** device) {
     mConfigureThread = new ConfigureThread(this);
     mReadoutThread = new ReadoutThread(this);
-    mSensor = new Sensor();
+    mSensor = new Sensor(this);
+    mJpegCompressor = new JpegCompressor(this);
 
     mNextStreamId = 0;
-    mRawStreamOps = NULL;
 
     res = mSensor->startUp();
     if (res != NO_ERROR) return res;
@@ -145,10 +152,12 @@ status_t EmulatedFakeCamera2::closeCamera() {
     mConfigureThread->requestExit();
     mReadoutThread->requestExit();
+    mJpegCompressor->cancel();
+
     mConfigureThread->join();
     mReadoutThread->join();
 
     ALOGV("%s exit", __FUNCTION__);
     return NO_ERROR;
 }
@@ -174,12 +183,23 @@ int EmulatedFakeCamera2::requestQueueNotify() {
     ALOG_ASSERT(mFrameQueueDst != NULL,
             "%s: Request queue src not set, but received queue notification!",
             __FUNCTION__);
-    ALOG_ASSERT(mRawStreamOps != NULL,
-            "%s: No raw stream allocated, but received queue notification!",
+    ALOG_ASSERT(mStreams.size() != 0,
+            "%s: No streams allocated, but received queue notification!",
             __FUNCTION__);
     return mConfigureThread->newRequestAvailable();
 }
 
+int EmulatedFakeCamera2::getInProgressCount() {
+    Mutex::Autolock l(mMutex);
+
+    int requestCount = 0;
+    requestCount += mConfigureThread->getInProgressCount();
+    requestCount += mReadoutThread->getInProgressCount();
+    requestCount += mJpegCompressor->isBusy() ? 1 : 0;
+
+    return requestCount;
+}
+
 int EmulatedFakeCamera2::constructDefaultRequest(
         int request_template,
         camera_metadata_t **request) {
@@ -219,12 +239,6 @@ int EmulatedFakeCamera2::allocateStream(
         uint32_t *max_buffers) {
     Mutex::Autolock l(mMutex);
 
-    if (mNextStreamId > 0) {
-        // TODO: Support more than one stream
-        ALOGW("%s: Only one stream supported", __FUNCTION__);
-        return BAD_VALUE;
-    }
-
     if (format != CAMERA2_HAL_PIXEL_FORMAT_OPAQUE) {
         unsigned int numFormats = sizeof(kAvailableFormats) / sizeof(uint32_t);
         unsigned int formatIdx = 0;
@@ -243,14 +257,25 @@ int EmulatedFakeCamera2::allocateStream(
     const uint32_t *availableSizes;
     size_t availableSizeCount;
-    if (format == HAL_PIXEL_FORMAT_RAW_SENSOR) {
-        availableSizes = kAvailableRawSizes;
-        availableSizeCount = sizeof(kAvailableRawSizes)/sizeof(uint32_t);
-    } else {
-        availableSizes = kAvailableProcessedSizes;
-        availableSizeCount = sizeof(kAvailableProcessedSizes)/sizeof(uint32_t);
+    switch (format) {
+        case HAL_PIXEL_FORMAT_RAW_SENSOR:
+            availableSizes = kAvailableRawSizes;
+            availableSizeCount = sizeof(kAvailableRawSizes)/sizeof(uint32_t);
+            break;
+        case HAL_PIXEL_FORMAT_BLOB:
+            availableSizes = kAvailableJpegSizes;
+            availableSizeCount = sizeof(kAvailableJpegSizes)/sizeof(uint32_t);
+            break;
+        case HAL_PIXEL_FORMAT_RGBA_8888:
+        case HAL_PIXEL_FORMAT_YV12:
+        case HAL_PIXEL_FORMAT_YCrCb_420_SP:
+            availableSizes = kAvailableProcessedSizes;
+            availableSizeCount = sizeof(kAvailableProcessedSizes)/sizeof(uint32_t);
+            break;
+        default:
+            ALOGE("%s: Unknown format 0x%x", __FUNCTION__, format);
+            return BAD_VALUE;
     }
-    // TODO: JPEG sizes
 
     unsigned int resIdx = 0;
     for (; resIdx < availableSizeCount; resIdx++) {
@@ -263,16 +288,41 @@ int EmulatedFakeCamera2::allocateStream(
         return BAD_VALUE;
     }
 
-    // TODO: Generalize below to work for variable types of streams, etc.
-    // Currently only correct for raw sensor format, sensor resolution.
+    switch (format) {
+        case HAL_PIXEL_FORMAT_RAW_SENSOR:
+            if (mRawStreamCount >= kMaxRawStreamCount) {
+                ALOGE("%s: Cannot allocate another raw stream (%d already allocated)",
+                        __FUNCTION__, mRawStreamCount);
+                return INVALID_OPERATION;
+            }
+            mRawStreamCount++;
+            break;
+        case HAL_PIXEL_FORMAT_BLOB:
+            if (mJpegStreamCount >= kMaxJpegStreamCount) {
+                ALOGE("%s: Cannot allocate another JPEG stream (%d already allocated)",
+                        __FUNCTION__, mJpegStreamCount);
+                return INVALID_OPERATION;
+            }
+            mJpegStreamCount++;
+            break;
+        default:
+            if (mProcessedStreamCount >= kMaxProcessedStreamCount) {
+                ALOGE("%s: Cannot allocate another processed stream (%d already allocated)",
+                        __FUNCTION__, mProcessedStreamCount);
+                return INVALID_OPERATION;
+            }
+            mProcessedStreamCount++;
+    }
 
-    ALOG_ASSERT(width == Sensor::kResolution[0],
-            "%s: TODO: Only supporting raw sensor size right now", __FUNCTION__);
-    ALOG_ASSERT(height == Sensor::kResolution[1],
-            "%s: TODO: Only supporting raw sensor size right now", __FUNCTION__);
+    Stream newStream;
+    newStream.ops = stream_ops;
+    newStream.width = width;
+    newStream.height = height;
+    newStream.format = format;
+    // TODO: Query stride from gralloc
+    newStream.stride = width;
 
-    mStreamFormat = format;
-    mRawStreamOps = stream_ops;
+    mStreams.add(mNextStreamId, newStream);
 
     *stream_id = mNextStreamId;
     if (format_actual) *format_actual = format;
@@ -298,11 +348,32 @@ int EmulatedFakeCamera2::registerStreamBuffers(
 int EmulatedFakeCamera2::releaseStream(uint32_t stream_id) {
     Mutex::Autolock l(mMutex);
 
-    ALOG_ASSERT(stream_id == 0,
-            "%s: TODO: Only one stream supported", __FUNCTION__);
+    ssize_t streamIndex = mStreams.indexOfKey(stream_id);
+    if (streamIndex < 0) {
+        ALOGE("%s: Unknown stream id %d!", __FUNCTION__, stream_id);
+        return BAD_VALUE;
+    }
 
-    // TODO: Need to clean up better than this - in-flight buffers likely
-    mRawStreamOps = NULL;
+    if (isStreamInUse(stream_id)) {
+        ALOGE("%s: Cannot release stream %d; in use!", __FUNCTION__,
+                stream_id);
+        return BAD_VALUE;
+    }
+
+    switch(mStreams.valueAt(streamIndex).format) {
+        case HAL_PIXEL_FORMAT_RAW_SENSOR:
+            mRawStreamCount--;
+            break;
+        case HAL_PIXEL_FORMAT_BLOB:
+            mJpegStreamCount--;
+            break;
+        default:
+            mProcessedStreamCount--;
+            break;
+    }
+
+    mStreams.removeItemsAt(streamIndex);
 
     return NO_ERROR;
 }
@@ -435,6 +506,21 @@ status_t EmulatedFakeCamera2::ConfigureThread::newRequestAvailable() {
     return OK;
 }
 
+bool EmulatedFakeCamera2::ConfigureThread::isStreamInUse(uint32_t id) {
+    Mutex::Autolock lock(mInternalsMutex);
+
+    if (mNextBuffers == NULL) return false;
+    for (size_t i=0; i < mNextBuffers->size(); i++) {
+        if ((*mNextBuffers)[i].streamId == (int)id) return true;
+    }
+    return false;
+}
+
+int EmulatedFakeCamera2::ConfigureThread::getInProgressCount() {
+    Mutex::Autolock lock(mInternalsMutex);
+    return mNextBuffers == NULL ? 0 : 1;
+}
+
 bool EmulatedFakeCamera2::ConfigureThread::threadLoop() {
     static const nsecs_t kWaitPerLoop = 10000000L; // 10 ms
     status_t res;
@@ -457,6 +543,8 @@ bool EmulatedFakeCamera2::ConfigureThread::threadLoop() {
             // Active
         }
         if (mRequest == NULL) {
+            Mutex::Autolock il(mInternalsMutex);
             ALOGV("Getting next request");
             res = mParent->mRequestQueueSrc->dequeue_request(
                     mParent->mRequestQueueSrc,
@@ -486,11 +574,24 @@ bool EmulatedFakeCamera2::ConfigureThread::threadLoop() {
             mParent->signalError();
             return false;
         }
 
-        // TODO: Only raw stream supported
-        if (streams.count != 1 || streams.data.u8[0] != 0) {
-            ALOGE("%s: TODO: Only raw stream supported", __FUNCTION__);
-            mParent->signalError();
-            return false;
+        mNextBuffers = new Buffers;
+        mNextNeedsJpeg = false;
+        ALOGV("Setting up buffers for capture");
+        for (size_t i = 0; i < streams.count; i++) {
+            const Stream &s = mParent->getStreamInfo(streams.data.u8[i]);
+            StreamBuffer b;
+            b.streamId = streams.data.u8[i];
+            b.width  = s.width;
+            b.height = s.height;
+            b.format = s.format;
+            b.stride = s.stride;
+            mNextBuffers->push_back(b);
+            ALOGV("  Buffer %d: Stream %d, %d x %d, format 0x%x, stride %d",
+                    i, b.streamId, b.width, b.height, b.format, b.stride);
+            if (b.format == HAL_PIXEL_FORMAT_BLOB) {
+                mNextNeedsJpeg = true;
+            }
         }
 
         camera_metadata_entry_t e;
@@ -548,56 +649,70 @@ bool EmulatedFakeCamera2::ConfigureThread::threadLoop() {
             mParent->mSensor->getScene().setHour(*e.data.i32);
         }
 
-        // TODO: Fetch stride from gralloc
-        mNextBufferStride = Sensor::kResolution[0];
-
-        // Start waiting on sensor
-        ALOGV("Waiting for sensor");
+        // Start waiting on sensor or JPEG block
+        if (mNextNeedsJpeg) {
+            ALOGV("Waiting for JPEG compressor");
+        } else {
+            ALOGV("Waiting for sensor");
+        }
+    }
+
+    if (mNextNeedsJpeg) {
+        bool jpegDone;
+        jpegDone = mParent->mJpegCompressor->waitForDone(kWaitPerLoop);
+        if (!jpegDone) return true;
+
+        ALOGV("Waiting for sensor");
+        mNextNeedsJpeg = false;
     }
 
     bool vsync = mParent->mSensor->waitForVSync(kWaitPerLoop);
-    if (vsync) {
-        ALOGV("Configuring sensor for frame %d", mNextFrameNumber);
-        mParent->mSensor->setExposureTime(mNextExposureTime);
-        mParent->mSensor->setFrameDuration(mNextFrameDuration);
-        mParent->mSensor->setSensitivity(mNextSensitivity);
+    if (!vsync) return true;
 
-        /** Get buffer to fill for this frame */
-        // TODO: Only does raw stream
+    Mutex::Autolock il(mInternalsMutex);
+    ALOGV("Configuring sensor for frame %d", mNextFrameNumber);
+    mParent->mSensor->setExposureTime(mNextExposureTime);
+    mParent->mSensor->setFrameDuration(mNextFrameDuration);
+    mParent->mSensor->setSensitivity(mNextSensitivity);
 
-        /* Get next buffer from raw stream */
-        mNextBuffer = NULL;
-        res = mParent->mRawStreamOps->dequeue_buffer(mParent->mRawStreamOps,
-                &mNextBuffer);
-        if (res != NO_ERROR || mNextBuffer == NULL) {
-            ALOGE("%s: Unable to dequeue buffer from stream %d: %d",
-                    __FUNCTION__, 0, res);
+    /** Get buffers to fill for this frame */
+    for (size_t i = 0; i < mNextBuffers->size(); i++) {
+        StreamBuffer &b = mNextBuffers->editItemAt(i);
+
+        Stream s = mParent->getStreamInfo(b.streamId);
+
+        res = s.ops->dequeue_buffer(s.ops, &(b.buffer) );
+        if (res != NO_ERROR || b.buffer == NULL) {
+            ALOGE("%s: Unable to dequeue buffer from stream %d: %s (%d)",
+                    __FUNCTION__, b.streamId, strerror(-res), res);
             mParent->signalError();
             return false;
         }
 
         /* Lock the buffer from the perspective of the graphics mapper */
         uint8_t *img;
-        const Rect rect(Sensor::kResolution[0], Sensor::kResolution[1]);
-        res = GraphicBufferMapper::get().lock(*mNextBuffer,
+        const Rect rect(s.width, s.height);
+        res = GraphicBufferMapper::get().lock(*(b.buffer),
                 GRALLOC_USAGE_SW_WRITE_OFTEN,
-                rect, (void**)&img);
+                rect, (void**)&(b.img) );
         if (res != NO_ERROR) {
-            ALOGE("%s: grbuffer_mapper.lock failure: %d", __FUNCTION__, res);
-            mParent->mRawStreamOps->cancel_buffer(mParent->mRawStreamOps,
-                    mNextBuffer);
+            ALOGE("%s: grbuffer_mapper.lock failure: %s (%d)",
+                    __FUNCTION__, strerror(-res), res);
+            s.ops->cancel_buffer(s.ops,
+                    b.buffer);
             mParent->signalError();
             return false;
         }
-
-        mParent->mSensor->setDestinationBuffer(img, mParent->mStreamFormat,
-                mNextBufferStride);
-        mParent->mReadoutThread->setNextCapture(mRequest, mNextBuffer);
-        mRequest = NULL;
     }
+
+    mParent->mReadoutThread->setNextCapture(mRequest, mNextBuffers);
+    mParent->mSensor->setDestinationBuffers(mNextBuffers);
+
+    mRequest = NULL;
+    mNextBuffers = NULL;
 
     return true;
 }
@@ -606,8 +721,7 @@ EmulatedFakeCamera2::ReadoutThread::ReadoutThread(EmulatedFakeCamera2 *parent):
         mParent(parent),
         mRunning(false),
         mActive(false),
-        mRequest(NULL),
-        mBuffer(NULL)
+        mRequest(NULL)
 {
     mInFlightQueue = new InFlightQueue[kInFlightQueueSize];
     mInFlightHead = 0;
@@ -635,8 +749,9 @@ status_t EmulatedFakeCamera2::ReadoutThread::waitUntilRunning() {
     return OK;
 }
 
-void EmulatedFakeCamera2::ReadoutThread::setNextCapture(camera_metadata_t *request,
-        buffer_handle_t *buffer) {
+void EmulatedFakeCamera2::ReadoutThread::setNextCapture(
+        camera_metadata_t *request,
+        Buffers *buffers) {
     Mutex::Autolock lock(mInputMutex);
     if ( (mInFlightTail + 1) % kInFlightQueueSize == mInFlightHead) {
         ALOGE("In flight queue full, dropping captures");
@@ -644,7 +759,7 @@ void EmulatedFakeCamera2::ReadoutThread::setNextCapture(camera_metadata_t *reque
         return;
     }
     mInFlightQueue[mInFlightTail].request = request;
-    mInFlightQueue[mInFlightTail].buffer = buffer;
+    mInFlightQueue[mInFlightTail].buffers = buffers;
     mInFlightTail = (mInFlightTail + 1) % kInFlightQueueSize;
 
     if (!mActive) {
@@ -653,6 +768,41 @@ void EmulatedFakeCamera2::ReadoutThread::setNextCapture(camera_metadata_t *reque
     }
 }
 
+bool EmulatedFakeCamera2::ReadoutThread::isStreamInUse(uint32_t id) {
+    Mutex::Autolock lock(mInputMutex);
+
+    size_t i = mInFlightHead;
+    while (i != mInFlightTail) {
+        for (size_t j = 0; j < mInFlightQueue[i].buffers->size(); j++) {
+            if ( (*(mInFlightQueue[i].buffers))[j].streamId == (int)id )
+                return true;
+        }
+        i = (i + 1) % kInFlightQueueSize;
+    }
+
+    Mutex::Autolock iLock(mInternalsMutex);
+
+    if (mBuffers != NULL) {
+        for (i = 0; i < mBuffers->size(); i++) {
+            if ( (*mBuffers)[i].streamId == (int)id) return true;
+        }
+    }
+
+    return false;
+}
+
+int EmulatedFakeCamera2::ReadoutThread::getInProgressCount() {
+    Mutex::Autolock lock(mInputMutex);
+    Mutex::Autolock iLock(mInternalsMutex);
+
+    int requestCount =
+            ((mInFlightTail + kInFlightQueueSize) - mInFlightHead)
+            % kInFlightQueueSize;
+    requestCount += (mBuffers == NULL) ? 0 : 1;
+
+    return requestCount;
+}
+
 bool EmulatedFakeCamera2::ReadoutThread::threadLoop() {
     static const nsecs_t kWaitPerLoop = 10000000L; // 10 ms
     status_t res;
@@ -679,11 +829,14 @@ bool EmulatedFakeCamera2::ReadoutThread::threadLoop() {
             mActive = false;
             return true;
         } else {
+            Mutex::Autolock iLock(mInternalsMutex);
             mRequest = mInFlightQueue[mInFlightHead].request;
-            mBuffer = mInFlightQueue[mInFlightHead].buffer;
+            mBuffers = mInFlightQueue[mInFlightHead].buffers;
             mInFlightQueue[mInFlightHead].request = NULL;
-            mInFlightQueue[mInFlightHead].buffer = NULL;
+            mInFlightQueue[mInFlightHead].buffers = NULL;
             mInFlightHead = (mInFlightHead + 1) % kInFlightQueueSize;
+            ALOGV("Ready to read out request %p, %d buffers",
+                    mRequest, mBuffers->size());
         }
     }
 }
@@ -700,6 +853,7 @@ bool EmulatedFakeCamera2::ReadoutThread::threadLoop() {
     // Got sensor data, construct frame and send it out
     ALOGV("Readout: Constructing metadata and frames");
 
+    Mutex::Autolock iLock(mInternalsMutex);
     camera_metadata_entry_t metadataMode;
     res = find_camera_metadata_entry(mRequest,
@@ -771,16 +925,41 @@ bool EmulatedFakeCamera2::ReadoutThread::threadLoop() {
     }
     mRequest = NULL;
 
-    ALOGV("Sending image buffer to output stream.");
-    GraphicBufferMapper::get().unlock(*mBuffer);
-    res = mParent->mRawStreamOps->enqueue_buffer(mParent->mRawStreamOps,
-            captureTime, mBuffer);
-    if (res != OK) {
-        ALOGE("Error enqueuing image buffer %p: %s (%d)", mBuffer,
-                strerror(-res), res);
-        // TODO: Should this cause a stop?
+    int compressedBufferIndex = -1;
+    ALOGV("Processing %d buffers", mBuffers->size());
+    for (size_t i = 0; i < mBuffers->size(); i++) {
+        const StreamBuffer &b = (*mBuffers)[i];
+        ALOGV("  Buffer %d: Stream %d, %d x %d, format 0x%x, stride %d",
+                i, b.streamId, b.width, b.height, b.format, b.stride);
+        if (b.streamId >= 0) {
+            if (b.format == HAL_PIXEL_FORMAT_BLOB) {
+                // Assumes only one BLOB buffer type per capture
+                compressedBufferIndex = i;
+            } else {
+                ALOGV("Sending image buffer %d to output stream %d",
+                        i, b.streamId);
+                GraphicBufferMapper::get().unlock(*(b.buffer));
+                res = mParent->getStreamInfo(b.streamId).ops->enqueue_buffer(
+                        mParent->getStreamInfo(b.streamId).ops,
+                        captureTime, b.buffer);
+                if (res != OK) {
+                    ALOGE("Error enqueuing image buffer %p: %s (%d)", b.buffer,
+                            strerror(-res), res);
+                    mParent->signalError();
+                }
+            }
+        }
+    }
+
+    if (compressedBufferIndex == -1) {
+        delete mBuffers;
+        mBuffers = NULL;
+    } else {
+        ALOGV("Starting JPEG compression for buffer %d, stream %d",
+                compressedBufferIndex,
+                (*mBuffers)[compressedBufferIndex].streamId);
+        mParent->mJpegCompressor->start(mBuffers, captureTime);
+        mBuffers = NULL;
     }
-    mBuffer = NULL;
 
     return true;
 }
@@ -789,7 +968,7 @@ bool EmulatedFakeCamera2::ReadoutThread::threadLoop() {
 status_t EmulatedFakeCamera2::constructStaticInfo(
         camera_metadata_t **info,
-        bool sizeRequest) {
+        bool sizeRequest) const {
 
     size_t entryCount = 0;
     size_t dataCount = 0;
@@ -958,6 +1137,9 @@ status_t EmulatedFakeCamera2::constructStaticInfo(
     ADD_OR_SIZE(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
             jpegThumbnailSizes, sizeof(jpegThumbnailSizes)/sizeof(int32_t));
 
+    static const int32_t jpegMaxSize = JpegCompressor::kMaxJpegSize;
+    ADD_OR_SIZE(ANDROID_JPEG_MAX_SIZE, &jpegMaxSize, 1);
+
     // android.stats
 
     static const uint8_t availableFaceDetectModes[] = {
@@ -1080,7 +1262,7 @@ status_t EmulatedFakeCamera2::constructStaticInfo(
 status_t EmulatedFakeCamera2::constructDefaultRequest(
         int request_template,
         camera_metadata_t **request,
-        bool sizeRequest) {
+        bool sizeRequest) const {
 
     size_t entryCount = 0;
     size_t dataCount = 0;
@@ -1433,5 +1615,25 @@ status_t EmulatedFakeCamera2::addOrSize(camera_metadata_t *request,
     }
 }
 
+bool EmulatedFakeCamera2::isStreamInUse(uint32_t id) {
+    // Assumes mMutex is locked; otherwise new requests could enter
+    // configureThread while readoutThread is being checked
+
+    // Order of isStreamInUse calls matters
+    if (mConfigureThread->isStreamInUse(id) ||
+            mReadoutThread->isStreamInUse(id) ||
+            mJpegCompressor->isStreamInUse(id) ) {
+        ALOGE("%s: Stream %d is in use in active requests!",
+                __FUNCTION__, id);
+        return true;
+    }
+    return false;
+}
+
+const Stream& EmulatedFakeCamera2::getStreamInfo(uint32_t streamId) {
+    Mutex::Autolock lock(mMutex);
+
+    return mStreams.valueFor(streamId);
+}
+
 };  /* namespace android */

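Since releaseStream() now rejects streams referenced by in-flight work, a caller is expected to drain the pipeline first. A hedged teardown sketch, assuming the device's ops table exposes the in-progress count that getInProgressCount() above backs (the helper and its polling loop are illustrative, not framework code):

#include <unistd.h>
#include <hardware/camera2.h>

// Hypothetical teardown helper: wait for the configure thread, readout
// thread and JPEG compressor to go idle, then release the stream.
static int releaseStreamWhenIdle(camera2_device_t *dev, uint32_t streamId) {
    while (dev->ops->get_in_progress_count(dev) > 0) {
        usleep(10000); // 10 ms, matching the pipeline's own poll interval
    }
    return dev->ops->release_stream(dev, streamId);
}
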
EmulatedFakeCamera2.h

@@ -24,8 +24,11 @@
  */
 
 #include "EmulatedCamera2.h"
+#include "fake-pipeline2/Base.h"
 #include "fake-pipeline2/Sensor.h"
+#include "fake-pipeline2/JpegCompressor.h"
 
 #include <utils/Condition.h>
+#include <utils/KeyedVector.h>
 #include <utils/Thread.h>
 
 namespace android {
@@ -69,7 +72,7 @@ protected:
     virtual int requestQueueNotify();
 
     /** Count of requests in flight */
-    //virtual int getInProgressCount();
+    virtual int getInProgressCount();
 
     /** Cancel all captures in flight */
     //virtual int flushCapturesInProgress();
@@ -121,7 +124,13 @@ protected:
     virtual int dump(int fd);
 
-    /** Methods for worker threads to call */
+  public:
+    /****************************************************************************
+     * Utility methods called by configure/readout threads and pipeline
+     ***************************************************************************/
+
+    // Get information about a given stream. Will lock mMutex
+    const Stream &getStreamInfo(uint32_t streamId);
 
     // Notifies rest of camera subsystem of serious error
     void signalError();
@@ -133,15 +142,15 @@ private:
     /** Construct static camera metadata, two-pass */
     status_t constructStaticInfo(
             camera_metadata_t **info,
-            bool sizeRequest);
+            bool sizeRequest) const;
 
     /** Two-pass implementation of constructDefaultRequest */
     status_t constructDefaultRequest(
             int request_template,
             camera_metadata_t **request,
-            bool sizeRequest);
+            bool sizeRequest) const;
     /** Helper function for constructDefaultRequest */
-    status_t addOrSize( camera_metadata_t *request,
+    static status_t addOrSize( camera_metadata_t *request,
             bool sizeRequest,
             size_t *entryCount,
             size_t *dataCount,
@@ -149,6 +158,10 @@ private:
             const void *entry_data,
             size_t entry_count);
 
+    /** Determine if the stream id is listed in any currently-in-flight
+     * requests. Assumes mMutex is locked */
+    bool isStreamInUse(uint32_t streamId);
+
     /****************************************************************************
      * Pipeline controller threads
      ***************************************************************************/
@@ -161,6 +174,9 @@ private:
         status_t waitUntilRunning();
         status_t newRequestAvailable();
         status_t readyToRun();
+
+        bool isStreamInUse(uint32_t id);
+        int getInProgressCount();
 
       private:
         EmulatedFakeCamera2 *mParent;
@@ -173,12 +189,14 @@ private:
         // working on them
         camera_metadata_t *mRequest;
 
+        Mutex mInternalsMutex; // Lock before accessing below members.
+        bool mNextNeedsJpeg;
         int32_t mNextFrameNumber;
         int64_t mNextExposureTime;
         int64_t mNextFrameDuration;
         int32_t mNextSensitivity;
-        buffer_handle_t *mNextBuffer;
-        int mNextBufferStride;
+        Buffers *mNextBuffers;
     };
 
     class ReadoutThread: public Thread {
@@ -191,8 +209,10 @@ private:
         // Input
         status_t waitUntilRunning();
         void setNextCapture(camera_metadata_t *request,
-                buffer_handle_t *buffer);
+                Buffers *buffers);
+        bool isStreamInUse(uint32_t id);
+        int getInProgressCount();
 
       private:
         EmulatedFakeCamera2 *mParent;
static const int kInFlightQueueSize = 4; static const int kInFlightQueueSize = 4;
struct InFlightQueue { struct InFlightQueue {
camera_metadata_t *request; camera_metadata_t *request;
buffer_handle_t *buffer; Buffers *buffers;
} *mInFlightQueue; } *mInFlightQueue;
int mInFlightHead; size_t mInFlightHead;
int mInFlightTail; size_t mInFlightTail;
// Internals // Internals
Mutex mInternalsMutex;
camera_metadata_t *mRequest; camera_metadata_t *mRequest;
buffer_handle_t *mBuffer; Buffers *mBuffers;
}; };
@@ -223,6 +244,9 @@ private:
      * Static configuration information
      ***************************************************************************/
   private:
+    static const uint32_t kMaxRawStreamCount = 1;
+    static const uint32_t kMaxProcessedStreamCount = 3;
+    static const uint32_t kMaxJpegStreamCount = 1;
     static const uint32_t kAvailableFormats[];
     static const uint32_t kAvailableRawSizes[];
     static const uint64_t kAvailableRawMinDurations[];
@@ -245,11 +269,15 @@ private:
     /** Stream manipulation */
     uint32_t mNextStreamId;
-    const camera2_stream_ops_t *mRawStreamOps;
-    uint32_t mStreamFormat;
+    uint32_t mRawStreamCount;
+    uint32_t mProcessedStreamCount;
+    uint32_t mJpegStreamCount;
+
+    KeyedVector<uint32_t, Stream> mStreams;
 
     /** Simulated hardware interfaces */
     sp<Sensor> mSensor;
+    sp<JpegCompressor> mJpegCompressor;
 
     /** Pipeline control threads */
     sp<ConfigureThread> mConfigureThread;

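The ReadoutThread's in-flight queue is a fixed ring of four (request, buffers) pairs; with the head/tail indices now size_t, its occupancy and full checks reduce to the following minimal sketch of the same arithmetic:

#include <cstddef>

static const size_t kInFlightQueueSize = 4; // value matches the header's constant

// Occupancy, as in getInProgressCount(): adding kInFlightQueueSize before
// subtracting keeps the intermediate value non-negative for unsigned indices.
static size_t ringOccupancy(size_t head, size_t tail) {
    return ((tail + kInFlightQueueSize) - head) % kInFlightQueueSize;
}

// Full test used by setNextCapture(): one slot is sacrificed so that
// full and empty states stay distinguishable.
static bool ringFull(size_t head, size_t tail) {
    return (tail + 1) % kInFlightQueueSize == head;
}
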
fake-pipeline2/Base.h

@@ -0,0 +1,53 @@
/*
* Copyright (C) 2012 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* This file includes various basic structures that are needed by multiple parts
* of the fake camera 2 implementation.
*/
#ifndef HW_EMULATOR_CAMERA2_BASE_H
#define HW_EMULATOR_CAMERA2_BASE_H
#include <system/window.h>
#include <hardware/camera2.h>
#include <utils/Vector.h>
namespace android {
/* Internal structure for passing buffers across threads */
struct StreamBuffer {
int streamId;
uint32_t width, height;
uint32_t format;
uint32_t stride;
buffer_handle_t *buffer;
uint8_t *img;
};
typedef Vector<StreamBuffer> Buffers;
struct Stream {
uint32_t id;
const camera2_stream_ops_t *ops;
uint32_t width, height;
uint32_t format;
uint32_t stride;
};
} // namespace android;
#endif

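To illustrate how these types travel through the pipeline: the configure thread builds one StreamBuffer per output stream in a request and hands the whole Buffers vector down to the sensor, the readout thread, and (for BLOB captures) the JPEG compressor. A small sketch under the same conventions; the helper itself is illustrative:

#include <utils/Vector.h>
#include "fake-pipeline2/Base.h"

using namespace android;

// Illustrative: describe one capture buffer for a known stream. Ownership of
// the returned vector passes to whoever consumes it last, as in the pipeline.
static Buffers *makeCaptureSet(const Stream &s, int streamId) {
    Buffers *buffers = new Buffers;
    StreamBuffer b;
    b.streamId = streamId;  // -1 marks an internal auxiliary buffer
    b.width = s.width;
    b.height = s.height;
    b.format = s.format;
    b.stride = s.stride;
    b.buffer = NULL;        // dequeued later from s.ops
    b.img = NULL;           // filled in once the buffer is gralloc-locked
    buffers->push_back(b);
    return buffers;
}
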
fake-pipeline2/JpegCompressor.cpp

@@ -0,0 +1,256 @@
/*
* Copyright (C) 2012 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#define LOG_NDEBUG 0
#define LOG_TAG "EmulatedCamera2_JpegCompressor"
#include <utils/Log.h>
#include <ui/GraphicBufferMapper.h>
#include "JpegCompressor.h"
#include "../EmulatedFakeCamera2.h"
namespace android {
JpegCompressor::JpegCompressor(EmulatedFakeCamera2 *parent):
Thread(false),
mIsBusy(false),
mParent(parent),
mBuffers(NULL),
mCaptureTime(0) {
}
JpegCompressor::~JpegCompressor() {
Mutex::Autolock lock(mMutex);
}
status_t JpegCompressor::start(Buffers *buffers,
nsecs_t captureTime) {
Mutex::Autolock lock(mMutex);
{
Mutex::Autolock busyLock(mBusyMutex);
if (mIsBusy) {
ALOGE("%s: Already processing a buffer!", __FUNCTION__);
return INVALID_OPERATION;
}
mIsBusy = true;
mBuffers = buffers;
mCaptureTime = captureTime;
}
status_t res;
res = run("EmulatedFakeCamera2::JpegCompressor");
if (res != OK) {
ALOGE("%s: Unable to start up compression thread: %s (%d)",
__FUNCTION__, strerror(-res), res);
delete mBuffers;
}
return res;
}
status_t JpegCompressor::cancel() {
requestExitAndWait();
return OK;
}
status_t JpegCompressor::readyToRun() {
return OK;
}
bool JpegCompressor::threadLoop() {
Mutex::Autolock lock(mMutex);
ALOGV("%s: Starting compression thread", __FUNCTION__);
// Find source and target buffers
mFoundJpeg = false;
mFoundAux = false;
for (size_t i = 0; i < mBuffers->size(); i++) {
const StreamBuffer &b = (*mBuffers)[i];
if (b.format == HAL_PIXEL_FORMAT_BLOB) {
mJpegBuffer = b;
mFoundJpeg = true;
} else if (b.streamId == -1) {
mAuxBuffer = b;
mFoundAux = true;
}
if (mFoundJpeg && mFoundAux) break;
}
if (!mFoundJpeg || !mFoundAux) {
ALOGE("%s: Unable to find buffers for JPEG source/destination",
__FUNCTION__);
cleanUp();
return false;
}
// Set up error management
mJpegErrorInfo = NULL;
JpegError error;
error.parent = this;
mCInfo.err = jpeg_std_error(&error);
mCInfo.err->error_exit = jpegErrorHandler;
jpeg_create_compress(&mCInfo);
if (checkError("Error initializing compression")) return false;
// Route compressed data straight to output stream buffer
JpegDestination jpegDestMgr;
jpegDestMgr.parent = this;
jpegDestMgr.init_destination = jpegInitDestination;
jpegDestMgr.empty_output_buffer = jpegEmptyOutputBuffer;
jpegDestMgr.term_destination = jpegTermDestination;
mCInfo.dest = &jpegDestMgr;
// Set up compression parameters
mCInfo.image_width = mAuxBuffer.width;
mCInfo.image_height = mAuxBuffer.height;
mCInfo.input_components = 3;
mCInfo.in_color_space = JCS_RGB;
jpeg_set_defaults(&mCInfo);
if (checkError("Error configuring defaults")) return false;
// Do compression
jpeg_start_compress(&mCInfo, TRUE);
if (checkError("Error starting compression")) return false;
size_t rowStride = mAuxBuffer.stride * 3;
const size_t kChunkSize = 32;
while (mCInfo.next_scanline < mCInfo.image_height) {
JSAMPROW chunk[kChunkSize];
for (size_t i = 0 ; i < kChunkSize; i++) {
chunk[i] = (JSAMPROW)
(mAuxBuffer.img + (i + mCInfo.next_scanline) * rowStride);
}
jpeg_write_scanlines(&mCInfo, chunk, kChunkSize);
if (checkError("Error while compressing")) return false;
if (exitPending()) {
ALOGV("%s: Cancel called, exiting early", __FUNCTION__);
cleanUp();
return false;
}
}
jpeg_finish_compress(&mCInfo);
if (checkError("Error while finishing compression")) return false;
// Write to JPEG output stream
ALOGV("%s: Compression complete, pushing to stream %d", __FUNCTION__,
mJpegBuffer.streamId);
GraphicBufferMapper::get().unlock(*(mJpegBuffer.buffer));
status_t res;
const Stream &s = mParent->getStreamInfo(mJpegBuffer.streamId);
res = s.ops->enqueue_buffer(s.ops, mCaptureTime, mJpegBuffer.buffer);
if (res != OK) {
ALOGE("%s: Error queueing compressed image buffer %p: %s (%d)",
__FUNCTION__, mJpegBuffer.buffer, strerror(-res), res);
mParent->signalError();
}
// All done
cleanUp();
return false;
}
bool JpegCompressor::isBusy() {
Mutex::Autolock busyLock(mBusyMutex);
return mIsBusy;
}
bool JpegCompressor::isStreamInUse(uint32_t id) {
Mutex::Autolock lock(mBusyMutex);
if (mBuffers && mIsBusy) {
for (size_t i = 0; i < mBuffers->size(); i++) {
if ( (*mBuffers)[i].streamId == (int)id ) return true;
}
}
return false;
}
bool JpegCompressor::waitForDone(nsecs_t timeout) {
Mutex::Autolock lock(mBusyMutex);
status_t res = OK;
if (mIsBusy) {
res = mDone.waitRelative(mBusyMutex, timeout);
}
return (res == OK);
}
bool JpegCompressor::checkError(const char *msg) {
if (mJpegErrorInfo) {
char errBuffer[JMSG_LENGTH_MAX];
mJpegErrorInfo->err->format_message(mJpegErrorInfo, errBuffer);
ALOGE("%s: %s: %s",
__FUNCTION__, msg, errBuffer);
cleanUp();
mJpegErrorInfo = NULL;
return true;
}
return false;
}
void JpegCompressor::cleanUp() {
jpeg_destroy_compress(&mCInfo);
Mutex::Autolock lock(mBusyMutex);
if (mFoundAux) {
delete[] mAuxBuffer.img;
}
delete mBuffers;
mBuffers = NULL;
mIsBusy = false;
mDone.signal();
}
void JpegCompressor::jpegErrorHandler(j_common_ptr cinfo) {
JpegError *error = static_cast<JpegError*>(cinfo->err);
error->parent->mJpegErrorInfo = cinfo;
}
void JpegCompressor::jpegInitDestination(j_compress_ptr cinfo) {
JpegDestination *dest= static_cast<JpegDestination*>(cinfo->dest);
ALOGV("%s: Setting destination to %p, size %d",
__FUNCTION__, dest->parent->mJpegBuffer.img, kMaxJpegSize);
dest->next_output_byte = (JOCTET*)(dest->parent->mJpegBuffer.img);
dest->free_in_buffer = kMaxJpegSize;
}
boolean JpegCompressor::jpegEmptyOutputBuffer(j_compress_ptr cinfo) {
ALOGE("%s: JPEG destination buffer overflow!",
__FUNCTION__);
return true;
}
void JpegCompressor::jpegTermDestination(j_compress_ptr cinfo) {
ALOGV("%s: Done writing JPEG data. %d bytes left in buffer",
__FUNCTION__, cinfo->dest->free_in_buffer);
}
} // namespace android

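The compressor above routes libjpeg's output straight into the BLOB gralloc buffer through a custom jpeg_destination_mgr, avoiding an intermediate copy. A stripped-down, self-contained sketch of that pattern (buffer and error handling simplified; this is not the class above):

#include <cstdint>
extern "C" {
#include <jpeglib.h>
}

struct FlatDest : public jpeg_destination_mgr {
    JOCTET *buf;
    size_t capacity;
};

static void initDest(j_compress_ptr cinfo) {
    FlatDest *d = static_cast<FlatDest*>(cinfo->dest);
    d->next_output_byte = d->buf;
    d->free_in_buffer = d->capacity;
}
// Called only on overflow; returning TRUE drops data rather than hanging,
// mirroring jpegEmptyOutputBuffer() above.
static boolean emptyDest(j_compress_ptr) { return TRUE; }
static void termDest(j_compress_ptr) {}

// Compress a tightly-packed RGB888 image into out; returns bytes written.
static size_t compressRGB(const uint8_t *rgb, int w, int h,
        JOCTET *out, size_t outCap) {
    jpeg_compress_struct cinfo;
    jpeg_error_mgr jerr;
    cinfo.err = jpeg_std_error(&jerr);
    jpeg_create_compress(&cinfo);

    FlatDest dest;
    dest.buf = out;
    dest.capacity = outCap;
    dest.init_destination = initDest;
    dest.empty_output_buffer = emptyDest;
    dest.term_destination = termDest;
    cinfo.dest = &dest;

    cinfo.image_width = w;
    cinfo.image_height = h;
    cinfo.input_components = 3;
    cinfo.in_color_space = JCS_RGB;
    jpeg_set_defaults(&cinfo);

    jpeg_start_compress(&cinfo, TRUE);
    while (cinfo.next_scanline < cinfo.image_height) {
        JSAMPROW row = const_cast<uint8_t*>(rgb) + cinfo.next_scanline * w * 3;
        jpeg_write_scanlines(&cinfo, &row, 1);
    }
    jpeg_finish_compress(&cinfo);
    size_t written = outCap - dest.free_in_buffer;
    jpeg_destroy_compress(&cinfo);
    return written;
}
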
fake-pipeline2/JpegCompressor.h

@@ -0,0 +1,109 @@
/*
* Copyright (C) 2012 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* This class simulates a hardware JPEG compressor. It receives image buffers
* in RGBA_8888 format, processes them in a worker thread, and then pushes them
* out to their destination stream.
*/
#ifndef HW_EMULATOR_CAMERA2_JPEG_H
#define HW_EMULATOR_CAMERA2_JPEG_H
#include "utils/Thread.h"
#include "utils/Mutex.h"
#include "utils/Timers.h"
#include "Base.h"
#include <stdio.h>
extern "C" {
#include <jpeglib.h>
}
namespace android {
class EmulatedFakeCamera2;
class JpegCompressor: private Thread, public virtual RefBase {
public:
JpegCompressor(EmulatedFakeCamera2 *parent);
~JpegCompressor();
// Start compressing COMPRESSED format buffers; JpegCompressor takes
// ownership of the Buffers vector.
status_t start(Buffers *buffers,
nsecs_t captureTime);
status_t cancel();
bool isBusy();
bool isStreamInUse(uint32_t id);
bool waitForDone(nsecs_t timeout);
// TODO: Measure this
static const size_t kMaxJpegSize = 300000;
private:
Mutex mBusyMutex;
bool mIsBusy;
Condition mDone;
Mutex mMutex;
EmulatedFakeCamera2 *mParent;
Buffers *mBuffers;
nsecs_t mCaptureTime;
StreamBuffer mJpegBuffer, mAuxBuffer;
bool mFoundJpeg, mFoundAux;
jpeg_compress_struct mCInfo;
struct JpegError : public jpeg_error_mgr {
JpegCompressor *parent;
};
j_common_ptr mJpegErrorInfo;
struct JpegDestination : public jpeg_destination_mgr {
JpegCompressor *parent;
};
static void jpegErrorHandler(j_common_ptr cinfo);
static void jpegInitDestination(j_compress_ptr cinfo);
static boolean jpegEmptyOutputBuffer(j_compress_ptr cinfo);
static void jpegTermDestination(j_compress_ptr cinfo);
bool checkError(const char *msg);
void cleanUp();
/**
* Inherited Thread virtual overrides
*/
private:
virtual status_t readyToRun();
virtual bool threadLoop();
};
} // namespace android
#endif

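Usage sketch, matching how the pipeline drives this class: ReadoutThread hands the capture's buffer set to start(), which takes ownership, and ConfigureThread polls waitForDone() at its 10 ms loop interval before reconfiguring the sensor. The helper below is illustrative, not pipeline code:

#include <utils/StrongPointer.h>
#include <utils/Timers.h>
#include "fake-pipeline2/JpegCompressor.h"

// Illustrative: push one capture through the compressor and block until done.
static bool compressAndWait(const android::sp<android::JpegCompressor> &jpeg,
        android::Buffers *buffers, nsecs_t captureTime) {
    if (jpeg->start(buffers, captureTime) != android::OK) return false;
    while (!jpeg->waitForDone(10000000LL)) { // 10 ms, as in ConfigureThread
        // other per-frame work can proceed between polls
    }
    return true;
}
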
fake-pipeline2/Sensor.cpp

@@ -26,6 +26,7 @@
 #include <utils/Log.h>
 
+#include "../EmulatedFakeCamera2.h"
 #include "Sensor.h"
 #include <cmath>
 #include <cstdlib>
@@ -98,14 +99,15 @@ float sqrtf_approx(float r) {
-Sensor::Sensor():
+Sensor::Sensor(EmulatedFakeCamera2 *parent):
         Thread(false),
+        mParent(parent),
         mGotVSync(false),
         mExposureTime(kFrameDurationRange[0]-kMinVerticalBlank),
         mFrameDuration(kFrameDurationRange[0]),
         mGainFactor(kDefaultSensitivity),
-        mNextBuffer(NULL),
-        mCapturedBuffer(NULL),
+        mNextBuffers(NULL),
+        mCapturedBuffers(NULL),
         mScene(kResolution[0], kResolution[1], kElectronsPerLuxSecond)
 {
@@ -119,12 +121,7 @@ status_t Sensor::startUp() {
ALOGV("%s: E", __FUNCTION__); ALOGV("%s: E", __FUNCTION__);
int res; int res;
mCapturedBuffer = NULL; mCapturedBuffers = NULL;
res = readyToRun();
if (res != OK) {
ALOGE("Unable to prepare sensor capture thread to run: %d", res);
return res;
}
res = run("EmulatedFakeCamera2::Sensor", res = run("EmulatedFakeCamera2::Sensor",
ANDROID_PRIORITY_URGENT_DISPLAY); ANDROID_PRIORITY_URGENT_DISPLAY);
@@ -167,12 +164,9 @@ void Sensor::setSensitivity(uint32_t gain) {
     mGainFactor = gain;
 }
 
-void Sensor::setDestinationBuffer(uint8_t *buffer,
-        uint32_t format, uint32_t stride) {
+void Sensor::setDestinationBuffers(Buffers *buffers) {
     Mutex::Autolock lock(mControlMutex);
-    mNextBuffer = buffer;
-    mNextBufferFmt = format;
-    mNextStride = stride;
+    mNextBuffers = buffers;
 }
 
 bool Sensor::waitForVSync(nsecs_t reltime) {
@@ -192,18 +186,18 @@ bool Sensor::waitForNewFrame(nsecs_t reltime,
         nsecs_t *captureTime) {
     Mutex::Autolock lock(mReadoutMutex);
     uint8_t *ret;
-    if (mCapturedBuffer == NULL) {
+    if (mCapturedBuffers == NULL) {
         int res;
         res = mReadoutComplete.waitRelative(mReadoutMutex, reltime);
         if (res == TIMED_OUT) {
             return false;
-        } else if (res != OK || mCapturedBuffer == NULL) {
+        } else if (res != OK || mCapturedBuffers == NULL) {
             ALOGE("Error waiting for sensor readout signal: %d", res);
             return false;
         }
     }
     *captureTime = mCaptureTime;
-    mCapturedBuffer = NULL;
+    mCapturedBuffers = NULL;
     return true;
 }
@@ -211,7 +205,7 @@ status_t Sensor::readyToRun() {
ALOGV("Starting up sensor thread"); ALOGV("Starting up sensor thread");
mStartupTime = systemTime(); mStartupTime = systemTime();
mNextCaptureTime = 0; mNextCaptureTime = 0;
mNextCapturedBuffer = NULL; mNextCapturedBuffers = NULL;
return OK; return OK;
} }
@@ -229,19 +223,15 @@ bool Sensor::threadLoop() {
     uint64_t exposureDuration;
     uint64_t frameDuration;
     uint32_t gain;
-    uint8_t *nextBuffer;
-    uint32_t nextBufferFmt;
-    uint32_t stride;
+    Buffers *nextBuffers;
     {
         Mutex::Autolock lock(mControlMutex);
         exposureDuration = mExposureTime;
         frameDuration    = mFrameDuration;
         gain             = mGainFactor;
-        nextBuffer       = mNextBuffer;
-        nextBufferFmt    = mNextBufferFmt;
-        stride           = mNextStride;
-        // Don't reuse a buffer
-        mNextBuffer = NULL;
+        nextBuffers      = mNextBuffers;
+        // Don't reuse a buffer set
+        mNextBuffers = NULL;
 
         // Signal VSync for start of readout
         ALOGVV("Sensor VSync");
* Stage 3: Read out latest captured image * Stage 3: Read out latest captured image
*/ */
uint8_t *capturedBuffer = NULL; Buffers *capturedBuffers = NULL;
nsecs_t captureTime = 0; nsecs_t captureTime = 0;
nsecs_t startRealTime = systemTime(); nsecs_t startRealTime = systemTime();
@@ -262,52 +252,78 @@ bool Sensor::threadLoop() {
     nsecs_t frameReadoutEndRealTime = startRealTime +
             kRowReadoutTime * kResolution[1];
 
-    if (mNextCapturedBuffer != NULL) {
+    if (mNextCapturedBuffers != NULL) {
         ALOGVV("Sensor starting readout");
         // Pretend we're doing readout now; will signal once enough time has elapsed
-        capturedBuffer = mNextCapturedBuffer;
+        capturedBuffers = mNextCapturedBuffers;
         captureTime = mNextCaptureTime;
     }
     simulatedTime += kRowReadoutTime + kMinVerticalBlank;
 
+    // TODO: Move this signal to another thread to simulate readout
+    // time properly
+    if (capturedBuffers != NULL) {
+        ALOGVV("Sensor readout complete");
+        Mutex::Autolock lock(mReadoutMutex);
+        mCapturedBuffers = capturedBuffers;
+        mCaptureTime = captureTime;
+        mReadoutComplete.signal();
+        capturedBuffers = NULL;
+    }
+
     /**
      * Stage 2: Capture new image
      */
 
     mNextCaptureTime = simulatedTime;
-    mNextCapturedBuffer = nextBuffer;
+    mNextCapturedBuffers = nextBuffers;
 
-    if (mNextCapturedBuffer != NULL) {
-        ALOGVV("Sensor capturing image (%d x %d) stride %d",
-                kResolution[0], kResolution[1], stride);
-        ALOGVV("Exposure: %f ms, gain: %d", (float)exposureDuration/1e6, gain);
+    if (mNextCapturedBuffers != NULL) {
+        ALOGVV("Starting next capture: Exposure: %f ms, gain: %d",
+                (float)exposureDuration/1e6, gain);
         mScene.setExposureDuration((float)exposureDuration/1e9);
         mScene.calculateScene(mNextCaptureTime);
-
-        switch(nextBufferFmt) {
-            case HAL_PIXEL_FORMAT_RAW_SENSOR:
-                captureRaw(gain, stride, &capturedBuffer,
-                        captureTime, frameEndRealTime);
-                break;
-            case HAL_PIXEL_FORMAT_RGBA_8888:
-                captureRGBA(gain, stride, &capturedBuffer,
-                        captureTime, frameEndRealTime);
-                break;
-            default:
-                ALOGE("%s: Unknown format %x, no output", __FUNCTION__,
-                        nextBufferFmt);
-                break;
+        for (size_t i = 0; i < mNextCapturedBuffers->size(); i++) {
+            const StreamBuffer &b = (*mNextCapturedBuffers)[i];
+            ALOGVV("Sensor capturing buffer %d: stream %d,"
+                    " %d x %d, format %x, stride %d, buf %p, img %p",
+                    i, b.streamId, b.width, b.height, b.format, b.stride,
+                    b.buffer, b.img);
+            switch(b.format) {
+                case HAL_PIXEL_FORMAT_RAW_SENSOR:
+                    captureRaw(b.img, gain, b.stride);
+                    break;
+                case HAL_PIXEL_FORMAT_RGBA_8888:
+                    captureRGBA(b.img, gain, b.stride);
+                    break;
+                case HAL_PIXEL_FORMAT_BLOB:
+                    // Add auxillary buffer of the right size
+                    // Assumes only one BLOB (JPEG) buffer in
+                    // mNextCapturedBuffers
+                    StreamBuffer bAux;
+                    bAux.streamId = -1;
+                    bAux.width = b.width;
+                    bAux.height = b.height;
+                    bAux.format = HAL_PIXEL_FORMAT_RGB_888;
+                    bAux.stride = b.width;
+                    bAux.buffer = NULL;
+                    // TODO: Reuse these
+                    bAux.img = new uint8_t[b.width * b.height * 3];
+                    captureRGB(bAux.img, gain, b.stride);
+                    mNextCapturedBuffers->push_back(bAux);
+                    break;
+                case HAL_PIXEL_FORMAT_YV12:
+                case HAL_PIXEL_FORMAT_YCrCb_420_SP:
+                    // TODO:
+                    ALOGE("%s: Format %x is TODO", __FUNCTION__, b.format);
+                    break;
+                default:
+                    ALOGE("%s: Unknown format %x, no output", __FUNCTION__,
+                            b.format);
+                    break;
+            }
         }
     }
 
-    // No capture done, or finished image generation before readout was completed
-    if (capturedBuffer != NULL) {
-        ALOGVV("Sensor readout complete");
-        Mutex::Autolock lock(mReadoutMutex);
-        mCapturedBuffer = capturedBuffer;
-        mCaptureTime = captureTime;
-        mReadoutComplete.signal();
-        capturedBuffer = NULL;
-    }
-
     ALOGVV("Sensor vertical blanking interval");
     nsecs_t workDoneRealTime = systemTime();
@@ -329,18 +345,17 @@ bool Sensor::threadLoop() {
     return true;
 };
 
-void Sensor::captureRaw(uint32_t gain, uint32_t stride,
-        uint8_t **capturedBuffer, nsecs_t captureTime, nsecs_t frameReadoutTime) {
+void Sensor::captureRaw(uint8_t *img, uint32_t gain, uint32_t stride) {
     float totalGain = gain/100.0 * kBaseGainFactor;
     float noiseVarGain = totalGain * totalGain;
     float readNoiseVar = kReadNoiseVarBeforeGain * noiseVarGain
             + kReadNoiseVarAfterGain;
 
     int bayerSelect[4] = {Scene::R, Scene::Gr, Scene::Gb, Scene::B}; // RGGB
+    mScene.setReadoutPixel(0,0);
     for (unsigned int y = 0; y < kResolution[1]; y++ ) {
         int *bayerRow = bayerSelect + (y & 0x1) * 2;
-        uint16_t *px = (uint16_t*)mNextCapturedBuffer + y * stride;
+        uint16_t *px = (uint16_t*)img + y * stride;
         for (unsigned int x = 0; x < kResolution[0]; x++) {
             uint32_t electronCount;
             electronCount = mScene.getPixelElectrons()[bayerRow[x & 0x1]];
@@ -367,59 +382,61 @@ void Sensor::captureRaw(uint32_t gain, uint32_t stride,
         }
         // TODO: Handle this better
         //simulatedTime += kRowReadoutTime;
-
-        // If enough time has elapsed to complete readout, signal done frame
-        // Only check every so often, though
-        if ((*capturedBuffer != NULL) &&
-                ((y & 63) == 0) &&
-                (systemTime() >= frameReadoutTime) ) {
-            ALOGV("Sensor readout complete");
-            Mutex::Autolock lock(mReadoutMutex);
-            mCapturedBuffer = *capturedBuffer;
-            mCaptureTime = captureTime;
-            mReadoutComplete.signal();
-            *capturedBuffer = NULL;
-        }
     }
     ALOGVV("Raw sensor image captured");
 }
-void Sensor::captureRGBA(uint32_t gain, uint32_t stride,
-        uint8_t **capturedBuffer, nsecs_t captureTime, nsecs_t frameReadoutTime) {
-    int totalGain = gain/100.0 * kBaseGainFactor;
+void Sensor::captureRGBA(uint8_t *img, uint32_t gain, uint32_t stride) {
+    float totalGain = gain/100.0 * kBaseGainFactor;
+    // In fixed-point math, calculate total scaling from electrons to 8bpp
+    int scale64x = 64 * totalGain * 255 / kMaxRawValue;
 
+    mScene.setReadoutPixel(0,0);
     for (unsigned int y = 0; y < kResolution[1]; y++ ) {
-        uint8_t *px = (uint8_t*)mNextCapturedBuffer + y * stride * 4;
+        uint8_t *px = img + y * stride * 4;
         for (unsigned int x = 0; x < kResolution[0]; x++) {
             uint32_t rCount, gCount, bCount;
             // TODO: Perfect demosaicing is a cheat
             const uint32_t *pixel = mScene.getPixelElectrons();
-            rCount = pixel[Scene::R] * totalGain / (kMaxRawValue / 255);
-            gCount = pixel[Scene::Gr] * totalGain / (kMaxRawValue / 255);
-            bCount = pixel[Scene::B] * totalGain / (kMaxRawValue / 255);
-            *px++ = rCount < 255 ? rCount : 255;
-            *px++ = gCount < 255 ? gCount : 255;
-            *px++ = bCount < 255 ? bCount : 255;
+            rCount = pixel[Scene::R] * scale64x;
+            gCount = pixel[Scene::Gr] * scale64x;
+            bCount = pixel[Scene::B] * scale64x;
+            *px++ = rCount < 255*64 ? rCount / 64 : 255;
+            *px++ = gCount < 255*64 ? gCount / 64 : 255;
+            *px++ = bCount < 255*64 ? bCount / 64 : 255;
             *px++ = 255;
         }
         // TODO: Handle this better
         //simulatedTime += kRowReadoutTime;
-
-        // If enough time has elapsed to complete readout, signal done frame
-        // Only check every so often, though
-        if ((*capturedBuffer != NULL) &&
-                ((y & 63) == 0) &&
-                (systemTime() >= frameReadoutTime) ) {
-            ALOGV("Sensor readout complete");
-            Mutex::Autolock lock(mReadoutMutex);
-            mCapturedBuffer = *capturedBuffer;
-            mCaptureTime = captureTime;
-            mReadoutComplete.signal();
-            *capturedBuffer = NULL;
-        }
     }
     ALOGVV("RGBA sensor image captured");
 }
+void Sensor::captureRGB(uint8_t *img, uint32_t gain, uint32_t stride) {
+    float totalGain = gain/100.0 * kBaseGainFactor;
+    // In fixed-point math, calculate total scaling from electrons to 8bpp
+    int scale64x = 64 * totalGain * 255 / kMaxRawValue;
+
+    mScene.setReadoutPixel(0,0);
+    for (unsigned int y = 0; y < kResolution[1]; y++ ) {
+        uint8_t *px = img + y * stride * 3;
+        for (unsigned int x = 0; x < kResolution[0]; x++) {
+            uint32_t rCount, gCount, bCount;
+            // TODO: Perfect demosaicing is a cheat
+            const uint32_t *pixel = mScene.getPixelElectrons();
+            rCount = pixel[Scene::R] * scale64x;
+            gCount = pixel[Scene::Gr] * scale64x;
+            bCount = pixel[Scene::B] * scale64x;
+            *px++ = rCount < 255*64 ? rCount / 64 : 255;
+            *px++ = gCount < 255*64 ? gCount / 64 : 255;
+            *px++ = bCount < 255*64 ? bCount / 64 : 255;
+        }
+        // TODO: Handle this better
+        //simulatedTime += kRowReadoutTime;
+    }
+    ALOGVV("RGB sensor image captured");
+}
+
 } // namespace android

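The new capture paths above replace per-pixel float math with 6-bit fixed point: scale64x folds gain and the electrons-to-8-bit conversion into a single integer multiply per channel. A worked example with illustrative constants (the kMaxRawValue figure below is a placeholder, not the sensor's actual full-scale value):

#include <cstdint>
#include <cstdio>

int main() {
    const float totalGain = 1.0f;   // assume unity analog gain
    const int kMaxRawValue = 4000;  // placeholder full-scale electron count

    // value_8bpp ~= electrons * totalGain * 255 / kMaxRawValue, precomputed
    // once per frame in units of 1/64:
    int scale64x = 64 * totalGain * 255 / kMaxRawValue; // = 4 here

    uint32_t electrons = 2000;                 // half of full scale
    uint32_t count = electrons * scale64x;     // 8000, in 1/64 units
    uint8_t out = count < 255 * 64 ? count / 64 : 255; // -> 125 (~127.5 ideal)
    printf("8bpp output: %u\n", out);
    return 0;
}
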
fake-pipeline2/Sensor.h

@@ -78,13 +78,16 @@
#include "utils/Timers.h" #include "utils/Timers.h"
#include "Scene.h" #include "Scene.h"
#include "Base.h"
namespace android { namespace android {
class EmulatedFakeCamera2;
class Sensor: private Thread, public virtual RefBase { class Sensor: private Thread, public virtual RefBase {
public: public:
Sensor(); Sensor(EmulatedFakeCamera2 *parent);
~Sensor(); ~Sensor();
/* /*
@@ -107,7 +110,7 @@ class Sensor: private Thread, public virtual RefBase {
     void setFrameDuration(uint64_t ns);
     void setSensitivity(uint32_t gain);
     // Buffer must be at least stride*height*2 bytes in size
-    void setDestinationBuffer(uint8_t *buffer, uint32_t format, uint32_t stride);
+    void setDestinationBuffers(Buffers *buffers);
 
     /*
      * Controls that cause reconfiguration delay
@@ -169,6 +172,7 @@ class Sensor: private Thread, public virtual RefBase {
     static const uint32_t kDefaultSensitivity;
 
   private:
+    EmulatedFakeCamera2 *mParent;
     Mutex mControlMutex; // Lock before accessing control parameters
     // Start of control parameters
@@ -177,16 +181,14 @@ class Sensor: private Thread, public virtual RefBase {
     uint64_t mExposureTime;
     uint64_t mFrameDuration;
     uint32_t mGainFactor;
-    uint8_t *mNextBuffer;
-    uint32_t mNextBufferFmt;
-    uint32_t mNextStride;
+    Buffers *mNextBuffers;
 
     // End of control parameters
 
     Mutex mReadoutMutex; // Lock before accessing readout variables
     // Start of readout variables
     Condition mReadoutComplete;
-    uint8_t *mCapturedBuffer;
+    Buffers *mCapturedBuffers;
     nsecs_t mCaptureTime;
     // End of readout variables
@@ -203,17 +205,13 @@ class Sensor: private Thread, public virtual RefBase {
     virtual bool threadLoop();
 
     nsecs_t mNextCaptureTime;
-    uint8_t *mNextCapturedBuffer;
+    Buffers *mNextCapturedBuffers;
 
     Scene mScene;
 
-    void captureRaw(uint32_t gain, uint32_t stride,
-            uint8_t **capturedBuffer, nsecs_t captureTime,
-            nsecs_t frameReadoutTime);
-    void captureRGBA(uint32_t gain, uint32_t stride,
-            uint8_t **capturedBuffer, nsecs_t captureTime,
-            nsecs_t frameReadoutTime);
+    void captureRaw(uint8_t *img, uint32_t gain, uint32_t stride);
+    void captureRGBA(uint8_t *img, uint32_t gain, uint32_t stride);
+    void captureRGB(uint8_t *img, uint32_t gain, uint32_t stride);
 };