Revert "Revert "EmulatedFakeCamera2: Add precapture trigger and reprocess support""

Missing project ready to go.

This reverts commit 75832395dd

Change-Id: I48f6dee11248de5a12b66bd7fbc724c82517fe3a
Author:     Eino-Ville Talvala
Date:       2012-09-04 14:21:30 -07:00
Committer:  Android (Google) Code Review
Parent:     75832395dd
Commit:     f11b793ffb

7 changed files with 707 additions and 178 deletions
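For orientation, here is a sketch (illustrative only, not part of this commit) of how a camera2 HAL client might exercise the reprocess entry point this change adds. The signatures come from the diff below; the device handle `dev`, the existing output stream id `output_stream_id`, and the input-side ops table `in_ops` are assumed to exist in the caller:

    uint32_t reprocess_stream_id;
    int res = dev->ops->allocate_reprocess_stream_from_stream(
            dev,
            output_stream_id,      // id of an already-allocated output stream
            in_ops,                // camera2_stream_in_ops_t for the input side
            &reprocess_stream_id);
    if (res == 0) {
        // ... submit ANDROID_REQUEST_TYPE_REPROCESS requests that reference
        // reprocess_stream_id, then tear down when finished:
        dev->ops->release_reprocess_stream(dev, reprocess_stream_id);
    }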

================================================================================

@@ -162,6 +162,13 @@ int EmulatedCamera2::allocateReprocessStream(
     return INVALID_OPERATION;
 }
 
+int EmulatedCamera2::allocateReprocessStreamFromStream(
+        uint32_t output_stream_id,
+        const camera2_stream_in_ops_t *reprocess_stream_ops,
+        uint32_t *stream_id) {
+    return INVALID_OPERATION;
+}
+
 int EmulatedCamera2::releaseReprocessStream(uint32_t stream_id) {
     return INVALID_OPERATION;
 }
@@ -288,6 +295,17 @@ int EmulatedCamera2::allocate_reprocess_stream(const camera2_device_t *d,
             reprocess_stream_ops, stream_id, consumer_usage, max_buffers);
 }
 
+int EmulatedCamera2::allocate_reprocess_stream_from_stream(
+        const camera2_device_t *d,
+        uint32_t output_stream_id,
+        const camera2_stream_in_ops_t *reprocess_stream_ops,
+        uint32_t *stream_id) {
+    EmulatedCamera2* ec = getInstance(d);
+    return ec->allocateReprocessStreamFromStream(output_stream_id,
+            reprocess_stream_ops, stream_id);
+}
+
 int EmulatedCamera2::release_reprocess_stream(const camera2_device_t *d,
         uint32_t stream_id) {
     EmulatedCamera2* ec = getInstance(d);
@@ -379,6 +397,7 @@ camera2_device_ops_t EmulatedCamera2::sDeviceOps = {
     EmulatedCamera2::register_stream_buffers,
     EmulatedCamera2::release_stream,
     EmulatedCamera2::allocate_reprocess_stream,
+    EmulatedCamera2::allocate_reprocess_stream_from_stream,
     EmulatedCamera2::release_reprocess_stream,
     EmulatedCamera2::trigger_action,
     EmulatedCamera2::set_notify_callback,

================================================================================

@@ -126,6 +126,11 @@ protected:
             uint32_t *consumer_usage,
             uint32_t *max_buffers);
 
+    virtual int allocateReprocessStreamFromStream(
+            uint32_t output_stream_id,
+            const camera2_stream_in_ops_t *reprocess_stream_ops,
+            uint32_t *stream_id);
+
     virtual int releaseReprocessStream(uint32_t stream_id);
 
     /** 3A action triggering */
@@ -197,6 +202,11 @@ private:
             uint32_t *consumer_usage,
             uint32_t *max_buffers);
 
+    static int allocate_reprocess_stream_from_stream(const camera2_device_t *,
+            uint32_t output_stream_id,
+            const camera2_stream_in_ops_t *reprocess_stream_ops,
+            uint32_t *stream_id);
+
     static int release_reprocess_stream(const camera2_device_t *,
             uint32_t stream_id);

================================================================================

@@ -31,6 +31,10 @@
 
 namespace android {
 
+const int64_t USEC = 1000LL;
+const int64_t MSEC = USEC * 1000LL;
+const int64_t SEC = MSEC * 1000LL;
+
 const uint32_t EmulatedFakeCamera2::kAvailableFormats[4] = {
         HAL_PIXEL_FORMAT_RAW_SENSOR,
         HAL_PIXEL_FORMAT_BLOB,
@@ -118,10 +122,12 @@ status_t EmulatedFakeCamera2::Initialize() {
     }
     if (res != OK) return res;
 
-    mNextStreamId = 0;
+    mNextStreamId = 1;
+    mNextReprocessStreamId = 1;
     mRawStreamCount = 0;
     mProcessedStreamCount = 0;
     mJpegStreamCount = 0;
+    mReprocessStreamCount = 0;
 
     return NO_ERROR;
 }
@@ -140,7 +146,8 @@ status_t EmulatedFakeCamera2::connectCamera(hw_device_t** device) {
     mSensor = new Sensor(this);
     mJpegCompressor = new JpegCompressor(this);
 
-    mNextStreamId = 0;
+    mNextStreamId = 1;
+    mNextReprocessStreamId = 1;
 
     res = mSensor->startUp();
     if (res != NO_ERROR) return res;
@@ -435,6 +442,69 @@ int EmulatedFakeCamera2::releaseStream(uint32_t stream_id) {
     return NO_ERROR;
 }
 
+int EmulatedFakeCamera2::allocateReprocessStreamFromStream(
+        uint32_t output_stream_id,
+        const camera2_stream_in_ops_t *stream_ops,
+        uint32_t *stream_id) {
+    Mutex::Autolock l(mMutex);
+
+    ssize_t baseStreamIndex = mStreams.indexOfKey(output_stream_id);
+    if (baseStreamIndex < 0) {
+        ALOGE("%s: Unknown output stream id %d!", __FUNCTION__, output_stream_id);
+        return BAD_VALUE;
+    }
+
+    const Stream &baseStream = mStreams[baseStreamIndex];
+
+    // We'll reprocess anything we produced
+
+    if (mReprocessStreamCount >= kMaxReprocessStreamCount) {
+        ALOGE("%s: Cannot allocate another reprocess stream (%d already allocated)",
+                __FUNCTION__, mReprocessStreamCount);
+        return INVALID_OPERATION;
+    }
+    mReprocessStreamCount++;
+
+    ReprocessStream newStream;
+    newStream.ops = stream_ops;
+    newStream.width = baseStream.width;
+    newStream.height = baseStream.height;
+    newStream.format = baseStream.format;
+    newStream.stride = baseStream.stride;
+    newStream.sourceStreamId = output_stream_id;
+
+    *stream_id = mNextReprocessStreamId;
+    mReprocessStreams.add(mNextReprocessStreamId, newStream);
+
+    ALOGV("Reprocess stream allocated: %d: %d, %d, 0x%x. Parent stream: %d",
+            *stream_id, newStream.width, newStream.height, newStream.format,
+            output_stream_id);
+
+    mNextReprocessStreamId++;
+    return NO_ERROR;
+}
+
+int EmulatedFakeCamera2::releaseReprocessStream(uint32_t stream_id) {
+    Mutex::Autolock l(mMutex);
+
+    ssize_t streamIndex = mReprocessStreams.indexOfKey(stream_id);
+    if (streamIndex < 0) {
+        ALOGE("%s: Unknown reprocess stream id %d!", __FUNCTION__, stream_id);
+        return BAD_VALUE;
+    }
+
+    if (isReprocessStreamInUse(stream_id)) {
+        ALOGE("%s: Cannot release reprocessing stream %d; in use!", __FUNCTION__,
+                stream_id);
+        return BAD_VALUE;
+    }
+
+    mReprocessStreamCount--;
+    mReprocessStreams.removeItemsAt(streamIndex);
+
+    return NO_ERROR;
+}
+
 int EmulatedFakeCamera2::triggerAction(uint32_t trigger_id,
         int32_t ext1,
         int32_t ext2) {
@@ -603,7 +673,6 @@ int EmulatedFakeCamera2::ConfigureThread::getInProgressCount() {
 }
 
 bool EmulatedFakeCamera2::ConfigureThread::threadLoop() {
-    static const nsecs_t kWaitPerLoop = 10000000L; // 10 ms
     status_t res;
 
     // Check if we're currently processing or just waiting
@@ -645,105 +714,32 @@ bool EmulatedFakeCamera2::ConfigureThread::threadLoop() {
             Mutex::Autolock lock(mInputMutex);
             mRequestCount++;
         }
 
-        // Get necessary parameters for sensor config
-        mParent->mControlThread->processRequest(mRequest);
-
-        camera_metadata_entry_t streams;
+        camera_metadata_entry_t type;
         res = find_camera_metadata_entry(mRequest,
-                ANDROID_REQUEST_OUTPUT_STREAMS,
-                &streams);
+                ANDROID_REQUEST_TYPE,
+                &type);
         if (res != NO_ERROR) {
-            ALOGE("%s: error reading output stream tag", __FUNCTION__);
+            ALOGE("%s: error reading request type", __FUNCTION__);
             mParent->signalError();
             return false;
         }
-
-        mNextBuffers = new Buffers;
-        mNextNeedsJpeg = false;
-        ALOGV("Configure: Setting up buffers for capture");
-        for (size_t i = 0; i < streams.count; i++) {
-            int streamId = streams.data.u8[i];
-            const Stream &s = mParent->getStreamInfo(streamId);
-            if (s.format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
-                ALOGE("%s: Stream %d does not have a concrete pixel format, but "
-                        "is included in a request!", __FUNCTION__, streamId);
-                mParent->signalError();
-                return false;
-            }
-            StreamBuffer b;
-            b.streamId = streams.data.u8[i];
-            b.width  = s.width;
-            b.height = s.height;
-            b.format = s.format;
-            b.stride = s.stride;
-            mNextBuffers->push_back(b);
-            ALOGV("Configure: Buffer %d: Stream %d, %d x %d, format 0x%x, "
-                    "stride %d",
-                    i, b.streamId, b.width, b.height, b.format, b.stride);
-            if (b.format == HAL_PIXEL_FORMAT_BLOB) {
-                mNextNeedsJpeg = true;
-            }
-        }
-
-        camera_metadata_entry_t e;
-        res = find_camera_metadata_entry(mRequest,
-                ANDROID_REQUEST_FRAME_COUNT,
-                &e);
-        if (res != NO_ERROR) {
-            ALOGE("%s: error reading frame count tag: %s (%d)",
-                    __FUNCTION__, strerror(-res), res);
-            mParent->signalError();
-            return false;
-        }
-        mNextFrameNumber = *e.data.i32;
-
-        res = find_camera_metadata_entry(mRequest,
-                ANDROID_SENSOR_EXPOSURE_TIME,
-                &e);
-        if (res != NO_ERROR) {
-            ALOGE("%s: error reading exposure time tag: %s (%d)",
-                    __FUNCTION__, strerror(-res), res);
-            mParent->signalError();
-            return false;
-        }
-        mNextExposureTime = *e.data.i64;
-
-        res = find_camera_metadata_entry(mRequest,
-                ANDROID_SENSOR_FRAME_DURATION,
-                &e);
-        if (res != NO_ERROR) {
-            ALOGE("%s: error reading frame duration tag", __FUNCTION__);
-            mParent->signalError();
-            return false;
-        }
-        mNextFrameDuration = *e.data.i64;
-
-        if (mNextFrameDuration <
-                mNextExposureTime + Sensor::kMinVerticalBlank) {
-            mNextFrameDuration = mNextExposureTime + Sensor::kMinVerticalBlank;
-        }
-
-        res = find_camera_metadata_entry(mRequest,
-                ANDROID_SENSOR_SENSITIVITY,
-                &e);
-        if (res != NO_ERROR) {
-            ALOGE("%s: error reading sensitivity tag", __FUNCTION__);
-            mParent->signalError();
-            return false;
-        }
-        mNextSensitivity = *e.data.i32;
-
-        res = find_camera_metadata_entry(mRequest,
-                EMULATOR_SCENE_HOUROFDAY,
-                &e);
-        if (res == NO_ERROR) {
-            ALOGV("Setting hour: %d", *e.data.i32);
-            mParent->mSensor->getScene().setHour(*e.data.i32);
-        }
-
-        // Start waiting on readout thread
-        mWaitingForReadout = true;
-        ALOGV("Configure: Waiting for readout thread");
+
+        bool success = false;;
+        switch (type.data.u8[0]) {
+            case ANDROID_REQUEST_TYPE_CAPTURE:
+                success = setupCapture();
+                break;
+            case ANDROID_REQUEST_TYPE_REPROCESS:
+                success = setupReprocess();
+                break;
+            default:
+                ALOGE("%s: Unexpected request type %d",
+                        __FUNCTION__, type.data.u8[0]);
+                mParent->signalError();
+                break;
+        }
+        if (!success) return false;
     }
 
     if (mWaitingForReadout) {
@@ -767,49 +763,134 @@ bool EmulatedFakeCamera2::ConfigureThread::threadLoop() {
         ALOGV("Configure: Waiting for sensor");
         mNextNeedsJpeg = false;
     }
 
-    bool vsync = mParent->mSensor->waitForVSync(kWaitPerLoop);
+    if (mNextIsCapture) {
+        return configureNextCapture();
+    } else {
+        return configureNextReprocess();
+    }
+}
+
+bool EmulatedFakeCamera2::ConfigureThread::setupCapture() {
+    status_t res;
+
+    mNextIsCapture = true;
+    // Get necessary parameters for sensor config
+    mParent->mControlThread->processRequest(mRequest);
+
+    camera_metadata_entry_t streams;
+    res = find_camera_metadata_entry(mRequest,
+            ANDROID_REQUEST_OUTPUT_STREAMS,
+            &streams);
+    if (res != NO_ERROR) {
+        ALOGE("%s: error reading output stream tag", __FUNCTION__);
+        mParent->signalError();
+        return false;
+    }
+
+    mNextBuffers = new Buffers;
+    mNextNeedsJpeg = false;
+    ALOGV("Configure: Setting up buffers for capture");
+    for (size_t i = 0; i < streams.count; i++) {
+        int streamId = streams.data.u8[i];
+        const Stream &s = mParent->getStreamInfo(streamId);
+        if (s.format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
+            ALOGE("%s: Stream %d does not have a concrete pixel format, but "
+                    "is included in a request!", __FUNCTION__, streamId);
+            mParent->signalError();
+            return false;
+        }
+        StreamBuffer b;
+        b.streamId = streams.data.u8[i];
+        b.width  = s.width;
+        b.height = s.height;
+        b.format = s.format;
+        b.stride = s.stride;
+        mNextBuffers->push_back(b);
+        ALOGV("Configure: Buffer %d: Stream %d, %d x %d, format 0x%x, "
+                "stride %d",
+                i, b.streamId, b.width, b.height, b.format, b.stride);
+        if (b.format == HAL_PIXEL_FORMAT_BLOB) {
+            mNextNeedsJpeg = true;
+        }
+    }
+
+    camera_metadata_entry_t e;
+    res = find_camera_metadata_entry(mRequest,
+            ANDROID_REQUEST_FRAME_COUNT,
+            &e);
+    if (res != NO_ERROR) {
+        ALOGE("%s: error reading frame count tag: %s (%d)",
+                __FUNCTION__, strerror(-res), res);
+        mParent->signalError();
+        return false;
+    }
+    mNextFrameNumber = *e.data.i32;
+
+    res = find_camera_metadata_entry(mRequest,
+            ANDROID_SENSOR_EXPOSURE_TIME,
+            &e);
+    if (res != NO_ERROR) {
+        ALOGE("%s: error reading exposure time tag: %s (%d)",
+                __FUNCTION__, strerror(-res), res);
+        mParent->signalError();
+        return false;
+    }
+    mNextExposureTime = *e.data.i64;
+
+    res = find_camera_metadata_entry(mRequest,
+            ANDROID_SENSOR_FRAME_DURATION,
+            &e);
+    if (res != NO_ERROR) {
+        ALOGE("%s: error reading frame duration tag", __FUNCTION__);
+        mParent->signalError();
+        return false;
+    }
+    mNextFrameDuration = *e.data.i64;
+
+    if (mNextFrameDuration <
+            mNextExposureTime + Sensor::kMinVerticalBlank) {
+        mNextFrameDuration = mNextExposureTime + Sensor::kMinVerticalBlank;
+    }
+
+    res = find_camera_metadata_entry(mRequest,
+            ANDROID_SENSOR_SENSITIVITY,
+            &e);
+    if (res != NO_ERROR) {
+        ALOGE("%s: error reading sensitivity tag", __FUNCTION__);
+        mParent->signalError();
+        return false;
+    }
+    mNextSensitivity = *e.data.i32;
+
+    res = find_camera_metadata_entry(mRequest,
+            EMULATOR_SCENE_HOUROFDAY,
+            &e);
+    if (res == NO_ERROR) {
+        ALOGV("Setting hour: %d", *e.data.i32);
+        mParent->mSensor->getScene().setHour(*e.data.i32);
+    }
+
+    // Start waiting on readout thread
+    mWaitingForReadout = true;
+    ALOGV("Configure: Waiting for readout thread");
+    return true;
+}
+
+bool EmulatedFakeCamera2::ConfigureThread::configureNextCapture() {
+    bool vsync = mParent->mSensor->waitForVSync(kWaitPerLoop);
     if (!vsync) return true;
 
     Mutex::Autolock il(mInternalsMutex);
-    ALOGV("Configure: Configuring sensor for frame %d", mNextFrameNumber);
+    ALOGV("Configure: Configuring sensor for capture %d", mNextFrameNumber);
     mParent->mSensor->setExposureTime(mNextExposureTime);
     mParent->mSensor->setFrameDuration(mNextFrameDuration);
     mParent->mSensor->setSensitivity(mNextSensitivity);
 
-    /** Get buffers to fill for this frame */
-    for (size_t i = 0; i < mNextBuffers->size(); i++) {
-        StreamBuffer &b = mNextBuffers->editItemAt(i);
-        Stream s = mParent->getStreamInfo(b.streamId);
-        ALOGV("Configure: Dequeing buffer from stream %d", b.streamId);
-        res = s.ops->dequeue_buffer(s.ops, &(b.buffer) );
-        if (res != NO_ERROR || b.buffer == NULL) {
-            ALOGE("%s: Unable to dequeue buffer from stream %d: %s (%d)",
-                    __FUNCTION__, b.streamId, strerror(-res), res);
-            mParent->signalError();
-            return false;
-        }
-        /* Lock the buffer from the perspective of the graphics mapper */
-        uint8_t *img;
-        const Rect rect(s.width, s.height);
-        res = GraphicBufferMapper::get().lock(*(b.buffer),
-                GRALLOC_USAGE_HW_CAMERA_WRITE,
-                rect, (void**)&(b.img) );
-        if (res != NO_ERROR) {
-            ALOGE("%s: grbuffer_mapper.lock failure: %s (%d)",
-                    __FUNCTION__, strerror(-res), res);
-            s.ops->cancel_buffer(s.ops,
-                    b.buffer);
-            mParent->signalError();
-            return false;
-        }
-    }
+    getBuffers();
 
-    ALOGV("Configure: Done configure for frame %d", mNextFrameNumber);
-    mParent->mReadoutThread->setNextCapture(mRequest, mNextBuffers);
+    ALOGV("Configure: Done configure for capture %d", mNextFrameNumber);
+    mParent->mReadoutThread->setNextOperation(true, mRequest, mNextBuffers);
     mParent->mSensor->setDestinationBuffers(mNextBuffers);
 
     mRequest = NULL;
@@ -821,6 +902,172 @@ bool EmulatedFakeCamera2::ConfigureThread::threadLoop() {
     return true;
 }
 
+bool EmulatedFakeCamera2::ConfigureThread::setupReprocess() {
+    status_t res;
+
+    mNextNeedsJpeg = true;
+    mNextIsCapture = false;
+
+    camera_metadata_entry_t reprocessStreams;
+    res = find_camera_metadata_entry(mRequest,
+            ANDROID_REQUEST_INPUT_STREAMS,
+            &reprocessStreams);
+    if (res != NO_ERROR) {
+        ALOGE("%s: error reading output stream tag", __FUNCTION__);
+        mParent->signalError();
+        return false;
+    }
+
+    mNextBuffers = new Buffers;
+
+    ALOGV("Configure: Setting up input buffers for reprocess");
+    for (size_t i = 0; i < reprocessStreams.count; i++) {
+        int streamId = reprocessStreams.data.u8[i];
+        const ReprocessStream &s = mParent->getReprocessStreamInfo(streamId);
+        if (s.format != HAL_PIXEL_FORMAT_RGB_888) {
+            ALOGE("%s: Only ZSL reprocessing supported!",
+                    __FUNCTION__);
+            mParent->signalError();
+            return false;
+        }
+        StreamBuffer b;
+        b.streamId = -streamId;
+        b.width  = s.width;
+        b.height = s.height;
+        b.format = s.format;
+        b.stride = s.stride;
+        mNextBuffers->push_back(b);
+    }
+
+    camera_metadata_entry_t streams;
+    res = find_camera_metadata_entry(mRequest,
+            ANDROID_REQUEST_OUTPUT_STREAMS,
+            &streams);
+    if (res != NO_ERROR) {
+        ALOGE("%s: error reading output stream tag", __FUNCTION__);
+        mParent->signalError();
+        return false;
+    }
+
+    ALOGV("Configure: Setting up output buffers for reprocess");
+    for (size_t i = 0; i < streams.count; i++) {
+        int streamId = streams.data.u8[i];
+        const Stream &s = mParent->getStreamInfo(streamId);
+        if (s.format != HAL_PIXEL_FORMAT_BLOB) {
+            // TODO: Support reprocess to YUV
+            ALOGE("%s: Non-JPEG output stream %d for reprocess not supported",
+                    __FUNCTION__, streamId);
+            mParent->signalError();
+            return false;
+        }
+        StreamBuffer b;
+        b.streamId = streams.data.u8[i];
+        b.width  = s.width;
+        b.height = s.height;
+        b.format = s.format;
+        b.stride = s.stride;
+        mNextBuffers->push_back(b);
+        ALOGV("Configure: Buffer %d: Stream %d, %d x %d, format 0x%x, "
+                "stride %d",
+                i, b.streamId, b.width, b.height, b.format, b.stride);
+    }
+
+    camera_metadata_entry_t e;
+    res = find_camera_metadata_entry(mRequest,
+            ANDROID_REQUEST_FRAME_COUNT,
+            &e);
+    if (res != NO_ERROR) {
+        ALOGE("%s: error reading frame count tag: %s (%d)",
+                __FUNCTION__, strerror(-res), res);
+        mParent->signalError();
+        return false;
+    }
+    mNextFrameNumber = *e.data.i32;
+
+    return true;
+}
+
+bool EmulatedFakeCamera2::ConfigureThread::configureNextReprocess() {
+    Mutex::Autolock il(mInternalsMutex);
+
+    getBuffers();
+
+    ALOGV("Configure: Done configure for reprocess %d", mNextFrameNumber);
+    mParent->mReadoutThread->setNextOperation(false, mRequest, mNextBuffers);
+
+    mRequest = NULL;
+    mNextBuffers = NULL;
+
+    Mutex::Autolock lock(mInputMutex);
+    mRequestCount--;
+
+    return true;
+}
+
+bool EmulatedFakeCamera2::ConfigureThread::getBuffers() {
+    status_t res;
+    /** Get buffers to fill for this frame */
+    for (size_t i = 0; i < mNextBuffers->size(); i++) {
+        StreamBuffer &b = mNextBuffers->editItemAt(i);
+
+        if (b.streamId > 0) {
+            Stream s = mParent->getStreamInfo(b.streamId);
+            ALOGV("Configure: Dequeing buffer from stream %d", b.streamId);
+            res = s.ops->dequeue_buffer(s.ops, &(b.buffer) );
+            if (res != NO_ERROR || b.buffer == NULL) {
+                ALOGE("%s: Unable to dequeue buffer from stream %d: %s (%d)",
+                        __FUNCTION__, b.streamId, strerror(-res), res);
+                mParent->signalError();
+                return false;
+            }
+
+            /* Lock the buffer from the perspective of the graphics mapper */
+            const Rect rect(s.width, s.height);
+            res = GraphicBufferMapper::get().lock(*(b.buffer),
+                    GRALLOC_USAGE_HW_CAMERA_WRITE,
+                    rect, (void**)&(b.img) );
+
+            if (res != NO_ERROR) {
+                ALOGE("%s: grbuffer_mapper.lock failure: %s (%d)",
+                        __FUNCTION__, strerror(-res), res);
+                s.ops->cancel_buffer(s.ops,
+                        b.buffer);
+                mParent->signalError();
+                return false;
+            }
+        } else {
+            ReprocessStream s = mParent->getReprocessStreamInfo(-b.streamId);
+            ALOGV("Configure: Acquiring buffer from reprocess stream %d",
+                    -b.streamId);
+            res = s.ops->acquire_buffer(s.ops, &(b.buffer) );
+            if (res != NO_ERROR || b.buffer == NULL) {
+                ALOGE("%s: Unable to acquire buffer from reprocess stream %d: "
+                        "%s (%d)", __FUNCTION__, -b.streamId,
+                        strerror(-res), res);
+                mParent->signalError();
+                return false;
+            }
+
+            /* Lock the buffer from the perspective of the graphics mapper */
+            const Rect rect(s.width, s.height);
+            res = GraphicBufferMapper::get().lock(*(b.buffer),
+                    GRALLOC_USAGE_HW_CAMERA_READ,
+                    rect, (void**)&(b.img) );
+            if (res != NO_ERROR) {
+                ALOGE("%s: grbuffer_mapper.lock failure: %s (%d)",
+                        __FUNCTION__, strerror(-res), res);
+                s.ops->release_buffer(s.ops,
+                        b.buffer);
+                mParent->signalError();
+                return false;
+            }
+        }
+    }
+    return true;
+}
+
 EmulatedFakeCamera2::ReadoutThread::ReadoutThread(EmulatedFakeCamera2 *parent):
         Thread(false),
         mParent(parent),
@@ -874,7 +1121,8 @@ bool EmulatedFakeCamera2::ReadoutThread::readyForNextCapture() {
     return (mInFlightTail + 1) % kInFlightQueueSize != mInFlightHead;
 }
 
-void EmulatedFakeCamera2::ReadoutThread::setNextCapture(
+void EmulatedFakeCamera2::ReadoutThread::setNextOperation(
+        bool isCapture,
         camera_metadata_t *request,
         Buffers *buffers) {
     Mutex::Autolock lock(mInputMutex);
@@ -883,6 +1131,7 @@ void EmulatedFakeCamera2::ReadoutThread::setNextOperation(
         mParent->signalError();
         return;
     }
+    mInFlightQueue[mInFlightTail].isCapture = isCapture;
     mInFlightQueue[mInFlightTail].request = request;
     mInFlightQueue[mInFlightTail].buffers = buffers;
     mInFlightTail = (mInFlightTail + 1) % kInFlightQueueSize;
@@ -952,6 +1201,7 @@ bool EmulatedFakeCamera2::ReadoutThread::threadLoop() {
     } else {
         Mutex::Autolock iLock(mInternalsMutex);
         mReadySignal.signal();
+        mIsCapture = mInFlightQueue[mInFlightHead].isCapture;
         mRequest = mInFlightQueue[mInFlightHead].request;
         mBuffers = mInFlightQueue[mInFlightHead].buffers;
         mInFlightQueue[mInFlightHead].request = NULL;
@@ -967,15 +1217,30 @@ bool EmulatedFakeCamera2::ReadoutThread::threadLoop() {
 
     nsecs_t captureTime;
 
-    bool gotFrame;
-    gotFrame = mParent->mSensor->waitForNewFrame(kWaitPerLoop,
-            &captureTime);
-
-    if (!gotFrame) return true;
+    if (mIsCapture) {
+        bool gotFrame;
+        gotFrame = mParent->mSensor->waitForNewFrame(kWaitPerLoop,
+                &captureTime);
+
+        if (!gotFrame) return true;
+    }
 
     Mutex::Autolock iLock(mInternalsMutex);
 
     camera_metadata_entry_t entry;
+    if (!mIsCapture) {
+        res = find_camera_metadata_entry(mRequest,
+                ANDROID_SENSOR_TIMESTAMP,
+                &entry);
+        if (res != NO_ERROR) {
+            ALOGE("%s: error reading reprocessing timestamp: %s (%d)",
+                    __FUNCTION__, strerror(-res), res);
+            mParent->signalError();
+            return false;
+        }
+        captureTime = entry.data.i64[0];
+    }
+
     res = find_camera_metadata_entry(mRequest,
             ANDROID_REQUEST_FRAME_COUNT,
             &entry);
@@ -1027,31 +1292,34 @@ bool EmulatedFakeCamera2::ReadoutThread::threadLoop() {
         ALOGE("Unable to append request metadata");
     }
 
-    add_camera_metadata_entry(frame,
-            ANDROID_SENSOR_TIMESTAMP,
-            &captureTime,
-            1);
-
-    int32_t hourOfDay = (int32_t)mParent->mSensor->getScene().getHour();
-    camera_metadata_entry_t requestedHour;
-    res = find_camera_metadata_entry(frame,
-            EMULATOR_SCENE_HOUROFDAY,
-            &requestedHour);
-    if (res == NAME_NOT_FOUND) {
-        res = add_camera_metadata_entry(frame,
+    if (mIsCapture) {
+        add_camera_metadata_entry(frame,
+                ANDROID_SENSOR_TIMESTAMP,
+                &captureTime,
+                1);
+
+        int32_t hourOfDay = (int32_t)mParent->mSensor->getScene().getHour();
+        camera_metadata_entry_t requestedHour;
+        res = find_camera_metadata_entry(frame,
                 EMULATOR_SCENE_HOUROFDAY,
-                &hourOfDay, 1);
-        if (res != NO_ERROR) {
-            ALOGE("Unable to add vendor tag");
+                &requestedHour);
+        if (res == NAME_NOT_FOUND) {
+            res = add_camera_metadata_entry(frame,
+                    EMULATOR_SCENE_HOUROFDAY,
+                    &hourOfDay, 1);
+            if (res != NO_ERROR) {
+                ALOGE("Unable to add vendor tag");
+            }
+        } else if (res == OK) {
+            *requestedHour.data.i32 = hourOfDay;
+        } else {
+            ALOGE("%s: Error looking up vendor tag", __FUNCTION__);
         }
-    } else if (res == OK) {
-        *requestedHour.data.i32 = hourOfDay;
-    } else {
-        ALOGE("%s: Error looking up vendor tag", __FUNCTION__);
-    }
 
-    collectStatisticsMetadata(frame);
-    // TODO: Collect all final values used from sensor in addition to timestamp
+        collectStatisticsMetadata(frame);
+        // TODO: Collect all final values used from sensor in addition to timestamp
+    }
 
     ALOGV("Readout: Enqueue frame %d", frameNumber);
     mParent->mFrameQueueDst->enqueue_frame(mParent->mFrameQueueDst,
             frame);
@@ -1072,13 +1340,13 @@ bool EmulatedFakeCamera2::ReadoutThread::threadLoop() {
         const StreamBuffer &b = (*mBuffers)[i];
         ALOGV("Readout: Buffer %d: Stream %d, %d x %d, format 0x%x, stride %d",
                 i, b.streamId, b.width, b.height, b.format, b.stride);
-        if (b.streamId >= 0) {
+        if (b.streamId > 0) {
             if (b.format == HAL_PIXEL_FORMAT_BLOB) {
                 // Assumes only one BLOB buffer type per capture
                 compressedBufferIndex = i;
             } else {
-                ALOGV("Readout: Sending image buffer %d to output stream %d",
-                        i, b.streamId);
+                ALOGV("Readout: Sending image buffer %d (%p) to output stream %d",
+                        i, (void*)*(b.buffer), b.streamId);
                 GraphicBufferMapper::get().unlock(*(b.buffer));
                 const Stream &s = mParent->getStreamInfo(b.streamId);
                 res = s.ops->enqueue_buffer(s.ops, captureTime, b.buffer);
@@ -1253,6 +1521,8 @@ status_t EmulatedFakeCamera2::ControlThread::readyToRun() {
     mAeState = ANDROID_CONTROL_AE_STATE_INACTIVE;
     mAwbState = ANDROID_CONTROL_AWB_STATE_INACTIVE;
 
+    mExposureTime = kNormalExposureTime;
+
     mInputSignal.signal();
     return NO_ERROR;
 }
@@ -1308,13 +1578,24 @@ status_t EmulatedFakeCamera2::ControlThread::processRequest(camera_metadata_t *r
             &mode);
     mAwbMode = mode.data.u8[0];
 
-    // TODO: Override control fields
+    // TODO: Override more control fields
+
+    if (mAeMode != ANDROID_CONTROL_AE_OFF) {
+        camera_metadata_entry_t exposureTime;
+        res = find_camera_metadata_entry(request,
+                ANDROID_SENSOR_EXPOSURE_TIME,
+                &exposureTime);
+        if (res == OK) {
+            exposureTime.data.i64[0] = mExposureTime;
+        }
+    }
 
     return OK;
 }
 
 status_t EmulatedFakeCamera2::ControlThread::triggerAction(uint32_t msgType,
         int32_t ext1, int32_t ext2) {
+    ALOGV("%s: Triggering %d (%d, %d)", __FUNCTION__, msgType, ext1, ext2);
     Mutex::Autolock lock(mInputMutex);
     switch (msgType) {
         case CAMERA2_TRIGGER_AUTOFOCUS:
@@ -1339,12 +1620,24 @@ status_t EmulatedFakeCamera2::ControlThread::triggerAction(uint32_t msgType,
     return OK;
 }
 
-const nsecs_t EmulatedFakeCamera2::ControlThread::kControlCycleDelay = 100000000;
-const nsecs_t EmulatedFakeCamera2::ControlThread::kMinAfDuration = 500000000;
-const nsecs_t EmulatedFakeCamera2::ControlThread::kMaxAfDuration = 900000000;
+const nsecs_t EmulatedFakeCamera2::ControlThread::kControlCycleDelay = 100 * MSEC;
+const nsecs_t EmulatedFakeCamera2::ControlThread::kMinAfDuration = 500 * MSEC;
+const nsecs_t EmulatedFakeCamera2::ControlThread::kMaxAfDuration = 900 * MSEC;
 const float EmulatedFakeCamera2::ControlThread::kAfSuccessRate = 0.9;
+// Once every 5 seconds
 const float EmulatedFakeCamera2::ControlThread::kContinuousAfStartRate =
-        kControlCycleDelay / 5000000000.0; // Once every 5 seconds
+        kControlCycleDelay / 5.0 * SEC;
+const nsecs_t EmulatedFakeCamera2::ControlThread::kMinAeDuration = 500 * MSEC;
+const nsecs_t EmulatedFakeCamera2::ControlThread::kMaxAeDuration = 2 * SEC;
+const nsecs_t EmulatedFakeCamera2::ControlThread::kMinPrecaptureAeDuration = 100 * MSEC;
+const nsecs_t EmulatedFakeCamera2::ControlThread::kMaxPrecaptureAeDuration = 400 * MSEC;
+// Once every 3 seconds
+const float EmulatedFakeCamera2::ControlThread::kAeScanStartRate =
+        kControlCycleDelay / 3000000000.0;
+
+const nsecs_t EmulatedFakeCamera2::ControlThread::kNormalExposureTime = 10 * MSEC;
+const nsecs_t EmulatedFakeCamera2::ControlThread::kExposureJump = 5 * MSEC;
+const nsecs_t EmulatedFakeCamera2::ControlThread::kMinExposureTime = 1 * MSEC;
 
 bool EmulatedFakeCamera2::ControlThread::threadLoop() {
     bool afModeChange = false;
@@ -1353,14 +1646,20 @@ bool EmulatedFakeCamera2::ControlThread::threadLoop() {
     uint8_t afState;
     uint8_t afMode;
     int32_t afTriggerId;
+    bool precaptureTriggered = false;
+    uint8_t aeState;
+    uint8_t aeMode;
+    int32_t precaptureTriggerId;
     nsecs_t nextSleep = kControlCycleDelay;
 
     {
         Mutex::Autolock lock(mInputMutex);
         if (mStartAf) {
+            ALOGD("Starting AF trigger processing");
             afTriggered = true;
             mStartAf = false;
         } else if (mCancelAf) {
+            ALOGD("Starting cancel AF trigger processing");
             afCancelled = true;
             mCancelAf = false;
         }
@@ -1370,6 +1669,15 @@ bool EmulatedFakeCamera2::ControlThread::threadLoop() {
         mAfModeChange = false;
 
         afTriggerId = mAfTriggerId;
+
+        if(mStartPrecapture) {
+            ALOGD("Starting precapture trigger processing");
+            precaptureTriggered = true;
+            mStartPrecapture = false;
+        }
+        aeState = mAeState;
+        aeMode = mAeMode;
+        precaptureTriggerId = mPrecaptureTriggerId;
     }
 
     if (afCancelled || afModeChange) {
@@ -1392,6 +1700,16 @@ bool EmulatedFakeCamera2::ControlThread::threadLoop() {
 
     updateAfState(afState, afTriggerId);
 
+    if (precaptureTriggered) {
+        aeState = processPrecaptureTrigger(aeMode, aeState);
+    }
+
+    aeState = maybeStartAeScan(aeMode, aeState);
+
+    aeState = updateAeScan(aeMode, aeState, &nextSleep);
+
+    updateAeState(aeState, precaptureTriggerId);
+
     int ret;
     timespec t;
     t.tv_sec = 0;
@@ -1400,6 +1718,13 @@ bool EmulatedFakeCamera2::ControlThread::threadLoop() {
         ret = nanosleep(&t, &t);
     } while (ret != 0);
 
+    if (mAfScanDuration > 0) {
+        mAfScanDuration -= nextSleep;
+    }
+    if (mAeScanDuration > 0) {
+        mAeScanDuration -= nextSleep;
+    }
+
     return true;
 }
@@ -1504,7 +1829,7 @@ int EmulatedFakeCamera2::ControlThread::updateAfScan(uint8_t afMode,
         return afState;
     }
 
-    if (mAfScanDuration == 0) {
+    if (mAfScanDuration <= 0) {
         ALOGV("%s: AF scan done", __FUNCTION__);
         switch (afMode) {
             case ANDROID_CONTROL_AF_MACRO:
@@ -1534,9 +1859,6 @@ int EmulatedFakeCamera2::ControlThread::updateAfScan(uint8_t afMode,
     } else {
         if (mAfScanDuration <= *maxSleep) {
             *maxSleep = mAfScanDuration;
-            mAfScanDuration = 0;
-        } else {
-            mAfScanDuration -= *maxSleep;
         }
     }
     return afState;
@@ -1554,6 +1876,97 @@ void EmulatedFakeCamera2::ControlThread::updateAfState(uint8_t newState,
     }
 }
 
+int EmulatedFakeCamera2::ControlThread::processPrecaptureTrigger(uint8_t aeMode,
+        uint8_t aeState) {
+    switch (aeMode) {
+        case ANDROID_CONTROL_AE_OFF:
+        case ANDROID_CONTROL_AE_LOCKED:
+            // Don't do anything for these
+            return aeState;
+        case ANDROID_CONTROL_AE_ON:
+        case ANDROID_CONTROL_AE_ON_AUTO_FLASH:
+        case ANDROID_CONTROL_AE_ON_ALWAYS_FLASH:
+        case ANDROID_CONTROL_AE_ON_AUTO_FLASH_REDEYE:
+            // Trigger a precapture cycle
+            aeState = ANDROID_CONTROL_AE_STATE_PRECAPTURE;
+            mAeScanDuration = ((double)rand() / RAND_MAX) *
+                    (kMaxPrecaptureAeDuration - kMinPrecaptureAeDuration) +
+                    kMinPrecaptureAeDuration;
+            ALOGD("%s: AE precapture scan start, duration %lld ms",
+                    __FUNCTION__, mAeScanDuration / 1000000);
+    }
+    return aeState;
+}
+
+int EmulatedFakeCamera2::ControlThread::maybeStartAeScan(uint8_t aeMode,
+        uint8_t aeState) {
+    switch (aeMode) {
+        case ANDROID_CONTROL_AE_OFF:
+        case ANDROID_CONTROL_AE_LOCKED:
+            // Don't do anything for these
+            break;
+        case ANDROID_CONTROL_AE_ON:
+        case ANDROID_CONTROL_AE_ON_AUTO_FLASH:
+        case ANDROID_CONTROL_AE_ON_ALWAYS_FLASH:
+        case ANDROID_CONTROL_AE_ON_AUTO_FLASH_REDEYE: {
+            if (aeState != ANDROID_CONTROL_AE_STATE_INACTIVE &&
+                    aeState != ANDROID_CONTROL_AE_STATE_CONVERGED) break;
+
+            bool startScan = ((double)rand() / RAND_MAX) < kAeScanStartRate;
+            if (startScan) {
+                mAeScanDuration = ((double)rand() / RAND_MAX) *
+                        (kMaxAeDuration - kMinAeDuration) + kMinAeDuration;
+                aeState = ANDROID_CONTROL_AE_STATE_SEARCHING;
+                ALOGD("%s: AE scan start, duration %lld ms",
+                        __FUNCTION__, mAeScanDuration / 1000000);
+            }
+        }
+    }
+
+    return aeState;
+}
+
+int EmulatedFakeCamera2::ControlThread::updateAeScan(uint8_t aeMode,
+        uint8_t aeState, nsecs_t *maxSleep) {
+    if ((aeState == ANDROID_CONTROL_AE_STATE_SEARCHING) ||
+            (aeState == ANDROID_CONTROL_AE_STATE_PRECAPTURE ) ) {
+        if (mAeScanDuration <= 0) {
+            ALOGD("%s: AE scan done", __FUNCTION__);
+            aeState = ANDROID_CONTROL_AE_STATE_CONVERGED;
+
+            Mutex::Autolock lock(mInputMutex);
+            mExposureTime = kNormalExposureTime;
+        } else {
+            if (mAeScanDuration <= *maxSleep) {
+                *maxSleep = mAeScanDuration;
+            }
+
+            int64_t exposureDelta =
+                    ((double)rand() / RAND_MAX) * 2 * kExposureJump -
+                    kExposureJump;
+            Mutex::Autolock lock(mInputMutex);
+            mExposureTime = mExposureTime + exposureDelta;
+            if (mExposureTime < kMinExposureTime) mExposureTime = kMinExposureTime;
+        }
+    }
+
+    return aeState;
+}
+
+void EmulatedFakeCamera2::ControlThread::updateAeState(uint8_t newState,
+        int32_t triggerId) {
+    Mutex::Autolock lock(mInputMutex);
+    if (mAeState != newState) {
+        ALOGD("%s: Autoexposure state now %d, id %d", __FUNCTION__,
+                newState, triggerId);
+        mAeState = newState;
+        mParent->sendNotification(CAMERA2_MSG_AUTOEXPOSURE,
+                newState, triggerId, 0);
+    }
+}
+
 /** Private methods */
 
 status_t EmulatedFakeCamera2::constructStaticInfo(
@@ -1894,12 +2307,11 @@ status_t EmulatedFakeCamera2::constructDefaultRequest(
     if ( ( ret = addOrSize(*request, sizeRequest, &entryCount, &dataCount, \
             tag, data, count) ) != OK ) return ret
 
-    static const int64_t USEC = 1000LL;
-    static const int64_t MSEC = USEC * 1000LL;
-    static const int64_t SEC = MSEC * 1000LL;
-
     /** android.request */
 
+    static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
+    ADD_OR_SIZE(ANDROID_REQUEST_TYPE, &requestType, 1);
+
     static const uint8_t metadataMode = ANDROID_REQUEST_METADATA_FULL;
     ADD_OR_SIZE(ANDROID_REQUEST_METADATA_MODE, &metadataMode, 1);
 
@@ -2250,7 +2662,12 @@ bool EmulatedFakeCamera2::isStreamInUse(uint32_t id) {
             return true;
         }
     }
     return false;
 }
 
+bool EmulatedFakeCamera2::isReprocessStreamInUse(uint32_t id) {
+    // TODO: implement
+    return false;
+}
+
 const Stream& EmulatedFakeCamera2::getStreamInfo(uint32_t streamId) {
     Mutex::Autolock lock(mMutex);
 
@@ -2258,4 +2675,10 @@ const Stream& EmulatedFakeCamera2::getStreamInfo(uint32_t streamId) {
     return mStreams.valueFor(streamId);
 }
 
+const ReprocessStream& EmulatedFakeCamera2::getReprocessStreamInfo(uint32_t streamId) {
+    Mutex::Autolock lock(mMutex);
+
+    return mReprocessStreams.valueFor(streamId);
+}
+
 }; /* namespace android */
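As a rough usage sketch (again illustrative, not part of the commit): a client drives the new precapture path through the existing trigger_action device op and then watches for autoexposure notifications. The constant names below are the ones I recall from the camera2 HAL header; treat them as assumptions, and the trigger id is an arbitrary client choice:

    // Kick off precapture metering with a client-chosen trigger id.
    dev->ops->trigger_action(dev, CAMERA2_TRIGGER_PRECAPTURE_METERING,
            /*ext1: trigger id*/ 42, /*ext2: unused*/ 0);

    // The ControlThread then reports progress through the notify callback as
    // CAMERA2_MSG_AUTOEXPOSURE messages: AE_STATE_PRECAPTURE while the
    // simulated scan runs, then AE_STATE_CONVERGED once it completes.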

================================================================================

@@ -110,7 +110,12 @@ protected:
     //         uint32_t *usage,
     //         uint32_t *max_buffers);
-    // virtual int releaseReprocessStream(uint32_t stream_id);
+
+    virtual int allocateReprocessStreamFromStream(
+            uint32_t output_stream_id,
+            const camera2_stream_in_ops_t *stream_ops,
+            uint32_t *stream_id);
+
+    virtual int releaseReprocessStream(uint32_t stream_id);
 
     virtual int triggerAction(uint32_t trigger_id,
             int32_t ext1,
@@ -132,6 +137,7 @@ public:
     // Get information about a given stream. Will lock mMutex
     const Stream &getStreamInfo(uint32_t streamId);
+    const ReprocessStream &getReprocessStreamInfo(uint32_t streamId);
 
     // Notifies rest of camera subsystem of serious error
     void signalError();
@@ -163,6 +169,10 @@ private:
      * requests. Assumes mMutex is locked */
     bool isStreamInUse(uint32_t streamId);
 
+    /** Determine if the reprocess stream id is listed in any
+     * currently-in-flight requests. Assumes mMutex is locked */
+    bool isReprocessStreamInUse(uint32_t streamId);
+
     /****************************************************************************
      * Pipeline controller threads
      ***************************************************************************/
@@ -180,10 +190,19 @@ private:
         int getInProgressCount();
       private:
         EmulatedFakeCamera2 *mParent;
+        static const nsecs_t kWaitPerLoop = 10000000L; // 10 ms
 
         bool mRunning;
         bool threadLoop();
 
+        bool setupCapture();
+        bool setupReprocess();
+
+        bool configureNextCapture();
+        bool configureNextReprocess();
+
+        bool getBuffers();
+
         Mutex mInputMutex; // Protects mActive, mRequestCount
         Condition mInputSignal;
         bool mActive; // Whether we're waiting for input requests or actively
@@ -195,6 +214,7 @@ private:
         Mutex mInternalsMutex; // Lock before accessing below members.
         bool mWaitingForReadout;
         bool mNextNeedsJpeg;
+        bool mNextIsCapture;
         int32_t mNextFrameNumber;
         int64_t mNextExposureTime;
         int64_t mNextFrameDuration;
@@ -212,9 +232,9 @@ private:
         // Input
         status_t waitUntilRunning();
         bool waitForReady(nsecs_t timeout);
-        void setNextCapture(camera_metadata_t *request,
+        void setNextOperation(bool isCapture,
+                camera_metadata_t *request,
                 Buffers *buffers);
 
         bool isStreamInUse(uint32_t id);
         int getInProgressCount();
       private:
@@ -235,6 +255,7 @@ private:
         static const int kInFlightQueueSize = 4;
         struct InFlightQueue {
+            bool isCapture;
             camera_metadata_t *request;
             Buffers *buffers;
         } *mInFlightQueue;
@@ -246,6 +267,8 @@ private:
         // Internals
         Mutex mInternalsMutex;
+
+        bool mIsCapture;
         camera_metadata_t *mRequest;
         Buffers *mBuffers;
 
@@ -278,6 +301,16 @@ private:
         static const float kAfSuccessRate;
         static const float kContinuousAfStartRate;
 
+        static const float kAeScanStartRate;
+        static const nsecs_t kMinAeDuration;
+        static const nsecs_t kMaxAeDuration;
+        static const nsecs_t kMinPrecaptureAeDuration;
+        static const nsecs_t kMaxPrecaptureAeDuration;
+
+        static const nsecs_t kNormalExposureTime;
+        static const nsecs_t kExposureJump;
+        static const nsecs_t kMinExposureTime;
+
         EmulatedFakeCamera2 *mParent;
 
         bool mRunning;
@@ -312,17 +345,26 @@ private:
         uint8_t mAeState;
         uint8_t mAwbState;
 
+        // Current control parameters
+        nsecs_t mExposureTime;
+
         // Private to threadLoop and its utility methods
         nsecs_t mAfScanDuration;
+        nsecs_t mAeScanDuration;
         bool mLockAfterPassiveScan;
 
-        // Utility methods
+        // Utility methods for AF
         int processAfTrigger(uint8_t afMode, uint8_t afState);
         int maybeStartAfScan(uint8_t afMode, uint8_t afState);
         int updateAfScan(uint8_t afMode, uint8_t afState, nsecs_t *maxSleep);
         void updateAfState(uint8_t newState, int32_t triggerId);
 
+        // Utility methods for precapture trigger
+        int processPrecaptureTrigger(uint8_t aeMode, uint8_t aeState);
+        int maybeStartAeScan(uint8_t aeMode, uint8_t aeState);
+        int updateAeScan(uint8_t aeMode, uint8_t aeState, nsecs_t *maxSleep);
+        void updateAeState(uint8_t newState, int32_t triggerId);
+
     };
 
     /****************************************************************************
@@ -332,6 +374,7 @@ private:
     static const uint32_t kMaxRawStreamCount = 1;
     static const uint32_t kMaxProcessedStreamCount = 3;
    static const uint32_t kMaxJpegStreamCount = 1;
+    static const uint32_t kMaxReprocessStreamCount = 2;
     static const uint32_t kMaxBufferCount = 4;
     static const uint32_t kAvailableFormats[];
     static const uint32_t kAvailableRawSizes[];
@@ -358,7 +401,11 @@ private:
     uint32_t mProcessedStreamCount;
     uint32_t mJpegStreamCount;
 
+    uint32_t mNextReprocessStreamId;
+    uint32_t mReprocessStreamCount;
+
     KeyedVector<uint32_t, Stream> mStreams;
+    KeyedVector<uint32_t, ReprocessStream> mReprocessStreams;
 
     /** Simulated hardware interfaces */
     sp<Sensor> mSensor;

================================================================================

@@ -31,6 +31,9 @@ namespace android {
 
 /* Internal structure for passing buffers across threads */
 struct StreamBuffer {
+    // Positive numbers are output streams
+    // Negative numbers are input reprocess streams
+    // Zero is an auxillary buffer
     int streamId;
     uint32_t width, height;
     uint32_t format;
@@ -47,6 +50,15 @@ struct Stream {
     uint32_t stride;
 };
 
+struct ReprocessStream {
+    const camera2_stream_in_ops_t *ops;
+    uint32_t width, height;
+    int32_t format;
+    uint32_t stride;
+    // -1 if the reprocessing stream is independent
+    int32_t sourceStreamId;
+};
+
 } // namespace android;
 
 #endif
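A compact way to read the streamId convention introduced above: the pipeline routes every buffer by the sign of its id, which is exactly how the ConfigureThread's getBuffers() and the JpegCompressor's cleanUp() branch. A hypothetical helper (a sketch, not code from this commit) that mirrors the convention:

    // Illustrative only: summarizes the StreamBuffer.streamId convention.
    static const char *bufferRole(int streamId) {
        if (streamId > 0) return "output (dequeue/enqueue via Stream ops)";
        if (streamId < 0) return "reprocess input (acquire/release via "
                                 "ReprocessStream ops, stream id = -streamId)";
        return "auxiliary (heap-allocated staging, e.g. RGB for JPEG)";
    }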

================================================================================

@@ -77,7 +77,8 @@ bool JpegCompressor::threadLoop() {
     Mutex::Autolock lock(mMutex);
     ALOGV("%s: Starting compression thread", __FUNCTION__);
 
-    // Find source and target buffers
+    // Find source and target buffers. Assumes only one buffer matches
+    // each condition!
     bool foundJpeg = false, mFoundAux = false;
     for (size_t i = 0; i < mBuffers->size(); i++) {
@@ -85,7 +86,7 @@ bool JpegCompressor::threadLoop() {
         if (b.format == HAL_PIXEL_FORMAT_BLOB) {
             mJpegBuffer = b;
             mFoundJpeg = true;
-        } else if (b.streamId == -1) {
+        } else if (b.streamId <= 0) {
             mAuxBuffer = b;
             mFoundAux = true;
         }
@@ -216,11 +217,24 @@ bool JpegCompressor::checkError(const char *msg) {
 }
 
 void JpegCompressor::cleanUp() {
+    status_t res;
     jpeg_destroy_compress(&mCInfo);
     Mutex::Autolock lock(mBusyMutex);
 
     if (mFoundAux) {
-        delete[] mAuxBuffer.img;
+        if (mAuxBuffer.streamId == 0) {
+            delete[] mAuxBuffer.img;
+        } else {
+            GraphicBufferMapper::get().unlock(*(mAuxBuffer.buffer));
+            const ReprocessStream &s =
+                    mParent->getReprocessStreamInfo(-mAuxBuffer.streamId);
+            res = s.ops->release_buffer(s.ops, mAuxBuffer.buffer);
+            if (res != OK) {
+                ALOGE("Error releasing reprocess buffer %p: %s (%d)",
+                        mAuxBuffer.buffer, strerror(-res), res);
+                mParent->signalError();
+            }
+        }
     }
     delete mBuffers;
     mBuffers = NULL;

================================================================================

@@ -293,6 +293,8 @@ bool Sensor::threadLoop() {
                 (float)exposureDuration/1e6, gain);
         mScene.setExposureDuration((float)exposureDuration/1e9);
         mScene.calculateScene(mNextCaptureTime);
+
+        // Might be adding more buffers, so size isn't constant
         for (size_t i = 0; i < mNextCapturedBuffers->size(); i++) {
             const StreamBuffer &b = (*mNextCapturedBuffers)[i];
             ALOGVV("Sensor capturing buffer %d: stream %d,"
@@ -303,6 +305,9 @@ bool Sensor::threadLoop() {
                 case HAL_PIXEL_FORMAT_RAW_SENSOR:
                     captureRaw(b.img, gain, b.stride);
                     break;
+                case HAL_PIXEL_FORMAT_RGB_888:
+                    captureRGB(b.img, gain, b.stride);
+                    break;
                 case HAL_PIXEL_FORMAT_RGBA_8888:
                     captureRGBA(b.img, gain, b.stride);
                     break;
@@ -311,7 +316,7 @@ bool Sensor::threadLoop() {
                     // Assumes only one BLOB (JPEG) buffer in
                     // mNextCapturedBuffers
                     StreamBuffer bAux;
-                    bAux.streamId = -1;
+                    bAux.streamId = 0;
                     bAux.width = b.width;
                     bAux.height = b.height;
                     bAux.format = HAL_PIXEL_FORMAT_RGB_888;
@@ -319,7 +324,6 @@ bool Sensor::threadLoop() {
                     bAux.buffer = NULL;
                     // TODO: Reuse these
                     bAux.img = new uint8_t[b.width * b.height * 3];
-                    captureRGB(bAux.img, gain, b.stride);
                     mNextCapturedBuffers->push_back(bAux);
                     break;
                 case HAL_PIXEL_FORMAT_YCrCb_420_SP: