Merge "Camera3: Add flush support to HEIC composite streams" into rvc-dev

gugelfrei
TreeHugger Robot 4 years ago committed by Android (Google) Code Review
commit 22e5822304

@ -28,19 +28,19 @@
namespace android {
namespace camera3 {
CompositeStream::CompositeStream(wp<CameraDeviceBase> device,
CompositeStream::CompositeStream(sp<CameraDeviceBase> device,
wp<hardware::camera2::ICameraDeviceCallbacks> cb) :
mDevice(device),
mRemoteCallback(cb),
mNumPartialResults(1),
mErrorState(false) {
sp<CameraDeviceBase> cameraDevice = device.promote();
if (cameraDevice.get() != nullptr) {
CameraMetadata staticInfo = cameraDevice->info();
if (device != nullptr) {
CameraMetadata staticInfo = device->info();
camera_metadata_entry_t entry = staticInfo.find(ANDROID_REQUEST_PARTIAL_RESULT_COUNT);
if (entry.count > 0) {
mNumPartialResults = entry.data.i32[0];
}
mStatusTracker = device->getStatusTracker();
}
}
@ -174,7 +174,7 @@ bool CompositeStream::onError(int32_t errorCode, const CaptureResultExtras& resu
ret = onStreamBufferError(resultExtras);
break;
case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_REQUEST:
// Invalid request, this shouldn't affect composite streams.
onRequestError(resultExtras);
break;
default:
ALOGE("%s: Unrecoverable error: %d detected!", __FUNCTION__, errorCode);
@ -186,7 +186,7 @@ bool CompositeStream::onError(int32_t errorCode, const CaptureResultExtras& resu
return ret;
}
void CompositeStream::notifyError(int64_t frameNumber) {
void CompositeStream::notifyError(int64_t frameNumber, int32_t requestId) {
sp<hardware::camera2::ICameraDeviceCallbacks> remoteCb =
mRemoteCallback.promote();
@ -194,6 +194,7 @@ void CompositeStream::notifyError(int64_t frameNumber) {
CaptureResultExtras extras;
extras.errorStreamId = getStreamId();
extras.frameNumber = frameNumber;
extras.requestId = requestId;
remoteCb->onDeviceError(
hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_BUFFER,
extras);

@ -38,7 +38,7 @@ namespace camera3 {
class CompositeStream : public camera3::Camera3StreamBufferListener {
public:
CompositeStream(wp<CameraDeviceBase> device, wp<hardware::camera2::ICameraDeviceCallbacks> cb);
CompositeStream(sp<CameraDeviceBase> device, wp<hardware::camera2::ICameraDeviceCallbacks> cb);
virtual ~CompositeStream() {}
status_t createStream(const std::vector<sp<Surface>>& consumers,
@ -95,7 +95,7 @@ protected:
status_t registerCompositeStreamListener(int32_t streamId);
void eraseResult(int64_t frameNumber);
void flagAnErrorFrameNumber(int64_t frameNumber);
void notifyError(int64_t frameNumber);
void notifyError(int64_t frameNumber, int32_t requestId);
// Subclasses should check for buffer errors from internal streams and return 'true' in
// case the error notification should remain within camera service.
@ -105,11 +105,16 @@ protected:
// internal processing needs result data.
virtual void onResultError(const CaptureResultExtras& resultExtras) = 0;
// Subclasses can decide how to handle request errors depending on whether
// or not the internal processing needs clean up.
virtual void onRequestError(const CaptureResultExtras& /*resultExtras*/) {}
// Device and/or service is in unrecoverable error state.
// Composite streams should behave accordingly.
void enableErrorState();
wp<CameraDeviceBase> mDevice;
wp<camera3::StatusTracker> mStatusTracker;
wp<hardware::camera2::ICameraDeviceCallbacks> mRemoteCallback;
mutable Mutex mMutex;

@ -29,7 +29,7 @@
namespace android {
namespace camera3 {
DepthCompositeStream::DepthCompositeStream(wp<CameraDeviceBase> device,
DepthCompositeStream::DepthCompositeStream(sp<CameraDeviceBase> device,
wp<hardware::camera2::ICameraDeviceCallbacks> cb) :
CompositeStream(device, cb),
mBlobStreamId(-1),
@ -43,9 +43,8 @@ DepthCompositeStream::DepthCompositeStream(wp<CameraDeviceBase> device,
mProducerListener(new ProducerListener()),
mMaxJpegSize(-1),
mIsLogicalCamera(false) {
sp<CameraDeviceBase> cameraDevice = device.promote();
if (cameraDevice.get() != nullptr) {
CameraMetadata staticInfo = cameraDevice->info();
if (device != nullptr) {
CameraMetadata staticInfo = device->info();
auto entry = staticInfo.find(ANDROID_JPEG_MAX_SIZE);
if (entry.count > 0) {
mMaxJpegSize = entry.data.i32[0];
@ -385,7 +384,8 @@ void DepthCompositeStream::releaseInputFrameLocked(InputFrame *inputFrame /*out*
}
if ((inputFrame->error || mErrorState) && !inputFrame->errorNotified) {
notifyError(inputFrame->frameNumber);
//TODO: Figure out correct requestId
notifyError(inputFrame->frameNumber, -1 /*requestId*/);
inputFrame->errorNotified = true;
}
}

@ -41,7 +41,7 @@ class DepthCompositeStream : public CompositeStream, public Thread,
public CpuConsumer::FrameAvailableListener {
public:
DepthCompositeStream(wp<CameraDeviceBase> device,
DepthCompositeStream(sp<CameraDeviceBase> device,
wp<hardware::camera2::ICameraDeviceCallbacks> cb);
~DepthCompositeStream() override;
@ -80,8 +80,9 @@ private:
bool error;
bool errorNotified;
int64_t frameNumber;
int32_t requestId;
InputFrame() : error(false), errorNotified(false), frameNumber(-1) { }
InputFrame() : error(false), errorNotified(false), frameNumber(-1), requestId(-1) { }
};
// Helper methods

@ -45,7 +45,7 @@ using android::hardware::camera::device::V3_5::CameraBlobId;
namespace android {
namespace camera3 {
HeicCompositeStream::HeicCompositeStream(wp<CameraDeviceBase> device,
HeicCompositeStream::HeicCompositeStream(sp<CameraDeviceBase> device,
wp<hardware::camera2::ICameraDeviceCallbacks> cb) :
CompositeStream(device, cb),
mUseHeic(false),
@ -68,7 +68,8 @@ HeicCompositeStream::HeicCompositeStream(wp<CameraDeviceBase> device,
mLockedAppSegmentBufferCnt(0),
mCodecOutputCounter(0),
mQuality(-1),
mGridTimestampUs(0) {
mGridTimestampUs(0),
mStatusId(StatusTracker::NO_STATUS_ID) {
}
HeicCompositeStream::~HeicCompositeStream() {
@ -188,9 +189,17 @@ status_t HeicCompositeStream::createInternalStreams(const std::vector<sp<Surface
}
mOutputSurface = consumers[0];
res = registerCompositeStreamListener(getStreamId());
res = registerCompositeStreamListener(mMainImageStreamId);
if (res != OK) {
ALOGE("%s: Failed to register HAL main image stream", __FUNCTION__);
ALOGE("%s: Failed to register HAL main image stream: %s (%d)", __FUNCTION__,
strerror(-res), res);
return res;
}
res = registerCompositeStreamListener(mAppSegmentStreamId);
if (res != OK) {
ALOGE("%s: Failed to register HAL app segment stream: %s (%d)", __FUNCTION__,
strerror(-res), res);
return res;
}
@ -224,6 +233,19 @@ status_t HeicCompositeStream::deleteInternalStreams() {
mOutputSurface->disconnect(NATIVE_WINDOW_API_CAMERA);
mOutputSurface.clear();
}
sp<StatusTracker> statusTracker = mStatusTracker.promote();
if (statusTracker != nullptr && mStatusId != StatusTracker::NO_STATUS_ID) {
statusTracker->removeComponent(mStatusId);
mStatusId = StatusTracker::NO_STATUS_ID;
}
if (mPendingInputFrames.size() > 0) {
ALOGW("%s: mPendingInputFrames has %zu stale entries",
__FUNCTION__, mPendingInputFrames.size());
mPendingInputFrames.clear();
}
return res;
}
@ -232,9 +254,16 @@ void HeicCompositeStream::onBufferReleased(const BufferInfo& bufferInfo) {
if (bufferInfo.mError) return;
mCodecOutputBufferTimestamps.push(bufferInfo.mTimestamp);
ALOGV("%s: [%" PRId64 "]: Adding codecOutputBufferTimestamp (%zu timestamps in total)",
__FUNCTION__, bufferInfo.mTimestamp, mCodecOutputBufferTimestamps.size());
if (bufferInfo.mStreamId == mMainImageStreamId) {
mMainImageFrameNumbers.push(bufferInfo.mFrameNumber);
mCodecOutputBufferFrameNumbers.push(bufferInfo.mFrameNumber);
ALOGV("%s: [%" PRId64 "]: Adding main image frame number (%zu frame numbers in total)",
__FUNCTION__, bufferInfo.mFrameNumber, mMainImageFrameNumbers.size());
} else if (bufferInfo.mStreamId == mAppSegmentStreamId) {
mAppSegmentFrameNumbers.push(bufferInfo.mFrameNumber);
ALOGV("%s: [%" PRId64 "]: Adding app segment frame number (%zu frame numbers in total)",
__FUNCTION__, bufferInfo.mFrameNumber, mAppSegmentFrameNumbers.size());
}
}
// We need to get the settings early to handle the case where the codec output
@ -264,7 +293,7 @@ void HeicCompositeStream::onBufferRequestForFrameNumber(uint64_t frameNumber, in
quality = entry.data.i32[0];
}
mSettingsByFrameNumber[frameNumber] = std::make_pair(orientation, quality);
mSettingsByFrameNumber[frameNumber] = {orientation, quality};
}
void HeicCompositeStream::onFrameAvailable(const BufferItem& item) {
@ -479,6 +508,11 @@ status_t HeicCompositeStream::configureStream() {
return res;
}
sp<camera3::StatusTracker> statusTracker = mStatusTracker.promote();
if (statusTracker != nullptr) {
mStatusId = statusTracker->addComponent();
}
run("HeicCompositeStreamProc");
return NO_ERROR;
@ -524,30 +558,44 @@ void HeicCompositeStream::onShutter(const CaptureResultExtras& resultExtras, nse
}
if (mSettingsByFrameNumber.find(resultExtras.frameNumber) != mSettingsByFrameNumber.end()) {
ALOGV("%s: [%" PRId64 "]: frameNumber %" PRId64, __FUNCTION__,
timestamp, resultExtras.frameNumber);
mFrameNumberMap.emplace(resultExtras.frameNumber, timestamp);
mSettingsByTimestamp[timestamp] = mSettingsByFrameNumber[resultExtras.frameNumber];
mSettingsByFrameNumber.erase(resultExtras.frameNumber);
ALOGV("%s: [%" PRId64 "]: timestamp %" PRId64 ", requestId %d", __FUNCTION__,
resultExtras.frameNumber, timestamp, resultExtras.requestId);
mSettingsByFrameNumber[resultExtras.frameNumber].shutterNotified = true;
mSettingsByFrameNumber[resultExtras.frameNumber].timestamp = timestamp;
mSettingsByFrameNumber[resultExtras.frameNumber].requestId = resultExtras.requestId;
mInputReadyCondition.signal();
}
}
void HeicCompositeStream::compilePendingInputLocked() {
while (!mSettingsByTimestamp.empty()) {
auto it = mSettingsByTimestamp.begin();
mPendingInputFrames[it->first].orientation = it->second.first;
mPendingInputFrames[it->first].quality = it->second.second;
mSettingsByTimestamp.erase(it);
// Set encoder quality if no inflight encoding
if (mPendingInputFrames.size() == 1) {
int32_t newQuality = mPendingInputFrames.begin()->second.quality;
updateCodecQualityLocked(newQuality);
auto i = mSettingsByFrameNumber.begin();
while (i != mSettingsByFrameNumber.end()) {
if (i->second.shutterNotified) {
mPendingInputFrames[i->first].orientation = i->second.orientation;
mPendingInputFrames[i->first].quality = i->second.quality;
mPendingInputFrames[i->first].timestamp = i->second.timestamp;
mPendingInputFrames[i->first].requestId = i->second.requestId;
ALOGV("%s: [%" PRId64 "]: timestamp is %" PRId64, __FUNCTION__,
i->first, i->second.timestamp);
i = mSettingsByFrameNumber.erase(i);
// Set encoder quality if no inflight encoding
if (mPendingInputFrames.size() == 1) {
sp<StatusTracker> statusTracker = mStatusTracker.promote();
if (statusTracker != nullptr) {
statusTracker->markComponentActive(mStatusId);
ALOGV("%s: Mark component as active", __FUNCTION__);
}
int32_t newQuality = mPendingInputFrames.begin()->second.quality;
updateCodecQualityLocked(newQuality);
}
} else {
i++;
}
}
while (!mInputAppSegmentBuffers.empty()) {
while (!mInputAppSegmentBuffers.empty() && mAppSegmentFrameNumbers.size() > 0) {
CpuConsumer::LockedBuffer imgBuffer;
auto it = mInputAppSegmentBuffers.begin();
auto res = mAppSegmentConsumer->lockNextBuffer(&imgBuffer);
@ -569,17 +617,29 @@ void HeicCompositeStream::compilePendingInputLocked() {
continue;
}
if ((mPendingInputFrames.find(imgBuffer.timestamp) != mPendingInputFrames.end()) &&
(mPendingInputFrames[imgBuffer.timestamp].error)) {
if (mPendingInputFrames.find(mAppSegmentFrameNumbers.front()) == mPendingInputFrames.end()) {
ALOGE("%s: mPendingInputFrames doesn't contain frameNumber %" PRId64, __FUNCTION__,
mAppSegmentFrameNumbers.front());
mInputYuvBuffers.erase(it);
continue;
}
int64_t frameNumber = mAppSegmentFrameNumbers.front();
// If mPendingInputFrames doesn't contain the expected frame number, the captured
// input app segment frame must have been dropped via a buffer error. Simply
// return the buffer to the buffer queue.
if ((mPendingInputFrames.find(frameNumber) == mPendingInputFrames.end()) ||
(mPendingInputFrames[frameNumber].error)) {
mAppSegmentConsumer->unlockBuffer(imgBuffer);
} else {
mPendingInputFrames[imgBuffer.timestamp].appSegmentBuffer = imgBuffer;
mPendingInputFrames[frameNumber].appSegmentBuffer = imgBuffer;
mLockedAppSegmentBufferCnt++;
}
mInputAppSegmentBuffers.erase(it);
mAppSegmentFrameNumbers.pop();
}
while (!mInputYuvBuffers.empty() && !mYuvBufferAcquired) {
while (!mInputYuvBuffers.empty() && !mYuvBufferAcquired && mMainImageFrameNumbers.size() > 0) {
CpuConsumer::LockedBuffer imgBuffer;
auto it = mInputYuvBuffers.begin();
auto res = mMainImageConsumer->lockNextBuffer(&imgBuffer);
@ -600,59 +660,67 @@ void HeicCompositeStream::compilePendingInputLocked() {
continue;
}
if ((mPendingInputFrames.find(imgBuffer.timestamp) != mPendingInputFrames.end()) &&
(mPendingInputFrames[imgBuffer.timestamp].error)) {
if (mPendingInputFrames.find(mMainImageFrameNumbers.front()) == mPendingInputFrames.end()) {
ALOGE("%s: mPendingInputFrames doesn't contain frameNumber %" PRId64, __FUNCTION__,
mMainImageFrameNumbers.front());
mInputYuvBuffers.erase(it);
continue;
}
int64_t frameNumber = mMainImageFrameNumbers.front();
// If mPendingInputFrames doesn't contain the expected frame number, the captured
// input main image must have been dropped via a buffer error. Simply
// return the buffer to the buffer queue.
if ((mPendingInputFrames.find(frameNumber) == mPendingInputFrames.end()) ||
(mPendingInputFrames[frameNumber].error)) {
mMainImageConsumer->unlockBuffer(imgBuffer);
} else {
mPendingInputFrames[imgBuffer.timestamp].yuvBuffer = imgBuffer;
mPendingInputFrames[frameNumber].yuvBuffer = imgBuffer;
mYuvBufferAcquired = true;
}
mInputYuvBuffers.erase(it);
mMainImageFrameNumbers.pop();
}
while (!mCodecOutputBuffers.empty()) {
auto it = mCodecOutputBuffers.begin();
// Bitstream buffer timestamp doesn't necessarily directly correlate with input
// buffer timestamp. Assume encoder input to output is FIFO, use a queue
// to look up timestamp.
int64_t bufferTime = -1;
if (mCodecOutputBufferTimestamps.empty()) {
ALOGV("%s: Failed to find buffer timestamp for codec output buffer!", __FUNCTION__);
// Assume encoder input to output is FIFO, use a queue to look up
// frameNumber when handling codec outputs.
int64_t bufferFrameNumber = -1;
if (mCodecOutputBufferFrameNumbers.empty()) {
ALOGV("%s: Failed to find buffer frameNumber for codec output buffer!", __FUNCTION__);
break;
} else {
// Direct mapping between camera timestamp (in ns) and codec timestamp (in us).
bufferTime = mCodecOutputBufferTimestamps.front();
// Direct mapping between camera frame number and codec timestamp (in us).
bufferFrameNumber = mCodecOutputBufferFrameNumbers.front();
mCodecOutputCounter++;
if (mCodecOutputCounter == mNumOutputTiles) {
mCodecOutputBufferTimestamps.pop();
mCodecOutputBufferFrameNumbers.pop();
mCodecOutputCounter = 0;
}
mPendingInputFrames[bufferTime].codecOutputBuffers.push_back(*it);
ALOGV("%s: [%" PRId64 "]: Pushing codecOutputBuffers (time %" PRId64 " us)",
__FUNCTION__, bufferTime, it->timeUs);
mPendingInputFrames[bufferFrameNumber].codecOutputBuffers.push_back(*it);
ALOGV("%s: [%" PRId64 "]: Pushing codecOutputBuffers (frameNumber %" PRId64 ")",
__FUNCTION__, bufferFrameNumber, it->timeUs);
}
mCodecOutputBuffers.erase(it);
}
while (!mFrameNumberMap.empty()) {
auto it = mFrameNumberMap.begin();
mPendingInputFrames[it->second].frameNumber = it->first;
ALOGV("%s: [%" PRId64 "]: frameNumber is %" PRId64, __FUNCTION__, it->second, it->first);
mFrameNumberMap.erase(it);
}
while (!mCaptureResults.empty()) {
auto it = mCaptureResults.begin();
// Negative timestamp indicates that something went wrong during the capture result
// Negative frame number indicates that something went wrong during the capture result
// collection process.
if (it->first >= 0) {
if (mPendingInputFrames[it->first].frameNumber == std::get<0>(it->second)) {
mPendingInputFrames[it->first].result =
int64_t frameNumber = std::get<0>(it->second);
if (it->first >= 0 &&
mPendingInputFrames.find(frameNumber) != mPendingInputFrames.end()) {
if (mPendingInputFrames[frameNumber].timestamp == it->first) {
mPendingInputFrames[frameNumber].result =
std::make_unique<CameraMetadata>(std::get<1>(it->second));
} else {
ALOGE("%s: Capture result frameNumber/timestamp mapping changed between "
"shutter and capture result!", __FUNCTION__);
"shutter and capture result! before: %" PRId64 ", after: %" PRId64,
__FUNCTION__, mPendingInputFrames[frameNumber].timestamp,
it->first);
}
}
mCaptureResults.erase(it);
@ -661,22 +729,24 @@ void HeicCompositeStream::compilePendingInputLocked() {
// mErrorFrameNumbers stores frame number of dropped buffers.
auto it = mErrorFrameNumbers.begin();
while (it != mErrorFrameNumbers.end()) {
bool frameFound = false;
for (auto &inputFrame : mPendingInputFrames) {
if (inputFrame.second.frameNumber == *it) {
inputFrame.second.error = true;
frameFound = true;
break;
}
}
if (frameFound) {
it = mErrorFrameNumbers.erase(it);
if (mPendingInputFrames.find(*it) != mPendingInputFrames.end()) {
mPendingInputFrames[*it].error = true;
} else {
//Error callback is guaranteed to arrive after shutter notify, which
//results in mPendingInputFrames being populated.
ALOGW("%s: Not able to find failing input with frame number: %" PRId64, __FUNCTION__,
*it);
it++;
}
it = mErrorFrameNumbers.erase(it);
}
// mExifErrorFrameNumbers stores the frame number of dropped APP_SEGMENT buffers
it = mExifErrorFrameNumbers.begin();
while (it != mExifErrorFrameNumbers.end()) {
if (mPendingInputFrames.find(*it) != mPendingInputFrames.end()) {
mPendingInputFrames[*it].exifError = true;
}
it = mExifErrorFrameNumbers.erase(it);
}
// Distribute codec input buffers to be filled out from YUV output
@ -701,8 +771,8 @@ void HeicCompositeStream::compilePendingInputLocked() {
}
}
bool HeicCompositeStream::getNextReadyInputLocked(int64_t *currentTs /*out*/) {
if (currentTs == nullptr) {
bool HeicCompositeStream::getNextReadyInputLocked(int64_t *frameNumber /*out*/) {
if (frameNumber == nullptr) {
return false;
}
@ -715,7 +785,8 @@ bool HeicCompositeStream::getNextReadyInputLocked(int64_t *currentTs /*out*/) {
// This makes sure that muxer gets created only when an output tile is
// generated, because right now we only handle 1 HEIC output buffer at a
// time (max dequeued buffer count is 1).
bool appSegmentReady = (it.second.appSegmentBuffer.data != nullptr) &&
bool appSegmentReady =
(it.second.appSegmentBuffer.data != nullptr || it.second.exifError) &&
!it.second.appSegmentWritten && it.second.result != nullptr &&
it.second.muxer != nullptr;
bool codecOutputReady = !it.second.codecOutputBuffers.empty();
@ -724,9 +795,8 @@ bool HeicCompositeStream::getNextReadyInputLocked(int64_t *currentTs /*out*/) {
bool hasOutputBuffer = it.second.muxer != nullptr ||
(mDequeuedOutputBufferCnt < kMaxOutputSurfaceProducerCount);
if ((!it.second.error) &&
(it.first < *currentTs) &&
(appSegmentReady || (codecOutputReady && hasOutputBuffer) || codecInputReady)) {
*currentTs = it.first;
*frameNumber = it.first;
if (it.second.format == nullptr && mFormat != nullptr) {
it.second.format = mFormat->dup();
}
@ -738,16 +808,12 @@ bool HeicCompositeStream::getNextReadyInputLocked(int64_t *currentTs /*out*/) {
return newInputAvailable;
}
int64_t HeicCompositeStream::getNextFailingInputLocked(int64_t *currentTs /*out*/) {
int64_t HeicCompositeStream::getNextFailingInputLocked() {
int64_t res = -1;
if (currentTs == nullptr) {
return res;
}
for (const auto& it : mPendingInputFrames) {
if (it.second.error && !it.second.errorNotified && (it.first < *currentTs)) {
*currentTs = it.first;
res = it.second.frameNumber;
if (it.second.error) {
res = it.first;
break;
}
}
@ -755,12 +821,13 @@ int64_t HeicCompositeStream::getNextFailingInputLocked(int64_t *currentTs /*out*
return res;
}
status_t HeicCompositeStream::processInputFrame(nsecs_t timestamp,
status_t HeicCompositeStream::processInputFrame(int64_t frameNumber,
InputFrame &inputFrame) {
ATRACE_CALL();
status_t res = OK;
bool appSegmentReady = inputFrame.appSegmentBuffer.data != nullptr &&
bool appSegmentReady =
(inputFrame.appSegmentBuffer.data != nullptr || inputFrame.exifError) &&
!inputFrame.appSegmentWritten && inputFrame.result != nullptr &&
inputFrame.muxer != nullptr;
bool codecOutputReady = inputFrame.codecOutputBuffers.size() > 0;
@ -770,8 +837,9 @@ status_t HeicCompositeStream::processInputFrame(nsecs_t timestamp,
(mDequeuedOutputBufferCnt < kMaxOutputSurfaceProducerCount);
ALOGV("%s: [%" PRId64 "]: appSegmentReady %d, codecOutputReady %d, codecInputReady %d,"
" dequeuedOutputBuffer %d", __FUNCTION__, timestamp, appSegmentReady,
codecOutputReady, codecInputReady, mDequeuedOutputBufferCnt);
" dequeuedOutputBuffer %d, timestamp %" PRId64, __FUNCTION__, frameNumber,
appSegmentReady, codecOutputReady, codecInputReady, mDequeuedOutputBufferCnt,
inputFrame.timestamp);
// Handle inputs for Hevc tiling
if (codecInputReady) {
@ -791,7 +859,7 @@ status_t HeicCompositeStream::processInputFrame(nsecs_t timestamp,
// codecOutputReady must be true. Otherwise, appSegmentReady is guaranteed
// to be false, and the function must have returned early.
if (inputFrame.muxer == nullptr) {
res = startMuxerForInputFrame(timestamp, inputFrame);
res = startMuxerForInputFrame(frameNumber, inputFrame);
if (res != OK) {
ALOGE("%s: Failed to create and start muxer: %s (%d)", __FUNCTION__,
strerror(-res), res);
@ -801,7 +869,7 @@ status_t HeicCompositeStream::processInputFrame(nsecs_t timestamp,
// Write JPEG APP segments data to the muxer.
if (appSegmentReady) {
res = processAppSegment(timestamp, inputFrame);
res = processAppSegment(frameNumber, inputFrame);
if (res != OK) {
ALOGE("%s: Failed to process JPEG APP segments: %s (%d)", __FUNCTION__,
strerror(-res), res);
@ -811,7 +879,7 @@ status_t HeicCompositeStream::processInputFrame(nsecs_t timestamp,
// Write media codec bitstream buffers to muxer.
while (!inputFrame.codecOutputBuffers.empty()) {
res = processOneCodecOutputFrame(timestamp, inputFrame);
res = processOneCodecOutputFrame(frameNumber, inputFrame);
if (res != OK) {
ALOGE("%s: Failed to process codec output frame: %s (%d)", __FUNCTION__,
strerror(-res), res);
@ -821,7 +889,7 @@ status_t HeicCompositeStream::processInputFrame(nsecs_t timestamp,
if (inputFrame.pendingOutputTiles == 0) {
if (inputFrame.appSegmentWritten) {
res = processCompletedInputFrame(timestamp, inputFrame);
res = processCompletedInputFrame(frameNumber, inputFrame);
if (res != OK) {
ALOGE("%s: Failed to process completed input frame: %s (%d)", __FUNCTION__,
strerror(-res), res);
@ -837,7 +905,7 @@ status_t HeicCompositeStream::processInputFrame(nsecs_t timestamp,
return res;
}
status_t HeicCompositeStream::startMuxerForInputFrame(nsecs_t timestamp, InputFrame &inputFrame) {
status_t HeicCompositeStream::startMuxerForInputFrame(int64_t frameNumber, InputFrame &inputFrame) {
sp<ANativeWindow> outputANW = mOutputSurface;
auto res = outputANW->dequeueBuffer(mOutputSurface.get(), &inputFrame.anb, &inputFrame.fenceFd);
@ -851,7 +919,7 @@ status_t HeicCompositeStream::startMuxerForInputFrame(nsecs_t timestamp, InputFr
// Combine current thread id, stream id and timestamp to uniquely identify image.
std::ostringstream tempOutputFile;
tempOutputFile << "HEIF-" << pthread_self() << "-"
<< getStreamId() << "-" << timestamp;
<< getStreamId() << "-" << frameNumber;
inputFrame.fileFd = syscall(__NR_memfd_create, tempOutputFile.str().c_str(), MFD_CLOEXEC);
if (inputFrame.fileFd < 0) {
ALOGE("%s: Failed to create file %s. Error no is %d", __FUNCTION__,
@ -889,22 +957,27 @@ status_t HeicCompositeStream::startMuxerForInputFrame(nsecs_t timestamp, InputFr
}
ALOGV("%s: [%" PRId64 "]: Muxer started for inputFrame", __FUNCTION__,
timestamp);
frameNumber);
return OK;
}
status_t HeicCompositeStream::processAppSegment(nsecs_t timestamp, InputFrame &inputFrame) {
status_t HeicCompositeStream::processAppSegment(int64_t frameNumber, InputFrame &inputFrame) {
size_t app1Size = 0;
auto appSegmentSize = findAppSegmentsSize(inputFrame.appSegmentBuffer.data,
inputFrame.appSegmentBuffer.width * inputFrame.appSegmentBuffer.height,
&app1Size);
if (appSegmentSize == 0) {
ALOGE("%s: Failed to find JPEG APP segment size", __FUNCTION__);
return NO_INIT;
size_t appSegmentSize = 0;
if (!inputFrame.exifError) {
appSegmentSize = findAppSegmentsSize(inputFrame.appSegmentBuffer.data,
inputFrame.appSegmentBuffer.width * inputFrame.appSegmentBuffer.height,
&app1Size);
if (appSegmentSize == 0) {
ALOGE("%s: Failed to find JPEG APP segment size", __FUNCTION__);
return NO_INIT;
}
}
std::unique_ptr<ExifUtils> exifUtils(ExifUtils::create());
auto exifRes = exifUtils->initialize(inputFrame.appSegmentBuffer.data, app1Size);
auto exifRes = inputFrame.exifError ?
exifUtils->initializeEmpty() :
exifUtils->initialize(inputFrame.appSegmentBuffer.data, app1Size);
if (!exifRes) {
ALOGE("%s: Failed to initialize ExifUtils object!", __FUNCTION__);
return BAD_VALUE;
@ -945,7 +1018,7 @@ status_t HeicCompositeStream::processAppSegment(nsecs_t timestamp, InputFrame &i
sp<ABuffer> aBuffer = new ABuffer(appSegmentBuffer, appSegmentBufferSize);
auto res = inputFrame.muxer->writeSampleData(aBuffer, inputFrame.trackIndex,
timestamp, MediaCodec::BUFFER_FLAG_MUXER_DATA);
inputFrame.timestamp, MediaCodec::BUFFER_FLAG_MUXER_DATA);
delete[] appSegmentBuffer;
if (res != OK) {
@ -955,13 +1028,14 @@ status_t HeicCompositeStream::processAppSegment(nsecs_t timestamp, InputFrame &i
}
ALOGV("%s: [%" PRId64 "]: appSegmentSize is %zu, width %d, height %d, app1Size %zu",
__FUNCTION__, timestamp, appSegmentSize, inputFrame.appSegmentBuffer.width,
__FUNCTION__, frameNumber, appSegmentSize, inputFrame.appSegmentBuffer.width,
inputFrame.appSegmentBuffer.height, app1Size);
inputFrame.appSegmentWritten = true;
// Release the buffer now so any pending input app segments can be processed
mAppSegmentConsumer->unlockBuffer(inputFrame.appSegmentBuffer);
inputFrame.appSegmentBuffer.data = nullptr;
inputFrame.exifError = false;
mLockedAppSegmentBufferCnt--;
return OK;
@ -1010,7 +1084,7 @@ status_t HeicCompositeStream::processCodecInputFrame(InputFrame &inputFrame) {
return OK;
}
status_t HeicCompositeStream::processOneCodecOutputFrame(nsecs_t timestamp,
status_t HeicCompositeStream::processOneCodecOutputFrame(int64_t frameNumber,
InputFrame &inputFrame) {
auto it = inputFrame.codecOutputBuffers.begin();
sp<MediaCodecBuffer> buffer;
@ -1028,7 +1102,7 @@ status_t HeicCompositeStream::processOneCodecOutputFrame(nsecs_t timestamp,
sp<ABuffer> aBuffer = new ABuffer(buffer->data(), buffer->size());
res = inputFrame.muxer->writeSampleData(
aBuffer, inputFrame.trackIndex, timestamp, 0 /*flags*/);
aBuffer, inputFrame.trackIndex, inputFrame.timestamp, 0 /*flags*/);
if (res != OK) {
ALOGE("%s: Failed to write buffer index %d to muxer: %s (%d)",
__FUNCTION__, it->index, strerror(-res), res);
@ -1045,11 +1119,11 @@ status_t HeicCompositeStream::processOneCodecOutputFrame(nsecs_t timestamp,
inputFrame.codecOutputBuffers.erase(inputFrame.codecOutputBuffers.begin());
ALOGV("%s: [%" PRId64 "]: Output buffer index %d",
__FUNCTION__, timestamp, it->index);
__FUNCTION__, frameNumber, it->index);
return OK;
}
status_t HeicCompositeStream::processCompletedInputFrame(nsecs_t timestamp,
status_t HeicCompositeStream::processCompletedInputFrame(int64_t frameNumber,
InputFrame &inputFrame) {
sp<ANativeWindow> outputANW = mOutputSurface;
inputFrame.muxer->stop();
@ -1088,7 +1162,7 @@ status_t HeicCompositeStream::processCompletedInputFrame(nsecs_t timestamp,
blobHeader->blobId = static_cast<CameraBlobId>(0x00FE);
blobHeader->blobSize = fSize;
res = native_window_set_buffers_timestamp(mOutputSurface.get(), timestamp);
res = native_window_set_buffers_timestamp(mOutputSurface.get(), inputFrame.timestamp);
if (res != OK) {
ALOGE("%s: Stream %d: Error setting timestamp: %s (%d)",
__FUNCTION__, getStreamId(), strerror(-res), res);
@ -1104,13 +1178,14 @@ status_t HeicCompositeStream::processCompletedInputFrame(nsecs_t timestamp,
inputFrame.anb = nullptr;
mDequeuedOutputBufferCnt--;
ALOGV("%s: [%" PRId64 "]", __FUNCTION__, timestamp);
ATRACE_ASYNC_END("HEIC capture", inputFrame.frameNumber);
ALOGV("%s: [%" PRId64 "]", __FUNCTION__, frameNumber);
ATRACE_ASYNC_END("HEIC capture", frameNumber);
return OK;
}
void HeicCompositeStream::releaseInputFrameLocked(InputFrame *inputFrame /*out*/) {
void HeicCompositeStream::releaseInputFrameLocked(int64_t frameNumber,
InputFrame *inputFrame /*out*/) {
if (inputFrame == nullptr) {
return;
}
@ -1138,9 +1213,9 @@ void HeicCompositeStream::releaseInputFrameLocked(InputFrame *inputFrame /*out*/
inputFrame->codecInputBuffers.erase(it);
}
if ((inputFrame->error || mErrorState) && !inputFrame->errorNotified) {
notifyError(inputFrame->frameNumber);
inputFrame->errorNotified = true;
if (inputFrame->error || mErrorState) {
ALOGV("%s: notifyError called for frameNumber %" PRId64, __FUNCTION__, frameNumber);
notifyError(frameNumber, inputFrame->requestId);
}
if (inputFrame->fileFd >= 0) {
@ -1152,6 +1227,8 @@ void HeicCompositeStream::releaseInputFrameLocked(InputFrame *inputFrame /*out*/
sp<ANativeWindow> outputANW = mOutputSurface;
outputANW->cancelBuffer(mOutputSurface.get(), inputFrame->anb, /*fence*/ -1);
inputFrame->anb = nullptr;
mDequeuedOutputBufferCnt--;
}
}
@ -1161,8 +1238,8 @@ void HeicCompositeStream::releaseInputFramesLocked() {
while (it != mPendingInputFrames.end()) {
auto& inputFrame = it->second;
if (inputFrame.error ||
(inputFrame.appSegmentWritten && inputFrame.pendingOutputTiles == 0)) {
releaseInputFrameLocked(&inputFrame);
(inputFrame.appSegmentWritten && inputFrame.pendingOutputTiles == 0)) {
releaseInputFrameLocked(it->first, &inputFrame);
it = mPendingInputFrames.erase(it);
inputFrameDone = true;
} else {
@ -1179,6 +1256,8 @@ void HeicCompositeStream::releaseInputFramesLocked() {
auto firstPendingFrame = mPendingInputFrames.begin();
if (firstPendingFrame != mPendingInputFrames.end()) {
updateCodecQualityLocked(firstPendingFrame->second.quality);
} else {
markTrackerIdle();
}
}
}
@ -1397,20 +1476,6 @@ size_t HeicCompositeStream::findAppSegmentsSize(const uint8_t* appSegmentBuffer,
return expectedSize;
}
int64_t HeicCompositeStream::findTimestampInNsLocked(int64_t timeInUs) {
for (const auto& fn : mFrameNumberMap) {
if (timeInUs == ns2us(fn.second)) {
return fn.second;
}
}
for (const auto& inputFrame : mPendingInputFrames) {
if (timeInUs == ns2us(inputFrame.first)) {
return inputFrame.first;
}
}
return -1;
}
status_t HeicCompositeStream::copyOneYuvTile(sp<MediaCodecBuffer>& codecBuffer,
const CpuConsumer::LockedBuffer& yuvBuffer,
size_t top, size_t left, size_t width, size_t height) {
@ -1584,7 +1649,7 @@ void HeicCompositeStream::updateCodecQualityLocked(int32_t quality) {
}
bool HeicCompositeStream::threadLoop() {
int64_t currentTs = INT64_MAX;
int64_t frameNumber = -1;
bool newInputAvailable = false;
{
@ -1600,19 +1665,25 @@ bool HeicCompositeStream::threadLoop() {
while (!newInputAvailable) {
compilePendingInputLocked();
newInputAvailable = getNextReadyInputLocked(&currentTs);
newInputAvailable = getNextReadyInputLocked(&frameNumber);
if (!newInputAvailable) {
auto failingFrameNumber = getNextFailingInputLocked(&currentTs);
auto failingFrameNumber = getNextFailingInputLocked();
if (failingFrameNumber >= 0) {
// We cannot erase 'mPendingInputFrames[currentTs]' at this point because it is
// possible for two internal stream buffers to fail. In such scenario the
// composite stream should notify the client about a stream buffer error only
// once and this information is kept within 'errorNotified'.
// Any present failed input frames will be removed on a subsequent call to
// 'releaseInputFramesLocked()'.
releaseInputFrameLocked(&mPendingInputFrames[currentTs]);
currentTs = INT64_MAX;
releaseInputFrameLocked(failingFrameNumber,
&mPendingInputFrames[failingFrameNumber]);
// It's okay to remove the entry from mPendingInputFrames
// because:
// 1. Only one internal stream (main input) is critical in
// backing the output stream.
// 2. If captureResult/appSegment arrives after the entry is
// removed, they are simply skipped.
mPendingInputFrames.erase(failingFrameNumber);
if (mPendingInputFrames.size() == 0) {
markTrackerIdle();
}
return true;
}
auto ret = mInputReadyCondition.waitRelative(mMutex, kWaitDuration);
@ -1627,12 +1698,13 @@ bool HeicCompositeStream::threadLoop() {
}
}
auto res = processInputFrame(currentTs, mPendingInputFrames[currentTs]);
auto res = processInputFrame(frameNumber, mPendingInputFrames[frameNumber]);
Mutex::Autolock l(mMutex);
if (res != OK) {
ALOGE("%s: Failed processing frame with timestamp: %" PRIu64 ": %s (%d)",
__FUNCTION__, currentTs, strerror(-res), res);
mPendingInputFrames[currentTs].error = true;
ALOGE("%s: Failed processing frame with timestamp: %" PRIu64 ", frameNumber: %"
PRId64 ": %s (%d)", __FUNCTION__, mPendingInputFrames[frameNumber].timestamp,
frameNumber, strerror(-res), res);
mPendingInputFrames[frameNumber].error = true;
}
releaseInputFramesLocked();
@ -1640,14 +1712,26 @@ bool HeicCompositeStream::threadLoop() {
return true;
}
void HeicCompositeStream::flagAnExifErrorFrameNumber(int64_t frameNumber) {
Mutex::Autolock l(mMutex);
mExifErrorFrameNumbers.emplace(frameNumber);
mInputReadyCondition.signal();
}
bool HeicCompositeStream::onStreamBufferError(const CaptureResultExtras& resultExtras) {
bool res = false;
int64_t frameNumber = resultExtras.frameNumber;
// Buffer errors concerning internal composite streams should not be directly visible to
// camera clients. They must only receive a single buffer error with the public composite
// stream id.
if ((resultExtras.errorStreamId == mAppSegmentStreamId) ||
(resultExtras.errorStreamId == mMainImageStreamId)) {
flagAnErrorFrameNumber(resultExtras.frameNumber);
if (resultExtras.errorStreamId == mAppSegmentStreamId) {
ALOGV("%s: APP_SEGMENT frameNumber: %" PRId64, __FUNCTION__, frameNumber);
flagAnExifErrorFrameNumber(frameNumber);
res = true;
} else if (resultExtras.errorStreamId == mMainImageStreamId) {
ALOGV("%s: YUV frameNumber: %" PRId64, __FUNCTION__, frameNumber);
flagAnErrorFrameNumber(frameNumber);
res = true;
}
@ -1660,16 +1744,16 @@ void HeicCompositeStream::onResultError(const CaptureResultExtras& resultExtras)
Mutex::Autolock l(mMutex);
int64_t timestamp = -1;
for (const auto& fn : mFrameNumberMap) {
for (const auto& fn : mSettingsByFrameNumber) {
if (fn.first == resultExtras.frameNumber) {
timestamp = fn.second;
timestamp = fn.second.timestamp;
break;
}
}
if (timestamp == -1) {
for (const auto& inputFrame : mPendingInputFrames) {
if (inputFrame.second.frameNumber == resultExtras.frameNumber) {
timestamp = inputFrame.first;
if (inputFrame.first == resultExtras.frameNumber) {
timestamp = inputFrame.second.timestamp;
break;
}
}
@ -1681,9 +1765,33 @@ void HeicCompositeStream::onResultError(const CaptureResultExtras& resultExtras)
}
mCaptureResults.emplace(timestamp, std::make_tuple(resultExtras.frameNumber, CameraMetadata()));
ALOGV("%s: timestamp %" PRId64 ", frameNumber %" PRId64, __FUNCTION__,
timestamp, resultExtras.frameNumber);
mInputReadyCondition.signal();
}
void HeicCompositeStream::onRequestError(const CaptureResultExtras& resultExtras) {
auto frameNumber = resultExtras.frameNumber;
ALOGV("%s: frameNumber: %" PRId64, __FUNCTION__, frameNumber);
Mutex::Autolock l(mMutex);
auto numRequests = mSettingsByFrameNumber.erase(frameNumber);
if (numRequests == 0) {
// Pending request has been populated into mPendingInputFrames
mErrorFrameNumbers.emplace(frameNumber);
mInputReadyCondition.signal();
} else {
// REQUEST_ERROR was received without onShutter.
}
}
// Tell the camera device's status tracker that this composite stream has no
// pending work. The tracker is held weakly, so promote it first; if the
// device is already gone there is nothing to notify.
void HeicCompositeStream::markTrackerIdle() {
    auto tracker = mStatusTracker.promote();
    if (tracker == nullptr) {
        return;
    }
    tracker->markComponentIdle(mStatusId, Fence::NO_FENCE);
    ALOGV("%s: Mark component as idle", __FUNCTION__);
}
void HeicCompositeStream::CodecCallbackHandler::onMessageReceived(const sp<AMessage> &msg) {
sp<HeicCompositeStream> parent = mParent.promote();
if (parent == nullptr) return;

@ -37,7 +37,7 @@ namespace camera3 {
class HeicCompositeStream : public CompositeStream, public Thread,
public CpuConsumer::FrameAvailableListener {
public:
HeicCompositeStream(wp<CameraDeviceBase> device,
HeicCompositeStream(sp<CameraDeviceBase> device,
wp<hardware::camera2::ICameraDeviceCallbacks> cb);
~HeicCompositeStream() override;
@ -81,6 +81,7 @@ protected:
bool threadLoop() override;
bool onStreamBufferError(const CaptureResultExtras& resultExtras) override;
void onResultError(const CaptureResultExtras& resultExtras) override;
void onRequestError(const CaptureResultExtras& resultExtras) override;
private:
//
@ -156,9 +157,10 @@ private:
CpuConsumer::LockedBuffer yuvBuffer;
std::vector<CodecInputBufferInfo> codecInputBuffers;
bool error;
bool errorNotified;
int64_t frameNumber;
bool error; // Main input image buffer error
bool exifError; // Exif/APP_SEGMENT buffer error
int64_t timestamp;
int32_t requestId;
sp<AMessage> format;
sp<MediaMuxer> muxer;
@ -172,30 +174,29 @@ private:
size_t codecInputCounter;
InputFrame() : orientation(0), quality(kDefaultJpegQuality), error(false),
errorNotified(false), frameNumber(-1), fenceFd(-1), fileFd(-1),
trackIndex(-1), anb(nullptr), appSegmentWritten(false),
exifError(false), timestamp(-1), requestId(-1), fenceFd(-1),
fileFd(-1), trackIndex(-1), anb(nullptr), appSegmentWritten(false),
pendingOutputTiles(0), codecInputCounter(0) { }
};
void compilePendingInputLocked();
// Find first complete and valid frame with smallest timestamp
bool getNextReadyInputLocked(int64_t *currentTs /*out*/);
// Find next failing frame number with smallest timestamp and return respective frame number
int64_t getNextFailingInputLocked(int64_t *currentTs /*out*/);
// Find first complete and valid frame with smallest frame number
bool getNextReadyInputLocked(int64_t *frameNumber /*out*/);
// Find next failing frame number with smallest frame number and return respective frame number
int64_t getNextFailingInputLocked();
status_t processInputFrame(nsecs_t timestamp, InputFrame &inputFrame);
status_t processInputFrame(int64_t frameNumber, InputFrame &inputFrame);
status_t processCodecInputFrame(InputFrame &inputFrame);
status_t startMuxerForInputFrame(nsecs_t timestamp, InputFrame &inputFrame);
status_t processAppSegment(nsecs_t timestamp, InputFrame &inputFrame);
status_t processOneCodecOutputFrame(nsecs_t timestamp, InputFrame &inputFrame);
status_t processCompletedInputFrame(nsecs_t timestamp, InputFrame &inputFrame);
status_t startMuxerForInputFrame(int64_t frameNumber, InputFrame &inputFrame);
status_t processAppSegment(int64_t frameNumber, InputFrame &inputFrame);
status_t processOneCodecOutputFrame(int64_t frameNumber, InputFrame &inputFrame);
status_t processCompletedInputFrame(int64_t frameNumber, InputFrame &inputFrame);
void releaseInputFrameLocked(InputFrame *inputFrame /*out*/);
void releaseInputFrameLocked(int64_t frameNumber, InputFrame *inputFrame /*out*/);
void releaseInputFramesLocked();
size_t findAppSegmentsSize(const uint8_t* appSegmentBuffer, size_t maxSize,
size_t* app1SegmentSize);
int64_t findTimestampInNsLocked(int64_t timeInUs);
status_t copyOneYuvTile(sp<MediaCodecBuffer>& codecBuffer,
const CpuConsumer::LockedBuffer& yuvBuffer,
size_t top, size_t left, size_t width, size_t height);
@ -218,12 +219,14 @@ private:
sp<CpuConsumer> mAppSegmentConsumer;
sp<Surface> mAppSegmentSurface;
size_t mAppSegmentMaxSize;
std::queue<int64_t> mAppSegmentFrameNumbers;
CameraMetadata mStaticInfo;
int mMainImageStreamId, mMainImageSurfaceId;
sp<Surface> mMainImageSurface;
sp<CpuConsumer> mMainImageConsumer; // Only applicable for HEVC codec.
bool mYuvBufferAcquired; // Only applicable to HEVC codec
std::queue<int64_t> mMainImageFrameNumbers;
static const int32_t kMaxOutputSurfaceProducerCount = 1;
sp<Surface> mOutputSurface;
@ -231,9 +234,22 @@ private:
int32_t mDequeuedOutputBufferCnt;
// Map from frame number to JPEG setting of orientation+quality
std::map<int64_t, std::pair<int32_t, int32_t>> mSettingsByFrameNumber;
// Map from timestamp to JPEG setting of orientation+quality
std::map<int64_t, std::pair<int32_t, int32_t>> mSettingsByTimestamp;
// Per-request JPEG/HEIC settings, keyed by frame number in
// mSettingsByFrameNumber. Uses in-class default member initializers so the
// two constructors only spell out what differs.
struct HeicSettings {
    int32_t orientation = 0;      // JPEG orientation in degrees
    int32_t quality = 95;         // JPEG quality (1-100)
    int64_t timestamp = 0;        // Shutter timestamp, filled in on onShutter
    int32_t requestId = -1;       // Capture request id, -1 until known
    bool shutterNotified = false; // Whether onShutter was delivered

    HeicSettings() = default;
    HeicSettings(int32_t _orientation, int32_t _quality) :
            orientation(_orientation), quality(_quality) {}
};
std::map<int64_t, HeicSettings> mSettingsByFrameNumber;
// Keep all incoming APP segment Blob buffer pending further processing.
std::vector<int64_t> mInputAppSegmentBuffers;
@ -241,7 +257,7 @@ private:
// Keep all incoming HEIC blob buffer pending further processing.
std::vector<CodecOutputBufferInfo> mCodecOutputBuffers;
std::queue<int64_t> mCodecOutputBufferTimestamps;
std::queue<int64_t> mCodecOutputBufferFrameNumbers;
size_t mCodecOutputCounter;
int32_t mQuality;
@ -253,11 +269,19 @@ private:
// Artificial strictly incremental YUV grid timestamp to make encoder happy.
int64_t mGridTimestampUs;
// In most common use case, entries are accessed in order.
// Indexed by frame number. In most common use case, entries are accessed in order.
std::map<int64_t, InputFrame> mPendingInputFrames;
// Function pointer of libyuv row copy.
void (*mFnCopyRow)(const uint8_t* src, uint8_t* dst, int width);
// A set of APP_SEGMENT error frame numbers
std::set<int64_t> mExifErrorFrameNumbers;
void flagAnExifErrorFrameNumber(int64_t frameNumber);
// The status id for tracking the active/idle status of this composite stream
int mStatusId;
void markTrackerIdle();
};
}; // namespace camera3

@ -33,6 +33,7 @@
#include "camera/CaptureResult.h"
#include "gui/IGraphicBufferProducer.h"
#include "device3/Camera3StreamInterface.h"
#include "device3/StatusTracker.h"
#include "binder/Status.h"
#include "FrameProducer.h"
@ -362,6 +363,10 @@ class CameraDeviceBase : public virtual FrameProducer {
virtual status_t setRotateAndCropAutoBehavior(
camera_metadata_enum_android_scaler_rotate_and_crop_t rotateAndCropValue) = 0;
/**
* Get the status tracker of the camera device
*/
virtual wp<camera3::StatusTracker> getStatusTracker() = 0;
};
}; // namespace android

@ -231,6 +231,9 @@ class Camera3Device :
status_t setRotateAndCropAutoBehavior(
camera_metadata_enum_android_scaler_rotate_and_crop_t rotateAndCropValue);
// Get the status tracker for the camera device. Returns a weak reference;
// callers must promote() it before use since the tracker may be torn down.
wp<camera3::StatusTracker> getStatusTracker() { return mStatusTracker; }
/**
* Helper functions to map between framework and HIDL values
*/

@ -269,8 +269,6 @@ status_t Camera3IOStreamBase::returnAnyBufferLocked(
}
}
mBufferReturnedSignal.signal();
if (output) {
mLastTimestamp = timestamp;
}

@ -55,7 +55,6 @@ class Camera3IOStreamBase :
// number of output buffers that are currently acquired by HAL. This will be
// Redundant when camera3 streams are no longer bidirectional streams.
size_t mHandoutOutputBufferCount;
Condition mBufferReturnedSignal;
uint32_t mFrameCount;
// Last received output buffer's timestamp
nsecs_t mLastTimestamp;

@ -814,6 +814,8 @@ void Camera3Stream::fireBufferListenersLocked(
info.mError = (buffer.status == CAMERA3_BUFFER_STATUS_ERROR);
info.mFrameNumber = frameNumber;
info.mTimestamp = timestamp;
info.mStreamId = getId();
// TODO: rest of fields
for (it = mBufferListenerList.begin(), end = mBufferListenerList.end();

@ -29,6 +29,7 @@ class Camera3StreamBufferListener : public virtual RefBase {
public:
struct BufferInfo {
int mStreamId;
bool mOutput; // if false then input buffer
Rect mCrop;
uint32_t mTransform;

Loading…
Cancel
Save