Camera3: Add flush support to HEIC composite streams

- Handle various corner cases with regard to REQUEST_ERROR, RESULT_ERROR, and BUFFER_ERROR.
- Drain the codec outputs in case the input buffer isn't dropped.
- Allow APP_SEGMENT to drop while still producing valid output image.
- Add a status tracker to manage active/idle state.
- Use frame number as key for pending input frames since, with ZSL, two
capture results could have the same timestamp.
- Also removed some deprecated variables/methods.

Test: CTS, vendor testing
Bug: 145579077
Change-Id: I9c3e929469b8fb75b32b016f9006036c954f663f
gugelfrei
Shuzhen Wang 5 years ago
parent 40fae84d5e
commit e867578b74

@ -28,19 +28,19 @@
namespace android { namespace android {
namespace camera3 { namespace camera3 {
CompositeStream::CompositeStream(wp<CameraDeviceBase> device, CompositeStream::CompositeStream(sp<CameraDeviceBase> device,
wp<hardware::camera2::ICameraDeviceCallbacks> cb) : wp<hardware::camera2::ICameraDeviceCallbacks> cb) :
mDevice(device), mDevice(device),
mRemoteCallback(cb), mRemoteCallback(cb),
mNumPartialResults(1), mNumPartialResults(1),
mErrorState(false) { mErrorState(false) {
sp<CameraDeviceBase> cameraDevice = device.promote(); if (device != nullptr) {
if (cameraDevice.get() != nullptr) { CameraMetadata staticInfo = device->info();
CameraMetadata staticInfo = cameraDevice->info();
camera_metadata_entry_t entry = staticInfo.find(ANDROID_REQUEST_PARTIAL_RESULT_COUNT); camera_metadata_entry_t entry = staticInfo.find(ANDROID_REQUEST_PARTIAL_RESULT_COUNT);
if (entry.count > 0) { if (entry.count > 0) {
mNumPartialResults = entry.data.i32[0]; mNumPartialResults = entry.data.i32[0];
} }
mStatusTracker = device->getStatusTracker();
} }
} }
@ -174,7 +174,7 @@ bool CompositeStream::onError(int32_t errorCode, const CaptureResultExtras& resu
ret = onStreamBufferError(resultExtras); ret = onStreamBufferError(resultExtras);
break; break;
case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_REQUEST: case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_REQUEST:
// Invalid request, this shouldn't affect composite streams. onRequestError(resultExtras);
break; break;
default: default:
ALOGE("%s: Unrecoverable error: %d detected!", __FUNCTION__, errorCode); ALOGE("%s: Unrecoverable error: %d detected!", __FUNCTION__, errorCode);
@ -186,7 +186,7 @@ bool CompositeStream::onError(int32_t errorCode, const CaptureResultExtras& resu
return ret; return ret;
} }
void CompositeStream::notifyError(int64_t frameNumber) { void CompositeStream::notifyError(int64_t frameNumber, int32_t requestId) {
sp<hardware::camera2::ICameraDeviceCallbacks> remoteCb = sp<hardware::camera2::ICameraDeviceCallbacks> remoteCb =
mRemoteCallback.promote(); mRemoteCallback.promote();
@ -194,6 +194,7 @@ void CompositeStream::notifyError(int64_t frameNumber) {
CaptureResultExtras extras; CaptureResultExtras extras;
extras.errorStreamId = getStreamId(); extras.errorStreamId = getStreamId();
extras.frameNumber = frameNumber; extras.frameNumber = frameNumber;
extras.requestId = requestId;
remoteCb->onDeviceError( remoteCb->onDeviceError(
hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_BUFFER, hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_BUFFER,
extras); extras);

@ -38,7 +38,7 @@ namespace camera3 {
class CompositeStream : public camera3::Camera3StreamBufferListener { class CompositeStream : public camera3::Camera3StreamBufferListener {
public: public:
CompositeStream(wp<CameraDeviceBase> device, wp<hardware::camera2::ICameraDeviceCallbacks> cb); CompositeStream(sp<CameraDeviceBase> device, wp<hardware::camera2::ICameraDeviceCallbacks> cb);
virtual ~CompositeStream() {} virtual ~CompositeStream() {}
status_t createStream(const std::vector<sp<Surface>>& consumers, status_t createStream(const std::vector<sp<Surface>>& consumers,
@ -95,7 +95,7 @@ protected:
status_t registerCompositeStreamListener(int32_t streamId); status_t registerCompositeStreamListener(int32_t streamId);
void eraseResult(int64_t frameNumber); void eraseResult(int64_t frameNumber);
void flagAnErrorFrameNumber(int64_t frameNumber); void flagAnErrorFrameNumber(int64_t frameNumber);
void notifyError(int64_t frameNumber); void notifyError(int64_t frameNumber, int32_t requestId);
// Subclasses should check for buffer errors from internal streams and return 'true' in // Subclasses should check for buffer errors from internal streams and return 'true' in
// case the error notification should remain within camera service. // case the error notification should remain within camera service.
@ -105,11 +105,16 @@ protected:
// internal processing needs result data. // internal processing needs result data.
virtual void onResultError(const CaptureResultExtras& resultExtras) = 0; virtual void onResultError(const CaptureResultExtras& resultExtras) = 0;
// Subclasses can decide how to handle request errors depending on whether
// or not the internal processing needs clean up.
virtual void onRequestError(const CaptureResultExtras& /*resultExtras*/) {}
// Device and/or service is in unrecoverable error state. // Device and/or service is in unrecoverable error state.
// Composite streams should behave accordingly. // Composite streams should behave accordingly.
void enableErrorState(); void enableErrorState();
wp<CameraDeviceBase> mDevice; wp<CameraDeviceBase> mDevice;
wp<camera3::StatusTracker> mStatusTracker;
wp<hardware::camera2::ICameraDeviceCallbacks> mRemoteCallback; wp<hardware::camera2::ICameraDeviceCallbacks> mRemoteCallback;
mutable Mutex mMutex; mutable Mutex mMutex;

@ -29,7 +29,7 @@
namespace android { namespace android {
namespace camera3 { namespace camera3 {
DepthCompositeStream::DepthCompositeStream(wp<CameraDeviceBase> device, DepthCompositeStream::DepthCompositeStream(sp<CameraDeviceBase> device,
wp<hardware::camera2::ICameraDeviceCallbacks> cb) : wp<hardware::camera2::ICameraDeviceCallbacks> cb) :
CompositeStream(device, cb), CompositeStream(device, cb),
mBlobStreamId(-1), mBlobStreamId(-1),
@ -43,9 +43,8 @@ DepthCompositeStream::DepthCompositeStream(wp<CameraDeviceBase> device,
mProducerListener(new ProducerListener()), mProducerListener(new ProducerListener()),
mMaxJpegSize(-1), mMaxJpegSize(-1),
mIsLogicalCamera(false) { mIsLogicalCamera(false) {
sp<CameraDeviceBase> cameraDevice = device.promote(); if (device != nullptr) {
if (cameraDevice.get() != nullptr) { CameraMetadata staticInfo = device->info();
CameraMetadata staticInfo = cameraDevice->info();
auto entry = staticInfo.find(ANDROID_JPEG_MAX_SIZE); auto entry = staticInfo.find(ANDROID_JPEG_MAX_SIZE);
if (entry.count > 0) { if (entry.count > 0) {
mMaxJpegSize = entry.data.i32[0]; mMaxJpegSize = entry.data.i32[0];
@ -385,7 +384,8 @@ void DepthCompositeStream::releaseInputFrameLocked(InputFrame *inputFrame /*out*
} }
if ((inputFrame->error || mErrorState) && !inputFrame->errorNotified) { if ((inputFrame->error || mErrorState) && !inputFrame->errorNotified) {
notifyError(inputFrame->frameNumber); //TODO: Figure out correct requestId
notifyError(inputFrame->frameNumber, -1 /*requestId*/);
inputFrame->errorNotified = true; inputFrame->errorNotified = true;
} }
} }

@ -41,7 +41,7 @@ class DepthCompositeStream : public CompositeStream, public Thread,
public CpuConsumer::FrameAvailableListener { public CpuConsumer::FrameAvailableListener {
public: public:
DepthCompositeStream(wp<CameraDeviceBase> device, DepthCompositeStream(sp<CameraDeviceBase> device,
wp<hardware::camera2::ICameraDeviceCallbacks> cb); wp<hardware::camera2::ICameraDeviceCallbacks> cb);
~DepthCompositeStream() override; ~DepthCompositeStream() override;
@ -80,8 +80,9 @@ private:
bool error; bool error;
bool errorNotified; bool errorNotified;
int64_t frameNumber; int64_t frameNumber;
int32_t requestId;
InputFrame() : error(false), errorNotified(false), frameNumber(-1) { } InputFrame() : error(false), errorNotified(false), frameNumber(-1), requestId(-1) { }
}; };
// Helper methods // Helper methods

@ -45,7 +45,7 @@ using android::hardware::camera::device::V3_5::CameraBlobId;
namespace android { namespace android {
namespace camera3 { namespace camera3 {
HeicCompositeStream::HeicCompositeStream(wp<CameraDeviceBase> device, HeicCompositeStream::HeicCompositeStream(sp<CameraDeviceBase> device,
wp<hardware::camera2::ICameraDeviceCallbacks> cb) : wp<hardware::camera2::ICameraDeviceCallbacks> cb) :
CompositeStream(device, cb), CompositeStream(device, cb),
mUseHeic(false), mUseHeic(false),
@ -68,7 +68,8 @@ HeicCompositeStream::HeicCompositeStream(wp<CameraDeviceBase> device,
mLockedAppSegmentBufferCnt(0), mLockedAppSegmentBufferCnt(0),
mCodecOutputCounter(0), mCodecOutputCounter(0),
mQuality(-1), mQuality(-1),
mGridTimestampUs(0) { mGridTimestampUs(0),
mStatusId(StatusTracker::NO_STATUS_ID) {
} }
HeicCompositeStream::~HeicCompositeStream() { HeicCompositeStream::~HeicCompositeStream() {
@ -188,9 +189,17 @@ status_t HeicCompositeStream::createInternalStreams(const std::vector<sp<Surface
} }
mOutputSurface = consumers[0]; mOutputSurface = consumers[0];
res = registerCompositeStreamListener(getStreamId()); res = registerCompositeStreamListener(mMainImageStreamId);
if (res != OK) { if (res != OK) {
ALOGE("%s: Failed to register HAL main image stream", __FUNCTION__); ALOGE("%s: Failed to register HAL main image stream: %s (%d)", __FUNCTION__,
strerror(-res), res);
return res;
}
res = registerCompositeStreamListener(mAppSegmentStreamId);
if (res != OK) {
ALOGE("%s: Failed to register HAL app segment stream: %s (%d)", __FUNCTION__,
strerror(-res), res);
return res; return res;
} }
@ -224,6 +233,19 @@ status_t HeicCompositeStream::deleteInternalStreams() {
mOutputSurface->disconnect(NATIVE_WINDOW_API_CAMERA); mOutputSurface->disconnect(NATIVE_WINDOW_API_CAMERA);
mOutputSurface.clear(); mOutputSurface.clear();
} }
sp<StatusTracker> statusTracker = mStatusTracker.promote();
if (statusTracker != nullptr && mStatusId != StatusTracker::NO_STATUS_ID) {
statusTracker->removeComponent(mStatusId);
mStatusId = StatusTracker::NO_STATUS_ID;
}
if (mPendingInputFrames.size() > 0) {
ALOGW("%s: mPendingInputFrames has %zu stale entries",
__FUNCTION__, mPendingInputFrames.size());
mPendingInputFrames.clear();
}
return res; return res;
} }
@ -232,9 +254,16 @@ void HeicCompositeStream::onBufferReleased(const BufferInfo& bufferInfo) {
if (bufferInfo.mError) return; if (bufferInfo.mError) return;
mCodecOutputBufferTimestamps.push(bufferInfo.mTimestamp); if (bufferInfo.mStreamId == mMainImageStreamId) {
ALOGV("%s: [%" PRId64 "]: Adding codecOutputBufferTimestamp (%zu timestamps in total)", mMainImageFrameNumbers.push(bufferInfo.mFrameNumber);
__FUNCTION__, bufferInfo.mTimestamp, mCodecOutputBufferTimestamps.size()); mCodecOutputBufferFrameNumbers.push(bufferInfo.mFrameNumber);
ALOGV("%s: [%" PRId64 "]: Adding main image frame number (%zu frame numbers in total)",
__FUNCTION__, bufferInfo.mFrameNumber, mMainImageFrameNumbers.size());
} else if (bufferInfo.mStreamId == mAppSegmentStreamId) {
mAppSegmentFrameNumbers.push(bufferInfo.mFrameNumber);
ALOGV("%s: [%" PRId64 "]: Adding app segment frame number (%zu frame numbers in total)",
__FUNCTION__, bufferInfo.mFrameNumber, mAppSegmentFrameNumbers.size());
}
} }
// We need to get the settings early to handle the case where the codec output // We need to get the settings early to handle the case where the codec output
@ -264,7 +293,7 @@ void HeicCompositeStream::onBufferRequestForFrameNumber(uint64_t frameNumber, in
quality = entry.data.i32[0]; quality = entry.data.i32[0];
} }
mSettingsByFrameNumber[frameNumber] = std::make_pair(orientation, quality); mSettingsByFrameNumber[frameNumber] = {orientation, quality};
} }
void HeicCompositeStream::onFrameAvailable(const BufferItem& item) { void HeicCompositeStream::onFrameAvailable(const BufferItem& item) {
@ -479,6 +508,11 @@ status_t HeicCompositeStream::configureStream() {
return res; return res;
} }
sp<camera3::StatusTracker> statusTracker = mStatusTracker.promote();
if (statusTracker != nullptr) {
mStatusId = statusTracker->addComponent();
}
run("HeicCompositeStreamProc"); run("HeicCompositeStreamProc");
return NO_ERROR; return NO_ERROR;
@ -524,30 +558,44 @@ void HeicCompositeStream::onShutter(const CaptureResultExtras& resultExtras, nse
} }
if (mSettingsByFrameNumber.find(resultExtras.frameNumber) != mSettingsByFrameNumber.end()) { if (mSettingsByFrameNumber.find(resultExtras.frameNumber) != mSettingsByFrameNumber.end()) {
ALOGV("%s: [%" PRId64 "]: frameNumber %" PRId64, __FUNCTION__, ALOGV("%s: [%" PRId64 "]: timestamp %" PRId64 ", requestId %d", __FUNCTION__,
timestamp, resultExtras.frameNumber); resultExtras.frameNumber, timestamp, resultExtras.requestId);
mFrameNumberMap.emplace(resultExtras.frameNumber, timestamp); mSettingsByFrameNumber[resultExtras.frameNumber].shutterNotified = true;
mSettingsByTimestamp[timestamp] = mSettingsByFrameNumber[resultExtras.frameNumber]; mSettingsByFrameNumber[resultExtras.frameNumber].timestamp = timestamp;
mSettingsByFrameNumber.erase(resultExtras.frameNumber); mSettingsByFrameNumber[resultExtras.frameNumber].requestId = resultExtras.requestId;
mInputReadyCondition.signal(); mInputReadyCondition.signal();
} }
} }
void HeicCompositeStream::compilePendingInputLocked() { void HeicCompositeStream::compilePendingInputLocked() {
while (!mSettingsByTimestamp.empty()) { auto i = mSettingsByFrameNumber.begin();
auto it = mSettingsByTimestamp.begin(); while (i != mSettingsByFrameNumber.end()) {
mPendingInputFrames[it->first].orientation = it->second.first; if (i->second.shutterNotified) {
mPendingInputFrames[it->first].quality = it->second.second; mPendingInputFrames[i->first].orientation = i->second.orientation;
mSettingsByTimestamp.erase(it); mPendingInputFrames[i->first].quality = i->second.quality;
mPendingInputFrames[i->first].timestamp = i->second.timestamp;
// Set encoder quality if no inflight encoding mPendingInputFrames[i->first].requestId = i->second.requestId;
if (mPendingInputFrames.size() == 1) { ALOGV("%s: [%" PRId64 "]: timestamp is %" PRId64, __FUNCTION__,
int32_t newQuality = mPendingInputFrames.begin()->second.quality; i->first, i->second.timestamp);
updateCodecQualityLocked(newQuality); i = mSettingsByFrameNumber.erase(i);
// Set encoder quality if no inflight encoding
if (mPendingInputFrames.size() == 1) {
sp<StatusTracker> statusTracker = mStatusTracker.promote();
if (statusTracker != nullptr) {
statusTracker->markComponentActive(mStatusId);
ALOGV("%s: Mark component as active", __FUNCTION__);
}
int32_t newQuality = mPendingInputFrames.begin()->second.quality;
updateCodecQualityLocked(newQuality);
}
} else {
i++;
} }
} }
while (!mInputAppSegmentBuffers.empty()) { while (!mInputAppSegmentBuffers.empty() && mAppSegmentFrameNumbers.size() > 0) {
CpuConsumer::LockedBuffer imgBuffer; CpuConsumer::LockedBuffer imgBuffer;
auto it = mInputAppSegmentBuffers.begin(); auto it = mInputAppSegmentBuffers.begin();
auto res = mAppSegmentConsumer->lockNextBuffer(&imgBuffer); auto res = mAppSegmentConsumer->lockNextBuffer(&imgBuffer);
@ -569,17 +617,29 @@ void HeicCompositeStream::compilePendingInputLocked() {
continue; continue;
} }
if ((mPendingInputFrames.find(imgBuffer.timestamp) != mPendingInputFrames.end()) && if (mPendingInputFrames.find(mAppSegmentFrameNumbers.front()) == mPendingInputFrames.end()) {
(mPendingInputFrames[imgBuffer.timestamp].error)) { ALOGE("%s: mPendingInputFrames doesn't contain frameNumber %" PRId64, __FUNCTION__,
mAppSegmentFrameNumbers.front());
mInputYuvBuffers.erase(it);
continue;
}
int64_t frameNumber = mAppSegmentFrameNumbers.front();
// If mPendingInputFrames doesn't contain the expected frame number, the captured
// input app segment frame must have been dropped via a buffer error. Simply
// return the buffer to the buffer queue.
if ((mPendingInputFrames.find(frameNumber) == mPendingInputFrames.end()) ||
(mPendingInputFrames[frameNumber].error)) {
mAppSegmentConsumer->unlockBuffer(imgBuffer); mAppSegmentConsumer->unlockBuffer(imgBuffer);
} else { } else {
mPendingInputFrames[imgBuffer.timestamp].appSegmentBuffer = imgBuffer; mPendingInputFrames[frameNumber].appSegmentBuffer = imgBuffer;
mLockedAppSegmentBufferCnt++; mLockedAppSegmentBufferCnt++;
} }
mInputAppSegmentBuffers.erase(it); mInputAppSegmentBuffers.erase(it);
mAppSegmentFrameNumbers.pop();
} }
while (!mInputYuvBuffers.empty() && !mYuvBufferAcquired) { while (!mInputYuvBuffers.empty() && !mYuvBufferAcquired && mMainImageFrameNumbers.size() > 0) {
CpuConsumer::LockedBuffer imgBuffer; CpuConsumer::LockedBuffer imgBuffer;
auto it = mInputYuvBuffers.begin(); auto it = mInputYuvBuffers.begin();
auto res = mMainImageConsumer->lockNextBuffer(&imgBuffer); auto res = mMainImageConsumer->lockNextBuffer(&imgBuffer);
@ -600,59 +660,67 @@ void HeicCompositeStream::compilePendingInputLocked() {
continue; continue;
} }
if ((mPendingInputFrames.find(imgBuffer.timestamp) != mPendingInputFrames.end()) && if (mPendingInputFrames.find(mMainImageFrameNumbers.front()) == mPendingInputFrames.end()) {
(mPendingInputFrames[imgBuffer.timestamp].error)) { ALOGE("%s: mPendingInputFrames doesn't contain frameNumber %" PRId64, __FUNCTION__,
mMainImageFrameNumbers.front());
mInputYuvBuffers.erase(it);
continue;
}
int64_t frameNumber = mMainImageFrameNumbers.front();
// If mPendingInputFrames doesn't contain the expected frame number, the captured
// input main image must have been dropped via a buffer error. Simply
// return the buffer to the buffer queue.
if ((mPendingInputFrames.find(frameNumber) == mPendingInputFrames.end()) ||
(mPendingInputFrames[frameNumber].error)) {
mMainImageConsumer->unlockBuffer(imgBuffer); mMainImageConsumer->unlockBuffer(imgBuffer);
} else { } else {
mPendingInputFrames[imgBuffer.timestamp].yuvBuffer = imgBuffer; mPendingInputFrames[frameNumber].yuvBuffer = imgBuffer;
mYuvBufferAcquired = true; mYuvBufferAcquired = true;
} }
mInputYuvBuffers.erase(it); mInputYuvBuffers.erase(it);
mMainImageFrameNumbers.pop();
} }
while (!mCodecOutputBuffers.empty()) { while (!mCodecOutputBuffers.empty()) {
auto it = mCodecOutputBuffers.begin(); auto it = mCodecOutputBuffers.begin();
// Bitstream buffer timestamp doesn't necessarily directly correlate with input // Assume encoder input to output is FIFO, use a queue to look up
// buffer timestamp. Assume encoder input to output is FIFO, use a queue // frameNumber when handling codec outputs.
// to look up timestamp. int64_t bufferFrameNumber = -1;
int64_t bufferTime = -1; if (mCodecOutputBufferFrameNumbers.empty()) {
if (mCodecOutputBufferTimestamps.empty()) { ALOGV("%s: Failed to find buffer frameNumber for codec output buffer!", __FUNCTION__);
ALOGV("%s: Failed to find buffer timestamp for codec output buffer!", __FUNCTION__);
break; break;
} else { } else {
// Direct mapping between camera timestamp (in ns) and codec timestamp (in us). // Direct mapping between camera frame number and codec timestamp (in us).
bufferTime = mCodecOutputBufferTimestamps.front(); bufferFrameNumber = mCodecOutputBufferFrameNumbers.front();
mCodecOutputCounter++; mCodecOutputCounter++;
if (mCodecOutputCounter == mNumOutputTiles) { if (mCodecOutputCounter == mNumOutputTiles) {
mCodecOutputBufferTimestamps.pop(); mCodecOutputBufferFrameNumbers.pop();
mCodecOutputCounter = 0; mCodecOutputCounter = 0;
} }
mPendingInputFrames[bufferTime].codecOutputBuffers.push_back(*it); mPendingInputFrames[bufferFrameNumber].codecOutputBuffers.push_back(*it);
ALOGV("%s: [%" PRId64 "]: Pushing codecOutputBuffers (time %" PRId64 " us)", ALOGV("%s: [%" PRId64 "]: Pushing codecOutputBuffers (frameNumber %" PRId64 ")",
__FUNCTION__, bufferTime, it->timeUs); __FUNCTION__, bufferFrameNumber, it->timeUs);
} }
mCodecOutputBuffers.erase(it); mCodecOutputBuffers.erase(it);
} }
while (!mFrameNumberMap.empty()) {
auto it = mFrameNumberMap.begin();
mPendingInputFrames[it->second].frameNumber = it->first;
ALOGV("%s: [%" PRId64 "]: frameNumber is %" PRId64, __FUNCTION__, it->second, it->first);
mFrameNumberMap.erase(it);
}
while (!mCaptureResults.empty()) { while (!mCaptureResults.empty()) {
auto it = mCaptureResults.begin(); auto it = mCaptureResults.begin();
// Negative timestamp indicates that something went wrong during the capture result // Negative frame number indicates that something went wrong during the capture result
// collection process. // collection process.
if (it->first >= 0) { int64_t frameNumber = std::get<0>(it->second);
if (mPendingInputFrames[it->first].frameNumber == std::get<0>(it->second)) { if (it->first >= 0 &&
mPendingInputFrames[it->first].result = mPendingInputFrames.find(frameNumber) != mPendingInputFrames.end()) {
if (mPendingInputFrames[frameNumber].timestamp == it->first) {
mPendingInputFrames[frameNumber].result =
std::make_unique<CameraMetadata>(std::get<1>(it->second)); std::make_unique<CameraMetadata>(std::get<1>(it->second));
} else { } else {
ALOGE("%s: Capture result frameNumber/timestamp mapping changed between " ALOGE("%s: Capture result frameNumber/timestamp mapping changed between "
"shutter and capture result!", __FUNCTION__); "shutter and capture result! before: %" PRId64 ", after: %" PRId64,
__FUNCTION__, mPendingInputFrames[frameNumber].timestamp,
it->first);
} }
} }
mCaptureResults.erase(it); mCaptureResults.erase(it);
@ -661,22 +729,24 @@ void HeicCompositeStream::compilePendingInputLocked() {
// mErrorFrameNumbers stores frame number of dropped buffers. // mErrorFrameNumbers stores frame number of dropped buffers.
auto it = mErrorFrameNumbers.begin(); auto it = mErrorFrameNumbers.begin();
while (it != mErrorFrameNumbers.end()) { while (it != mErrorFrameNumbers.end()) {
bool frameFound = false; if (mPendingInputFrames.find(*it) != mPendingInputFrames.end()) {
for (auto &inputFrame : mPendingInputFrames) { mPendingInputFrames[*it].error = true;
if (inputFrame.second.frameNumber == *it) {
inputFrame.second.error = true;
frameFound = true;
break;
}
}
if (frameFound) {
it = mErrorFrameNumbers.erase(it);
} else { } else {
//Error callback is guaranteed to arrive after shutter notify, which
//results in mPendingInputFrames being populated.
ALOGW("%s: Not able to find failing input with frame number: %" PRId64, __FUNCTION__, ALOGW("%s: Not able to find failing input with frame number: %" PRId64, __FUNCTION__,
*it); *it);
it++;
} }
it = mErrorFrameNumbers.erase(it);
}
// mExifErrorFrameNumbers stores the frame number of dropped APP_SEGMENT buffers
it = mExifErrorFrameNumbers.begin();
while (it != mExifErrorFrameNumbers.end()) {
if (mPendingInputFrames.find(*it) != mPendingInputFrames.end()) {
mPendingInputFrames[*it].exifError = true;
}
it = mExifErrorFrameNumbers.erase(it);
} }
// Distribute codec input buffers to be filled out from YUV output // Distribute codec input buffers to be filled out from YUV output
@ -701,8 +771,8 @@ void HeicCompositeStream::compilePendingInputLocked() {
} }
} }
bool HeicCompositeStream::getNextReadyInputLocked(int64_t *currentTs /*out*/) { bool HeicCompositeStream::getNextReadyInputLocked(int64_t *frameNumber /*out*/) {
if (currentTs == nullptr) { if (frameNumber == nullptr) {
return false; return false;
} }
@ -715,7 +785,8 @@ bool HeicCompositeStream::getNextReadyInputLocked(int64_t *currentTs /*out*/) {
// This makes sure that muxer gets created only when an output tile is // This makes sure that muxer gets created only when an output tile is
// generated, because right now we only handle 1 HEIC output buffer at a // generated, because right now we only handle 1 HEIC output buffer at a
// time (max dequeued buffer count is 1). // time (max dequeued buffer count is 1).
bool appSegmentReady = (it.second.appSegmentBuffer.data != nullptr) && bool appSegmentReady =
(it.second.appSegmentBuffer.data != nullptr || it.second.exifError) &&
!it.second.appSegmentWritten && it.second.result != nullptr && !it.second.appSegmentWritten && it.second.result != nullptr &&
it.second.muxer != nullptr; it.second.muxer != nullptr;
bool codecOutputReady = !it.second.codecOutputBuffers.empty(); bool codecOutputReady = !it.second.codecOutputBuffers.empty();
@ -724,9 +795,8 @@ bool HeicCompositeStream::getNextReadyInputLocked(int64_t *currentTs /*out*/) {
bool hasOutputBuffer = it.second.muxer != nullptr || bool hasOutputBuffer = it.second.muxer != nullptr ||
(mDequeuedOutputBufferCnt < kMaxOutputSurfaceProducerCount); (mDequeuedOutputBufferCnt < kMaxOutputSurfaceProducerCount);
if ((!it.second.error) && if ((!it.second.error) &&
(it.first < *currentTs) &&
(appSegmentReady || (codecOutputReady && hasOutputBuffer) || codecInputReady)) { (appSegmentReady || (codecOutputReady && hasOutputBuffer) || codecInputReady)) {
*currentTs = it.first; *frameNumber = it.first;
if (it.second.format == nullptr && mFormat != nullptr) { if (it.second.format == nullptr && mFormat != nullptr) {
it.second.format = mFormat->dup(); it.second.format = mFormat->dup();
} }
@ -738,16 +808,12 @@ bool HeicCompositeStream::getNextReadyInputLocked(int64_t *currentTs /*out*/) {
return newInputAvailable; return newInputAvailable;
} }
int64_t HeicCompositeStream::getNextFailingInputLocked(int64_t *currentTs /*out*/) { int64_t HeicCompositeStream::getNextFailingInputLocked() {
int64_t res = -1; int64_t res = -1;
if (currentTs == nullptr) {
return res;
}
for (const auto& it : mPendingInputFrames) { for (const auto& it : mPendingInputFrames) {
if (it.second.error && !it.second.errorNotified && (it.first < *currentTs)) { if (it.second.error) {
*currentTs = it.first; res = it.first;
res = it.second.frameNumber;
break; break;
} }
} }
@ -755,12 +821,13 @@ int64_t HeicCompositeStream::getNextFailingInputLocked(int64_t *currentTs /*out*
return res; return res;
} }
status_t HeicCompositeStream::processInputFrame(nsecs_t timestamp, status_t HeicCompositeStream::processInputFrame(int64_t frameNumber,
InputFrame &inputFrame) { InputFrame &inputFrame) {
ATRACE_CALL(); ATRACE_CALL();
status_t res = OK; status_t res = OK;
bool appSegmentReady = inputFrame.appSegmentBuffer.data != nullptr && bool appSegmentReady =
(inputFrame.appSegmentBuffer.data != nullptr || inputFrame.exifError) &&
!inputFrame.appSegmentWritten && inputFrame.result != nullptr && !inputFrame.appSegmentWritten && inputFrame.result != nullptr &&
inputFrame.muxer != nullptr; inputFrame.muxer != nullptr;
bool codecOutputReady = inputFrame.codecOutputBuffers.size() > 0; bool codecOutputReady = inputFrame.codecOutputBuffers.size() > 0;
@ -770,8 +837,9 @@ status_t HeicCompositeStream::processInputFrame(nsecs_t timestamp,
(mDequeuedOutputBufferCnt < kMaxOutputSurfaceProducerCount); (mDequeuedOutputBufferCnt < kMaxOutputSurfaceProducerCount);
ALOGV("%s: [%" PRId64 "]: appSegmentReady %d, codecOutputReady %d, codecInputReady %d," ALOGV("%s: [%" PRId64 "]: appSegmentReady %d, codecOutputReady %d, codecInputReady %d,"
" dequeuedOutputBuffer %d", __FUNCTION__, timestamp, appSegmentReady, " dequeuedOutputBuffer %d, timestamp %" PRId64, __FUNCTION__, frameNumber,
codecOutputReady, codecInputReady, mDequeuedOutputBufferCnt); appSegmentReady, codecOutputReady, codecInputReady, mDequeuedOutputBufferCnt,
inputFrame.timestamp);
// Handle inputs for Hevc tiling // Handle inputs for Hevc tiling
if (codecInputReady) { if (codecInputReady) {
@ -791,7 +859,7 @@ status_t HeicCompositeStream::processInputFrame(nsecs_t timestamp,
// codecOutputReady must be true. Otherwise, appSegmentReady is guaranteed // codecOutputReady must be true. Otherwise, appSegmentReady is guaranteed
// to be false, and the function must have returned early. // to be false, and the function must have returned early.
if (inputFrame.muxer == nullptr) { if (inputFrame.muxer == nullptr) {
res = startMuxerForInputFrame(timestamp, inputFrame); res = startMuxerForInputFrame(frameNumber, inputFrame);
if (res != OK) { if (res != OK) {
ALOGE("%s: Failed to create and start muxer: %s (%d)", __FUNCTION__, ALOGE("%s: Failed to create and start muxer: %s (%d)", __FUNCTION__,
strerror(-res), res); strerror(-res), res);
@ -801,7 +869,7 @@ status_t HeicCompositeStream::processInputFrame(nsecs_t timestamp,
// Write JPEG APP segments data to the muxer. // Write JPEG APP segments data to the muxer.
if (appSegmentReady) { if (appSegmentReady) {
res = processAppSegment(timestamp, inputFrame); res = processAppSegment(frameNumber, inputFrame);
if (res != OK) { if (res != OK) {
ALOGE("%s: Failed to process JPEG APP segments: %s (%d)", __FUNCTION__, ALOGE("%s: Failed to process JPEG APP segments: %s (%d)", __FUNCTION__,
strerror(-res), res); strerror(-res), res);
@ -811,7 +879,7 @@ status_t HeicCompositeStream::processInputFrame(nsecs_t timestamp,
// Write media codec bitstream buffers to muxer. // Write media codec bitstream buffers to muxer.
while (!inputFrame.codecOutputBuffers.empty()) { while (!inputFrame.codecOutputBuffers.empty()) {
res = processOneCodecOutputFrame(timestamp, inputFrame); res = processOneCodecOutputFrame(frameNumber, inputFrame);
if (res != OK) { if (res != OK) {
ALOGE("%s: Failed to process codec output frame: %s (%d)", __FUNCTION__, ALOGE("%s: Failed to process codec output frame: %s (%d)", __FUNCTION__,
strerror(-res), res); strerror(-res), res);
@ -821,7 +889,7 @@ status_t HeicCompositeStream::processInputFrame(nsecs_t timestamp,
if (inputFrame.pendingOutputTiles == 0) { if (inputFrame.pendingOutputTiles == 0) {
if (inputFrame.appSegmentWritten) { if (inputFrame.appSegmentWritten) {
res = processCompletedInputFrame(timestamp, inputFrame); res = processCompletedInputFrame(frameNumber, inputFrame);
if (res != OK) { if (res != OK) {
ALOGE("%s: Failed to process completed input frame: %s (%d)", __FUNCTION__, ALOGE("%s: Failed to process completed input frame: %s (%d)", __FUNCTION__,
strerror(-res), res); strerror(-res), res);
@ -837,7 +905,7 @@ status_t HeicCompositeStream::processInputFrame(nsecs_t timestamp,
return res; return res;
} }
status_t HeicCompositeStream::startMuxerForInputFrame(nsecs_t timestamp, InputFrame &inputFrame) { status_t HeicCompositeStream::startMuxerForInputFrame(int64_t frameNumber, InputFrame &inputFrame) {
sp<ANativeWindow> outputANW = mOutputSurface; sp<ANativeWindow> outputANW = mOutputSurface;
auto res = outputANW->dequeueBuffer(mOutputSurface.get(), &inputFrame.anb, &inputFrame.fenceFd); auto res = outputANW->dequeueBuffer(mOutputSurface.get(), &inputFrame.anb, &inputFrame.fenceFd);
@ -851,7 +919,7 @@ status_t HeicCompositeStream::startMuxerForInputFrame(nsecs_t timestamp, InputFr
// Combine current thread id, stream id and timestamp to uniquely identify image. // Combine current thread id, stream id and timestamp to uniquely identify image.
std::ostringstream tempOutputFile; std::ostringstream tempOutputFile;
tempOutputFile << "HEIF-" << pthread_self() << "-" tempOutputFile << "HEIF-" << pthread_self() << "-"
<< getStreamId() << "-" << timestamp; << getStreamId() << "-" << frameNumber;
inputFrame.fileFd = syscall(__NR_memfd_create, tempOutputFile.str().c_str(), MFD_CLOEXEC); inputFrame.fileFd = syscall(__NR_memfd_create, tempOutputFile.str().c_str(), MFD_CLOEXEC);
if (inputFrame.fileFd < 0) { if (inputFrame.fileFd < 0) {
ALOGE("%s: Failed to create file %s. Error no is %d", __FUNCTION__, ALOGE("%s: Failed to create file %s. Error no is %d", __FUNCTION__,
@ -889,22 +957,27 @@ status_t HeicCompositeStream::startMuxerForInputFrame(nsecs_t timestamp, InputFr
} }
ALOGV("%s: [%" PRId64 "]: Muxer started for inputFrame", __FUNCTION__, ALOGV("%s: [%" PRId64 "]: Muxer started for inputFrame", __FUNCTION__,
timestamp); frameNumber);
return OK; return OK;
} }
status_t HeicCompositeStream::processAppSegment(nsecs_t timestamp, InputFrame &inputFrame) { status_t HeicCompositeStream::processAppSegment(int64_t frameNumber, InputFrame &inputFrame) {
size_t app1Size = 0; size_t app1Size = 0;
auto appSegmentSize = findAppSegmentsSize(inputFrame.appSegmentBuffer.data, size_t appSegmentSize = 0;
inputFrame.appSegmentBuffer.width * inputFrame.appSegmentBuffer.height, if (!inputFrame.exifError) {
&app1Size); appSegmentSize = findAppSegmentsSize(inputFrame.appSegmentBuffer.data,
if (appSegmentSize == 0) { inputFrame.appSegmentBuffer.width * inputFrame.appSegmentBuffer.height,
ALOGE("%s: Failed to find JPEG APP segment size", __FUNCTION__); &app1Size);
return NO_INIT; if (appSegmentSize == 0) {
ALOGE("%s: Failed to find JPEG APP segment size", __FUNCTION__);
return NO_INIT;
}
} }
std::unique_ptr<ExifUtils> exifUtils(ExifUtils::create()); std::unique_ptr<ExifUtils> exifUtils(ExifUtils::create());
auto exifRes = exifUtils->initialize(inputFrame.appSegmentBuffer.data, app1Size); auto exifRes = inputFrame.exifError ?
exifUtils->initializeEmpty() :
exifUtils->initialize(inputFrame.appSegmentBuffer.data, app1Size);
if (!exifRes) { if (!exifRes) {
ALOGE("%s: Failed to initialize ExifUtils object!", __FUNCTION__); ALOGE("%s: Failed to initialize ExifUtils object!", __FUNCTION__);
return BAD_VALUE; return BAD_VALUE;
@ -945,7 +1018,7 @@ status_t HeicCompositeStream::processAppSegment(nsecs_t timestamp, InputFrame &i
sp<ABuffer> aBuffer = new ABuffer(appSegmentBuffer, appSegmentBufferSize); sp<ABuffer> aBuffer = new ABuffer(appSegmentBuffer, appSegmentBufferSize);
auto res = inputFrame.muxer->writeSampleData(aBuffer, inputFrame.trackIndex, auto res = inputFrame.muxer->writeSampleData(aBuffer, inputFrame.trackIndex,
timestamp, MediaCodec::BUFFER_FLAG_MUXER_DATA); inputFrame.timestamp, MediaCodec::BUFFER_FLAG_MUXER_DATA);
delete[] appSegmentBuffer; delete[] appSegmentBuffer;
if (res != OK) { if (res != OK) {
@ -955,13 +1028,14 @@ status_t HeicCompositeStream::processAppSegment(nsecs_t timestamp, InputFrame &i
} }
ALOGV("%s: [%" PRId64 "]: appSegmentSize is %zu, width %d, height %d, app1Size %zu", ALOGV("%s: [%" PRId64 "]: appSegmentSize is %zu, width %d, height %d, app1Size %zu",
__FUNCTION__, timestamp, appSegmentSize, inputFrame.appSegmentBuffer.width, __FUNCTION__, frameNumber, appSegmentSize, inputFrame.appSegmentBuffer.width,
inputFrame.appSegmentBuffer.height, app1Size); inputFrame.appSegmentBuffer.height, app1Size);
inputFrame.appSegmentWritten = true; inputFrame.appSegmentWritten = true;
// Release the buffer now so any pending input app segments can be processed // Release the buffer now so any pending input app segments can be processed
mAppSegmentConsumer->unlockBuffer(inputFrame.appSegmentBuffer); mAppSegmentConsumer->unlockBuffer(inputFrame.appSegmentBuffer);
inputFrame.appSegmentBuffer.data = nullptr; inputFrame.appSegmentBuffer.data = nullptr;
inputFrame.exifError = false;
mLockedAppSegmentBufferCnt--; mLockedAppSegmentBufferCnt--;
return OK; return OK;
@ -1010,7 +1084,7 @@ status_t HeicCompositeStream::processCodecInputFrame(InputFrame &inputFrame) {
return OK; return OK;
} }
status_t HeicCompositeStream::processOneCodecOutputFrame(nsecs_t timestamp, status_t HeicCompositeStream::processOneCodecOutputFrame(int64_t frameNumber,
InputFrame &inputFrame) { InputFrame &inputFrame) {
auto it = inputFrame.codecOutputBuffers.begin(); auto it = inputFrame.codecOutputBuffers.begin();
sp<MediaCodecBuffer> buffer; sp<MediaCodecBuffer> buffer;
@ -1028,7 +1102,7 @@ status_t HeicCompositeStream::processOneCodecOutputFrame(nsecs_t timestamp,
sp<ABuffer> aBuffer = new ABuffer(buffer->data(), buffer->size()); sp<ABuffer> aBuffer = new ABuffer(buffer->data(), buffer->size());
res = inputFrame.muxer->writeSampleData( res = inputFrame.muxer->writeSampleData(
aBuffer, inputFrame.trackIndex, timestamp, 0 /*flags*/); aBuffer, inputFrame.trackIndex, inputFrame.timestamp, 0 /*flags*/);
if (res != OK) { if (res != OK) {
ALOGE("%s: Failed to write buffer index %d to muxer: %s (%d)", ALOGE("%s: Failed to write buffer index %d to muxer: %s (%d)",
__FUNCTION__, it->index, strerror(-res), res); __FUNCTION__, it->index, strerror(-res), res);
@ -1045,11 +1119,11 @@ status_t HeicCompositeStream::processOneCodecOutputFrame(nsecs_t timestamp,
inputFrame.codecOutputBuffers.erase(inputFrame.codecOutputBuffers.begin()); inputFrame.codecOutputBuffers.erase(inputFrame.codecOutputBuffers.begin());
ALOGV("%s: [%" PRId64 "]: Output buffer index %d", ALOGV("%s: [%" PRId64 "]: Output buffer index %d",
__FUNCTION__, timestamp, it->index); __FUNCTION__, frameNumber, it->index);
return OK; return OK;
} }
status_t HeicCompositeStream::processCompletedInputFrame(nsecs_t timestamp, status_t HeicCompositeStream::processCompletedInputFrame(int64_t frameNumber,
InputFrame &inputFrame) { InputFrame &inputFrame) {
sp<ANativeWindow> outputANW = mOutputSurface; sp<ANativeWindow> outputANW = mOutputSurface;
inputFrame.muxer->stop(); inputFrame.muxer->stop();
@ -1088,7 +1162,7 @@ status_t HeicCompositeStream::processCompletedInputFrame(nsecs_t timestamp,
blobHeader->blobId = static_cast<CameraBlobId>(0x00FE); blobHeader->blobId = static_cast<CameraBlobId>(0x00FE);
blobHeader->blobSize = fSize; blobHeader->blobSize = fSize;
res = native_window_set_buffers_timestamp(mOutputSurface.get(), timestamp); res = native_window_set_buffers_timestamp(mOutputSurface.get(), inputFrame.timestamp);
if (res != OK) { if (res != OK) {
ALOGE("%s: Stream %d: Error setting timestamp: %s (%d)", ALOGE("%s: Stream %d: Error setting timestamp: %s (%d)",
__FUNCTION__, getStreamId(), strerror(-res), res); __FUNCTION__, getStreamId(), strerror(-res), res);
@ -1104,13 +1178,14 @@ status_t HeicCompositeStream::processCompletedInputFrame(nsecs_t timestamp,
inputFrame.anb = nullptr; inputFrame.anb = nullptr;
mDequeuedOutputBufferCnt--; mDequeuedOutputBufferCnt--;
ALOGV("%s: [%" PRId64 "]", __FUNCTION__, timestamp); ALOGV("%s: [%" PRId64 "]", __FUNCTION__, frameNumber);
ATRACE_ASYNC_END("HEIC capture", inputFrame.frameNumber); ATRACE_ASYNC_END("HEIC capture", frameNumber);
return OK; return OK;
} }
void HeicCompositeStream::releaseInputFrameLocked(InputFrame *inputFrame /*out*/) { void HeicCompositeStream::releaseInputFrameLocked(int64_t frameNumber,
InputFrame *inputFrame /*out*/) {
if (inputFrame == nullptr) { if (inputFrame == nullptr) {
return; return;
} }
@ -1138,9 +1213,9 @@ void HeicCompositeStream::releaseInputFrameLocked(InputFrame *inputFrame /*out*/
inputFrame->codecInputBuffers.erase(it); inputFrame->codecInputBuffers.erase(it);
} }
if ((inputFrame->error || mErrorState) && !inputFrame->errorNotified) { if (inputFrame->error || mErrorState) {
notifyError(inputFrame->frameNumber); ALOGV("%s: notifyError called for frameNumber %" PRId64, __FUNCTION__, frameNumber);
inputFrame->errorNotified = true; notifyError(frameNumber, inputFrame->requestId);
} }
if (inputFrame->fileFd >= 0) { if (inputFrame->fileFd >= 0) {
@ -1152,6 +1227,8 @@ void HeicCompositeStream::releaseInputFrameLocked(InputFrame *inputFrame /*out*/
sp<ANativeWindow> outputANW = mOutputSurface; sp<ANativeWindow> outputANW = mOutputSurface;
outputANW->cancelBuffer(mOutputSurface.get(), inputFrame->anb, /*fence*/ -1); outputANW->cancelBuffer(mOutputSurface.get(), inputFrame->anb, /*fence*/ -1);
inputFrame->anb = nullptr; inputFrame->anb = nullptr;
mDequeuedOutputBufferCnt--;
} }
} }
@ -1161,8 +1238,8 @@ void HeicCompositeStream::releaseInputFramesLocked() {
while (it != mPendingInputFrames.end()) { while (it != mPendingInputFrames.end()) {
auto& inputFrame = it->second; auto& inputFrame = it->second;
if (inputFrame.error || if (inputFrame.error ||
(inputFrame.appSegmentWritten && inputFrame.pendingOutputTiles == 0)) { (inputFrame.appSegmentWritten && inputFrame.pendingOutputTiles == 0)) {
releaseInputFrameLocked(&inputFrame); releaseInputFrameLocked(it->first, &inputFrame);
it = mPendingInputFrames.erase(it); it = mPendingInputFrames.erase(it);
inputFrameDone = true; inputFrameDone = true;
} else { } else {
@ -1179,6 +1256,8 @@ void HeicCompositeStream::releaseInputFramesLocked() {
auto firstPendingFrame = mPendingInputFrames.begin(); auto firstPendingFrame = mPendingInputFrames.begin();
if (firstPendingFrame != mPendingInputFrames.end()) { if (firstPendingFrame != mPendingInputFrames.end()) {
updateCodecQualityLocked(firstPendingFrame->second.quality); updateCodecQualityLocked(firstPendingFrame->second.quality);
} else {
markTrackerIdle();
} }
} }
} }
@ -1397,20 +1476,6 @@ size_t HeicCompositeStream::findAppSegmentsSize(const uint8_t* appSegmentBuffer,
return expectedSize; return expectedSize;
} }
int64_t HeicCompositeStream::findTimestampInNsLocked(int64_t timeInUs) {
for (const auto& fn : mFrameNumberMap) {
if (timeInUs == ns2us(fn.second)) {
return fn.second;
}
}
for (const auto& inputFrame : mPendingInputFrames) {
if (timeInUs == ns2us(inputFrame.first)) {
return inputFrame.first;
}
}
return -1;
}
status_t HeicCompositeStream::copyOneYuvTile(sp<MediaCodecBuffer>& codecBuffer, status_t HeicCompositeStream::copyOneYuvTile(sp<MediaCodecBuffer>& codecBuffer,
const CpuConsumer::LockedBuffer& yuvBuffer, const CpuConsumer::LockedBuffer& yuvBuffer,
size_t top, size_t left, size_t width, size_t height) { size_t top, size_t left, size_t width, size_t height) {
@ -1584,7 +1649,7 @@ void HeicCompositeStream::updateCodecQualityLocked(int32_t quality) {
} }
bool HeicCompositeStream::threadLoop() { bool HeicCompositeStream::threadLoop() {
int64_t currentTs = INT64_MAX; int64_t frameNumber = -1;
bool newInputAvailable = false; bool newInputAvailable = false;
{ {
@ -1600,19 +1665,25 @@ bool HeicCompositeStream::threadLoop() {
while (!newInputAvailable) { while (!newInputAvailable) {
compilePendingInputLocked(); compilePendingInputLocked();
newInputAvailable = getNextReadyInputLocked(&currentTs); newInputAvailable = getNextReadyInputLocked(&frameNumber);
if (!newInputAvailable) { if (!newInputAvailable) {
auto failingFrameNumber = getNextFailingInputLocked(&currentTs); auto failingFrameNumber = getNextFailingInputLocked();
if (failingFrameNumber >= 0) { if (failingFrameNumber >= 0) {
// We cannot erase 'mPendingInputFrames[currentTs]' at this point because it is releaseInputFrameLocked(failingFrameNumber,
// possible for two internal stream buffers to fail. In such scenario the &mPendingInputFrames[failingFrameNumber]);
// composite stream should notify the client about a stream buffer error only
// once and this information is kept within 'errorNotified'. // It's okay to remove the entry from mPendingInputFrames
// Any present failed input frames will be removed on a subsequent call to // because:
// 'releaseInputFramesLocked()'. // 1. Only one internal stream (main input) is critical in
releaseInputFrameLocked(&mPendingInputFrames[currentTs]); // backing the output stream.
currentTs = INT64_MAX; // 2. If captureResult/appSegment arrives after the entry is
// removed, they are simply skipped.
mPendingInputFrames.erase(failingFrameNumber);
if (mPendingInputFrames.size() == 0) {
markTrackerIdle();
}
return true;
} }
auto ret = mInputReadyCondition.waitRelative(mMutex, kWaitDuration); auto ret = mInputReadyCondition.waitRelative(mMutex, kWaitDuration);
@ -1627,12 +1698,13 @@ bool HeicCompositeStream::threadLoop() {
} }
} }
auto res = processInputFrame(currentTs, mPendingInputFrames[currentTs]); auto res = processInputFrame(frameNumber, mPendingInputFrames[frameNumber]);
Mutex::Autolock l(mMutex); Mutex::Autolock l(mMutex);
if (res != OK) { if (res != OK) {
ALOGE("%s: Failed processing frame with timestamp: %" PRIu64 ": %s (%d)", ALOGE("%s: Failed processing frame with timestamp: %" PRIu64 ", frameNumber: %"
__FUNCTION__, currentTs, strerror(-res), res); PRId64 ": %s (%d)", __FUNCTION__, mPendingInputFrames[frameNumber].timestamp,
mPendingInputFrames[currentTs].error = true; frameNumber, strerror(-res), res);
mPendingInputFrames[frameNumber].error = true;
} }
releaseInputFramesLocked(); releaseInputFramesLocked();
@ -1640,14 +1712,26 @@ bool HeicCompositeStream::threadLoop() {
return true; return true;
} }
void HeicCompositeStream::flagAnExifErrorFrameNumber(int64_t frameNumber) {
Mutex::Autolock l(mMutex);
mExifErrorFrameNumbers.emplace(frameNumber);
mInputReadyCondition.signal();
}
bool HeicCompositeStream::onStreamBufferError(const CaptureResultExtras& resultExtras) { bool HeicCompositeStream::onStreamBufferError(const CaptureResultExtras& resultExtras) {
bool res = false; bool res = false;
int64_t frameNumber = resultExtras.frameNumber;
// Buffer errors concerning internal composite streams should not be directly visible to // Buffer errors concerning internal composite streams should not be directly visible to
// camera clients. They must only receive a single buffer error with the public composite // camera clients. They must only receive a single buffer error with the public composite
// stream id. // stream id.
if ((resultExtras.errorStreamId == mAppSegmentStreamId) || if (resultExtras.errorStreamId == mAppSegmentStreamId) {
(resultExtras.errorStreamId == mMainImageStreamId)) { ALOGV("%s: APP_SEGMENT frameNumber: %" PRId64, __FUNCTION__, frameNumber);
flagAnErrorFrameNumber(resultExtras.frameNumber); flagAnExifErrorFrameNumber(frameNumber);
res = true;
} else if (resultExtras.errorStreamId == mMainImageStreamId) {
ALOGV("%s: YUV frameNumber: %" PRId64, __FUNCTION__, frameNumber);
flagAnErrorFrameNumber(frameNumber);
res = true; res = true;
} }
@ -1660,16 +1744,16 @@ void HeicCompositeStream::onResultError(const CaptureResultExtras& resultExtras)
Mutex::Autolock l(mMutex); Mutex::Autolock l(mMutex);
int64_t timestamp = -1; int64_t timestamp = -1;
for (const auto& fn : mFrameNumberMap) { for (const auto& fn : mSettingsByFrameNumber) {
if (fn.first == resultExtras.frameNumber) { if (fn.first == resultExtras.frameNumber) {
timestamp = fn.second; timestamp = fn.second.timestamp;
break; break;
} }
} }
if (timestamp == -1) { if (timestamp == -1) {
for (const auto& inputFrame : mPendingInputFrames) { for (const auto& inputFrame : mPendingInputFrames) {
if (inputFrame.second.frameNumber == resultExtras.frameNumber) { if (inputFrame.first == resultExtras.frameNumber) {
timestamp = inputFrame.first; timestamp = inputFrame.second.timestamp;
break; break;
} }
} }
@ -1681,9 +1765,33 @@ void HeicCompositeStream::onResultError(const CaptureResultExtras& resultExtras)
} }
mCaptureResults.emplace(timestamp, std::make_tuple(resultExtras.frameNumber, CameraMetadata())); mCaptureResults.emplace(timestamp, std::make_tuple(resultExtras.frameNumber, CameraMetadata()));
ALOGV("%s: timestamp %" PRId64 ", frameNumber %" PRId64, __FUNCTION__,
timestamp, resultExtras.frameNumber);
mInputReadyCondition.signal(); mInputReadyCondition.signal();
} }
void HeicCompositeStream::onRequestError(const CaptureResultExtras& resultExtras) {
auto frameNumber = resultExtras.frameNumber;
ALOGV("%s: frameNumber: %" PRId64, __FUNCTION__, frameNumber);
Mutex::Autolock l(mMutex);
auto numRequests = mSettingsByFrameNumber.erase(frameNumber);
if (numRequests == 0) {
// Pending request has been populated into mPendingInputFrames
mErrorFrameNumbers.emplace(frameNumber);
mInputReadyCondition.signal();
} else {
// REQUEST_ERROR was received without onShutter.
}
}
void HeicCompositeStream::markTrackerIdle() {
sp<StatusTracker> statusTracker = mStatusTracker.promote();
if (statusTracker != nullptr) {
statusTracker->markComponentIdle(mStatusId, Fence::NO_FENCE);
ALOGV("%s: Mark component as idle", __FUNCTION__);
}
}
void HeicCompositeStream::CodecCallbackHandler::onMessageReceived(const sp<AMessage> &msg) { void HeicCompositeStream::CodecCallbackHandler::onMessageReceived(const sp<AMessage> &msg) {
sp<HeicCompositeStream> parent = mParent.promote(); sp<HeicCompositeStream> parent = mParent.promote();
if (parent == nullptr) return; if (parent == nullptr) return;

@ -37,7 +37,7 @@ namespace camera3 {
class HeicCompositeStream : public CompositeStream, public Thread, class HeicCompositeStream : public CompositeStream, public Thread,
public CpuConsumer::FrameAvailableListener { public CpuConsumer::FrameAvailableListener {
public: public:
HeicCompositeStream(wp<CameraDeviceBase> device, HeicCompositeStream(sp<CameraDeviceBase> device,
wp<hardware::camera2::ICameraDeviceCallbacks> cb); wp<hardware::camera2::ICameraDeviceCallbacks> cb);
~HeicCompositeStream() override; ~HeicCompositeStream() override;
@ -81,6 +81,7 @@ protected:
bool threadLoop() override; bool threadLoop() override;
bool onStreamBufferError(const CaptureResultExtras& resultExtras) override; bool onStreamBufferError(const CaptureResultExtras& resultExtras) override;
void onResultError(const CaptureResultExtras& resultExtras) override; void onResultError(const CaptureResultExtras& resultExtras) override;
void onRequestError(const CaptureResultExtras& resultExtras) override;
private: private:
// //
@ -156,9 +157,10 @@ private:
CpuConsumer::LockedBuffer yuvBuffer; CpuConsumer::LockedBuffer yuvBuffer;
std::vector<CodecInputBufferInfo> codecInputBuffers; std::vector<CodecInputBufferInfo> codecInputBuffers;
bool error; bool error; // Main input image buffer error
bool errorNotified; bool exifError; // Exif/APP_SEGMENT buffer error
int64_t frameNumber; int64_t timestamp;
int32_t requestId;
sp<AMessage> format; sp<AMessage> format;
sp<MediaMuxer> muxer; sp<MediaMuxer> muxer;
@ -172,30 +174,29 @@ private:
size_t codecInputCounter; size_t codecInputCounter;
InputFrame() : orientation(0), quality(kDefaultJpegQuality), error(false), InputFrame() : orientation(0), quality(kDefaultJpegQuality), error(false),
errorNotified(false), frameNumber(-1), fenceFd(-1), fileFd(-1), exifError(false), timestamp(-1), requestId(-1), fenceFd(-1),
trackIndex(-1), anb(nullptr), appSegmentWritten(false), fileFd(-1), trackIndex(-1), anb(nullptr), appSegmentWritten(false),
pendingOutputTiles(0), codecInputCounter(0) { } pendingOutputTiles(0), codecInputCounter(0) { }
}; };
void compilePendingInputLocked(); void compilePendingInputLocked();
// Find first complete and valid frame with smallest timestamp // Find first complete and valid frame with smallest frame number
bool getNextReadyInputLocked(int64_t *currentTs /*out*/); bool getNextReadyInputLocked(int64_t *frameNumber /*out*/);
// Find next failing frame number with smallest timestamp and return respective frame number // Find next failing frame number with smallest frame number and return respective frame number
int64_t getNextFailingInputLocked(int64_t *currentTs /*out*/); int64_t getNextFailingInputLocked();
status_t processInputFrame(nsecs_t timestamp, InputFrame &inputFrame); status_t processInputFrame(int64_t frameNumber, InputFrame &inputFrame);
status_t processCodecInputFrame(InputFrame &inputFrame); status_t processCodecInputFrame(InputFrame &inputFrame);
status_t startMuxerForInputFrame(nsecs_t timestamp, InputFrame &inputFrame); status_t startMuxerForInputFrame(int64_t frameNumber, InputFrame &inputFrame);
status_t processAppSegment(nsecs_t timestamp, InputFrame &inputFrame); status_t processAppSegment(int64_t frameNumber, InputFrame &inputFrame);
status_t processOneCodecOutputFrame(nsecs_t timestamp, InputFrame &inputFrame); status_t processOneCodecOutputFrame(int64_t frameNumber, InputFrame &inputFrame);
status_t processCompletedInputFrame(nsecs_t timestamp, InputFrame &inputFrame); status_t processCompletedInputFrame(int64_t frameNumber, InputFrame &inputFrame);
void releaseInputFrameLocked(InputFrame *inputFrame /*out*/); void releaseInputFrameLocked(int64_t frameNumber, InputFrame *inputFrame /*out*/);
void releaseInputFramesLocked(); void releaseInputFramesLocked();
size_t findAppSegmentsSize(const uint8_t* appSegmentBuffer, size_t maxSize, size_t findAppSegmentsSize(const uint8_t* appSegmentBuffer, size_t maxSize,
size_t* app1SegmentSize); size_t* app1SegmentSize);
int64_t findTimestampInNsLocked(int64_t timeInUs);
status_t copyOneYuvTile(sp<MediaCodecBuffer>& codecBuffer, status_t copyOneYuvTile(sp<MediaCodecBuffer>& codecBuffer,
const CpuConsumer::LockedBuffer& yuvBuffer, const CpuConsumer::LockedBuffer& yuvBuffer,
size_t top, size_t left, size_t width, size_t height); size_t top, size_t left, size_t width, size_t height);
@ -218,12 +219,14 @@ private:
sp<CpuConsumer> mAppSegmentConsumer; sp<CpuConsumer> mAppSegmentConsumer;
sp<Surface> mAppSegmentSurface; sp<Surface> mAppSegmentSurface;
size_t mAppSegmentMaxSize; size_t mAppSegmentMaxSize;
std::queue<int64_t> mAppSegmentFrameNumbers;
CameraMetadata mStaticInfo; CameraMetadata mStaticInfo;
int mMainImageStreamId, mMainImageSurfaceId; int mMainImageStreamId, mMainImageSurfaceId;
sp<Surface> mMainImageSurface; sp<Surface> mMainImageSurface;
sp<CpuConsumer> mMainImageConsumer; // Only applicable for HEVC codec. sp<CpuConsumer> mMainImageConsumer; // Only applicable for HEVC codec.
bool mYuvBufferAcquired; // Only applicable to HEVC codec bool mYuvBufferAcquired; // Only applicable to HEVC codec
std::queue<int64_t> mMainImageFrameNumbers;
static const int32_t kMaxOutputSurfaceProducerCount = 1; static const int32_t kMaxOutputSurfaceProducerCount = 1;
sp<Surface> mOutputSurface; sp<Surface> mOutputSurface;
@ -231,9 +234,22 @@ private:
int32_t mDequeuedOutputBufferCnt; int32_t mDequeuedOutputBufferCnt;
// Map from frame number to JPEG setting of orientation+quality // Map from frame number to JPEG setting of orientation+quality
std::map<int64_t, std::pair<int32_t, int32_t>> mSettingsByFrameNumber; struct HeicSettings {
// Map from timestamp to JPEG setting of orientation+quality int32_t orientation;
std::map<int64_t, std::pair<int32_t, int32_t>> mSettingsByTimestamp; int32_t quality;
int64_t timestamp;
int32_t requestId;
bool shutterNotified;
HeicSettings() : orientation(0), quality(95), timestamp(0),
requestId(-1), shutterNotified(false) {}
HeicSettings(int32_t _orientation, int32_t _quality) :
orientation(_orientation),
quality(_quality), timestamp(0),
requestId(-1), shutterNotified(false) {}
};
std::map<int64_t, HeicSettings> mSettingsByFrameNumber;
// Keep all incoming APP segment Blob buffer pending further processing. // Keep all incoming APP segment Blob buffer pending further processing.
std::vector<int64_t> mInputAppSegmentBuffers; std::vector<int64_t> mInputAppSegmentBuffers;
@ -241,7 +257,7 @@ private:
// Keep all incoming HEIC blob buffer pending further processing. // Keep all incoming HEIC blob buffer pending further processing.
std::vector<CodecOutputBufferInfo> mCodecOutputBuffers; std::vector<CodecOutputBufferInfo> mCodecOutputBuffers;
std::queue<int64_t> mCodecOutputBufferTimestamps; std::queue<int64_t> mCodecOutputBufferFrameNumbers;
size_t mCodecOutputCounter; size_t mCodecOutputCounter;
int32_t mQuality; int32_t mQuality;
@ -253,11 +269,19 @@ private:
// Artificial strictly incremental YUV grid timestamp to make encoder happy. // Artificial strictly incremental YUV grid timestamp to make encoder happy.
int64_t mGridTimestampUs; int64_t mGridTimestampUs;
// In most common use case, entries are accessed in order. // Indexed by frame number. In most common use case, entries are accessed in order.
std::map<int64_t, InputFrame> mPendingInputFrames; std::map<int64_t, InputFrame> mPendingInputFrames;
// Function pointer of libyuv row copy. // Function pointer of libyuv row copy.
void (*mFnCopyRow)(const uint8_t* src, uint8_t* dst, int width); void (*mFnCopyRow)(const uint8_t* src, uint8_t* dst, int width);
// A set of APP_SEGMENT error frame numbers
std::set<int64_t> mExifErrorFrameNumbers;
void flagAnExifErrorFrameNumber(int64_t frameNumber);
// The status id for tracking the active/idle status of this composite stream
int mStatusId;
void markTrackerIdle();
}; };
}; // namespace camera3 }; // namespace camera3

@ -33,6 +33,7 @@
#include "camera/CaptureResult.h" #include "camera/CaptureResult.h"
#include "gui/IGraphicBufferProducer.h" #include "gui/IGraphicBufferProducer.h"
#include "device3/Camera3StreamInterface.h" #include "device3/Camera3StreamInterface.h"
#include "device3/StatusTracker.h"
#include "binder/Status.h" #include "binder/Status.h"
#include "FrameProducer.h" #include "FrameProducer.h"
@ -362,6 +363,10 @@ class CameraDeviceBase : public virtual FrameProducer {
virtual status_t setRotateAndCropAutoBehavior( virtual status_t setRotateAndCropAutoBehavior(
camera_metadata_enum_android_scaler_rotate_and_crop_t rotateAndCropValue) = 0; camera_metadata_enum_android_scaler_rotate_and_crop_t rotateAndCropValue) = 0;
/**
* Get the status tracker of the camera device
*/
virtual wp<camera3::StatusTracker> getStatusTracker() = 0;
}; };
}; // namespace android }; // namespace android

@ -231,6 +231,9 @@ class Camera3Device :
status_t setRotateAndCropAutoBehavior( status_t setRotateAndCropAutoBehavior(
camera_metadata_enum_android_scaler_rotate_and_crop_t rotateAndCropValue); camera_metadata_enum_android_scaler_rotate_and_crop_t rotateAndCropValue);
// Get the status trackeer for the camera device
wp<camera3::StatusTracker> getStatusTracker() { return mStatusTracker; }
/** /**
* Helper functions to map between framework and HIDL values * Helper functions to map between framework and HIDL values
*/ */

@ -269,8 +269,6 @@ status_t Camera3IOStreamBase::returnAnyBufferLocked(
} }
} }
mBufferReturnedSignal.signal();
if (output) { if (output) {
mLastTimestamp = timestamp; mLastTimestamp = timestamp;
} }

@ -55,7 +55,6 @@ class Camera3IOStreamBase :
// number of output buffers that are currently acquired by HAL. This will be // number of output buffers that are currently acquired by HAL. This will be
// Redundant when camera3 streams are no longer bidirectional streams. // Redundant when camera3 streams are no longer bidirectional streams.
size_t mHandoutOutputBufferCount; size_t mHandoutOutputBufferCount;
Condition mBufferReturnedSignal;
uint32_t mFrameCount; uint32_t mFrameCount;
// Last received output buffer's timestamp // Last received output buffer's timestamp
nsecs_t mLastTimestamp; nsecs_t mLastTimestamp;

@ -814,6 +814,8 @@ void Camera3Stream::fireBufferListenersLocked(
info.mError = (buffer.status == CAMERA3_BUFFER_STATUS_ERROR); info.mError = (buffer.status == CAMERA3_BUFFER_STATUS_ERROR);
info.mFrameNumber = frameNumber; info.mFrameNumber = frameNumber;
info.mTimestamp = timestamp; info.mTimestamp = timestamp;
info.mStreamId = getId();
// TODO: rest of fields // TODO: rest of fields
for (it = mBufferListenerList.begin(), end = mBufferListenerList.end(); for (it = mBufferListenerList.begin(), end = mBufferListenerList.end();

@ -29,6 +29,7 @@ class Camera3StreamBufferListener : public virtual RefBase {
public: public:
struct BufferInfo { struct BufferInfo {
int mStreamId;
bool mOutput; // if false then input buffer bool mOutput; // if false then input buffer
Rect mCrop; Rect mCrop;
uint32_t mTransform; uint32_t mTransform;

Loading…
Cancel
Save