Camera: Add DepthCompositeStream

Add the necessary logic to support dynamic depth metadata: a new
CompositeStream base class plus a DepthCompositeStream implementation
that manages the internal blob (JPEG) and depth streams, synchronizes
their buffers and capture results, and composes the dynamic depth
output.

Bug: 109735087
Test: Manual using application,
Camera CTS
Change-Id: Ic4710872dc596bc718270e1c79d4da53fb850875
Author: Emilian Peev
commit 538c90e79d (parent 4c6d2b5c41)

@ -39,6 +39,8 @@ cc_library_shared {
"api1/client2/CaptureSequencer.cpp",
"api1/client2/ZslProcessor.cpp",
"api2/CameraDeviceClient.cpp",
"api2/CompositeStream.cpp",
"api2/DepthCompositeStream.cpp",
"device1/CameraHardwareInterface.cpp",
"device3/Camera3Device.cpp",
"device3/Camera3Stream.cpp",
@ -65,6 +67,9 @@ cc_library_shared {
],
shared_libs: [
"libimage_io",
"libdynamic_depth",
"libxml2",
"libui",
"liblog",
"libutilscallstack",
@ -108,6 +113,8 @@ cc_library_shared {
"system/media/private/camera/include",
"frameworks/native/include/media/openmax",
"frameworks/av/media/ndk",
"external/dynamic_depth/includes",
"external/dynamic_depth/internal",
],
export_include_dirs: ["."],
@ -116,6 +123,7 @@ cc_library_shared {
"-Wall",
"-Wextra",
"-Werror",
"-Wno-ignored-qualifiers",
],
}

@ -62,6 +62,10 @@ void JpegProcessor::onFrameAvailable(const BufferItem& /*item*/) {
}
}
void JpegProcessor::onBufferRequestForFrameNumber(uint64_t /*frameNumber*/, int /*streamId*/) {
// Intentionally left empty
}
void JpegProcessor::onBufferAcquired(const BufferInfo& /*bufferInfo*/) {
// Intentionally left empty
}

@ -25,6 +25,7 @@
#include <gui/CpuConsumer.h>
#include "camera/CameraMetadata.h"
#include "device3/Camera3StreamBufferListener.h"
namespace android {
@ -53,12 +54,16 @@ class JpegProcessor:
// Camera3StreamBufferListener implementation
void onBufferAcquired(const BufferInfo& bufferInfo) override;
void onBufferReleased(const BufferInfo& bufferInfo) override;
void onBufferRequestForFrameNumber(uint64_t frameNumber, int streamId) override;
status_t updateStream(const Parameters &params);
status_t deleteStream();
int getStreamId() const;
void dump(int fd, const Vector<String16>& args) const;
static size_t findJpegSize(uint8_t* jpegBuffer, size_t maxSize);
private:
static const nsecs_t kWaitDuration = 10000000; // 10 ms
wp<CameraDeviceBase> mDevice;
@ -82,7 +87,6 @@ class JpegProcessor:
virtual bool threadLoop();
status_t processNewCapture(bool captureSuccess);
size_t findJpegSize(uint8_t* jpegBuffer, size_t maxSize);
};

@ -33,6 +33,8 @@
#include <camera_metadata_hidden.h>
#include "DepthCompositeStream.h"
// Convenience methods for constructing binder::Status objects for error returns
#define STATUS_ERROR(errorCode, errorString) \
@ -143,6 +145,7 @@ binder::Status CameraDeviceClient::submitRequest(
binder::Status CameraDeviceClient::insertGbpLocked(const sp<IGraphicBufferProducer>& gbp,
SurfaceMap* outSurfaceMap, Vector<int32_t>* outputStreamIds, int32_t *currentStreamId) {
int compositeIdx;
int idx = mStreamMap.indexOfKey(IInterface::asBinder(gbp));
// Trying to submit request with surface that wasn't created
@ -152,6 +155,11 @@ binder::Status CameraDeviceClient::insertGbpLocked(const sp<IGraphicBufferProduc
__FUNCTION__, mCameraIdStr.string());
return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
"Request targets Surface that is not part of current capture session");
} else if ((compositeIdx = mCompositeStreamMap.indexOfKey(IInterface::asBinder(gbp)))
!= NAME_NOT_FOUND) {
mCompositeStreamMap.valueAt(compositeIdx)->insertGbp(outSurfaceMap, outputStreamIds,
currentStreamId);
return binder::Status::ok();
}
const StreamSurfaceId& streamSurfaceId = mStreamMap.valueAt(idx);
@ -489,6 +497,17 @@ binder::Status CameraDeviceClient::endConfigure(int operatingMode,
mCameraIdStr.string(), strerror(-err), err);
ALOGE("%s: %s", __FUNCTION__, msg.string());
res = STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.string());
} else {
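// Device stream configuration succeeded; now configure any composite streams that wrap the newly created internal streams.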
for (size_t i = 0; i < mCompositeStreamMap.size(); ++i) {
err = mCompositeStreamMap.valueAt(i)->configureStream();
if (err != OK) {
String8 msg = String8::format("Camera %s: Error configuring composite "
"streams: %s (%d)", mCameraIdStr.string(), strerror(-err), err);
ALOGE("%s: %s", __FUNCTION__, msg.string());
res = STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.string());
break;
}
}
}
return res;
@ -692,8 +711,35 @@ binder::Status CameraDeviceClient::isSessionConfigurationSupported(
return res;
if (!isStreamInfoValid) {
mapStreamInfo(streamInfo, static_cast<camera3_stream_rotation_t> (it.getRotation()),
physicalCameraId, &streamConfiguration.streams[streamIdx++]);
if (camera3::DepthCompositeStream::isDepthCompositeStream(surface)) {
// We need to take into account that composite streams can have
// additional internal camera streams.
std::vector<OutputStreamInfo> compositeStreams;
ret = camera3::DepthCompositeStream::getCompositeStreamInfo(streamInfo,
mDevice->info(), &compositeStreams);
if (ret != OK) {
String8 msg = String8::format(
"Camera %s: Failed adding depth composite streams: %s (%d)",
mCameraIdStr.string(), strerror(-ret), ret);
ALOGE("%s: %s", __FUNCTION__, msg.string());
return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
}
if (compositeStreams.size() > 1) {
streamCount += compositeStreams.size() - 1;
streamConfiguration.streams.resize(streamCount);
}
for (const auto& compositeStream : compositeStreams) {
mapStreamInfo(compositeStream,
static_cast<camera3_stream_rotation_t> (it.getRotation()),
physicalCameraId, &streamConfiguration.streams[streamIdx++]);
}
} else {
mapStreamInfo(streamInfo,
static_cast<camera3_stream_rotation_t> (it.getRotation()),
physicalCameraId, &streamConfiguration.streams[streamIdx++]);
}
isStreamInfoValid = true;
}
}
@ -743,6 +789,7 @@ binder::Status CameraDeviceClient::deleteStream(int streamId) {
bool isInput = false;
std::vector<sp<IBinder>> surfaces;
ssize_t dIndex = NAME_NOT_FOUND;
ssize_t compositeIndex = NAME_NOT_FOUND;
if (mInputStream.configured && mInputStream.id == streamId) {
isInput = true;
@ -762,6 +809,13 @@ binder::Status CameraDeviceClient::deleteStream(int streamId) {
}
}
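// Check whether the stream id to delete belongs to a composite stream.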
for (size_t i = 0; i < mCompositeStreamMap.size(); ++i) {
if (streamId == mCompositeStreamMap.valueAt(i)->getStreamId()) {
compositeIndex = i;
break;
}
}
if (surfaces.empty() && dIndex == NAME_NOT_FOUND) {
String8 msg = String8::format("Camera %s: Invalid stream ID (%d) specified, no such"
" stream created yet", mCameraIdStr.string(), streamId);
@ -791,6 +845,19 @@ binder::Status CameraDeviceClient::deleteStream(int streamId) {
if (dIndex != NAME_NOT_FOUND) {
mDeferredStreams.removeItemsAt(dIndex);
}
if (compositeIndex != NAME_NOT_FOUND) {
status_t ret;
if ((ret = mCompositeStreamMap.valueAt(compositeIndex)->deleteStream())
!= OK) {
String8 msg = String8::format("Camera %s: Unexpected error %s (%d) when "
"deleting composite stream %d", mCameraIdStr.string(), strerror(-err), err,
streamId);
ALOGE("%s: %s", __FUNCTION__, msg.string());
res = STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.string());
}
mCompositeStreamMap.removeItemsAt(compositeIndex);
}
}
}
@ -870,11 +937,25 @@ binder::Status CameraDeviceClient::createStream(
int streamId = camera3::CAMERA3_STREAM_ID_INVALID;
std::vector<int> surfaceIds;
err = mDevice->createStream(surfaces, deferredConsumer, streamInfo.width,
streamInfo.height, streamInfo.format, streamInfo.dataSpace,
static_cast<camera3_stream_rotation_t>(outputConfiguration.getRotation()),
&streamId, physicalCameraId, &surfaceIds, outputConfiguration.getSurfaceSetID(),
isShared);
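// Depth composite outputs are backed by multiple internal camera streams, so they are created and tracked through the composite stream object rather than directly on the device.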
if (!camera3::DepthCompositeStream::isDepthCompositeStream(surfaces[0])) {
err = mDevice->createStream(surfaces, deferredConsumer, streamInfo.width,
streamInfo.height, streamInfo.format, streamInfo.dataSpace,
static_cast<camera3_stream_rotation_t>(outputConfiguration.getRotation()),
&streamId, physicalCameraId, &surfaceIds, outputConfiguration.getSurfaceSetID(),
isShared);
} else {
sp<CompositeStream> compositeStream = new camera3::DepthCompositeStream(mDevice,
getRemoteCallback());
err = compositeStream->createStream(surfaces, deferredConsumer, streamInfo.width,
streamInfo.height, streamInfo.format,
static_cast<camera3_stream_rotation_t>(outputConfiguration.getRotation()),
&streamId, physicalCameraId, &surfaceIds, outputConfiguration.getSurfaceSetID(),
isShared);
if (err == OK) {
mCompositeStreamMap.add(IInterface::asBinder(surfaces[0]->getIGraphicBufferProducer()),
compositeStream);
}
}
if (err != OK) {
res = STATUS_ERROR_FMT(CameraService::ERROR_INVALID_OPERATION,
@ -1808,7 +1889,14 @@ void CameraDeviceClient::notifyError(int32_t errorCode,
// Thread safe. Don't bother locking.
sp<hardware::camera2::ICameraDeviceCallbacks> remoteCb = getRemoteCallback();
if (remoteCb != 0) {
// Composites can have multiple internal streams. Error notifications coming from such internal
// streams may need to remain within camera service.
bool skipClientNotification = false;
for (size_t i = 0; i < mCompositeStreamMap.size(); i++) {
skipClientNotification |= mCompositeStreamMap.valueAt(i)->onError(errorCode, resultExtras);
}
if ((remoteCb != 0) && (!skipClientNotification)) {
remoteCb->onDeviceError(errorCode, resultExtras);
}
}
@ -1901,6 +1989,10 @@ void CameraDeviceClient::onResultAvailable(const CaptureResult& result) {
remoteCb->onResultReceived(result.mMetadata, result.mResultExtras,
result.mPhysicalMetadatas);
}
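// Give composite streams a chance to consume the capture result as well.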
for (size_t i = 0; i < mCompositeStreamMap.size(); i++) {
mCompositeStreamMap.valueAt(i)->onResultAvailable(result);
}
}
binder::Status CameraDeviceClient::checkPidStatus(const char* checkLocation) {

@ -26,8 +26,10 @@
#include "CameraService.h"
#include "common/FrameProcessorBase.h"
#include "common/Camera2ClientBase.h"
#include "CompositeStream.h"
using android::camera3::OutputStreamInfo;
using android::camera3::CompositeStream;
namespace android {
@ -314,6 +316,8 @@ private:
// stream ID -> outputStreamInfo mapping
std::unordered_map<int32_t, OutputStreamInfo> mStreamInfoMap;
KeyedVector<sp<IBinder>, sp<CompositeStream>> mCompositeStreamMap;
static const int32_t MAX_SURFACES_PER_STREAM = 4;
sp<CameraProviderManager> mProviderManager;
};

@ -0,0 +1,203 @@
/*
* Copyright (C) 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#define LOG_TAG "Camera3-CompositeStream"
#define ATRACE_TAG ATRACE_TAG_CAMERA
//#define LOG_NDEBUG 0
#include <utils/Log.h>
#include <utils/Trace.h>
#include "common/CameraDeviceBase.h"
#include "CameraDeviceClient.h"
#include "CompositeStream.h"
namespace android {
namespace camera3 {
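// Cache the partial result count from the static metadata; onResultAvailable() uses it to detect when a capture result is complete.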
CompositeStream::CompositeStream(wp<CameraDeviceBase> device,
wp<hardware::camera2::ICameraDeviceCallbacks> cb) :
mDevice(device),
mRemoteCallback(cb),
mNumPartialResults(1),
mErrorState(false) {
sp<CameraDeviceBase> cameraDevice = device.promote();
if (cameraDevice.get() != nullptr) {
CameraMetadata staticInfo = cameraDevice->info();
camera_metadata_entry_t entry = staticInfo.find(ANDROID_REQUEST_PARTIAL_RESULT_COUNT);
if (entry.count > 0) {
mNumPartialResults = entry.data.i32[0];
}
}
}
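// Validate composite stream constraints (no deferred consumers, surface groups or shared surfaces) before creating the internal streams.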
status_t CompositeStream::createStream(const std::vector<sp<Surface>>& consumers,
bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
camera3_stream_rotation_t rotation, int * id, const String8& physicalCameraId,
std::vector<int> * surfaceIds, int streamSetId, bool isShared) {
if (hasDeferredConsumer) {
ALOGE("%s: Deferred consumers not supported in case of composite streams!",
__FUNCTION__);
return BAD_VALUE;
}
if (streamSetId != camera3::CAMERA3_STREAM_ID_INVALID) {
ALOGE("%s: Surface groups not supported in case of composite streams!",
__FUNCTION__);
return BAD_VALUE;
}
if (isShared) {
ALOGE("%s: Shared surfaces not supported in case of composite streams!",
__FUNCTION__);
return BAD_VALUE;
}
return createInternalStreams(consumers, hasDeferredConsumer, width, height, format, rotation, id,
physicalCameraId, surfaceIds, streamSetId, isShared);
}
status_t CompositeStream::deleteStream() {
{
Mutex::Autolock l(mMutex);
mPendingCaptureResults.clear();
mCaptureResults.clear();
mFrameNumberMap.clear();
mErrorFrameNumbers.clear();
}
return deleteInternalStreams();
}
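// A new request targets this composite stream; start tracking its (still partial) capture result.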
void CompositeStream::onBufferRequestForFrameNumber(uint64_t frameNumber, int streamId) {
Mutex::Autolock l(mMutex);
if (!mErrorState && (streamId == getStreamId())) {
mPendingCaptureResults.emplace(frameNumber, CameraMetadata());
}
}
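// The HAL returned a buffer for this stream; record the frame number to timestamp mapping and wake the processing thread.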
void CompositeStream::onBufferReleased(const BufferInfo& bufferInfo) {
Mutex::Autolock l(mMutex);
if (!mErrorState && !bufferInfo.mError) {
mFrameNumberMap.emplace(bufferInfo.mFrameNumber, bufferInfo.mTimestamp);
mInputReadyCondition.signal();
}
}
void CompositeStream::eraseResult(int64_t frameNumber) {
Mutex::Autolock l(mMutex);
auto it = mPendingCaptureResults.find(frameNumber);
if (it == mPendingCaptureResults.end()) {
return;
}
mPendingCaptureResults.erase(it);
}
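// Accumulate partial metadata; once the last partial result arrives, key the completed result by its sensor timestamp so it can be matched against incoming buffers.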
void CompositeStream::onResultAvailable(const CaptureResult& result) {
bool resultError = false;
{
Mutex::Autolock l(mMutex);
uint64_t frameNumber = result.mResultExtras.frameNumber;
bool resultReady = false;
auto it = mPendingCaptureResults.find(frameNumber);
if (it != mPendingCaptureResults.end()) {
it->second.append(result.mMetadata);
if (result.mResultExtras.partialResultCount >= mNumPartialResults) {
auto entry = it->second.find(ANDROID_SENSOR_TIMESTAMP);
if (entry.count == 1) {
auto ts = entry.data.i64[0];
mCaptureResults.emplace(ts, std::make_tuple(frameNumber, it->second));
resultReady = true;
} else {
ALOGE("%s: Timestamp metadata entry missing for frameNumber: %" PRIu64,
__FUNCTION__, frameNumber);
resultError = true;
}
mPendingCaptureResults.erase(it);
}
}
if (resultReady) {
mInputReadyCondition.signal();
}
}
if (resultError) {
onResultError(result.mResultExtras);
}
}
void CompositeStream::flagAnErrorFrameNumber(int64_t frameNumber) {
Mutex::Autolock l(mMutex);
mErrorFrameNumbers.emplace(frameNumber);
mInputReadyCondition.signal();
}
status_t CompositeStream::registerCompositeStreamListener(int32_t streamId) {
sp<CameraDeviceBase> device = mDevice.promote();
if (device.get() == nullptr) {
return NO_INIT;
}
auto ret = device->addBufferListenerForStream(streamId, this);
if (ret != OK) {
ALOGE("%s: Failed to register composite stream listener!", __FUNCTION__);
}
return ret;
}
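// Map device-level error notifications to composite stream handling; the return value indicates whether the client notification should be suppressed.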
bool CompositeStream::onError(int32_t errorCode, const CaptureResultExtras& resultExtras) {
auto ret = false;
switch (errorCode) {
case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_RESULT:
onResultError(resultExtras);
break;
case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_BUFFER:
ret = onStreamBufferError(resultExtras);
break;
case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_REQUEST:
// Invalid request, this shouldn't affect composite streams.
break;
default:
ALOGE("%s: Unrecoverable error: %d detected!", __FUNCTION__, errorCode);
Mutex::Autolock l(mMutex);
mErrorState = true;
break;
}
return ret;
}
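// Report a buffer error on the composite output stream back to the client, identified by frame number and stream id.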
void CompositeStream::notifyError(int64_t frameNumber) {
sp<hardware::camera2::ICameraDeviceCallbacks> remoteCb =
mRemoteCallback.promote();
if ((frameNumber >= 0) && (remoteCb.get() != nullptr)) {
CaptureResultExtras extras;
extras.errorStreamId = getStreamId();
extras.frameNumber = frameNumber;
remoteCb->onDeviceError(
hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_BUFFER,
extras);
}
}
}; // namespace camera3
}; // namespace android

@ -0,0 +1,120 @@
/*
* Copyright (C) 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef ANDROID_SERVERS_CAMERA_CAMERA3_COMPOSITE_STREAM_H
#define ANDROID_SERVERS_CAMERA_CAMERA3_COMPOSITE_STREAM_H
#include <set>
#include <unordered_map>
#include <android/hardware/camera2/ICameraDeviceCallbacks.h>
#include <camera/CameraMetadata.h>
#include <camera/camera2/OutputConfiguration.h>
#include "common/CameraDeviceBase.h"
#include "device3/Camera3StreamInterface.h"
namespace android {
class CameraDeviceClient;
class CameraMetadata;
class Surface;
namespace camera3 {
class CompositeStream : public camera3::Camera3StreamBufferListener {
public:
CompositeStream(wp<CameraDeviceBase> device, wp<hardware::camera2::ICameraDeviceCallbacks> cb);
virtual ~CompositeStream() {}
status_t createStream(const std::vector<sp<Surface>>& consumers,
bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
camera3_stream_rotation_t rotation, int *id, const String8& physicalCameraId,
std::vector<int> *surfaceIds, int streamSetId, bool isShared);
status_t deleteStream();
// Create and register all internal camera streams.
virtual status_t createInternalStreams(const std::vector<sp<Surface>>& consumers,
bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
camera3_stream_rotation_t rotation, int *id, const String8& physicalCameraId,
std::vector<int> *surfaceIds, int streamSetId, bool isShared) = 0;
// Release all internal streams and corresponding resources.
virtual status_t deleteInternalStreams() = 0;
// Called once the camera stream configuration has completed.
virtual status_t configureStream() = 0;
// Insert the internal composite stream IDs into the user capture request.
virtual status_t insertGbp(SurfaceMap* /*out*/outSurfaceMap,
Vector<int32_t>* /*out*/outputStreamIds, int32_t* /*out*/currentStreamId) = 0;
// Return composite stream id.
virtual int getStreamId() = 0;
void onResultAvailable(const CaptureResult& result);
bool onError(int32_t errorCode, const CaptureResultExtras& resultExtras);
// Camera3StreamBufferListener implementation
void onBufferAcquired(const BufferInfo& /*bufferInfo*/) override { /*Empty for now */ }
void onBufferReleased(const BufferInfo& bufferInfo) override;
void onBufferRequestForFrameNumber(uint64_t frameNumber, int streamId) override;
protected:
status_t registerCompositeStreamListener(int32_t streamId);
void eraseResult(int64_t frameNumber);
void flagAnErrorFrameNumber(int64_t frameNumber);
void notifyError(int64_t frameNumber);
// Subclasses should check for buffer errors from internal streams and return 'true' if
// the error notification should remain within camera service.
virtual bool onStreamBufferError(const CaptureResultExtras& resultExtras) = 0;
// Subclasses can decide how to handle result errors depending on whether or not the
// internal processing needs result data.
virtual void onResultError(const CaptureResultExtras& resultExtras) = 0;
// Device and/or service is in unrecoverable error state.
// Composite streams should behave accordingly.
void enableErrorState();
wp<CameraDeviceBase> mDevice;
wp<hardware::camera2::ICameraDeviceCallbacks> mRemoteCallback;
mutable Mutex mMutex;
Condition mInputReadyCondition;
int32_t mNumPartialResults;
bool mErrorState;
// Maps frame number to the partially assembled capture result of a pending request.
std::unordered_map<uint64_t, CameraMetadata> mPendingCaptureResults;
// Maps sensor timestamp to the (frame number, capture result) pair of a completed request.
std::unordered_map<int64_t, std::tuple<int64_t, CameraMetadata>> mCaptureResults;
// Maps frame number to sensor timestamp.
std::unordered_map<int64_t, int64_t> mFrameNumberMap;
// Frame numbers of buffers/results for which errors were detected during processing.
std::set<int64_t> mErrorFrameNumbers;
};
}; //namespace camera3
}; //namespace android
#endif

@ -0,0 +1,148 @@
/*
* Copyright (C) 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef ANDROID_SERVERS_CAMERA_CAMERA3_DEPTH_COMPOSITE_STREAM_H
#define ANDROID_SERVERS_CAMERA_CAMERA3_DEPTH_COMPOSITE_STREAM_H
#include <dynamic_depth/imaging_model.h>
#include <dynamic_depth/depth_map.h>
#include <gui/IProducerListener.h>
#include <gui/CpuConsumer.h>
#include "CompositeStream.h"
using dynamic_depth::DepthMap;
using dynamic_depth::Item;
using dynamic_depth::ImagingModel;
namespace android {
class CameraDeviceClient;
class CameraMetadata;
class Surface;
namespace camera3 {
class DepthCompositeStream : public CompositeStream, public Thread,
public CpuConsumer::FrameAvailableListener {
public:
DepthCompositeStream(wp<CameraDeviceBase> device,
wp<hardware::camera2::ICameraDeviceCallbacks> cb);
~DepthCompositeStream() override;
static bool isDepthCompositeStream(const sp<Surface> &surface);
// CompositeStream overrides
status_t createInternalStreams(const std::vector<sp<Surface>>& consumers,
bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
camera3_stream_rotation_t rotation, int *id, const String8& physicalCameraId,
std::vector<int> *surfaceIds, int streamSetId, bool isShared) override;
status_t deleteInternalStreams() override;
status_t configureStream() override;
status_t insertGbp(SurfaceMap* /*out*/outSurfaceMap, Vector<int32_t>* /*out*/outputStreamIds,
int32_t* /*out*/currentStreamId) override;
int getStreamId() override { return mBlobStreamId; }
// CpuConsumer listener implementation
void onFrameAvailable(const BufferItem& item) override;
// Return stream information about the internal camera streams
static status_t getCompositeStreamInfo(const OutputStreamInfo &streamInfo,
const CameraMetadata& ch, std::vector<OutputStreamInfo>* compositeOutput /*out*/);
protected:
bool threadLoop() override;
bool onStreamBufferError(const CaptureResultExtras& resultExtras) override;
void onResultError(const CaptureResultExtras& resultExtras) override;
private:
struct InputFrame {
CpuConsumer::LockedBuffer depthBuffer;
CpuConsumer::LockedBuffer jpegBuffer;
CameraMetadata result;
bool error;
bool errorNotified;
int64_t frameNumber;
InputFrame() : error(false), errorNotified(false), frameNumber(-1) { }
};
// Helper methods
static void getSupportedDepthSizes(const CameraMetadata& ch,
std::vector<std::tuple<size_t, size_t>>* depthSizes /*out*/);
static status_t getMatchingDepthSize(size_t width, size_t height,
const std::vector<std::tuple<size_t, size_t>>& supporedDepthSizes,
size_t *depthWidth /*out*/, size_t *depthHeight /*out*/);
// Dynamic depth processing
status_t encodeGrayscaleJpeg(size_t width, size_t height, uint8_t *in, void *out,
const size_t maxOutSize, uint8_t jpegQuality, size_t &actualSize);
std::unique_ptr<DepthMap> processDepthMapFrame(const CpuConsumer::LockedBuffer &depthMapBuffer,
size_t maxJpegSize, uint8_t jpegQuality,
std::vector<std::unique_ptr<Item>>* items /*out*/);
std::unique_ptr<ImagingModel> getImagingModel();
status_t processInputFrame(const InputFrame &inputFrame);
// Buffer/Results handling
void compilePendingInputLocked();
void releaseInputFrameLocked(InputFrame *inputFrame /*out*/);
void releaseInputFramesLocked(int64_t currentTs);
// Find the first complete and valid frame with the smallest timestamp.
bool getNextReadyInputLocked(int64_t *currentTs /*inout*/);
// Find the failing frame with the smallest timestamp and return its frame number.
int64_t getNextFailingInputLocked(int64_t *currentTs /*inout*/);
static const nsecs_t kWaitDuration = 10000000; // 10 ms
static const auto kDepthMapPixelFormat = HAL_PIXEL_FORMAT_Y16;
static const auto kDepthMapDataSpace = HAL_DATASPACE_DEPTH;
static const auto kJpegDataSpace = HAL_DATASPACE_V0_JFIF;
struct ProducerListener : public BnProducerListener {
// ProducerListener implementation
void onBufferReleased() override { /*No impl. for now*/ };
};
int mBlobStreamId, mBlobSurfaceId, mDepthStreamId, mDepthSurfaceId;
size_t mBlobWidth, mBlobHeight;
sp<CpuConsumer> mBlobConsumer, mDepthConsumer;
bool mDepthBufferAcquired, mBlobBufferAcquired;
sp<Surface> mDepthSurface, mBlobSurface, mOutputSurface;
sp<ProducerListener> mProducerListener;
ssize_t mMaxJpegSize;
std::vector<std::tuple<size_t, size_t>> mSupportedDepthSizes;
std::vector<float> mInstrinsicCalibration, mLensDistortion;
bool mIsLogicalCamera;
// Keep all incoming Depth buffer timestamps pending further processing.
std::vector<int64_t> mInputDepthBuffers;
// Keep all incoming Jpeg/Blob buffer timestamps pending further processing.
std::vector<int64_t> mInputJpegBuffers;
// Map of all input frames pending further processing.
std::unordered_map<int64_t, InputFrame> mPendingInputFrames;
};
}; //namespace camera3
}; //namespace android
#endif

@ -58,6 +58,8 @@ const std::chrono::system_clock::duration kCameraKeepAliveDelay = 3s;
} // anonymous namespace
const float CameraProviderManager::kDepthARTolerance = .1f;
CameraProviderManager::HardwareServiceInteractionProxy
CameraProviderManager::sHardwareServiceInteractionProxy{};
@ -576,10 +578,7 @@ void CameraProviderManager::ProviderInfo::DeviceInfo3::getSupportedDynamicDepthS
// The dynamic depth spec. does not mention how close the AR ratio should be.
// Try using something appropriate.
float ARTolerance = .01f;
//TODO: Remove this before merging! This is for testing purposes only
ARTolerance = 10.f;
float ARTolerance = kDepthARTolerance;
for (const auto& blobSize : blobSizes) {
float jpegAR = static_cast<float> (std::get<0>(blobSize)) /

@ -265,6 +265,8 @@ public:
bool isLogicalCamera(const std::string& id, std::vector<std::string>* physicalCameraIds);
bool isHiddenPhysicalCamera(const std::string& cameraId);
static const float kDepthARTolerance;
private:
// All private members, unless otherwise noted, expect mInterfaceMutex to be locked before use
mutable std::mutex mInterfaceMutex;

@ -3119,10 +3119,12 @@ void Camera3Device::returnOutputBuffers(
status_t res = OK;
if (it != outputSurfaces.end()) {
res = stream->returnBuffer(
outputBuffers[i], timestamp, timestampIncreasing, it->second);
outputBuffers[i], timestamp, timestampIncreasing, it->second,
inResultExtras.frameNumber);
} else {
res = stream->returnBuffer(
outputBuffers[i], timestamp, timestampIncreasing);
outputBuffers[i], timestamp, timestampIncreasing, std::vector<size_t> (),
inResultExtras.frameNumber);
}
// Note: stream may be deallocated at this point, if this buffer was
@ -3139,7 +3141,8 @@ void Camera3Device::returnOutputBuffers(
// cancel the buffer
camera3_stream_buffer_t sb = outputBuffers[i];
sb.status = CAMERA3_BUFFER_STATUS_ERROR;
stream->returnBuffer(sb, /*timestamp*/0, timestampIncreasing);
stream->returnBuffer(sb, /*timestamp*/0, timestampIncreasing, std::vector<size_t> (),
inResultExtras.frameNumber);
// notify client buffer error
sp<NotificationListener> listener;
@ -3279,7 +3282,8 @@ void Camera3Device::flushInflightRequests() {
streamBuffer.stream = halStream;
switch (halStream->stream_type) {
case CAMERA3_STREAM_OUTPUT:
res = stream->returnBuffer(streamBuffer, /*timestamp*/ 0);
res = stream->returnBuffer(streamBuffer, /*timestamp*/ 0,
/*timestampIncreasing*/true, std::vector<size_t> (), frameNumber);
if (res != OK) {
ALOGE("%s: Can't return output buffer for frame %d to"
" stream %d: %s (%d)", __FUNCTION__,
@ -5469,6 +5473,8 @@ status_t Camera3Device::RequestThread::prepareHalRequests() {
return TIMED_OUT;
}
}
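// Let registered stream buffer listeners (e.g. composite streams) know which frame number this buffer request belongs to.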
outputStream->fireBufferRequestForFrameNumber(
captureRequest->mResultExtras.frameNumber);
String8 physicalCameraId = outputStream->getPhysicalCameraId();
@ -5692,7 +5698,9 @@ void Camera3Device::RequestThread::cleanUpFailedRequests(bool sendRequestError)
outputBuffers->editItemAt(i).acquire_fence = -1;
}
outputBuffers->editItemAt(i).status = CAMERA3_BUFFER_STATUS_ERROR;
captureRequest->mOutputStreams.editItemAt(i)->returnBuffer((*outputBuffers)[i], 0);
captureRequest->mOutputStreams.editItemAt(i)->returnBuffer((*outputBuffers)[i], 0,
/*timestampIncreasing*/true, std::vector<size_t> (),
captureRequest->mResultExtras.frameNumber);
}
if (sendRequestError) {

@ -656,7 +656,7 @@ void Camera3Stream::removeOutstandingBuffer(const camera3_stream_buffer &buffer)
status_t Camera3Stream::returnBuffer(const camera3_stream_buffer &buffer,
nsecs_t timestamp, bool timestampIncreasing,
const std::vector<size_t>& surface_ids) {
const std::vector<size_t>& surface_ids, uint64_t frameNumber) {
ATRACE_CALL();
Mutex::Autolock l(mLock);
@ -687,7 +687,7 @@ status_t Camera3Stream::returnBuffer(const camera3_stream_buffer &buffer,
*/
status_t res = returnBufferLocked(b, timestamp, surface_ids);
if (res == OK) {
fireBufferListenersLocked(b, /*acquired*/false, /*output*/true);
fireBufferListenersLocked(b, /*acquired*/false, /*output*/true, timestamp, frameNumber);
}
// Even if returning the buffer failed, we still want to signal whoever is waiting for the
@ -763,8 +763,21 @@ status_t Camera3Stream::getInputBufferProducer(sp<IGraphicBufferProducer> *produ
return getInputBufferProducerLocked(producer);
}
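// Fan the frame number notification out to every registered buffer listener on this stream.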
void Camera3Stream::fireBufferRequestForFrameNumber(uint64_t frameNumber) {
ATRACE_CALL();
Mutex::Autolock l(mLock);
for (auto &it : mBufferListenerList) {
sp<Camera3StreamBufferListener> listener = it.promote();
if (listener.get() != nullptr) {
listener->onBufferRequestForFrameNumber(frameNumber, getId());
}
}
}
void Camera3Stream::fireBufferListenersLocked(
const camera3_stream_buffer& buffer, bool acquired, bool output) {
const camera3_stream_buffer& buffer, bool acquired, bool output, nsecs_t timestamp,
uint64_t frameNumber) {
List<wp<Camera3StreamBufferListener> >::iterator it, end;
// TODO: finish implementing
@ -773,6 +786,8 @@ void Camera3Stream::fireBufferListenersLocked(
Camera3StreamBufferListener::BufferInfo();
info.mOutput = output;
info.mError = (buffer.status == CAMERA3_BUFFER_STATUS_ERROR);
info.mFrameNumber = frameNumber;
info.mTimestamp = timestamp;
// TODO: rest of fields
for (it = mBufferListenerList.begin(), end = mBufferListenerList.end();

@ -332,7 +332,8 @@ class Camera3Stream :
*/
status_t returnBuffer(const camera3_stream_buffer &buffer,
nsecs_t timestamp, bool timestampIncreasing,
const std::vector<size_t>& surface_ids = std::vector<size_t>());
const std::vector<size_t>& surface_ids = std::vector<size_t>(),
uint64_t frameNumber = 0);
/**
* Fill in the camera3_stream_buffer with the next valid buffer for this
@ -430,6 +431,11 @@ class Camera3Stream :
*/
status_t restoreConfiguredState();
/**
* Notify buffer stream listeners about an incoming request with a particular frame number.
*/
void fireBufferRequestForFrameNumber(uint64_t frameNumber) override;
protected:
const int mId;
/**
@ -538,7 +544,7 @@ class Camera3Stream :
static const nsecs_t kWaitForBufferDuration = 3000000000LL; // 3000 ms
void fireBufferListenersLocked(const camera3_stream_buffer& buffer,
bool acquired, bool output);
bool acquired, bool output, nsecs_t timestamp = 0, uint64_t frameNumber = 0);
List<wp<Camera3StreamBufferListener> > mBufferListenerList;
status_t cancelPrepareLocked();

@ -41,6 +41,8 @@ public:
virtual void onBufferAcquired(const BufferInfo& bufferInfo) = 0;
// Buffer was released by the HAL
virtual void onBufferReleased(const BufferInfo& bufferInfo) = 0;
// Notify about incoming buffer request frame number
virtual void onBufferRequestForFrameNumber(uint64_t frameNumber, int streamId) = 0;
};
}; //namespace camera3

@ -259,7 +259,8 @@ class Camera3StreamInterface : public virtual RefBase {
*/
virtual status_t returnBuffer(const camera3_stream_buffer &buffer,
nsecs_t timestamp, bool timestampIncreasing = true,
const std::vector<size_t>& surface_ids = std::vector<size_t>()) = 0;
const std::vector<size_t>& surface_ids = std::vector<size_t>(),
uint64_t frameNumber = 0) = 0;
/**
* Fill in the camera3_stream_buffer with the next valid buffer for this
@ -341,6 +342,11 @@ class Camera3StreamInterface : public virtual RefBase {
* Camera3Stream.
*/
virtual void setBufferFreedListener(wp<Camera3StreamBufferFreedListener> listener) = 0;
/**
* Notify buffer stream listeners about an incoming request with a particular frame number.
*/
virtual void fireBufferRequestForFrameNumber(uint64_t frameNumber) = 0;
};
} // namespace camera3
