Merge changes from topic "offlineProc-framework"

* changes:
  Camera: Various offline processing fixes
  Camera: some patches for offline processing
Authored by TreeHugger Robot 4 years ago; committed by Android (Google) Code Review
commit b6a1bc586c

@@ -66,7 +66,7 @@ FrameProcessor::~FrameProcessor() {
 }
 bool FrameProcessor::processSingleFrame(CaptureResult &frame,
-        const sp<CameraDeviceBase> &device) {
+        const sp<FrameProducer> &device) {
     sp<Camera2Client> client = mClient.promote();
     if (!client.get()) {

@@ -24,6 +24,7 @@
 #include <utils/List.h>
 #include <camera/CameraMetadata.h>
+#include "common/CameraDeviceBase.h"
 #include "common/FrameProcessorBase.h"
 struct camera_frame_metadata;
@@ -54,7 +55,7 @@ class FrameProcessor : public FrameProcessorBase {
     void processNewFrames(const sp<Camera2Client> &client);
     virtual bool processSingleFrame(CaptureResult &frame,
-            const sp<CameraDeviceBase> &device);
+            const sp<FrameProducer> &device);
     status_t processFaceDetect(const CameraMetadata &frame,
             const sp<Camera2Client> &client);

@@ -117,8 +117,8 @@ status_t CameraDeviceClient::initializeImpl(TProviderPtr providerPtr, const Stri
     threadName = String8::format("CDU-%s-FrameProc", mCameraIdStr.string());
     mFrameProcessor->run(threadName.string());
-    mFrameProcessor->registerListener(FRAME_PROCESSOR_LISTENER_MIN_ID,
-            FRAME_PROCESSOR_LISTENER_MAX_ID,
+    mFrameProcessor->registerListener(camera2::FrameProcessorBase::FRAME_PROCESSOR_LISTENER_MIN_ID,
+            camera2::FrameProcessorBase::FRAME_PROCESSOR_LISTENER_MAX_ID,
             /*listener*/this,
             /*sendPartials*/true);
@@ -526,7 +526,8 @@ binder::Status CameraDeviceClient::endConfigure(int operatingMode,
             // streams are also supported.
             std::vector<int> internalStreams;
             mCompositeStreamMap.valueAt(i)->insertCompositeStreamIds(&internalStreams);
-            std::remove_if(offlineStreamIds->begin(), offlineStreamIds->end(),
+            offlineStreamIds->erase(
+                    std::remove_if(offlineStreamIds->begin(), offlineStreamIds->end(),
                     [&internalStreams] (int streamId) {
                         auto it = std::find(internalStreams.begin(), internalStreams.end(),
                                 streamId);
@@ -535,8 +536,7 @@ binder::Status CameraDeviceClient::endConfigure(int operatingMode,
                             return true;
                         }
-                        return false;
-                    });
+                        return false;}), offlineStreamIds->end());
             if (internalStreams.empty()) {
                 offlineStreamIds->push_back(mCompositeStreamMap.valueAt(i)->getStreamId());
             }
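
The two endConfigure() hunks above fix a classic erase-remove mistake: std::remove_if only shifts the surviving elements to the front of the range and returns the new logical end, it never shrinks the container, so the filtered internal stream IDs were in fact never removed. Wrapping the call in offlineStreamIds->erase(..., offlineStreamIds->end()) completes the idiom. A minimal standalone sketch of the same pattern (the predicate and values here are illustrative, not from the patch):

    #include <algorithm>
    #include <cstdio>
    #include <vector>

    int main() {
        std::vector<int> ids = {1, 2, 3, 4, 5};

        // std::remove_if alone would leave ids.size() unchanged and the tail
        // holding unspecified leftover values. Pairing it with erase() actually
        // drops the matching elements (here: the even IDs).
        ids.erase(std::remove_if(ids.begin(), ids.end(),
                                 [](int id) { return id % 2 == 0; }),
                  ids.end());

        for (int id : ids) {
            printf("%d ", id);  // prints: 1 3 5
        }
        printf("\n");
        return 0;
    }
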
@@ -818,7 +818,7 @@ binder::Status CameraDeviceClient::isSessionConfigurationSupported(
     }
     hardware::camera::device::V3_4::StreamConfiguration streamConfiguration;
     bool earlyExit = false;
-    metadataGetter getMetadata = [this](const String8 &id) {return mDevice->info(id);};
+    metadataGetter getMetadata = [this](const String8 &id) {return mDevice->infoPhysical(id);};
     std::vector<std::string> physicalCameraIds;
     mProviderManager->isLogicalCamera(mCameraIdStr.string(), &physicalCameraIds);
     res = convertToHALStreamCombination(sessionConfiguration, mCameraIdStr,
@@ -1010,7 +1010,7 @@ binder::Status CameraDeviceClient::createStream(
         sp<Surface> surface;
         res = createSurfaceFromGbp(streamInfo, isStreamInfoValid, surface, bufferProducer,
-                mCameraIdStr, mDevice->info(physicalCameraId));
+                mCameraIdStr, mDevice->infoPhysical(physicalCameraId));
         if (!res.isOk())
             return res;
@@ -1314,7 +1314,7 @@ binder::Status CameraDeviceClient::updateOutputConfiguration(int streamId,
         OutputStreamInfo outInfo;
         sp<Surface> surface;
         res = createSurfaceFromGbp(outInfo, /*isStreamInfoValid*/ false, surface,
-                newOutputsMap.valueAt(i), mCameraIdStr, mDevice->info(physicalCameraId));
+                newOutputsMap.valueAt(i), mCameraIdStr, mDevice->infoPhysical(physicalCameraId));
         if (!res.isOk())
             return res;
@@ -1897,7 +1897,7 @@ binder::Status CameraDeviceClient::finalizeOutputConfigurations(int32_t streamId
         sp<Surface> surface;
         res = createSurfaceFromGbp(mStreamInfoMap[streamId], true /*isStreamInfoValid*/,
-                surface, bufferProducer, mCameraIdStr, mDevice->info(physicalId));
+                surface, bufferProducer, mCameraIdStr, mDevice->infoPhysical(physicalId));
         if (!res.isOk())
             return res;
@@ -2005,7 +2005,8 @@ binder::Status CameraDeviceClient::switchToOffline(
         return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
     }
-    std::vector<int32_t> offlineStreamIds(offlineOutputIds.size());
+    std::vector<int32_t> offlineStreamIds;
+    offlineStreamIds.reserve(offlineOutputIds.size());
     KeyedVector<sp<IBinder>, sp<CompositeStream>> offlineCompositeStreamMap;
     for (const auto& streamId : offlineOutputIds) {
         ssize_t index = mConfiguredOutputs.indexOfKey(streamId);
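
The switchToOffline() hunk above corrects another subtle container bug: constructing std::vector<int32_t> offlineStreamIds(offlineOutputIds.size()) creates that many zero-valued elements up front, so every subsequent push_back appended real stream IDs after a block of bogus zero entries. Declaring the vector empty and calling reserve() keeps the capacity hint without the phantom elements. A small illustration (values are made up):

    #include <cstdint>
    #include <cstdio>
    #include <vector>

    int main() {
        // Sized constructor: three zero elements exist before any push_back.
        std::vector<int32_t> sized(3);
        sized.push_back(42);            // contents: {0, 0, 0, 42}

        // reserve(): capacity for three, but the vector stays empty until filled.
        std::vector<int32_t> reserved;
        reserved.reserve(3);
        reserved.push_back(42);         // contents: {42}

        printf("sized has %zu elements, reserved has %zu\n",
               sized.size(), reserved.size());  // 4 vs. 1
        return 0;
    }
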
@@ -2206,8 +2207,8 @@ void CameraDeviceClient::detachDevice() {
     ALOGV("Camera %s: Stopping processors", mCameraIdStr.string());
-    mFrameProcessor->removeListener(FRAME_PROCESSOR_LISTENER_MIN_ID,
-            FRAME_PROCESSOR_LISTENER_MAX_ID,
+    mFrameProcessor->removeListener(camera2::FrameProcessorBase::FRAME_PROCESSOR_LISTENER_MIN_ID,
+            camera2::FrameProcessorBase::FRAME_PROCESSOR_LISTENER_MAX_ID,
             /*listener*/this);
     mFrameProcessor->requestExit();
     ALOGV("Camera %s: Waiting for threads", mCameraIdStr.string());

@@ -258,8 +258,6 @@ private:
     /** Preview callback related members */
     sp<camera2::FrameProcessorBase> mFrameProcessor;
-    static const int32_t FRAME_PROCESSOR_LISTENER_MIN_ID = 0;
-    static const int32_t FRAME_PROCESSOR_LISTENER_MAX_ID = 0x7fffffffL;
     std::vector<int32_t> mSupportedPhysicalRequestKeys;

@@ -41,6 +41,16 @@ status_t CameraOfflineSessionClient::initialize(sp<CameraProviderManager>, const
         return NO_INIT;
     }
+    String8 threadName;
+    mFrameProcessor = new camera2::FrameProcessorBase(mOfflineSession);
+    threadName = String8::format("Offline-%s-FrameProc", mCameraIdStr.string());
+    mFrameProcessor->run(threadName.string());
+    mFrameProcessor->registerListener(camera2::FrameProcessorBase::FRAME_PROCESSOR_LISTENER_MIN_ID,
+            camera2::FrameProcessorBase::FRAME_PROCESSOR_LISTENER_MAX_ID,
+            /*listener*/this,
+            /*sendPartials*/true);
     wp<NotificationListener> weakThis(this);
     res = mOfflineSession->initialize(weakThis);
     if (res != OK) {
@@ -62,7 +72,7 @@ status_t CameraOfflineSessionClient::dump(int fd, const Vector<String16>& args)
     return BasicClient::dump(fd, args);
 }
-status_t CameraOfflineSessionClient::dumpClient(int fd, const Vector<String16>& /*args*/) {
+status_t CameraOfflineSessionClient::dumpClient(int fd, const Vector<String16>& args) {
     String8 result;
     result = " Offline session dump:\n";
@@ -74,6 +84,8 @@ status_t CameraOfflineSessionClient::dumpClient(int fd, const Vector<String16>&
         return NO_ERROR;
     }
+    mFrameProcessor->dump(fd, args);
     auto res = mOfflineSession->dump(fd);
     if (res != OK) {
         result = String8::format(" Error dumping offline session: %s (%d)",
@@ -108,6 +120,12 @@ binder::Status CameraOfflineSessionClient::disconnect() {
         remote->unlinkToDeath(sCameraService);
     }
+    mFrameProcessor->removeListener(camera2::FrameProcessorBase::FRAME_PROCESSOR_LISTENER_MIN_ID,
+            camera2::FrameProcessorBase::FRAME_PROCESSOR_LISTENER_MAX_ID,
+            /*listener*/this);
+    mFrameProcessor->requestExit();
+    mFrameProcessor->join();
     finishCameraOps();
     ALOGI("%s: Disconnected client for offline camera %s for PID %d", __FUNCTION__,
             mCameraIdStr.string(), mClientPid);
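
The disconnect() hunk tears the new processing thread down in the order that keeps callbacks safe: unregister this client as a listener, ask the loop to exit, then join so the thread cannot touch the client after disconnect() returns. The same cooperative-shutdown pattern in plain C++ (a generic sketch, not the AOSP Thread class):

    #include <atomic>
    #include <chrono>
    #include <cstdio>
    #include <thread>

    // Generic stand-in for the requestExit()/join() sequence used above: the
    // worker polls an exit flag every iteration, and the owner joins before
    // releasing anything the worker might still reference.
    class Worker {
      public:
        void run()         { mThread = std::thread([this] { loop(); }); }
        void requestExit() { mExit = true; }
        void join()        { if (mThread.joinable()) mThread.join(); }

      private:
        void loop() {
            while (!mExit) {
                // ... wait for and process one result ...
                std::this_thread::sleep_for(std::chrono::milliseconds(10));
            }
        }

        std::atomic<bool> mExit{false};
        std::thread mThread;
    };

    int main() {
        Worker w;
        w.run();
        w.requestExit();  // signal the loop to stop
        w.join();         // only now is it safe to tear down shared state
        printf("worker stopped\n");
        return 0;
    }
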

@@ -101,6 +101,8 @@ private:
     sp<CameraOfflineSessionBase> mOfflineSession;
+    sp<camera2::FrameProcessorBase> mFrameProcessor;
     // Offline composite stream map, output surface -> composite stream
     KeyedVector<sp<IBinder>, sp<CompositeStream>> mCompositeStreamMap;
 };

@@ -34,6 +34,7 @@
 #include "gui/IGraphicBufferProducer.h"
 #include "device3/Camera3StreamInterface.h"
 #include "binder/Status.h"
+#include "FrameProducer.h"
 #include "CameraOfflineSessionBase.h"
@@ -48,15 +49,10 @@ typedef std::unordered_map<int, std::vector<size_t> > SurfaceMap;
  * Base interface for version >= 2 camera device classes, which interface to
  * camera HAL device versions >= 2.
  */
-class CameraDeviceBase : public virtual RefBase {
+class CameraDeviceBase : public virtual FrameProducer {
   public:
     virtual ~CameraDeviceBase();
-    /**
-     * The device's camera ID
-     */
-    virtual const String8& getId() const = 0;
     /**
      * The device vendor tag ID
      */
@@ -67,14 +63,10 @@ class CameraDeviceBase : public virtual RefBase {
     virtual status_t dump(int fd, const Vector<String16> &args) = 0;
-    /**
-     * The device's static characteristics metadata buffer
-     */
-    virtual const CameraMetadata& info() const = 0;
     /**
      * The physical camera device's static characteristics metadata buffer
     */
-    virtual const CameraMetadata& info(const String8& physicalId) const = 0;
+    virtual const CameraMetadata& infoPhysical(const String8& physicalId) const = 0;
     struct PhysicalCameraSettings {
         std::string cameraId;
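
Renaming the physical-camera overload from info(const String8&) to infoPhysical() is more than cosmetic. The zero-argument info() now lives in the FrameProducer base, and in C++ a member function declared in a derived class hides every base-class overload with the same name, so keeping an info(const String8&) here would have made plain info() calls through CameraDeviceBase fail to compile without a using-declaration. Presumably that is why the two getters now have distinct names; the hiding rule itself is easy to demonstrate (hypothetical types):

    #include <cstdio>

    struct Producer {
        virtual ~Producer() = default;
        virtual const char* info() const { return "static info"; }
    };

    struct Device : Producer {
        // This overload hides Producer::info(); without the using-declaration
        // below, device.info() would not compile.
        const char* info(int /*physicalId*/) const { return "physical info"; }
        using Producer::info;
    };

    int main() {
        Device device;
        printf("%s / %s\n", device.info(), device.info(0));
        return 0;
    }
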
@@ -277,21 +269,6 @@ class CameraDeviceBase : public virtual RefBase {
      */
     virtual bool willNotify3A() = 0;
-    /**
-     * Wait for a new frame to be produced, with timeout in nanoseconds.
-     * Returns TIMED_OUT when no frame produced within the specified duration
-     * May be called concurrently to most methods, except for getNextFrame
-     */
-    virtual status_t waitForNextFrame(nsecs_t timeout) = 0;
-    /**
-     * Get next capture result frame from the result queue. Returns NOT_ENOUGH_DATA
-     * if the queue is empty; caller takes ownership of the metadata buffer inside
-     * the capture result object's metadata field.
-     * May be called concurrently to most methods, except for waitForNextFrame.
-     */
-    virtual status_t getNextResult(CaptureResult *frame) = 0;
     /**
      * Trigger auto-focus. The latest ID used in a trigger autofocus or cancel
      * autofocus call will be returned by the HAL in all subsequent AF

@@ -22,6 +22,7 @@
 #include <utils/Timers.h>
 #include "camera/CaptureResult.h"
+#include "FrameProducer.h"
 namespace android {
@@ -54,27 +55,17 @@ class NotificationListener : public virtual RefBase {
     virtual ~NotificationListener() {}
 };
-class CameraOfflineSessionBase : public virtual RefBase {
+class CameraOfflineSessionBase : public virtual FrameProducer {
   public:
     virtual ~CameraOfflineSessionBase();
     virtual status_t initialize(
             wp<NotificationListener> listener) = 0;
-    // The session's original camera ID
-    virtual const String8& getId() const = 0;
     virtual status_t disconnect() = 0;
     virtual status_t dump(int fd) = 0;
-    /**
-     * Capture result passing
-     */
-    virtual status_t waitForNextFrame(nsecs_t timeout) = 0;
-    virtual status_t getNextResult(CaptureResult *frame) = 0;
     // TODO: notification passing path
 }; // class CameraOfflineSessionBase
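
Both base classes keep the inheritance virtual (public virtual FrameProducer), just as FrameProducer itself derives virtually from RefBase in the new header further down. Because the concrete camera classes can reach RefBase through more than one base, virtual inheritance guarantees a single shared RefBase subobject, and therefore a single reference count for the sp<>/wp<> machinery. The diamond it avoids, in a minimal standalone form (generic names):

    #include <cstdio>

    struct Base { int refCount = 0; };

    // With virtual inheritance, D contains exactly one Base subobject even
    // though it inherits Base through both P1 and P2.
    struct P1 : virtual Base {};
    struct P2 : virtual Base {};
    struct D  : P1, P2 {};

    int main() {
        D d;
        d.refCount = 1;  // unambiguous: there is only one Base
        printf("refCount=%d\n", d.refCount);
        return 0;
    }
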

@@ -18,20 +18,21 @@
 #define ATRACE_TAG ATRACE_TAG_CAMERA
 //#define LOG_NDEBUG 0
+#include <map>
 #include <utils/Log.h>
 #include <utils/Trace.h>
+#include "common/FrameProducer.h"
 #include "common/FrameProcessorBase.h"
-#include "common/CameraDeviceBase.h"
 namespace android {
 namespace camera2 {
-FrameProcessorBase::FrameProcessorBase(wp<CameraDeviceBase> device) :
+FrameProcessorBase::FrameProcessorBase(wp<FrameProducer> device) :
     Thread(/*canCallJava*/false),
     mDevice(device),
     mNumPartialResults(1) {
-    sp<CameraDeviceBase> cameraDevice = device.promote();
+    sp<FrameProducer> cameraDevice = device.promote();
     if (cameraDevice != 0) {
         CameraMetadata staticInfo = cameraDevice->info();
         camera_metadata_entry_t entry = staticInfo.find(ANDROID_REQUEST_PARTIAL_RESULT_COUNT);
@@ -115,7 +116,7 @@ void FrameProcessorBase::dump(int fd, const Vector<String16>& /*args*/) {
 bool FrameProcessorBase::threadLoop() {
     status_t res;
-    sp<CameraDeviceBase> device;
+    sp<FrameProducer> device;
     {
         device = mDevice.promote();
         if (device == 0) return false;
@@ -132,7 +133,7 @@ bool FrameProcessorBase::threadLoop() {
     return true;
 }
-void FrameProcessorBase::processNewFrames(const sp<CameraDeviceBase> &device) {
+void FrameProcessorBase::processNewFrames(const sp<FrameProducer> &device) {
     status_t res;
     ATRACE_CALL();
     CaptureResult result;
@@ -142,7 +143,7 @@ void FrameProcessorBase::processNewFrames(const sp<CameraDeviceBase> &device) {
     while ( (res = device->getNextResult(&result)) == OK) {
         // TODO: instead of getting frame number from metadata, we should read
-        // this from result.mResultExtras when CameraDeviceBase interface is fixed.
+        // this from result.mResultExtras when FrameProducer interface is fixed.
         camera_metadata_entry_t entry;
         entry = result.mMetadata.find(ANDROID_REQUEST_FRAME_COUNT);
@@ -174,14 +175,14 @@ void FrameProcessorBase::processNewFrames(const sp<CameraDeviceBase> &device) {
 }
 bool FrameProcessorBase::processSingleFrame(CaptureResult &result,
-        const sp<CameraDeviceBase> &device) {
+        const sp<FrameProducer> &device) {
     ALOGV("%s: Camera %s: Process single frame (is empty? %d)",
             __FUNCTION__, device->getId().string(), result.mMetadata.isEmpty());
     return processListeners(result, device) == OK;
 }
 status_t FrameProcessorBase::processListeners(const CaptureResult &result,
-        const sp<CameraDeviceBase> &device) {
+        const sp<FrameProducer> &device) {
     ATRACE_CALL();
     camera_metadata_ro_entry_t entry;

@@ -27,22 +27,25 @@
 namespace android {
-class CameraDeviceBase;
+class FrameProducer;
 namespace camera2 {
 /* Output frame metadata processing thread. This thread waits for new
- * frames from the device, and analyzes them as necessary.
+ * frames from the frame producer, and analyzes them as necessary.
  */
 class FrameProcessorBase: public Thread {
   public:
-    explicit FrameProcessorBase(wp<CameraDeviceBase> device);
+    explicit FrameProcessorBase(wp<FrameProducer> device);
     virtual ~FrameProcessorBase();
     struct FilteredListener: virtual public RefBase {
         virtual void onResultAvailable(const CaptureResult &result) = 0;
     };
+    static const int32_t FRAME_PROCESSOR_LISTENER_MIN_ID = 0;
+    static const int32_t FRAME_PROCESSOR_LISTENER_MAX_ID = 0x7fffffffL;
     // Register a listener for a range of IDs [minId, maxId). Multiple listeners
     // can be listening to the same range. Registering the same listener with
     // the same range of IDs has no effect.
@@ -56,7 +59,7 @@ class FrameProcessorBase: public Thread {
     void dump(int fd, const Vector<String16>& args);
   protected:
     static const nsecs_t kWaitDuration = 10000000; // 10 ms
-    wp<CameraDeviceBase> mDevice;
+    wp<FrameProducer> mDevice;
     virtual bool threadLoop();
@@ -74,13 +77,13 @@ class FrameProcessorBase: public Thread {
     // Number of partial result the HAL will potentially send.
     int32_t mNumPartialResults;
-    void processNewFrames(const sp<CameraDeviceBase> &device);
+    void processNewFrames(const sp<FrameProducer> &device);
     virtual bool processSingleFrame(CaptureResult &result,
-            const sp<CameraDeviceBase> &device);
+            const sp<FrameProducer> &device);
     status_t processListeners(const CaptureResult &result,
-            const sp<CameraDeviceBase> &device);
+            const sp<FrameProducer> &device);
     CameraMetadata mLastFrame;
     std::vector<PhysicalCaptureResultInfo> mLastPhysicalFrames;

@@ -0,0 +1,63 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_SERVERS_CAMERA_FRAMEPRODUCER_H
+#define ANDROID_SERVERS_CAMERA_FRAMEPRODUCER_H
+
+#include <utils/RefBase.h>
+#include <utils/String8.h>
+#include <utils/Timers.h>
+
+#include "camera/CameraMetadata.h"
+#include "camera/CaptureResult.h"
+
+namespace android {
+
+/**
+ * Abstract class for HAL frame producers
+ */
+class FrameProducer : public virtual RefBase {
+  public:
+    /**
+     * Retrieve the static characteristics metadata buffer
+     */
+    virtual const CameraMetadata& info() const = 0;
+
+    /**
+     * Retrieve the device camera ID
+     */
+    virtual const String8& getId() const = 0;
+
+    /**
+     * Wait for a new frame to be produced, with timeout in nanoseconds.
+     * Returns TIMED_OUT when no frame produced within the specified duration
+     * May be called concurrently to most methods, except for getNextFrame
+     */
+    virtual status_t waitForNextFrame(nsecs_t timeout) = 0;
+
+    /**
+     * Get next capture result frame from the result queue. Returns NOT_ENOUGH_DATA
+     * if the queue is empty; caller takes ownership of the metadata buffer inside
+     * the capture result object's metadata field.
+     * May be called concurrently to most methods, except for waitForNextFrame.
+     */
+    virtual status_t getNextResult(CaptureResult *frame) = 0;
+}; // class FrameProducer
+
+} // namespace android
+
+#endif
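
The contract spelled out in this new header maps directly onto the loop FrameProcessorBase already runs: block briefly in waitForNextFrame(), treat TIMED_OUT as "try again", and on success drain getNextResult() until the queue is empty. A hedged sketch of that consumption pattern against the interface (the helper name pollOnce is illustrative, and error handling is reduced to the two documented codes):

    #include <utils/Errors.h>
    #include <utils/StrongPointer.h>

    #include "camera/CaptureResult.h"
    #include "common/FrameProducer.h"

    namespace android {

    // One pass of a FrameProcessorBase-style loop: wait a short interval for a
    // new frame, then drain every queued result before waiting again.
    static bool pollOnce(const sp<FrameProducer>& producer, nsecs_t waitDuration) {
        status_t res = producer->waitForNextFrame(waitDuration);
        if (res == TIMED_OUT) return true;   // nothing produced yet; keep looping
        if (res != OK) return false;         // producer error; stop the loop

        CaptureResult result;
        while (producer->getNextResult(&result) == OK) {
            // ... hand the result to the registered listeners ...
        }
        return true;
    }

    } // namespace android
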

@@ -814,7 +814,7 @@ status_t Camera3Device::dump(int fd, const Vector<String16> &args) {
     return OK;
 }
-const CameraMetadata& Camera3Device::info(const String8& physicalId) const {
+const CameraMetadata& Camera3Device::infoPhysical(const String8& physicalId) const {
     ALOGVV("%s: E", __FUNCTION__);
     if (CC_UNLIKELY(mStatus == STATUS_UNINITIALIZED ||
             mStatus == STATUS_ERROR)) {
@@ -837,7 +837,7 @@ const CameraMetadata& Camera3Device::info(const String8& physicalId) const {
 const CameraMetadata& Camera3Device::info() const {
     String8 emptyId;
-    return info(emptyId);
+    return infoPhysical(emptyId);
 }
 status_t Camera3Device::checkStatusOkToCaptureLocked() {

@@ -98,7 +98,7 @@ class Camera3Device :
     status_t disconnect() override;
     status_t dump(int fd, const Vector<String16> &args) override;
     const CameraMetadata& info() const override;
-    const CameraMetadata& info(const String8& physicalId) const override;
+    const CameraMetadata& infoPhysical(const String8& physicalId) const override;
     // Capture and setStreamingRequest will configure streams if currently in
     // idle state

@@ -167,7 +167,9 @@ status_t Camera3OfflineSession::disconnectImpl() {
         streams.push_back(mInputStream);
     }
-    mSession->close();
+    if (mSession != nullptr) {
+        mSession->close();
+    }
     FlushInflightReqStates states {
         mId, mOfflineReqsLock, mOfflineReqs, mUseHalBufManager,
@@ -461,4 +463,8 @@ std::vector<sp<Camera3StreamInterface>> Camera3OfflineSession::getAllStreams() {
     return ret;
 }
+const CameraMetadata& Camera3OfflineSession::info() const {
+    return mDeviceInfo;
+}
 }; // namespace android

@@ -142,13 +142,14 @@ class Camera3OfflineSession :
     /**
      * CameraOfflineSessionBase interface
      */
-    const String8& getId() const override;
     status_t disconnect() override;
     status_t dump(int fd) override;
-    // methods for capture result passing
+    /**
+     * FrameProducer interface
+     */
+    const String8& getId() const override;
+    const CameraMetadata& info() const override;
     status_t waitForNextFrame(nsecs_t timeout) override;
     status_t getNextResult(CaptureResult *frame) override;

@@ -40,7 +40,8 @@ class ZoomRatioMapper : private CoordinateMapper {
             bool supportNativeZoomRatio, bool usePrecorrectArray);
     ZoomRatioMapper(const ZoomRatioMapper& other) :
             mHalSupportsZoomRatio(other.mHalSupportsZoomRatio),
-            mArrayWidth(other.mArrayWidth), mArrayHeight(other.mArrayHeight) {}
+            mArrayWidth(other.mArrayWidth), mArrayHeight(other.mArrayHeight),
+            mIsValid(other.mIsValid) {}
     /**
      * Initialize request template with valid zoomRatio if necessary.
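
The final hunk completes ZoomRatioMapper's hand-written copy constructor. Any member left out of a user-defined copy constructor's initializer list is not copied: it falls back to its in-class initializer if one exists, or stays default-initialized otherwise, so copies of a valid mapper could silently report themselves as not valid. The general pitfall in miniature (names are illustrative):

    #include <cstdio>

    struct Mapper {
        Mapper() = default;
        // Hand-written copy constructor that forgets mIsValid: the omitted member
        // takes its in-class initializer (false) instead of the source's value.
        Mapper(const Mapper& other) : mWidth(other.mWidth) {}

        int  mWidth   = 0;
        bool mIsValid = false;
    };

    int main() {
        Mapper a;
        a.mWidth = 4000;
        a.mIsValid = true;

        Mapper b(a);
        printf("copy: width=%d valid=%d\n", b.mWidth, b.mIsValid);  // valid=0
        return 0;
    }
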
