Merge pi-dr1-dev to aosp-master

Change-Id: I5d9258cdbcf160849cc159d017fd97ec5e6211ce
gugelfrei
Bill Yi 6 years ago
commit efd99ec863

@ -247,8 +247,9 @@ MediaBufferBase* MidiEngine::readBuffer() {
EAS_I32 numRendered;
EAS_RESULT result = EAS_Render(mEasData, p, mEasConfig->mixBufferSize, &numRendered);
if (result != EAS_SUCCESS) {
ALOGE("EAS_Render returned %ld", result);
break;
ALOGE("EAS_Render() returned %ld, numBytesOutput = %d", result, numBytesOutput);
buffer->release();
return NULL; // Stop processing to prevent infinite loops.
}
p += numRendered * mEasConfig->numChannels;
numBytesOutput += numRendered * mEasConfig->numChannels * sizeof(EAS_PCM);

@ -83,6 +83,7 @@ public:
* successful only when validation is successful.
*/
// Candidate media-profiles XML files, listed in lookup priority order:
// ODM overlay first, then vendor overlay, then the system default.
// NOTE(review): paths look relative to the device root filesystem
// (no leading '/') — confirm how the consumer resolves them.
static constexpr char const * const xmlFiles[] = {
"odm/etc/media_profiles_V1_0.xml",
"vendor/etc/media_profiles_V1_0.xml",
"system/etc/media_profiles.xml"
};

@ -301,10 +301,13 @@ status_t FrameDecoder::extractInternal() {
err = mSource->read(&mediaBuffer, &mReadOptions);
mReadOptions.clearSeekTo();
if (err != OK) {
ALOGW("Input Error or EOS");
mHaveMoreInputs = false;
if (!mFirstSample && err == ERROR_END_OF_STREAM) {
(void)mDecoder->queueInputBuffer(
index, 0, 0, 0, MediaCodec::BUFFER_FLAG_EOS);
err = OK;
} else {
ALOGW("Input Error: err=%d", err);
}
break;
}

@ -860,7 +860,15 @@ static CodecBase *CreateCCodec() {
}
//static
sp<CodecBase> MediaCodec::GetCodecBase(const AString &name) {
sp<CodecBase> MediaCodec::GetCodecBase(const AString &name, const char *owner) {
if (owner) {
if (strncmp(owner, "default", 8) == 0) {
return new ACodec;
} else if (strncmp(owner, "codec2", 7) == 0) {
return CreateCCodec();
}
}
if (name.startsWithIgnoreCase("c2.")) {
return CreateCCodec();
} else if (name.startsWithIgnoreCase("omx.")) {
@ -884,11 +892,6 @@ status_t MediaCodec::init(const AString &name) {
// we need to invest in an extra looper to free the main event
// queue.
mCodec = GetCodecBase(name);
if (mCodec == NULL) {
return NAME_NOT_FOUND;
}
mCodecInfo.clear();
bool secureCodec = false;
@ -922,6 +925,11 @@ status_t MediaCodec::init(const AString &name) {
return NAME_NOT_FOUND;
}
mCodec = GetCodecBase(name, mCodecInfo->getOwnerName());
if (mCodec == NULL) {
return NAME_NOT_FOUND;
}
if (mIsVideo) {
// video codec needs dedicated looper
if (mCodecLooper == NULL) {

File diff suppressed because it is too large Load Diff

@ -35,59 +35,49 @@
#define MAX_MEM_ALLOCS 100
extern "C" IA_ERRORCODE ixheaacd_dec_api(pVOID p_ia_module_obj,
WORD32 i_cmd, WORD32 i_idx, pVOID pv_value);
extern "C" IA_ERRORCODE ia_drc_dec_api(pVOID p_ia_module_obj,
WORD32 i_cmd, WORD32 i_idx, pVOID pv_value);
extern "C" IA_ERRORCODE ixheaacd_get_config_param(pVOID p_ia_process_api_obj,
pWORD32 pi_samp_freq,
pWORD32 pi_num_chan,
pWORD32 pi_pcm_wd_sz,
pWORD32 pi_channel_mask);
extern "C" IA_ERRORCODE ixheaacd_dec_api(pVOID p_ia_module_obj, WORD32 i_cmd, WORD32 i_idx,
pVOID pv_value);
extern "C" IA_ERRORCODE ia_drc_dec_api(pVOID p_ia_module_obj, WORD32 i_cmd, WORD32 i_idx,
pVOID pv_value);
extern "C" IA_ERRORCODE ixheaacd_get_config_param(pVOID p_ia_process_api_obj, pWORD32 pi_samp_freq,
pWORD32 pi_num_chan, pWORD32 pi_pcm_wd_sz,
pWORD32 pi_channel_mask);
namespace android {
struct SoftXAAC : public SimpleSoftOMXComponent {
SoftXAAC(const char *name,
const OMX_CALLBACKTYPE *callbacks,
OMX_PTR appData,
OMX_COMPONENTTYPE **component);
SoftXAAC(const char* name, const OMX_CALLBACKTYPE* callbacks, OMX_PTR appData,
OMX_COMPONENTTYPE** component);
protected:
protected:
virtual ~SoftXAAC();
virtual OMX_ERRORTYPE internalGetParameter(
OMX_INDEXTYPE index, OMX_PTR params);
virtual OMX_ERRORTYPE internalGetParameter(OMX_INDEXTYPE index, OMX_PTR params);
virtual OMX_ERRORTYPE internalSetParameter(
OMX_INDEXTYPE index, const OMX_PTR params);
virtual OMX_ERRORTYPE internalSetParameter(OMX_INDEXTYPE index, const OMX_PTR params);
virtual void onQueueFilled(OMX_U32 portIndex);
virtual void onPortFlushCompleted(OMX_U32 portIndex);
virtual void onPortEnableCompleted(OMX_U32 portIndex, bool enabled);
virtual void onReset();
private:
private:
enum {
kNumInputBuffers = 4,
kNumOutputBuffers = 4,
kNumDelayBlocksMax = 8,
kNumInputBuffers = 4,
kNumOutputBuffers = 4,
kNumDelayBlocksMax = 8,
};
bool mIsADTS;
size_t mInputBufferCount;
size_t mOutputBufferCount;
bool mSignalledError;
OMX_BUFFERHEADERTYPE *mLastInHeader;
OMX_BUFFERHEADERTYPE* mLastInHeader;
int64_t mPrevTimestamp;
int64_t mCurrentTimestamp;
uint32_t mBufSize;
enum {
NONE,
AWAITING_DISABLED,
AWAITING_ENABLED
} mOutputPortSettingsChange;
enum { NONE, AWAITING_DISABLED, AWAITING_ENABLED } mOutputPortSettingsChange;
void initPorts();
status_t initDecoder();
@ -98,48 +88,43 @@ private:
int configXAACDecoder(uint8_t* inBuffer, uint32_t inBufferLength);
int configMPEGDDrc();
int decodeXAACStream(uint8_t* inBuffer,
uint32_t inBufferLength,
int32_t *bytesConsumed,
int32_t *outBytes);
int decodeXAACStream(uint8_t* inBuffer, uint32_t inBufferLength, int32_t* bytesConsumed,
int32_t* outBytes);
int configflushDecode();
IA_ERRORCODE getXAACStreamInfo();
IA_ERRORCODE setXAACDRCInfo(int32_t drcCut,
int32_t drcBoost,
int32_t drcRefLevel,
IA_ERRORCODE setXAACDRCInfo(int32_t drcCut, int32_t drcBoost, int32_t drcRefLevel,
int32_t drcHeavyCompression
#ifdef ENABLE_MPEG_D_DRC
,int32_t drEffectType
,
int32_t drEffectType
#endif
);
);
bool mEndOfInput;
bool mEndOfOutput;
void* mXheaacCodecHandle;
void* mMpegDDrcHandle;
uint32_t mInputBufferSize;
uint32_t mOutputFrameLength;
int8_t* mInputBuffer;
int8_t* mOutputBuffer;
int32_t mSampFreq;
int32_t mNumChannels;
int32_t mPcmWdSz;
int32_t mChannelMask;
bool mIsCodecInitialized;
bool mIsCodecConfigFlushRequired;
int8_t *mDrcInBuf;
int8_t *mDrcOutBuf;
void* mXheaacCodecHandle;
void* mMpegDDrcHandle;
uint32_t mInputBufferSize;
uint32_t mOutputFrameLength;
int8_t* mInputBuffer;
int8_t* mOutputBuffer;
int32_t mSampFreq;
int32_t mNumChannels;
int32_t mPcmWdSz;
int32_t mChannelMask;
bool mIsCodecInitialized;
bool mIsCodecConfigFlushRequired;
int8_t* mDrcInBuf;
int8_t* mDrcOutBuf;
int32_t mMpegDDRCPresent;
int32_t mDRCFlag;
void* mMemoryArray[MAX_MEM_ALLOCS];
int32_t mMallocCount;
void* mMemoryArray[MAX_MEM_ALLOCS];
int32_t mMallocCount;
DISALLOW_EVIL_CONSTRUCTORS(SoftXAAC);
};
} // namespace android

@ -234,7 +234,11 @@ bool M3UParser::MediaGroup::getActiveURI(AString *uri, const char *baseURL) cons
if (mSelectedIndex >= 0 && i == (size_t)mSelectedIndex) {
const Media &item = mMediaItems.itemAt(i);
*uri = item.makeURL(baseURL);
if (item.mURI.empty()) {
*uri = "";
} else {
*uri = item.makeURL(baseURL);
}
return true;
}
}
@ -465,7 +469,7 @@ bool M3UParser::getTypeURI(size_t index, const char *key, AString *uri) const {
}
if ((*uri).empty()) {
*uri = mItems.itemAt(index).mURI;
*uri = mItems.itemAt(index).makeURL(mBaseURI.c_str());
}
}

@ -377,7 +377,7 @@ private:
MediaCodec(const sp<ALooper> &looper, pid_t pid, uid_t uid);
static sp<CodecBase> GetCodecBase(const AString &name);
static sp<CodecBase> GetCodecBase(const AString &name, const char *owner = nullptr);
static status_t PostAndAwaitResponse(
const sp<AMessage> &msg, sp<AMessage> *response);

@ -1121,7 +1121,8 @@ status_t OMXNodeInstance::useBuffer(
}
case OMXBuffer::kBufferTypeANWBuffer: {
if (mPortMode[portIndex] != IOMX::kPortModePresetANWBuffer) {
if (mPortMode[portIndex] != IOMX::kPortModePresetANWBuffer
&& mPortMode[portIndex] != IOMX::kPortModeDynamicANWBuffer) {
break;
}
return useGraphicBuffer_l(portIndex, omxBuffer.mGraphicBuffer, buffer);
@ -1655,12 +1656,15 @@ status_t OMXNodeInstance::freeBuffer(
}
BufferMeta *buffer_meta = static_cast<BufferMeta *>(header->pAppPrivate);
// Invalidate buffers in the client side first before calling OMX_FreeBuffer.
// If not, pending events in the client side might access the buffers after free.
invalidateBufferID(buffer);
OMX_ERRORTYPE err = OMX_FreeBuffer(mHandle, portIndex, header);
CLOG_IF_ERROR(freeBuffer, err, "%s:%u %#x", portString(portIndex), portIndex, buffer);
delete buffer_meta;
buffer_meta = NULL;
invalidateBufferID(buffer);
return StatusFromOMXError(err);
}

@ -4877,11 +4877,15 @@ audio_devices_t AudioPolicyManager::getNewOutputDevice(const sp<AudioOutputDescr
// use device for strategy DTMF
// 9: the strategy for beacon, a.k.a. "transmitted through speaker" is active on the output:
// use device for strategy t-t-s
// FIXME: extend use of isStrategyActiveOnSameModule() to all strategies
// with a refined rule considering mutually exclusive devices (using same backend)
// as opposed to all streams on the same audio HAL module.
if (isStrategyActive(outputDesc, STRATEGY_ENFORCED_AUDIBLE) &&
mEngine->getForceUse(AUDIO_POLICY_FORCE_FOR_SYSTEM) == AUDIO_POLICY_FORCE_SYSTEM_ENFORCED) {
device = getDeviceForStrategy(STRATEGY_ENFORCED_AUDIBLE, fromCache);
} else if (isInCall() ||
isStrategyActive(outputDesc, STRATEGY_PHONE)) {
isStrategyActiveOnSameModule(outputDesc, STRATEGY_PHONE)) {
device = getDeviceForStrategy(STRATEGY_PHONE, fromCache);
} else if (isStrategyActive(outputDesc, STRATEGY_SONIFICATION)) {
device = getDeviceForStrategy(STRATEGY_SONIFICATION, fromCache);
@ -5889,6 +5893,20 @@ bool AudioPolicyManager::isStrategyActive(const sp<AudioOutputDescriptor>& outpu
return false;
}
bool AudioPolicyManager::isStrategyActiveOnSameModule(const sp<AudioOutputDescriptor>& outputDesc,
routing_strategy strategy, uint32_t inPastMs,
nsecs_t sysTime) const
{
for (size_t i = 0; i < mOutputs.size(); i++) {
sp<SwAudioOutputDescriptor> desc = mOutputs.valueAt(i);
if (outputDesc->sharesHwModuleWith(desc)
&& isStrategyActive(desc, strategy, inPastMs, sysTime)) {
return true;
}
}
return false;
}
audio_policy_forced_cfg_t AudioPolicyManager::getForceUse(audio_policy_force_use_t usage)
{
return mEngine->getForceUse(usage);

@ -321,6 +321,10 @@ protected:
bool isStrategyActive(const sp<AudioOutputDescriptor>& outputDesc, routing_strategy strategy,
uint32_t inPastMs = 0, nsecs_t sysTime = 0) const;
bool isStrategyActiveOnSameModule(const sp<AudioOutputDescriptor>& outputDesc,
routing_strategy strategy, uint32_t inPastMs = 0,
nsecs_t sysTime = 0) const;
// change the route of the specified output. Returns the number of ms we have slept to
// allow new routing to take effect in certain cases.
virtual uint32_t setOutputDevice(const sp<AudioOutputDescriptor>& outputDesc,

@ -106,7 +106,7 @@ status_t Camera2Client::initializeImpl(TProviderPtr providerPtr, const String8&
{
SharedParameters::Lock l(mParameters);
res = l.mParameters.initialize(&(mDevice->info()), mDeviceVersion);
res = l.mParameters.initialize(mDevice.get(), mDeviceVersion);
if (res != OK) {
ALOGE("%s: Camera %d: unable to build defaults: %s (%d)",
__FUNCTION__, mCameraId, strerror(-res), res);
@ -254,6 +254,7 @@ status_t Camera2Client::dumpClient(int fd, const Vector<String16>& args) {
switch (p.sceneMode) {
case ANDROID_CONTROL_SCENE_MODE_DISABLED:
result.append("AUTO\n"); break;
CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY)
CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_ACTION)
CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_PORTRAIT)
CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_LANDSCAPE)

@ -41,23 +41,29 @@ Parameters::Parameters(int cameraId,
int cameraFacing) :
cameraId(cameraId),
cameraFacing(cameraFacing),
info(NULL) {
info(NULL),
mDefaultSceneMode(ANDROID_CONTROL_SCENE_MODE_DISABLED) {
}
Parameters::~Parameters() {
}
status_t Parameters::initialize(const CameraMetadata *info, int deviceVersion) {
status_t Parameters::initialize(CameraDeviceBase *device, int deviceVersion) {
status_t res;
if (device == nullptr) {
ALOGE("%s: device is null!", __FUNCTION__);
return BAD_VALUE;
}
if (info->entryCount() == 0) {
const CameraMetadata& info = device->info();
if (info.entryCount() == 0) {
ALOGE("%s: No static information provided!", __FUNCTION__);
return BAD_VALUE;
}
Parameters::info = info;
Parameters::info = &info;
mDeviceVersion = deviceVersion;
res = buildFastInfo();
res = buildFastInfo(device);
if (res != OK) return res;
res = buildQuirks();
@ -557,6 +563,10 @@ status_t Parameters::initialize(const CameraMetadata *info, int deviceVersion) {
noSceneModes = true;
break;
case ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY:
// Face priority can be used as alternate default if supported.
// Per API contract it shouldn't override the user set flash,
// white balance and focus modes.
mDefaultSceneMode = availableSceneModes.data.u8[i];
// Not in old API
addComma = false;
break;
@ -761,17 +771,7 @@ status_t Parameters::initialize(const CameraMetadata *info, int deviceVersion) {
focusingAreas.clear();
focusingAreas.add(Parameters::Area(0,0,0,0,0));
if (fastInfo.isExternalCamera) {
params.setFloat(CameraParameters::KEY_FOCAL_LENGTH, -1.0);
} else {
camera_metadata_ro_entry_t availableFocalLengths =
staticInfo(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS, 0, 0, false);
if (!availableFocalLengths.count) return NO_INIT;
float minFocalLength = availableFocalLengths.data.f[0];
params.setFloat(CameraParameters::KEY_FOCAL_LENGTH, minFocalLength);
}
params.setFloat(CameraParameters::KEY_FOCAL_LENGTH, fastInfo.defaultFocalLength);
float horizFov, vertFov;
res = calculatePictureFovs(&horizFov, &vertFov);
@ -993,7 +993,7 @@ String8 Parameters::get() const {
return paramsFlattened;
}
status_t Parameters::buildFastInfo() {
status_t Parameters::buildFastInfo(CameraDeviceBase *device) {
camera_metadata_ro_entry_t activeArraySize =
staticInfo(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE, 2, 4);
@ -1109,20 +1109,12 @@ status_t Parameters::buildFastInfo() {
focusDistanceCalibration.data.u8[0] !=
ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED);
camera_metadata_ro_entry_t hwLevel = staticInfo(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL);
if (!hwLevel.count) return NO_INIT;
fastInfo.isExternalCamera =
hwLevel.data.u8[0] == ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_EXTERNAL;
camera_metadata_ro_entry_t availableFocalLengths =
staticInfo(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS, 0, 0, /*required*/false);
if (!availableFocalLengths.count && !fastInfo.isExternalCamera) return NO_INIT;
res = getDefaultFocalLength(device);
if (res != OK) return res;
SortedVector<int32_t> availableFormats = getAvailableOutputFormats();
if (!availableFormats.size()) return NO_INIT;
if (sceneModeOverrides.count > 0) {
// sceneModeOverrides is defined to have 3 entries for each scene mode,
// which are AE, AWB, and AF override modes the HAL wants for that scene
@ -1200,19 +1192,6 @@ status_t Parameters::buildFastInfo() {
fastInfo.bestFaceDetectMode = bestFaceDetectMode;
fastInfo.maxFaces = maxFaces;
// Find smallest (widest-angle) focal length to use as basis of still
// picture FOV reporting.
if (fastInfo.isExternalCamera) {
fastInfo.minFocalLength = -1.0;
} else {
fastInfo.minFocalLength = availableFocalLengths.data.f[0];
for (size_t i = 1; i < availableFocalLengths.count; i++) {
if (fastInfo.minFocalLength > availableFocalLengths.data.f[i]) {
fastInfo.minFocalLength = availableFocalLengths.data.f[i];
}
}
}
// Check if the HAL supports HAL_PIXEL_FORMAT_YCbCr_420_888
fastInfo.useFlexibleYuv = false;
for (size_t i = 0; i < availableFormats.size(); i++) {
@ -1760,7 +1739,7 @@ status_t Parameters::set(const String8& paramString) {
// SCENE_MODE
validatedParams.sceneMode = sceneModeStringToEnum(
newParams.get(CameraParameters::KEY_SCENE_MODE) );
newParams.get(CameraParameters::KEY_SCENE_MODE), mDefaultSceneMode);
if (validatedParams.sceneMode != sceneMode &&
validatedParams.sceneMode !=
ANDROID_CONTROL_SCENE_MODE_DISABLED) {
@ -1778,7 +1757,7 @@ status_t Parameters::set(const String8& paramString) {
}
}
bool sceneModeSet =
validatedParams.sceneMode != ANDROID_CONTROL_SCENE_MODE_DISABLED;
validatedParams.sceneMode != mDefaultSceneMode;
// FLASH_MODE
if (sceneModeSet) {
@ -2157,7 +2136,7 @@ status_t Parameters::updateRequest(CameraMetadata *request) const {
uint8_t reqSceneMode =
sceneModeActive ? sceneMode :
enableFaceDetect ? (uint8_t)ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY :
(uint8_t)ANDROID_CONTROL_SCENE_MODE_DISABLED;
mDefaultSceneMode;
res = request->update(ANDROID_CONTROL_SCENE_MODE,
&reqSceneMode, 1);
if (res != OK) return res;
@ -2446,6 +2425,50 @@ bool Parameters::useZeroShutterLag() const {
return true;
}
// Populates fastInfo.isExternalCamera and fastInfo.defaultFocalLength from the
// device's static metadata and, when available, its PREVIEW request template.
// External cameras get a sentinel focal length of -1.0 (they may report no
// focal length at all). Returns BAD_VALUE for a null device, NO_INIT when
// required static metadata is missing, or the error from template creation.
status_t Parameters::getDefaultFocalLength(CameraDeviceBase *device) {
if (device == nullptr) {
ALOGE("%s: Camera device is nullptr", __FUNCTION__);
return BAD_VALUE;
}
// Hardware level tells us whether this is an EXTERNAL camera, which is
// exempt from the focal-length requirement below.
camera_metadata_ro_entry_t hwLevel = staticInfo(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL);
if (!hwLevel.count) return NO_INIT;
fastInfo.isExternalCamera =
hwLevel.data.u8[0] == ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_EXTERNAL;
// Focal lengths are optional here (/*required*/false) so external cameras
// without the tag don't fail the lookup; non-external cameras must have it.
camera_metadata_ro_entry_t availableFocalLengths =
staticInfo(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS, 0, 0, /*required*/false);
if (!availableFocalLengths.count && !fastInfo.isExternalCamera) return NO_INIT;
// Find focal length in PREVIEW template to use as default focal length.
if (fastInfo.isExternalCamera) {
fastInfo.defaultFocalLength = -1.0;
} else {
// Find smallest (widest-angle) focal length to use as basis of still
// picture FOV reporting.
fastInfo.defaultFocalLength = availableFocalLengths.data.f[0];
for (size_t i = 1; i < availableFocalLengths.count; i++) {
if (fastInfo.defaultFocalLength > availableFocalLengths.data.f[i]) {
fastInfo.defaultFocalLength = availableFocalLengths.data.f[i];
}
}
// Use focal length in preview template if it exists
CameraMetadata previewTemplate;
status_t res = device->createDefaultRequest(CAMERA3_TEMPLATE_PREVIEW, &previewTemplate);
if (res != OK) {
ALOGE("%s: Failed to create default PREVIEW request: %s (%d)",
__FUNCTION__, strerror(-res), res);
return res;
}
// Template focal length (when present) overrides the widest-angle
// fallback computed above.
camera_metadata_entry entry = previewTemplate.find(ANDROID_LENS_FOCAL_LENGTH);
if (entry.count != 0) {
fastInfo.defaultFocalLength = entry.data.f[0];
}
}
return OK;
}
const char* Parameters::getStateName(State state) {
#define CASE_ENUM_TO_CHAR(x) case x: return(#x); break;
switch(state) {
@ -2589,12 +2612,12 @@ int Parameters::abModeStringToEnum(const char *abMode) {
-1;
}
int Parameters::sceneModeStringToEnum(const char *sceneMode) {
int Parameters::sceneModeStringToEnum(const char *sceneMode, uint8_t defaultSceneMode) {
return
!sceneMode ?
ANDROID_CONTROL_SCENE_MODE_DISABLED :
defaultSceneMode :
!strcmp(sceneMode, CameraParameters::SCENE_MODE_AUTO) ?
ANDROID_CONTROL_SCENE_MODE_DISABLED :
defaultSceneMode :
!strcmp(sceneMode, CameraParameters::SCENE_MODE_ACTION) ?
ANDROID_CONTROL_SCENE_MODE_ACTION :
!strcmp(sceneMode, CameraParameters::SCENE_MODE_PORTRAIT) ?
@ -3241,12 +3264,12 @@ status_t Parameters::calculatePictureFovs(float *horizFov, float *vertFov)
if (horizFov != NULL) {
*horizFov = 180 / M_PI * 2 *
atanf(horizCropFactor * sensorSize.data.f[0] /
(2 * fastInfo.minFocalLength));
(2 * fastInfo.defaultFocalLength));
}
if (vertFov != NULL) {
*vertFov = 180 / M_PI * 2 *
atanf(vertCropFactor * sensorSize.data.f[1] /
(2 * fastInfo.minFocalLength));
(2 * fastInfo.defaultFocalLength));
}
return OK;
}

@ -30,6 +30,8 @@
#include <camera/CameraParameters2.h>
#include <camera/CameraMetadata.h>
#include "common/CameraDeviceBase.h"
namespace android {
namespace camera2 {
@ -241,7 +243,7 @@ struct Parameters {
};
DefaultKeyedVector<uint8_t, OverrideModes> sceneModeOverrides;
bool isExternalCamera;
float minFocalLength;
float defaultFocalLength;
bool useFlexibleYuv;
Size maxJpegSize;
Size maxZslSize;
@ -264,10 +266,10 @@ struct Parameters {
~Parameters();
// Sets up default parameters
status_t initialize(const CameraMetadata *info, int deviceVersion);
status_t initialize(CameraDeviceBase *device, int deviceVersion);
// Build fast-access device static info from static info
status_t buildFastInfo();
status_t buildFastInfo(CameraDeviceBase *device);
// Query for quirks from static info
status_t buildQuirks();
@ -300,6 +302,9 @@ struct Parameters {
// whether zero shutter lag should be used for non-recording operation
bool useZeroShutterLag() const;
// Get default focal length
status_t getDefaultFocalLength(CameraDeviceBase *camera);
// Calculate the crop region rectangle, either tightly about the preview
// resolution, or a region just based on the active array; both take
// into account the current zoom level.
@ -326,7 +331,7 @@ struct Parameters {
static const char* wbModeEnumToString(uint8_t wbMode);
static int effectModeStringToEnum(const char *effectMode);
static int abModeStringToEnum(const char *abMode);
static int sceneModeStringToEnum(const char *sceneMode);
static int sceneModeStringToEnum(const char *sceneMode, uint8_t defaultScene);
static flashMode_t flashModeStringToEnum(const char *flashMode);
static const char* flashModeEnumToString(flashMode_t flashMode);
static focusMode_t focusModeStringToEnum(const char *focusMode);
@ -434,6 +439,7 @@ private:
Size getMaxSize(const Vector<Size>& sizes);
int mDeviceVersion;
uint8_t mDefaultSceneMode;
};
// This class encapsulates the Parameters class so that it can only be accessed

@ -500,6 +500,10 @@ void Camera3StreamSplitter::onFrameAvailable(const BufferItem& /*item*/) {
SP_LOGV("acquired buffer %" PRId64 " from input at slot %d",
bufferItem.mGraphicBuffer->getId(), bufferItem.mSlot);
if (bufferItem.mTransformToDisplayInverse) {
bufferItem.mTransform |= NATIVE_WINDOW_TRANSFORM_INVERSE_DISPLAY;
}
// Attach and queue the buffer to each of the outputs
BufferTracker& tracker = *(mBuffers[bufferId]);

Loading…
Cancel
Save