Camera: Add HEIC support

- Derive HEIC capabilities from camera HAL and media framework.
- Add HeicCompositeStream to encode camera buffers to HEIC buffers.
- Add ExifUtils to overwrite JPEG APP segments and send to media codec.
- Add NDK enums and corresponding format support.

Test: Camera CTS
Bug: 79465976
Change-Id: I0a885e76335f3eba4be0fd42241edb0b7349f284
Author: Shuzhen Wang
parent 627cb60a9f
commit 68ac7ada1f

@@ -36,6 +36,8 @@ ACameraMetadata::ACameraMetadata(camera_metadata_t* buffer, ACAMERA_METADATA_TYP
filterDurations(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS);
filterDurations(ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS);
filterDurations(ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS);
filterDurations(ANDROID_HEIC_AVAILABLE_HEIC_MIN_FRAME_DURATIONS);
filterDurations(ANDROID_HEIC_AVAILABLE_HEIC_STALL_DURATIONS);
}
// TODO: filter request/result keys
}
@@ -174,6 +176,16 @@ ACameraMetadata::filterDurations(uint32_t tag) {
filteredDurations.push_back(duration);
}
break;
case ANDROID_HEIC_AVAILABLE_HEIC_MIN_FRAME_DURATIONS:
case ANDROID_HEIC_AVAILABLE_HEIC_STALL_DURATIONS:
if (format == HAL_PIXEL_FORMAT_BLOB) {
format = AIMAGE_FORMAT_HEIC;
filteredDurations.push_back(format);
filteredDurations.push_back(width);
filteredDurations.push_back(height);
filteredDurations.push_back(duration);
}
break;
default:
// Should not reach here
ALOGE("%s: Unknown tag 0x%x", __FUNCTION__, tag);
@@ -247,6 +259,31 @@ ACameraMetadata::filterStreamConfigurations() {
filteredDepthStreamConfigs.push_back(isInput);
}
mData.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS, filteredDepthStreamConfigs);
entry = mData.find(ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS);
Vector<int32_t> filteredHeicStreamConfigs;
filteredHeicStreamConfigs.setCapacity(entry.count);
for (size_t i=0; i < entry.count; i += STREAM_CONFIGURATION_SIZE) {
int32_t format = entry.data.i32[i + STREAM_FORMAT_OFFSET];
int32_t width = entry.data.i32[i + STREAM_WIDTH_OFFSET];
int32_t height = entry.data.i32[i + STREAM_HEIGHT_OFFSET];
int32_t isInput = entry.data.i32[i + STREAM_IS_INPUT_OFFSET];
if (isInput == ACAMERA_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS_INPUT) {
// Hide input streams
continue;
}
// Translate HAL formats to NDK format
if (format == HAL_PIXEL_FORMAT_BLOB) {
format = AIMAGE_FORMAT_HEIC;
}
filteredHeicStreamConfigs.push_back(format);
filteredHeicStreamConfigs.push_back(width);
filteredHeicStreamConfigs.push_back(height);
filteredHeicStreamConfigs.push_back(isInput);
}
mData.update(ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS, filteredHeicStreamConfigs);
}
bool
@@ -485,6 +522,8 @@ std::unordered_set<uint32_t> ACameraMetadata::sSystemTags ({
ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION,
ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
ANDROID_HEIC_INFO_SUPPORTED,
ANDROID_HEIC_INFO_MAX_JPEG_APP_SEGMENTS_COUNT,
});
/*~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

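For context on how the filtered entries above surface to NDK clients: stream configuration entries are flattened (format, width, height, isInput) tuples, and after the filtering HEIC entries carry AIMAGE_FORMAT_HEIC. A minimal, hedged sketch of enumerating the HEIC output sizes (the helper name is made up; error handling is elided):

```cpp
#include <camera/NdkCameraManager.h>
#include <camera/NdkCameraMetadata.h>
#include <camera/NdkCameraMetadataTags.h>
#include <media/NdkImage.h>

// Hypothetical helper: list the HEIC output sizes a camera advertises.
static void listHeicSizes(ACameraManager* manager, const char* cameraId) {
    ACameraMetadata* chars = nullptr;
    if (ACameraManager_getCameraCharacteristics(manager, cameraId, &chars) != ACAMERA_OK) {
        return;
    }
    ACameraMetadata_const_entry entry = {};
    if (ACameraMetadata_getConstEntry(chars,
            ACAMERA_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS, &entry) == ACAMERA_OK) {
        // Tuple layout: (format, width, height, isInput). Input entries were
        // hidden by the filtering above, so only outputs remain.
        for (uint32_t i = 0; i + 3 < entry.count; i += 4) {
            if (entry.data.i32[i] == AIMAGE_FORMAT_HEIC) {
                int32_t width = entry.data.i32[i + 1];
                int32_t height = entry.data.i32[i + 2];
                (void)width; (void)height;  // e.g. pick one for an AImageReader
            }
        }
    }
    ACameraMetadata_free(chars);
}
```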
@@ -71,6 +71,8 @@ typedef enum acamera_metadata_section {
ACAMERA_DEPTH,
ACAMERA_LOGICAL_MULTI_CAMERA,
ACAMERA_DISTORTION_CORRECTION,
ACAMERA_HEIC,
ACAMERA_HEIC_INFO,
ACAMERA_SECTION_COUNT,
ACAMERA_VENDOR = 0x8000
@@ -112,6 +114,8 @@ typedef enum acamera_metadata_section_start {
ACAMERA_DISTORTION_CORRECTION_START
= ACAMERA_DISTORTION_CORRECTION
<< 16,
ACAMERA_HEIC_START = ACAMERA_HEIC << 16,
ACAMERA_HEIC_INFO_START = ACAMERA_HEIC_INFO << 16,
ACAMERA_VENDOR_START = ACAMERA_VENDOR << 16
} acamera_metadata_section_start_t;
@@ -1912,6 +1916,7 @@ typedef enum acamera_metadata_tag {
* <li>ACaptureRequest</li>
* </ul></p>
*
* <p>This tag is also used for HEIC image capture.</p>
*/
ACAMERA_JPEG_GPS_COORDINATES = // double[3]
ACAMERA_JPEG_START,
@@ -1927,6 +1932,7 @@ typedef enum acamera_metadata_tag {
* <li>ACaptureRequest</li>
* </ul></p>
*
* <p>This tag is also used for HEIC image capture.</p>
*/
ACAMERA_JPEG_GPS_PROCESSING_METHOD = // byte
ACAMERA_JPEG_START + 1,
@@ -1942,6 +1948,7 @@ typedef enum acamera_metadata_tag {
* <li>ACaptureRequest</li>
* </ul></p>
*
* <p>This tag is also used for HEIC image capture.</p>
*/
ACAMERA_JPEG_GPS_TIMESTAMP = // int64
ACAMERA_JPEG_START + 2,
@@ -1986,6 +1993,10 @@ typedef enum acamera_metadata_tag {
* </code></pre>
* <p>For EXTERNAL cameras the sensor orientation will always be set to 0 and the facing will
* also be set to EXTERNAL. The above code is not relevant in such a case.</p>
* <p>This tag is also used to describe the orientation of the HEIC image capture, in which
* case the rotation is reflected by the
* <a href="https://developer.android.com/reference/android/media/ExifInterface.html#TAG_ORIENTATION">EXIF orientation flag</a>, and not by
* rotating the image data itself.</p>
*
* @see ACAMERA_SENSOR_ORIENTATION
*/
@@ -2003,7 +2014,8 @@ typedef enum acamera_metadata_tag {
* <li>ACaptureRequest</li>
* </ul></p>
*
* <p>85-95 is typical usage range.</p>
* <p>The typical usage range is 85-95. This tag is also used to describe the quality
* of the HEIC image capture.</p>
*/
ACAMERA_JPEG_QUALITY = // byte
ACAMERA_JPEG_START + 4,
@@ -2019,6 +2031,7 @@ typedef enum acamera_metadata_tag {
* <li>ACaptureRequest</li>
* </ul></p>
*
* <p>This tag is also used to describe the quality of the HEIC image capture.</p>
*/
ACAMERA_JPEG_THUMBNAIL_QUALITY = // byte
ACAMERA_JPEG_START + 5,
@@ -2055,6 +2068,10 @@ typedef enum acamera_metadata_tag {
* orientation is requested. LEGACY device will always report unrotated thumbnail
* size.</li>
* </ul>
* <p>The tag is also used as the thumbnail size for HEIC image format capture, in which case
* the thumbnail rotation is reflected by the
* <a href="https://developer.android.com/reference/android/media/ExifInterface.html#TAG_ORIENTATION">EXIF orientation flag</a>, and not by
* rotating the thumbnail data itself.</p>
*
* @see ACAMERA_JPEG_ORIENTATION
*/
@@ -2088,6 +2105,7 @@ typedef enum acamera_metadata_tag {
* and vice versa.</li>
* <li>All non-<code>(0, 0)</code> sizes will have non-zero widths and heights.</li>
* </ul>
* <p>This list is also used as supported thumbnail sizes for HEIC image format capture.</p>
*
* @see ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS
*/
@@ -5757,6 +5775,80 @@ typedef enum acamera_metadata_tag {
ACAMERA_DISTORTION_CORRECTION_START + 1,
ACAMERA_DISTORTION_CORRECTION_END,
/**
* <p>The available HEIC (ISO/IEC 23008-12) stream
* configurations that this camera device supports
* (i.e. format, width, height, output/input stream).</p>
*
* <p>Type: int32[n*4] (acamera_metadata_enum_android_heic_available_heic_stream_configurations_t)</p>
*
* <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
* </ul></p>
*
* <p>The configurations are listed as <code>(format, width, height, input?)</code> tuples.</p>
* <p>If the camera device supports the HEIC image format, it will support an identical set of
* stream combinations involving the HEIC image format, compared to the combinations involving
* the JPEG image format, as required by the device's hardware level and capabilities.</p>
* <p>All the static, control, and dynamic metadata tags related to JPEG apply to HEIC formats.
* Configuring JPEG and HEIC streams at the same time is not supported.</p>
* <p>All the configuration tuples <code>(format, width, height, input?)</code> will contain
* AIMAGE_FORMAT_HEIC format as OUTPUT only.</p>
*/
ACAMERA_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS = // int32[n*4] (acamera_metadata_enum_android_heic_available_heic_stream_configurations_t)
ACAMERA_HEIC_START,
/**
* <p>This lists the minimum frame duration for each
* format/size combination for HEIC output formats.</p>
*
* <p>Type: int64[4*n]</p>
*
* <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
* </ul></p>
*
* <p>This should correspond to the frame duration when only that
* stream is active, with all processing (typically in android.*.mode)
* set to either OFF or FAST.</p>
* <p>When multiple streams are used in a request, the minimum frame
* duration will be max(individual stream min durations).</p>
* <p>See ACAMERA_SENSOR_FRAME_DURATION and
* ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS for more details about
* calculating the max frame rate.</p>
*
* @see ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS
* @see ACAMERA_SENSOR_FRAME_DURATION
*/
ACAMERA_HEIC_AVAILABLE_HEIC_MIN_FRAME_DURATIONS = // int64[4*n]
ACAMERA_HEIC_START + 1,
/**
* <p>This lists the maximum stall duration for each
* output format/size combination for HEIC streams.</p>
*
* <p>Type: int64[4*n]</p>
*
* <p>This tag may appear in:
* <ul>
* <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
* </ul></p>
*
* <p>A stall duration is how much extra time would get added
* to the normal minimum frame duration for a repeating request
* that has streams with non-zero stall.</p>
* <p>This functions similarly to
* ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS for HEIC
* streams.</p>
* <p>All HEIC output stream formats may have a nonzero stall
* duration.</p>
*
* @see ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS
*/
ACAMERA_HEIC_AVAILABLE_HEIC_STALL_DURATIONS = // int64[4*n]
ACAMERA_HEIC_START + 2,
ACAMERA_HEIC_END,
} acamera_metadata_tag_t;
/**
@@ -8373,6 +8465,16 @@ typedef enum acamera_metadata_enum_acamera_distortion_correction_mode {
} acamera_metadata_enum_android_distortion_correction_mode_t;
// ACAMERA_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS
typedef enum acamera_metadata_enum_acamera_heic_available_heic_stream_configurations {
ACAMERA_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS_OUTPUT = 0,
ACAMERA_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS_INPUT = 1,
} acamera_metadata_enum_android_heic_available_heic_stream_configurations_t;
#endif /* __ANDROID_API__ >= 24 */
__END_DECLS

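To make the two duration tags above concrete: the attainable frame rate for a repeating request that includes a HEIC stream follows from that size's minimum frame duration plus its stall duration. A small illustrative calculation, with made-up values:

```cpp
// Hypothetical values for one HEIC size; the formula follows the
// min-frame-duration and stall-duration documentation above.
constexpr int64_t kMinFrameDurationNs = 33333333;  // 30 fps for this size
constexpr int64_t kStallDurationNs = 66666667;     // extra stall per HEIC capture
constexpr double kMaxFps = 1e9 / (kMinFrameDurationNs + kStallDurationNs);  // 10 fps
```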
@@ -69,6 +69,7 @@ AImageReader::isSupportedFormatAndUsage(int32_t format, uint64_t usage) {
case AIMAGE_FORMAT_DEPTH16:
case AIMAGE_FORMAT_DEPTH_POINT_CLOUD:
case AIMAGE_FORMAT_Y8:
case AIMAGE_FORMAT_HEIC:
return true;
case AIMAGE_FORMAT_PRIVATE:
// For private format, cpu usage is prohibited.
@@ -96,6 +97,7 @@ AImageReader::getNumPlanesForFormat(int32_t format) {
case AIMAGE_FORMAT_DEPTH16:
case AIMAGE_FORMAT_DEPTH_POINT_CLOUD:
case AIMAGE_FORMAT_Y8:
case AIMAGE_FORMAT_HEIC:
return 1;
case AIMAGE_FORMAT_PRIVATE:
return 0;

@@ -526,7 +526,15 @@ enum AIMAGE_FORMATS {
* (in bytes) between adjacent rows.</p>
*
*/
AIMAGE_FORMAT_Y8 = 0x20203859
AIMAGE_FORMAT_Y8 = 0x20203859,
/**
* Compressed HEIC format.
*
* <p>This format defines the HEIC brand of High Efficiency Image File
* Format as described in ISO/IEC 23008-12.</p>
*/
AIMAGE_FORMAT_HEIC = 0x48454946,
};
/**

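With the new format constant in place, an application consumes HEIC stills through the existing AImageReader API. A hedged sketch (the 4032x3024 size is a made-up example; a real client should pick a size advertised by ACAMERA_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS):

```cpp
#include <media/NdkImage.h>
#include <media/NdkImageReader.h>

// Sketch: create a reader for HEIC output. As with JPEG, a HEIC image arrives
// as a single plane of compressed bytes (getNumPlanesForFormat() above returns 1).
static AImageReader* createHeicReader() {
    AImageReader* reader = nullptr;
    media_status_t status = AImageReader_new(
            4032, 3024, AIMAGE_FORMAT_HEIC, /*maxImages*/ 2, &reader);
    // The reader's window would then be attached to a capture session.
    return (status == AMEDIA_OK) ? reader : nullptr;
}
```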
@@ -41,6 +41,8 @@ cc_library_shared {
"api2/CameraDeviceClient.cpp",
"api2/CompositeStream.cpp",
"api2/DepthCompositeStream.cpp",
"api2/HeicEncoderInfoManager.cpp",
"api2/HeicCompositeStream.cpp",
"device1/CameraHardwareInterface.cpp",
"device3/Camera3Device.cpp",
"device3/Camera3Stream.cpp",
@@ -62,12 +64,14 @@ cc_library_shared {
"hidl/HidlCameraService.cpp",
"utils/CameraTraces.cpp",
"utils/AutoConditionLock.cpp",
"utils/ExifUtils.cpp",
"utils/TagMonitor.cpp",
"utils/LatencyHistogram.cpp",
],
shared_libs: [
"libdl",
"libexif",
"libui",
"liblog",
"libutilscallstack",
@@ -85,8 +89,10 @@ cc_library_shared {
"libhidlbase",
"libhidltransport",
"libjpeg",
"libmedia_omx",
"libmemunreachable",
"libsensorprivacy",
"libstagefright",
"libstagefright_foundation",
"android.frameworks.cameraservice.common@2.0",
"android.frameworks.cameraservice.service@2.0",

@@ -62,7 +62,8 @@ void JpegProcessor::onFrameAvailable(const BufferItem& /*item*/) {
}
}
void JpegProcessor::onBufferRequestForFrameNumber(uint64_t /*frameNumber*/, int /*streamId*/) {
void JpegProcessor::onBufferRequestForFrameNumber(uint64_t /*frameNumber*/,
int /*streamId*/, const CameraMetadata& /*settings*/) {
// Intentionally left empty
}

@@ -54,7 +54,8 @@ class JpegProcessor:
// Camera3StreamBufferListener implementation
void onBufferAcquired(const BufferInfo& bufferInfo) override;
void onBufferReleased(const BufferInfo& bufferInfo) override;
void onBufferRequestForFrameNumber(uint64_t frameNumber, int streamId) override;
void onBufferRequestForFrameNumber(uint64_t frameNumber, int streamId,
const CameraMetadata& settings) override;
status_t updateStream(const Parameters &params);
status_t deleteStream();

@@ -34,6 +34,7 @@
#include <camera_metadata_hidden.h>
#include "DepthCompositeStream.h"
#include "HeicCompositeStream.h"
// Convenience methods for constructing binder::Status objects for error returns
@@ -711,21 +712,35 @@ binder::Status CameraDeviceClient::isSessionConfigurationSupported(
return res;
if (!isStreamInfoValid) {
if (camera3::DepthCompositeStream::isDepthCompositeStream(surface)) {
bool isDepthCompositeStream =
camera3::DepthCompositeStream::isDepthCompositeStream(surface);
bool isHeicCompositeStream =
camera3::HeicCompositeStream::isHeicCompositeStream(surface);
if (isDepthCompositeStream || isHeicCompositeStream) {
// We need to take into account that composite streams can have
// additional internal camera streams.
std::vector<OutputStreamInfo> compositeStreams;
ret = camera3::DepthCompositeStream::getCompositeStreamInfo(streamInfo,
if (isDepthCompositeStream) {
ret = camera3::DepthCompositeStream::getCompositeStreamInfo(streamInfo,
mDevice->info(), &compositeStreams);
} else {
ret = camera3::HeicCompositeStream::getCompositeStreamInfo(streamInfo,
mDevice->info(), &compositeStreams);
}
if (ret != OK) {
String8 msg = String8::format(
"Camera %s: Failed adding depth composite streams: %s (%d)",
"Camera %s: Failed adding composite streams: %s (%d)",
mCameraIdStr.string(), strerror(-ret), ret);
ALOGE("%s: %s", __FUNCTION__, msg.string());
return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
}
if (compositeStreams.size() > 1) {
if (compositeStreams.size() == 0) {
// No internal streams means composite stream not
// supported.
*status = false;
return binder::Status::ok();
} else if (compositeStreams.size() > 1) {
streamCount += compositeStreams.size() - 1;
streamConfiguration.streams.resize(streamCount);
}
@@ -937,15 +952,16 @@ binder::Status CameraDeviceClient::createStream(
int streamId = camera3::CAMERA3_STREAM_ID_INVALID;
std::vector<int> surfaceIds;
if (!camera3::DepthCompositeStream::isDepthCompositeStream(surfaces[0])) {
err = mDevice->createStream(surfaces, deferredConsumer, streamInfo.width,
streamInfo.height, streamInfo.format, streamInfo.dataSpace,
static_cast<camera3_stream_rotation_t>(outputConfiguration.getRotation()),
&streamId, physicalCameraId, &surfaceIds, outputConfiguration.getSurfaceSetID(),
isShared);
} else {
sp<CompositeStream> compositeStream = new camera3::DepthCompositeStream(mDevice,
getRemoteCallback());
bool isDepthCompositeStream = camera3::DepthCompositeStream::isDepthCompositeStream(surfaces[0]);
bool isHeicCompositeStream = camera3::HeicCompositeStream::isHeicCompositeStream(surfaces[0]);
if (isDepthCompositeStream || isHeicCompositeStream) {
sp<CompositeStream> compositeStream;
if (isDepthCompositeStream) {
compositeStream = new camera3::DepthCompositeStream(mDevice, getRemoteCallback());
} else {
compositeStream = new camera3::HeicCompositeStream(mDevice, getRemoteCallback());
}
err = compositeStream->createStream(surfaces, deferredConsumer, streamInfo.width,
streamInfo.height, streamInfo.format,
static_cast<camera3_stream_rotation_t>(outputConfiguration.getRotation()),
@@ -955,6 +971,12 @@ binder::Status CameraDeviceClient::createStream(
mCompositeStreamMap.add(IInterface::asBinder(surfaces[0]->getIGraphicBufferProducer()),
compositeStream);
}
} else {
err = mDevice->createStream(surfaces, deferredConsumer, streamInfo.width,
streamInfo.height, streamInfo.format, streamInfo.dataSpace,
static_cast<camera3_stream_rotation_t>(outputConfiguration.getRotation()),
&streamId, physicalCameraId, &surfaceIds, outputConfiguration.getSurfaceSetID(),
isShared);
}
if (err != OK) {
@@ -1437,6 +1459,8 @@ bool CameraDeviceClient::roundBufferDimensionNearest(int32_t width, int32_t heig
camera_metadata_ro_entry streamConfigs =
(dataSpace == HAL_DATASPACE_DEPTH) ?
info.find(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS) :
(dataSpace == static_cast<android_dataspace>(HAL_DATASPACE_HEIF)) ?
info.find(ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS) :
info.find(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS);
int32_t bestWidth = -1;
@@ -1930,6 +1954,10 @@ void CameraDeviceClient::notifyShutter(const CaptureResultExtras& resultExtras,
remoteCb->onCaptureStarted(resultExtras, timestamp);
}
Camera2ClientBase::notifyShutter(resultExtras, timestamp);
for (size_t i = 0; i < mCompositeStreamMap.size(); i++) {
mCompositeStreamMap.valueAt(i)->onShutter(resultExtras, timestamp);
}
}
void CameraDeviceClient::notifyPrepared(int streamId) {

@@ -82,7 +82,8 @@ status_t CompositeStream::deleteStream() {
return deleteInternalStreams();
}
void CompositeStream::onBufferRequestForFrameNumber(uint64_t frameNumber, int streamId) {
void CompositeStream::onBufferRequestForFrameNumber(uint64_t frameNumber, int streamId,
const CameraMetadata& /*settings*/) {
Mutex::Autolock l(mMutex);
if (!mErrorState && (streamId == getStreamId())) {
mPendingCaptureResults.emplace(frameNumber, CameraMetadata());

@@ -23,6 +23,7 @@
#include <android/hardware/camera2/ICameraDeviceCallbacks.h>
#include <camera/CameraMetadata.h>
#include <camera/camera2/OutputConfiguration.h>
#include <gui/IProducerListener.h>
#include "common/CameraDeviceBase.h"
#include "device3/Camera3StreamInterface.h"
@@ -66,15 +67,24 @@ public:
// Return composite stream id.
virtual int getStreamId() = 0;
// Called when a shutter notification is triggered.
virtual void onShutter(const CaptureResultExtras& /*resultExtras*/, nsecs_t /*timestamp*/) {}
void onResultAvailable(const CaptureResult& result);
bool onError(int32_t errorCode, const CaptureResultExtras& resultExtras);
// Camera3StreamBufferListener implementation
void onBufferAcquired(const BufferInfo& /*bufferInfo*/) override { /*Empty for now */ }
void onBufferReleased(const BufferInfo& bufferInfo) override;
void onBufferRequestForFrameNumber(uint64_t frameNumber, int streamId) override;
void onBufferRequestForFrameNumber(uint64_t frameNumber, int streamId,
const CameraMetadata& settings) override;
protected:
struct ProducerListener : public BnProducerListener {
// ProducerListener implementation
void onBufferReleased() override { /*No impl. for now*/ };
};
status_t registerCompositeStreamListener(int32_t streamId);
void eraseResult(int64_t frameNumber);
void flagAnErrorFrameNumber(int64_t frameNumber);

@@ -21,7 +21,6 @@
#include <dynamic_depth/imaging_model.h>
#include <dynamic_depth/depth_map.h>
#include <gui/IProducerListener.h>
#include <gui/CpuConsumer.h>
#include "CompositeStream.h"
@@ -116,11 +115,6 @@ private:
static const auto kDepthMapDataSpace = HAL_DATASPACE_DEPTH;
static const auto kJpegDataSpace = HAL_DATASPACE_V0_JFIF;
struct ProducerListener : public BnProducerListener {
// ProducerListener implementation
void onBufferReleased() override { /*No impl. for now*/ };
};
int mBlobStreamId, mBlobSurfaceId, mDepthStreamId, mDepthSurfaceId;
size_t mBlobWidth, mBlobHeight;
sp<CpuConsumer> mBlobConsumer, mDepthConsumer;

@@ -0,0 +1,250 @@
/*
* Copyright (C) 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef ANDROID_SERVERS_CAMERA_CAMERA3_HEIC_COMPOSITE_STREAM_H
#define ANDROID_SERVERS_CAMERA_CAMERA3_HEIC_COMPOSITE_STREAM_H
#include <queue>
#include <gui/IProducerListener.h>
#include <gui/CpuConsumer.h>
#include <media/hardware/VideoAPI.h>
#include <media/MediaCodecBuffer.h>
#include <media/stagefright/foundation/ALooper.h>
#include <media/stagefright/MediaCodec.h>
#include <media/stagefright/MediaMuxer.h>
#include "CompositeStream.h"
namespace android {
namespace camera3 {
class HeicCompositeStream : public CompositeStream, public Thread,
public CpuConsumer::FrameAvailableListener {
public:
HeicCompositeStream(wp<CameraDeviceBase> device,
wp<hardware::camera2::ICameraDeviceCallbacks> cb);
~HeicCompositeStream() override;
static bool isHeicCompositeStream(const sp<Surface> &surface);
status_t createInternalStreams(const std::vector<sp<Surface>>& consumers,
bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
camera3_stream_rotation_t rotation, int *id, const String8& physicalCameraId,
std::vector<int> *surfaceIds, int streamSetId, bool isShared) override;
status_t deleteInternalStreams() override;
status_t configureStream() override;
status_t insertGbp(SurfaceMap* /*out*/outSurfaceMap, Vector<int32_t>* /*out*/outputStreamIds,
int32_t* /*out*/currentStreamId) override;
void onShutter(const CaptureResultExtras& resultExtras, nsecs_t timestamp) override;
int getStreamId() override { return mMainImageStreamId; }
// Use onShutter to keep track of frame number <-> timestamp mapping.
void onBufferReleased(const BufferInfo& bufferInfo) override;
void onBufferRequestForFrameNumber(uint64_t frameNumber, int streamId,
const CameraMetadata& settings) override;
// CpuConsumer listener implementation
void onFrameAvailable(const BufferItem& item) override;
// Return stream information about the internal camera streams
static status_t getCompositeStreamInfo(const OutputStreamInfo &streamInfo,
const CameraMetadata& ch, std::vector<OutputStreamInfo>* compositeOutput /*out*/);
static bool isSizeSupportedByHeifEncoder(int32_t width, int32_t height,
bool* useHeic, bool* useGrid, int64_t* stall);
static bool isInMemoryTempFileSupported();
protected:
bool threadLoop() override;
bool onStreamBufferError(const CaptureResultExtras& resultExtras) override;
void onResultError(const CaptureResultExtras& /*resultExtras*/) override {}
private:
//
// HEIC/HEVC Codec related structures, utility functions, and callbacks
//
struct CodecOutputBufferInfo {
int32_t index;
int32_t offset;
int32_t size;
int64_t timeUs;
uint32_t flags;
};
struct CodecInputBufferInfo {
int32_t index;
int64_t timeUs;
size_t tileIndex;
};
class CodecCallbackHandler : public AHandler {
public:
explicit CodecCallbackHandler(wp<HeicCompositeStream> parent) {
mParent = parent;
}
virtual void onMessageReceived(const sp<AMessage> &msg);
private:
wp<HeicCompositeStream> mParent;
};
enum {
kWhatCallbackNotify,
};
bool mUseHeic;
sp<MediaCodec> mCodec;
sp<ALooper> mCodecLooper, mCallbackLooper;
sp<CodecCallbackHandler> mCodecCallbackHandler;
sp<AMessage> mAsyncNotify;
sp<AMessage> mFormat;
size_t mNumOutputTiles;
int32_t mOutputWidth, mOutputHeight;
size_t mMaxHeicBufferSize;
int32_t mGridWidth, mGridHeight;
size_t mGridRows, mGridCols;
bool mUseGrid; // Whether to use framework YUV frame tiling.
static const int64_t kNoFrameDropMaxPtsGap = -1000000;
static const int32_t kNoGridOpRate = 30;
static const int32_t kGridOpRate = 120;
void onHeicOutputFrameAvailable(const CodecOutputBufferInfo& bufferInfo);
void onHeicInputFrameAvailable(int32_t index); // Only called for YUV input mode.
void onHeicFormatChanged(sp<AMessage>& newFormat);
void onHeicCodecError();
status_t initializeCodec(uint32_t width, uint32_t height,
const sp<CameraDeviceBase>& cameraDevice);
void deinitCodec();
//
// Composite stream related structures, utility functions and callbacks.
//
struct InputFrame {
int32_t orientation;
int32_t quality;
CpuConsumer::LockedBuffer appSegmentBuffer;
std::vector<CodecOutputBufferInfo> codecOutputBuffers;
// Fields that are only applicable to HEVC tiling.
CpuConsumer::LockedBuffer yuvBuffer;
std::vector<CodecInputBufferInfo> codecInputBuffers;
bool error;
bool errorNotified;
int64_t frameNumber;
sp<MediaMuxer> muxer;
int fenceFd;
int fileFd;
ssize_t trackIndex;
ANativeWindowBuffer *anb;
bool appSegmentWritten;
size_t pendingOutputTiles;
size_t codecInputCounter;
InputFrame() : orientation(0), quality(kDefaultJpegQuality), error(false),
errorNotified(false), frameNumber(-1), fenceFd(-1), fileFd(-1),
trackIndex(-1), anb(nullptr), appSegmentWritten(false),
pendingOutputTiles(0), codecInputCounter(0) { }
};
void compilePendingInputLocked();
// Find first complete and valid frame with smallest timestamp
bool getNextReadyInputLocked(int64_t *currentTs /*out*/);
// Find next failing frame number with smallest timestamp and return respective frame number
int64_t getNextFailingInputLocked(int64_t *currentTs /*out*/);
status_t processInputFrame(nsecs_t timestamp, InputFrame &inputFrame);
status_t processCodecInputFrame(InputFrame &inputFrame);
status_t startMuxerForInputFrame(nsecs_t timestamp, InputFrame &inputFrame);
status_t processAppSegment(nsecs_t timestamp, InputFrame &inputFrame);
status_t processOneCodecOutputFrame(nsecs_t timestamp, InputFrame &inputFrame);
status_t processCompletedInputFrame(nsecs_t timestamp, InputFrame &inputFrame);
void releaseInputFrameLocked(InputFrame *inputFrame /*out*/);
void releaseInputFramesLocked(int64_t currentTs);
size_t findAppSegmentsSize(const uint8_t* appSegmentBuffer, size_t maxSize,
size_t* app1SegmentSize);
int64_t findTimestampInNsLocked(int64_t timeInUs);
status_t copyOneYuvTile(sp<MediaCodecBuffer>& codecBuffer,
const CpuConsumer::LockedBuffer& yuvBuffer,
size_t top, size_t left, size_t width, size_t height);
static size_t calcAppSegmentMaxSize(const CameraMetadata& info);
static const nsecs_t kWaitDuration = 10000000; // 10 ms
static const int32_t kDefaultJpegQuality = 99;
static const auto kJpegDataSpace = HAL_DATASPACE_V0_JFIF;
static const android_dataspace kAppSegmentDataSpace =
static_cast<android_dataspace>(HAL_DATASPACE_JPEG_APP_SEGMENTS);
static const android_dataspace kHeifDataSpace =
static_cast<android_dataspace>(HAL_DATASPACE_HEIF);
int mAppSegmentStreamId, mAppSegmentSurfaceId;
sp<CpuConsumer> mAppSegmentConsumer;
sp<Surface> mAppSegmentSurface;
bool mAppSegmentBufferAcquired;
size_t mAppSegmentMaxSize;
int mMainImageStreamId, mMainImageSurfaceId;
sp<Surface> mMainImageSurface;
sp<CpuConsumer> mMainImageConsumer; // Only applicable for HEVC codec.
bool mYuvBufferAcquired; // Only applicable to HEVC codec
sp<Surface> mOutputSurface;
sp<ProducerListener> mProducerListener;
// Map from frame number to JPEG setting of orientation+quality
std::map<int64_t, std::pair<int32_t, int32_t>> mSettingsByFrameNumber;
// Map from timestamp to JPEG setting of orientation+quality
std::map<int64_t, std::pair<int32_t, int32_t>> mSettingsByTimestamp;
// Keep all incoming APP segment blob buffers pending further processing.
std::vector<int64_t> mInputAppSegmentBuffers;
// Keep all incoming HEIC blob buffers pending further processing.
std::vector<CodecOutputBufferInfo> mCodecOutputBuffers;
std::queue<int64_t> mCodecOutputBufferTimestamps;
size_t mOutputBufferCounter;
// Keep all incoming YUV buffers pending tiling and encoding (for HEVC YUV tiling only)
std::vector<int64_t> mInputYuvBuffers;
// Keep all codec input buffers ready to be filled out (for HEVC YUV tiling only)
std::vector<int32_t> mCodecInputBuffers;
// Artificial, strictly increasing YUV grid timestamp to keep the encoder happy.
int64_t mGridTimestampUs;
// In most common use case, entries are accessed in order.
std::map<int64_t, InputFrame> mPendingInputFrames;
};
}; // namespace camera3
}; // namespace android
#endif //ANDROID_SERVERS_CAMERA_CAMERA3_HEIC_COMPOSITE_STREAM_H

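One declaration above worth spelling out is findAppSegmentsSize(): the app-segment buffer is a standard JPEG marker stream, so sizing it amounts to walking consecutive APPn segments. A self-contained sketch of that walk based on the JPEG marker format, not the actual implementation:

```cpp
#include <cstddef>
#include <cstdint>

// Scan a JPEG byte stream for consecutive APPn (0xFFE0..0xFFEF) segments after
// SOI (0xFFD8) and return the offset past the last one (0 if none). Each marker
// is followed by a big-endian 16-bit length that includes the two length bytes.
static size_t scanAppSegments(const uint8_t* buf, size_t maxSize) {
    if (maxSize < 2 || buf[0] != 0xFF || buf[1] != 0xD8) return 0;  // no SOI
    size_t offset = 2;
    while (offset + 4 <= maxSize && buf[offset] == 0xFF &&
            buf[offset + 1] >= 0xE0 && buf[offset + 1] <= 0xEF) {
        size_t segmentSize = (static_cast<size_t>(buf[offset + 2]) << 8) | buf[offset + 3];
        if (segmentSize < 2 || offset + 2 + segmentSize > maxSize) break;
        offset += 2 + segmentSize;  // 2-byte marker + payload (incl. length field)
    }
    return (offset > 2) ? offset : 0;
}
```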
@@ -0,0 +1,294 @@
/*
* Copyright (C) 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#define LOG_TAG "HeicEncoderInfoManager"
//#define LOG_NDEBUG 0
#include <cstdint>
#include <regex>
#include <cutils/properties.h>
#include <log/log_main.h>
#include <system/graphics.h>
#include <media/stagefright/MediaCodecList.h>
#include <media/stagefright/foundation/MediaDefs.h>
#include <media/stagefright/foundation/ABuffer.h>
#include "HeicEncoderInfoManager.h"
namespace android {
namespace camera3 {
HeicEncoderInfoManager::HeicEncoderInfoManager() :
mIsInited(false),
mMinSizeHeic(0, 0),
mMaxSizeHeic(INT32_MAX, INT32_MAX),
mHasHEVC(false),
mHasHEIC(false),
mDisableGrid(false) {
if (initialize() == OK) {
mIsInited = true;
}
}
HeicEncoderInfoManager::~HeicEncoderInfoManager() {
}
bool HeicEncoderInfoManager::isSizeSupported(int32_t width, int32_t height, bool* useHeic,
bool* useGrid, int64_t* stall) const {
if (useHeic == nullptr || useGrid == nullptr) {
ALOGE("%s: invalid parameters: useHeic %p, useGrid %p",
__FUNCTION__, useHeic, useGrid);
return false;
}
if (!mIsInited) return false;
bool chooseHeic = false, enableGrid = true;
if (mHasHEIC && width >= mMinSizeHeic.first &&
height >= mMinSizeHeic.second && width <= mMaxSizeHeic.first &&
height <= mMaxSizeHeic.second) {
chooseHeic = true;
enableGrid = false;
} else if (mHasHEVC) {
bool fullSizeSupportedByHevc = (width >= mMinSizeHevc.first &&
height >= mMinSizeHevc.second &&
width <= mMaxSizeHevc.first &&
height <= mMaxSizeHevc.second);
if (fullSizeSupportedByHevc && (mDisableGrid ||
(width <= 1920 && height <= 1080))) {
enableGrid = false;
}
} else {
// No encoder available for the requested size.
return false;
}
if (stall != nullptr) {
// Prefer an encoder that advertises the
// "measured-frame-rate-WIDTHxHEIGHT-range" key.
const FrameRateMaps& maps =
(chooseHeic && mHeicFrameRateMaps.size() > 0) ?
mHeicFrameRateMaps : mHevcFrameRateMaps;
const auto& closestSize = findClosestSize(maps, width, height);
if (closestSize == maps.end()) {
// The "measured-frame-rate-WIDTHxHEIGHT-range" key is optional.
// Hardcode to some default value (3.33ms * tile count) based on resolution.
*stall = 3333333LL * width * height / (kGridWidth * kGridHeight);
return true;
}
// Derive stall durations based on average fps of the closest size.
constexpr int64_t NSEC_PER_SEC = 1000000000LL;
int32_t avgFps = (closestSize->second.first + closestSize->second.second)/2;
float ratio = 1.0f * width * height /
(closestSize->first.first * closestSize->first.second);
*stall = ratio * NSEC_PER_SEC / avgFps;
}
*useHeic = chooseHeic;
*useGrid = enableGrid;
return true;
}
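// Worked example of the stall derivation above, with made-up numbers: for a
// 4096x3072 request whose closest measured size is 3840x2160 at 20-30 fps,
// avgFps = (20 + 30) / 2 = 25,
// ratio = (4096 * 3072) / (3840 * 2160) ~= 1.517, and therefore
// stall ~= 1.517 * 1e9 / 25 ~= 60.7 ms per HEIC capture.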
status_t HeicEncoderInfoManager::initialize() {
mDisableGrid = property_get_bool("camera.heic.disable_grid", false);
sp<IMediaCodecList> codecsList = MediaCodecList::getInstance();
if (codecsList == nullptr) {
// No media codec available.
return OK;
}
sp<AMessage> heicDetails = getCodecDetails(codecsList, MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC);
sp<AMessage> hevcDetails = getCodecDetails(codecsList, MEDIA_MIMETYPE_VIDEO_HEVC);
if (hevcDetails == nullptr) {
if (heicDetails != nullptr) {
ALOGE("%s: Device must support HEVC codec if HEIC codec is available!",
__FUNCTION__);
return BAD_VALUE;
}
return OK;
}
// Check CQ mode for HEVC codec
{
AString bitrateModes;
auto hasItem = hevcDetails->findString("feature-bitrate-modes", &bitrateModes);
if (!hasItem) {
ALOGE("%s: Failed to query bitrate modes for HEVC codec", __FUNCTION__);
return BAD_VALUE;
}
ALOGV("%s: HEVC codec's feature-bitrate-modes value is %d, %s",
__FUNCTION__, hasItem, bitrateModes.c_str());
std::regex pattern("(^|,)CQ($|,)", std::regex_constants::icase);
if (!std::regex_search(bitrateModes.c_str(), pattern)) {
return OK;
}
}
// HEIC size range
if (heicDetails != nullptr) {
auto res = getCodecSizeRange(MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC,
heicDetails, &mMinSizeHeic, &mMaxSizeHeic, &mHeicFrameRateMaps);
if (res != OK) {
ALOGE("%s: Failed to get HEIC codec size range: %s (%d)", __FUNCTION__,
strerror(-res), res);
return BAD_VALUE;
}
mHasHEIC = true;
}
// HEVC size range
{
auto res = getCodecSizeRange(MEDIA_MIMETYPE_VIDEO_HEVC,
hevcDetails, &mMinSizeHevc, &mMaxSizeHevc, &mHevcFrameRateMaps);
if (res != OK) {
ALOGE("%s: Failed to get HEVC codec size range: %s (%d)", __FUNCTION__,
strerror(-res), res);
return BAD_VALUE;
}
mHasHEVC = true;
}
return OK;
}
status_t HeicEncoderInfoManager::getFrameRateMaps(sp<AMessage> details, FrameRateMaps* maps) {
if (details == nullptr || maps == nullptr) {
ALOGE("%s: Invalid input: details: %p, maps: %p", __FUNCTION__, details.get(), maps);
return BAD_VALUE;
}
for (size_t i = 0; i < details->countEntries(); i++) {
AMessage::Type type;
const char* entryName = details->getEntryNameAt(i, &type);
if (type != AMessage::kTypeString) continue;
std::regex frameRateNamePattern("measured-frame-rate-([0-9]+)[*x]([0-9]+)-range",
std::regex_constants::icase);
std::cmatch sizeMatch;
if (std::regex_match(entryName, sizeMatch, frameRateNamePattern) &&
sizeMatch.size() == 3) {
AMessage::ItemData item = details->getEntryAt(i);
AString fpsRangeStr;
if (item.find(&fpsRangeStr)) {
ALOGV("%s: %s", entryName, fpsRangeStr.c_str());
std::regex frameRatePattern("([0-9]+)-([0-9]+)");
std::cmatch fpsMatch;
if (std::regex_match(fpsRangeStr.c_str(), fpsMatch, frameRatePattern) &&
fpsMatch.size() == 3) {
maps->emplace(
std::make_pair(stoi(sizeMatch[1]), stoi(sizeMatch[2])),
std::make_pair(stoi(fpsMatch[1]), stoi(fpsMatch[2])));
} else {
return BAD_VALUE;
}
}
}
}
return OK;
}
status_t HeicEncoderInfoManager::getCodecSizeRange(
const char* codecName,
sp<AMessage> details,
std::pair<int32_t, int32_t>* minSize,
std::pair<int32_t, int32_t>* maxSize,
FrameRateMaps* frameRateMaps) {
if (codecName == nullptr || minSize == nullptr || maxSize == nullptr ||
details == nullptr || frameRateMaps == nullptr) {
return BAD_VALUE;
}
AString sizeRange;
auto hasItem = details->findString("size-range", &sizeRange);
if (!hasItem) {
ALOGE("%s: Failed to query size range for codec %s", __FUNCTION__, codecName);
return BAD_VALUE;
}
ALOGV("%s: %s codec's size range is %s", __FUNCTION__, codecName, sizeRange.c_str());
std::regex pattern("([0-9]+)[*x]([0-9]+)-([0-9]+)[*x]([0-9]+)");
std::cmatch match;
if (std::regex_match(sizeRange.c_str(), match, pattern)) {
if (match.size() == 5) {
minSize->first = stoi(match[1]);
minSize->second = stoi(match[2]);
maxSize->first = stoi(match[3]);
maxSize->second = stoi(match[4]);
if (minSize->first > maxSize->first ||
minSize->second > maxSize->second) {
ALOGE("%s: Invalid %s codec size range: %s",
__FUNCTION__, codecName, sizeRange.c_str());
return BAD_VALUE;
}
} else {
return BAD_VALUE;
}
}
auto res = getFrameRateMaps(details, frameRateMaps);
if (res != OK) {
return res;
}
return OK;
}
HeicEncoderInfoManager::FrameRateMaps::const_iterator HeicEncoderInfoManager::findClosestSize(
const FrameRateMaps& maps, int32_t width, int32_t height) const {
int32_t minDiff = INT32_MAX;
FrameRateMaps::const_iterator closestIter = maps.begin();
for (auto iter = maps.begin(); iter != maps.end(); iter++) {
// Use area difference between the sizes to approximate size
// difference.
int32_t diff = abs(iter->first.first * iter->first.second - width * height);
if (diff < minDiff) {
closestIter = iter;
minDiff = diff;
}
}
return closestIter;
}
sp<AMessage> HeicEncoderInfoManager::getCodecDetails(
sp<IMediaCodecList> codecsList, const char* name) {
ssize_t idx = codecsList->findCodecByType(name, true /*encoder*/);
if (idx < 0) {
return nullptr;
}
const sp<MediaCodecInfo> info = codecsList->getCodecInfo(idx);
if (info == nullptr) {
ALOGE("%s: Failed to get codec info for %s", __FUNCTION__, name);
return nullptr;
}
const sp<MediaCodecInfo::Capabilities> caps =
info->getCapabilitiesFor(name);
if (caps == nullptr) {
ALOGE("%s: Failed to get capabilities for codec %s", __FUNCTION__, name);
return nullptr;
}
const sp<AMessage> details = caps->getDetails();
if (details == nullptr) {
ALOGE("%s: Failed to get details for codec %s", __FUNCTION__, name);
return nullptr;
}
return details;
}
} //namespace camera3
} // namespace android

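The "size-range" string parsed above comes straight from the codec's capability details. A small standalone demonstration of the same regex on an illustrative value; actual ranges are device specific:

```cpp
#include <cstdio>
#include <regex>
#include <string>

int main() {
    const std::string sizeRange = "512x512-8192x8192";  // illustrative value
    std::regex pattern("([0-9]+)[*x]([0-9]+)-([0-9]+)[*x]([0-9]+)");
    std::smatch match;
    if (std::regex_match(sizeRange, match, pattern) && match.size() == 5) {
        // Prints: min 512x512, max 8192x8192
        printf("min %sx%s, max %sx%s\n",
                match[1].str().c_str(), match[2].str().c_str(),
                match[3].str().c_str(), match[4].str().c_str());
    }
    return 0;
}
```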
@@ -0,0 +1,77 @@
/*
* Copyright (C) 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef ANDROID_SERVERS_CAMERA_HEICENCODER_INFO_MANAGER_H
#define ANDROID_SERVERS_CAMERA_HEICENCODER_INFO_MANAGER_H
#include <unordered_map>
#include <utility>
#include <utils/Errors.h>
#include <utils/StrongPointer.h>
#include <media/IMediaCodecList.h>
#include <media/stagefright/foundation/AMessage.h>
namespace android {
namespace camera3 {
class HeicEncoderInfoManager {
public:
static HeicEncoderInfoManager& getInstance() {
static HeicEncoderInfoManager instance;
return instance;
}
bool isSizeSupported(int32_t width, int32_t height,
bool* useHeic, bool* useGrid, int64_t* stall) const;
static const auto kGridWidth = 512;
static const auto kGridHeight = 512;
private:
struct SizePairHash {
std::size_t operator () (const std::pair<int32_t,int32_t> &p) const {
return p.first * 31 + p.second;
}
};
typedef std::unordered_map<std::pair<int32_t, int32_t>,
std::pair<int32_t, int32_t>, SizePairHash> FrameRateMaps;
HeicEncoderInfoManager();
virtual ~HeicEncoderInfoManager();
status_t initialize();
status_t getFrameRateMaps(sp<AMessage> details, FrameRateMaps* maps);
status_t getCodecSizeRange(const char* codecName, sp<AMessage> details,
std::pair<int32_t, int32_t>* minSize, std::pair<int32_t, int32_t>* maxSize,
FrameRateMaps* frameRateMaps);
FrameRateMaps::const_iterator findClosestSize(const FrameRateMaps& maps,
int32_t width, int32_t height) const;
sp<AMessage> getCodecDetails(sp<IMediaCodecList> codecsList, const char* name);
bool mIsInited;
std::pair<int32_t, int32_t> mMinSizeHeic, mMaxSizeHeic;
std::pair<int32_t, int32_t> mMinSizeHevc, mMaxSizeHevc;
bool mHasHEVC, mHasHEIC;
FrameRateMaps mHeicFrameRateMaps, mHevcFrameRateMaps;
bool mDisableGrid;
};
} // namespace camera3
} // namespace android
#endif // ANDROID_SERVERS_CAMERA_HEICENCODER_INFO_MANAGER_H

@@ -38,6 +38,8 @@
#include <hwbinder/IPCThreadState.h>
#include <utils/Trace.h>
#include "api2/HeicCompositeStream.h"
namespace android {
using namespace ::android::hardware::camera;
@@ -874,6 +876,130 @@ status_t CameraProviderManager::ProviderInfo::DeviceInfo3::removeAvailableKeys(
return res;
}
status_t CameraProviderManager::ProviderInfo::DeviceInfo3::fillHeicStreamCombinations(
std::vector<int32_t>* outputs,
std::vector<int64_t>* durations,
std::vector<int64_t>* stallDurations,
const camera_metadata_entry& halStreamConfigs,
const camera_metadata_entry& halStreamDurations) {
if (outputs == nullptr || durations == nullptr || stallDurations == nullptr) {
return BAD_VALUE;
}
static bool supportInMemoryTempFile =
camera3::HeicCompositeStream::isInMemoryTempFileSupported();
if (!supportInMemoryTempFile) {
ALOGI("%s: No HEIC support due to absence of in-memory temp file support",
__FUNCTION__);
return OK;
}
for (size_t i = 0; i < halStreamConfigs.count; i += 4) {
int32_t format = halStreamConfigs.data.i32[i];
// Only IMPLEMENTATION_DEFINED and YCBCR_420_888 streams can be used to
// generate a HEIC image.
if (format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED &&
format != HAL_PIXEL_FORMAT_YCBCR_420_888) {
continue;
}
bool sizeAvail = false;
for (size_t j = 0; j < outputs->size(); j+= 4) {
if ((*outputs)[j+1] == halStreamConfigs.data.i32[i+1] &&
(*outputs)[j+2] == halStreamConfigs.data.i32[i+2]) {
sizeAvail = true;
break;
}
}
if (sizeAvail) continue;
int64_t stall = 0;
bool useHeic, useGrid;
if (camera3::HeicCompositeStream::isSizeSupportedByHeifEncoder(
halStreamConfigs.data.i32[i+1], halStreamConfigs.data.i32[i+2],
&useHeic, &useGrid, &stall)) {
if (useGrid != (format == HAL_PIXEL_FORMAT_YCBCR_420_888)) {
continue;
}
// HEIC configuration
int32_t config[] = {HAL_PIXEL_FORMAT_BLOB, halStreamConfigs.data.i32[i+1],
halStreamConfigs.data.i32[i+2], 0 /*isInput*/};
outputs->insert(outputs->end(), config, config + 4);
// HEIC minFrameDuration
for (size_t j = 0; j < halStreamDurations.count; j += 4) {
if (halStreamDurations.data.i64[j] == format &&
halStreamDurations.data.i64[j+1] == halStreamConfigs.data.i32[i+1] &&
halStreamDurations.data.i64[j+2] == halStreamConfigs.data.i32[i+2]) {
int64_t duration[] = {HAL_PIXEL_FORMAT_BLOB, halStreamConfigs.data.i32[i+1],
halStreamConfigs.data.i32[i+2], halStreamDurations.data.i64[j+3]};
durations->insert(durations->end(), duration, duration+4);
break;
}
}
// HEIC stallDuration
int64_t stallDuration[] = {HAL_PIXEL_FORMAT_BLOB, halStreamConfigs.data.i32[i+1],
halStreamConfigs.data.i32[i+2], stall};
stallDurations->insert(stallDurations->end(), stallDuration, stallDuration+4);
}
}
return OK;
}
status_t CameraProviderManager::ProviderInfo::DeviceInfo3::deriveHeicTags() {
auto& c = mCameraCharacteristics;
camera_metadata_entry halHeicSupport = c.find(ANDROID_HEIC_INFO_SUPPORTED);
if (halHeicSupport.count > 1) {
ALOGE("%s: Invalid entry count %zu for ANDROID_HEIC_INFO_SUPPORTED",
__FUNCTION__, halHeicSupport.count);
return BAD_VALUE;
} else if (halHeicSupport.count == 0 ||
halHeicSupport.data.u8[0] == ANDROID_HEIC_INFO_SUPPORTED_FALSE) {
// Camera HAL doesn't support mandatory stream combinations for HEIC.
return OK;
}
camera_metadata_entry maxJpegAppsSegments =
c.find(ANDROID_HEIC_INFO_MAX_JPEG_APP_SEGMENTS_COUNT);
if (maxJpegAppsSegments.count != 1 || maxJpegAppsSegments.data.u8[0] == 0 ||
maxJpegAppsSegments.data.u8[0] > 16) {
ALOGE("%s: ANDROID_HEIC_INFO_MAX_JPEG_APP_SEGMENTS_COUNT must be within [1, 16]",
__FUNCTION__);
return BAD_VALUE;
}
// Populate HEIC output configurations and their related min frame durations
// and stall durations.
std::vector<int32_t> heicOutputs;
std::vector<int64_t> heicDurations;
std::vector<int64_t> heicStallDurations;
camera_metadata_entry halStreamConfigs =
c.find(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS);
camera_metadata_entry minFrameDurations =
c.find(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS);
status_t res = fillHeicStreamCombinations(&heicOutputs, &heicDurations, &heicStallDurations,
halStreamConfigs, minFrameDurations);
if (res != OK) {
ALOGE("%s: Failed to fill HEIC stream combinations: %s (%d)", __FUNCTION__,
strerror(-res), res);
return res;
}
c.update(ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS,
heicOutputs.data(), heicOutputs.size());
c.update(ANDROID_HEIC_AVAILABLE_HEIC_MIN_FRAME_DURATIONS,
heicDurations.data(), heicDurations.size());
c.update(ANDROID_HEIC_AVAILABLE_HEIC_STALL_DURATIONS,
heicStallDurations.data(), heicStallDurations.size());
return OK;
}
bool CameraProviderManager::isLogicalCamera(const std::string& id,
std::vector<std::string>* physicalCameraIds) {
std::lock_guard<std::mutex> lock(mInterfaceMutex);
@@ -1738,6 +1864,12 @@ CameraProviderManager::ProviderInfo::DeviceInfo3::DeviceInfo3(const std::string&
ALOGE("%s: Failed appending dynamic depth tags: %s (%d)", __FUNCTION__, strerror(-stat),
stat);
}
res = deriveHeicTags();
if (OK != res) {
ALOGE("%s: Unable to derive HEIC tags based on camera and media capabilities: %s (%d)",
__FUNCTION__, strerror(-res), res);
}
camera_metadata_entry flashAvailable =
mCameraCharacteristics.find(ANDROID_FLASH_INFO_AVAILABLE);
if (flashAvailable.count == 1 &&

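The net effect of fillHeicStreamCombinations() and deriveHeicTags() above is a set of BLOB-format tuples appended to the static metadata. For a hypothetical 4032x3024 size accepted by the HEIF encoder, the derived entries would look like this sketch (values illustrative, not from a real device):

```cpp
#include <cstdint>

// HAL_PIXEL_FORMAT_BLOB is 0x21 in system/graphics.h.
constexpr int32_t kBlob = 0x21;
const int32_t heicConfig[]   = {kBlob, 4032, 3024, 0 /* OUTPUT */};
const int64_t heicDuration[] = {kBlob, 4032, 3024, 33333333LL /* min frame duration, ns */};
const int64_t heicStall[]    = {kBlob, 4032, 3024, 300000000LL /* stall, ns */};
```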
@@ -494,6 +494,12 @@ private:
std::vector<std::tuple<size_t, size_t>> *internalDepthSizes /*out*/);
status_t removeAvailableKeys(CameraMetadata& c, const std::vector<uint32_t>& keys,
uint32_t keyTag);
status_t fillHeicStreamCombinations(std::vector<int32_t>* outputs,
std::vector<int64_t>* durations,
std::vector<int64_t>* stallDurations,
const camera_metadata_entry& halStreamConfigs,
const camera_metadata_entry& halStreamDurations);
status_t deriveHeicTags();
};
private:

@@ -1757,16 +1757,18 @@ status_t Camera3Device::createStream(const std::vector<sp<Surface>>& consumers,
if (format == HAL_PIXEL_FORMAT_BLOB) {
ssize_t blobBufferSize;
if (dataSpace != HAL_DATASPACE_DEPTH) {
blobBufferSize = getJpegBufferSize(width, height);
if (dataSpace == HAL_DATASPACE_DEPTH) {
blobBufferSize = getPointCloudBufferSize();
if (blobBufferSize <= 0) {
SET_ERR_L("Invalid jpeg buffer size %zd", blobBufferSize);
SET_ERR_L("Invalid point cloud buffer size %zd", blobBufferSize);
return BAD_VALUE;
}
} else if (dataSpace == static_cast<android_dataspace>(HAL_DATASPACE_JPEG_APP_SEGMENTS)) {
blobBufferSize = width * height;
} else {
blobBufferSize = getPointCloudBufferSize();
blobBufferSize = getJpegBufferSize(width, height);
if (blobBufferSize <= 0) {
SET_ERR_L("Invalid point cloud buffer size %zd", blobBufferSize);
SET_ERR_L("Invalid jpeg buffer size %zd", blobBufferSize);
return BAD_VALUE;
}
}
@@ -5473,8 +5475,22 @@ status_t Camera3Device::RequestThread::prepareHalRequests() {
return TIMED_OUT;
}
}
outputStream->fireBufferRequestForFrameNumber(
captureRequest->mResultExtras.frameNumber);
{
sp<Camera3Device> parent = mParent.promote();
if (parent != nullptr) {
const String8& streamCameraId = outputStream->getPhysicalCameraId();
for (const auto& settings : captureRequest->mSettingsList) {
if ((streamCameraId.isEmpty() &&
parent->getId() == settings.cameraId.c_str()) ||
streamCameraId == settings.cameraId.c_str()) {
outputStream->fireBufferRequestForFrameNumber(
captureRequest->mResultExtras.frameNumber,
settings.metadata);
}
}
}
}
String8 physicalCameraId = outputStream->getPhysicalCameraId();

@@ -763,14 +763,15 @@ status_t Camera3Stream::getInputBufferProducer(sp<IGraphicBufferProducer> *produ
return getInputBufferProducerLocked(producer);
}
void Camera3Stream::fireBufferRequestForFrameNumber(uint64_t frameNumber) {
void Camera3Stream::fireBufferRequestForFrameNumber(uint64_t frameNumber,
const CameraMetadata& settings) {
ATRACE_CALL();
Mutex::Autolock l(mLock);
for (auto &it : mBufferListenerList) {
sp<Camera3StreamBufferListener> listener = it.promote();
if (listener.get() != nullptr) {
listener->onBufferRequestForFrameNumber(frameNumber, getId());
listener->onBufferRequestForFrameNumber(frameNumber, getId(), settings);
}
}
}

@@ -434,7 +434,8 @@ class Camera3Stream :
/**
* Notify buffer stream listeners about incoming request with particular frame number.
*/
void fireBufferRequestForFrameNumber(uint64_t frameNumber) override;
void fireBufferRequestForFrameNumber(uint64_t frameNumber,
const CameraMetadata& settings) override;
protected:
const int mId;

@@ -17,6 +17,7 @@
#ifndef ANDROID_SERVERS_CAMERA3_STREAMBUFFERLISTENER_H
#define ANDROID_SERVERS_CAMERA3_STREAMBUFFERLISTENER_H
#include <camera/CameraMetadata.h>
#include <gui/Surface.h>
#include <utils/RefBase.h>
@@ -42,7 +43,8 @@ public:
// Buffer was released by the HAL
virtual void onBufferReleased(const BufferInfo& bufferInfo) = 0;
// Notify about incoming buffer request frame number
virtual void onBufferRequestForFrameNumber(uint64_t frameNumber, int streamId) = 0;
virtual void onBufferRequestForFrameNumber(uint64_t frameNumber, int streamId,
const CameraMetadata& settings) = 0;
};
}; //namespace camera3

@@ -18,6 +18,8 @@
#define ANDROID_SERVERS_CAMERA3_STREAM_INTERFACE_H
#include <utils/RefBase.h>
#include <camera/CameraMetadata.h>
#include "Camera3StreamBufferListener.h"
#include "Camera3StreamBufferFreedListener.h"
@@ -346,7 +348,8 @@ class Camera3StreamInterface : public virtual RefBase {
/**
* Notify buffer stream listeners about incoming request with particular frame number.
*/
virtual void fireBufferRequestForFrameNumber(uint64_t frameNumber) = 0;
virtual void fireBufferRequestForFrameNumber(uint64_t frameNumber,
const CameraMetadata& settings) = 0;
};
} // namespace camera3

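Every Camera3StreamBufferListener implementer now receives the per-request settings with each buffer request. A minimal sketch of the updated callback shape (the class name is made up; only interface methods changed by this commit are used):

```cpp
#define LOG_TAG "LoggingBufferListener"

#include <cinttypes>
#include <camera/CameraMetadata.h>
#include <log/log.h>

#include "Camera3StreamBufferListener.h"

namespace android {
namespace camera3 {

// Hypothetical listener illustrating the new three-argument callback.
class LoggingBufferListener : public Camera3StreamBufferListener {
  public:
    void onBufferAcquired(const BufferInfo& /*bufferInfo*/) override {}
    void onBufferReleased(const BufferInfo& /*bufferInfo*/) override {}
    void onBufferRequestForFrameNumber(uint64_t frameNumber, int streamId,
            const CameraMetadata& settings) override {
        // The request settings now travel with the buffer request; this is what
        // lets HeicCompositeStream read ANDROID_JPEG_ORIENTATION/QUALITY per frame.
        camera_metadata_ro_entry entry = settings.find(ANDROID_JPEG_QUALITY);
        ALOGV("frame %" PRIu64 " stream %d has %zu quality entries",
                frameNumber, streamId, entry.count);
    }
};

}  // namespace camera3
}  // namespace android
```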
File diff suppressed because it is too large

@@ -0,0 +1,245 @@
/*
* Copyright (C) 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef ANDROID_SERVERS_CAMERA_EXIF_UTILS_H
#define ANDROID_SERVERS_CAMERA_EXIF_UTILS_H
#include "CameraMetadata.h"
namespace android {
namespace camera3 {
// This is based on the camera HIDL shim implementation, which was in turn
// based on the original ChromeOS ARC implementation of a V4L2 HAL.
// ExifUtils can overwrite the APP1 segment with tags set by the caller. ExifUtils can
// also add a thumbnail in the APP1 segment if a thumbnail size is specified.
// ExifUtils can be reused with different images by calling initialize().
//
// Example of using this class:
//  std::unique_ptr<ExifUtils> utils(ExifUtils::create());
//  utils->initialize(app1Segment, app1SegmentSize);
//  ...
//  // Call ExifUtils functions to set Exif tags.
//  ...
//  utils->generateApp1();
//  unsigned int app1Length = utils->getApp1Length();
//  uint8_t* app1Buffer = new uint8_t[app1Length];
//  memcpy(app1Buffer, utils->getApp1Buffer(), app1Length);
class ExifUtils {
public:
virtual ~ExifUtils();
static ExifUtils* create();
// Initialize() can be called multiple times. The setting of Exif tags will be
// cleared.
virtual bool initialize(const unsigned char *app1Segment, size_t app1SegmentSize) = 0;
// Set all known fields from a metadata structure
virtual bool setFromMetadata(const CameraMetadata& metadata,
const size_t imageWidth, const size_t imageHeight) = 0;
// Sets the lens aperture.
// Returns false if memory allocation fails.
virtual bool setAperture(uint32_t numerator, uint32_t denominator) = 0;
// Sets the value of brightness.
// Returns false if memory allocation fails.
virtual bool setBrightness(int32_t numerator, int32_t denominator) = 0;
// Sets the color space.
// Returns false if memory allocation fails.
virtual bool setColorSpace(uint16_t color_space) = 0;
// Sets the information to compressed data.
// Returns false if memory allocation fails.
virtual bool setComponentsConfiguration(const std::string& components_configuration) = 0;
// Sets the compression scheme used for the image data.
// Returns false if memory allocation fails.
virtual bool setCompression(uint16_t compression) = 0;
// Sets image contrast.
// Returns false if memory allocation fails.
virtual bool setContrast(uint16_t contrast) = 0;
// Sets the date and time of image last modified. It takes local time. The
// name of the tag is DateTime in IFD0.
// Returns false if memory allocation fails.
virtual bool setDateTime(const struct tm& t) = 0;
// Sets the image description.
// Returns false if memory allocation fails.
virtual bool setDescription(const std::string& description) = 0;
// Sets the digital zoom ratio. If the numerator is 0, it means digital zoom
// was not used.
// Returns false if memory allocation fails.
virtual bool setDigitalZoomRatio(uint32_t numerator, uint32_t denominator) = 0;
// Sets the exposure bias.
// Returns false if memory allocation fails.
virtual bool setExposureBias(int32_t numerator, int32_t denominator) = 0;
// Sets the exposure mode set when the image was shot.
// Returns false if memory allocation fails.
virtual bool setExposureMode(uint16_t exposure_mode) = 0;
// Sets the program used by the camera to set exposure when the picture is
// taken.
// Returns false if memory allocation fails.
virtual bool setExposureProgram(uint16_t exposure_program) = 0;
// Sets the exposure time, given in seconds.
// Returns false if memory allocation fails.
virtual bool setExposureTime(uint32_t numerator, uint32_t denominator) = 0;
// Sets the status of flash.
// Returns false if memory allocation fails.
virtual bool setFlash(uint16_t flash) = 0;
// Sets the F number.
// Returns false if memory allocation fails.
virtual bool setFNumber(uint32_t numerator, uint32_t denominator) = 0;
// Sets the focal length of lens used to take the image in millimeters.
// Returns false if memory allocation fails.
virtual bool setFocalLength(uint32_t numerator, uint32_t denominator) = 0;
// Sets the degree of overall image gain adjustment.
// Returns false if memory allocation fails.
virtual bool setGainControl(uint16_t gain_control) = 0;
// Sets the altitude in meters.
// Returns false if memory allocation fails.
virtual bool setGpsAltitude(double altitude) = 0;
// Sets the latitude with degrees minutes seconds format.
// Returns false if memory allocation fails.
virtual bool setGpsLatitude(double latitude) = 0;
// Sets the longitude with degrees minutes seconds format.
// Returns false if memory allocation fails.
virtual bool setGpsLongitude(double longitude) = 0;
// Sets GPS processing method.
// Returns false if memory allocation fails.
virtual bool setGpsProcessingMethod(const std::string& method) = 0;
// Sets GPS date stamp and time stamp (atomic clock). It takes UTC time.
// Returns false if memory allocation fails.
virtual bool setGpsTimestamp(const struct tm& t) = 0;
// Sets the height (number of rows) of main image.
// Returns false if memory allocation fails.
virtual bool setImageHeight(uint32_t length) = 0;
// Sets the width (number of columns) of main image.
// Returns false if memory allocation fails.
virtual bool setImageWidth(uint32_t width) = 0;
// Sets the ISO speed.
// Returns false if memory allocation fails.
virtual bool setIsoSpeedRating(uint16_t iso_speed_ratings) = 0;
// Sets the kind of light source.
// Returns false if memory allocation fails.
virtual bool setLightSource(uint16_t light_source) = 0;
// Sets the smallest F number of the lens.
// Returns false if memory allocation fails.
virtual bool setMaxAperture(uint32_t numerator, uint32_t denominator) = 0;
// Sets the metering mode.
// Returns false if memory allocation fails.
virtual bool setMeteringMode(uint16_t metering_mode) = 0;
// Sets image orientation.
// Returns false if memory allocation fails.
virtual bool setOrientation(uint16_t orientation) = 0;
// Sets the unit for measuring XResolution and YResolution.
// Returns false if memory allocation fails.
virtual bool setResolutionUnit(uint16_t resolution_unit) = 0;
// Sets image saturation.
// Returns false if memory allocation fails.
virtual bool setSaturation(uint16_t saturation) = 0;
// Sets the type of scene that was shot.
// Returns false if memory allocation fails.
virtual bool setSceneCaptureType(uint16_t type) = 0;
// Sets image sharpness.
// Returns false if memory allocation fails.
virtual bool setSharpness(uint16_t sharpness) = 0;
// Sets the shutter speed.
// Returns false if memory allocation fails.
virtual bool setShutterSpeed(int32_t numerator, int32_t denominator) = 0;
// Sets the distance to the subject, given in meters.
// Returns false if memory allocation fails.
virtual bool setSubjectDistance(uint32_t numerator, uint32_t denominator) = 0;
// Sets the fractions of seconds for the <DateTime> tag.
// Returns false if memory allocation fails.
virtual bool setSubsecTime(const std::string& subsec_time) = 0;
// Sets the white balance mode set when the image was shot.
// Returns false if memory allocation fails.
virtual bool setWhiteBalance(uint16_t white_balance) = 0;
// Sets the number of pixels per resolution unit in the image width.
// Returns false if memory allocation fails.
virtual bool setXResolution(uint32_t numerator, uint32_t denominator) = 0;
// Sets the position of chrominance components in relation to the luminance
// component.
// Returns false if memory allocation fails.
virtual bool setYCbCrPositioning(uint16_t ycbcr_positioning) = 0;
// Sets the number of pixels per resolution unit in the image length.
// Returns false if memory allocation fails.
virtual bool setYResolution(uint32_t numerator, uint32_t denominator) = 0;
// Sets the manufacturer of camera.
// Returns false if memory allocation fails.
virtual bool setMake(const std::string& make) = 0;
// Sets the model number of camera.
// Returns false if memory allocation fails.
virtual bool setModel(const std::string& model) = 0;
// Generates APP1 segment.
// Returns false if generating APP1 segment fails.
virtual bool generateApp1() = 0;
// Gets buffer of APP1 segment. This method must be called only after calling
// generateApp1().
virtual const uint8_t* getApp1Buffer() = 0;
// Gets length of APP1 segment. This method must be called only after calling
// generateApp1().
virtual unsigned int getApp1Length() = 0;
};
} // namespace camera3
} // namespace android
#endif // ANDROID_SERVERS_CAMERA_EXIF_UTILS_H
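As a usage illustration of the interface above, a hedged sketch (not the in-tree call site; every identifier other than the ExifUtils methods is an assumed input):

```cpp
#include <cstdint>
#include <memory>

static bool overwriteApp1(const unsigned char* app1Segment, size_t app1SegmentSize,
        const android::CameraMetadata& resultMetadata,
        size_t imageWidth, size_t imageHeight) {
    using android::camera3::ExifUtils;
    std::unique_ptr<ExifUtils> utils(ExifUtils::create());
    if (utils == nullptr ||
            !utils->initialize(app1Segment, app1SegmentSize) ||
            !utils->setFromMetadata(resultMetadata, imageWidth, imageHeight) ||
            !utils->setOrientation(6 /* EXIF: rotate 90 degrees clockwise */) ||
            !utils->generateApp1()) {
        return false;
    }
    const uint8_t* app1Buffer = utils->getApp1Buffer();
    unsigned int app1Length = utils->getApp1Length();
    // Hand (app1Buffer, app1Length) to the HEIC muxer as the image's Exif data.
    (void)app1Buffer; (void)app1Length;
    return true;
}
```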