From 68ac7ada1f7329db65445d05c986899dbb13b745 Mon Sep 17 00:00:00 2001 From: Shuzhen Wang Date: Wed, 30 Jan 2019 14:03:28 -0800 Subject: [PATCH] Camera: Add HEIC support - Derive HEIC capabilities from camera HAL and media framework. - Add HeicCompositeStream to encode camera buffers to HEIC buffers. - Add ExifUtils to overwrite JPEG APP segments and send to media codec. - Add NDK enums and corresponding format support. Test: Camera CTS Bug: 79465976 Change-Id: I0a885e76335f3eba4be0fd42241edb0b7349f284 --- camera/ndk/impl/ACameraMetadata.cpp | 39 + .../include/camera/NdkCameraMetadataTags.h | 104 +- media/ndk/NdkImageReader.cpp | 2 + media/ndk/include/media/NdkImage.h | 10 +- services/camera/libcameraservice/Android.bp | 6 + .../api1/client2/JpegProcessor.cpp | 3 +- .../api1/client2/JpegProcessor.h | 3 +- .../api2/CameraDeviceClient.cpp | 54 +- .../libcameraservice/api2/CompositeStream.cpp | 3 +- .../libcameraservice/api2/CompositeStream.h | 12 +- .../api2/DepthCompositeStream.h | 6 - .../api2/HeicCompositeStream.cpp | 1606 +++++++++++++++++ .../api2/HeicCompositeStream.h | 250 +++ .../api2/HeicEncoderInfoManager.cpp | 294 +++ .../api2/HeicEncoderInfoManager.h | 77 + .../common/CameraProviderManager.cpp | 132 ++ .../common/CameraProviderManager.h | 6 + .../device3/Camera3Device.cpp | 30 +- .../device3/Camera3Stream.cpp | 5 +- .../libcameraservice/device3/Camera3Stream.h | 3 +- .../device3/Camera3StreamBufferListener.h | 4 +- .../device3/Camera3StreamInterface.h | 5 +- .../libcameraservice/utils/ExifUtils.cpp | 1046 +++++++++++ .../camera/libcameraservice/utils/ExifUtils.h | 245 +++ 24 files changed, 3908 insertions(+), 37 deletions(-) create mode 100644 services/camera/libcameraservice/api2/HeicCompositeStream.cpp create mode 100644 services/camera/libcameraservice/api2/HeicCompositeStream.h create mode 100644 services/camera/libcameraservice/api2/HeicEncoderInfoManager.cpp create mode 100644 services/camera/libcameraservice/api2/HeicEncoderInfoManager.h create 
mode 100644 services/camera/libcameraservice/utils/ExifUtils.cpp create mode 100644 services/camera/libcameraservice/utils/ExifUtils.h diff --git a/camera/ndk/impl/ACameraMetadata.cpp b/camera/ndk/impl/ACameraMetadata.cpp index 50ad7b28c8..de40990b93 100644 --- a/camera/ndk/impl/ACameraMetadata.cpp +++ b/camera/ndk/impl/ACameraMetadata.cpp @@ -36,6 +36,8 @@ ACameraMetadata::ACameraMetadata(camera_metadata_t* buffer, ACAMERA_METADATA_TYP filterDurations(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS); filterDurations(ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS); filterDurations(ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS); + filterDurations(ANDROID_HEIC_AVAILABLE_HEIC_MIN_FRAME_DURATIONS); + filterDurations(ANDROID_HEIC_AVAILABLE_HEIC_STALL_DURATIONS); } // TODO: filter request/result keys } @@ -174,6 +176,16 @@ ACameraMetadata::filterDurations(uint32_t tag) { filteredDurations.push_back(duration); } break; + case ANDROID_HEIC_AVAILABLE_HEIC_MIN_FRAME_DURATIONS: + case ANDROID_HEIC_AVAILABLE_HEIC_STALL_DURATIONS: + if (format == HAL_PIXEL_FORMAT_BLOB) { + format = AIMAGE_FORMAT_HEIC; + filteredDurations.push_back(format); + filteredDurations.push_back(width); + filteredDurations.push_back(height); + filteredDurations.push_back(duration); + } + break; default: // Should not reach here ALOGE("%s: Unkown tag 0x%x", __FUNCTION__, tag); @@ -247,6 +259,31 @@ ACameraMetadata::filterStreamConfigurations() { filteredDepthStreamConfigs.push_back(isInput); } mData.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS, filteredDepthStreamConfigs); + + entry = mData.find(ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS); + Vector filteredHeicStreamConfigs; + filteredHeicStreamConfigs.setCapacity(entry.count); + + for (size_t i=0; i < entry.count; i += STREAM_CONFIGURATION_SIZE) { + int32_t format = entry.data.i32[i + STREAM_FORMAT_OFFSET]; + int32_t width = entry.data.i32[i + STREAM_WIDTH_OFFSET]; + int32_t height = entry.data.i32[i + STREAM_HEIGHT_OFFSET]; + int32_t 
isInput = entry.data.i32[i + STREAM_IS_INPUT_OFFSET]; + if (isInput == ACAMERA_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS_INPUT) { + // Hide input streams + continue; + } + // Translate HAL formats to NDK format + if (format == HAL_PIXEL_FORMAT_BLOB) { + format = AIMAGE_FORMAT_HEIC; + } + + filteredHeicStreamConfigs.push_back(format); + filteredHeicStreamConfigs.push_back(width); + filteredHeicStreamConfigs.push_back(height); + filteredHeicStreamConfigs.push_back(isInput); + } + mData.update(ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS, filteredHeicStreamConfigs); } bool @@ -485,6 +522,8 @@ std::unordered_set ACameraMetadata::sSystemTags ({ ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION, ANDROID_DEPTH_MAX_DEPTH_SAMPLES, + ANDROID_HEIC_INFO_SUPPORTED, + ANDROID_HEIC_INFO_MAX_JPEG_APP_SEGMENTS_COUNT, }); /*~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~ diff --git a/camera/ndk/include/camera/NdkCameraMetadataTags.h b/camera/ndk/include/camera/NdkCameraMetadataTags.h index 69b9e7e5a2..8c19e1d6cb 100644 --- a/camera/ndk/include/camera/NdkCameraMetadataTags.h +++ b/camera/ndk/include/camera/NdkCameraMetadataTags.h @@ -71,6 +71,8 @@ typedef enum acamera_metadata_section { ACAMERA_DEPTH, ACAMERA_LOGICAL_MULTI_CAMERA, ACAMERA_DISTORTION_CORRECTION, + ACAMERA_HEIC, + ACAMERA_HEIC_INFO, ACAMERA_SECTION_COUNT, ACAMERA_VENDOR = 0x8000 @@ -112,6 +114,8 @@ typedef enum acamera_metadata_section_start { ACAMERA_DISTORTION_CORRECTION_START = ACAMERA_DISTORTION_CORRECTION << 16, + ACAMERA_HEIC_START = ACAMERA_HEIC << 16, + ACAMERA_HEIC_INFO_START = ACAMERA_HEIC_INFO << 16, ACAMERA_VENDOR_START = ACAMERA_VENDOR << 16 } acamera_metadata_section_start_t; @@ -1912,6 +1916,7 @@ typedef enum acamera_metadata_tag { *
  • ACaptureRequest
  • *

    * + *

    This tag is also used for HEIC image capture.

    */ ACAMERA_JPEG_GPS_COORDINATES = // double[3] ACAMERA_JPEG_START, @@ -1927,6 +1932,7 @@ typedef enum acamera_metadata_tag { *
  • ACaptureRequest
  • *

    * + *

    This tag is also used for HEIC image capture.

    */ ACAMERA_JPEG_GPS_PROCESSING_METHOD = // byte ACAMERA_JPEG_START + 1, @@ -1942,6 +1948,7 @@ typedef enum acamera_metadata_tag { *
  • ACaptureRequest
  • *

    * + *

    This tag is also used for HEIC image capture.

    */ ACAMERA_JPEG_GPS_TIMESTAMP = // int64 ACAMERA_JPEG_START + 2, @@ -1986,6 +1993,10 @@ typedef enum acamera_metadata_tag { * *

    For EXTERNAL cameras the sensor orientation will always be set to 0 and the facing will * also be set to EXTERNAL. The above code is not relevant in such case.

    + *

    This tag is also used to describe the orientation of the HEIC image capture, in which + * case the rotation is reflected by + * EXIF orientation flag, and not by + * rotating the image data itself.

    * * @see ACAMERA_SENSOR_ORIENTATION */ @@ -2003,7 +2014,8 @@ typedef enum acamera_metadata_tag { *
  • ACaptureRequest
  • *

    * - *

    85-95 is typical usage range.

    + *

    85-95 is typical usage range. This tag is also used to describe the quality + * of the HEIC image capture.

    */ ACAMERA_JPEG_QUALITY = // byte ACAMERA_JPEG_START + 4, @@ -2019,6 +2031,7 @@ typedef enum acamera_metadata_tag { *
  • ACaptureRequest
  • *

    * + *

    This tag is also used to describe the quality of the HEIC image capture.

    */ ACAMERA_JPEG_THUMBNAIL_QUALITY = // byte ACAMERA_JPEG_START + 5, @@ -2055,6 +2068,10 @@ typedef enum acamera_metadata_tag { * orientation is requested. LEGACY device will always report unrotated thumbnail * size. * + *

    The tag is also used as thumbnail size for HEIC image format capture, in which case the + * the thumbnail rotation is reflected by + * EXIF orientation flag, and not by + * rotating the thumbnail data itself.

    * * @see ACAMERA_JPEG_ORIENTATION */ @@ -2088,6 +2105,7 @@ typedef enum acamera_metadata_tag { * and vice versa. *
  • All non-(0, 0) sizes will have non-zero widths and heights.
  • * + *

    This list is also used as supported thumbnail sizes for HEIC image format capture.

    * * @see ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS */ @@ -5757,6 +5775,80 @@ typedef enum acamera_metadata_tag { ACAMERA_DISTORTION_CORRECTION_START + 1, ACAMERA_DISTORTION_CORRECTION_END, + /** + *

    The available HEIC (ISO/IEC 23008-12) stream + * configurations that this camera device supports + * (i.e. format, width, height, output/input stream).

    + * + *

    Type: int32[n*4] (acamera_metadata_enum_android_heic_available_heic_stream_configurations_t)

    + * + *

    This tag may appear in: + *

      + *
    • ACameraMetadata from ACameraManager_getCameraCharacteristics
    • + *

    + * + *

    The configurations are listed as (format, width, height, input?) tuples.

    + *

    If the camera device supports HEIC image format, it will support identical set of stream + * combinations involving HEIC image format, compared to the combinations involving JPEG + * image format as required by the device's hardware level and capabilities.

    + *

    All the static, control, and dynamic metadata tags related to JPEG apply to HEIC formats. + * Configuring JPEG and HEIC streams at the same time is not supported.

    + *

    All the configuration tuples (format, width, height, input?) will contain + * AIMAGE_FORMAT_HEIC format as OUTPUT only.

    + */ + ACAMERA_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS = // int32[n*4] (acamera_metadata_enum_android_heic_available_heic_stream_configurations_t) + ACAMERA_HEIC_START, + /** + *

    This lists the minimum frame duration for each + * format/size combination for HEIC output formats.

    + * + *

    Type: int64[4*n]

    + * + *

    This tag may appear in: + *

      + *
    • ACameraMetadata from ACameraManager_getCameraCharacteristics
    • + *

    + * + *

    This should correspond to the frame duration when only that + * stream is active, with all processing (typically in android.*.mode) + * set to either OFF or FAST.

    + *

    When multiple streams are used in a request, the minimum frame + * duration will be max(individual stream min durations).

    + *

    See ACAMERA_SENSOR_FRAME_DURATION and + * ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS for more details about + * calculating the max frame rate.

    + * + * @see ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS + * @see ACAMERA_SENSOR_FRAME_DURATION + */ + ACAMERA_HEIC_AVAILABLE_HEIC_MIN_FRAME_DURATIONS = // int64[4*n] + ACAMERA_HEIC_START + 1, + /** + *

    This lists the maximum stall duration for each + * output format/size combination for HEIC streams.

    + * + *

    Type: int64[4*n]

    + * + *

    This tag may appear in: + *

      + *
    • ACameraMetadata from ACameraManager_getCameraCharacteristics
    • + *

    + * + *

    A stall duration is how much extra time would get added + * to the normal minimum frame duration for a repeating request + * that has streams with non-zero stall.

    + *

    This functions similarly to + * ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS for HEIC + * streams.

    + *

    All HEIC output stream formats may have a nonzero stall + * duration.

    + * + * @see ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS + */ + ACAMERA_HEIC_AVAILABLE_HEIC_STALL_DURATIONS = // int64[4*n] + ACAMERA_HEIC_START + 2, + ACAMERA_HEIC_END, + } acamera_metadata_tag_t; /** @@ -8373,6 +8465,16 @@ typedef enum acamera_metadata_enum_acamera_distortion_correction_mode { } acamera_metadata_enum_android_distortion_correction_mode_t; +// ACAMERA_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS +typedef enum acamera_metadata_enum_acamera_heic_available_heic_stream_configurations { + ACAMERA_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS_OUTPUT = 0, + + ACAMERA_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS_INPUT = 1, + +} acamera_metadata_enum_android_heic_available_heic_stream_configurations_t; + + + #endif /* __ANDROID_API__ >= 24 */ __END_DECLS diff --git a/media/ndk/NdkImageReader.cpp b/media/ndk/NdkImageReader.cpp index 010c1aa520..c3eb4379db 100644 --- a/media/ndk/NdkImageReader.cpp +++ b/media/ndk/NdkImageReader.cpp @@ -69,6 +69,7 @@ AImageReader::isSupportedFormatAndUsage(int32_t format, uint64_t usage) { case AIMAGE_FORMAT_DEPTH16: case AIMAGE_FORMAT_DEPTH_POINT_CLOUD: case AIMAGE_FORMAT_Y8: + case AIMAGE_FORMAT_HEIC: return true; case AIMAGE_FORMAT_PRIVATE: // For private format, cpu usage is prohibited. @@ -96,6 +97,7 @@ AImageReader::getNumPlanesForFormat(int32_t format) { case AIMAGE_FORMAT_DEPTH16: case AIMAGE_FORMAT_DEPTH_POINT_CLOUD: case AIMAGE_FORMAT_Y8: + case AIMAGE_FORMAT_HEIC: return 1; case AIMAGE_FORMAT_PRIVATE: return 0; diff --git a/media/ndk/include/media/NdkImage.h b/media/ndk/include/media/NdkImage.h index 15b340c2b7..14d88cbbb2 100644 --- a/media/ndk/include/media/NdkImage.h +++ b/media/ndk/include/media/NdkImage.h @@ -526,7 +526,15 @@ enum AIMAGE_FORMATS { * (in bytes) between adjacent rows.

    * */ - AIMAGE_FORMAT_Y8 = 0x20203859 + AIMAGE_FORMAT_Y8 = 0x20203859, + + /** + * Compressed HEIC format. + * + *

    This format defines the HEIC brand of High Efficiency Image File + * Format as described in ISO/IEC 23008-12.

    + */ + AIMAGE_FORMAT_HEIC = 0x48454946, }; /** diff --git a/services/camera/libcameraservice/Android.bp b/services/camera/libcameraservice/Android.bp index a090479183..2d923bf49d 100644 --- a/services/camera/libcameraservice/Android.bp +++ b/services/camera/libcameraservice/Android.bp @@ -41,6 +41,8 @@ cc_library_shared { "api2/CameraDeviceClient.cpp", "api2/CompositeStream.cpp", "api2/DepthCompositeStream.cpp", + "api2/HeicEncoderInfoManager.cpp", + "api2/HeicCompositeStream.cpp", "device1/CameraHardwareInterface.cpp", "device3/Camera3Device.cpp", "device3/Camera3Stream.cpp", @@ -62,12 +64,14 @@ cc_library_shared { "hidl/HidlCameraService.cpp", "utils/CameraTraces.cpp", "utils/AutoConditionLock.cpp", + "utils/ExifUtils.cpp", "utils/TagMonitor.cpp", "utils/LatencyHistogram.cpp", ], shared_libs: [ "libdl", + "libexif", "libui", "liblog", "libutilscallstack", @@ -85,8 +89,10 @@ cc_library_shared { "libhidlbase", "libhidltransport", "libjpeg", + "libmedia_omx", "libmemunreachable", "libsensorprivacy", + "libstagefright", "libstagefright_foundation", "android.frameworks.cameraservice.common@2.0", "android.frameworks.cameraservice.service@2.0", diff --git a/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp b/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp index e6f75f4d96..36395f3442 100755 --- a/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp +++ b/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp @@ -62,7 +62,8 @@ void JpegProcessor::onFrameAvailable(const BufferItem& /*item*/) { } } -void JpegProcessor::onBufferRequestForFrameNumber(uint64_t /*frameNumber*/, int /*streamId*/) { +void JpegProcessor::onBufferRequestForFrameNumber(uint64_t /*frameNumber*/, + int /*streamId*/, const CameraMetadata& /*settings*/) { // Intentionally left empty } diff --git a/services/camera/libcameraservice/api1/client2/JpegProcessor.h b/services/camera/libcameraservice/api1/client2/JpegProcessor.h index 2ee930e02b..53e6836017 
100644 --- a/services/camera/libcameraservice/api1/client2/JpegProcessor.h +++ b/services/camera/libcameraservice/api1/client2/JpegProcessor.h @@ -54,7 +54,8 @@ class JpegProcessor: // Camera3StreamBufferListener implementation void onBufferAcquired(const BufferInfo& bufferInfo) override; void onBufferReleased(const BufferInfo& bufferInfo) override; - void onBufferRequestForFrameNumber(uint64_t frameNumber, int streamId) override; + void onBufferRequestForFrameNumber(uint64_t frameNumber, int streamId, + const CameraMetadata& settings) override; status_t updateStream(const Parameters ¶ms); status_t deleteStream(); diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp index 9e203da4d4..b512f2b6b1 100644 --- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp +++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp @@ -34,6 +34,7 @@ #include #include "DepthCompositeStream.h" +#include "HeicCompositeStream.h" // Convenience methods for constructing binder::Status objects for error returns @@ -711,21 +712,35 @@ binder::Status CameraDeviceClient::isSessionConfigurationSupported( return res; if (!isStreamInfoValid) { - if (camera3::DepthCompositeStream::isDepthCompositeStream(surface)) { + bool isDepthCompositeStream = + camera3::DepthCompositeStream::isDepthCompositeStream(surface); + bool isHeicCompositeStream = + camera3::HeicCompositeStream::isHeicCompositeStream(surface); + if (isDepthCompositeStream || isHeicCompositeStream) { // We need to take in to account that composite streams can have // additional internal camera streams. 
std::vector compositeStreams; - ret = camera3::DepthCompositeStream::getCompositeStreamInfo(streamInfo, + if (isDepthCompositeStream) { + ret = camera3::DepthCompositeStream::getCompositeStreamInfo(streamInfo, + mDevice->info(), &compositeStreams); + } else { + ret = camera3::HeicCompositeStream::getCompositeStreamInfo(streamInfo, mDevice->info(), &compositeStreams); + } if (ret != OK) { String8 msg = String8::format( - "Camera %s: Failed adding depth composite streams: %s (%d)", + "Camera %s: Failed adding composite streams: %s (%d)", mCameraIdStr.string(), strerror(-ret), ret); ALOGE("%s: %s", __FUNCTION__, msg.string()); return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string()); } - if (compositeStreams.size() > 1) { + if (compositeStreams.size() == 0) { + // No internal streams means composite stream not + // supported. + *status = false; + return binder::Status::ok(); + } else if (compositeStreams.size() > 1) { streamCount += compositeStreams.size() - 1; streamConfiguration.streams.resize(streamCount); } @@ -937,15 +952,16 @@ binder::Status CameraDeviceClient::createStream( int streamId = camera3::CAMERA3_STREAM_ID_INVALID; std::vector surfaceIds; - if (!camera3::DepthCompositeStream::isDepthCompositeStream(surfaces[0])) { - err = mDevice->createStream(surfaces, deferredConsumer, streamInfo.width, - streamInfo.height, streamInfo.format, streamInfo.dataSpace, - static_cast(outputConfiguration.getRotation()), - &streamId, physicalCameraId, &surfaceIds, outputConfiguration.getSurfaceSetID(), - isShared); - } else { - sp compositeStream = new camera3::DepthCompositeStream(mDevice, - getRemoteCallback()); + bool isDepthCompositeStream = camera3::DepthCompositeStream::isDepthCompositeStream(surfaces[0]); + bool isHeicCompisiteStream = camera3::HeicCompositeStream::isHeicCompositeStream(surfaces[0]); + if (isDepthCompositeStream || isHeicCompisiteStream) { + sp compositeStream; + if (isDepthCompositeStream) { + compositeStream = new 
camera3::DepthCompositeStream(mDevice, getRemoteCallback()); + } else { + compositeStream = new camera3::HeicCompositeStream(mDevice, getRemoteCallback()); + } + err = compositeStream->createStream(surfaces, deferredConsumer, streamInfo.width, streamInfo.height, streamInfo.format, static_cast(outputConfiguration.getRotation()), @@ -955,6 +971,12 @@ binder::Status CameraDeviceClient::createStream( mCompositeStreamMap.add(IInterface::asBinder(surfaces[0]->getIGraphicBufferProducer()), compositeStream); } + } else { + err = mDevice->createStream(surfaces, deferredConsumer, streamInfo.width, + streamInfo.height, streamInfo.format, streamInfo.dataSpace, + static_cast(outputConfiguration.getRotation()), + &streamId, physicalCameraId, &surfaceIds, outputConfiguration.getSurfaceSetID(), + isShared); } if (err != OK) { @@ -1437,6 +1459,8 @@ bool CameraDeviceClient::roundBufferDimensionNearest(int32_t width, int32_t heig camera_metadata_ro_entry streamConfigs = (dataSpace == HAL_DATASPACE_DEPTH) ? info.find(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS) : + (dataSpace == static_cast(HAL_DATASPACE_HEIF)) ? 
+ info.find(ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS) : info.find(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS); int32_t bestWidth = -1; @@ -1930,6 +1954,10 @@ void CameraDeviceClient::notifyShutter(const CaptureResultExtras& resultExtras, remoteCb->onCaptureStarted(resultExtras, timestamp); } Camera2ClientBase::notifyShutter(resultExtras, timestamp); + + for (size_t i = 0; i < mCompositeStreamMap.size(); i++) { + mCompositeStreamMap.valueAt(i)->onShutter(resultExtras, timestamp); + } } void CameraDeviceClient::notifyPrepared(int streamId) { diff --git a/services/camera/libcameraservice/api2/CompositeStream.cpp b/services/camera/libcameraservice/api2/CompositeStream.cpp index 796bf42ae0..354eaf93eb 100644 --- a/services/camera/libcameraservice/api2/CompositeStream.cpp +++ b/services/camera/libcameraservice/api2/CompositeStream.cpp @@ -82,7 +82,8 @@ status_t CompositeStream::deleteStream() { return deleteInternalStreams(); } -void CompositeStream::onBufferRequestForFrameNumber(uint64_t frameNumber, int streamId) { +void CompositeStream::onBufferRequestForFrameNumber(uint64_t frameNumber, int streamId, + const CameraMetadata& /*settings*/) { Mutex::Autolock l(mMutex); if (!mErrorState && (streamId == getStreamId())) { mPendingCaptureResults.emplace(frameNumber, CameraMetadata()); diff --git a/services/camera/libcameraservice/api2/CompositeStream.h b/services/camera/libcameraservice/api2/CompositeStream.h index 583774550b..a401a8258c 100644 --- a/services/camera/libcameraservice/api2/CompositeStream.h +++ b/services/camera/libcameraservice/api2/CompositeStream.h @@ -23,6 +23,7 @@ #include #include #include +#include #include "common/CameraDeviceBase.h" #include "device3/Camera3StreamInterface.h" @@ -66,15 +67,24 @@ public: // Return composite stream id. 
virtual int getStreamId() = 0; + // Notify when shutter notify is triggered + virtual void onShutter(const CaptureResultExtras& /*resultExtras*/, nsecs_t /*timestamp*/) {} + void onResultAvailable(const CaptureResult& result); bool onError(int32_t errorCode, const CaptureResultExtras& resultExtras); // Camera3StreamBufferListener implementation void onBufferAcquired(const BufferInfo& /*bufferInfo*/) override { /*Empty for now */ } void onBufferReleased(const BufferInfo& bufferInfo) override; - void onBufferRequestForFrameNumber(uint64_t frameNumber, int streamId) override; + void onBufferRequestForFrameNumber(uint64_t frameNumber, int streamId, + const CameraMetadata& settings) override; protected: + struct ProducerListener : public BnProducerListener { + // ProducerListener implementation + void onBufferReleased() override { /*No impl. for now*/ }; + }; + status_t registerCompositeStreamListener(int32_t streamId); void eraseResult(int64_t frameNumber); void flagAnErrorFrameNumber(int64_t frameNumber); diff --git a/services/camera/libcameraservice/api2/DepthCompositeStream.h b/services/camera/libcameraservice/api2/DepthCompositeStream.h index e8fe517e13..1bf31f47b3 100644 --- a/services/camera/libcameraservice/api2/DepthCompositeStream.h +++ b/services/camera/libcameraservice/api2/DepthCompositeStream.h @@ -21,7 +21,6 @@ #include #include -#include #include #include "CompositeStream.h" @@ -116,11 +115,6 @@ private: static const auto kDepthMapDataSpace = HAL_DATASPACE_DEPTH; static const auto kJpegDataSpace = HAL_DATASPACE_V0_JFIF; - struct ProducerListener : public BnProducerListener { - // ProducerListener implementation - void onBufferReleased() override { /*No impl. 
for now*/ }; - }; - int mBlobStreamId, mBlobSurfaceId, mDepthStreamId, mDepthSurfaceId; size_t mBlobWidth, mBlobHeight; sp mBlobConsumer, mDepthConsumer; diff --git a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp new file mode 100644 index 0000000000..3eba863108 --- /dev/null +++ b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp @@ -0,0 +1,1606 @@ +/* + * Copyright (C) 2019 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#define LOG_TAG "Camera3-HeicCompositeStream" +#define ATRACE_TAG ATRACE_TAG_CAMERA +//#define LOG_NDEBUG 0 + +#include +#include +#include + +#include +#include +#include +#include + +#include +#include +#include +#include +#include +#include + +#include "common/CameraDeviceBase.h" +#include "utils/ExifUtils.h" +#include "HeicEncoderInfoManager.h" +#include "HeicCompositeStream.h" + +using android::hardware::camera::device::V3_5::CameraBlob; +using android::hardware::camera::device::V3_5::CameraBlobId; + +namespace android { +namespace camera3 { + +HeicCompositeStream::HeicCompositeStream(wp device, + wp cb) : + CompositeStream(device, cb), + mUseHeic(false), + mNumOutputTiles(1), + mOutputWidth(0), + mOutputHeight(0), + mMaxHeicBufferSize(0), + mGridWidth(HeicEncoderInfoManager::kGridWidth), + mGridHeight(HeicEncoderInfoManager::kGridHeight), + mGridRows(1), + mGridCols(1), + mUseGrid(false), + mAppSegmentStreamId(-1), + mAppSegmentSurfaceId(-1), + mAppSegmentBufferAcquired(false), + mMainImageStreamId(-1), + mMainImageSurfaceId(-1), + mYuvBufferAcquired(false), + mProducerListener(new ProducerListener()), + mOutputBufferCounter(0), + mGridTimestampUs(0) { +} + +HeicCompositeStream::~HeicCompositeStream() { + // Call deinitCodec in case stream hasn't been deleted yet to avoid any + // memory/resource leak. 
+ deinitCodec(); + + mInputAppSegmentBuffers.clear(); + mCodecOutputBuffers.clear(); + + mAppSegmentStreamId = -1; + mAppSegmentSurfaceId = -1; + mAppSegmentConsumer.clear(); + mAppSegmentSurface.clear(); + + mMainImageStreamId = -1; + mMainImageSurfaceId = -1; + mMainImageConsumer.clear(); + mMainImageSurface.clear(); +} + +bool HeicCompositeStream::isHeicCompositeStream(const sp &surface) { + ANativeWindow *anw = surface.get(); + status_t err; + int format; + if ((err = anw->query(anw, NATIVE_WINDOW_FORMAT, &format)) != OK) { + String8 msg = String8::format("Failed to query Surface format: %s (%d)", strerror(-err), + err); + ALOGE("%s: %s", __FUNCTION__, msg.string()); + return false; + } + + int dataspace; + if ((err = anw->query(anw, NATIVE_WINDOW_DEFAULT_DATASPACE, &dataspace)) != OK) { + String8 msg = String8::format("Failed to query Surface dataspace: %s (%d)", strerror(-err), + err); + ALOGE("%s: %s", __FUNCTION__, msg.string()); + return false; + } + + return ((format == HAL_PIXEL_FORMAT_BLOB) && (dataspace == HAL_DATASPACE_HEIF)); +} + +status_t HeicCompositeStream::createInternalStreams(const std::vector>& consumers, + bool /*hasDeferredConsumer*/, uint32_t width, uint32_t height, int format, + camera3_stream_rotation_t rotation, int *id, const String8& physicalCameraId, + std::vector *surfaceIds, int /*streamSetId*/, bool /*isShared*/) { + + sp device = mDevice.promote(); + if (!device.get()) { + ALOGE("%s: Invalid camera device!", __FUNCTION__); + return NO_INIT; + } + + status_t res = initializeCodec(width, height, device); + if (res != OK) { + ALOGE("%s: Failed to initialize HEIC/HEVC codec: %s (%d)", + __FUNCTION__, strerror(-res), res); + return NO_INIT; + } + + sp producer; + sp consumer; + BufferQueue::createBufferQueue(&producer, &consumer); + mAppSegmentConsumer = new CpuConsumer(consumer, 1); + mAppSegmentConsumer->setFrameAvailableListener(this); + mAppSegmentConsumer->setName(String8("Camera3-HeicComposite-AppSegmentStream")); + 
mAppSegmentSurface = new Surface(producer); + + res = device->createStream(mAppSegmentSurface, mAppSegmentMaxSize, 1, format, + kAppSegmentDataSpace, rotation, &mAppSegmentStreamId, physicalCameraId, surfaceIds); + if (res == OK) { + mAppSegmentSurfaceId = (*surfaceIds)[0]; + } else { + ALOGE("%s: Failed to create JPEG App segment stream: %s (%d)", __FUNCTION__, + strerror(-res), res); + return res; + } + + if (!mUseGrid) { + res = mCodec->createInputSurface(&producer); + if (res != OK) { + ALOGE("%s: Failed to create input surface for Heic codec: %s (%d)", + __FUNCTION__, strerror(-res), res); + return res; + } + } else { + BufferQueue::createBufferQueue(&producer, &consumer); + mMainImageConsumer = new CpuConsumer(consumer, 1); + mMainImageConsumer->setFrameAvailableListener(this); + mMainImageConsumer->setName(String8("Camera3-HeicComposite-HevcInputYUVStream")); + } + mMainImageSurface = new Surface(producer); + + res = mCodec->start(); + if (res != OK) { + ALOGE("%s: Failed to start codec: %s (%d)", __FUNCTION__, + strerror(-res), res); + return res; + } + + std::vector sourceSurfaceId; + //Use YUV_888 format if framework tiling is needed. + int srcStreamFmt = mUseGrid ? 
HAL_PIXEL_FORMAT_YCbCr_420_888 : + HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED; + res = device->createStream(mMainImageSurface, width, height, srcStreamFmt, kHeifDataSpace, + rotation, id, physicalCameraId, &sourceSurfaceId); + if (res == OK) { + mMainImageSurfaceId = sourceSurfaceId[0]; + mMainImageStreamId = *id; + } else { + ALOGE("%s: Failed to create main image stream: %s (%d)", __FUNCTION__, + strerror(-res), res); + return res; + } + + mOutputSurface = consumers[0]; + res = registerCompositeStreamListener(getStreamId()); + if (res != OK) { + ALOGE("%s: Failed to register HAL main image stream", __FUNCTION__); + return res; + } + + return res; +} + +status_t HeicCompositeStream::deleteInternalStreams() { + requestExit(); + auto res = join(); + if (res != OK) { + ALOGE("%s: Failed to join with the main processing thread: %s (%d)", __FUNCTION__, + strerror(-res), res); + } + + deinitCodec(); + + if (mAppSegmentStreamId >= 0) { + sp device = mDevice.promote(); + if (!device.get()) { + ALOGE("%s: Invalid camera device!", __FUNCTION__); + return NO_INIT; + } + + res = device->deleteStream(mAppSegmentStreamId); + mAppSegmentStreamId = -1; + } + + return res; +} + +void HeicCompositeStream::onBufferReleased(const BufferInfo& bufferInfo) { + Mutex::Autolock l(mMutex); + + if (bufferInfo.mError) return; + + mCodecOutputBufferTimestamps.push(bufferInfo.mTimestamp); +} + +// We need to get the settings early to handle the case where the codec output +// arrives earlier than result metadata. 
+void HeicCompositeStream::onBufferRequestForFrameNumber(uint64_t frameNumber, int streamId, + const CameraMetadata& settings) { + ATRACE_ASYNC_BEGIN("HEIC capture", frameNumber); + + Mutex::Autolock l(mMutex); + if (mErrorState || (streamId != getStreamId())) { + return; + } + + mPendingCaptureResults.emplace(frameNumber, CameraMetadata()); + + camera_metadata_ro_entry entry; + + int32_t orientation = 0; + entry = settings.find(ANDROID_JPEG_ORIENTATION); + if (entry.count == 1) { + orientation = entry.data.i32[0]; + } + + int32_t quality = kDefaultJpegQuality; + entry = settings.find(ANDROID_JPEG_QUALITY); + if (entry.count == 1) { + quality = entry.data.i32[0]; + } + + mSettingsByFrameNumber[frameNumber] = std::make_pair(orientation, quality); +} + +void HeicCompositeStream::onFrameAvailable(const BufferItem& item) { + if (item.mDataSpace == static_cast(kAppSegmentDataSpace)) { + ALOGV("%s: JPEG APP segments buffer with ts: %" PRIu64 " ms. arrived!", + __func__, ns2ms(item.mTimestamp)); + + Mutex::Autolock l(mMutex); + if (!mErrorState) { + mInputAppSegmentBuffers.push_back(item.mTimestamp); + mInputReadyCondition.signal(); + } + } else if (item.mDataSpace == kHeifDataSpace) { + ALOGV("%s: YUV_888 buffer with ts: %" PRIu64 " ms. 
arrived!", + __func__, ns2ms(item.mTimestamp)); + + Mutex::Autolock l(mMutex); + if (!mUseGrid) { + ALOGE("%s: YUV_888 internal stream is only supported for HEVC tiling", + __FUNCTION__); + return; + } + if (!mErrorState) { + mInputYuvBuffers.push_back(item.mTimestamp); + mInputReadyCondition.signal(); + } + } else { + ALOGE("%s: Unexpected data space: 0x%x", __FUNCTION__, item.mDataSpace); + } +} + +status_t HeicCompositeStream::getCompositeStreamInfo(const OutputStreamInfo &streamInfo, + const CameraMetadata& ch, std::vector* compositeOutput /*out*/) { + if (compositeOutput == nullptr) { + return BAD_VALUE; + } + + compositeOutput->clear(); + + bool useGrid, useHeic; + bool isSizeSupported = isSizeSupportedByHeifEncoder( + streamInfo.width, streamInfo.height, &useHeic, &useGrid, nullptr); + if (!isSizeSupported) { + // Size is not supported by either encoder. + return OK; + } + + compositeOutput->insert(compositeOutput->end(), 2, streamInfo); + + // JPEG APPS segments Blob stream info + (*compositeOutput)[0].width = calcAppSegmentMaxSize(ch); + (*compositeOutput)[0].height = 1; + (*compositeOutput)[0].format = HAL_PIXEL_FORMAT_BLOB; + (*compositeOutput)[0].dataSpace = kAppSegmentDataSpace; + (*compositeOutput)[0].consumerUsage = GRALLOC_USAGE_SW_READ_OFTEN; + + // YUV/IMPLEMENTATION_DEFINED stream info + (*compositeOutput)[1].width = streamInfo.width; + (*compositeOutput)[1].height = streamInfo.height; + (*compositeOutput)[1].format = useGrid ? HAL_PIXEL_FORMAT_YCbCr_420_888 : + HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED; + (*compositeOutput)[1].dataSpace = kHeifDataSpace; + (*compositeOutput)[1].consumerUsage = useHeic ? GRALLOC_USAGE_HW_IMAGE_ENCODER : + useGrid ? 
GRALLOC_USAGE_SW_READ_OFTEN : GRALLOC_USAGE_HW_VIDEO_ENCODER; + + return NO_ERROR; +} + +bool HeicCompositeStream::isSizeSupportedByHeifEncoder(int32_t width, int32_t height, + bool* useHeic, bool* useGrid, int64_t* stall) { + static HeicEncoderInfoManager& heicManager = HeicEncoderInfoManager::getInstance(); + return heicManager.isSizeSupported(width, height, useHeic, useGrid, stall); +} + +bool HeicCompositeStream::isInMemoryTempFileSupported() { + int memfd = syscall(__NR_memfd_create, "HEIF-try-memfd", MFD_CLOEXEC); + if (memfd == -1) { + if (errno != ENOSYS) { + ALOGE("%s: Failed to create tmpfs file. errno %d", __FUNCTION__, errno); + } + return false; + } + close(memfd); + return true; +} + +void HeicCompositeStream::onHeicOutputFrameAvailable( + const CodecOutputBufferInfo& outputBufferInfo) { + Mutex::Autolock l(mMutex); + + ALOGV("%s: index %d, offset %d, size %d, time %" PRId64 ", flags 0x%x", + __FUNCTION__, outputBufferInfo.index, outputBufferInfo.offset, + outputBufferInfo.size, outputBufferInfo.timeUs, outputBufferInfo.flags); + + if (!mErrorState) { + if ((outputBufferInfo.size > 0) && + ((outputBufferInfo.flags & MediaCodec::BUFFER_FLAG_CODECCONFIG) == 0)) { + mCodecOutputBuffers.push_back(outputBufferInfo); + mInputReadyCondition.signal(); + } else { + mCodec->releaseOutputBuffer(outputBufferInfo.index); + } + } else { + mCodec->releaseOutputBuffer(outputBufferInfo.index); + } +} + +void HeicCompositeStream::onHeicInputFrameAvailable(int32_t index) { + Mutex::Autolock l(mMutex); + + if (!mUseGrid) { + ALOGE("%s: Codec YUV input mode must only be used for Hevc tiling mode", __FUNCTION__); + return; + } + + mCodecInputBuffers.push_back(index); + mInputReadyCondition.signal(); +} + +void HeicCompositeStream::onHeicFormatChanged(sp& newFormat) { + if (newFormat == nullptr) { + ALOGE("%s: newFormat must not be null!", __FUNCTION__); + return; + } + + Mutex::Autolock l(mMutex); + + AString mime; + AString mimeHeic(MIMETYPE_IMAGE_ANDROID_HEIC); + 
newFormat->findString(KEY_MIME, &mime); + if (mime != mimeHeic) { + // For HEVC codec, below keys need to be filled out or overwritten so that the + // muxer can handle them as HEIC output image. + newFormat->setString(KEY_MIME, mimeHeic); + newFormat->setInt32(KEY_WIDTH, mOutputWidth); + newFormat->setInt32(KEY_HEIGHT, mOutputHeight); + if (mUseGrid) { + newFormat->setInt32(KEY_TILE_WIDTH, mGridWidth); + newFormat->setInt32(KEY_TILE_HEIGHT, mGridHeight); + newFormat->setInt32(KEY_GRID_ROWS, mGridRows); + newFormat->setInt32(KEY_GRID_COLUMNS, mGridCols); + } + } + newFormat->setInt32(KEY_IS_DEFAULT, 1 /*isPrimary*/); + + int32_t gridRows, gridCols; + if (newFormat->findInt32(KEY_GRID_ROWS, &gridRows) && + newFormat->findInt32(KEY_GRID_COLUMNS, &gridCols)) { + mNumOutputTiles = gridRows * gridCols; + } else { + mNumOutputTiles = 1; + } + + ALOGV("%s: mNumOutputTiles is %zu", __FUNCTION__, mNumOutputTiles); + mFormat = newFormat; +} + +void HeicCompositeStream::onHeicCodecError() { + Mutex::Autolock l(mMutex); + mErrorState = true; +} + +status_t HeicCompositeStream::configureStream() { + if (isRunning()) { + // Processing thread is already running, nothing more to do. 
+ return NO_ERROR; + } + + if (mOutputSurface.get() == nullptr) { + ALOGE("%s: No valid output surface set!", __FUNCTION__); + return NO_INIT; + } + + auto res = mOutputSurface->connect(NATIVE_WINDOW_API_CAMERA, mProducerListener); + if (res != OK) { + ALOGE("%s: Unable to connect to native window for stream %d", + __FUNCTION__, mMainImageStreamId); + return res; + } + + if ((res = native_window_set_buffers_format(mOutputSurface.get(), HAL_PIXEL_FORMAT_BLOB)) + != OK) { + ALOGE("%s: Unable to configure stream buffer format for stream %d", __FUNCTION__, + mMainImageStreamId); + return res; + } + + ANativeWindow *anwConsumer = mOutputSurface.get(); + int maxConsumerBuffers; + if ((res = anwConsumer->query(anwConsumer, NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, + &maxConsumerBuffers)) != OK) { + ALOGE("%s: Unable to query consumer undequeued" + " buffer count for stream %d", __FUNCTION__, mMainImageStreamId); + return res; + } + + // Cannot use SourceSurface buffer count since it could be codec's 512*512 tile + // buffer count. 
+ int maxProducerBuffers = 1; + if ((res = native_window_set_buffer_count( + anwConsumer, maxProducerBuffers + maxConsumerBuffers)) != OK) { + ALOGE("%s: Unable to set buffer count for stream %d", __FUNCTION__, mMainImageStreamId); + return res; + } + + if ((res = native_window_set_buffers_dimensions(anwConsumer, mMaxHeicBufferSize, 1)) != OK) { + ALOGE("%s: Unable to set buffer dimension %zu x 1 for stream %d: %s (%d)", + __FUNCTION__, mMaxHeicBufferSize, mMainImageStreamId, strerror(-res), res); + return res; + } + + run("HeicCompositeStreamProc"); + + return NO_ERROR; +} + +status_t HeicCompositeStream::insertGbp(SurfaceMap* /*out*/outSurfaceMap, + Vector* /*out*/outputStreamIds, int32_t* /*out*/currentStreamId) { + if (outSurfaceMap->find(mAppSegmentStreamId) == outSurfaceMap->end()) { + (*outSurfaceMap)[mAppSegmentStreamId] = std::vector(); + outputStreamIds->push_back(mAppSegmentStreamId); + } + (*outSurfaceMap)[mAppSegmentStreamId].push_back(mAppSegmentSurfaceId); + + if (outSurfaceMap->find(mMainImageStreamId) == outSurfaceMap->end()) { + (*outSurfaceMap)[mMainImageStreamId] = std::vector(); + outputStreamIds->push_back(mMainImageStreamId); + } + (*outSurfaceMap)[mMainImageStreamId].push_back(mMainImageSurfaceId); + + if (currentStreamId != nullptr) { + *currentStreamId = mMainImageStreamId; + } + + return NO_ERROR; +} + +void HeicCompositeStream::onShutter(const CaptureResultExtras& resultExtras, nsecs_t timestamp) { + Mutex::Autolock l(mMutex); + if (mErrorState) { + return; + } + + if (mSettingsByFrameNumber.find(resultExtras.frameNumber) != mSettingsByFrameNumber.end()) { + mFrameNumberMap.emplace(resultExtras.frameNumber, timestamp); + mSettingsByTimestamp[timestamp] = mSettingsByFrameNumber[resultExtras.frameNumber]; + mSettingsByFrameNumber.erase(resultExtras.frameNumber); + mInputReadyCondition.signal(); + } +} + +void HeicCompositeStream::compilePendingInputLocked() { + while (!mSettingsByTimestamp.empty()) { + auto it = 
mSettingsByTimestamp.begin(); + mPendingInputFrames[it->first].orientation = it->second.first; + mPendingInputFrames[it->first].quality = it->second.second; + mSettingsByTimestamp.erase(it); + } + + while (!mInputAppSegmentBuffers.empty() && !mAppSegmentBufferAcquired) { + CpuConsumer::LockedBuffer imgBuffer; + auto it = mInputAppSegmentBuffers.begin(); + auto res = mAppSegmentConsumer->lockNextBuffer(&imgBuffer); + if (res == NOT_ENOUGH_DATA) { + // Canot not lock any more buffers. + break; + } else if ((res != OK) || (*it != imgBuffer.timestamp)) { + if (res != OK) { + ALOGE("%s: Error locking JPEG_APP_SEGMENTS image buffer: %s (%d)", __FUNCTION__, + strerror(-res), res); + } else { + ALOGE("%s: Expecting JPEG_APP_SEGMENTS buffer with time stamp: %" PRId64 + " received buffer with time stamp: %" PRId64, __FUNCTION__, + *it, imgBuffer.timestamp); + } + mPendingInputFrames[*it].error = true; + mInputAppSegmentBuffers.erase(it); + continue; + } + + if ((mPendingInputFrames.find(imgBuffer.timestamp) != mPendingInputFrames.end()) && + (mPendingInputFrames[imgBuffer.timestamp].error)) { + mAppSegmentConsumer->unlockBuffer(imgBuffer); + } else { + mPendingInputFrames[imgBuffer.timestamp].appSegmentBuffer = imgBuffer; + mAppSegmentBufferAcquired = true; + } + mInputAppSegmentBuffers.erase(it); + } + + while (!mInputYuvBuffers.empty() && !mYuvBufferAcquired) { + CpuConsumer::LockedBuffer imgBuffer; + auto it = mInputYuvBuffers.begin(); + auto res = mMainImageConsumer->lockNextBuffer(&imgBuffer); + if (res == NOT_ENOUGH_DATA) { + // Canot not lock any more buffers. 
+ break; + } else if (res != OK) { + ALOGE("%s: Error locking YUV_888 image buffer: %s (%d)", __FUNCTION__, + strerror(-res), res); + mPendingInputFrames[*it].error = true; + mInputYuvBuffers.erase(it); + continue; + } else if (*it != imgBuffer.timestamp) { + ALOGW("%s: Expecting YUV_888 buffer with time stamp: %" PRId64 " received buffer with " + "time stamp: %" PRId64, __FUNCTION__, *it, imgBuffer.timestamp); + mPendingInputFrames[*it].error = true; + mInputYuvBuffers.erase(it); + continue; + } + + if ((mPendingInputFrames.find(imgBuffer.timestamp) != mPendingInputFrames.end()) && + (mPendingInputFrames[imgBuffer.timestamp].error)) { + mMainImageConsumer->unlockBuffer(imgBuffer); + } else { + mPendingInputFrames[imgBuffer.timestamp].yuvBuffer = imgBuffer; + mYuvBufferAcquired = true; + } + mInputYuvBuffers.erase(it); + } + + while (!mCodecOutputBuffers.empty()) { + auto it = mCodecOutputBuffers.begin(); + // Bitstream buffer timestamp doesn't necessarily directly correlate with input + // buffer timestamp. Assume encoder input to output is FIFO, use a queue + // to look up timestamp. + int64_t bufferTime = -1; + if (mCodecOutputBufferTimestamps.empty()) { + ALOGE("%s: Failed to find buffer timestamp for codec output buffer!", __FUNCTION__); + } else { + // Direct mapping between camera timestamp (in ns) and codec timestamp (in us). + bufferTime = mCodecOutputBufferTimestamps.front(); + mOutputBufferCounter++; + if (mOutputBufferCounter == mNumOutputTiles) { + mCodecOutputBufferTimestamps.pop(); + mOutputBufferCounter = 0; + } + + mPendingInputFrames[bufferTime].codecOutputBuffers.push_back(*it); + } + mCodecOutputBuffers.erase(it); + } + + while (!mFrameNumberMap.empty()) { + auto it = mFrameNumberMap.begin(); + mPendingInputFrames[it->second].frameNumber = it->first; + mFrameNumberMap.erase(it); + } + + // Heic composition doesn't depend on capture result, so no need to check + // mErrorFrameNumbers. Just remove them. 
+ mErrorFrameNumbers.clear(); + + // Distribute codec input buffers to be filled out from YUV output + for (auto it = mPendingInputFrames.begin(); + it != mPendingInputFrames.end() && mCodecInputBuffers.size() > 0; it++) { + InputFrame& inputFrame(it->second); + if (inputFrame.codecInputCounter < mGridRows * mGridCols) { + // Available input tiles that are required for the current input + // image. + size_t newInputTiles = std::min(mCodecInputBuffers.size(), + mGridRows * mGridCols - inputFrame.codecInputCounter); + for (size_t i = 0; i < newInputTiles; i++) { + CodecInputBufferInfo inputInfo = + { mCodecInputBuffers[0], mGridTimestampUs++, inputFrame.codecInputCounter }; + inputFrame.codecInputBuffers.push_back(inputInfo); + + mCodecInputBuffers.erase(mCodecInputBuffers.begin()); + inputFrame.codecInputCounter++; + } + break; + } + } +} + +bool HeicCompositeStream::getNextReadyInputLocked(int64_t *currentTs /*out*/) { + if (currentTs == nullptr) { + return false; + } + + bool newInputAvailable = false; + for (const auto& it : mPendingInputFrames) { + bool appSegmentBufferReady = (it.second.appSegmentBuffer.data != nullptr) && + !it.second.appSegmentWritten; + bool codecOutputReady = !it.second.codecOutputBuffers.empty(); + bool codecInputReady = (it.second.yuvBuffer.data != nullptr) && + (!it.second.codecInputBuffers.empty()); + if ((!it.second.error) && + (it.first < *currentTs) && + (appSegmentBufferReady || codecOutputReady || codecInputReady)) { + *currentTs = it.first; + newInputAvailable = true; + break; + } + } + + return newInputAvailable; +} + +int64_t HeicCompositeStream::getNextFailingInputLocked(int64_t *currentTs /*out*/) { + int64_t res = -1; + if (currentTs == nullptr) { + return res; + } + + for (const auto& it : mPendingInputFrames) { + if (it.second.error && !it.second.errorNotified && (it.first < *currentTs)) { + *currentTs = it.first; + res = it.second.frameNumber; + break; + } + } + + return res; +} + +status_t 
HeicCompositeStream::processInputFrame(nsecs_t timestamp, + InputFrame &inputFrame) { + ATRACE_CALL(); + status_t res = OK; + + bool appSegmentBufferReady = inputFrame.appSegmentBuffer.data != nullptr && + !inputFrame.appSegmentWritten; + bool codecOutputReady = inputFrame.codecOutputBuffers.size() > 0; + bool codecInputReady = inputFrame.yuvBuffer.data != nullptr && + !inputFrame.codecInputBuffers.empty(); + + if (!appSegmentBufferReady && !codecOutputReady && !codecInputReady) { + ALOGW("%s: No valid appSegmentBuffer/codec input/outputBuffer available!", __FUNCTION__); + return OK; + } + + // Handle inputs for Hevc tiling + if (codecInputReady) { + res = processCodecInputFrame(inputFrame); + if (res != OK) { + ALOGE("%s: Failed to process codec input frame: %s (%d)", __FUNCTION__, + strerror(-res), res); + return res; + } + } + + // Initialize and start muxer if not yet done so + if (inputFrame.muxer == nullptr) { + res = startMuxerForInputFrame(timestamp, inputFrame); + if (res != OK) { + ALOGE("%s: Failed to create and start muxer: %s (%d)", __FUNCTION__, + strerror(-res), res); + return res; + } + } + + // Write JPEG APP segments data to the muxer. + if (appSegmentBufferReady && inputFrame.muxer != nullptr) { + res = processAppSegment(timestamp, inputFrame); + if (res != OK) { + ALOGE("%s: Failed to process JPEG APP segments: %s (%d)", __FUNCTION__, + strerror(-res), res); + return res; + } + } + + // Write media codec bitstream buffers to muxer. 
+ while (!inputFrame.codecOutputBuffers.empty()) { + res = processOneCodecOutputFrame(timestamp, inputFrame); + if (res != OK) { + ALOGE("%s: Failed to process codec output frame: %s (%d)", __FUNCTION__, + strerror(-res), res); + return res; + } + } + + if (inputFrame.appSegmentWritten && inputFrame.pendingOutputTiles == 0) { + res = processCompletedInputFrame(timestamp, inputFrame); + if (res != OK) { + ALOGE("%s: Failed to process completed input frame: %s (%d)", __FUNCTION__, + strerror(-res), res); + return res; + } + } + + return res; +} + +status_t HeicCompositeStream::startMuxerForInputFrame(nsecs_t timestamp, InputFrame &inputFrame) { + sp outputANW = mOutputSurface; + if (inputFrame.codecOutputBuffers.size() == 0) { + // No single codec output buffer has been generated. Continue to + // wait. + return OK; + } + + auto res = outputANW->dequeueBuffer(mOutputSurface.get(), &inputFrame.anb, &inputFrame.fenceFd); + if (res != OK) { + ALOGE("%s: Error retrieving output buffer: %s (%d)", __FUNCTION__, strerror(-res), + res); + return res; + } + + // Combine current thread id, stream id and timestamp to uniquely identify image. + std::ostringstream tempOutputFile; + tempOutputFile << "HEIF-" << pthread_self() << "-" + << getStreamId() << "-" << timestamp; + inputFrame.fileFd = syscall(__NR_memfd_create, tempOutputFile.str().c_str(), MFD_CLOEXEC); + if (inputFrame.fileFd < 0) { + ALOGE("%s: Failed to create file %s. 
Error no is %d", __FUNCTION__, + tempOutputFile.str().c_str(), errno); + return NO_INIT; + } + inputFrame.muxer = new MediaMuxer(inputFrame.fileFd, MediaMuxer::OUTPUT_FORMAT_HEIF); + if (inputFrame.muxer == nullptr) { + ALOGE("%s: Failed to create MediaMuxer for file fd %d", + __FUNCTION__, inputFrame.fileFd); + return NO_INIT; + } + + res = inputFrame.muxer->setOrientationHint(inputFrame.orientation); + if (res != OK) { + ALOGE("%s: Failed to setOrientationHint: %s (%d)", __FUNCTION__, + strerror(-res), res); + return res; + } + // Set encoder quality + { + sp qualityParams = new AMessage; + qualityParams->setInt32(PARAMETER_KEY_VIDEO_BITRATE, inputFrame.quality); + res = mCodec->setParameters(qualityParams); + if (res != OK) { + ALOGE("%s: Failed to set codec quality: %s (%d)", + __FUNCTION__, strerror(-res), res); + return res; + } + } + + ssize_t trackId = inputFrame.muxer->addTrack(mFormat); + if (trackId < 0) { + ALOGE("%s: Failed to addTrack to the muxer: %zd", __FUNCTION__, trackId); + return NO_INIT; + } + + inputFrame.trackIndex = trackId; + inputFrame.pendingOutputTiles = mNumOutputTiles; + + res = inputFrame.muxer->start(); + if (res != OK) { + ALOGE("%s: Failed to start MediaMuxer: %s (%d)", + __FUNCTION__, strerror(-res), res); + return res; + } + + return OK; +} + +status_t HeicCompositeStream::processAppSegment(nsecs_t timestamp, InputFrame &inputFrame) { + size_t app1Size = 0; + auto appSegmentSize = findAppSegmentsSize(inputFrame.appSegmentBuffer.data, + inputFrame.appSegmentBuffer.width * inputFrame.appSegmentBuffer.height, + &app1Size); + ALOGV("%s: appSegmentSize is %zu, width %d, height %d, app1Size %zu", __FUNCTION__, + appSegmentSize, inputFrame.appSegmentBuffer.width, + inputFrame.appSegmentBuffer.height, app1Size); + if (appSegmentSize == 0) { + ALOGE("%s: Failed to find JPEG APP segment size", __FUNCTION__); + return NO_INIT; + } + + std::unique_ptr exifUtils(ExifUtils::create()); + auto exifRes = 
exifUtils->initialize(inputFrame.appSegmentBuffer.data, app1Size); + if (!exifRes) { + ALOGE("%s: Failed to initialize ExifUtils object!", __FUNCTION__); + return BAD_VALUE; + } + //TODO: Use capture result metadata and static metadata to fill out the + //rest. + CameraMetadata dummyMeta; + exifRes = exifUtils->setFromMetadata(dummyMeta, mOutputWidth, mOutputHeight); + if (!exifRes) { + ALOGE("%s: Failed to set Exif tags using metadata and main image sizes", __FUNCTION__); + return BAD_VALUE; + } + exifRes = exifUtils->setOrientation(inputFrame.orientation); + if (!exifRes) { + ALOGE("%s: ExifUtils failed to set orientation", __FUNCTION__); + return BAD_VALUE; + } + exifRes = exifUtils->generateApp1(); + if (!exifRes) { + ALOGE("%s: ExifUtils failed to generate APP1 segment", __FUNCTION__); + return BAD_VALUE; + } + + unsigned int newApp1Length = exifUtils->getApp1Length(); + const uint8_t *newApp1Segment = exifUtils->getApp1Buffer(); + + //Assemble the APP1 marker buffer required by MediaCodec + uint8_t kExifApp1Marker[] = {'E', 'x', 'i', 'f', 0xFF, 0xE1, 0x00, 0x00}; + kExifApp1Marker[6] = static_cast(newApp1Length >> 8); + kExifApp1Marker[7] = static_cast(newApp1Length & 0xFF); + size_t appSegmentBufferSize = sizeof(kExifApp1Marker) + + appSegmentSize - app1Size + newApp1Length; + uint8_t* appSegmentBuffer = new uint8_t[appSegmentBufferSize]; + memcpy(appSegmentBuffer, kExifApp1Marker, sizeof(kExifApp1Marker)); + memcpy(appSegmentBuffer + sizeof(kExifApp1Marker), newApp1Segment, newApp1Length); + if (appSegmentSize - app1Size > 0) { + memcpy(appSegmentBuffer + sizeof(kExifApp1Marker) + newApp1Length, + inputFrame.appSegmentBuffer.data + app1Size, appSegmentSize - app1Size); + } + + sp aBuffer = new ABuffer(appSegmentBuffer, appSegmentBufferSize); + auto res = inputFrame.muxer->writeSampleData(aBuffer, inputFrame.trackIndex, + timestamp, MediaCodec::BUFFER_FLAG_MUXER_DATA); + delete[] appSegmentBuffer; + + if (res != OK) { + ALOGE("%s: Failed to write JPEG APP 
segments to muxer: %s (%d)", + __FUNCTION__, strerror(-res), res); + return res; + } + inputFrame.appSegmentWritten = true; + + return OK; +} + +status_t HeicCompositeStream::processCodecInputFrame(InputFrame &inputFrame) { + for (auto& inputBuffer : inputFrame.codecInputBuffers) { + sp buffer; + auto res = mCodec->getInputBuffer(inputBuffer.index, &buffer); + if (res != OK) { + ALOGE("%s: Error getting codec input buffer: %s (%d)", __FUNCTION__, + strerror(-res), res); + return res; + } + + // Copy one tile from source to destination. + size_t tileX = inputBuffer.tileIndex % mGridCols; + size_t tileY = inputBuffer.tileIndex / mGridCols; + size_t top = mGridHeight * tileY; + size_t left = mGridWidth * tileX; + size_t width = (tileX == static_cast(mGridCols) - 1) ? + mOutputWidth - tileX * mGridWidth : mGridWidth; + size_t height = (tileY == static_cast(mGridRows) - 1) ? + mOutputHeight - tileY * mGridHeight : mGridHeight; + ALOGV("%s: inputBuffer tileIndex [%zu, %zu], top %zu, left %zu, width %zu, height %zu", + __FUNCTION__, tileX, tileY, top, left, width, height); + + res = copyOneYuvTile(buffer, inputFrame.yuvBuffer, top, left, width, height); + if (res != OK) { + ALOGE("%s: Failed to copy YUV tile %s (%d)", __FUNCTION__, + strerror(-res), res); + return res; + } + + res = mCodec->queueInputBuffer(inputBuffer.index, 0, buffer->capacity(), + inputBuffer.timeUs, 0, nullptr /*errorDetailMsg*/); + if (res != OK) { + ALOGE("%s: Failed to queueInputBuffer to Codec: %s (%d)", + __FUNCTION__, strerror(-res), res); + return res; + } + } + + inputFrame.codecInputBuffers.clear(); + return OK; +} + +status_t HeicCompositeStream::processOneCodecOutputFrame(nsecs_t timestamp, + InputFrame &inputFrame) { + auto it = inputFrame.codecOutputBuffers.begin(); + sp buffer; + status_t res = mCodec->getOutputBuffer(it->index, &buffer); + if (res != OK) { + ALOGE("%s: Error getting Heic codec output buffer at index %d: %s (%d)", + __FUNCTION__, it->index, strerror(-res), res); + return 
res; + } + if (buffer == nullptr) { + ALOGE("%s: Invalid Heic codec output buffer at index %d", + __FUNCTION__, it->index); + return BAD_VALUE; + } + + sp aBuffer = new ABuffer(buffer->data(), buffer->size()); + res = inputFrame.muxer->writeSampleData( + aBuffer, inputFrame.trackIndex, timestamp, 0 /*flags*/); + if (res != OK) { + ALOGE("%s: Failed to write buffer index %d to muxer: %s (%d)", + __FUNCTION__, it->index, strerror(-res), res); + return res; + } + + mCodec->releaseOutputBuffer(it->index); + if (inputFrame.pendingOutputTiles == 0) { + ALOGW("%s: Codec generated more tiles than expected!", __FUNCTION__); + } else { + inputFrame.pendingOutputTiles--; + } + + inputFrame.codecOutputBuffers.erase(inputFrame.codecOutputBuffers.begin()); + return OK; +} + +status_t HeicCompositeStream::processCompletedInputFrame(nsecs_t timestamp, + InputFrame &inputFrame) { + sp outputANW = mOutputSurface; + inputFrame.muxer->stop(); + + // Copy the content of the file to memory. + sp gb = GraphicBuffer::from(inputFrame.anb); + void* dstBuffer; + auto res = gb->lockAsync(GRALLOC_USAGE_SW_WRITE_OFTEN, &dstBuffer, inputFrame.fenceFd); + if (res != OK) { + ALOGE("%s: Error trying to lock output buffer fence: %s (%d)", __FUNCTION__, + strerror(-res), res); + return res; + } + + off_t fSize = lseek(inputFrame.fileFd, 0, SEEK_END); + if (static_cast(fSize) > mMaxHeicBufferSize - sizeof(CameraBlob)) { + ALOGE("%s: Error: MediaMuxer output size %ld is larger than buffer sizer %zu", + __FUNCTION__, fSize, mMaxHeicBufferSize - sizeof(CameraBlob)); + return BAD_VALUE; + } + + lseek(inputFrame.fileFd, 0, SEEK_SET); + ssize_t bytesRead = read(inputFrame.fileFd, dstBuffer, fSize); + if (bytesRead < fSize) { + ALOGE("%s: Only %zd of %ld bytes read", __FUNCTION__, bytesRead, fSize); + return BAD_VALUE; + } + + close(inputFrame.fileFd); + inputFrame.fileFd = -1; + + // Fill in HEIC header + uint8_t *header = static_cast(dstBuffer) + mMaxHeicBufferSize - sizeof(CameraBlob); + struct CameraBlob 
*blobHeader = (struct CameraBlob *)header; + // Must be in sync with CAMERA3_HEIC_BLOB_ID in android_media_Utils.cpp + blobHeader->blobId = static_cast(0x00FE); + blobHeader->blobSize = fSize; + + res = native_window_set_buffers_timestamp(mOutputSurface.get(), timestamp); + if (res != OK) { + ALOGE("%s: Stream %d: Error setting timestamp: %s (%d)", + __FUNCTION__, getStreamId(), strerror(-res), res); + return res; + } + + res = outputANW->queueBuffer(mOutputSurface.get(), inputFrame.anb, /*fence*/ -1); + if (res != OK) { + ALOGE("%s: Failed to queueBuffer to Heic stream: %s (%d)", __FUNCTION__, + strerror(-res), res); + return res; + } + inputFrame.anb = nullptr; + + return OK; +} + + +void HeicCompositeStream::releaseInputFrameLocked(InputFrame *inputFrame /*out*/) { + if (inputFrame == nullptr) { + return; + } + + if (inputFrame->appSegmentBuffer.data != nullptr) { + mAppSegmentConsumer->unlockBuffer(inputFrame->appSegmentBuffer); + inputFrame->appSegmentBuffer.data = nullptr; + mAppSegmentBufferAcquired = false; + } + + while (!inputFrame->codecOutputBuffers.empty()) { + auto it = inputFrame->codecOutputBuffers.begin(); + ALOGV("%s: releaseOutputBuffer index %d", __FUNCTION__, it->index); + mCodec->releaseOutputBuffer(it->index); + inputFrame->codecOutputBuffers.erase(it); + } + + if (inputFrame->yuvBuffer.data != nullptr) { + mMainImageConsumer->unlockBuffer(inputFrame->yuvBuffer); + inputFrame->yuvBuffer.data = nullptr; + mYuvBufferAcquired = false; + } + + while (!inputFrame->codecInputBuffers.empty()) { + auto it = inputFrame->codecInputBuffers.begin(); + inputFrame->codecInputBuffers.erase(it); + } + + if ((inputFrame->error || mErrorState) && !inputFrame->errorNotified) { + notifyError(inputFrame->frameNumber); + inputFrame->errorNotified = true; + } + + if (inputFrame->fileFd >= 0) { + close(inputFrame->fileFd); + inputFrame->fileFd = -1; + } + + if (inputFrame->anb != nullptr) { + sp outputANW = mOutputSurface; + 
outputANW->cancelBuffer(mOutputSurface.get(), inputFrame->anb, /*fence*/ -1); + inputFrame->anb = nullptr; + } +} + +void HeicCompositeStream::releaseInputFramesLocked(int64_t currentTs) { + auto it = mPendingInputFrames.begin(); + while (it != mPendingInputFrames.end()) { + if (it->first <= currentTs) { + releaseInputFrameLocked(&it->second); + it = mPendingInputFrames.erase(it); + } else { + it++; + } + } +} + +status_t HeicCompositeStream::initializeCodec(uint32_t width, uint32_t height, + const sp& cameraDevice) { + ALOGV("%s", __FUNCTION__); + + bool useGrid = false; + bool isSizeSupported = isSizeSupportedByHeifEncoder(width, height, + &mUseHeic, &useGrid, nullptr); + if (!isSizeSupported) { + ALOGE("%s: Encoder doesnt' support size %u x %u!", + __FUNCTION__, width, height); + return BAD_VALUE; + } + + // Create Looper for MediaCodec. + auto desiredMime = mUseHeic ? MIMETYPE_IMAGE_ANDROID_HEIC : MIMETYPE_VIDEO_HEVC; + mCodecLooper = new ALooper; + mCodecLooper->setName("Camera3-HeicComposite-MediaCodecLooper"); + status_t res = mCodecLooper->start( + false, // runOnCallingThread + false, // canCallJava + PRIORITY_AUDIO); + if (res != OK) { + ALOGE("%s: Failed to start codec looper: %s (%d)", + __FUNCTION__, strerror(-res), res); + return NO_INIT; + } + + // Create HEIC/HEVC codec. + mCodec = MediaCodec::CreateByType(mCodecLooper, desiredMime, true /*encoder*/); + if (mCodec == nullptr) { + ALOGE("%s: Failed to create codec for %s", __FUNCTION__, desiredMime); + return NO_INIT; + } + + // Create Looper and handler for Codec callback. 
+ mCodecCallbackHandler = new CodecCallbackHandler(this); + if (mCodecCallbackHandler == nullptr) { + ALOGE("%s: Failed to create codec callback handler", __FUNCTION__); + return NO_MEMORY; + } + mCallbackLooper = new ALooper; + mCallbackLooper->setName("Camera3-HeicComposite-MediaCodecCallbackLooper"); + res = mCallbackLooper->start( + false, // runOnCallingThread + false, // canCallJava + PRIORITY_AUDIO); + if (res != OK) { + ALOGE("%s: Failed to start media callback looper: %s (%d)", + __FUNCTION__, strerror(-res), res); + return NO_INIT; + } + mCallbackLooper->registerHandler(mCodecCallbackHandler); + + mAsyncNotify = new AMessage(kWhatCallbackNotify, mCodecCallbackHandler); + res = mCodec->setCallback(mAsyncNotify); + if (res != OK) { + ALOGE("%s: Failed to set MediaCodec callback: %s (%d)", __FUNCTION__, + strerror(-res), res); + return res; + } + + // Create output format and configure the Codec. + sp outputFormat = new AMessage(); + outputFormat->setString(KEY_MIME, desiredMime); + outputFormat->setInt32(KEY_BITRATE_MODE, BITRATE_MODE_CQ); + outputFormat->setInt32(KEY_QUALITY, kDefaultJpegQuality); + // Ask codec to skip timestamp check and encode all frames. + outputFormat->setInt64("max-pts-gap-to-encoder", kNoFrameDropMaxPtsGap); + + int32_t gridWidth, gridHeight, gridRows, gridCols; + if (useGrid || mUseHeic) { + gridWidth = HeicEncoderInfoManager::kGridWidth; + gridHeight = HeicEncoderInfoManager::kGridHeight; + gridRows = (height + gridHeight - 1)/gridHeight; + gridCols = (width + gridWidth - 1)/gridWidth; + + if (mUseHeic) { + outputFormat->setInt32(KEY_TILE_WIDTH, gridWidth); + outputFormat->setInt32(KEY_TILE_HEIGHT, gridHeight); + outputFormat->setInt32(KEY_GRID_COLUMNS, gridCols); + outputFormat->setInt32(KEY_GRID_ROWS, gridRows); + } + + } else { + gridWidth = width; + gridHeight = height; + gridRows = 1; + gridCols = 1; + } + + outputFormat->setInt32(KEY_WIDTH, !useGrid ? width : gridWidth); + outputFormat->setInt32(KEY_HEIGHT, !useGrid ? 
height : gridHeight); + outputFormat->setInt32(KEY_I_FRAME_INTERVAL, 0); + outputFormat->setInt32(KEY_COLOR_FORMAT, + useGrid ? COLOR_FormatYUV420Flexible : COLOR_FormatSurface); + outputFormat->setInt32(KEY_FRAME_RATE, gridRows * gridCols); + // This only serves as a hint to encoder when encoding is not real-time. + outputFormat->setInt32(KEY_OPERATING_RATE, useGrid ? kGridOpRate : kNoGridOpRate); + + res = mCodec->configure(outputFormat, nullptr /*nativeWindow*/, + nullptr /*crypto*/, CONFIGURE_FLAG_ENCODE); + if (res != OK) { + ALOGE("%s: Failed to configure codec: %s (%d)", __FUNCTION__, + strerror(-res), res); + return res; + } + + mGridWidth = gridWidth; + mGridHeight = gridHeight; + mGridRows = gridRows; + mGridCols = gridCols; + mUseGrid = useGrid; + mOutputWidth = width; + mOutputHeight = height; + mAppSegmentMaxSize = calcAppSegmentMaxSize(cameraDevice->info()); + mMaxHeicBufferSize = mOutputWidth * mOutputHeight * 3 / 2 + mAppSegmentMaxSize; + + return OK; +} + +void HeicCompositeStream::deinitCodec() { + ALOGV("%s", __FUNCTION__); + if (mCodec != nullptr) { + mCodec->stop(); + mCodec->release(); + mCodec.clear(); + } + + if (mCodecLooper != nullptr) { + mCodecLooper->stop(); + mCodecLooper.clear(); + } + + if (mCallbackLooper != nullptr) { + mCallbackLooper->stop(); + mCallbackLooper.clear(); + } + + mAsyncNotify.clear(); + mFormat.clear(); +} + +// Return the size of the complete list of app segment, 0 indicates failure +size_t HeicCompositeStream::findAppSegmentsSize(const uint8_t* appSegmentBuffer, + size_t maxSize, size_t *app1SegmentSize) { + if (appSegmentBuffer == nullptr || app1SegmentSize == nullptr) { + ALOGE("%s: Invalid input appSegmentBuffer %p, app1SegmentSize %p", + __FUNCTION__, appSegmentBuffer, app1SegmentSize); + return 0; + } + + size_t expectedSize = 0; + // First check for EXIF transport header at the end of the buffer + const uint8_t *header = appSegmentBuffer + (maxSize - sizeof(struct CameraBlob)); + const struct CameraBlob 
*blob = (const struct CameraBlob*)(header); + if (blob->blobId != CameraBlobId::JPEG_APP_SEGMENTS) { + ALOGE("%s: Invalid EXIF blobId %hu", __FUNCTION__, blob->blobId); + return 0; + } + + expectedSize = blob->blobSize; + if (expectedSize == 0 || expectedSize > maxSize - sizeof(struct CameraBlob)) { + ALOGE("%s: Invalid blobSize %zu.", __FUNCTION__, expectedSize); + return 0; + } + + uint32_t totalSize = 0; + + // Verify APP1 marker (mandatory) + uint8_t app1Marker[] = {0xFF, 0xE1}; + if (memcmp(appSegmentBuffer, app1Marker, sizeof(app1Marker))) { + ALOGE("%s: Invalid APP1 marker: %x, %x", __FUNCTION__, + appSegmentBuffer[0], appSegmentBuffer[1]); + return 0; + } + totalSize += sizeof(app1Marker); + + uint16_t app1Size = (static_cast(appSegmentBuffer[totalSize]) << 8) + + appSegmentBuffer[totalSize+1]; + totalSize += app1Size; + + ALOGV("%s: Expected APP segments size %zu, APP1 segment size %u", + __FUNCTION__, expectedSize, app1Size); + while (totalSize < expectedSize) { + if (appSegmentBuffer[totalSize] != 0xFF || + appSegmentBuffer[totalSize+1] <= 0xE1 || + appSegmentBuffer[totalSize+1] > 0xEF) { + // Invalid APPn marker + ALOGE("%s: Invalid APPn marker: %x, %x", __FUNCTION__, + appSegmentBuffer[totalSize], appSegmentBuffer[totalSize+1]); + return 0; + } + totalSize += 2; + + uint16_t appnSize = (static_cast(appSegmentBuffer[totalSize]) << 8) + + appSegmentBuffer[totalSize+1]; + totalSize += appnSize; + } + + if (totalSize != expectedSize) { + ALOGE("%s: Invalid JPEG APP segments: totalSize %u vs expected size %zu", + __FUNCTION__, totalSize, expectedSize); + return 0; + } + + *app1SegmentSize = app1Size + sizeof(app1Marker); + return expectedSize; +} + +int64_t HeicCompositeStream::findTimestampInNsLocked(int64_t timeInUs) { + for (const auto& fn : mFrameNumberMap) { + if (timeInUs == ns2us(fn.second)) { + return fn.second; + } + } + for (const auto& inputFrame : mPendingInputFrames) { + if (timeInUs == ns2us(inputFrame.first)) { + return inputFrame.first; + } 
+ } + return -1; +} + +status_t HeicCompositeStream::copyOneYuvTile(sp& codecBuffer, + const CpuConsumer::LockedBuffer& yuvBuffer, + size_t top, size_t left, size_t width, size_t height) { + ATRACE_CALL(); + + // Get stride information for codecBuffer + sp imageData; + if (!codecBuffer->meta()->findBuffer("image-data", &imageData)) { + ALOGE("%s: Codec input buffer is not for image data!", __FUNCTION__); + return BAD_VALUE; + } + if (imageData->size() != sizeof(MediaImage2)) { + ALOGE("%s: Invalid codec input image size %zu, expected %zu", + __FUNCTION__, imageData->size(), sizeof(MediaImage2)); + return BAD_VALUE; + } + MediaImage2* imageInfo = reinterpret_cast(imageData->data()); + if (imageInfo->mType != MediaImage2::MEDIA_IMAGE_TYPE_YUV || + imageInfo->mBitDepth != 8 || + imageInfo->mBitDepthAllocated != 8 || + imageInfo->mNumPlanes != 3) { + ALOGE("%s: Invalid codec input image info: mType %d, mBitDepth %d, " + "mBitDepthAllocated %d, mNumPlanes %d!", __FUNCTION__, + imageInfo->mType, imageInfo->mBitDepth, + imageInfo->mBitDepthAllocated, imageInfo->mNumPlanes); + return BAD_VALUE; + } + + ALOGV("%s: yuvBuffer chromaStep %d, chromaStride %d", + __FUNCTION__, yuvBuffer.chromaStep, yuvBuffer.chromaStride); + ALOGV("%s: U offset %u, V offset %u, U rowInc %d, V rowInc %d, U colInc %d, V colInc %d", + __FUNCTION__, imageInfo->mPlane[MediaImage2::U].mOffset, + imageInfo->mPlane[MediaImage2::V].mOffset, + imageInfo->mPlane[MediaImage2::U].mRowInc, + imageInfo->mPlane[MediaImage2::V].mRowInc, + imageInfo->mPlane[MediaImage2::U].mColInc, + imageInfo->mPlane[MediaImage2::V].mColInc); + + // Y + for (auto row = top; row < top+height; row++) { + uint8_t *dst = codecBuffer->data() + imageInfo->mPlane[MediaImage2::Y].mOffset + + imageInfo->mPlane[MediaImage2::Y].mRowInc * (row - top); + memcpy(dst, yuvBuffer.data+row*yuvBuffer.stride+left, width); + } + + // U is Cb, V is Cr + bool codecUPlaneFirst = imageInfo->mPlane[MediaImage2::V].mOffset > + 
imageInfo->mPlane[MediaImage2::U].mOffset; + uint32_t codecUvOffsetDiff = codecUPlaneFirst ? + imageInfo->mPlane[MediaImage2::V].mOffset - imageInfo->mPlane[MediaImage2::U].mOffset : + imageInfo->mPlane[MediaImage2::U].mOffset - imageInfo->mPlane[MediaImage2::V].mOffset; + bool isCodecUvSemiplannar = (codecUvOffsetDiff == 1) && + (imageInfo->mPlane[MediaImage2::U].mRowInc == + imageInfo->mPlane[MediaImage2::V].mRowInc) && + (imageInfo->mPlane[MediaImage2::U].mColInc == 2) && + (imageInfo->mPlane[MediaImage2::V].mColInc == 2); + bool isCodecUvPlannar = + ((codecUPlaneFirst && codecUvOffsetDiff >= + imageInfo->mPlane[MediaImage2::U].mRowInc * imageInfo->mHeight/2) || + ((!codecUPlaneFirst && codecUvOffsetDiff >= + imageInfo->mPlane[MediaImage2::V].mRowInc * imageInfo->mHeight/2))) && + imageInfo->mPlane[MediaImage2::U].mColInc == 1 && + imageInfo->mPlane[MediaImage2::V].mColInc == 1; + bool cameraUPlaneFirst = yuvBuffer.dataCr > yuvBuffer.dataCb; + + if (isCodecUvSemiplannar && yuvBuffer.chromaStep == 2 && + (codecUPlaneFirst == cameraUPlaneFirst)) { + // UV semiplannar + // The chrome plane could be either Cb first, or Cr first. Take the + // smaller address. + uint8_t *src = std::min(yuvBuffer.dataCb, yuvBuffer.dataCr); + MediaImage2::PlaneIndex dstPlane = codecUvOffsetDiff > 0 ? 
MediaImage2::U : MediaImage2::V; + for (auto row = top/2; row < (top+height)/2; row++) { + uint8_t *dst = codecBuffer->data() + imageInfo->mPlane[dstPlane].mOffset + + imageInfo->mPlane[dstPlane].mRowInc * (row - top/2); + memcpy(dst, src+row*yuvBuffer.chromaStride+left, width); + } + } else if (isCodecUvPlannar && yuvBuffer.chromaStep == 1) { + // U plane + for (auto row = top/2; row < (top+height)/2; row++) { + uint8_t *dst = codecBuffer->data() + imageInfo->mPlane[MediaImage2::U].mOffset + + imageInfo->mPlane[MediaImage2::U].mRowInc * (row - top/2); + memcpy(dst, yuvBuffer.dataCb+row*yuvBuffer.chromaStride+left/2, width/2); + } + + // V plane + for (auto row = top/2; row < (top+height)/2; row++) { + uint8_t *dst = codecBuffer->data() + imageInfo->mPlane[MediaImage2::V].mOffset + + imageInfo->mPlane[MediaImage2::V].mRowInc * (row - top/2); + memcpy(dst, yuvBuffer.dataCr+row*yuvBuffer.chromaStride+left/2, width/2); + } + } else { + // Convert between semiplannar and plannar + uint8_t *dst = codecBuffer->data(); + for (auto row = top/2; row < (top+height)/2; row++) { + for (auto col = left/2; col < (left+width)/2; col++) { + // U/Cb + int32_t dstIndex = imageInfo->mPlane[MediaImage2::U].mOffset + + imageInfo->mPlane[MediaImage2::U].mRowInc * (row - top/2) + + imageInfo->mPlane[MediaImage2::U].mColInc * (col - left/2); + int32_t srcIndex = row * yuvBuffer.chromaStride + yuvBuffer.chromaStep * col; + dst[dstIndex] = yuvBuffer.dataCb[srcIndex]; + + // V/Cr + dstIndex = imageInfo->mPlane[MediaImage2::V].mOffset + + imageInfo->mPlane[MediaImage2::V].mRowInc * (row - top/2) + + imageInfo->mPlane[MediaImage2::V].mColInc * (col - left/2); + srcIndex = row * yuvBuffer.chromaStride + yuvBuffer.chromaStep * col; + dst[dstIndex] = yuvBuffer.dataCr[srcIndex]; + } + } + } + return OK; +} + +size_t HeicCompositeStream::calcAppSegmentMaxSize(const CameraMetadata& info) { + camera_metadata_ro_entry_t entry = info.find(ANDROID_HEIC_INFO_MAX_JPEG_APP_SEGMENTS_COUNT); + size_t 
maxAppsSegment = 1; + if (entry.count > 0) { + maxAppsSegment = entry.data.u8[0] < 1 ? 1 : + entry.data.u8[0] > 16 ? 16 : entry.data.u8[0]; + } + return maxAppsSegment * (2 + 0xFFFF) + sizeof(struct CameraBlob); +} + +bool HeicCompositeStream::threadLoop() { + int64_t currentTs = INT64_MAX; + bool newInputAvailable = false; + + { + Mutex::Autolock l(mMutex); + if (mErrorState) { + // In case we landed in error state, return any pending buffers and + // halt all further processing. + compilePendingInputLocked(); + releaseInputFramesLocked(currentTs); + return false; + } + + + while (!newInputAvailable) { + compilePendingInputLocked(); + newInputAvailable = getNextReadyInputLocked(¤tTs); + + if (!newInputAvailable) { + auto failingFrameNumber = getNextFailingInputLocked(¤tTs); + if (failingFrameNumber >= 0) { + // We cannot erase 'mPendingInputFrames[currentTs]' at this point because it is + // possible for two internal stream buffers to fail. In such scenario the + // composite stream should notify the client about a stream buffer error only + // once and this information is kept within 'errorNotified'. + // Any present failed input frames will be removed on a subsequent call to + // 'releaseInputFramesLocked()'. 
+ releaseInputFrameLocked(&mPendingInputFrames[currentTs]); + currentTs = INT64_MAX; + } + + auto ret = mInputReadyCondition.waitRelative(mMutex, kWaitDuration); + if (ret == TIMED_OUT) { + return true; + } else if (ret != OK) { + ALOGE("%s: Timed wait on condition failed: %s (%d)", __FUNCTION__, + strerror(-ret), ret); + return false; + } + } + } + } + + auto res = processInputFrame(currentTs, mPendingInputFrames[currentTs]); + Mutex::Autolock l(mMutex); + if (res != OK) { + ALOGE("%s: Failed processing frame with timestamp: %" PRIu64 ": %s (%d)", + __FUNCTION__, currentTs, strerror(-res), res); + mPendingInputFrames[currentTs].error = true; + } + + if (mPendingInputFrames[currentTs].error || + (mPendingInputFrames[currentTs].appSegmentWritten && + mPendingInputFrames[currentTs].pendingOutputTiles == 0)) { + releaseInputFramesLocked(currentTs); + } + + return true; +} + +bool HeicCompositeStream::onStreamBufferError(const CaptureResultExtras& resultExtras) { + bool res = false; + // Buffer errors concerning internal composite streams should not be directly visible to + // camera clients. They must only receive a single buffer error with the public composite + // stream id. 
+ if ((resultExtras.errorStreamId == mAppSegmentStreamId) || + (resultExtras.errorStreamId == mMainImageStreamId)) { + flagAnErrorFrameNumber(resultExtras.frameNumber); + res = true; + } + + return res; +} + +void HeicCompositeStream::CodecCallbackHandler::onMessageReceived(const sp &msg) { + sp parent = mParent.promote(); + if (parent == nullptr) return; + + switch (msg->what()) { + case kWhatCallbackNotify: { + int32_t cbID; + if (!msg->findInt32("callbackID", &cbID)) { + ALOGE("kWhatCallbackNotify: callbackID is expected."); + break; + } + + ALOGV("kWhatCallbackNotify: cbID = %d", cbID); + + switch (cbID) { + case MediaCodec::CB_INPUT_AVAILABLE: { + int32_t index; + if (!msg->findInt32("index", &index)) { + ALOGE("CB_INPUT_AVAILABLE: index is expected."); + break; + } + parent->onHeicInputFrameAvailable(index); + break; + } + + case MediaCodec::CB_OUTPUT_AVAILABLE: { + int32_t index; + size_t offset; + size_t size; + int64_t timeUs; + int32_t flags; + + if (!msg->findInt32("index", &index)) { + ALOGE("CB_OUTPUT_AVAILABLE: index is expected."); + break; + } + if (!msg->findSize("offset", &offset)) { + ALOGE("CB_OUTPUT_AVAILABLE: offset is expected."); + break; + } + if (!msg->findSize("size", &size)) { + ALOGE("CB_OUTPUT_AVAILABLE: size is expected."); + break; + } + if (!msg->findInt64("timeUs", &timeUs)) { + ALOGE("CB_OUTPUT_AVAILABLE: timeUs is expected."); + break; + } + if (!msg->findInt32("flags", &flags)) { + ALOGE("CB_OUTPUT_AVAILABLE: flags is expected."); + break; + } + + CodecOutputBufferInfo bufferInfo = { + index, + (int32_t)offset, + (int32_t)size, + timeUs, + (uint32_t)flags}; + + parent->onHeicOutputFrameAvailable(bufferInfo); + break; + } + + case MediaCodec::CB_OUTPUT_FORMAT_CHANGED: { + sp format; + if (!msg->findMessage("format", &format)) { + ALOGE("CB_OUTPUT_FORMAT_CHANGED: format is expected."); + break; + } + + parent->onHeicFormatChanged(format); + break; + } + + case MediaCodec::CB_ERROR: { + status_t err; + int32_t actionCode; + AString 
detail; + if (!msg->findInt32("err", &err)) { + ALOGE("CB_ERROR: err is expected."); + break; + } + if (!msg->findInt32("action", &actionCode)) { + ALOGE("CB_ERROR: action is expected."); + break; + } + msg->findString("detail", &detail); + ALOGE("Codec reported error(0x%x), actionCode(%d), detail(%s)", + err, actionCode, detail.c_str()); + + parent->onHeicCodecError(); + break; + } + + default: { + ALOGE("kWhatCallbackNotify: callbackID(%d) is unexpected.", cbID); + break; + } + } + break; + } + + default: + ALOGE("shouldn't be here"); + break; + } +} + +}; // namespace camera3 +}; // namespace android diff --git a/services/camera/libcameraservice/api2/HeicCompositeStream.h b/services/camera/libcameraservice/api2/HeicCompositeStream.h new file mode 100644 index 0000000000..0a762566de --- /dev/null +++ b/services/camera/libcameraservice/api2/HeicCompositeStream.h @@ -0,0 +1,250 @@ +/* + * Copyright (C) 2019 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef ANDROID_SERVERS_CAMERA_CAMERA3_HEIC_COMPOSITE_STREAM_H +#define ANDROID_SERVERS_CAMERA_CAMERA3_HEIC_COMPOSITE_STREAM_H + +#include + +#include +#include + +#include +#include +#include +#include +#include + +#include "CompositeStream.h" + +namespace android { +namespace camera3 { + +class HeicCompositeStream : public CompositeStream, public Thread, + public CpuConsumer::FrameAvailableListener { +public: + HeicCompositeStream(wp device, + wp cb); + ~HeicCompositeStream() override; + + static bool isHeicCompositeStream(const sp &surface); + + status_t createInternalStreams(const std::vector>& consumers, + bool hasDeferredConsumer, uint32_t width, uint32_t height, int format, + camera3_stream_rotation_t rotation, int *id, const String8& physicalCameraId, + std::vector *surfaceIds, int streamSetId, bool isShared) override; + + status_t deleteInternalStreams() override; + + status_t configureStream() override; + + status_t insertGbp(SurfaceMap* /*out*/outSurfaceMap, Vector* /*out*/outputStreamIds, + int32_t* /*out*/currentStreamId) override; + + void onShutter(const CaptureResultExtras& resultExtras, nsecs_t timestamp) override; + + int getStreamId() override { return mMainImageStreamId; } + + // Use onShutter to keep track of frame number <-> timestamp mapping. 
+ void onBufferReleased(const BufferInfo& bufferInfo) override; + void onBufferRequestForFrameNumber(uint64_t frameNumber, int streamId, + const CameraMetadata& settings) override; + + // CpuConsumer listener implementation + void onFrameAvailable(const BufferItem& item) override; + + // Return stream information about the internal camera streams + static status_t getCompositeStreamInfo(const OutputStreamInfo &streamInfo, + const CameraMetadata& ch, std::vector* compositeOutput /*out*/); + + static bool isSizeSupportedByHeifEncoder(int32_t width, int32_t height, + bool* useHeic, bool* useGrid, int64_t* stall); + static bool isInMemoryTempFileSupported(); +protected: + + bool threadLoop() override; + bool onStreamBufferError(const CaptureResultExtras& resultExtras) override; + void onResultError(const CaptureResultExtras& /*resultExtras*/) override {} + +private: + // + // HEIC/HEVC Codec related structures, utility functions, and callbacks + // + struct CodecOutputBufferInfo { + int32_t index; + int32_t offset; + int32_t size; + int64_t timeUs; + uint32_t flags; + }; + + struct CodecInputBufferInfo { + int32_t index; + int64_t timeUs; + size_t tileIndex; + }; + + class CodecCallbackHandler : public AHandler { + public: + explicit CodecCallbackHandler(wp parent) { + mParent = parent; + } + virtual void onMessageReceived(const sp &msg); + private: + wp mParent; + }; + + enum { + kWhatCallbackNotify, + }; + + bool mUseHeic; + sp mCodec; + sp mCodecLooper, mCallbackLooper; + sp mCodecCallbackHandler; + sp mAsyncNotify; + sp mFormat; + size_t mNumOutputTiles; + + int32_t mOutputWidth, mOutputHeight; + size_t mMaxHeicBufferSize; + int32_t mGridWidth, mGridHeight; + size_t mGridRows, mGridCols; + bool mUseGrid; // Whether to use framework YUV frame tiling. 
+ + static const int64_t kNoFrameDropMaxPtsGap = -1000000; + static const int32_t kNoGridOpRate = 30; + static const int32_t kGridOpRate = 120; + + void onHeicOutputFrameAvailable(const CodecOutputBufferInfo& bufferInfo); + void onHeicInputFrameAvailable(int32_t index); // Only called for YUV input mode. + void onHeicFormatChanged(sp& newFormat); + void onHeicCodecError(); + + status_t initializeCodec(uint32_t width, uint32_t height, + const sp& cameraDevice); + void deinitCodec(); + + // + // Composite stream related structures, utility functions and callbacks. + // + struct InputFrame { + int32_t orientation; + int32_t quality; + + CpuConsumer::LockedBuffer appSegmentBuffer; + std::vector codecOutputBuffers; + + // Fields that are only applicable to HEVC tiling. + CpuConsumer::LockedBuffer yuvBuffer; + std::vector codecInputBuffers; + + bool error; + bool errorNotified; + int64_t frameNumber; + + sp muxer; + int fenceFd; + int fileFd; + ssize_t trackIndex; + ANativeWindowBuffer *anb; + + bool appSegmentWritten; + size_t pendingOutputTiles; + size_t codecInputCounter; + + InputFrame() : orientation(0), quality(kDefaultJpegQuality), error(false), + errorNotified(false), frameNumber(-1), fenceFd(-1), fileFd(-1), + trackIndex(-1), anb(nullptr), appSegmentWritten(false), + pendingOutputTiles(0), codecInputCounter(0) { } + }; + + void compilePendingInputLocked(); + // Find first complete and valid frame with smallest timestamp + bool getNextReadyInputLocked(int64_t *currentTs /*out*/); + // Find next failing frame number with smallest timestamp and return respective frame number + int64_t getNextFailingInputLocked(int64_t *currentTs /*out*/); + + status_t processInputFrame(nsecs_t timestamp, InputFrame &inputFrame); + status_t processCodecInputFrame(InputFrame &inputFrame); + status_t startMuxerForInputFrame(nsecs_t timestamp, InputFrame &inputFrame); + status_t processAppSegment(nsecs_t timestamp, InputFrame &inputFrame); + status_t processOneCodecOutputFrame(nsecs_t 
timestamp, InputFrame &inputFrame); + status_t processCompletedInputFrame(nsecs_t timestamp, InputFrame &inputFrame); + + void releaseInputFrameLocked(InputFrame *inputFrame /*out*/); + void releaseInputFramesLocked(int64_t currentTs); + + size_t findAppSegmentsSize(const uint8_t* appSegmentBuffer, size_t maxSize, + size_t* app1SegmentSize); + int64_t findTimestampInNsLocked(int64_t timeInUs); + status_t copyOneYuvTile(sp& codecBuffer, + const CpuConsumer::LockedBuffer& yuvBuffer, + size_t top, size_t left, size_t width, size_t height); + static size_t calcAppSegmentMaxSize(const CameraMetadata& info); + + static const nsecs_t kWaitDuration = 10000000; // 10 ms + static const int32_t kDefaultJpegQuality = 99; + static const auto kJpegDataSpace = HAL_DATASPACE_V0_JFIF; + static const android_dataspace kAppSegmentDataSpace = + static_cast(HAL_DATASPACE_JPEG_APP_SEGMENTS); + static const android_dataspace kHeifDataSpace = + static_cast(HAL_DATASPACE_HEIF); + + int mAppSegmentStreamId, mAppSegmentSurfaceId; + sp mAppSegmentConsumer; + sp mAppSegmentSurface; + bool mAppSegmentBufferAcquired; + size_t mAppSegmentMaxSize; + + int mMainImageStreamId, mMainImageSurfaceId; + sp mMainImageSurface; + sp mMainImageConsumer; // Only applicable for HEVC codec. + bool mYuvBufferAcquired; // Only applicable to HEVC codec + + sp mOutputSurface; + sp mProducerListener; + + + // Map from frame number to JPEG setting of orientation+quality + std::map> mSettingsByFrameNumber; + // Map from timestamp to JPEG setting of orientation+quality + std::map> mSettingsByTimestamp; + + // Keep all incoming APP segment Blob buffer pending further processing. + std::vector mInputAppSegmentBuffers; + + // Keep all incoming HEIC blob buffer pending further processing. 
+ std::vector mCodecOutputBuffers; + std::queue mCodecOutputBufferTimestamps; + size_t mOutputBufferCounter; + + // Keep all incoming Yuv buffer pending tiling and encoding (for HEVC YUV tiling only) + std::vector mInputYuvBuffers; + // Keep all codec input buffers ready to be filled out (for HEVC YUV tiling only) + std::vector mCodecInputBuffers; + + // Artificial strictly incremental YUV grid timestamp to make encoder happy. + int64_t mGridTimestampUs; + + // In most common use case, entries are accessed in order. + std::map mPendingInputFrames; +}; + +}; // namespace camera3 +}; // namespace android + +#endif //ANDROID_SERVERS_CAMERA_CAMERA3_HEIC_COMPOSITE_STREAM_H diff --git a/services/camera/libcameraservice/api2/HeicEncoderInfoManager.cpp b/services/camera/libcameraservice/api2/HeicEncoderInfoManager.cpp new file mode 100644 index 0000000000..ed9be6e934 --- /dev/null +++ b/services/camera/libcameraservice/api2/HeicEncoderInfoManager.cpp @@ -0,0 +1,294 @@ +/* + * Copyright (C) 2019 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#define LOG_TAG "HeicEncoderInfoManager" +//#define LOG_NDEBUG 0 + +#include +#include + +#include +#include +#include + +#include +#include +#include + +#include "HeicEncoderInfoManager.h" + +namespace android { +namespace camera3 { + +HeicEncoderInfoManager::HeicEncoderInfoManager() : + mIsInited(false), + mMinSizeHeic(0, 0), + mMaxSizeHeic(INT32_MAX, INT32_MAX), + mHasHEVC(false), + mHasHEIC(false), + mDisableGrid(false) { + if (initialize() == OK) { + mIsInited = true; + } +} + +HeicEncoderInfoManager::~HeicEncoderInfoManager() { +} + +bool HeicEncoderInfoManager::isSizeSupported(int32_t width, int32_t height, bool* useHeic, + bool* useGrid, int64_t* stall) const { + if (useHeic == nullptr || useGrid == nullptr) { + ALOGE("%s: invalid parameters: useHeic %p, useGrid %p", + __FUNCTION__, useHeic, useGrid); + return false; + } + if (!mIsInited) return false; + + bool chooseHeic = false, enableGrid = true; + if (mHasHEIC && width >= mMinSizeHeic.first && + height >= mMinSizeHeic.second && width <= mMaxSizeHeic.first && + height <= mMaxSizeHeic.second) { + chooseHeic = true; + enableGrid = false; + } else if (mHasHEVC) { + bool fullSizeSupportedByHevc = (width >= mMinSizeHevc.first && + height >= mMinSizeHevc.second && + width <= mMaxSizeHevc.first && + height <= mMaxSizeHevc.second); + if (fullSizeSupportedByHevc && (mDisableGrid || + (width <= 1920 && height <= 1080))) { + enableGrid = false; + } + } else { + // No encoder available for the requested size. + return false; + } + + if (stall != nullptr) { + // Find preferred encoder which advertise + // "measured-frame-rate-WIDTHxHEIGHT-range" key. + const FrameRateMaps& maps = + (chooseHeic && mHeicFrameRateMaps.size() > 0) ? + mHeicFrameRateMaps : mHevcFrameRateMaps; + const auto& closestSize = findClosestSize(maps, width, height); + if (closestSize == maps.end()) { + // The "measured-frame-rate-WIDTHxHEIGHT-range" key is optional. 
+ // Hardcode to some default value (3.33ms * tile count) based on resolution. + *stall = 3333333LL * width * height / (kGridWidth * kGridHeight); + return true; + } + + // Derive stall durations based on average fps of the closest size. + constexpr int64_t NSEC_PER_SEC = 1000000000LL; + int32_t avgFps = (closestSize->second.first + closestSize->second.second)/2; + float ratio = 1.0f * width * height / + (closestSize->first.first * closestSize->first.second); + *stall = ratio * NSEC_PER_SEC / avgFps; + } + + *useHeic = chooseHeic; + *useGrid = enableGrid; + return true; +} + +status_t HeicEncoderInfoManager::initialize() { + mDisableGrid = property_get_bool("camera.heic.disable_grid", false); + sp codecsList = MediaCodecList::getInstance(); + if (codecsList == nullptr) { + // No media codec available. + return OK; + } + + sp heicDetails = getCodecDetails(codecsList, MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC); + sp hevcDetails = getCodecDetails(codecsList, MEDIA_MIMETYPE_VIDEO_HEVC); + + if (hevcDetails == nullptr) { + if (heicDetails != nullptr) { + ALOGE("%s: Device must support HEVC codec if HEIC codec is available!", + __FUNCTION__); + return BAD_VALUE; + } + return OK; + } + + // Check CQ mode for HEVC codec + { + AString bitrateModes; + auto hasItem = hevcDetails->findString("feature-bitrate-modes", &bitrateModes); + if (!hasItem) { + ALOGE("%s: Failed to query bitrate modes for HEVC codec", __FUNCTION__); + return BAD_VALUE; + } + ALOGV("%s: HEVC codec's feature-bitrate-modes value is %d, %s", + __FUNCTION__, hasItem, bitrateModes.c_str()); + std::regex pattern("(^|,)CQ($|,)", std::regex_constants::icase); + if (!std::regex_search(bitrateModes.c_str(), pattern)) { + return OK; + } + } + + // HEIC size range + if (heicDetails != nullptr) { + auto res = getCodecSizeRange(MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC, + heicDetails, &mMinSizeHeic, &mMaxSizeHeic, &mHeicFrameRateMaps); + if (res != OK) { + ALOGE("%s: Failed to get HEIC codec size range: %s (%d)", __FUNCTION__, + 
strerror(-res), res); + return BAD_VALUE; + } + mHasHEIC = true; + } + + // HEVC size range + { + auto res = getCodecSizeRange(MEDIA_MIMETYPE_VIDEO_HEVC, + hevcDetails, &mMinSizeHevc, &mMaxSizeHevc, &mHevcFrameRateMaps); + if (res != OK) { + ALOGE("%s: Failed to get HEVC codec size range: %s (%d)", __FUNCTION__, + strerror(-res), res); + return BAD_VALUE; + } + + mHasHEVC = true; + } + + return OK; +} + +status_t HeicEncoderInfoManager::getFrameRateMaps(sp details, FrameRateMaps* maps) { + if (details == nullptr || maps == nullptr) { + ALOGE("%s: Invalid input: details: %p, maps: %p", __FUNCTION__, details.get(), maps); + return BAD_VALUE; + } + + for (size_t i = 0; i < details->countEntries(); i++) { + AMessage::Type type; + const char* entryName = details->getEntryNameAt(i, &type); + if (type != AMessage::kTypeString) continue; + std::regex frameRateNamePattern("measured-frame-rate-([0-9]+)[*x]([0-9]+)-range", + std::regex_constants::icase); + std::cmatch sizeMatch; + if (std::regex_match(entryName, sizeMatch, frameRateNamePattern) && + sizeMatch.size() == 3) { + AMessage::ItemData item = details->getEntryAt(i); + AString fpsRangeStr; + if (item.find(&fpsRangeStr)) { + ALOGV("%s: %s", entryName, fpsRangeStr.c_str()); + std::regex frameRatePattern("([0-9]+)-([0-9]+)"); + std::cmatch fpsMatch; + if (std::regex_match(fpsRangeStr.c_str(), fpsMatch, frameRatePattern) && + fpsMatch.size() == 3) { + maps->emplace( + std::make_pair(stoi(sizeMatch[1]), stoi(sizeMatch[2])), + std::make_pair(stoi(fpsMatch[1]), stoi(fpsMatch[2]))); + } else { + return BAD_VALUE; + } + } + } + } + return OK; +} + +status_t HeicEncoderInfoManager::getCodecSizeRange( + const char* codecName, + sp details, + std::pair* minSize, + std::pair* maxSize, + FrameRateMaps* frameRateMaps) { + if (codecName == nullptr || minSize == nullptr || maxSize == nullptr || + details == nullptr || frameRateMaps == nullptr) { + return BAD_VALUE; + } + + AString sizeRange; + auto hasItem = 
details->findString("size-range", &sizeRange); + if (!hasItem) { + ALOGE("%s: Failed to query size range for codec %s", __FUNCTION__, codecName); + return BAD_VALUE; + } + ALOGV("%s: %s codec's size range is %s", __FUNCTION__, codecName, sizeRange.c_str()); + std::regex pattern("([0-9]+)[*x]([0-9]+)-([0-9]+)[*x]([0-9]+)"); + std::cmatch match; + if (std::regex_match(sizeRange.c_str(), match, pattern)) { + if (match.size() == 5) { + minSize->first = stoi(match[1]); + minSize->second = stoi(match[2]); + maxSize->first = stoi(match[3]); + maxSize->second = stoi(match[4]); + if (minSize->first > maxSize->first || + minSize->second > maxSize->second) { + ALOGE("%s: Invalid %s code size range: %s", + __FUNCTION__, codecName, sizeRange.c_str()); + return BAD_VALUE; + } + } else { + return BAD_VALUE; + } + } + + auto res = getFrameRateMaps(details, frameRateMaps); + if (res != OK) { + return res; + } + + return OK; +} + +HeicEncoderInfoManager::FrameRateMaps::const_iterator HeicEncoderInfoManager::findClosestSize( + const FrameRateMaps& maps, int32_t width, int32_t height) const { + int32_t minDiff = INT32_MAX; + FrameRateMaps::const_iterator closestIter = maps.begin(); + for (auto iter = maps.begin(); iter != maps.end(); iter++) { + // Use area difference between the sizes to approximate size + // difference. 
+ int32_t diff = abs(iter->first.first * iter->first.second - width * height); + if (diff < minDiff) { + closestIter = iter; + minDiff = diff; + } + } + return closestIter; +} + +sp HeicEncoderInfoManager::getCodecDetails( + sp codecsList, const char* name) { + ssize_t idx = codecsList->findCodecByType(name, true /*encoder*/); + if (idx < 0) { + return nullptr; + } + + const sp info = codecsList->getCodecInfo(idx); + if (info == nullptr) { + ALOGE("%s: Failed to get codec info for %s", __FUNCTION__, name); + return nullptr; + } + const sp caps = + info->getCapabilitiesFor(name); + if (caps == nullptr) { + ALOGE("%s: Failed to get capabilities for codec %s", __FUNCTION__, name); + return nullptr; + } + const sp details = caps->getDetails(); + if (details == nullptr) { + ALOGE("%s: Failed to get details for codec %s", __FUNCTION__, name); + return nullptr; + } + + return details; +} +} //namespace camera3 +} // namespace android diff --git a/services/camera/libcameraservice/api2/HeicEncoderInfoManager.h b/services/camera/libcameraservice/api2/HeicEncoderInfoManager.h new file mode 100644 index 0000000000..fb0b9140a9 --- /dev/null +++ b/services/camera/libcameraservice/api2/HeicEncoderInfoManager.h @@ -0,0 +1,77 @@ +/* + * Copyright (C) 2019 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef ANDROID_SERVERS_CAMERA_HEICENCODER_INFO_MANAGER_H +#define ANDROID_SERVERS_CAMERA_HEICENCODER_INFO_MANAGER_H + +#include +#include +#include +#include + +#include +#include + +namespace android { +namespace camera3 { + +class HeicEncoderInfoManager { +public: + static HeicEncoderInfoManager& getInstance() { + static HeicEncoderInfoManager instance; + return instance; + } + + bool isSizeSupported(int32_t width, int32_t height, + bool* useHeic, bool* useGrid, int64_t* stall) const; + + static const auto kGridWidth = 512; + static const auto kGridHeight = 512; +private: + struct SizePairHash { + std::size_t operator () (const std::pair &p) const { + return p.first * 31 + p.second; + } + }; + + typedef std::unordered_map, + std::pair, SizePairHash> FrameRateMaps; + + HeicEncoderInfoManager(); + virtual ~HeicEncoderInfoManager(); + + status_t initialize(); + status_t getFrameRateMaps(sp details, FrameRateMaps* maps); + status_t getCodecSizeRange(const char* codecName, sp details, + std::pair* minSize, std::pair* maxSize, + FrameRateMaps* frameRateMaps); + FrameRateMaps::const_iterator findClosestSize(const FrameRateMaps& maps, + int32_t width, int32_t height) const; + sp getCodecDetails(sp codecsList, const char* name); + + bool mIsInited; + std::pair mMinSizeHeic, mMaxSizeHeic; + std::pair mMinSizeHevc, mMaxSizeHevc; + bool mHasHEVC, mHasHEIC; + FrameRateMaps mHeicFrameRateMaps, mHevcFrameRateMaps; + bool mDisableGrid; + +}; + +} // namespace camera3 +} // namespace android + +#endif // ANDROID_SERVERS_CAMERA_HEICENCODER_INFO_MANAGER_H diff --git a/services/camera/libcameraservice/common/CameraProviderManager.cpp b/services/camera/libcameraservice/common/CameraProviderManager.cpp index 8ee3298ffe..f35c66afe8 100644 --- a/services/camera/libcameraservice/common/CameraProviderManager.cpp +++ b/services/camera/libcameraservice/common/CameraProviderManager.cpp @@ -38,6 +38,8 @@ #include #include +#include "api2/HeicCompositeStream.h" + namespace android { 
using namespace ::android::hardware::camera; @@ -874,6 +876,130 @@ status_t CameraProviderManager::ProviderInfo::DeviceInfo3::removeAvailableKeys( return res; } +status_t CameraProviderManager::ProviderInfo::DeviceInfo3::fillHeicStreamCombinations( + std::vector* outputs, + std::vector* durations, + std::vector* stallDurations, + const camera_metadata_entry& halStreamConfigs, + const camera_metadata_entry& halStreamDurations) { + if (outputs == nullptr || durations == nullptr || stallDurations == nullptr) { + return BAD_VALUE; + } + + static bool supportInMemoryTempFile = + camera3::HeicCompositeStream::isInMemoryTempFileSupported(); + if (!supportInMemoryTempFile) { + ALOGI("%s: No HEIC support due to absence of in memory temp file support", + __FUNCTION__); + return OK; + } + + for (size_t i = 0; i < halStreamConfigs.count; i += 4) { + int32_t format = halStreamConfigs.data.i32[i]; + // Only IMPLEMENTATION_DEFINED and YUV_888 can be used to generate HEIC + // image. + if (format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED && + format != HAL_PIXEL_FORMAT_YCBCR_420_888) { + continue; + } + + bool sizeAvail = false; + for (size_t j = 0; j < outputs->size(); j+= 4) { + if ((*outputs)[j+1] == halStreamConfigs.data.i32[i+1] && + (*outputs)[j+2] == halStreamConfigs.data.i32[i+2]) { + sizeAvail = true; + break; + } + } + if (sizeAvail) continue; + + int64_t stall = 0; + bool useHeic, useGrid; + if (camera3::HeicCompositeStream::isSizeSupportedByHeifEncoder( + halStreamConfigs.data.i32[i+1], halStreamConfigs.data.i32[i+2], + &useHeic, &useGrid, &stall)) { + if (useGrid != (format == HAL_PIXEL_FORMAT_YCBCR_420_888)) { + continue; + } + + // HEIC configuration + int32_t config[] = {HAL_PIXEL_FORMAT_BLOB, halStreamConfigs.data.i32[i+1], + halStreamConfigs.data.i32[i+2], 0 /*isInput*/}; + outputs->insert(outputs->end(), config, config + 4); + + // HEIC minFrameDuration + for (size_t j = 0; j < halStreamDurations.count; j += 4) { + if (halStreamDurations.data.i64[j] == format 
&& + halStreamDurations.data.i64[j+1] == halStreamConfigs.data.i32[i+1] && + halStreamDurations.data.i64[j+2] == halStreamConfigs.data.i32[i+2]) { + int64_t duration[] = {HAL_PIXEL_FORMAT_BLOB, halStreamConfigs.data.i32[i+1], + halStreamConfigs.data.i32[i+2], halStreamDurations.data.i64[j+3]}; + durations->insert(durations->end(), duration, duration+4); + break; + } + } + + // HEIC stallDuration + int64_t stallDuration[] = {HAL_PIXEL_FORMAT_BLOB, halStreamConfigs.data.i32[i+1], + halStreamConfigs.data.i32[i+2], stall}; + stallDurations->insert(stallDurations->end(), stallDuration, stallDuration+4); + } + } + return OK; +} + +status_t CameraProviderManager::ProviderInfo::DeviceInfo3::deriveHeicTags() { + auto& c = mCameraCharacteristics; + + camera_metadata_entry halHeicSupport = c.find(ANDROID_HEIC_INFO_SUPPORTED); + if (halHeicSupport.count > 1) { + ALOGE("%s: Invalid entry count %zu for ANDROID_HEIC_INFO_SUPPORTED", + __FUNCTION__, halHeicSupport.count); + return BAD_VALUE; + } else if (halHeicSupport.count == 0 || + halHeicSupport.data.u8[0] == ANDROID_HEIC_INFO_SUPPORTED_FALSE) { + // Camera HAL doesn't support mandatory stream combinations for HEIC. + return OK; + } + + camera_metadata_entry maxJpegAppsSegments = + c.find(ANDROID_HEIC_INFO_MAX_JPEG_APP_SEGMENTS_COUNT); + if (maxJpegAppsSegments.count != 1 || maxJpegAppsSegments.data.u8[0] == 0 || + maxJpegAppsSegments.data.u8[0] > 16) { + ALOGE("%s: ANDROID_HEIC_INFO_MAX_JPEG_APP_SEGMENTS_COUNT must be within [1, 16]", + __FUNCTION__); + return BAD_VALUE; + } + + // Populate HEIC output configurations and its related min frame duration + // and stall duration. 
+ std::vector heicOutputs; + std::vector heicDurations; + std::vector heicStallDurations; + + camera_metadata_entry halStreamConfigs = + c.find(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS); + camera_metadata_entry minFrameDurations = + c.find(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS); + + status_t res = fillHeicStreamCombinations(&heicOutputs, &heicDurations, &heicStallDurations, + halStreamConfigs, minFrameDurations); + if (res != OK) { + ALOGE("%s: Failed to fill HEIC stream combinations: %s (%d)", __FUNCTION__, + strerror(-res), res); + return res; + } + + c.update(ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS, + heicOutputs.data(), heicOutputs.size()); + c.update(ANDROID_HEIC_AVAILABLE_HEIC_MIN_FRAME_DURATIONS, + heicDurations.data(), heicDurations.size()); + c.update(ANDROID_HEIC_AVAILABLE_HEIC_STALL_DURATIONS, + heicStallDurations.data(), heicStallDurations.size()); + + return OK; +} + bool CameraProviderManager::isLogicalCamera(const std::string& id, std::vector* physicalCameraIds) { std::lock_guard lock(mInterfaceMutex); @@ -1738,6 +1864,12 @@ CameraProviderManager::ProviderInfo::DeviceInfo3::DeviceInfo3(const std::string& ALOGE("%s: Failed appending dynamic depth tags: %s (%d)", __FUNCTION__, strerror(-stat), stat); } + res = deriveHeicTags(); + if (OK != res) { + ALOGE("%s: Unable to derive HEIC tags based on camera and media capabilities: %s (%d)", + __FUNCTION__, strerror(-res), res); + } + camera_metadata_entry flashAvailable = mCameraCharacteristics.find(ANDROID_FLASH_INFO_AVAILABLE); if (flashAvailable.count == 1 && diff --git a/services/camera/libcameraservice/common/CameraProviderManager.h b/services/camera/libcameraservice/common/CameraProviderManager.h index 18869f53f6..3173eda1ff 100644 --- a/services/camera/libcameraservice/common/CameraProviderManager.h +++ b/services/camera/libcameraservice/common/CameraProviderManager.h @@ -494,6 +494,12 @@ private: std::vector> *internalDepthSizes /*out*/); status_t 
removeAvailableKeys(CameraMetadata& c, const std::vector& keys, uint32_t keyTag); + status_t fillHeicStreamCombinations(std::vector* outputs, + std::vector* durations, + std::vector* stallDurations, + const camera_metadata_entry& halStreamConfigs, + const camera_metadata_entry& halStreamDurations); + status_t deriveHeicTags(); }; private: diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp index 82dfc0fba9..918dcf775d 100644 --- a/services/camera/libcameraservice/device3/Camera3Device.cpp +++ b/services/camera/libcameraservice/device3/Camera3Device.cpp @@ -1757,16 +1757,18 @@ status_t Camera3Device::createStream(const std::vector>& consumers, if (format == HAL_PIXEL_FORMAT_BLOB) { ssize_t blobBufferSize; - if (dataSpace != HAL_DATASPACE_DEPTH) { - blobBufferSize = getJpegBufferSize(width, height); + if (dataSpace == HAL_DATASPACE_DEPTH) { + blobBufferSize = getPointCloudBufferSize(); if (blobBufferSize <= 0) { - SET_ERR_L("Invalid jpeg buffer size %zd", blobBufferSize); + SET_ERR_L("Invalid point cloud buffer size %zd", blobBufferSize); return BAD_VALUE; } + } else if (dataSpace == static_cast(HAL_DATASPACE_JPEG_APP_SEGMENTS)) { + blobBufferSize = width * height; } else { - blobBufferSize = getPointCloudBufferSize(); + blobBufferSize = getJpegBufferSize(width, height); if (blobBufferSize <= 0) { - SET_ERR_L("Invalid point cloud buffer size %zd", blobBufferSize); + SET_ERR_L("Invalid jpeg buffer size %zd", blobBufferSize); return BAD_VALUE; } } @@ -5473,8 +5475,22 @@ status_t Camera3Device::RequestThread::prepareHalRequests() { return TIMED_OUT; } } - outputStream->fireBufferRequestForFrameNumber( - captureRequest->mResultExtras.frameNumber); + + { + sp parent = mParent.promote(); + if (parent != nullptr) { + const String8& streamCameraId = outputStream->getPhysicalCameraId(); + for (const auto& settings : captureRequest->mSettingsList) { + if ((streamCameraId.isEmpty() && + 
parent->getId() == settings.cameraId.c_str()) || + streamCameraId == settings.cameraId.c_str()) { + outputStream->fireBufferRequestForFrameNumber( + captureRequest->mResultExtras.frameNumber, + settings.metadata); + } + } + } + } String8 physicalCameraId = outputStream->getPhysicalCameraId(); diff --git a/services/camera/libcameraservice/device3/Camera3Stream.cpp b/services/camera/libcameraservice/device3/Camera3Stream.cpp index b296513245..d29e5c083d 100644 --- a/services/camera/libcameraservice/device3/Camera3Stream.cpp +++ b/services/camera/libcameraservice/device3/Camera3Stream.cpp @@ -763,14 +763,15 @@ status_t Camera3Stream::getInputBufferProducer(sp *produ return getInputBufferProducerLocked(producer); } -void Camera3Stream::fireBufferRequestForFrameNumber(uint64_t frameNumber) { +void Camera3Stream::fireBufferRequestForFrameNumber(uint64_t frameNumber, + const CameraMetadata& settings) { ATRACE_CALL(); Mutex::Autolock l(mLock); for (auto &it : mBufferListenerList) { sp listener = it.promote(); if (listener.get() != nullptr) { - listener->onBufferRequestForFrameNumber(frameNumber, getId()); + listener->onBufferRequestForFrameNumber(frameNumber, getId(), settings); } } } diff --git a/services/camera/libcameraservice/device3/Camera3Stream.h b/services/camera/libcameraservice/device3/Camera3Stream.h index 06deba9fa9..5eb6a23c15 100644 --- a/services/camera/libcameraservice/device3/Camera3Stream.h +++ b/services/camera/libcameraservice/device3/Camera3Stream.h @@ -434,7 +434,8 @@ class Camera3Stream : /** * Notify buffer stream listeners about incoming request with particular frame number. 
*/ - void fireBufferRequestForFrameNumber(uint64_t frameNumber) override; + void fireBufferRequestForFrameNumber(uint64_t frameNumber, + const CameraMetadata& settings) override; protected: const int mId; diff --git a/services/camera/libcameraservice/device3/Camera3StreamBufferListener.h b/services/camera/libcameraservice/device3/Camera3StreamBufferListener.h index 0e6104e7ff..d0aee2710d 100644 --- a/services/camera/libcameraservice/device3/Camera3StreamBufferListener.h +++ b/services/camera/libcameraservice/device3/Camera3StreamBufferListener.h @@ -17,6 +17,7 @@ #ifndef ANDROID_SERVERS_CAMERA3_STREAMBUFFERLISTENER_H #define ANDROID_SERVERS_CAMERA3_STREAMBUFFERLISTENER_H +#include #include #include @@ -42,7 +43,8 @@ public: // Buffer was released by the HAL virtual void onBufferReleased(const BufferInfo& bufferInfo) = 0; // Notify about incoming buffer request frame number - virtual void onBufferRequestForFrameNumber(uint64_t frameNumber, int streamId) = 0; + virtual void onBufferRequestForFrameNumber(uint64_t frameNumber, int streamId, + const CameraMetadata& settings) = 0; }; }; //namespace camera3 diff --git a/services/camera/libcameraservice/device3/Camera3StreamInterface.h b/services/camera/libcameraservice/device3/Camera3StreamInterface.h index 7b80cbd687..5cd11b7c70 100644 --- a/services/camera/libcameraservice/device3/Camera3StreamInterface.h +++ b/services/camera/libcameraservice/device3/Camera3StreamInterface.h @@ -18,6 +18,8 @@ #define ANDROID_SERVERS_CAMERA3_STREAM_INTERFACE_H #include + +#include #include "Camera3StreamBufferListener.h" #include "Camera3StreamBufferFreedListener.h" @@ -346,7 +348,8 @@ class Camera3StreamInterface : public virtual RefBase { /** * Notify buffer stream listeners about incoming request with particular frame number. 
*/ - virtual void fireBufferRequestForFrameNumber(uint64_t frameNumber) = 0; + virtual void fireBufferRequestForFrameNumber(uint64_t frameNumber, + const CameraMetadata& settings) = 0; }; } // namespace camera3 diff --git a/services/camera/libcameraservice/utils/ExifUtils.cpp b/services/camera/libcameraservice/utils/ExifUtils.cpp new file mode 100644 index 0000000000..a4027cc4bc --- /dev/null +++ b/services/camera/libcameraservice/utils/ExifUtils.cpp @@ -0,0 +1,1046 @@ +/* + * Copyright (C) 2019 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "CameraServerExifUtils" +#define ATRACE_TAG ATRACE_TAG_CAMERA +//#define LOG_NDEBUG 0 + +#include + +#include +#include +#include +#include +#include + +#include "ExifUtils.h" + +extern "C" { +#include +} + +namespace std { + +template <> +struct default_delete { + inline void operator()(ExifEntry* entry) const { exif_entry_unref(entry); } +}; + +} // namespace std + + +namespace android { +namespace camera3 { + + +class ExifUtilsImpl : public ExifUtils { +public: + ExifUtilsImpl(); + + virtual ~ExifUtilsImpl(); + + // Initialize() can be called multiple times. The setting of Exif tags will be + // cleared. 
+ virtual bool initialize(const unsigned char *app1Segment, size_t app1SegmentSize); + + // set all known fields from a metadata structure + virtual bool setFromMetadata(const CameraMetadata& metadata, + const size_t imageWidth, + const size_t imageHeight); + + // sets the len aperture. + // Returns false if memory allocation fails. + virtual bool setAperture(uint32_t numerator, uint32_t denominator); + + // sets the value of brightness. + // Returns false if memory allocation fails. + virtual bool setBrightness(int32_t numerator, int32_t denominator); + + // sets the color space. + // Returns false if memory allocation fails. + virtual bool setColorSpace(uint16_t color_space); + + // sets the information to compressed data. + // Returns false if memory allocation fails. + virtual bool setComponentsConfiguration(const std::string& components_configuration); + + // sets the compression scheme used for the image data. + // Returns false if memory allocation fails. + virtual bool setCompression(uint16_t compression); + + // sets image contrast. + // Returns false if memory allocation fails. + virtual bool setContrast(uint16_t contrast); + + // sets the date and time of image last modified. It takes local time. The + // name of the tag is DateTime in IFD0. + // Returns false if memory allocation fails. + virtual bool setDateTime(const struct tm& t); + + // sets the image description. + // Returns false if memory allocation fails. + virtual bool setDescription(const std::string& description); + + // sets the digital zoom ratio. If the numerator is 0, it means digital zoom + // was not used. + // Returns false if memory allocation fails. + virtual bool setDigitalZoomRatio(uint32_t numerator, uint32_t denominator); + + // sets the exposure bias. + // Returns false if memory allocation fails. + virtual bool setExposureBias(int32_t numerator, int32_t denominator); + + // sets the exposure mode set when the image was shot. + // Returns false if memory allocation fails. 
+ virtual bool setExposureMode(uint16_t exposure_mode); + + // sets the program used by the camera to set exposure when the picture is + // taken. + // Returns false if memory allocation fails. + virtual bool setExposureProgram(uint16_t exposure_program); + + // sets the exposure time, given in seconds. + // Returns false if memory allocation fails. + virtual bool setExposureTime(uint32_t numerator, uint32_t denominator); + + // sets the status of flash. + // Returns false if memory allocation fails. + virtual bool setFlash(uint16_t flash); + + // sets the F number. + // Returns false if memory allocation fails. + virtual bool setFNumber(uint32_t numerator, uint32_t denominator); + + // sets the focal length of lens used to take the image in millimeters. + // Returns false if memory allocation fails. + virtual bool setFocalLength(uint32_t numerator, uint32_t denominator); + + // sets the degree of overall image gain adjustment. + // Returns false if memory allocation fails. + virtual bool setGainControl(uint16_t gain_control); + + // sets the altitude in meters. + // Returns false if memory allocation fails. + virtual bool setGpsAltitude(double altitude); + + // sets the latitude with degrees minutes seconds format. + // Returns false if memory allocation fails. + virtual bool setGpsLatitude(double latitude); + + // sets the longitude with degrees minutes seconds format. + // Returns false if memory allocation fails. + virtual bool setGpsLongitude(double longitude); + + // sets GPS processing method. + // Returns false if memory allocation fails. + virtual bool setGpsProcessingMethod(const std::string& method); + + // sets GPS date stamp and time stamp (atomic clock). It takes UTC time. + // Returns false if memory allocation fails. + virtual bool setGpsTimestamp(const struct tm& t); + + // sets the length (number of rows) of main image. + // Returns false if memory allocation fails. 
+ virtual bool setImageHeight(uint32_t length); + + // sets the width (number of columes) of main image. + // Returns false if memory allocation fails. + virtual bool setImageWidth(uint32_t width); + + // sets the ISO speed. + // Returns false if memory allocation fails. + virtual bool setIsoSpeedRating(uint16_t iso_speed_ratings); + + // sets the kind of light source. + // Returns false if memory allocation fails. + virtual bool setLightSource(uint16_t light_source); + + // sets the smallest F number of the lens. + // Returns false if memory allocation fails. + virtual bool setMaxAperture(uint32_t numerator, uint32_t denominator); + + // sets the metering mode. + // Returns false if memory allocation fails. + virtual bool setMeteringMode(uint16_t metering_mode); + + // sets image orientation. + // Returns false if memory allocation fails. + virtual bool setOrientation(uint16_t orientation); + + // sets the unit for measuring XResolution and YResolution. + // Returns false if memory allocation fails. + virtual bool setResolutionUnit(uint16_t resolution_unit); + + // sets image saturation. + // Returns false if memory allocation fails. + virtual bool setSaturation(uint16_t saturation); + + // sets the type of scene that was shot. + // Returns false if memory allocation fails. + virtual bool setSceneCaptureType(uint16_t type); + + // sets image sharpness. + // Returns false if memory allocation fails. + virtual bool setSharpness(uint16_t sharpness); + + // sets the shutter speed. + // Returns false if memory allocation fails. + virtual bool setShutterSpeed(int32_t numerator, int32_t denominator); + + // sets the distance to the subject, given in meters. + // Returns false if memory allocation fails. + virtual bool setSubjectDistance(uint32_t numerator, uint32_t denominator); + + // sets the fractions of seconds for the tag. + // Returns false if memory allocation fails. 
+ virtual bool setSubsecTime(const std::string& subsec_time); + + // sets the white balance mode set when the image was shot. + // Returns false if memory allocation fails. + virtual bool setWhiteBalance(uint16_t white_balance); + + // sets the number of pixels per resolution unit in the image width. + // Returns false if memory allocation fails. + virtual bool setXResolution(uint32_t numerator, uint32_t denominator); + + // sets the position of chrominance components in relation to the luminance + // component. + // Returns false if memory allocation fails. + virtual bool setYCbCrPositioning(uint16_t ycbcr_positioning); + + // sets the number of pixels per resolution unit in the image length. + // Returns false if memory allocation fails. + virtual bool setYResolution(uint32_t numerator, uint32_t denominator); + + // sets the manufacturer of camera. + // Returns false if memory allocation fails. + virtual bool setMake(const std::string& make); + + // sets the model number of camera. + // Returns false if memory allocation fails. + virtual bool setModel(const std::string& model); + + // Generates APP1 segment. + // Returns false if generating APP1 segment fails. + virtual bool generateApp1(); + + // Gets buffer of APP1 segment. This method must be called only after calling + // GenerateAPP1(). + virtual const uint8_t* getApp1Buffer(); + + // Gets length of APP1 segment. This method must be called only after calling + // GenerateAPP1(). + virtual unsigned int getApp1Length(); + + protected: + // sets the version of this standard supported. + // Returns false if memory allocation fails. + virtual bool setExifVersion(const std::string& exif_version); + + // Resets the pointers and memories. + virtual void reset(); + + // Adds a variable length tag to |exif_data_|. It will remove the original one + // if the tag exists. + // Returns the entry of the tag. The reference count of returned ExifEntry is + // two. 
+ virtual std::unique_ptr addVariableLengthEntry(ExifIfd ifd, + ExifTag tag, ExifFormat format, uint64_t components, unsigned int size); + + // Adds a entry of |tag| in |exif_data_|. It won't remove the original one if + // the tag exists. + // Returns the entry of the tag. It adds one reference count to returned + // ExifEntry. + virtual std::unique_ptr addEntry(ExifIfd ifd, ExifTag tag); + + // Helpe functions to add exif data with different types. + virtual bool setShort(ExifIfd ifd, ExifTag tag, uint16_t value, const std::string& msg); + + virtual bool setLong(ExifIfd ifd, ExifTag tag, uint32_t value, const std::string& msg); + + virtual bool setRational(ExifIfd ifd, ExifTag tag, uint32_t numerator, + uint32_t denominator, const std::string& msg); + + virtual bool setSRational(ExifIfd ifd, ExifTag tag, int32_t numerator, + int32_t denominator, const std::string& msg); + + virtual bool setString(ExifIfd ifd, ExifTag tag, ExifFormat format, + const std::string& buffer, const std::string& msg); + + // Destroys the buffer of APP1 segment if exists. + virtual void destroyApp1(); + + // The Exif data (APP1). Owned by this class. + ExifData* exif_data_; + // The raw data of APP1 segment. It's allocated by ExifMem in |exif_data_| but + // owned by this class. + uint8_t* app1_buffer_; + // The length of |app1_buffer_|. 
+ unsigned int app1_length_; + +}; + +#define SET_SHORT(ifd, tag, value) \ + do { \ + if (setShort(ifd, tag, value, #tag) == false) \ + return false; \ + } while (0); + +#define SET_LONG(ifd, tag, value) \ + do { \ + if (setLong(ifd, tag, value, #tag) == false) \ + return false; \ + } while (0); + +#define SET_RATIONAL(ifd, tag, numerator, denominator) \ + do { \ + if (setRational(ifd, tag, numerator, denominator, #tag) == false) \ + return false; \ + } while (0); + +#define SET_SRATIONAL(ifd, tag, numerator, denominator) \ + do { \ + if (setSRational(ifd, tag, numerator, denominator, #tag) == false) \ + return false; \ + } while (0); + +#define SET_STRING(ifd, tag, format, buffer) \ + do { \ + if (setString(ifd, tag, format, buffer, #tag) == false) \ + return false; \ + } while (0); + +// This comes from the Exif Version 2.2 standard table 6. +const char gExifAsciiPrefix[] = {0x41, 0x53, 0x43, 0x49, 0x49, 0x0, 0x0, 0x0}; + +static void setLatitudeOrLongitudeData(unsigned char* data, double num) { + // Take the integer part of |num|. 
+ ExifLong degrees = static_cast(num); + ExifLong minutes = static_cast(60 * (num - degrees)); + ExifLong microseconds = + static_cast(3600000000u * (num - degrees - minutes / 60.0)); + exif_set_rational(data, EXIF_BYTE_ORDER_INTEL, {degrees, 1}); + exif_set_rational(data + sizeof(ExifRational), EXIF_BYTE_ORDER_INTEL, {minutes, 1}); + exif_set_rational(data + 2 * sizeof(ExifRational), EXIF_BYTE_ORDER_INTEL, + {microseconds, 1000000}); +} + +ExifUtils *ExifUtils::create() { + return new ExifUtilsImpl(); +} + +ExifUtils::~ExifUtils() { +} + +ExifUtilsImpl::ExifUtilsImpl() + : exif_data_(nullptr), app1_buffer_(nullptr), app1_length_(0) {} + +ExifUtilsImpl::~ExifUtilsImpl() { + reset(); +} + + +bool ExifUtilsImpl::initialize(const unsigned char *app1Segment, size_t app1SegmentSize) { + reset(); + exif_data_ = exif_data_new_from_data(app1Segment, app1SegmentSize); + if (exif_data_ == nullptr) { + ALOGE("%s: allocate memory for exif_data_ failed", __FUNCTION__); + return false; + } + // set the image options. + exif_data_set_option(exif_data_, EXIF_DATA_OPTION_FOLLOW_SPECIFICATION); + exif_data_set_data_type(exif_data_, EXIF_DATA_TYPE_COMPRESSED); + exif_data_set_byte_order(exif_data_, EXIF_BYTE_ORDER_INTEL); + + // set exif version to 2.2. 
+ if (!setExifVersion("0220")) { + return false; + } + + return true; +} + +bool ExifUtilsImpl::setAperture(uint32_t numerator, uint32_t denominator) { + SET_RATIONAL(EXIF_IFD_EXIF, EXIF_TAG_APERTURE_VALUE, numerator, denominator); + return true; +} + +bool ExifUtilsImpl::setBrightness(int32_t numerator, int32_t denominator) { + SET_SRATIONAL(EXIF_IFD_EXIF, EXIF_TAG_BRIGHTNESS_VALUE, numerator, denominator); + return true; +} + +bool ExifUtilsImpl::setColorSpace(uint16_t color_space) { + SET_SHORT(EXIF_IFD_EXIF, EXIF_TAG_COLOR_SPACE, color_space); + return true; +} + +bool ExifUtilsImpl::setComponentsConfiguration( + const std::string& components_configuration) { + SET_STRING(EXIF_IFD_EXIF, EXIF_TAG_COMPONENTS_CONFIGURATION, + EXIF_FORMAT_UNDEFINED, components_configuration); + return true; +} + +bool ExifUtilsImpl::setCompression(uint16_t compression) { + SET_SHORT(EXIF_IFD_0, EXIF_TAG_COMPRESSION, compression); + return true; +} + +bool ExifUtilsImpl::setContrast(uint16_t contrast) { + SET_SHORT(EXIF_IFD_EXIF, EXIF_TAG_CONTRAST, contrast); + return true; +} + +bool ExifUtilsImpl::setDateTime(const struct tm& t) { + // The length is 20 bytes including NULL for termination in Exif standard. 
+ char str[20]; + int result = snprintf(str, sizeof(str), "%04i:%02i:%02i %02i:%02i:%02i", + t.tm_year + 1900, t.tm_mon + 1, t.tm_mday, t.tm_hour, t.tm_min, t.tm_sec); + if (result != sizeof(str) - 1) { + ALOGW("%s: Input time is invalid", __FUNCTION__); + return false; + } + std::string buffer(str); + SET_STRING(EXIF_IFD_0, EXIF_TAG_DATE_TIME, EXIF_FORMAT_ASCII, buffer); + SET_STRING(EXIF_IFD_EXIF, EXIF_TAG_DATE_TIME_ORIGINAL, EXIF_FORMAT_ASCII, buffer); + SET_STRING(EXIF_IFD_EXIF, EXIF_TAG_DATE_TIME_DIGITIZED, EXIF_FORMAT_ASCII, buffer); + return true; +} + +bool ExifUtilsImpl::setDescription(const std::string& description) { + SET_STRING(EXIF_IFD_0, EXIF_TAG_IMAGE_DESCRIPTION, EXIF_FORMAT_ASCII, description); + return true; +} + +bool ExifUtilsImpl::setDigitalZoomRatio(uint32_t numerator, uint32_t denominator) { + SET_RATIONAL(EXIF_IFD_EXIF, EXIF_TAG_DIGITAL_ZOOM_RATIO, numerator, denominator); + return true; +} + +bool ExifUtilsImpl::setExposureBias(int32_t numerator, int32_t denominator) { + SET_SRATIONAL(EXIF_IFD_EXIF, EXIF_TAG_EXPOSURE_BIAS_VALUE, numerator, denominator); + return true; +} + +bool ExifUtilsImpl::setExposureMode(uint16_t exposure_mode) { + SET_SHORT(EXIF_IFD_EXIF, EXIF_TAG_EXPOSURE_MODE, exposure_mode); + return true; +} + +bool ExifUtilsImpl::setExposureProgram(uint16_t exposure_program) { + SET_SHORT(EXIF_IFD_EXIF, EXIF_TAG_EXPOSURE_PROGRAM, exposure_program); + return true; +} + +bool ExifUtilsImpl::setExposureTime(uint32_t numerator, uint32_t denominator) { + SET_RATIONAL(EXIF_IFD_EXIF, EXIF_TAG_EXPOSURE_TIME, numerator, denominator); + return true; +} + +bool ExifUtilsImpl::setFlash(uint16_t flash) { + SET_SHORT(EXIF_IFD_EXIF, EXIF_TAG_FLASH, flash); + return true; +} + +bool ExifUtilsImpl::setFNumber(uint32_t numerator, uint32_t denominator) { + SET_RATIONAL(EXIF_IFD_EXIF, EXIF_TAG_FNUMBER, numerator, denominator); + return true; +} + +bool ExifUtilsImpl::setFocalLength(uint32_t numerator, uint32_t denominator) { + 
SET_RATIONAL(EXIF_IFD_EXIF, EXIF_TAG_FOCAL_LENGTH, numerator, denominator); + return true; +} + +bool ExifUtilsImpl::setGainControl(uint16_t gain_control) { + SET_SHORT(EXIF_IFD_EXIF, EXIF_TAG_GAIN_CONTROL, gain_control); + return true; +} + +bool ExifUtilsImpl::setGpsAltitude(double altitude) { + ExifTag refTag = static_cast(EXIF_TAG_GPS_ALTITUDE_REF); + std::unique_ptr refEntry = + addVariableLengthEntry(EXIF_IFD_GPS, refTag, EXIF_FORMAT_BYTE, 1, 1); + if (!refEntry) { + ALOGE("%s: Adding GPSAltitudeRef exif entry failed", __FUNCTION__); + return false; + } + if (altitude >= 0) { + *refEntry->data = 0; + } else { + *refEntry->data = 1; + altitude *= -1; + } + + ExifTag tag = static_cast(EXIF_TAG_GPS_ALTITUDE); + std::unique_ptr entry = addVariableLengthEntry( + EXIF_IFD_GPS, tag, EXIF_FORMAT_RATIONAL, 1, sizeof(ExifRational)); + if (!entry) { + exif_content_remove_entry(exif_data_->ifd[EXIF_IFD_GPS], refEntry.get()); + ALOGE("%s: Adding GPSAltitude exif entry failed", __FUNCTION__); + return false; + } + exif_set_rational(entry->data, EXIF_BYTE_ORDER_INTEL, + {static_cast(altitude * 1000), 1000}); + + return true; +} + +bool ExifUtilsImpl::setGpsLatitude(double latitude) { + const ExifTag refTag = static_cast(EXIF_TAG_GPS_LATITUDE_REF); + std::unique_ptr refEntry = + addVariableLengthEntry(EXIF_IFD_GPS, refTag, EXIF_FORMAT_ASCII, 2, 2); + if (!refEntry) { + ALOGE("%s: Adding GPSLatitudeRef exif entry failed", __FUNCTION__); + return false; + } + if (latitude >= 0) { + memcpy(refEntry->data, "N", sizeof("N")); + } else { + memcpy(refEntry->data, "S", sizeof("S")); + latitude *= -1; + } + + const ExifTag tag = static_cast(EXIF_TAG_GPS_LATITUDE); + std::unique_ptr entry = addVariableLengthEntry( + EXIF_IFD_GPS, tag, EXIF_FORMAT_RATIONAL, 3, 3 * sizeof(ExifRational)); + if (!entry) { + exif_content_remove_entry(exif_data_->ifd[EXIF_IFD_GPS], refEntry.get()); + ALOGE("%s: Adding GPSLatitude exif entry failed", __FUNCTION__); + return false; + } + 
setLatitudeOrLongitudeData(entry->data, latitude); + + return true; +} + +bool ExifUtilsImpl::setGpsLongitude(double longitude) { + ExifTag refTag = static_cast(EXIF_TAG_GPS_LONGITUDE_REF); + std::unique_ptr refEntry = + addVariableLengthEntry(EXIF_IFD_GPS, refTag, EXIF_FORMAT_ASCII, 2, 2); + if (!refEntry) { + ALOGE("%s: Adding GPSLongitudeRef exif entry failed", __FUNCTION__); + return false; + } + if (longitude >= 0) { + memcpy(refEntry->data, "E", sizeof("E")); + } else { + memcpy(refEntry->data, "W", sizeof("W")); + longitude *= -1; + } + + ExifTag tag = static_cast(EXIF_TAG_GPS_LONGITUDE); + std::unique_ptr entry = addVariableLengthEntry( + EXIF_IFD_GPS, tag, EXIF_FORMAT_RATIONAL, 3, 3 * sizeof(ExifRational)); + if (!entry) { + exif_content_remove_entry(exif_data_->ifd[EXIF_IFD_GPS], refEntry.get()); + ALOGE("%s: Adding GPSLongitude exif entry failed", __FUNCTION__); + return false; + } + setLatitudeOrLongitudeData(entry->data, longitude); + + return true; +} + +bool ExifUtilsImpl::setGpsProcessingMethod(const std::string& method) { + std::string buffer = + std::string(gExifAsciiPrefix, sizeof(gExifAsciiPrefix)) + method; + SET_STRING(EXIF_IFD_GPS, static_cast(EXIF_TAG_GPS_PROCESSING_METHOD), + EXIF_FORMAT_UNDEFINED, buffer); + return true; +} + +bool ExifUtilsImpl::setGpsTimestamp(const struct tm& t) { + const ExifTag dateTag = static_cast(EXIF_TAG_GPS_DATE_STAMP); + const size_t kGpsDateStampSize = 11; + std::unique_ptr entry = addVariableLengthEntry(EXIF_IFD_GPS, + dateTag, EXIF_FORMAT_ASCII, kGpsDateStampSize, kGpsDateStampSize); + if (!entry) { + ALOGE("%s: Adding GPSDateStamp exif entry failed", __FUNCTION__); + return false; + } + int result = snprintf(reinterpret_cast(entry->data), kGpsDateStampSize, + "%04i:%02i:%02i", t.tm_year + 1900, t.tm_mon + 1, t.tm_mday); + if (result != kGpsDateStampSize - 1) { + ALOGW("%s: Input time is invalid", __FUNCTION__); + return false; + } + + const ExifTag timeTag = static_cast(EXIF_TAG_GPS_TIME_STAMP); + entry = 
addVariableLengthEntry(EXIF_IFD_GPS, timeTag, EXIF_FORMAT_RATIONAL, 3, + 3 * sizeof(ExifRational)); + if (!entry) { + ALOGE("%s: Adding GPSTimeStamp exif entry failed", __FUNCTION__); + return false; + } + exif_set_rational(entry->data, EXIF_BYTE_ORDER_INTEL, + {static_cast(t.tm_hour), 1}); + exif_set_rational(entry->data + sizeof(ExifRational), EXIF_BYTE_ORDER_INTEL, + {static_cast(t.tm_min), 1}); + exif_set_rational(entry->data + 2 * sizeof(ExifRational), EXIF_BYTE_ORDER_INTEL, + {static_cast(t.tm_sec), 1}); + + return true; +} + +bool ExifUtilsImpl::setImageHeight(uint32_t length) { + SET_LONG(EXIF_IFD_0, EXIF_TAG_IMAGE_LENGTH, length); + SET_LONG(EXIF_IFD_EXIF, EXIF_TAG_PIXEL_Y_DIMENSION, length); + return true; +} + +bool ExifUtilsImpl::setImageWidth(uint32_t width) { + SET_LONG(EXIF_IFD_0, EXIF_TAG_IMAGE_WIDTH, width); + SET_LONG(EXIF_IFD_EXIF, EXIF_TAG_PIXEL_X_DIMENSION, width); + return true; +} + +bool ExifUtilsImpl::setIsoSpeedRating(uint16_t iso_speed_ratings) { + SET_SHORT(EXIF_IFD_EXIF, EXIF_TAG_ISO_SPEED_RATINGS, iso_speed_ratings); + return true; +} + +bool ExifUtilsImpl::setLightSource(uint16_t light_source) { + SET_SHORT(EXIF_IFD_EXIF, EXIF_TAG_LIGHT_SOURCE, light_source); + return true; +} + +bool ExifUtilsImpl::setMaxAperture(uint32_t numerator, uint32_t denominator) { + SET_RATIONAL(EXIF_IFD_EXIF, EXIF_TAG_MAX_APERTURE_VALUE, numerator, denominator); + return true; +} + +bool ExifUtilsImpl::setMeteringMode(uint16_t metering_mode) { + SET_SHORT(EXIF_IFD_EXIF, EXIF_TAG_METERING_MODE, metering_mode); + return true; +} + +bool ExifUtilsImpl::setOrientation(uint16_t orientation) { + /* + * Orientation value: + * 1 2 3 4 5 6 7 8 + * + * 888888 888888 88 88 8888888888 88 88 8888888888 + * 88 88 88 88 88 88 88 88 88 88 88 88 + * 8888 8888 8888 8888 88 8888888888 8888888888 88 + * 88 88 88 88 + * 88 88 888888 888888 + */ + int value = 1; + switch (orientation) { + case 90: + value = 6; + break; + case 180: + value = 3; + break; + case 270: + value = 8; + 
break; + default: + break; + } + SET_SHORT(EXIF_IFD_0, EXIF_TAG_ORIENTATION, value); + return true; +} + +bool ExifUtilsImpl::setResolutionUnit(uint16_t resolution_unit) { + SET_SHORT(EXIF_IFD_EXIF, EXIF_TAG_RESOLUTION_UNIT, resolution_unit); + return true; +} + +bool ExifUtilsImpl::setSaturation(uint16_t saturation) { + SET_SHORT(EXIF_IFD_EXIF, EXIF_TAG_SATURATION, saturation); + return true; +} + +bool ExifUtilsImpl::setSceneCaptureType(uint16_t type) { + SET_SHORT(EXIF_IFD_EXIF, EXIF_TAG_SCENE_CAPTURE_TYPE, type); + return true; +} + +bool ExifUtilsImpl::setSharpness(uint16_t sharpness) { + SET_SHORT(EXIF_IFD_EXIF, EXIF_TAG_SHARPNESS, sharpness); + return true; +} + +bool ExifUtilsImpl::setShutterSpeed(int32_t numerator, int32_t denominator) { + SET_SRATIONAL(EXIF_IFD_EXIF, EXIF_TAG_SHUTTER_SPEED_VALUE, numerator, denominator); + return true; +} + +bool ExifUtilsImpl::setSubjectDistance(uint32_t numerator, uint32_t denominator) { + SET_RATIONAL(EXIF_IFD_EXIF, EXIF_TAG_SUBJECT_DISTANCE, numerator, denominator); + return true; +} + +bool ExifUtilsImpl::setSubsecTime(const std::string& subsec_time) { + SET_STRING(EXIF_IFD_EXIF, EXIF_TAG_SUB_SEC_TIME, EXIF_FORMAT_ASCII, subsec_time); + SET_STRING(EXIF_IFD_EXIF, EXIF_TAG_SUB_SEC_TIME_ORIGINAL, EXIF_FORMAT_ASCII, subsec_time); + SET_STRING(EXIF_IFD_EXIF, EXIF_TAG_SUB_SEC_TIME_DIGITIZED, EXIF_FORMAT_ASCII, subsec_time); + return true; +} + +bool ExifUtilsImpl::setWhiteBalance(uint16_t white_balance) { + SET_SHORT(EXIF_IFD_EXIF, EXIF_TAG_WHITE_BALANCE, white_balance); + return true; +} + +bool ExifUtilsImpl::setXResolution(uint32_t numerator, uint32_t denominator) { + SET_RATIONAL(EXIF_IFD_EXIF, EXIF_TAG_X_RESOLUTION, numerator, denominator); + return true; +} + +bool ExifUtilsImpl::setYCbCrPositioning(uint16_t ycbcr_positioning) { + SET_SHORT(EXIF_IFD_0, EXIF_TAG_YCBCR_POSITIONING, ycbcr_positioning); + return true; +} + +bool ExifUtilsImpl::setYResolution(uint32_t numerator, uint32_t denominator) { + 
SET_RATIONAL(EXIF_IFD_EXIF, EXIF_TAG_Y_RESOLUTION, numerator, denominator); + return true; +} + +bool ExifUtilsImpl::generateApp1() { + destroyApp1(); + // Save the result into |app1_buffer_|. + exif_data_save_data(exif_data_, &app1_buffer_, &app1_length_); + if (!app1_length_) { + ALOGE("%s: Allocate memory for app1_buffer_ failed", __FUNCTION__); + return false; + } + /* + * The JPEG segment size is 16 bits in spec. The size of APP1 segment should + * be smaller than 65533 because there are two bytes for segment size field. + */ + if (app1_length_ > 65533) { + destroyApp1(); + ALOGE("%s: The size of APP1 segment is too large", __FUNCTION__); + return false; + } + return true; +} + +const uint8_t* ExifUtilsImpl::getApp1Buffer() { + return app1_buffer_; +} + +unsigned int ExifUtilsImpl::getApp1Length() { + return app1_length_; +} + +bool ExifUtilsImpl::setExifVersion(const std::string& exif_version) { + SET_STRING(EXIF_IFD_EXIF, EXIF_TAG_EXIF_VERSION, EXIF_FORMAT_UNDEFINED, exif_version); + return true; +} + +bool ExifUtilsImpl::setMake(const std::string& make) { + SET_STRING(EXIF_IFD_0, EXIF_TAG_MAKE, EXIF_FORMAT_ASCII, make); + return true; +} + +bool ExifUtilsImpl::setModel(const std::string& model) { + SET_STRING(EXIF_IFD_0, EXIF_TAG_MODEL, EXIF_FORMAT_ASCII, model); + return true; +} + +void ExifUtilsImpl::reset() { + destroyApp1(); + if (exif_data_) { + /* + * Since we decided to ignore the original APP1, we are sure that there is + * no thumbnail allocated by libexif. |exif_data_->data| is actually + * allocated by JpegCompressor. sets |exif_data_->data| to nullptr to + * prevent exif_data_unref() destroy it incorrectly. + */ + exif_data_->data = nullptr; + exif_data_->size = 0; + exif_data_unref(exif_data_); + exif_data_ = nullptr; + } +} + +std::unique_ptr<ExifEntry> ExifUtilsImpl::addVariableLengthEntry(ExifIfd ifd, + ExifTag tag, ExifFormat format, uint64_t components, unsigned int size) { + // Remove old entry if exists.
+ exif_content_remove_entry(exif_data_->ifd[ifd], + exif_content_get_entry(exif_data_->ifd[ifd], tag)); + ExifMem* mem = exif_mem_new_default(); + if (!mem) { + ALOGE("%s: Allocate memory for exif entry failed", __FUNCTION__); + return nullptr; + } + std::unique_ptr<ExifEntry> entry(exif_entry_new_mem(mem)); + if (!entry) { + ALOGE("%s: Allocate memory for exif entry failed", __FUNCTION__); + exif_mem_unref(mem); + return nullptr; + } + void* tmpBuffer = exif_mem_alloc(mem, size); + if (!tmpBuffer) { + ALOGE("%s: Allocate memory for exif entry failed", __FUNCTION__); + exif_mem_unref(mem); + return nullptr; + } + + entry->data = static_cast<unsigned char*>(tmpBuffer); + entry->tag = tag; + entry->format = format; + entry->components = components; + entry->size = size; + + exif_content_add_entry(exif_data_->ifd[ifd], entry.get()); + exif_mem_unref(mem); + + return entry; +} + +std::unique_ptr<ExifEntry> ExifUtilsImpl::addEntry(ExifIfd ifd, ExifTag tag) { + std::unique_ptr<ExifEntry> entry(exif_content_get_entry(exif_data_->ifd[ifd], tag)); + if (entry) { + // exif_content_get_entry() won't ref the entry, so we ref here.
+ exif_entry_ref(entry.get()); + return entry; + } + entry.reset(exif_entry_new()); + if (!entry) { + ALOGE("%s: Allocate memory for exif entry failed", __FUNCTION__); + return nullptr; + } + entry->tag = tag; + exif_content_add_entry(exif_data_->ifd[ifd], entry.get()); + exif_entry_initialize(entry.get(), tag); + return entry; +} + +bool ExifUtilsImpl::setShort(ExifIfd ifd, ExifTag tag, uint16_t value, const std::string& msg) { + std::unique_ptr<ExifEntry> entry = addEntry(ifd, tag); + if (!entry) { + ALOGE("%s: Adding '%s' entry failed", __FUNCTION__, msg.c_str()); + return false; + } + exif_set_short(entry->data, EXIF_BYTE_ORDER_INTEL, value); + return true; +} + +bool ExifUtilsImpl::setLong(ExifIfd ifd, ExifTag tag, uint32_t value, const std::string& msg) { + std::unique_ptr<ExifEntry> entry = addEntry(ifd, tag); + if (!entry) { + ALOGE("%s: Adding '%s' entry failed", __FUNCTION__, msg.c_str()); + return false; + } + exif_set_long(entry->data, EXIF_BYTE_ORDER_INTEL, value); + return true; +} + +bool ExifUtilsImpl::setRational(ExifIfd ifd, ExifTag tag, uint32_t numerator, + uint32_t denominator, const std::string& msg) { + std::unique_ptr<ExifEntry> entry = addEntry(ifd, tag); + if (!entry) { + ALOGE("%s: Adding '%s' entry failed", __FUNCTION__, msg.c_str()); + return false; + } + exif_set_rational(entry->data, EXIF_BYTE_ORDER_INTEL, {numerator, denominator}); + return true; +} + +bool ExifUtilsImpl::setSRational(ExifIfd ifd, ExifTag tag, int32_t numerator, + int32_t denominator, const std::string& msg) { + std::unique_ptr<ExifEntry> entry = addEntry(ifd, tag); + if (!entry) { + ALOGE("%s: Adding '%s' entry failed", __FUNCTION__, msg.c_str()); + return false; + } + exif_set_srational(entry->data, EXIF_BYTE_ORDER_INTEL, {numerator, denominator}); + return true; +} + +bool ExifUtilsImpl::setString(ExifIfd ifd, ExifTag tag, ExifFormat format, + const std::string& buffer, const std::string& msg) { + size_t entry_size = buffer.length(); + // Since the exif format is undefined, NULL termination is not
necessary. + if (format == EXIF_FORMAT_ASCII) { + entry_size++; + } + std::unique_ptr<ExifEntry> entry = + addVariableLengthEntry(ifd, tag, format, entry_size, entry_size); + if (!entry) { + ALOGE("%s: Adding '%s' entry failed", __FUNCTION__, msg.c_str()); + return false; + } + memcpy(entry->data, buffer.c_str(), entry_size); + return true; +} + +void ExifUtilsImpl::destroyApp1() { + /* + * Since there is no API to access ExifMem in ExifData->priv, we use free + * here, which is the default free function in libexif. See + * exif_data_save_data() for detail. + */ + free(app1_buffer_); + app1_buffer_ = nullptr; + app1_length_ = 0; +} + +bool ExifUtilsImpl::setFromMetadata(const CameraMetadata& metadata, + const size_t imageWidth, const size_t imageHeight) { + // How precise the float-to-rational conversion for EXIF tags would be. + constexpr int kRationalPrecision = 10000; + if (!setImageWidth(imageWidth) || + !setImageHeight(imageHeight)) { + ALOGE("%s: setting image resolution failed.", __FUNCTION__); + return false; + } + + struct timespec tp; + struct tm time_info; + bool time_available = clock_gettime(CLOCK_REALTIME, &tp) != -1; + localtime_r(&tp.tv_sec, &time_info); + if (!setDateTime(time_info)) { + ALOGE("%s: setting data time failed.", __FUNCTION__); + return false; + } + + float focal_length; + camera_metadata_ro_entry entry = metadata.find(ANDROID_LENS_FOCAL_LENGTH); + if (entry.count) { + focal_length = entry.data.f[0]; + + if (!setFocalLength( + static_cast<uint32_t>(focal_length * kRationalPrecision), kRationalPrecision)) { + ALOGE("%s: setting focal length failed.", __FUNCTION__); + return false; + } + } else { + ALOGV("%s: Cannot find focal length in metadata.", __FUNCTION__); + } + + if (metadata.exists(ANDROID_JPEG_GPS_COORDINATES)) { + entry = metadata.find(ANDROID_JPEG_GPS_COORDINATES); + if (entry.count < 3) { + ALOGE("%s: Gps coordinates in metadata is not complete.", __FUNCTION__); + return false; + } + if (!setGpsLatitude(entry.data.d[0])) { + ALOGE("%s: setting
gps latitude failed.", __FUNCTION__); + return false; + } + if (!setGpsLongitude(entry.data.d[1])) { + ALOGE("%s: setting gps longitude failed.", __FUNCTION__); + return false; + } + if (!setGpsAltitude(entry.data.d[2])) { + ALOGE("%s: setting gps altitude failed.", __FUNCTION__); + return false; + } + } + + if (metadata.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) { + entry = metadata.find(ANDROID_JPEG_GPS_PROCESSING_METHOD); + std::string method_str(reinterpret_cast<const char*>(entry.data.u8)); + if (!setGpsProcessingMethod(method_str)) { + ALOGE("%s: setting gps processing method failed.", __FUNCTION__); + return false; + } + } + + if (time_available && metadata.exists(ANDROID_JPEG_GPS_TIMESTAMP)) { + entry = metadata.find(ANDROID_JPEG_GPS_TIMESTAMP); + time_t timestamp = static_cast<time_t>(entry.data.i64[0]); + if (gmtime_r(&timestamp, &time_info)) { + if (!setGpsTimestamp(time_info)) { + ALOGE("%s: setting gps timestamp failed.", __FUNCTION__); + return false; + } + } else { + ALOGE("%s: Time tranformation failed.", __FUNCTION__); + return false; + } + } + + if (metadata.exists(ANDROID_JPEG_ORIENTATION)) { + entry = metadata.find(ANDROID_JPEG_ORIENTATION); + if (!setOrientation(entry.data.i32[0])) { + ALOGE("%s: setting orientation failed.", __FUNCTION__); + return false; + } + } + + if (metadata.exists(ANDROID_SENSOR_EXPOSURE_TIME)) { + entry = metadata.find(ANDROID_SENSOR_EXPOSURE_TIME); + // int64_t of nanoseconds + if (!setExposureTime(entry.data.i64[0],1000000000u)) { + ALOGE("%s: setting exposure time failed.", __FUNCTION__); + return false; + } + } + + if (metadata.exists(ANDROID_LENS_APERTURE)) { + const int kAperturePrecision = 10000; + entry = metadata.find(ANDROID_LENS_APERTURE); + if (!setFNumber(entry.data.f[0] * kAperturePrecision, kAperturePrecision)) { + ALOGE("%s: setting F number failed.", __FUNCTION__); + return false; + } + } + + if (metadata.exists(ANDROID_FLASH_INFO_AVAILABLE)) { + entry = metadata.find(ANDROID_FLASH_INFO_AVAILABLE); + if (entry.data.u8[0] ==
ANDROID_FLASH_INFO_AVAILABLE_FALSE) { + const uint32_t kNoFlashFunction = 0x20; + if (!setFlash(kNoFlashFunction)) { + ALOGE("%s: setting flash failed.", __FUNCTION__); + return false; + } + } else { + ALOGE("%s: Unsupported flash info: %d",__FUNCTION__, entry.data.u8[0]); + return false; + } + } + + if (metadata.exists(ANDROID_CONTROL_AWB_MODE)) { + entry = metadata.find(ANDROID_CONTROL_AWB_MODE); + if (entry.data.u8[0] == ANDROID_CONTROL_AWB_MODE_AUTO) { + const uint16_t kAutoWhiteBalance = 0; + if (!setWhiteBalance(kAutoWhiteBalance)) { + ALOGE("%s: setting white balance failed.", __FUNCTION__); + return false; + } + } else { + ALOGE("%s: Unsupported awb mode: %d", __FUNCTION__, entry.data.u8[0]); + return false; + } + } + + if (time_available) { + char str[4]; + if (snprintf(str, sizeof(str), "%03ld", tp.tv_nsec / 1000000) < 0) { + ALOGE("%s: Subsec is invalid: %ld", __FUNCTION__, tp.tv_nsec); + return false; + } + if (!setSubsecTime(std::string(str))) { + ALOGE("%s: setting subsec time failed.", __FUNCTION__); + return false; + } + } + + return true; +} + +} // namespace camera3 +} // namespace android diff --git a/services/camera/libcameraservice/utils/ExifUtils.h b/services/camera/libcameraservice/utils/ExifUtils.h new file mode 100644 index 0000000000..8ccdd8fdca --- /dev/null +++ b/services/camera/libcameraservice/utils/ExifUtils.h @@ -0,0 +1,245 @@ +/* + * Copyright (C) 2019 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_SERVERS_CAMERA_EXIF_UTILS_H +#define ANDROID_SERVERS_CAMERA_EXIF_UTILS_H + +#include "CameraMetadata.h" + +namespace android { +namespace camera3 { + +// This is based on the camera HIDL shim implementation, which was in turn +// based on original ChromeOS ARC implementation of a V4L2 HAL + +// ExifUtils can override APP1 segment with tags which caller set. ExifUtils can +// also add a thumbnail in the APP1 segment if thumbnail size is specified. +// ExifUtils can be reused with different images by calling initialize(). +// +// Example of using this class : +// std::unique_ptr<ExifUtils> utils(ExifUtils::Create()); +// utils->initialize(const unsigned char* app1Segment, size_t app1SegmentSize); +// ... +// // Call ExifUtils functions to set Exif tags. +// ... +// utils->GenerateApp1(); +// unsigned int app1Length = utils->GetApp1Length(); +// uint8_t* app1Buffer = new uint8_t[app1Length]; +// memcpy(app1Buffer, utils->GetApp1Buffer(), app1Length); +class ExifUtils { + +public: + virtual ~ExifUtils(); + + static ExifUtils* create(); + + // Initialize() can be called multiple times. The setting of Exif tags will be + // cleared. + virtual bool initialize(const unsigned char *app1Segment, size_t app1SegmentSize) = 0; + + // Set all known fields from a metadata structure + virtual bool setFromMetadata(const CameraMetadata& metadata, + const size_t imageWidth, const size_t imageHeight) = 0; + + // Sets the lens aperture. + // Returns false if memory allocation fails. + virtual bool setAperture(uint32_t numerator, uint32_t denominator) = 0; + + // Sets the value of brightness. + // Returns false if memory allocation fails. + virtual bool setBrightness(int32_t numerator, int32_t denominator) = 0; + + // Sets the color space. + // Returns false if memory allocation fails.
+ virtual bool setColorSpace(uint16_t color_space) = 0; + + // Sets the information to compressed data. + // Returns false if memory allocation fails. + virtual bool setComponentsConfiguration(const std::string& components_configuration) = 0; + + // Sets the compression scheme used for the image data. + // Returns false if memory allocation fails. + virtual bool setCompression(uint16_t compression) = 0; + + // Sets image contrast. + // Returns false if memory allocation fails. + virtual bool setContrast(uint16_t contrast) = 0; + + // Sets the date and time of image last modified. It takes local time. The + // name of the tag is DateTime in IFD0. + // Returns false if memory allocation fails. + virtual bool setDateTime(const struct tm& t) = 0; + + // Sets the image description. + // Returns false if memory allocation fails. + virtual bool setDescription(const std::string& description) = 0; + + // Sets the digital zoom ratio. If the numerator is 0, it means digital zoom + // was not used. + // Returns false if memory allocation fails. + virtual bool setDigitalZoomRatio(uint32_t numerator, uint32_t denominator) = 0; + + // Sets the exposure bias. + // Returns false if memory allocation fails. + virtual bool setExposureBias(int32_t numerator, int32_t denominator) = 0; + + // Sets the exposure mode set when the image was shot. + // Returns false if memory allocation fails. + virtual bool setExposureMode(uint16_t exposure_mode) = 0; + + // Sets the program used by the camera to set exposure when the picture is + // taken. + // Returns false if memory allocation fails. + virtual bool setExposureProgram(uint16_t exposure_program) = 0; + + // Sets the exposure time, given in seconds. + // Returns false if memory allocation fails. + virtual bool setExposureTime(uint32_t numerator, uint32_t denominator) = 0; + + // Sets the status of flash. + // Returns false if memory allocation fails. + virtual bool setFlash(uint16_t flash) = 0; + + // Sets the F number. 
+ // Returns false if memory allocation fails. + virtual bool setFNumber(uint32_t numerator, uint32_t denominator) = 0; + + // Sets the focal length of lens used to take the image in millimeters. + // Returns false if memory allocation fails. + virtual bool setFocalLength(uint32_t numerator, uint32_t denominator) = 0; + + // Sets the degree of overall image gain adjustment. + // Returns false if memory allocation fails. + virtual bool setGainControl(uint16_t gain_control) = 0; + + // Sets the altitude in meters. + // Returns false if memory allocation fails. + virtual bool setGpsAltitude(double altitude) = 0; + + // Sets the latitude with degrees minutes seconds format. + // Returns false if memory allocation fails. + virtual bool setGpsLatitude(double latitude) = 0; + + // Sets the longitude with degrees minutes seconds format. + // Returns false if memory allocation fails. + virtual bool setGpsLongitude(double longitude) = 0; + + // Sets GPS processing method. + // Returns false if memory allocation fails. + virtual bool setGpsProcessingMethod(const std::string& method) = 0; + + // Sets GPS date stamp and time stamp (atomic clock). It takes UTC time. + // Returns false if memory allocation fails. + virtual bool setGpsTimestamp(const struct tm& t) = 0; + + // Sets the height (number of rows) of main image. + // Returns false if memory allocation fails. + virtual bool setImageHeight(uint32_t length) = 0; + + // Sets the width (number of columns) of main image. + // Returns false if memory allocation fails. + virtual bool setImageWidth(uint32_t width) = 0; + + // Sets the ISO speed. + // Returns false if memory allocation fails. + virtual bool setIsoSpeedRating(uint16_t iso_speed_ratings) = 0; + + // Sets the kind of light source. + // Returns false if memory allocation fails. + virtual bool setLightSource(uint16_t light_source) = 0; + + // Sets the smallest F number of the lens. + // Returns false if memory allocation fails. 
+ virtual bool setMaxAperture(uint32_t numerator, uint32_t denominator) = 0; + + // Sets the metering mode. + // Returns false if memory allocation fails. + virtual bool setMeteringMode(uint16_t metering_mode) = 0; + + // Sets image orientation. + // Returns false if memory allocation fails. + virtual bool setOrientation(uint16_t orientation) = 0; + + // Sets the unit for measuring XResolution and YResolution. + // Returns false if memory allocation fails. + virtual bool setResolutionUnit(uint16_t resolution_unit) = 0; + + // Sets image saturation. + // Returns false if memory allocation fails. + virtual bool setSaturation(uint16_t saturation) = 0; + + // Sets the type of scene that was shot. + // Returns false if memory allocation fails. + virtual bool setSceneCaptureType(uint16_t type) = 0; + + // Sets image sharpness. + // Returns false if memory allocation fails. + virtual bool setSharpness(uint16_t sharpness) = 0; + + // Sets the shutter speed. + // Returns false if memory allocation fails. + virtual bool setShutterSpeed(int32_t numerator, int32_t denominator) = 0; + + // Sets the distance to the subject, given in meters. + // Returns false if memory allocation fails. + virtual bool setSubjectDistance(uint32_t numerator, uint32_t denominator) = 0; + + // Sets the fractions of seconds for the tag. + // Returns false if memory allocation fails. + virtual bool setSubsecTime(const std::string& subsec_time) = 0; + + // Sets the white balance mode set when the image was shot. + // Returns false if memory allocation fails. + virtual bool setWhiteBalance(uint16_t white_balance) = 0; + + // Sets the number of pixels per resolution unit in the image width. + // Returns false if memory allocation fails. + virtual bool setXResolution(uint32_t numerator, uint32_t denominator) = 0; + + // Sets the position of chrominance components in relation to the luminance + // component. + // Returns false if memory allocation fails. 
+ virtual bool setYCbCrPositioning(uint16_t ycbcr_positioning) = 0; + + // Sets the number of pixels per resolution unit in the image length. + // Returns false if memory allocation fails. + virtual bool setYResolution(uint32_t numerator, uint32_t denominator) = 0; + + // Sets the manufacturer of camera. + // Returns false if memory allocation fails. + virtual bool setMake(const std::string& make) = 0; + + // Sets the model number of camera. + // Returns false if memory allocation fails. + virtual bool setModel(const std::string& model) = 0; + + // Generates APP1 segment. + // Returns false if generating APP1 segment fails. + virtual bool generateApp1() = 0; + + // Gets buffer of APP1 segment. This method must be called only after calling + // GenerateAPP1(). + virtual const uint8_t* getApp1Buffer() = 0; + + // Gets length of APP1 segment. This method must be called only after calling + // GenerateAPP1(). + virtual unsigned int getApp1Length() = 0; +}; + +} // namespace camera3 +} // namespace android + +#endif // ANDROID_SERVERS_CAMERA_EXIF_UTILS_H