Merge "Camera: Improve monochrome camera capability support"

TreeHugger Robot, 6 years ago, committed by Android (Google) Code Review
commit ba92a60c8c

@@ -3524,6 +3524,8 @@ typedef enum acamera_metadata_tag {
* <p>Some devices may choose to provide a second set of calibration
* information for improved quality, including
* ACAMERA_SENSOR_REFERENCE_ILLUMINANT2 and its corresponding matrices.</p>
* <p>Starting from Android Q, this key will not be present for a MONOCHROME camera, even if
* the camera device has RAW capability.</p>
*
* @see ACAMERA_SENSOR_CALIBRATION_TRANSFORM1
* @see ACAMERA_SENSOR_COLOR_TRANSFORM1
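For NDK clients this means the calibration keys can be legitimately absent, so the lookup status should be checked rather than assumed. A minimal sketch (the helper name is hypothetical; `mgr` and `cameraId` are assumed to come from the usual ACameraManager setup):

<pre><code>#include <camera/NdkCameraManager.h>
#include <camera/NdkCameraMetadata.h>

// Hypothetical helper: returns true only when the device publishes
// ACAMERA_SENSOR_REFERENCE_ILLUMINANT1. On a MONOCHROME camera
// (Android Q+) the lookup fails even when RAW is supported.
bool hasReferenceIlluminant1(ACameraManager* mgr, const char* cameraId) {
    ACameraMetadata* chars = nullptr;
    if (ACameraManager_getCameraCharacteristics(mgr, cameraId, &chars) != ACAMERA_OK) {
        return false;
    }
    ACameraMetadata_const_entry entry;
    camera_status_t status = ACameraMetadata_getConstEntry(
            chars, ACAMERA_SENSOR_REFERENCE_ILLUMINANT1, &entry);
    ACameraMetadata_free(chars);
    return status == ACAMERA_OK;  // ACAMERA_ERROR_METADATA_NOT_FOUND when absent
}
</code></pre>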
@@ -3553,6 +3555,8 @@ typedef enum acamera_metadata_tag {
* <p>If this key is present, then ACAMERA_SENSOR_COLOR_TRANSFORM2,
* ACAMERA_SENSOR_CALIBRATION_TRANSFORM2, and
* ACAMERA_SENSOR_FORWARD_MATRIX2 will also be present.</p>
* <p>Starting from Android Q, this key will not be present for a MONOCHROME camera, even if
* the camera device has RAW capability.</p>
*
* @see ACAMERA_SENSOR_CALIBRATION_TRANSFORM2
* @see ACAMERA_SENSOR_COLOR_TRANSFORM2
@@ -3580,6 +3584,8 @@ typedef enum acamera_metadata_tag {
* colorspace) into this camera device's native sensor color
* space under the first reference illuminant
* (ACAMERA_SENSOR_REFERENCE_ILLUMINANT1).</p>
* <p>Starting from Android Q, this key will not be present for a MONOCHROME camera, even if
* the camera device has RAW capability.</p>
*
* @see ACAMERA_SENSOR_REFERENCE_ILLUMINANT1
*/
@@ -3607,6 +3613,8 @@ typedef enum acamera_metadata_tag {
* (ACAMERA_SENSOR_REFERENCE_ILLUMINANT2).</p>
* <p>This matrix will only be present if the second reference
* illuminant is present.</p>
* <p>Starting from Android Q, this key will not be present for a MONOCHROME camera, even if
* the camera device has RAW capability.</p>
*
* @see ACAMERA_SENSOR_REFERENCE_ILLUMINANT2
*/
@@ -3635,6 +3643,8 @@ typedef enum acamera_metadata_tag {
* and the CIE XYZ colorspace when calculating this transform will
* match the standard white point for the first reference illuminant
* (i.e. no chromatic adaptation will be applied by this transform).</p>
* <p>Starting from Android Q, this key will not be present for a MONOCHROME camera, even if
* the camera device has RAW capability.</p>
*
* @see ACAMERA_SENSOR_REFERENCE_ILLUMINANT1
*/
@@ -3665,6 +3675,8 @@ typedef enum acamera_metadata_tag {
* (i.e. no chromatic adaptation will be applied by this transform).</p>
* <p>This matrix will only be present if the second reference
* illuminant is present.</p>
* <p>Starting from Android Q, this key will not be present for a MONOCHROME camera, even if
* the camera device has RAW capability.</p>
*
* @see ACAMERA_SENSOR_REFERENCE_ILLUMINANT2
*/
@@ -3691,6 +3703,8 @@ typedef enum acamera_metadata_tag {
* this matrix is chosen so that the standard white point for this reference
* illuminant in the reference sensor colorspace is mapped to D50 in the
* CIE XYZ colorspace.</p>
* <p>Starting from Android Q, this key will not be present for a MONOCHROME camera, even if
* the camera device has RAW capability.</p>
*
* @see ACAMERA_SENSOR_REFERENCE_ILLUMINANT1
*/
@@ -3719,6 +3733,8 @@ typedef enum acamera_metadata_tag {
* CIE XYZ colorspace.</p>
* <p>This matrix will only be present if the second reference
* illuminant is present.</p>
* <p>Starting from Android Q, this key will not be present for a MONOCHROME camera, even if
* the camera device has RAW capability.</p>
*
* @see ACAMERA_SENSOR_REFERENCE_ILLUMINANT2
*/
@@ -3751,6 +3767,7 @@ typedef enum acamera_metadata_tag {
* level values. For raw capture in particular, it is recommended to use
* pixels from ACAMERA_SENSOR_OPTICAL_BLACK_REGIONS to calculate black
* level values for each frame.</p>
* <p>For a MONOCHROME camera device, all of the 2x2 channels must have the same values.</p>
*
* @see ACAMERA_SENSOR_DYNAMIC_BLACK_LEVEL
* @see ACAMERA_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT
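As an illustration of this guarantee, a client could verify that all four channel offsets agree on a monochrome device; the helper below is a sketch, assuming `chars` holds the device's static metadata:

<pre><code>#include <camera/NdkCameraMetadata.h>

// Illustrative check: on a MONOCHROME camera the four CFA channel
// offsets in ACAMERA_SENSOR_BLACK_LEVEL_PATTERN must be identical.
bool blackLevelPatternUniform(const ACameraMetadata* chars) {
    ACameraMetadata_const_entry bl;
    if (ACameraMetadata_getConstEntry(chars,
            ACAMERA_SENSOR_BLACK_LEVEL_PATTERN, &bl) != ACAMERA_OK ||
            bl.count != 4) {
        return false;
    }
    return bl.data.i32[0] == bl.data.i32[1] &&
           bl.data.i32[0] == bl.data.i32[2] &&
           bl.data.i32[0] == bl.data.i32[3];
}
</code></pre>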
@@ -3845,6 +3862,8 @@ typedef enum acamera_metadata_tag {
* used to interpolate between the provided color transforms when
* processing raw sensor data.</p>
* <p>The order of the values is R, G, B, with R in the lowest index.</p>
* <p>Starting from Android Q, this key will not be present for a MONOCHROME camera, even if
* the camera device has RAW capability.</p>
*/
ACAMERA_SENSOR_NEUTRAL_COLOR_POINT = // rational[3]
ACAMERA_SENSOR_START + 18,
@@ -3875,6 +3894,8 @@ typedef enum acamera_metadata_tag {
* that channel.</p>
* <p>A more detailed description of the noise model can be found in the
* Adobe DNG specification for the NoiseProfile tag.</p>
* <p>For a MONOCHROME camera, there is only one color channel, so the noise
* model coefficients will contain only one S and one O.</p>
*
* @see ACAMERA_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT
*/
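For context, the DNG NoiseProfile model gives a per-pixel noise variance of S * x + O at normalized signal level x. A hedged sketch of applying a monochrome device's single coefficient pair (`result` is assumed to be a capture result's ACameraMetadata*; the helper name is illustrative):

<pre><code>#include <cmath>
#include <camera/NdkCameraMetadata.h>

// Sketch: noise standard deviation at normalized signal level `x`
// from the single (S, O) pair a MONOCHROME camera reports in
// ACAMERA_SENSOR_NOISE_PROFILE (DNG model: variance = S * x + O).
double monoNoiseStddev(const ACameraMetadata* result, double x) {
    ACameraMetadata_const_entry np;
    if (ACameraMetadata_getConstEntry(result,
            ACAMERA_SENSOR_NOISE_PROFILE, &np) != ACAMERA_OK || np.count < 2) {
        return 0.0;  // model unavailable
    }
    return std::sqrt(np.data.d[0] * x + np.data.d[1]);
}
</code></pre>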
@@ -3920,6 +3941,8 @@ typedef enum acamera_metadata_tag {
* <li>R &gt; 1.20 will require strong software correction to produce
* a usable image (&gt;20% divergence).</li>
* </ul>
* <p>Starting from Android Q, this key will not be present for a MONOCHROME camera, even if
* the camera device has RAW capability.</p>
*/
ACAMERA_SENSOR_GREEN_SPLIT = // float
ACAMERA_SENSOR_START + 22,
@@ -4072,6 +4095,7 @@ typedef enum acamera_metadata_tag {
* layout key (see ACAMERA_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT), i.e. the
* nth value given corresponds to the black level offset for the nth
* color channel listed in the CFA.</p>
* <p>For a MONOCHROME camera, all of the 2x2 channels must have the same values.</p>
* <p>This key will be available if ACAMERA_SENSOR_OPTICAL_BLACK_REGIONS is available or the
* camera device advertises this key via {@link ACAMERA_REQUEST_AVAILABLE_RESULT_KEYS }.</p>
*
@@ -4174,7 +4198,8 @@ typedef enum acamera_metadata_tag {
/**
* <p>The arrangement of color filters on sensor;
* represents the colors in the top-left 2x2 section of
* the sensor, in reading order, for a Bayer camera, or the
* light spectrum it captures for a MONOCHROME camera.</p>
*
* <p>Type: byte (acamera_metadata_enum_android_sensor_info_color_filter_arrangement_t)</p>
*
@@ -4643,13 +4668,13 @@ typedef enum acamera_metadata_tag {
* (x,y) ϵ (0 ... N-1, 0 ... M-1) is the value of the shading map at
* pixel ( ((W-1)/(N-1)) * x, ((H-1)/(M-1)) * y) for the four color channels.
* The map is assumed to be bilinearly interpolated between the sample points.</p>
* <p>For a Bayer camera, the channel order is [R, Geven, Godd, B], where Geven is
* the green channel for the even rows of a Bayer pattern, and Godd is the odd rows.
* The shading map is stored in a fully interleaved format, and its size
* is provided in the camera static metadata by ACAMERA_LENS_INFO_SHADING_MAP_SIZE.</p>
* <p>The shading map will generally have on the order of 30-40 rows and columns,
* and will be smaller than 64x64.</p>
* <p>As an example, given a very small map for a Bayer camera defined as:</p>
* <pre><code>ACAMERA_LENS_INFO_SHADING_MAP_SIZE = [ 4, 3 ]
* ACAMERA_STATISTICS_LENS_SHADING_MAP =
* [ 1.3, 1.2, 1.15, 1.2, 1.2, 1.2, 1.15, 1.2,
@@ -4669,6 +4694,17 @@ typedef enum acamera_metadata_tag {
* image of a gray wall (using bicubic interpolation for visual quality)
* as captured by the sensor gives:</p>
* <p><img alt="Image of a uniform white wall (inverse shading map)" src="../images/camera2/metadata/android.statistics.lensShadingMap/inv_shading.png" /></p>
* <p>For a MONOCHROME camera, all of the 2x2 channels must have the same values. An example
* shading map for such a camera is defined as:</p>
* <pre><code>ACAMERA_LENS_INFO_SHADING_MAP_SIZE = [ 4, 3 ]
* ACAMERA_STATISTICS_LENS_SHADING_MAP =
* [ 1.3, 1.3, 1.3, 1.3, 1.2, 1.2, 1.2, 1.2,
* 1.1, 1.1, 1.1, 1.1, 1.3, 1.3, 1.3, 1.3,
* 1.2, 1.2, 1.2, 1.2, 1.1, 1.1, 1.1, 1.1,
* 1.0, 1.0, 1.0, 1.0, 1.2, 1.2, 1.2, 1.2,
* 1.3, 1.3, 1.3, 1.3, 1.2, 1.2, 1.2, 1.2,
* 1.2, 1.2, 1.2, 1.2, 1.3, 1.3, 1.3, 1.3 ]
* </code></pre>
* <p>Note that the RAW image data might be subject to lens shading
* correction not reported on this map. Query
* ACAMERA_SENSOR_INFO_LENS_SHADING_APPLIED to see if RAW image data has subject
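The monochrome example keeps the interleaved four-channel layout with every channel equal; a hypothetical helper that produces such a layout from a single-channel gain map might look like:

<pre><code>#include <vector>

// Hypothetical helper: expand a single-channel monochrome gain map to
// the interleaved [R, Geven, Godd, B] layout used by
// ACAMERA_STATISTICS_LENS_SHADING_MAP, replicating each gain 4 times.
std::vector<float> interleaveMonoShadingMap(const std::vector<float>& mono) {
    std::vector<float> out;
    out.reserve(mono.size() * 4);
    for (float gain : mono) {
        for (int c = 0; c < 4; ++c) {
            out.push_back(gain);  // all four channels identical
        }
    }
    return out;
}
</code></pre>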
@@ -5012,8 +5048,8 @@ typedef enum acamera_metadata_tag {
* of points can be less than max (that is, the request doesn't have to
* always provide a curve with number of points equivalent to
* ACAMERA_TONEMAP_MAX_CURVE_POINTS).</p>
* <p>For devices with MONOCHROME capability, all three channels must have the same set of
* control points.</p>
* <p>A few examples, and their corresponding graphical mappings; these
* only specify the red channel and the precision is limited to 4
* digits, for conciseness.</p>
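In practice a client of a MONOCHROME device submits one curve three times. A minimal sketch, assuming `request` is a valid ACaptureRequest* and using an illustrative four-point curve:

<pre><code>#include <camera/NdkCaptureRequest.h>
#include <camera/NdkCameraMetadata.h>

// Sketch: a MONOCHROME device requires identical control points on
// all three channels, so write the same (Pin, Pout) pairs to the
// red, green, and blue curve tags.
camera_status_t setMonoTonemapCurve(ACaptureRequest* request) {
    const uint8_t mode = ACAMERA_TONEMAP_MODE_CONTRAST_CURVE;
    camera_status_t s = ACaptureRequest_setEntry_u8(request, ACAMERA_TONEMAP_MODE, 1, &mode);
    if (s != ACAMERA_OK) return s;
    const float curve[] = { 0.0f, 0.0f, 0.25f, 0.5f, 0.5f, 0.75f, 1.0f, 1.0f };
    const uint32_t n = sizeof(curve) / sizeof(curve[0]);
    if ((s = ACaptureRequest_setEntry_float(request, ACAMERA_TONEMAP_CURVE_RED, n, curve)) != ACAMERA_OK) return s;
    if ((s = ACaptureRequest_setEntry_float(request, ACAMERA_TONEMAP_CURVE_GREEN, n, curve)) != ACAMERA_OK) return s;
    return ACaptureRequest_setEntry_float(request, ACAMERA_TONEMAP_CURVE_BLUE, n, curve);
}
</code></pre>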
@@ -7373,11 +7409,15 @@ typedef enum acamera_metadata_enum_acamera_request_available_capabilities {
/**
* <p>The camera device is a monochrome camera that doesn't contain a color filter array,
* and for a YUV_420_888 stream, the pixel values on U and V planes are all 128.</p>
* <p>A MONOCHROME camera must support the guaranteed stream combinations required for
* its device level and capabilities. Additionally, if the monochrome camera device
* supports Y8 format, all mandatory stream combination requirements related to {@link AIMAGE_FORMAT_YUV_420_888 YUV_420_888} apply
* to {@link AIMAGE_FORMAT_Y8 Y8} as well. There are no
* mandatory stream combination requirements with regard to
* {@link AIMAGE_FORMAT_Y8 Y8} for Bayer camera devices.</p>
* <p>Starting from Android Q, the SENSOR_INFO_COLOR_FILTER_ARRANGEMENT of a MONOCHROME
* camera will be either MONO or NIR.</p>
*/
ACAMERA_REQUEST_AVAILABLE_CAPABILITIES_MONOCHROME = 12,
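A short sketch of detecting this capability from the static metadata (`chars` is assumed to come from ACameraManager_getCameraCharacteristics):

<pre><code>#include <camera/NdkCameraMetadata.h>

// Sketch: scan the capability list for MONOCHROME.
bool isMonochromeCamera(const ACameraMetadata* chars) {
    ACameraMetadata_const_entry caps;
    if (ACameraMetadata_getConstEntry(chars,
            ACAMERA_REQUEST_AVAILABLE_CAPABILITIES, &caps) != ACAMERA_OK) {
        return false;
    }
    for (uint32_t i = 0; i < caps.count; i++) {
        if (caps.data.u8[i] == ACAMERA_REQUEST_AVAILABLE_CAPABILITIES_MONOCHROME) {
            return true;
        }
    }
    return false;
}
</code></pre>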
@@ -7643,6 +7683,21 @@ typedef enum acamera_metadata_enum_acamera_sensor_info_color_filter_arrangement
*/
ACAMERA_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGB = 4,
/**
* <p>The sensor doesn't have any Bayer color filter, and captures visible
* light in monochrome. The exact weighting and wavelengths captured are not
* specified, but generally include only the visible frequencies. This value
* implies a MONOCHROME camera.</p>
*/
ACAMERA_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_MONO = 5,
/**
* <p>The sensor has a near-infrared filter capturing light with wavelengths
* between roughly 750nm and 1400nm, and the same filter covers the whole
* sensor array. This value implies a MONOCHROME camera.</p>
*/
ACAMERA_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_NIR = 6,
} acamera_metadata_enum_android_sensor_info_color_filter_arrangement_t;
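Because Android Q guarantees one of these two values for a MONOCHROME camera, the CFA entry distinguishes visible-light from near-infrared sensors. A sketch, again assuming `chars` holds the static metadata:

<pre><code>#include <camera/NdkCameraMetadata.h>

// Sketch: classify the sensor spectrum on Android Q+.
const char* describeMonoSpectrum(const ACameraMetadata* chars) {
    ACameraMetadata_const_entry cfa;
    if (ACameraMetadata_getConstEntry(chars,
            ACAMERA_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT, &cfa) != ACAMERA_OK) {
        return "unknown";
    }
    switch (cfa.data.u8[0]) {
        case ACAMERA_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_MONO:
            return "visible-light monochrome";
        case ACAMERA_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_NIR:
            return "near infrared (~750-1400nm)";
        default:
            return "color (Bayer/RGB)";
    }
}
</code></pre>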
// ACAMERA_SENSOR_INFO_TIMESTAMP_SOURCE

@@ -49,6 +49,7 @@ class ANDROID_API OpcodeListBuilder : public LightRefBase<OpcodeListBuilder> {
CFA_RGGB,
CFA_BGGR,
CFA_GBRG,
CFA_NONE,
};
OpcodeListBuilder();
@@ -89,7 +90,6 @@ class ANDROID_API OpcodeListBuilder : public LightRefBase<OpcodeListBuilder> {
CfaLayout cfa,
const float* lensShadingMap);
/**
* Add a GainMap opcode with the given fields. The mapGains array
* must have mapPointsV * mapPointsH * mapPlanes elements.
@@ -197,6 +197,33 @@ class ANDROID_API OpcodeListBuilder : public LightRefBase<OpcodeListBuilder> {
status_t addOpcodePreamble(uint32_t opcodeId);
private:
/**
* Add Bayer GainMap opcode(s) for the given metadata parameters.
* CFA layout must match the layout of the shading map passed into the
* lensShadingMap parameter.
*
* Returns OK on success, or a negative error code.
*/
status_t addBayerGainMapsForMetadata(uint32_t lsmWidth,
uint32_t lsmHeight,
uint32_t activeAreaWidth,
uint32_t activeAreaHeight,
CfaLayout cfa,
const float* lensShadingMap);
/**
* Add a monochrome GainMap opcode for the given metadata parameters.
* The lensShadingMap parameter uses the same interleaved 4-channel
* layout as the Bayer variant; only the first channel's values are consumed.
*
* Returns OK on success, or a negative error code.
*/
status_t addMonochromeGainMapsForMetadata(uint32_t lsmWidth,
uint32_t lsmHeight,
uint32_t activeAreaWidth,
uint32_t activeAreaHeight,
const float* lensShadingMap);
};
} /*namespace img_utils*/

@@ -60,34 +60,36 @@ status_t OpcodeListBuilder::addGainMapsForMetadata(uint32_t lsmWidth,
uint32_t activeAreaRight,
CfaLayout cfa,
const float* lensShadingMap) {
status_t err = OK;
uint32_t activeAreaWidth = activeAreaRight - activeAreaLeft;
uint32_t activeAreaHeight = activeAreaBottom - activeAreaTop;
switch (cfa) {
case CFA_RGGB:
case CFA_GRBG:
case CFA_GBRG:
case CFA_BGGR:
err = addBayerGainMapsForMetadata(lsmWidth, lsmHeight, activeAreaWidth,
activeAreaHeight, cfa, lensShadingMap);
break;
case CFA_NONE:
err = addMonochromeGainMapsForMetadata(lsmWidth, lsmHeight, activeAreaWidth,
activeAreaHeight, lensShadingMap);
break;
default:
ALOGE("%s: Unknown CFA layout %d", __FUNCTION__, cfa);
err = BAD_VALUE;
break;
}
return err;
}
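With this dispatch in place, a caller producing DNG opcodes for a monochrome sensor passes the new CFA_NONE layout. A sketch with illustrative values (parameter order as implied by the definitions above; a 4x3 map over a hypothetical 4000x3000 active array):

<pre><code>#include <vector>
#include <img_utils/DngUtils.h>  // OpcodeListBuilder (AOSP-internal header)

using android::status_t;
using android::sp;
using android::img_utils::OpcodeListBuilder;

// Sketch: interleaved monochrome shading map with all channels equal.
status_t buildMonoOpcodes() {
    std::vector<float> lensShadingMap(4 * 3 * 4, 1.0f);
    sp<OpcodeListBuilder> builder = new OpcodeListBuilder();
    return builder->addGainMapsForMetadata(
            /*lsmWidth*/ 4, /*lsmHeight*/ 3,
            /*activeAreaTop*/ 0, /*activeAreaLeft*/ 0,
            /*activeAreaBottom*/ 3000, /*activeAreaRight*/ 4000,
            OpcodeListBuilder::CFA_NONE, lensShadingMap.data());
}
</code></pre>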
status_t OpcodeListBuilder::addBayerGainMapsForMetadata(uint32_t lsmWidth,
uint32_t lsmHeight,
uint32_t activeAreaWidth,
uint32_t activeAreaHeight,
CfaLayout cfa,
const float* lensShadingMap) {
uint32_t redTop = 0;
uint32_t redLeft = 0;
uint32_t greenEvenTop = 0;
@@ -143,6 +145,32 @@ status_t OpcodeListBuilder::addGainMapsForMetadata(uint32_t lsmWidth,
return BAD_VALUE;
}
std::vector<float> redMapVector(lsmWidth * lsmHeight);
float *redMap = redMapVector.data();
std::vector<float> greenEvenMapVector(lsmWidth * lsmHeight);
float *greenEvenMap = greenEvenMapVector.data();
std::vector<float> greenOddMapVector(lsmWidth * lsmHeight);
float *greenOddMap = greenOddMapVector.data();
std::vector<float> blueMapVector(lsmWidth * lsmHeight);
float *blueMap = blueMapVector.data();
double spacingV = 1.0 / std::max(1u, lsmHeight - 1);
double spacingH = 1.0 / std::max(1u, lsmWidth - 1);
size_t lsmMapSize = lsmWidth * lsmHeight * 4;
// Split lens shading map channels into separate arrays
size_t j = 0;
for (size_t i = 0; i < lsmMapSize; i += 4, ++j) {
redMap[j] = lensShadingMap[i + LSM_R_IND];
greenEvenMap[j] = lensShadingMap[i + LSM_GE_IND];
greenOddMap[j] = lensShadingMap[i + LSM_GO_IND];
blueMap[j] = lensShadingMap[i + LSM_B_IND];
}
status_t err = addGainMap(/*top*/redTop,
/*left*/redLeft,
/*bottom*/activeAreaHeight - 1,
@@ -216,6 +244,46 @@ status_t OpcodeListBuilder::addGainMapsForMetadata(uint32_t lsmWidth,
return err;
}
status_t OpcodeListBuilder::addMonochromeGainMapsForMetadata(uint32_t lsmWidth,
uint32_t lsmHeight,
uint32_t activeAreaWidth,
uint32_t activeAreaHeight,
const float* lensShadingMap) {
std::vector<float> mapVector(lsmWidth * lsmHeight);
float *map = mapVector.data();
double spacingV = 1.0 / std::max(1u, lsmHeight - 1);
double spacingH = 1.0 / std::max(1u, lsmWidth - 1);
size_t lsmMapSize = lsmWidth * lsmHeight * 4;
// All four interleaved channels are identical; take the first sample of each group of four
size_t j = 0;
for (size_t i = 0; i < lsmMapSize; i += 4, ++j) {
map[j] = lensShadingMap[i];
}
status_t err = addGainMap(/*top*/0,
/*left*/0,
/*bottom*/activeAreaHeight - 1,
/*right*/activeAreaWidth - 1,
/*plane*/0,
/*planes*/1,
/*rowPitch*/1,
/*colPitch*/1,
/*mapPointsV*/lsmHeight,
/*mapPointsH*/lsmWidth,
/*mapSpacingV*/spacingV,
/*mapSpacingH*/spacingH,
/*mapOriginV*/0,
/*mapOriginH*/0,
/*mapPlanes*/1,
/*mapGains*/map);
return err;
}
status_t OpcodeListBuilder::addGainMap(uint32_t top,
uint32_t left,
uint32_t bottom,

@@ -425,6 +425,102 @@ void CameraProviderManager::ProviderInfo::DeviceInfo3::queryPhysicalCameraIds()
}
}
status_t CameraProviderManager::ProviderInfo::DeviceInfo3::fixupMonochromeTags() {
status_t res = OK;
auto& c = mCameraCharacteristics;
// Override static metadata for MONOCHROME camera with older device version
if (mVersion.get_major() == 3 && mVersion.get_minor() < 5) {
camera_metadata_entry cap = c.find(ANDROID_REQUEST_AVAILABLE_CAPABILITIES);
for (size_t i = 0; i < cap.count; i++) {
if (cap.data.u8[i] == ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MONOCHROME) {
// ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT
uint8_t cfa = ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_MONO;
res = c.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT, &cfa, 1);
if (res != OK) {
ALOGE("%s: Failed to update COLOR_FILTER_ARRANGEMENT: %s (%d)",
__FUNCTION__, strerror(-res), res);
return res;
}
// ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS
const std::vector<uint32_t> sKeys = {
ANDROID_SENSOR_REFERENCE_ILLUMINANT1,
ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
ANDROID_SENSOR_CALIBRATION_TRANSFORM2,
ANDROID_SENSOR_COLOR_TRANSFORM1,
ANDROID_SENSOR_COLOR_TRANSFORM2,
ANDROID_SENSOR_FORWARD_MATRIX1,
ANDROID_SENSOR_FORWARD_MATRIX2,
};
res = removeAvailableKeys(c, sKeys,
ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS);
if (res != OK) {
ALOGE("%s: Failed to update REQUEST_AVAILABLE_CHARACTERISTICS_KEYS: %s (%d)",
__FUNCTION__, strerror(-res), res);
return res;
}
// ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS
const std::vector<uint32_t> reqKeys = {
ANDROID_COLOR_CORRECTION_MODE,
ANDROID_COLOR_CORRECTION_TRANSFORM,
ANDROID_COLOR_CORRECTION_GAINS,
};
res = removeAvailableKeys(c, reqKeys, ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS);
if (res != OK) {
ALOGE("%s: Failed to update REQUEST_AVAILABLE_REQUEST_KEYS: %s (%d)",
__FUNCTION__, strerror(-res), res);
return res;
}
// ANDROID_REQUEST_AVAILABLE_RESULT_KEYS
const std::vector<uint32_t> resKeys = {
ANDROID_SENSOR_GREEN_SPLIT,
ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
ANDROID_COLOR_CORRECTION_MODE,
ANDROID_COLOR_CORRECTION_TRANSFORM,
ANDROID_COLOR_CORRECTION_GAINS,
};
res = removeAvailableKeys(c, resKeys, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS);
if (res != OK) {
ALOGE("%s: Failed to update REQUEST_AVAILABLE_RESULT_KEYS: %s (%d)",
__FUNCTION__, strerror(-res), res);
return res;
}
// ANDROID_SENSOR_BLACK_LEVEL_PATTERN
camera_metadata_entry blEntry = c.find(ANDROID_SENSOR_BLACK_LEVEL_PATTERN);
for (size_t j = 1; j < blEntry.count; j++) {
blEntry.data.i32[j] = blEntry.data.i32[0];
}
}
}
}
return res;
}
status_t CameraProviderManager::ProviderInfo::DeviceInfo3::removeAvailableKeys(
CameraMetadata& c, const std::vector<uint32_t>& keys, uint32_t keyTag) {
status_t res = OK;
camera_metadata_entry keysEntry = c.find(keyTag);
if (keysEntry.count == 0) {
ALOGE("%s: Failed to find tag %u: %s (%d)", __FUNCTION__, keyTag, strerror(-res), res);
return res;
}
std::vector<int32_t> vKeys;
vKeys.reserve(keysEntry.count);
for (size_t i = 0; i < keysEntry.count; i++) {
if (std::find(keys.begin(), keys.end(), keysEntry.data.i32[i]) == keys.end()) {
vKeys.push_back(keysEntry.data.i32[i]);
}
}
res = c.update(keyTag, vKeys.data(), vKeys.size());
return res;
}
bool CameraProviderManager::isLogicalCamera(const std::string& id,
std::vector<std::string>* physicalCameraIds) {
std::lock_guard<std::mutex> lock(mInterfaceMutex);
@@ -1131,6 +1227,12 @@ CameraProviderManager::ProviderInfo::DeviceInfo3::DeviceInfo3(const std::string&
__FUNCTION__, mId.c_str(), CameraProviderManager::statusToString(status), status);
return;
}
status_t res = fixupMonochromeTags();
if (OK != res) {
ALOGE("%s: Unable to fix up monochrome tags based for older HAL version: %s (%d)",
__FUNCTION__, strerror(-res), res);
return;
}
camera_metadata_entry flashAvailable =
mCameraCharacteristics.find(ANDROID_FLASH_INFO_AVAILABLE);
if (flashAvailable.count == 1 &&

@@ -385,6 +385,9 @@ private:
CameraMetadata mCameraCharacteristics;
std::unordered_map<std::string, CameraMetadata> mPhysicalCameraCharacteristics;
void queryPhysicalCameraIds();
status_t fixupMonochromeTags();
status_t removeAvailableKeys(CameraMetadata& c, const std::vector<uint32_t>& keys,
uint32_t keyTag);
};
private:

@@ -79,7 +79,8 @@ Camera3Device::Camera3Device(const String8 &id):
mNextReprocessShutterFrameNumber(0),
mListener(NULL),
mVendorTagId(CAMERA_METADATA_INVALID_VENDOR_ID),
mLastTemplateId(-1),
mNeedFixupMonochromeTags(false)
{
ATRACE_CALL();
ALOGV("%s: Created device for camera %s", __FUNCTION__, mId.string());
@@ -188,6 +189,28 @@ status_t Camera3Device::initialize(sp<CameraProviderManager> manager, const Stri
mTagMonitor.parseTagsToMonitor(String8(monitorTags));
}
// Metadata tags need fixup for monochrome camera devices with HAL
// version less than 3.5.
hardware::hidl_version maxVersion{0,0};
res = manager->getHighestSupportedVersion(mId.string(), &maxVersion);
if (res != OK) {
ALOGE("%s: Error in getting camera device version id: %s (%d)",
__FUNCTION__, strerror(-res), res);
return res;
}
int deviceVersion = HARDWARE_DEVICE_API_VERSION(
maxVersion.get_major(), maxVersion.get_minor());
bool isMonochrome = false;
camera_metadata_entry_t entry = mDeviceInfo.find(ANDROID_REQUEST_AVAILABLE_CAPABILITIES);
for (size_t i = 0; i < entry.count; i++) {
uint8_t capability = entry.data.u8[i];
if (capability == ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MONOCHROME) {
isMonochrome = true;
}
}
mNeedFixupMonochromeTags = (isMonochrome && deviceVersion < CAMERA_DEVICE_API_VERSION_3_5);
return initializeCommonLocked();
}
@@ -3323,6 +3346,13 @@ void Camera3Device::sendPartialCaptureResult(const camera_metadata_t * partialRe
captureResult.mResultExtras = resultExtras;
captureResult.mMetadata = partialResult;
// Fix up result metadata for monochrome camera.
status_t res = fixupMonochromeTags(mDeviceInfo, captureResult.mMetadata);
if (res != OK) {
SET_ERR("Failed to override result metadata: %s (%d)", strerror(-res), res);
return;
}
insertResultLocked(&captureResult, frameNumber);
}
@@ -3394,6 +3424,21 @@ void Camera3Device::sendCaptureResult(CameraMetadata &pendingMetadata,
frameNumber, strerror(res), res);
return;
}
// Fix up result metadata for monochrome camera.
res = fixupMonochromeTags(mDeviceInfo, captureResult.mMetadata);
if (res != OK) {
SET_ERR("Failed to override result metadata: %s (%d)", strerror(-res), res);
return;
}
for (auto& physicalMetadata : captureResult.mPhysicalMetadatas) {
String8 cameraId8(physicalMetadata.mPhysicalCameraId);
res = fixupMonochromeTags(mPhysicalDeviceInfoMap.at(cameraId8.c_str()),
physicalMetadata.mPhysicalCameraMetadata);
if (res != OK) {
SET_ERR("Failed to override result metadata: %s (%d)", strerror(-res), res);
return;
}
}
mTagMonitor.monitorMetadata(TagMonitor::RESULT,
frameNumber, timestamp.data.i64[0], captureResult.mMetadata);
@@ -3567,7 +3612,7 @@ void Camera3Device::processCaptureResult(const camera3_capture_result *result) {
if (shutterTimestamp == 0) {
request.pendingMetadata = result->result;
request.collectedPartialResult = collectedPartialResult;
} else if (request.hasCallback) {
CameraMetadata metadata;
metadata = result->result;
sendCaptureResult(metadata, request.resultExtras,
@@ -6369,4 +6414,75 @@ bool Camera3Device::RequestBufferStateMachine::checkSwitchToStopLocked() {
return false;
}
status_t Camera3Device::fixupMonochromeTags(const CameraMetadata& deviceInfo,
CameraMetadata& resultMetadata) {
status_t res = OK;
if (!mNeedFixupMonochromeTags) {
return res;
}
// Remove tags that are not applicable to monochrome camera.
int32_t tagsToRemove[] = {
ANDROID_SENSOR_GREEN_SPLIT,
ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
ANDROID_COLOR_CORRECTION_MODE,
ANDROID_COLOR_CORRECTION_TRANSFORM,
ANDROID_COLOR_CORRECTION_GAINS,
};
for (auto tag : tagsToRemove) {
res = resultMetadata.erase(tag);
if (res != OK) {
ALOGE("%s: Failed to remove tag %d for monochrome camera", __FUNCTION__, tag);
return res;
}
}
// ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
camera_metadata_entry blEntry = resultMetadata.find(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
for (size_t i = 1; i < blEntry.count; i++) {
blEntry.data.f[i] = blEntry.data.f[0];
}
// ANDROID_SENSOR_NOISE_PROFILE
camera_metadata_entry npEntry = resultMetadata.find(ANDROID_SENSOR_NOISE_PROFILE);
if (npEntry.count > 0 && npEntry.count % 2 == 0) {
double np[] = {npEntry.data.d[0], npEntry.data.d[1]};
res = resultMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, np, 2);
if (res != OK) {
ALOGE("%s: Failed to update SENSOR_NOISE_PROFILE: %s (%d)",
__FUNCTION__, strerror(-res), res);
return res;
}
}
// ANDROID_STATISTICS_LENS_SHADING_MAP
camera_metadata_ro_entry lsSizeEntry = deviceInfo.find(ANDROID_LENS_INFO_SHADING_MAP_SIZE);
camera_metadata_entry lsEntry = resultMetadata.find(ANDROID_STATISTICS_LENS_SHADING_MAP);
if (lsSizeEntry.count == 2 && lsEntry.count > 0
&& (int32_t)lsEntry.count == 4 * lsSizeEntry.data.i32[0] * lsSizeEntry.data.i32[1]) {
for (int32_t i = 0; i < lsSizeEntry.data.i32[0] * lsSizeEntry.data.i32[1]; i++) {
lsEntry.data.f[4*i+1] = lsEntry.data.f[4*i];
lsEntry.data.f[4*i+2] = lsEntry.data.f[4*i];
lsEntry.data.f[4*i+3] = lsEntry.data.f[4*i];
}
}
// ANDROID_TONEMAP_CURVE_BLUE
// ANDROID_TONEMAP_CURVE_GREEN
// ANDROID_TONEMAP_CURVE_RED
camera_metadata_entry tcbEntry = resultMetadata.find(ANDROID_TONEMAP_CURVE_BLUE);
camera_metadata_entry tcgEntry = resultMetadata.find(ANDROID_TONEMAP_CURVE_GREEN);
camera_metadata_entry tcrEntry = resultMetadata.find(ANDROID_TONEMAP_CURVE_RED);
if (tcbEntry.count > 0
&& tcbEntry.count == tcgEntry.count
&& tcbEntry.count == tcrEntry.count) {
for (size_t i = 0; i < tcbEntry.count; i++) {
tcbEntry.data.f[i] = tcrEntry.data.f[i];
tcgEntry.data.f[i] = tcrEntry.data.f[i];
}
}
return res;
}
}; // namespace android

@@ -1334,6 +1334,9 @@ class Camera3Device :
int mRequestBufferStatusId;
} mRequestBufferSM;
// Fix up result metadata for monochrome camera.
bool mNeedFixupMonochromeTags;
status_t fixupMonochromeTags(const CameraMetadata& deviceInfo, CameraMetadata& resultMetadata);
}; // class Camera3Device
}; // namespace android
