diff --git a/camera/ndk/include/camera/NdkCameraMetadataTags.h b/camera/ndk/include/camera/NdkCameraMetadataTags.h index 629d75afae..17938775ac 100644 --- a/camera/ndk/include/camera/NdkCameraMetadataTags.h +++ b/camera/ndk/include/camera/NdkCameraMetadataTags.h @@ -471,10 +471,6 @@ typedef enum acamera_metadata_tag { * Otherwise will always be present.
*The maximum number of regions supported by the device is determined by the value * of android.control.maxRegionsAe.
- *The data representation is int[5 * area_count]. - * Every five elements represent a metering region of (xmin, ymin, xmax, ymax, weight). - * The rectangle is defined to be inclusive on xmin and ymin, but exclusive on xmax and - * ymax.
*The coordinate system is based on the active pixel array, * with (0,0) being the top-left pixel in the active pixel array, and * (ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE.width - 1, @@ -495,6 +491,10 @@ typedef enum acamera_metadata_tag { * region and output only the intersection rectangle as the metering region in the result * metadata. If the region is entirely outside the crop region, it will be ignored and * not reported in the result metadata.
+ *The data representation is int[5 * area_count]
.
+ * Every five elements represent a metering region of (xmin, ymin, xmax, ymax, weight)
.
+ * The rectangle is defined to be inclusive on xmin and ymin, but exclusive on xmax and
+ * ymax.
The maximum number of focus areas supported by the device is determined by the value * of android.control.maxRegionsAf.
- *The data representation is int[5 * area_count]. - * Every five elements represent a metering region of (xmin, ymin, xmax, ymax, weight). - * The rectangle is defined to be inclusive on xmin and ymin, but exclusive on xmax and - * ymax.
*The coordinate system is based on the active pixel array, * with (0,0) being the top-left pixel in the active pixel array, and * (ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE.width - 1, @@ -657,6 +653,10 @@ typedef enum acamera_metadata_tag { * region and output only the intersection rectangle as the metering region in the result * metadata. If the region is entirely outside the crop region, it will be ignored and * not reported in the result metadata.
+ *The data representation is int[5 * area_count]
.
+ * Every five elements represent a metering region of (xmin, ymin, xmax, ymax, weight)
.
+ * The rectangle is defined to be inclusive on xmin and ymin, but exclusive on xmax and
+ * ymax.
The maximum number of regions supported by the device is determined by the value * of android.control.maxRegionsAwb.
- *The data representation is int[5 * area_count]. - * Every five elements represent a metering region of (xmin, ymin, xmax, ymax, weight). - * The rectangle is defined to be inclusive on xmin and ymin, but exclusive on xmax and - * ymax.
*The coordinate system is based on the active pixel array, * with (0,0) being the top-left pixel in the active pixel array, and * (ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE.width - 1, @@ -815,6 +811,10 @@ typedef enum acamera_metadata_tag { * region and output only the intersection rectangle as the metering region in the result * metadata. If the region is entirely outside the crop region, it will be ignored and * not reported in the result metadata.
+ *The data representation is int[5 * area_count]
.
+ * Every five elements represent a metering region of (xmin, ymin, xmax, ymax, weight)
.
+ * The rectangle is defined to be inclusive on xmin and ymin, but exclusive on xmax and
+ * ymax.
When set to AUTO, the individual algorithm controls in * ACAMERA_CONTROL_* are in effect, such as ACAMERA_CONTROL_AF_MODE.
*When set to USE_SCENE_MODE, the individual controls in - * ACAMERA_CONTROL_* are mostly disabled, and the camera device implements - * one of the scene mode settings (such as ACTION, SUNSET, or PARTY) - * as it wishes. The camera device scene mode 3A settings are provided by - * capture results {@link ACameraMetadata} from - * {@link ACameraCaptureSession_captureCallback_result}.
+ * ACAMERA_CONTROL_* are mostly disabled, and the camera device + * implements one of the scene mode settings (such as ACTION, + * SUNSET, or PARTY) as it wishes. The camera device scene mode + * 3A settings are provided by {@link ACameraCaptureSession_captureCallback_result capture results}. *When set to OFF_KEEP_STATE, it is similar to OFF mode, the only difference * is that this frame will not be used by camera device background 3A statistics * update, as if this frame is never captured. This mode can be used in the scenario @@ -1043,20 +1042,18 @@ typedef enum acamera_metadata_tag { *
For constant-framerate recording, for each normal
* CamcorderProfile, that is, a
* CamcorderProfile that has
- * quality
- * in the range [
- * QUALITY_LOW,
- * QUALITY_2160P],
- * if the profile is supported by the device and has
- * videoFrameRate
- * x
, this list will always include (x
,x
).
x
, this list will
+ * always include (x
,x
).
*
* Also, a camera device must either not support any
* CamcorderProfile,
* or support at least one
- * normal CamcorderProfile
- * that has
+ * normal CamcorderProfile that has
* videoFrameRate x
>= 24.
For example, when requests are submitted in the following order:
- * Request A: enableZsl is true
, ACAMERA_CONTROL_CAPTURE_INTENT is PREVIEW
- * Request B: enableZsl is true
, ACAMERA_CONTROL_CAPTURE_INTENT is STILL_CAPTURE
The output images for request B may have contents captured before the output images for * request A, and the result metadata for request B may be older than the result metadata for * request A.
- *Note that when enableZsl is true
, it is not guaranteed to get output images captured in the
- * past for requests with STILL_CAPTURE capture intent.
Note that when enableZsl is true
, it is not guaranteed to get output images captured in
+ * the past for requests with STILL_CAPTURE capture intent.
For applications targeting SDK versions O and newer, the value of enableZsl in
* TEMPLATE_STILL_CAPTURE template may be true
. The value in other templates is always
* false
if present.
When an ACAMERA_JPEG_ORIENTATION of non-zero degree is requested, * the camera device will handle thumbnail rotation in one of the following ways:
*The position of the camera device's lens optical center,
* as a three-dimensional vector (x,y,z)
, relative to the
* optical center of the largest camera device facing in the
- * same direction as this camera, in the
- * Android sensor coordinate axes.
- * Note that only the axis definitions are shared with
+ * same direction as this camera, in the Android sensor coordinate
+ * axes. Note that only the axis definitions are shared with
* the sensor coordinate system, but not the origin.
If this device is the largest or only camera device with a
* given facing, then this position will be (0, 0, 0)
; a
@@ -2662,11 +2658,12 @@ typedef enum acamera_metadata_tag {
* into the 3 stream types as below:
A list of all keys that the camera device has available - * to use with {@link ACaptureRequest}.
+ * to use with {@link ACaptureRequest }. * *Type: int32[n]
* @@ -2809,9 +2806,7 @@ typedef enum acamera_metadata_tag { ACAMERA_REQUEST_AVAILABLE_REQUEST_KEYS = // int32[n] ACAMERA_REQUEST_START + 13, /** - *A list of all keys that the camera device has available - * to query with {@link ACameraMetadata} from - * {@link ACameraCaptureSession_captureCallback_result}.
+ *A list of all keys that the camera device has available to use with {@link ACameraCaptureSession_captureCallback_result }.
* *Type: int32[n]
* @@ -2842,9 +2837,7 @@ typedef enum acamera_metadata_tag { ACAMERA_REQUEST_AVAILABLE_RESULT_KEYS = // int32[n] ACAMERA_REQUEST_START + 14, /** - *A list of all keys that the camera device has available - * to query with {@link ACameraMetadata} from - * {@link ACameraManager_getCameraCharacteristics}.
+ *A list of all keys that the camera device has available to use with {@link ACameraManager_getCameraCharacteristics }.
* *Type: int32[n]
* @@ -2876,7 +2869,6 @@ typedef enum acamera_metadata_tag { * * *This control can be used to implement digital zoom.
- *The data representation is int[4], which maps to (left, top, width, height).
*The crop region coordinate system is based off
* ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE, with (0, 0)
being the
* top-left corner of the sensor active array.
The data representation is int[4], which maps to (left, top, width, height).
* * @see ACAMERA_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM * @see ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE @@ -3061,13 +3054,14 @@ typedef enum acamera_metadata_tag { * ignored). *The following formats may always have a stall duration:
*The following formats will never have a stall duration:
*All other formats may or may not have an allowed stall duration on * a per-capability basis; refer to ACAMERA_REQUEST_AVAILABLE_CAPABILITIES @@ -3177,39 +3171,29 @@ typedef enum acamera_metadata_tag { * can run concurrently to the rest of the camera pipeline, but * cannot process more than 1 capture at a time. * - *
The necessary information for the application, given the model above, - * is provided via - * {@link ACAMERA_SCALER_AVAILABLE_MIN_FRAME_DURATIONS}. - * These are used to determine the maximum frame rate / minimum frame - * duration that is possible for a given stream configuration.
+ *The necessary information for the application, given the model above, is provided via + * {@link ACAMERA_SCALER_AVAILABLE_MIN_FRAME_DURATIONS }. + * These are used to determine the maximum frame rate / minimum frame duration that is + * possible for a given stream configuration.
*Specifically, the application can use the following rules to * determine the minimum frame duration it can request from the camera * device:
*S
.S
, by looking
- * it up in {@link ACAMERA_SCALER_AVAILABLE_MIN_FRAME_DURATIONS}
- * (with its respective size/format). Let this set of frame durations be
- * called F
.R
, the minimum frame duration allowed
- * for R
is the maximum out of all values in F
. Let the streams
- * used in R
be called S_r
.S
.S
, by looking it up in {@link ACAMERA_SCALER_AVAILABLE_MIN_FRAME_DURATIONS }
+ * (with its respective size/format). Let this set of frame durations be called F
.R
, the minimum frame duration allowed for R
is the maximum
+ * out of all values in F
. Let the streams used in R
be called S_r
.If none of the streams in S_r
have a stall time (listed in {@link
- * ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS}
- * using its respective size/format), then the frame duration in F
- * determines the steady state frame rate that the application will get
- * if it uses R
as a repeating request. Let this special kind of
- * request be called Rsimple
.
A repeating request Rsimple
can be occasionally interleaved
- * by a single capture of a new request Rstall
(which has at least
- * one in-use stream with a non-0 stall time) and if Rstall
has the
- * same minimum frame duration this will not cause a frame rate loss
- * if all buffers from the previous Rstall
have already been
- * delivered.
For more details about stalling, see - * {@link ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS}.
+ *If none of the streams in S_r
have a stall time (listed in {@link ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS }
+ * using its respective size/format), then the frame duration in F
determines the steady
+ * state frame rate that the application will get if it uses R
as a repeating request. Let
+ * this special kind of request be called Rsimple
.
A repeating request Rsimple
can be occasionally interleaved by a single capture of a
+ * new request Rstall
(which has at least one in-use stream with a non-0 stall time) and if
+ * Rstall
has the same minimum frame duration this will not cause a frame rate loss if all
+ * buffers from the previous Rstall
have already been delivered.
For more details about stalling, see {@link ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS }.
*This control is only effective if ACAMERA_CONTROL_AE_MODE or ACAMERA_CONTROL_MODE is set to * OFF; otherwise the auto-exposure algorithm will override this value.
* @@ -3567,14 +3551,12 @@ typedef enum acamera_metadata_tag { * timestamps for other captures from the same camera device, but are * not guaranteed to be comparable to any other time source. *When ACAMERA_SENSOR_INFO_TIMESTAMP_SOURCE ==
REALTIME, the
- * timestamps measure time in the same timebase as
- * elapsedRealtimeNanos
- * (or CLOCK_BOOTTIME), and they can
+ * timestamps measure time in the same timebase as SystemClock#elapsedRealtimeNanos, and they can
* be compared to other timestamps from other subsystems that
* are using that base.
For reprocessing, the timestamp will match the start of exposure of - * the input image, i.e. {@link CaptureResult#SENSOR_TIMESTAMP the - * timestamp} in the TotalCaptureResult that was used to create the + * the input image, i.e. the + * timestamp in the TotalCaptureResult that was used to create the * reprocess capture request.
* * @see ACAMERA_SENSOR_INFO_TIMESTAMP_SOURCE @@ -3775,7 +3757,6 @@ typedef enum acamera_metadata_tag { * optically shielded pixel areas. By blocking light, these pixels * provides a reliable black reference for black level compensation * in active array region. - *The data representation is int[4], which maps to (left, top, width, height).
*This key provides a list of disjoint rectangles specifying the * regions of optically shielded (with metal shield) black pixel * regions if the camera device is capable of reading out these black @@ -3785,6 +3766,7 @@ typedef enum acamera_metadata_tag { * black level of each captured raw images.
*When this key is reported, the ACAMERA_SENSOR_DYNAMIC_BLACK_LEVEL and * ACAMERA_SENSOR_DYNAMIC_WHITE_LEVEL will also be reported.
+ *The data representation is int[4]
, which maps to (left, top, width, height)
.
This key will be available if ACAMERA_SENSOR_OPTICAL_BLACK_REGIONS is - * available or the camera device advertises this key via - * {@link ACAMERA_REQUEST_AVAILABLE_RESULT_KEYS}.
+ *This key will be available if ACAMERA_SENSOR_OPTICAL_BLACK_REGIONS is available or the + * camera device advertises this key via {@link ACAMERA_REQUEST_AVAILABLE_RESULT_KEYS }.
* * @see ACAMERA_SENSOR_BLACK_LEVEL_PATTERN * @see ACAMERA_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT @@ -3853,7 +3834,7 @@ typedef enum acamera_metadata_tag { * estimated white level for each frame. *This key will be available if ACAMERA_SENSOR_OPTICAL_BLACK_REGIONS is * available or the camera device advertises this key via - * {@link ACAMERA_REQUEST_AVAILABLE_RESULT_KEYS}.
+ * {@link ACAMERA_REQUEST_AVAILABLE_RESULT_KEYS }. * * @see ACAMERA_SENSOR_BLACK_LEVEL_PATTERN * @see ACAMERA_SENSOR_INFO_WHITE_LEVEL @@ -3882,13 +3863,13 @@ typedef enum acamera_metadata_tag { *This rectangle is defined relative to the full pixel array; (0,0) is the top-left of * the full pixel array, and the size of the full pixel array is given by * ACAMERA_SENSOR_INFO_PIXEL_ARRAY_SIZE.
- *The data representation is int[4], which maps to (left, top, width, height).
*The coordinate system for most other keys that list pixel coordinates, including
* ACAMERA_SCALER_CROP_REGION, is defined relative to the active array rectangle given in
* this field, with (0, 0)
being the top-left of this rectangle.
The active array may be smaller than the full pixel array, since the full array may * include black calibration pixels or other inactive regions, and geometric correction * resulting in scaling or cropping may have been applied.
+ *The data representation is int[4]
, which maps to (left, top, width, height)
.
Attempting to use frame durations beyond the maximum will result in the frame * duration being clipped to the maximum. See that control for a full definition of frame * durations.
- *Refer to {@link - * ACAMERA_SCALER_AVAILABLE_MIN_FRAME_DURATIONS} + *
Refer to {@link ACAMERA_SCALER_AVAILABLE_MIN_FRAME_DURATIONS } * for the minimum frame duration values.
*/ ACAMERA_SENSOR_INFO_MAX_FRAME_DURATION = // int64 @@ -4000,9 +3980,9 @@ typedef enum acamera_metadata_tag { * the raw buffers produced by this sensor. *If a camera device supports raw sensor formats, either this or * ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE is the maximum dimensions for the raw - * output formats listed in ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS (this depends on - * whether or not the image sensor returns buffers containing pixels that are not - * part of the active array region for blacklevel calibration or other purposes).
+ * output formats listed in {@link ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS } + * (this depends on whether or not the image sensor returns buffers containing pixels that + * are not part of the active array region for blacklevel calibration or other purposes). *Some parts of the full pixel array may not receive light from the scene, * or be otherwise inactive. The ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE key * defines the rectangle of active pixels that will be included in processed image @@ -4092,7 +4072,6 @@ typedef enum acamera_metadata_tag { *
The data representation is int[4], which maps to (left, top, width, height).
*This is the rectangle representing the size of the active region of the sensor (i.e. * the region that actually receives light from the scene) before any geometric correction * has been applied, and should be treated as the active region rectangle for any of the @@ -4143,6 +4122,7 @@ typedef enum acamera_metadata_tag { * ACAMERA_SENSOR_INFO_PIXEL_ARRAY_SIZE.
*The pre-correction active array may be smaller than the full pixel array, since the * full array may include black calibration pixels or other inactive regions.
+ *The data representation is int[4]
, which maps to (left, top, width, height)
.
The data representation is int[4], which maps to (left, top, width, height).
*The coordinate system is that of ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE, with
* (0, 0)
being the top-left pixel of the active array.
Only available if ACAMERA_STATISTICS_FACE_DETECT_MODE != OFF
+ *Only available if ACAMERA_STATISTICS_FACE_DETECT_MODE != OFF
+ * The data representation is int[4]
, which maps to (left, top, width, height)
.
See the individual level enums for full descriptions of the supported capabilities. The * ACAMERA_REQUEST_AVAILABLE_CAPABILITIES entry describes the device's capabilities at a * finer-grain level, if needed. In addition, many controls have their available settings or - * ranges defined in individual metadata tag entries in this document.
+ * ranges defined in individual entries from {@link ACameraManager_getCameraCharacteristics }. *Some features are not part of any particular hardware level or capability and must be * queried separately. These include:
*Edge enhancement is applied at different levels for different output streams, - * based on resolution. Streams at maximum recording resolution (see {@link - * ACameraDevice_createCaptureSession}) or below have - * edge enhancement applied, while higher-resolution streams have no edge enhancement - * applied. The level of edge enhancement for low-resolution streams is tuned so that - * frame rate is not impacted, and the quality is equal to or better than FAST (since it - * is only applied to lower-resolution outputs, quality may improve from FAST).
+ *Edge enhancement is applied at different + * levels for different output streams, based on resolution. Streams at maximum recording + * resolution (see {@link ACameraDevice_createCaptureSession }) + * or below have edge enhancement applied, while higher-resolution streams have no edge + * enhancement applied. The level of edge enhancement for low-resolution streams is tuned + * so that frame rate is not impacted, and the quality is equal to or better than FAST + * (since it is only applied to lower-resolution outputs, quality may improve from FAST).
*This mode is intended to be used by applications operating in a zero-shutter-lag mode * with YUV or PRIVATE reprocessing, where the application continuously captures * high-resolution intermediate buffers into a circular buffer, from which a final image is @@ -6412,13 +6392,12 @@ typedef enum acamera_metadata_enum_acamera_noise_reduction_mode { /** *
Noise reduction is applied at different levels for different output streams, - * based on resolution. Streams at maximum recording resolution (see {@link - * ACameraDevice_createCaptureSession}) or below have noise - * reduction applied, while higher-resolution streams have MINIMAL (if supported) or no - * noise reduction applied (if MINIMAL is not supported.) The degree of noise reduction - * for low-resolution streams is tuned so that frame rate is not impacted, and the quality - * is equal to or better than FAST (since it is only applied to lower-resolution outputs, - * quality may improve from FAST).
+ * based on resolution. Streams at maximum recording resolution (see {@link ACameraDevice_createCaptureSession }) + * or below have noise reduction applied, while higher-resolution streams have MINIMAL (if + * supported) or no noise reduction applied (if MINIMAL is not supported.) The degree of + * noise reduction for low-resolution streams is tuned so that frame rate is not impacted, + * and the quality is equal to or better than FAST (since it is only applied to + * lower-resolution outputs, quality may improve from FAST). *This mode is intended to be used by applications operating in a zero-shutter-lag mode * with YUV or PRIVATE reprocessing, where the application continuously captures * high-resolution intermediate buffers into a circular buffer, from which a final image is @@ -6635,18 +6614,16 @@ typedef enum acamera_metadata_enum_acamera_request_available_capabilities { * to FAST. Additionally, maximum-resolution images can be captured at >= 10 frames * per second. Here, 'high resolution' means at least 8 megapixels, or the maximum * resolution of the device, whichever is smaller.
- *More specifically, this means that at least one output {@link - * AIMAGE_FORMAT_YUV_420_888} size listed in - * {@link ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS} is larger or equal to the - * 'high resolution' defined above, and can be captured at at least 20 fps. - * For the largest {@link AIMAGE_FORMAT_YUV_420_888} size listed in - * {@link ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS}, camera device can capture this - * size for at least 10 frames per second. - * Also the ACAMERA_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES entry lists at least one FPS range - * where the minimum FPS is >= 1 / minimumFrameDuration for the largest YUV_420_888 size.
- *If the device supports the {@link AIMAGE_FORMAT_RAW10}, {@link - * AIMAGE_FORMAT_RAW12}, then those can also be captured at the same rate - * as the maximum-size YUV_420_888 resolution is.
+ *More specifically, this means that at least one output {@link AIMAGE_FORMAT_YUV_420_888 } size listed in + * {@link ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS } + * is larger or equal to the 'high resolution' defined above, and can be captured at at + * least 20 fps. For the largest {@link AIMAGE_FORMAT_YUV_420_888 } size listed in + * {@link ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS }, + * camera device can capture this size for at least 10 frames per second. Also the + * ACAMERA_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES entry lists at least one FPS range where + * the minimum FPS is >= 1 / minimumFrameDuration for the largest YUV_420_888 size.
+ *If the device supports the {@link AIMAGE_FORMAT_RAW10 }, {@link AIMAGE_FORMAT_RAW12 }, then those can also be + * captured at the same rate as the maximum-size YUV_420_888 resolution is.
*In addition, the ACAMERA_SYNC_MAX_LATENCY field is guaranteed to have a value between 0
* and 4, inclusive. ACAMERA_CONTROL_AE_LOCK_AVAILABLE and ACAMERA_CONTROL_AWB_LOCK_AVAILABLE
* are also guaranteed to be true
so burst capture with these two locks ON yields
@@ -6663,13 +6640,13 @@ typedef enum acamera_metadata_enum_acamera_request_available_capabilities {
*
The camera device can produce depth measurements from its field of view.
*This capability requires the camera device to support the following:
*Generally, depth output operates at a slower frame rate than standard color capture, * so the DEPTH16 and DEPTH_POINT_CLOUD formats will commonly have a stall duration that - * should be accounted for (see - * {@link ACAMERA_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS}). + * should be accounted for (see {@link ACAMERA_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS }). * On a device that supports both depth and color-based output, to enable smooth preview, * using a repeating burst is recommended, where a depth-output target is only included * once every N frames, where N is the ratio between preview output rate and depth output @@ -6918,8 +6894,8 @@ typedef enum acamera_metadata_enum_acamera_sensor_info_timestamp_source { /** *
Timestamps from ACAMERA_SENSOR_TIMESTAMP are in the same timebase as - * elapsedRealtimeNanos - * (or CLOCK_BOOTTIME), and they can be compared to other timestamps using that base.
+ * SystemClock#elapsedRealtimeNanos, + * and they can be compared to other timestamps using that base. * * @see ACAMERA_SENSOR_TIMESTAMP */ @@ -7104,7 +7080,7 @@ typedef enum acamera_metadata_enum_acamera_info_supported_hardware_level { *This camera device does not have enough capabilities to qualify as a FULL
device or
* better.
Only the stream configurations listed in the LEGACY
and LIMITED
tables in the
- * {@link ACameraDevice_createCaptureSession} documentation are guaranteed to be supported.
All LIMITED
devices support the BACKWARDS_COMPATIBLE
capability, indicating basic
* support for color image capture. The only exception is that the device may
* alternatively support only the DEPTH_OUTPUT
capability, if it can only output depth
@@ -7130,7 +7106,7 @@ typedef enum acamera_metadata_enum_acamera_info_supported_hardware_level {
/**
*
This camera device is capable of supporting advanced imaging applications.
*The stream configurations listed in the FULL
, LEGACY
and LIMITED
tables in the
- * {@link ACameraDevice_createCaptureSession} documentation are guaranteed to be supported.
A FULL
device will support below capabilities:
BURST_CAPTURE
capability (ACAMERA_REQUEST_AVAILABLE_CAPABILITIES contains
@@ -7157,8 +7133,7 @@ typedef enum acamera_metadata_enum_acamera_info_supported_hardware_level {
/**
* This camera device is running in backward compatibility mode.
- *Only the stream configurations listed in the LEGACY
table in the {@link
- * ACameraDevice_createCaptureSession} documentation are supported.
Only the stream configurations listed in the LEGACY
table in the {@link ACameraDevice_createCaptureSession createCaptureSession} documentation are supported.
A LEGACY
device does not support per-frame control, manual sensor control, manual
* post-processing, arbitrary cropping regions, and has relaxed performance constraints.
* No additional capabilities beyond BACKWARD_COMPATIBLE
will ever be listed by a
@@ -7179,9 +7154,7 @@ typedef enum acamera_metadata_enum_acamera_info_supported_hardware_level {
*
This camera device is capable of YUV reprocessing and RAW data capture, in addition to * FULL-level capabilities.
*The stream configurations listed in the LEVEL_3
, RAW
, FULL
, LEGACY
and
- * LIMITED
tables in the {@link
- * ACameraDevice_createCaptureSession}
- * documentation are guaranteed to be supported.
LIMITED
tables in the {@link ACameraDevice_createCaptureSession createCaptureSession} documentation are guaranteed to be supported.
* The following additional capabilities are guaranteed to be supported:
*YUV_REPROCESSING
capability (ACAMERA_REQUEST_AVAILABLE_CAPABILITIES contains