Project import
diff --git a/media/CleanSpec.mk b/media/CleanSpec.mk
new file mode 100644
index 0000000..1096f5d
--- /dev/null
+++ b/media/CleanSpec.mk
@@ -0,0 +1,59 @@
+# Copyright (C) 2007 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# If you don't need to do a full clean build but would like to touch
+# a file or delete some intermediate files, add a clean step to the end
+# of the list.  These steps will only be run once, if they haven't been
+# run before.
+#
+# E.g.:
+#     $(call add-clean-step, touch -c external/sqlite/sqlite3.h)
+#     $(call add-clean-step, rm -rf $(PRODUCT_OUT)/obj/STATIC_LIBRARIES/libz_intermediates)
+#
+# Always use "touch -c" and "rm -f" or "rm -rf" to gracefully deal with
+# files that are missing or have been moved.
+#
+# Use $(PRODUCT_OUT) to get to the "out/target/product/blah/" directory.
+# Use $(OUT_DIR) to refer to the "out" directory.
+#
+# If you need to re-do something that's already mentioned, just copy
+# the command and add it to the bottom of the list.  E.g., if a change
+# that you made last week required touching a file and a change you
+# made today requires touching the same file, just copy the old
+# touch step and add it to the end of the list.
+#
+# ************************************************
+# NEWER CLEAN STEPS MUST BE AT THE END OF THE LIST
+# ************************************************
+
+# For example:
+#$(call add-clean-step, rm -rf $(OUT_DIR)/target/common/obj/APPS/AndroidTests_intermediates)
+#$(call add-clean-step, rm -rf $(OUT_DIR)/target/common/obj/JAVA_LIBRARIES/core_intermediates)
+#$(call add-clean-step, find $(OUT_DIR) -type f -name "IGTalkSession*" -print0 | xargs -0 rm -f)
+#$(call add-clean-step, rm -rf $(PRODUCT_OUT)/data/*)
+$(call add-clean-step, rm -f $(PRODUCT_OUT)/system/lib/libOpenMAXAL.so)
+$(call add-clean-step, rm -f $(PRODUCT_OUT)/system/lib/libOpenSLES.so)
+$(call add-clean-step, rm -rf $(OUT_DIR)/target/common/obj/JAVA_LIBRARIES/filterfw_intermediates)
+$(call add-clean-step, rm -rf $(OUT_DIR)/target/common/obj/JAVA_LIBRARIES/filterpack_imageproc_intermediates)
+$(call add-clean-step, rm -rf $(OUT_DIR)/target/common/obj/JAVA_LIBRARIES/filterpack_text_intermediates)
+$(call add-clean-step, rm -rf $(OUT_DIR)/target/common/obj/JAVA_LIBRARIES/filterpack_ui_intermediates)
+$(call add-clean-step, rm -rf $(OUT_DIR)/target/common/obj/JAVA_LIBRARIES/filterpack_videosrc_intermediates)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/obj/SHARED_LIBRARIES/libaudioutils_intermediates)
+$(call add-clean-step, rm -rf $(OUT_DIR)/target/common/obj/JAVA_LIBRARIES/filterfw_intermediates)
+$(call add-clean-step, rm -f $(PRODUCT_OUT)/system/framework/filterfw.jar)
+
+# ************************************************
+# NEWER CLEAN STEPS MUST BE AT THE END OF THE LIST
+# ************************************************
diff --git a/media/MODULE_LICENSE_APACHE2 b/media/MODULE_LICENSE_APACHE2
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/media/MODULE_LICENSE_APACHE2
diff --git a/media/NOTICE b/media/NOTICE
new file mode 100644
index 0000000..d645695
--- /dev/null
+++ b/media/NOTICE
@@ -0,0 +1,202 @@
+
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "[]"
+      replaced with your own identifying information. (Don't include
+      the brackets!)  The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright [yyyy] [name of copyright owner]
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
diff --git a/media/alsa_utils/Android.mk b/media/alsa_utils/Android.mk
new file mode 100644
index 0000000..4b84a93
--- /dev/null
+++ b/media/alsa_utils/Android.mk
@@ -0,0 +1,33 @@
+# Copyright (C) 2015 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+LOCAL_PATH := $(call my-dir)
+
+include $(CLEAR_VARS)
+
+LOCAL_MODULE := libalsautils
+LOCAL_SRC_FILES := \
+	alsa_device_profile.c \
+	alsa_device_proxy.c \
+	alsa_logging.c \
+	alsa_format.c
+LOCAL_C_INCLUDES += \
+	external/tinyalsa/include
+LOCAL_EXPORT_C_INCLUDE_DIRS := system/media/alsa_utils/include
+LOCAL_SHARED_LIBRARIES := liblog libcutils libtinyalsa libaudioutils
+LOCAL_MODULE_TAGS := optional
+LOCAL_CFLAGS := -Wno-unused-parameter
+
+include $(BUILD_SHARED_LIBRARY)
+
diff --git a/media/alsa_utils/alsa_device_profile.c b/media/alsa_utils/alsa_device_profile.c
new file mode 100644
index 0000000..3ee0f4b
--- /dev/null
+++ b/media/alsa_utils/alsa_device_profile.c
@@ -0,0 +1,554 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "alsa_device_profile"
+/*#define LOG_NDEBUG 0*/
+/*#define LOG_PCM_PARAMS 0*/
+
+#include <errno.h>
+#include <inttypes.h>
+#include <stdint.h>
+#include <stdlib.h>
+#include <cutils/properties.h>
+
+#include <log/log.h>
+
+#include "include/alsa_device_profile.h"
+#include "include/alsa_format.h"
+#include "include/alsa_logging.h"
+
+#define ARRAY_SIZE(a) (sizeof(a) / sizeof((a)[0]))
+
+#define PERIOD_SIZE_US (5 * 1000)
+
+#define DEFAULT_PERIOD_SIZE 1024
+
+static const char * const format_string_map[] = {
+    "AUDIO_FORMAT_PCM_16_BIT",      /* "PCM_FORMAT_S16_LE", */
+    "AUDIO_FORMAT_PCM_32_BIT",      /* "PCM_FORMAT_S32_LE", */
+    "AUDIO_FORMAT_PCM_8_BIT",       /* "PCM_FORMAT_S8", */
+    "AUDIO_FORMAT_PCM_8_24_BIT",    /* "PCM_FORMAT_S24_LE", */
+    "AUDIO_FORMAT_PCM_24_BIT_PACKED"/* "PCM_FORMAT_S24_3LE" */
+};
+
+extern int8_t const pcm_format_value_map[50];
+
+/* Sort these in terms of preference (best first).
+   192 kHz is not first because it requires significant resources for possibly worse
+   quality and driver instability (depends on device).
+   The order here determines the default sample rate for the device.
+   AudioPolicyManager may not respect this ordering when picking sample rates.
+   Update MAX_PROFILE_SAMPLE_RATES after changing the array size.
+
+   TODO: remove 32000, 22050, 12000, 11025?  Each sample rate check
+   requires opening the device which may cause pops. */
+static const unsigned std_sample_rates[] =
+    {96000, 88200, 192000, 176400, 48000, 44100, 32000, 24000, 22050, 16000, 12000, 11025, 8000};
+
+static void profile_reset(alsa_device_profile* profile)
+{
+    profile->card = profile->device = -1;
+
+    /* terminate the attribute arrays with invalid values */
+    profile->formats[0] = PCM_FORMAT_INVALID;
+    profile->sample_rates[0] = 0;
+    profile->channel_counts[0] = 0;
+
+    profile->min_period_size = profile->max_period_size = 0;
+    profile->min_channel_count = profile->max_channel_count = DEFAULT_CHANNEL_COUNT;
+
+    profile->is_valid = false;
+}
+
+void profile_init(alsa_device_profile* profile, int direction)
+{
+    profile->direction = direction;
+    profile_reset(profile);
+}
+
+bool profile_is_initialized(alsa_device_profile* profile)
+{
+    return profile->card >= 0 && profile->device >= 0;
+}
+
+bool profile_is_valid(alsa_device_profile* profile) {
+    return profile->is_valid;
+}
+
+bool profile_is_cached_for(alsa_device_profile* profile, int card, int device) {
+    return card == profile->card && device == profile->device;
+}
+
+void profile_decache(alsa_device_profile* profile) {
+    profile_reset(profile);
+}
+
+/*
+ * Returns the supplied value rounded up to the next multiple of 16
+ */
+static unsigned int round_to_16_mult(unsigned int size)
+{
+    return (size + 15) & ~15;   /* 0xFFFFFFF0; */
+}
+
+/*
+ * Returns the system defined minimum period size based on the supplied sample rate.
+ */
+unsigned profile_calc_min_period_size(alsa_device_profile* profile, unsigned sample_rate)
+{
+    ALOGV("profile_calc_min_period_size(%p, rate:%d)", profile, sample_rate);
+    if (profile == NULL) {
+        return DEFAULT_PERIOD_SIZE;
+    } else {
+        unsigned period_us = property_get_int32("ro.audio.usb.period_us", PERIOD_SIZE_US);
+        unsigned num_sample_frames = ((uint64_t)sample_rate * period_us) / 1000000;
+
+        if (num_sample_frames < profile->min_period_size) {
+            num_sample_frames = profile->min_period_size;
+        }
+        return round_to_16_mult(num_sample_frames);
+    }
+}
+
+unsigned int profile_get_period_size(alsa_device_profile* profile, unsigned sample_rate)
+{
+    unsigned int period_size = profile_calc_min_period_size(profile, sample_rate);
+    ALOGV("profile_get_period_size(rate:%d) = %d", sample_rate, period_size);
+    return period_size;
+}
+
+/*
+ * Sample Rate
+ */
+unsigned profile_get_default_sample_rate(alsa_device_profile* profile)
+{
+    /*
+     * TODO this won't be right in general. we should store a preferred rate as we are scanning.
+     * But right now it returns the first (most-preferred) rate found while scanning, not necessarily the highest.
+     */
+    return profile_is_valid(profile) ? profile->sample_rates[0] : DEFAULT_SAMPLE_RATE;
+}
+
+bool profile_is_sample_rate_valid(alsa_device_profile* profile, unsigned rate)
+{
+    if (profile_is_valid(profile)) {
+        size_t index;
+        for (index = 0; profile->sample_rates[index] != 0; index++) {
+            if (profile->sample_rates[index] == rate) {
+                return true;
+            }
+        }
+
+        return false;
+    } else {
+        return rate == DEFAULT_SAMPLE_RATE;
+    }
+}
+
+/*
+ * Format
+ */
+enum pcm_format profile_get_default_format(alsa_device_profile* profile)
+{
+    /*
+     * TODO this won't be right in general. we should store a preferred format as we are scanning.
+     */
+    return profile_is_valid(profile) ? profile->formats[0] : DEFAULT_SAMPLE_FORMAT;
+}
+
+bool profile_is_format_valid(alsa_device_profile* profile, enum pcm_format fmt) {
+    if (profile_is_valid(profile)) {
+        size_t index;
+        for (index = 0; profile->formats[index] != PCM_FORMAT_INVALID; index++) {
+            if (profile->formats[index] == fmt) {
+                return true;
+            }
+        }
+
+        return false;
+    } else {
+        return fmt == DEFAULT_SAMPLE_FORMAT;
+    }
+}
+
+/*
+ * Channels
+ */
+unsigned profile_get_default_channel_count(alsa_device_profile* profile)
+{
+    return profile_is_valid(profile) ? profile->channel_counts[0] : DEFAULT_CHANNEL_COUNT;
+}
+
+bool profile_is_channel_count_valid(alsa_device_profile* profile, unsigned count)
+{
+    if (profile_is_initialized(profile)) {
+        return count >= profile->min_channel_count && count <= profile->max_channel_count;
+    } else {
+        return count == DEFAULT_CHANNEL_COUNT;
+    }
+}
+
+static bool profile_test_sample_rate(alsa_device_profile* profile, unsigned rate)
+{
+    struct pcm_config config = profile->default_config;
+    config.rate = rate;
+
+    bool works = false; /* let's be pessimistic */
+    struct pcm * pcm = pcm_open(profile->card, profile->device,
+                                profile->direction, &config);
+
+    if (pcm != NULL) {
+        works = pcm_is_ready(pcm);
+        pcm_close(pcm);
+    }
+
+    return works;
+}
+
+static unsigned profile_enum_sample_rates(alsa_device_profile* profile, unsigned min, unsigned max)
+{
+    unsigned num_entries = 0;
+    unsigned index;
+
+    for (index = 0; index < ARRAY_SIZE(std_sample_rates) &&
+                    num_entries < ARRAY_SIZE(profile->sample_rates) - 1;
+         index++) {
+        if (std_sample_rates[index] >= min && std_sample_rates[index] <= max
+                && profile_test_sample_rate(profile, std_sample_rates[index])) {
+            profile->sample_rates[num_entries++] = std_sample_rates[index];
+        }
+    }
+    profile->sample_rates[num_entries] = 0; /* terminate */
+    return num_entries; /* return # of supported rates */
+}
+
+static unsigned profile_enum_sample_formats(alsa_device_profile* profile, struct pcm_mask * mask)
+{
+    const int num_slots = ARRAY_SIZE(mask->bits);
+    const int bits_per_slot = sizeof(mask->bits[0]) * 8;
+
+    const int table_size = ARRAY_SIZE(pcm_format_value_map);
+
+    int slot_index, bit_index, table_index;
+    table_index = 0;
+    int num_written = 0;
+    for (slot_index = 0; slot_index < num_slots && table_index < table_size;
+            slot_index++) {
+        unsigned bit_mask = 1;
+        for (bit_index = 0;
+                bit_index < bits_per_slot && table_index < table_size;
+                bit_index++) {
+            if ((mask->bits[slot_index] & bit_mask) != 0) {
+                enum pcm_format format = pcm_format_value_map[table_index];
+                /* Never return invalid (unrecognized) or 8-bit */
+                if (format != PCM_FORMAT_INVALID && format != PCM_FORMAT_S8) {
+                    profile->formats[num_written++] = format;
+                    if (num_written == ARRAY_SIZE(profile->formats) - 1) {
+                        /* leave at least one PCM_FORMAT_INVALID at the end */
+                        goto end;
+                    }
+                }
+            }
+            bit_mask <<= 1;
+            table_index++;
+        }
+    }
+end:
+    profile->formats[num_written] = PCM_FORMAT_INVALID;
+    return num_written;
+}
+
+static unsigned profile_enum_channel_counts(alsa_device_profile* profile, unsigned min,
+        unsigned max)
+{
+    /* modify alsa_device_profile.h if you change the std_channel_counts[] array. */
+    static const unsigned std_channel_counts[] = {8, 7, 6, 5, 4, 3, 2, 1};
+
+    unsigned num_counts = 0;
+    unsigned index;
+    /* TODO write a profile_test_channel_count() */
+    /* Ensure there is at least one invalid channel count to terminate the channel counts array */
+    for (index = 0; index < ARRAY_SIZE(std_channel_counts) &&
+                    num_counts < ARRAY_SIZE(profile->channel_counts) - 1;
+         index++) {
+        /* TODO Do we want a channel counts test? */
+        if (std_channel_counts[index] >= min && std_channel_counts[index] <= max /* &&
+            profile_test_channel_count(profile, channel_counts[index])*/) {
+            profile->channel_counts[num_counts++] = std_channel_counts[index];
+        }
+    }
+    // if we have no match with the standard counts, we use the largest (preferred) std count.
+    if (num_counts == 0) {
+        ALOGW("usb device does not match std channel counts, setting to %d",
+                std_channel_counts[0]);
+        profile->channel_counts[num_counts++] = std_channel_counts[0];
+    }
+    profile->channel_counts[num_counts] = 0;
+    return num_counts; /* return # of supported counts */
+}
+
+/*
+ * Reads and decodes configuration info from the specified ALSA card/device.
+ */
+static int read_alsa_device_config(alsa_device_profile * profile, struct pcm_config * config)
+{
+    ALOGV("usb:audio_hw - read_alsa_device_config(c:%d d:%d t:0x%X)",
+          profile->card, profile->device, profile->direction);
+
+    if (profile->card < 0 || profile->device < 0) {
+        return -EINVAL;
+    }
+
+    struct pcm_params * alsa_hw_params =
+        pcm_params_get(profile->card, profile->device, profile->direction);
+    if (alsa_hw_params == NULL) {
+        return -EINVAL;
+    }
+
+    profile->min_period_size = pcm_params_get_min(alsa_hw_params, PCM_PARAM_PERIOD_SIZE);
+    profile->max_period_size = pcm_params_get_max(alsa_hw_params, PCM_PARAM_PERIOD_SIZE);
+
+    profile->min_channel_count = pcm_params_get_min(alsa_hw_params, PCM_PARAM_CHANNELS);
+    profile->max_channel_count = pcm_params_get_max(alsa_hw_params, PCM_PARAM_CHANNELS);
+
+    int ret = 0;
+
+    /*
+     * This Logging will be useful when testing new USB devices.
+     */
+#ifdef LOG_PCM_PARAMS
+    log_pcm_params(alsa_hw_params);
+#endif
+
+    config->channels = pcm_params_get_min(alsa_hw_params, PCM_PARAM_CHANNELS);
+    config->rate = pcm_params_get_min(alsa_hw_params, PCM_PARAM_RATE);
+    config->period_size = profile_calc_min_period_size(profile, config->rate);
+    config->period_count = pcm_params_get_min(alsa_hw_params, PCM_PARAM_PERIODS);
+    config->format = get_pcm_format_for_mask(pcm_params_get_mask(alsa_hw_params, PCM_PARAM_FORMAT));
+#ifdef LOG_PCM_PARAMS
+    log_pcm_config(config, "read_alsa_device_config");
+#endif
+    if (config->format == PCM_FORMAT_INVALID) {
+        ret = -EINVAL;
+    }
+
+    pcm_params_free(alsa_hw_params);
+
+    return ret;
+}
+
+bool profile_read_device_info(alsa_device_profile* profile)
+{
+    if (!profile_is_initialized(profile)) {
+        return false;
+    }
+
+    /* let's get some defaults */
+    read_alsa_device_config(profile, &profile->default_config);
+    ALOGV("default_config chans:%d rate:%d format:%d count:%d size:%d",
+          profile->default_config.channels, profile->default_config.rate,
+          profile->default_config.format, profile->default_config.period_count,
+          profile->default_config.period_size);
+
+    struct pcm_params * alsa_hw_params = pcm_params_get(profile->card,
+                                                        profile->device,
+                                                        profile->direction);
+    if (alsa_hw_params == NULL) {
+        return false;
+    }
+
+    /* Formats */
+    struct pcm_mask * format_mask = pcm_params_get_mask(alsa_hw_params, PCM_PARAM_FORMAT);
+    profile_enum_sample_formats(profile, format_mask);
+
+    /* Channels */
+    profile_enum_channel_counts(
+            profile, pcm_params_get_min(alsa_hw_params, PCM_PARAM_CHANNELS),
+            pcm_params_get_max(alsa_hw_params, PCM_PARAM_CHANNELS));
+
+    /* Sample Rates */
+    profile_enum_sample_rates(
+            profile, pcm_params_get_min(alsa_hw_params, PCM_PARAM_RATE),
+            pcm_params_get_max(alsa_hw_params, PCM_PARAM_RATE));
+
+    profile->is_valid = true;
+
+    return true;
+}
+
+char * profile_get_sample_rate_strs(alsa_device_profile* profile)
+{
+    /* if we assume that rate strings are about 5 characters (48000 is 5), plus ~1 for a
+     * delimiter "|" this buffer has room for about 22 rate strings which seems like
+     * way too much, but it's a stack variable so only temporary.
+     */
+    char buffer[128];
+    buffer[0] = '\0';
+    size_t buffSize = ARRAY_SIZE(buffer);
+    size_t curStrLen = 0;
+
+    char numBuffer[32];
+
+    size_t numEntries = 0;
+    size_t index;
+    for (index = 0; profile->sample_rates[index] != 0; index++) {
+        snprintf(numBuffer, sizeof(numBuffer), "%u", profile->sample_rates[index]);
+        // account for both the null, and potentially the bar.
+        if (buffSize - curStrLen < strlen(numBuffer) + (numEntries != 0 ? 2 : 1)) {
+            /* we don't have room for another, so bail at this point rather than
+             * return a malformed rate string
+             */
+            break;
+        }
+        if (numEntries++ != 0) {
+            strlcat(buffer, "|", buffSize);
+        }
+        curStrLen = strlcat(buffer, numBuffer, buffSize);
+    }
+
+    return strdup(buffer);
+}
+
+char * profile_get_format_strs(alsa_device_profile* profile)
+{
+    /* Format names run about 24 characters ("AUDIO_FORMAT_PCM_16_BIT" is 23)
+     * plus one for the "|" delimiter, so 256 bytes of stack scratch is ample
+     * for the handful of formats a profile can hold.
+     */
+    char scratch[256];
+    const size_t scratch_size = ARRAY_SIZE(scratch);
+
+    scratch[0] = '\0';
+    size_t used = 0;
+    size_t count = 0;
+
+    size_t i;
+    for (i = 0; profile->formats[i] != PCM_FORMAT_INVALID; i++) {
+        const char* name = format_string_map[profile->formats[i]];
+        /* need room for the name, the terminating null, and (after the first
+         * entry) a '|' delimiter */
+        if (scratch_size - used < strlen(name) + (count != 0 ? 2 : 1)) {
+            /* out of room; stop here rather than return a malformed
+             * format string */
+            break;
+        }
+        if (count++ != 0) {
+            strlcat(scratch, "|", scratch_size);
+        }
+        used = strlcat(scratch, name, scratch_size);
+    }
+
+    return strdup(scratch);
+}
+
+char * profile_get_channel_count_strs(alsa_device_profile* profile)
+{
+    // FIXME implicit fixed channel count assumption here (FCC_8).
+    // we use only the canonical even number channel position masks.
+    static const char * const out_chans_strs[] = {
+        /* 0 */"AUDIO_CHANNEL_NONE", /* will never be taken as this is a terminator */
+        /* 1 */"AUDIO_CHANNEL_OUT_MONO",
+        /* 2 */"AUDIO_CHANNEL_OUT_STEREO",
+        /* 3 */ /* "AUDIO_CHANNEL_OUT_STEREO|AUDIO_CHANNEL_OUT_FRONT_CENTER" */ NULL,
+        /* 4 */"AUDIO_CHANNEL_OUT_QUAD",
+        /* 5 */ /* "AUDIO_CHANNEL_OUT_QUAD|AUDIO_CHANNEL_OUT_FRONT_CENTER" */ NULL,
+        /* 6 */"AUDIO_CHANNEL_OUT_5POINT1",
+        /* 7 */ /* "AUDIO_CHANNEL_OUT_5POINT1|AUDIO_CHANNEL_OUT_BACK_CENTER" */ NULL,
+        /* 8 */"AUDIO_CHANNEL_OUT_7POINT1",
+        /* channel counts greater than this not considered */
+    };
+
+    static const char * const in_chans_strs[] = {
+        /* 0 */"AUDIO_CHANNEL_NONE", /* will never be taken as this is a terminator */
+        /* 1 */"AUDIO_CHANNEL_IN_MONO",
+        /* 2 */"AUDIO_CHANNEL_IN_STEREO",
+        /* channel counts greater than this not considered */
+    };
+
+    static const char * const index_chans_strs[] = {
+        /* 0 */"AUDIO_CHANNEL_NONE", /* will never be taken as this is a terminator */
+        /* 1 */"AUDIO_CHANNEL_INDEX_MASK_1",
+        /* 2 */"AUDIO_CHANNEL_INDEX_MASK_2",
+        /* 3 */"AUDIO_CHANNEL_INDEX_MASK_3",
+        /* 4 */"AUDIO_CHANNEL_INDEX_MASK_4",
+        /* 5 */"AUDIO_CHANNEL_INDEX_MASK_5",
+        /* 6 */"AUDIO_CHANNEL_INDEX_MASK_6",
+        /* 7 */"AUDIO_CHANNEL_INDEX_MASK_7",
+        /* 8 */"AUDIO_CHANNEL_INDEX_MASK_8",
+    };
+
+    const bool isOutProfile = profile->direction == PCM_OUT;
+
+    const char * const * const chans_strs = isOutProfile ? out_chans_strs : in_chans_strs;
+    const size_t chans_strs_size =
+            isOutProfile ? ARRAY_SIZE(out_chans_strs) : ARRAY_SIZE(in_chans_strs);
+
+    /*
+     * If we assume each channel string is 26 chars ("AUDIO_CHANNEL_INDEX_MASK_8" is 26) + 1
+     * for the "|" delimiter, then we allocate room for 16 strings.
+     */
+    char buffer[27 * 16 + 1]; /* caution, may need to be expanded */
+    buffer[0] = '\0';
+    size_t buffSize = ARRAY_SIZE(buffer);
+    size_t curStrLen = 0;
+
+    /* We currently support MONO and STEREO and always report STEREO, but some (many)
+     * USB audio devices may only announce support for MONO (a headset mic for example)
+     * or for the total number of output channels. So, if the device itself doesn't
+     * explicitly support STEREO we still include it in the channel config strings we
+     * are generating.
+     *
+     * The MONO and STEREO positional channel masks are provided for legacy compatibility.
+     * For multichannel (n > 2) we only expose channel index masks.
+     */
+    // Always support stereo
+    curStrLen = strlcat(buffer, chans_strs[2], buffSize);
+
+    size_t index;
+    unsigned channel_count;
+    for (index = 0;
+         (channel_count = profile->channel_counts[index]) != 0;
+         index++) {
+
+        /* we only show positional information for mono (stereo handled already) */
+        if (channel_count < chans_strs_size
+                && chans_strs[channel_count] != NULL
+                && channel_count < 2 /* positional only for fewer than 2 channels */) {
+            // account for the '|' and the '\0'
+            if (buffSize - curStrLen < strlen(chans_strs[channel_count]) + 2) {
+                /* we don't have room for another, so bail at this point rather than
+                 * return a malformed rate string
+                 */
+                break;
+            }
+
+            strlcat(buffer, "|", buffSize);
+            curStrLen = strlcat(buffer, chans_strs[channel_count], buffSize);
+        }
+
+        // handle channel index masks for both input and output
+        // Guard the table lookup: a device reporting more than 8 channels would
+        // otherwise index past the end of index_chans_strs (undefined behavior).
+        if (channel_count >= ARRAY_SIZE(index_chans_strs)) {
+            continue;
+        }
+        // +2 to account for the '|' and the '\0'
+        if (buffSize - curStrLen < strlen(index_chans_strs[channel_count]) + 2) {
+            /* we don't have room for another, so bail at this point rather than
+             * return a malformed rate string
+             */
+            break;
+        }
+
+        strlcat(buffer, "|", buffSize);
+        curStrLen = strlcat(buffer, index_chans_strs[channel_count], buffSize);
+    }
+
+    return strdup(buffer);
+}
diff --git a/media/alsa_utils/alsa_device_proxy.c b/media/alsa_utils/alsa_device_proxy.c
new file mode 100644
index 0000000..ac948f1
--- /dev/null
+++ b/media/alsa_utils/alsa_device_proxy.c
@@ -0,0 +1,205 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "alsa_device_proxy"
+/*#define LOG_NDEBUG 0*/
+/*#define LOG_PCM_PARAMS 0*/
+
+#include <log/log.h>
+
+#include <errno.h>
+
+#include "include/alsa_device_proxy.h"
+
+#include "include/alsa_logging.h"
+
+#define DEFAULT_PERIOD_SIZE     1024
+#define DEFAULT_PERIOD_COUNT    2
+
+#define ARRAY_SIZE(a) (sizeof(a) / sizeof((a)[0]))
+
+/* Bytes per sample for each supported tinyalsa PCM format, indexed by
+ * enum pcm_format value (the comment on each entry names the enumerator
+ * expected at that index — confirm ordering against asoundlib.h). */
+static const unsigned format_byte_size_map[] = {
+    2, /* PCM_FORMAT_S16_LE */
+    4, /* PCM_FORMAT_S32_LE */
+    1, /* PCM_FORMAT_S8 */
+    4, /* PCM_FORMAT_S24_LE */
+    3, /* PCM_FORMAT_S24_3LE */
+};
+
+/*
+ * Resolves the requested pcm_config against the device profile and stores the
+ * result in proxy->alsa_config; also computes the byte size of one frame.
+ * Does not open the stream (see proxy_open()).
+ */
+void proxy_prepare(alsa_device_proxy * proxy, alsa_device_profile* profile,
+                   struct pcm_config * config)
+{
+    ALOGV("proxy_prepare(c:%d, d:%d)", profile->card, profile->device);
+
+    proxy->profile = profile;
+
+#ifdef LOG_PCM_PARAMS
+    log_pcm_config(config, "proxy_setup()");
+#endif
+
+    /* For each attribute use the requested value when the profile supports it,
+     * otherwise fall back to the profile's default. */
+    proxy->alsa_config.format =
+        config->format != PCM_FORMAT_INVALID && profile_is_format_valid(profile, config->format)
+            ? config->format : profile->default_config.format;
+    proxy->alsa_config.rate =
+        config->rate != 0 && profile_is_sample_rate_valid(profile, config->rate)
+            ? config->rate : profile->default_config.rate;
+    proxy->alsa_config.channels =
+        config->channels != 0 && profile_is_channel_count_valid(profile, config->channels)
+            ? config->channels : profile->default_config.channels;
+
+    proxy->alsa_config.period_count = profile->default_config.period_count;
+    proxy->alsa_config.period_size =
+            profile_get_period_size(proxy->profile, proxy->alsa_config.rate);
+
+    // Hack for USB accessory audio.
+    // Here we set the correct value for period_count if tinyalsa fails to get it from the
+    // f_audio_source driver.
+    if (proxy->alsa_config.period_count == 0) {
+        proxy->alsa_config.period_count = 4;
+    }
+
+    proxy->pcm = NULL;
+    // Compute the frame size from the format actually selected above. Using the
+    // raw config->format here was wrong: when the request is invalid the stream
+    // is opened with the profile default, and frame_size would then disagree
+    // with alsa_config (skewing the 'transferred' accounting in proxy_write()).
+    const enum pcm_format selected_format = proxy->alsa_config.format;
+    if (selected_format >= 0
+            && (size_t)selected_format < ARRAY_SIZE(format_byte_size_map)) {
+        proxy->frame_size =
+                format_byte_size_map[selected_format] * proxy->alsa_config.channels;
+    } else {
+        proxy->frame_size = 1;
+    }
+}
+
+/*
+ * Opens the PCM stream described by proxy->alsa_config on the profile's
+ * card/device. Returns 0 on success, -EINVAL for an uncached (negative)
+ * card/device, or -ENOMEM when the stream cannot be opened or is not ready.
+ */
+int proxy_open(alsa_device_proxy * proxy)
+{
+    alsa_device_profile* profile = proxy->profile;
+    ALOGV("proxy_open(card:%d device:%d %s)", profile->card, profile->device,
+          profile->direction == PCM_OUT ? "PCM_OUT" : "PCM_IN");
+
+    if (profile->card < 0 || profile->device < 0) {
+        return -EINVAL;
+    }
+
+    proxy->pcm = NULL;
+    struct pcm * stream = pcm_open(profile->card, profile->device,
+            profile->direction | PCM_MONOTONIC, &proxy->alsa_config);
+    if (stream == NULL) {
+        return -ENOMEM;
+    }
+
+    if (!pcm_is_ready(stream)) {
+        ALOGE("  proxy_open() pcm_open() failed: %s", pcm_get_error(stream));
+#if defined(LOG_PCM_PARAMS)
+        log_pcm_config(&proxy->alsa_config, "config");
+#endif
+        pcm_close(stream);
+        return -ENOMEM;
+    }
+
+    proxy->pcm = stream;
+    return 0;
+}
+
+/* Closes the PCM stream if one is open; safe to call repeatedly. */
+void proxy_close(alsa_device_proxy * proxy)
+{
+    ALOGV("proxy_close() [pcm:%p]", proxy->pcm);
+
+    if (proxy->pcm == NULL) {
+        return;
+    }
+    pcm_close(proxy->pcm);
+    proxy->pcm = NULL;
+}
+
+/*
+ * Sample Rate
+ */
+unsigned proxy_get_sample_rate(const alsa_device_proxy * proxy)
+{
+    /* The rate actually in use, resolved against the profile in proxy_prepare(). */
+    return proxy->alsa_config.rate;
+}
+
+/*
+ * Format
+ */
+enum pcm_format proxy_get_format(const alsa_device_proxy * proxy)
+{
+    /* The format actually in use, resolved against the profile in proxy_prepare(). */
+    return proxy->alsa_config.format;
+}
+
+/*
+ * Channel Count
+ */
+unsigned proxy_get_channel_count(const alsa_device_proxy * proxy)
+{
+    /* The channel count actually in use, resolved in proxy_prepare(). */
+    return proxy->alsa_config.channels;
+}
+
+/*
+ * Other
+ */
+unsigned int proxy_get_period_size(const alsa_device_proxy * proxy)
+{
+    /* Period size (in frames) chosen for the stream in proxy_prepare(). */
+    return proxy->alsa_config.period_size;
+}
+
+unsigned int proxy_get_period_count(const alsa_device_proxy * proxy)
+{
+    /* Number of periods in the stream buffer (defaulted to 4 when the driver
+     * reports 0 — see the USB accessory hack in proxy_prepare()). */
+    return proxy->alsa_config.period_count;
+}
+
+/* Total buffer depth expressed in milliseconds at the open sample rate. */
+unsigned proxy_get_latency(const alsa_device_proxy * proxy)
+{
+    const unsigned buffer_frames =
+            proxy_get_period_size(proxy) * proxy_get_period_count(proxy);
+    return (buffer_frames * 1000) / proxy_get_sample_rate(proxy);
+}
+
+/*
+ * Reports the number of frames presented to the device and the matching
+ * timestamp. Returns 0 on success; -EPERM when no stream is open, the
+ * htimestamp query fails, or the computed frame count would be negative.
+ */
+int proxy_get_presentation_position(const alsa_device_proxy * proxy,
+        uint64_t *frames, struct timespec *timestamp)
+{
+    int ret = -EPERM; // -1
+    unsigned int avail;
+    if (proxy->pcm != NULL
+            && pcm_get_htimestamp(proxy->pcm, &avail, timestamp) == 0) {
+        const size_t kernel_buffer_size =
+                proxy->alsa_config.period_size * proxy->alsa_config.period_count;
+        if (avail > kernel_buffer_size) {
+            /* driver reported more available space than the whole buffer —
+             * treat as an error rather than produce a bogus position */
+            ALOGE("available frames(%u) > buffer size(%zu)", avail, kernel_buffer_size);
+        } else {
+            /* presented frames = frames written so far minus those still
+             * queued in the kernel buffer (buffer size - available space) */
+            int64_t signed_frames = proxy->transferred - kernel_buffer_size + avail;
+            // It is possible to compensate for additional driver and device delay
+            // by changing signed_frames.  Example:
+            // signed_frames -= 20 /* ms */ * proxy->alsa_config.rate / 1000;
+            if (signed_frames >= 0) {
+                *frames = signed_frames;
+                ret = 0;
+            }
+        }
+    }
+    return ret;
+}
+
+/*
+ * I/O
+ */
+/* Writes 'count' bytes to the stream; on success, accumulates the frame
+ * count (count / frame_size) for presentation-position reporting. */
+int proxy_write(alsa_device_proxy * proxy, const void *data, unsigned int count)
+{
+    const int ret = pcm_write(proxy->pcm, data, count);
+    if (ret != 0) {
+        return ret;
+    }
+    proxy->transferred += count / proxy->frame_size;
+    return 0;
+}
+
+int proxy_read(const alsa_device_proxy * proxy, void *data, unsigned int count)
+{
+    /* Thin pass-through to tinyalsa. NOTE(review): unlike proxy_write(),
+     * reads are not tracked in proxy->transferred. */
+    return pcm_read(proxy->pcm, data, count);
+}
diff --git a/media/alsa_utils/alsa_format.c b/media/alsa_utils/alsa_format.c
new file mode 100644
index 0000000..38f25c4
--- /dev/null
+++ b/media/alsa_utils/alsa_format.c
@@ -0,0 +1,110 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "alsa_format"
+/*#define LOG_NDEBUG 0*/
+
+#include "include/alsa_format.h"
+
+#include <tinyalsa/asoundlib.h>
+
+#define ARRAY_SIZE(a) (sizeof(a) / sizeof((a)[0]))
+
+/*
+ * Maps from bit position in pcm_mask to PCM_ format constants.
+ */
+/* NOTE(review): not declared 'static', so this symbol has external linkage —
+ * verify no other translation unit relies on it before narrowing. Entries are
+ * int8_t, assumed wide enough for all PCM_FORMAT_* values including
+ * PCM_FORMAT_INVALID — confirm against asoundlib.h. */
+int8_t const pcm_format_value_map[50] = {
+    PCM_FORMAT_S8,          /* 00 - SNDRV_PCM_FORMAT_S8 */
+    PCM_FORMAT_INVALID,     /* 01 - SNDRV_PCM_FORMAT_U8 */
+    PCM_FORMAT_S16_LE,      /* 02 - SNDRV_PCM_FORMAT_S16_LE */
+    PCM_FORMAT_INVALID,     /* 03 - SNDRV_PCM_FORMAT_S16_BE */
+    PCM_FORMAT_INVALID,     /* 04 - SNDRV_PCM_FORMAT_U16_LE */
+    PCM_FORMAT_INVALID,     /* 05 - SNDRV_PCM_FORMAT_U16_BE */
+    PCM_FORMAT_S24_LE,      /* 06 - SNDRV_PCM_FORMAT_S24_LE */
+    PCM_FORMAT_INVALID,     /* 07 - SNDRV_PCM_FORMAT_S24_BE */
+    PCM_FORMAT_INVALID,     /* 08 - SNDRV_PCM_FORMAT_U24_LE */
+    PCM_FORMAT_INVALID,     /* 09 - SNDRV_PCM_FORMAT_U24_BE */
+    PCM_FORMAT_S32_LE,      /* 10 - SNDRV_PCM_FORMAT_S32_LE */
+    PCM_FORMAT_INVALID,     /* 11 - SNDRV_PCM_FORMAT_S32_BE */
+    PCM_FORMAT_INVALID,     /* 12 - SNDRV_PCM_FORMAT_U32_LE */
+    PCM_FORMAT_INVALID,     /* 13 - SNDRV_PCM_FORMAT_U32_BE */
+    PCM_FORMAT_INVALID,     /* 14 - SNDRV_PCM_FORMAT_FLOAT_LE */
+    PCM_FORMAT_INVALID,     /* 15 - SNDRV_PCM_FORMAT_FLOAT_BE */
+    PCM_FORMAT_INVALID,     /* 16 - SNDRV_PCM_FORMAT_FLOAT64_LE */
+    PCM_FORMAT_INVALID,     /* 17 - SNDRV_PCM_FORMAT_FLOAT64_BE */
+    PCM_FORMAT_INVALID,     /* 18 - SNDRV_PCM_FORMAT_IEC958_SUBFRAME_LE */
+    PCM_FORMAT_INVALID,     /* 19 - SNDRV_PCM_FORMAT_IEC958_SUBFRAME_BE */
+    PCM_FORMAT_INVALID,     /* 20 - SNDRV_PCM_FORMAT_MU_LAW */
+    PCM_FORMAT_INVALID,     /* 21 - SNDRV_PCM_FORMAT_A_LAW */
+    PCM_FORMAT_INVALID,     /* 22 - SNDRV_PCM_FORMAT_IMA_ADPCM */
+    PCM_FORMAT_INVALID,     /* 23 - SNDRV_PCM_FORMAT_MPEG */
+    PCM_FORMAT_INVALID,     /* 24 - SNDRV_PCM_FORMAT_GSM */
+    PCM_FORMAT_INVALID,     /* 25 -> 30 (not assigned) */
+    PCM_FORMAT_INVALID,
+    PCM_FORMAT_INVALID,
+    PCM_FORMAT_INVALID,
+    PCM_FORMAT_INVALID,
+    PCM_FORMAT_INVALID,
+    PCM_FORMAT_INVALID,     /* 31 - SNDRV_PCM_FORMAT_SPECIAL */
+    PCM_FORMAT_S24_3LE,     /* 32 - SNDRV_PCM_FORMAT_S24_3LE */
+    PCM_FORMAT_INVALID,     /* 33 - SNDRV_PCM_FORMAT_S24_3BE */
+    PCM_FORMAT_INVALID,     /* 34 - SNDRV_PCM_FORMAT_U24_3LE */
+    PCM_FORMAT_INVALID,     /* 35 - SNDRV_PCM_FORMAT_U24_3BE */
+    PCM_FORMAT_INVALID,     /* 36 - SNDRV_PCM_FORMAT_S20_3LE */
+    PCM_FORMAT_INVALID,     /* 37 - SNDRV_PCM_FORMAT_S20_3BE */
+    PCM_FORMAT_INVALID,     /* 38 - SNDRV_PCM_FORMAT_U20_3LE */
+    PCM_FORMAT_INVALID,     /* 39 - SNDRV_PCM_FORMAT_U20_3BE */
+    PCM_FORMAT_INVALID,     /* 40 - SNDRV_PCM_FORMAT_S18_3LE */
+    PCM_FORMAT_INVALID,     /* 41 - SNDRV_PCM_FORMAT_S18_3BE */
+    PCM_FORMAT_INVALID,     /* 42 - SNDRV_PCM_FORMAT_U18_3LE */
+    PCM_FORMAT_INVALID,     /* 43 - SNDRV_PCM_FORMAT_U18_3BE */
+    PCM_FORMAT_INVALID,     /* 44 - SNDRV_PCM_FORMAT_G723_24 */
+    PCM_FORMAT_INVALID,     /* 45 - SNDRV_PCM_FORMAT_G723_24_1B */
+    PCM_FORMAT_INVALID,     /* 46 - SNDRV_PCM_FORMAT_G723_40 */
+    PCM_FORMAT_INVALID,     /* 47 - SNDRV_PCM_FORMAT_G723_40_1B */
+    PCM_FORMAT_INVALID,     /* 48 - SNDRV_PCM_FORMAT_DSD_U8 */
+    PCM_FORMAT_INVALID      /* 49 - SNDRV_PCM_FORMAT_DSD_U16_LE */
+};
+
+/*
+ * Scans the provided format mask and returns the first non-8 bit sample
+ * format supported by the devices.
+ */
+/*
+ * Scans the provided format mask and returns the first non-8-bit sample
+ * format supported by the device, or PCM_FORMAT_INVALID when none is set.
+ * Bit position N in the mask corresponds to pcm_format_value_map[N].
+ */
+enum pcm_format get_pcm_format_for_mask(struct pcm_mask* mask)
+{
+    const int num_slots = ARRAY_SIZE(mask->bits);
+    const int bits_per_slot = sizeof(mask->bits[0]) * 8;
+    const int table_size = ARRAY_SIZE(pcm_format_value_map);
+
+    /* Removed unused local 'num_written' from the original implementation. */
+    int slot_index, bit_index;
+    int table_index = 0;
+    for (slot_index = 0; slot_index < num_slots && table_index < table_size; slot_index++) {
+        unsigned bit_mask = 1;
+        for (bit_index = 0; bit_index < bits_per_slot && table_index < table_size; bit_index++) {
+            /* skip the 8-bit formats (table indices 0 and 1: S8 / U8) */
+            if (table_index >= 2 && (mask->bits[slot_index] & bit_mask) != 0) {
+                /* just return the first one, which will be at least 16-bit */
+                return (enum pcm_format)pcm_format_value_map[table_index];
+            }
+            bit_mask <<= 1;
+            table_index++;
+        }
+    }
+
+    return PCM_FORMAT_INVALID;
+}
diff --git a/media/alsa_utils/alsa_logging.c b/media/alsa_utils/alsa_logging.c
new file mode 100644
index 0000000..e90797d
--- /dev/null
+++ b/media/alsa_utils/alsa_logging.c
@@ -0,0 +1,129 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "alsa_logging"
+/*#define LOG_NDEBUG 0*/
+
+#include <string.h>
+
+#include <log/log.h>
+
+#include "include/alsa_logging.h"
+
+#define ARRAY_SIZE(a) (sizeof(a) / sizeof((a)[0]))
+
+/*
+ * Logging
+ */
+/*
+ * Logs a pcm_mask as a bracketed bit string, one '0'/'1' per bit with a ','
+ * between slots, e.g. "[0100...,0000...]". Verbose log level only.
+ */
+void log_pcm_mask(const char* mask_name, struct pcm_mask* mask)
+{
+    const size_t num_slots = ARRAY_SIZE(mask->bits);
+    const size_t bits_per_slot = (sizeof(mask->bits[0]) * 8);
+    const size_t chars_per_slot = (bits_per_slot + 1); /* comma */
+
+    const size_t BUFF_SIZE =
+            (num_slots * chars_per_slot + 2 + 1);  /* brackets and null-terminator */
+    char buff[BUFF_SIZE];
+    /* Write characters directly through a cursor; the original strcat() per
+     * bit rescanned the growing string, making construction O(n^2). */
+    char *cursor = buff;
+
+    size_t slot_index, bit_index;
+    *cursor++ = '[';
+    for (slot_index = 0; slot_index < num_slots; slot_index++) {
+        unsigned bit_mask = 1;
+        for (bit_index = 0; bit_index < bits_per_slot; bit_index++) {
+            *cursor++ = (mask->bits[slot_index] & bit_mask) != 0 ? '1' : '0';
+            bit_mask <<= 1;
+        }
+        if (slot_index < num_slots - 1) {
+            *cursor++ = ',';
+        }
+    }
+    *cursor++ = ']';
+    *cursor = '\0';
+
+    ALOGV("%s: mask:%s", mask_name, buff);
+}
+
+/* Dumps the min/max of every pcm_param plus the format/subformat masks at
+ * verbose log level (compiles to no-ops when ALOGV is disabled). */
+void log_pcm_params(struct pcm_params * alsa_hw_params)
+{
+    ALOGV("usb:audio_hw - PCM_PARAM_SAMPLE_BITS min:%u, max:%u",
+          pcm_params_get_min(alsa_hw_params, PCM_PARAM_SAMPLE_BITS),
+          pcm_params_get_max(alsa_hw_params, PCM_PARAM_SAMPLE_BITS));
+    ALOGV("usb:audio_hw - PCM_PARAM_FRAME_BITS min:%u, max:%u",
+          pcm_params_get_min(alsa_hw_params, PCM_PARAM_FRAME_BITS),
+          pcm_params_get_max(alsa_hw_params, PCM_PARAM_FRAME_BITS));
+    log_pcm_mask("PCM_PARAM_FORMAT",
+                 pcm_params_get_mask(alsa_hw_params, PCM_PARAM_FORMAT));
+    log_pcm_mask("PCM_PARAM_SUBFORMAT",
+                 pcm_params_get_mask(alsa_hw_params, PCM_PARAM_SUBFORMAT));
+    ALOGV("usb:audio_hw - PCM_PARAM_CHANNELS min:%u, max:%u",
+          pcm_params_get_min(alsa_hw_params, PCM_PARAM_CHANNELS),
+          pcm_params_get_max(alsa_hw_params, PCM_PARAM_CHANNELS));
+    ALOGV("usb:audio_hw - PCM_PARAM_RATE min:%u, max:%u",
+          pcm_params_get_min(alsa_hw_params, PCM_PARAM_RATE),
+          pcm_params_get_max(alsa_hw_params, PCM_PARAM_RATE));
+    ALOGV("usb:audio_hw - PCM_PARAM_PERIOD_TIME min:%u, max:%u",
+          pcm_params_get_min(alsa_hw_params, PCM_PARAM_PERIOD_TIME),
+          pcm_params_get_max(alsa_hw_params, PCM_PARAM_PERIOD_TIME));
+    ALOGV("usb:audio_hw - PCM_PARAM_PERIOD_SIZE min:%u, max:%u",
+          pcm_params_get_min(alsa_hw_params, PCM_PARAM_PERIOD_SIZE),
+          pcm_params_get_max(alsa_hw_params, PCM_PARAM_PERIOD_SIZE));
+    ALOGV("usb:audio_hw - PCM_PARAM_PERIOD_BYTES min:%u, max:%u",
+          pcm_params_get_min(alsa_hw_params, PCM_PARAM_PERIOD_BYTES),
+          pcm_params_get_max(alsa_hw_params, PCM_PARAM_PERIOD_BYTES));
+    ALOGV("usb:audio_hw - PCM_PARAM_PERIODS min:%u, max:%u",
+          pcm_params_get_min(alsa_hw_params, PCM_PARAM_PERIODS),
+          pcm_params_get_max(alsa_hw_params, PCM_PARAM_PERIODS));
+    ALOGV("usb:audio_hw - PCM_PARAM_BUFFER_TIME min:%u, max:%u",
+          pcm_params_get_min(alsa_hw_params, PCM_PARAM_BUFFER_TIME),
+          pcm_params_get_max(alsa_hw_params, PCM_PARAM_BUFFER_TIME));
+    ALOGV("usb:audio_hw - PCM_PARAM_BUFFER_SIZE min:%u, max:%u",
+          pcm_params_get_min(alsa_hw_params, PCM_PARAM_BUFFER_SIZE),
+          pcm_params_get_max(alsa_hw_params, PCM_PARAM_BUFFER_SIZE));
+    ALOGV("usb:audio_hw - PCM_PARAM_BUFFER_BYTES min:%u, max:%u",
+          pcm_params_get_min(alsa_hw_params, PCM_PARAM_BUFFER_BYTES),
+          pcm_params_get_max(alsa_hw_params, PCM_PARAM_BUFFER_BYTES));
+    ALOGV("usb:audio_hw - PCM_PARAM_TICK_TIME min:%u, max:%u",
+          pcm_params_get_min(alsa_hw_params, PCM_PARAM_TICK_TIME),
+          pcm_params_get_max(alsa_hw_params, PCM_PARAM_TICK_TIME));
+}
+
+/* Verbose-dumps the fields of a pcm_config; 'label' identifies the caller. */
+void log_pcm_config(struct pcm_config * config, const char* label) {
+    ALOGV("log_pcm_config() - %s", label);
+    ALOGV("  channels:%d", config->channels);
+    ALOGV("  rate:%d", config->rate);
+    ALOGV("  period_size:%d", config->period_size);
+    ALOGV("  period_count:%d", config->period_count);
+    ALOGV("  format:%d", config->format);
+#if 0
+    /* Remaining pcm_config fields, kept here (disabled) for reference only.
+     *
+     * Values to use for the ALSA start, stop and silence thresholds.  Setting
+     * any one of these values to 0 will cause the default tinyalsa values to be
+     * used instead.  Tinyalsa defaults are as follows.
+     *
+     * start_threshold   : period_count * period_size
+     * stop_threshold    : period_count * period_size
+     * silence_threshold : 0
+     */
+    unsigned int start_threshold;
+    unsigned int stop_threshold;
+    unsigned int silence_threshold;
+
+    /* Minimum number of frames available before pcm_mmap_write() will actually
+     * write into the kernel buffer. Only used if the stream is opened in mmap mode
+     * (pcm_open() called with PCM_MMAP flag set).   Use 0 for default.
+     */
+    int avail_min;
+#endif
+}
diff --git a/media/alsa_utils/include/alsa_device_profile.h b/media/alsa_utils/include/alsa_device_profile.h
new file mode 100644
index 0000000..5520b8a
--- /dev/null
+++ b/media/alsa_utils/include/alsa_device_profile.h
@@ -0,0 +1,90 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_SYSTEM_MEDIA_ALSA_UTILS_ALSA_DEVICE_PROFILE_H
+#define ANDROID_SYSTEM_MEDIA_ALSA_UTILS_ALSA_DEVICE_PROFILE_H
+
+#include <stdbool.h>
+
+#include <tinyalsa/asoundlib.h>
+
+#define MAX_PROFILE_FORMATS         6  /* We support the 5 standard formats defined
+                                        * in asound.h, so we just need this to be 1 more
+                                        * than that */
+#define MAX_PROFILE_SAMPLE_RATES    14 /* this number needs to be 1 more than the number of
+                                        * sample rates in std_sample_rates[]
+                                        * (in alsa_device_profile.c) */
+#define MAX_PROFILE_CHANNEL_COUNTS  9  /* this number needs to be 1 more than the number of
+                                        * standard channel formats in std_channel_counts[]
+                                        * (in alsa_device_profile.c) */
+
+#define DEFAULT_SAMPLE_RATE         44100
+#define DEFAULT_SAMPLE_FORMAT       PCM_FORMAT_S16_LE
+#define DEFAULT_CHANNEL_COUNT       2
+
+/* Cached capability description of one ALSA PCM device (one direction). */
+typedef struct  {
+    int card;      /* ALSA card number; presumably negative when decached — confirm
+                    * against profile_decache() in alsa_device_profile.c */
+    int device;    /* ALSA device number on the card */
+    int direction; /* PCM_OUT or PCM_IN */
+
+    /* terminated by a PCM_FORMAT_INVALID entry */
+    enum pcm_format formats[MAX_PROFILE_FORMATS];
+
+    /* terminated by a 0 entry */
+    unsigned sample_rates[MAX_PROFILE_SAMPLE_RATES];
+
+    /* terminated by a 0 entry */
+    unsigned channel_counts[MAX_PROFILE_CHANNEL_COUNTS];
+
+    /* true once profile_read_device_info() has succeeded */
+    bool is_valid;
+
+    /* read from the hardware device */
+    struct pcm_config default_config;
+
+    unsigned min_period_size;
+    unsigned max_period_size;
+
+    unsigned min_channel_count;
+    unsigned max_channel_count;
+} alsa_device_profile;
+
+void profile_init(alsa_device_profile* profile, int direction);
+bool profile_is_initialized(alsa_device_profile* profile);
+bool profile_is_valid(alsa_device_profile* profile);
+bool profile_is_cached_for(alsa_device_profile* profile, int card, int device);
+void profile_decache(alsa_device_profile* profile);
+
+bool profile_read_device_info(alsa_device_profile* profile);
+
+/* Audio Config Strings Methods */
+char * profile_get_sample_rate_strs(alsa_device_profile* profile);
+char * profile_get_format_strs(alsa_device_profile* profile);
+char * profile_get_channel_count_strs(alsa_device_profile* profile);
+
+/* Sample Rate Methods */
+unsigned profile_get_default_sample_rate(alsa_device_profile* profile);
+bool profile_is_sample_rate_valid(alsa_device_profile* profile, unsigned rate);
+
+/* Format Methods */
+enum pcm_format profile_get_default_format(alsa_device_profile* profile);
+bool profile_is_format_valid(alsa_device_profile* profile, enum pcm_format fmt);
+
+/* Channel Methods */
+unsigned profile_get_default_channel_count(alsa_device_profile* profile);
+bool profile_is_channel_count_valid(alsa_device_profile* profile, unsigned count);
+
+/* Utility */
+unsigned profile_calc_min_period_size(alsa_device_profile* profile, unsigned sample_rate);
+unsigned int profile_get_period_size(alsa_device_profile* profile, unsigned sample_rate);
+
+#endif /* ANDROID_SYSTEM_MEDIA_ALSA_UTILS_ALSA_DEVICE_PROFILE_H */
diff --git a/media/alsa_utils/include/alsa_device_proxy.h b/media/alsa_utils/include/alsa_device_proxy.h
new file mode 100644
index 0000000..e1ff8f5
--- /dev/null
+++ b/media/alsa_utils/include/alsa_device_proxy.h
@@ -0,0 +1,55 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_SYSTEM_MEDIA_ALSA_UTILS_ALSA_DEVICE_PROXY_H
+#define ANDROID_SYSTEM_MEDIA_ALSA_UTILS_ALSA_DEVICE_PROXY_H
+
+#include <tinyalsa/asoundlib.h>
+
+#include "alsa_device_profile.h"
+
+/* Wraps an open tinyalsa stream together with the profile it was resolved
+ * against and the configuration actually in effect. */
+typedef struct {
+    alsa_device_profile* profile;   /* device capabilities used to resolve the config */
+
+    /* resolved configuration actually used to open the stream (see proxy_prepare()) */
+    struct pcm_config alsa_config;
+
+    struct pcm * pcm;               /* open tinyalsa stream; NULL when closed */
+
+    size_t frame_size;    /* valid after proxy_prepare(), the frame size in bytes */
+    uint64_t transferred; /* the total frames transferred, not cleared on standby */
+} alsa_device_proxy;
+
+void proxy_prepare(alsa_device_proxy * proxy, alsa_device_profile * profile,
+                   struct pcm_config * config);
+
+unsigned proxy_get_sample_rate(const alsa_device_proxy * proxy);
+enum pcm_format proxy_get_format(const alsa_device_proxy * proxy);
+unsigned proxy_get_channel_count(const alsa_device_proxy * proxy);
+
+unsigned int proxy_get_period_size(const alsa_device_proxy * proxy);
+
+unsigned proxy_get_latency(const alsa_device_proxy * proxy);
+
+int proxy_get_presentation_position(const alsa_device_proxy * proxy,
+        uint64_t *frames, struct timespec *timestamp);
+
+int proxy_open(alsa_device_proxy * proxy);
+void proxy_close(alsa_device_proxy * proxy);
+
+int proxy_write(alsa_device_proxy * proxy, const void *data, unsigned int count);
+int proxy_read(const alsa_device_proxy * proxy, void *data, unsigned int count);
+
+#endif /* ANDROID_SYSTEM_MEDIA_ALSA_UTILS_ALSA_DEVICE_PROXY_H */
diff --git a/media/alsa_utils/include/alsa_format.h b/media/alsa_utils/include/alsa_format.h
new file mode 100644
index 0000000..e07f836
--- /dev/null
+++ b/media/alsa_utils/include/alsa_format.h
@@ -0,0 +1,26 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_SYSTEM_MEDIA_ALSA_UTILS_ALSA_FORMAT_H
+#define ANDROID_SYSTEM_MEDIA_ALSA_UTILS_ALSA_FORMAT_H
+
+#include <system/audio.h>
+
+#include <tinyalsa/asoundlib.h>
+
+enum pcm_format get_pcm_format_for_mask(struct pcm_mask* mask);
+
+#endif /* ANDROID_SYSTEM_MEDIA_ALSA_UTILS_ALSA_FORMAT_H */
diff --git a/media/alsa_utils/include/alsa_logging.h b/media/alsa_utils/include/alsa_logging.h
new file mode 100644
index 0000000..1b0731e
--- /dev/null
+++ b/media/alsa_utils/include/alsa_logging.h
@@ -0,0 +1,26 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_SYSTEM_MEDIA_ALSA_UTILS_ALSA_LOGGING_H
+#define ANDROID_SYSTEM_MEDIA_ALSA_UTILS_ALSA_LOGGING_H
+
+#include <tinyalsa/asoundlib.h>
+
+void log_pcm_mask(const char* mask_name, struct pcm_mask* mask);
+void log_pcm_params(struct pcm_params * alsa_hw_params);
+void log_pcm_config(struct pcm_config * config, const char* label);
+
+#endif /* ANDROID_SYSTEM_MEDIA_ALSA_UTILS_ALSA_LOGGING_H */
diff --git a/media/audio/include/system/audio.h b/media/audio/include/system/audio.h
new file mode 100644
index 0000000..0707ed7
--- /dev/null
+++ b/media/audio/include/system/audio.h
@@ -0,0 +1,1540 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+#ifndef ANDROID_AUDIO_CORE_H
+#define ANDROID_AUDIO_CORE_H
+
+#include <stdbool.h>
+#include <stdint.h>
+#include <stdio.h>
+#include <sys/cdefs.h>
+#include <sys/types.h>
+
+#include <cutils/bitops.h>
+
+__BEGIN_DECLS
+
+/* The enums were moved here mostly from
+ * frameworks/base/include/media/AudioSystem.h
+ */
+
+/* device address used to refer to the standard remote submix */
+#define AUDIO_REMOTE_SUBMIX_DEVICE_ADDRESS "0"
+
+/* AudioFlinger and AudioPolicy services use I/O handles to identify audio sources and sinks */
+typedef int audio_io_handle_t;
+#define AUDIO_IO_HANDLE_NONE    0
+
+/* Audio stream types */
+typedef enum {
+    /* These values must kept in sync with
+     * frameworks/base/media/java/android/media/AudioSystem.java
+     */
+    AUDIO_STREAM_DEFAULT          = -1,
+    AUDIO_STREAM_MIN              = 0,
+    AUDIO_STREAM_VOICE_CALL       = 0,
+    AUDIO_STREAM_SYSTEM           = 1,
+    AUDIO_STREAM_RING             = 2,
+    AUDIO_STREAM_MUSIC            = 3,
+    AUDIO_STREAM_ALARM            = 4,
+    AUDIO_STREAM_NOTIFICATION     = 5,
+    AUDIO_STREAM_BLUETOOTH_SCO    = 6,
+    AUDIO_STREAM_ENFORCED_AUDIBLE = 7, /* Sounds that cannot be muted by user
+                                        * and must be routed to speaker
+                                        */
+    AUDIO_STREAM_DTMF             = 8,
+    AUDIO_STREAM_TTS              = 9,  /* Transmitted Through Speaker.
+                                         * Plays over speaker only, silent on other devices.
+                                         */
+    AUDIO_STREAM_ACCESSIBILITY    = 10, /* For accessibility talk back prompts */
+    AUDIO_STREAM_REROUTING        = 11, /* For dynamic policy output mixes */
+    AUDIO_STREAM_PATCH            = 12, /* For internal audio flinger tracks. Fixed volume */
+    AUDIO_STREAM_PUBLIC_CNT       = AUDIO_STREAM_TTS + 1,
+    AUDIO_STREAM_CNT              = AUDIO_STREAM_PATCH + 1,
+} audio_stream_type_t;
+
+/* Do not change these values without updating their counterparts
+ * in frameworks/base/media/java/android/media/AudioAttributes.java
+ */
+typedef enum {
+    AUDIO_CONTENT_TYPE_UNKNOWN      = 0,
+    AUDIO_CONTENT_TYPE_SPEECH       = 1,
+    AUDIO_CONTENT_TYPE_MUSIC        = 2,
+    AUDIO_CONTENT_TYPE_MOVIE        = 3,
+    AUDIO_CONTENT_TYPE_SONIFICATION = 4,
+
+    AUDIO_CONTENT_TYPE_CNT,
+    AUDIO_CONTENT_TYPE_MAX          = AUDIO_CONTENT_TYPE_CNT - 1,
+} audio_content_type_t;
+
+/* Do not change these values without updating their counterparts
+ * in frameworks/base/media/java/android/media/AudioAttributes.java
+ */
+typedef enum {
+    AUDIO_USAGE_UNKNOWN                            = 0,
+    AUDIO_USAGE_MEDIA                              = 1,
+    AUDIO_USAGE_VOICE_COMMUNICATION                = 2,
+    AUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING     = 3,
+    AUDIO_USAGE_ALARM                              = 4,
+    AUDIO_USAGE_NOTIFICATION                       = 5,
+    AUDIO_USAGE_NOTIFICATION_TELEPHONY_RINGTONE    = 6,
+    AUDIO_USAGE_NOTIFICATION_COMMUNICATION_REQUEST = 7,
+    AUDIO_USAGE_NOTIFICATION_COMMUNICATION_INSTANT = 8,
+    AUDIO_USAGE_NOTIFICATION_COMMUNICATION_DELAYED = 9,
+    AUDIO_USAGE_NOTIFICATION_EVENT                 = 10,
+    AUDIO_USAGE_ASSISTANCE_ACCESSIBILITY           = 11,
+    AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE     = 12,
+    AUDIO_USAGE_ASSISTANCE_SONIFICATION            = 13,
+    AUDIO_USAGE_GAME                               = 14,
+    AUDIO_USAGE_VIRTUAL_SOURCE                     = 15,
+
+    AUDIO_USAGE_CNT,
+    AUDIO_USAGE_MAX                                = AUDIO_USAGE_CNT - 1,
+} audio_usage_t;
+
+typedef uint32_t audio_flags_mask_t;
+
+/* Do not change these values without updating their counterparts
+ * in frameworks/base/media/java/android/media/AudioAttributes.java
+ */
+enum {
+    AUDIO_FLAG_AUDIBILITY_ENFORCED        = 0x1,
+    AUDIO_FLAG_SECURE                     = 0x2,
+    AUDIO_FLAG_SCO                        = 0x4,
+    AUDIO_FLAG_BEACON                     = 0x8,
+    AUDIO_FLAG_HW_AV_SYNC                 = 0x10,
+    AUDIO_FLAG_HW_HOTWORD                 = 0x20,
+    AUDIO_FLAG_BYPASS_INTERRUPTION_POLICY = 0x40,
+    AUDIO_FLAG_BYPASS_MUTE                = 0x80,
+};
+
+/* Do not change these values without updating their counterparts
+ * in frameworks/base/media/java/android/media/MediaRecorder.java,
+ * frameworks/av/services/audiopolicy/AudioPolicyService.cpp,
+ * and system/media/audio_effects/include/audio_effects/audio_effects_conf.h!
+ */
+typedef enum {
+    AUDIO_SOURCE_DEFAULT             = 0,
+    AUDIO_SOURCE_MIC                 = 1,
+    AUDIO_SOURCE_VOICE_UPLINK        = 2,
+    AUDIO_SOURCE_VOICE_DOWNLINK      = 3,
+    AUDIO_SOURCE_VOICE_CALL          = 4,
+    AUDIO_SOURCE_CAMCORDER           = 5,
+    AUDIO_SOURCE_VOICE_RECOGNITION   = 6,
+    AUDIO_SOURCE_VOICE_COMMUNICATION = 7,
+    AUDIO_SOURCE_REMOTE_SUBMIX       = 8, /* Source for the mix to be presented remotely.      */
+                                          /* An example of remote presentation is Wifi Display */
+                                          /*  where a dongle attached to a TV can be used to   */
+                                          /*  play the mix captured by this audio source.	*/
+    AUDIO_SOURCE_UNPROCESSED         = 9, /* Source for unprocessed sound.
+                                             Usage examples include level measurement and raw
+                                             signal analysis. */
+    AUDIO_SOURCE_CNT,
+    AUDIO_SOURCE_MAX                 = AUDIO_SOURCE_CNT - 1,
+    AUDIO_SOURCE_FM_TUNER            = 1998,
+    AUDIO_SOURCE_HOTWORD             = 1999, /* A low-priority, preemptible audio source for
+                                                for background software hotword detection.
+                                                Same tuning as AUDIO_SOURCE_VOICE_RECOGNITION.
+                                                Used only internally to the framework. Not exposed
+                                                at the audio HAL. */
+} audio_source_t;
+
+/* Audio attributes */
+#define AUDIO_ATTRIBUTES_TAGS_MAX_SIZE 256
+typedef struct {
+    audio_content_type_t content_type;
+    audio_usage_t        usage;
+    audio_source_t       source;
+    audio_flags_mask_t   flags;
+    char                 tags[AUDIO_ATTRIBUTES_TAGS_MAX_SIZE]; /* UTF8 */
+} audio_attributes_t;
+
+/* special audio session values
+ * (XXX: should this be living in the audio effects land?)
+ */
+typedef enum {
+    /* session for effects attached to a particular output stream
+     * (value must be less than 0)
+     */
+    AUDIO_SESSION_OUTPUT_STAGE = -1,
+
+    /* session for effects applied to output mix. These effects can
+     * be moved by audio policy manager to another output stream
+     * (value must be 0)
+     */
+    AUDIO_SESSION_OUTPUT_MIX = 0,
+
+    /* application does not specify an explicit session ID to be used,
+     * and requests a new session ID to be allocated
+     * TODO use unique values for AUDIO_SESSION_OUTPUT_MIX and AUDIO_SESSION_ALLOCATE,
+     * after all uses have been updated from 0 to the appropriate symbol, and have been tested.
+     */
+    AUDIO_SESSION_ALLOCATE = 0,
+} audio_session_t;
+
+/* a unique ID allocated by AudioFlinger for use as a audio_io_handle_t or audio_session_t */
+typedef int audio_unique_id_t;
+
+#define AUDIO_UNIQUE_ID_ALLOCATE AUDIO_SESSION_ALLOCATE
+
+/* Audio sub formats (see enum audio_format). */
+
+/* PCM sub formats */
+typedef enum {
+    /* All of these are in native byte order */
+    AUDIO_FORMAT_PCM_SUB_16_BIT          = 0x1, /* DO NOT CHANGE - PCM signed 16 bits */
+    AUDIO_FORMAT_PCM_SUB_8_BIT           = 0x2, /* DO NOT CHANGE - PCM unsigned 8 bits */
+    AUDIO_FORMAT_PCM_SUB_32_BIT          = 0x3, /* PCM signed .31 fixed point */
+    AUDIO_FORMAT_PCM_SUB_8_24_BIT        = 0x4, /* PCM signed 8.23 fixed point */
+    AUDIO_FORMAT_PCM_SUB_FLOAT           = 0x5, /* PCM single-precision floating point */
+    AUDIO_FORMAT_PCM_SUB_24_BIT_PACKED   = 0x6, /* PCM signed .23 fixed point packed in 3 bytes */
+} audio_format_pcm_sub_fmt_t;
+
+/* The audio_format_*_sub_fmt_t declarations are not currently used */
+
+/* MP3 sub format field definition : can use 11 LSBs in the same way as MP3
+ * frame header to specify bit rate, stereo mode, version...
+ */
+typedef enum {
+    AUDIO_FORMAT_MP3_SUB_NONE            = 0x0,
+} audio_format_mp3_sub_fmt_t;
+
+/* AMR NB/WB sub format field definition: specify frame block interleaving,
+ * bandwidth efficient or octet aligned, encoding mode for recording...
+ */
+typedef enum {
+    AUDIO_FORMAT_AMR_SUB_NONE            = 0x0,
+} audio_format_amr_sub_fmt_t;
+
+/* AAC sub format field definition: specify profile or bitrate for recording... */
+typedef enum {
+    AUDIO_FORMAT_AAC_SUB_MAIN            = 0x1,
+    AUDIO_FORMAT_AAC_SUB_LC              = 0x2,
+    AUDIO_FORMAT_AAC_SUB_SSR             = 0x4,
+    AUDIO_FORMAT_AAC_SUB_LTP             = 0x8,
+    AUDIO_FORMAT_AAC_SUB_HE_V1           = 0x10,
+    AUDIO_FORMAT_AAC_SUB_SCALABLE        = 0x20,
+    AUDIO_FORMAT_AAC_SUB_ERLC            = 0x40,
+    AUDIO_FORMAT_AAC_SUB_LD              = 0x80,
+    AUDIO_FORMAT_AAC_SUB_HE_V2           = 0x100,
+    AUDIO_FORMAT_AAC_SUB_ELD             = 0x200,
+} audio_format_aac_sub_fmt_t;
+
+/* VORBIS sub format field definition: specify quality for recording... */
+typedef enum {
+    AUDIO_FORMAT_VORBIS_SUB_NONE         = 0x0,
+} audio_format_vorbis_sub_fmt_t;
+
+
+/* Audio format consists of a main format field (upper 8 bits) and a sub format
+ * field (lower 24 bits).
+ *
+ * The main format indicates the main codec type. The sub format field
+ * indicates options and parameters for each format. The sub format is mainly
+ * used for record to indicate for instance the requested bitrate or profile.
+ * It can also be used for certain formats to give informations not present in
+ * the encoded audio stream (e.g. octet alignement for AMR).
+ */
+typedef enum {
+    AUDIO_FORMAT_INVALID             = 0xFFFFFFFFUL,
+    AUDIO_FORMAT_DEFAULT             = 0,
+    AUDIO_FORMAT_PCM                 = 0x00000000UL, /* DO NOT CHANGE */
+    AUDIO_FORMAT_MP3                 = 0x01000000UL,
+    AUDIO_FORMAT_AMR_NB              = 0x02000000UL,
+    AUDIO_FORMAT_AMR_WB              = 0x03000000UL,
+    AUDIO_FORMAT_AAC                 = 0x04000000UL,
+    AUDIO_FORMAT_HE_AAC_V1           = 0x05000000UL, /* Deprecated, Use AUDIO_FORMAT_AAC_HE_V1*/
+    AUDIO_FORMAT_HE_AAC_V2           = 0x06000000UL, /* Deprecated, Use AUDIO_FORMAT_AAC_HE_V2*/
+    AUDIO_FORMAT_VORBIS              = 0x07000000UL,
+    AUDIO_FORMAT_OPUS                = 0x08000000UL,
+    AUDIO_FORMAT_AC3                 = 0x09000000UL,
+    AUDIO_FORMAT_E_AC3               = 0x0A000000UL,
+    AUDIO_FORMAT_DTS                 = 0x0B000000UL,
+    AUDIO_FORMAT_DTS_HD              = 0x0C000000UL,
+    AUDIO_FORMAT_EVRC                = 0x10000000UL,
+    AUDIO_FORMAT_QCELP               = 0x11000000UL,
+    AUDIO_FORMAT_WMA                 = 0x12000000UL,
+    AUDIO_FORMAT_WMA_PRO             = 0x13000000UL,
+    AUDIO_FORMAT_AAC_ADIF            = 0x14000000UL,
+    AUDIO_FORMAT_EVRCB               = 0x15000000UL,
+    AUDIO_FORMAT_EVRCWB              = 0x16000000UL,
+    AUDIO_FORMAT_AMR_WB_PLUS         = 0x17000000UL,
+    AUDIO_FORMAT_MP2                 = 0x18000000UL,
+    AUDIO_FORMAT_EVRCNW              = 0x19000000UL,
+    AUDIO_FORMAT_PCM_OFFLOAD         = 0x1A000000UL,
+    AUDIO_FORMAT_FLAC                = 0x1B000000UL,
+    AUDIO_FORMAT_ALAC                = 0x1C000000UL,
+    AUDIO_FORMAT_APE                 = 0x1D000000UL,
+    AUDIO_FORMAT_AAC_ADTS            = 0x1E000000UL,
+    AUDIO_FORMAT_MAIN_MASK           = 0xFF000000UL,
+    AUDIO_FORMAT_SUB_MASK            = 0x00FFFFFFUL,
+
+    /* Aliases */
+    /* note != AudioFormat.ENCODING_PCM_16BIT */
+    AUDIO_FORMAT_PCM_16_BIT          = (AUDIO_FORMAT_PCM |
+                                        AUDIO_FORMAT_PCM_SUB_16_BIT),
+    /* note != AudioFormat.ENCODING_PCM_8BIT */
+    AUDIO_FORMAT_PCM_8_BIT           = (AUDIO_FORMAT_PCM |
+                                        AUDIO_FORMAT_PCM_SUB_8_BIT),
+    AUDIO_FORMAT_PCM_32_BIT          = (AUDIO_FORMAT_PCM |
+                                        AUDIO_FORMAT_PCM_SUB_32_BIT),
+    AUDIO_FORMAT_PCM_8_24_BIT        = (AUDIO_FORMAT_PCM |
+                                        AUDIO_FORMAT_PCM_SUB_8_24_BIT),
+    AUDIO_FORMAT_PCM_FLOAT           = (AUDIO_FORMAT_PCM |
+                                        AUDIO_FORMAT_PCM_SUB_FLOAT),
+    AUDIO_FORMAT_PCM_24_BIT_PACKED   = (AUDIO_FORMAT_PCM |
+                                        AUDIO_FORMAT_PCM_SUB_24_BIT_PACKED),
+    AUDIO_FORMAT_AAC_MAIN            = (AUDIO_FORMAT_AAC |
+                                        AUDIO_FORMAT_AAC_SUB_MAIN),
+    AUDIO_FORMAT_AAC_LC              = (AUDIO_FORMAT_AAC |
+                                        AUDIO_FORMAT_AAC_SUB_LC),
+    AUDIO_FORMAT_AAC_SSR             = (AUDIO_FORMAT_AAC |
+                                        AUDIO_FORMAT_AAC_SUB_SSR),
+    AUDIO_FORMAT_AAC_LTP             = (AUDIO_FORMAT_AAC |
+                                        AUDIO_FORMAT_AAC_SUB_LTP),
+    AUDIO_FORMAT_AAC_HE_V1           = (AUDIO_FORMAT_AAC |
+                                        AUDIO_FORMAT_AAC_SUB_HE_V1),
+    AUDIO_FORMAT_AAC_SCALABLE        = (AUDIO_FORMAT_AAC |
+                                        AUDIO_FORMAT_AAC_SUB_SCALABLE),
+    AUDIO_FORMAT_AAC_ERLC            = (AUDIO_FORMAT_AAC |
+                                        AUDIO_FORMAT_AAC_SUB_ERLC),
+    AUDIO_FORMAT_AAC_LD              = (AUDIO_FORMAT_AAC |
+                                        AUDIO_FORMAT_AAC_SUB_LD),
+    AUDIO_FORMAT_AAC_HE_V2           = (AUDIO_FORMAT_AAC |
+                                        AUDIO_FORMAT_AAC_SUB_HE_V2),
+    AUDIO_FORMAT_AAC_ELD             = (AUDIO_FORMAT_AAC |
+                                        AUDIO_FORMAT_AAC_SUB_ELD),
+    AUDIO_FORMAT_AAC_ADTS_MAIN       = (AUDIO_FORMAT_AAC_ADTS |
+                                        AUDIO_FORMAT_AAC_SUB_MAIN),
+    AUDIO_FORMAT_AAC_ADTS_LC         = (AUDIO_FORMAT_AAC_ADTS |
+                                        AUDIO_FORMAT_AAC_SUB_LC),
+    AUDIO_FORMAT_AAC_ADTS_SSR        = (AUDIO_FORMAT_AAC_ADTS |
+                                        AUDIO_FORMAT_AAC_SUB_SSR),
+    AUDIO_FORMAT_AAC_ADTS_LTP        = (AUDIO_FORMAT_AAC_ADTS |
+                                        AUDIO_FORMAT_AAC_SUB_LTP),
+    AUDIO_FORMAT_AAC_ADTS_HE_V1      = (AUDIO_FORMAT_AAC_ADTS |
+                                        AUDIO_FORMAT_AAC_SUB_HE_V1),
+    AUDIO_FORMAT_AAC_ADTS_SCALABLE   = (AUDIO_FORMAT_AAC_ADTS |
+                                        AUDIO_FORMAT_AAC_SUB_SCALABLE),
+    AUDIO_FORMAT_AAC_ADTS_ERLC       = (AUDIO_FORMAT_AAC_ADTS |
+                                        AUDIO_FORMAT_AAC_SUB_ERLC),
+    AUDIO_FORMAT_AAC_ADTS_LD         = (AUDIO_FORMAT_AAC_ADTS |
+                                        AUDIO_FORMAT_AAC_SUB_LD),
+    AUDIO_FORMAT_AAC_ADTS_HE_V2      = (AUDIO_FORMAT_AAC_ADTS |
+                                        AUDIO_FORMAT_AAC_SUB_HE_V2),
+    AUDIO_FORMAT_AAC_ADTS_ELD        = (AUDIO_FORMAT_AAC_ADTS |
+                                        AUDIO_FORMAT_AAC_SUB_ELD),
+    /*Offload PCM formats*/
+    AUDIO_FORMAT_PCM_16_BIT_OFFLOAD  = (AUDIO_FORMAT_PCM_OFFLOAD |
+                                        AUDIO_FORMAT_PCM_SUB_16_BIT),
+    AUDIO_FORMAT_PCM_24_BIT_OFFLOAD  = (AUDIO_FORMAT_PCM_OFFLOAD |
+                                        AUDIO_FORMAT_PCM_SUB_8_24_BIT),
+} audio_format_t;
+
+/* For the channel mask for position assignment representation */
+enum {
+
+/* These can be a complete audio_channel_mask_t. */
+
+    AUDIO_CHANNEL_NONE                      = 0x0,
+    AUDIO_CHANNEL_INVALID                   = 0xC0000000,
+
+/* These can be the bits portion of an audio_channel_mask_t
+ * with representation AUDIO_CHANNEL_REPRESENTATION_POSITION.
+ * Using these bits as a complete audio_channel_mask_t is deprecated.
+ */
+
+    /* output channels */
+    AUDIO_CHANNEL_OUT_FRONT_LEFT            = 0x1,
+    AUDIO_CHANNEL_OUT_FRONT_RIGHT           = 0x2,
+    AUDIO_CHANNEL_OUT_FRONT_CENTER          = 0x4,
+    AUDIO_CHANNEL_OUT_LOW_FREQUENCY         = 0x8,
+    AUDIO_CHANNEL_OUT_BACK_LEFT             = 0x10,
+    AUDIO_CHANNEL_OUT_BACK_RIGHT            = 0x20,
+    AUDIO_CHANNEL_OUT_FRONT_LEFT_OF_CENTER  = 0x40,
+    AUDIO_CHANNEL_OUT_FRONT_RIGHT_OF_CENTER = 0x80,
+    AUDIO_CHANNEL_OUT_BACK_CENTER           = 0x100,
+    AUDIO_CHANNEL_OUT_SIDE_LEFT             = 0x200,
+    AUDIO_CHANNEL_OUT_SIDE_RIGHT            = 0x400,
+    AUDIO_CHANNEL_OUT_TOP_CENTER            = 0x800,
+    AUDIO_CHANNEL_OUT_TOP_FRONT_LEFT        = 0x1000,
+    AUDIO_CHANNEL_OUT_TOP_FRONT_CENTER      = 0x2000,
+    AUDIO_CHANNEL_OUT_TOP_FRONT_RIGHT       = 0x4000,
+    AUDIO_CHANNEL_OUT_TOP_BACK_LEFT         = 0x8000,
+    AUDIO_CHANNEL_OUT_TOP_BACK_CENTER       = 0x10000,
+    AUDIO_CHANNEL_OUT_TOP_BACK_RIGHT        = 0x20000,
+
+/* TODO: should these be considered complete channel masks, or only bits? */
+
+    AUDIO_CHANNEL_OUT_MONO     = AUDIO_CHANNEL_OUT_FRONT_LEFT,
+    AUDIO_CHANNEL_OUT_STEREO   = (AUDIO_CHANNEL_OUT_FRONT_LEFT |
+                                  AUDIO_CHANNEL_OUT_FRONT_RIGHT),
+    AUDIO_CHANNEL_OUT_2POINT1  = (AUDIO_CHANNEL_OUT_FRONT_LEFT |
+                                  AUDIO_CHANNEL_OUT_FRONT_RIGHT |
+                                  AUDIO_CHANNEL_OUT_FRONT_CENTER),
+    AUDIO_CHANNEL_OUT_QUAD     = (AUDIO_CHANNEL_OUT_FRONT_LEFT |
+                                  AUDIO_CHANNEL_OUT_FRONT_RIGHT |
+                                  AUDIO_CHANNEL_OUT_BACK_LEFT |
+                                  AUDIO_CHANNEL_OUT_BACK_RIGHT),
+    AUDIO_CHANNEL_OUT_QUAD_BACK = AUDIO_CHANNEL_OUT_QUAD,
+    /* like AUDIO_CHANNEL_OUT_QUAD_BACK with *_SIDE_* instead of *_BACK_* */
+    AUDIO_CHANNEL_OUT_QUAD_SIDE = (AUDIO_CHANNEL_OUT_FRONT_LEFT |
+                                  AUDIO_CHANNEL_OUT_FRONT_RIGHT |
+                                  AUDIO_CHANNEL_OUT_SIDE_LEFT |
+                                  AUDIO_CHANNEL_OUT_SIDE_RIGHT),
+    AUDIO_CHANNEL_OUT_SURROUND = (AUDIO_CHANNEL_OUT_FRONT_LEFT |
+                                  AUDIO_CHANNEL_OUT_FRONT_RIGHT |
+                                  AUDIO_CHANNEL_OUT_FRONT_CENTER |
+                                  AUDIO_CHANNEL_OUT_BACK_CENTER),
+    AUDIO_CHANNEL_OUT_PENTA =    (AUDIO_CHANNEL_OUT_QUAD |
+                                  AUDIO_CHANNEL_OUT_FRONT_CENTER),
+    AUDIO_CHANNEL_OUT_5POINT1  = (AUDIO_CHANNEL_OUT_FRONT_LEFT |
+                                  AUDIO_CHANNEL_OUT_FRONT_RIGHT |
+                                  AUDIO_CHANNEL_OUT_FRONT_CENTER |
+                                  AUDIO_CHANNEL_OUT_LOW_FREQUENCY |
+                                  AUDIO_CHANNEL_OUT_BACK_LEFT |
+                                  AUDIO_CHANNEL_OUT_BACK_RIGHT),
+    AUDIO_CHANNEL_OUT_5POINT1_BACK = AUDIO_CHANNEL_OUT_5POINT1,
+    /* like AUDIO_CHANNEL_OUT_5POINT1_BACK with *_SIDE_* instead of *_BACK_* */
+    AUDIO_CHANNEL_OUT_5POINT1_SIDE = (AUDIO_CHANNEL_OUT_FRONT_LEFT |
+                                  AUDIO_CHANNEL_OUT_FRONT_RIGHT |
+                                  AUDIO_CHANNEL_OUT_FRONT_CENTER |
+                                  AUDIO_CHANNEL_OUT_LOW_FREQUENCY |
+                                  AUDIO_CHANNEL_OUT_SIDE_LEFT |
+                                  AUDIO_CHANNEL_OUT_SIDE_RIGHT),
+    AUDIO_CHANNEL_OUT_6POINT1  = (AUDIO_CHANNEL_OUT_FRONT_LEFT |
+                                  AUDIO_CHANNEL_OUT_FRONT_RIGHT |
+                                  AUDIO_CHANNEL_OUT_FRONT_CENTER |
+                                  AUDIO_CHANNEL_OUT_LOW_FREQUENCY |
+                                  AUDIO_CHANNEL_OUT_BACK_LEFT |
+                                  AUDIO_CHANNEL_OUT_BACK_RIGHT |
+                                  AUDIO_CHANNEL_OUT_BACK_CENTER),
+    // matches the correct AudioFormat.CHANNEL_OUT_7POINT1_SURROUND definition for 7.1
+    AUDIO_CHANNEL_OUT_7POINT1  = (AUDIO_CHANNEL_OUT_FRONT_LEFT |
+                                  AUDIO_CHANNEL_OUT_FRONT_RIGHT |
+                                  AUDIO_CHANNEL_OUT_FRONT_CENTER |
+                                  AUDIO_CHANNEL_OUT_LOW_FREQUENCY |
+                                  AUDIO_CHANNEL_OUT_BACK_LEFT |
+                                  AUDIO_CHANNEL_OUT_BACK_RIGHT |
+                                  AUDIO_CHANNEL_OUT_SIDE_LEFT |
+                                  AUDIO_CHANNEL_OUT_SIDE_RIGHT),
+    AUDIO_CHANNEL_OUT_ALL      = (AUDIO_CHANNEL_OUT_FRONT_LEFT |
+                                  AUDIO_CHANNEL_OUT_FRONT_RIGHT |
+                                  AUDIO_CHANNEL_OUT_FRONT_CENTER |
+                                  AUDIO_CHANNEL_OUT_LOW_FREQUENCY |
+                                  AUDIO_CHANNEL_OUT_BACK_LEFT |
+                                  AUDIO_CHANNEL_OUT_BACK_RIGHT |
+                                  AUDIO_CHANNEL_OUT_FRONT_LEFT_OF_CENTER |
+                                  AUDIO_CHANNEL_OUT_FRONT_RIGHT_OF_CENTER |
+                                  AUDIO_CHANNEL_OUT_BACK_CENTER|
+                                  AUDIO_CHANNEL_OUT_SIDE_LEFT|
+                                  AUDIO_CHANNEL_OUT_SIDE_RIGHT|
+                                  AUDIO_CHANNEL_OUT_TOP_CENTER|
+                                  AUDIO_CHANNEL_OUT_TOP_FRONT_LEFT|
+                                  AUDIO_CHANNEL_OUT_TOP_FRONT_CENTER|
+                                  AUDIO_CHANNEL_OUT_TOP_FRONT_RIGHT|
+                                  AUDIO_CHANNEL_OUT_TOP_BACK_LEFT|
+                                  AUDIO_CHANNEL_OUT_TOP_BACK_CENTER|
+                                  AUDIO_CHANNEL_OUT_TOP_BACK_RIGHT),
+
+/* These are bits only, not complete values */
+
+    /* input channels */
+    AUDIO_CHANNEL_IN_LEFT            = 0x4,
+    AUDIO_CHANNEL_IN_RIGHT           = 0x8,
+    AUDIO_CHANNEL_IN_FRONT           = 0x10,
+    AUDIO_CHANNEL_IN_BACK            = 0x20,
+    AUDIO_CHANNEL_IN_LEFT_PROCESSED  = 0x40,
+    AUDIO_CHANNEL_IN_RIGHT_PROCESSED = 0x80,
+    AUDIO_CHANNEL_IN_FRONT_PROCESSED = 0x100,
+    AUDIO_CHANNEL_IN_BACK_PROCESSED  = 0x200,
+    AUDIO_CHANNEL_IN_PRESSURE        = 0x400,
+    AUDIO_CHANNEL_IN_X_AXIS          = 0x800,
+    AUDIO_CHANNEL_IN_Y_AXIS          = 0x1000,
+    AUDIO_CHANNEL_IN_Z_AXIS          = 0x2000,
+    AUDIO_CHANNEL_IN_VOICE_UPLINK    = 0x4000,
+    AUDIO_CHANNEL_IN_VOICE_DNLINK    = 0x8000,
+
+/* TODO: should these be considered complete channel masks, or only bits, or deprecated? */
+
+    AUDIO_CHANNEL_IN_MONO   = AUDIO_CHANNEL_IN_FRONT,
+    AUDIO_CHANNEL_IN_STEREO = (AUDIO_CHANNEL_IN_LEFT | AUDIO_CHANNEL_IN_RIGHT),
+    AUDIO_CHANNEL_IN_FRONT_BACK = (AUDIO_CHANNEL_IN_FRONT | AUDIO_CHANNEL_IN_BACK),
+    AUDIO_CHANNEL_IN_5POINT1 = (AUDIO_CHANNEL_IN_LEFT |
+                               AUDIO_CHANNEL_IN_RIGHT |
+                               AUDIO_CHANNEL_IN_FRONT |
+                               AUDIO_CHANNEL_IN_BACK |
+                               AUDIO_CHANNEL_IN_LEFT_PROCESSED |
+                               AUDIO_CHANNEL_IN_RIGHT_PROCESSED),
+    AUDIO_CHANNEL_IN_VOICE_UPLINK_MONO = (AUDIO_CHANNEL_IN_VOICE_UPLINK | AUDIO_CHANNEL_IN_MONO),
+    AUDIO_CHANNEL_IN_VOICE_DNLINK_MONO = (AUDIO_CHANNEL_IN_VOICE_DNLINK | AUDIO_CHANNEL_IN_MONO),
+    AUDIO_CHANNEL_IN_VOICE_CALL_MONO   = (AUDIO_CHANNEL_IN_VOICE_UPLINK_MONO | AUDIO_CHANNEL_IN_VOICE_DNLINK_MONO),
+    AUDIO_CHANNEL_IN_ALL    = (AUDIO_CHANNEL_IN_LEFT |
+                               AUDIO_CHANNEL_IN_RIGHT |
+                               AUDIO_CHANNEL_IN_FRONT |
+                               AUDIO_CHANNEL_IN_BACK|
+                               AUDIO_CHANNEL_IN_LEFT_PROCESSED |
+                               AUDIO_CHANNEL_IN_RIGHT_PROCESSED |
+                               AUDIO_CHANNEL_IN_FRONT_PROCESSED |
+                               AUDIO_CHANNEL_IN_BACK_PROCESSED|
+                               AUDIO_CHANNEL_IN_PRESSURE |
+                               AUDIO_CHANNEL_IN_X_AXIS |
+                               AUDIO_CHANNEL_IN_Y_AXIS |
+                               AUDIO_CHANNEL_IN_Z_AXIS |
+                               AUDIO_CHANNEL_IN_VOICE_UPLINK |
+                               AUDIO_CHANNEL_IN_VOICE_DNLINK),
+};
+
+/* A channel mask per se only defines the presence or absence of a channel, not the order.
+ * But see AUDIO_INTERLEAVE_* below for the platform convention of order.
+ *
+ * audio_channel_mask_t is an opaque type and its internal layout should not
+ * be assumed as it may change in the future.
+ * Instead, always use the functions declared in this header to examine.
+ *
+ * These are the current representations:
+ *
+ *   AUDIO_CHANNEL_REPRESENTATION_POSITION
+ *     is a channel mask representation for position assignment.
+ *     Each low-order bit corresponds to the spatial position of a transducer (output),
+ *     or interpretation of channel (input).
+ *     The user of a channel mask needs to know the context of whether it is for output or input.
+ *     The constants AUDIO_CHANNEL_OUT_* or AUDIO_CHANNEL_IN_* apply to the bits portion.
+ *     It is not permitted for no bits to be set.
+ *
+ *   AUDIO_CHANNEL_REPRESENTATION_INDEX
+ *     is a channel mask representation for index assignment.
+ *     Each low-order bit corresponds to a selected channel.
+ *     There is no platform interpretation of the various bits.
+ *     There is no concept of output or input.
+ *     It is not permitted for no bits to be set.
+ *
+ * All other representations are reserved for future use.
+ *
+ * Warning: current representation distinguishes between input and output, but this will not the be
+ * case in future revisions of the platform. Wherever there is an ambiguity between input and output
+ * that is currently resolved by checking the channel mask, the implementer should look for ways to
+ * fix it with additional information outside of the mask.
+ */
+typedef uint32_t audio_channel_mask_t;
+
+/* Maximum number of channels for all representations */
+#define AUDIO_CHANNEL_COUNT_MAX             30
+
+/* log(2) of maximum number of representations, not part of public API */
+#define AUDIO_CHANNEL_REPRESENTATION_LOG2   2
+
+/* Representations */
+typedef enum {
+    AUDIO_CHANNEL_REPRESENTATION_POSITION    = 0,    // must be zero for compatibility
+    // 1 is reserved for future use
+    AUDIO_CHANNEL_REPRESENTATION_INDEX       = 2,
+    // 3 is reserved for future use
+} audio_channel_representation_t;
+
+/* The channel index masks defined here are the canonical masks for 1 to 8 channel
+ * endpoints and apply to both source and sink.
+ */
+enum {
+    AUDIO_CHANNEL_INDEX_HDR  = AUDIO_CHANNEL_REPRESENTATION_INDEX << AUDIO_CHANNEL_COUNT_MAX,
+    AUDIO_CHANNEL_INDEX_MASK_1 =  AUDIO_CHANNEL_INDEX_HDR | (1 << 1) - 1,
+    AUDIO_CHANNEL_INDEX_MASK_2 =  AUDIO_CHANNEL_INDEX_HDR | (1 << 2) - 1,
+    AUDIO_CHANNEL_INDEX_MASK_3 =  AUDIO_CHANNEL_INDEX_HDR | (1 << 3) - 1,
+    AUDIO_CHANNEL_INDEX_MASK_4 =  AUDIO_CHANNEL_INDEX_HDR | (1 << 4) - 1,
+    AUDIO_CHANNEL_INDEX_MASK_5 =  AUDIO_CHANNEL_INDEX_HDR | (1 << 5) - 1,
+    AUDIO_CHANNEL_INDEX_MASK_6 =  AUDIO_CHANNEL_INDEX_HDR | (1 << 6) - 1,
+    AUDIO_CHANNEL_INDEX_MASK_7 =  AUDIO_CHANNEL_INDEX_HDR | (1 << 7) - 1,
+    AUDIO_CHANNEL_INDEX_MASK_8 =  AUDIO_CHANNEL_INDEX_HDR | (1 << 8) - 1,
+    // FIXME FCC_8
+};
+
+/* The return value is undefined if the channel mask is invalid. */
+static inline uint32_t audio_channel_mask_get_bits(audio_channel_mask_t channel)
+{
+    return channel & ((1 << AUDIO_CHANNEL_COUNT_MAX) - 1);
+}
+
+/* The return value is undefined if the channel mask is invalid. */
+static inline audio_channel_representation_t audio_channel_mask_get_representation(
+        audio_channel_mask_t channel)
+{
+    // The right shift should be sufficient, but also "and" for safety in case mask is not 32 bits
+    return (audio_channel_representation_t)
+            ((channel >> AUDIO_CHANNEL_COUNT_MAX) & ((1 << AUDIO_CHANNEL_REPRESENTATION_LOG2) - 1));
+}
+
+/* Returns true if the channel mask is valid,
+ * or returns false for AUDIO_CHANNEL_NONE, AUDIO_CHANNEL_INVALID, and other invalid values.
+ * This function is unable to determine whether a channel mask for position assignment
+ * is invalid because an output mask has an invalid output bit set,
+ * or because an input mask has an invalid input bit set.
+ * All other APIs that take a channel mask assume that it is valid.
+ */
+static inline bool audio_channel_mask_is_valid(audio_channel_mask_t channel)
+{
+    uint32_t bits = audio_channel_mask_get_bits(channel);
+    audio_channel_representation_t representation = audio_channel_mask_get_representation(channel);
+    switch (representation) {
+    case AUDIO_CHANNEL_REPRESENTATION_POSITION:
+    case AUDIO_CHANNEL_REPRESENTATION_INDEX:
+        break;
+    default:
+        bits = 0;
+        break;
+    }
+    return bits != 0;
+}
+
+/* Not part of public API */
+static inline audio_channel_mask_t audio_channel_mask_from_representation_and_bits(
+        audio_channel_representation_t representation, uint32_t bits)
+{
+    return (audio_channel_mask_t) ((representation << AUDIO_CHANNEL_COUNT_MAX) | bits);
+}
+
+/* Expresses the convention when stereo audio samples are stored interleaved
+ * in an array.  This should improve readability by allowing code to use
+ * symbolic indices instead of hard-coded [0] and [1].
+ *
+ * For multi-channel beyond stereo, the platform convention is that channels
+ * are interleaved in order from least significant channel mask bit
+ * to most significant channel mask bit, with unused bits skipped.
+ * Any exceptions to this convention will be noted at the appropriate API.
+ */
+enum {
+    AUDIO_INTERLEAVE_LEFT   = 0,
+    AUDIO_INTERLEAVE_RIGHT  = 1,
+};
+
+typedef enum {
+    AUDIO_MODE_INVALID          = -2,
+    AUDIO_MODE_CURRENT          = -1,
+    AUDIO_MODE_NORMAL           = 0,
+    AUDIO_MODE_RINGTONE         = 1,
+    AUDIO_MODE_IN_CALL          = 2,
+    AUDIO_MODE_IN_COMMUNICATION = 3,
+
+    AUDIO_MODE_CNT,
+    AUDIO_MODE_MAX              = AUDIO_MODE_CNT - 1,
+} audio_mode_t;
+
+/* This enum is deprecated */
+typedef enum {
+    AUDIO_IN_ACOUSTICS_NONE          = 0,
+    AUDIO_IN_ACOUSTICS_AGC_ENABLE    = 0x0001,
+    AUDIO_IN_ACOUSTICS_AGC_DISABLE   = 0,
+    AUDIO_IN_ACOUSTICS_NS_ENABLE     = 0x0002,
+    AUDIO_IN_ACOUSTICS_NS_DISABLE    = 0,
+    AUDIO_IN_ACOUSTICS_TX_IIR_ENABLE = 0x0004,
+    AUDIO_IN_ACOUSTICS_TX_DISABLE    = 0,
+} audio_in_acoustics_t;
+
+/* Audio device bitmask values.
+ * Each physical or virtual device is one bit; masks may be OR-ed together.
+ * Input devices are distinguished from output devices by AUDIO_DEVICE_BIT_IN
+ * being set.  AUDIO_DEVICE_BIT_DEFAULT is a reserved "default device" flag. */
+enum {
+    AUDIO_DEVICE_NONE                          = 0x0,
+    /* reserved bits */
+    AUDIO_DEVICE_BIT_IN                        = 0x80000000,
+    AUDIO_DEVICE_BIT_DEFAULT                   = 0x40000000,
+    /* output devices */
+    AUDIO_DEVICE_OUT_EARPIECE                  = 0x1,
+    AUDIO_DEVICE_OUT_SPEAKER                   = 0x2,
+    AUDIO_DEVICE_OUT_WIRED_HEADSET             = 0x4,
+    AUDIO_DEVICE_OUT_WIRED_HEADPHONE           = 0x8,
+    AUDIO_DEVICE_OUT_BLUETOOTH_SCO             = 0x10,
+    AUDIO_DEVICE_OUT_BLUETOOTH_SCO_HEADSET     = 0x20,
+    AUDIO_DEVICE_OUT_BLUETOOTH_SCO_CARKIT      = 0x40,
+    AUDIO_DEVICE_OUT_BLUETOOTH_A2DP            = 0x80,
+    AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES = 0x100,
+    AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER    = 0x200,
+    AUDIO_DEVICE_OUT_AUX_DIGITAL               = 0x400,
+    /* HDMI is an alias for the historical AUX_DIGITAL name (same bit) */
+    AUDIO_DEVICE_OUT_HDMI                      = AUDIO_DEVICE_OUT_AUX_DIGITAL,
+    /* uses an analog connection (multiplexed over the USB connector pins for instance) */
+    AUDIO_DEVICE_OUT_ANLG_DOCK_HEADSET         = 0x800,
+    AUDIO_DEVICE_OUT_DGTL_DOCK_HEADSET         = 0x1000,
+    /* USB accessory mode: your Android device is a USB device and the dock is a USB host */
+    AUDIO_DEVICE_OUT_USB_ACCESSORY             = 0x2000,
+    /* USB host mode: your Android device is a USB host and the dock is a USB device */
+    AUDIO_DEVICE_OUT_USB_DEVICE                = 0x4000,
+    AUDIO_DEVICE_OUT_REMOTE_SUBMIX             = 0x8000,
+    /* Telephony voice TX path */
+    AUDIO_DEVICE_OUT_TELEPHONY_TX              = 0x10000,
+    /* Analog jack with line impedance detected */
+    AUDIO_DEVICE_OUT_LINE                      = 0x20000,
+    /* HDMI Audio Return Channel */
+    AUDIO_DEVICE_OUT_HDMI_ARC                  = 0x40000,
+    /* S/PDIF out */
+    AUDIO_DEVICE_OUT_SPDIF                     = 0x80000,
+    /* FM transmitter out */
+    AUDIO_DEVICE_OUT_FM                        = 0x100000,
+    /* Line out for av devices */
+    AUDIO_DEVICE_OUT_AUX_LINE                  = 0x200000,
+    /* limited-output speaker device for acoustic safety */
+    AUDIO_DEVICE_OUT_SPEAKER_SAFE              = 0x400000,
+    AUDIO_DEVICE_OUT_IP                        = 0x800000,
+    AUDIO_DEVICE_OUT_DEFAULT                   = AUDIO_DEVICE_BIT_DEFAULT,
+    /* union of all individual output device bits, including the default bit */
+    AUDIO_DEVICE_OUT_ALL      = (AUDIO_DEVICE_OUT_EARPIECE |
+                                 AUDIO_DEVICE_OUT_SPEAKER |
+                                 AUDIO_DEVICE_OUT_WIRED_HEADSET |
+                                 AUDIO_DEVICE_OUT_WIRED_HEADPHONE |
+                                 AUDIO_DEVICE_OUT_BLUETOOTH_SCO |
+                                 AUDIO_DEVICE_OUT_BLUETOOTH_SCO_HEADSET |
+                                 AUDIO_DEVICE_OUT_BLUETOOTH_SCO_CARKIT |
+                                 AUDIO_DEVICE_OUT_BLUETOOTH_A2DP |
+                                 AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES |
+                                 AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER |
+                                 AUDIO_DEVICE_OUT_HDMI |
+                                 AUDIO_DEVICE_OUT_ANLG_DOCK_HEADSET |
+                                 AUDIO_DEVICE_OUT_DGTL_DOCK_HEADSET |
+                                 AUDIO_DEVICE_OUT_USB_ACCESSORY |
+                                 AUDIO_DEVICE_OUT_USB_DEVICE |
+                                 AUDIO_DEVICE_OUT_REMOTE_SUBMIX |
+                                 AUDIO_DEVICE_OUT_TELEPHONY_TX |
+                                 AUDIO_DEVICE_OUT_LINE |
+                                 AUDIO_DEVICE_OUT_HDMI_ARC |
+                                 AUDIO_DEVICE_OUT_SPDIF |
+                                 AUDIO_DEVICE_OUT_FM |
+                                 AUDIO_DEVICE_OUT_AUX_LINE |
+                                 AUDIO_DEVICE_OUT_SPEAKER_SAFE |
+                                 AUDIO_DEVICE_OUT_IP |
+                                 AUDIO_DEVICE_OUT_DEFAULT),
+    AUDIO_DEVICE_OUT_ALL_A2DP = (AUDIO_DEVICE_OUT_BLUETOOTH_A2DP |
+                                 AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES |
+                                 AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER),
+    AUDIO_DEVICE_OUT_ALL_SCO  = (AUDIO_DEVICE_OUT_BLUETOOTH_SCO |
+                                 AUDIO_DEVICE_OUT_BLUETOOTH_SCO_HEADSET |
+                                 AUDIO_DEVICE_OUT_BLUETOOTH_SCO_CARKIT),
+    AUDIO_DEVICE_OUT_ALL_USB  = (AUDIO_DEVICE_OUT_USB_ACCESSORY |
+                                 AUDIO_DEVICE_OUT_USB_DEVICE),
+    /* input devices: all include AUDIO_DEVICE_BIT_IN */
+    AUDIO_DEVICE_IN_COMMUNICATION         = AUDIO_DEVICE_BIT_IN | 0x1,
+    AUDIO_DEVICE_IN_AMBIENT               = AUDIO_DEVICE_BIT_IN | 0x2,
+    AUDIO_DEVICE_IN_BUILTIN_MIC           = AUDIO_DEVICE_BIT_IN | 0x4,
+    AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET = AUDIO_DEVICE_BIT_IN | 0x8,
+    AUDIO_DEVICE_IN_WIRED_HEADSET         = AUDIO_DEVICE_BIT_IN | 0x10,
+    AUDIO_DEVICE_IN_AUX_DIGITAL           = AUDIO_DEVICE_BIT_IN | 0x20,
+    AUDIO_DEVICE_IN_HDMI                  = AUDIO_DEVICE_IN_AUX_DIGITAL,
+    /* Telephony voice RX path */
+    AUDIO_DEVICE_IN_VOICE_CALL            = AUDIO_DEVICE_BIT_IN | 0x40,
+    AUDIO_DEVICE_IN_TELEPHONY_RX          = AUDIO_DEVICE_IN_VOICE_CALL,
+    AUDIO_DEVICE_IN_BACK_MIC              = AUDIO_DEVICE_BIT_IN | 0x80,
+    AUDIO_DEVICE_IN_REMOTE_SUBMIX         = AUDIO_DEVICE_BIT_IN | 0x100,
+    AUDIO_DEVICE_IN_ANLG_DOCK_HEADSET     = AUDIO_DEVICE_BIT_IN | 0x200,
+    AUDIO_DEVICE_IN_DGTL_DOCK_HEADSET     = AUDIO_DEVICE_BIT_IN | 0x400,
+    AUDIO_DEVICE_IN_USB_ACCESSORY         = AUDIO_DEVICE_BIT_IN | 0x800,
+    AUDIO_DEVICE_IN_USB_DEVICE            = AUDIO_DEVICE_BIT_IN | 0x1000,
+    /* FM tuner input */
+    AUDIO_DEVICE_IN_FM_TUNER              = AUDIO_DEVICE_BIT_IN | 0x2000,
+    /* TV tuner input */
+    AUDIO_DEVICE_IN_TV_TUNER              = AUDIO_DEVICE_BIT_IN | 0x4000,
+    /* Analog jack with line impedance detected */
+    AUDIO_DEVICE_IN_LINE                  = AUDIO_DEVICE_BIT_IN | 0x8000,
+    /* S/PDIF in */
+    AUDIO_DEVICE_IN_SPDIF                 = AUDIO_DEVICE_BIT_IN | 0x10000,
+    AUDIO_DEVICE_IN_BLUETOOTH_A2DP        = AUDIO_DEVICE_BIT_IN | 0x20000,
+    AUDIO_DEVICE_IN_LOOPBACK              = AUDIO_DEVICE_BIT_IN | 0x40000,
+    AUDIO_DEVICE_IN_IP                    = AUDIO_DEVICE_BIT_IN | 0x80000,
+    AUDIO_DEVICE_IN_DEFAULT               = AUDIO_DEVICE_BIT_IN | AUDIO_DEVICE_BIT_DEFAULT,
+
+    /* union of all individual input device bits, including the default bit */
+    AUDIO_DEVICE_IN_ALL     = (AUDIO_DEVICE_IN_COMMUNICATION |
+                               AUDIO_DEVICE_IN_AMBIENT |
+                               AUDIO_DEVICE_IN_BUILTIN_MIC |
+                               AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET |
+                               AUDIO_DEVICE_IN_WIRED_HEADSET |
+                               AUDIO_DEVICE_IN_HDMI |
+                               AUDIO_DEVICE_IN_TELEPHONY_RX |
+                               AUDIO_DEVICE_IN_BACK_MIC |
+                               AUDIO_DEVICE_IN_REMOTE_SUBMIX |
+                               AUDIO_DEVICE_IN_ANLG_DOCK_HEADSET |
+                               AUDIO_DEVICE_IN_DGTL_DOCK_HEADSET |
+                               AUDIO_DEVICE_IN_USB_ACCESSORY |
+                               AUDIO_DEVICE_IN_USB_DEVICE |
+                               AUDIO_DEVICE_IN_FM_TUNER |
+                               AUDIO_DEVICE_IN_TV_TUNER |
+                               AUDIO_DEVICE_IN_LINE |
+                               AUDIO_DEVICE_IN_SPDIF |
+                               AUDIO_DEVICE_IN_BLUETOOTH_A2DP |
+                               AUDIO_DEVICE_IN_LOOPBACK |
+                               AUDIO_DEVICE_IN_IP |
+                               AUDIO_DEVICE_IN_DEFAULT),
+    AUDIO_DEVICE_IN_ALL_SCO = AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET,
+    AUDIO_DEVICE_IN_ALL_USB  = (AUDIO_DEVICE_IN_USB_ACCESSORY |
+                                AUDIO_DEVICE_IN_USB_DEVICE),
+};
+
+/* A bitmask of the device enum values above. */
+typedef uint32_t audio_devices_t;
+
+/* the audio output flags serve two purposes:
+ * - when an AudioTrack is created they indicate a "wish" to be connected to an
+ * output stream with attributes corresponding to the specified flags
+ * - when present in an output profile descriptor listed for a particular audio
+ * hardware module, they indicate that an output stream can be opened that
+ * supports the attributes indicated by the flags.
+ * the audio policy manager will try to match the flags in the request
+ * (when getOutput() is called) to an available output stream.
+ */
+typedef enum {
+    AUDIO_OUTPUT_FLAG_NONE = 0x0,       // no attributes
+    AUDIO_OUTPUT_FLAG_DIRECT = 0x1,     // this output directly connects a track
+                                        // to one output stream: no software mixer
+    AUDIO_OUTPUT_FLAG_PRIMARY = 0x2,    // this output is the primary output of
+                                        // the device. It is unique and must be
+                                        // present. It is opened by default and
+                                        // receives routing, audio mode and volume
+                                        // controls related to voice calls.
+    AUDIO_OUTPUT_FLAG_FAST = 0x4,       // output supports "fast tracks",
+                                        // defined elsewhere
+    AUDIO_OUTPUT_FLAG_DEEP_BUFFER = 0x8, // use deep audio buffers
+    AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD = 0x10,  // offload playback of compressed
+                                                // streams to hardware codec
+    AUDIO_OUTPUT_FLAG_NON_BLOCKING = 0x20, // use non-blocking write
+    AUDIO_OUTPUT_FLAG_HW_AV_SYNC = 0x40,   // output uses a hardware A/V synchronization source
+    AUDIO_OUTPUT_FLAG_TTS = 0x80,          // output for streams transmitted through speaker
+                                           // at a sample rate high enough to accommodate
+                                           // lower-range ultrasonic playback
+    AUDIO_OUTPUT_FLAG_RAW = 0x100,         // minimize signal processing
+    AUDIO_OUTPUT_FLAG_SYNC = 0x200,        // synchronize I/O streams
+
+    AUDIO_OUTPUT_FLAG_IEC958_NONAUDIO = 0x400, // Audio stream contains compressed audio in
+                                               // SPDIF data bursts, not PCM.
+    AUDIO_OUTPUT_FLAG_VOIP_RX = 0x800,  // use this flag in combination with DIRECT to
+                                         // start voip over voice path.
+    AUDIO_OUTPUT_FLAG_COMPRESS_PASSTHROUGH = 0x1000, // flag for HDMI compressed passthrough
+    AUDIO_OUTPUT_FLAG_DIRECT_PCM = 0x2000, // flag for Direct PCM
+} audio_output_flags_t;
+
+/* The audio input flags are analogous to audio output flags.
+ * Currently they are used only when an AudioRecord is created,
+ * to indicate a preference to be connected to an input stream with
+ * attributes corresponding to the specified flags.
+ */
+typedef enum {
+    AUDIO_INPUT_FLAG_NONE       = 0x0,  // no attributes
+    AUDIO_INPUT_FLAG_FAST       = 0x1,  // prefer an input that supports "fast tracks"
+    AUDIO_INPUT_FLAG_HW_HOTWORD = 0x2,  // prefer an input that captures from hw hotword source
+    AUDIO_INPUT_FLAG_RAW        = 0x4,  // minimize signal processing
+    AUDIO_INPUT_FLAG_SYNC       = 0x8,  // synchronize I/O streams
+    AUDIO_INPUT_FLAG_TIMESTAMP  = 0x10, // timestamp metadata mode
+
+} audio_input_flags_t;
+
+/* Additional information about compressed streams offloaded to
+ * hardware playback
+ * The version and size fields must be initialized by the caller by using
+ * one of the constants defined here.
+ */
+typedef struct {
+    uint16_t version;                   // version of the info structure
+    uint16_t size;                      // total size of the structure including version and size
+    uint32_t sample_rate;               // sample rate in Hz
+    audio_channel_mask_t channel_mask;  // channel mask
+    audio_format_t format;              // audio format
+    audio_stream_type_t stream_type;    // stream type
+    uint32_t bit_rate;                  // bit rate in bits per second
+    int64_t duration_us;                // duration in microseconds, -1 if unknown
+    bool has_video;                     // true if stream is tied to a video stream
+    bool is_streaming;                  // true if streaming, false if local playback
+    uint32_t bit_width;                 // sample width in bits (e.g. 16) — presumably;
+                                        // TODO confirm against HAL users
+    uint32_t offload_buffer_size;       // offload fragment size
+    audio_usage_t usage;                // audio usage attribute of the stream
+} audio_offload_info_t;
+
+/* Encode a (major, minor) version pair into the 16-bit version field:
+ * major in the high byte, minor in the low byte. */
+#define AUDIO_MAKE_OFFLOAD_INFO_VERSION(maj,min) \
+            ((((maj) & 0xff) << 8) | ((min) & 0xff))
+
+#define AUDIO_OFFLOAD_INFO_VERSION_0_1 AUDIO_MAKE_OFFLOAD_INFO_VERSION(0, 1)
+#define AUDIO_OFFLOAD_INFO_VERSION_CURRENT AUDIO_OFFLOAD_INFO_VERSION_0_1
+
+/* Default initializer for audio_offload_info_t.
+ * Rewritten from the obsolete GCC "field:" initializer extension to standard
+ * C99 designated initializers (".field ="); values are unchanged. */
+static const audio_offload_info_t AUDIO_INFO_INITIALIZER = {
+    .version = AUDIO_OFFLOAD_INFO_VERSION_CURRENT,
+    .size = sizeof(audio_offload_info_t),
+    .sample_rate = 0,
+    .channel_mask = 0,
+    .format = AUDIO_FORMAT_DEFAULT,
+    .stream_type = AUDIO_STREAM_VOICE_CALL,
+    .bit_rate = 0,
+    .duration_us = 0,
+    .has_video = false,
+    .is_streaming = false,
+    .bit_width = 16,
+    .offload_buffer_size = 0,
+    .usage = AUDIO_USAGE_UNKNOWN,
+};
+
+/* common audio stream configuration parameters
+ * You should memset() the entire structure to zero before use to
+ * ensure forward compatibility
+ */
+struct audio_config {
+    uint32_t sample_rate;               /* sample rate in Hz */
+    audio_channel_mask_t channel_mask;  /* channel mask for the stream */
+    audio_format_t  format;             /* audio sample format */
+    audio_offload_info_t offload_info;  /* additional info for offloaded streams */
+    size_t frame_count;                 /* number of frames, 0 to use default */
+};
+typedef struct audio_config audio_config_t;
+
+/* Default initializer for audio_config_t.
+ * Rewritten from the obsolete GCC "field:" initializer extension to standard
+ * C99 designated initializers (".field ="); values are unchanged.
+ * NOTE(review): the embedded offload_info omits bit_width, offload_buffer_size
+ * and usage (they zero-initialize), which differs from AUDIO_INFO_INITIALIZER
+ * (bit_width = 16) — confirm whether these should be aligned. */
+static const audio_config_t AUDIO_CONFIG_INITIALIZER = {
+    .sample_rate = 0,
+    .channel_mask = AUDIO_CHANNEL_NONE,
+    .format = AUDIO_FORMAT_DEFAULT,
+    .offload_info = {
+        .version = AUDIO_OFFLOAD_INFO_VERSION_CURRENT,
+        .size = sizeof(audio_offload_info_t),
+        .sample_rate = 0,
+        .channel_mask = 0,
+        .format = AUDIO_FORMAT_DEFAULT,
+        .stream_type = AUDIO_STREAM_VOICE_CALL,
+        .bit_rate = 0,
+        .duration_us = 0,
+        .has_video = false,
+        .is_streaming = false
+    },
+    .frame_count = 0,
+};
+
+
+/* audio hw module handle functions or structures referencing a module */
+typedef int audio_module_handle_t;
+
+/******************************
+ *  Volume control
+ *****************************/
+
+/* If the audio hardware supports gain control on some audio paths,
+ * the platform can expose them in the audio_policy.conf file. The audio HAL
+ * will then implement gain control functions that will use the following data
+ * structures. */
+
+/* Type of gain control exposed by an audio port (bit flags, may be OR-ed) */
+#define AUDIO_GAIN_MODE_JOINT     0x1 /* supports joint channel gain control */
+#define AUDIO_GAIN_MODE_CHANNELS  0x2 /* supports separate channel gain control */
+#define AUDIO_GAIN_MODE_RAMP      0x4 /* supports gain ramps */
+
+/* bitmask of the AUDIO_GAIN_MODE_* flags above */
+typedef uint32_t audio_gain_mode_t;
+
+
+/* An audio_gain struct is a representation of a gain stage.
+ * A gain stage is always attached to an audio port. */
+struct audio_gain  {
+    audio_gain_mode_t    mode;          /* e.g. AUDIO_GAIN_MODE_JOINT */
+    audio_channel_mask_t channel_mask;  /* channels which gain can be controlled.
+                                           N/A if AUDIO_GAIN_MODE_CHANNELS is not supported */
+    int                  min_value;     /* minimum gain value in millibels */
+    int                  max_value;     /* maximum gain value in millibels */
+    int                  default_value; /* default gain value in millibels */
+    unsigned int         step_value;    /* gain step in millibels */
+    unsigned int         min_ramp_ms;   /* minimum ramp duration in ms */
+    unsigned int         max_ramp_ms;   /* maximum ramp duration in ms */
+};
+
+/* The gain configuration structure is used to get or set the gain values of a
+ * given port */
+struct audio_gain_config  {
+    int                  index;             /* index of the corresponding audio_gain in the
+                                               audio_port gains[] table */
+    audio_gain_mode_t    mode;              /* mode requested for this command */
+    audio_channel_mask_t channel_mask;      /* channels which gain value follows.
+                                               N/A in joint mode */
+
+    // note this "8" is not FCC_8, so it won't need to be changed for > 8 channels;
+    // the array is sized to one value per bit of the channel mask type
+    int                  values[sizeof(audio_channel_mask_t) * 8]; /* gain values in millibels
+                                               for each channel ordered from LSb to MSb in
+                                               channel mask. The number of values is 1 in joint
+                                               mode or popcount(channel_mask) */
+    unsigned int         ramp_duration_ms; /* ramp duration in ms */
+};
+
+/******************************
+ *  Routing control
+ *****************************/
+
+/* Types defined here are used to describe an audio source or sink at internal
+ * framework interfaces (audio policy, patch panel) or at the audio HAL.
+ * Sink and sources are grouped in a concept of "audio port" representing an
+ * audio end point at the edge of the system managed by the module exposing
+ * the interface. */
+
+/* Audio port role: either source or sink */
+typedef enum {
+    AUDIO_PORT_ROLE_NONE,    /* role not yet assigned */
+    AUDIO_PORT_ROLE_SOURCE,  /* port produces audio */
+    AUDIO_PORT_ROLE_SINK,    /* port consumes audio */
+} audio_port_role_t;
+
+/* Audio port type indicates if it is a session (e.g AudioTrack),
+ * a mix (e.g PlaybackThread output) or a physical device
+ * (e.g AUDIO_DEVICE_OUT_SPEAKER) */
+typedef enum {
+    AUDIO_PORT_TYPE_NONE,     /* type not yet assigned */
+    AUDIO_PORT_TYPE_DEVICE,   /* physical device, see audio_port_device_ext */
+    AUDIO_PORT_TYPE_MIX,      /* sub mix, see audio_port_mix_ext */
+    AUDIO_PORT_TYPE_SESSION,  /* audio session, see audio_port_session_ext */
+} audio_port_type_t;
+
+/* Each port has a unique ID or handle allocated by policy manager */
+typedef int audio_port_handle_t;
+/* sentinel value meaning "no port" */
+#define AUDIO_PORT_HANDLE_NONE 0
+
+/* the maximum length for the human-readable device name */
+#define AUDIO_PORT_MAX_NAME_LEN 128
+
+/* maximum audio device address length */
+#define AUDIO_DEVICE_MAX_ADDRESS_LEN 32
+
+/* extension for audio port configuration structure when the audio port is a
+ * hardware device */
+struct audio_port_config_device_ext {
+    audio_module_handle_t hw_module;                /* module the device is attached to */
+    audio_devices_t       type;                     /* device type (e.g AUDIO_DEVICE_OUT_SPEAKER) */
+    char                  address[AUDIO_DEVICE_MAX_ADDRESS_LEN]; /* device address. "" if N/A */
+};
+
+/* extension for audio port configuration structure when the audio port is a
+ * sub mix */
+struct audio_port_config_mix_ext {
+    audio_module_handle_t hw_module;    /* module the stream is attached to */
+    audio_io_handle_t handle;           /* I/O handle of the input/output stream */
+    union {
+        //TODO: change use case for output streams: use strategy and mixer attributes
+        audio_stream_type_t stream;     /* valid for output mixes */
+        audio_source_t      source;     /* valid for input mixes */
+    } usecase;
+};
+
+/* extension for audio port configuration structure when the audio port is an
+ * audio session */
+struct audio_port_config_session_ext {
+    audio_session_t   session; /* audio session */
+};
+
+/* Flags indicating which fields are to be considered in struct audio_port_config;
+ * combined into audio_port_config.config_mask */
+#define AUDIO_PORT_CONFIG_SAMPLE_RATE  0x1
+#define AUDIO_PORT_CONFIG_CHANNEL_MASK 0x2
+#define AUDIO_PORT_CONFIG_FORMAT       0x4
+#define AUDIO_PORT_CONFIG_GAIN         0x8
+#define AUDIO_PORT_CONFIG_ALL (AUDIO_PORT_CONFIG_SAMPLE_RATE | \
+                               AUDIO_PORT_CONFIG_CHANNEL_MASK | \
+                               AUDIO_PORT_CONFIG_FORMAT | \
+                               AUDIO_PORT_CONFIG_GAIN)
+
+/* audio port configuration structure used to specify a particular configuration of
+ * an audio port */
+struct audio_port_config {
+    audio_port_handle_t      id;           /* port unique ID */
+    audio_port_role_t        role;         /* sink or source */
+    audio_port_type_t        type;         /* device, mix ... selects the ext member below */
+    unsigned int             config_mask;  /* e.g AUDIO_PORT_CONFIG_ALL */
+    unsigned int             sample_rate;  /* sampling rate in Hz */
+    audio_channel_mask_t     channel_mask; /* channel mask if applicable */
+    audio_format_t           format;       /* format if applicable */
+    struct audio_gain_config gain;         /* gain to apply if applicable */
+    union {
+        struct audio_port_config_device_ext  device;  /* device specific info */
+        struct audio_port_config_mix_ext     mix;     /* mix specific info */
+        struct audio_port_config_session_ext session; /* session specific info */
+    } ext;
+};
+
+
+/* max number of sampling rates in audio port */
+#define AUDIO_PORT_MAX_SAMPLING_RATES 16
+/* max number of channel masks in audio port */
+#define AUDIO_PORT_MAX_CHANNEL_MASKS 16
+/* max number of audio formats in audio port */
+#define AUDIO_PORT_MAX_FORMATS 16
+/* max number of gain controls in audio port */
+#define AUDIO_PORT_MAX_GAINS 16
+
+/* extension for audio port structure when the audio port is a hardware device */
+struct audio_port_device_ext {
+    audio_module_handle_t hw_module;    /* module the device is attached to */
+    audio_devices_t       type;         /* device type (e.g AUDIO_DEVICE_OUT_SPEAKER) */
+    char                  address[AUDIO_DEVICE_MAX_ADDRESS_LEN]; /* device address, "" if N/A */
+};
+
+/* Latency class of the audio mix */
+typedef enum {
+    AUDIO_LATENCY_LOW,     /* low-latency ("fast") mix */
+    AUDIO_LATENCY_NORMAL,  /* normal-latency mix */
+} audio_mix_latency_class_t;
+
+/* extension for audio port structure when the audio port is a sub mix */
+struct audio_port_mix_ext {
+    audio_module_handle_t     hw_module;     /* module the stream is attached to */
+    audio_io_handle_t         handle;        /* I/O handle of the input/output stream */
+    audio_mix_latency_class_t latency_class; /* latency class */
+    // other attributes: routing strategies
+};
+
+/* extension for audio port structure when the audio port is an audio session */
+struct audio_port_session_ext {
+    audio_session_t   session; /* audio session */
+};
+
+/* Full description of an audio port: identity, supported configurations
+ * (sample rates, channel masks, formats, gains), the currently active
+ * configuration, and type-specific extension data. */
+struct audio_port {
+    audio_port_handle_t      id;                /* port unique ID */
+    audio_port_role_t        role;              /* sink or source */
+    audio_port_type_t        type;              /* device, mix ... selects the ext member */
+    char                     name[AUDIO_PORT_MAX_NAME_LEN]; /* human-readable name */
+    unsigned int             num_sample_rates;  /* number of sampling rates in following array */
+    unsigned int             sample_rates[AUDIO_PORT_MAX_SAMPLING_RATES],
+    unsigned int             num_channel_masks; /* number of channel masks in following array */
+    audio_channel_mask_t     channel_masks[AUDIO_PORT_MAX_CHANNEL_MASKS];
+    unsigned int             num_formats;       /* number of formats in following array */
+    audio_format_t           formats[AUDIO_PORT_MAX_FORMATS];
+    unsigned int             num_gains;         /* number of gains in following array */
+    struct audio_gain        gains[AUDIO_PORT_MAX_GAINS];
+    struct audio_port_config active_config;     /* current audio port configuration */
+    union {
+        struct audio_port_device_ext  device;   /* when type == AUDIO_PORT_TYPE_DEVICE */
+        struct audio_port_mix_ext     mix;      /* when type == AUDIO_PORT_TYPE_MIX */
+        struct audio_port_session_ext session;  /* when type == AUDIO_PORT_TYPE_SESSION */
+    } ext;
+};
+
+/* An audio patch represents a connection between one or more source ports and
+ * one or more sink ports. Patches are connected and disconnected by audio policy manager or by
+ * applications via framework APIs.
+ * Each patch is identified by a handle at the interface used to create that patch. For instance,
+ * when a patch is created by the audio HAL, the HAL allocates and returns a handle.
+ * This handle is unique to a given audio HAL hardware module.
+ * But the same patch receives another system wide unique handle allocated by the framework.
+ * This unique handle is used for all transactions inside the framework.
+ */
+typedef int audio_patch_handle_t;
+/* sentinel value meaning "no patch" */
+#define AUDIO_PATCH_HANDLE_NONE 0
+
+/* maximum number of sources and of sinks per patch */
+#define AUDIO_PATCH_PORTS_MAX   16
+
+/* A routing patch: a set of source port configurations connected to a set of
+ * sink port configurations. */
+struct audio_patch {
+    audio_patch_handle_t id;            /* patch unique ID */
+    unsigned int      num_sources;      /* number of sources in following array */
+    struct audio_port_config sources[AUDIO_PATCH_PORTS_MAX];
+    unsigned int      num_sinks;        /* number of sinks in following array */
+    struct audio_port_config sinks[AUDIO_PATCH_PORTS_MAX];
+};
+
+
+
+/* a HW synchronization source returned by the audio HAL */
+typedef uint32_t audio_hw_sync_t;
+
+/* an invalid HW synchronization source indicating an error */
+#define AUDIO_HW_SYNC_INVALID 0
+
+/* True when exactly one bit is set and that bit is a known output device
+ * (or the default bit): no input flag, single bit, within OUT_ALL. */
+static inline bool audio_is_output_device(audio_devices_t device)
+{
+    return !(device & AUDIO_DEVICE_BIT_IN)
+            && popcount(device) == 1
+            && !(device & ~AUDIO_DEVICE_OUT_ALL);
+}
+
+/* True when the input flag is set and, after stripping it, exactly one
+ * known input device bit remains. */
+static inline bool audio_is_input_device(audio_devices_t device)
+{
+    if (!(device & AUDIO_DEVICE_BIT_IN))
+        return false;
+    const audio_devices_t bits = device & ~AUDIO_DEVICE_BIT_IN;
+    return popcount(bits) == 1 && !(bits & ~AUDIO_DEVICE_IN_ALL);
+}
+
+/* True when the mask contains no input flag, i.e. it describes output
+ * device(s).  Accepts multi-bit masks, unlike audio_is_output_device(). */
+static inline bool audio_is_output_devices(audio_devices_t device)
+{
+    return !(device & AUDIO_DEVICE_BIT_IN);
+}
+
+/* True when the mask denotes exactly the Bluetooth A2DP input device. */
+static inline bool audio_is_a2dp_in_device(audio_devices_t device)
+{
+    if (!(device & AUDIO_DEVICE_BIT_IN))
+        return false;
+    device &= ~AUDIO_DEVICE_BIT_IN;
+    return popcount(device) == 1 && (device & AUDIO_DEVICE_IN_BLUETOOTH_A2DP) != 0;
+}
+
+/* True when the mask denotes exactly one Bluetooth A2DP output device. */
+static inline bool audio_is_a2dp_out_device(audio_devices_t device)
+{
+    return popcount(device) == 1 && (device & AUDIO_DEVICE_OUT_ALL_A2DP) != 0;
+}
+
+// Deprecated - use audio_is_a2dp_out_device() instead.
+// Kept for source compatibility; simply forwards to the new name.
+static inline bool audio_is_a2dp_device(audio_devices_t device)
+{
+    return audio_is_a2dp_out_device(device);
+}
+
+/* True when the mask denotes exactly one Bluetooth SCO device,
+ * on either the output side (any OUT SCO variant) or the input side
+ * (SCO headset only). */
+static inline bool audio_is_bluetooth_sco_device(audio_devices_t device)
+{
+    if (device & AUDIO_DEVICE_BIT_IN) {
+        device &= ~AUDIO_DEVICE_BIT_IN;
+        return popcount(device) == 1
+                && !(device & ~AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET);
+    }
+    return popcount(device) == 1 && !(device & ~AUDIO_DEVICE_OUT_ALL_SCO);
+}
+
+/* True when the mask denotes exactly one USB output device. */
+static inline bool audio_is_usb_out_device(audio_devices_t device)
+{
+    return popcount(device) == 1 && (device & AUDIO_DEVICE_OUT_ALL_USB) != 0;
+}
+
+/* True when the mask denotes exactly one USB input device. */
+static inline bool audio_is_usb_in_device(audio_devices_t device)
+{
+    if (!(device & AUDIO_DEVICE_BIT_IN))
+        return false;
+    device &= ~AUDIO_DEVICE_BIT_IN;
+    return popcount(device) == 1 && (device & AUDIO_DEVICE_IN_ALL_USB) != 0;
+}
+
+/* OBSOLETE - use audio_is_usb_out_device() instead.
+ * Kept for source compatibility; simply forwards to the new name. */
+static inline bool audio_is_usb_device(audio_devices_t device)
+{
+    return audio_is_usb_out_device(device);
+}
+
+/* True when the mask contains the remote submix bit for its direction:
+ * OUT_REMOTE_SUBMIX for output masks, IN_REMOTE_SUBMIX for input masks. */
+static inline bool audio_is_remote_submix_device(audio_devices_t device)
+{
+    if (audio_is_output_devices(device)) {
+        return (device & AUDIO_DEVICE_OUT_REMOTE_SUBMIX) == AUDIO_DEVICE_OUT_REMOTE_SUBMIX;
+    }
+    return (device & AUDIO_DEVICE_IN_REMOTE_SUBMIX) == AUDIO_DEVICE_IN_REMOTE_SUBMIX;
+}
+
+/* Returns true if:
+ *  representation is valid, and
+ *  there is at least one channel bit set which _could_ correspond to an input channel, and
+ *  there are no channel bits set which could _not_ correspond to an input channel.
+ * Otherwise returns false.
+ */
+static inline bool audio_is_input_channel(audio_channel_mask_t channel)
+{
+    const uint32_t bits = audio_channel_mask_get_bits(channel);
+    switch (audio_channel_mask_get_representation(channel)) {
+    case AUDIO_CHANNEL_REPRESENTATION_POSITION:
+        /* must be non-empty and contain only valid input position bits */
+        return bits != 0 && (bits & ~AUDIO_CHANNEL_IN_ALL) == 0;
+    case AUDIO_CHANNEL_REPRESENTATION_INDEX:
+        /* any non-empty index mask is acceptable */
+        return bits != 0;
+    default:
+        return false;
+    }
+}
+
+/* Returns true if:
+ *  representation is valid, and
+ *  there is at least one channel bit set which _could_ correspond to an output channel, and
+ *  there are no channel bits set which could _not_ correspond to an output channel.
+ * Otherwise returns false.
+ */
+static inline bool audio_is_output_channel(audio_channel_mask_t channel)
+{
+    const uint32_t bits = audio_channel_mask_get_bits(channel);
+    switch (audio_channel_mask_get_representation(channel)) {
+    case AUDIO_CHANNEL_REPRESENTATION_POSITION:
+        /* must be non-empty and contain only valid output position bits */
+        return bits != 0 && (bits & ~AUDIO_CHANNEL_OUT_ALL) == 0;
+    case AUDIO_CHANNEL_REPRESENTATION_INDEX:
+        /* any non-empty index mask is acceptable */
+        return bits != 0;
+    default:
+        return false;
+    }
+}
+
+/* Returns the number of channels from an input channel mask,
+ * used in the context of audio input or recording.
+ * If a channel bit is set which could _not_ correspond to an input channel,
+ * it is excluded from the count.
+ * Returns zero if the representation is invalid.
+ */
+static inline uint32_t audio_channel_count_from_in_mask(audio_channel_mask_t channel)
+{
+    const uint32_t bits = audio_channel_mask_get_bits(channel);
+    switch (audio_channel_mask_get_representation(channel)) {
+    case AUDIO_CHANNEL_REPRESENTATION_POSITION:
+        // TODO: We can now merge with from_out_mask and remove anding
+        return popcount(bits & AUDIO_CHANNEL_IN_ALL);
+    case AUDIO_CHANNEL_REPRESENTATION_INDEX:
+        return popcount(bits);
+    default:
+        return 0;
+    }
+}
+
+/* Returns the number of channels from an output channel mask,
+ * used in the context of audio output or playback.
+ * If a channel bit is set which could _not_ correspond to an output channel,
+ * it is excluded from the count.
+ * Returns zero if the representation is invalid.
+ */
+static inline uint32_t audio_channel_count_from_out_mask(audio_channel_mask_t channel)
+{
+    const uint32_t bits = audio_channel_mask_get_bits(channel);
+    switch (audio_channel_mask_get_representation(channel)) {
+    case AUDIO_CHANNEL_REPRESENTATION_POSITION:
+        // TODO: We can now merge with from_in_mask and remove anding
+        return popcount(bits & AUDIO_CHANNEL_OUT_ALL);
+    case AUDIO_CHANNEL_REPRESENTATION_INDEX:
+        return popcount(bits);
+    default:
+        return 0;
+    }
+}
+
+/* Derive a channel mask for index assignment from a channel count.
+ * Returns the matching channel mask,
+ * or AUDIO_CHANNEL_NONE if the channel count is zero,
+ * or AUDIO_CHANNEL_INVALID if the channel count exceeds AUDIO_CHANNEL_COUNT_MAX.
+ */
+static inline audio_channel_mask_t audio_channel_mask_for_index_assignment_from_count(
+        uint32_t channel_count)
+{
+    if (channel_count == 0)
+        return AUDIO_CHANNEL_NONE;
+    if (channel_count > AUDIO_CHANNEL_COUNT_MAX)
+        return AUDIO_CHANNEL_INVALID;
+    /* set the low channel_count bits */
+    const uint32_t index_bits = (1 << channel_count) - 1;
+    return audio_channel_mask_from_representation_and_bits(
+            AUDIO_CHANNEL_REPRESENTATION_INDEX, index_bits);
+}
+
+/* Derive an output channel mask for position assignment from a channel count.
+ * This is to be used when the content channel mask is unknown. The 1, 2, 4, 5, 6, 7 and 8 channel
+ * cases are mapped to the standard game/home-theater layouts, but note that 4 is mapped to quad,
+ * and not stereo + FC + mono surround. A channel count of 3 is arbitrarily mapped to stereo + FC
+ * for continuity with stereo.
+ * Returns the matching channel mask,
+ * or AUDIO_CHANNEL_NONE if the channel count is zero,
+ * or AUDIO_CHANNEL_INVALID if the channel count exceeds that of the
+ * configurations for which a default output channel mask is defined.
+ */
+static inline audio_channel_mask_t audio_channel_out_mask_from_count(uint32_t channel_count)
+{
+    uint32_t bits;
+    if (channel_count == 0) {
+        return AUDIO_CHANNEL_NONE;
+    } else if (channel_count == 1) {
+        bits = AUDIO_CHANNEL_OUT_MONO;
+    } else if (channel_count == 2) {
+        bits = AUDIO_CHANNEL_OUT_STEREO;
+    } else if (channel_count == 3) {
+        bits = AUDIO_CHANNEL_OUT_STEREO | AUDIO_CHANNEL_OUT_FRONT_CENTER;
+    } else if (channel_count == 4) {        // 4.0
+        bits = AUDIO_CHANNEL_OUT_QUAD;
+    } else if (channel_count == 5) {        // 5.0
+        bits = AUDIO_CHANNEL_OUT_QUAD | AUDIO_CHANNEL_OUT_FRONT_CENTER;
+    } else if (channel_count == 6) {        // 5.1
+        bits = AUDIO_CHANNEL_OUT_5POINT1;
+    } else if (channel_count == 7) {        // 6.1
+        bits = AUDIO_CHANNEL_OUT_5POINT1 | AUDIO_CHANNEL_OUT_BACK_CENTER;
+    } else if (channel_count == 8) {        // 7.1
+        bits = AUDIO_CHANNEL_OUT_7POINT1;
+    } else {
+        // FIXME FCC_8
+        return AUDIO_CHANNEL_INVALID;
+    }
+    return audio_channel_mask_from_representation_and_bits(
+            AUDIO_CHANNEL_REPRESENTATION_POSITION, bits);
+}
+
+/* Derive a default input channel mask from a channel count.
+ * Assumes a position mask for mono and stereo, or an index mask for channel counts > 2.
+ * Returns the matching channel mask,
+ * or AUDIO_CHANNEL_NONE if the channel count is zero,
+ * or AUDIO_CHANNEL_INVALID if the channel count exceeds that of the
+ * configurations for which a default input channel mask is defined.
+ */
+static inline audio_channel_mask_t audio_channel_in_mask_from_count(uint32_t channel_count)
+{
+    uint32_t bits;
+    switch (channel_count) {
+    case 0:
+        return AUDIO_CHANNEL_NONE;
+    case 1:
+        bits = AUDIO_CHANNEL_IN_MONO;
+        break;
+    case 2:
+        bits = AUDIO_CHANNEL_IN_STEREO;
+        break;
+    case 3:
+    case 4:
+    case 5:
+    case 6:
+    case 7:
+    case 8:
+        // FIXME FCC_8
+        return audio_channel_mask_for_index_assignment_from_count(channel_count);
+    default:
+        return AUDIO_CHANNEL_INVALID;
+    }
+    return audio_channel_mask_from_representation_and_bits(
+            AUDIO_CHANNEL_REPRESENTATION_POSITION, bits);
+}
+
/* Check whether a format is one of the recognized audio formats.
 * For the PCM and PCM_OFFLOAD families, only the specific sub-formats
 * listed below are valid; for the other families the main format value
 * alone is sufficient.
 */
static inline bool audio_is_valid_format(audio_format_t format)
{
    switch (format & AUDIO_FORMAT_MAIN_MASK) {
    case AUDIO_FORMAT_PCM:
        /* PCM is only valid for the known sample layouts. */
        switch (format) {
        case AUDIO_FORMAT_PCM_16_BIT:
        case AUDIO_FORMAT_PCM_8_BIT:
        case AUDIO_FORMAT_PCM_32_BIT:
        case AUDIO_FORMAT_PCM_8_24_BIT:
        case AUDIO_FORMAT_PCM_FLOAT:
        case AUDIO_FORMAT_PCM_24_BIT_PACKED:
            return true;
        default:
            return false;
        }
        /* not reached */
    /* Compressed/encoded formats are valid by main format value alone. */
    case AUDIO_FORMAT_MP3:
    case AUDIO_FORMAT_AMR_NB:
    case AUDIO_FORMAT_AMR_WB:
    case AUDIO_FORMAT_AAC:
    case AUDIO_FORMAT_AAC_ADTS:
    case AUDIO_FORMAT_HE_AAC_V1:
    case AUDIO_FORMAT_HE_AAC_V2:
    case AUDIO_FORMAT_VORBIS:
    case AUDIO_FORMAT_OPUS:
    case AUDIO_FORMAT_AC3:
    case AUDIO_FORMAT_E_AC3:
    case AUDIO_FORMAT_DTS:
    case AUDIO_FORMAT_DTS_HD:
    case AUDIO_FORMAT_QCELP:
    case AUDIO_FORMAT_EVRC:
    case AUDIO_FORMAT_EVRCB:
    case AUDIO_FORMAT_EVRCWB:
    case AUDIO_FORMAT_AAC_ADIF:
    case AUDIO_FORMAT_AMR_WB_PLUS:
    case AUDIO_FORMAT_MP2:
    case AUDIO_FORMAT_EVRCNW:
    case AUDIO_FORMAT_FLAC:
    case AUDIO_FORMAT_ALAC:
    case AUDIO_FORMAT_APE:
    case AUDIO_FORMAT_WMA:
    case AUDIO_FORMAT_WMA_PRO:
        return true;
    case AUDIO_FORMAT_PCM_OFFLOAD:
        /* Only the 16-bit and 24-bit offload variants are recognized. */
        if (format != AUDIO_FORMAT_PCM_16_BIT_OFFLOAD &&
                format != AUDIO_FORMAT_PCM_24_BIT_OFFLOAD) {
            return false;
        }
        return true;
    default:
        return false;
    }
}
+
+static inline bool audio_is_linear_pcm(audio_format_t format)
+{
+    return ((format & AUDIO_FORMAT_MAIN_MASK) == AUDIO_FORMAT_PCM);
+}
+
+static inline size_t audio_bytes_per_sample(audio_format_t format)
+{
+    size_t size = 0;
+
+    switch (format) {
+    case AUDIO_FORMAT_PCM_32_BIT:
+    case AUDIO_FORMAT_PCM_8_24_BIT:
+        size = sizeof(int32_t);
+        break;
+    case AUDIO_FORMAT_PCM_24_BIT_PACKED:
+        size = sizeof(uint8_t) * 3;
+        break;
+    case AUDIO_FORMAT_PCM_16_BIT:
+        size = sizeof(int16_t);
+        break;
+    case AUDIO_FORMAT_PCM_8_BIT:
+        size = sizeof(uint8_t);
+        break;
+    case AUDIO_FORMAT_PCM_FLOAT:
+        size = sizeof(float);
+        break;
+    default:
+        break;
+    }
+    return size;
+}
+
+/* converts device address to string sent to audio HAL via set_parameters */
+static inline char *audio_device_address_to_parameter(audio_devices_t device, const char *address)
+{
+    const size_t kSize = AUDIO_DEVICE_MAX_ADDRESS_LEN + sizeof("a2dp_sink_address=");
+    char param[kSize];
+
+    if (device & AUDIO_DEVICE_OUT_ALL_A2DP)
+        snprintf(param, kSize, "%s=%s", "a2dp_sink_address", address);
+    else if (device & AUDIO_DEVICE_OUT_REMOTE_SUBMIX)
+        snprintf(param, kSize, "%s=%s", "mix", address);
+    else
+        snprintf(param, kSize, "%s", address);
+
+    return strdup(param);
+}
+
+static inline bool audio_device_is_digital(audio_devices_t device) {
+    if ((device & AUDIO_DEVICE_BIT_IN) != 0) {
+        // input
+        return (~AUDIO_DEVICE_BIT_IN & device & (AUDIO_DEVICE_IN_ALL_USB |
+                          AUDIO_DEVICE_IN_HDMI |
+                          AUDIO_DEVICE_IN_SPDIF |
+                          AUDIO_DEVICE_IN_IP)) != 0;
+    } else {
+        // output
+        return (device & (AUDIO_DEVICE_OUT_ALL_USB |
+                          AUDIO_DEVICE_OUT_HDMI |
+                          AUDIO_DEVICE_OUT_HDMI_ARC |
+                          AUDIO_DEVICE_OUT_SPDIF |
+                          AUDIO_DEVICE_OUT_IP)) != 0;
+    }
+}
+
+__END_DECLS
+
+#endif  // ANDROID_AUDIO_CORE_H
diff --git a/media/audio/include/system/audio_policy.h b/media/audio/include/system/audio_policy.h
new file mode 100644
index 0000000..2881104
--- /dev/null
+++ b/media/audio/include/system/audio_policy.h
@@ -0,0 +1,103 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+#ifndef ANDROID_AUDIO_POLICY_CORE_H
+#define ANDROID_AUDIO_POLICY_CORE_H
+
+#include <stdint.h>
+#include <sys/cdefs.h>
+#include <sys/types.h>
+
+#include <cutils/bitops.h>
+
+__BEGIN_DECLS
+
+/* The enums were moved here mostly from
+ * frameworks/base/include/media/AudioSystem.h
+ */
+
+/* device categories used for audio_policy->set_force_use() */
+typedef enum {
+    AUDIO_POLICY_FORCE_NONE,
+    AUDIO_POLICY_FORCE_SPEAKER,
+    AUDIO_POLICY_FORCE_HEADPHONES,
+    AUDIO_POLICY_FORCE_BT_SCO,
+    AUDIO_POLICY_FORCE_BT_A2DP,
+    AUDIO_POLICY_FORCE_WIRED_ACCESSORY,
+    AUDIO_POLICY_FORCE_BT_CAR_DOCK,
+    AUDIO_POLICY_FORCE_BT_DESK_DOCK,
+    AUDIO_POLICY_FORCE_ANALOG_DOCK,
+    AUDIO_POLICY_FORCE_DIGITAL_DOCK,
+    AUDIO_POLICY_FORCE_NO_BT_A2DP, /* A2DP sink is not preferred to speaker or wired HS */
+    AUDIO_POLICY_FORCE_SYSTEM_ENFORCED,
+    AUDIO_POLICY_FORCE_HDMI_SYSTEM_AUDIO_ENFORCED,
+
+    AUDIO_POLICY_FORCE_CFG_CNT,
+    AUDIO_POLICY_FORCE_CFG_MAX = AUDIO_POLICY_FORCE_CFG_CNT - 1,
+
+    AUDIO_POLICY_FORCE_DEFAULT = AUDIO_POLICY_FORCE_NONE,
+} audio_policy_forced_cfg_t;
+
+/* usages used for audio_policy->set_force_use() */
+typedef enum {
+    AUDIO_POLICY_FORCE_FOR_COMMUNICATION,
+    AUDIO_POLICY_FORCE_FOR_MEDIA,
+    AUDIO_POLICY_FORCE_FOR_RECORD,
+    AUDIO_POLICY_FORCE_FOR_DOCK,
+    AUDIO_POLICY_FORCE_FOR_SYSTEM,
+    AUDIO_POLICY_FORCE_FOR_HDMI_SYSTEM_AUDIO,
+
+    AUDIO_POLICY_FORCE_USE_CNT,
+    AUDIO_POLICY_FORCE_USE_MAX = AUDIO_POLICY_FORCE_USE_CNT - 1,
+} audio_policy_force_use_t;
+
+/* device connection states used for audio_policy->set_device_connection_state()
+ */
+typedef enum {
+    AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
+    AUDIO_POLICY_DEVICE_STATE_AVAILABLE,
+
+    AUDIO_POLICY_DEVICE_STATE_CNT,
+    AUDIO_POLICY_DEVICE_STATE_MAX = AUDIO_POLICY_DEVICE_STATE_CNT - 1,
+} audio_policy_dev_state_t;
+
+typedef enum {
+    /* Used to generate a tone to notify the user of a
+     * notification/alarm/ringtone while they are in a call. */
+    AUDIO_POLICY_TONE_IN_CALL_NOTIFICATION = 0,
+
+    AUDIO_POLICY_TONE_CNT,
+    AUDIO_POLICY_TONE_MAX                  = AUDIO_POLICY_TONE_CNT - 1,
+} audio_policy_tone_t;
+
+
+static inline bool audio_is_low_visibility(audio_stream_type_t stream)
+{
+    switch (stream) {
+    case AUDIO_STREAM_SYSTEM:
+    case AUDIO_STREAM_NOTIFICATION:
+    case AUDIO_STREAM_RING:
+        return true;
+    default:
+        return false;
+    }
+}
+
+
+__END_DECLS
+
+#endif  // ANDROID_AUDIO_POLICY_CORE_H
diff --git a/media/audio/include/system/sound_trigger.h b/media/audio/include/system/sound_trigger.h
new file mode 100644
index 0000000..c44953f
--- /dev/null
+++ b/media/audio/include/system/sound_trigger.h
@@ -0,0 +1,225 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_SOUND_TRIGGER_H
+#define ANDROID_SOUND_TRIGGER_H
+
+#include <stdbool.h>
+#include <system/audio.h>
+
+#define SOUND_TRIGGER_MAX_STRING_LEN 64 /* max length of strings in properties or
+                                           descriptor structs */
+#define SOUND_TRIGGER_MAX_LOCALE_LEN 6  /* max length of locale string. e.g en_US */
+#define SOUND_TRIGGER_MAX_USERS 10      /* max number of concurrent users */
+#define SOUND_TRIGGER_MAX_PHRASES 10    /* max number of concurrent phrases */
+
+typedef enum {
+    SOUND_TRIGGER_STATE_NO_INIT = -1,   /* The sound trigger service is not initialized */
+    SOUND_TRIGGER_STATE_ENABLED = 0,    /* The sound trigger service is enabled */
+    SOUND_TRIGGER_STATE_DISABLED = 1    /* The sound trigger service is disabled */
+} sound_trigger_service_state_t;
+
+#define RECOGNITION_MODE_VOICE_TRIGGER 0x1       /* simple voice trigger */
+#define RECOGNITION_MODE_USER_IDENTIFICATION 0x2 /* trigger only if one user in model identified */
+#define RECOGNITION_MODE_USER_AUTHENTICATION 0x4 /* trigger only if one user in model is
+                                                    authenticated */
+#define RECOGNITION_STATUS_SUCCESS 0
+#define RECOGNITION_STATUS_ABORT 1
+#define RECOGNITION_STATUS_FAILURE 2
+
+#define SOUND_MODEL_STATUS_UPDATED 0
+
+typedef enum {
+    SOUND_MODEL_TYPE_UNKNOWN = -1,    /* use for unspecified sound model type */
+    SOUND_MODEL_TYPE_KEYPHRASE = 0    /* use for key phrase sound models */
+} sound_trigger_sound_model_type_t;
+
/* 128-bit unique identifier; field names follow the RFC 4122 (time-based)
 * UUID layout. Used to identify implementations, sound models and vendors. */
typedef struct sound_trigger_uuid_s {
    unsigned int   timeLow;          /* low 32 bits of the timestamp */
    unsigned short timeMid;          /* middle 16 bits of the timestamp */
    unsigned short timeHiAndVersion; /* high timestamp bits and UUID version */
    unsigned short clockSeq;         /* clock sequence (with variant bits) */
    unsigned char  node[6];          /* node ID, e.g. an IEEE 802 MAC address */
} sound_trigger_uuid_t;
+
+/*
+ * sound trigger implementation descriptor read by the framework via get_properties().
+ * Used by SoundTrigger service to report to applications and manage concurrency and policy.
+ */
+struct sound_trigger_properties {
+    char                 implementor[SOUND_TRIGGER_MAX_STRING_LEN]; /* implementor name */
+    char                 description[SOUND_TRIGGER_MAX_STRING_LEN]; /* implementation description */
+    unsigned int         version;               /* implementation version */
+    sound_trigger_uuid_t uuid;                  /* unique implementation ID.
+                                                   Must change with each version */
+    unsigned int         max_sound_models;      /* maximum number of concurrent sound models
+                                                   loaded */
+    unsigned int         max_key_phrases;       /* maximum number of key phrases */
+    unsigned int         max_users;             /* maximum number of concurrent users detected */
+    unsigned int         recognition_modes;     /* all supported modes.
+                                                   e.g RECOGNITION_MODE_VOICE_TRIGGER */
+    bool                 capture_transition;    /* supports seamless transition from detection
+                                                   to capture */
+    unsigned int         max_buffer_ms;         /* maximum buffering capacity in ms if
+                                                   capture_transition is true*/
+    bool                 concurrent_capture;    /* supports capture by other use cases while
+                                                   detection is active */
+    bool                 trigger_in_event;      /* returns the trigger capture in event */
+    unsigned int         power_consumption_mw;  /* Rated power consumption when detection is active
+                                                   with TBD silence/sound/speech ratio */
+};
+
+typedef int sound_trigger_module_handle_t;
+
+struct sound_trigger_module_descriptor {
+    sound_trigger_module_handle_t   handle;
+    struct sound_trigger_properties properties;
+};
+
+typedef int sound_model_handle_t;
+
+/*
+ * Generic sound model descriptor. This struct is the header of a larger block passed to
+ * load_sound_model() and containing the binary data of the sound model.
+ * Proprietary representation of users in binary data must match information indicated
+ * by users field
+ */
+struct sound_trigger_sound_model {
+    sound_trigger_sound_model_type_t type;        /* model type. e.g. SOUND_MODEL_TYPE_KEYPHRASE */
+    sound_trigger_uuid_t             uuid;        /* unique sound model ID. */
+    sound_trigger_uuid_t             vendor_uuid; /* unique vendor ID. Identifies the engine the
+                                                  sound model was built for */
+    unsigned int                     data_size;   /* size of opaque model data */
+    unsigned int                     data_offset; /* offset of opaque data start from head of struct
+                                                    (e.g sizeof struct sound_trigger_sound_model) */
+};
+
+/* key phrase descriptor */
+struct sound_trigger_phrase {
+    unsigned int id;                /* keyphrase ID */
+    unsigned int recognition_mode;  /* recognition modes supported by this key phrase */
+    unsigned int num_users;         /* number of users in the key phrase */
+    unsigned int users[SOUND_TRIGGER_MAX_USERS]; /* user ids (not uid_t but sound trigger
+                                                 specific IDs) */
+    char         locale[SOUND_TRIGGER_MAX_LOCALE_LEN]; /* locale - JAVA Locale style (e.g. en_US) */
+    char         text[SOUND_TRIGGER_MAX_STRING_LEN];   /* phrase text in UTF-8 format. */
+};
+
+/*
+ * Specialized sound model for key phrase detection.
+ * Proprietary representation of key phrases in binary data must match information indicated
+ * by phrases field
+ */
+struct sound_trigger_phrase_sound_model {
+    struct sound_trigger_sound_model common;
+    unsigned int                     num_phrases;   /* number of key phrases in model */
+    struct sound_trigger_phrase      phrases[SOUND_TRIGGER_MAX_PHRASES];
+};
+
+
+/*
+ * Generic recognition event sent via recognition callback
+ */
+struct sound_trigger_recognition_event {
+    int                              status;            /* recognition status e.g.
+                                                           RECOGNITION_STATUS_SUCCESS */
+    sound_trigger_sound_model_type_t type;              /* event type, same as sound model type.
+                                                           e.g. SOUND_MODEL_TYPE_KEYPHRASE */
+    sound_model_handle_t             model;             /* loaded sound model that triggered the
+                                                           event */
+    bool                             capture_available; /* it is possible to capture audio from this
+                                                           utterance buffered by the
+                                                           implementation */
+    int                              capture_session;   /* audio session ID. framework use */
+    int                              capture_delay_ms;  /* delay in ms between end of model
+                                                           detection and start of audio available
+                                                           for capture. A negative value is possible
+                                                           (e.g. if key phrase is also available for
+                                                           capture) */
+    int                              capture_preamble_ms; /* duration in ms of audio captured
+                                                            before the start of the trigger.
+                                                            0 if none. */
+    bool                             trigger_in_data; /* the opaque data is the capture of
+                                                            the trigger sound */
+    audio_config_t                   audio_config;        /* audio format of either the trigger in
+                                                             event data or to use for capture of the
+                                                             rest of the utterance */
+    uint64_t                         timestamp;       /* time stamp at the time of detection */
+
+    unsigned int                     data_size;         /* size of opaque event data */
+    unsigned int                     data_offset;       /* offset of opaque data start from start of
+                                                          this struct (e.g sizeof struct
+                                                          sound_trigger_phrase_recognition_event) */
+};
+
+/*
+ * Confidence level for each user in struct sound_trigger_phrase_recognition_extra
+ */
+struct sound_trigger_confidence_level {
+    unsigned int user_id;   /* user ID */
+    unsigned int level;     /* confidence level in percent (0 - 100).
+                               - min level for recognition configuration
+                               - detected level for recognition event */
+};
+
+/*
+ * Specialized recognition event for key phrase detection
+ */
+struct sound_trigger_phrase_recognition_extra {
+    unsigned int id;                /* keyphrase ID */
+    unsigned int recognition_modes; /* recognition modes used for this keyphrase */
+    unsigned int confidence_level;  /* confidence level for mode RECOGNITION_MODE_VOICE_TRIGGER */
+    unsigned int num_levels;        /* number of user confidence levels */
+    struct sound_trigger_confidence_level levels[SOUND_TRIGGER_MAX_USERS];
+};
+
+struct sound_trigger_phrase_recognition_event {
+    struct sound_trigger_recognition_event common;
+    unsigned int                           num_phrases;
+    struct sound_trigger_phrase_recognition_extra phrase_extras[SOUND_TRIGGER_MAX_PHRASES];
+};
+
+/*
+ * configuration for sound trigger capture session passed to start_recognition()
+ */
+struct sound_trigger_recognition_config {
+    audio_io_handle_t    capture_handle;    /* IO handle that will be used for capture.
+                                            N/A if capture_requested is false */
+    audio_devices_t      capture_device;    /* input device requested for detection capture */
+    bool                 capture_requested; /* capture and buffer audio for this recognition
+                                            instance */
+    unsigned int         num_phrases;   /* number of key phrases recognition extras */
+    struct sound_trigger_phrase_recognition_extra phrases[SOUND_TRIGGER_MAX_PHRASES];
+                                           /* configuration for each key phrase */
+    unsigned int        data_size;         /* size of opaque capture configuration data */
+    unsigned int        data_offset;       /* offset of opaque data start from start of this struct
+                                           (e.g sizeof struct sound_trigger_recognition_config) */
+};
+
+/*
+ * Event sent via load sound model callback
+ */
+struct sound_trigger_model_event {
+    int                  status;      /* sound model status e.g. SOUND_MODEL_STATUS_UPDATED */
+    sound_model_handle_t model;       /* loaded sound model that triggered the event */
+    unsigned int         data_size;   /* size of event data if any. Size of updated sound model if
+                                       status is SOUND_MODEL_STATUS_UPDATED */
+    unsigned int         data_offset; /* offset of data start from start of this struct
+                                       (e.g sizeof struct sound_trigger_model_event) */
+};
+
+
+#endif  // ANDROID_SOUND_TRIGGER_H
diff --git a/media/audio_effects/include/audio_effects/audio_effects_conf.h b/media/audio_effects/include/audio_effects/audio_effects_conf.h
new file mode 100755
index 0000000..d462c08
--- /dev/null
+++ b/media/audio_effects/include/audio_effects/audio_effects_conf.h
@@ -0,0 +1,68 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+#ifndef ANDROID_AUDIO_EFFECTS_CONF_H
+#define ANDROID_AUDIO_EFFECTS_CONF_H
+
+
+/////////////////////////////////////////////////
+//      Definitions for effects configuration file (audio_effects.conf)
+/////////////////////////////////////////////////
+
+#define AUDIO_EFFECT_DEFAULT_CONFIG_FILE "/system/etc/audio_effects.conf"
+#define AUDIO_EFFECT_VENDOR_CONFIG_FILE "/vendor/etc/audio_effects.conf"
+#define LIBRARIES_TAG "libraries"
+#define PATH_TAG "path"
+
+#define EFFECTS_TAG "effects"
+#define LIBRARY_TAG "library"
+#define UUID_TAG "uuid"
+
+#define PREPROCESSING_TAG "pre_processing"
+#define OUTPUT_SESSION_PROCESSING_TAG "output_session_processing"
+
+#define PARAM_TAG "param"
+#define VALUE_TAG "value"
+#define INT_TAG "int"
+#define SHORT_TAG "short"
+#define FLOAT_TAG "float"
+#define BOOL_TAG "bool"
+#define STRING_TAG "string"
+
+// audio_source_t
+#define MIC_SRC_TAG "mic"                           // AUDIO_SOURCE_MIC
+#define VOICE_UL_SRC_TAG "voice_uplink"             // AUDIO_SOURCE_VOICE_UPLINK
+#define VOICE_DL_SRC_TAG "voice_downlink"           // AUDIO_SOURCE_VOICE_DOWNLINK
+#define VOICE_CALL_SRC_TAG "voice_call"             // AUDIO_SOURCE_VOICE_CALL
+#define CAMCORDER_SRC_TAG "camcorder"               // AUDIO_SOURCE_CAMCORDER
+#define VOICE_REC_SRC_TAG "voice_recognition"       // AUDIO_SOURCE_VOICE_RECOGNITION
+#define VOICE_COMM_SRC_TAG "voice_communication"    // AUDIO_SOURCE_VOICE_COMMUNICATION
+
+// audio_stream_type_t
+#define AUDIO_STREAM_DEFAULT_TAG "default"
+#define AUDIO_STREAM_VOICE_CALL_TAG "voice_call"
+#define AUDIO_STREAM_SYSTEM_TAG "system"
+#define AUDIO_STREAM_RING_TAG "ring"
+#define AUDIO_STREAM_MUSIC_TAG "music"
+#define AUDIO_STREAM_ALARM_TAG "alarm"
+#define AUDIO_STREAM_NOTIFICATION_TAG "notification"
+#define AUDIO_STREAM_BLUETOOTH_SCO_TAG "bluetooth_sco"
+#define AUDIO_STREAM_ENFORCED_AUDIBLE_TAG "enforced_audible"
+#define AUDIO_STREAM_DTMF_TAG "dtmf"
+#define AUDIO_STREAM_TTS_TAG "tts"
+
+#endif  // ANDROID_AUDIO_EFFECTS_CONF_H
diff --git a/media/audio_effects/include/audio_effects/effect_aec.h b/media/audio_effects/include/audio_effects/effect_aec.h
new file mode 100644
index 0000000..a0e1ca0
--- /dev/null
+++ b/media/audio_effects/include/audio_effects/effect_aec.h
@@ -0,0 +1,48 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_EFFECT_AEC_H_
+#define ANDROID_EFFECT_AEC_H_
+
+#include <hardware/audio_effect.h>
+
+#if __cplusplus
+extern "C" {
+#endif
+
+// The AEC type UUID is not defined by OpenSL ES and has been generated from
+// http://www.itu.int/ITU-T/asn1/uuid.html
+static const effect_uuid_t FX_IID_AEC_ =
+    { 0x7b491460, 0x8d4d, 0x11e0, 0xbd61, { 0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b } };
+const effect_uuid_t * const FX_IID_AEC = &FX_IID_AEC_;
+
+typedef enum
+{
+    AEC_PARAM_ECHO_DELAY,           // echo delay in microseconds
+    AEC_PARAM_PROPERTIES
+} t_aec_params;
+
+// t_aec_settings groups all current aec settings for backup and restore.
+typedef struct s_aec_settings {
+    uint32_t echoDelay;
+} t_aec_settings;
+
+#if __cplusplus
+}  // extern "C"
+#endif
+
+
+#endif /*ANDROID_EFFECT_AEC_H_*/
diff --git a/media/audio_effects/include/audio_effects/effect_agc.h b/media/audio_effects/include/audio_effects/effect_agc.h
new file mode 100644
index 0000000..eddac67
--- /dev/null
+++ b/media/audio_effects/include/audio_effects/effect_agc.h
@@ -0,0 +1,54 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_EFFECT_AGC_H_
+#define ANDROID_EFFECT_AGC_H_
+
+#include <hardware/audio_effect.h>
+
+#if __cplusplus
+extern "C" {
+#endif
+
+// The AGC type UUID is not defined by OpenSL ES and has been generated from
+// http://www.itu.int/ITU-T/asn1/uuid.html
+static const effect_uuid_t FX_IID_AGC_ =
+    { 0x0a8abfe0, 0x654c, 0x11e0, 0xba26, { 0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b } };
+const effect_uuid_t * const FX_IID_AGC = &FX_IID_AGC_;
+
+
+typedef enum
+{
+    AGC_PARAM_TARGET_LEVEL,      // target output level in millibel
+    AGC_PARAM_COMP_GAIN,         // gain in the compression range in millibel
+    AGC_PARAM_LIMITER_ENA,       // enable or disable limiter (boolean)
+    AGC_PARAM_PROPERTIES
+} t_agc_params;
+
+
+//t_agc_settings groups all current agc settings for backup and restore.
+typedef struct s_agc_settings {
+    int16_t  targetLevel;
+    int16_t  compGain;
+    bool     limiterEnabled;
+} t_agc_settings;
+
+#if __cplusplus
+}  // extern "C"
+#endif
+
+
+#endif /*ANDROID_EFFECT_AGC_H_*/
diff --git a/media/audio_effects/include/audio_effects/effect_bassboost.h b/media/audio_effects/include/audio_effects/effect_bassboost.h
new file mode 100644
index 0000000..3735904
--- /dev/null
+++ b/media/audio_effects/include/audio_effects/effect_bassboost.h
@@ -0,0 +1,44 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_EFFECT_BASSBOOST_H_
+#define ANDROID_EFFECT_BASSBOOST_H_
+
+#include <hardware/audio_effect.h>
+
+#if __cplusplus
+extern "C" {
+#endif
+
+#ifndef OPENSL_ES_H_
+static const effect_uuid_t SL_IID_BASSBOOST_ = { 0x0634f220, 0xddd4, 0x11db, 0xa0fc,
+        { 0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b } };
+const effect_uuid_t * const SL_IID_BASSBOOST = &SL_IID_BASSBOOST_;
+#endif //OPENSL_ES_H_
+
+/* enumerated parameter settings for BassBoost effect */
+typedef enum
+{
+    BASSBOOST_PARAM_STRENGTH_SUPPORTED,
+    BASSBOOST_PARAM_STRENGTH
+} t_bassboost_params;
+
+#if __cplusplus
+}  // extern "C"
+#endif
+
+
+#endif /*ANDROID_EFFECT_BASSBOOST_H_*/
diff --git a/media/audio_effects/include/audio_effects/effect_downmix.h b/media/audio_effects/include/audio_effects/effect_downmix.h
new file mode 100644
index 0000000..0f6b073
--- /dev/null
+++ b/media/audio_effects/include/audio_effects/effect_downmix.h
@@ -0,0 +1,53 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_EFFECT_DOWNMIX_H_
+#define ANDROID_EFFECT_DOWNMIX_H_
+
+#include <hardware/audio_effect.h>
+
+#if __cplusplus
+extern "C" {
+#endif
+
+#define EFFECT_UIID_DOWNMIX__ { 0x381e49cc, 0xa858, 0x4aa2, 0x87f6, \
+                              { 0xe8, 0x38, 0x8e, 0x76, 0x01, 0xb2 } }
+static const effect_uuid_t EFFECT_UIID_DOWNMIX_ = EFFECT_UIID_DOWNMIX__;
+const effect_uuid_t * const EFFECT_UIID_DOWNMIX = &EFFECT_UIID_DOWNMIX_;
+
+
+/* enumerated parameter settings for downmix effect */
typedef enum {
    DOWNMIX_PARAM_TYPE  /* selects the downmix_type_t strategy to apply */
} downmix_params_t;
+
+
+typedef enum {
+    DOWNMIX_TYPE_INVALID                 = -1,
+    // throw away the extra channels
+    DOWNMIX_TYPE_STRIP                   = 0,
+    // mix the extra channels with FL/FR
+    DOWNMIX_TYPE_FOLD                    = 1,
+    DOWNMIX_TYPE_CNT,
+    DOWNMIX_TYPE_LAST = DOWNMIX_TYPE_CNT - 1
+} downmix_type_t;
+
+#if __cplusplus
+}  // extern "C"
+#endif
+
+
+#endif /*ANDROID_EFFECT_DOWNMIX_H_*/
diff --git a/media/audio_effects/include/audio_effects/effect_environmentalreverb.h b/media/audio_effects/include/audio_effects/effect_environmentalreverb.h
new file mode 100644
index 0000000..3acbd5c
--- /dev/null
+++ b/media/audio_effects/include/audio_effects/effect_environmentalreverb.h
@@ -0,0 +1,70 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_EFFECT_ENVIRONMENTALREVERB_H_
+#define ANDROID_EFFECT_ENVIRONMENTALREVERB_H_
+
+#include <hardware/audio_effect.h>
+
+#if __cplusplus
+extern "C" {
+#endif
+
+#ifndef OPENSL_ES_H_
+static const effect_uuid_t SL_IID_ENVIRONMENTALREVERB_ = { 0xc2e5d5f0, 0x94bd, 0x4763, 0x9cac,
+        { 0x4e, 0x23, 0x4d, 0x6, 0x83, 0x9e } };
+const effect_uuid_t * const SL_IID_ENVIRONMENTALREVERB = &SL_IID_ENVIRONMENTALREVERB_;
+#endif //OPENSL_ES_H_
+
+/* enumerated parameter settings for environmental reverb effect */
+typedef enum
+{
+    // Parameters below are as defined in OpenSL ES specification for environmental reverb interface
+    REVERB_PARAM_ROOM_LEVEL,            // in millibels,    range -6000 to 0
+    REVERB_PARAM_ROOM_HF_LEVEL,         // in millibels,    range -4000 to 0
+    REVERB_PARAM_DECAY_TIME,            // in milliseconds, range 100 to 20000
+    REVERB_PARAM_DECAY_HF_RATIO,        // in permilles,    range 100 to 1000
+    REVERB_PARAM_REFLECTIONS_LEVEL,     // in millibels,    range -6000 to 0
+    REVERB_PARAM_REFLECTIONS_DELAY,     // in milliseconds, range 0 to 65
+    REVERB_PARAM_REVERB_LEVEL,          // in millibels,    range -6000 to 0
+    REVERB_PARAM_REVERB_DELAY,          // in milliseconds, range 0 to 65
+    REVERB_PARAM_DIFFUSION,             // in permilles,    range 0 to 1000
+    REVERB_PARAM_DENSITY,               // in permilles,    range 0 to 1000
+    REVERB_PARAM_PROPERTIES,
+    REVERB_PARAM_BYPASS
+} t_env_reverb_params;
+
+//t_reverb_settings is equal to SLEnvironmentalReverbSettings defined in OpenSL ES specification.
+typedef struct s_reverb_settings {
+    int16_t     roomLevel;
+    int16_t     roomHFLevel;
+    uint32_t    decayTime;
+    int16_t     decayHFRatio;
+    int16_t     reflectionsLevel;
+    uint32_t    reflectionsDelay;
+    int16_t     reverbLevel;
+    uint32_t    reverbDelay;
+    int16_t     diffusion;
+    int16_t     density;
+} __attribute__((packed)) t_reverb_settings;
+
+
+#if __cplusplus
+}  // extern "C"
+#endif
+
+
+#endif /*ANDROID_EFFECT_ENVIRONMENTALREVERB_H_*/
diff --git a/media/audio_effects/include/audio_effects/effect_equalizer.h b/media/audio_effects/include/audio_effects/effect_equalizer.h
new file mode 100644
index 0000000..17ee74f
--- /dev/null
+++ b/media/audio_effects/include/audio_effects/effect_equalizer.h
@@ -0,0 +1,61 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_EFFECT_EQUALIZER_H_
+#define ANDROID_EFFECT_EQUALIZER_H_
+
+#include <hardware/audio_effect.h>
+
+#ifndef OPENSL_ES_H_
+static const effect_uuid_t SL_IID_EQUALIZER_ = { 0x0bed4300, 0xddd6, 0x11db, 0x8f34,
+        { 0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b } };
+const effect_uuid_t * const SL_IID_EQUALIZER = &SL_IID_EQUALIZER_;
+#endif //OPENSL_ES_H_
+
+#if __cplusplus
+extern "C" {
+#endif
+
+/* enumerated parameters for Equalizer effect */
+typedef enum
+{
+    EQ_PARAM_NUM_BANDS,             // Gets the number of frequency bands that the equalizer
+                                    // supports.
+    EQ_PARAM_LEVEL_RANGE,           // Returns the minimum and maximum band levels supported.
+    EQ_PARAM_BAND_LEVEL,            // Gets/Sets the gain set for the given equalizer band.
+    EQ_PARAM_CENTER_FREQ,           // Gets the center frequency of the given band.
+    EQ_PARAM_BAND_FREQ_RANGE,       // Gets the frequency range of the given frequency band.
+    EQ_PARAM_GET_BAND,              // Gets the band that has the most effect on the given
+                                    // frequency.
+    EQ_PARAM_CUR_PRESET,            // Gets/Sets the current preset.
+    EQ_PARAM_GET_NUM_OF_PRESETS,    // Gets the total number of presets the equalizer supports.
+    EQ_PARAM_GET_PRESET_NAME,       // Gets the preset name based on the index.
+    EQ_PARAM_PROPERTIES             // Gets/Sets all parameters at a time.
+} t_equalizer_params;
+
+//t_equalizer_settings groups all current equalizer setting for backup and restore.
+typedef struct s_equalizer_settings {
+    uint16_t curPreset;
+    uint16_t numBands;
+    uint16_t bandLevels[];
+} t_equalizer_settings;
+
+#if __cplusplus
+}  // extern "C"
+#endif
+
+
+#endif /*ANDROID_EFFECT_EQUALIZER_H_*/
diff --git a/media/audio_effects/include/audio_effects/effect_loudnessenhancer.h b/media/audio_effects/include/audio_effects/effect_loudnessenhancer.h
new file mode 100644
index 0000000..c5bcaee
--- /dev/null
+++ b/media/audio_effects/include/audio_effects/effect_loudnessenhancer.h
@@ -0,0 +1,45 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_EFFECT_LOUDNESS_ENHANCER_H_
+#define ANDROID_EFFECT_LOUDNESS_ENHANCER_H_
+
+#include <hardware/audio_effect.h>
+
+#if __cplusplus
+extern "C" {
+#endif
+
+// this effect is not defined in OpenSL ES as one of the standard effects
+static const effect_uuid_t FX_IID_LOUDNESS_ENHANCER_ =
+        {0xfe3199be, 0xaed0, 0x413f, 0x87bb, {0x11, 0x26, 0x0e, 0xb6, 0x3c, 0xf1}};
+const effect_uuid_t * const FX_IID_LOUDNESS_ENHANCER = &FX_IID_LOUDNESS_ENHANCER_;
+
+#define LOUDNESS_ENHANCER_DEFAULT_TARGET_GAIN_MB 0 // mB
+
+// enumerated parameters for DRC effect
+// to keep in sync with frameworks/base/media/java/android/media/audiofx/LoudnessEnhancer.java
+typedef enum
+{
+    LOUDNESS_ENHANCER_PARAM_TARGET_GAIN_MB = 0,// target gain expressed in mB
+} t_level_monitor_params;
+
+#if __cplusplus
+}  // extern "C"
+#endif
+
+
+#endif /*ANDROID_EFFECT_LOUDNESS_ENHANCER_H_*/
diff --git a/media/audio_effects/include/audio_effects/effect_ns.h b/media/audio_effects/include/audio_effects/effect_ns.h
new file mode 100644
index 0000000..8cda094
--- /dev/null
+++ b/media/audio_effects/include/audio_effects/effect_ns.h
@@ -0,0 +1,63 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_EFFECT_NS_H_
+#define ANDROID_EFFECT_NS_H_
+
+#include <hardware/audio_effect.h>
+
+#if __cplusplus
+extern "C" {
+#endif
+
+// The NS type UUID is not defined by OpenSL ES and has been generated from
+// http://www.itu.int/ITU-T/asn1/uuid.html
+static const effect_uuid_t FX_IID_NS_ =
+    { 0x58b4b260, 0x8e06, 0x11e0, 0xaa8e, { 0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b } };
+const effect_uuid_t * const FX_IID_NS = &FX_IID_NS_;
+
+typedef enum
+{
+    NS_PARAM_LEVEL,             // noise suppression level (t_ns_level)
+    NS_PARAM_PROPERTIES,
+    NS_PARAM_TYPE               // noise suppression type (t_ns_type)
+} t_ns_params;
+
+// noise suppression level
+typedef enum {
+    NS_LEVEL_LOW,
+    NS_LEVEL_MEDIUM,
+    NS_LEVEL_HIGH
+} t_ns_level;
+
+// noise suppression type
+typedef enum {
+    NS_TYPE_SINGLE_CHANNEL,
+    NS_TYPE_MULTI_CHANNEL
+} t_ns_type;
+
+// s_ns_settings groups all current ns settings for backup and restore.
+typedef struct s_ns_settings {
+    uint32_t  level;
+    uint32_t  type;
+} t_ns_settings;
+
+#if __cplusplus
+}  // extern "C"
+#endif
+
+
+#endif /*ANDROID_EFFECT_NS_H_*/
diff --git a/media/audio_effects/include/audio_effects/effect_presetreverb.h b/media/audio_effects/include/audio_effects/effect_presetreverb.h
new file mode 100644
index 0000000..ba1beae
--- /dev/null
+++ b/media/audio_effects/include/audio_effects/effect_presetreverb.h
@@ -0,0 +1,56 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_EFFECT_PRESETREVERB_H_
+#define ANDROID_EFFECT_PRESETREVERB_H_
+
+#include <hardware/audio_effect.h>
+
+#if __cplusplus
+extern "C" {
+#endif
+
+#ifndef OPENSL_ES_H_
+static const effect_uuid_t SL_IID_PRESETREVERB_ = { 0x47382d60, 0xddd8, 0x11db, 0xbf3a,
+        { 0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b } };
+const effect_uuid_t * const SL_IID_PRESETREVERB = &SL_IID_PRESETREVERB_;
+#endif //OPENSL_ES_H_
+
+/* enumerated parameter settings for preset reverb effect */
+typedef enum
+{
+    REVERB_PARAM_PRESET
+} t_preset_reverb_params;
+
+
+typedef enum
+{
+    REVERB_PRESET_NONE,
+    REVERB_PRESET_SMALLROOM,
+    REVERB_PRESET_MEDIUMROOM,
+    REVERB_PRESET_LARGEROOM,
+    REVERB_PRESET_MEDIUMHALL,
+    REVERB_PRESET_LARGEHALL,
+    REVERB_PRESET_PLATE,
+    REVERB_PRESET_LAST = REVERB_PRESET_PLATE
+} t_reverb_presets;
+
+#if __cplusplus
+}  // extern "C"
+#endif
+
+
+#endif /*ANDROID_EFFECT_PRESETREVERB_H_*/
diff --git a/media/audio_effects/include/audio_effects/effect_virtualizer.h b/media/audio_effects/include/audio_effects/effect_virtualizer.h
new file mode 100644
index 0000000..3374a35
--- /dev/null
+++ b/media/audio_effects/include/audio_effects/effect_virtualizer.h
@@ -0,0 +1,79 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_EFFECT_VIRTUALIZER_H_
+#define ANDROID_EFFECT_VIRTUALIZER_H_
+
+#include <hardware/audio_effect.h>
+
+#if __cplusplus
+extern "C" {
+#endif
+
+#ifndef OPENSL_ES_H_
+static const effect_uuid_t SL_IID_VIRTUALIZER_ = { 0x37cc2c00, 0xdddd, 0x11db, 0x8577,
+        { 0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b } };
+const effect_uuid_t * const SL_IID_VIRTUALIZER = &SL_IID_VIRTUALIZER_;
+#endif //OPENSL_ES_H_
+
+/* enumerated parameter settings for virtualizer effect */
+/* to keep in sync with frameworks/base/media/java/android/media/audiofx/Virtualizer.java */
+typedef enum
+{
+    VIRTUALIZER_PARAM_STRENGTH_SUPPORTED,
+    VIRTUALIZER_PARAM_STRENGTH,
+    // used with EFFECT_CMD_GET_PARAM
+    // format:
+    //   parameters int32_t              VIRTUALIZER_PARAM_VIRTUAL_SPEAKER_ANGLES
+    //              audio_channel_mask_t input channel mask
+    //              audio_devices_t      audio output device
+    //   output     int32_t*             an array of length 3 * the number of channels in the mask
+    //                                       where entries are the succession of the channel mask
+    //                                       of each speaker (i.e. a single bit is selected in the
+    //                                       channel mask) followed by the azimuth and the
+    //                                       elevation angles.
+    //   status     int -EINVAL  if configuration is not supported or invalid or not forcing
+    //                   0       if configuration is supported and the mode is forced
+    // notes:
+    // - all angles are expressed in degrees and are relative to the listener,
+    // - for azimuth: 0 is the direction the listener faces, 180 is behind the listener, and
+    //    -90 is to her/his left,
+    // - for elevation: 0 is the horizontal plane, +90 is above the listener, -90 is below.
+    VIRTUALIZER_PARAM_VIRTUAL_SPEAKER_ANGLES,
+    // used with EFFECT_CMD_SET_PARAM
+    // format:
+    //   parameters  int32_t           VIRTUALIZER_PARAM_FORCE_VIRTUALIZATION_MODE
+    //               audio_devices_t   audio output device
+    //   status      int -EINVAL   if the device is not supported or invalid
+    //                   0         if the device is supported and the mode is forced, or forcing
+    //                               was disabled for the AUDIO_DEVICE_NONE audio device.
+    VIRTUALIZER_PARAM_FORCE_VIRTUALIZATION_MODE,
+    // used with EFFECT_CMD_GET_PARAM
+    // format:
+    //   parameters int32_t              VIRTUALIZER_PARAM_VIRTUALIZATION_MODE
+    //   output     audio_devices_t      audio device reflecting the current virtualization mode,
+    //                                   AUDIO_DEVICE_NONE when not virtualizing
+    //   status     int -EINVAL if an error occurred
+    //                  0       if the output value is successfully retrieved
+    VIRTUALIZER_PARAM_VIRTUALIZATION_MODE
+} t_virtualizer_params;
+
+#if __cplusplus
+}  // extern "C"
+#endif
+
+
+#endif /*ANDROID_EFFECT_VIRTUALIZER_H_*/
diff --git a/media/audio_effects/include/audio_effects/effect_visualizer.h b/media/audio_effects/include/audio_effects/effect_visualizer.h
new file mode 100644
index 0000000..cfd99f5
--- /dev/null
+++ b/media/audio_effects/include/audio_effects/effect_visualizer.h
@@ -0,0 +1,73 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_EFFECT_VISUALIZER_H_
+#define ANDROID_EFFECT_VISUALIZER_H_
+
+#include <hardware/audio_effect.h>
+
+#if __cplusplus
+extern "C" {
+#endif
+
+#ifndef OPENSL_ES_H_
+static const effect_uuid_t SL_IID_VISUALIZATION_ =
+    { 0xe46b26a0, 0xdddd, 0x11db, 0x8afd, { 0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b } };
+const effect_uuid_t * const SL_IID_VISUALIZATION = &SL_IID_VISUALIZATION_;
+#endif //OPENSL_ES_H_
+
+#define VISUALIZER_CAPTURE_SIZE_MAX 1024  // maximum capture size in samples
+#define VISUALIZER_CAPTURE_SIZE_MIN 128   // minimum capture size in samples
+
+// to keep in sync with frameworks/base/media/java/android/media/audiofx/Visualizer.java
+#define VISUALIZER_SCALING_MODE_NORMALIZED 0
+#define VISUALIZER_SCALING_MODE_AS_PLAYED  1
+
+#define MEASUREMENT_MODE_NONE      0x0
+#define MEASUREMENT_MODE_PEAK_RMS  0x1
+
+#define MEASUREMENT_IDX_PEAK 0
+#define MEASUREMENT_IDX_RMS  1
+
+/* enumerated parameters for Visualizer effect */
+typedef enum
+{
+    VISUALIZER_PARAM_CAPTURE_SIZE, // Sets the number of PCM samples in the capture.
+    VISUALIZER_PARAM_SCALING_MODE, // Sets the way the captured data is scaled
+    VISUALIZER_PARAM_LATENCY,      // Informs the visualizer about the downstream latency
+    VISUALIZER_PARAM_MEASUREMENT_MODE, // Sets which measurements are to be made
+} t_visualizer_params;
+
+/* commands */
+typedef enum
+{
+    VISUALIZER_CMD_CAPTURE = EFFECT_CMD_FIRST_PROPRIETARY, // Gets the latest PCM capture.
+    VISUALIZER_CMD_MEASURE, // Gets the current measurements
+}t_visualizer_cmds;
+
+// VISUALIZER_CMD_CAPTURE retrieves the latest PCM snapshot captured by the visualizer engine.
+// It returns the number of samples specified by VISUALIZER_PARAM_CAPTURE_SIZE
+// in 8 bit unsigned format (0 = 0x80)
+
+// VISUALIZER_CMD_MEASURE retrieves the latest measurements as int32_t saved in the
+// MEASUREMENT_IDX_* array index order.
+
+#if __cplusplus
+}  // extern "C"
+#endif
+
+
+#endif /*ANDROID_EFFECT_VISUALIZER_H_*/
diff --git a/media/audio_route/Android.mk b/media/audio_route/Android.mk
new file mode 100644
index 0000000..2fcdc94
--- /dev/null
+++ b/media/audio_route/Android.mk
@@ -0,0 +1,11 @@
+LOCAL_PATH:= $(call my-dir)
+
+include $(CLEAR_VARS)
+LOCAL_C_INCLUDES += \
+	external/tinyalsa/include \
+	external/expat/lib
+LOCAL_SRC_FILES:= audio_route.c
+LOCAL_MODULE := libaudioroute
+LOCAL_SHARED_LIBRARIES:= liblog libcutils libutils libexpat libtinyalsa
+LOCAL_MODULE_TAGS := optional
+include $(BUILD_SHARED_LIBRARY)
diff --git a/media/audio_route/MODULE_LICENSE_BSD b/media/audio_route/MODULE_LICENSE_BSD
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/media/audio_route/MODULE_LICENSE_BSD
diff --git a/media/audio_route/NOTICE b/media/audio_route/NOTICE
new file mode 100644
index 0000000..91b6565
--- /dev/null
+++ b/media/audio_route/NOTICE
@@ -0,0 +1,25 @@
+Copyright 2013, The Android Open Source Project
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+    * Redistributions of source code must retain the above copyright
+      notice, this list of conditions and the following disclaimer.
+    * Redistributions in binary form must reproduce the above copyright
+      notice, this list of conditions and the following disclaimer in the
+      documentation and/or other materials provided with the distribution.
+    * Neither the name of The Android Open Source Project nor the names of
+      its contributors may be used to endorse or promote products derived
+      from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY The Android Open Source Project ``AS IS'' AND
+ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ARE DISCLAIMED. IN NO EVENT SHALL The Android Open Source Project BE LIABLE
+FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
+OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
+DAMAGE.
+
diff --git a/media/audio_route/audio_route.c b/media/audio_route/audio_route.c
new file mode 100644
index 0000000..8a8bb9b
--- /dev/null
+++ b/media/audio_route/audio_route.c
@@ -0,0 +1,912 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ * Inspired by TinyHW, written by Mark Brown at Wolfson Micro
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "audio_route"
+/*#define LOG_NDEBUG 0*/
+
+#include <errno.h>
+#include <expat.h>
+#include <stdbool.h>
+#include <stdio.h>
+#include <string.h>
+
+#include <cutils/log.h>
+
+#include <tinyalsa/asoundlib.h>
+
+#define BUF_SIZE 1024
+#define MIXER_XML_PATH "/system/etc/mixer_paths.xml"
+#define INITIAL_MIXER_PATH_SIZE 8
+
+union ctl_values {
+    int *integer;
+    void *ptr;
+    unsigned char *bytes;
+};
+
+struct mixer_state {
+    struct mixer_ctl *ctl;
+    unsigned int num_values;
+    union ctl_values old_value;
+    union ctl_values new_value;
+    union ctl_values reset_value;
+};
+
+struct mixer_setting {
+    unsigned int ctl_index;
+    unsigned int num_values;
+    unsigned int type;
+    union ctl_values value;
+};
+
+struct mixer_value {
+    unsigned int ctl_index;
+    int index;
+    int value;
+};
+
+struct mixer_path {
+    char *name;
+    unsigned int size;
+    unsigned int length;
+    struct mixer_setting *setting;
+};
+
+struct audio_route {
+    struct mixer *mixer;
+    unsigned int num_mixer_ctls;
+    struct mixer_state *mixer_state;
+
+    unsigned int mixer_path_size;
+    unsigned int num_mixer_paths;
+    struct mixer_path *mixer_path;
+};
+
+struct config_parse_state {
+    struct audio_route *ar;
+    struct mixer_path *path;
+    int level;
+};
+
+/* path functions */
+
+static bool is_supported_ctl_type(enum mixer_ctl_type type)
+{
+    switch (type) {
+    case MIXER_CTL_TYPE_BOOL:
+    case MIXER_CTL_TYPE_INT:
+    case MIXER_CTL_TYPE_ENUM:
+    case MIXER_CTL_TYPE_BYTE:
+        return true;
+    default:
+        return false;
+    }
+}
+
+static inline struct mixer_ctl *index_to_ctl(struct audio_route *ar,
+                                             unsigned int ctl_index)
+{
+    return ar->mixer_state[ctl_index].ctl;
+}
+
+static void path_print(struct audio_route *ar, struct mixer_path *path)
+{
+    unsigned int i;
+    unsigned int j;
+
+    ALOGE("Path: %s, length: %d", path->name, path->length);
+    for (i = 0; i < path->length; i++) {
+        struct mixer_ctl *ctl = index_to_ctl(ar, path->setting[i].ctl_index);
+
+        ALOGE("  id=%d: ctl=%s", i, mixer_ctl_get_name(ctl));
+        if (mixer_ctl_get_type(ctl) == MIXER_CTL_TYPE_BYTE) {
+            for (j = 0; j < path->setting[i].num_values; j++)
+                ALOGE("    id=%d value=0x%02x", j, path->setting[i].value.bytes[j]);
+        } else {
+            for (j = 0; j < path->setting[i].num_values; j++)
+                ALOGE("    id=%d value=%d", j, path->setting[i].value.integer[j]);
+        }
+    }
+}
+
+static void path_free(struct audio_route *ar)
+{
+    unsigned int i;
+
+    for (i = 0; i < ar->num_mixer_paths; i++) {
+        if (ar->mixer_path[i].name)
+            free(ar->mixer_path[i].name);
+        if (ar->mixer_path[i].setting) {
+            if (ar->mixer_path[i].setting->value.ptr)
+                free(ar->mixer_path[i].setting->value.ptr);
+            free(ar->mixer_path[i].setting);
+        }
+    }
+    free(ar->mixer_path);
+}
+
+static struct mixer_path *path_get_by_name(struct audio_route *ar,
+                                           const char *name)
+{
+    unsigned int i;
+
+    for (i = 0; i < ar->num_mixer_paths; i++)
+        if (strcmp(ar->mixer_path[i].name, name) == 0)
+            return &ar->mixer_path[i];
+
+    return NULL;
+}
+
+static struct mixer_path *path_create(struct audio_route *ar, const char *name)
+{
+    struct mixer_path *new_mixer_path = NULL;
+
+    if (path_get_by_name(ar, name)) {
+        ALOGE("Path name '%s' already exists", name);
+        return NULL;
+    }
+
+    /* check if we need to allocate more space for mixer paths */
+    if (ar->mixer_path_size <= ar->num_mixer_paths) {
+        if (ar->mixer_path_size == 0)
+            ar->mixer_path_size = INITIAL_MIXER_PATH_SIZE;
+        else
+            ar->mixer_path_size *= 2;
+
+        new_mixer_path = realloc(ar->mixer_path, ar->mixer_path_size *
+                                 sizeof(struct mixer_path));
+        if (new_mixer_path == NULL) {
+            ALOGE("Unable to allocate more paths");
+            return NULL;
+        } else {
+            ar->mixer_path = new_mixer_path;
+        }
+    }
+
+    /* initialise the new mixer path */
+    ar->mixer_path[ar->num_mixer_paths].name = strdup(name);
+    ar->mixer_path[ar->num_mixer_paths].size = 0;
+    ar->mixer_path[ar->num_mixer_paths].length = 0;
+    ar->mixer_path[ar->num_mixer_paths].setting = NULL;
+
+    /* return the mixer path just added, then increment number of them */
+    return &ar->mixer_path[ar->num_mixer_paths++];
+}
+
+static int find_ctl_index_in_path(struct mixer_path *path,
+                                  unsigned int ctl_index)
+{
+    unsigned int i;
+
+    for (i = 0; i < path->length; i++)
+        if (path->setting[i].ctl_index == ctl_index)
+            return i;
+
+    return -1;
+}
+
+static int alloc_path_setting(struct mixer_path *path)
+{
+    struct mixer_setting *new_path_setting;
+    int path_index;
+
+    /* check if we need to allocate more space for path settings */
+    if (path->size <= path->length) {
+        if (path->size == 0)
+            path->size = INITIAL_MIXER_PATH_SIZE;
+        else
+            path->size *= 2;
+
+        new_path_setting = realloc(path->setting,
+                                   path->size * sizeof(struct mixer_setting));
+        if (new_path_setting == NULL) {
+            ALOGE("Unable to allocate more path settings");
+            return -1;
+        } else {
+            path->setting = new_path_setting;
+        }
+    }
+
+    path_index = path->length;
+    path->length++;
+
+    return path_index;
+}
+
+static int path_add_setting(struct audio_route *ar, struct mixer_path *path,
+                            struct mixer_setting *setting)
+{
+    int path_index;
+    unsigned int value_sz = sizeof(int);
+
+    if (find_ctl_index_in_path(path, setting->ctl_index) != -1) {
+        struct mixer_ctl *ctl = index_to_ctl(ar, setting->ctl_index);
+
+        ALOGE("Control '%s' already exists in path '%s'",
+              mixer_ctl_get_name(ctl), path->name);
+        return -1;
+    }
+
+    path_index = alloc_path_setting(path);
+    if (path_index < 0)
+        return -1;
+
+    path->setting[path_index].ctl_index = setting->ctl_index;
+    path->setting[path_index].type = setting->type;
+    path->setting[path_index].num_values = setting->num_values;
+
+    if (setting->type == MIXER_CTL_TYPE_BYTE)
+        value_sz = sizeof(unsigned char);
+
+    path->setting[path_index].value.ptr = calloc(1, setting->num_values * value_sz);
+    /* copy all values */
+    memcpy(path->setting[path_index].value.ptr, setting->value.ptr,
+           setting->num_values * value_sz);
+
+    return 0;
+}
+
+static int path_add_value(struct audio_route *ar, struct mixer_path *path,
+                          struct mixer_value *mixer_value)
+{
+    unsigned int i;
+    int path_index;
+    unsigned int num_values;
+    unsigned int value_sz = sizeof(int);
+    struct mixer_ctl *ctl;
+
+    /* Check that mixer value index is within range */
+    ctl = index_to_ctl(ar, mixer_value->ctl_index);
+    num_values = mixer_ctl_get_num_values(ctl);
+    if (mixer_value->index >= (int)num_values) {
+        ALOGE("mixer index %d is out of range for '%s'", mixer_value->index,
+              mixer_ctl_get_name(ctl));
+        return -1;
+    }
+
+    path_index = find_ctl_index_in_path(path, mixer_value->ctl_index);
+    if (path_index < 0) {
+        /* New path */
+
+        path_index = alloc_path_setting(path);
+        if (path_index < 0)
+            return -1;
+
+        /* initialise the new path setting */
+        path->setting[path_index].ctl_index = mixer_value->ctl_index;
+        path->setting[path_index].num_values = num_values;
+        path->setting[path_index].type = mixer_ctl_get_type(ctl);
+
+        if (path->setting[path_index].type == MIXER_CTL_TYPE_BYTE)
+            value_sz = sizeof(unsigned char);
+
+        path->setting[path_index].value.ptr = calloc(1, num_values * value_sz);
+        if (path->setting[path_index].type == MIXER_CTL_TYPE_BYTE)
+            path->setting[path_index].value.bytes[0] = mixer_value->value;
+        else
+            path->setting[path_index].value.integer[0] = mixer_value->value;
+    }
+
+    if (mixer_value->index == -1) {
+        /* set all values the same */
+        if (path->setting[path_index].type == MIXER_CTL_TYPE_BYTE) {
+            for (i = 0; i < num_values; i++)
+                path->setting[path_index].value.bytes[i] = mixer_value->value;
+        } else {
+            for (i = 0; i < num_values; i++)
+                path->setting[path_index].value.integer[i] = mixer_value->value;
+        }
+    } else {
+        /* set only one value */
+        if (path->setting[path_index].type == MIXER_CTL_TYPE_BYTE)
+            path->setting[path_index].value.bytes[mixer_value->index] = mixer_value->value;
+        else
+            path->setting[path_index].value.integer[mixer_value->index] = mixer_value->value;
+    }
+
+    return 0;
+}
+
+static int path_add_path(struct audio_route *ar, struct mixer_path *path,
+                         struct mixer_path *sub_path)
+{
+    unsigned int i;
+
+    for (i = 0; i < sub_path->length; i++)
+        if (path_add_setting(ar, path, &sub_path->setting[i]) < 0)
+            return -1;
+
+    return 0;
+}
+
+static int path_apply(struct audio_route *ar, struct mixer_path *path)
+{
+    unsigned int i;
+    unsigned int value_sz;
+    unsigned int ctl_index;
+    struct mixer_ctl *ctl;
+    enum mixer_ctl_type type;
+
+    for (i = 0; i < path->length; i++) {
+        ctl_index = path->setting[i].ctl_index;
+        ctl = index_to_ctl(ar, ctl_index);
+        type = mixer_ctl_get_type(ctl);
+        if (!is_supported_ctl_type(type))
+            continue;
+
+        if (type == MIXER_CTL_TYPE_BYTE)
+            value_sz = sizeof(unsigned char);
+        else
+            value_sz = sizeof(int);
+
+        memcpy(ar->mixer_state[ctl_index].new_value.ptr, path->setting[i].value.ptr,
+                   path->setting[i].num_values * value_sz);
+    }
+
+    return 0;
+}
+
+/*
+ * Stage the saved reset values for every control in 'path' into the
+ * pending (new_value) mixer state.  The hardware is not touched here;
+ * audio_route_update_mixer() pushes the staged values.  Always returns 0.
+ */
+static int path_reset(struct audio_route *ar, struct mixer_path *path)
+{
+    unsigned int i;
+    unsigned int value_sz;
+    unsigned int ctl_index;
+    struct mixer_ctl *ctl;
+    enum mixer_ctl_type type;
+
+    for (i = 0; i < path->length; i++) {
+        ctl_index = path->setting[i].ctl_index;
+        ctl = index_to_ctl(ar, ctl_index);
+        type = mixer_ctl_get_type(ctl);
+        if (!is_supported_ctl_type(type))
+            continue;
+
+        /* BYTE controls hold one byte per value; everything else an int */
+        if (type == MIXER_CTL_TYPE_BYTE)
+            value_sz = sizeof(unsigned char);
+        else
+            value_sz = sizeof(int);
+
+        /* reset the value(s) back to the state saved by save_mixer_state() */
+        memcpy(ar->mixer_state[ctl_index].new_value.ptr,
+               ar->mixer_state[ctl_index].reset_value.ptr,
+               ar->mixer_state[ctl_index].num_values * value_sz);
+    }
+
+    return 0;
+}
+
+/* mixer helper function */
+/*
+ * Map an enum value string to its index for the given control.
+ * Returns the matching index, or mixer_ctl_get_num_enums(ctl) - i.e. an
+ * out-of-range index - when the string is not found; callers receive no
+ * explicit error signal.
+ */
+static int mixer_enum_string_to_value(struct mixer_ctl *ctl, const char *string)
+{
+    unsigned int i;
+
+    /* Search the enum strings for a particular one */
+    for (i = 0; i < mixer_ctl_get_num_enums(ctl); i++) {
+        if (strcmp(mixer_ctl_get_enum_string(ctl, i), string) == 0)
+            break;
+    }
+
+    return i;
+}
+
+/*
+ * expat start-of-element callback.  Two element kinds are handled:
+ *   <path name="...">  - at nesting level 1, creates a new named path;
+ *                        when nested, splices an existing path's settings
+ *                        into the path currently being built.
+ *   <ctl name="..." value="..." [id="..."]>
+ *                      - at level 1, stages an initial mixer value;
+ *                        when nested, appends the value to the current path.
+ * The nesting level is tracked in state->level (incremented at 'done').
+ */
+static void start_tag(void *data, const XML_Char *tag_name,
+                      const XML_Char **attr)
+{
+    const XML_Char *attr_name = NULL;
+    const XML_Char *attr_id = NULL;
+    const XML_Char *attr_value = NULL;
+    struct config_parse_state *state = data;
+    struct audio_route *ar = state->ar;
+    unsigned int i;
+    unsigned int ctl_index;
+    struct mixer_ctl *ctl;
+    int value;
+    unsigned int id;
+    struct mixer_value mixer_value;
+    enum mixer_ctl_type type;
+
+    /* Get name, id and value attributes (these may be empty) */
+    /* attr[] is a NULL-terminated list of name/value string pairs */
+    for (i = 0; attr[i]; i += 2) {
+        if (strcmp(attr[i], "name") == 0)
+            attr_name = attr[i + 1];
+        if (strcmp(attr[i], "id") == 0)
+            attr_id = attr[i + 1];
+        else if (strcmp(attr[i], "value") == 0)
+            attr_value = attr[i + 1];
+    }
+
+    /* Look at tags */
+    if (strcmp(tag_name, "path") == 0) {
+        if (attr_name == NULL) {
+            ALOGE("Unnamed path!");
+        } else {
+            if (state->level == 1) {
+                /* top level path: create and stash the path */
+                state->path = path_create(ar, (char *)attr_name);
+            } else {
+                /* nested path */
+                /* NOTE(review): path_get_by_name() may return NULL for an
+                   unknown sub-path name - confirm path_add_path tolerates
+                   that, otherwise this dereferences NULL. */
+                struct mixer_path *sub_path = path_get_by_name(ar, attr_name);
+                path_add_path(ar, state->path, sub_path);
+            }
+        }
+    }
+
+    else if (strcmp(tag_name, "ctl") == 0) {
+        /* Obtain the mixer ctl and value */
+        /* NOTE(review): attr_name is not checked for NULL here (unlike the
+           "path" branch) - a <ctl> without a name passes NULL to
+           mixer_get_ctl_by_name; verify that is safe. */
+        ctl = mixer_get_ctl_by_name(ar->mixer, attr_name);
+        if (ctl == NULL) {
+            ALOGE("Control '%s' doesn't exist - skipping", attr_name);
+            goto done;
+        }
+
+        /* parse the value attribute according to the control's type;
+           NOTE(review): attr_value is not NULL-checked before strtol -
+           a <ctl> with no value attribute would crash here. */
+        switch (mixer_ctl_get_type(ctl)) {
+        case MIXER_CTL_TYPE_BOOL:
+        case MIXER_CTL_TYPE_INT:
+            value = (int) strtol((char *)attr_value, NULL, 0);
+            break;
+        case MIXER_CTL_TYPE_BYTE:
+            /* byte values are parsed as hexadecimal (base 16) */
+            value = (unsigned char) strtol((char *)attr_value, NULL, 16);
+            break;
+        case MIXER_CTL_TYPE_ENUM:
+            value = mixer_enum_string_to_value(ctl, (char *)attr_value);
+            break;
+        default:
+            value = 0;
+            break;
+        }
+
+        /* locate the mixer ctl in the list */
+        for (ctl_index = 0; ctl_index < ar->num_mixer_ctls; ctl_index++) {
+            if (ar->mixer_state[ctl_index].ctl == ctl)
+                break;
+        }
+
+        if (state->level == 1) {
+            /* top level ctl (initial setting) */
+
+            type = mixer_ctl_get_type(ctl);
+            if (is_supported_ctl_type(type)) {
+                /* apply the new value */
+                if (attr_id) {
+                    /* set only one value */
+                    id = atoi((char *)attr_id);
+                    /* NOTE(review): unbraced nested if/else - the final
+                       'else' binds to the outer 'if' only because the inner
+                       'if' already has one; fragile under edits. */
+                    if (id < ar->mixer_state[ctl_index].num_values)
+                        if (type == MIXER_CTL_TYPE_BYTE)
+                            ar->mixer_state[ctl_index].new_value.bytes[id] = value;
+                        else
+                            ar->mixer_state[ctl_index].new_value.integer[id] = value;
+                    else
+                        ALOGE("value id out of range for mixer ctl '%s'",
+                              mixer_ctl_get_name(ctl));
+                } else {
+                    /* set all values the same */
+                    for (i = 0; i < ar->mixer_state[ctl_index].num_values; i++)
+                        if (type == MIXER_CTL_TYPE_BYTE)
+                            ar->mixer_state[ctl_index].new_value.bytes[i] = value;
+                        else
+                            ar->mixer_state[ctl_index].new_value.integer[i] = value;
+                }
+            }
+        } else {
+            /* nested ctl (within a path) */
+            mixer_value.ctl_index = ctl_index;
+            mixer_value.value = value;
+            /* index -1 means "apply to all values of the control" */
+            if (attr_id)
+                mixer_value.index = atoi((char *)attr_id);
+            else
+                mixer_value.index = -1;
+            path_add_value(ar, state->path, &mixer_value);
+        }
+    }
+
+done:
+    state->level++;
+}
+
+/* expat end-of-element callback: steps back up one nesting level. */
+static void end_tag(void *data, const XML_Char *tag_name)
+{
+    struct config_parse_state *parse_state = data;
+
+    (void)tag_name;
+    parse_state->level--;
+}
+
+/*
+ * Take a snapshot of every mixer control: allocate the old/new/reset
+ * value arrays and read the current hardware values into old_value and
+ * new_value.  Returns 0 on success, -1 on allocation failure, in which
+ * case everything allocated so far is released (the caller's error path
+ * does not call free_mixer_state()).
+ */
+static int alloc_mixer_state(struct audio_route *ar)
+{
+    unsigned int i;
+    unsigned int num_values;
+    unsigned int value_sz;
+    struct mixer_ctl *ctl;
+    enum mixer_ctl_type type;
+
+    ar->num_mixer_ctls = mixer_get_num_ctls(ar->mixer);
+    /* calloc(n, sz) zero-fills (so every value pointer starts NULL) and
+       checks the n*sz multiplication for overflow */
+    ar->mixer_state = calloc(ar->num_mixer_ctls, sizeof(struct mixer_state));
+    if (!ar->mixer_state)
+        return -1;
+
+    for (i = 0; i < ar->num_mixer_ctls; i++) {
+        ctl = mixer_get_ctl(ar->mixer, i);
+        num_values = mixer_ctl_get_num_values(ctl);
+
+        ar->mixer_state[i].ctl = ctl;
+        ar->mixer_state[i].num_values = num_values;
+
+        /* Skip unsupported types that are not supported yet in XML */
+        type = mixer_ctl_get_type(ctl);
+
+        if (!is_supported_ctl_type(type))
+            continue;
+
+        if (type == MIXER_CTL_TYPE_BYTE)
+            value_sz = sizeof(unsigned char);
+        else
+            value_sz = sizeof(int);
+
+        ar->mixer_state[i].old_value.ptr = calloc(num_values, value_sz);
+        ar->mixer_state[i].new_value.ptr = calloc(num_values, value_sz);
+        ar->mixer_state[i].reset_value.ptr = calloc(num_values, value_sz);
+        /* the original left these allocations unchecked; a failed calloc
+           would be dereferenced below */
+        if (!ar->mixer_state[i].old_value.ptr ||
+            !ar->mixer_state[i].new_value.ptr ||
+            !ar->mixer_state[i].reset_value.ptr)
+            goto err_free;
+
+        if (type == MIXER_CTL_TYPE_ENUM)
+            ar->mixer_state[i].old_value.integer[0] = mixer_ctl_get_value(ctl, 0);
+        else
+            mixer_ctl_get_array(ctl, ar->mixer_state[i].old_value.ptr, num_values);
+
+        memcpy(ar->mixer_state[i].new_value.ptr, ar->mixer_state[i].old_value.ptr,
+               num_values * value_sz);
+    }
+
+    return 0;
+
+err_free:
+    /* release everything allocated so far; free(NULL) is a no-op and the
+       calloc above guarantees untouched pointers are NULL */
+    for (i = 0; i < ar->num_mixer_ctls; i++) {
+        free(ar->mixer_state[i].old_value.ptr);
+        free(ar->mixer_state[i].new_value.ptr);
+        free(ar->mixer_state[i].reset_value.ptr);
+    }
+    free(ar->mixer_state);
+    ar->mixer_state = NULL;
+    return -1;
+}
+
+/* Release all per-control value arrays and the mixer_state array itself. */
+static void free_mixer_state(struct audio_route *ar)
+{
+    unsigned int i;
+    enum mixer_ctl_type type;
+
+    for (i = 0; i < ar->num_mixer_ctls; i++) {
+        type = mixer_ctl_get_type(ar->mixer_state[i].ctl);
+        /* unsupported types never had value arrays allocated (see
+           alloc_mixer_state), so there is nothing to free */
+        if (!is_supported_ctl_type(type))
+            continue;
+
+        free(ar->mixer_state[i].old_value.ptr);
+        free(ar->mixer_state[i].new_value.ptr);
+        free(ar->mixer_state[i].reset_value.ptr);
+    }
+
+    free(ar->mixer_state);
+    ar->mixer_state = NULL;
+}
+
+/* Update the mixer with any changed values */
+/*
+ * For every supported control, compare the staged (new_value) state with
+ * the last-written (old_value) state and, if any element differs, write
+ * the whole control to the hardware and record it as written.
+ * Always returns 0.
+ */
+int audio_route_update_mixer(struct audio_route *ar)
+{
+    unsigned int i;
+    unsigned int j;
+    struct mixer_ctl *ctl;
+
+    for (i = 0; i < ar->num_mixer_ctls; i++) {
+        unsigned int num_values = ar->mixer_state[i].num_values;
+        enum mixer_ctl_type type;
+
+        ctl = ar->mixer_state[i].ctl;
+
+        /* Skip unsupported types */
+        type = mixer_ctl_get_type(ctl);
+        if (!is_supported_ctl_type(type))
+            continue;
+
+        /* if the value has changed, update the mixer */
+        bool changed = false;
+        if (type == MIXER_CTL_TYPE_BYTE) {
+            for (j = 0; j < num_values; j++) {
+                if (ar->mixer_state[i].old_value.bytes[j] != ar->mixer_state[i].new_value.bytes[j]) {
+                    changed = true;
+                    break;
+                }
+            }
+         } else {
+            for (j = 0; j < num_values; j++) {
+                if (ar->mixer_state[i].old_value.integer[j] != ar->mixer_state[i].new_value.integer[j]) {
+                    changed = true;
+                    break;
+                }
+            }
+        }
+        if (changed) {
+            unsigned int value_sz = sizeof(int);
+
+            if (type == MIXER_CTL_TYPE_BYTE)
+                value_sz = sizeof(unsigned char);
+
+            /* NOTE(review): for ENUM controls only element 0 is written to
+               the hardware, though all num_values elements are copied below -
+               confirm multi-element enum controls are not expected here. */
+            if (type == MIXER_CTL_TYPE_ENUM)
+                mixer_ctl_set_value(ctl, 0, ar->mixer_state[i].new_value.integer[0]);
+            else
+                mixer_ctl_set_array(ctl, ar->mixer_state[i].new_value.ptr, num_values);
+
+            /* remember what was written so future diffs are accurate */
+            memcpy(ar->mixer_state[i].old_value.ptr, ar->mixer_state[i].new_value.ptr,
+                   num_values * value_sz);
+        }
+    }
+
+    return 0;
+}
+
+/* saves the current state of the mixer, for resetting all controls */
+static void save_mixer_state(struct audio_route *ar)
+{
+    unsigned int idx;
+
+    for (idx = 0; idx < ar->num_mixer_ctls; idx++) {
+        struct mixer_state *ms = &ar->mixer_state[idx];
+        enum mixer_ctl_type type = mixer_ctl_get_type(ms->ctl);
+        unsigned int value_sz;
+
+        if (!is_supported_ctl_type(type))
+            continue;
+
+        value_sz = (type == MIXER_CTL_TYPE_BYTE) ? sizeof(unsigned char)
+                                                 : sizeof(int);
+
+        /* snapshot the staged values as the reset baseline */
+        memcpy(ms->reset_value.ptr, ms->new_value.ptr,
+               ms->num_values * value_sz);
+    }
+}
+
+/* Reset the audio routes back to the initial state */
+void audio_route_reset(struct audio_route *ar)
+{
+    unsigned int idx;
+
+    /* stage the saved reset values for every supported control */
+    for (idx = 0; idx < ar->num_mixer_ctls; idx++) {
+        struct mixer_state *ms = &ar->mixer_state[idx];
+        enum mixer_ctl_type type = mixer_ctl_get_type(ms->ctl);
+        unsigned int value_sz;
+
+        if (!is_supported_ctl_type(type))
+            continue;
+
+        value_sz = (type == MIXER_CTL_TYPE_BYTE) ? sizeof(unsigned char)
+                                                 : sizeof(int);
+
+        memcpy(ms->new_value.ptr, ms->reset_value.ptr,
+               ms->num_values * value_sz);
+    }
+}
+
+/* Apply an audio route path by name */
+int audio_route_apply_path(struct audio_route *ar, const char *name)
+{
+    struct mixer_path *path;
+
+    /* guard against a NULL handle before any lookup */
+    if (ar == NULL) {
+        ALOGE("invalid audio_route");
+        return -1;
+    }
+
+    path = path_get_by_name(ar, name);
+    if (path == NULL) {
+        ALOGE("unable to find path '%s'", name);
+        return -1;
+    }
+
+    path_apply(ar, path);
+    return 0;
+}
+
+/* Reset an audio route path by name */
+int audio_route_reset_path(struct audio_route *ar, const char *name)
+{
+    struct mixer_path *path;
+
+    /* guard against a NULL handle before any lookup */
+    if (ar == NULL) {
+        ALOGE("invalid audio_route");
+        return -1;
+    }
+
+    path = path_get_by_name(ar, name);
+    if (path == NULL) {
+        ALOGE("unable to find path '%s'", name);
+        return -1;
+    }
+
+    path_reset(ar, path);
+    return 0;
+}
+
+/*
+ * Operates on the specified path .. controls will be updated in the
+ * order listed in the XML file
+ */
+static int audio_route_update_path(struct audio_route *ar, const char *name, bool reverse)
+{
+    struct mixer_path *path;
+    int32_t i, end;
+    unsigned int j;
+
+    if (!ar) {
+        ALOGE("invalid audio_route");
+        return -1;
+    }
+
+    path = path_get_by_name(ar, name);
+    if (!path) {
+        ALOGE("unable to find path '%s'", name);
+        return -1;
+    }
+
+    i = reverse ? (path->length - 1) : 0;
+    end = reverse ? -1 : (int32_t)path->length;
+
+    /* BUG FIX: the original was a while-loop with the step at the bottom,
+     * so the 'continue' below skipped the step and spun forever on the
+     * first unsupported control.  A for-loop always performs the step. */
+    for (; i != end; i = reverse ? (i - 1) : (i + 1)) {
+        unsigned int ctl_index;
+        enum mixer_ctl_type type;
+
+        ctl_index = path->setting[i].ctl_index;
+
+        struct mixer_state *ms = &ar->mixer_state[ctl_index];
+
+        type = mixer_ctl_get_type(ms->ctl);
+        if (!is_supported_ctl_type(type)) {
+            continue;
+        }
+
+        /* if any value has changed, update the mixer */
+        for (j = 0; j < ms->num_values; j++) {
+            if (type == MIXER_CTL_TYPE_BYTE) {
+                if (ms->old_value.bytes[j] != ms->new_value.bytes[j]) {
+                    mixer_ctl_set_array(ms->ctl, ms->new_value.bytes, ms->num_values);
+                    memcpy(ms->old_value.bytes, ms->new_value.bytes, ms->num_values);
+                    break;
+                }
+            } else if (ms->old_value.integer[j] != ms->new_value.integer[j]) {
+                if (type == MIXER_CTL_TYPE_ENUM)
+                    mixer_ctl_set_value(ms->ctl, 0, ms->new_value.integer[0]);
+                else
+                    mixer_ctl_set_array(ms->ctl, ms->new_value.integer, ms->num_values);
+                /* the memcpy runs for both branches above; the original's
+                 * indentation misleadingly suggested it was else-only */
+                memcpy(ms->old_value.integer, ms->new_value.integer,
+                       ms->num_values * sizeof(int));
+                break;
+            }
+        }
+    }
+    return 0;
+}
+
+/* Stage a path's values, then push any changes to the hardware. */
+int audio_route_apply_and_update_path(struct audio_route *ar, const char *name)
+{
+    int rc = audio_route_apply_path(ar, name);
+
+    return (rc < 0) ? -1 : audio_route_update_path(ar, name, false /*reverse*/);
+}
+
+/* Stage a path's reset values, then push any changes to the hardware. */
+int audio_route_reset_and_update_path(struct audio_route *ar, const char *name)
+{
+    int rc = audio_route_reset_path(ar, name);
+
+    return (rc < 0) ? -1 : audio_route_update_path(ar, name, true /*reverse*/);
+}
+
+/*
+ * Create an audio_route for the given mixer card, parsing the mixer
+ * paths XML (xml_path, or the built-in default when NULL), applying the
+ * file's initial values and saving them as the reset baseline.
+ * Returns the new handle, or NULL on any failure.
+ */
+struct audio_route *audio_route_init(unsigned int card, const char *xml_path)
+{
+    struct config_parse_state state;
+    XML_Parser parser;
+    FILE *file;
+    int bytes_read;
+    void *buf;
+    struct audio_route *ar;
+
+    ar = calloc(1, sizeof(struct audio_route));
+    if (!ar)
+        goto err_calloc;
+
+    ar->mixer = mixer_open(card);
+    if (!ar->mixer) {
+        ALOGE("Unable to open the mixer, aborting.");
+        goto err_mixer_open;
+    }
+
+    ar->mixer_path = NULL;
+    ar->mixer_path_size = 0;
+    ar->num_mixer_paths = 0;
+
+    /* allocate space for and read current mixer settings */
+    if (alloc_mixer_state(ar) < 0)
+        goto err_mixer_state;
+
+    /* use the default XML path if none is provided */
+    if (xml_path == NULL)
+        xml_path = MIXER_XML_PATH;
+
+    file = fopen(xml_path, "r");
+    if (!file) {
+        ALOGE("Failed to open %s", xml_path);
+        goto err_fopen;
+    }
+
+    parser = XML_ParserCreate(NULL);
+    if (!parser) {
+        ALOGE("Failed to create XML parser");
+        goto err_parser_create;
+    }
+
+    memset(&state, 0, sizeof(state));
+    state.ar = ar;
+    XML_SetUserData(parser, &state);
+    XML_SetElementHandler(parser, start_tag, end_tag);
+
+    for (;;) {
+        buf = XML_GetBuffer(parser, BUF_SIZE);
+        if (buf == NULL)
+            goto err_parse;
+
+        /* fread() returns a size_t and never goes negative; the original
+           'bytes_read < 0' test could never fire, so use ferror() */
+        bytes_read = fread(buf, 1, BUF_SIZE, file);
+        if (bytes_read == 0 && ferror(file))
+            goto err_parse;
+
+        if (XML_ParseBuffer(parser, bytes_read,
+                            bytes_read == 0) == XML_STATUS_ERROR) {
+            /* BUG FIX: report the file actually parsed, not the
+               compile-time default MIXER_XML_PATH */
+            ALOGE("Error in mixer xml (%s)", xml_path);
+            goto err_parse;
+        }
+
+        if (bytes_read == 0)
+            break;
+    }
+
+    /* apply the initial mixer values, and save them so we can reset the
+       mixer to the original values */
+    audio_route_update_mixer(ar);
+    save_mixer_state(ar);
+
+    XML_ParserFree(parser);
+    fclose(file);
+    return ar;
+
+err_parse:
+    XML_ParserFree(parser);
+err_parser_create:
+    fclose(file);
+err_fopen:
+    free_mixer_state(ar);
+err_mixer_state:
+    mixer_close(ar->mixer);
+err_mixer_open:
+    free(ar);
+err_calloc:
+    return NULL;
+}
+
+/* Release an audio_route handle created by audio_route_init().
+ * Safe to call with NULL, matching the NULL tolerance of the other
+ * public entry points. */
+void audio_route_free(struct audio_route *ar)
+{
+    if (!ar)
+        return;
+
+    /* NOTE(review): the mixer_path array built during XML parsing is not
+       released here - confirm whether a path-freeing helper exists. */
+    free_mixer_state(ar);
+    mixer_close(ar->mixer);
+    free(ar);
+}
diff --git a/media/audio_route/include/audio_route/audio_route.h b/media/audio_route/include/audio_route/audio_route.h
new file mode 100644
index 0000000..9e46015
--- /dev/null
+++ b/media/audio_route/include/audio_route/audio_route.h
@@ -0,0 +1,50 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef AUDIO_ROUTE_H
+#define AUDIO_ROUTE_H
+
+#if defined(__cplusplus)
+extern "C" {
+#endif
+
+/* Initialize and free the audio routes.
+ * audio_route_init() returns NULL on failure; xml_path may be NULL to
+ * fall back to the library's built-in default mixer XML path. */
+struct audio_route *audio_route_init(unsigned int card, const char *xml_path);
+void audio_route_free(struct audio_route *ar);
+
+/* Apply an audio route path by name.
+ * Returns 0 on success, -1 if ar is NULL or the path is unknown. */
+int audio_route_apply_path(struct audio_route *ar, const char *name);
+
+/* Apply and update mixer with audio route path by name.
+ * Returns 0 on success, -1 on error. */
+int audio_route_apply_and_update_path(struct audio_route *ar, const char *name);
+
+/* Reset an audio route path by name.
+ * Returns 0 on success, -1 if ar is NULL or the path is unknown. */
+int audio_route_reset_path(struct audio_route *ar, const char *name);
+
+/* Reset and update mixer with audio route path by name.
+ * Returns 0 on success, -1 on error. */
+int audio_route_reset_and_update_path(struct audio_route *ar, const char *name);
+
+/* Reset the audio routes back to the initial state */
+void audio_route_reset(struct audio_route *ar);
+
+/* Update the mixer with any changed values */
+int audio_route_update_mixer(struct audio_route *ar);
+
+#if defined(__cplusplus)
+}  /* extern "C" */
+#endif
+
+#endif
diff --git a/media/audio_utils/Android.mk b/media/audio_utils/Android.mk
new file mode 100644
index 0000000..a51944a
--- /dev/null
+++ b/media/audio_utils/Android.mk
@@ -0,0 +1,94 @@
+LOCAL_PATH:= $(call my-dir)
+
+include $(CLEAR_VARS)
+
+# --- target shared library ---
+LOCAL_MODULE := libaudioutils
+LOCAL_MODULE_TAGS := optional
+
+LOCAL_SRC_FILES:= \
+	channels.c \
+	fifo.c \
+	fixedfft.cpp.arm \
+	format.c \
+	minifloat.c \
+	primitives.c \
+	resampler.c \
+	roundup.c \
+	echo_reference.c
+
+LOCAL_CFLAGS := -Wno-unused-parameter
+# speex appears once here; the redundant standalone
+# "LOCAL_C_INCLUDES += $(call include-path-for, speex)" line was removed.
+LOCAL_C_INCLUDES += \
+	$(call include-path-for, speex) \
+	$(call include-path-for, audio-utils)
+
+LOCAL_SHARED_LIBRARIES := \
+	libcutils \
+	liblog \
+	libspeexresampler
+
+include $(BUILD_SHARED_LIBRARY)
+
+# --- host static library (no resampler / fft / echo reference) ---
+include $(CLEAR_VARS)
+LOCAL_MODULE := libaudioutils
+LOCAL_MODULE_TAGS := optional
+LOCAL_SRC_FILES := \
+	channels.c \
+	fifo.c \
+	format.c \
+	minifloat.c \
+	primitives.c \
+	roundup.c
+LOCAL_C_INCLUDES += \
+	$(call include-path-for, audio-utils)
+LOCAL_CFLAGS := -D__unused= -Wno-unused-parameter
+include $(BUILD_HOST_STATIC_LIBRARY)
+
+include $(CLEAR_VARS)
+
+# --- target static sndfile ---
+LOCAL_MODULE := libsndfile
+LOCAL_MODULE_TAGS := optional
+
+LOCAL_SRC_FILES := \
+	tinysndfile.c
+
+LOCAL_C_INCLUDES += \
+	$(call include-path-for, audio-utils)
+
+LOCAL_CFLAGS := -UHAVE_STDERR
+
+include $(BUILD_STATIC_LIBRARY)
+
+include $(CLEAR_VARS)
+
+# --- host static sndfile ---
+LOCAL_MODULE := libsndfile
+LOCAL_MODULE_TAGS := optional
+
+LOCAL_SRC_FILES := \
+	tinysndfile.c
+
+LOCAL_C_INCLUDES += \
+	$(call include-path-for, audio-utils)
+
+#LOCAL_SHARED_LIBRARIES := libaudioutils
+
+include $(BUILD_HOST_STATIC_LIBRARY)
+
+include $(CLEAR_VARS)
+
+# --- target static fifo ---
+LOCAL_MODULE := libfifo
+LOCAL_MODULE_TAGS := optional
+
+LOCAL_SRC_FILES := \
+	fifo.c \
+	primitives.c \
+	roundup.c
+
+LOCAL_CFLAGS := -Wno-unused-parameter
+LOCAL_C_INCLUDES += \
+	$(call include-path-for, audio-utils)
+
+include $(BUILD_STATIC_LIBRARY)
+
+include $(call all-makefiles-under,$(LOCAL_PATH))
+
diff --git a/media/audio_utils/channels.c b/media/audio_utils/channels.c
new file mode 100644
index 0000000..fa005d3
--- /dev/null
+++ b/media/audio_utils/channels.c
@@ -0,0 +1,377 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <string.h>
+#include <audio_utils/channels.h>
+#include "private/private.h"
+
+/*
+ * Clamps a 24-bit value from a 32-bit sample
+ */
+static inline int32_t clamp24(int32_t sample)
+{
+    /* Non-zero iff any of bits 23..31 disagree with the sign bit, i.e. the
+     * value does not fit in signed 24 bits.  Relies on arithmetic right
+     * shift of negative values (true on GCC/Clang targets). */
+    if ((sample>>23) ^ (sample>>31)) {
+        /* saturate: 0x007FFFFF for positive input, 0xFF800000 for negative */
+        sample = 0x007FFFFF ^ (sample>>31);
+    }
+    return sample;
+}
+
+/*
+ * Converts a uint8x3_t into an int32_t
+ */
+/* Assembles the 3 bytes into bits 8..31 then arithmetic-shifts right by 8
+ * to sign-extend, and finally clamps to the signed 24-bit range.
+ * NOTE(review): plain (non-static) 'inline' at file scope relies on an
+ * extern declaration elsewhere for linkage - confirm the header provides
+ * one, otherwise unoptimized builds may fail to link. */
+inline int32_t uint8x3_to_int32(uint8x3_t val) {
+#ifdef HAVE_BIG_ENDIAN
+    int32_t temp = (val.c[0] << 24 | val.c[1] << 16 | val.c[2] << 8) >> 8;
+#else
+    int32_t temp = (val.c[2] << 24 | val.c[1] << 16 | val.c[0] << 8) >> 8;
+#endif
+    return clamp24(temp);
+}
+
+/*
+ * Converts an int32_t to a uint8x3_t
+ */
+/* Packs the low 24 bits of 'in' into 3 bytes; byte order follows the
+ * HAVE_BIG_ENDIAN compile-time switch.  Bits 24..31 are discarded. */
+inline uint8x3_t int32_to_uint8x3(int32_t in) {
+    uint8x3_t out;
+#ifdef HAVE_BIG_ENDIAN
+    out.c[2] = in;
+    out.c[1] = in >> 8;
+    out.c[0] = in >> 16;
+#else
+    out.c[0] = in;
+    out.c[1] = in >> 8;
+    out.c[2] = in >> 16;
+#endif
+    return out;
+}
+
+/* Channel expands (adds zeroes to audio frame end) from an input buffer to an output buffer.
+ * See expand_channels() function below for parameter definitions.
+ *
+ * Move from back to front so that the conversion can be done in-place
+ * i.e. in_buff == out_buff
+ * NOTE: num_in_bytes must be a multiple of in_buff_channels * in_buff_sample_size.
+ * NOTE: arguments are expanded more than once - pass only side-effect-free expressions.
+ * NOTE: the macro body contains a 'return', so it must be invoked as the tail of a function.
+ */
+#define EXPAND_CHANNELS(in_buff, in_buff_chans, out_buff, out_buff_chans, num_in_bytes, zero) \
+{ \
+    size_t num_in_samples = (num_in_bytes) / sizeof(*(in_buff)); \
+    size_t num_out_samples = (num_in_samples * (out_buff_chans)) / (in_buff_chans); \
+    typeof(out_buff) dst_ptr = (out_buff) + num_out_samples - 1; \
+    size_t src_index; \
+    typeof(in_buff) src_ptr = (in_buff) + num_in_samples - 1; \
+    size_t num_zero_chans = (out_buff_chans) - (in_buff_chans); \
+    for (src_index = 0; src_index < num_in_samples; src_index += (in_buff_chans)) { \
+        size_t dst_offset; \
+        for (dst_offset = 0; dst_offset < num_zero_chans; dst_offset++) { \
+            *dst_ptr-- = zero; \
+        } \
+        for (; dst_offset < (out_buff_chans); dst_offset++) { \
+            *dst_ptr-- = *src_ptr--; \
+        } \
+    } \
+    /* return number of *bytes* generated */ \
+    return num_out_samples * sizeof(*(out_buff)); \
+}
+
+/* Channel expands from a MONO input buffer to a MULTICHANNEL output buffer by duplicating the
+ * single input channel to the first 2 output channels and 0-filling the remaining.
+ * See expand_channels() function below for parameter definitions.
+ *
+ * in_buff_chans MUST be 1 and out_buff_chans MUST be >= 2
+ *
+ * Move from back to front so that the conversion can be done in-place
+ * i.e. in_buff == out_buff
+ * NOTE: num_in_bytes must be a multiple of in_buff_channels * in_buff_sample_size.
+ * NOTE: arguments are expanded more than once - pass only side-effect-free expressions.
+ * NOTE: the macro body contains a 'return', so it must be invoked as the tail of a function.
+ */
+#define EXPAND_MONO_TO_MULTI(in_buff, in_buff_chans, out_buff, out_buff_chans, num_in_bytes, zero) \
+{ \
+    size_t num_in_samples = (num_in_bytes) / sizeof(*(in_buff)); \
+    size_t num_out_samples = (num_in_samples * (out_buff_chans)) / (in_buff_chans); \
+    typeof(out_buff) dst_ptr = (out_buff) + num_out_samples - 1; \
+    size_t src_index; \
+    typeof(in_buff) src_ptr = (in_buff) + num_in_samples - 1; \
+    size_t num_zero_chans = (out_buff_chans) - (in_buff_chans) - 1; \
+    for (src_index = 0; src_index < num_in_samples; src_index += (in_buff_chans)) { \
+        size_t dst_offset; \
+        for (dst_offset = 0; dst_offset < num_zero_chans; dst_offset++) { \
+            *dst_ptr-- = zero; \
+        } \
+        for (; dst_offset < (out_buff_chans); dst_offset++) { \
+            *dst_ptr-- = *src_ptr; \
+        } \
+        src_ptr--; \
+    } \
+    /* return number of *bytes* generated */ \
+    return num_out_samples * sizeof(*(out_buff)); \
+}
+
+/* Channel contracts (removes from audio frame end) from an input buffer to an output buffer.
+ * See contract_channels() function below for parameter definitions.
+ *
+ * Move from front to back so that the conversion can be done in-place
+ * i.e. in_buff == out_buff
+ * NOTE: num_in_bytes must be a multiple of in_buff_channels * in_buff_sample_size.
+ * NOTE: arguments are expanded more than once - pass only side-effect-free expressions.
+ * NOTE: the macro body contains a 'return', so it must be invoked as the tail of a function.
+ */
+#define CONTRACT_CHANNELS(in_buff, in_buff_chans, out_buff, out_buff_chans, num_in_bytes) \
+{ \
+    size_t num_in_samples = (num_in_bytes) / sizeof(*(in_buff)); \
+    size_t num_out_samples = (num_in_samples * (out_buff_chans)) / (in_buff_chans); \
+    size_t num_skip_samples = (in_buff_chans) - (out_buff_chans); \
+    typeof(out_buff) dst_ptr = out_buff; \
+    typeof(in_buff) src_ptr = in_buff; \
+    size_t src_index; \
+    for (src_index = 0; src_index < num_in_samples; src_index += (in_buff_chans)) { \
+        size_t dst_offset; \
+        for (dst_offset = 0; dst_offset < (out_buff_chans); dst_offset++) { \
+            *dst_ptr++ = *src_ptr++; \
+        } \
+        src_ptr += num_skip_samples; \
+    } \
+    /* return number of *bytes* generated */ \
+    return num_out_samples * sizeof(*(out_buff)); \
+}
+
+/* Channel contracts from a MULTICHANNEL input buffer to a MONO output buffer by mixing the
+ * first two input channels into the single output channel (and skipping the rest).
+ * See contract_channels() function below for parameter definitions.
+ *
+ * in_buff_chans MUST be >= 2 and out_buff_chans MUST be 1
+ *
+ * Move from front to back so that the conversion can be done in-place
+ * i.e. in_buff == out_buff
+ * NOTE: num_in_bytes must be a multiple of in_buff_channels * in_buff_sample_size.
+ * NOTE: Overload of the summed channels is avoided by averaging the two input channels.
+ * NOTE: Can not be used for uint8x3_t samples, see CONTRACT_TO_MONO_24() below.
+ * NOTE: in_buff_chans and out_buff_chans are NOT macro parameters - they are
+ * captured from the invoking function's scope.
+ */
+#define CONTRACT_TO_MONO(in_buff, out_buff, num_in_bytes) \
+{ \
+    size_t num_in_samples = (num_in_bytes) / sizeof(*(in_buff)); \
+    size_t num_out_samples = (num_in_samples * out_buff_chans) / in_buff_chans; \
+    size_t num_skip_samples = in_buff_chans - 2; \
+    typeof(out_buff) dst_ptr = out_buff; \
+    typeof(in_buff) src_ptr = in_buff; \
+    int32_t temp0, temp1; \
+    size_t src_index; \
+    for (src_index = 0; src_index < num_in_samples; src_index += in_buff_chans) { \
+        temp0 = *src_ptr++; \
+        temp1 = *src_ptr++; \
+        /* *dst_ptr++ = temp >> 1; */ \
+        /* This bit of magic adds and normalizes without overflow (or so claims hunga@) */ \
+        /* Bitwise half adder trick, see http://en.wikipedia.org/wiki/Adder_(electronics) */ \
+        /* Hacker's delight, p. 19 http://www.hackersdelight.org/basics2.pdf */ \
+        *dst_ptr++ = (temp0 & temp1) + ((temp0 ^ temp1) >> 1); \
+        src_ptr += num_skip_samples; \
+    } \
+    /* return number of *bytes* generated */ \
+    return num_out_samples * sizeof(*(out_buff)); \
+}
+
+/* Channel contracts from a MULTICHANNEL uint8x3_t input buffer to a MONO uint8x3_t output buffer
+ * by mixing the first two input channels into the single output channel (and skipping the rest).
+ * See contract_channels() function below for parameter definitions.
+ *
+ * Move from front to back so that the conversion can be done in-place
+ * i.e. in_buff == out_buff
+ * NOTE: num_in_bytes must be a multiple of in_buff_channels * in_buff_sample_size.
+ * NOTE: Overload of the summed channels is avoided by averaging the two input channels.
+ * NOTE: Can not be used for normal, scalar samples, see CONTRACT_TO_MONO() above.
+ * NOTE: in_buff_chans and out_buff_chans are NOT macro parameters - they are
+ * captured from the invoking function's scope.
+ */
+#define CONTRACT_TO_MONO_24(in_buff, out_buff, num_in_bytes) \
+{ \
+    size_t num_in_samples = (num_in_bytes) / sizeof(*(in_buff)); \
+    size_t num_out_samples = (num_in_samples * out_buff_chans) / in_buff_chans; \
+    size_t num_skip_samples = in_buff_chans - 2; \
+    typeof(out_buff) dst_ptr = out_buff; \
+    typeof(in_buff) src_ptr = in_buff; \
+    int32_t temp; \
+    size_t src_index; \
+    for (src_index = 0; src_index < num_in_samples; src_index += in_buff_chans) { \
+        temp = uint8x3_to_int32(*src_ptr++); \
+        temp += uint8x3_to_int32(*src_ptr++); \
+        /* BUG FIX: advance the destination pointer ('++' was missing, so */ \
+        /* every frame overwrote the first output sample) */ \
+        *dst_ptr++ = int32_to_uint8x3(temp >> 1); \
+        src_ptr += num_skip_samples; \
+    } \
+    /* return number of *bytes* generated */ \
+    return num_out_samples * sizeof(*(out_buff)); \
+}
+
+/*
+ * Convert a buffer of N-channel, interleaved samples to M-channel
+ * (where N > M).
+ *   in_buff points to the buffer of samples
+ *   in_buff_channels Specifies the number of channels in the input buffer.
+ *   out_buff points to the buffer to receive converted samples.
+ *   out_buff_channels Specifies the number of channels in the output buffer.
+ *   sample_size_in_bytes Specifies the number of bytes per sample.
+ *   num_in_bytes size of input buffer in BYTES
+ * returns
+ *   the number of BYTES of output data.
+ * NOTE
+ *   channels > M are thrown away.
+ *   The out and sums buffers must either be completely separate (non-overlapping), or
+ *   they must both start at the same address. Partially overlapping buffers are not supported.
+ */
+static size_t contract_channels(const void* in_buff, size_t in_buff_chans,
+                                void* out_buff, size_t out_buff_chans,
+                                unsigned sample_size_in_bytes, size_t num_in_bytes)
+{
+    /* Every case below returns from inside the invoked macro, so the
+     * apparent missing 'break's cannot actually fall through.
+     * Unsupported sample widths return 0 (no output produced). */
+    switch (sample_size_in_bytes) {
+    case 1:
+        if (out_buff_chans == 1) {
+            /* Special case Multi to Mono */
+            CONTRACT_TO_MONO((const uint8_t*)in_buff, (uint8_t*)out_buff, num_in_bytes);
+            // returns in macro
+        } else {
+            CONTRACT_CHANNELS((const uint8_t*)in_buff, in_buff_chans,
+                              (uint8_t*)out_buff, out_buff_chans,
+                              num_in_bytes);
+            // returns in macro
+        }
+    case 2:
+        if (out_buff_chans == 1) {
+            /* Special case Multi to Mono */
+            CONTRACT_TO_MONO((const int16_t*)in_buff, (int16_t*)out_buff, num_in_bytes);
+            // returns in macro
+        } else {
+            CONTRACT_CHANNELS((const int16_t*)in_buff, in_buff_chans,
+                              (int16_t*)out_buff, out_buff_chans,
+                              num_in_bytes);
+            // returns in macro
+        }
+    case 3:
+        if (out_buff_chans == 1) {
+            /* Special case Multi to Mono */
+            CONTRACT_TO_MONO_24((const uint8x3_t*)in_buff,
+                                       (uint8x3_t*)out_buff, num_in_bytes);
+            // returns in macro
+        } else {
+            CONTRACT_CHANNELS((const uint8x3_t*)in_buff, in_buff_chans,
+                              (uint8x3_t*)out_buff, out_buff_chans,
+                              num_in_bytes);
+            // returns in macro
+        }
+    case 4:
+        if (out_buff_chans == 1) {
+            /* Special case Multi to Mono */
+            CONTRACT_TO_MONO((const int32_t*)in_buff, (int32_t*)out_buff, num_in_bytes);
+            // returns in macro
+        } else {
+            CONTRACT_CHANNELS((const int32_t*)in_buff, in_buff_chans,
+                              (int32_t*)out_buff, out_buff_chans,
+                              num_in_bytes);
+            // returns in macro
+        }
+    default:
+        return 0;
+    }
+}
+
+/*
+ * Convert a buffer of N-channel, interleaved samples to M-channel
+ * (where N < M).
+ *   in_buff points to the buffer of samples
+ *   in_buff_channels Specifies the number of channels in the input buffer.
+ *   out_buff points to the buffer to receive converted samples.
+ *   out_buff_channels Specifies the number of channels in the output buffer.
+ *   sample_size_in_bytes Specifies the number of bytes per sample.
+ *   num_in_bytes size of input buffer in BYTES
+ * returns
+ *   the number of BYTES of output data.
+ * NOTE
+ *   channels > N are filled with silence.
+ *   The in and out buffers must either be completely separate (non-overlapping), or
+ *   they must both start at the same address. Partially overlapping buffers are not supported.
+ */
+static size_t expand_channels(const void* in_buff, size_t in_buff_chans,
+                              void* out_buff, size_t out_buff_chans,
+                              unsigned sample_size_in_bytes, size_t num_in_bytes)
+{
+    /* Zero-valued packed 24-bit sample, used as the fill value for case 3. */
+    static const uint8x3_t packed24_zero; /* zero 24 bit sample */
+
+    /* Dispatch on bytes-per-sample. Each EXPAND_* macro expands to code that
+     * returns from this function, so the case labels never fall through. */
+    switch (sample_size_in_bytes) {
+    case 1:
+        if (in_buff_chans == 1) {
+            /* special case of mono source to multi-channel */
+            EXPAND_MONO_TO_MULTI((const uint8_t*)in_buff, in_buff_chans,
+                            (uint8_t*)out_buff, out_buff_chans,
+                            num_in_bytes, 0);
+            // returns in macro
+        } else {
+            EXPAND_CHANNELS((const uint8_t*)in_buff, in_buff_chans,
+                            (uint8_t*)out_buff, out_buff_chans,
+                            num_in_bytes, 0);
+            // returns in macro
+        }
+    case 2:
+        if (in_buff_chans == 1) {
+            /* special case of mono source to multi-channel */
+            EXPAND_MONO_TO_MULTI((const int16_t*)in_buff, in_buff_chans,
+                            (int16_t*)out_buff, out_buff_chans,
+                            num_in_bytes, 0);
+            // returns in macro
+        } else {
+            EXPAND_CHANNELS((const int16_t*)in_buff, in_buff_chans,
+                            (int16_t*)out_buff, out_buff_chans,
+                            num_in_bytes, 0);
+            // returns in macro
+        }
+    case 3:
+        /* packed 24-bit samples: silence is the all-zero packed struct,
+         * not the integer literal 0 */
+        if (in_buff_chans == 1) {
+            /* special case of mono source to multi-channel */
+            EXPAND_MONO_TO_MULTI((const uint8x3_t*)in_buff, in_buff_chans,
+                            (uint8x3_t*)out_buff, out_buff_chans,
+                            num_in_bytes, packed24_zero);
+            // returns in macro
+        } else {
+            EXPAND_CHANNELS((const uint8x3_t*)in_buff, in_buff_chans,
+                            (uint8x3_t*)out_buff, out_buff_chans,
+                            num_in_bytes, packed24_zero);
+            // returns in macro
+        }
+    case 4:
+        if (in_buff_chans == 1) {
+            /* special case of mono source to multi-channel */
+            EXPAND_MONO_TO_MULTI((const int32_t*)in_buff, in_buff_chans,
+                            (int32_t*)out_buff, out_buff_chans,
+                            num_in_bytes, 0);
+            // returns in macro
+        } else {
+           EXPAND_CHANNELS((const int32_t*)in_buff, in_buff_chans,
+                            (int32_t*)out_buff, out_buff_chans,
+                            num_in_bytes, 0);
+            // returns in macro
+        }
+    default:
+        /* unsupported sample size: report zero bytes produced */
+        return 0;
+    }
+}
+
+size_t adjust_channels(const void* in_buff, size_t in_buff_chans,
+                       void* out_buff, size_t out_buff_chans,
+                       unsigned sample_size_in_bytes, size_t num_in_bytes)
+{
+    /* Growing the channel count: route to the expansion helper. */
+    if (in_buff_chans < out_buff_chans) {
+        return expand_channels(in_buff, in_buff_chans, out_buff, out_buff_chans,
+                               sample_size_in_bytes, num_in_bytes);
+    }
+    /* Shrinking the channel count: route to the contraction helper. */
+    if (in_buff_chans > out_buff_chans) {
+        return contract_channels(in_buff, in_buff_chans, out_buff, out_buff_chans,
+                                 sample_size_in_bytes, num_in_bytes);
+    }
+    /* Same channel count: plain copy, skipped when operating in place. */
+    if (out_buff != in_buff) {
+        memcpy(out_buff, in_buff, num_in_bytes);
+    }
+    return num_in_bytes;
+}
diff --git a/media/audio_utils/echo_reference.c b/media/audio_utils/echo_reference.c
new file mode 100644
index 0000000..a822519
--- /dev/null
+++ b/media/audio_utils/echo_reference.c
@@ -0,0 +1,547 @@
+/*
+** Copyright 2011, The Android Open-Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "echo_reference"
+
+#include <errno.h>
+#include <inttypes.h>
+#include <pthread.h>
+#include <stdlib.h>
+
+#include <log/log.h>
+#include <system/audio.h>
+#include <audio_utils/resampler.h>
+#include <audio_utils/echo_reference.h>
+
+// echo reference state: bit field indicating if read, write or both are active.
+enum state {
+    ECHOREF_IDLE = 0x00,        // idle: neither reading nor writing
+    ECHOREF_READING = 0x01,     // reading is active (bit OR-ed into er->state)
+    ECHOREF_WRITING = 0x02      // writing is active (bit OR-ed into er->state)
+};
+
+struct echo_reference {
+    struct echo_reference_itfe itfe; // public interface; must remain the first member so
+                                     // an itfe pointer can be cast back to echo_reference
+    int status;                     // init status
+    uint32_t state;                 // active state: reading, writing or both (enum state bits)
+    audio_format_t rd_format;       // read sample format
+    uint32_t rd_channel_count;      // read number of channels
+    uint32_t rd_sampling_rate;      // read sampling rate in Hz
+    size_t rd_frame_size;           // read frame size in bytes (sample size * channel count)
+    audio_format_t wr_format;       // write sample format
+    uint32_t wr_channel_count;      // write number of channels
+    uint32_t wr_sampling_rate;      // write sampling rate in Hz
+    size_t wr_frame_size;           // write frame size in bytes (sample size * channel count)
+    void *buffer;                   // main buffer holding reference frames (read format)
+    size_t buf_size;                // main buffer size in frames
+    size_t frames_in;               // number of frames in main buffer
+    void *wr_buf;                   // buffer for input conversions (stereo->mono, resampling)
+    size_t wr_buf_size;             // size of conversion buffer in frames
+    size_t wr_frames_in;            // number of frames in conversion buffer
+    size_t wr_curr_frame_size;      // number of frames given to current write() function
+    void *wr_src_buf;               // resampler input buf (either wr_buf or buffer used by write())
+    struct timespec wr_render_time; // latest render time indicated by write()
+                                    // default ALSA gettimeofday() format
+    int32_t  playback_delay;        // playback buffer delay indicated by last write()
+    int16_t prev_delta_sign;        // sign of previous delay difference:
+                                    //  1: positive, -1: negative, 0: unknown
+    uint16_t delta_count;           // number of consecutive delay differences with same sign
+    pthread_mutex_t lock;                      // mutex protecting read/write concurrency
+    pthread_cond_t cond;                       // condition signaled when data is ready to read
+    struct resampler_itfe *resampler;          // input resampler
+    struct resampler_buffer_provider provider; // resampler buffer provider
+};
+
+
+// Resampler callback: hand out the next chunk of source frames.
+// Shrinks buffer->frame_count to what is actually available and points
+// buffer->i16 at the first unconsumed frame of the current write() input.
+int echo_reference_get_next_buffer(struct resampler_buffer_provider *buffer_provider,
+                                   struct resampler_buffer* buffer)
+{
+    if (buffer_provider == NULL) {
+        return -EINVAL;
+    }
+
+    /* Recover the enclosing echo_reference from its embedded provider field. */
+    struct echo_reference *er = (struct echo_reference *)
+            ((char *)buffer_provider - offsetof(struct echo_reference, provider));
+
+    if (er->wr_src_buf == NULL || er->wr_frames_in == 0) {
+        buffer->frame_count = 0;
+        buffer->raw = NULL;
+        return -ENODATA;
+    }
+
+    /* Hand out at most the frames still pending in the source buffer. */
+    if (buffer->frame_count > er->wr_frames_in) {
+        buffer->frame_count = er->wr_frames_in;
+    }
+
+    /* Offset is counted in rd_channel_count samples: any stereo-to-mono
+     * conversion happens before resampling, so the source data is already
+     * at the read channel count. */
+    size_t consumed = er->wr_curr_frame_size - er->wr_frames_in;
+    buffer->i16 = (int16_t *)er->wr_src_buf + consumed * er->rd_channel_count;
+
+    return 0;
+}
+
+// Resampler callback: account for frames the resampler has consumed.
+void echo_reference_release_buffer(struct resampler_buffer_provider *buffer_provider,
+                                  struct resampler_buffer* buffer)
+{
+    if (buffer_provider == NULL) {
+        return;
+    }
+
+    /* Recover the enclosing echo_reference and consume the returned frames. */
+    struct echo_reference *er = (struct echo_reference *)
+            ((char *)buffer_provider - offsetof(struct echo_reference, provider));
+    er->wr_frames_in -= buffer->frame_count;
+}
+
+// Drop all buffered reference data and delay-tracking state.
+// Must be called with er->lock held (the _l suffix convention).
+static void echo_reference_reset_l(struct echo_reference *er)
+{
+    ALOGV("echo_reference_reset_l()");
+    /* free(NULL) is a no-op, so this is safe to call repeatedly. */
+    free(er->buffer);
+    free(er->wr_buf);
+    er->buffer = NULL;
+    er->wr_buf = NULL;
+    er->buf_size = 0;
+    er->wr_buf_size = 0;
+    er->frames_in = 0;
+    er->wr_render_time.tv_sec = 0;
+    er->wr_render_time.tv_nsec = 0;
+    er->prev_delta_sign = 0;
+    er->delta_count = 0;
+}
+
+/* additional space in resampler buffer allowing for extra samples to be returned
+ * by speex resampler when sample rates ratio is not an integer.
+ */
+#define RESAMPLER_HEADROOM_SAMPLES   10
+
+/*
+ * Feed playback (far end) frames into the echo reference.
+ *
+ * buffer == NULL means "stop writing": the WRITING state bit is cleared and
+ * the reference is reset. Otherwise the frames are converted to the read
+ * channel count / sample rate if needed and appended to the main buffer.
+ *
+ * Returns 0 on success, -EINVAL on a NULL interface, -ENODEV if the
+ * resampler cannot be created, -ENOMEM if a buffer cannot be grown.
+ */
+static int echo_reference_write(struct echo_reference_itfe *echo_reference,
+                         struct echo_reference_buffer *buffer)
+{
+    struct echo_reference *er = (struct echo_reference *)echo_reference;
+    int status = 0;
+
+    if (er == NULL) {
+        return -EINVAL;
+    }
+
+    pthread_mutex_lock(&er->lock);
+
+    if (buffer == NULL) {
+        ALOGV("echo_reference_write() stop write");
+        er->state &= ~ECHOREF_WRITING;
+        echo_reference_reset_l(er);
+        goto exit;
+    }
+
+    ALOGV("echo_reference_write() START trying to write %zu frames", buffer->frame_count);
+    ALOGV("echo_reference_write() playbackTimestamp:[%d].[%d], er->playback_delay:[%" PRId32 "]",
+            (int)buffer->time_stamp.tv_sec,
+            (int)buffer->time_stamp.tv_nsec, er->playback_delay);
+
+    // discard writes until a valid time stamp is provided.
+    if ((buffer->time_stamp.tv_sec == 0) && (buffer->time_stamp.tv_nsec == 0) &&
+        (er->wr_render_time.tv_sec == 0) && (er->wr_render_time.tv_nsec == 0)) {
+        goto exit;
+    }
+
+    if ((er->state & ECHOREF_WRITING) == 0) {
+        ALOGV("echo_reference_write() start write");
+        if (er->resampler != NULL) {
+            er->resampler->reset(er->resampler);
+        }
+        er->state |= ECHOREF_WRITING;
+    }
+
+    // no reader attached: do not accumulate reference frames
+    if ((er->state & ECHOREF_READING) == 0) {
+        goto exit;
+    }
+
+    er->wr_render_time.tv_sec  = buffer->time_stamp.tv_sec;
+    er->wr_render_time.tv_nsec = buffer->time_stamp.tv_nsec;
+
+    er->playback_delay = buffer->delay_ns;
+
+    // this will be used in the get_next_buffer, to support variable input buffer sizes
+    er->wr_curr_frame_size = buffer->frame_count;
+
+    void *srcBuf;
+    size_t inFrames;
+    // do stereo to mono and down sampling if necessary
+    if (er->rd_channel_count != er->wr_channel_count ||
+            er->rd_sampling_rate != er->wr_sampling_rate) {
+        size_t wrBufSize = buffer->frame_count;
+
+        inFrames = buffer->frame_count;
+
+        if (er->rd_sampling_rate != er->wr_sampling_rate) {
+            inFrames = (buffer->frame_count * er->rd_sampling_rate) / er->wr_sampling_rate +
+                                                    RESAMPLER_HEADROOM_SAMPLES;
+            // wr_buf is not only used as resampler output but also for stereo to mono conversion
+            // output so buffer size is driven by both write and read sample rates
+            if (inFrames > wrBufSize) {
+                wrBufSize = inFrames;
+            }
+        }
+
+        if (er->wr_buf_size < wrBufSize) {
+            ALOGV("echo_reference_write() increasing write buffer size from %zu to %zu",
+                    er->wr_buf_size, wrBufSize);
+            // check the realloc() result: overwriting wr_buf directly would leak
+            // the old buffer and dereference NULL on allocation failure
+            void *new_wr_buf = realloc(er->wr_buf, wrBufSize * er->rd_frame_size);
+            if (new_wr_buf == NULL) {
+                status = -ENOMEM;
+                goto exit;
+            }
+            er->wr_buf = new_wr_buf;
+            er->wr_buf_size = wrBufSize;
+        }
+
+        if (er->rd_channel_count != er->wr_channel_count) {
+            // must be stereo to mono: average each left/right sample pair
+            int16_t *src16 = (int16_t *)buffer->raw;
+            int16_t *dst16 = (int16_t *)er->wr_buf;
+            size_t frames = buffer->frame_count;
+            while (frames--) {
+                *dst16++ = (int16_t)(((int32_t)*src16 + (int32_t)*(src16 + 1)) >> 1);
+                src16 += 2;
+            }
+        }
+        if (er->wr_sampling_rate != er->rd_sampling_rate) {
+            if (er->resampler == NULL) {
+                // create the resampler lazily on the first write that needs it
+                int rc;
+                ALOGV("echo_reference_write() new ReSampler(%d, %d)",
+                      er->wr_sampling_rate, er->rd_sampling_rate);
+                er->provider.get_next_buffer = echo_reference_get_next_buffer;
+                er->provider.release_buffer = echo_reference_release_buffer;
+                rc = create_resampler(er->wr_sampling_rate,
+                                 er->rd_sampling_rate,
+                                 er->rd_channel_count,
+                                 RESAMPLER_QUALITY_DEFAULT,
+                                 &er->provider,
+                                 &er->resampler);
+                if (rc != 0) {
+                    er->resampler = NULL;
+                    ALOGV("echo_reference_write() failure to create resampler %d", rc);
+                    status = -ENODEV;
+                    goto exit;
+                }
+            }
+            // er->wr_src_buf and er->wr_frames_in are used by getNexBuffer() called by the
+            // resampler to get new frames
+            if (er->rd_channel_count != er->wr_channel_count) {
+                er->wr_src_buf = er->wr_buf;
+            } else {
+                er->wr_src_buf = buffer->raw;
+            }
+            er->wr_frames_in = buffer->frame_count;
+            // inFrames is always more than we need here to get frames remaining from previous runs
+            // inFrames is updated by resample() with the number of frames produced
+            ALOGV("echo_reference_write() ReSampling(%d, %d)",
+                  er->wr_sampling_rate, er->rd_sampling_rate);
+            er->resampler->resample_from_provider(er->resampler,
+                                                     (int16_t *)er->wr_buf, &inFrames);
+            // %zu: er->wr_frames_in is a size_t (previously logged with %d)
+            ALOGV_IF(er->wr_frames_in != 0,
+                    "echo_reference_write() er->wr_frames_in not 0 (%zu) after resampler",
+                    er->wr_frames_in);
+        }
+        srcBuf = er->wr_buf;
+    } else {
+        // formats already match: append the caller's frames directly
+        inFrames = buffer->frame_count;
+        srcBuf = buffer->raw;
+    }
+
+    // grow the main reference buffer as needed before appending
+    if (er->frames_in + inFrames > er->buf_size) {
+        ALOGV("echo_reference_write() increasing buffer size from %zu to %zu",
+                er->buf_size, er->frames_in + inFrames);
+        void *new_buffer = realloc(er->buffer, (er->frames_in + inFrames) * er->rd_frame_size);
+        if (new_buffer == NULL) {
+            status = -ENOMEM;
+            goto exit;
+        }
+        er->buffer = new_buffer;
+        er->buf_size = er->frames_in + inFrames;
+    }
+    memcpy((char *)er->buffer + er->frames_in * er->rd_frame_size,
+           srcBuf,
+           inFrames * er->rd_frame_size);
+    er->frames_in += inFrames;
+
+    ALOGV("echo_reference_write() frames written:[%zu], frames total:[%zu] buffer size:[%zu]\n"
+          "                       er->wr_render_time:[%d].[%d], er->playback_delay:[%" PRId32 "]",
+          inFrames, er->frames_in, er->buf_size,
+          (int)er->wr_render_time.tv_sec, (int)er->wr_render_time.tv_nsec, er->playback_delay);
+
+    // wake up a reader blocked in echo_reference_read()
+    pthread_cond_signal(&er->cond);
+exit:
+    pthread_mutex_unlock(&er->lock);
+    ALOGV("echo_reference_write() END");
+    return status;
+}
+
+// delay jump threshold to update ref buffer: 6 samples at 8kHz in nsecs
+#define MIN_DELAY_DELTA_NS (375000*2)
+// number of consecutive delta with same sign between expected and actual delay before adjusting
+// the buffer
+#define MIN_DELTA_NUM 4
+
+
+/*
+ * Read reference (far end) frames time-aligned with the capture buffer.
+ *
+ * buffer == NULL means "stop reading". Otherwise buffer->frame_count frames
+ * are copied to buffer->raw; buffer->time_stamp and buffer->delay_ns describe
+ * the capture time and capture-path delay and are used to align the reference,
+ * after which buffer->delay_ns is reset to 0 (the output is time aligned).
+ *
+ * Returns 0 on success, -EINVAL on a NULL interface.
+ */
+static int echo_reference_read(struct echo_reference_itfe *echo_reference,
+                         struct echo_reference_buffer *buffer)
+{
+    struct echo_reference *er = (struct echo_reference *)echo_reference;
+
+    if (er == NULL) {
+        return -EINVAL;
+    }
+
+    pthread_mutex_lock(&er->lock);
+
+    if (buffer == NULL) {
+        ALOGV("echo_reference_read() stop read");
+        er->state &= ~ECHOREF_READING;
+        goto exit;
+    }
+
+    ALOGV("echo_reference_read() START, delayCapture:[%" PRId32 "], "
+            "er->frames_in:[%zu],buffer->frame_count:[%zu]",
+            buffer->delay_ns, er->frames_in, buffer->frame_count);
+
+    if ((er->state & ECHOREF_READING) == 0) {
+        ALOGV("echo_reference_read() start read");
+        echo_reference_reset_l(er);
+        er->state |= ECHOREF_READING;
+    }
+
+    // no writer yet: return silence so the caller sees a zero reference
+    if ((er->state & ECHOREF_WRITING) == 0) {
+        memset(buffer->raw, 0, er->rd_frame_size * buffer->frame_count);
+        buffer->delay_ns = 0;
+        goto exit;
+    }
+
+    // allow some time for new frames to arrive if not enough frames are ready for read:
+    // wait at most half the duration of the requested buffer
+    if (er->frames_in < buffer->frame_count) {
+        uint32_t timeoutMs = (uint32_t)((1000 * buffer->frame_count) / er->rd_sampling_rate / 2);
+        struct timespec ts = {0, 0};
+
+        clock_gettime(CLOCK_REALTIME, &ts);
+
+        ts.tv_sec  += timeoutMs/1000;
+        ts.tv_nsec += (timeoutMs%1000) * 1000000;
+        if (ts.tv_nsec >= 1000000000) {
+            ts.tv_nsec -= 1000000000;
+            ts.tv_sec  += 1;
+        }
+
+        pthread_cond_timedwait(&er->cond, &er->lock, &ts);
+
+        // PRIu32/%zu: previous format used %d for unsigned/size_t values
+        ALOGV_IF((er->frames_in < buffer->frame_count),
+                 "echo_reference_read() waited %" PRIu32 " ms but still not enough frames"\
+                 " er->frames_in: %zu, buffer->frame_count = %zu",
+                 timeoutMs, er->frames_in, buffer->frame_count);
+    }
+
+    int64_t timeDiff;
+    struct timespec tmp;
+
+    if ((er->wr_render_time.tv_sec == 0 && er->wr_render_time.tv_nsec == 0) ||
+        (buffer->time_stamp.tv_sec == 0 && buffer->time_stamp.tv_nsec == 0)) {
+        ALOGV("echo_reference_read(): NEW:timestamp is zero---------setting timeDiff = 0, "\
+             "not updating delay this time");
+        timeDiff = 0;
+    } else {
+        // elapsed time between the last render timestamp and this capture timestamp
+        if (buffer->time_stamp.tv_nsec < er->wr_render_time.tv_nsec) {
+            tmp.tv_sec = buffer->time_stamp.tv_sec - er->wr_render_time.tv_sec - 1;
+            tmp.tv_nsec = 1000000000 + buffer->time_stamp.tv_nsec - er->wr_render_time.tv_nsec;
+        } else {
+            tmp.tv_sec = buffer->time_stamp.tv_sec - er->wr_render_time.tv_sec;
+            tmp.tv_nsec = buffer->time_stamp.tv_nsec - er->wr_render_time.tv_nsec;
+        }
+        timeDiff = (((int64_t)tmp.tv_sec * 1000000000 + tmp.tv_nsec));
+
+        int64_t expectedDelayNs =  er->playback_delay + buffer->delay_ns - timeDiff;
+
+        if (er->resampler != NULL) {
+            // Resampler already compensates part of the delay
+            int32_t rsmp_delay = er->resampler->delay_ns(er->resampler);
+            expectedDelayNs -= rsmp_delay;
+        }
+
+        ALOGV("echo_reference_read(): expectedDelayNs[%" PRId64 "] = "
+                "er->playback_delay[%" PRId32 "] + delayCapture[%" PRId32
+                "] - timeDiff[%" PRId64 "]",
+                expectedDelayNs, er->playback_delay, buffer->delay_ns, timeDiff);
+
+        if (expectedDelayNs > 0) {
+            // delay represented by the frames currently held in the buffer
+            int64_t delayNs = ((int64_t)er->frames_in * 1000000000) / er->rd_sampling_rate;
+
+            int64_t  deltaNs = delayNs - expectedDelayNs;
+
+            ALOGV("echo_reference_read(): EchoPathDelayDeviation between reference and DMA [%"
+                    PRId64 "]", deltaNs);
+            if (llabs(deltaNs) >= MIN_DELAY_DELTA_NS) {
+                // smooth the variation and update the reference buffer only
+                // if a deviation in the same direction is observed for more than MIN_DELTA_NUM
+                // consecutive reads.
+                int16_t delay_sign = (deltaNs >= 0) ? 1 : -1;
+                if (delay_sign == er->prev_delta_sign) {
+                    er->delta_count++;
+                } else {
+                    er->delta_count = 1;
+                }
+                er->prev_delta_sign = delay_sign;
+
+                if (er->delta_count > MIN_DELTA_NUM) {
+                    size_t previousFrameIn = er->frames_in;
+                    er->frames_in = (size_t)((expectedDelayNs * er->rd_sampling_rate)/1000000000);
+                    int offset = er->frames_in - previousFrameIn;
+
+                    ALOGV("echo_reference_read(): deltaNs ENOUGH and %s: "
+                            "er->frames_in: %zu, previousFrameIn = %zu",
+                         delay_sign ? "positive" : "negative", er->frames_in, previousFrameIn);
+
+                    if (deltaNs < 0) {
+                        // Less data available in the reference buffer than expected
+                        if (er->frames_in > er->buf_size) {
+                            // check realloc() so an OOM cannot lose the buffer or crash
+                            void *grown = realloc(er->buffer, er->frames_in * er->rd_frame_size);
+                            if (grown == NULL) {
+                                // keep the old buffer; limit the adjustment to its capacity
+                                er->frames_in = er->buf_size;
+                            } else {
+                                er->buffer = grown;
+                                er->buf_size = er->frames_in;
+                                ALOGV("echo_reference_read(): increasing buffer size to %zu",
+                                      er->buf_size);
+                            }
+                            // recompute in case frames_in was clamped above
+                            offset = er->frames_in - previousFrameIn;
+                        }
+
+                        if (offset > 0) {
+                            memset((char *)er->buffer + previousFrameIn * er->rd_frame_size,
+                                   0, offset * er->rd_frame_size);
+                            ALOGV("echo_reference_read(): pushing ref buffer by [%d]", offset);
+                        }
+                    } else {
+                        // More data available in the reference buffer than expected
+                        offset = -offset;
+                        if (offset > 0) {
+                            // memmove: source and destination overlap inside er->buffer
+                            // (memcpy on overlapping regions is undefined behavior)
+                            memmove(er->buffer, (char *)er->buffer + (offset * er->rd_frame_size),
+                                   er->frames_in * er->rd_frame_size);
+                            ALOGV("echo_reference_read(): shifting ref buffer by [%zu]",
+                                  er->frames_in);
+                        }
+                    }
+                }
+            } else {
+                er->delta_count = 0;
+                er->prev_delta_sign = 0;
+                ALOGV("echo_reference_read(): Constant EchoPathDelay - difference "
+                        "between reference and DMA %" PRId64, deltaNs);
+            }
+        } else {
+            ALOGV("echo_reference_read(): NEGATIVE expectedDelayNs[%" PRId64
+                 "] = er->playback_delay[%" PRId32 "] + delayCapture[%" PRId32
+                 "] - timeDiff[%" PRId64 "]",
+                 expectedDelayNs, er->playback_delay, buffer->delay_ns, timeDiff);
+        }
+    }
+
+    // pad with silence when fewer frames are buffered than requested
+    if (er->frames_in < buffer->frame_count) {
+        if (buffer->frame_count > er->buf_size) {
+            void *grown = realloc(er->buffer, buffer->frame_count * er->rd_frame_size);
+            if (grown == NULL) {
+                // OOM: return silence rather than dereferencing a NULL buffer
+                memset(buffer->raw, 0, er->rd_frame_size * buffer->frame_count);
+                buffer->delay_ns = 0;
+                goto exit;
+            }
+            er->buffer = grown;
+            er->buf_size = buffer->frame_count;
+            ALOGV("echo_reference_read(): increasing buffer size to %zu", er->buf_size);
+        }
+        // filling up the reference buffer with 0s to match the expected delay.
+        memset((char *)er->buffer + er->frames_in * er->rd_frame_size,
+            0, (buffer->frame_count - er->frames_in) * er->rd_frame_size);
+        er->frames_in = buffer->frame_count;
+    }
+
+    memcpy(buffer->raw,
+           (char *)er->buffer,
+           buffer->frame_count * er->rd_frame_size);
+
+    er->frames_in -= buffer->frame_count;
+    // memmove: the remaining frames are compacted within the same buffer and
+    // the regions may overlap (memcpy here is undefined behavior)
+    memmove(er->buffer,
+           (char *)er->buffer + buffer->frame_count * er->rd_frame_size,
+           er->frames_in * er->rd_frame_size);
+
+    // As the reference buffer is now time aligned to the microphone signal there is a zero delay
+    buffer->delay_ns = 0;
+
+    ALOGV("echo_reference_read() END %zu frames, total frames in %zu",
+          buffer->frame_count, er->frames_in);
+
+    pthread_cond_signal(&er->cond);
+
+exit:
+    pthread_mutex_unlock(&er->lock);
+    return 0;
+}
+
+
+/*
+ * Allocate and initialize an echo reference.
+ *
+ * Only 16-bit PCM with matching read/write formats is supported; the capture
+ * (read) side may be mono or stereo, the playback (write) side must be stereo.
+ * On success *echo_reference points at the new interface; the caller releases
+ * it with release_echo_reference().
+ *
+ * Returns 0 on success, -EINVAL on bad parameters, -ENOMEM on allocation failure.
+ */
+int create_echo_reference(audio_format_t rdFormat,
+                            uint32_t rdChannelCount,
+                            uint32_t rdSamplingRate,
+                            audio_format_t wrFormat,
+                            uint32_t wrChannelCount,
+                            uint32_t wrSamplingRate,
+                            struct echo_reference_itfe **echo_reference)
+{
+    struct echo_reference *er;
+
+    ALOGV("create_echo_reference()");
+
+    if (echo_reference == NULL) {
+        return -EINVAL;
+    }
+
+    *echo_reference = NULL;
+
+    if (rdFormat != AUDIO_FORMAT_PCM_16_BIT ||
+            rdFormat != wrFormat) {
+        ALOGW("create_echo_reference bad format rd %d, wr %d", rdFormat, wrFormat);
+        return -EINVAL;
+    }
+    // %u: channel counts are uint32_t (previously logged with %d)
+    if ((rdChannelCount != 1 && rdChannelCount != 2) ||
+            wrChannelCount != 2) {
+        ALOGW("create_echo_reference bad channel count rd %u, wr %u", rdChannelCount,
+                wrChannelCount);
+        return -EINVAL;
+    }
+
+    // check the allocation: the previous code dereferenced the result unconditionally
+    er = calloc(1, sizeof(*er));
+    if (er == NULL) {
+        return -ENOMEM;
+    }
+
+    // initialize the synchronization objects explicitly instead of relying on
+    // zero-filled memory being a valid mutex/condvar representation
+    pthread_mutex_init(&er->lock, NULL);
+    pthread_cond_init(&er->cond, NULL);
+
+    er->itfe.read = echo_reference_read;
+    er->itfe.write = echo_reference_write;
+
+    er->state = ECHOREF_IDLE;
+    er->rd_format = rdFormat;
+    er->rd_channel_count = rdChannelCount;
+    er->rd_sampling_rate = rdSamplingRate;
+    er->wr_format = wrFormat;
+    er->wr_channel_count = wrChannelCount;
+    er->wr_sampling_rate = wrSamplingRate;
+    // frame size in bytes = bytes per sample * channel count
+    er->rd_frame_size = audio_bytes_per_sample(rdFormat) * rdChannelCount;
+    er->wr_frame_size = audio_bytes_per_sample(wrFormat) * wrChannelCount;
+    *echo_reference = &er->itfe;
+    return 0;
+}
+
+// Destroy an echo reference created by create_echo_reference().
+// Safe to call with NULL.
+void release_echo_reference(struct echo_reference_itfe *echo_reference) {
+    struct echo_reference *er = (struct echo_reference *)echo_reference;
+
+    if (er != NULL) {
+        ALOGV("EchoReference dstor");
+        /* Release the internal buffers, then the optional resampler,
+         * then the object itself. */
+        echo_reference_reset_l(er);
+        if (er->resampler != NULL) {
+            release_resampler(er->resampler);
+        }
+        free(er);
+    }
+}
+
diff --git a/media/audio_utils/fifo.c b/media/audio_utils/fifo.c
new file mode 100644
index 0000000..c818a50
--- /dev/null
+++ b/media/audio_utils/fifo.c
@@ -0,0 +1,134 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "audio_utils_fifo"
+
+#include <stdlib.h>
+#include <string.h>
+#include <audio_utils/fifo.h>
+#include <audio_utils/roundup.h>
+#include <cutils/atomic.h>
+#include <cutils/log.h>
+
+void audio_utils_fifo_init(struct audio_utils_fifo *fifo, size_t frameCount, size_t frameSize,
+        void *buffer)
+{
+    // We would need a 64-bit roundup to support larger frameCount.
+    ALOG_ASSERT(fifo != NULL && frameCount > 0 && frameSize > 0 && buffer != NULL);
+    // Round the capacity up to a power of two; the gap between the two values
+    // is the "fudge factor" used by the index arithmetic helpers.
+    size_t frameCountP2 = roundup(frameCount);
+    fifo->mFrameCount = frameCount;
+    fifo->mFrameCountP2 = frameCountP2;
+    fifo->mFudgeFactor = frameCountP2 - frameCount;
+    fifo->mFrameSize = frameSize;
+    fifo->mBuffer = buffer;
+    // start empty: both indices at the same position
+    fifo->mRear = 0;
+    fifo->mFront = 0;
+}
+
+// Tear-down counterpart of audio_utils_fifo_init(). Nothing to release:
+// init() allocates no memory and the data buffer is owned by the caller.
+void audio_utils_fifo_deinit(struct audio_utils_fifo *fifo __unused)
+{
+}
+
+// Return a new index as the sum of an old index (either mFront or mRear) and a specified increment.
+static inline int32_t audio_utils_fifo_sum(struct audio_utils_fifo *fifo, int32_t index,
+        uint32_t increment)
+{
+    if (fifo->mFudgeFactor) {
+        uint32_t mask = fifo->mFrameCountP2 - 1;
+        ALOG_ASSERT((index & mask) < fifo->mFrameCount);
+        ALOG_ASSERT(/*0 <= increment &&*/ increment <= fifo->mFrameCountP2);
+        if ((index & mask) + increment >= fifo->mFrameCount) {
+            increment += fifo->mFudgeFactor;
+        }
+        index += increment;
+        ALOG_ASSERT((index & mask) < fifo->mFrameCount);
+        return index;
+    } else {
+        return index + increment;
+    }
+}
+
+// Return the difference between two indices: rear - front, where 0 <= difference <= mFrameCount.
+static inline size_t audio_utils_fifo_diff(struct audio_utils_fifo *fifo, int32_t rear,
+        int32_t front)
+{
+    int32_t diff = rear - front;
+    if (fifo->mFudgeFactor) {
+        uint32_t mask = ~(fifo->mFrameCountP2 - 1);
+        int32_t genDiff = (rear & mask) - (front & mask);
+        if (genDiff != 0) {
+            ALOG_ASSERT(genDiff == (int32_t) fifo->mFrameCountP2);
+            diff -= fifo->mFudgeFactor;
+        }
+    }
+    // FIFO should not be overfull
+    ALOG_ASSERT(0 <= diff && diff <= (int32_t) fifo->mFrameCount);
+    return (size_t) diff;
+}
+
+// Write up to `count` frames from `buffer` into the FIFO without blocking.
+// Returns the number of frames actually written (0 when the FIFO is full).
+// NOTE(review): mFront is loaded with acquire and mRear stored with release
+// semantics, which suggests a single-writer/single-reader design -- confirm
+// against the fifo.h contract before allowing concurrent writers.
+ssize_t audio_utils_fifo_write(struct audio_utils_fifo *fifo, const void *buffer, size_t count)
+{
+    int32_t front = android_atomic_acquire_load(&fifo->mFront);
+    int32_t rear = fifo->mRear;
+    // clamp the request to the free space currently available
+    size_t availToWrite = fifo->mFrameCount - audio_utils_fifo_diff(fifo, rear, front);
+    if (availToWrite > count) {
+        availToWrite = count;
+    }
+    // convert the sequence counter to an offset within the buffer
+    rear &= fifo->mFrameCountP2 - 1;
+    // part1 is the contiguous run up to the end of the buffer; part2 the wrapped remainder
+    size_t part1 = fifo->mFrameCount - rear;
+    if (part1 > availToWrite) {
+        part1 = availToWrite;
+    }
+    if (part1 > 0) {
+        memcpy((char *) fifo->mBuffer + (rear * fifo->mFrameSize), buffer,
+                part1 * fifo->mFrameSize);
+        size_t part2 = availToWrite - part1;
+        if (part2 > 0) {
+            memcpy(fifo->mBuffer, (char *) buffer + (part1 * fifo->mFrameSize),
+                    part2 * fifo->mFrameSize);
+        }
+        // publish the new rear index only after the frame data is in place
+        android_atomic_release_store(audio_utils_fifo_sum(fifo, fifo->mRear, availToWrite),
+                &fifo->mRear);
+    }
+    return availToWrite;
+}
+
+// Read up to `count` frames from the FIFO into `buffer` without blocking.
+// Returns the number of frames actually read (0 when the FIFO is empty).
+// NOTE(review): mRear is loaded with acquire and mFront stored with release
+// semantics -- mirrors the single-reader assumption noted on the write side.
+ssize_t audio_utils_fifo_read(struct audio_utils_fifo *fifo, void *buffer, size_t count)
+{
+    int32_t rear = android_atomic_acquire_load(&fifo->mRear);
+    int32_t front = fifo->mFront;
+    // clamp the request to the frames currently queued
+    size_t availToRead = audio_utils_fifo_diff(fifo, rear, front);
+    if (availToRead > count) {
+        availToRead = count;
+    }
+    // convert the sequence counter to an offset within the buffer
+    front &= fifo->mFrameCountP2 - 1;
+    // part1 is the contiguous run up to the end of the buffer; part2 the wrapped remainder
+    size_t part1 = fifo->mFrameCount - front;
+    if (part1 > availToRead) {
+        part1 = availToRead;
+    }
+    if (part1 > 0) {
+        memcpy(buffer, (char *) fifo->mBuffer + (front * fifo->mFrameSize),
+                part1 * fifo->mFrameSize);
+        size_t part2 = availToRead - part1;
+        if (part2 > 0) {
+            memcpy((char *) buffer + (part1 * fifo->mFrameSize), fifo->mBuffer,
+                    part2 * fifo->mFrameSize);
+        }
+        // release the consumed frames only after the data has been copied out
+        android_atomic_release_store(audio_utils_fifo_sum(fifo, fifo->mFront, availToRead),
+                &fifo->mFront);
+    }
+    return availToRead;
+}
diff --git a/media/audio_utils/fixedfft.cpp b/media/audio_utils/fixedfft.cpp
new file mode 100644
index 0000000..3fcc247
--- /dev/null
+++ b/media/audio_utils/fixedfft.cpp
@@ -0,0 +1,163 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/* A Fixed point implementation of Fast Fourier Transform (FFT). Complex numbers
+ * are represented by 32-bit integers, where higher 16 bits are real part and
+ * lower ones are imaginary part. Few compromises are made between efficiency,
+ * accuracy, and maintainability. To make it fast, arithmetic shifts are used
+ * instead of divisions, and bitwise inverses are used instead of negates. To
+ * keep it small, only radix-2 Cooley-Tukey algorithm is implemented, and only
+ * half of the twiddle factors are stored. Although there are still ways to make
+ * it even faster or smaller, it costs too much on one of the aspects.
+ */
+
+#include <stdio.h>
+#include <stdint.h>
+
+#include <audio_utils/fixedfft.h>
+
+#define LOG_FFT_SIZE 10
+#define MAX_FFT_SIZE (1 << LOG_FFT_SIZE)
+
+// Actually int32_t, but declare as uint32_t to avoid warnings due to overflow.
+// Be sure to cast all accesses before use, for example "(int32_t) twiddle[...]".
+static const uint32_t twiddle[MAX_FFT_SIZE / 4] = {
+    0x00008000, 0xff378001, 0xfe6e8002, 0xfda58006, 0xfcdc800a, 0xfc13800f,
+    0xfb4a8016, 0xfa81801e, 0xf9b88027, 0xf8ef8032, 0xf827803e, 0xf75e804b,
+    0xf6958059, 0xf5cd8068, 0xf5058079, 0xf43c808b, 0xf374809e, 0xf2ac80b2,
+    0xf1e480c8, 0xf11c80de, 0xf05580f6, 0xef8d8110, 0xeec6812a, 0xedff8146,
+    0xed388163, 0xec718181, 0xebab81a0, 0xeae481c1, 0xea1e81e2, 0xe9588205,
+    0xe892822a, 0xe7cd824f, 0xe7078276, 0xe642829d, 0xe57d82c6, 0xe4b982f1,
+    0xe3f4831c, 0xe3308349, 0xe26d8377, 0xe1a983a6, 0xe0e683d6, 0xe0238407,
+    0xdf61843a, 0xde9e846e, 0xdddc84a3, 0xdd1b84d9, 0xdc598511, 0xdb998549,
+    0xdad88583, 0xda1885be, 0xd95885fa, 0xd8988637, 0xd7d98676, 0xd71b86b6,
+    0xd65c86f6, 0xd59e8738, 0xd4e1877b, 0xd42487c0, 0xd3678805, 0xd2ab884c,
+    0xd1ef8894, 0xd13488dd, 0xd0798927, 0xcfbe8972, 0xcf0489be, 0xce4b8a0c,
+    0xcd928a5a, 0xccd98aaa, 0xcc218afb, 0xcb698b4d, 0xcab28ba0, 0xc9fc8bf5,
+    0xc9468c4a, 0xc8908ca1, 0xc7db8cf8, 0xc7278d51, 0xc6738dab, 0xc5c08e06,
+    0xc50d8e62, 0xc45b8ebf, 0xc3a98f1d, 0xc2f88f7d, 0xc2488fdd, 0xc198903e,
+    0xc0e990a1, 0xc03a9105, 0xbf8c9169, 0xbedf91cf, 0xbe329236, 0xbd86929e,
+    0xbcda9307, 0xbc2f9371, 0xbb8593dc, 0xbadc9448, 0xba3394b5, 0xb98b9523,
+    0xb8e39592, 0xb83c9603, 0xb7969674, 0xb6f196e6, 0xb64c9759, 0xb5a897ce,
+    0xb5059843, 0xb46298b9, 0xb3c09930, 0xb31f99a9, 0xb27f9a22, 0xb1df9a9c,
+    0xb1409b17, 0xb0a29b94, 0xb0059c11, 0xaf689c8f, 0xaecc9d0e, 0xae319d8e,
+    0xad979e0f, 0xacfd9e91, 0xac659f14, 0xabcd9f98, 0xab36a01c, 0xaaa0a0a2,
+    0xaa0aa129, 0xa976a1b0, 0xa8e2a238, 0xa84fa2c2, 0xa7bda34c, 0xa72ca3d7,
+    0xa69ca463, 0xa60ca4f0, 0xa57ea57e, 0xa4f0a60c, 0xa463a69c, 0xa3d7a72c,
+    0xa34ca7bd, 0xa2c2a84f, 0xa238a8e2, 0xa1b0a976, 0xa129aa0a, 0xa0a2aaa0,
+    0xa01cab36, 0x9f98abcd, 0x9f14ac65, 0x9e91acfd, 0x9e0fad97, 0x9d8eae31,
+    0x9d0eaecc, 0x9c8faf68, 0x9c11b005, 0x9b94b0a2, 0x9b17b140, 0x9a9cb1df,
+    0x9a22b27f, 0x99a9b31f, 0x9930b3c0, 0x98b9b462, 0x9843b505, 0x97ceb5a8,
+    0x9759b64c, 0x96e6b6f1, 0x9674b796, 0x9603b83c, 0x9592b8e3, 0x9523b98b,
+    0x94b5ba33, 0x9448badc, 0x93dcbb85, 0x9371bc2f, 0x9307bcda, 0x929ebd86,
+    0x9236be32, 0x91cfbedf, 0x9169bf8c, 0x9105c03a, 0x90a1c0e9, 0x903ec198,
+    0x8fddc248, 0x8f7dc2f8, 0x8f1dc3a9, 0x8ebfc45b, 0x8e62c50d, 0x8e06c5c0,
+    0x8dabc673, 0x8d51c727, 0x8cf8c7db, 0x8ca1c890, 0x8c4ac946, 0x8bf5c9fc,
+    0x8ba0cab2, 0x8b4dcb69, 0x8afbcc21, 0x8aaaccd9, 0x8a5acd92, 0x8a0cce4b,
+    0x89becf04, 0x8972cfbe, 0x8927d079, 0x88ddd134, 0x8894d1ef, 0x884cd2ab,
+    0x8805d367, 0x87c0d424, 0x877bd4e1, 0x8738d59e, 0x86f6d65c, 0x86b6d71b,
+    0x8676d7d9, 0x8637d898, 0x85fad958, 0x85beda18, 0x8583dad8, 0x8549db99,
+    0x8511dc59, 0x84d9dd1b, 0x84a3dddc, 0x846ede9e, 0x843adf61, 0x8407e023,
+    0x83d6e0e6, 0x83a6e1a9, 0x8377e26d, 0x8349e330, 0x831ce3f4, 0x82f1e4b9,
+    0x82c6e57d, 0x829de642, 0x8276e707, 0x824fe7cd, 0x822ae892, 0x8205e958,
+    0x81e2ea1e, 0x81c1eae4, 0x81a0ebab, 0x8181ec71, 0x8163ed38, 0x8146edff,
+    0x812aeec6, 0x8110ef8d, 0x80f6f055, 0x80def11c, 0x80c8f1e4, 0x80b2f2ac,
+    0x809ef374, 0x808bf43c, 0x8079f505, 0x8068f5cd, 0x8059f695, 0x804bf75e,
+    0x803ef827, 0x8032f8ef, 0x8027f9b8, 0x801efa81, 0x8016fb4a, 0x800ffc13,
+    0x800afcdc, 0x8006fda5, 0x8002fe6e, 0x8001ff37,
+};
+
+/* Returns the multiplication of \conj{a} and {b}. */
+/* Each int32_t packs a complex number: real part in the high 16 bits,
+ * imaginary part in the low 16 bits (see file header comment). */
+static inline int32_t mult(int32_t a, int32_t b)
+{
+#if defined(__arm__)
+    // ARMv6 DSP path: smuad forms re*re + im*im (real part of conj(a)*b),
+    // smusdx forms the cross difference (imaginary part), and pkhtb repacks
+    // both 16-bit halves into one word.
+    int32_t t = b;
+    __asm__("smuad  %0, %0, %1"          : "+r" (t) : "r" (a));
+    __asm__("smusdx %0, %0, %1"          : "+r" (b) : "r" (a));
+    __asm__("pkhtb  %0, %0, %1, ASR #16" : "+r" (t) : "r" (b));
+    return t;
+#else
+    // High half: a.re*b.re + a.im*b.im;  low half: (a.re*b.im - a.im*b.re) >> 16.
+    return (((a >> 16) * (b >> 16) + (int16_t)a * (int16_t)b) & ~0xFFFF) |
+        ((((a >> 16) * (int16_t)b - (int16_t)a * (b >> 16)) >> 16) & 0xFFFF);
+#endif
+}
+
+/* Halves both packed 16-bit halves of 'a' in parallel (divides the complex
+ * value by 2, used for per-stage scaling to avoid overflow). */
+static inline int32_t half(int32_t a)
+{
+#if defined(__arm__)
+    // shadd16: signed halving add of each 16-bit lane with zero, i.e. lane >> 1.
+    __asm__("shadd16 %0, %0, %1" : "+r" (a) : "r" (0));
+    return a;
+#else
+    // Arithmetic shift halves both halves at once; the masking repairs bit 15
+    // so the real part's low bit does not bleed into the imaginary half's
+    // sign position, and the imaginary half keeps its original sign bit.
+    return ((a >> 1) & ~0x8000) | (a & 0x8000);
+#endif
+}
+
+/* In-place radix-2 Cooley-Tukey FFT of n packed complex values.
+ * n is assumed to be a power of 2, n <= MAX_FFT_SIZE.  Each butterfly stage
+ * halves its operands (via half()) to keep the fixed-point values in range. */
+void fixed_fft(int n, int32_t *v)
+{
+    int scale = LOG_FFT_SIZE, i, p, r;
+
+    // Bit-reversal permutation: r tracks the bit-reversed counterpart of i;
+    // swap once per pair (only when i < r) to reorder the input in place.
+    for (r = 0, i = 1; i < n; ++i) {
+        for (p = n; !(p & r); p >>= 1, r ^= p);
+        if (i < r) {
+            int32_t t = v[i];
+            v[i] = v[r];
+            v[r] = t;
+        }
+    }
+
+    // Butterfly stages: span p doubles each pass; scale shrinks so the twiddle
+    // stride (r << scale) indexes the table correctly for this stage.
+    for (p = 1; p < n; p <<= 1) {
+        --scale;
+
+        // r == 0 butterflies have a unity twiddle factor: no multiply needed.
+        for (i = 0; i < n; i += p << 1) {
+            int32_t x = half(v[i]);
+            int32_t y = half(v[i + p]);
+            v[i] = x + y;
+            v[i + p] = x - y;
+        }
+
+        for (r = 1; r < p; ++r) {
+            // Only a quarter of the twiddle circle is stored.  i = sign(w) as
+            // 0/-1; (w ^ i) - i is |w| (two's-complement abs), and for negative
+            // w the (i << 16) xor bitwise-inverts the real half — the cheap
+            // approximation of negation noted in the file header.
+            int32_t w = MAX_FFT_SIZE / 4 - (r << scale);
+            i = w >> 31;
+            w = ((int32_t) twiddle[(w ^ i) - i]) ^ (i << 16);
+            for (i = r; i < n; i += p << 1) {
+                int32_t x = half(v[i]);
+                int32_t y = mult(w, v[i + p]);
+                v[i] = x - y;
+                v[i + p] = x + y;
+            }
+        }
+    }
+}
+
+/* FFT of real input: runs the n-point complex FFT above, then untangles the
+ * symmetric bin pairs in-place to recover the spectrum of the real signal. */
+void fixed_fft_real(int n, int32_t *v)
+{
+    int scale = LOG_FFT_SIZE, m = n >> 1, i;
+
+    fixed_fft(n, v);
+    for (i = 1; i <= n; i <<= 1, --scale);   // adjust twiddle stride to this n
+    // Special-case bins 0 and n/2.  The ^0xFFFF / ~ operations bitwise-invert
+    // a 16-bit half as a cheap approximation of negation (see file header).
+    v[0] = mult(~v[0], 0x80008000);
+    v[m] = half(v[m]);
+
+    // Combine each pair of mirrored bins (i, n - i) using the stored twiddles.
+    for (i = 1; i < n >> 1; ++i) {
+        int32_t x = half(v[i]);
+        int32_t z = half(v[n - i]);
+        int32_t y = z - (x ^ 0xFFFF);
+        x = half(x + (z ^ 0xFFFF));
+        y = mult(y, ((int32_t) twiddle[i << scale]));
+        v[i] = x - y;
+        v[n - i] = (x + y) ^ 0xFFFF;
+    }
+}
diff --git a/media/audio_utils/format.c b/media/audio_utils/format.c
new file mode 100644
index 0000000..66b0a6d
--- /dev/null
+++ b/media/audio_utils/format.c
@@ -0,0 +1,182 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/* #define LOG_NDEBUG 0 */
+#define LOG_TAG "audio_utils_format"
+
+#include <cutils/log.h>
+#include <audio_utils/primitives.h>
+#include <audio_utils/format.h>
+
+/* Copy/convert 'count' samples from src_format to dst_format (contract in
+ * format.h).  Any unsupported format pair falls through every switch and hits
+ * the fatal log at the bottom. */
+void memcpy_by_audio_format(void *dst, audio_format_t dst_format,
+        const void *src, audio_format_t src_format, size_t count)
+{
+    /* default cases for error falls through to fatal log below. */
+    if (dst_format == src_format) {
+        switch (dst_format) {
+        case AUDIO_FORMAT_PCM_16_BIT:
+        case AUDIO_FORMAT_PCM_FLOAT:
+        case AUDIO_FORMAT_PCM_8_BIT:
+        case AUDIO_FORMAT_PCM_24_BIT_PACKED:
+        case AUDIO_FORMAT_PCM_32_BIT:
+        case AUDIO_FORMAT_PCM_8_24_BIT:
+            /* identical formats: straight copy, sized by the per-sample byte count */
+            memcpy(dst, src, count * audio_bytes_per_sample(dst_format));
+            return;
+        default:
+            break;
+        }
+    }
+    /* Cross-format conversions: per format.h, one side of every supported pair
+     * is either 16-bit PCM or float. */
+    switch (dst_format) {
+    case AUDIO_FORMAT_PCM_16_BIT:
+        switch (src_format) {
+        case AUDIO_FORMAT_PCM_FLOAT:
+            memcpy_to_i16_from_float((int16_t*)dst, (float*)src, count);
+            return;
+        case AUDIO_FORMAT_PCM_8_BIT:
+            memcpy_to_i16_from_u8((int16_t*)dst, (uint8_t*)src, count);
+            return;
+        case AUDIO_FORMAT_PCM_24_BIT_PACKED:
+            memcpy_to_i16_from_p24((int16_t*)dst, (uint8_t*)src, count);
+            return;
+        case AUDIO_FORMAT_PCM_32_BIT:
+            memcpy_to_i16_from_i32((int16_t*)dst, (int32_t*)src, count);
+            return;
+        case AUDIO_FORMAT_PCM_8_24_BIT:
+            memcpy_to_i16_from_q8_23((int16_t*)dst, (int32_t*)src, count);
+            return;
+        default:
+            break;
+        }
+        break;
+    case AUDIO_FORMAT_PCM_FLOAT:
+        switch (src_format) {
+        case AUDIO_FORMAT_PCM_16_BIT:
+            memcpy_to_float_from_i16((float*)dst, (int16_t*)src, count);
+            return;
+        case AUDIO_FORMAT_PCM_8_BIT:
+            memcpy_to_float_from_u8((float*)dst, (uint8_t*)src, count);
+            return;
+        case AUDIO_FORMAT_PCM_24_BIT_PACKED:
+            memcpy_to_float_from_p24((float*)dst, (uint8_t*)src, count);
+            return;
+        case AUDIO_FORMAT_PCM_32_BIT:
+            memcpy_to_float_from_i32((float*)dst, (int32_t*)src, count);
+            return;
+        case AUDIO_FORMAT_PCM_8_24_BIT:
+            memcpy_to_float_from_q8_23((float*)dst, (int32_t*)src, count);
+            return;
+        default:
+            break;
+        }
+        break;
+    case AUDIO_FORMAT_PCM_8_BIT:
+        /* Narrower destinations only convert from 16-bit or float sources. */
+        switch (src_format) {
+        case AUDIO_FORMAT_PCM_16_BIT:
+            memcpy_to_u8_from_i16((uint8_t*)dst, (int16_t*)src, count);
+            return;
+        case AUDIO_FORMAT_PCM_FLOAT:
+            memcpy_to_u8_from_float((uint8_t*)dst, (float*)src, count);
+            return;
+        default:
+            break;
+        }
+        break;
+    case AUDIO_FORMAT_PCM_24_BIT_PACKED:
+        switch (src_format) {
+        case AUDIO_FORMAT_PCM_16_BIT:
+            memcpy_to_p24_from_i16((uint8_t*)dst, (int16_t*)src, count);
+            return;
+        case AUDIO_FORMAT_PCM_FLOAT:
+            memcpy_to_p24_from_float((uint8_t*)dst, (float*)src, count);
+            return;
+        default:
+            break;
+        }
+        break;
+    case AUDIO_FORMAT_PCM_32_BIT:
+        switch (src_format) {
+        case AUDIO_FORMAT_PCM_16_BIT:
+            memcpy_to_i32_from_i16((int32_t*)dst, (int16_t*)src, count);
+            return;
+        case AUDIO_FORMAT_PCM_FLOAT:
+            memcpy_to_i32_from_float((int32_t*)dst, (float*)src, count);
+            return;
+        default:
+            break;
+        }
+        break;
+    case AUDIO_FORMAT_PCM_8_24_BIT:
+        switch (src_format) {
+        case AUDIO_FORMAT_PCM_16_BIT:
+            memcpy_to_q8_23_from_i16((int32_t*)dst, (int16_t*)src, count);
+            return;
+        case AUDIO_FORMAT_PCM_FLOAT:
+            memcpy_to_q8_23_from_float_with_clamp((int32_t*)dst, (float*)src, count);
+            return;
+        case AUDIO_FORMAT_PCM_24_BIT_PACKED: {
+            /* note: q8_23 <- p24 is an extra conversion beyond the 16-bit/float rule */
+            memcpy_to_q8_23_from_p24((int32_t *)dst, (uint8_t *)src, count);
+            return;
+        }
+        default:
+            break;
+        }
+        break;
+    default:
+        break;
+    }
+    /* unsupported src/dst pair: abort with a diagnostic */
+    LOG_ALWAYS_FATAL("invalid src format %#x for dst format %#x",
+            src_format, dst_format);
+}
+
+/* Build the channel index array used by memcpy_by_index_array() (contract in
+ * format.h).  Dispatches on the representation (position vs. index) of each
+ * channel mask; returns 0 for unrecognized representations. */
+size_t memcpy_by_index_array_initialization_from_channel_mask(int8_t *idxary, size_t arysize,
+        audio_channel_mask_t dst_channel_mask, audio_channel_mask_t src_channel_mask)
+{
+    const audio_channel_representation_t src_representation =
+            audio_channel_mask_get_representation(src_channel_mask);
+    const audio_channel_representation_t dst_representation =
+            audio_channel_mask_get_representation(dst_channel_mask);
+    const uint32_t src_bits = audio_channel_mask_get_bits(src_channel_mask);
+    const uint32_t dst_bits = audio_channel_mask_get_bits(dst_channel_mask);
+
+    switch (src_representation) {
+    case AUDIO_CHANNEL_REPRESENTATION_POSITION:
+        switch (dst_representation) {
+        case AUDIO_CHANNEL_REPRESENTATION_POSITION:
+            /* position -> position: both bit masks name channel positions */
+            return memcpy_by_index_array_initialization(idxary, arysize,
+                    dst_bits, src_bits);
+        case AUDIO_CHANNEL_REPRESENTATION_INDEX:
+            /* position source, index destination */
+            return memcpy_by_index_array_initialization_dst_index(idxary, arysize,
+                    dst_bits, src_bits);
+        default:
+            return 0;
+        }
+        break;
+    case AUDIO_CHANNEL_REPRESENTATION_INDEX:
+        switch (dst_representation) {
+        case AUDIO_CHANNEL_REPRESENTATION_POSITION:
+            /* index source, position destination */
+            return memcpy_by_index_array_initialization_src_index(idxary, arysize,
+                    dst_bits, src_bits);
+        case AUDIO_CHANNEL_REPRESENTATION_INDEX:
+            /* index -> index: same helper as position -> position */
+            return memcpy_by_index_array_initialization(idxary, arysize,
+                    dst_bits, src_bits);
+        default:
+            return 0;
+        }
+        break;
+    default:
+        return 0;
+    }
+}
diff --git a/media/audio_utils/include/audio_utils/channels.h b/media/audio_utils/include/audio_utils/channels.h
new file mode 100644
index 0000000..a967681
--- /dev/null
+++ b/media/audio_utils/include/audio_utils/channels.h
@@ -0,0 +1,44 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_AUDIO_CHANNELS_H
+#define ANDROID_AUDIO_CHANNELS_H
+
+__BEGIN_DECLS
+
+/*
+ * Expands or contracts sample data from one interleaved channel format to another.
+ * Expanded channels are filled with zeros and put at the end of each audio frame.
+ * Contracted channels are omitted from the end of each audio frame.
+ *   in_buff points to the buffer of samples
+ *   in_buff_chans Specifies the number of channels in the input buffer.
+ *   out_buff points to the buffer to receive converted samples.
+ *   out_buff_chans Specifies the number of channels in the output buffer.
+ *   sample_size_in_bytes Specifies the number of bytes per sample. 1, 2, 3, 4 are
+ *     currently valid.
+ *   num_in_bytes size of input buffer in BYTES
+ * returns
+ *   the number of BYTES of output data or 0 if an error occurs.
+ * NOTE
+ *   The in and out buffers must either be completely separate (non-overlapping), or
+ *   they must both start at the same address. Partially overlapping buffers are not supported.
+ */
+size_t adjust_channels(const void* in_buff, size_t in_buff_chans,
+                       void* out_buff, size_t out_buff_chans,
+                       unsigned sample_size_in_bytes, size_t num_in_bytes);
+__END_DECLS
+
+#endif
diff --git a/media/audio_utils/include/audio_utils/echo_reference.h b/media/audio_utils/include/audio_utils/echo_reference.h
new file mode 100644
index 0000000..15edda4
--- /dev/null
+++ b/media/audio_utils/include/audio_utils/echo_reference.h
@@ -0,0 +1,66 @@
+/*
+** Copyright 2011, The Android Open-Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+#ifndef ANDROID_ECHO_REFERENCE_H
+#define ANDROID_ECHO_REFERENCE_H
+
+#include <stdint.h>
+#include <sys/time.h>
+
+__BEGIN_DECLS
+
+/* Buffer descriptor used by read() and write() methods, including the time stamp and delay. */
+struct echo_reference_buffer {
+    void *raw;                  // pointer to audio frame
+    size_t frame_count;         // number of frames in buffer
+    int32_t delay_ns;           // delay for this buffer (see comment below)
+    struct timespec time_stamp; // time stamp for this buffer (see comment below)
+                                // default ALSA gettimeofday() format
+};
+/**
+ * when used for EchoReference::write():
+ * + as input:
+ *      - delay_ns is the delay introduced by playback buffers
+ *      - time_stamp is the time stamp corresponding to the delay calculation
+ * + as output:
+ *      unused
+ * when used for EchoReference::read():
+ * + as input:
+ *      - delay_ns is the delay introduced by capture buffers
+ *      - time_stamp is the time stamp corresponding to the delay calculation
+ * + as output:
+ *      - delay_ns is the delay between the returned frames and the capture time derived from
+ *      delay and time stamp indicated as input. This delay is to be communicated to the AEC.
+ *      - frame_count is updated with the actual number of frames returned
+ */
+
+struct echo_reference_itfe {
+    int (*read)(struct echo_reference_itfe *echo_reference, struct echo_reference_buffer *buffer);
+    int (*write)(struct echo_reference_itfe *echo_reference, struct echo_reference_buffer *buffer);
+};
+
+int create_echo_reference(audio_format_t rdFormat,
+                          uint32_t rdChannelCount,
+                          uint32_t rdSamplingRate,
+                          audio_format_t wrFormat,
+                          uint32_t wrChannelCount,
+                          uint32_t wrSamplingRate,
+                          struct echo_reference_itfe **);
+
+void release_echo_reference(struct echo_reference_itfe *echo_reference);
+
+__END_DECLS
+
+#endif // ANDROID_ECHO_REFERENCE_H
diff --git a/media/audio_utils/include/audio_utils/fifo.h b/media/audio_utils/include/audio_utils/fifo.h
new file mode 100644
index 0000000..ba4c5c6
--- /dev/null
+++ b/media/audio_utils/include/audio_utils/fifo.h
@@ -0,0 +1,86 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_AUDIO_FIFO_H
+#define ANDROID_AUDIO_FIFO_H
+
+#include <stdlib.h>
+
+// FIXME use atomic_int_least32_t and new atomic operations instead of legacy Android ones
+// #include <stdatomic.h>
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+// Single writer, single reader non-blocking FIFO.
+// Writer and reader must be in same process.
+
+// No user-serviceable parts within.
+struct audio_utils_fifo {
+    // These fields are const after initialization
+    size_t     mFrameCount;   // max number of significant frames to be stored in the FIFO > 0
+    size_t     mFrameCountP2; // roundup(mFrameCount)
+    size_t     mFudgeFactor;  // mFrameCountP2 - mFrameCount, the number of "wasted" frames after
+                              // the end of mBuffer.  Only the indices are wasted, not any memory.
+    size_t     mFrameSize;    // size of each frame in bytes
+    void      *mBuffer;       // pointer to caller-allocated buffer of size mFrameCount frames
+
+    volatile int32_t mFront; // frame index of first frame slot available to read, or read index
+    volatile int32_t mRear;  // frame index of next frame slot available to write, or write index
+};
+
+// Initialize a FIFO object.
+// Input parameters:
+//  fifo        Pointer to the FIFO object.
+//  frameCount  Max number of significant frames to be stored in the FIFO > 0.
+//              If writes and reads always use the same count, and that count is a divisor of
+//              frameCount, then the writes and reads will never do a partial transfer.
+//  frameSize   Size of each frame in bytes.
+//  buffer      Pointer to a caller-allocated buffer of frameCount frames.
+void audio_utils_fifo_init(struct audio_utils_fifo *fifo, size_t frameCount, size_t frameSize,
+        void *buffer);
+
+// De-initialize a FIFO object.
+// Input parameters:
+//  fifo        Pointer to the FIFO object.
+void audio_utils_fifo_deinit(struct audio_utils_fifo *fifo);
+
+// Write to FIFO.
+// Input parameters:
+//  fifo        Pointer to the FIFO object.
+//  buffer      Pointer to source buffer containing 'count' frames of data.
+//  count       Desired number of frames to transfer.
+// Returns actual number of frames written <= count.
+// The actual transfer count may be zero if the FIFO is full,
+// or partial if the FIFO was almost full.
+// A negative return value indicates an error.  Currently there are no errors defined.
+ssize_t audio_utils_fifo_write(struct audio_utils_fifo *fifo, const void *buffer, size_t count);
+
+// Read from FIFO.
+// Input parameters:
+//  fifo        Pointer to the FIFO object.
+//  buffer      Pointer to destination buffer to be filled with up to 'count' frames of data.
+//  count       Maximum number of frames to read.
+// Returns actual number of frames read <= count.
+// The actual transfer count may be zero if the FIFO is empty,
+// or partial if the FIFO was almost empty.
+// A negative return value indicates an error.  Currently there are no errors defined.
+ssize_t audio_utils_fifo_read(struct audio_utils_fifo *fifo, void *buffer, size_t count);
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif  // !ANDROID_AUDIO_FIFO_H
diff --git a/media/audio_utils/include/audio_utils/fixedfft.h b/media/audio_utils/include/audio_utils/fixedfft.h
new file mode 100644
index 0000000..5903619
--- /dev/null
+++ b/media/audio_utils/include/audio_utils/fixedfft.h
@@ -0,0 +1,30 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_AUDIO_FIXEDFFT_H
+#define ANDROID_AUDIO_FIXEDFFT_H
+
+#include <stdint.h>
+#include <sys/cdefs.h>
+
+__BEGIN_DECLS
+
+/* See description in fixedfft.cpp */
+extern void fixed_fft_real(int n, int32_t *v);
+
+__END_DECLS
+
+#endif  // ANDROID_AUDIO_FIXEDFFT_H
diff --git a/media/audio_utils/include/audio_utils/format.h b/media/audio_utils/include/audio_utils/format.h
new file mode 100644
index 0000000..7ac6539
--- /dev/null
+++ b/media/audio_utils/include/audio_utils/format.h
@@ -0,0 +1,79 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_AUDIO_FORMAT_H
+#define ANDROID_AUDIO_FORMAT_H
+
+#include <stdint.h>
+#include <sys/cdefs.h>
+#include <system/audio.h>
+
+__BEGIN_DECLS
+
+/* Copy buffers with conversion between buffer sample formats.
+ *
+ *  dst        Destination buffer
+ *  dst_format Destination buffer format
+ *  src        Source buffer
+ *  src_format Source buffer format
+ *  count      Number of samples to copy
+ *
+ * Allowed format conversions are given by either case 1 or 2 below:
+ *
+ * 1) One of src_format or dst_format is AUDIO_FORMAT_PCM_16_BIT or
+ * AUDIO_FORMAT_PCM_FLOAT, and the other format type is one of:
+ *
+ * AUDIO_FORMAT_PCM_16_BIT
+ * AUDIO_FORMAT_PCM_FLOAT
+ * AUDIO_FORMAT_PCM_8_BIT
+ * AUDIO_FORMAT_PCM_24_BIT_PACKED
+ * AUDIO_FORMAT_PCM_32_BIT
+ * AUDIO_FORMAT_PCM_8_24_BIT
+ *
+ * 2) Both dst_format and src_format are identical and of the list given
+ * in (1). This is a straight copy.
+ *
+ * The destination and source buffers must be completely separate if the destination
+ * format size is larger than the source format size. These routines call functions
+ * in primitives.h, so descriptions of detailed behavior can be reviewed there.
+ *
+ * Logs a fatal error if dst or src format is not allowed by the conversion rules above.
+ */
+void memcpy_by_audio_format(void *dst, audio_format_t dst_format,
+        const void *src, audio_format_t src_format, size_t count);
+
+
+/* This function creates an index array for converting audio data with different
+ * channel position and index masks, used by memcpy_by_index_array().
+ * Returns the number of array elements required.
+ * This may be greater than arysize, so the return value should be checked
+ * if idxary size is less than 32. Returns zero if the input masks are unrecognized.
+ *
+ * Note that idxary is a caller allocated array
+ * of at least as many channels as present in dst_channel_mask.
+ *
+ * Parameters:
+ *  idxary            Updated array of indices of channels in the src frame for the dst frame
+ *  arysize           Number of caller allocated elements in idxary
+ *  dst_channel_mask  Bit mask corresponding to destination channels present
+ *  src_channel_mask  Bit mask corresponding to source channels present
+ */
+size_t memcpy_by_index_array_initialization_from_channel_mask(int8_t *idxary, size_t arysize,
+        audio_channel_mask_t dst_channel_mask, audio_channel_mask_t src_channel_mask);
+
+__END_DECLS
+
+#endif  // ANDROID_AUDIO_FORMAT_H
diff --git a/media/audio_utils/include/audio_utils/minifloat.h b/media/audio_utils/include/audio_utils/minifloat.h
new file mode 100644
index 0000000..1b664fc
--- /dev/null
+++ b/media/audio_utils/include/audio_utils/minifloat.h
@@ -0,0 +1,81 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_AUDIO_MINIFLOAT_H
+#define ANDROID_AUDIO_MINIFLOAT_H
+
+#include <stdint.h>
+#include <sys/cdefs.h>
+
+__BEGIN_DECLS
+
+/* A single gain expressed as minifloat */
+typedef uint16_t gain_minifloat_t;
+
+/* A pair of gain_minifloat_t packed into a single word */
+typedef uint32_t gain_minifloat_packed_t;
+
+/* The nominal range of a gain, expressed as a float */
+#define GAIN_FLOAT_ZERO         0.0f
+#define GAIN_FLOAT_UNITY        1.0f
+
+/* Unity gain expressed as a minifloat */
+#define GAIN_MINIFLOAT_UNITY    0xE000
+
+/* Pack a pair of gain_minifloat_t into a combined gain_minifloat_packed_t */
+/* Layout: left channel occupies the low 16 bits, right the high 16 bits. */
+static inline gain_minifloat_packed_t gain_minifloat_pack(gain_minifloat_t left,
+        gain_minifloat_t right)
+{
+    return (right << 16) | left;
+}
+
+/* Unpack a gain_minifloat_packed_t into the two gain_minifloat_t components */
+static inline gain_minifloat_t gain_minifloat_unpack_left(gain_minifloat_packed_t packed)
+{
+    return packed & 0xFFFF;   // left channel occupies the low 16 bits (see pack)
+}
+
+/* Extract the right-channel gain from the packed pair. */
+static inline gain_minifloat_t gain_minifloat_unpack_right(gain_minifloat_packed_t packed)
+{
+    return packed >> 16;      // right channel occupies the high 16 bits (see pack)
+}
+
+/* A pair of unity gains expressed as a gain_minifloat_packed_t */
+#define GAIN_MINIFLOAT_PACKED_UNITY gain_minifloat_pack(GAIN_MINIFLOAT_UNITY, GAIN_MINIFLOAT_UNITY)
+
+/* Convert a float to the internal representation used for gains.
+ * The nominal range [0.0, 1.0], but the hard range is [0.0, 2.0).
+ * Negative and underflow values are converted to 0.0,
+ * and values larger than the hard maximum are truncated to the hard maximum.
+ *
+ * Minifloats are ordered, and standard comparisons may be used between them
+ * in the gain_minifloat_t representation.
+ *
+ * Details on internal representation of gains, based on mini-floats:
+ * The nominal maximum is 1.0 and the hard maximum is 1 ULP less than 2.0, or +6 dB.
+ * The minimum non-zero value is approximately 1.9e-6 or -114 dB.
+ * Negative numbers, infinity, and NaN are not supported.
+ * There are 13 significand bits specified, 1 implied hidden bit, 3 exponent bits,
+ * and no sign bit.  Denormals are supported.
+ */
+gain_minifloat_t gain_from_float(float f);
+
+/* Convert the internal representation used for gains to float */
+float float_from_gain(gain_minifloat_t gain);
+
+__END_DECLS
+
+#endif  // ANDROID_AUDIO_MINIFLOAT_H
diff --git a/media/audio_utils/include/audio_utils/primitives.h b/media/audio_utils/include/audio_utils/primitives.h
new file mode 100644
index 0000000..5fde7d4
--- /dev/null
+++ b/media/audio_utils/include/audio_utils/primitives.h
@@ -0,0 +1,959 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_AUDIO_PRIMITIVES_H
+#define ANDROID_AUDIO_PRIMITIVES_H
+
+#include <stdint.h>
+#include <stdlib.h>
+#include <sys/cdefs.h>
+
+__BEGIN_DECLS
+
+/* The memcpy_* conversion routines are designed to work in-place on same dst as src
+ * buffers only if the types shrink on copy, with the exception of memcpy_to_i16_from_u8().
+ * This allows the loops to go upwards for faster cache access (and may be more flexible
+ * for future optimization later).
+ */
+
+/**
+ * Dither and clamp pairs of 32-bit input samples (sums) to 16-bit output samples (out).
+ * Each 32-bit input sample can be viewed as a signed fixed-point Q19.12 of which the
+ * .12 fraction bits are dithered and the 19 integer bits are clamped to signed 16 bits.
+ * Alternatively the input can be viewed as Q4.27, of which the lowest .12 of the fraction
+ * is dithered and the remaining fraction is converted to the output Q.15, with clamping
+ * on the 4 integer guard bits.
+ *
+ * For interleaved stereo, c is the number of sample pairs,
+ * and out is an array of interleaved pairs of 16-bit samples per channel.
+ * For mono, c is the number of samples / 2, and out is an array of 16-bit samples.
+ * The name "dither" is a misnomer; the current implementation does not actually dither
+ * but uses truncation.  This may change.
+ * The out and sums buffers must either be completely separate (non-overlapping), or
+ * they must both start at the same address.  Partially overlapping buffers are not supported.
+ */
+void ditherAndClamp(int32_t* out, const int32_t *sums, size_t c);
+
+/* Expand and copy samples from unsigned 8-bit offset by 0x80 to signed 16-bit.
+ * Parameters:
+ *  dst     Destination buffer
+ *  src     Source buffer
+ *  count   Number of samples to copy
+ * The destination and source buffers must either be completely separate (non-overlapping), or
+ * they must both start at the same address.  Partially overlapping buffers are not supported.
+ */
+void memcpy_to_i16_from_u8(int16_t *dst, const uint8_t *src, size_t count);
+
+/* Shrink and copy samples from signed 16-bit to unsigned 8-bit offset by 0x80.
+ * Parameters:
+ *  dst     Destination buffer
+ *  src     Source buffer
+ *  count   Number of samples to copy
+ * The destination and source buffers must either be completely separate (non-overlapping), or
+ * they must both start at the same address.  Partially overlapping buffers are not supported.
+ * The conversion is done by truncation, without dithering, so it loses resolution.
+ */
+void memcpy_to_u8_from_i16(uint8_t *dst, const int16_t *src, size_t count);
+
+/* Copy samples from float to unsigned 8-bit offset by 0x80.
+ * Parameters:
+ *  dst     Destination buffer
+ *  src     Source buffer
+ *  count   Number of samples to copy
+ * The destination and source buffers must either be completely separate (non-overlapping), or
+ * they must both start at the same address.  Partially overlapping buffers are not supported.
+ * The conversion is done by truncation, without dithering, so it loses resolution.
+ */
+void memcpy_to_u8_from_float(uint8_t *dst, const float *src, size_t count);
+
+/* Shrink and copy samples from signed 32-bit fixed-point Q0.31 to signed 16-bit Q0.15.
+ * Parameters:
+ *  dst     Destination buffer
+ *  src     Source buffer
+ *  count   Number of samples to copy
+ * The destination and source buffers must either be completely separate (non-overlapping), or
+ * they must both start at the same address.  Partially overlapping buffers are not supported.
+ * The conversion is done by truncation, without dithering, so it loses resolution.
+ */
+void memcpy_to_i16_from_i32(int16_t *dst, const int32_t *src, size_t count);
+
+/* Shrink and copy samples from single-precision floating-point to signed 16-bit.
+ * Each float should be in the range -1.0 to 1.0.  Values outside that range are clamped,
+ * refer to clamp16_from_float().
+ * Parameters:
+ *  dst     Destination buffer
+ *  src     Source buffer
+ *  count   Number of samples to copy
+ * The destination and source buffers must either be completely separate (non-overlapping), or
+ * they must both start at the same address.  Partially overlapping buffers are not supported.
+ * The conversion is done by truncation, without dithering, so it loses resolution.
+ */
+void memcpy_to_i16_from_float(int16_t *dst, const float *src, size_t count);
+
+/* Copy samples from signed fixed-point 32-bit Q4.27 to single-precision floating-point.
+ * The nominal output float range is [-1.0, 1.0] if the fixed-point range is
+ * [0xf8000000, 0x07ffffff].  The full float range is [-16.0, 16.0].  Note the closed range
+ * at 1.0 and 16.0 is due to rounding on conversion to float. See float_from_q4_27() for details.
+ * Parameters:
+ *  dst     Destination buffer
+ *  src     Source buffer
+ *  count   Number of samples to copy
+ * The destination and source buffers must either be completely separate (non-overlapping), or
+ * they must both start at the same address.  Partially overlapping buffers are not supported.
+ */
+void memcpy_to_float_from_q4_27(float *dst, const int32_t *src, size_t count);
+
+/* Copy samples from signed fixed-point 16 bit Q0.15 to single-precision floating-point.
+ * The output float range is [-1.0, 1.0) for the fixed-point range [0x8000, 0x7fff].
+ * No rounding is needed as the representation is exact.
+ * Parameters:
+ *  dst     Destination buffer
+ *  src     Source buffer
+ *  count   Number of samples to copy
+ * The destination and source buffers must be completely separate.
+ */
+void memcpy_to_float_from_i16(float *dst, const int16_t *src, size_t count);
+
+/* Copy samples from unsigned fixed-point 8 bit to single-precision floating-point.
+ * The output float range is [-1.0, 1.0) for the fixed-point range [0x00, 0xFF].
+ * No rounding is needed as the representation is exact.
+ * Parameters:
+ *  dst     Destination buffer
+ *  src     Source buffer
+ *  count   Number of samples to copy
+ * The destination and source buffers must be completely separate.
+ */
+void memcpy_to_float_from_u8(float *dst, const uint8_t *src, size_t count);
+
+/* Copy samples from signed fixed-point packed 24 bit Q0.23 to single-precision floating-point.
+ * The packed 24 bit input is stored in native endian format in a uint8_t byte array.
+ * The output float range is [-1.0, 1.0) for the fixed-point range [0x800000, 0x7fffff].
+ * No rounding is needed as the representation is exact.
+ * Parameters:
+ *  dst     Destination buffer
+ *  src     Source buffer
+ *  count   Number of samples to copy
+ * The destination and source buffers must be completely separate.
+ */
+void memcpy_to_float_from_p24(float *dst, const uint8_t *src, size_t count);
+
+/* Copy samples from signed fixed-point packed 24 bit Q0.23 to signed fixed point 16 bit Q0.15.
+ * The packed 24 bit output is stored in native endian format in a uint8_t byte array.
+ * The data is truncated without rounding.
+ * Parameters:
+ *  dst     Destination buffer
+ *  src     Source buffer
+ *  count   Number of samples to copy
+ * The destination and source buffers must either be completely separate (non-overlapping), or
+ * they must both start at the same address.  Partially overlapping buffers are not supported.
+ */
+void memcpy_to_i16_from_p24(int16_t *dst, const uint8_t *src, size_t count);
+
+/* Copy samples from signed fixed-point packed 24 bit Q0.23 to signed fixed-point 32-bit Q0.31.
+ * The packed 24 bit input is stored in native endian format in a uint8_t byte array.
+ * The output data range is [0x80000000, 0x7fffff00] at intervals of 0x100.
+ * Parameters:
+ *  dst     Destination buffer
+ *  src     Source buffer
+ *  count   Number of samples to copy
+ * The destination and source buffers must be completely separate.
+ */
+void memcpy_to_i32_from_p24(int32_t *dst, const uint8_t *src, size_t count);
+
+/* Copy samples from signed fixed point 16 bit Q0.15 to signed fixed-point packed 24 bit Q0.23.
+ * The packed 24 bit output is assumed to be a native-endian uint8_t byte array.
+ * The output data range is [0x800000, 0x7fff00] (not full).
+ * Nevertheless there is no DC offset on the output, if the input has no DC offset.
+ * Parameters:
+ *  dst     Destination buffer
+ *  src     Source buffer
+ *  count   Number of samples to copy
+ * The destination and source buffers must be completely separate.
+ */
+void memcpy_to_p24_from_i16(uint8_t *dst, const int16_t *src, size_t count);
+
+/* Copy samples from single-precision floating-point to signed fixed-point packed 24 bit Q0.23.
+ * The packed 24 bit output is assumed to be a native-endian uint8_t byte array.
+ * The data is clamped and rounded to nearest, ties away from zero. See clamp24_from_float()
+ * for details.
+ * Parameters:
+ *  dst     Destination buffer
+ *  src     Source buffer
+ *  count   Number of samples to copy
+ * The destination and source buffers must either be completely separate (non-overlapping), or
+ * they must both start at the same address.  Partially overlapping buffers are not supported.
+ */
+void memcpy_to_p24_from_float(uint8_t *dst, const float *src, size_t count);
+
+/* Copy samples from signed fixed-point 32-bit Q8.23 to signed fixed-point packed 24 bit Q0.23.
+ * The packed 24 bit output is assumed to be a native-endian uint8_t byte array.
+ * The data is clamped to the range is [0x800000, 0x7fffff].
+ * Parameters:
+ *  dst     Destination buffer
+ *  src     Source buffer
+ *  count   Number of samples to copy
+ * The destination and source buffers must be completely separate.
+ */
+void memcpy_to_p24_from_q8_23(uint8_t *dst, const int32_t *src, size_t count);
+
+/* Shrink and copy samples from signed 32-bit fixed-point Q0.31
+ * to signed fixed-point packed 24 bit Q0.23.
+ * The packed 24 bit output is assumed to be a native-endian uint8_t byte array.
+ * Parameters:
+ *  dst     Destination buffer
+ *  src     Source buffer
+ *  count   Number of samples to copy
+ * The destination and source buffers must either be completely separate (non-overlapping), or
+ * they must both start at the same address.  Partially overlapping buffers are not supported.
+ * The conversion is done by truncation, without dithering, so it loses resolution.
+ */
+void memcpy_to_p24_from_i32(uint8_t *dst, const int32_t *src, size_t count);
+
+/* Copy samples from signed fixed point 16-bit Q0.15 to signed fixed-point 32-bit Q8.23.
+ * The output data range is [0xff800000, 0x007fff00] at intervals of 0x100.
+ * Parameters:
+ *  dst     Destination buffer
+ *  src     Source buffer
+ *  count   Number of samples to copy
+ * The destination and source buffers must be completely separate.
+ */
+void memcpy_to_q8_23_from_i16(int32_t *dst, const int16_t *src, size_t count);
+
+/* Copy samples from single-precision floating-point to signed fixed-point 32-bit Q8.23.
+ * This copy will clamp the Q8.23 representation to [0xff800000, 0x007fffff] even though there
+ * are guard bits available. Fractional lsb is rounded to nearest, ties away from zero.
+ * See clamp24_from_float() for details.
+ * Parameters:
+ *  dst     Destination buffer
+ *  src     Source buffer
+ *  count   Number of samples to copy
+ * The destination and source buffers must either be completely separate (non-overlapping), or
+ * they must both start at the same address.  Partially overlapping buffers are not supported.
+ */
+void memcpy_to_q8_23_from_float_with_clamp(int32_t *dst, const float *src, size_t count);
+
+/* Copy samples from signed fixed point packed 24-bit Q0.23 to signed fixed-point 32-bit Q8.23.
+ * The output data range is [0xff800000, 0x007fffff].
+ * Parameters:
+ *  dst     Destination buffer
+ *  src     Source buffer
+ *  count   Number of samples to copy
+ * The destination and source buffers must be completely separate.
+ */
+void memcpy_to_q8_23_from_p24(int32_t *dst, const uint8_t *src, size_t count);
+
+/* Copy samples from single-precision floating-point to signed fixed-point 32-bit Q4.27.
+ * The conversion will use the full available Q4.27 range, including guard bits.
+ * Fractional lsb is rounded to nearest, ties away from zero.
+ * See clampq4_27_from_float() for details.
+ * Parameters:
+ *  dst     Destination buffer
+ *  src     Source buffer
+ *  count   Number of samples to copy
+ * The destination and source buffers must either be completely separate (non-overlapping), or
+ * they must both start at the same address.  Partially overlapping buffers are not supported.
+ */
+void memcpy_to_q4_27_from_float(int32_t *dst, const float *src, size_t count);
+
+/* Copy samples from signed fixed-point 32-bit Q8.23 to signed fixed point 16-bit Q0.15.
+ * The data is clamped, and truncated without rounding.
+ * Parameters:
+ *  dst     Destination buffer
+ *  src     Source buffer
+ *  count   Number of samples to copy
+ * The destination and source buffers must either be completely separate (non-overlapping), or
+ * they must both start at the same address.  Partially overlapping buffers are not supported.
+ */
+void memcpy_to_i16_from_q8_23(int16_t *dst, const int32_t *src, size_t count);
+
+/* Copy samples from signed fixed-point 32-bit Q8.23 to single-precision floating-point.
+ * The nominal output float range is [-1.0, 1.0) for the fixed-point
+ * range [0xff800000, 0x007fffff]. The maximum output float range is [-256.0, 256.0).
+ * No rounding is needed as the representation is exact for nominal values.
+ * Rounding for overflow values is to nearest, ties to even.
+ * Parameters:
+ *  dst     Destination buffer
+ *  src     Source buffer
+ *  count   Number of samples to copy
+ * The destination and source buffers must either be completely separate (non-overlapping), or
+ * they must both start at the same address.  Partially overlapping buffers are not supported.
+ */
+void memcpy_to_float_from_q8_23(float *dst, const int32_t *src, size_t count);
+
+/* Copy samples from signed fixed point 16-bit Q0.15 to signed fixed-point 32-bit Q0.31.
+ * The output data range is [0x80000000, 0x7fff0000] at intervals of 0x10000.
+ * Parameters:
+ *  dst     Destination buffer
+ *  src     Source buffer
+ *  count   Number of samples to copy
+ * The destination and source buffers must be completely separate.
+ */
+void memcpy_to_i32_from_i16(int32_t *dst, const int16_t *src, size_t count);
+
+/* Copy samples from single-precision floating-point to signed fixed-point 32-bit Q0.31.
+ * If rounding is needed on truncation, the fractional lsb is rounded to nearest,
+ * ties away from zero. See clamp32_from_float() for details.
+ * Parameters:
+ *  dst     Destination buffer
+ *  src     Source buffer
+ *  count   Number of samples to copy
+ * The destination and source buffers must either be completely separate (non-overlapping), or
+ * they must both start at the same address.  Partially overlapping buffers are not supported.
+ */
+void memcpy_to_i32_from_float(int32_t *dst, const float *src, size_t count);
+
+/* Copy samples from signed fixed-point 32-bit Q0.31 to single-precision floating-point.
+ * The float range is [-1.0, 1.0] for the fixed-point range [0x80000000, 0x7fffffff].
+ * Rounding is done according to float_from_i32().
+ * Parameters:
+ *  dst     Destination buffer
+ *  src     Source buffer
+ *  count   Number of samples to copy
+ * The destination and source buffers must either be completely separate (non-overlapping), or
+ * they must both start at the same address.  Partially overlapping buffers are not supported.
+ */
+void memcpy_to_float_from_i32(float *dst, const int32_t *src, size_t count);
+
+/* Downmix pairs of interleaved stereo input 16-bit samples to mono output 16-bit samples.
+ * Parameters:
+ *  dst     Destination buffer
+ *  src     Source buffer
+ *  count   Number of stereo frames to downmix
+ * The destination and source buffers must be completely separate (non-overlapping).
+ * The current implementation truncates the mean rather than dither, but this may change.
+ */
+void downmix_to_mono_i16_from_stereo_i16(int16_t *dst, const int16_t *src, size_t count);
+
+/* Upmix mono input 16-bit samples to pairs of interleaved stereo output 16-bit samples by
+ * duplicating.
+ * Parameters:
+ *  dst     Destination buffer
+ *  src     Source buffer
+ *  count   Number of mono samples to upmix
+ * The destination and source buffers must be completely separate (non-overlapping).
+ */
+void upmix_to_stereo_i16_from_mono_i16(int16_t *dst, const int16_t *src, size_t count);
+
+/* Downmix pairs of interleaved stereo input float samples to mono output float samples
+ * by averaging the stereo pair together.
+ * Parameters:
+ *  dst     Destination buffer
+ *  src     Source buffer
+ *  count   Number of stereo frames to downmix
+ * The destination and source buffers must be completely separate (non-overlapping),
+ * or they must both start at the same address.
+ */
+void downmix_to_mono_float_from_stereo_float(float *dst, const float *src, size_t count);
+
+/* Upmix mono input float samples to pairs of interleaved stereo output float samples by
+ * duplicating.
+ * Parameters:
+ *  dst     Destination buffer
+ *  src     Source buffer
+ *  count   Number of mono samples to upmix
+ * The destination and source buffers must be completely separate (non-overlapping).
+ */
+void upmix_to_stereo_float_from_mono_float(float *dst, const float *src, size_t count);
+
+/* Return the total number of non-zero 32-bit samples */
+size_t nonZeroMono32(const int32_t *samples, size_t count);
+
+/* Return the total number of non-zero 16-bit samples */
+size_t nonZeroMono16(const int16_t *samples, size_t count);
+
+/* Return the total number of non-zero stereo frames, where a frame is considered non-zero
+ * if either of its constituent 32-bit samples is non-zero
+ */
+size_t nonZeroStereo32(const int32_t *frames, size_t count);
+
+/* Return the total number of non-zero stereo frames, where a frame is considered non-zero
+ * if either of its constituent 16-bit samples is non-zero
+ */
+size_t nonZeroStereo16(const int16_t *frames, size_t count);
+
+/* Copy frames, selecting source samples based on a source channel mask to fit
+ * the destination channel mask. Unmatched channels in the destination channel mask
+ * are zero filled. Unmatched channels in the source channel mask are dropped.
+ * Channels present in the channel mask are represented by set bits in the
+ * uint32_t value and are matched without further interpretation.
+ * Parameters:
+ *  dst         Destination buffer
+ *  dst_mask    Bit mask corresponding to destination channels present
+ *  src         Source buffer
+ *  src_mask    Bit mask corresponding to source channels present
+ *  sample_size Size of each sample in bytes.  Must be 1, 2, 3, or 4.
+ *  count       Number of frames to copy
+ * The destination and source buffers must be completely separate (non-overlapping).
+ * If the sample size is not in range, the function will abort.
+ */
+void memcpy_by_channel_mask(void *dst, uint32_t dst_mask,
+        const void *src, uint32_t src_mask, size_t sample_size, size_t count);
+
+/* Copy frames, selecting source samples based on an index array (idxary).
+ * The idxary[] consists of dst_channels number of elements.
+ * The ith element if idxary[] corresponds the ith destination channel.
+ * A non-negative value is the channel index in the source frame.
+ * A negative index (-1) represents filling with 0.
+ *
+ * Example: Swapping L and R channels for stereo streams
+ * idxary[0] = 1;
+ * idxary[1] = 0;
+ *
+ * Example: Copying a mono source to the front center 5.1 channel
+ * idxary[0] = -1;
+ * idxary[1] = -1;
+ * idxary[2] = 0;
+ * idxary[3] = -1;
+ * idxary[4] = -1;
+ * idxary[5] = -1;
+ *
+ * This copy allows swizzling of channels or replication of channels.
+ *
+ * Parameters:
+ *  dst           Destination buffer
+ *  dst_channels  Number of destination channels per frame
+ *  src           Source buffer
+ *  src_channels  Number of source channels per frame
+ *  idxary        Array of indices representing channels in the source frame
+ *  sample_size   Size of each sample in bytes.  Must be 1, 2, 3, or 4.
+ *  count         Number of frames to copy
+ * The destination and source buffers must be completely separate (non-overlapping).
+ * If the sample size is not in range, the function will abort.
+ */
+void memcpy_by_index_array(void *dst, uint32_t dst_channels,
+        const void *src, uint32_t src_channels,
+        const int8_t *idxary, size_t sample_size, size_t count);
+
+/* Prepares an index array (idxary) from channel masks, which can be later
+ * used by memcpy_by_index_array(). Returns the number of array elements required.
+ * This may be greater than idxcount, so the return value should be checked
+ * if idxary size is less than 32. Note that idxary is a caller allocated array
+ * of at least as many channels as present in the dst_mask.
+ * Channels present in the channel mask are represented by set bits in the
+ * uint32_t value and are matched without further interpretation.
+ *
+ * This function is typically used for converting audio data with different
+ * channel position masks.
+ *
+ * Parameters:
+ *  idxary      Updated array of indices of channels in the src frame for the dst frame
+ *  idxcount    Number of caller allocated elements in idxary
+ *  dst_mask    Bit mask corresponding to destination channels present
+ *  src_mask    Bit mask corresponding to source channels present
+ */
+size_t memcpy_by_index_array_initialization(int8_t *idxary, size_t idxcount,
+        uint32_t dst_mask, uint32_t src_mask);
+
+/* Prepares an index array (idxary) from channel masks, which can be later
+ * used by memcpy_by_index_array(). Returns the number of array elements required.
+ *
+ * For a source channel index mask, the source channels will map to the destination
+ * channels as if counting the set bits in dst_mask in order from lsb to msb
+ * (zero bits are ignored). The ith bit of the src_mask corresponds to the
+ * ith SET bit of dst_mask and the ith destination channel.  Hence, a zero ith
+ * bit of the src_mask indicates that the ith destination channel plays silence.
+ *
+ * Parameters:
+ *  idxary      Updated array of indices of channels in the src frame for the dst frame
+ *  idxcount    Number of caller allocated elements in idxary
+ *  dst_mask    Bit mask corresponding to destination channels present
+ *  src_mask    Bit mask corresponding to source channels present
+ */
+size_t memcpy_by_index_array_initialization_src_index(int8_t *idxary, size_t idxcount,
+        uint32_t dst_mask, uint32_t src_mask);
+
+/* Prepares an index array (idxary) from channel mask bits, which can be later
+ * used by memcpy_by_index_array(). Returns the number of array elements required.
+ *
+ * This initialization is for a destination channel index mask from a positional
+ * source mask.
+ *
+ * For an destination channel index mask, the input channels will map
+ * to the destination channels, with the ith SET bit in the source bits corresponding
+ * to the ith bit in the destination bits. If there is a zero bit in the middle
+ * of set destination bits (unlikely), the corresponding source channel will
+ * be dropped.
+ *
+ * Parameters:
+ *  idxary      Updated array of indices of channels in the src frame for the dst frame
+ *  idxcount    Number of caller allocated elements in idxary
+ *  dst_mask    Bit mask corresponding to destination channels present
+ *  src_mask    Bit mask corresponding to source channels present
+ */
+size_t memcpy_by_index_array_initialization_dst_index(int8_t *idxary, size_t idxcount,
+        uint32_t dst_mask, uint32_t src_mask);
+
+/**
+ * Clamp (aka hard limit or clip) a signed 32-bit sample to 16-bit range.
+ */
+static inline int16_t clamp16(int32_t sample)
+{
+    if ((sample>>15) ^ (sample>>31))
+        sample = 0x7FFF ^ (sample>>31);
+    return sample;
+}
+
+/*
+ * Convert a IEEE 754 single precision float [-1.0, 1.0) to int16_t [-32768, 32767]
+ * with clamping.  Note the open bound at 1.0, values within 1/65536 of 1.0 map
+ * to 32767 instead of 32768 (early clamping due to the smaller positive integer subrange).
+ *
+ * Values outside the range [-1.0, 1.0) are properly clamped to -32768 and 32767,
+ * including -Inf and +Inf. NaN will generally be treated either as -32768 or 32767,
+ * depending on the sign bit inside NaN (whose representation is not unique).
+ * Nevertheless, strictly speaking, NaN behavior should be considered undefined.
+ *
+ * Rounding of 0.5 lsb is to even (default for IEEE 754).
+ */
+static inline int16_t clamp16_from_float(float f)
+{
+    /* Offset is used to expand the valid range of [-1.0, 1.0) into the 16 lsbs of the
+     * floating point significand. The normal shift is 3<<22, but the -15 offset
+     * is used to multiply by 32768.
+     */
+    static const float offset = (float)(3 << (22 - 15));
+    /* zero = (0x10f << 22) =  0x43c00000 (not directly used) */
+    static const int32_t limneg = (0x10f << 22) /*zero*/ - 32768; /* 0x43bf8000 */
+    static const int32_t limpos = (0x10f << 22) /*zero*/ + 32767; /* 0x43c07fff */
+
+    union {
+        float f;
+        int32_t i;
+    } u;
+
+    u.f = f + offset; /* recenter valid range */
+    /* Now the valid range is represented as integers between [limneg, limpos].
+     * Clamp using the fact that float representation (as an integer) is an ordered set.
+     */
+    if (u.i < limneg)
+        u.i = -32768;
+    else if (u.i > limpos)
+        u.i = 32767;
+    return u.i; /* Return lower 16 bits, the part of interest in the significand. */
+}
+
+/*
+ * Convert a IEEE 754 single precision float [-1.0, 1.0) to uint8_t [0, 0xff]
+ * with clamping.  Note the open bound at 1.0, values within 1/128 of 1.0 map
+ * to 255 instead of 256 (early clamping due to the smaller positive integer subrange).
+ *
+ * Values outside the range [-1.0, 1.0) are properly clamped to 0 and 255,
+ * including -Inf and +Inf. NaN will generally be treated either as 0 or 255,
+ * depending on the sign bit inside NaN (whose representation is not unique).
+ * Nevertheless, strictly speaking, NaN behavior should be considered undefined.
+ *
+ * Rounding of 0.5 lsb is to even (default for IEEE 754).
+ */
+static inline uint8_t clamp8_from_float(float f)
+{
+    /* Offset is used to expand the valid range of [-1.0, 1.0) into the 16 lsbs of the
+     * floating point significand. The normal shift is 3<<22, but the -7 offset
+     * is used to multiply by 128.
+     */
+    static const float offset = (float)((3 << (22 - 7)) + 1 /* to cancel -1.0 */);
+    /* zero = (0x11f << 22) =  0x47c00000 */
+    static const int32_t limneg = (0x11f << 22) /*zero*/;
+    static const int32_t limpos = (0x11f << 22) /*zero*/ + 255; /* 0x47c000ff */
+
+    union {
+        float f;
+        int32_t i;
+    } u;
+
+    u.f = f + offset; /* recenter valid range */
+    /* Now the valid range is represented as integers between [limneg, limpos].
+     * Clamp using the fact that float representation (as an integer) is an ordered set.
+     */
+    if (u.i < limneg)
+        return 0;
+    if (u.i > limpos)
+        return 255;
+    return u.i; /* Return lower 8 bits, the part of interest in the significand. */
+}
+
+/* Convert a single-precision floating point value to a Q0.23 integer value, stored in a
+ * 32 bit signed integer (technically stored as Q8.23, but clamped to Q0.23).
+ *
+ * Rounds to nearest, ties away from 0.
+ *
+ * Values outside the range [-1.0, 1.0) are properly clamped to -8388608 and 8388607,
+ * including -Inf and +Inf. NaN values are considered undefined, and behavior may change
+ * depending on hardware and future implementation of this function.
+ */
+static inline int32_t clamp24_from_float(float f)
+{
+    static const float scale = (float)(1 << 23);
+    static const float limpos = 0x7fffff / scale;
+    static const float limneg = -0x800000 / scale;
+
+    if (f <= limneg) {
+        return -0x800000;
+    } else if (f >= limpos) {
+        return 0x7fffff;
+    }
+    f *= scale;
+    /* integer conversion is through truncation (though int to float is not).
+     * ensure that we round to nearest, ties away from 0.
+     */
+    return f > 0 ? f + 0.5 : f - 0.5;
+}
+
+/* Convert a signed fixed-point 32-bit Q8.23 value to a Q0.23 integer value,
+ * stored in a 32-bit signed integer (technically stored as Q8.23, but clamped to Q0.23).
+ *
+ * Values outside the range [-0x800000, 0x7fffff] are clamped to that range.
+ */
+static inline int32_t clamp24_from_q8_23(int32_t ival)
+{
+    static const int32_t limpos = 0x7fffff;
+    static const int32_t limneg = -0x800000;
+    if (ival < limneg) {
+        return limneg;
+    } else if (ival > limpos) {
+        return limpos;
+    } else {
+        return ival;
+    }
+}
+
+/* Convert a single-precision floating point value to a Q4.27 integer value.
+ * Rounds to nearest, ties away from 0.
+ *
+ * Values outside the range [-16.0, 16.0) are properly clamped to -2147483648 and 2147483647,
+ * including -Inf and +Inf. NaN values are considered undefined, and behavior may change
+ * depending on hardware and future implementation of this function.
+ */
+static inline int32_t clampq4_27_from_float(float f)
+{
+    static const float scale = (float)(1UL << 27);
+    static const float limpos = 16.;
+    static const float limneg = -16.;
+
+    if (f <= limneg) {
+        return -0x80000000; /* or 0x80000000 */
+    } else if (f >= limpos) {
+        return 0x7fffffff;
+    }
+    f *= scale;
+    /* integer conversion is through truncation (though int to float is not).
+     * ensure that we round to nearest, ties away from 0.
+     */
+    return f > 0 ? f + 0.5 : f - 0.5;
+}
+
+/* Convert a single-precision floating point value to a Q0.31 integer value.
+ * Rounds to nearest, ties away from 0.
+ *
+ * Values outside the range [-1.0, 1.0) are properly clamped to -2147483648 and 2147483647,
+ * including -Inf and +Inf. NaN values are considered undefined, and behavior may change
+ * depending on hardware and future implementation of this function.
+ */
+static inline int32_t clamp32_from_float(float f)
+{
+    static const float scale = (float)(1UL << 31);
+    static const float limpos = 1.;
+    static const float limneg = -1.;
+
+    if (f <= limneg) {
+        return -0x80000000; /* or 0x80000000 */
+    } else if (f >= limpos) {
+        return 0x7fffffff;
+    }
+    f *= scale;
+    /* integer conversion is through truncation (though int to float is not).
+     * ensure that we round to nearest, ties away from 0.
+     */
+    return f > 0 ? f + 0.5 : f - 0.5;
+}
+
/* Convert a signed fixed-point 32-bit Q4.27 value to single-precision floating-point.
 * The nominal output float range is [-1.0, 1.0] if the fixed-point range is
 * [0xf8000000, 0x07ffffff].  The full float range is [-16.0, 16.0].
 *
 * Note the closed range at 1.0 and 16.0 is due to rounding on conversion to float.
 * In more detail: if the fixed-point integer exceeds 24 bit significand of single
 * precision floating point, the 0.5 lsb in the significand conversion will round
 * towards even, as per IEEE 754 default.
 */
static inline float float_from_q4_27(int32_t ival)
{
    /* Scaling by the reciprocal of 2^27 (an exact power of two) preserves the
     * bit pattern; any rounding comes solely from the int32 -> float conversion. */
    return ival * (1.0f / (float)(1UL << 27));
}
+
/* Convert an unsigned fixed-point 32-bit U4.28 value to single-precision floating-point.
 * The nominal output float range is [0.0, 1.0] if the fixed-point range is
 * [0x00000000, 0x10000000].  The full float range is [0.0, 16.0].
 *
 * Note the closed range at 1.0 and 16.0 is due to rounding on conversion to float.
 * In more detail: if the fixed-point integer exceeds 24 bit significand of single
 * precision floating point, the 0.5 lsb in the significand conversion will round
 * towards even, as per IEEE 754 default.
 */
static inline float float_from_u4_28(uint32_t uval)
{
    /* Exact power-of-two scaling; rounding only in the uint32 -> float step. */
    return uval * (1.0f / (float)(1UL << 28));
}
+
/* Convert an unsigned fixed-point 16-bit U4.12 value to single-precision floating-point.
 * The nominal output float range is [0.0, 1.0] if the fixed-point range is
 * [0x0000, 0x1000].  The full float range is [0.0, 16.0).
 */
static inline float float_from_u4_12(uint16_t uval)
{
    /* 16 significant bits always fit in the float significand: exact result. */
    return uval * (1.0f / (float)(1UL << 12));
}
+
/* Convert a single-precision floating point value to a U4.28 integer value.
 * Rounds to nearest, ties away from 0.
 *
 * Values outside the range [0, 16.0] are properly clamped to [0, 4294967295]
 * including -Inf and +Inf. NaN values are considered undefined, and behavior may change
 * depending on hardware and future implementation of this function.
 */
static inline uint32_t u4_28_from_float(float f)
{
    static const float scale = (float)(1 << 28);
    /* largest representable output, expressed in the input domain */
    static const float limpos = 0xffffffffUL / scale;

    /* guard clauses: clamp below and above the representable range */
    if (f <= 0.) {
        return 0;
    }
    if (f >= limpos) {
        return 0xffffffff;
    }
    /* integer conversion truncates, so bias by 0.5 to round to nearest,
     * ties away from 0 (input is strictly positive here). */
    return f * scale + 0.5;
}
+
/* Convert a single-precision floating point value to a U4.12 integer value.
 * Rounds to nearest, ties away from 0.
 *
 * Values outside the range [0, 16.0) are properly clamped to [0, 65535]
 * including -Inf and +Inf. NaN values are considered undefined, and behavior may change
 * depending on hardware and future implementation of this function.
 */
static inline uint16_t u4_12_from_float(float f)
{
    static const float scale = (float)(1 << 12);
    /* largest representable output, expressed in the input domain */
    static const float limpos = 0xffff / scale;

    /* guard clauses: clamp below and above the representable range */
    if (f <= 0.) {
        return 0;
    }
    if (f >= limpos) {
        return 0xffff;
    }
    /* integer conversion truncates, so bias by 0.5 to round to nearest,
     * ties away from 0 (input is strictly positive here). */
    return f * scale + 0.5;
}
+
/* Convert a signed fixed-point 16-bit Q0.15 value to single-precision floating-point.
 * The output float range is [-1.0, 1.0) for the fixed-point range
 * [0x8000, 0x7fff].
 *
 * There is no rounding, the conversion and representation is exact.
 */
static inline float float_from_i16(int16_t ival)
{
    /* Divide by the 16-bit half-range (2^15). 16 significant bits always fit
     * in the 24-bit float significand, and the scale is a power of two,
     * so the conversion is exact. */
    return ival * (1.0f / (float)(1UL << 15));
}
+
/* Convert an unsigned fixed-point 8-bit U0.8 value to single-precision floating-point.
 * The nominal output float range is [-1.0, 1.0) if the fixed-point range is
 * [0x00, 0xff].
 */
static inline float float_from_u8(uint8_t uval)
{
    /* Remove the 0x80 bias, then scale by 1/2^7; exact conversion. */
    return ((int)uval - 128) * (1.0f / (float)(1UL << 7));
}
+
/* Convert a packed 24bit Q0.23 value stored native-endian in a uint8_t ptr
 * to a signed fixed-point 32 bit integer Q0.31 value. The output Q0.31 range
 * is [0x80000000, 0x7fffff00] for the fixed-point range [0x800000, 0x7fffff].
 * Even though the output range is limited on the positive side, there is no
 * DC offset on the output, if the input has no DC offset.
 *
 * Avoid relying on the limited output range, as future implementations may go
 * to full range.
 */
static inline int32_t i32_from_p24(const uint8_t *packed24)
{
    /* Assemble the word in unsigned arithmetic: the previous expression
     * left-shifted packed24[2] (promoted to int) by 24, which is undefined
     * behavior when the byte has its top bit set (shifting into the sign bit).
     * Building the value as uint32_t and converting keeps the same bit pattern
     * on two's-complement targets without UB. */
    return (int32_t)(((uint32_t)packed24[0] << 8) |
                     ((uint32_t)packed24[1] << 16) |
                     ((uint32_t)packed24[2] << 24));
}
+
/* Convert a 32-bit Q0.31 value to single-precision floating-point.
 * The output float range is [-1.0, 1.0] for the fixed-point range
 * [0x80000000, 0x7fffffff].
 *
 * Rounding may occur in the least significant 8 bits for large fixed point
 * values due to storage into the 24-bit floating-point significand.
 * Rounding will be to nearest, ties to even.
 */
static inline float float_from_i32(int32_t ival)
{
    /* Power-of-two scale: rounding only in the int32 -> float conversion. */
    return ival * (1.0f / (float)(1UL << 31));
}
+
/* Convert a packed 24bit Q0.23 value stored native endian in a uint8_t ptr
 * to single-precision floating-point. The output float range is [-1.0, 1.0)
 * for the fixed-point range [0x800000, 0x7fffff].
 *
 * There is no rounding, the conversion and representation is exact.
 */
static inline float float_from_p24(const uint8_t *packed24)
{
    /* Widen to Q0.31 first, then scale to float; the low 8 bits of the
     * intermediate are zero, so the 24 significant bits fit the float
     * significand exactly. */
    return float_from_i32(i32_from_p24(packed24));
}
+
/* Convert a 24-bit Q8.23 value to single-precision floating-point.
 * The nominal output float range is [-1.0, 1.0) for the fixed-point
 * range [0xff800000, 0x007fffff].  The maximum float range is [-256.0, 256.0).
 *
 * There is no rounding in the nominal range, the conversion and representation
 * is exact. For values outside the nominal range, rounding is to nearest, ties to even.
 */
static inline float float_from_q8_23(int32_t ival)
{
    /* Power-of-two scale (1/2^23): exact within the 24-bit nominal range. */
    return ival * (1.0f / (float)(1UL << 23));
}
+
/**
 * Multiply-accumulate 16-bit terms with 32-bit result: return a + in*v.
 *
 * On classic (non-Thumb) ARM this maps onto a single SMLABB
 * (signed multiply-accumulate, bottom halfwords) instruction.
 */
static inline
int32_t mulAdd(int16_t in, int16_t v, int32_t a)
{
#if defined(__arm__) && !defined(__thumb__)
    int32_t out;
    asm( "smlabb %[out], %[in], %[v], %[a] \n"
         : [out]"=r"(out)
         : [in]"%r"(in), [v]"r"(v), [a]"r"(a)
         : );
    return out;
#else
    /* Promote one operand so the 16x16 product is computed in 32 bits. */
    return a + (int32_t)in * v;
#endif
}
+
/**
 * Multiply 16-bit terms with 32-bit result: return in*v.
 *
 * On classic (non-Thumb) ARM this maps onto a single SMULBB
 * (signed multiply, bottom halfwords) instruction.
 */
static inline
int32_t mul(int16_t in, int16_t v)
{
#if defined(__arm__) && !defined(__thumb__)
    int32_t out;
    asm( "smulbb %[out], %[in], %[v] \n"
         : [out]"=r"(out)
         : [in]"%r"(in), [v]"r"(v)
         : );
    return out;
#else
    /* Promote one operand so the 16x16 product is computed in 32 bits. */
    return (int32_t)in * v;
#endif
}
+
/**
 * Similar to mulAdd, but the 16-bit terms are extracted from a 32-bit interleaved stereo pair.
 * A non-zero 'left' selects the low halfwords of inRL/vRL; zero selects the high halfwords.
 */
static inline
int32_t mulAddRL(int left, uint32_t inRL, uint32_t vRL, int32_t a)
{
#if defined(__arm__) && !defined(__thumb__)
    int32_t out;
    if (left) {
        asm( "smlabb %[out], %[inRL], %[vRL], %[a] \n"
             : [out]"=r"(out)
             : [inRL]"%r"(inRL), [vRL]"r"(vRL), [a]"r"(a)
             : );
    } else {
        asm( "smlatt %[out], %[inRL], %[vRL], %[a] \n"
             : [out]"=r"(out)
             : [inRL]"%r"(inRL), [vRL]"r"(vRL), [a]"r"(a)
             : );
    }
    return out;
#else
    /* Pick the matching halfword from each packed pair, then multiply-accumulate. */
    const int16_t inw = (int16_t)(left ? (inRL & 0xFFFF) : (inRL >> 16));
    const int16_t vw  = (int16_t)(left ? (vRL & 0xFFFF) : (vRL >> 16));
    return a + inw * vw;
#endif
}
+
/**
 * Similar to mul, but the 16-bit terms are extracted from a 32-bit interleaved stereo pair.
 * A non-zero 'left' selects the low halfwords of inRL/vRL; zero selects the high halfwords.
 */
static inline
int32_t mulRL(int left, uint32_t inRL, uint32_t vRL)
{
#if defined(__arm__) && !defined(__thumb__)
    int32_t out;
    if (left) {
        asm( "smulbb %[out], %[inRL], %[vRL] \n"
             : [out]"=r"(out)
             : [inRL]"%r"(inRL), [vRL]"r"(vRL)
             : );
    } else {
        asm( "smultt %[out], %[inRL], %[vRL] \n"
             : [out]"=r"(out)
             : [inRL]"%r"(inRL), [vRL]"r"(vRL)
             : );
    }
    return out;
#else
    /* Pick the matching halfword from each packed pair, then multiply. */
    const int16_t inw = (int16_t)(left ? (inRL & 0xFFFF) : (inRL >> 16));
    const int16_t vw  = (int16_t)(left ? (vRL & 0xFFFF) : (vRL >> 16));
    return inw * vw;
#endif
}
+
+__END_DECLS
+
+#endif  // ANDROID_AUDIO_PRIMITIVES_H
diff --git a/media/audio_utils/include/audio_utils/resampler.h b/media/audio_utils/include/audio_utils/resampler.h
new file mode 100644
index 0000000..0c7046f
--- /dev/null
+++ b/media/audio_utils/include/audio_utils/resampler.h
@@ -0,0 +1,109 @@
+/*
+** Copyright 2008, The Android Open-Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+#ifndef ANDROID_RESAMPLER_H
+#define ANDROID_RESAMPLER_H
+
+#include <stdint.h>
+#include <sys/time.h>
+
+__BEGIN_DECLS
+
+
+#define RESAMPLER_QUALITY_MAX 10
+#define RESAMPLER_QUALITY_MIN 0
+#define RESAMPLER_QUALITY_DEFAULT 4
+#define RESAMPLER_QUALITY_VOIP 3
+#define RESAMPLER_QUALITY_DESKTOP 5
+
struct resampler_buffer {
    union {
        void*       raw;    /* untyped access to the sample data */
        short*      i16;    /* the same data viewed as 16-bit PCM samples */
        int8_t*     i8;     /* the same data viewed as 8-bit samples */
    };
    size_t frame_count;     /* number of frames in (or requested for) the buffer */
};

/* call back interface used by the resampler to get new data */
struct resampler_buffer_provider
{
    /**
     *  get a new buffer of data:
     *   as input: buffer->frame_count is the number of frames requested
     *   as output: buffer->frame_count is the number of frames returned
     *              buffer->raw points to data returned
     */
    int (*get_next_buffer)(struct resampler_buffer_provider *provider,
            struct resampler_buffer *buffer);
    /**
     *  release a consumed buffer of data:
     *   as input: buffer->frame_count is the number of frames released
     *             buffer->raw points to data released
     */
    void (*release_buffer)(struct resampler_buffer_provider *provider,
            struct resampler_buffer *buffer);
};

/* resampler interface */
struct resampler_itfe {
    /**
     * reset resampler state
     */
    void (*reset)(struct resampler_itfe *resampler);
    /**
     * resample input from buffer provider and output at most *outFrameCount to out buffer.
     * *outFrameCount is updated with the actual number of frames produced.
     */
    int (*resample_from_provider)(struct resampler_itfe *resampler,
                    int16_t *out,
                    size_t *outFrameCount);
    /**
     * resample at most *inFrameCount frames from in buffer and output at most
     * *outFrameCount to out buffer. *inFrameCount and *outFrameCount are updated respectively
     * with the number of frames remaining in input and written to output.
     */
    int (*resample_from_input)(struct resampler_itfe *resampler,
                    int16_t *in,
                    size_t *inFrameCount,
                    int16_t *out,
                    size_t *outFrameCount);
    /**
     * return the latency introduced by the resampler in ns.
     */
    int32_t (*delay_ns)(struct resampler_itfe *resampler);
};
+
+/**
+ * create a resampler according to input parameters passed.
+ * If resampler_buffer_provider is not NULL only resample_from_provider() can be called.
+ * If resampler_buffer_provider is NULL only resample_from_input() can be called.
+ */
+int create_resampler(uint32_t inSampleRate,
+          uint32_t outSampleRate,
+          uint32_t channelCount,
+          uint32_t quality,
+          struct resampler_buffer_provider *provider,
+          struct resampler_itfe **);
+
+/**
+ * release resampler resources.
+ */
+void release_resampler(struct resampler_itfe *);
+
+__END_DECLS
+
+#endif // ANDROID_RESAMPLER_H
diff --git a/media/audio_utils/include/audio_utils/roundup.h b/media/audio_utils/include/audio_utils/roundup.h
new file mode 100644
index 0000000..ad34289
--- /dev/null
+++ b/media/audio_utils/include/audio_utils/roundup.h
@@ -0,0 +1,31 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_AUDIO_ROUNDUP_H
+#define ANDROID_AUDIO_ROUNDUP_H
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+// Round up to the next highest power of 2
+unsigned roundup(unsigned v);
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif  // ANDROID_AUDIO_ROUNDUP_H
diff --git a/media/audio_utils/include/audio_utils/sndfile.h b/media/audio_utils/include/audio_utils/sndfile.h
new file mode 100644
index 0000000..e24632b
--- /dev/null
+++ b/media/audio_utils/include/audio_utils/sndfile.h
@@ -0,0 +1,76 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __AUDIO_UTIL_SNDFILE_H
+#define __AUDIO_UTIL_SNDFILE_H
+
+// This is a C library for reading and writing PCM .wav files.  It is
+// influenced by other libraries such as libsndfile and audiofile, except is
+// much smaller and has an Apache 2.0 license.
+// The API should be familiar to clients of similar libraries, but there is
+// no guarantee that it will stay exactly source-code compatible with other libraries.
+
+#include <stdio.h>
+#include <sys/cdefs.h>
+
+__BEGIN_DECLS
+
// visible to clients
typedef int sf_count_t;     // frame count / index type (32-bit in this implementation)

typedef struct {
    sf_count_t frames;      // number of frames in the file
    int samplerate;         // sample rate in Hz
    int channels;           // number of interleaved channels
    int format;             // combination of SF_FORMAT_* type and subformat bits
} SF_INFO;
+
+// opaque to clients
+typedef struct SNDFILE_ SNDFILE;
+
+// Access modes
+#define SFM_READ    1
+#define SFM_WRITE   2
+
+// Format
+#define SF_FORMAT_TYPEMASK  1
+#define SF_FORMAT_WAV       1
+#define SF_FORMAT_SUBMASK   14
+#define SF_FORMAT_PCM_16    2
+#define SF_FORMAT_PCM_U8    4
+#define SF_FORMAT_FLOAT     6
+#define SF_FORMAT_PCM_32    8
+#define SF_FORMAT_PCM_24    10
+
+// Open stream
+SNDFILE *sf_open(const char *path, int mode, SF_INFO *info);
+
+// Close stream
+void sf_close(SNDFILE *handle);
+
+// Read interleaved frames and return actual number of frames read
+sf_count_t sf_readf_short(SNDFILE *handle, short *ptr, sf_count_t desired);
+sf_count_t sf_readf_float(SNDFILE *handle, float *ptr, sf_count_t desired);
+sf_count_t sf_readf_int(SNDFILE *handle, int *ptr, sf_count_t desired);
+
+// Write interleaved frames and return actual number of frames written
+sf_count_t sf_writef_short(SNDFILE *handle, const short *ptr, sf_count_t desired);
+sf_count_t sf_writef_float(SNDFILE *handle, const float *ptr, sf_count_t desired);
+sf_count_t sf_writef_int(SNDFILE *handle, const int *ptr, sf_count_t desired);
+
+__END_DECLS
+
+#endif /* __AUDIO_UTIL_SNDFILE_H */
diff --git a/media/audio_utils/include/audio_utils/spdif/FrameScanner.h b/media/audio_utils/include/audio_utils/spdif/FrameScanner.h
new file mode 100644
index 0000000..6d391ee
--- /dev/null
+++ b/media/audio_utils/include/audio_utils/spdif/FrameScanner.h
@@ -0,0 +1,130 @@
+/*
+ * Copyright 2014, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_AUDIO_FRAME_SCANNER_H
+#define ANDROID_AUDIO_FRAME_SCANNER_H
+
+#include <stdint.h>
+
+namespace android {
+
+
+/**
+ * Scan a byte stream looking for the start of an encoded frame.
+ * Parse the sample rate and the size of the encoded frame.
+ * Buffer the sync header so it can be prepended to the remaining data.
+ *
+ * This is used directly by the SPDIFEncoder. External clients will
+ * generally not call this class.
+ */
class FrameScanner {
public:
    /**
     * @param dataType data type code as defined in IEC61937-2 paragraph 4.2
     * @param syncBytes pointer to the format's sync word byte pattern
     * @param syncLength number of bytes in the sync word
     * @param headerLength number of header bytes to buffer for parseHeader()
     *        NOTE(review): assumed to fit in mHeaderBuffer (32 bytes) -- not checked here
     */
    FrameScanner(int dataType,
            const uint8_t *syncBytes,
            uint32_t syncLength,
            uint32_t headerLength
            );
    virtual ~FrameScanner();

    /**
     * Pass each byte of the encoded stream to this scanner.
     * @return true if a complete and valid header was detected
     */
    virtual bool scan(uint8_t byte);

    /**
     * @return address of where the sync header was stored by scan()
     */
    const uint8_t *getHeaderAddress() const { return mHeaderBuffer; }

    /**
     * @return number of bytes in sync header stored by scan()
     */
    size_t getHeaderSizeBytes() const { return mHeaderLength; }

    /**
     * @return sample rate of the encoded audio
     */
    uint32_t getSampleRate()   const { return mSampleRate; }

    /**
     * Some formats, for example EAC3, are wrapped in data bursts that have
     * a sample rate that is a multiple of the encoded sample rate.
     * The default multiplier is 1.
     * @return sample rate multiplier for the SP/DIF PCM data bursts
     */
    uint32_t getRateMultiplier()   const { return mRateMultiplier; }

    /**
     * @return size in bytes of one encoded (sync) frame, as set by parseHeader()
     */
    size_t getFrameSizeBytes()     const { return mFrameSizeBytes; }

    /**
     * dataType is defined by the SPDIF standard for each format
     */
    int getDataType()      const { return mDataType; }
    int getDataTypeInfo()  const { return mDataTypeInfo; }

    virtual int getMaxChannels() const = 0;

    virtual void resetBurst() = 0;

    /**
     * @return the number of pcm frames that correspond to one encoded frame
     */
    virtual int getMaxSampleFramesPerSyncFrame() const = 0;
    virtual int getSampleFramesPerSyncFrame()    const = 0;

    /**
     * @return true if this parsed frame must be the first frame in a data burst.
     */
    virtual bool isFirstInBurst() = 0;

    /**
     * If this returns false then the previous frame may or may not be the last frame.
     * @return true if this parsed frame is definitely the last frame in a data burst.
     */
    virtual bool isLastInBurst()  = 0;

    /**
     * Most compression types use a lengthCode expressed in bits.
     */
    virtual uint16_t convertBytesToLengthCode(uint16_t numBytes) const { return numBytes * 8; }

protected:
    uint32_t  mBytesSkipped;     // how many bytes were skipped looking for the start of a frame
    const uint8_t *mSyncBytes;   // pointer to the sync word specific to a format
    uint32_t  mSyncLength;       // number of bytes in sync word
    uint8_t   mHeaderBuffer[32]; // a place to gather the relevant header bytes for parsing
    uint32_t  mHeaderLength;     // the number of bytes we need to parse
    uint32_t  mCursor;           // position in the mHeaderBuffer
    uint32_t  mFormatDumpCount;  // used to thin out the debug dumps
    uint32_t  mSampleRate;       // encoded sample rate
    uint32_t  mRateMultiplier;   // SPDIF output data burst rate = msampleRate * mRateMultiplier
    size_t    mFrameSizeBytes;   // encoded frame size
    int       mDataType;         // as defined in IEC61937-2 paragraph 4.2
    int       mDataTypeInfo;     // as defined in IEC61937-2 paragraph 4.1

    /**
     * Parse data in mHeaderBuffer.
     * Sets mDataType, mFrameSizeBytes, mSampleRate, mRateMultiplier.
     * @return true if the header is valid.
     */
    virtual bool parseHeader() = 0;

};
+
+
+}  // namespace android
+#endif  // ANDROID_AUDIO_FRAME_SCANNER_H
diff --git a/media/audio_utils/include/audio_utils/spdif/SPDIFEncoder.h b/media/audio_utils/include/audio_utils/spdif/SPDIFEncoder.h
new file mode 100644
index 0000000..b356149
--- /dev/null
+++ b/media/audio_utils/include/audio_utils/spdif/SPDIFEncoder.h
@@ -0,0 +1,114 @@
+/*
+ * Copyright 2014, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_AUDIO_SPDIF_ENCODER_H
+#define ANDROID_AUDIO_SPDIF_ENCODER_H
+
+#include <stdint.h>
+#include <hardware/audio.h>
+#include <audio_utils/spdif/FrameScanner.h>
+
+namespace android {
+
+/**
+ * Scan the incoming byte stream for a frame sync.
+ * Then wrap the encoded frame in a data burst and send it as if it were PCM.
+ * The receiver will see the data burst header and decode the wrapped frame.
+ */
+#define SPDIF_MAX_CHANNELS          8
+#define SPDIF_ENCODED_CHANNEL_COUNT 2
+
class SPDIFEncoder {
public:

    explicit SPDIFEncoder(audio_format_t format);
    // Defaults to AC3 format. Was in original API.
    SPDIFEncoder();

    virtual ~SPDIFEncoder();

    /**
     * Write encoded data to be wrapped for SPDIF.
     * The compressed frames do not have to be aligned.
     * @return number of bytes written or negative error
     */
    ssize_t write( const void* buffer, size_t numBytes );

    /**
     * Called by SPDIFEncoder when it is ready to output a data burst.
     * Must be implemented in the subclass.
     * @return number of bytes written or negative error
     */
    virtual ssize_t writeOutput( const void* buffer, size_t numBytes ) = 0;

    /**
     * Get ratio of the encoded data burst sample rate to the encoded rate.
     * For example, EAC3 data bursts are 4X the encoded rate.
     */
    uint32_t getRateMultiplier() const { return mRateMultiplier; }

    /**
     * @return number of PCM frames in a data burst
     */
    uint32_t getBurstFrames() const { return mBurstFrames; }

    /**
     * @return number of bytes per PCM frame for the data burst
     */
    int      getBytesPerOutputFrame();

    /**
     * @return  true if we can wrap this format in an SPDIF stream
     */
    static bool isFormatSupported(audio_format_t format);

    /**
     * Discard any data in the buffer. Reset frame scanners.
     * This should be called when seeking to a new position in the stream.
     */
    void reset();

protected:
    void   clearBurstBuffer();
    // NOTE(review): numBytes appears to be a byte count even though the buffer
    // is 16-bit words -- confirm against the implementation.
    void   writeBurstBufferShorts(const uint16_t* buffer, size_t numBytes);
    void   writeBurstBufferBytes(const uint8_t* buffer, size_t numBytes);
    void   sendZeroPad();
    void   flushBurstBuffer();
    void   startDataBurst();
    size_t startSyncFrame();

    // Works with various formats including AC3.
    FrameScanner *mFramer;

    uint32_t  mSampleRate;
    size_t    mFrameSize;   // size of sync frame in bytes
    uint16_t *mBurstBuffer; // ALSA wants to get SPDIF data as shorts.
    size_t    mBurstBufferSizeBytes;
    uint32_t  mRateMultiplier;
    uint32_t  mBurstFrames;
    size_t    mByteCursor;  // cursor into data burst
    int       mBitstreamNumber;
    size_t    mPayloadBytesPending; // number of bytes needed to finish burst
    // state variable, true if scanning for start of frame
    bool      mScanning;

    // IEC61937 preamble sync words (Pa, Pb).
    static const unsigned short kSPDIFSync1; // Pa
    static const unsigned short kSPDIFSync2; // Pb
};
+
+}  // namespace android
+
+#endif  // ANDROID_AUDIO_SPDIF_ENCODER_H
diff --git a/media/audio_utils/minifloat.c b/media/audio_utils/minifloat.c
new file mode 100644
index 0000000..70a4a8c
--- /dev/null
+++ b/media/audio_utils/minifloat.c
@@ -0,0 +1,62 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <math.h>
+#include <audio_utils/minifloat.h>
+
+#define EXPONENT_BITS   3
+#define EXPONENT_MAX    ((1 << EXPONENT_BITS) - 1)
+#define EXCESS          ((1 << EXPONENT_BITS) - 2)
+
+#define MANTISSA_BITS   13
+#define MANTISSA_MAX    ((1 << MANTISSA_BITS) - 1)
+#define HIDDEN_BIT      (1 << MANTISSA_BITS)
+#define ONE_FLOAT       ((float) (1 << (MANTISSA_BITS + 1)))
+
+#define MINIFLOAT_MAX   ((EXPONENT_MAX << MANTISSA_BITS) | MANTISSA_MAX)
+
+#if EXPONENT_BITS + MANTISSA_BITS != 16
+#error EXPONENT_BITS and MANTISSA_BITS must sum to 16
+#endif
+
gain_minifloat_t gain_from_float(float v)
{
    /* NaN and non-positive gains collapse to the minifloat zero code. */
    if (isnan(v) || v <= 0.0f) {
        return 0;
    }
    /* Gains of 2.0 or more saturate at the largest representable minifloat. */
    if (v >= 2.0f) {
        return MINIFLOAT_MAX;
    }
    int exp;
    float r = frexpf(v, &exp);  /* v == r * 2^exp with r in [0.5, 1.0) */
    /* Bias the exponent; overflow saturates to the maximum code. */
    if ((exp += EXCESS) > EXPONENT_MAX) {
        return MINIFLOAT_MAX;
    }
    /* Too small to represent even as a subnormal: underflow to zero. */
    if (-exp >= MANTISSA_BITS) {
        return 0;
    }
    /* Scale the fraction so the hidden bit lands at bit MANTISSA_BITS. */
    int mantissa = (int) (r * ONE_FLOAT);
    /* Positive biased exponent: normal encoding with the hidden bit stripped.
     * Otherwise: subnormal encoding, mantissa shifted down, exponent field 0. */
    return exp > 0 ? (exp << MANTISSA_BITS) | (mantissa & ~HIDDEN_BIT) :
            (mantissa >> (1 - exp)) & MANTISSA_MAX;
}
+
float float_from_gain(gain_minifloat_t a)
{
    int mantissa = a & MANTISSA_MAX;                    /* low MANTISSA_BITS bits */
    int exponent = (a >> MANTISSA_BITS) & EXPONENT_MAX; /* top EXPONENT_BITS bits */
    /* exponent > 0: normal value, restore the hidden bit;
     * exponent == 0: subnormal, mantissa is shifted up one bit instead. */
    return ldexpf((exponent > 0 ? HIDDEN_BIT | mantissa : mantissa << 1) / ONE_FLOAT,
            exponent - EXCESS);
}
diff --git a/media/audio_utils/primitives.c b/media/audio_utils/primitives.c
new file mode 100644
index 0000000..d44c29e
--- /dev/null
+++ b/media/audio_utils/primitives.c
@@ -0,0 +1,526 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <cutils/bitops.h>  /* for popcount() */
+#include <audio_utils/primitives.h>
+#include "private/private.h"
+
/* Reduce interleaved stereo Q19.12 accumulator samples to packed 16-bit pairs.
 * NOTE(review): despite the name, no dither is applied here -- each sample is
 * simply shifted down by 12 bits, clamped to 16 bits, and the left/right pair
 * packed into one 32-bit word (right in the high halfword). */
void ditherAndClamp(int32_t* out, const int32_t *sums, size_t c)
{
    size_t i;
    for (i = 0; i < c; i++) {
        const int32_t left  = clamp16(sums[2 * i]     >> 12);
        const int32_t right = clamp16(sums[2 * i + 1] >> 12);
        out[i] = (right << 16) | (left & 0xFFFF);
    }
}
+
/* Expand unsigned 8-bit samples to Q0.15 by removing the 0x80 bias and
 * shifting into the high byte. The copy runs backwards so the conversion is
 * safe when dst aliases src (in-place widening of a buffer). */
void memcpy_to_i16_from_u8(int16_t *dst, const uint8_t *src, size_t count)
{
    const uint8_t *s = src + count;
    int16_t *d = dst + count;
    while (count-- > 0) {
        --s;
        --d;
        *d = (int16_t)(*s - 0x80) << 8;
    }
}
+
/* Narrow Q0.15 samples to unsigned 8-bit: keep the high byte and re-apply the
 * 0x80 bias. Truncates (no dither or rounding). */
void memcpy_to_u8_from_i16(uint8_t *dst, const int16_t *src, size_t count)
{
    size_t i;
    for (i = 0; i < count; i++) {
        dst[i] = (src[i] >> 8) + 0x80;
    }
}
+
/* Convert float samples to unsigned 8-bit; out-of-range values are clamped
 * by clamp8_from_float() (see <audio_utils/primitives.h>). */
void memcpy_to_u8_from_float(uint8_t *dst, const float *src, size_t count)
{
    while (count--) {
        *dst++ = clamp8_from_float(*src++);
    }
}
+
/* Narrow Q0.31 samples to Q0.15 by keeping the high 16 bits.
 * Truncates (no dither or rounding). */
void memcpy_to_i16_from_i32(int16_t *dst, const int32_t *src, size_t count)
{
    size_t i;
    for (i = 0; i < count; i++) {
        dst[i] = (int16_t)(src[i] >> 16);
    }
}
+
/* Convert float samples to Q0.15 with clamping and round-to-nearest,
 * via clamp16_from_float() (see <audio_utils/primitives.h>). */
void memcpy_to_i16_from_float(int16_t *dst, const float *src, size_t count)
{
    while (count--) {
        *dst++ = clamp16_from_float(*src++);
    }
}
+
/* Convert Q4.27 samples to float; pure power-of-two scaling, no clamping
 * (see float_from_q4_27 in <audio_utils/primitives.h>). */
void memcpy_to_float_from_q4_27(float *dst, const int32_t *src, size_t count)
{
    while (count--) {
        *dst++ = float_from_q4_27(*src++);
    }
}

/* Convert Q0.15 samples to float in [-1.0, 1.0); exact, no rounding. */
void memcpy_to_float_from_i16(float *dst, const int16_t *src, size_t count)
{
    while (count--) {
        *dst++ = float_from_i16(*src++);
    }
}

/* Convert unsigned 8-bit samples (biased at 0x80) to float in [-1.0, 1.0). */
void memcpy_to_float_from_u8(float *dst, const uint8_t *src, size_t count)
{
    while (count--) {
        *dst++ = float_from_u8(*src++);
    }
}
+
/* Unpack native-endian 24-bit (3 bytes per sample) data to float.
 * Exact conversion: 24 significant bits fit the float significand. */
void memcpy_to_float_from_p24(float *dst, const uint8_t *src, size_t count)
{
    while (count--) {
        *dst++ = float_from_p24(src);
        src += 3;   /* advance one packed sample */
    }
}
+
/* Narrow packed native-endian 24-bit samples to Q0.15 by keeping the top
 * 16 bits of each sample. Truncates (no dither or rounding). */
void memcpy_to_i16_from_p24(int16_t *dst, const uint8_t *src, size_t count)
{
    size_t i;
    for (i = 0; i < count; i++) {
#ifdef HAVE_BIG_ENDIAN
        dst[i] = src[3 * i + 1] | (src[3 * i] << 8);
#else
        dst[i] = src[3 * i + 1] | (src[3 * i + 2] << 8);
#endif
    }
}
+
/* Expand packed native-endian 24-bit samples to Q0.31 (low 8 bits zero). */
void memcpy_to_i32_from_p24(int32_t *dst, const uint8_t *src, size_t count)
{
    while (count--) {
        /* Assemble as uint32_t before converting: the previous code left-shifted
         * a promoted int by 24, which is undefined behavior when the top byte
         * has bit 7 set (shifting into the sign bit). */
#ifdef HAVE_BIG_ENDIAN
        *dst++ = (int32_t)(((uint32_t)src[2] << 8) | ((uint32_t)src[1] << 16)
                | ((uint32_t)src[0] << 24));
#else
        *dst++ = (int32_t)(((uint32_t)src[0] << 8) | ((uint32_t)src[1] << 16)
                | ((uint32_t)src[2] << 24));
#endif
        src += 3;
    }
}
+
/* Expand Q0.15 samples to packed native-endian 24-bit: the 16-bit value
 * occupies the top two bytes and the low byte is zero-filled. */
void memcpy_to_p24_from_i16(uint8_t *dst, const int16_t *src, size_t count)
{
    size_t i;
    for (i = 0; i < count; i++) {
        const int16_t s = src[i];
#ifdef HAVE_BIG_ENDIAN
        dst[3 * i]     = s >> 8;
        dst[3 * i + 1] = s;
        dst[3 * i + 2] = 0;
#else
        dst[3 * i]     = 0;
        dst[3 * i + 1] = s;
        dst[3 * i + 2] = s >> 8;
#endif
    }
}
+
/* Convert float samples to packed native-endian 24-bit, clamping to the
 * Q0.23 range via clamp24_from_float(). */
void memcpy_to_p24_from_float(uint8_t *dst, const float *src, size_t count)
{
    while (count--) {
        int32_t ival = clamp24_from_float(*src++);

#ifdef HAVE_BIG_ENDIAN
        *dst++ = ival >> 16;
        *dst++ = ival >> 8;
        *dst++ = ival;
#else
        /* little-endian: least significant byte first */
        *dst++ = ival;
        *dst++ = ival >> 8;
        *dst++ = ival >> 16;
#endif
    }
}

/* Convert Q8.23 samples to packed native-endian 24-bit, clamping away the
 * 8 integer guard bits via clamp24_from_q8_23(). */
void memcpy_to_p24_from_q8_23(uint8_t *dst, const int32_t *src, size_t count)
{
    while (count--) {
        int32_t ival = clamp24_from_q8_23(*src++);

#ifdef HAVE_BIG_ENDIAN
        *dst++ = ival >> 16;
        *dst++ = ival >> 8;
        *dst++ = ival;
#else
        /* little-endian: least significant byte first */
        *dst++ = ival;
        *dst++ = ival >> 8;
        *dst++ = ival >> 16;
#endif
    }
}
+
/* Narrow Q0.31 samples to packed native-endian 24-bit by dropping the low
 * 8 bits of each sample. Truncates (no dither or rounding). */
void memcpy_to_p24_from_i32(uint8_t *dst, const int32_t *src, size_t count)
{
    size_t i;
    for (i = 0; i < count; i++) {
        const int32_t ival = src[i] >> 8;
#ifdef HAVE_BIG_ENDIAN
        dst[3 * i]     = ival >> 16;
        dst[3 * i + 1] = ival >> 8;
        dst[3 * i + 2] = ival;
#else
        dst[3 * i]     = ival;
        dst[3 * i + 1] = ival >> 8;
        dst[3 * i + 2] = ival >> 16;
#endif
    }
}
+
+/* Convert 16-bit PCM to Q8.23 fixed point by shifting the sample into the
+ * fractional field (exact, no clamping needed). */
+void memcpy_to_q8_23_from_i16(int32_t *dst, const int16_t *src, size_t count)
+{
+    for (size_t i = 0; i < count; ++i) {
+        dst[i] = (int32_t)src[i] << 8;
+    }
+}
+
+/* Convert float samples to Q8.23 with saturation; per-sample conversion is
+ * done by clamp24_from_float() (defined elsewhere in this file). */
+void memcpy_to_q8_23_from_float_with_clamp(int32_t *dst, const float *src, size_t count)
+{
+    for (size_t i = 0; i < count; ++i) {
+        dst[i] = clamp24_from_float(src[i]);
+    }
+}
+
+/* Unpack packed 24-bit PCM into Q8.23 (value sign-extended into the top
+ * byte of the int32). */
+void memcpy_to_q8_23_from_p24(int32_t *dst, const uint8_t *src, size_t count)
+{
+    while (count--) {
+        /* The (int8_t) cast sign-extends the most significant packed byte.
+         * NOTE(review): left-shifting a negative int8_t by 16 is formally
+         * undefined in C; it behaves as intended on the compilers this
+         * targets — confirm before enabling stricter UB sanitizers. */
+#ifdef HAVE_BIG_ENDIAN
+        *dst++ = (int8_t)src[0] << 16 | src[1] << 8 | src[2];
+#else
+        *dst++ = (int8_t)src[2] << 16 | src[1] << 8 | src[0];
+#endif
+        src += 3;
+    }
+}
+
+/* Convert float samples to Q4.27 with saturation; per-sample conversion is
+ * done by clampq4_27_from_float() (defined elsewhere in this file). */
+void memcpy_to_q4_27_from_float(int32_t *dst, const float *src, size_t count)
+{
+    for (size_t i = 0; i < count; ++i) {
+        dst[i] = clampq4_27_from_float(src[i]);
+    }
+}
+
+/* Convert Q8.23 samples to 16-bit PCM: drop the 8 fractional guard bits,
+ * then saturate via clamp16() (defined elsewhere in this file). */
+void memcpy_to_i16_from_q8_23(int16_t *dst, const int32_t *src, size_t count)
+{
+    for (size_t i = 0; i < count; ++i) {
+        dst[i] = clamp16(src[i] >> 8);
+    }
+}
+
+/* Convert Q8.23 samples to float via float_from_q8_23() (defined elsewhere
+ * in this file). */
+void memcpy_to_float_from_q8_23(float *dst, const int32_t *src, size_t count)
+{
+    for (size_t i = 0; i < count; ++i) {
+        dst[i] = float_from_q8_23(src[i]);
+    }
+}
+
+/* Expand 16-bit PCM to left-justified 32-bit PCM (low 16 bits zero). */
+void memcpy_to_i32_from_i16(int32_t *dst, const int16_t *src, size_t count)
+{
+    for (size_t i = 0; i < count; ++i) {
+        dst[i] = (int32_t)src[i] << 16;
+    }
+}
+
+/* Convert float samples to 32-bit PCM with saturation via
+ * clamp32_from_float() (defined elsewhere in this file). */
+void memcpy_to_i32_from_float(int32_t *dst, const float *src, size_t count)
+{
+    for (size_t i = 0; i < count; ++i) {
+        dst[i] = clamp32_from_float(src[i]);
+    }
+}
+
+/* Convert 32-bit PCM samples to float via float_from_i32() (defined
+ * elsewhere in this file). */
+void memcpy_to_float_from_i32(float *dst, const int32_t *src, size_t count)
+{
+    for (size_t i = 0; i < count; ++i) {
+        dst[i] = float_from_i32(src[i]);
+    }
+}
+
+/* Mix interleaved stereo 16-bit frames down to mono by averaging each
+ * left/right pair; the 32-bit intermediate prevents overflow, and the
+ * arithmetic right shift rounds toward negative infinity. */
+void downmix_to_mono_i16_from_stereo_i16(int16_t *dst, const int16_t *src, size_t count)
+{
+    for (size_t i = 0; i < count; ++i) {
+        const int32_t left = src[2 * i];
+        const int32_t right = src[2 * i + 1];
+        dst[i] = (int16_t)((left + right) >> 1);
+    }
+}
+
+/* Duplicate each mono 16-bit sample into an interleaved stereo pair. */
+void upmix_to_stereo_i16_from_mono_i16(int16_t *dst, const int16_t *src, size_t count)
+{
+    for (size_t i = 0; i < count; ++i) {
+        const int16_t sample = src[i];
+        dst[2 * i] = sample;
+        dst[2 * i + 1] = sample;
+    }
+}
+
+/* Mix interleaved stereo float frames down to mono by averaging each pair.
+ * BUGFIX: the constant was the double literal 0.5, which promoted every
+ * frame to double and back to float; 0.5f keeps the math in single
+ * precision (the average of two floats times 0.5f is exact where the
+ * double path was, so results are unchanged). */
+void downmix_to_mono_float_from_stereo_float(float *dst, const float *src, size_t frames)
+{
+    while (frames--) {
+        *dst++ = (src[0] + src[1]) * 0.5f;
+        src += 2;
+    }
+}
+
+/* Duplicate each mono float sample into an interleaved stereo pair. */
+void upmix_to_stereo_float_from_mono_float(float *dst, const float *src, size_t frames)
+{
+    for (size_t i = 0; i < frames; ++i) {
+        const float sample = src[i];
+        dst[2 * i] = sample;
+        dst[2 * i + 1] = sample;
+    }
+}
+
+/* Return how many of the first count 32-bit mono samples are non-zero. */
+size_t nonZeroMono32(const int32_t *samples, size_t count)
+{
+    size_t n = 0;
+    for (size_t i = 0; i < count; ++i) {
+        if (samples[i] != 0) {
+            ++n;
+        }
+    }
+    return n;
+}
+
+/* Return how many of the first count 16-bit mono samples are non-zero. */
+size_t nonZeroMono16(const int16_t *samples, size_t count)
+{
+    size_t n = 0;
+    for (size_t i = 0; i < count; ++i) {
+        if (samples[i] != 0) {
+            ++n;
+        }
+    }
+    return n;
+}
+
+/* Return how many of the first count interleaved 32-bit stereo frames have
+ * at least one non-zero channel. */
+size_t nonZeroStereo32(const int32_t *frames, size_t count)
+{
+    size_t n = 0;
+    for (size_t i = 0; i < count; ++i) {
+        if (frames[2 * i] != 0 || frames[2 * i + 1] != 0) {
+            ++n;
+        }
+    }
+    return n;
+}
+
+/* Return how many of the first count interleaved 16-bit stereo frames have
+ * at least one non-zero channel. */
+size_t nonZeroStereo16(const int16_t *frames, size_t count)
+{
+    size_t n = 0;
+    for (size_t i = 0; i < count; ++i) {
+        if (frames[2 * i] != 0 || frames[2 * i + 1] != 0) {
+            ++n;
+        }
+    }
+    return n;
+}
+
+/*
+ * C macro to do channel mask copying independent of dst/src sample type.
+ * Don't pass in any expressions for the macro arguments here.
+ *
+ * For each of (count) frames, walks the union of the two channel masks from
+ * lowest bit to highest: a channel present in both masks is copied, a
+ * channel present only in (dmask) is written as (zero), and a channel
+ * present only in (smask) is consumed from (src) and discarded.
+ * All of count, dst and src are modified in place.
+ */
+#define copy_frame_by_mask(dst, dmask, src, smask, count, zero) \
+{ \
+    uint32_t bit, ormask; \
+    while ((count)--) { \
+        ormask = (dmask) | (smask); \
+        while (ormask) { \
+            bit = ormask & -ormask; /* get lowest bit */ \
+            ormask ^= bit; /* remove lowest bit */ \
+            if ((dmask) & bit) { \
+                *(dst)++ = (smask) & bit ? *(src)++ : (zero); \
+            } else { /* source channel only */ \
+                ++(src); \
+            } \
+        } \
+    } \
+}
+
+/* Copy count frames while remapping channels according to the dst/src
+ * channel masks: channels present in both masks are copied, dst-only
+ * channels are zero-filled, src-only channels are skipped.
+ * Identical masks degenerate to a single memcpy.  sample_size must be
+ * 1, 2, 3 or 4 bytes; any other value abort()s. */
+void memcpy_by_channel_mask(void *dst, uint32_t dst_mask,
+        const void *src, uint32_t src_mask, size_t sample_size, size_t count)
+{
+#if 0
+    /* alternate way of handling memcpy_by_channel_mask by using the idxary */
+    int8_t idxary[32];
+    uint32_t src_channels = popcount(src_mask);
+    uint32_t dst_channels =
+            memcpy_by_index_array_initialization(idxary, 32, dst_mask, src_mask);
+
+    memcpy_by_idxary(dst, dst_channels, src, src_channels, idxary, sample_size, count);
+#else
+    if (dst_mask == src_mask) {
+        /* fast path: no remapping needed */
+        memcpy(dst, src, sample_size * popcount(dst_mask) * count);
+        return;
+    }
+    switch (sample_size) {
+    case 1: {
+        uint8_t *udst = (uint8_t*)dst;
+        const uint8_t *usrc = (const uint8_t*)src;
+
+        copy_frame_by_mask(udst, dst_mask, usrc, src_mask, count, 0);
+    } break;
+    case 2: {
+        uint16_t *udst = (uint16_t*)dst;
+        const uint16_t *usrc = (const uint16_t*)src;
+
+        copy_frame_by_mask(udst, dst_mask, usrc, src_mask, count, 0);
+    } break;
+    case 3: { /* could be slow.  use a struct to represent 3 bytes of data. */
+        uint8x3_t *udst = (uint8x3_t*)dst;
+        const uint8x3_t *usrc = (const uint8x3_t*)src;
+        static const uint8x3_t zero; /* tricky - we use this to zero out a sample */
+
+        copy_frame_by_mask(udst, dst_mask, usrc, src_mask, count, zero);
+    } break;
+    case 4: {
+        uint32_t *udst = (uint32_t*)dst;
+        const uint32_t *usrc = (const uint32_t*)src;
+
+        copy_frame_by_mask(udst, dst_mask, usrc, src_mask, count, 0);
+    } break;
+    default:
+        abort(); /* illegal value */
+        break;
+    }
+#endif
+}
+
+/*
+ * C macro to do copying by index array, to rearrange samples
+ * within a frame.  This is independent of src/dst sample type.
+ * Don't pass in any expressions for the macro arguments here.
+ *
+ * For each of (count) frames, writes (dst_channels) output samples: output
+ * channel i is taken from (src)[(idxary)[i]], or (zero) if the index is
+ * negative.  (src) advances by a full frame of (src_channels) each pass,
+ * so duplicate indices are allowed.  count, dst and src are modified.
+ */
+#define copy_frame_by_idx(dst, dst_channels, src, src_channels, idxary, count, zero) \
+{ \
+    unsigned i; \
+    int index; \
+    while ((count)--) { \
+        for (i = 0; i < (dst_channels); ++i) { \
+            index = (idxary)[i]; \
+            *(dst)++ = index < 0 ? (zero) : (src)[index]; \
+        } \
+        (src) += (src_channels); \
+    } \
+}
+
+/* Copy count frames, rearranging channels within each frame via idxary
+ * (see copy_frame_by_idx): dst channel i comes from src channel idxary[i],
+ * or is zero-filled when idxary[i] is negative.  sample_size must be
+ * 1, 2, 3 or 4 bytes; any other value abort()s. */
+void memcpy_by_index_array(void *dst, uint32_t dst_channels,
+        const void *src, uint32_t src_channels,
+        const int8_t *idxary, size_t sample_size, size_t count)
+{
+    switch (sample_size) {
+    case 1: {
+        uint8_t *udst = (uint8_t*)dst;
+        const uint8_t *usrc = (const uint8_t*)src;
+
+        copy_frame_by_idx(udst, dst_channels, usrc, src_channels, idxary, count, 0);
+    } break;
+    case 2: {
+        uint16_t *udst = (uint16_t*)dst;
+        const uint16_t *usrc = (const uint16_t*)src;
+
+        copy_frame_by_idx(udst, dst_channels, usrc, src_channels, idxary, count, 0);
+    } break;
+    case 3: { /* could be slow.  use a struct to represent 3 bytes of data. */
+        uint8x3_t *udst = (uint8x3_t*)dst;
+        const uint8x3_t *usrc = (const uint8x3_t*)src;
+        static const uint8x3_t zero;
+
+        copy_frame_by_idx(udst, dst_channels, usrc, src_channels, idxary, count, zero);
+    } break;
+    case 4: {
+        uint32_t *udst = (uint32_t*)dst;
+        const uint32_t *usrc = (const uint32_t*)src;
+
+        copy_frame_by_idx(udst, dst_channels, usrc, src_channels, idxary, count, 0);
+    } break;
+    default:
+        abort(); /* illegal value */
+        break;
+    }
+}
+
+/* Build the idxary used by memcpy_by_index_array() from a pair of channel
+ * masks.  Walks the union of the masks from lowest bit to highest: a
+ * channel in both masks gets the next source index, a dst-only channel
+ * gets -1 (zero fill), and a src-only channel just advances the source
+ * index.  Returns the total number of dst channels — including any that
+ * did not fit in idxcount entries (hence the popcount of the remainder). */
+size_t memcpy_by_index_array_initialization(int8_t *idxary, size_t idxcount,
+        uint32_t dst_mask, uint32_t src_mask)
+{
+    size_t n = 0;
+    int srcidx = 0;
+    uint32_t bit, ormask = src_mask | dst_mask;
+
+    while (ormask && n < idxcount) {
+        bit = ormask & -ormask;          /* get lowest bit */
+        ormask ^= bit;                   /* remove lowest bit */
+        if (src_mask & dst_mask & bit) { /* matching channel */
+            idxary[n++] = srcidx++;
+        } else if (src_mask & bit) {     /* source channel only */
+            ++srcidx;
+        } else {                         /* destination channel only */
+            idxary[n++] = -1;
+        }
+    }
+    return n + popcount(ormask & dst_mask);
+}
+
+/* Build an idxary that pairs dst channels with src channels positionally
+ * by bit index: dst channel i reads src channel i when bit i of src_mask
+ * is set, otherwise it is zero-filled (-1).  With idxcount == 0 this is a
+ * pure query that returns the dst channel count. */
+size_t memcpy_by_index_array_initialization_src_index(int8_t *idxary, size_t idxcount,
+        uint32_t dst_mask, uint32_t src_mask) {
+    size_t dst_count = popcount(dst_mask);
+    if (idxcount == 0) {
+        return dst_count;
+    }
+    if (dst_count > idxcount) {
+        dst_count = idxcount; /* never write past the caller's array */
+    }
+
+    size_t next_src = 0;
+    for (size_t i = 0; i < dst_count; ++i) {
+        idxary[i] = (src_mask & 1) ? (int8_t)next_src++ : -1;
+        src_mask >>= 1;
+    }
+    return dst_count;
+}
+
+/* Build an idxary keyed by dst channel position: the i-th set bit of
+ * dst_mask (scanning low to high) reads the src channel at the same bit
+ * position when that many src channels exist, otherwise -1 (zero fill).
+ * With idxcount == 0 this is a pure query returning the dst channel count. */
+size_t memcpy_by_index_array_initialization_dst_index(int8_t *idxary, size_t idxcount,
+        uint32_t dst_mask, uint32_t src_mask) {
+    size_t dst_count = __builtin_popcount(dst_mask);
+    const size_t src_count = __builtin_popcount(src_mask);
+    if (idxcount == 0) {
+        return dst_count;
+    }
+    if (dst_count > idxcount) {
+        dst_count = idxcount; /* never write past the caller's array */
+    }
+
+    size_t filled = 0;
+    for (size_t bitpos = 0; filled < dst_count; ++bitpos) {
+        if (dst_mask & 1) {
+            idxary[filled++] = bitpos < src_count ? (int8_t)bitpos : -1;
+        }
+        dst_mask >>= 1;
+    }
+    return filled;
+}
diff --git a/media/audio_utils/private/private.h b/media/audio_utils/private/private.h
new file mode 100644
index 0000000..d10d1ea
--- /dev/null
+++ b/media/audio_utils/private/private.h
@@ -0,0 +1,35 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_AUDIO_PRIVATE_H
+#define ANDROID_AUDIO_PRIVATE_H
+
+#include <stdint.h>
+
+__BEGIN_DECLS
+
+/* Defines not necessary for external use but kept here to be common
+ * to the audio_utils library.
+ */
+
+/* struct representation of 3 bytes for packed PCM 24 bit data.
+ * The naming follows the ARM NEON convention.
+ */
+typedef struct {uint8_t c[3];} __attribute__((__packed__)) uint8x3_t;
+
+__END_DECLS
+
+#endif /*ANDROID_AUDIO_PRIVATE_H*/
diff --git a/media/audio_utils/resampler.c b/media/audio_utils/resampler.c
new file mode 100644
index 0000000..7282aa9
--- /dev/null
+++ b/media/audio_utils/resampler.c
@@ -0,0 +1,264 @@
+/*
+** Copyright 2011, The Android Open-Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "resampler"
+
+#include <errno.h>
+#include <stdlib.h>
+#include <cutils/log.h>
+#include <system/audio.h>
+#include <audio_utils/resampler.h>
+#include <speex/speex_resampler.h>
+
+
+struct resampler {
+    struct resampler_itfe itfe;
+    SpeexResamplerState *speex_resampler;       // handle on speex resampler
+    struct resampler_buffer_provider *provider; // buffer provider installed by client
+    uint32_t in_sample_rate;                    // input sampling rate in Hz
+    uint32_t out_sample_rate;                   // output sampling rate in Hz
+    uint32_t channel_count;                     // number of channels (interleaved)
+    int16_t *in_buf;                            // input buffer
+    size_t in_buf_size;                         // input buffer size
+    size_t frames_in;                           // number of frames in input buffer
+    size_t frames_rq;                           // cached number of output frames
+    size_t frames_needed;                       // minimum number of input frames to produce
+                                                // frames_rq output frames
+    int32_t speex_delay_ns;                     // delay introduced by speex resampler in ns
+};
+
+
+//------------------------------------------------------------------------------
+// speex based resampler
+//------------------------------------------------------------------------------
+
+/* Reset the resampler: discard buffered input frames, forget the cached
+ * request size, and clear speex's internal filter memory.
+ * BUGFIX: the old code cleared rsmp->frames_in / frames_rq through rsmp
+ * and only afterwards tested rsmp for NULL — guard before any access. */
+static void resampler_reset(struct resampler_itfe *resampler)
+{
+    struct resampler *rsmp = (struct resampler *)resampler;
+
+    if (rsmp == NULL) {
+        return;
+    }
+
+    rsmp->frames_in = 0;
+    rsmp->frames_rq = 0;
+
+    if (rsmp->speex_resampler != NULL) {
+        speex_resampler_reset_mem(rsmp->speex_resampler);
+    }
+}
+
+/* Current resampler latency in nanoseconds: the frames buffered in in_buf
+ * expressed at the input rate, plus the fixed speex input/output latency
+ * computed once in create_resampler().
+ * NOTE(review): unlike resampler_resample_*, there is no NULL check here;
+ * callers must pass a valid interface pointer. */
+static int32_t resampler_delay_ns(struct resampler_itfe *resampler)
+{
+    struct resampler *rsmp = (struct resampler *)resampler;
+
+    int32_t delay = (int32_t)((1000000000 * (int64_t)rsmp->frames_in) / rsmp->in_sample_rate);
+    delay += rsmp->speex_delay_ns;
+
+    return delay;
+}
+
+// outputs a number of frames less or equal to *outFrameCount and updates *outFrameCount
+// with the actual number of frames produced.
+int resampler_resample_from_provider(struct resampler_itfe *resampler,
+                       int16_t *out,
+                       size_t *outFrameCount)
+{
+    struct resampler *rsmp = (struct resampler *)resampler;
+
+    if (rsmp == NULL || out == NULL || outFrameCount == NULL) {
+        return -EINVAL;
+    }
+    if (rsmp->provider == NULL) {
+        *outFrameCount = 0;
+        return -ENOSYS;
+    }
+
+    size_t framesRq = *outFrameCount;
+    // update and cache the number of frames needed at the input sampling rate to produce
+    // the number of frames requested at the output sampling rate
+    if (framesRq != rsmp->frames_rq) {
+        rsmp->frames_needed = (framesRq * rsmp->in_sample_rate) / rsmp->out_sample_rate + 1;
+        rsmp->frames_rq = framesRq;
+    }
+
+    size_t framesWr = 0;
+    spx_uint32_t inFrames = 0;
+    while (framesWr < framesRq) {
+        if (rsmp->frames_in < rsmp->frames_needed) {
+            // make sure that the number of frames present in rsmp->in_buf (rsmp->frames_in) is at
+            // least the number of frames needed to produce the number of frames requested at
+            // the output sampling rate
+            if (rsmp->in_buf_size < rsmp->frames_needed) {
+                // BUGFIX: the old code assigned realloc() straight back to
+                // rsmp->in_buf, leaking the old buffer on failure and then
+                // dereferencing NULL in the memcpy below.
+                int16_t *new_buf = (int16_t *)realloc(rsmp->in_buf,
+                        rsmp->frames_needed * rsmp->channel_count * sizeof(int16_t));
+                if (new_buf == NULL) {
+                    break; // deliver the frames already produced
+                }
+                rsmp->in_buf = new_buf;
+                rsmp->in_buf_size = rsmp->frames_needed;
+            }
+            struct resampler_buffer buf;
+            buf.frame_count = rsmp->frames_needed - rsmp->frames_in;
+            rsmp->provider->get_next_buffer(rsmp->provider, &buf);
+            if (buf.raw == NULL) {
+                break;
+            }
+            memcpy(rsmp->in_buf + rsmp->frames_in * rsmp->channel_count,
+                    buf.raw,
+                    buf.frame_count * rsmp->channel_count * sizeof(int16_t));
+            rsmp->frames_in += buf.frame_count;
+            rsmp->provider->release_buffer(rsmp->provider, &buf);
+        }
+
+        spx_uint32_t outFrames = framesRq - framesWr;
+        inFrames = rsmp->frames_in;
+        if (rsmp->channel_count == 1) {
+            speex_resampler_process_int(rsmp->speex_resampler,
+                                        0,
+                                        rsmp->in_buf,
+                                        &inFrames,
+                                        out + framesWr,
+                                        &outFrames);
+        } else {
+            speex_resampler_process_interleaved_int(rsmp->speex_resampler,
+                                        rsmp->in_buf,
+                                        &inFrames,
+                                        out + framesWr * rsmp->channel_count,
+                                        &outFrames);
+        }
+        framesWr += outFrames;
+        rsmp->frames_in -= inFrames;
+        ALOGW_IF((framesWr != framesRq) && (rsmp->frames_in != 0),
+                "ReSampler::resample() remaining %zu frames in and %zu frames out",
+                rsmp->frames_in, (framesRq - framesWr));
+    }
+    // keep any unconsumed input at the front of in_buf for the next call
+    if (rsmp->frames_in) {
+        memmove(rsmp->in_buf,
+                rsmp->in_buf + inFrames * rsmp->channel_count,
+                rsmp->frames_in * rsmp->channel_count * sizeof(int16_t));
+    }
+    *outFrameCount = framesWr;
+
+    return 0;
+}
+
+/* Resample directly from a caller-supplied buffer (only valid when the
+ * resampler was created without a buffer provider).  On return,
+ * *inFrameCount holds the frames consumed and *outFrameCount the frames
+ * produced. */
+int resampler_resample_from_input(struct resampler_itfe *resampler,
+                                  int16_t *in,
+                                  size_t *inFrameCount,
+                                  int16_t *out,
+                                  size_t *outFrameCount)
+{
+    struct resampler *rsmp = (struct resampler *)resampler;
+
+    if (rsmp == NULL || in == NULL || inFrameCount == NULL ||
+            out == NULL || outFrameCount == NULL) {
+        return -EINVAL;
+    }
+    if (rsmp->provider != NULL) {
+        *outFrameCount = 0;
+        return -ENOSYS;
+    }
+
+    // BUGFIX: speex takes spx_uint32_t (32-bit) counts. Casting size_t* to
+    // spx_uint32_t* breaks on LP64 targets where size_t is 64-bit: speex
+    // would read a wrong count and write back only half the word. Use real
+    // 32-bit temporaries and copy the results back.
+    spx_uint32_t inFrames = (spx_uint32_t)*inFrameCount;
+    spx_uint32_t outFrames = (spx_uint32_t)*outFrameCount;
+
+    if (rsmp->channel_count == 1) {
+        speex_resampler_process_int(rsmp->speex_resampler,
+                                    0,
+                                    in,
+                                    &inFrames,
+                                    out,
+                                    &outFrames);
+    } else {
+        speex_resampler_process_interleaved_int(rsmp->speex_resampler,
+                                                in,
+                                                &inFrames,
+                                                out,
+                                                &outFrames);
+    }
+    *inFrameCount = inFrames;
+    *outFrameCount = outFrames;
+
+    ALOGV("resampler_resample_from_input() DONE in %zu out %zu", *inFrameCount, *outFrameCount);
+
+    return 0;
+}
+
+/* Allocate and initialize a resampler instance wrapping a speex resampler.
+ * provider may be NULL (then only resample_from_input() works).  quality
+ * must lie strictly inside (RESAMPLER_QUALITY_MIN, RESAMPLER_QUALITY_MAX).
+ * Returns 0 and sets *resampler on success; -EINVAL on bad arguments,
+ * -ENOMEM on allocation failure, -ENODEV if speex init fails. */
+int create_resampler(uint32_t inSampleRate,
+                    uint32_t outSampleRate,
+                    uint32_t channelCount,
+                    uint32_t quality,
+                    struct resampler_buffer_provider* provider,
+                    struct resampler_itfe **resampler)
+{
+    int error;
+    struct resampler *rsmp;
+
+    ALOGV("create_resampler() In SR %d Out SR %d channels %d",
+         inSampleRate, outSampleRate, channelCount);
+
+    if (resampler == NULL) {
+        return -EINVAL;
+    }
+
+    *resampler = NULL;
+
+    if (quality <= RESAMPLER_QUALITY_MIN || quality >= RESAMPLER_QUALITY_MAX) {
+        return -EINVAL;
+    }
+
+    rsmp = (struct resampler *)calloc(1, sizeof(struct resampler));
+    // BUGFIX: the old code dereferenced the calloc() result unchecked.
+    if (rsmp == NULL) {
+        return -ENOMEM;
+    }
+
+    rsmp->speex_resampler = speex_resampler_init(channelCount,
+                                      inSampleRate,
+                                      outSampleRate,
+                                      quality,
+                                      &error);
+    if (rsmp->speex_resampler == NULL) {
+        ALOGW("ReSampler: Cannot create speex resampler: %s", speex_resampler_strerror(error));
+        free(rsmp);
+        return -ENODEV;
+    }
+
+    rsmp->itfe.reset = resampler_reset;
+    rsmp->itfe.resample_from_provider = resampler_resample_from_provider;
+    rsmp->itfe.resample_from_input = resampler_resample_from_input;
+    rsmp->itfe.delay_ns = resampler_delay_ns;
+
+    rsmp->provider = provider;
+    rsmp->in_sample_rate = inSampleRate;
+    rsmp->out_sample_rate = outSampleRate;
+    rsmp->channel_count = channelCount;
+    rsmp->in_buf = NULL;
+    rsmp->in_buf_size = 0;
+
+    resampler_reset(&rsmp->itfe);
+
+    // Pre-compute the fixed speex latency, converted to nanoseconds at the
+    // respective sample rates, so delay_ns() is cheap.
+    int frames = speex_resampler_get_input_latency(rsmp->speex_resampler);
+    rsmp->speex_delay_ns = (int32_t)((1000000000 * (int64_t)frames) / rsmp->in_sample_rate);
+    frames = speex_resampler_get_output_latency(rsmp->speex_resampler);
+    rsmp->speex_delay_ns += (int32_t)((1000000000 * (int64_t)frames) / rsmp->out_sample_rate);
+
+    *resampler = &rsmp->itfe;
+    ALOGV("create_resampler() DONE rsmp %p &rsmp->itfe %p speex %p",
+         rsmp, &rsmp->itfe, rsmp->speex_resampler);
+    return 0;
+}
+
+/* Destroy a resampler created by create_resampler().  NULL is a no-op.
+ * Frees the internal input buffer, the speex state, and the instance. */
+void release_resampler(struct resampler_itfe *resampler)
+{
+    struct resampler *rsmp = (struct resampler *)resampler;
+
+    if (rsmp == NULL) {
+        return;
+    }
+    if (rsmp->speex_resampler != NULL) {
+        speex_resampler_destroy(rsmp->speex_resampler);
+    }
+    free(rsmp->in_buf); /* free(NULL) is a no-op */
+    free(rsmp);
+}
diff --git a/media/audio_utils/roundup.c b/media/audio_utils/roundup.c
new file mode 100644
index 0000000..a2bc7b2
--- /dev/null
+++ b/media/audio_utils/roundup.c
@@ -0,0 +1,32 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <audio_utils/roundup.h>
+
+/* Round v up to the nearest power of two.  roundup(0) == 1.
+ * Values above 0x80000000 cannot round up within 32 bits, so they are
+ * rounded DOWN to 0x80000000 instead (see comment below). */
+unsigned roundup(unsigned v)
+{
+    // __builtin_clz is undefined for zero input
+    if (v == 0) {
+        v = 1;
+    }
+    int lz = __builtin_clz((int) v);
+    unsigned rounded = ((unsigned) 0x80000000) >> lz;
+    // 0x80000001 and higher are actually rounded _down_ to prevent overflow
+    if (v > rounded && lz > 0) {
+        rounded <<= 1;
+    }
+    return rounded;
+}
diff --git a/media/audio_utils/spdif/AC3FrameScanner.cpp b/media/audio_utils/spdif/AC3FrameScanner.cpp
new file mode 100644
index 0000000..3b94898
--- /dev/null
+++ b/media/audio_utils/spdif/AC3FrameScanner.cpp
@@ -0,0 +1,256 @@
+/*
+ * Copyright 2014, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "AudioSPDIF"
+
+#include <string.h>
+
+#include <utils/Log.h>
+#include <audio_utils/spdif/FrameScanner.h>
+
+#include "AC3FrameScanner.h"
+
+namespace android {
+
+// These values are from the AC3 spec. Do not change them.
+
+const uint8_t AC3FrameScanner::kSyncBytes[] = { 0x0B, 0x77 };
+
+const uint16_t AC3FrameScanner::kAC3SampleRateTable[AC3_NUM_SAMPLE_RATE_TABLE_ENTRIES]
+    = { 48000, 44100, 32000 };
+
+// Table contains number of 16-bit words in an AC3 frame.
+// From AC3 spec table 5.13
+const uint16_t AC3FrameScanner::kAC3FrameSizeTable[AC3_NUM_FRAME_SIZE_TABLE_ENTRIES]
+        [AC3_NUM_SAMPLE_RATE_TABLE_ENTRIES] = {
+    { 64, 69, 96 },
+    { 64, 70, 96 },
+    { 80, 87, 120 },
+    { 80, 88, 120 },
+    { 96, 104, 144 },
+    { 96, 105, 144 },
+    { 112, 121, 168 },
+    { 112, 122, 168 },
+    { 128, 139, 192 },
+    { 128, 140, 192 },
+    { 160, 174, 240 },
+    { 160, 175, 240 },
+    { 192, 208, 288 },
+    { 192, 209, 288 },
+    { 224, 243, 336 },
+    { 224, 244, 336 },
+    { 256, 278, 384 },
+    { 256, 279, 384 },
+    { 320, 348, 480 },
+    { 320, 349, 480 },
+    { 384, 417, 576 },
+    { 384, 418, 576 },
+    { 448, 487, 672 },
+    { 448, 488, 672 },
+    { 512, 557, 768 },
+    { 512, 558, 768 },
+    { 640, 696, 960 },
+    { 640, 697, 960 },
+    { 768, 835, 1152 },
+    { 768, 836, 1152 },
+    { 896, 975, 1344 },
+    { 896, 976, 1344 },
+    { 1024, 1114, 1536 },
+    { 1024, 1115, 1536 },
+    { 1152, 1253, 1728 },
+    { 1152, 1254, 1728 },
+    { 1280, 1393, 1920 },
+    { 1280, 1394, 1920 }
+};
+
+const uint16_t AC3FrameScanner::kEAC3ReducedSampleRateTable[AC3_NUM_SAMPLE_RATE_TABLE_ENTRIES]
+        = { 24000, 22050, 16000 };
+
+const uint16_t
+        AC3FrameScanner::kEAC3BlocksPerFrameTable[EAC3_NUM_BLOCKS_PER_FRAME_TABLE_ENTRIES]
+        = { 1, 2, 3, 6 };
+
+// Defined in IEC61937-2
+#define SPDIF_DATA_TYPE_AC3     1
+#define SPDIF_DATA_TYPE_E_AC3  21
+#define AC3_STREAM_TYPE_0       0
+#define AC3_STREAM_TYPE_1       1
+#define AC3_STREAM_TYPE_2       2
+// -----------------------------------------------------------------------------
+
+// Scanner for AC3 byte streams.
+AC3FrameScanner::AC3FrameScanner(audio_format_t format)
+ : FrameScanner(SPDIF_DATA_TYPE_AC3,
+        AC3FrameScanner::kSyncBytes,
+        sizeof(AC3FrameScanner::kSyncBytes), 6)
+ , mStreamType(0)
+ , mSubstreamID(0)
+ , mFormat(format)
+{
+    // An AC3 sync frame always carries 6 audio blocks; for EAC3,
+    // parseHeader() overwrites this from the numblkscod field.
+    mAudioBlocksPerSyncFrame = 6;
+    memset(mSubstreamBlockCounts, 0, sizeof(mSubstreamBlockCounts));
+}
+
+// Nothing to release: all state is plain data members.
+AC3FrameScanner::~AC3FrameScanner()
+{
+}
+
+// Number of PCM sample frames represented by one sync frame, scaled by the
+// current rate multiplier (EAC3 bursts are carried at 4x the sample rate).
+int AC3FrameScanner::getSampleFramesPerSyncFrame() const
+{
+    return mRateMultiplier
+            * AC3_MAX_BLOCKS_PER_SYNC_FRAME_BLOCK * AC3_PCM_FRAMES_PER_BLOCK;
+}
+
+// Called at the end of a data burst: deduct the 6 blocks just emitted from
+// each substream's counter, keeping any surplus blocks for the next burst.
+// A substream left with fewer than 6 blocks was short — warn and clear it.
+void AC3FrameScanner::resetBurst()
+{
+    for (int i = 0; i < EAC3_MAX_SUBSTREAMS; i++) {
+        if (mSubstreamBlockCounts[i] >= AC3_MAX_BLOCKS_PER_SYNC_FRAME_BLOCK) {
+            mSubstreamBlockCounts[i] -= AC3_MAX_BLOCKS_PER_SYNC_FRAME_BLOCK;
+        } else if (mSubstreamBlockCounts[i] > 0) {
+            ALOGW("EAC3 substream[%d] has only %d audio blocks!",
+                i, mSubstreamBlockCounts[i]);
+            mSubstreamBlockCounts[i] = 0;
+        }
+    }
+}
+
+// Per IEC 61937-3:5.3.3 ("61973" in the original comment was a typo), the
+// burst-info length code is expressed in bytes for E-AC3 but in bits for
+// plain AC3 — hence the * 8 in the non-E-AC3 case.
+uint16_t AC3FrameScanner::convertBytesToLengthCode(uint16_t numBytes) const
+{
+    return (mDataType == SPDIF_DATA_TYPE_E_AC3) ? numBytes : numBytes * 8;
+}
+
+// per IEC 61937-3 Paragraph 5.3.3 ("61973" was a typo in the original)
+// We have to send 6 audio blocks on all active substreams.
+// Substream zero must be the first.
+// We don't know if we have all the blocks we need until we see
+// the 7th block of substream#0.
+bool AC3FrameScanner::isFirstInBurst()
+{
+    // Only EAC3 spans multiple frames per burst; AC3 never starts a new one.
+    if (mDataType == SPDIF_DATA_TYPE_E_AC3) {
+        if (((mStreamType == AC3_STREAM_TYPE_0)
+                || (mStreamType == AC3_STREAM_TYPE_2))
+                && (mSubstreamID == 0)
+                // The ">" is intentional. We have to see the beginning
+                // of the block in the next burst before we can send
+                // the current burst.
+                && (mSubstreamBlockCounts[0] > AC3_MAX_BLOCKS_PER_SYNC_FRAME_BLOCK)) {
+            return true;
+        }
+    }
+    return false;
+}
+
+bool AC3FrameScanner::isLastInBurst()
+{
+    // For EAC3 we don't know if we are the end until we see a
+    // frame that must be at the beginning. See isFirstInBurst().
+    return (mDataType != SPDIF_DATA_TYPE_E_AC3); // Just one AC3 frame per burst.
+}
+
+// TODO Use BitFieldParser
+
+// Parse AC3 header.
+// Detect whether the stream is AC3 or EAC3. Extract data depending on type.
+// On success, updates mDataType, mSampleRate, mRateMultiplier,
+// mFrameSizeBytes, mAudioBlocksPerSyncFrame and (for EAC3) the per-substream
+// block counters used by isFirstInBurst()/resetBurst().
+//
+// @return true if valid
+bool AC3FrameScanner::parseHeader()
+{
+    // Interpret bsid based on paragraph E2.3.1.6 of EAC3 spec.
+    uint32_t bsid = mHeaderBuffer[5] >> 3; // bitstream ID
+    // Check BSID to see if this is EAC3 or regular AC3.
+    // These arbitrary BSID numbers do not have any names in the spec.
+    // NOTE(review): bsid/fscod/frmsizcod are unsigned; the %d conversions in
+    // the ALOG calls below should strictly be %u — confirm against the
+    // project's format-string checking before changing the messages.
+    if ((bsid > 10) && (bsid <= 16)) {
+        mDataType = SPDIF_DATA_TYPE_E_AC3;
+    } else if (bsid <= 8) {
+        mDataType = SPDIF_DATA_TYPE_AC3;
+    } else {
+        ALOGW("AC3 bsid = %d not supported", bsid);
+        return false;
+    }
+
+    // The names fscod, frmsiz are from the AC3 spec.
+    uint32_t fscod = mHeaderBuffer[4] >> 6;
+    if (mDataType == SPDIF_DATA_TYPE_E_AC3) {
+        mStreamType = mHeaderBuffer[2] >> 6; // strmtyp in spec
+        mSubstreamID = (mHeaderBuffer[2] >> 3) & 0x07;
+
+        // Frame size is explicit in EAC3. Paragraph E2.3.1.3
+        uint32_t frmsiz = ((mHeaderBuffer[2] & 0x07) << 8) + mHeaderBuffer[3];
+        mFrameSizeBytes = (frmsiz + 1) * sizeof(int16_t);
+
+        uint32_t numblkscod = 3; // 6 blocks default
+        // fscod == 3 selects the reduced (half) sample-rate table and
+        // implies 6 blocks per frame; otherwise numblkscod is explicit.
+        if (fscod == 3) {
+            uint32_t fscod2 = (mHeaderBuffer[4] >> 4) & 0x03;
+            if (fscod2 >= AC3_NUM_SAMPLE_RATE_TABLE_ENTRIES) {
+                ALOGW("Invalid EAC3 fscod2 = %d\n", fscod2);
+                return false;
+            } else {
+                mSampleRate = kEAC3ReducedSampleRateTable[fscod2];
+            }
+        } else {
+            mSampleRate = kAC3SampleRateTable[fscod];
+            numblkscod = (mHeaderBuffer[4] >> 4) & 0x03;
+        }
+        mRateMultiplier = EAC3_RATE_MULTIPLIER; // per IEC 61937-3 Paragraph 5.3.3
+        // Don't send data burst until we have 6 blocks per substream.
+        mAudioBlocksPerSyncFrame = kEAC3BlocksPerFrameTable[numblkscod];
+        // Keep track of how many audio blocks we have for each substream.
+        // This should be safe because mSubstreamID is ANDed with 0x07 above.
+        // And the array is allocated as [8].
+        if ((mStreamType == AC3_STREAM_TYPE_0)
+                || (mStreamType == AC3_STREAM_TYPE_2)) {
+            mSubstreamBlockCounts[mSubstreamID] += mAudioBlocksPerSyncFrame;
+        }
+
+        // Print enough so we can see all the substreams.
+        ALOGD_IF((mFormatDumpCount < 3*8 ),
+                "EAC3 mStreamType = %d, mSubstreamID = %d",
+                mStreamType, mSubstreamID);
+    } else { // regular AC3
+        // Extract sample rate and frame size from codes.
+        uint32_t frmsizcod = mHeaderBuffer[4] & 0x3F; // frame size code
+
+        if (fscod >= AC3_NUM_SAMPLE_RATE_TABLE_ENTRIES) {
+            ALOGW("Invalid AC3 sampleRateCode = %d\n", fscod);
+            return false;
+        } else if (frmsizcod >= AC3_NUM_FRAME_SIZE_TABLE_ENTRIES) {
+            ALOGW("Invalid AC3 frameSizeCode = %d\n", frmsizcod);
+            return false;
+        } else {
+            mSampleRate = kAC3SampleRateTable[fscod];
+            mRateMultiplier = 1;
+            mFrameSizeBytes = sizeof(uint16_t)
+                    * kAC3FrameSizeTable[frmsizcod][fscod];
+        }
+        mAudioBlocksPerSyncFrame = 6;
+        // An AC3 frame inside a stream opened as EAC3 is wrapped as an
+        // EAC3 independent substream so burst framing stays consistent.
+        if (mFormat == AUDIO_FORMAT_E_AC3) {
+            ALOGV("Its a Ac3 substream in EAC3 stream");
+            mStreamType = 2;
+            mSubstreamID = 0;
+            mSubstreamBlockCounts[0] += mAudioBlocksPerSyncFrame;
+            mDataType = SPDIF_DATA_TYPE_E_AC3;
+            mRateMultiplier = EAC3_RATE_MULTIPLIER;
+        }
+    }
+    ALOGI_IF((mFormatDumpCount == 0),
+            "AC3 frame rate = %d * %d, size = %zu, audioBlocksPerSyncFrame = %d\n",
+            mSampleRate, mRateMultiplier, mFrameSizeBytes, mAudioBlocksPerSyncFrame);
+    mFormatDumpCount++;
+    return true;
+}
+
+}  // namespace android
diff --git a/media/audio_utils/spdif/AC3FrameScanner.h b/media/audio_utils/spdif/AC3FrameScanner.h
new file mode 100644
index 0000000..9f3ea57
--- /dev/null
+++ b/media/audio_utils/spdif/AC3FrameScanner.h
@@ -0,0 +1,81 @@
+/*
+ * Copyright 2014, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_AUDIO_AC3_FRAME_SCANNER_H
+#define ANDROID_AUDIO_AC3_FRAME_SCANNER_H
+
+#include <stdint.h>
+#include <hardware/audio.h>
+#include <audio_utils/spdif/FrameScanner.h>
+
+namespace android {
+
+#define AC3_NUM_SAMPLE_RATE_TABLE_ENTRIES          3
+#define AC3_NUM_FRAME_SIZE_TABLE_ENTRIES          38
+#define AC3_PCM_FRAMES_PER_BLOCK                 256
+#define AC3_MAX_BLOCKS_PER_SYNC_FRAME_BLOCK        6
+#define EAC3_RATE_MULTIPLIER                       4
+#define EAC3_NUM_SAMPLE_RATE_TABLE_ENTRIES         3
+#define EAC3_NUM_BLOCKS_PER_FRAME_TABLE_ENTRIES   38
+#define EAC3_MAX_SUBSTREAMS                        8
+
+// Scanner that recognizes AC3 and EAC3 (Dolby Digital / Digital Plus) sync
+// frames in a raw byte stream so they can be wrapped in IEC61937 data bursts.
+class AC3FrameScanner : public FrameScanner
+{
+public:
+    explicit AC3FrameScanner(audio_format_t format);
+    virtual ~AC3FrameScanner();
+
+    virtual int getMaxChannels()   const { return 5 + 1; } // 5.1 surround
+
+    // Worst case burst: EAC3 at 4X rate, 6 blocks of 256 PCM frames each.
+    virtual int getMaxSampleFramesPerSyncFrame() const { return EAC3_RATE_MULTIPLIER
+            * AC3_MAX_BLOCKS_PER_SYNC_FRAME_BLOCK * AC3_PCM_FRAMES_PER_BLOCK; }
+    virtual int getSampleFramesPerSyncFrame() const;
+
+    virtual bool isFirstInBurst();
+    virtual bool isLastInBurst();
+    virtual void resetBurst();
+
+    virtual uint16_t convertBytesToLengthCode(uint16_t numBytes) const;
+
+protected:
+    // Keep track of how many of each substream blocks have been accumulated.
+    // We need all of each substream before sending block data burst.
+    uint8_t   mSubstreamBlockCounts[EAC3_MAX_SUBSTREAMS];
+    int       mAudioBlocksPerSyncFrame;
+    // The type of EAC3 stream as per EAC3 spec paragraph 2.3.1.1
+    uint32_t  mStreamType;
+    // substream index
+    uint32_t  mSubstreamID;
+    // format passed at construction (AUDIO_FORMAT_AC3 or AUDIO_FORMAT_E_AC3)
+    audio_format_t mFormat;
+
+    // used to recognize the start of an AC3 sync frame
+    static const uint8_t  kSyncBytes[];
+    // sample rates from AC3 spec table 5.1
+    static const uint16_t kAC3SampleRateTable[AC3_NUM_SAMPLE_RATE_TABLE_ENTRIES];
+    // frame sizes from AC3 spec table 5.13
+    static const uint16_t kAC3FrameSizeTable[AC3_NUM_FRAME_SIZE_TABLE_ENTRIES]
+            [AC3_NUM_SAMPLE_RATE_TABLE_ENTRIES];
+    // sample rates from EAC3 spec table E2.3
+    static const uint16_t kEAC3ReducedSampleRateTable[AC3_NUM_SAMPLE_RATE_TABLE_ENTRIES];
+    // audio blocks per frame from EAC3 spec table E2.4
+    static const uint16_t kEAC3BlocksPerFrameTable[EAC3_NUM_BLOCKS_PER_FRAME_TABLE_ENTRIES];
+
+    virtual bool parseHeader();
+};
+
+}  // namespace android
+
+#endif  // ANDROID_AUDIO_AC3_FRAME_SCANNER_H
diff --git a/media/audio_utils/spdif/Android.mk b/media/audio_utils/spdif/Android.mk
new file mode 100644
index 0000000..39c2fa2
--- /dev/null
+++ b/media/audio_utils/spdif/Android.mk
@@ -0,0 +1,21 @@
+# Build libaudiospdif: wraps compressed audio (AC3/EAC3/DTS) frames in
+# IEC61937 data bursts for S/PDIF output.
+LOCAL_PATH:= $(call my-dir)
+
+include $(CLEAR_VARS)
+
+LOCAL_MODULE := libaudiospdif
+LOCAL_MODULE_TAGS := optional
+
+LOCAL_SRC_FILES:= \
+	BitFieldParser.cpp \
+	FrameScanner.cpp \
+	AC3FrameScanner.cpp \
+	DTSFrameScanner.cpp \
+	SPDIFEncoder.cpp
+
+LOCAL_C_INCLUDES += $(call include-path-for, audio-utils)
+
+LOCAL_SHARED_LIBRARIES := \
+	libcutils \
+	liblog
+
+include $(BUILD_SHARED_LIBRARY)
diff --git a/media/audio_utils/spdif/BitFieldParser.cpp b/media/audio_utils/spdif/BitFieldParser.cpp
new file mode 100644
index 0000000..8f1c11e
--- /dev/null
+++ b/media/audio_utils/spdif/BitFieldParser.cpp
@@ -0,0 +1,61 @@
+/*
+ * Copyright 2015, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "AudioSPDIF"
+//#define LOG_NDEBUG 0
+
+#include <string.h>
+#include <assert.h>
+
+#include <utils/Log.h>
+#include "BitFieldParser.h"
+
+namespace android {
+
+// Wrap a byte array for MSB-first bit extraction. The caller retains
+// ownership of 'data' and must keep it alive while this parser is in use.
+BitFieldParser::BitFieldParser(uint8_t *data)
+ : mData(data)
+ , mBitCursor(0)
+{
+}
+
+BitFieldParser::~BitFieldParser()
+{
+}
+
+// Extract the next numBits from the data (big-endian bit order) and
+// advance the cursor. The caller must ensure enough data remains.
+uint32_t BitFieldParser::readBits(uint32_t numBits)
+{
+    ALOG_ASSERT(numBits <= 32);
+
+    uint32_t result = 0;
+    while (numBits > 0) {
+        // Locate the byte under the cursor and count its remaining bits.
+        uint32_t byteIndex = mBitCursor >> 3; // 8 bits per byte
+        uint32_t bitsAvailable = 8 - (mBitCursor & 7);
+        uint32_t bitsToTake = (bitsAvailable < numBits) ? bitsAvailable : numBits;
+        // Shift the wanted field down to the low bits and mask it off.
+        uint32_t field = mData[byteIndex] >> (bitsAvailable - bitsToTake);
+        field &= (1 << bitsToTake) - 1;
+        // Append the field at the low end of the accumulated result.
+        result = (result << bitsToTake) | field;
+        mBitCursor += bitsToTake;
+        numBits -= bitsToTake;
+    }
+    return result;
+}
+
+}  // namespace android
diff --git a/media/audio_utils/spdif/BitFieldParser.h b/media/audio_utils/spdif/BitFieldParser.h
new file mode 100644
index 0000000..3f6fe59
--- /dev/null
+++ b/media/audio_utils/spdif/BitFieldParser.h
@@ -0,0 +1,57 @@
+/*
+ * Copyright 2015, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_AUDIO_BIT_FIELD_PARSER_H
+#define ANDROID_AUDIO_BIT_FIELD_PARSER_H
+
+#include <stdint.h>
+
+namespace android {
+
+/**
+ * Extract bit fields from a byte array.
+ */
+/**
+ * Extract bit fields from a byte array.
+ */
+class BitFieldParser {
+public:
+
+    explicit BitFieldParser(uint8_t *data);
+    virtual ~BitFieldParser();
+
+    /**
+     * Read numBits bits from the data array.
+     * Fields may span byte boundaries but may not exceed 32-bits.
+     * Note that the caller must ensure that there is sufficient data.
+     * Assume data is organized as BigEndian format.
+     */
+    uint32_t readBits(uint32_t numBits);
+
+    /*
+     * When the cursor is zero it points to a position right before
+     * the most significant bit.
+     * When the cursor is seven it points to a position right before
+     * the least significant bit.
+     */
+    uint32_t getBitCursor() const { return mBitCursor; }
+
+private:
+    uint8_t *mData;    // borrowed pointer to the byte array being parsed
+    uint32_t mBitCursor; // absolute bit offset from the start of mData
+};
+
+
+}  // namespace android
+
+#endif  // ANDROID_AUDIO_BIT_FIELD_PARSER_H
diff --git a/media/audio_utils/spdif/DTSFrameScanner.cpp b/media/audio_utils/spdif/DTSFrameScanner.cpp
new file mode 100644
index 0000000..71e2b0b
--- /dev/null
+++ b/media/audio_utils/spdif/DTSFrameScanner.cpp
@@ -0,0 +1,139 @@
+/*
+ * Copyright 2015, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "AudioSPDIF"
+//#define LOG_NDEBUG 0
+
+#include <assert.h>
+#include <string.h>
+
+#include <utils/Log.h>
+#include <audio_utils/spdif/FrameScanner.h>
+
+#include "BitFieldParser.h"
+#include "DTSFrameScanner.h"
+
+namespace android {
+
+// TODO Handle termination frames.
+// TODO assert if parse past end of header buffer
+// TODO Handle DTS_HD
+
+const uint8_t DTSFrameScanner::kSyncBytes[] =
+        { 0x7F, 0xFE, 0x80, 0x01 };
+
+// Indexed by the 4-bit SFREQ field; -1 entries mark codes this scanner
+// rejects (parseHeader() fails on them).
+const int32_t DTSFrameScanner::kDTSSampleRateTable[DTS_NUM_SAMPLE_RATE_TABLE_ENTRIES]
+        = { -1, 8000, 16000, 32000, -1, -1,
+        11025, 22050, 44100, -1, -1, 12000, 24000, 48000, -1, -1 };
+
+// Defined in IEC61937-2
+#define IEC61937_DATA_TYPE_DTS_I        11
+#define IEC61937_DATA_TYPE_DTS_II       12
+#define IEC61937_DATA_TYPE_DTS_III      13
+#define IEC61937_DATA_TYPE_DTS_IV       17
+
+// Maximum sample frames per burst for each IEC61937 DTS type.
+#define IEC61937_MAX_SAMPLES_TYPE_I    512
+#define IEC61937_MAX_SAMPLES_TYPE_II  1024
+#define IEC61937_MAX_SAMPLES_TYPE_III 2048
+
+// Limits defined in DTS spec paragraph 5.3.1
+#define DTS_MINIMUM_NBLKS                5
+#define DTS_MINIMUM_FSIZE               95
+
+#define DTS_HEADER_BYTES_NEEDED         12
+
+// Scanner for DTS byte streams.
+// Starts as type DTS_I; parseHeader() upgrades the data type once the
+// actual frame size is known.
+DTSFrameScanner::DTSFrameScanner()
+ : FrameScanner(IEC61937_DATA_TYPE_DTS_I,
+    DTSFrameScanner::kSyncBytes,
+    sizeof(DTSFrameScanner::kSyncBytes),
+    DTS_HEADER_BYTES_NEEDED)
+ , mSampleFramesPerSyncFrame(0)
+{
+}
+
+DTSFrameScanner::~DTSFrameScanner()
+{
+}
+
+// Parse DTS header.
+// Detect whether the stream is DTS or DTS_HD. Extract data depending on type.
+// Sets mDataType, mFrameSizeBytes,
+//      mSampleRate, mRateMultiplier, mLengthCode.
+//
+// @return true if valid
+bool DTSFrameScanner::parseHeader()
+{
+    BitFieldParser parser(&mHeaderBuffer[mSyncLength]);
+
+    // These variables are named after the fields in the DTS spec 5.3.1
+    // Extract field in order.
+    uint32_t ftype = parser.readBits(1);
+    uint32_t deficit = parser.readBits(5); // "short"
+    uint32_t cpf = parser.readBits(1);
+    uint32_t nblks = parser.readBits(7);
+    uint32_t fsize = parser.readBits(14);
+    uint32_t amode = parser.readBits(6);
+    uint32_t sfreq = parser.readBits(4);
+    // ftype, deficit and amode are read only to advance the bit cursor;
+    // suppress -Wunused-variable without changing the parse position.
+    (void) ftype;
+    (void) deficit;
+    (void) amode;
+    // make sure we did not read past collected data
+    ALOG_ASSERT((mSyncLength + ((parser.getBitCursor() + 7) >> 3))
+            <= mHeaderLength);
+
+    // Validate fields.
+    if (cpf != 0) {
+        ALOGE("DTSFrameScanner: ERROR - CPF not zero!");
+        return false;
+    }
+    if (nblks < DTS_MINIMUM_NBLKS) {
+        ALOGE("DTSFrameScanner: ERROR - nblks = %u", nblks);
+        return false;
+    }
+    if (fsize < DTS_MINIMUM_FSIZE) {
+        ALOGE("DTSFrameScanner: ERROR - fsize = %u", fsize);
+        return false;
+    }
+
+    int32_t sampleRate = kDTSSampleRateTable[sfreq];
+    if (sampleRate < 0) {
+        ALOGE("DTSFrameScanner: ERROR - invalid sampleRate[%u] = %d", sfreq, sampleRate);
+        return false;
+    }
+    mSampleRate = (uint32_t) sampleRate;
+
+    // Pick the IEC61937 data type based on the burst size in sample frames.
+    mSampleFramesPerSyncFrame = (nblks + 1) * DTS_PCM_FRAMES_PER_BLOCK;
+    if (mSampleFramesPerSyncFrame <= IEC61937_MAX_SAMPLES_TYPE_I) {
+        mDataType = IEC61937_DATA_TYPE_DTS_I;
+    } else if (mSampleFramesPerSyncFrame <= IEC61937_MAX_SAMPLES_TYPE_II) {
+        mDataType = IEC61937_DATA_TYPE_DTS_II;
+    } else if (mSampleFramesPerSyncFrame <= IEC61937_MAX_SAMPLES_TYPE_III) {
+        mDataType = IEC61937_DATA_TYPE_DTS_III;
+    } else {
+        mDataType = IEC61937_DATA_TYPE_DTS_IV;
+        // TODO set bits 8,10
+    }
+
+    // FSIZE is stored as (size - 1) in the bitstream.
+    mFrameSizeBytes = fsize + 1;
+
+    mRateMultiplier = 1; // TODO what about "frequency extension"?
+    ALOGI_IF((mFormatDumpCount == 0),
+            "DTS frame rate = %d * %d, size = %zu\n",
+            mSampleRate, mRateMultiplier, mFrameSizeBytes);
+    mFormatDumpCount++;
+    return true;
+}
+
+
+}  // namespace android
diff --git a/media/audio_utils/spdif/DTSFrameScanner.h b/media/audio_utils/spdif/DTSFrameScanner.h
new file mode 100644
index 0000000..883ded9
--- /dev/null
+++ b/media/audio_utils/spdif/DTSFrameScanner.h
@@ -0,0 +1,61 @@
+/*
+ * Copyright 2015, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_AUDIO_DTS_FRAME_SCANNER_H
+#define ANDROID_AUDIO_DTS_FRAME_SCANNER_H
+
+#include <stdint.h>
+#include <audio_utils/spdif/FrameScanner.h>
+
+namespace android {
+
+#define DTS_NUM_SAMPLE_RATE_TABLE_ENTRIES      16
+#define DTS_PCM_FRAMES_PER_BLOCK               32
+#define DTS_MAX_BLOCKS_PER_SYNC_FRAME_BLOCK   128
+
+class DTSFrameScanner : public FrameScanner
+{
+public:
+    DTSFrameScanner();
+    virtual ~DTSFrameScanner();
+
+    virtual int getMaxChannels()   const { return 5 + 1; }
+
+    virtual int getMaxSampleFramesPerSyncFrame() const {
+        return  DTS_MAX_BLOCKS_PER_SYNC_FRAME_BLOCK * DTS_PCM_FRAMES_PER_BLOCK;
+    }
+
+    virtual int getSampleFramesPerSyncFrame() const {
+        return mSampleFramesPerSyncFrame;
+    }
+
+    virtual bool isFirstInBurst() { return true; }
+    virtual bool isLastInBurst() { return true; }
+    virtual void resetBurst()  { }
+
+protected:
+
+    int mSampleFramesPerSyncFrame;
+
+    virtual bool parseHeader();
+
+    static const uint8_t kSyncBytes[];
+    static const int32_t kDTSSampleRateTable[];
+
+};
+
+}  // namespace android
+#endif  // ANDROID_AUDIO_DTS_FRAME_SCANNER_H
diff --git a/media/audio_utils/spdif/FrameScanner.cpp b/media/audio_utils/spdif/FrameScanner.cpp
new file mode 100644
index 0000000..80c1d94
--- /dev/null
+++ b/media/audio_utils/spdif/FrameScanner.cpp
@@ -0,0 +1,79 @@
+/*
+ * Copyright 2014, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "AudioSPDIF"
+
+#include <string.h>
+#include <assert.h>
+
+#include <utils/Log.h>
+#include <audio_utils/spdif/FrameScanner.h>
+
+namespace android {
+
+// Base state machine for locating compressed-audio sync frames.
+// syncBytes/syncLength describe the sync word; headerLength is the total
+// number of bytes (sync included) needed before parseHeader() can run.
+FrameScanner::FrameScanner(int dataType,
+            const uint8_t *syncBytes,
+            uint32_t syncLength,
+            uint32_t headerLength)
+ : mBytesSkipped(0)
+ , mSyncBytes(syncBytes)
+ , mSyncLength(syncLength)
+ , mHeaderLength(headerLength)
+ , mCursor(0)
+ , mFormatDumpCount(0)
+ , mSampleRate(0)
+ , mRateMultiplier(1)
+ , mFrameSizeBytes(0)
+ , mDataType(dataType)
+ , mDataTypeInfo(0)
+{
+}
+
+FrameScanner::~FrameScanner()
+{
+}
+
+// State machine that scans for headers in a byte stream.
+// Bytes are fed in one at a time; unsynchronized bytes are counted and
+// skipped until a full sync word, then a full header, has been gathered.
+// @return true if we have detected a complete and valid header.
+bool FrameScanner::scan(uint8_t byte)
+{
+    ALOGV("FrameScanner: byte = 0x%02X, mCursor = %d\n", byte, mCursor);
+    assert(mCursor < sizeof(mHeaderBuffer));
+
+    if (mCursor < mSyncLength) {
+        // Still matching the sync word.
+        if (byte != mSyncBytes[mCursor]) {
+            mBytesSkipped += 1; // skip unsynchronized data
+            mCursor = 0;
+            return false;
+        }
+        mHeaderBuffer[mCursor++] = byte;
+        return false;
+    }
+
+    if (mCursor >= mHeaderLength) {
+        return false; // defensive; the cursor is reset whenever a header completes
+    }
+
+    // Accumulate the rest of the header, then parse it once complete.
+    mHeaderBuffer[mCursor++] = byte;
+    if (mCursor < mHeaderLength) {
+        return false;
+    }
+    bool valid = parseHeader();
+    if (!valid) {
+        ALOGE("FrameScanner: ERROR - parseHeader() failed.");
+    }
+    mCursor = 0;
+    return valid;
+}
+
+}  // namespace android
diff --git a/media/audio_utils/spdif/SPDIFEncoder.cpp b/media/audio_utils/spdif/SPDIFEncoder.cpp
new file mode 100644
index 0000000..5a70b2c
--- /dev/null
+++ b/media/audio_utils/spdif/SPDIFEncoder.cpp
@@ -0,0 +1,275 @@
+/*
+ * Copyright 2014, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <stdint.h>
+#include <string.h>
+
+#define LOG_TAG "AudioSPDIF"
+#include <utils/Log.h>
+#include <audio_utils/spdif/SPDIFEncoder.h>
+
+#include "AC3FrameScanner.h"
+#include "DTSFrameScanner.h"
+
+namespace android {
+
+// Burst Preamble defined in IEC61937-1
+const unsigned short SPDIFEncoder::kSPDIFSync1 = 0xF872; // Pa
+const unsigned short SPDIFEncoder::kSPDIFSync2 = 0x4E1F; // Pb
+
+static int32_t sEndianDetector = 1;
+#define isLittleEndian()  (*((uint8_t *)&sEndianDetector))
+
+// Construct an encoder for the given compressed format and allocate a
+// burst buffer large enough for the format's worst-case data burst.
+SPDIFEncoder::SPDIFEncoder(audio_format_t format)
+  : mFramer(NULL)
+  , mSampleRate(48000)
+  , mBurstBuffer(NULL)
+  , mBurstBufferSizeBytes(0)
+  , mRateMultiplier(1)
+  , mBurstFrames(0)
+  , mByteCursor(0)
+  , mBitstreamNumber(0)
+  , mPayloadBytesPending(0)
+  , mScanning(true)
+{
+    // Pick the frame scanner that matches the compressed format.
+    switch(format) {
+        case AUDIO_FORMAT_AC3:
+        case AUDIO_FORMAT_E_AC3:
+            mFramer = new AC3FrameScanner(format);
+            break;
+        case AUDIO_FORMAT_DTS:
+        case AUDIO_FORMAT_DTS_HD:
+            mFramer = new DTSFrameScanner();
+            break;
+        default:
+            break;
+    }
+
+    // This is a programmer error. Call isFormatSupported() first.
+    LOG_ALWAYS_FATAL_IF((mFramer == NULL),
+        "SPDIFEncoder: invalid audio format = 0x%08X", format);
+
+    // Two 16-bit channels per output frame, sized for the largest burst.
+    mBurstBufferSizeBytes = sizeof(uint16_t)
+            * SPDIF_ENCODED_CHANNEL_COUNT
+            * mFramer->getMaxSampleFramesPerSyncFrame();
+
+    ALOGI("SPDIFEncoder: mBurstBufferSizeBytes = %zu, littleEndian = %d",
+            mBurstBufferSizeBytes, isLittleEndian());
+    mBurstBuffer = new uint16_t[mBurstBufferSizeBytes >> 1];
+    clearBurstBuffer();
+}
+
+// Default to AC3 so legacy callers that never pass a format still work.
+SPDIFEncoder::SPDIFEncoder()
+    : SPDIFEncoder(AUDIO_FORMAT_AC3)
+{
+}
+
+SPDIFEncoder::~SPDIFEncoder()
+{
+    delete[] mBurstBuffer;
+    delete mFramer;
+}
+
+// Query whether this encoder knows how to wrap the given compressed format.
+bool SPDIFEncoder::isFormatSupported(audio_format_t format)
+{
+    return (format == AUDIO_FORMAT_AC3)
+            || (format == AUDIO_FORMAT_E_AC3)
+            || (format == AUDIO_FORMAT_DTS)
+            || (format == AUDIO_FORMAT_DTS_HD);
+}
+
+// Bytes in one output frame: one 16-bit word per encoded channel.
+int SPDIFEncoder::getBytesPerOutputFrame()
+{
+    // Use uint16_t to match the element type of mBurstBuffer and the size
+    // computation in the constructor (same value as int16_t).
+    return SPDIF_ENCODED_CHANNEL_COUNT * sizeof(uint16_t);
+}
+
+// Write 16-bit words (e.g. the burst preamble) into the burst buffer,
+// starting on an even byte boundary.
+void SPDIFEncoder::writeBurstBufferShorts(const uint16_t *buffer, size_t numShorts)
+{
+    // avoid static analyser warning
+    LOG_ALWAYS_FATAL_IF((mBurstBuffer == NULL), "mBurstBuffer never allocated");
+    mByteCursor = (mByteCursor + 1) & ~1; // round up to even byte
+    size_t bytesToWrite = numShorts * sizeof(uint16_t);
+    if ((mByteCursor + bytesToWrite) > mBurstBufferSizeBytes) {
+        ALOGE("SPDIFEncoder: Burst buffer overflow!\n");
+        // NOTE(review): this overflow path calls reset() while
+        // writeBurstBufferBytes() only calls clearBurstBuffer() —
+        // confirm the asymmetry is intentional.
+        reset();
+        return;
+    }
+    memcpy(&mBurstBuffer[mByteCursor >> 1], buffer, bytesToWrite);
+    mByteCursor += bytesToWrite;
+}
+
+// Pack the bytes into the short buffer in the order:
+//   byte[0] -> short[0] MSB
+//   byte[1] -> short[0] LSB
+//   byte[2] -> short[1] MSB
+//   byte[3] -> short[1] LSB
+//   etcetera
+// This way they should come out in the correct order for SPDIF on both
+// Big and Little Endian CPUs.
+void SPDIFEncoder::writeBurstBufferBytes(const uint8_t *buffer, size_t numBytes)
+{
+    size_t bytesToWrite = numBytes;
+    if ((mByteCursor + bytesToWrite) > mBurstBufferSizeBytes) {
+        ALOGE("SPDIFEncoder: Burst buffer overflow!\n");
+        clearBurstBuffer();
+        return;
+    }
+    // Load any partially filled word so its existing MSB is preserved.
+    uint16_t pad = mBurstBuffer[mByteCursor >> 1];
+    for (size_t i = 0; i < bytesToWrite; i++) {
+        if (mByteCursor & 1 ) {
+            pad |= *buffer++; // put second byte in LSB
+            mBurstBuffer[mByteCursor >> 1] = pad;
+            pad = 0;
+        } else {
+            pad |= (*buffer++) << 8; // put first byte in MSB
+        }
+        mByteCursor++;
+    }
+    // Save partially filled short.
+    if (mByteCursor & 1 ){
+        mBurstBuffer[mByteCursor >> 1] = pad;
+    }
+}
+
+// Advance the cursor to the full burst size so the unfilled remainder
+// (already zeroed by clearBurstBuffer()) acts as zero padding.
+void SPDIFEncoder::sendZeroPad()
+{
+    // Pad remainder of burst with zeros.
+    size_t burstSize = mFramer->getSampleFramesPerSyncFrame() * sizeof(uint16_t)
+            * SPDIF_ENCODED_CHANNEL_COUNT;
+    if (mByteCursor > burstSize) {
+        ALOGE("SPDIFEncoder: Burst buffer, contents too large!");
+        clearBurstBuffer();
+    } else {
+        // We don't have to write zeros because buffer already set to zero
+        // by clearBurstBuffer(). Just pretend we wrote zeros by
+        // incrementing cursor.
+        mByteCursor = burstSize;
+    }
+}
+
+// Discard any partial burst and return to the scanning state.
+void SPDIFEncoder::reset()
+{
+    ALOGV("SPDIFEncoder: reset()");
+    clearBurstBuffer();
+    if (mFramer != NULL) {
+        mFramer->resetBurst();
+    }
+    mPayloadBytesPending = 0;
+    mScanning = true;
+}
+
+// Finalize the current burst (fill in the Pd length code, zero-pad) and
+// emit it via writeOutput(), then reset for the next burst.
+void SPDIFEncoder::flushBurstBuffer()
+{
+    const int preambleSize = 4 * sizeof(uint16_t); // Pa, Pb, Pc, Pd
+    if (mByteCursor > preambleSize) {
+        // Set lengthCode for valid payload before zeroPad.
+        uint16_t numBytes = (mByteCursor - preambleSize);
+        mBurstBuffer[3] = mFramer->convertBytesToLengthCode(numBytes);
+
+        sendZeroPad();
+        writeOutput(mBurstBuffer, mByteCursor);
+    }
+    reset();
+}
+
+// Zero the burst buffer (when allocated) and rewind the write cursor.
+void SPDIFEncoder::clearBurstBuffer()
+{
+    if (mBurstBuffer != NULL) {
+        memset(mBurstBuffer, 0, mBurstBufferSizeBytes);
+    }
+    mByteCursor = 0;
+}
+
+// Begin a new data burst by writing the IEC61937-1 preamble words
+// Pa, Pb (sync), Pc (burst-info) and a placeholder Pd (length code).
+void SPDIFEncoder::startDataBurst()
+{
+    // Encode IEC61937-1 Burst Preamble
+    uint16_t preamble[4];
+
+    // Pc: bitstream number, data-type-dependent info, and data type.
+    uint16_t burstInfo = (mBitstreamNumber << 13)
+        | (mFramer->getDataTypeInfo() << 8)
+        | mFramer->getDataType();
+
+    mRateMultiplier = mFramer->getRateMultiplier();
+
+    preamble[0] = kSPDIFSync1;
+    preamble[1] = kSPDIFSync2;
+    preamble[2] = burstInfo;
+    preamble[3] = 0; // lengthCode - This will get set after the buffer is full.
+    writeBurstBufferShorts(preamble, 4);
+}
+
+// Copy the header bytes buffered by the frame detector into the burst.
+// @return number of payload bytes still expected for this sync frame
+size_t SPDIFEncoder::startSyncFrame()
+{
+    // Write start of encoded frame that was buffered in frame detector.
+    size_t syncSize = mFramer->getHeaderSizeBytes();
+    writeBurstBufferBytes(mFramer->getHeaderAddress(), syncSize);
+    return mFramer->getFrameSizeBytes() - syncSize;
+}
+
+// Wraps raw encoded data into a data burst.
+// Alternates between two states: scanning byte-by-byte for a frame header,
+// and bulk-copying the frame payload into the burst buffer.
+// @return numBytes (all input is always consumed)
+ssize_t SPDIFEncoder::write( const void *buffer, size_t numBytes )
+{
+    size_t bytesLeft = numBytes;
+    const uint8_t *data = (const uint8_t *)buffer;
+    ALOGV("SPDIFEncoder: mScanning = %d, write(buffer[0] = 0x%02X, numBytes = %zu)",
+        mScanning, (uint) *data, numBytes);
+    while (bytesLeft > 0) {
+        if (mScanning) {
+            // Look for beginning of next encoded frame.
+            if (mFramer->scan(*data)) {
+                if (mByteCursor == 0) {
+                    startDataBurst();
+                } else if (mFramer->isFirstInBurst()) {
+                    // Make sure that this frame is at the beginning of the data burst.
+                    flushBurstBuffer();
+                    startDataBurst();
+                }
+                mPayloadBytesPending = startSyncFrame();
+                mScanning = false;
+            }
+            data++;
+            bytesLeft--;
+        } else {
+            // Write payload until we hit end of frame.
+            size_t bytesToWrite = bytesLeft;
+            // Only write as many as we need to finish the frame.
+            if (bytesToWrite > mPayloadBytesPending) {
+                bytesToWrite = mPayloadBytesPending;
+            }
+            writeBurstBufferBytes(data, bytesToWrite);
+
+            data += bytesToWrite;
+            bytesLeft -= bytesToWrite;
+            mPayloadBytesPending -= bytesToWrite;
+
+            // If we have all the payload then send a data burst.
+            if (mPayloadBytesPending == 0) {
+                if (mFramer->isLastInBurst()) {
+                    flushBurstBuffer();
+                }
+                // Go back to scanning for the next sync frame.
+                mScanning = true;
+            }
+        }
+    }
+    return numBytes;
+}
+
+}  // namespace android
diff --git a/media/audio_utils/tests/Android.mk b/media/audio_utils/tests/Android.mk
new file mode 100644
index 0000000..04e75b5
--- /dev/null
+++ b/media/audio_utils/tests/Android.mk
@@ -0,0 +1,37 @@
+# Build the unit tests for audio_utils
+
+LOCAL_PATH:= $(call my-dir)
+include $(CLEAR_VARS)
+
+# Device-side gtest for audio_utils primitives.
+LOCAL_SHARED_LIBRARIES := \
+	liblog \
+	libcutils \
+	libaudioutils
+
+LOCAL_C_INCLUDES := \
+	$(call include-path-for, audio-utils)
+
+LOCAL_SRC_FILES := \
+	primitives_tests.cpp
+
+LOCAL_MODULE := primitives_tests
+LOCAL_MODULE_TAGS := tests
+
+include $(BUILD_NATIVE_TEST)
+
+# Device-side FIFO test (plain executable, not gtest).
+include $(CLEAR_VARS)
+LOCAL_SRC_FILES := fifo_tests.cpp
+LOCAL_MODULE := fifo_tests
+LOCAL_C_INCLUDES := $(call include-path-for, audio-utils)
+LOCAL_SHARED_LIBRARIES := libaudioutils
+# libmedia libbinder libcutils libutils
+LOCAL_STATIC_LIBRARIES := libsndfile
+include $(BUILD_EXECUTABLE)
+
+# Host-side build of the same FIFO test.
+# NOTE(review): this module shares the name "fifo_tests" with the device
+# build above -- allowed for host/target pairs here, but verify it does not
+# collide under newer build systems.
+include $(CLEAR_VARS)
+LOCAL_SRC_FILES := fifo_tests.cpp
+LOCAL_MODULE := fifo_tests
+LOCAL_C_INCLUDES := $(call include-path-for, audio-utils)
+# libmedia libbinder libcutils libutils
+LOCAL_STATIC_LIBRARIES := libsndfile libaudioutils liblog
+include $(BUILD_HOST_EXECUTABLE)
diff --git a/media/audio_utils/tests/build_and_run_all_unit_tests.sh b/media/audio_utils/tests/build_and_run_all_unit_tests.sh
new file mode 100755
index 0000000..3656974
--- /dev/null
+++ b/media/audio_utils/tests/build_and_run_all_unit_tests.sh
@@ -0,0 +1,24 @@
+#!/bin/bash
+#
+# Build the tests in this directory with mm, then push and run them
+# on a connected device.
+#
+
+if [ -z "$ANDROID_BUILD_TOP" ]; then
+    echo "Android build environment not set"
+    # Exit status must be in 0-255; "exit -1" is out of range and
+    # non-portable, so use 1 for failure.
+    exit 1
+fi
+
+# ensure we have mm
+. $ANDROID_BUILD_TOP/build/envsetup.sh
+
+mm
+
+echo "waiting for device"
+
+adb root && adb wait-for-device remount
+
+echo "========================================"
+echo "testing primitives"
+adb push $OUT/system/lib/libaudioutils.so /system/lib
+adb push $OUT/data/nativetest/primitives_tests /system/bin
+adb shell /system/bin/primitives_tests
new file mode 100644
index 0000000..1fea244
--- /dev/null
+++ b/media/audio_utils/tests/fifo_tests.cpp
@@ -0,0 +1,169 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Test program for audio_utils FIFO library.
+// This only tests the single-threaded aspects, not the barriers.
+
+#include <limits.h>
+#include <stdlib.h>
+#include <string.h>
+#include <audio_utils/fifo.h>
+#include <audio_utils/sndfile.h>
+
+int main(int argc, char **argv)
+{
+    // Defaults; overridable via -c/-r/-w flags below.
+    size_t frameCount = 256;
+    size_t maxFramesPerRead = 1;
+    size_t maxFramesPerWrite = 1;
+    int i;
+    for (i = 1; i < argc; i++) {
+        char *arg = argv[i];
+        if (arg[0] != '-')
+            break;
+        switch (arg[1]) {
+        case 'c':   // FIFO frame count
+            frameCount = atoi(&arg[2]);
+            break;
+        case 'r':   // maximum frame count per read from FIFO
+            maxFramesPerRead = atoi(&arg[2]);
+            break;
+        case 'w':   // maximum frame count per write to FIFO
+            maxFramesPerWrite = atoi(&arg[2]);
+            break;
+        default:
+            fprintf(stderr, "%s: unknown option %s\n", argv[0], arg);
+            goto usage;
+        }
+    }
+
+    if (argc - i != 2) {
+usage:
+        fprintf(stderr, "usage: %s [-c#] in.wav out.wav\n", argv[0]);
+        return EXIT_FAILURE;
+    }
+    char *inputFile = argv[i];
+    char *outputFile = argv[i+1];
+
+    SF_INFO sfinfoin;
+    memset(&sfinfoin, 0, sizeof(sfinfoin));
+    SNDFILE *sfin = sf_open(inputFile, SFM_READ, &sfinfoin);
+    if (sfin == NULL) {
+        perror(inputFile);
+        return EXIT_FAILURE;
+    }
+    // sf_readf_short() does conversion, so not strictly necessary to check the file format.
+    // But I want to do "cmp" on input and output files afterwards,
+    // and it is easier if they are all the same format.
+    // Enforcing that everything is 16-bit is convenient for this.
+    if ((sfinfoin.format & (SF_FORMAT_TYPEMASK | SF_FORMAT_SUBMASK)) !=
+            (SF_FORMAT_WAV | SF_FORMAT_PCM_16)) {
+        fprintf(stderr, "%s: unsupported format\n", inputFile);
+        sf_close(sfin);
+        return EXIT_FAILURE;
+    }
+    // NOTE(review): the early-return paths below leak inputBuffer/outputBuffer;
+    // harmless in a test since the process exits immediately.
+    size_t frameSize = sizeof(short) * sfinfoin.channels;
+    short *inputBuffer = new short[sfinfoin.frames * sfinfoin.channels];
+    sf_count_t actualRead = sf_readf_short(sfin, inputBuffer, sfinfoin.frames);
+    if (actualRead != sfinfoin.frames) {
+        fprintf(stderr, "%s: unexpected EOF or error\n", inputFile);
+        sf_close(sfin);
+        return EXIT_FAILURE;
+    }
+    sf_close(sfin);
+
+    short *outputBuffer = new short[sfinfoin.frames * sfinfoin.channels];
+    size_t framesWritten = 0;
+    size_t framesRead = 0;
+    struct audio_utils_fifo fifo;
+    short *fifoBuffer = new short[frameCount * sfinfoin.channels];
+    audio_utils_fifo_init(&fifo, frameCount, frameSize, fifoBuffer);
+    int fifoWriteCount = 0, fifoReadCount = 0;
+    int fifoFillLevel = 0, minFillLevel = INT_MAX, maxFillLevel = INT_MIN;
+    // Randomly interleave writes and reads until the whole file has
+    // passed through the FIFO, tracking fill-level invariants.
+    for (;;) {
+        size_t framesToWrite = sfinfoin.frames - framesWritten;
+        size_t framesToRead = sfinfoin.frames - framesRead;
+        if (framesToWrite == 0 && framesToRead == 0) {
+            break;
+        }
+
+        if (framesToWrite > maxFramesPerWrite) {
+            framesToWrite = maxFramesPerWrite;
+        }
+        framesToWrite = rand() % (framesToWrite + 1);
+        ssize_t actualWritten = audio_utils_fifo_write(&fifo,
+                &inputBuffer[framesWritten * sfinfoin.channels], framesToWrite);
+        if (actualWritten < 0 || (size_t) actualWritten > framesToWrite) {
+            fprintf(stderr, "write to FIFO failed\n");
+            break;
+        }
+        framesWritten += actualWritten;
+        if (actualWritten > 0) {
+            fifoWriteCount++;
+        }
+        fifoFillLevel += actualWritten;
+        if (fifoFillLevel > maxFillLevel) {
+            maxFillLevel = fifoFillLevel;
+            if (maxFillLevel > (int) frameCount)
+                abort(); // fill level must never exceed capacity
+        }
+
+        if (framesToRead > maxFramesPerRead) {
+            framesToRead = maxFramesPerRead;
+        }
+        framesToRead = rand() % (framesToRead + 1);
+        ssize_t actualRead = audio_utils_fifo_read(&fifo,
+                &outputBuffer[framesRead * sfinfoin.channels], framesToRead);
+        if (actualRead < 0 || (size_t) actualRead > framesToRead) {
+            fprintf(stderr, "read from FIFO failed\n");
+            break;
+        }
+        framesRead += actualRead;
+        if (actualRead > 0) {
+            fifoReadCount++;
+        }
+        fifoFillLevel -= actualRead;
+        if (fifoFillLevel < minFillLevel) {
+            minFillLevel = fifoFillLevel;
+            if (minFillLevel < 0)
+                abort(); // fill level must never go negative
+        }
+    }
+    printf("FIFO non-empty writes: %d, non-empty reads: %d\n", fifoWriteCount, fifoReadCount);
+    printf("fill=%d, min=%d, max=%d\n", fifoFillLevel, minFillLevel, maxFillLevel);
+    audio_utils_fifo_deinit(&fifo);
+    delete[] fifoBuffer;
+
+    // Write everything that made it through the FIFO back out as a WAV file.
+    SF_INFO sfinfoout;
+    memset(&sfinfoout, 0, sizeof(sfinfoout));
+    sfinfoout.samplerate = sfinfoin.samplerate;
+    sfinfoout.channels = sfinfoin.channels;
+    sfinfoout.format = sfinfoin.format;
+    SNDFILE *sfout = sf_open(outputFile, SFM_WRITE, &sfinfoout);
+    if (sfout == NULL) {
+        perror(outputFile);
+        return EXIT_FAILURE;
+    }
+    sf_count_t actualWritten = sf_writef_short(sfout, outputBuffer, framesRead);
+    delete[] inputBuffer;
+    delete[] outputBuffer;
+    if (actualWritten != (sf_count_t) framesRead) {
+        fprintf(stderr, "%s: unexpected error\n", outputFile);
+        sf_close(sfout);
+        return EXIT_FAILURE;
+    }
+    sf_close(sfout);
+    return EXIT_SUCCESS;
+}
diff --git a/media/audio_utils/tests/primitives_tests.cpp b/media/audio_utils/tests/primitives_tests.cpp
new file mode 100644
index 0000000..5b3cd2d
--- /dev/null
+++ b/media/audio_utils/tests/primitives_tests.cpp
@@ -0,0 +1,656 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "audio_utils_primitives_tests"
+
+#include <math.h>
+#include <vector>
+#include <cutils/log.h>
+#include <gtest/gtest.h>
+#include <audio_utils/primitives.h>
+#include <audio_utils/format.h>
+#include <audio_utils/channels.h>
+
// Number of elements in a statically-declared array (not valid on pointers).
#define ARRAY_SIZE(a) (sizeof(a) / sizeof((a)[0]))

// Saturation limits for each sample format, expressed in that format's own
// integer scale.  u8 is unsigned (0..255, midpoint 128); the others are
// signed two's complement.
static const int32_t lim8pos = 255;
static const int32_t lim8neg = 0;
static const int32_t lim16pos = (1 << 15) - 1;
static const int32_t lim16neg = -(1 << 15);
static const int32_t lim24pos = (1 << 23) - 1;
static const int32_t lim24neg = -(1 << 23);
+
+inline void testClamp8(float f)
+{
+    // f is in native u8 scaling to test rounding
+    uint8_t uval = clamp8_from_float((f - 128) / (1 << 7));
+
+    // test clamping
+    ALOGV("clamp8_from_float(%f) = %u\n", f, uval);
+    if (f > lim8pos) {
+        EXPECT_EQ(lim8pos, uval);
+    } else if (f < lim8neg) {
+        EXPECT_EQ(lim8neg, uval);
+    }
+
+    // if in range, make sure round trip clamp and conversion is correct.
+    if (f < lim8pos - 1. && f > lim8neg + 1.) {
+        uint8_t uval2 = clamp8_from_float(float_from_u8(uval));
+        int diff = abs(uval - uval2);
+        EXPECT_LE(diff, 1);
+    }
+}
+
+inline void testClamp16(float f)
+{
+    int16_t ival = clamp16_from_float(f / (1 << 15));
+
+    // test clamping
+    ALOGV("clamp16_from_float(%f) = %d\n", f, ival);
+    if (f > lim16pos) {
+        EXPECT_EQ(lim16pos, ival);
+    } else if (f < lim16neg) {
+        EXPECT_EQ(lim16neg, ival);
+    }
+
+    // if in range, make sure round trip clamp and conversion is correct.
+    if (f < lim16pos - 1. && f > lim16neg + 1.) {
+        int ival2 = clamp16_from_float(float_from_i16(ival));
+        int diff = abs(ival - ival2);
+        EXPECT_LE(diff, 1);
+    }
+}
+
+inline void testClamp24(float f)
+{
+    int32_t ival = clamp24_from_float(f / (1 << 23));
+
+    // test clamping
+    ALOGV("clamp24_from_float(%f) = %d\n", f, ival);
+    if (f > lim24pos) {
+        EXPECT_EQ(lim24pos, ival);
+    } else if (f < lim24neg) {
+        EXPECT_EQ(lim24neg, ival);
+    }
+
+    // if in range, make sure round trip clamp and conversion is correct.
+    if (f < lim24pos - 1. && f > lim24neg + 1.) {
+        int ival2 = clamp24_from_float(float_from_q8_23(ival));
+        int diff = abs(ival - ival2);
+        EXPECT_LE(diff, 1);
+    }
+}
+
// Assert that the first `size` elements of ary[] form a strictly
// increasing sequence.
template<typename T>
void checkMonotone(const T *ary, size_t size)
{
    // Compare each element against its successor.
    for (size_t i = 0; i + 1 < size; ++i) {
        EXPECT_LT(ary[i], ary[i + 1]);
    }
}
+
// Exercise the float -> fixed-point clamping conversions over normal,
// boundary, and exceptional (infinity/NaN) inputs, then probe the exact
// saturation thresholds of the q4.27 / u4.28 / u4.12 converters by tweaking
// single float ulps.
TEST(audio_utils_primitives, clamp_to_int) {
    // Values chosen to hit both rounding midpoints (x.5) and hard saturation.
    static const float testArray[] = {
            -NAN, -INFINITY,
            -1.e20, -32768., 63.9,
            -3.5, -3.4, -2.5, 2.4, -1.5, -1.4, -0.5, -0.2, 0., 0.2, 0.5, 0.8,
            1.4, 1.5, 1.8, 2.4, 2.5, 2.6, 3.4, 3.5,
            32767., 32768., 1.e20,
            INFINITY, NAN };

    for (size_t i = 0; i < ARRAY_SIZE(testArray); ++i) {
        testClamp8(testArray[i]);
    }
    for (size_t i = 0; i < ARRAY_SIZE(testArray); ++i) {
        testClamp16(testArray[i]);
    }
    for (size_t i = 0; i < ARRAY_SIZE(testArray); ++i) {
        testClamp24(testArray[i]);
    }

    // used for ULP testing (tweaking the lsb of the float)
    // NOTE(review): union type punning between int32_t and float; relies on
    // compiler-supported behavior in C++ (GCC/Clang permit it).
    union {
        int32_t i;
        float f;
    } val;
    int32_t res;

    // check clampq4_27_from_float()
    // 16.0 is exactly the positive saturation point of q4.27.
    val.f = 16.;
    res = clampq4_27_from_float(val.f);
    EXPECT_EQ(0x7fffffff, res);
    val.i--;    // step one ulp below 16.0: must no longer fully saturate
    res = clampq4_27_from_float(val.f);
    EXPECT_LE(res, 0x7fffffff);
    EXPECT_GE(res, 0x7fff0000);
    val.f = -16.;   // exactly the negative saturation point
    res = clampq4_27_from_float(val.f);
    EXPECT_EQ((int32_t)0x80000000, res); // negative
    val.i++;    // one ulp toward zero (magnitude decreases for negatives)
    res = clampq4_27_from_float(val.f);
    EXPECT_GE(res, (int32_t)0x80000000); // negative
    EXPECT_LE(res, (int32_t)0x80008000); // negative

    // check u4_28_from_float and u4_12_from_float
    uint32_t ures;
    uint16_t ures16;
    val.f = 16.;    // saturates the unsigned converters to all-ones
    ures = u4_28_from_float(val.f);
    EXPECT_EQ(0xffffffff, ures);
    ures16 = u4_12_from_float(val.f);
    EXPECT_EQ(0xffff, ures16);

    val.f = -1.;    // negative input clamps the unsigned formats to zero
    ures = u4_28_from_float(val.f);
    EXPECT_EQ((uint32_t)0, ures);
    ures16 = u4_12_from_float(val.f);
    EXPECT_EQ(0, ures16);

    // check float_from_u4_28 and float_from_u4_12 (roundtrip)
    // u4.28 is sampled at 0x100000 steps since a float cannot represent
    // every 32-bit code exactly.
    for (uint32_t v = 0x100000; v <= 0xff000000; v += 0x100000) {
        ures = u4_28_from_float(float_from_u4_28(v));
        EXPECT_EQ(ures, v);
    }
    for (uint32_t v = 0; v <= 0xffff; ++v) { // uint32_t prevents overflow
        ures16 = u4_12_from_float(float_from_u4_12(v));
        EXPECT_EQ(ures16, v);
    }

    // check infinity
    EXPECT_EQ(0, clamp8_from_float(-INFINITY));
    EXPECT_EQ(255, clamp8_from_float(INFINITY));
}
+
// Round-trip tests across the memcpy_to_X_from_Y() conversion family.
// A ramp covering every 16-bit code is pushed through pairs of conversions;
// strict monotonicity after each hop shows the conversion neither clips
// interior values nor collapses adjacent codes, and the final memcmp against
// the untouched reference proves the full chain is lossless.
TEST(audio_utils_primitives, memcpy) {
    // test round-trip.
    int16_t *i16ref = new int16_t[65536];
    int16_t *i16ary = new int16_t[65536];
    int32_t *i32ary = new int32_t[65536];
    float *fary = new float[65536];
    uint8_t *pary = new uint8_t[65536*3];   // packed 24-bit: 3 bytes/sample

    // Fill with the full signed 16-bit ramp -32768..32767.
    for (size_t i = 0; i < 65536; ++i) {
        i16ref[i] = i16ary[i] = i - 32768;
    }

    // do round-trip testing i16 and float
    // (each source buffer is zeroed after conversion so stale data cannot
    // mask a conversion that silently fails to write)
    memcpy_to_float_from_i16(fary, i16ary, 65536);
    memset(i16ary, 0, 65536 * sizeof(i16ary[0]));
    checkMonotone(fary, 65536);

    memcpy_to_i16_from_float(i16ary, fary, 65536);
    memset(fary, 0, 65536 * sizeof(fary[0]));
    checkMonotone(i16ary, 65536);

    // TODO make a template case for the following?

    // do round-trip testing p24 to i16 and float
    memcpy_to_p24_from_i16(pary, i16ary, 65536);
    memset(i16ary, 0, 65536 * sizeof(i16ary[0]));

    // debugging aid: dump the 3 packed bytes of one intermediate sample
    // (disabled; enable to inspect p24 byte layout)
#if 0
    printf("pary[%d].0 = %u  pary[%d].1 = %u  pary[%d].2 = %u\n",
            1025, (unsigned) pary[1025*3],
            1025, (unsigned) pary[1025*3+1],
            1025, (unsigned) pary[1025*3+2]
            );
#endif

    memcpy_to_float_from_p24(fary, pary, 65536);
    memset(pary, 0, 65536 * 3 * sizeof(pary[0]));
    checkMonotone(fary, 65536);

    memcpy_to_p24_from_float(pary, fary, 65536);
    memset(fary, 0, 65536 * sizeof(fary[0]));

    memcpy_to_i16_from_p24(i16ary, pary, 65536);
    memset(pary, 0, 65536 * 3 * sizeof(pary[0]));
    checkMonotone(i16ary, 65536);

    // do round-trip testing q8_23 to i16 and float
    memcpy_to_q8_23_from_i16(i32ary, i16ary, 65536);
    memset(i16ary, 0, 65536 * sizeof(i16ary[0]));
    checkMonotone(i32ary, 65536);

    memcpy_to_float_from_q8_23(fary, i32ary, 65536);
    memset(i32ary, 0, 65536 * sizeof(i32ary[0]));
    checkMonotone(fary, 65536);

    memcpy_to_q8_23_from_float_with_clamp(i32ary, fary, 65536);
    memset(fary, 0, 65536 * sizeof(fary[0]));
    checkMonotone(i32ary, 65536);

    memcpy_to_i16_from_q8_23(i16ary, i32ary, 65536);
    memset(i32ary, 0, 65536 * sizeof(i32ary[0]));
    checkMonotone(i16ary, 65536);

    // do round-trip testing i32 to i16 and float
    memcpy_to_i32_from_i16(i32ary, i16ary, 65536);
    memset(i16ary, 0, 65536 * sizeof(i16ary[0]));
    checkMonotone(i32ary, 65536);

    memcpy_to_float_from_i32(fary, i32ary, 65536);
    memset(i32ary, 0, 65536 * sizeof(i32ary[0]));
    checkMonotone(fary, 65536);

    memcpy_to_i32_from_float(i32ary, fary, 65536);
    memset(fary, 0, 65536 * sizeof(fary[0]));
    checkMonotone(i32ary, 65536);

    memcpy_to_i16_from_i32(i16ary, i32ary, 65536);
    memset(i32ary, 0, 65536 * sizeof(i32ary[0]));
    checkMonotone(i16ary, 65536);

    // do partial round-trip testing q4_27 to i16 and float
    memcpy_to_float_from_i16(fary, i16ary, 65536);
    //memset(i16ary, 0, 65536 * sizeof(i16ary[0])); // not cleared: we don't do full roundtrip

    memcpy_to_q4_27_from_float(i32ary, fary, 65536);
    memset(fary, 0, 65536 * sizeof(fary[0]));
    checkMonotone(i32ary, 65536);

    memcpy_to_float_from_q4_27(fary, i32ary, 65536);
    memset(i32ary, 0, 65536 * sizeof(i32ary[0]));
    checkMonotone(fary, 65536);

    // at the end, our i16ary must be the same. (Monotone should be equivalent to this)
    EXPECT_EQ(0, memcmp(i16ary, i16ref, 65536*sizeof(i16ary[0])));

    // test round-trip for u8 and float.
    uint8_t *u8ref = new uint8_t[256];
    uint8_t *u8ary = new uint8_t[256];

    for (unsigned i = 0; i < 256; ++i) {
        u8ref[i] = i;
    }

    memcpy_to_float_from_u8(fary, u8ref, 256);
    memcpy_to_u8_from_float(u8ary, fary, 256);

    EXPECT_EQ(0, memcmp(u8ary, u8ref, 256 * sizeof(u8ary[0])));

    delete[] u8ref;
    delete[] u8ary;
    delete[] i16ref;
    delete[] i16ary;
    delete[] i32ary;
    delete[] fary;
    delete[] pary;
}
+
// Assert that the nonzero elements among ary[1..size) form a strictly
// increasing sequence.  Zeros (e.g. silence inserted for unmapped channels)
// are skipped.  Element 0 is deliberately not inspected, mirroring
// checkMonotone()'s use of the first element purely as a baseline.
template<typename T>
void checkMonotoneOrZero(const T *ary, size_t size)
{
    T highest = 0;

    for (size_t idx = 1; idx < size; ++idx) {
        const T cur = ary[idx];
        if (cur == 0) {
            continue;   // inserted zero; does not break monotonicity
        }
        EXPECT_LT(highest, cur);
        highest = cur;
    }
}
+
// Exercise memcpy_by_channel_mask() with the four interesting mask
// relationships: empty source, empty destination, identical masks, and
// masks differing by one channel (gap in source / gap in destination).
TEST(audio_utils_primitives, memcpy_by_channel_mask) {
    uint32_t dst_mask;
    uint32_t src_mask;
    uint16_t *u16ref = new uint16_t[65536];  // monotone reference ramp
    uint16_t *u16ary = new uint16_t[65536];  // destination under test

    for (size_t i = 0; i < 65536; ++i) {
        u16ref[i] = i;
    }

    // Test when src mask is 0.  Everything copied is zero.
    src_mask = 0;
    dst_mask = 0x8d;
    memset(u16ary, 0x99, 65536 * sizeof(u16ref[0]));  // poison destination first
    memcpy_by_channel_mask(u16ary, dst_mask, u16ref, src_mask, sizeof(u16ref[0]),
            65536 / popcount(dst_mask));
    // NOTE(review): scans 65530 (not 65536) samples — presumably to stay clear
    // of the partial trailing frame; confirm intent.
    EXPECT_EQ((size_t)0, nonZeroMono16((int16_t*)u16ary, 65530));

    // Test when dst_mask is 0.  Nothing should be copied.
    src_mask = 0;
    dst_mask = 0;
    memset(u16ary, 0, 65536 * sizeof(u16ref[0]));
    memcpy_by_channel_mask(u16ary, dst_mask, u16ref, src_mask, sizeof(u16ref[0]),
            65536);
    EXPECT_EQ((size_t)0, nonZeroMono16((int16_t*)u16ary, 65530));

    // Test when masks are the same.  One to one copy.
    src_mask = dst_mask = 0x8d;
    memset(u16ary, 0x99, 65536 * sizeof(u16ref[0]));
    memcpy_by_channel_mask(u16ary, dst_mask, u16ref, src_mask, sizeof(u16ref[0]), 555);
    EXPECT_EQ(0, memcmp(u16ary, u16ref, 555 * sizeof(u16ref[0]) * popcount(dst_mask)));

    // Test with a gap in source:
    // Input 3 samples, output 4 samples, one zero inserted.
    src_mask = 0x8c;
    dst_mask = 0x8d;
    memset(u16ary, 0x9, 65536 * sizeof(u16ary[0]));
    memcpy_by_channel_mask(u16ary, dst_mask, u16ref, src_mask, sizeof(u16ref[0]),
            65536 / popcount(dst_mask));
    checkMonotoneOrZero(u16ary, 65536);
    // 3 of every 4 output samples are nonzero; -1 because index 0 holds 0.
    EXPECT_EQ((size_t)(65536 * 3 / 4 - 1), nonZeroMono16((int16_t*)u16ary, 65536));

    // Test with a gap in destination:
    // Input 4 samples, output 3 samples, one deleted
    src_mask = 0x8d;
    dst_mask = 0x8c;
    memset(u16ary, 0x9, 65536 * sizeof(u16ary[0]));
    memcpy_by_channel_mask(u16ary, dst_mask, u16ref, src_mask, sizeof(u16ref[0]),
            65536 / popcount(src_mask));
    checkMonotone(u16ary, 65536 * 3 / 4);

    delete[] u16ref;
    delete[] u16ary;
}
+
+void memcpy_by_channel_mask2(void *dst, uint32_t dst_mask,
+        const void *src, uint32_t src_mask, size_t sample_size, size_t count)
+{
+    int8_t idxary[32];
+    uint32_t src_channels = popcount(src_mask);
+    uint32_t dst_channels =
+            memcpy_by_index_array_initialization(idxary, 32, dst_mask, src_mask);
+
+    memcpy_by_index_array(dst, dst_channels, src, src_channels, idxary, sample_size, count);
+}
+
// The memcpy_by_channel_mask test repeated through the index-array path
// (memcpy_by_channel_mask2 wrapper) using a packed 24-bit sample type, so
// that non-power-of-two sample sizes are exercised too.  Buffers are
// converted back to i16 before checking, since the monotonicity helpers
// operate on integers.
TEST(audio_utils_primitives, memcpy_by_index_array) {
    uint32_t dst_mask;
    uint32_t src_mask;
    typedef struct {uint8_t c[3];} __attribute__((__packed__)) uint8x3_t;
    uint8x3_t *u24ref = new uint8x3_t[65536];
    uint8x3_t *u24ary = new uint8x3_t[65536];
    uint16_t *u16ref = new uint16_t[65536];
    uint16_t *u16ary = new uint16_t[65536];

    EXPECT_EQ((size_t)3, sizeof(uint8x3_t)); // 3 bytes per struct

    // tests prepare_index_array_from_masks()
    // With a NULL array only the destination channel count is computed.
    EXPECT_EQ((size_t)4, memcpy_by_index_array_initialization(NULL, 0, 0x8d, 0x8c));
    EXPECT_EQ((size_t)3, memcpy_by_index_array_initialization(NULL, 0, 0x8c, 0x8d));

    // Reference ramp, converted once into packed 24-bit form.
    for (size_t i = 0; i < 65536; ++i) {
        u16ref[i] = i;
    }
    memcpy_to_p24_from_i16((uint8_t*)u24ref, (int16_t*)u16ref, 65536);

    // Test when src mask is 0.  Everything copied is zero.
    src_mask = 0;
    dst_mask = 0x8d;
    memset(u24ary, 0x99, 65536 * sizeof(u24ary[0]));
    memcpy_by_channel_mask2(u24ary, dst_mask, u24ref, src_mask, sizeof(u24ref[0]),
            65536 / popcount(dst_mask));
    memcpy_to_i16_from_p24((int16_t*)u16ary, (uint8_t*)u24ary, 65536);
    EXPECT_EQ((size_t)0, nonZeroMono16((int16_t*)u16ary, 65530));

    // Test when dst_mask is 0.  Nothing should be copied.
    src_mask = 0;
    dst_mask = 0;
    memset(u24ary, 0, 65536 * sizeof(u24ary[0]));
    memcpy_by_channel_mask2(u24ary, dst_mask, u24ref, src_mask, sizeof(u24ref[0]),
            65536);
    memcpy_to_i16_from_p24((int16_t*)u16ary, (uint8_t*)u24ary, 65536);
    EXPECT_EQ((size_t)0, nonZeroMono16((int16_t*)u16ary, 65530));

    // Test when masks are the same.  One to one copy.
    src_mask = dst_mask = 0x8d;
    memset(u24ary, 0x99, 65536 * sizeof(u24ary[0]));
    memcpy_by_channel_mask2(u24ary, dst_mask, u24ref, src_mask, sizeof(u24ref[0]), 555);
    EXPECT_EQ(0, memcmp(u24ary, u24ref, 555 * sizeof(u24ref[0]) * popcount(dst_mask)));

    // Test with a gap in source:
    // Input 3 samples, output 4 samples, one zero inserted.
    src_mask = 0x8c;
    dst_mask = 0x8d;
    memset(u24ary, 0x9, 65536 * sizeof(u24ary[0]));
    memcpy_by_channel_mask2(u24ary, dst_mask, u24ref, src_mask, sizeof(u24ref[0]),
            65536 / popcount(dst_mask));
    memcpy_to_i16_from_p24((int16_t*)u16ary, (uint8_t*)u24ary, 65536);
    checkMonotoneOrZero(u16ary, 65536);
    EXPECT_EQ((size_t)(65536 * 3 / 4 - 1), nonZeroMono16((int16_t*)u16ary, 65536));

    // Test with a gap in destination:
    // Input 4 samples, output 3 samples, one deleted
    src_mask = 0x8d;
    dst_mask = 0x8c;
    memset(u24ary, 0x9, 65536 * sizeof(u24ary[0]));
    memcpy_by_channel_mask2(u24ary, dst_mask, u24ref, src_mask, sizeof(u24ref[0]),
            65536 / popcount(src_mask));
    memcpy_to_i16_from_p24((int16_t*)u16ary, (uint8_t*)u24ary, 65536);
    checkMonotone(u16ary, 65536 * 3 / 4);

    delete[] u16ref;
    delete[] u16ary;
    delete[] u24ref;
    delete[] u24ary;
}
+
+void memcpy_by_channel_mask_dst_index(void *dst, uint32_t dst_mask,
+        const void *src, uint32_t src_mask, size_t sample_size, size_t count)
+{
+    int8_t idxary[32];
+    uint32_t src_channels = popcount(src_mask);
+    uint32_t dst_channels =
+            memcpy_by_index_array_initialization_dst_index(idxary, 32, dst_mask, src_mask);
+
+    memcpy_by_index_array(dst, dst_channels, src, src_channels, idxary, sample_size, count);
+}
+
// The memcpy_by_channel_mask test repeated through
// memcpy_by_index_array_initialization_dst_index(), again with a packed
// 24-bit sample type.  Here the destination mask is interpreted as an
// index-style mask, so "identical layout" cases pair a positional dst mask
// (0x0f, 0x07) with a sparse src mask.
TEST(audio_utils_primitives, memcpy_by_index_array_dst_index) {
    uint32_t dst_mask;
    uint32_t src_mask;
    typedef struct {uint8_t c[3];} __attribute__((__packed__)) uint8x3_t;
    uint8x3_t *u24ref = new uint8x3_t[65536];
    uint8x3_t *u24ary = new uint8x3_t[65536];
    uint16_t *u16ref = new uint16_t[65536];
    uint16_t *u16ary = new uint16_t[65536];

    EXPECT_EQ((size_t)3, sizeof(uint8x3_t)); // 3 bytes per struct

    // tests prepare_index_array_from_masks()
    // With a NULL array only the destination channel count is computed.
    EXPECT_EQ((size_t)4, memcpy_by_index_array_initialization_dst_index(NULL, 0, 0x8d, 0x8c));
    EXPECT_EQ((size_t)3, memcpy_by_index_array_initialization_dst_index(NULL, 0, 0x8c, 0x8d));

    // Reference ramp, converted once into packed 24-bit form.
    for (size_t i = 0; i < 65536; ++i) {
        u16ref[i] = i;
    }
    memcpy_to_p24_from_i16((uint8_t*)u24ref, (int16_t*)u16ref, 65536);

    // Test when src mask is 0.  Everything copied is zero.
    src_mask = 0;
    dst_mask = 0x8d;
    memset(u24ary, 0x99, 65536 * sizeof(u24ary[0]));
    memcpy_by_channel_mask_dst_index(u24ary, dst_mask, u24ref, src_mask, sizeof(u24ref[0]),
            65536 / popcount(dst_mask));
    memcpy_to_i16_from_p24((int16_t*)u16ary, (uint8_t*)u24ary, 65536);
    EXPECT_EQ((size_t)0, nonZeroMono16((int16_t*)u16ary, 65530));

    // Test when dst_mask is 0.  Nothing should be copied.
    src_mask = 0;
    dst_mask = 0;
    memset(u24ary, 0, 65536 * sizeof(u24ary[0]));
    memcpy_by_channel_mask_dst_index(u24ary, dst_mask, u24ref, src_mask, sizeof(u24ref[0]),
            65536);
    memcpy_to_i16_from_p24((int16_t*)u16ary, (uint8_t*)u24ary, 65536);
    EXPECT_EQ((size_t)0, nonZeroMono16((int16_t*)u16ary, 65530));

    // Test when dst mask equals source count size.  One to one copy.
    src_mask = 0x8d;
    dst_mask = 0x0f;
    memset(u24ary, 0x99, 65536 * sizeof(u24ary[0]));
    memcpy_by_channel_mask_dst_index(u24ary, dst_mask, u24ref, src_mask, sizeof(u24ref[0]), 555);
    EXPECT_EQ(0, memcmp(u24ary, u24ref, 555 * sizeof(u24ref[0]) * popcount(dst_mask)));

    // Test with a gap in source:
    // Input 3 samples, output 4 samples, one zero inserted.
    src_mask = 0x8c;
    dst_mask = 0x0f;
    memset(u24ary, 0x9, 65536 * sizeof(u24ary[0]));
    memcpy_by_channel_mask_dst_index(u24ary, dst_mask, u24ref, src_mask, sizeof(u24ref[0]),
            65536 / popcount(dst_mask));
    memcpy_to_i16_from_p24((int16_t*)u16ary, (uint8_t*)u24ary, 65536);
    checkMonotoneOrZero(u16ary, 65536);
    EXPECT_EQ((size_t)(65536 * 3 / 4 - 1), nonZeroMono16((int16_t*)u16ary, 65536));

    // Test with a gap in destination:
    // Input 4 samples, output 3 samples, one deleted
    src_mask = 0x8d;
    dst_mask = 0x07;
    memset(u24ary, 0x9, 65536 * sizeof(u24ary[0]));
    memcpy_by_channel_mask_dst_index(u24ary, dst_mask, u24ref, src_mask, sizeof(u24ref[0]),
            65536 / popcount(src_mask));
    memcpy_to_i16_from_p24((int16_t*)u16ary, (uint8_t*)u24ary, 65536);
    checkMonotone(u16ary, 65536 * 3 / 4);

    delete[] u16ref;
    delete[] u16ary;
    delete[] u24ref;
    delete[] u24ary;
}
+
+void memcpy_by_channel_mask_src_index(void *dst, uint32_t dst_mask,
+        const void *src, uint32_t src_mask, size_t sample_size, size_t count)
+{
+    int8_t idxary[32];
+    uint32_t src_channels = popcount(src_mask);
+    uint32_t dst_channels =
+            memcpy_by_index_array_initialization_src_index(idxary, 32, dst_mask, src_mask);
+
+    memcpy_by_index_array(dst, dst_channels, src, src_channels, idxary, sample_size, count);
+}
+
// The memcpy_by_channel_mask test repeated through
// memcpy_by_index_array_initialization_src_index(), with a packed 24-bit
// sample type.  Here the source mask is index-style, so "identical layout"
// cases pair a positional src mask (0xf, 0x07, 0x0f) with various dst masks.
TEST(audio_utils_primitives, memcpy_by_index_array_src_index) {
    uint32_t dst_mask;
    uint32_t src_mask;
    typedef struct {uint8_t c[3];} __attribute__((__packed__)) uint8x3_t;
    uint8x3_t *u24ref = new uint8x3_t[65536];
    uint8x3_t *u24ary = new uint8x3_t[65536];
    uint16_t *u16ref = new uint16_t[65536];
    uint16_t *u16ary = new uint16_t[65536];

    EXPECT_EQ((size_t)3, sizeof(uint8x3_t)); // 3 bytes per struct

    // tests prepare_index_array_from_masks()
    // With a NULL array only the destination channel count is computed.
    EXPECT_EQ((size_t)4, memcpy_by_index_array_initialization_src_index(NULL, 0, 0x8d, 0x8c));
    EXPECT_EQ((size_t)3, memcpy_by_index_array_initialization_src_index(NULL, 0, 0x8c, 0x8d));

    // Reference ramp, converted once into packed 24-bit form.
    for (size_t i = 0; i < 65536; ++i) {
        u16ref[i] = i;
    }
    memcpy_to_p24_from_i16((uint8_t*)u24ref, (int16_t*)u16ref, 65536);

    // Test when src mask is 0.  Everything copied is zero.
    src_mask = 0;
    dst_mask = 0x8d;
    memset(u24ary, 0x99, 65536 * sizeof(u24ary[0]));
    memcpy_by_channel_mask_src_index(u24ary, dst_mask, u24ref, src_mask, sizeof(u24ref[0]),
            65536 / popcount(dst_mask));
    memcpy_to_i16_from_p24((int16_t*)u16ary, (uint8_t*)u24ary, 65536);
    EXPECT_EQ((size_t)0, nonZeroMono16((int16_t*)u16ary, 65530));

    // Test when dst_mask is 0.  Nothing should be copied.
    src_mask = 0;
    dst_mask = 0;
    memset(u24ary, 0, 65536 * sizeof(u24ary[0]));
    memcpy_by_channel_mask_src_index(u24ary, dst_mask, u24ref, src_mask, sizeof(u24ref[0]),
            65536);
    memcpy_to_i16_from_p24((int16_t*)u16ary, (uint8_t*)u24ary, 65536);
    EXPECT_EQ((size_t)0, nonZeroMono16((int16_t*)u16ary, 65530));

    // Test when source mask must copy to dst mask.  One to one copy.
    src_mask = 0xf;
    dst_mask = 0xf;
    memset(u24ary, 0x99, 65536 * sizeof(u24ary[0]));
    memcpy_by_channel_mask_src_index(u24ary, dst_mask, u24ref, src_mask, sizeof(u24ref[0]), 555);
    EXPECT_EQ(0, memcmp(u24ary, u24ref, 555 * sizeof(u24ref[0]) * popcount(dst_mask)));

    // Test when source mask must copy to dst mask.  One to one copy.
    src_mask = 0xf;
    dst_mask = 0x8d;
    memset(u24ary, 0x99, 65536 * sizeof(u24ary[0]));
    memcpy_by_channel_mask_src_index(u24ary, dst_mask, u24ref, src_mask, sizeof(u24ref[0]), 555);
    EXPECT_EQ(0, memcmp(u24ary, u24ref, 555 * sizeof(u24ref[0]) * popcount(dst_mask)));

    // Test with a gap in source:
    // Input 3 samples, output 4 samples, one zero inserted.
    src_mask = 0x07;
    dst_mask = 0x8d;
    memset(u24ary, 0x9, 65536 * sizeof(u24ary[0]));
    memcpy_by_channel_mask_src_index(u24ary, dst_mask, u24ref, src_mask, sizeof(u24ref[0]),
            65536 / popcount(dst_mask));
    memcpy_to_i16_from_p24((int16_t*)u16ary, (uint8_t*)u24ary, 65536);
    checkMonotoneOrZero(u16ary, 65536);
    EXPECT_EQ((size_t)(65536 * 3 / 4 - 1), nonZeroMono16((int16_t*)u16ary, 65536));

    // Test with a gap in destination:
    // Input 4 samples, output 3 samples, one deleted
    src_mask = 0x0f;
    dst_mask = 0x8c;
    memset(u24ary, 0x9, 65536 * sizeof(u24ary[0]));
    memcpy_by_channel_mask_src_index(u24ary, dst_mask, u24ref, src_mask, sizeof(u24ref[0]),
            65536 / popcount(src_mask));
    memcpy_to_i16_from_p24((int16_t*)u16ary, (uint8_t*)u24ary, 65536);
    checkMonotone(u16ary, 65536 * 3 / 4);

    delete[] u16ref;
    delete[] u16ary;
    delete[] u24ref;
    delete[] u24ary;
}
+
// Round-trip channel expansion/contraction: stereo -> quad -> stereo must
// reproduce the original buffer exactly, and the expanded buffer must keep
// the original samples in order (with zeros in the inserted channels).
TEST(audio_utils_channels, adjust_channels) {
    uint16_t *u16ref = new uint16_t[65536];         // stereo reference
    uint16_t *u16expand = new uint16_t[65536*2];    // quad (twice the frames' data)
    uint16_t *u16ary = new uint16_t[65536];         // contracted result

    // reference buffer always increases
    for (size_t i = 0; i < 65536; ++i) {
        u16ref[i] = i;
    }

    // expand channels from stereo to quad.
    adjust_channels(u16ref /*in_buff*/, 2 /*in_channels*/,
            u16expand /*out_buff*/, 4 /*out_channels*/,
            sizeof(u16ref[0]) /*sample_size_in_bytes*/,
            sizeof(u16ref[0])*65536 /*num_in_bytes*/);

    // expanded buffer must increase (or be zero)
    checkMonotoneOrZero(u16expand, 65536*2);

    // contract channels back to stereo.
    adjust_channels(u16expand /*in_buff*/, 4 /*in_channels*/,
            u16ary /*out_buff*/, 2 /*out_channels*/,
            sizeof(u16expand[0]) /*sample_size_in_bytes*/,
            sizeof(u16expand[0])*65536*2 /*num_in_bytes*/);

    // must be identical to original.
    EXPECT_EQ(0, memcmp(u16ary, u16ref, sizeof(u16ref[0])*65536));

    delete[] u16ref;
    delete[] u16expand;
    delete[] u16ary;
}
diff --git a/media/audio_utils/tinysndfile.c b/media/audio_utils/tinysndfile.c
new file mode 100644
index 0000000..fb9c7d4
--- /dev/null
+++ b/media/audio_utils/tinysndfile.c
@@ -0,0 +1,650 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <audio_utils/sndfile.h>
+#include <audio_utils/primitives.h>
+#ifdef HAVE_STDERR
+#include <stdio.h>
+#endif
+#include <string.h>
+#include <errno.h>
+
// wFormatTag values from the Microsoft WAVE file format specification.
#define WAVE_FORMAT_PCM         1
#define WAVE_FORMAT_IEEE_FLOAT  3
#define WAVE_FORMAT_EXTENSIBLE  0xFFFE

// Opaque handle behind the libsndfile-compatible API.
struct SNDFILE_ {
    int mode;       // SFM_READ or SFM_WRITE
    uint8_t *temp;  // realloc buffer used for shrinking 16 bits to 8 bits and byte-swapping
    FILE *stream;   // underlying stdio stream, owned by this handle
    size_t bytesPerFrame;   // bytes per sample times channel count
    size_t remaining;   // frames unread for SFM_READ, frames written for SFM_WRITE
    SF_INFO info;   // format description reported back to the caller
};
+
// Decode a 2-byte little-endian unsigned integer starting at ptr.
static unsigned little2u(unsigned char *ptr)
{
    unsigned value = ptr[0];
    value |= (unsigned) ptr[1] << 8;
    return value;
}
+
// Decode a 4-byte little-endian unsigned integer starting at ptr.
static unsigned little4u(unsigned char *ptr)
{
    unsigned value = 0;
    for (int i = 3; i >= 0; --i) {
        value = (value << 8) | ptr[i];
    }
    return value;
}
+
// Return 1 when the host stores the least-significant byte of a short
// first in memory, 0 otherwise.
static int isLittleEndian(void)
{
    union {
        short value;
        char bytes[sizeof(short)];
    } probe;
    probe.value = 1;
    return probe.bytes[0] == 1;
}
+
+// "swab" conflicts with OS X <string.h>
+static void my_swab(short *ptr, size_t numToSwap)
+{
+    while (numToSwap > 0) {
+        *ptr = little2u((unsigned char *) ptr);
+        --numToSwap;
+        ++ptr;
+    }
+}
+
+static SNDFILE *sf_open_read(const char *path, SF_INFO *info)
+{
+    FILE *stream = fopen(path, "rb");
+    if (stream == NULL) {
+#ifdef HAVE_STDERR
+        fprintf(stderr, "fopen %s failed errno %d\n", path, errno);
+#endif
+        return NULL;
+    }
+
+    SNDFILE *handle = (SNDFILE *) malloc(sizeof(SNDFILE));
+    handle->mode = SFM_READ;
+    handle->temp = NULL;
+    handle->stream = stream;
+    handle->info.format = SF_FORMAT_WAV;
+
+    // don't attempt to parse all valid forms, just the most common ones
+    unsigned char wav[12];
+    size_t actual;
+    actual = fread(wav, sizeof(char), sizeof(wav), stream);
+    if (actual < 12) {
+#ifdef HAVE_STDERR
+        fprintf(stderr, "actual %zu < 44\n", actual);
+#endif
+        goto close;
+    }
+    if (memcmp(wav, "RIFF", 4)) {
+#ifdef HAVE_STDERR
+        fprintf(stderr, "wav != RIFF\n");
+#endif
+        goto close;
+    }
+    unsigned riffSize = little4u(&wav[4]);
+    if (riffSize < 4) {
+#ifdef HAVE_STDERR
+        fprintf(stderr, "riffSize %u < 4\n", riffSize);
+#endif
+        goto close;
+    }
+    if (memcmp(&wav[8], "WAVE", 4)) {
+#ifdef HAVE_STDERR
+        fprintf(stderr, "missing WAVE\n");
+#endif
+        goto close;
+    }
+    size_t remaining = riffSize - 4;
+    int hadFmt = 0;
+    int hadData = 0;
+    long dataTell = 0L;
+    while (remaining >= 8) {
+        unsigned char chunk[8];
+        actual = fread(chunk, sizeof(char), sizeof(chunk), stream);
+        if (actual != sizeof(chunk)) {
+#ifdef HAVE_STDERR
+            fprintf(stderr, "actual %zu != %zu\n", actual, sizeof(chunk));
+#endif
+            goto close;
+        }
+        remaining -= 8;
+        unsigned chunkSize = little4u(&chunk[4]);
+        if (chunkSize > remaining) {
+#ifdef HAVE_STDERR
+            fprintf(stderr, "chunkSize %u > remaining %zu\n", chunkSize, remaining);
+#endif
+            goto close;
+        }
+        if (!memcmp(&chunk[0], "fmt ", 4)) {
+            if (hadFmt) {
+#ifdef HAVE_STDERR
+                fprintf(stderr, "multiple fmt\n");
+#endif
+                goto close;
+            }
+            if (chunkSize < 2) {
+#ifdef HAVE_STDERR
+                fprintf(stderr, "chunkSize %u < 2\n", chunkSize);
+#endif
+                goto close;
+            }
+            unsigned char fmt[40];
+            actual = fread(fmt, sizeof(char), 2, stream);
+            if (actual != 2) {
+#ifdef HAVE_STDERR
+                fprintf(stderr, "actual %zu != 2\n", actual);
+#endif
+                goto close;
+            }
+            unsigned format = little2u(&fmt[0]);
+            size_t minSize = 0;
+            switch (format) {
+            case WAVE_FORMAT_PCM:
+            case WAVE_FORMAT_IEEE_FLOAT:
+                minSize = 16;
+                break;
+            case WAVE_FORMAT_EXTENSIBLE:
+                minSize = 40;
+                break;
+            default:
+#ifdef HAVE_STDERR
+                fprintf(stderr, "unsupported format %u\n", format);
+#endif
+                goto close;
+            }
+            if (chunkSize < minSize) {
+#ifdef HAVE_STDERR
+                fprintf(stderr, "chunkSize %u < minSize %zu\n", chunkSize, minSize);
+#endif
+                goto close;
+            }
+            actual = fread(&fmt[2], sizeof(char), minSize - 2, stream);
+            if (actual != minSize - 2) {
+#ifdef HAVE_STDERR
+                fprintf(stderr, "actual %zu != %zu\n", actual, minSize - 16);
+#endif
+                goto close;
+            }
+            if (chunkSize > minSize) {
+                fseek(stream, (long) (chunkSize - minSize), SEEK_CUR);
+            }
+            unsigned channels = little2u(&fmt[2]);
+            // FIXME FCC_8
+            if (channels != 1 && channels != 2 && channels != 4 && channels != 6 && channels != 8) {
+#ifdef HAVE_STDERR
+                fprintf(stderr, "unsupported channels %u\n", channels);
+#endif
+                goto close;
+            }
+            unsigned samplerate = little4u(&fmt[4]);
+            if (samplerate == 0) {
+#ifdef HAVE_STDERR
+                fprintf(stderr, "samplerate %u == 0\n", samplerate);
+#endif
+                goto close;
+            }
+            // ignore byte rate
+            // ignore block alignment
+            unsigned bitsPerSample = little2u(&fmt[14]);
+            if (bitsPerSample != 8 && bitsPerSample != 16 && bitsPerSample != 24 &&
+                    bitsPerSample != 32) {
+#ifdef HAVE_STDERR
+                fprintf(stderr, "bitsPerSample %u != 8 or 16 or 24 or 32\n", bitsPerSample);
+#endif
+                goto close;
+            }
+            unsigned bytesPerFrame = (bitsPerSample >> 3) * channels;
+            handle->bytesPerFrame = bytesPerFrame;
+            handle->info.samplerate = samplerate;
+            handle->info.channels = channels;
+            switch (bitsPerSample) {
+            case 8:
+                handle->info.format |= SF_FORMAT_PCM_U8;
+                break;
+            case 16:
+                handle->info.format |= SF_FORMAT_PCM_16;
+                break;
+            case 24:
+                handle->info.format |= SF_FORMAT_PCM_24;
+                break;
+            case 32:
+                if (format == WAVE_FORMAT_IEEE_FLOAT)
+                    handle->info.format |= SF_FORMAT_FLOAT;
+                else
+                    handle->info.format |= SF_FORMAT_PCM_32;
+                break;
+            }
+            hadFmt = 1;
+        } else if (!memcmp(&chunk[0], "data", 4)) {
+            if (!hadFmt) {
+#ifdef HAVE_STDERR
+                fprintf(stderr, "data not preceded by fmt\n");
+#endif
+                goto close;
+            }
+            if (hadData) {
+#ifdef HAVE_STDERR
+                fprintf(stderr, "multiple data\n");
+#endif
+                goto close;
+            }
+            handle->remaining = chunkSize / handle->bytesPerFrame;
+            handle->info.frames = handle->remaining;
+            dataTell = ftell(stream);
+            if (chunkSize > 0) {
+                fseek(stream, (long) chunkSize, SEEK_CUR);
+            }
+            hadData = 1;
+        } else if (!memcmp(&chunk[0], "fact", 4)) {
+            // ignore fact
+            if (chunkSize > 0) {
+                fseek(stream, (long) chunkSize, SEEK_CUR);
+            }
+        } else {
+            // ignore unknown chunk
+#ifdef HAVE_STDERR
+            fprintf(stderr, "ignoring unknown chunk %c%c%c%c\n",
+                    chunk[0], chunk[1], chunk[2], chunk[3]);
+#endif
+            if (chunkSize > 0) {
+                fseek(stream, (long) chunkSize, SEEK_CUR);
+            }
+        }
+        remaining -= chunkSize;
+    }
+    if (remaining > 0) {
+#ifdef HAVE_STDERR
+        fprintf(stderr, "partial chunk at end of RIFF, remaining %zu\n", remaining);
+#endif
+        goto close;
+    }
+    if (!hadData) {
+#ifdef HAVE_STDERR
+        fprintf(stderr, "missing data\n");
+#endif
+        goto close;
+    }
+    (void) fseek(stream, dataTell, SEEK_SET);
+    *info = handle->info;
+    return handle;
+
+close:
+    free(handle);
+    fclose(stream);
+    return NULL;
+}
+
/* Store a 32-bit value at ptr in little-endian byte order. */
static void write4u(unsigned char *ptr, unsigned u)
{
    int i;
    for (i = 0; i < 4; i++) {
        ptr[i] = (unsigned char) (u >> (8 * i));
    }
}
+
+static SNDFILE *sf_open_write(const char *path, SF_INFO *info)
+{
+    int sub = info->format & SF_FORMAT_SUBMASK;
+    if (!(
+            (info->samplerate > 0) &&
+            // FIXME FCC_8
+            (info->channels > 0 && info->channels <= 8) &&
+            ((info->format & SF_FORMAT_TYPEMASK) == SF_FORMAT_WAV) &&
+            (sub == SF_FORMAT_PCM_16 || sub == SF_FORMAT_PCM_U8 || sub == SF_FORMAT_FLOAT ||
+                sub == SF_FORMAT_PCM_24 || sub == SF_FORMAT_PCM_32)
+          )) {
+        return NULL;
+    }
+    FILE *stream = fopen(path, "w+b");
+    if (stream == NULL) {
+#ifdef HAVE_STDERR
+        fprintf(stderr, "fopen %s failed errno %d\n", path, errno);
+#endif
+        return NULL;
+    }
+    unsigned char wav[58];
+    memset(wav, 0, sizeof(wav));
+    memcpy(wav, "RIFF", 4);
+    memcpy(&wav[8], "WAVEfmt ", 8);
+    if (sub == SF_FORMAT_FLOAT) {
+        wav[4] = 50;    // riffSize
+        wav[16] = 18;   // fmtSize
+        wav[20] = WAVE_FORMAT_IEEE_FLOAT;
+    } else {
+        wav[4] = 36;    // riffSize
+        wav[16] = 16;   // fmtSize
+        wav[20] = WAVE_FORMAT_PCM;
+    }
+    wav[22] = info->channels;
+    write4u(&wav[24], info->samplerate);
+    unsigned bitsPerSample;
+    switch (sub) {
+    case SF_FORMAT_PCM_16:
+        bitsPerSample = 16;
+        break;
+    case SF_FORMAT_PCM_U8:
+        bitsPerSample = 8;
+        break;
+    case SF_FORMAT_FLOAT:
+        bitsPerSample = 32;
+        break;
+    case SF_FORMAT_PCM_24:
+        bitsPerSample = 24;
+        break;
+    case SF_FORMAT_PCM_32:
+        bitsPerSample = 32;
+        break;
+    default:    // not reachable
+        bitsPerSample = 0;
+        break;
+    }
+    unsigned blockAlignment = (bitsPerSample >> 3) * info->channels;
+    unsigned byteRate = info->samplerate * blockAlignment;
+    write4u(&wav[28], byteRate);
+    wav[32] = blockAlignment;
+    wav[34] = bitsPerSample;
+    size_t extra = 0;
+    if (sub == SF_FORMAT_FLOAT) {
+        memcpy(&wav[38], "fact", 4);
+        wav[42] = 4;
+        memcpy(&wav[50], "data", 4);
+        extra = 14;
+    } else
+        memcpy(&wav[36], "data", 4);
+    // dataSize is initially zero
+    (void) fwrite(wav, 44 + extra, 1, stream);
+    SNDFILE *handle = (SNDFILE *) malloc(sizeof(SNDFILE));
+    handle->mode = SFM_WRITE;
+    handle->temp = NULL;
+    handle->stream = stream;
+    handle->bytesPerFrame = blockAlignment;
+    handle->remaining = 0;
+    handle->info = *info;
+    return handle;
+}
+
+SNDFILE *sf_open(const char *path, int mode, SF_INFO *info)
+{
+    if (path == NULL || info == NULL) {
+#ifdef HAVE_STDERR
+        fprintf(stderr, "path=%p info=%p\n", path, info);
+#endif
+        return NULL;
+    }
+    switch (mode) {
+    case SFM_READ:
+        return sf_open_read(path, info);
+    case SFM_WRITE:
+        return sf_open_write(path, info);
+    default:
+#ifdef HAVE_STDERR
+        fprintf(stderr, "mode=%d\n", mode);
+#endif
+        return NULL;
+    }
+}
+
+void sf_close(SNDFILE *handle)
+{
+    if (handle == NULL)
+        return;
+    free(handle->temp);
+    if (handle->mode == SFM_WRITE) {
+        (void) fflush(handle->stream);
+        rewind(handle->stream);
+        unsigned char wav[58];
+        size_t extra = (handle->info.format & SF_FORMAT_SUBMASK) == SF_FORMAT_FLOAT ? 14 : 0;
+        (void) fread(wav, 44 + extra, 1, handle->stream);
+        unsigned dataSize = handle->remaining * handle->bytesPerFrame;
+        write4u(&wav[4], dataSize + 36 + extra);    // riffSize
+        write4u(&wav[40 + extra], dataSize);        // dataSize
+        rewind(handle->stream);
+        (void) fwrite(wav, 44 + extra, 1, handle->stream);
+    }
+    (void) fclose(handle->stream);
+    free(handle);
+}
+
+sf_count_t sf_readf_short(SNDFILE *handle, short *ptr, sf_count_t desiredFrames)
+{
+    if (handle == NULL || handle->mode != SFM_READ || ptr == NULL || !handle->remaining ||
+            desiredFrames <= 0) {
+        return 0;
+    }
+    if (handle->remaining < (size_t) desiredFrames) {
+        desiredFrames = handle->remaining;
+    }
+    // does not check for numeric overflow
+    size_t desiredBytes = desiredFrames * handle->bytesPerFrame;
+    size_t actualBytes;
+    void *temp = NULL;
+    unsigned format = handle->info.format & SF_FORMAT_SUBMASK;
+    if (format == SF_FORMAT_PCM_32 || format == SF_FORMAT_FLOAT || format == SF_FORMAT_PCM_24) {
+        temp = malloc(desiredBytes);
+        actualBytes = fread(temp, sizeof(char), desiredBytes, handle->stream);
+    } else {
+        actualBytes = fread(ptr, sizeof(char), desiredBytes, handle->stream);
+    }
+    size_t actualFrames = actualBytes / handle->bytesPerFrame;
+    handle->remaining -= actualFrames;
+    switch (format) {
+    case SF_FORMAT_PCM_U8:
+        memcpy_to_i16_from_u8(ptr, (unsigned char *) ptr, actualFrames * handle->info.channels);
+        break;
+    case SF_FORMAT_PCM_16:
+        if (!isLittleEndian())
+            my_swab(ptr, actualFrames * handle->info.channels);
+        break;
+    case SF_FORMAT_PCM_32:
+        memcpy_to_i16_from_i32(ptr, (const int *) temp, actualFrames * handle->info.channels);
+        free(temp);
+        break;
+    case SF_FORMAT_FLOAT:
+        memcpy_to_i16_from_float(ptr, (const float *) temp, actualFrames * handle->info.channels);
+        free(temp);
+        break;
+    case SF_FORMAT_PCM_24:
+        memcpy_to_i16_from_p24(ptr, (const uint8_t *) temp, actualFrames * handle->info.channels);
+        free(temp);
+        break;
+    default:
+        memset(ptr, 0, actualFrames * handle->info.channels * sizeof(short));
+        break;
+    }
+    return actualFrames;
+}
+
+sf_count_t sf_readf_float(SNDFILE *handle, float *ptr, sf_count_t desiredFrames)
+{
+    if (handle == NULL || handle->mode != SFM_READ || ptr == NULL || !handle->remaining ||
+            desiredFrames <= 0) {
+        return 0;
+    }
+    if (handle->remaining < (size_t) desiredFrames) {
+        desiredFrames = handle->remaining;
+    }
+    // does not check for numeric overflow
+    size_t desiredBytes = desiredFrames * handle->bytesPerFrame;
+    size_t actualBytes;
+    void *temp = NULL;
+    unsigned format = handle->info.format & SF_FORMAT_SUBMASK;
+    if (format == SF_FORMAT_PCM_16 || format == SF_FORMAT_PCM_U8 || format == SF_FORMAT_PCM_24) {
+        temp = malloc(desiredBytes);
+        actualBytes = fread(temp, sizeof(char), desiredBytes, handle->stream);
+    } else {
+        actualBytes = fread(ptr, sizeof(char), desiredBytes, handle->stream);
+    }
+    size_t actualFrames = actualBytes / handle->bytesPerFrame;
+    handle->remaining -= actualFrames;
+    switch (format) {
+    case SF_FORMAT_PCM_U8:
+#if 0
+        // TODO - implement
+        memcpy_to_float_from_u8(ptr, (const unsigned char *) temp,
+                actualFrames * handle->info.channels);
+#endif
+        free(temp);
+        break;
+    case SF_FORMAT_PCM_16:
+        memcpy_to_float_from_i16(ptr, (const short *) temp, actualFrames * handle->info.channels);
+        free(temp);
+        break;
+    case SF_FORMAT_PCM_32:
+        memcpy_to_float_from_i32(ptr, (const int *) ptr, actualFrames * handle->info.channels);
+        break;
+    case SF_FORMAT_FLOAT:
+        break;
+    case SF_FORMAT_PCM_24:
+        memcpy_to_float_from_p24(ptr, (const uint8_t *) temp, actualFrames * handle->info.channels);
+        free(temp);
+        break;
+    default:
+        memset(ptr, 0, actualFrames * handle->info.channels * sizeof(float));
+        break;
+    }
+    return actualFrames;
+}
+
+sf_count_t sf_readf_int(SNDFILE *handle, int *ptr, sf_count_t desiredFrames)
+{
+    if (handle == NULL || handle->mode != SFM_READ || ptr == NULL || !handle->remaining ||
+            desiredFrames <= 0) {
+        return 0;
+    }
+    if (handle->remaining < (size_t) desiredFrames) {
+        desiredFrames = handle->remaining;
+    }
+    // does not check for numeric overflow
+    size_t desiredBytes = desiredFrames * handle->bytesPerFrame;
+    void *temp = NULL;
+    unsigned format = handle->info.format & SF_FORMAT_SUBMASK;
+    size_t actualBytes;
+    if (format == SF_FORMAT_PCM_16 || format == SF_FORMAT_PCM_U8 || format == SF_FORMAT_PCM_24) {
+        temp = malloc(desiredBytes);
+        actualBytes = fread(temp, sizeof(char), desiredBytes, handle->stream);
+    } else {
+        actualBytes = fread(ptr, sizeof(char), desiredBytes, handle->stream);
+    }
+    size_t actualFrames = actualBytes / handle->bytesPerFrame;
+    handle->remaining -= actualFrames;
+    switch (format) {
+    case SF_FORMAT_PCM_U8:
+#if 0
+        // TODO - implement
+        memcpy_to_i32_from_u8(ptr, (const unsigned char *) temp,
+                actualFrames * handle->info.channels);
+#endif
+        free(temp);
+        break;
+    case SF_FORMAT_PCM_16:
+        memcpy_to_i32_from_i16(ptr, (const short *) temp, actualFrames * handle->info.channels);
+        free(temp);
+        break;
+    case SF_FORMAT_PCM_32:
+        break;
+    case SF_FORMAT_FLOAT:
+        memcpy_to_i32_from_float(ptr, (const float *) ptr, actualFrames * handle->info.channels);
+        break;
+    case SF_FORMAT_PCM_24:
+        memcpy_to_i32_from_p24(ptr, (const uint8_t *) temp, actualFrames * handle->info.channels);
+        free(temp);
+        break;
+    default:
+        memset(ptr, 0, actualFrames * handle->info.channels * sizeof(int));
+        break;
+    }
+    return actualFrames;
+}
+
+sf_count_t sf_writef_short(SNDFILE *handle, const short *ptr, sf_count_t desiredFrames)
+{
+    if (handle == NULL || handle->mode != SFM_WRITE || ptr == NULL || desiredFrames <= 0)
+        return 0;
+    size_t desiredBytes = desiredFrames * handle->bytesPerFrame;
+    size_t actualBytes = 0;
+    switch (handle->info.format & SF_FORMAT_SUBMASK) {
+    case SF_FORMAT_PCM_U8:
+        handle->temp = realloc(handle->temp, desiredBytes);
+        memcpy_to_u8_from_i16(handle->temp, ptr, desiredBytes);
+        actualBytes = fwrite(handle->temp, sizeof(char), desiredBytes, handle->stream);
+        break;
+    case SF_FORMAT_PCM_16:
+        // does not check for numeric overflow
+        if (isLittleEndian()) {
+            actualBytes = fwrite(ptr, sizeof(char), desiredBytes, handle->stream);
+        } else {
+            handle->temp = realloc(handle->temp, desiredBytes);
+            memcpy(handle->temp, ptr, desiredBytes);
+            my_swab((short *) handle->temp, desiredFrames * handle->info.channels);
+            actualBytes = fwrite(handle->temp, sizeof(char), desiredBytes, handle->stream);
+        }
+        break;
+    case SF_FORMAT_FLOAT:
+        handle->temp = realloc(handle->temp, desiredBytes);
+        memcpy_to_float_from_i16((float *) handle->temp, ptr,
+                desiredFrames * handle->info.channels);
+        actualBytes = fwrite(handle->temp, sizeof(char), desiredBytes, handle->stream);
+        break;
+    default:
+        break;
+    }
+    size_t actualFrames = actualBytes / handle->bytesPerFrame;
+    handle->remaining += actualFrames;
+    return actualFrames;
+}
+
+sf_count_t sf_writef_float(SNDFILE *handle, const float *ptr, sf_count_t desiredFrames)
+{
+    if (handle == NULL || handle->mode != SFM_WRITE || ptr == NULL || desiredFrames <= 0)
+        return 0;
+    size_t desiredBytes = desiredFrames * handle->bytesPerFrame;
+    size_t actualBytes = 0;
+    switch (handle->info.format & SF_FORMAT_SUBMASK) {
+    case SF_FORMAT_FLOAT:
+        actualBytes = fwrite(ptr, sizeof(char), desiredBytes, handle->stream);
+        break;
+    case SF_FORMAT_PCM_16:
+        handle->temp = realloc(handle->temp, desiredBytes);
+        memcpy_to_i16_from_float((short *) handle->temp, ptr,
+                desiredFrames * handle->info.channels);
+        actualBytes = fwrite(handle->temp, sizeof(char), desiredBytes, handle->stream);
+        break;
+    case SF_FORMAT_PCM_U8:  // transcoding from float to byte not yet implemented
+    default:
+        break;
+    }
+    size_t actualFrames = actualBytes / handle->bytesPerFrame;
+    handle->remaining += actualFrames;
+    return actualFrames;
+}
+
+sf_count_t sf_writef_int(SNDFILE *handle, const int *ptr, sf_count_t desiredFrames)
+{
+    if (handle == NULL || handle->mode != SFM_WRITE || ptr == NULL || desiredFrames <= 0)
+        return 0;
+    size_t desiredBytes = desiredFrames * handle->bytesPerFrame;
+    size_t actualBytes = 0;
+    switch (handle->info.format & SF_FORMAT_SUBMASK) {
+    case SF_FORMAT_PCM_32:
+    case SF_FORMAT_PCM_24:
+        actualBytes = fwrite(ptr, sizeof(char), desiredBytes, handle->stream);
+        break;
+    default:    // transcoding from other formats not yet implemented
+        break;
+    }
+    size_t actualFrames = actualBytes / handle->bytesPerFrame;
+    handle->remaining += actualFrames;
+    return actualFrames;
+}
diff --git a/media/brillo/audio/audioservice/Android.mk b/media/brillo/audio/audioservice/Android.mk
new file mode 100644
index 0000000..f1ec8dd
--- /dev/null
+++ b/media/brillo/audio/audioservice/Android.mk
@@ -0,0 +1,108 @@
+# Copyright 2016 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+LOCAL_PATH := $(call my-dir)
+
+audio_service_shared_libraries := \
+  libbinder \
+  libbinderwrapper \
+  libbrillo \
+  libbrillo-binder \
+  libc \
+  libchrome \
+  libmedia \
+  libutils
+
+audio_client_sources := \
+  aidl/android/brillo/brilloaudioservice/IAudioServiceCallback.aidl \
+  aidl/android/brillo/brilloaudioservice/IBrilloAudioService.aidl \
+  audio_service_callback.cpp \
+  brillo_audio_client.cpp \
+  brillo_audio_client_helpers.cpp \
+  brillo_audio_device_info.cpp \
+  brillo_audio_device_info_internal.cpp \
+  brillo_audio_manager.cpp
+
+audio_service_sources := \
+  aidl/android/brillo/brilloaudioservice/IAudioServiceCallback.aidl \
+  aidl/android/brillo/brilloaudioservice/IBrilloAudioService.aidl \
+  audio_daemon.cpp \
+  audio_device_handler.cpp \
+  audio_volume_handler.cpp \
+  brillo_audio_service_impl.cpp
+
+# Audio service.
+# =============================================================================
+include $(CLEAR_VARS)
+LOCAL_MODULE := brilloaudioservice
+LOCAL_SRC_FILES := \
+  $(audio_service_sources) \
+  main_audio_service.cpp
+LOCAL_AIDL_INCLUDES := $(LOCAL_PATH)/aidl
+LOCAL_SHARED_LIBRARIES := $(audio_service_shared_libraries)
+LOCAL_CFLAGS := -Wall
+LOCAL_INIT_RC := brilloaudioserv.rc
+include $(BUILD_EXECUTABLE)
+
+# Audio client library.
+# =============================================================================
+include $(CLEAR_VARS)
+LOCAL_MODULE := libbrilloaudio
+LOCAL_SRC_FILES := \
+  $(audio_client_sources)
+LOCAL_AIDL_INCLUDES := $(LOCAL_PATH)/aidl
+LOCAL_SHARED_LIBRARIES := $(audio_service_shared_libraries)
+LOCAL_CFLAGS := -Wall -std=c++14
+include $(BUILD_SHARED_LIBRARY)
+
+# Unit tests for the Brillo audio service.
+# =============================================================================
+include $(CLEAR_VARS)
+LOCAL_MODULE := brilloaudioservice_test
+LOCAL_SRC_FILES := \
+  $(audio_service_sources) \
+  test/audio_daemon_test.cpp \
+  test/audio_device_handler_test.cpp \
+  test/audio_volume_handler_test.cpp
+LOCAL_AIDL_INCLUDES := $(LOCAL_PATH)/aidl
+LOCAL_SHARED_LIBRARIES := \
+  $(audio_service_shared_libraries) \
+  libbinderwrapper_test_support
+LOCAL_STATIC_LIBRARIES := \
+  libBionicGtestMain \
+  libchrome_test_helpers \
+  libgmock
+LOCAL_CFLAGS := -Wno-sign-compare -Wall
+include $(BUILD_NATIVE_TEST)
+
+# Unit tests for the Brillo audio client.
+# =============================================================================
+include $(CLEAR_VARS)
+LOCAL_MODULE := brilloaudioclient_test
+LOCAL_SRC_FILES := \
+  $(audio_client_sources) \
+  test/audio_service_callback_test.cpp \
+  test/brillo_audio_client_test.cpp \
+  test/brillo_audio_device_info_internal_test.cpp \
+  test/brillo_audio_manager_test.cpp
+LOCAL_AIDL_INCLUDES := $(LOCAL_PATH)/aidl
+LOCAL_SHARED_LIBRARIES := \
+  $(audio_service_shared_libraries) \
+  libbinderwrapper_test_support
+LOCAL_STATIC_LIBRARIES := \
+  libBionicGtestMain \
+  libchrome_test_helpers \
+  libgmock
+LOCAL_CFLAGS := -Wno-sign-compare -Wall
+include $(BUILD_NATIVE_TEST)
diff --git a/media/brillo/audio/audioservice/aidl/android/brillo/brilloaudioservice/IAudioServiceCallback.aidl b/media/brillo/audio/audioservice/aidl/android/brillo/brilloaudioservice/IAudioServiceCallback.aidl
new file mode 100644
index 0000000..841c4ae
--- /dev/null
+++ b/media/brillo/audio/audioservice/aidl/android/brillo/brilloaudioservice/IAudioServiceCallback.aidl
@@ -0,0 +1,35 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package android.brillo.brilloaudioservice;
+
+/*
+ * Interface for the callback object registered with IBrilloAudioService. Used
+ * to notify clients about changes to the audio system.
+ */
+interface IAudioServiceCallback {
+  // Oneway call triggered when audio devices are connected to the system.
+  oneway void OnAudioDevicesConnected(in int[] added_devices);
+
+  // Oneway call triggered when audio devices are disconnected from the system.
+  oneway void OnAudioDevicesDisconnected(in int[] removed_devices);
+
+  // Oneway call triggered when the volume is changed. If there are
+  // multiple active streams, this call will be called multiple times.
+  oneway void OnVolumeChanged(
+      int stream_type, int old_volume_index, int new_volume_index);
+}
diff --git a/media/brillo/audio/audioservice/aidl/android/brillo/brilloaudioservice/IBrilloAudioService.aidl b/media/brillo/audio/audioservice/aidl/android/brillo/brilloaudioservice/IBrilloAudioService.aidl
new file mode 100644
index 0000000..209b651
--- /dev/null
+++ b/media/brillo/audio/audioservice/aidl/android/brillo/brilloaudioservice/IBrilloAudioService.aidl
@@ -0,0 +1,74 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package android.brillo.brilloaudioservice;
+
+import android.brillo.brilloaudioservice.IAudioServiceCallback;
+
+/*
+ * Interface for BrilloAudioService that clients can use to get the list of
+ * devices currently connected to the system as well as to control volume.
+ * Clients can also register callbacks to be notified about changes.
+ */
+interface IBrilloAudioService {
+  // Constants for device enumeration.
+  const int GET_DEVICES_INPUTS = 1;
+  const int GET_DEVICES_OUTPUTS = 2;
+
+  // Constants for volume control.
+  const int VOLUME_BUTTON_PRESS_DOWN = 1;
+  const int VOLUME_BUTTON_PRESS_UP = 2;
+
+  // Get the list of devices connected. If flag is GET_DEVICES_INPUTS, then
+  // return input devices. Otherwise, return output devices.
+  int[] GetDevices(int flag);
+
+  // Set device for a given usage.
+  // usage is an int of type audio_policy_force_use_t.
+  // config is an int of type audio_policy_forced_cfg_t.
+  void SetDevice(int usage, int config);
+
+  // Get the maximum number of steps used for a given stream.
+  int GetMaxVolumeSteps(int stream);
+
+  // Set the maximum number of steps to use for a given stream.
+  void SetMaxVolumeSteps(int stream, int max_steps);
+
+  // Set the volume for a given (stream, device) tuple.
+  void SetVolumeIndex(int stream, int device, int index);
+
+  // Get the current volume for a given (stream, device) tuple.
+  int GetVolumeIndex(int stream, int device);
+
+  // Get stream used when volume buttons are pressed.
+  int GetVolumeControlStream();
+
+  // Set default stream to use when volume buttons are pressed.
+  void SetVolumeControlStream(int stream);
+
+  // Increment volume.
+  void IncrementVolume();
+
+  // Decrement volume.
+  void DecrementVolume();
+
+  // Register a callback object with the service.
+  void RegisterServiceCallback(IAudioServiceCallback callback);
+
+  // Unregister a callback object.
+  void UnregisterServiceCallback(IAudioServiceCallback callback);
+}
diff --git a/media/brillo/audio/audioservice/audio_daemon.cpp b/media/brillo/audio/audioservice/audio_daemon.cpp
new file mode 100644
index 0000000..08ff548
--- /dev/null
+++ b/media/brillo/audio/audioservice/audio_daemon.cpp
@@ -0,0 +1,191 @@
+// Copyright 2016 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+// Implementation of audio_daemon.h.
+
+#include "audio_daemon.h"
+
+#include <sysexits.h>
+
+#include <base/bind.h>
+#include <base/files/file_enumerator.h>
+#include <base/files/file_path.h>
+#include <base/time/time.h>
+#include <binderwrapper/binder_wrapper.h>
+#include <linux/input.h>
+
+#include "brillo_audio_service_impl.h"
+
+namespace brillo {
+
+static const char kAPSServiceName[] = "media.audio_policy";
+static const char kInputDeviceDir[] = "/dev/input";
+static const char kServiceName[] =
+    "android.brillo.brilloaudioservice.BrilloAudioService";
+
+AudioDaemon::~AudioDaemon() {}
+
// Create and wire up the device and volume handlers, start watching every
// file in /dev/input for input events, and then publish the Brillo audio
// service.  Called once, after the first successful connection to the audio
// policy service (aps_ must already be set when this runs).
void AudioDaemon::InitializeHandlers() {
  // Start and initialize the audio daemon handlers.
  audio_device_handler_ =
      std::shared_ptr<AudioDeviceHandler>(new AudioDeviceHandler());
  audio_volume_handler_ =
      std::unique_ptr<AudioVolumeHandler>(new AudioVolumeHandler());

  // Register a callback with the audio device handler to call when device state
  // changes.
  auto device_callback =
      base::Bind(&AudioDaemon::DeviceCallback, weak_ptr_factory_.GetWeakPtr());
  audio_device_handler_->RegisterDeviceCallback(device_callback);

  // Register a callback with the audio volume handler.
  auto volume_callback =
      base::Bind(&AudioDaemon::VolumeCallback, weak_ptr_factory_.GetWeakPtr());
  audio_volume_handler_->RegisterCallback(volume_callback);

  audio_device_handler_->Init(aps_);
  audio_volume_handler_->Init(aps_);

  // Poll on all files in kInputDeviceDir.
  base::FileEnumerator fenum(base::FilePath(kInputDeviceDir),
                             false /*recursive*/, base::FileEnumerator::FILES);
  for (base::FilePath name = fenum.Next(); !name.empty(); name = fenum.Next()) {
    base::File file(name, base::File::FLAG_OPEN | base::File::FLAG_READ);
    if (file.IsValid()) {
      MessageLoop* message_loop = MessageLoop::current();
      int fd = file.GetPlatformFile();
      // Move file to files_ and ensure that when binding we get a pointer from
      // the object in files_.
      // NOTE(review): &files_.top() is a pointer into the files_ container;
      // presumably elements are never removed so it stays valid for the
      // lifetime of the watch -- confirm against the container type.
      files_.emplace(std::move(file));
      base::Closure file_callback =
          base::Bind(&AudioDaemon::EventCallback, weak_ptr_factory_.GetWeakPtr(),
                     &files_.top());
      message_loop->WatchFileDescriptor(fd, MessageLoop::kWatchRead,
                                        true /*persistent*/, file_callback);
    } else {
      // Non-fatal: skip input nodes we cannot open and keep going.
      LOG(WARNING) << "Could not open " << name.value() << " for reading. ("
                   << base::File::ErrorToString(file.error_details()) << ")";
    }
  }

  handlers_initialized_ = true;
  // Once the handlers have been initialized, we can register with service
  // manager.
  InitializeBrilloAudioService();
}
+
// Create the Brillo audio service, hand it weak references to the handlers,
// and register it with the service manager under kServiceName.
void AudioDaemon::InitializeBrilloAudioService() {
  brillo_audio_service_ = new BrilloAudioServiceImpl();
  // Weak pointers: the daemon retains ownership of both handlers.
  brillo_audio_service_->RegisterHandlers(
      std::weak_ptr<AudioDeviceHandler>(audio_device_handler_),
      std::weak_ptr<AudioVolumeHandler>(audio_volume_handler_));
  android::BinderWrapper::Get()->RegisterService(kServiceName,
                                                 brillo_audio_service_);
  VLOG(1) << "Registered brilloaudioservice with the service manager.";
}
+
// Obtain a binder for the audio policy service, retrying every 500 ms until
// it becomes available.  On success, registers for death notifications and
// either initializes the handlers (first connection) or reconnects the
// existing handlers (after OnAPSDisconnected).
void AudioDaemon::ConnectToAPS() {
  android::BinderWrapper* binder_wrapper = android::BinderWrapper::Get();
  auto binder = binder_wrapper->GetService(kAPSServiceName);
  // If we didn't get the audio policy service, try again in 500 ms.
  if (!binder.get()) {
    LOG(INFO) << "Could not connect to audio policy service. Trying again...";
    brillo::MessageLoop::current()->PostDelayedTask(
        base::Bind(&AudioDaemon::ConnectToAPS, weak_ptr_factory_.GetWeakPtr()),
        base::TimeDelta::FromMilliseconds(500));
    return;
  }
  LOG(INFO) << "Connected to audio policy service.";
  // Reconnect automatically if the audio policy service dies later.
  binder_wrapper->RegisterForDeathNotifications(
      binder,
      base::Bind(&AudioDaemon::OnAPSDisconnected,
                 weak_ptr_factory_.GetWeakPtr()));
  VLOG(1) << "Registered death notification.";
  aps_ = android::interface_cast<android::IAudioPolicyService>(binder);
  if (!handlers_initialized_) {
    InitializeHandlers();
  } else {
    audio_device_handler_->APSConnect(aps_);
    audio_volume_handler_->APSConnect(aps_);
  }
}
+
// Death-notification handler: detach both handlers from the dead audio
// policy service, clear the cached interface, and start reconnecting.
void AudioDaemon::OnAPSDisconnected() {
  LOG(INFO) << "Audio policy service died. Will try to reconnect.";
  audio_device_handler_->APSDisconnect();
  audio_volume_handler_->APSDisconnect();
  aps_ = nullptr;
  ConnectToAPS();
}
+
// OnInit, we want to do the following:
//   - Get a binder to the audio policy service.
//   - Initialize the audio device and volume handlers.
//   - Set up polling on files in /dev/input.
// Returns EX_OK on success, or the base Daemon's failure exit code.
int AudioDaemon::OnInit() {
  int exit_code = Daemon::OnInit();
  if (exit_code != EX_OK) return exit_code;
  // Initialize a binder wrapper.
  android::BinderWrapper::Create();
  // Initialize a binder watcher.
  binder_watcher_.Init();
  // Handler initialization is deferred until the audio policy service is
  // reachable; ConnectToAPS() keeps retrying until then.
  ConnectToAPS();
  return EX_OK;
}
+
// Read one input_event from |file| and forward it to both handlers.
// Invoked by the message loop whenever a watched /dev/input fd is readable.
void AudioDaemon::EventCallback(base::File* file) {
  input_event event;
  int bytes_read =
      file->ReadAtCurrentPos(reinterpret_cast<char*>(&event), sizeof(event));
  // A short or failed read means there is no complete event to process.
  if (bytes_read != sizeof(event)) {
    LOG(WARNING) << "Couldn't read an input event.";
    return;
  }
  audio_device_handler_->ProcessEvent(event);
  audio_volume_handler_->ProcessEvent(event);
}
+
// Forward a device connection-state change to clients via the Brillo audio
// service.  If the service object does not exist yet, create it and retry
// this callback once.
//
// |state| is kDevicesConnected when |devices| were connected.
// |devices| holds integers representing audio_devices_t values.
void AudioDaemon::DeviceCallback(
    AudioDeviceHandler::DeviceConnectionState state,
    const std::vector<int>& devices) {
  VLOG(1) << "Triggering device callback.";
  if (!brillo_audio_service_.get()) {
    LOG(ERROR) << "The Brillo audio service object is unavailble. Will try to "
               << "call the clients again once the service is up.";
    // InitializeBrilloAudioService() assigns brillo_audio_service_, so the
    // recursive retry below terminates after one pass.
    InitializeBrilloAudioService();
    DeviceCallback(state, devices);
    return;
  }
  if (state == AudioDeviceHandler::DeviceConnectionState::kDevicesConnected)
    brillo_audio_service_->OnDevicesConnected(devices);
  else
    brillo_audio_service_->OnDevicesDisconnected(devices);
}
+
// Forward a volume change on |stream| (from |previous_index| to
// |current_index|) to clients via the Brillo audio service.  If the service
// object does not exist yet, create it and retry this callback once.
void AudioDaemon::VolumeCallback(audio_stream_type_t stream,
                                 int previous_index,
                                 int current_index) {
  VLOG(1) << "Triggering volume button press callback.";
  if (!brillo_audio_service_.get()) {
    LOG(ERROR) << "The Brillo audio service object is unavailble. Will try to "
               << "call the clients again once the service is up.";
    // InitializeBrilloAudioService() assigns brillo_audio_service_, so the
    // recursive retry below terminates after one pass.
    InitializeBrilloAudioService();
    VolumeCallback(stream, previous_index, current_index);
    return;
  }
  brillo_audio_service_->OnVolumeChanged(stream, previous_index, current_index);
}
+
+}  // namespace brillo
diff --git a/media/brillo/audio/audioservice/audio_daemon.h b/media/brillo/audio/audioservice/audio_daemon.h
new file mode 100644
index 0000000..5fc01fd
--- /dev/null
+++ b/media/brillo/audio/audioservice/audio_daemon.h
@@ -0,0 +1,112 @@
+// Copyright 2016 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+// Main loop of the brillo audio service.
+
+#ifndef BRILLO_AUDIO_AUDIOSERVICE_AUDIO_DAEMON_H_
+#define BRILLO_AUDIO_AUDIOSERVICE_AUDIO_DAEMON_H_
+
+#include <memory>
+#include <stack>
+#include <vector>
+
+#include <base/files/file.h>
+#include <base/memory/weak_ptr.h>
+#include <brillo/binder_watcher.h>
+#include <brillo/daemons/daemon.h>
+#include <media/IAudioPolicyService.h>
+
+#include "audio_device_handler.h"
+#include "audio_volume_handler.h"
+#include "brillo_audio_service.h"
+
+namespace brillo {
+
+class AudioDaemon : public Daemon {
+ public:
+  AudioDaemon() {}
+  virtual ~AudioDaemon();
+
+ protected:
+  // Initialize the audio daemon handlers and start polling the files in
+  // /dev/input.
+  int OnInit() override;
+
+ private:
+  friend class AudioDaemonTest;
+  FRIEND_TEST(AudioDaemonTest, RegisterService);
+  FRIEND_TEST(AudioDaemonTest, TestAPSConnectInitializesHandlersOnlyOnce);
+  FRIEND_TEST(AudioDaemonTest, TestDeviceCallbackInitializesBASIfNULL);
+
+  // Callback function for input events. Events are handled by the audio device
+  // handler.
+  void EventCallback(base::File* file);
+
+  // Callback function for device state changes. Events are handled by the
+  // audio service.
+  //
+  // |state| is kDevicesConnected when |devices| are connected.
+  // |devices| is a vector of integers representing audio_devices_t.
+  void DeviceCallback(AudioDeviceHandler::DeviceConnectionState,
+                      const std::vector<int>& devices);
+
+  // Callback function when volume changes.
+  //
+  // |stream| is an audio_stream_type_t representing the stream.
+  // |previous_index| is the volume index before the key press.
+  // |current_index| is the volume index after the key press.
+  void VolumeCallback(audio_stream_type_t stream,
+                      int previous_index,
+                      int current_index);
+
+  // Callback function for audio policy service death notification.
+  void OnAPSDisconnected();
+
+  // Connect to the audio policy service and register a callback to be invoked
+  // if the audio policy service dies.
+  void ConnectToAPS();
+
+  // Register the brillo audio service with the service manager.
+  void InitializeBrilloAudioService();
+
+  // Initialize all audio daemon handlers.
+  //
+  // Note: This can only occur after we have connected to the audio policy
+  // service.
+  virtual void InitializeHandlers();
+
+  // Store the file objects that are created during initialization for the files
+  // being polled. This is done so these objects can be freed when the
+  // AudioDaemon object is destroyed.
+  std::stack<base::File> files_;
+  // Handler for audio device input events.
+  std::shared_ptr<AudioDeviceHandler> audio_device_handler_;
+  // Handler for volume key press input events.
+  std::shared_ptr<AudioVolumeHandler> audio_volume_handler_;
+  // Used to generate weak_ptr to AudioDaemon for use in base::Bind.
+  base::WeakPtrFactory<AudioDaemon> weak_ptr_factory_{this};
+  // Pointer to the audio policy service.
+  android::sp<android::IAudioPolicyService> aps_;
+  // Flag to indicate whether the handlers have been initialized.
+  bool handlers_initialized_ = false;
+  // Binder watcher to watch for binder messages.
+  BinderWatcher binder_watcher_;
+  // Brillo audio service. Used for scheduling callbacks to clients.
+  android::sp<BrilloAudioService> brillo_audio_service_;
+};
+
+}  // namespace brillo
+
+#endif  // BRILLO_AUDIO_AUDIOSERVICE_AUDIO_DAEMON_H_
diff --git a/media/brillo/audio/audioservice/audio_daemon_handler.h b/media/brillo/audio/audioservice/audio_daemon_handler.h
new file mode 100644
index 0000000..ea147c2
--- /dev/null
+++ b/media/brillo/audio/audioservice/audio_daemon_handler.h
@@ -0,0 +1,58 @@
+// Copyright 2016 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+// Handler for input events in /dev/input. AudioDaemonHandler is the base class
+// that other handlers inherit.
+
+#ifndef BRILLO_AUDIO_AUDIOSERVICE_AUDIO_DAEMON_HANDLER_H_
+#define BRILLO_AUDIO_AUDIOSERVICE_AUDIO_DAEMON_HANDLER_H_
+
+#include <linux/input.h>
+#include <media/IAudioPolicyService.h>
+
+namespace brillo {
+
+class AudioDaemonHandler {
+ public:
+  virtual ~AudioDaemonHandler(){};
+
+  // Initialize the handler.
+  //
+  // |aps| is a pointer to the binder object.
+  virtual void Init(android::sp<android::IAudioPolicyService> aps) = 0;
+
+  // Process input events from the kernel.
+  //
+  // |event| is a pointer to an input_event. This function should be able to
+  // gracefully handle input events that are not relevant to the functionality
+  // provided by this class.
+  virtual void ProcessEvent(const struct input_event& event) = 0;
+
+  // Inform the handler that the audio policy service has been disconnected.
+  virtual void APSDisconnect() = 0;
+
+  // Inform the handler that the audio policy service is reconnected.
+  //
+  // |aps| is a pointer to the binder object.
+  virtual void APSConnect(android::sp<android::IAudioPolicyService> aps) = 0;
+
+ protected:
+  // Pointer to the audio policy service.
+  android::sp<android::IAudioPolicyService> aps_;
+};
+
+}  // namespace brillo
+
+#endif  // BRILLO_AUDIO_AUDIOSERVICE_AUDIO_DAEMON_HANDLER_H_
diff --git a/media/brillo/audio/audioservice/audio_device_handler.cpp b/media/brillo/audio/audioservice/audio_device_handler.cpp
new file mode 100644
index 0000000..dc7e454
--- /dev/null
+++ b/media/brillo/audio/audioservice/audio_device_handler.cpp
@@ -0,0 +1,233 @@
+// Copyright 2016 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+// Implementation of audio_device_handler.h
+
+#include "audio_device_handler.h"
+
+#include <base/files/file.h>
+#include <base/logging.h>
+#include <brillo/message_loops/message_loop.h>
+#include <media/AudioSystem.h>
+
+namespace brillo {
+
+// All input devices currently supported by AudioDeviceHandler.
+const std::vector<audio_devices_t> AudioDeviceHandler::kSupportedInputDevices_ =
+    {AUDIO_DEVICE_IN_WIRED_HEADSET};
+
+const std::vector<audio_devices_t>
+    AudioDeviceHandler::kSupportedOutputDevices_ = {
+        AUDIO_DEVICE_OUT_WIRED_HEADSET, AUDIO_DEVICE_OUT_WIRED_HEADPHONE};
+
+static const char kH2WStateFile[] = "/sys/class/switch/h2w/state";
+
+AudioDeviceHandler::AudioDeviceHandler() {
+  headphone_ = false;
+  microphone_ = false;
+}
+
+AudioDeviceHandler::~AudioDeviceHandler() {}
+
+void AudioDeviceHandler::GetInputDevices(std::vector<int>* devices_list) {
+  std::copy(connected_input_devices_.begin(),
+            connected_input_devices_.end(),
+            std::back_inserter(*devices_list));
+}
+
+void AudioDeviceHandler::GetOutputDevices(std::vector<int>* devices_list) {
+  std::copy(connected_output_devices_.begin(),
+            connected_output_devices_.end(),
+            std::back_inserter(*devices_list));
+}
+
+void AudioDeviceHandler::RegisterDeviceCallback(
+      base::Callback<void(DeviceConnectionState,
+                          const std::vector<int>& )>& callback) {
+  callback_ = callback;
+}
+
+void AudioDeviceHandler::TriggerCallback(DeviceConnectionState state) {
+  // If no devices have changed, don't bother triggering a callback.
+  if (changed_devices_.size() == 0)
+    return;
+  base::Closure closure = base::Bind(callback_, state, changed_devices_);
+  MessageLoop::current()->PostTask(closure);
+  // We can clear changed_devices_ here since base::Bind makes a copy of
+  // changed_devices_.
+  changed_devices_.clear();
+}
+
+void AudioDeviceHandler::APSDisconnect() {
+  aps_.clear();
+}
+
+void AudioDeviceHandler::APSConnect(
+    android::sp<android::IAudioPolicyService> aps) {
+  aps_ = aps;
+  // Reset the state
+  connected_input_devices_.clear();
+  connected_output_devices_.clear();
+  // Inform audio policy service about the currently connected devices.
+  VLOG(1) << "Calling GetInitialAudioDeviceState on APSConnect.";
+  GetInitialAudioDeviceState(base::FilePath(kH2WStateFile));
+}
+
+void AudioDeviceHandler::Init(android::sp<android::IAudioPolicyService> aps) {
+  aps_ = aps;
+  // Reset audio policy service state in case this service crashed and there is
+  // a mismatch between the current system state and what audio policy service
+  // was previously told.
+  VLOG(1) << "Calling DisconnectAllSupportedDevices.";
+  DisconnectAllSupportedDevices();
+  TriggerCallback(kDevicesDisconnected);
+
+  // Get headphone jack state and update audio policy service with new state.
+  VLOG(1) << "Calling GetInitialAudioDeviceState.";
+  GetInitialAudioDeviceState(base::FilePath(kH2WStateFile));
+}
+
+void AudioDeviceHandler::GetInitialAudioDeviceState(
+    const base::FilePath& path) {
+  base::File file(path, base::File::FLAG_OPEN | base::File::FLAG_READ);
+  if (!file.IsValid()) {
+    LOG(WARNING) << "Kernel does not have wired headset support. Could not "
+                 << "open " << path.value() << " ("
+                 << base::File::ErrorToString(file.error_details()) << ").";
+    return;
+  }
+  int state = 0;
+  int bytes_read = file.ReadAtCurrentPos(reinterpret_cast<char*>(&state), 1);
+  if (bytes_read <= 0) {  // 0 == empty file, -1 == read error.
+    LOG(WARNING) << "Could not read from " << path.value();
+    return;
+  }
+  state -= '0';  // The switch state is stored as a single ASCII digit.
+  VLOG(1) << "Initial audio jack state is " << state;
+  static const int kHeadPhoneMask = 0x1;
+  bool headphone = state & kHeadPhoneMask;
+  static const int kMicrophoneMask = 0x2;
+  bool microphone = (state & kMicrophoneMask) >> 1;
+
+  UpdateAudioSystem(headphone, microphone);
+}
+
+void AudioDeviceHandler::NotifyAudioPolicyService(
+    audio_devices_t device, audio_policy_dev_state_t state) {
+  if (aps_ == nullptr) {
+    LOG(INFO) << "Audio device handler cannot call audio policy service. Will "
+              << "try again later.";
+    return;
+  }
+  VLOG(1) << "Calling Audio Policy Service to change " << device << " to state "
+          << state;
+  aps_->setDeviceConnectionState(device, state, "", "");
+}
+
+int AudioDeviceHandler::SetDevice(audio_policy_force_use_t usage,
+                                  audio_policy_forced_cfg_t config) {
+  if (aps_ == nullptr) {
+    LOG(WARNING) << "Audio policy service cannot be reached. Please try again.";
+    return EAGAIN;
+  }
+  VLOG(1) << "Calling audio policy service to set " << usage << " to "
+          << config;
+  return aps_->setForceUse(usage, config);
+}
+
+void AudioDeviceHandler::ConnectAudioDevice(audio_devices_t device) {
+  audio_policy_dev_state_t state = AUDIO_POLICY_DEVICE_STATE_AVAILABLE;
+  NotifyAudioPolicyService(device, state);
+  if (audio_is_input_device(device))
+    connected_input_devices_.insert(device);
+  else
+    connected_output_devices_.insert(device);
+  changed_devices_.push_back(device);
+}
+
+void AudioDeviceHandler::DisconnectAudioDevice(audio_devices_t device) {
+  audio_policy_dev_state_t state = AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE;
+  NotifyAudioPolicyService(device, state);
+  if (audio_is_input_device(device))
+    connected_input_devices_.erase(device);
+  else
+    connected_output_devices_.erase(device);
+  changed_devices_.push_back(device);
+}
+
+void AudioDeviceHandler::DisconnectAllSupportedDevices() {
+  for (auto device : kSupportedInputDevices_) {
+    DisconnectAudioDevice(device);
+  }
+  for (auto device : kSupportedOutputDevices_) {
+    DisconnectAudioDevice(device);
+  }
+}
+
+void AudioDeviceHandler::DisconnectAllConnectedDevices() {
+  while (!connected_input_devices_.empty()) {
+    audio_devices_t device = *(connected_input_devices_.begin());
+    DisconnectAudioDevice(device);
+  }
+  while (!connected_output_devices_.empty()) {
+    audio_devices_t device = *(connected_output_devices_.begin());
+    DisconnectAudioDevice(device);
+  }
+}
+
+void AudioDeviceHandler::UpdateAudioSystem(bool headphone, bool microphone) {
+  if (microphone) {
+    ConnectAudioDevice(AUDIO_DEVICE_IN_WIRED_HEADSET);
+  }
+  if (headphone && microphone) {
+    ConnectAudioDevice(AUDIO_DEVICE_OUT_WIRED_HEADSET);
+  } else if (headphone) {
+    ConnectAudioDevice(AUDIO_DEVICE_OUT_WIRED_HEADPHONE);
+  } else if (!microphone) {
+    // No devices are connected. Inform the audio policy service that all
+    // connected devices have been disconnected.
+    DisconnectAllConnectedDevices();
+    TriggerCallback(kDevicesDisconnected);
+    return;
+  }
+  TriggerCallback(kDevicesConnected);
+  return;
+}
+
+void AudioDeviceHandler::ProcessEvent(const struct input_event& event) {
+  VLOG(1) << event.type << " " << event.code << " " << event.value;
+  if (event.type == EV_SW) {
+    switch (event.code) {
+      case SW_HEADPHONE_INSERT:
+        headphone_ = event.value;
+        break;
+      case SW_MICROPHONE_INSERT:
+        microphone_ = event.value;
+        break;
+      default:
+        // This event code is not supported by this handler.
+        break;
+    }
+  } else if (event.type == EV_SYN) {
+    // We have received all input events. Update the audio system.
+    UpdateAudioSystem(headphone_, microphone_);
+    // Reset the headphone and microphone flags that are used to track
+    // information across multiple calls to ProcessEvent.
+    headphone_ = false;
+    microphone_ = false;
+  }
+}
+
+}  // namespace brillo
diff --git a/media/brillo/audio/audioservice/audio_device_handler.h b/media/brillo/audio/audioservice/audio_device_handler.h
new file mode 100644
index 0000000..af20420
--- /dev/null
+++ b/media/brillo/audio/audioservice/audio_device_handler.h
@@ -0,0 +1,201 @@
+// Copyright 2016 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+// Handler for input events in /dev/input. AudioDeviceHandler handles events
+// only for audio devices being plugged in/removed from the system. Implements
+// some of the functionality present in WiredAccessoryManager.java.
+
+#ifndef BRILLO_AUDIO_AUDIOSERVICE_AUDIO_DEVICE_HANDLER_H_
+#define BRILLO_AUDIO_AUDIOSERVICE_AUDIO_DEVICE_HANDLER_H_
+
+#include <set>
+#include <vector>
+
+#include <base/bind.h>
+#include <base/files/file_path.h>
+#include <gtest/gtest_prod.h>
+#include <linux/input.h>
+#include <media/IAudioPolicyService.h>
+#include <system/audio.h>
+#include <system/audio_policy.h>
+
+#include "audio_daemon_handler.h"
+
+namespace brillo {
+
+class AudioDeviceHandler : public AudioDaemonHandler {
+ public:
+  AudioDeviceHandler();
+  virtual ~AudioDeviceHandler();
+
+  // Get the current state of the headset jack and update AudioSystem based on
+  // the initial state.
+  //
+  // |aps| is a pointer to the binder object.
+  virtual void Init(android::sp<android::IAudioPolicyService> aps) override;
+
+  // Process input events from the kernel. Connecting/disconnecting an audio
+  // device will result in multiple calls to this method.
+  //
+  // |event| is a pointer to an input_event. This function should be able to
+  // gracefully handle input events that are not relevant to the functionality
+  // provided by this class.
+  virtual void ProcessEvent(const struct input_event& event) override;
+
+  // Inform the handler that the audio policy service has been disconnected.
+  void APSDisconnect();
+
+  // Inform the handler that the audio policy service is reconnected.
+  //
+  // |aps| is a pointer to the binder object.
+  virtual void APSConnect(
+      android::sp<android::IAudioPolicyService> aps) override;
+
+  // Get the list of connected devices.
+  //
+  // |devices_list| is the vector to copy list of connected input devices to.
+  void GetInputDevices(std::vector<int>* devices_list);
+
+  // Get the list of connected output devices.
+  //
+  // |devices_list| is the vector to copy the list of connected output devices
+  // to.
+  void GetOutputDevices(std::vector<int>* devices_list);
+
+  // Set device.
+  //
+  // |usage| is an int of type audio_policy_force_use_t
+  // |config| is an int of type audio_policy_forced_cfg_t.
+  //
+  // Returns 0 on success and errno on failure.
+  int SetDevice(audio_policy_force_use_t usage,
+                audio_policy_forced_cfg_t config);
+
+  // Enum used to represent whether devices are being connected or not. This is
+  // used when triggering callbacks.
+  enum DeviceConnectionState {
+    kDevicesConnected,
+    kDevicesDisconnected
+  };
+
+  // Register a callback function to call when device state changes.
+  //
+  // |callback| is an object of type base::Callback that accepts a
+  // DeviceConnectionState and a vector of ints. See DeviceCallback() in
+  // audio_daemon.h.
+  void RegisterDeviceCallback(
+      base::Callback<void(DeviceConnectionState,
+                          const std::vector<int>& )>& callback);
+
+ private:
+  friend class AudioDeviceHandlerTest;
+  friend class AudioVolumeHandler;
+  friend class AudioVolumeHandlerTest;
+  FRIEND_TEST(AudioDeviceHandlerTest,
+              DisconnectAllSupportedDevicesCallsDisconnect);
+  FRIEND_TEST(AudioDeviceHandlerTest, InitCallsDisconnectAllSupportedDevices);
+  FRIEND_TEST(AudioDeviceHandlerTest, InitialAudioStateMic);
+  FRIEND_TEST(AudioDeviceHandlerTest, InitialAudioStateHeadphone);
+  FRIEND_TEST(AudioDeviceHandlerTest, InitialAudioStateHeadset);
+  FRIEND_TEST(AudioDeviceHandlerTest, InitialAudioStateNone);
+  FRIEND_TEST(AudioDeviceHandlerTest, InitialAudioStateInvalid);
+  FRIEND_TEST(AudioDeviceHandlerTest, ProcessEventEmpty);
+  FRIEND_TEST(AudioDeviceHandlerTest, ProcessEventMicrophonePresent);
+  FRIEND_TEST(AudioDeviceHandlerTest, ProcessEventHeadphonePresent);
+  FRIEND_TEST(AudioDeviceHandlerTest, ProcessEventMicrophoneNotPresent);
+  FRIEND_TEST(AudioDeviceHandlerTest, ProcessEventHeadphoneNotPresent);
+  FRIEND_TEST(AudioDeviceHandlerTest, ProcessEventInvalid);
+  FRIEND_TEST(AudioDeviceHandlerTest, UpdateAudioSystemNone);
+  FRIEND_TEST(AudioDeviceHandlerTest, UpdateAudioSystemConnectMic);
+  FRIEND_TEST(AudioDeviceHandlerTest, UpdateAudioSystemConnectHeadphone);
+  FRIEND_TEST(AudioDeviceHandlerTest, UpdateAudioSystemConnectHeadset);
+  FRIEND_TEST(AudioDeviceHandlerTest, UpdateAudioSystemDisconnectMic);
+  FRIEND_TEST(AudioDeviceHandlerTest, UpdateAudioSystemDisconnectHeadphone);
+  FRIEND_TEST(AudioDeviceHandlerTest, UpdateAudioSystemDisconnectHeadset);
+  FRIEND_TEST(AudioDeviceHandlerTest, ConnectAudioDeviceInput);
+  FRIEND_TEST(AudioDeviceHandlerTest, ConnectAudioDeviceOutput);
+  FRIEND_TEST(AudioDeviceHandlerTest, DisconnectAudioDeviceInput);
+  FRIEND_TEST(AudioDeviceHandlerTest, DisconnectAudioDeviceOutput);
+  FRIEND_TEST(AudioVolumeHandlerTest, FileGeneration);
+
+  // Read the initial state of audio devices in /sys/class/* and update
+  // the audio policy service.
+  //
+  // |path| is the file that contains the initial audio jack state.
+  void GetInitialAudioDeviceState(const base::FilePath& path);
+
+  // Update the audio policy service once an input_event has completed.
+  //
+  // |headphone| is true if headphones are connected.
+  // |microphone| is true if microphones are connected.
+  void UpdateAudioSystem(bool headphone, bool microphone);
+
+  // Notify the audio policy service that this device has been removed.
+  //
+  // |device| is the audio device whose state is to be changed.
+  // |state| is the current state of |device|.
+  virtual void NotifyAudioPolicyService(audio_devices_t device,
+                                        audio_policy_dev_state_t state);
+
+  // Connect an audio device by calling aps and add it to the appropriate set
+  // (either connected_input_devices_ or connected_output_devices_).
+  //
+  // |device| is the audio device that has been added.
+  void ConnectAudioDevice(audio_devices_t device);
+
+  // Disconnect an audio device by calling aps and remove it from the
+  // appropriate set (either connected_input_devices_ or
+  // connected_output_devices_).
+  //
+  // |device| is the audio device that has been disconnected.
+  void DisconnectAudioDevice(audio_devices_t device);
+
+  // Disconnect all connected audio devices.
+  void DisconnectAllConnectedDevices();
+
+  // Disconnect all supported audio devices.
+  void DisconnectAllSupportedDevices();
+
+  // Trigger a callback when a device is either connected or disconnected.
+  //
+  // |state| is kDevicesConnected when |devices| are being connected.
+  virtual void TriggerCallback(DeviceConnectionState state);
+
+  // All input devices currently supported by AudioDeviceHandler.
+  static const std::vector<audio_devices_t> kSupportedInputDevices_;
+  // All output devices currently supported by AudioDeviceHandler.
+  static const std::vector<audio_devices_t> kSupportedOutputDevices_;
+
+ protected:
+  // Set of connected input devices.
+  std::set<audio_devices_t> connected_input_devices_;
+  // Set of connected output devices.
+  std::set<audio_devices_t> connected_output_devices_;
+  // Vector of devices changed (used for callbacks to clients).
+  std::vector<int> changed_devices_;
+  // Keeps track of whether a headphone has been connected. Used by ProcessEvent
+  // and UpdateAudioSystem.
+  bool headphone_;
+  // Keeps track of whether a microphone has been connected. Used by
+  // ProcessEvent and UpdateAudioSystem.
+  bool microphone_;
+  // Callback object to call when device state changes.
+  base::Callback<void(DeviceConnectionState,
+                      const std::vector<int>& )> callback_;
+};
+
+}  // namespace brillo
+
+#endif  // BRILLO_AUDIO_AUDIOSERVICE_AUDIO_DEVICE_HANDLER_H_
diff --git a/media/brillo/audio/audioservice/audio_service_callback.cpp b/media/brillo/audio/audioservice/audio_service_callback.cpp
new file mode 100644
index 0000000..3baee23
--- /dev/null
+++ b/media/brillo/audio/audioservice/audio_service_callback.cpp
@@ -0,0 +1,78 @@
+// Copyright 2016 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+// Implementation of audio_service_callback.
+
+#include "audio_service_callback.h"
+
+#include <base/bind.h>
+#include <base/logging.h>
+
+#include "brillo_audio_client_helpers.h"
+#include "brillo_audio_device_info_def.h"
+
+using android::binder::Status;
+
+namespace brillo {
+
+AudioServiceCallback::AudioServiceCallback(const BAudioCallback* callback,
+                                           void* user_data) {
+  connected_callback_ = base::Bind(callback->OnAudioDeviceAdded);
+  disconnected_callback_ = base::Bind(callback->OnAudioDeviceRemoved);
+  volume_callback_ = base::Bind(callback->OnVolumeChanged);
+  user_data_ = user_data;
+}
+
+Status AudioServiceCallback::OnAudioDevicesConnected(
+    const std::vector<int>& devices) {
+  for (auto device : devices) {
+    BAudioDeviceInfo device_info;
+    device_info.internal_ = std::unique_ptr<BAudioDeviceInfoInternal>(
+        BAudioDeviceInfoInternal::CreateFromAudioDevicesT(device));
+    connected_callback_.Run(&device_info, user_data_);
+  }
+  return Status::ok();
+}
+
+Status AudioServiceCallback::OnAudioDevicesDisconnected(
+    const std::vector<int>& devices) {
+  for (auto device : devices) {
+    BAudioDeviceInfo device_info;
+    device_info.internal_ = std::unique_ptr<BAudioDeviceInfoInternal>(
+        BAudioDeviceInfoInternal::CreateFromAudioDevicesT(device));
+    disconnected_callback_.Run(&device_info, user_data_);
+  }
+  return Status::ok();
+}
+
+Status AudioServiceCallback::OnVolumeChanged(int stream,
+                                             int previous_index,
+                                             int current_index) {
+  auto usage = BrilloAudioClientHelpers::GetBAudioUsage(
+      static_cast<audio_stream_type_t>(stream));
+  volume_callback_.Run(usage, previous_index, current_index, user_data_);
+  return Status::ok();
+}
+
+bool AudioServiceCallback::Equals(const android::sp<AudioServiceCallback>& callback) {
+  if (callback->connected_callback_.Equals(connected_callback_) &&
+      callback->disconnected_callback_.Equals(disconnected_callback_) &&
+      callback->volume_callback_.Equals(volume_callback_) &&
+      callback->user_data_ == user_data_)
+    return true;
+  return false;
+}
+
+}  // namespace brillo
diff --git a/media/brillo/audio/audioservice/audio_service_callback.h b/media/brillo/audio/audioservice/audio_service_callback.h
new file mode 100644
index 0000000..3a5a289
--- /dev/null
+++ b/media/brillo/audio/audioservice/audio_service_callback.h
@@ -0,0 +1,80 @@
+// Copyright 2016 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+// Callback object to be passed to brilloaudioservice.
+
+#ifndef BRILLO_AUDIO_AUDIOSERVICE_AUDIO_SERVICE_CALLBACK_H_
+#define BRILLO_AUDIO_AUDIOSERVICE_AUDIO_SERVICE_CALLBACK_H_
+
+#include <vector>
+
+#include <base/callback.h>
+#include <binder/Status.h>
+
+#include "android/brillo/brilloaudioservice/BnAudioServiceCallback.h"
+#include "include/brillo_audio_manager.h"
+
+using android::binder::Status;
+using android::brillo::brilloaudioservice::BnAudioServiceCallback;
+
+namespace brillo {
+
+class AudioServiceCallback : public BnAudioServiceCallback {
+ public:
+  // Constructor for AudioServiceCallback.
+  //
+  // |callback| is an object of type BAudioCallback.
+  // |user_data| is an object to be passed to the callbacks.
+  AudioServiceCallback(const BAudioCallback* callback, void* user_data);
+
+  // Callback function triggered when a device is connected.
+  //
+  // |devices| is a vector of audio_devices_t.
+  Status OnAudioDevicesConnected(const std::vector<int>& devices);
+
+  // Callback function triggered when a device is disconnected.
+  //
+  // |devices| is a vector of audio_devices_t.
+  Status OnAudioDevicesDisconnected(const std::vector<int>& devices);
+
+  // Callback function triggered when volume is changed.
+  //
+  // |stream| is an int representing the stream.
+  // |previous_index| is the volume index before the key press.
+  // |current_index| is the volume index after the key press.
+  Status OnVolumeChanged(int stream, int previous_index, int current_index);
+
+  // Method to compare two AudioServiceCallback objects.
+  //
+  // |callback| is a ref counted pointer to a AudioServiceCallback object to be
+  // compared with this.
+  //
+  // Returns true if |callback| equals this.
+  bool Equals(const android::sp<AudioServiceCallback>& callback);
+
+ private:
+  // Callback when devices are connected.
+  base::Callback<void(const BAudioDeviceInfo*, void*)> connected_callback_;
+  // Callback when devices are disconnected.
+  base::Callback<void(const BAudioDeviceInfo*, void*)> disconnected_callback_;
+  // Callback when the volume button is pressed.
+  base::Callback<void(BAudioUsage, int, int, void*)> volume_callback_;
+  // User data passed to the callbacks.
+  void* user_data_;
+};
+
+}  // namespace brillo
+
+#endif  // BRILLO_AUDIO_AUDIOSERVICE_AUDIO_SERVICE_CALLBACK_H_
diff --git a/media/brillo/audio/audioservice/audio_volume_handler.cpp b/media/brillo/audio/audioservice/audio_volume_handler.cpp
new file mode 100644
index 0000000..d95b2c2
--- /dev/null
+++ b/media/brillo/audio/audioservice/audio_volume_handler.cpp
@@ -0,0 +1,236 @@
+// Copyright 2016 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+// Implementation of audio_volume_handler.h
+
+#include "audio_volume_handler.h"
+
+#include <base/files/file.h>
+#include <base/files/file_util.h>
+#include <base/logging.h>
+#include <brillo/map_utils.h>
+#include <brillo/message_loops/message_loop.h>
+#include <brillo/strings/string_utils.h>
+
+#include "audio_device_handler.h"
+
+namespace brillo {
+
+static const char kVolumeStateFilePath[] =
+    "/data/misc/brilloaudioservice/volume.dat";
+
+AudioVolumeHandler::AudioVolumeHandler() {
+  // Start every supported stream at the default step size; callers can
+  // change this later via SetVolumeMaxSteps().
+  for (auto stream : kSupportedStreams_) {
+    step_sizes_.emplace(stream, kDefaultStepSize_);
+  }
+  // No stream is selected for the volume buttons until
+  // SetVolumeControlStream() is called.
+  selected_stream_ = AUDIO_STREAM_DEFAULT;
+  volume_state_file_ = base::FilePath(kVolumeStateFilePath);
+}
+
+AudioVolumeHandler::~AudioVolumeHandler() {}
+
+// Drop the reference to the audio policy service once it has died.
+void AudioVolumeHandler::APSDisconnect() { aps_.clear(); }
+
+// Re-attach to a (re)started audio policy service and push the persisted
+// volume state back to it.
+void AudioVolumeHandler::APSConnect(
+    android::sp<android::IAudioPolicyService> aps) {
+  aps_ = aps;
+  InitAPSAllStreams();
+}
+
+// Store the callback that TriggerCallback() posts whenever a volume key
+// press changes a stream's index.
+void AudioVolumeHandler::RegisterCallback(
+    base::Callback<void(audio_stream_type_t, int, int)>& callback) {
+  callback_ = callback;
+}
+
+// Map an internal index (0..kMaxIndex_) onto the user-defined scale for
+// |stream|. The int/double quotient is truncated to an int on return.
+int AudioVolumeHandler::ConvertToUserDefinedIndex(audio_stream_type_t stream,
+                                                  int index) {
+  return index / step_sizes_[stream];
+}
+
+// Map a user-defined index back onto the internal 0..kMaxIndex_ scale.
+// The int*double product is truncated to an int on return.
+int AudioVolumeHandler::ConvertToInternalIndex(audio_stream_type_t stream,
+                                               int index) {
+  return index * step_sizes_[stream];
+}
+
+// Post |callback_| on the message loop with |previous_index| and
+// |current_index| converted from the internal scale to the user-defined
+// scale for |stream|.
+void AudioVolumeHandler::TriggerCallback(audio_stream_type_t stream,
+                                         int previous_index,
+                                         int current_index) {
+  int user_defined_previous_index =
+      ConvertToUserDefinedIndex(stream, previous_index);
+  int user_defined_current_index =
+      ConvertToUserDefinedIndex(stream, current_index);
+  // Run the callback asynchronously so the input event path is not blocked
+  // by client code.
+  MessageLoop::current()->PostTask(base::Bind(callback_,
+                                              stream,
+                                              user_defined_previous_index,
+                                              user_defined_current_index));
+}
+
+// Build the volume key-value store from scratch: every supported
+// (stream, output device) pair starts at kDefaultCurrentIndex_, then the
+// store is saved to volume_state_file_.
+void AudioVolumeHandler::GenerateVolumeFile() {
+  for (auto stream : kSupportedStreams_) {
+    for (auto device : AudioDeviceHandler::kSupportedOutputDevices_) {
+      PersistVolumeConfiguration(stream, device, kDefaultCurrentIndex_);
+    }
+  }
+  // Best effort: on failure the defaults are still live in kv_store_ and a
+  // save is retried on the next volume change.
+  if (!kv_store_->Save(volume_state_file_)) {
+    LOG(ERROR) << "Could not save volume data file!";
+  }
+}
+
+// Return the user-defined maximum step count for |stream| (the internal
+// maximum divided by the stream's step size).
+int AudioVolumeHandler::GetVolumeMaxSteps(audio_stream_type_t stream) {
+  return ConvertToUserDefinedIndex(stream, kMaxIndex_);
+}
+
+// Set the number of user-visible volume steps for |stream|.
+//
+// |max_steps| must be in (kMinIndex_, kMaxIndex_].
+//
+// Returns 0 on success and EINVAL on an out-of-range |max_steps|.
+int AudioVolumeHandler::SetVolumeMaxSteps(audio_stream_type_t stream,
+                                          int max_steps) {
+  if (max_steps <= kMinIndex_ || max_steps > kMaxIndex_)
+    return EINVAL;
+  // Divide in floating point: the all-int expression kMaxIndex_ / max_steps
+  // truncates before being stored in the double-valued map, so e.g.
+  // max_steps == 40 would give a step size of 2 and GetVolumeMaxSteps()
+  // would then report 50 instead of 40.
+  step_sizes_[stream] = static_cast<double>(kMaxIndex_) / max_steps;
+  return 0;
+}
+
+// Look up the internal (audio policy) volume index for the (stream, device)
+// tuple in the key-value store.
+//
+// NOTE(review): the GetString() result is unchecked; if the key is absent,
+// std::stoi("") throws std::invalid_argument. Assumes every supported tuple
+// was populated by GenerateVolumeFile()/Load() — TODO confirm.
+int AudioVolumeHandler::GetVolumeCurrentIndex(audio_stream_type_t stream,
+                                              audio_devices_t device) {
+  auto key = kCurrentIndexKey_ + "." + string_utils::ToString(stream) + "." +
+             string_utils::ToString(device);
+  std::string value;
+  kv_store_->GetString(key, &value);
+  return std::stoi(value);
+}
+
+// Same lookup as GetVolumeCurrentIndex() but converted onto the
+// user-defined index scale for |stream|.
+int AudioVolumeHandler::GetVolumeIndex(audio_stream_type_t stream,
+                                       audio_devices_t device) {
+  return ConvertToUserDefinedIndex(stream,
+                                   GetVolumeCurrentIndex(stream, device));
+}
+
+// Set the volume for the (stream, device) tuple from a user-defined |index|,
+// persist it and notify the registered callback.
+//
+// Returns 0 on success and EINVAL when |index| is outside
+// [kMinIndex_, user-defined max].
+//
+// NOTE(review): unlike AdjustStreamVolume(), this does not push the new
+// index to the audio policy service — confirm whether that is intended.
+int AudioVolumeHandler::SetVolumeIndex(audio_stream_type_t stream,
+                                       audio_devices_t device,
+                                       int index) {
+  if (index < kMinIndex_ ||
+      index > ConvertToUserDefinedIndex(stream, kMaxIndex_))
+    return EINVAL;
+  int previous_index = GetVolumeCurrentIndex(stream, device);
+  int current_absolute_index = ConvertToInternalIndex(stream, index);
+  PersistVolumeConfiguration(stream, device, current_absolute_index);
+  TriggerCallback(stream, previous_index, current_absolute_index);
+  return 0;
+}
+
+// Write the internal |index| for the (stream, device) tuple into the
+// key-value store and save the store to disk.
+//
+// NOTE(review): the Save() return value is ignored here, unlike in
+// GenerateVolumeFile() which logs on failure.
+void AudioVolumeHandler::PersistVolumeConfiguration(audio_stream_type_t stream,
+                                                    audio_devices_t device,
+                                                    int index) {
+  auto key = kCurrentIndexKey_ + "." + string_utils::ToString(stream) + "." +
+             string_utils::ToString(device);
+  kv_store_->SetString(key, string_utils::ToString(index));
+  kv_store_->Save(volume_state_file_);
+}
+
+// Push the volume state to the audio policy service: declare the index
+// range for every supported stream and apply the persisted per-device
+// indices.
+void AudioVolumeHandler::InitAPSAllStreams() {
+  for (auto stream : kSupportedStreams_) {
+    aps_->initStreamVolume(stream, kMinIndex_, kMaxIndex_);
+    for (auto device : AudioDeviceHandler::kSupportedOutputDevices_) {
+      int current_index = GetVolumeCurrentIndex(stream, device);
+      aps_->setStreamVolumeIndex(stream, current_index, device);
+    }
+  }
+}
+
+// Override the volume state file location (test hook only).
+void AudioVolumeHandler::SetVolumeFilePathForTesting(
+    const base::FilePath& path) {
+  volume_state_file_ = path;
+}
+
+// Initialize the handler: create the key-value store, load the persisted
+// volume state (generating defaults when the file is missing or fails to
+// load) and push the state to the audio policy service.
+//
+// |aps| is a pointer to the audio policy service binder object.
+void AudioVolumeHandler::Init(android::sp<android::IAudioPolicyService> aps) {
+  aps_ = aps;
+  kv_store_ = std::unique_ptr<KeyValueStore>(new KeyValueStore());
+  if (!base::PathExists(volume_state_file_)) {
+    // Generate key-value store and save it to a file.
+    GenerateVolumeFile();
+  } else {
+    // Load the file. If loading fails, generate the file.
+    if (!kv_store_->Load(volume_state_file_)) {
+      LOG(ERROR) << "Could not load volume data file!";
+      GenerateVolumeFile();
+    }
+  }
+  // Inform APS.
+  InitAPSAllStreams();
+}
+
+// Return the stream currently bound to the volume keys
+// (AUDIO_STREAM_DEFAULT when none has been selected).
+audio_stream_type_t AudioVolumeHandler::GetVolumeControlStream() {
+  return selected_stream_;
+}
+
+// Select the stream the volume keys should control. Passing
+// AUDIO_STREAM_DEFAULT reverts to adjusting the first active stream.
+void AudioVolumeHandler::SetVolumeControlStream(audio_stream_type_t stream) {
+  selected_stream_ = stream;
+}
+
+// Compute the new internal volume index for |stream|: move |direction| user
+// steps away from |previous_index| and clamp the result to
+// [kMinIndex_, kMaxIndex_].
+int AudioVolumeHandler::GetNewVolumeIndex(int previous_index, int direction,
+                                          audio_stream_type_t stream) {
+  int current_index =
+      previous_index + ConvertToInternalIndex(stream, direction);
+  if (current_index < kMinIndex_) {
+    return kMinIndex_;
+  } else if (current_index > kMaxIndex_) {
+    return kMaxIndex_;
+  } else
+    return current_index;
+}
+
+// Adjust the volume of |stream| by |direction| steps on the device
+// currently routed for that stream, persist the new index, and notify the
+// registered callback.
+//
+// |stream| is the stream to adjust.
+// |direction| is +1 for volume up and -1 for volume down.
+void AudioVolumeHandler::AdjustStreamVolume(audio_stream_type_t stream,
+                                            int direction) {
+  // Log the stream actually being adjusted, not selected_stream_ (which may
+  // be AUDIO_STREAM_DEFAULT when invoked via AdjustVolumeActiveStreams()).
+  VLOG(1) << "Adjusting volume of stream " << stream
+          << " in direction " << direction;
+  auto device = aps_->getDevicesForStream(stream);
+  int previous_index = GetVolumeCurrentIndex(stream, device);
+  int current_index = GetNewVolumeIndex(previous_index, direction, stream);
+  VLOG(1) << "Current index is " << current_index << " for stream " << stream
+          << " and device " << device;
+  aps_->setStreamVolumeIndex(stream, current_index, device);
+  // Persist under |stream|, not selected_stream_: when this runs for an
+  // active stream, selected_stream_ is AUDIO_STREAM_DEFAULT and the new
+  // index would be stored under the wrong key, leaving the adjusted
+  // stream's persisted volume stale.
+  PersistVolumeConfiguration(stream, device, current_index);
+  TriggerCallback(stream, previous_index, current_index);
+}
+
+// Adjust volume in |direction|: if a stream was selected via
+// SetVolumeControlStream(), adjust only that stream; otherwise adjust the
+// first active stream in the priority order defined by kSupportedStreams_.
+//
+// NOTE(review): when no stream is selected and none is active, nothing is
+// adjusted and no callback fires — confirm against the header comment that
+// claims a callback is always triggered.
+void AudioVolumeHandler::AdjustVolumeActiveStreams(int direction) {
+  if (selected_stream_ != AUDIO_STREAM_DEFAULT) {
+    AdjustStreamVolume(selected_stream_, direction);
+    return;
+  }
+  for (auto stream : kSupportedStreams_) {
+    if (aps_->isStreamActive(stream)) {
+      AdjustStreamVolume(stream, direction);
+      return;
+    }
+  }
+}
+
+// Handle one kernel input event, reacting only to the volume keys; every
+// other event type or key code is ignored.
+void AudioVolumeHandler::ProcessEvent(const struct input_event& event) {
+  VLOG(1) << event.type << " " << event.code << " " << event.value;
+  if (event.type != EV_KEY) {
+    return;
+  }
+  if (event.code == KEY_VOLUMEDOWN) {
+    AdjustVolumeActiveStreams(-1);
+  } else if (event.code == KEY_VOLUMEUP) {
+    AdjustVolumeActiveStreams(1);
+  }
+  // Any other key code is not supported by this handler.
+}
+
+}  // namespace brillo
diff --git a/media/brillo/audio/audioservice/audio_volume_handler.h b/media/brillo/audio/audioservice/audio_volume_handler.h
new file mode 100644
index 0000000..fb95c2f
--- /dev/null
+++ b/media/brillo/audio/audioservice/audio_volume_handler.h
@@ -0,0 +1,248 @@
+// Copyright 2016 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+// Handler for input events in /dev/input. AudioVolumeHandler handles events
+// only for volume key presses.
+
+#ifndef BRILLO_AUDIO_AUDIOSERVICE_AUDIO_VOLUME_HANDLER_H_
+#define BRILLO_AUDIO_AUDIOSERVICE_AUDIO_VOLUME_HANDLER_H_
+
+#include <base/bind.h>
+#include <base/files/file_path.h>
+#include <brillo/key_value_store.h>
+#include <gtest/gtest_prod.h>
+#include <linux/input.h>
+#include <media/IAudioPolicyService.h>
+#include <system/audio.h>
+
+#include "audio_daemon_handler.h"
+
+namespace brillo {
+
+class AudioVolumeHandler : public AudioDaemonHandler {
+ public:
+  AudioVolumeHandler();
+  virtual ~AudioVolumeHandler();
+
+  // Initialize the handler: load the persisted volume state (generating
+  // defaults when the state file is missing or unreadable) and push the
+  // per-stream volume indices to the audio policy service.
+  //
+  // |aps| is a pointer to the binder object.
+  virtual void Init(android::sp<android::IAudioPolicyService> aps) override;
+
+  // Process input events from the kernel. Only volume key presses are acted
+  // upon; this function gracefully handles input events that are not
+  // relevant to the functionality provided by this class.
+  //
+  // |event| is a reference to an input_event.
+  virtual void ProcessEvent(const struct input_event& event) override;
+
+  // Inform the handler that the audio policy service has been disconnected.
+  virtual void APSDisconnect() override;
+
+  // Inform the handler that the audio policy service is reconnected.
+  //
+  // |aps| is a pointer to the binder object.
+  virtual void APSConnect(
+      android::sp<android::IAudioPolicyService> aps) override;
+
+  // Get the stream used when volume buttons are pressed.
+  //
+  // Returns an audio_stream_type_t representing the stream. If
+  // SetVolumeControlStream isn't called before calling this method,
+  // AUDIO_STREAM_DEFAULT is returned.
+  audio_stream_type_t GetVolumeControlStream();
+
+  // Set the stream to use when volume buttons are pressed.
+  //
+  // |stream| is an int representing the stream. Passing AUDIO_STREAM_DEFAULT
+  // to this method can be used to reset selected_stream_.
+  void SetVolumeControlStream(audio_stream_type_t stream);
+
+  // Register a callback to be triggered when keys are pressed.
+  //
+  // |callback| is an object of type base::Callback.
+  void RegisterCallback(
+      base::Callback<void(audio_stream_type_t, int, int)>& callback);
+
+  // Set the max steps for an audio stream.
+  //
+  // |stream| is an int representing the stream.
+  // |max_steps| is an int representing the maximum number of steps to set
+  // for |stream|.
+  //
+  // Returns 0 on success and errno on failure.
+  int SetVolumeMaxSteps(audio_stream_type_t stream, int max_steps);
+
+  // Get the max steps for an audio stream.
+  //
+  // |stream| is an int representing the stream.
+  //
+  // Returns the maximum possible index for |stream|.
+  int GetVolumeMaxSteps(audio_stream_type_t stream);
+
+  // Get the volume of a given key.
+  //
+  // |stream| is an int representing the stream.
+  // |device| is an int representing the device.
+  //
+  // Returns an int which corresponds to the current index.
+  int GetVolumeCurrentIndex(audio_stream_type_t stream, audio_devices_t device);
+
+  // Set the volume for a given (stream, device) tuple.
+  //
+  // |stream| is an int representing the stream.
+  // |device| is an int representing the device.
+  // |index| is an int representing the volume.
+  //
+  // Returns 0 on success and errno on failure.
+  int SetVolumeIndex(audio_stream_type_t stream,
+                     audio_devices_t device,
+                     int index);
+
+  // Get the volume for a given (stream, device) tuple.
+  //
+  // |stream| is an int representing the stream.
+  // |device| is an int representing the device.
+  //
+  // Returns the index for the (stream, device) tuple. This index is between 0
+  // and the user defined maximum value.
+  int GetVolumeIndex(audio_stream_type_t stream, audio_devices_t device);
+
+  // Update the volume index for a given stream.
+  //
+  // |previous_index| is the current index of the stream/device tuple before
+  // the volume button is pressed.
+  // |direction| is an int which is multiplied by the stream's step size. +1
+  // for volume up and -1 for volume down.
+  // |stream| is an int representing the stream.
+  //
+  // Returns the new volume index.
+  int GetNewVolumeIndex(int previous_index, int direction,
+                        audio_stream_type_t stream);
+
+  // Adjust the volume of the active streams in the direction indicated. If
+  // SetVolumeControlStream() has selected a stream, then only the volume for
+  // that stream will be changed. A callback is triggered whenever a stream's
+  // volume is adjusted.
+  //
+  // |direction| is an int which is multiplied by the stream's step size. +1
+  // for volume up and -1 for volume down.
+  virtual void AdjustVolumeActiveStreams(int direction);
+
+ private:
+  friend class AudioVolumeHandlerTest;
+  FRIEND_TEST(AudioVolumeHandlerTest, FileGeneration);
+  FRIEND_TEST(AudioVolumeHandlerTest, GetVolumeForStreamDeviceTuple);
+  FRIEND_TEST(AudioVolumeHandlerTest, SetVolumeForStreamDeviceTuple);
+  FRIEND_TEST(AudioVolumeHandlerTest, InitNoFile);
+  FRIEND_TEST(AudioVolumeHandlerTest, InitFilePresent);
+  FRIEND_TEST(AudioVolumeHandlerTest, ProcessEventEmpty);
+  FRIEND_TEST(AudioVolumeHandlerTest, ProcessEventKeyUp);
+  FRIEND_TEST(AudioVolumeHandlerTest, ProcessEventKeyDown);
+  FRIEND_TEST(AudioVolumeHandlerTest, SelectStream);
+  FRIEND_TEST(AudioVolumeHandlerTest, ComputeNewVolume);
+  FRIEND_TEST(AudioVolumeHandlerTest, GetSetVolumeIndex);
+
+  // Save the volume for a given (stream, device) tuple.
+  //
+  // |stream| is an int representing the stream.
+  // |device| is an int representing the device.
+  // |index| is an int representing the volume.
+  void PersistVolumeConfiguration(audio_stream_type_t stream,
+                                  audio_devices_t device,
+                                  int index);
+
+  // Read the initial volume of audio streams.
+  //
+  // |path| is the file that contains the initial volume state.
+  //
+  // NOTE(review): no definition for this method is visible in
+  // audio_volume_handler.cpp — possibly a dead declaration; confirm.
+  void GetInitialVolumeState(const base::FilePath& path);
+
+  // Adjust the volume of a given stream in the direction specified.
+  //
+  // |stream| is an int representing the stream.
+  // |direction| is an int which is multiplied by the stream's step size. +1
+  // for volume up and -1 for volume down.
+  void AdjustStreamVolume(audio_stream_type_t stream, int direction);
+
+  // Set the file path for testing.
+  //
+  // |path| to use while running tests.
+  void SetVolumeFilePathForTesting(const base::FilePath& path);
+
+  // Initialize all the streams in the audio policy service.
+  virtual void InitAPSAllStreams();
+
+  // Generate the volume config file.
+  void GenerateVolumeFile();
+
+  // Trigger a callback when a volume button is pressed.
+  //
+  // |stream| is an audio_stream_type_t representing the stream.
+  // |previous_index| is the volume index before the key press. This is an
+  // absolute index from 0 - 100.
+  // |current_index| is the volume index after the key press. This is an
+  // absolute index from 0 - 100.
+  virtual void TriggerCallback(audio_stream_type_t stream,
+                               int previous_index,
+                               int current_index);
+
+  // Convert internal index to user defined index scale.
+  //
+  // |stream| is an audio_stream_type_t representing the stream.
+  // |index| is the volume index before the key press. This is an absolute
+  // index from 0 - 100.
+  //
+  // Returns an int between 0 and the user defined max.
+  int ConvertToUserDefinedIndex(audio_stream_type_t stream, int index);
+
+  // Convert user defined index to internal index scale.
+  //
+  // |stream| is an audio_stream_type_t representing the stream.
+  // |index| is the volume index before the key press. This is an index from 0
+  // and the user defined max.
+  //
+  // Returns an int between 0 and 100.
+  int ConvertToInternalIndex(audio_stream_type_t stream, int index);
+
+  // Stream to use for volume control.
+  audio_stream_type_t selected_stream_;
+  // File backed key-value store of the current index (as seen by the audio
+  // policy service).
+  std::unique_ptr<KeyValueStore> kv_store_;
+  // Supported stream names. The order of this vector defines the priority from
+  // high to low.
+  std::vector<audio_stream_type_t> kSupportedStreams_{
+      AUDIO_STREAM_ALARM, AUDIO_STREAM_NOTIFICATION, AUDIO_STREAM_SYSTEM,
+      AUDIO_STREAM_MUSIC};
+  // Step size for each stream. This is used to translate between user defined
+  // stream ranges and the range as seen by audio policy service. This value is
+  // not file-backed and is intended to be re-applied by the user on reboots and
+  // brilloaudioservice crashes.
+  std::map<audio_stream_type_t, double> step_sizes_;
+  // Callback to call when volume buttons are pressed.
+  base::Callback<void(audio_stream_type_t, int, int)> callback_;
+  // Key indices.
+  const std::string kCurrentIndexKey_ = "current_index";
+  // Default values.
+  const int kMinIndex_ = 0;
+  const int kDefaultCurrentIndex_ = 30;
+  const int kMaxIndex_ = 100;
+  const int kDefaultStepSize_ = 1;
+  base::FilePath volume_state_file_;
+};
+
+}  // namespace brillo
+
+#endif  // BRILLO_AUDIO_AUDIOSERVICE_AUDIO_VOLUME_HANDLER_H_
diff --git a/media/brillo/audio/audioservice/brillo_audio_client.cpp b/media/brillo/audio/audioservice/brillo_audio_client.cpp
new file mode 100644
index 0000000..f347c56
--- /dev/null
+++ b/media/brillo/audio/audioservice/brillo_audio_client.cpp
@@ -0,0 +1,224 @@
+// Copyright 2016 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+// Implementation of brillo_audio_client.h
+
+#include "brillo_audio_client.h"
+
+#include <base/logging.h>
+#include <binder/Status.h>
+#include <binderwrapper/binder_wrapper.h>
+
+#include "brillo_audio_client_helpers.h"
+#include "brillo_audio_device_info_def.h"
+#include "brillo_audio_device_info_internal.h"
+
+using android::binder::Status;
+
+namespace brillo {
+
+// Name used to look up the brillo audio service with the binder service
+// manager.
+static const char kBrilloAudioServiceName[] =
+    "android.brillo.brilloaudioservice.BrilloAudioService";
+
+// Singleton instance, created lazily by GetClientInstance().
+std::shared_ptr<BrilloAudioClient> BrilloAudioClient::instance_ = nullptr;
+
+// Monotonically increasing token handed out by RegisterAudioCallback().
+// Starts at 1 so that 0 can mean "no callback".
+int BrilloAudioClient::callback_id_counter_ = 1;
+
+BrilloAudioClient::~BrilloAudioClient() {}
+
+// Lazily create and return the singleton client. On first use this connects
+// to the brillo audio service; on connection failure the singleton is reset
+// and the returned weak_ptr is expired.
+//
+// NOTE(review): not synchronized — assumes single-threaded use; confirm.
+std::weak_ptr<BrilloAudioClient> BrilloAudioClient::GetClientInstance() {
+  if (!instance_) {
+    instance_ = std::shared_ptr<BrilloAudioClient>(new BrilloAudioClient());
+    if (!instance_->Initialize()) {
+      LOG(ERROR) << "Could not Initialize the brillo audio client.";
+      instance_.reset();
+      return instance_;
+    }
+  }
+  return instance_;
+}
+
+// Look up |service_name| with the binder service manager and, when found,
+// arrange for |callback| to be run if the service dies.
+//
+// Returns the service binder, or null when the service is not registered.
+android::sp<android::IBinder> BrilloAudioClient::ConnectToService(
+    const std::string& service_name, const base::Closure& callback) {
+  android::BinderWrapper* binder_wrapper =
+      android::BinderWrapper::GetOrCreateInstance();
+  auto service = binder_wrapper->GetService(service_name);
+  if (!service.get()) {
+    return service;
+  }
+  binder_wrapper->RegisterForDeathNotifications(service, callback);
+  return service;
+}
+
+// Death recipient for the brillo audio service: drop the singleton so the
+// next GetClientInstance() call reconnects from scratch. No automatic
+// reconnection is attempted here.
+void BrilloAudioClient::OnBASDisconnect() {
+  LOG(WARNING) << "The brillo audio service died! Please reset the "
+               << "BAudioManager.";
+  instance_.reset();
+}
+
+// Connect to the brillo audio service and register OnBASDisconnect() as its
+// death callback.
+//
+// Returns true on success, false when the service cannot be found.
+bool BrilloAudioClient::Initialize() {
+  auto service = ConnectToService(
+      kBrilloAudioServiceName, base::Bind(&BrilloAudioClient::OnBASDisconnect,
+                                          weak_ptr_factory_.GetWeakPtr()));
+  if (!service.get()) {
+    LOG(ERROR) << "Could not connect to brillo audio service.";
+    return false;
+  }
+  brillo_audio_service_ = android::interface_cast<IBrilloAudioService>(service);
+  return true;
+}
+
+// Fetch the list of connected input or output devices from the brillo audio
+// service.
+//
+// |flag| selects inputs or outputs (GET_DEVICES_INPUTS/GET_DEVICES_OUTPUTS).
+// |devices| receives the audio_devices_t values as ints.
+//
+// Returns ECONNABORTED when the service connection is gone, otherwise the
+// service-specific error code (0 on success).
+int BrilloAudioClient::GetDevices(int flag, std::vector<int>& devices) {
+  if (!brillo_audio_service_.get()) {
+    OnBASDisconnect();
+    return ECONNABORTED;
+  }
+  auto status = brillo_audio_service_->GetDevices(flag, &devices);
+  return status.serviceSpecificErrorCode();
+}
+
+// Forward a "force use" device configuration request to the brillo audio
+// service. See system/audio_policy.h for |usage|/|config| semantics.
+//
+// Returns ECONNABORTED when the service connection is gone, otherwise the
+// service-specific error code (0 on success).
+int BrilloAudioClient::SetDevice(audio_policy_force_use_t usage,
+                                 audio_policy_forced_cfg_t config) {
+  if (!brillo_audio_service_.get()) {
+    OnBASDisconnect();
+    return ECONNABORTED;
+  }
+  auto status = brillo_audio_service_->SetDevice(usage, config);
+  return status.serviceSpecificErrorCode();
+}
+
+// Query the maximum number of volume steps for |usage| (translated to the
+// corresponding stream type) from the brillo audio service.
+//
+// Returns ECONNABORTED when the service connection is gone, otherwise the
+// service-specific error code (0 on success).
+int BrilloAudioClient::GetMaxVolumeSteps(BAudioUsage usage, int* max_steps) {
+  if (!brillo_audio_service_.get()) {
+    OnBASDisconnect();
+    return ECONNABORTED;
+  }
+  auto status = brillo_audio_service_->GetMaxVolumeSteps(
+      BrilloAudioClientHelpers::GetStreamType(usage), max_steps);
+  return status.serviceSpecificErrorCode();
+}
+
+// Set the maximum number of volume steps for |usage| via the brillo audio
+// service.
+//
+// Returns ECONNABORTED when the service connection is gone, otherwise the
+// service-specific error code (0 on success).
+int BrilloAudioClient::SetMaxVolumeSteps(BAudioUsage usage, int max_steps) {
+  if (!brillo_audio_service_.get()) {
+    OnBASDisconnect();
+    return ECONNABORTED;
+  }
+  auto status = brillo_audio_service_->SetMaxVolumeSteps(
+      BrilloAudioClientHelpers::GetStreamType(usage), max_steps);
+  return status.serviceSpecificErrorCode();
+}
+
+// Set the volume index for the (usage, device) pair via the brillo audio
+// service.
+//
+// Returns ECONNABORTED when the service connection is gone, otherwise the
+// service-specific error code (0 on success).
+int BrilloAudioClient::SetVolumeIndex(BAudioUsage usage,
+                                      audio_devices_t device,
+                                      int index) {
+  if (!brillo_audio_service_.get()) {
+    OnBASDisconnect();
+    return ECONNABORTED;
+  }
+  auto status = brillo_audio_service_->SetVolumeIndex(
+      BrilloAudioClientHelpers::GetStreamType(usage), device, index);
+  return status.serviceSpecificErrorCode();
+}
+
+// Read the volume index for the (usage, device) pair from the brillo audio
+// service.
+//
+// Returns ECONNABORTED when the service connection is gone, otherwise the
+// service-specific error code (0 on success).
+int BrilloAudioClient::GetVolumeIndex(BAudioUsage usage,
+                                      audio_devices_t device,
+                                      int* index) {
+  if (!brillo_audio_service_.get()) {
+    OnBASDisconnect();
+    return ECONNABORTED;
+  }
+  auto status = brillo_audio_service_->GetVolumeIndex(
+      BrilloAudioClientHelpers::GetStreamType(usage), device, index);
+  return status.serviceSpecificErrorCode();
+}
+
+// Read the stream currently bound to the volume keys and translate it into a
+// BAudioUsage for the caller.
+//
+// NOTE(review): |*usage| is written even when the service call fails; in
+// that case it is derived from an uninitialized |stream| — confirm.
+//
+// Returns ECONNABORTED when the service connection is gone, otherwise the
+// service-specific error code (0 on success).
+int BrilloAudioClient::GetVolumeControlStream(BAudioUsage* usage) {
+  if (!brillo_audio_service_.get()) {
+    OnBASDisconnect();
+    return ECONNABORTED;
+  }
+  int stream;
+  auto status = brillo_audio_service_->GetVolumeControlStream(&stream);
+  *usage = BrilloAudioClientHelpers::GetBAudioUsage(
+      static_cast<audio_stream_type_t>(stream));
+  return status.serviceSpecificErrorCode();
+}
+
+// Bind the volume keys to the stream corresponding to |usage|.
+//
+// Returns ECONNABORTED when the service connection is gone, otherwise the
+// service-specific error code (0 on success).
+int BrilloAudioClient::SetVolumeControlStream(BAudioUsage usage) {
+  if (!brillo_audio_service_.get()) {
+    OnBASDisconnect();
+    return ECONNABORTED;
+  }
+  auto status = brillo_audio_service_->SetVolumeControlStream(
+      BrilloAudioClientHelpers::GetStreamType(usage));
+  return status.serviceSpecificErrorCode();
+}
+
+// Ask the brillo audio service to raise the volume by one step.
+//
+// Returns ECONNABORTED when the service connection is gone, otherwise the
+// service-specific error code (0 on success).
+int BrilloAudioClient::IncrementVolume() {
+  if (!brillo_audio_service_.get()) {
+    OnBASDisconnect();
+    return ECONNABORTED;
+  }
+  auto status = brillo_audio_service_->IncrementVolume();
+  return status.serviceSpecificErrorCode();
+}
+
+// Ask the brillo audio service to lower the volume by one step.
+//
+// Returns ECONNABORTED when the service connection is gone, otherwise the
+// service-specific error code (0 on success).
+int BrilloAudioClient::DecrementVolume() {
+  if (!brillo_audio_service_.get()) {
+    OnBASDisconnect();
+    return ECONNABORTED;
+  }
+  auto status = brillo_audio_service_->DecrementVolume();
+  return status.serviceSpecificErrorCode();
+}
+
+// Register |callback| with the brillo audio service and hand back a non-zero
+// |callback_id| token for later unregistration (0 on failure).
+//
+// Returns 0 on success, EINVAL when the callback is already registered, and
+// ECONNABORTED when the service is unreachable.
+int BrilloAudioClient::RegisterAudioCallback(
+    android::sp<AudioServiceCallback> callback, int* callback_id) {
+  if (!brillo_audio_service_.get()) {
+    OnBASDisconnect();
+    return ECONNABORTED;
+  }
+  // Reject duplicates before talking to the service. The previous ordering
+  // registered the callback remotely first, so a duplicate stayed registered
+  // on the service side (firing twice) even though EINVAL was returned.
+  for (auto& entry : callback_map_) {
+    if (entry.second->Equals(callback)) {
+      LOG(ERROR) << "Callback has already been registered.";
+      *callback_id = 0;
+      return EINVAL;
+    }
+  }
+  if (!brillo_audio_service_->RegisterServiceCallback(callback).isOk()) {
+    *callback_id = 0;
+    return ECONNABORTED;
+  }
+  *callback_id = callback_id_counter_++;
+  callback_map_.emplace(*callback_id, callback);
+  return 0;
+}
+
+// Remove the callback registered under |callback_id| from the service and
+// from the local map.
+//
+// NOTE(review): the status of UnregisterServiceCallback() is ignored; the
+// local entry is erased regardless of the remote result.
+//
+// Returns 0 on success, EINVAL for an unknown id, ECONNABORTED when the
+// service connection is gone.
+int BrilloAudioClient::UnregisterAudioCallback(int callback_id) {
+  if (!brillo_audio_service_.get()) {
+    OnBASDisconnect();
+    return ECONNABORTED;
+  }
+  auto callback_elem = callback_map_.find(callback_id);
+  if (callback_elem == callback_map_.end()) {
+    // If we were passed an invalid callback_id, do nothing.
+    LOG(ERROR) << "Unregister called with invalid callback ID.";
+    return EINVAL;
+  }
+  brillo_audio_service_->UnregisterServiceCallback(callback_elem->second.get());
+  callback_map_.erase(callback_elem);
+  return 0;
+}
+
+}  // namespace brillo
diff --git a/media/brillo/audio/audioservice/brillo_audio_client.h b/media/brillo/audio/audioservice/brillo_audio_client.h
new file mode 100644
index 0000000..00c431a
--- /dev/null
+++ b/media/brillo/audio/audioservice/brillo_audio_client.h
@@ -0,0 +1,183 @@
+// Copyright 2016 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+// Client for the brilloaudioservice.
+
+#ifndef BRILLO_AUDIO_AUDIOSERVICE_BRILLO_AUDIO_CLIENT_H_
+#define BRILLO_AUDIO_AUDIOSERVICE_BRILLO_AUDIO_CLIENT_H_
+
+#include <map>
+#include <memory>
+#include <vector>
+
+#include <base/bind.h>
+#include <base/memory/weak_ptr.h>
+#include <gtest/gtest_prod.h>
+#include <media/IAudioPolicyService.h>
+
+#include "android/brillo/brilloaudioservice/IBrilloAudioService.h"
+#include "audio_service_callback.h"
+
+using android::brillo::brilloaudioservice::IBrilloAudioService;
+
+namespace brillo {
+
+class BrilloAudioClient {
+ public:
+  virtual ~BrilloAudioClient();
+
+  // Get or create a pointer to the client instance.
+  //
+  // Returns a weak_ptr to a BrilloAudioClient object.
+  static std::weak_ptr<BrilloAudioClient> GetClientInstance();
+
+  // Query brillo audio service to get list of connected audio devices.
+  //
+  // |flag| is an int which is either GET_DEVICES_INPUTS or GET_DEVICES_OUTPUTS.
+  // |devices| is a reference to a vector of audio_devices_t.
+  //
+  // Returns 0 on success and errno on failure.
+  int GetDevices(int flag, std::vector<int>& devices);
+
+  // Register a callback object with the service.
+  //
+  // |callback| is a ref pointer to a callback object to be register with the
+  // brillo audio service.
+  // |callback_id| is a pointer to an int that represents a callback id token on
+  // success and 0 on failure.
+  //
+  // Returns 0 on success and errno on failure.
+  int RegisterAudioCallback(android::sp<AudioServiceCallback> callback,
+                            int* callback_id);
+
+  // Unregister a callback object with the service.
+  //
+  // |callback_id| is an int referring to the callback object.
+  //
+  // Returns 0 on success and errno on failure.
+  int UnregisterAudioCallback(int callback_id);
+
+  // Set a device to be the default. This does not communicate with the brillo
+  // audio service but instead communicates directly with the audio policy
+  // service.
+  //
+  // Please see system/audio_policy.h for details on these arguments.
+  //
+  // Returns 0 on success and errno on failure.
+  int SetDevice(audio_policy_force_use_t usage,
+                audio_policy_forced_cfg_t config);
+
+  // Get the maximum number of steps for a given BAudioUsage.
+  //
+  // |usage| is an enum of type BAudioUsage.
+  // |max_steps| is a pointer to the maximum number of steps.
+  //
+  // Returns 0 on success and errno on failure.
+  int GetMaxVolumeSteps(BAudioUsage usage, int* max_steps);
+
+  // Set the maximum number of steps to use for a given BAudioUsage.
+  //
+  // |usage| is an enum of type BAudioUsage.
+  // |max_steps| is an int between 0 and 100.
+  //
+  // Returns 0 on success and errno on failure.
+  int SetMaxVolumeSteps(BAudioUsage usage, int max_steps);
+
+  // Set the volume index for a given BAudioUsage and device.
+  //
+  // |usage| is an enum of type BAudioUsage.
+  // |device| is of type audio_devices_t.
+  // |index| is an int representing the current index.
+  //
+  // Returns 0 on success and errno on failure.
+  int SetVolumeIndex(BAudioUsage usage, audio_devices_t device, int index);
+
+  // Get the volume index for a given BAudioUsage and device.
+  //
+  // |usage| is an enum of type BAudioUsage.
+  // |device| is of type audio_devices_t.
+  // |index| is a pointer to an int representing the current index.
+  //
+  // Returns 0 on success and errno on failure.
+  int GetVolumeIndex(BAudioUsage usage, audio_devices_t device, int* index);
+
+  // Get default stream to use for volume buttons.
+  //
+  // |usage| is a pointer to a BAudioUsage.
+  //
+  // Returns 0 on success and errno on failure.
+  int GetVolumeControlStream(BAudioUsage* usage);
+
+  // Set default stream to use for volume buttons.
+  //
+  // |usage| is an enum of type BAudioUsage.
+  //
+  // Returns 0 on success and errno on failure.
+  int SetVolumeControlStream(BAudioUsage usage);
+
+  // Increment the volume.
+  //
+  // Returns 0 on success and errno on failure.
+  int IncrementVolume();
+
+  // Decrement the volume.
+  //
+  // Returns 0 on success and errno on failure.
+  int DecrementVolume();
+
+ protected:
+  BrilloAudioClient() = default;
+
+ private:
+  friend class BrilloAudioClientTest;
+  FRIEND_TEST(BrilloAudioClientTest, InitializeNoService);
+  FRIEND_TEST(BrilloAudioClientTest,
+              CheckInitializeRegistersForDeathNotifications);
+
+  // Initialize the BrilloAudioClient object and connect to the brillo audio
+  // service. It also registers for death notifications.
+  bool Initialize();
+
+  // Callback to be triggered when the brillo audio service dies. It resets
+  // the client instance; a new client must be created (via GetClientInstance)
+  // to reconnect to the service.
+  virtual void OnBASDisconnect();
+
+  // Helper method to connect to a service and register a callback to receive
+  // death notifications.
+  //
+  // |service_name| is a string representing the name of the service.
+  // |callback| is a base::Closure which will be called if the service dies.
+  android::sp<android::IBinder> ConnectToService(const std::string& service_name,
+                                                 const base::Closure& callback);
+
+  // Pointer to the BrilloAudioClient object.
+  static std::shared_ptr<BrilloAudioClient> instance_;
+
+  // Used to generate weak_ptr to BrilloAudioClient for use in base::Bind.
+  base::WeakPtrFactory<BrilloAudioClient> weak_ptr_factory_{this};
+  // Pointer to the brillo audio service.
+  android::sp<IBrilloAudioService> brillo_audio_service_;
+  // Counter for callback IDs.
+  static int callback_id_counter_;
+  // Map of callback ids to callback objects.
+  std::map<int, android::sp<AudioServiceCallback> > callback_map_;
+
+  DISALLOW_COPY_AND_ASSIGN(BrilloAudioClient);
+};
+
+}  // namespace brillo
+
+#endif  // BRILLO_AUDIO_AUDIOSERVICE_BRILLO_AUDIO_CLIENT_H_
diff --git a/media/brillo/audio/audioservice/brillo_audio_client_helpers.cpp b/media/brillo/audio/audioservice/brillo_audio_client_helpers.cpp
new file mode 100644
index 0000000..871c7a9
--- /dev/null
+++ b/media/brillo/audio/audioservice/brillo_audio_client_helpers.cpp
@@ -0,0 +1,59 @@
+// Copyright 2016 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+#include "brillo_audio_client_helpers.h"
+
+namespace brillo {
+
+audio_policy_force_use_t BrilloAudioClientHelpers::GetForceUse(
+    BAudioUsage usage) {
+  if (usage == kUsageMedia)
+    return AUDIO_POLICY_FORCE_FOR_MEDIA;
+  else
+    return AUDIO_POLICY_FORCE_FOR_SYSTEM;
+}
+
+audio_stream_type_t BrilloAudioClientHelpers::GetStreamType(BAudioUsage usage) {
+  switch (usage) {
+    case kUsageAlarm:
+      return AUDIO_STREAM_ALARM;
+    case kUsageMedia:
+      return AUDIO_STREAM_MUSIC;
+    case kUsageNotifications:
+      return AUDIO_STREAM_NOTIFICATION;
+    case kUsageSystem:
+      return AUDIO_STREAM_SYSTEM;
+    default:
+      return AUDIO_STREAM_DEFAULT;
+  }
+}
+
+BAudioUsage BrilloAudioClientHelpers::GetBAudioUsage(
+    audio_stream_type_t stream) {
+  switch (stream) {
+    case AUDIO_STREAM_ALARM:
+      return kUsageAlarm;
+    case AUDIO_STREAM_MUSIC:
+      return kUsageMedia;
+    case AUDIO_STREAM_NOTIFICATION:
+      return kUsageNotifications;
+    case AUDIO_STREAM_SYSTEM:
+      return kUsageSystem;
+    default:
+      return kUsageInvalid;
+  }
+}
+
+}  // namespace brillo
diff --git a/media/brillo/audio/audioservice/brillo_audio_client_helpers.h b/media/brillo/audio/audioservice/brillo_audio_client_helpers.h
new file mode 100644
index 0000000..a5bb7ba
--- /dev/null
+++ b/media/brillo/audio/audioservice/brillo_audio_client_helpers.h
@@ -0,0 +1,38 @@
+// Copyright 2016 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+// Helpers for the brillo audio client.
+
+#ifndef BRILLO_AUDIO_AUDIOSERVICE_BRILLO_AUDIO_CLIENT_HELPERS_H_
+#define BRILLO_AUDIO_AUDIOSERVICE_BRILLO_AUDIO_CLIENT_HELPERS_H_
+
+#include <gtest/gtest_prod.h>
+#include <system/audio.h>
+#include <system/audio_policy.h>
+
+#include "include/brillo_audio_manager.h"
+
+namespace brillo {
+
+class BrilloAudioClientHelpers {
+ public:
+  static audio_policy_force_use_t GetForceUse(BAudioUsage usage);
+  static audio_stream_type_t GetStreamType(BAudioUsage usage);
+  static BAudioUsage GetBAudioUsage(audio_stream_type_t stream);
+};
+
+}  // namespace brillo
+
+#endif  // BRILLO_AUDIO_AUDIOSERVICE_BRILLO_AUDIO_CLIENT_HELPERS_H_
diff --git a/media/brillo/audio/audioservice/brillo_audio_device_info.cpp b/media/brillo/audio/audioservice/brillo_audio_device_info.cpp
new file mode 100644
index 0000000..611bcc5
--- /dev/null
+++ b/media/brillo/audio/audioservice/brillo_audio_device_info.cpp
@@ -0,0 +1,38 @@
+// Copyright 2016 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+// Implementation of brillo_audio_device_info.h.
+
+#include "include/brillo_audio_device_info.h"
+
+#include "brillo_audio_device_info_def.h"
+#include "brillo_audio_device_info_internal.h"
+
+using brillo::BAudioDeviceInfoInternal;
+
+BAudioDeviceInfo* BAudioDeviceInfo_new(int device) {
+  BAudioDeviceInfo* audio_device_info = new BAudioDeviceInfo;
+  audio_device_info->internal_ =
+      std::make_unique<BAudioDeviceInfoInternal>(device);
+  return audio_device_info;
+}
+
+int BAudioDeviceInfo_getType(BAudioDeviceInfo* device) {
+  return device->internal_->GetDeviceId();
+}
+
+void BAudioDeviceInfo_delete(BAudioDeviceInfo* device) {
+  delete device;
+}
diff --git a/media/brillo/audio/audioservice/brillo_audio_device_info_def.h b/media/brillo/audio/audioservice/brillo_audio_device_info_def.h
new file mode 100644
index 0000000..3bf1f66
--- /dev/null
+++ b/media/brillo/audio/audioservice/brillo_audio_device_info_def.h
@@ -0,0 +1,33 @@
+// Copyright 2016 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+// Definition of BAudioDeviceInfo.
+
+#ifndef BRILLO_AUDIO_AUDIOSERVICE_BRILLO_AUDIO_DEVICE_INFO_DEF_H_
+#define BRILLO_AUDIO_AUDIOSERVICE_BRILLO_AUDIO_DEVICE_INFO_DEF_H_
+
+
+#include <memory>
+
+#include "brillo_audio_device_info_internal.h"
+#include "include/brillo_audio_device_info.h"
+
+using brillo::BAudioDeviceInfoInternal;
+
+struct BAudioDeviceInfo {
+  std::unique_ptr<BAudioDeviceInfoInternal> internal_;
+};
+
+#endif  // BRILLO_AUDIO_AUDIOSERVICE_BRILLO_AUDIO_DEVICE_INFO_DEF_H_
diff --git a/media/brillo/audio/audioservice/brillo_audio_device_info_internal.cpp b/media/brillo/audio/audioservice/brillo_audio_device_info_internal.cpp
new file mode 100644
index 0000000..215da21
--- /dev/null
+++ b/media/brillo/audio/audioservice/brillo_audio_device_info_internal.cpp
@@ -0,0 +1,89 @@
+// Copyright 2016 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+// Internal helpers for BAudioDeviceInfo.
+
+#include "brillo_audio_device_info_internal.h"
+
+#include <base/logging.h>
+
+#include "brillo_audio_device_info_def.h"
+
+namespace brillo {
+
+BAudioDeviceInfoInternal::BAudioDeviceInfoInternal(int device_id) {
+  device_id_ = device_id;
+}
+
+int BAudioDeviceInfoInternal::GetDeviceId() {
+  return device_id_;
+}
+
+audio_policy_forced_cfg_t BAudioDeviceInfoInternal::GetConfig() {
+  switch (device_id_) {
+    case TYPE_BUILTIN_SPEAKER:
+      return AUDIO_POLICY_FORCE_SPEAKER;
+    case TYPE_WIRED_HEADSET:
+      return AUDIO_POLICY_FORCE_HEADPHONES;
+    case TYPE_WIRED_HEADSET_MIC:
+      return AUDIO_POLICY_FORCE_HEADPHONES;
+    case TYPE_WIRED_HEADPHONES:
+      return AUDIO_POLICY_FORCE_HEADPHONES;
+    case TYPE_BUILTIN_MIC:
+      return AUDIO_POLICY_FORCE_NONE;
+    default:
+      return AUDIO_POLICY_FORCE_NONE;
+  }
+}
+
+audio_devices_t BAudioDeviceInfoInternal::GetAudioDevicesT() {
+  switch (device_id_) {
+    case TYPE_BUILTIN_SPEAKER:
+      return AUDIO_DEVICE_OUT_SPEAKER;
+    case TYPE_WIRED_HEADSET:
+      return AUDIO_DEVICE_OUT_WIRED_HEADSET;
+    case TYPE_WIRED_HEADSET_MIC:
+      return AUDIO_DEVICE_IN_WIRED_HEADSET;
+    case TYPE_WIRED_HEADPHONES:
+      return AUDIO_DEVICE_OUT_WIRED_HEADPHONE;
+    case TYPE_BUILTIN_MIC:
+      return AUDIO_DEVICE_IN_BUILTIN_MIC;
+    default:
+      return AUDIO_DEVICE_NONE;
+  }
+}
+
+BAudioDeviceInfoInternal* BAudioDeviceInfoInternal::CreateFromAudioDevicesT(
+    unsigned int device) {
+  int device_id = TYPE_UNKNOWN;
+  switch (device) {
+    case AUDIO_DEVICE_OUT_WIRED_HEADSET:
+      device_id = TYPE_WIRED_HEADSET;
+      break;
+    case AUDIO_DEVICE_OUT_WIRED_HEADPHONE:
+      device_id = TYPE_WIRED_HEADPHONES;
+      break;
+    case AUDIO_DEVICE_IN_WIRED_HEADSET:
+      device_id = TYPE_WIRED_HEADSET_MIC;
+      break;
+  }
+  if (device_id == TYPE_UNKNOWN) {
+    LOG(ERROR) << "Unsupported device.";
+    return nullptr;
+  }
+  return new BAudioDeviceInfoInternal(device_id);
+}
+
+}  // namespace brillo
diff --git a/media/brillo/audio/audioservice/brillo_audio_device_info_internal.h b/media/brillo/audio/audioservice/brillo_audio_device_info_internal.h
new file mode 100644
index 0000000..2e60c6f
--- /dev/null
+++ b/media/brillo/audio/audioservice/brillo_audio_device_info_internal.h
@@ -0,0 +1,74 @@
+// Copyright 2016 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+// Internal class to represent BAudioDeviceInfo.
+
+#ifndef BRILLO_AUDIO_AUDIOSERVICE_BRILLO_AUDIO_DEVICE_INFO_INTERNAL_H_
+#define BRILLO_AUDIO_AUDIOSERVICE_BRILLO_AUDIO_DEVICE_INFO_INTERNAL_H_
+
+#include <vector>
+
+#include <gtest/gtest_prod.h>
+#include <hardware/audio_policy.h>
+
+#include "include/brillo_audio_device_info.h"
+
+namespace brillo {
+
+class BAudioDeviceInfoInternal {
+ public:
+  // Constructor for BAudioDeviceInfoInternal.
+  //
+  // |device_id| is an integer representing an audio device type as defined in
+  // brillo_audio_device_info.h.
+  explicit BAudioDeviceInfoInternal(int device_id);
+
+  // Get audio policy config.
+  //
+  // Returns an audio_policy_forced_cfg_t.
+  audio_policy_forced_cfg_t GetConfig();
+
+  // Create a BAudioDeviceInfoInternal object from a audio_devices_t device
+  // type.
+  //
+  // |device| is an audio device of type audio_devices_t, which is represented
+  // using an unsigned int.
+  //
+  // Returns a pointer to a BAudioDeviceInfoInternal that has been created.
+  static BAudioDeviceInfoInternal* CreateFromAudioDevicesT(unsigned int device);
+
+  // Get the device id.
+  //
+  // Returns an int which is the device_id.
+  int GetDeviceId();
+
+  // Get the audio_devices_t that corresponds to device_id_.
+  //
+  // Returns an audio_devices_t.
+  audio_devices_t GetAudioDevicesT();
+
+ private:
+  FRIEND_TEST(BrilloAudioDeviceInfoInternalTest, InWiredHeadset);
+  FRIEND_TEST(BrilloAudioDeviceInfoInternalTest, OutWiredHeadset);
+  FRIEND_TEST(BrilloAudioDeviceInfoInternalTest, OutWiredHeadphone);
+
+  // An int representing the underlying audio device. The int is one of the
+  // constants defined in brillo_audio_device_info.h.
+  int device_id_;
+};
+
+}  // namespace brillo
+
+#endif  // BRILLO_AUDIO_AUDIOSERVICE_BRILLO_AUDIO_DEVICE_INFO_INTERNAL_H_
diff --git a/media/brillo/audio/audioservice/brillo_audio_manager.cpp b/media/brillo/audio/audioservice/brillo_audio_manager.cpp
new file mode 100644
index 0000000..4c09824
--- /dev/null
+++ b/media/brillo/audio/audioservice/brillo_audio_manager.cpp
@@ -0,0 +1,227 @@
+// Copyright 2016 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+// Implementation of brillo_audio_manager.h.
+
+#include "include/brillo_audio_manager.h"
+
+#include <memory>
+#include <stdlib.h>
+
+#include "audio_service_callback.h"
+#include "brillo_audio_client.h"
+#include "brillo_audio_client_helpers.h"
+#include "brillo_audio_device_info_def.h"
+#include "brillo_audio_device_info_internal.h"
+
+using brillo::AudioServiceCallback;
+using brillo::BrilloAudioClient;
+using brillo::BrilloAudioClientHelpers;
+
+struct BAudioManager {
+  std::weak_ptr<BrilloAudioClient> client_;
+};
+
+BAudioManager* BAudioManager_new() {
+  auto client = BrilloAudioClient::GetClientInstance();
+  if (!client.lock())
+    return nullptr;
+  BAudioManager* bam = new BAudioManager;
+  bam->client_ = client;
+  return bam;
+}
+
+int BAudioManager_getDevices(
+    const BAudioManager* brillo_audio_manager, int flag,
+    BAudioDeviceInfo* device_array[], unsigned int size,
+    unsigned int* num_devices) {
+  if (!brillo_audio_manager || !num_devices ||
+      (flag != GET_DEVICES_INPUTS && flag != GET_DEVICES_OUTPUTS))
+    return EINVAL;
+  auto client = brillo_audio_manager->client_.lock();
+  if (!client) {
+    *num_devices = 0;
+    return ECONNABORTED;
+  }
+  std::vector<int> devices;
+  auto rc = client->GetDevices(flag, devices);
+  if (rc) {
+    *num_devices = 0;
+    return rc;
+  }
+  unsigned int num_elems = (devices.size() < size) ? devices.size() : size;
+  for (size_t i = 0; i < num_elems; i++) {
+    device_array[i] = new BAudioDeviceInfo;
+    device_array[i]->internal_ = std::unique_ptr<BAudioDeviceInfoInternal>(
+        BAudioDeviceInfoInternal::CreateFromAudioDevicesT(devices[i]));
+  }
+  *num_devices = devices.size();
+  return 0;
+}
+
+int BAudioManager_setInputDevice(const BAudioManager* brillo_audio_manager,
+                                 const BAudioDeviceInfo* device) {
+  if (!brillo_audio_manager || !device)
+    return EINVAL;
+  auto client = brillo_audio_manager->client_.lock();
+  if (!client) {
+    return ECONNABORTED;
+  }
+  return client->SetDevice(AUDIO_POLICY_FORCE_FOR_RECORD,
+                           device->internal_->GetConfig());
+}
+
+int BAudioManager_setOutputDevice(
+    const BAudioManager* brillo_audio_manager, const BAudioDeviceInfo* device,
+    BAudioUsage usage) {
+  if (!brillo_audio_manager || !device)
+    return EINVAL;
+  auto client = brillo_audio_manager->client_.lock();
+  if (!client)
+    return ECONNABORTED;
+  return client->SetDevice(BrilloAudioClientHelpers::GetForceUse(usage),
+                           device->internal_->GetConfig());
+}
+
+int BAudioManager_getMaxVolumeSteps(const BAudioManager* brillo_audio_manager,
+                                    BAudioUsage usage,
+                                    int* max_steps) {
+  if (!brillo_audio_manager || !max_steps)
+    return EINVAL;
+  auto client = brillo_audio_manager->client_.lock();
+  if (!client)
+    return ECONNABORTED;
+  return client->GetMaxVolumeSteps(usage, max_steps);
+}
+
+int BAudioManager_setMaxVolumeSteps(const BAudioManager* brillo_audio_manager,
+                                    BAudioUsage usage,
+                                    int max_steps) {
+  if (!brillo_audio_manager || max_steps < 0 || max_steps > 100)
+    return EINVAL;
+  auto client = brillo_audio_manager->client_.lock();
+  if (!client)
+    return ECONNABORTED;
+  return client->SetMaxVolumeSteps(usage, max_steps);
+}
+
+int BAudioManager_setVolumeIndex(const BAudioManager* brillo_audio_manager,
+                                 BAudioUsage usage,
+                                 const BAudioDeviceInfo* device,
+                                 int index) {
+  if (!brillo_audio_manager || !device) {
+    return EINVAL;
+  }
+  auto client = brillo_audio_manager->client_.lock();
+  if (!client) {
+    return ECONNABORTED;
+  }
+  return client->SetVolumeIndex(
+      usage, device->internal_->GetAudioDevicesT(), index);
+}
+
+int BAudioManager_getVolumeIndex(const BAudioManager* brillo_audio_manager,
+                                 BAudioUsage usage,
+                                 const BAudioDeviceInfo* device,
+                                 int* index) {
+  if (!brillo_audio_manager || !device || !index) {
+    return EINVAL;
+  }
+  auto client = brillo_audio_manager->client_.lock();
+  if (!client) {
+    return ECONNABORTED;
+  }
+  return client->GetVolumeIndex(
+      usage, device->internal_->GetAudioDevicesT(), index);
+}
+
+int BAudioManager_getVolumeControlUsage(
+    const BAudioManager* brillo_audio_manager, BAudioUsage* usage) {
+  if (!brillo_audio_manager || !usage) {
+    return EINVAL;
+  }
+  auto client = brillo_audio_manager->client_.lock();
+  if (!client) {
+    return ECONNABORTED;
+  }
+  return client->GetVolumeControlStream(usage);
+}
+
+int BAudioManager_setVolumeControlUsage(
+    const BAudioManager* brillo_audio_manager, BAudioUsage usage) {
+  if (!brillo_audio_manager) {
+    return EINVAL;
+  }
+  auto client = brillo_audio_manager->client_.lock();
+  if (!client) {
+    return ECONNABORTED;
+  }
+  return client->SetVolumeControlStream(usage);
+}
+
+int BAudioManager_incrementVolume(const BAudioManager* brillo_audio_manager) {
+  if (!brillo_audio_manager) {
+    return EINVAL;
+  }
+  auto client = brillo_audio_manager->client_.lock();
+  if (!client) {
+    return ECONNABORTED;
+  }
+  return client->IncrementVolume();
+}
+
+int BAudioManager_decrementVolume(const BAudioManager* brillo_audio_manager) {
+  if (!brillo_audio_manager) {
+    return EINVAL;
+  }
+  auto client = brillo_audio_manager->client_.lock();
+  if (!client) {
+    return ECONNABORTED;
+  }
+  return client->DecrementVolume();
+}
+
+int BAudioManager_registerAudioCallback(
+    const BAudioManager* brillo_audio_manager, const BAudioCallback* callback,
+    void* user_data, int* callback_id) {
+  if (!brillo_audio_manager || !callback || !callback_id)
+    return EINVAL;
+  auto client = brillo_audio_manager->client_.lock();
+  if (!client) {
+    *callback_id = 0;
+    return ECONNABORTED;
+  }
+  // This copies the BAudioCallback into AudioServiceCallback so the
+  // BAudioCallback can be safely deleted.
+  return client->RegisterAudioCallback(
+      new AudioServiceCallback(callback, user_data), callback_id);
+}
+
+int BAudioManager_unregisterAudioCallback(
+    const BAudioManager* brillo_audio_manager, int callback_id) {
+  if (!brillo_audio_manager)
+    return EINVAL;
+  auto client = brillo_audio_manager->client_.lock();
+  if (!client)
+    return ECONNABORTED;
+  return client->UnregisterAudioCallback(callback_id);
+}
+
+int BAudioManager_delete(BAudioManager* brillo_audio_manager) {
+  if (!brillo_audio_manager)
+    return EINVAL;
+  delete brillo_audio_manager;
+  return 0;
+}
diff --git a/media/brillo/audio/audioservice/brillo_audio_service.h b/media/brillo/audio/audioservice/brillo_audio_service.h
new file mode 100644
index 0000000..87ca0d7
--- /dev/null
+++ b/media/brillo/audio/audioservice/brillo_audio_service.h
@@ -0,0 +1,87 @@
+// Copyright 2016 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+#ifndef BRILLO_AUDIO_AUDIOSERVICE_BRILLO_AUDIO_SERVICE_H_
+#define BRILLO_AUDIO_AUDIOSERVICE_BRILLO_AUDIO_SERVICE_H_
+
+#include "android/brillo/brilloaudioservice/BnBrilloAudioService.h"
+
+#include <memory>
+#include <set>
+#include <vector>
+
+#include <binder/Status.h>
+
+#include "android/brillo/brilloaudioservice/IAudioServiceCallback.h"
+#include "audio_device_handler.h"
+#include "audio_volume_handler.h"
+
+using android::binder::Status;
+using android::brillo::brilloaudioservice::BnBrilloAudioService;
+using android::brillo::brilloaudioservice::IAudioServiceCallback;
+
+namespace brillo {
+
+class BrilloAudioService : public BnBrilloAudioService {
+ public:
+  virtual ~BrilloAudioService() {}
+
+  // From AIDL.
+  virtual Status GetDevices(int flag, std::vector<int>* _aidl_return) = 0;
+  virtual Status SetDevice(int usage, int config) = 0;
+  virtual Status GetMaxVolumeSteps(int stream, int* _aidl_return) = 0;
+  virtual Status SetMaxVolumeSteps(int stream, int max_steps) = 0;
+  virtual Status SetVolumeIndex(int stream, int device, int index) = 0;
+  virtual Status GetVolumeIndex(int stream, int device, int* _aidl_return) = 0;
+  virtual Status GetVolumeControlStream(int* _aidl_return) = 0;
+  virtual Status SetVolumeControlStream(int stream) = 0;
+  virtual Status IncrementVolume() = 0;
+  virtual Status DecrementVolume() = 0;
+  virtual Status RegisterServiceCallback(
+      const android::sp<IAudioServiceCallback>& callback) = 0;
+  virtual Status UnregisterServiceCallback(
+      const android::sp<IAudioServiceCallback>& callback) = 0;
+
+  // Register daemon handlers.
+  //
+  // |audio_device_handler| is a weak pointer to an audio device handler object.
+  // |audio_volume_handler| is a weak pointer to an audio volume handler object.
+  virtual void RegisterHandlers(
+      std::weak_ptr<AudioDeviceHandler> audio_device_handler,
+      std::weak_ptr<AudioVolumeHandler> audio_volume_handler) = 0;
+
+  // Callback to be called when a device is connected.
+  //
+  // |device| is a vector of ints representing the audio_devices_t values.
+  virtual void OnDevicesConnected(const std::vector<int>& device) = 0;
+
+  // Callback to be called when a device is disconnected.
+  //
+  // |device| is a vector of ints representing the audio_devices_t values.
+  virtual void OnDevicesDisconnected(const std::vector<int>& device) = 0;
+
+  // Callback to be called when the volume is changed.
+  //
+  // |stream| is an audio_stream_type_t representing the stream.
+  // |previous_index| is the volume index before the key press.
+  // |current_index| is the volume index after the key press.
+  virtual void OnVolumeChanged(audio_stream_type_t stream,
+                               int previous_index,
+                               int current_index) = 0;
+};
+
+}  // namespace brillo
+
+#endif  // BRILLO_AUDIO_AUDIOSERVICE_BRILLO_AUDIO_SERVICE_H_
diff --git a/media/brillo/audio/audioservice/brillo_audio_service_impl.cpp b/media/brillo/audio/audioservice/brillo_audio_service_impl.cpp
new file mode 100644
index 0000000..1585755
--- /dev/null
+++ b/media/brillo/audio/audioservice/brillo_audio_service_impl.cpp
@@ -0,0 +1,193 @@
+// Copyright 2016 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+// Implementation of brillo_audio_service_impl.h
+
+#include "brillo_audio_service_impl.h"
+
+using android::binder::Status;
+
+namespace brillo {
+
+Status BrilloAudioServiceImpl::GetDevices(int flag,
+                                          std::vector<int>* _aidl_return) {
+  auto device_handler = audio_device_handler_.lock();
+  if (!device_handler) {
+    return Status::fromServiceSpecificError(
+        EREMOTEIO, android::String8("The audio device handler died."));
+  }
+  if (flag == BrilloAudioService::GET_DEVICES_INPUTS) {
+    device_handler->GetInputDevices(_aidl_return);
+  } else if (flag == BrilloAudioService::GET_DEVICES_OUTPUTS) {
+    device_handler->GetOutputDevices(_aidl_return);
+  } else {
+    return Status::fromServiceSpecificError(EINVAL,
+                                            android::String8("Invalid flag."));
+  }
+  return Status::ok();
+}
+
+Status BrilloAudioServiceImpl::SetDevice(int usage, int config) {
+  auto device_handler = audio_device_handler_.lock();
+  if (!device_handler) {
+    return Status::fromServiceSpecificError(
+        EREMOTEIO, android::String8("The audio device handler died."));
+  }
+  int rc =
+      device_handler->SetDevice(static_cast<audio_policy_force_use_t>(usage),
+                                static_cast<audio_policy_forced_cfg_t>(config));
+  if (rc) return Status::fromServiceSpecificError(rc);
+  return Status::ok();
+}
+
+Status BrilloAudioServiceImpl::RegisterServiceCallback(
+    const android::sp<IAudioServiceCallback>& callback) {
+  callbacks_set_.insert(callback);
+  return Status::ok();
+}
+
+Status BrilloAudioServiceImpl::UnregisterServiceCallback(
+    const android::sp<IAudioServiceCallback>& callback) {
+  callbacks_set_.erase(callback);
+  return Status::ok();
+}
+
+void BrilloAudioServiceImpl::RegisterHandlers(
+    std::weak_ptr<AudioDeviceHandler> audio_device_handler,
+    std::weak_ptr<AudioVolumeHandler> audio_volume_handler) {
+  audio_device_handler_ = audio_device_handler;
+  audio_volume_handler_ = audio_volume_handler;
+}
+
+Status BrilloAudioServiceImpl::GetMaxVolumeSteps(int stream,
+                                                 int* _aidl_return) {
+  auto volume_handler = audio_volume_handler_.lock();
+  if (!volume_handler) {
+    return Status::fromServiceSpecificError(
+        EREMOTEIO, android::String8("The audio volume handler died."));
+  }
+  *_aidl_return = volume_handler->GetVolumeMaxSteps(
+      static_cast<audio_stream_type_t>(stream));
+  return Status::ok();
+}
+
+Status BrilloAudioServiceImpl::SetMaxVolumeSteps(int stream, int max_steps) {
+  auto volume_handler = audio_volume_handler_.lock();
+  if (!volume_handler) {
+    return Status::fromServiceSpecificError(
+        EREMOTEIO, android::String8("The audio volume handler died."));
+  }
+  int rc = volume_handler->SetVolumeMaxSteps(
+      static_cast<audio_stream_type_t>(stream), max_steps);
+  if (rc)
+    return Status::fromServiceSpecificError(rc);
+  return Status::ok();
+}
+
+Status BrilloAudioServiceImpl::SetVolumeIndex(int stream,
+                                              int device,
+                                              int index) {
+  auto volume_handler = audio_volume_handler_.lock();
+  if (!volume_handler) {
+    return Status::fromServiceSpecificError(
+        EREMOTEIO, android::String8("The audio volume handler died."));
+  }
+  int rc =
+      volume_handler->SetVolumeIndex(static_cast<audio_stream_type_t>(stream),
+                                     static_cast<audio_devices_t>(device),
+                                     index);
+  if (rc)
+    return Status::fromServiceSpecificError(rc);
+  return Status::ok();
+}
+
+Status BrilloAudioServiceImpl::GetVolumeIndex(int stream,
+                                              int device,
+                                              int* _aidl_return) {
+  auto volume_handler = audio_volume_handler_.lock();
+  if (!volume_handler) {
+    return Status::fromServiceSpecificError(
+        EREMOTEIO, android::String8("The audio volume handler died."));
+  }
+  *_aidl_return =
+      volume_handler->GetVolumeIndex(static_cast<audio_stream_type_t>(stream),
+                                     static_cast<audio_devices_t>(device));
+  return Status::ok();
+}
+
+Status BrilloAudioServiceImpl::IncrementVolume() {
+  auto volume_handler = audio_volume_handler_.lock();
+  if (!volume_handler) {
+    return Status::fromServiceSpecificError(
+        EREMOTEIO, android::String8("The audio volume handler died."));
+  }
+  volume_handler->AdjustVolumeActiveStreams(1);
+  return Status::ok();
+}
+
+Status BrilloAudioServiceImpl::GetVolumeControlStream(int* _aidl_return) {
+  auto volume_handler = audio_volume_handler_.lock();
+  if (!volume_handler) {
+    return Status::fromServiceSpecificError(
+        EREMOTEIO, android::String8("The audio volume handler died."));
+  }
+  *_aidl_return = volume_handler->GetVolumeControlStream();
+  return Status::ok();
+}
+
+Status BrilloAudioServiceImpl::SetVolumeControlStream(int stream) {
+  auto volume_handler = audio_volume_handler_.lock();
+  if (!volume_handler) {
+    return Status::fromServiceSpecificError(
+        EREMOTEIO, android::String8("The audio volume handler died."));
+  }
+  volume_handler->SetVolumeControlStream(
+      static_cast<audio_stream_type_t>(stream));
+  return Status::ok();
+}
+
+Status BrilloAudioServiceImpl::DecrementVolume() {
+  auto volume_handler = audio_volume_handler_.lock();
+  if (!volume_handler) {
+    return Status::fromServiceSpecificError(
+        EREMOTEIO, android::String8("The audio volume handler died."));
+  }
+  volume_handler->AdjustVolumeActiveStreams(-1);
+  return Status::ok();
+}
+
+void BrilloAudioServiceImpl::OnDevicesConnected(
+    const std::vector<int>& devices) {
+  for (const auto& callback : callbacks_set_) {
+    callback->OnAudioDevicesConnected(devices);
+  }
+}
+
+void BrilloAudioServiceImpl::OnDevicesDisconnected(
+    const std::vector<int>& devices) {
+  for (const auto& callback : callbacks_set_) {
+    callback->OnAudioDevicesDisconnected(devices);
+  }
+}
+
+void BrilloAudioServiceImpl::OnVolumeChanged(audio_stream_type_t stream,
+                                             int previous_index,
+                                             int current_index) {
+  for (const auto& callback : callbacks_set_) {
+    callback->OnVolumeChanged(stream, previous_index, current_index);
+  }
+}
+
+}  // namespace brillo
diff --git a/media/brillo/audio/audioservice/brillo_audio_service_impl.h b/media/brillo/audio/audioservice/brillo_audio_service_impl.h
new file mode 100644
index 0000000..af53b66
--- /dev/null
+++ b/media/brillo/audio/audioservice/brillo_audio_service_impl.h
@@ -0,0 +1,83 @@
+// Copyright 2016 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+#ifndef BRILLO_AUDIO_AUDIOSERVICE_BRILLO_AUDIO_SERVICE_IMPL_H_
+#define BRILLO_AUDIO_AUDIOSERVICE_BRILLO_AUDIO_SERVICE_IMPL_H_
+
+// Server side implementation of brillo audio service.
+
+#include "brillo_audio_service.h"
+
+namespace brillo {
+
+class BrilloAudioServiceImpl : public BrilloAudioService {
+ public:
+  ~BrilloAudioServiceImpl() = default;
+
+  // From AIDL.
+  Status GetDevices(int flag, std::vector<int>* _aidl_return) override;
+  Status SetDevice(int usage, int config) override;
+  Status GetMaxVolumeSteps(int stream, int* _aidl_return) override;
+  Status SetMaxVolumeSteps(int stream, int max_steps) override;
+  Status SetVolumeIndex(int stream, int device, int index) override;
+  Status GetVolumeIndex(int stream, int device, int* _aidl_return) override;
+  Status GetVolumeControlStream(int* _aidl_return) override;
+  Status SetVolumeControlStream(int stream) override;
+  Status IncrementVolume() override;
+  Status DecrementVolume() override;
+  Status RegisterServiceCallback(
+      const android::sp<IAudioServiceCallback>& callback) override;
+  Status UnregisterServiceCallback(
+      const android::sp<IAudioServiceCallback>& callback) override;
+
+  // Register daemon handlers.
+  //
+  // |audio_device_handler| is a weak pointer to an audio device handler object.
+  // |audio_volume_handler| is a weak pointer to an audio volume handler object.
+  void RegisterHandlers(
+      std::weak_ptr<AudioDeviceHandler> audio_device_handler,
+      std::weak_ptr<AudioVolumeHandler> audio_volume_handler) override;
+
+  // Callback to be called when a device is connected.
+  //
+  // |device| is a vector of ints representing the audio_devices_t.
+  void OnDevicesConnected(const std::vector<int>& device) override;
+
+  // Callback to be called when a device is disconnected.
+  //
+  // |device| is a vector of ints representing the audio_devices_t.
+  void OnDevicesDisconnected(const std::vector<int>& device) override;
+
+  // Callback to be called when volume is changed.
+  //
+  // |stream| is an int representing the stream.
+  // |previous_index| is the volume index before the key press.
+  // |current_index| is the volume index after the key press.
+  void OnVolumeChanged(audio_stream_type_t stream,
+                       int previous_index,
+                       int current_index) override;
+
+ private:
+  // A weak pointer to the audio device handler.
+  std::weak_ptr<AudioDeviceHandler> audio_device_handler_;
+  // A weak pointer to the audio volume handler.
+  std::weak_ptr<AudioVolumeHandler> audio_volume_handler_;
+  // List of all callbacks objects registered with the service.
+  std::set<android::sp<IAudioServiceCallback> > callbacks_set_;
+};
+
+}  // namespace brillo
+
+#endif  // BRILLO_AUDIO_AUDIOSERVICE_BRILLO_AUDIO_SERVICE_IMPL_H_
diff --git a/media/brillo/audio/audioservice/brilloaudioserv.rc b/media/brillo/audio/audioservice/brilloaudioserv.rc
new file mode 100644
index 0000000..0595c33
--- /dev/null
+++ b/media/brillo/audio/audioservice/brilloaudioserv.rc
@@ -0,0 +1,4 @@
+service brilloaudioserv /system/bin/brilloaudioservice
+    class late_start
+    user audioserver
+    group input
diff --git a/media/brillo/audio/audioservice/include/brillo_audio_device_info.h b/media/brillo/audio/audioservice/include/brillo_audio_device_info.h
new file mode 100644
index 0000000..5c386b4
--- /dev/null
+++ b/media/brillo/audio/audioservice/include/brillo_audio_device_info.h
@@ -0,0 +1,74 @@
+// Copyright 2016 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+// Type to represent audio devices in a brillo system.
+
+#ifndef BRILLO_AUDIO_AUDIOSERVICE_BRILLO_AUDIO_DEVICE_INFO_H_
+#define BRILLO_AUDIO_AUDIOSERVICE_BRILLO_AUDIO_DEVICE_INFO_H_
+
+#include <sys/cdefs.h>
+
+__BEGIN_DECLS
+
+struct BAudioDeviceInfo;
+
+typedef struct BAudioDeviceInfo BAudioDeviceInfo;
+
+// A device type associated with an unknown or uninitialized device.
+static const int TYPE_UNKNOWN = 0;
+
+// A device type describing the speaker system (i.e. a mono speaker or stereo
+// speakers) built in a device.
+static const int TYPE_BUILTIN_SPEAKER = 1;
+
+// A device type describing a headset, which is the combination of headphones
+// and a microphone. This type represents just the transducer in the headset.
+static const int TYPE_WIRED_HEADSET = 2;
+
+// A device type describing a headset, which is the combination of headphones
+// and a microphone. This type represents the microphone in the headset.
+static const int TYPE_WIRED_HEADSET_MIC = 3;
+
+// A device type describing a pair of wired headphones.
+static const int TYPE_WIRED_HEADPHONES = 4;
+
+// A device type describing the microphone(s) built in a device.
+static const int TYPE_BUILTIN_MIC = 5;
+
+// Create a BAudioDeviceInfo based on a type described above.
+//
+// Arg:
+//   device: An int representing an audio type as defined above.
+//
+// Returns a pointer to a BAudioDeviceInfo object.
+BAudioDeviceInfo* BAudioDeviceInfo_new(int device);
+
+// Get the type of the device.
+//
+// Arg:
+//   device: A pointer to a BAudioDeviceInfo object.
+//
+// Returns an int representing the type of the device.
+int BAudioDeviceInfo_getType(BAudioDeviceInfo* device);
+
+// Free a BAudioDeviceInfo.
+//
+// Arg:
+//   device: A pointer to a BAudioDeviceInfo object to be freed.
+void BAudioDeviceInfo_delete(BAudioDeviceInfo* device);
+
+__END_DECLS
+
+#endif  // BRILLO_AUDIO_AUDIOSERVICE_BRILLO_AUDIO_DEVICE_INFO_H_
diff --git a/media/brillo/audio/audioservice/include/brillo_audio_manager.h b/media/brillo/audio/audioservice/include/brillo_audio_manager.h
new file mode 100644
index 0000000..ff80daa
--- /dev/null
+++ b/media/brillo/audio/audioservice/include/brillo_audio_manager.h
@@ -0,0 +1,258 @@
+// Copyright 2016 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+// Class to manage audio devices in Brillo.
+
+#ifndef BRILLO_AUDIO_AUDIOSERVICE_BRILLO_AUDIO_MANAGER_H_
+#define BRILLO_AUDIO_AUDIOSERVICE_BRILLO_AUDIO_MANAGER_H_
+
+#include <sys/cdefs.h>
+
+#include "brillo_audio_device_info.h"
+
+__BEGIN_DECLS
+
+struct BAudioManager;
+
+typedef struct BAudioManager BAudioManager;
+
+// Get a pointer to a BAudioManager. This object will refer to the same
+// underlying client object no matter how many times it is called.
+//
+// Returns a pointer to a BAudioManager. Returns NULL on failure.
+BAudioManager* BAudioManager_new();
+
+// Flag to get input devices.
+static const int GET_DEVICES_INPUTS = 1;
+// Flag to get output devices.
+static const int GET_DEVICES_OUTPUTS = 2;
+
+// Returns the list of input/output devices connected to the system.
+//
+// Arg:
+//   brillo_audio_manager: A pointer to a BAudioManager.
+//   flag: Either GET_DEVICES_INPUTS or GET_DEVICES_OUTPUTS.
+//   device_array: An array of BAudioDeviceInfo pointers. The caller has to
+//                 allocate this array.
+//   size: The size of device_array.
+//   num_devices: A pointer to an unsigned int which will represent the number
+//                of audio devices connected to the device.
+//
+// Returns 0 on success and errno on failure.
+int BAudioManager_getDevices(
+    const BAudioManager* brillo_audio_manager, int flag,
+    BAudioDeviceInfo* device_array[], unsigned int size,
+    unsigned int* num_devices);
+
+// Select the input device to be used for recording.
+//
+// Arg:
+//   brillo_audio_manager: A pointer to a BAudioManager.
+//   device: Device to set as the input device. Note that the device has to be
+//           an input device.
+//
+// Returns 0 on success and errno on failure.
+int BAudioManager_setInputDevice(const BAudioManager* brillo_audio_manager,
+                                 const BAudioDeviceInfo* device);
+
+// Usage types.
+enum BAudioUsage {
+  kUsageAlarm,
+  kUsageMedia,
+  kUsageNotifications,
+  kUsageSystem,
+  kUsageInvalid
+};
+
+// Select the output device to be used for playback.
+//
+// Arg:
+//   brillo_audio_manager: A pointer to a BAudioManager.
+//   device: Device to set as the output device. Note that the device has to
+//           be an output device.
+//   usage: A BAudioUsage type representing a usage to route to |device|.
+//
+// Returns 0 on success and errno on failure.
+int BAudioManager_setOutputDevice(
+    const BAudioManager* brillo_audio_manager, const BAudioDeviceInfo* device,
+    BAudioUsage usage);
+
+// Get the number of steps for a given stream type.
+//
+// Args:
+//   brillo_audio_manager: A pointer to a BAudioManager object.
+//   usage: A BAudioUsage representing the audio stream.
+//   max_steps: A pointer to an int representing the number of steps for a given
+//              usage.
+//
+// Returns 0 on success and errno on failure.
+int BAudioManager_getMaxVolumeSteps(const BAudioManager* brillo_audio_manager,
+                                    BAudioUsage usage,
+                                    int* max_steps);
+
+// Set the number of steps for a given stream type.
+//
+// Args:
+//   brillo_audio_manager: A pointer to a BAudioManager object.
+//   usage: A BAudioUsage representing the audio stream.
+//   max_steps: An int representing the number of steps to use for a given
+//              usage.
+//
+// Returns 0 on success and errno on failure.
+int BAudioManager_setMaxVolumeSteps(const BAudioManager* brillo_audio_manager,
+                                    BAudioUsage usage,
+                                    int max_steps);
+
+// Set the volume for a given stream type.
+//
+// Args:
+//   brillo_audio_manager: A pointer to a BAudioManager object.
+//   usage: A BAudioUsage representing the audio stream.
+//   device: A pointer to a BAudioDeviceInfo object.
+//   index: An int representing the index to set the volume to. The index must
+//           be less than max_steps if BAudioManager_setMaxVolumeSteps was
+//           called or 100 otherwise.
+//
+// Returns 0 on success and errno on failure.
+int BAudioManager_setVolumeIndex(const BAudioManager* brillo_audio_manager,
+                                 BAudioUsage usage,
+                                 const BAudioDeviceInfo* device,
+                                 int index);
+
+// Get the volume for a given stream type.
+//
+// Args:
+//   brillo_audio_manager: A pointer to a BAudioManager object.
+//   usage: A BAudioUsage representing the audio stream.
+//   device: A pointer to a BAudioDeviceInfo object.
+//   index: A pointer to an int. This will be set to an int representing the
+//          volume index for |usage|.
+//
+// Returns 0 on success and errno on failure.
+int BAudioManager_getVolumeIndex(const BAudioManager* brillo_audio_manager,
+                                 BAudioUsage usage,
+                                 const BAudioDeviceInfo* device,
+                                 int* index);
+
+// Get the default stream for volume buttons. If
+// BAudioManager_setVolumeControlUsage has not been called, |usage| will be set
+// to kUsageInvalid.
+//
+// Args:
+//  brillo_audio_manager: A pointer to a BAudioManager object.
+//  usage: A pointer to a BAudioUsage representing the audio stream.
+//
+// Returns 0 on success and errno on failure.
+int BAudioManager_getVolumeControlUsage(
+    const BAudioManager* brillo_audio_manager, BAudioUsage* usage);
+
+// Set the default stream to use for volume buttons. By default, streams will be
+// ordered by priority:
+//   1. kUsageAlarm
+//   2. kUsageNotifications
+//   3. kUsageSystem
+//   4. kUsageMedia
+//
+// Calling BAudioManager_setVolumeControlUsage with kUsageInvalid will reset
+// the volume control stream to its default priorities and undo the effects of
+// previous calls to BAudioManager_setVolumeControlUsage.
+//
+// Args:
+//  brillo_audio_manager: A pointer to a BAudioManager object.
+//  usage: A BAudioUsage representing the audio stream.
+//
+// Returns 0 on success and errno on failure.
+int BAudioManager_setVolumeControlUsage(
+    const BAudioManager* brillo_audio_manager, BAudioUsage usage);
+
+// Increment the volume of active streams or stream selected using
+// BAudioManager_setVolumeControlUsage.
+//
+// Args:
+//   brillo_audio_manager: A pointer to a BAudioManager object.
+//
+// Returns 0 on success and errno on failure.
+int BAudioManager_incrementVolume(const BAudioManager* brillo_audio_manager);
+
+// Decrement the volume of active streams or stream selected using
+// BAudioManager_setVolumeControlUsage.
+//
+// Args:
+//   brillo_audio_manager: A pointer to a BAudioManager object.
+//
+// Returns 0 on success and errno on failure.
+int BAudioManager_decrementVolume(const BAudioManager* brillo_audio_manager);
+
+// Object used for callbacks.
+struct BAudioCallback {
+  // Function to be called when an audio device is added. If multiple audio
+  // devices are added, then this function will be called multiple times. The
+  // user is not responsible for freeing added_device.
+  void (*OnAudioDeviceAdded)(const BAudioDeviceInfo* added_device,
+                             void* user_data);
+
+  // Function to be called when an audio device is removed. If multiple audio
+  // devices are removed, then this function will be called multiple times. The
+  // user is not responsible for freeing removed_device.
+  void (*OnAudioDeviceRemoved)(const BAudioDeviceInfo* removed_device,
+                               void* user_data);
+
+  // Function to be called when the volume button is pressed.
+  void (*OnVolumeChanged)(BAudioUsage usage,
+                          int old_volume_index,
+                          int new_volume_index,
+                          void* user_data);
+};
+
+typedef struct BAudioCallback BAudioCallback;
+
+// Registers a callback object that lets clients know when audio devices have
+// been added/removed from the system.
+//
+// Arg:
+//   brillo_audio_manager: A pointer to a BAudioManager.
+//   callback: An object of type BAudioCallback. The BAudioManager
+//             maintains ownership of this object.
+//   user_data: A pointer to user data. This is not used by BAudioManager and
+//               is passed as an arg to callbacks.
+//   callback_id: A pointer to an int. The int represents a token that can be
+//                used to de-register this callback. Contains 0 on failure.
+//
+// Returns 0 on success and errno on failure.
+int BAudioManager_registerAudioCallback(
+    const BAudioManager* brillo_audio_manager, const BAudioCallback* callback,
+    void* user_data, int* callback_id);
+
+// Unregisters a callback object.
+//
+// Arg:
+//   brillo_audio_manager: A pointer to a BAudioManager.
+//   callback_id: A token corresponding to the callback object.
+//
+// Returns 0 on success and errno on failure.
+int BAudioManager_unregisterAudioCallback(
+    const BAudioManager* brillo_audio_manager, int callback_id);
+
+// Free a Brillo audio manager object.
+//
+// Arg:
+//   brillo_audio_manager: A pointer to a BAudioManager to be freed.
+//
+// Returns 0 on success and errno on failure.
+int BAudioManager_delete(BAudioManager* brillo_audio_manager);
+
+__END_DECLS
+
+#endif  // BRILLO_AUDIO_AUDIOSERVICE_BRILLO_AUDIO_MANAGER_H_
diff --git a/media/brillo/audio/audioservice/main_audio_service.cpp b/media/brillo/audio/audioservice/main_audio_service.cpp
new file mode 100644
index 0000000..e8cb605
--- /dev/null
+++ b/media/brillo/audio/audioservice/main_audio_service.cpp
@@ -0,0 +1,27 @@
+// Copyright 2016 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+#include <brillo/flag_helper.h>
+#include <brillo/syslog_logging.h>
+
+#include "audio_daemon.h"
+
+int main(int argc, char** argv) {
+  brillo::FlagHelper::Init(argc, argv, "Brillo audio service,");
+  brillo::InitLog(brillo::kLogToSyslog | brillo::kLogHeader);
+  LOG(INFO) << "Starting brilloaudioservice.";
+  brillo::AudioDaemon audio_daemon;
+  return audio_daemon.Run();
+}
diff --git a/media/brillo/audio/audioservice/test/audio_daemon_mock.h b/media/brillo/audio/audioservice/test/audio_daemon_mock.h
new file mode 100644
index 0000000..c5ed43e
--- /dev/null
+++ b/media/brillo/audio/audioservice/test/audio_daemon_mock.h
@@ -0,0 +1,44 @@
+// Copyright 2016 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+// Mock of audio daemon.
+
+#ifndef BRILLO_AUDIO_AUDIOSERVICE_TEST_AUDIO_DAEMON_MOCK_H_
+#define BRILLO_AUDIO_AUDIOSERVICE_TEST_AUDIO_DAEMON_MOCK_H_
+
+#include <gmock/gmock.h>
+#include <gtest/gtest_prod.h>
+
+#include "audio_daemon.h"
+
+namespace brillo {
+
+class AudioDaemonMock : public AudioDaemon {
+ public:
+  AudioDaemonMock() = default;
+  ~AudioDaemonMock() {}
+
+ private:
+  friend class AudioDaemonTest;
+  FRIEND_TEST(AudioDaemonTest, RegisterService);
+  FRIEND_TEST(AudioDaemonTest, TestAPSConnectInitializesHandlersOnlyOnce);
+  FRIEND_TEST(AudioDaemonTest, TestDeviceCallbackInitializesBASIfNULL);
+
+  MOCK_METHOD0(InitializeHandlers, void());
+};
+
+}  // namespace brillo
+
+#endif  // BRILLO_AUDIO_AUDIOSERVICE_TEST_AUDIO_DAEMON_MOCK_H_
diff --git a/media/brillo/audio/audioservice/test/audio_daemon_test.cpp b/media/brillo/audio/audioservice/test/audio_daemon_test.cpp
new file mode 100644
index 0000000..3ff5482
--- /dev/null
+++ b/media/brillo/audio/audioservice/test/audio_daemon_test.cpp
@@ -0,0 +1,68 @@
+// Copyright 2016 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+// Tests for audio daemon.
+
+#include "audio_daemon_mock.h"
+
+#include <memory>
+#include <vector>
+
+#include <binder/Binder.h>
+#include <binderwrapper/binder_test_base.h>
+#include <binderwrapper/stub_binder_wrapper.h>
+#include <gmock/gmock.h>
+
+#include "audio_device_handler_mock.h"
+
+using android::BinderTestBase;
+using android::IInterface;
+using std::make_shared;
+using testing::_;
+using testing::AnyNumber;
+
+namespace brillo {
+
+class AudioDaemonTest : public BinderTestBase {
+ public:
+  AudioDaemonMock daemon_;
+  AudioDeviceHandlerMock device_handler_;
+};
+
+TEST_F(AudioDaemonTest, RegisterService) {
+  daemon_.InitializeBrilloAudioService();
+  EXPECT_EQ(daemon_.brillo_audio_service_,
+            binder_wrapper()->GetRegisteredService(
+                "android.brillo.brilloaudioservice.BrilloAudioService"));
+}
+
+TEST_F(AudioDaemonTest, TestAPSConnectInitializesHandlersOnlyOnce) {
+  binder_wrapper()->SetBinderForService("media.audio_policy",
+                                        binder_wrapper()->CreateLocalBinder());
+  daemon_.handlers_initialized_ = false;
+  EXPECT_CALL(daemon_, InitializeHandlers()).Times(1);
+  daemon_.ConnectToAPS();
+}
+
+TEST_F(AudioDaemonTest, TestDeviceCallbackInitializesBASIfNULL) {
+  daemon_.DeviceCallback(
+      AudioDeviceHandlerMock::DeviceConnectionState::kDevicesConnected,
+      std::vector<int>());
+  EXPECT_EQ(daemon_.brillo_audio_service_,
+            binder_wrapper()->GetRegisteredService(
+                "android.brillo.brilloaudioservice.BrilloAudioService"));
+}
+
+}  // namespace brillo
diff --git a/media/brillo/audio/audioservice/test/audio_device_handler_mock.h b/media/brillo/audio/audioservice/test/audio_device_handler_mock.h
new file mode 100644
index 0000000..fcc711f
--- /dev/null
+++ b/media/brillo/audio/audioservice/test/audio_device_handler_mock.h
@@ -0,0 +1,80 @@
+// Copyright 2016 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+// Mock of AudioDeviceHandler.
+
+#ifndef BRILLO_AUDIO_AUDIOSERVICE_TEST_AUDIO_DEVICE_HANDLER_MOCK_H_
+#define BRILLO_AUDIO_AUDIOSERVICE_TEST_AUDIO_DEVICE_HANDLER_MOCK_H_
+
+#include <base/files/file_path.h>
+#include <gmock/gmock.h>
+#include <gtest/gtest_prod.h>
+#include <system/audio.h>
+#include <system/audio_policy.h>
+
+#include "audio_device_handler.h"
+
+namespace brillo {
+
+class AudioDeviceHandlerMock : public AudioDeviceHandler {
+ public:
+  AudioDeviceHandlerMock() = default;
+  ~AudioDeviceHandlerMock() {}
+
+  // Reset all local data.
+  void Reset() {
+    connected_input_devices_.clear();
+    connected_output_devices_.clear();
+    headphone_ = false;
+    microphone_ = false;
+  }
+
+ private:
+  friend class AudioDeviceHandlerTest;
+  FRIEND_TEST(AudioDeviceHandlerTest,
+              DisconnectAllSupportedDevicesCallsDisconnect);
+  FRIEND_TEST(AudioDeviceHandlerTest, InitCallsDisconnectAllSupportedDevices);
+  FRIEND_TEST(AudioDeviceHandlerTest, InitialAudioStateMic);
+  FRIEND_TEST(AudioDeviceHandlerTest, InitialAudioStateHeadphone);
+  FRIEND_TEST(AudioDeviceHandlerTest, InitialAudioStateHeadset);
+  FRIEND_TEST(AudioDeviceHandlerTest, InitialAudioStateNone);
+  FRIEND_TEST(AudioDeviceHandlerTest, InitialAudioStateInvalid);
+  FRIEND_TEST(AudioDeviceHandlerTest, InitCallsDisconnect);
+  FRIEND_TEST(AudioDeviceHandlerTest, ProcessEventEmpty);
+  FRIEND_TEST(AudioDeviceHandlerTest, ProcessEventMicrophonePresent);
+  FRIEND_TEST(AudioDeviceHandlerTest, ProcessEventHeadphonePresent);
+  FRIEND_TEST(AudioDeviceHandlerTest, ProcessEventMicrophoneNotPresent);
+  FRIEND_TEST(AudioDeviceHandlerTest, ProcessEventHeadphoneNotPresent);
+  FRIEND_TEST(AudioDeviceHandlerTest, ProcessEventInvalid);
+  FRIEND_TEST(AudioDeviceHandlerTest, UpdateAudioSystemNone);
+  FRIEND_TEST(AudioDeviceHandlerTest, UpdateAudioSystemConnectMic);
+  FRIEND_TEST(AudioDeviceHandlerTest, UpdateAudioSystemConnectHeadphone);
+  FRIEND_TEST(AudioDeviceHandlerTest, UpdateAudioSystemConnectHeadset);
+  FRIEND_TEST(AudioDeviceHandlerTest, UpdateAudioSystemDisconnectMic);
+  FRIEND_TEST(AudioDeviceHandlerTest, UpdateAudioSystemDisconnectHeadphone);
+  FRIEND_TEST(AudioDeviceHandlerTest, UpdateAudioSystemDisconnectHeadset);
+  FRIEND_TEST(AudioDeviceHandlerTest, ConnectAudioDeviceInput);
+  FRIEND_TEST(AudioDeviceHandlerTest, ConnectAudioDeviceOutput);
+  FRIEND_TEST(AudioDeviceHandlerTest, DisconnectAudioDeviceInput);
+  FRIEND_TEST(AudioDeviceHandlerTest, DisconnectAudioDeviceOutput);
+
+  MOCK_METHOD2(NotifyAudioPolicyService,
+               void(audio_devices_t device, audio_policy_dev_state_t state));
+  MOCK_METHOD1(TriggerCallback, void(DeviceConnectionState));
+};
+
+}  // namespace brillo
+
+#endif  // BRILLO_AUDIO_AUDIOSERVICE_TEST_AUDIO_DEVICE_HANDLER_MOCK_H_
diff --git a/media/brillo/audio/audioservice/test/audio_device_handler_test.cpp b/media/brillo/audio/audioservice/test/audio_device_handler_test.cpp
new file mode 100644
index 0000000..d14faa0
--- /dev/null
+++ b/media/brillo/audio/audioservice/test/audio_device_handler_test.cpp
@@ -0,0 +1,408 @@
+// Copyright 2016 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+// Tests for audio device handler.
+
+#include "audio_device_handler_mock.h"
+
+#include <string>
+
+#include <base/files/file_path.h>
+#include <base/files/file_util.h>
+#include <base/files/scoped_temp_dir.h>
+#include <base/strings/string_number_conversions.h>
+#include <gmock/gmock.h>
+#include <gtest/gtest.h>
+
+using base::FilePath;
+using base::IntToString;
+using base::ScopedTempDir;
+using base::WriteFile;
+using brillo::AudioDeviceHandlerMock;
+using testing::_;
+using testing::AnyNumber;
+using testing::AtLeast;
+
+namespace brillo {
+
+class AudioDeviceHandlerTest : public testing::Test {
+ public:
+  void SetUp() override {
+    EXPECT_TRUE(temp_dir_.CreateUniqueTempDir());
+    h2w_file_path_ = temp_dir_.path().Append("h2wstate");
+  }
+
+  void TearDown() override { handler_.Reset(); }
+
+  // Method to store the current state of the audio jack to a file.
+  //
+  // |value| - Value in the h2w file.
+  void WriteToH2WFile(int value) {
+    std::string value_string = IntToString(value);
+    WriteFile(h2w_file_path_, value_string.c_str(), value_string.length());
+  }
+
+  AudioDeviceHandlerMock handler_;
+  FilePath h2w_file_path_;
+
+ private:
+  ScopedTempDir temp_dir_;
+};
+
+// Test that DisconnectAllSupportedDevices() calls NotifyAudioPolicyService()
+// the right number of times.
+TEST_F(AudioDeviceHandlerTest, DisconnectAllSupportedDevicesCallsDisconnect) {
+  EXPECT_CALL(handler_,
+              NotifyAudioPolicyService(
+                  _, AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE)).Times(3);
+  handler_.DisconnectAllSupportedDevices();
+  EXPECT_EQ(handler_.changed_devices_.size(), 3);
+}
+
+// Test that Init() calls DisconnectAllSupportedDevices().
+TEST_F(AudioDeviceHandlerTest, InitCallsDisconnectAllSupportedDevices) {
+  EXPECT_CALL(handler_,
+              NotifyAudioPolicyService(
+                  _, AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE)).Times(3);
+  EXPECT_CALL(handler_, TriggerCallback(
+      AudioDeviceHandlerMock::DeviceConnectionState::kDevicesDisconnected))
+      .Times(AtLeast(1));
+  EXPECT_CALL(handler_,
+              NotifyAudioPolicyService(
+                  _, AUDIO_POLICY_DEVICE_STATE_AVAILABLE)).Times(AnyNumber());
+  EXPECT_CALL(handler_, TriggerCallback(
+      AudioDeviceHandlerMock::DeviceConnectionState::kDevicesConnected))
+      .Times(AnyNumber());
+  handler_.Init(nullptr);
+}
+
+// Test GetInitialAudioDeviceState() with just a microphone.
+TEST_F(AudioDeviceHandlerTest, InitialAudioStateMic) {
+  WriteToH2WFile(2);
+  EXPECT_CALL(handler_,
+              NotifyAudioPolicyService(AUDIO_DEVICE_IN_WIRED_HEADSET,
+                                       AUDIO_POLICY_DEVICE_STATE_AVAILABLE));
+  EXPECT_CALL(handler_, TriggerCallback(
+      AudioDeviceHandlerMock::DeviceConnectionState::kDevicesConnected));
+  handler_.GetInitialAudioDeviceState(h2w_file_path_);
+  EXPECT_NE(
+      handler_.connected_input_devices_.find(AUDIO_DEVICE_IN_WIRED_HEADSET),
+      handler_.connected_input_devices_.end());
+  EXPECT_EQ(handler_.connected_output_devices_.size(), 0);
+  EXPECT_EQ(handler_.changed_devices_.size(), 1);
+  EXPECT_EQ(handler_.changed_devices_[0], AUDIO_DEVICE_IN_WIRED_HEADSET);
+}
+
+// Test GetInitialAudioDeviceState() with a headphone.
+TEST_F(AudioDeviceHandlerTest, InitialAudioStateHeadphone) {
+  WriteToH2WFile(1);
+  EXPECT_CALL(handler_,
+              NotifyAudioPolicyService(AUDIO_DEVICE_OUT_WIRED_HEADPHONE,
+                                       AUDIO_POLICY_DEVICE_STATE_AVAILABLE));
+  EXPECT_CALL(handler_, TriggerCallback(
+      AudioDeviceHandlerMock::DeviceConnectionState::kDevicesConnected));
+  handler_.GetInitialAudioDeviceState(h2w_file_path_);
+  EXPECT_EQ(handler_.connected_input_devices_.size(), 0);
+  EXPECT_NE(
+      handler_.connected_output_devices_.find(AUDIO_DEVICE_OUT_WIRED_HEADPHONE),
+      handler_.connected_output_devices_.end());
+  EXPECT_EQ(handler_.changed_devices_.size(), 1);
+  EXPECT_EQ(handler_.changed_devices_[0], AUDIO_DEVICE_OUT_WIRED_HEADPHONE);
+}
+
+// Test GetInitialAudioDeviceState() with a headset.
+TEST_F(AudioDeviceHandlerTest, InitialAudioStateHeadset) {
+  WriteToH2WFile(3);
+  EXPECT_CALL(handler_, TriggerCallback(
+      AudioDeviceHandlerMock::DeviceConnectionState::kDevicesConnected));
+  EXPECT_CALL(handler_,
+              NotifyAudioPolicyService(AUDIO_DEVICE_IN_WIRED_HEADSET,
+                                       AUDIO_POLICY_DEVICE_STATE_AVAILABLE));
+  EXPECT_CALL(handler_,
+              NotifyAudioPolicyService(AUDIO_DEVICE_OUT_WIRED_HEADSET,
+                                       AUDIO_POLICY_DEVICE_STATE_AVAILABLE));
+  handler_.GetInitialAudioDeviceState(h2w_file_path_);
+  EXPECT_NE(
+      handler_.connected_input_devices_.find(AUDIO_DEVICE_IN_WIRED_HEADSET),
+      handler_.connected_input_devices_.end());
+  EXPECT_NE(
+      handler_.connected_output_devices_.find(AUDIO_DEVICE_OUT_WIRED_HEADSET),
+      handler_.connected_output_devices_.end());
+  EXPECT_EQ(handler_.changed_devices_.size(), 2);
+}
+
+// Test GetInitialAudioDeviceState() without any devices connected to the audio
+// jack. No need to call NotifyAudioPolicyService() since that's already handled
+// by Init().
+TEST_F(AudioDeviceHandlerTest, InitialAudioStateNone) {
+  WriteToH2WFile(0);
+  EXPECT_CALL(handler_, TriggerCallback(_));
+  handler_.GetInitialAudioDeviceState(h2w_file_path_);
+  EXPECT_EQ(handler_.connected_input_devices_.size(), 0);
+  EXPECT_EQ(handler_.connected_output_devices_.size(), 0);
+  EXPECT_EQ(handler_.changed_devices_.size(), 0);
+}
+
+// Test GetInitialAudioDeviceState() with an invalid file. The audio handler
+// should not fail in this case because it should work on boards that don't
+// support audio jacks.
+TEST_F(AudioDeviceHandlerTest, InitialAudioStateInvalid) {
+  FilePath path = h2w_file_path_;
+  handler_.GetInitialAudioDeviceState(h2w_file_path_);
+  EXPECT_EQ(handler_.connected_input_devices_.size(), 0);
+  EXPECT_EQ(handler_.connected_output_devices_.size(), 0);
+}
+
+// Test ProcessEvent() with an empty input_event arg.
+TEST_F(AudioDeviceHandlerTest, ProcessEventEmpty) {
+  struct input_event event;
+  event.type = 0;
+  event.code = 0;
+  event.value = 0;
+  EXPECT_CALL(handler_, TriggerCallback(_));
+  handler_.ProcessEvent(event);
+  EXPECT_FALSE(handler_.headphone_);
+  EXPECT_FALSE(handler_.microphone_);
+}
+
+// Test ProcessEvent() with a microphone present input_event arg.
+TEST_F(AudioDeviceHandlerTest, ProcessEventMicrophonePresent) {
+  struct input_event event;
+  event.type = EV_SW;
+  event.code = SW_MICROPHONE_INSERT;
+  event.value = 1;
+  handler_.ProcessEvent(event);
+  EXPECT_FALSE(handler_.headphone_);
+  EXPECT_TRUE(handler_.microphone_);
+}
+
+// Test ProcessEvent() with a headphone present input_event arg.
+TEST_F(AudioDeviceHandlerTest, ProcessEventHeadphonePresent) {
+  struct input_event event;
+  event.type = EV_SW;
+  event.code = SW_HEADPHONE_INSERT;
+  event.value = 1;
+  handler_.ProcessEvent(event);
+  EXPECT_TRUE(handler_.headphone_);
+  EXPECT_FALSE(handler_.microphone_);
+}
+
+// Test ProcessEvent() with a microphone not present input_event arg.
+TEST_F(AudioDeviceHandlerTest, ProcessEventMicrophoneNotPresent) {
+  struct input_event event;
+  event.type = EV_SW;
+  event.code = SW_MICROPHONE_INSERT;
+  event.value = 0;
+  handler_.ProcessEvent(event);
+  EXPECT_FALSE(handler_.headphone_);
+  EXPECT_FALSE(handler_.microphone_);
+}
+
+// Test ProcessEvent() with a headphone not present input_event arg.
+TEST_F(AudioDeviceHandlerTest, ProcessEventHeadphoneNotPresent) {
+  struct input_event event;
+  event.type = EV_SW;
+  event.code = SW_HEADPHONE_INSERT;
+  event.value = 0;
+  handler_.ProcessEvent(event);
+  EXPECT_FALSE(handler_.headphone_);
+  EXPECT_FALSE(handler_.microphone_);
+}
+
+// Test ProcessEvent() with an unsupported input_event arg.
+TEST_F(AudioDeviceHandlerTest, ProcessEventInvalid) {
+  struct input_event event;
+  event.type = EV_SW;
+  event.code = SW_MAX;
+  event.value = 0;
+  handler_.ProcessEvent(event);
+  EXPECT_FALSE(handler_.headphone_);
+  EXPECT_FALSE(handler_.microphone_);
+}
+
+// Test UpdateAudioSystem() without any devices connected.
+TEST_F(AudioDeviceHandlerTest, UpdateAudioSystemNone) {
+  EXPECT_CALL(handler_,
+              NotifyAudioPolicyService(
+                  _, AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE)).Times(0);
+  EXPECT_CALL(handler_, TriggerCallback(
+      AudioDeviceHandlerMock::DeviceConnectionState::kDevicesDisconnected));
+  handler_.UpdateAudioSystem(handler_.headphone_, handler_.microphone_);
+  EXPECT_EQ(handler_.changed_devices_.size(), 0);
+}
+
+// Test UpdateAudioSystem() when disconnecting a microphone.
+TEST_F(AudioDeviceHandlerTest, UpdateAudioSystemDisconnectMic) {
+  audio_devices_t device = AUDIO_DEVICE_IN_WIRED_HEADSET;
+  handler_.connected_input_devices_.insert(device);
+  EXPECT_CALL(handler_,
+              NotifyAudioPolicyService(device,
+                                       AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE));
+  EXPECT_CALL(handler_, TriggerCallback(
+      AudioDeviceHandlerMock::DeviceConnectionState::kDevicesDisconnected));
+  handler_.UpdateAudioSystem(handler_.headphone_, handler_.microphone_);
+  EXPECT_EQ(handler_.connected_input_devices_.size(), 0);
+  EXPECT_EQ(handler_.connected_output_devices_.size(), 0);
+  EXPECT_EQ(handler_.changed_devices_.size(), 1);
+  EXPECT_EQ(handler_.changed_devices_[0], device);
+}
+
+// Test UpdateAudioSystem() when disconnecting a headphone.
+TEST_F(AudioDeviceHandlerTest, UpdateAudioSystemDisconnectHeadphone) {
+  audio_devices_t device = AUDIO_DEVICE_OUT_WIRED_HEADPHONE;
+  handler_.connected_output_devices_.insert(device);
+  EXPECT_CALL(handler_,
+              NotifyAudioPolicyService(device,
+                                       AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE));
+  EXPECT_CALL(handler_, TriggerCallback(
+      AudioDeviceHandlerMock::DeviceConnectionState::kDevicesDisconnected));
+  handler_.UpdateAudioSystem(handler_.headphone_, handler_.microphone_);
+  EXPECT_EQ(handler_.connected_input_devices_.size(), 0);
+  EXPECT_EQ(handler_.connected_output_devices_.size(), 0);
+  EXPECT_EQ(handler_.changed_devices_.size(), 1);
+  EXPECT_EQ(handler_.changed_devices_[0], device);
+}
+
+// Test UpdateAudioSystem() when disconnecting a headset & headphones.
+TEST_F(AudioDeviceHandlerTest, UpdateAudioSystemDisconnectHeadset) {
+  handler_.connected_input_devices_.insert(AUDIO_DEVICE_IN_WIRED_HEADSET);
+  handler_.connected_output_devices_.insert(AUDIO_DEVICE_OUT_WIRED_HEADSET);
+  handler_.connected_output_devices_.insert(AUDIO_DEVICE_OUT_WIRED_HEADPHONE);
+  EXPECT_CALL(handler_,
+              NotifyAudioPolicyService(AUDIO_DEVICE_IN_WIRED_HEADSET,
+                                       AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE));
+  EXPECT_CALL(handler_,
+              NotifyAudioPolicyService(AUDIO_DEVICE_OUT_WIRED_HEADSET,
+                                       AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE));
+  EXPECT_CALL(handler_,
+              NotifyAudioPolicyService(AUDIO_DEVICE_OUT_WIRED_HEADPHONE,
+                                       AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE));
+  EXPECT_CALL(handler_, TriggerCallback(
+      AudioDeviceHandlerMock::DeviceConnectionState::kDevicesDisconnected));
+  handler_.UpdateAudioSystem(handler_.headphone_, handler_.microphone_);
+  EXPECT_EQ(handler_.connected_input_devices_.size(), 0);
+  EXPECT_EQ(handler_.connected_output_devices_.size(), 0);
+  EXPECT_EQ(handler_.changed_devices_.size(), 3);
+}
+
+// Test UpdateAudioSystem() when connecting a microphone.
+TEST_F(AudioDeviceHandlerTest, UpdateAudioSystemConnectMic) {
+  handler_.microphone_ = true;
+  EXPECT_CALL(handler_,
+              NotifyAudioPolicyService(AUDIO_DEVICE_IN_WIRED_HEADSET,
+                                       AUDIO_POLICY_DEVICE_STATE_AVAILABLE));
+  EXPECT_CALL(handler_, TriggerCallback(
+      AudioDeviceHandlerMock::DeviceConnectionState::kDevicesConnected));
+  handler_.UpdateAudioSystem(handler_.headphone_, handler_.microphone_);
+  EXPECT_EQ(handler_.connected_input_devices_.size(), 1);
+  EXPECT_EQ(handler_.connected_output_devices_.size(), 0);
+  EXPECT_EQ(handler_.changed_devices_.size(), 1);
+  EXPECT_EQ(handler_.changed_devices_[0], AUDIO_DEVICE_IN_WIRED_HEADSET);
+}
+
+// Test UpdateAudioSystem() when connecting a headphone.
+TEST_F(AudioDeviceHandlerTest, UpdateAudioSystemConnectHeadphone) {
+  handler_.headphone_ = true;
+  EXPECT_CALL(handler_,
+              NotifyAudioPolicyService(AUDIO_DEVICE_OUT_WIRED_HEADPHONE,
+                                       AUDIO_POLICY_DEVICE_STATE_AVAILABLE));
+  EXPECT_CALL(handler_, TriggerCallback(
+      AudioDeviceHandlerMock::DeviceConnectionState::kDevicesConnected));
+  handler_.UpdateAudioSystem(handler_.headphone_, handler_.microphone_);
+  EXPECT_EQ(handler_.connected_input_devices_.size(), 0);
+  EXPECT_EQ(handler_.connected_output_devices_.size(), 1);
+  EXPECT_EQ(handler_.changed_devices_.size(), 1);
+  EXPECT_EQ(handler_.changed_devices_[0], AUDIO_DEVICE_OUT_WIRED_HEADPHONE);
+}
+
+// Test UpdateAudioSystem() when connecting a headset.
+TEST_F(AudioDeviceHandlerTest, UpdateAudioSystemConnectHeadset) {
+  handler_.headphone_ = true;
+  handler_.microphone_ = true;
+  EXPECT_CALL(handler_,
+              NotifyAudioPolicyService(AUDIO_DEVICE_IN_WIRED_HEADSET,
+                                       AUDIO_POLICY_DEVICE_STATE_AVAILABLE));
+  EXPECT_CALL(handler_,
+              NotifyAudioPolicyService(AUDIO_DEVICE_OUT_WIRED_HEADSET,
+                                       AUDIO_POLICY_DEVICE_STATE_AVAILABLE));
+  EXPECT_CALL(handler_, TriggerCallback(
+      AudioDeviceHandlerMock::DeviceConnectionState::kDevicesConnected));
+  handler_.UpdateAudioSystem(handler_.headphone_, handler_.microphone_);
+  EXPECT_EQ(handler_.connected_input_devices_.size(), 1);
+  EXPECT_EQ(handler_.connected_output_devices_.size(), 1);
+  EXPECT_EQ(handler_.changed_devices_.size(), 2);
+}
+
+// Test ConnectAudioDevice() with an input device.
+TEST_F(AudioDeviceHandlerTest, ConnectAudioDeviceInput) {
+  audio_devices_t device = AUDIO_DEVICE_IN_WIRED_HEADSET;
+  EXPECT_CALL(handler_,
+              NotifyAudioPolicyService(device,
+                                       AUDIO_POLICY_DEVICE_STATE_AVAILABLE));
+  handler_.ConnectAudioDevice(device);
+  EXPECT_EQ(handler_.connected_output_devices_.size(), 0);
+  EXPECT_NE(
+      handler_.connected_input_devices_.find(device),
+      handler_.connected_input_devices_.end());
+  EXPECT_EQ(handler_.changed_devices_.size(), 1);
+  EXPECT_EQ(handler_.changed_devices_[0], device);
+}
+
+// Test ConnectAudioDevice() with an output device.
+TEST_F(AudioDeviceHandlerTest, ConnectAudioDeviceOutput) {
+  audio_devices_t device = AUDIO_DEVICE_OUT_WIRED_HEADSET;
+  EXPECT_CALL(handler_,
+              NotifyAudioPolicyService(device,
+                                       AUDIO_POLICY_DEVICE_STATE_AVAILABLE));
+  handler_.ConnectAudioDevice(device);
+  EXPECT_EQ(handler_.connected_input_devices_.size(), 0);
+  EXPECT_NE(
+      handler_.connected_output_devices_.find(device),
+      handler_.connected_output_devices_.end());
+  EXPECT_EQ(handler_.changed_devices_.size(), 1);
+  EXPECT_EQ(handler_.changed_devices_[0], device);
+}
+
+// Test DisconnectAudioDevice() with an input device.
+TEST_F(AudioDeviceHandlerTest, DisconnectAudioDeviceInput) {
+  audio_devices_t device = AUDIO_DEVICE_IN_WIRED_HEADSET;
+  handler_.connected_input_devices_.insert(device);
+  handler_.connected_output_devices_.insert(device);
+  EXPECT_CALL(handler_,
+              NotifyAudioPolicyService(device,
+                                       AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE));
+  handler_.DisconnectAudioDevice(device);
+  EXPECT_EQ(handler_.connected_input_devices_.size(), 0);
+  EXPECT_EQ(handler_.connected_output_devices_.size(), 1);
+  EXPECT_EQ(handler_.changed_devices_.size(), 1);
+  EXPECT_EQ(handler_.changed_devices_[0], device);
+}
+
+// Test DisconnectAudioDevice() with an output device.
+TEST_F(AudioDeviceHandlerTest, DisconnectAudioDeviceOutput) {
+  audio_devices_t device = AUDIO_DEVICE_OUT_WIRED_HEADSET;
+  handler_.connected_input_devices_.insert(device);
+  handler_.connected_output_devices_.insert(device);
+  EXPECT_CALL(handler_,
+              NotifyAudioPolicyService(device,
+                                       AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE));
+  handler_.DisconnectAudioDevice(device);
+  EXPECT_EQ(handler_.connected_input_devices_.size(), 1);
+  EXPECT_EQ(handler_.connected_output_devices_.size(), 0);
+  EXPECT_EQ(handler_.changed_devices_.size(), 1);
+  EXPECT_EQ(handler_.changed_devices_[0], device);
+}
+
+}  // namespace brillo
diff --git a/media/brillo/audio/audioservice/test/audio_service_callback_test.cpp b/media/brillo/audio/audioservice/test/audio_service_callback_test.cpp
new file mode 100644
index 0000000..38ced10
--- /dev/null
+++ b/media/brillo/audio/audioservice/test/audio_service_callback_test.cpp
@@ -0,0 +1,62 @@
+// Copyright 2016 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+// Tests for the audio service callback object.
+
+#include <gmock/gmock.h>
+#include <gtest/gtest.h>
+
+#include <hardware/audio.h>
+
+#include "audio_service_callback.h"
+
+namespace brillo {
+
+class AudioServiceCallbackTest : public testing::Test {
+ public:
+  void SetUp() override {
+    connected_call_count_ = 0;
+    disconnected_call_count_ = 0;
+    callback_.OnAudioDeviceAdded = OnDeviceConnectedMock;
+    callback_.OnAudioDeviceRemoved = OnDeviceDisconnectedMock;
+    user_data_ = static_cast<void*>(this);
+  }
+
+  static void OnDeviceConnectedMock(const BAudioDeviceInfo*, void* user_data) {
+    static_cast<AudioServiceCallbackTest*>(user_data)->connected_call_count_++;
+  }
+
+  static void OnDeviceDisconnectedMock(const BAudioDeviceInfo*, void* user_data) {
+    static_cast<AudioServiceCallbackTest*>(
+        user_data)->disconnected_call_count_++;
+  }
+
+  BAudioCallback callback_;
+  void* user_data_;
+  int connected_call_count_;
+  int disconnected_call_count_;
+};
+
+TEST_F(AudioServiceCallbackTest, CallbackCallCount) {
+  std::vector<int> devices = {AUDIO_DEVICE_OUT_WIRED_HEADSET,
+    AUDIO_DEVICE_OUT_WIRED_HEADPHONE};
+  AudioServiceCallback service_callback(&callback_, user_data_);
+  service_callback.OnAudioDevicesConnected(devices);
+  EXPECT_EQ(connected_call_count_, devices.size());
+  service_callback.OnAudioDevicesDisconnected(devices);
+  EXPECT_EQ(disconnected_call_count_, devices.size());
+}
+
+}  // namespace brillo
diff --git a/media/brillo/audio/audioservice/test/audio_volume_handler_mock.h b/media/brillo/audio/audioservice/test/audio_volume_handler_mock.h
new file mode 100644
index 0000000..32028ca
--- /dev/null
+++ b/media/brillo/audio/audioservice/test/audio_volume_handler_mock.h
@@ -0,0 +1,55 @@
+// Copyright 2016 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+// Mock of AudioVolumeHandler.
+
+#ifndef BRILLO_AUDIO_AUDIOSERVICE_TEST_AUDIO_VOLUME_HANDLER_MOCK_H_
+#define BRILLO_AUDIO_AUDIOSERVICE_TEST_AUDIO_VOLUME_HANDLER_MOCK_H_
+
+#include <gmock/gmock.h>
+#include <gtest/gtest_prod.h>
+
+#include "audio_volume_handler.h"
+
+namespace brillo {
+
+class AudioVolumeHandlerMock : public AudioVolumeHandler {
+ public:
+  AudioVolumeHandlerMock() = default;
+  ~AudioVolumeHandlerMock() {}
+
+ private:
+  friend class AudioVolumeHandlerTest;
+  FRIEND_TEST(AudioVolumeHandlerTest, FileGeneration);
+  FRIEND_TEST(AudioVolumeHandlerTest, GetVolumeForKey);
+  FRIEND_TEST(AudioVolumeHandlerTest, GetVolumeForStreamDeviceTuple);
+  FRIEND_TEST(AudioVolumeHandlerTest, SetVolumeForStreamDeviceTuple);
+  FRIEND_TEST(AudioVolumeHandlerTest, InitNoFile);
+  FRIEND_TEST(AudioVolumeHandlerTest, InitFilePresent);
+  FRIEND_TEST(AudioVolumeHandlerTest, ProcessEventEmpty);
+  FRIEND_TEST(AudioVolumeHandlerTest, ProcessEventKeyUp);
+  FRIEND_TEST(AudioVolumeHandlerTest, ProcessEventKeyDown);
+  FRIEND_TEST(AudioVolumeHandlerTest, SelectStream);
+  FRIEND_TEST(AudioVolumeHandlerTest, ComputeNewVolume);
+  FRIEND_TEST(AudioVolumeHandlerTest, GetSetVolumeIndex);
+
+  MOCK_METHOD3(TriggerCallback, void(audio_stream_type_t, int, int));
+  MOCK_METHOD0(InitAPSAllStreams, void());
+  MOCK_METHOD1(AdjustVolumeActiveStreams, void(int));
+};
+
+}  // namespace brillo
+
+#endif  // BRILLO_AUDIO_AUDIOSERVICE_TEST_AUDIO_VOLUME_HANDLER_MOCK_H_
diff --git a/media/brillo/audio/audioservice/test/audio_volume_handler_test.cpp b/media/brillo/audio/audioservice/test/audio_volume_handler_test.cpp
new file mode 100644
index 0000000..47ef236
--- /dev/null
+++ b/media/brillo/audio/audioservice/test/audio_volume_handler_test.cpp
@@ -0,0 +1,212 @@
+// Copyright 2016 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+// Tests for audio volume handler.
+
+#include "audio_volume_handler_mock.h"
+
+#include <memory>
+#include <string>
+
+#include <base/files/file_path.h>
+#include <base/files/file_util.h>
+#include <base/files/scoped_temp_dir.h>
+#include <brillo/key_value_store.h>
+#include <brillo/strings/string_utils.h>
+#include <gmock/gmock.h>
+#include <gtest/gtest.h>
+
+#include "audio_device_handler.h"
+
+using base::FilePath;
+using base::PathExists;
+using base::ScopedTempDir;
+using brillo::string_utils::ToString;
+using std::stoi;
+using testing::_;
+
+namespace brillo {
+
+class AudioVolumeHandlerTest : public testing::Test {
+ public:
+  void SetUp() override {
+    EXPECT_TRUE(temp_dir_.CreateUniqueTempDir());
+    volume_file_path_ = temp_dir_.path().Append("vol_file");
+    handler_.SetVolumeFilePathForTesting(volume_file_path_);
+  }
+
+  void SetupHandlerVolumeFile() {
+    handler_.kv_store_ = std::unique_ptr<KeyValueStore>(new KeyValueStore);
+    handler_.GenerateVolumeFile();
+  }
+
+  AudioVolumeHandlerMock handler_;
+  FilePath volume_file_path_;
+
+ private:
+  ScopedTempDir temp_dir_;
+};
+
+// Test that the volume file is formatted correctly.
+TEST_F(AudioVolumeHandlerTest, FileGeneration) {
+  SetupHandlerVolumeFile();
+  KeyValueStore kv_store;
+  kv_store.Load(volume_file_path_);
+  for (auto stream : handler_.kSupportedStreams_) {
+    std::string value;
+    ASSERT_EQ(handler_.kMinIndex_, 0);
+    ASSERT_EQ(handler_.kMaxIndex_, 100);
+    for (auto device : AudioDeviceHandler::kSupportedOutputDevices_) {
+      ASSERT_TRUE(kv_store.GetString(handler_.kCurrentIndexKey_ + "." +
+                                         ToString(stream) + "." +
+                                         ToString(device),
+                                     &value));
+      ASSERT_EQ(handler_.kDefaultCurrentIndex_, stoi(value));
+    }
+  }
+}
+
+// Test GetVolumeCurrentIndex.
+TEST_F(AudioVolumeHandlerTest, GetVolumeForStreamDeviceTuple) {
+  handler_.kv_store_ = std::unique_ptr<KeyValueStore>(new KeyValueStore);
+  handler_.kv_store_->SetString(handler_.kCurrentIndexKey_ + ".1.2", "100");
+  ASSERT_EQ(
+      handler_.GetVolumeCurrentIndex(static_cast<audio_stream_type_t>(1), 2),
+      100);
+}
+
+// Test SetVolumeCurrentIndex.
+TEST_F(AudioVolumeHandlerTest, SetVolumeForStreamDeviceTuple) {
+  handler_.kv_store_ = std::unique_ptr<KeyValueStore>(new KeyValueStore);
+  handler_.PersistVolumeConfiguration(
+      static_cast<audio_stream_type_t>(1), 2, 100);
+  std::string value;
+  auto key = handler_.kCurrentIndexKey_ + ".1.2";
+  handler_.kv_store_->GetString(key, &value);
+  ASSERT_EQ(stoi(value), 100);
+}
+
+// Test that a new volume file is generated if it doesn't exist.
+TEST_F(AudioVolumeHandlerTest, InitNoFile) {
+  EXPECT_CALL(handler_, InitAPSAllStreams());
+  handler_.Init(nullptr);
+  EXPECT_TRUE(PathExists(volume_file_path_));
+}
+
+// Test that a new volume file isn't generated if it already exists.
+TEST_F(AudioVolumeHandlerTest, InitFilePresent) {
+  KeyValueStore kv_store;
+  kv_store.SetString("foo", "100");
+  kv_store.Save(volume_file_path_);
+  EXPECT_CALL(handler_, InitAPSAllStreams());
+  handler_.Init(nullptr);
+  EXPECT_TRUE(PathExists(volume_file_path_));
+  std::string value;
+  handler_.kv_store_->GetString("foo", &value);
+  EXPECT_EQ(stoi(value), 100);
+}
+
+TEST_F(AudioVolumeHandlerTest, ProcessEventEmpty) {
+  struct input_event event;
+  event.type = 0;
+  event.code = 0;
+  event.value = 0;
+  EXPECT_CALL(handler_, AdjustVolumeActiveStreams(_)).Times(0);
+  handler_.ProcessEvent(event);
+}
+
+TEST_F(AudioVolumeHandlerTest, ProcessEventKeyUp) {
+  struct input_event event;
+  event.type = EV_KEY;
+  event.code = KEY_VOLUMEUP;
+  event.value = 1;
+  EXPECT_CALL(handler_, AdjustVolumeActiveStreams(1));
+  handler_.ProcessEvent(event);
+}
+
+TEST_F(AudioVolumeHandlerTest, ProcessEventKeyDown) {
+  struct input_event event;
+  event.type = EV_KEY;
+  event.code = KEY_VOLUMEDOWN;
+  event.value = 1;
+  EXPECT_CALL(handler_, AdjustVolumeActiveStreams(-1));
+  handler_.ProcessEvent(event);
+}
+
+TEST_F(AudioVolumeHandlerTest, SelectStream) {
+  EXPECT_EQ(handler_.GetVolumeControlStream(), AUDIO_STREAM_DEFAULT);
+  handler_.SetVolumeControlStream(AUDIO_STREAM_MUSIC);
+  EXPECT_EQ(handler_.GetVolumeControlStream(), AUDIO_STREAM_MUSIC);
+}
+
+TEST_F(AudioVolumeHandlerTest, ComputeNewVolume) {
+  EXPECT_EQ(handler_.GetNewVolumeIndex(50, 1, AUDIO_STREAM_MUSIC), 51);
+  EXPECT_EQ(handler_.GetNewVolumeIndex(50, -1, AUDIO_STREAM_MUSIC), 49);
+  handler_.step_sizes_[AUDIO_STREAM_MUSIC] = 10;
+  EXPECT_EQ(handler_.GetNewVolumeIndex(50, 1, AUDIO_STREAM_MUSIC), 60);
+  EXPECT_EQ(handler_.GetNewVolumeIndex(50, -1, AUDIO_STREAM_MUSIC), 40);
+  SetupHandlerVolumeFile();
+  EXPECT_EQ(handler_.GetNewVolumeIndex(100, 1, AUDIO_STREAM_MUSIC), 100);
+  EXPECT_EQ(handler_.GetNewVolumeIndex(0, -1, AUDIO_STREAM_MUSIC), 0);
+}
+
+TEST_F(AudioVolumeHandlerTest, GetSetMaxSteps) {
+  EXPECT_EQ(handler_.GetVolumeMaxSteps(AUDIO_STREAM_MUSIC), 100);
+  EXPECT_EQ(handler_.SetVolumeMaxSteps(AUDIO_STREAM_MUSIC, 0), EINVAL);
+  EXPECT_EQ(handler_.GetVolumeMaxSteps(AUDIO_STREAM_MUSIC), 100);
+  EXPECT_EQ(handler_.SetVolumeMaxSteps(AUDIO_STREAM_MUSIC, 100), 0);
+  EXPECT_EQ(handler_.GetVolumeMaxSteps(AUDIO_STREAM_MUSIC), 100);
+  EXPECT_EQ(handler_.SetVolumeMaxSteps(AUDIO_STREAM_MUSIC, -1), EINVAL);
+  EXPECT_EQ(handler_.SetVolumeMaxSteps(AUDIO_STREAM_MUSIC, 101), EINVAL);
+}
+
+TEST_F(AudioVolumeHandlerTest, GetSetVolumeIndex) {
+  SetupHandlerVolumeFile();
+  EXPECT_CALL(handler_, TriggerCallback(AUDIO_STREAM_MUSIC, _, 0));
+  EXPECT_EQ(handler_.SetVolumeIndex(
+                AUDIO_STREAM_MUSIC, AUDIO_DEVICE_OUT_WIRED_HEADSET, 0),
+            0);
+  EXPECT_CALL(handler_, TriggerCallback(AUDIO_STREAM_MUSIC, 0, 50));
+  EXPECT_EQ(handler_.SetVolumeIndex(
+                AUDIO_STREAM_MUSIC, AUDIO_DEVICE_OUT_WIRED_HEADSET, 50),
+            0);
+  EXPECT_CALL(handler_, TriggerCallback(AUDIO_STREAM_MUSIC, 50, 100));
+  EXPECT_EQ(handler_.SetVolumeIndex(
+                AUDIO_STREAM_MUSIC, AUDIO_DEVICE_OUT_WIRED_HEADSET, 100),
+            0);
+  EXPECT_EQ(handler_.SetVolumeIndex(
+                AUDIO_STREAM_MUSIC, AUDIO_DEVICE_OUT_WIRED_HEADSET, -1),
+            EINVAL);
+  EXPECT_EQ(handler_.SetVolumeIndex(
+                AUDIO_STREAM_MUSIC, AUDIO_DEVICE_OUT_WIRED_HEADSET, 101),
+            EINVAL);
+  EXPECT_EQ(handler_.SetVolumeMaxSteps(AUDIO_STREAM_MUSIC, 10), 0);
+  EXPECT_EQ(handler_.GetVolumeIndex(AUDIO_STREAM_MUSIC,
+                                    AUDIO_DEVICE_OUT_WIRED_HEADSET),
+            10);
+  EXPECT_EQ(handler_.SetVolumeIndex(
+                AUDIO_STREAM_MUSIC, AUDIO_DEVICE_OUT_WIRED_HEADSET, 11),
+            EINVAL);
+  EXPECT_CALL(handler_, TriggerCallback(AUDIO_STREAM_MUSIC, 100, 50));
+  EXPECT_EQ(handler_.SetVolumeIndex(
+                AUDIO_STREAM_MUSIC, AUDIO_DEVICE_OUT_WIRED_HEADSET, 5),
+            0);
+  EXPECT_EQ(handler_.SetVolumeMaxSteps(AUDIO_STREAM_MUSIC, 20), 0);
+  EXPECT_EQ(handler_.GetVolumeIndex(AUDIO_STREAM_MUSIC,
+                                    AUDIO_DEVICE_OUT_WIRED_HEADSET),
+            10);
+}
+
+}  // namespace brillo
diff --git a/media/brillo/audio/audioservice/test/brillo_audio_client_mock.h b/media/brillo/audio/audioservice/test/brillo_audio_client_mock.h
new file mode 100644
index 0000000..047c7c3
--- /dev/null
+++ b/media/brillo/audio/audioservice/test/brillo_audio_client_mock.h
@@ -0,0 +1,45 @@
+// Copyright 2016 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+// Mock for the brillo audio client.
+
+#ifndef BRILLO_AUDIO_AUDIOSERVICE_BRILLO_AUDIO_CLIENT_MOCK_H_
+#define BRILLO_AUDIO_AUDIOSERVICE_BRILLO_AUDIO_CLIENT_MOCK_H_
+
+#include <gmock/gmock.h>
+#include <gtest/gtest_prod.h>
+
+#include "brillo_audio_client.h"
+
+namespace brillo {
+
+class BrilloAudioClientMock : public BrilloAudioClient {
+ public:
+  virtual ~BrilloAudioClientMock() = default;
+
+  MOCK_METHOD0(OnBASDisconnect, void());
+
+ private:
+  friend class BrilloAudioClientTest;
+  FRIEND_TEST(BrilloAudioClientTest,
+              CheckInitializeRegistersForDeathNotifications);
+  FRIEND_TEST(BrilloAudioClientTest, InitializeNoService);
+
+  BrilloAudioClientMock() = default;
+};
+
+}  // namespace brillo
+
+#endif  // BRILLO_AUDIO_AUDIOSERVICE_BRILLO_AUDIO_CLIENT_MOCK_H_
diff --git a/media/brillo/audio/audioservice/test/brillo_audio_client_test.cpp b/media/brillo/audio/audioservice/test/brillo_audio_client_test.cpp
new file mode 100644
index 0000000..3616c7b
--- /dev/null
+++ b/media/brillo/audio/audioservice/test/brillo_audio_client_test.cpp
@@ -0,0 +1,287 @@
+// Copyright 2016 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+// Tests for the brillo audio client.
+
+#include <binderwrapper/binder_test_base.h>
+#include <binderwrapper/stub_binder_wrapper.h>
+#include <gmock/gmock.h>
+#include <gtest/gtest.h>
+
+#include "audio_service_callback.h"
+#include "brillo_audio_client.h"
+#include "include/brillo_audio_manager.h"
+#include "test/brillo_audio_client_mock.h"
+#include "test/brillo_audio_service_mock.h"
+
+using android::sp;
+using android::String8;
+using testing::Return;
+using testing::_;
+
+namespace brillo {
+
+static const char kBrilloAudioServiceName[] =
+    "android.brillo.brilloaudioservice.BrilloAudioService";
+
+class BrilloAudioClientTest : public android::BinderTestBase {
+ public:
+  bool ConnectClientToBAS() {
+    bas_ = new BrilloAudioServiceMock();
+    binder_wrapper()->SetBinderForService(kBrilloAudioServiceName, bas_);
+    return client_.Initialize();
+  }
+
+  BrilloAudioClientMock client_;
+  sp<BrilloAudioServiceMock> bas_;
+};
+
+TEST_F(BrilloAudioClientTest, SetDeviceNoService) {
+  EXPECT_CALL(client_, OnBASDisconnect());
+  EXPECT_EQ(
+      client_.SetDevice(AUDIO_POLICY_FORCE_USE_MAX, AUDIO_POLICY_FORCE_NONE),
+      ECONNABORTED);
+}
+
+TEST_F(BrilloAudioClientTest, GetDevicesNoService) {
+  std::vector<int> foo;
+  EXPECT_CALL(client_, OnBASDisconnect());
+  EXPECT_EQ(client_.GetDevices(0, foo), ECONNABORTED);
+}
+
+TEST_F(BrilloAudioClientTest, RegisterCallbackNoService) {
+  EXPECT_CALL(client_, OnBASDisconnect());
+  EXPECT_EQ(client_.RegisterAudioCallback(nullptr, nullptr), ECONNABORTED);
+}
+
+TEST_F(BrilloAudioClientTest, UnregisterAudioCallbackNoService) {
+  EXPECT_CALL(client_, OnBASDisconnect());
+  EXPECT_EQ(client_.UnregisterAudioCallback(0), ECONNABORTED);
+}
+
+TEST_F(BrilloAudioClientTest, InitializeNoService) {
+  EXPECT_FALSE(client_.Initialize());
+}
+
+TEST_F(BrilloAudioClientTest, CheckInitializeRegistersForDeathNotifications) {
+  EXPECT_TRUE(ConnectClientToBAS());
+  EXPECT_CALL(client_, OnBASDisconnect());
+  binder_wrapper()->NotifyAboutBinderDeath(bas_);
+}
+
+TEST_F(BrilloAudioClientTest, GetDevicesWithBAS) {
+  EXPECT_TRUE(ConnectClientToBAS());
+  std::vector<int> foo;
+  EXPECT_CALL(*bas_.get(), GetDevices(0, &foo)).WillOnce(Return(Status::ok()));
+  EXPECT_EQ(client_.GetDevices(0, foo), 0);
+}
+
+TEST_F(BrilloAudioClientTest, SetDeviceWithBAS) {
+  EXPECT_TRUE(ConnectClientToBAS());
+  std::vector<int> foo;
+  EXPECT_CALL(*bas_.get(),
+              SetDevice(AUDIO_POLICY_FORCE_USE_MAX, AUDIO_POLICY_FORCE_NONE))
+      .WillOnce(Return(Status::ok()));
+  EXPECT_EQ(
+      client_.SetDevice(AUDIO_POLICY_FORCE_USE_MAX, AUDIO_POLICY_FORCE_NONE),
+      0);
+}
+
+TEST_F(BrilloAudioClientTest, RegisterCallbackWithBAS) {
+  EXPECT_TRUE(ConnectClientToBAS());
+  BAudioCallback bcallback;
+  AudioServiceCallback* callback =
+      new AudioServiceCallback(&bcallback, nullptr);
+  int id = 0;
+  EXPECT_CALL(*bas_.get(),
+              RegisterServiceCallback(sp<IAudioServiceCallback>(callback)))
+      .WillOnce(Return(Status::ok()));
+  EXPECT_EQ(client_.RegisterAudioCallback(callback, &id), 0);
+  EXPECT_NE(id, 0);
+}
+
+TEST_F(BrilloAudioClientTest, RegisterSameCallbackTwiceWithBAS) {
+  EXPECT_TRUE(ConnectClientToBAS());
+  BAudioCallback bcallback;
+  AudioServiceCallback* callback =
+      new AudioServiceCallback(&bcallback, nullptr);
+  int id = -1;
+  EXPECT_CALL(*bas_.get(),
+              RegisterServiceCallback(sp<IAudioServiceCallback>(callback)))
+      .Times(2)
+      .WillRepeatedly(Return(Status::ok()));
+  EXPECT_EQ(client_.RegisterAudioCallback(callback, &id), 0);
+  EXPECT_NE(id, 0);
+  id = -1;
+  EXPECT_EQ(client_.RegisterAudioCallback(callback, &id), EINVAL);
+  EXPECT_EQ(id, 0);
+}
+
+TEST_F(BrilloAudioClientTest, UnregisterAudioCallbackValidWithBAS) {
+  EXPECT_TRUE(ConnectClientToBAS());
+  BAudioCallback bcallback;
+  AudioServiceCallback* callback =
+      new AudioServiceCallback(&bcallback, nullptr);
+  int id = 0;
+  EXPECT_CALL(*bas_.get(),
+              RegisterServiceCallback(sp<IAudioServiceCallback>(callback)))
+      .WillOnce(Return(Status::ok()));
+  EXPECT_EQ(client_.RegisterAudioCallback(callback, &id), 0);
+  EXPECT_NE(id, 0);
+  EXPECT_CALL(*bas_.get(),
+              UnregisterServiceCallback(sp<IAudioServiceCallback>(callback)))
+      .WillOnce(Return(Status::ok()));
+  EXPECT_EQ(client_.UnregisterAudioCallback(id), 0);
+}
+
+TEST_F(BrilloAudioClientTest, UnregisterInvalidCallbackWithBAS) {
+  EXPECT_TRUE(ConnectClientToBAS());
+  EXPECT_EQ(client_.UnregisterAudioCallback(1), EINVAL);
+}
+
+TEST_F(BrilloAudioClientTest, RegisterAndUnregisterAudioTwoCallbacks) {
+  EXPECT_TRUE(ConnectClientToBAS());
+  BAudioCallback bcallback1, bcallback2;
+  AudioServiceCallback* callback1 =
+      new AudioServiceCallback(&bcallback1, nullptr);
+  AudioServiceCallback* callback2 =
+      new AudioServiceCallback(&bcallback2, nullptr);
+  int id1 = 0, id2 = 0;
+  EXPECT_CALL(*bas_.get(), RegisterServiceCallback(_))
+      .WillRepeatedly(Return(Status::ok()));
+  EXPECT_EQ(client_.RegisterAudioCallback(callback1, &id1), 0);
+  EXPECT_NE(id1, 0);
+  EXPECT_EQ(client_.RegisterAudioCallback(callback2, &id2), 0);
+  EXPECT_NE(id2, 0);
+  EXPECT_CALL(*bas_.get(), UnregisterServiceCallback(_))
+      .WillRepeatedly(Return(Status::ok()));
+  EXPECT_EQ(client_.UnregisterAudioCallback(id1), 0);
+  EXPECT_EQ(client_.UnregisterAudioCallback(id2), 0);
+}
+
+TEST_F(BrilloAudioClientTest, GetMaxVolStepsNoService) {
+  EXPECT_CALL(client_, OnBASDisconnect());
+  int foo;
+  EXPECT_EQ(client_.GetMaxVolumeSteps(BAudioUsage::kUsageInvalid, &foo),
+            ECONNABORTED);
+}
+
+TEST_F(BrilloAudioClientTest, GetMaxVolStepsWithBAS) {
+  EXPECT_TRUE(ConnectClientToBAS());
+  int foo;
+  EXPECT_CALL(*bas_.get(), GetMaxVolumeSteps(AUDIO_STREAM_MUSIC, &foo))
+      .WillOnce(Return(Status::ok()));
+  EXPECT_EQ(client_.GetMaxVolumeSteps(BAudioUsage::kUsageMedia, &foo), 0);
+}
+
+TEST_F(BrilloAudioClientTest, SetMaxVolStepsNoService) {
+  EXPECT_CALL(client_, OnBASDisconnect());
+  EXPECT_EQ(client_.SetMaxVolumeSteps(BAudioUsage::kUsageInvalid, 100),
+            ECONNABORTED);
+}
+
+TEST_F(BrilloAudioClientTest, SetMaxVolStepsWithBAS) {
+  EXPECT_TRUE(ConnectClientToBAS());
+  EXPECT_CALL(*bas_.get(), SetMaxVolumeSteps(AUDIO_STREAM_MUSIC, 100))
+      .WillOnce(Return(Status::ok()));
+  EXPECT_EQ(client_.SetMaxVolumeSteps(BAudioUsage::kUsageMedia, 100), 0);
+}
+
+TEST_F(BrilloAudioClientTest, SetVolIndexNoService) {
+  EXPECT_CALL(client_, OnBASDisconnect());
+  EXPECT_EQ(client_.SetVolumeIndex(
+                BAudioUsage::kUsageInvalid, AUDIO_DEVICE_NONE, 100),
+            ECONNABORTED);
+}
+
+TEST_F(BrilloAudioClientTest, SetVolIndexWithBAS) {
+  EXPECT_TRUE(ConnectClientToBAS());
+  EXPECT_CALL(*bas_.get(),
+              SetVolumeIndex(AUDIO_STREAM_MUSIC, AUDIO_DEVICE_OUT_SPEAKER, 100))
+      .WillOnce(Return(Status::ok()));
+  EXPECT_EQ(client_.SetVolumeIndex(
+                BAudioUsage::kUsageMedia, AUDIO_DEVICE_OUT_SPEAKER, 100),
+            0);
+}
+
+TEST_F(BrilloAudioClientTest, GetVolIndexNoService) {
+  EXPECT_CALL(client_, OnBASDisconnect());
+  int foo;
+  EXPECT_EQ(client_.GetVolumeIndex(
+                BAudioUsage::kUsageInvalid, AUDIO_DEVICE_NONE, &foo),
+            ECONNABORTED);
+}
+
+TEST_F(BrilloAudioClientTest, GetVolIndexWithBAS) {
+  EXPECT_TRUE(ConnectClientToBAS());
+  int foo;
+  EXPECT_CALL(
+      *bas_.get(),
+      GetVolumeIndex(AUDIO_STREAM_MUSIC, AUDIO_DEVICE_OUT_SPEAKER, &foo))
+      .WillOnce(Return(Status::ok()));
+  EXPECT_EQ(client_.GetVolumeIndex(
+                BAudioUsage::kUsageMedia, AUDIO_DEVICE_OUT_SPEAKER, &foo),
+            0);
+}
+
+TEST_F(BrilloAudioClientTest, GetVolumeControlStreamNoService) {
+  EXPECT_CALL(client_, OnBASDisconnect());
+  BAudioUsage foo;
+  EXPECT_EQ(client_.GetVolumeControlStream(&foo), ECONNABORTED);
+}
+
+TEST_F(BrilloAudioClientTest, GetVolumeControlStreamWithBAS) {
+  EXPECT_TRUE(ConnectClientToBAS());
+  EXPECT_CALL(*bas_.get(), GetVolumeControlStream(_))
+      .WillOnce(Return(Status::ok()));
+  BAudioUsage foo;
+  EXPECT_EQ(client_.GetVolumeControlStream(&foo), 0);
+}
+
+TEST_F(BrilloAudioClientTest, SetVolumeControlStreamNoService) {
+  EXPECT_CALL(client_, OnBASDisconnect());
+  EXPECT_EQ(client_.SetVolumeControlStream(kUsageMedia), ECONNABORTED);
+}
+
+TEST_F(BrilloAudioClientTest, SetVolumeControlStreamWithBAS) {
+  EXPECT_TRUE(ConnectClientToBAS());
+  EXPECT_CALL(*bas_.get(), SetVolumeControlStream(AUDIO_STREAM_MUSIC))
+      .WillOnce(Return(Status::ok()));
+  EXPECT_EQ(client_.SetVolumeControlStream(kUsageMedia), 0);
+}
+
+TEST_F(BrilloAudioClientTest, IncrementVolNoService) {
+  EXPECT_CALL(client_, OnBASDisconnect());
+  EXPECT_EQ(client_.IncrementVolume(), ECONNABORTED);
+}
+
+TEST_F(BrilloAudioClientTest, IncrementVolWithBAS) {
+  EXPECT_TRUE(ConnectClientToBAS());
+  EXPECT_CALL(*bas_.get(), IncrementVolume()).WillOnce(Return(Status::ok()));
+  EXPECT_EQ(client_.IncrementVolume(), 0);
+}
+
+TEST_F(BrilloAudioClientTest, DecrementVolNoService) {
+  EXPECT_CALL(client_, OnBASDisconnect());
+  EXPECT_EQ(client_.DecrementVolume(), ECONNABORTED);
+}
+
+TEST_F(BrilloAudioClientTest, DecrementVolWithBAS) {
+  EXPECT_TRUE(ConnectClientToBAS());
+  EXPECT_CALL(*bas_.get(), DecrementVolume()).WillOnce(Return(Status::ok()));
+  EXPECT_EQ(client_.DecrementVolume(), 0);
+}
+
+}  // namespace brillo
diff --git a/media/brillo/audio/audioservice/test/brillo_audio_device_info_internal_test.cpp b/media/brillo/audio/audioservice/test/brillo_audio_device_info_internal_test.cpp
new file mode 100644
index 0000000..d02608c
--- /dev/null
+++ b/media/brillo/audio/audioservice/test/brillo_audio_device_info_internal_test.cpp
@@ -0,0 +1,51 @@
+// Copyright 2016 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+// Tests for the BrilloAudioDeviceInfoInternal class.
+
+#include <gmock/gmock.h>
+#include <gtest/gtest.h>
+
+#include <hardware/audio.h>
+
+#include "brillo_audio_device_info_internal.h"
+
+namespace brillo {
+
+TEST(BrilloAudioDeviceInfoInternalTest, OutWiredHeadset) {
+  BAudioDeviceInfoInternal* badi =
+      BAudioDeviceInfoInternal::CreateFromAudioDevicesT(
+          AUDIO_DEVICE_OUT_WIRED_HEADSET);
+  EXPECT_EQ(badi->device_id_, TYPE_WIRED_HEADSET);
+  EXPECT_EQ(badi->GetConfig(), AUDIO_POLICY_FORCE_HEADPHONES);
+}
+
+TEST(BrilloAudioDeviceInfoInternalTest, OutWiredHeadphone) {
+  BAudioDeviceInfoInternal* badi =
+      BAudioDeviceInfoInternal::CreateFromAudioDevicesT(
+          AUDIO_DEVICE_OUT_WIRED_HEADPHONE);
+  EXPECT_EQ(badi->device_id_, TYPE_WIRED_HEADPHONES);
+  EXPECT_EQ(badi->GetConfig(), AUDIO_POLICY_FORCE_HEADPHONES);
+}
+
+TEST(BrilloAudioDeviceInfoInternalTest, InWiredHeadset) {
+  BAudioDeviceInfoInternal* badi =
+      BAudioDeviceInfoInternal::CreateFromAudioDevicesT(
+          AUDIO_DEVICE_IN_WIRED_HEADSET);
+  EXPECT_EQ(badi->device_id_, TYPE_WIRED_HEADSET_MIC);
+  EXPECT_EQ(badi->GetConfig(), AUDIO_POLICY_FORCE_HEADPHONES);
+}
+
+}  // namespace brillo
diff --git a/media/brillo/audio/audioservice/test/brillo_audio_manager_test.cpp b/media/brillo/audio/audioservice/test/brillo_audio_manager_test.cpp
new file mode 100644
index 0000000..b4299f7
--- /dev/null
+++ b/media/brillo/audio/audioservice/test/brillo_audio_manager_test.cpp
@@ -0,0 +1,497 @@
+// Copyright 2016 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+// Tests for the brillo audio manager interface.
+
+#include <binderwrapper/binder_test_base.h>
+#include <binderwrapper/stub_binder_wrapper.h>
+#include <gmock/gmock.h>
+#include <gtest/gtest.h>
+
+#include "audio_service_callback.h"
+#include "brillo_audio_client.h"
+#include "include/brillo_audio_manager.h"
+#include "test/brillo_audio_service_mock.h"
+
+using android::sp;
+using testing::Mock;
+using testing::Return;
+using testing::_;
+
+namespace brillo {
+
+static const char kBrilloAudioServiceName[] =
+    "android.brillo.brilloaudioservice.BrilloAudioService";
+
+class BrilloAudioManagerTest : public android::BinderTestBase {
+ public:
+  void ConnectBAS() {
+    bas_ = new BrilloAudioServiceMock();
+    binder_wrapper()->SetBinderForService(kBrilloAudioServiceName, bas_);
+  }
+
+  BAudioManager* GetValidManager() {
+    ConnectBAS();
+    auto bam = BAudioManager_new();
+    EXPECT_NE(bam, nullptr);
+    return bam;
+  }
+
+  void TearDown() {
+    // Simulating the death of BAS causes the client to delete itself.
+    binder_wrapper()->NotifyAboutBinderDeath(bas_);
+    bas_.clear();
+  }
+
+  sp<BrilloAudioServiceMock> bas_;
+};
+
+TEST_F(BrilloAudioManagerTest, NewNoService) {
+  EXPECT_EQ(BAudioManager_new(), nullptr);
+}
+
+TEST_F(BrilloAudioManagerTest, NewWithBAS) {
+  ConnectBAS();
+  auto bam = BAudioManager_new();
+  EXPECT_NE(bam, nullptr);
+}
+
+TEST_F(BrilloAudioManagerTest, GetDevicesInvalidParams) {
+  auto bam = GetValidManager();
+  unsigned int num_devices;
+  EXPECT_EQ(BAudioManager_getDevices(nullptr, 1, nullptr, 0, &num_devices),
+            EINVAL);
+  EXPECT_EQ(BAudioManager_getDevices(bam, 1, nullptr, 0, nullptr), EINVAL);
+  EXPECT_EQ(BAudioManager_getDevices(bam, -1, nullptr, 0, &num_devices),
+            EINVAL);
+}
+
+TEST_F(BrilloAudioManagerTest, GetDevicesNullArrNoDevices) {
+  auto bam = GetValidManager();
+  unsigned int num_devices = -1;
+  EXPECT_CALL(*bas_.get(), GetDevices(1, _)).WillOnce(Return(Status::ok()));
+  EXPECT_EQ(BAudioManager_getDevices(bam, 1, nullptr, 0, &num_devices), 0);
+  EXPECT_EQ(num_devices, 0);
+}
+
+TEST_F(BrilloAudioManagerTest, SetInputDeviceInvalidParams) {
+  auto bam = GetValidManager();
+  auto device = BAudioDeviceInfo_new(TYPE_UNKNOWN);
+  EXPECT_EQ(BAudioManager_setInputDevice(nullptr, nullptr), EINVAL);
+  EXPECT_EQ(BAudioManager_setInputDevice(bam, nullptr), EINVAL);
+  EXPECT_EQ(BAudioManager_setInputDevice(nullptr, device), EINVAL);
+  BAudioDeviceInfo_delete(device);
+}
+
+TEST_F(BrilloAudioManagerTest, SetInputDeviceHeadsetMic) {
+  auto bam = GetValidManager();
+  auto device = BAudioDeviceInfo_new(TYPE_WIRED_HEADSET_MIC);
+  EXPECT_CALL(*bas_.get(), SetDevice(AUDIO_POLICY_FORCE_FOR_RECORD,
+                                     AUDIO_POLICY_FORCE_HEADPHONES))
+      .WillOnce(Return(Status::ok()));
+  EXPECT_EQ(BAudioManager_setInputDevice(bam, device), 0);
+  BAudioDeviceInfo_delete(device);
+}
+
+TEST_F(BrilloAudioManagerTest, SetInputDeviceBuiltinMic) {
+  auto bam = GetValidManager();
+  auto device = BAudioDeviceInfo_new(TYPE_BUILTIN_MIC);
+  EXPECT_CALL(*bas_.get(),
+              SetDevice(AUDIO_POLICY_FORCE_FOR_RECORD, AUDIO_POLICY_FORCE_NONE))
+      .WillOnce(Return(Status::ok()));
+  EXPECT_EQ(BAudioManager_setInputDevice(bam, device), 0);
+  BAudioDeviceInfo_delete(device);
+}
+
+TEST_F(BrilloAudioManagerTest, SetOutputDeviceInvalidParams) {
+  auto bam = GetValidManager();
+  auto device = BAudioDeviceInfo_new(TYPE_UNKNOWN);
+  EXPECT_EQ(BAudioManager_setOutputDevice(nullptr, nullptr, kUsageMedia),
+            EINVAL);
+  EXPECT_EQ(BAudioManager_setOutputDevice(bam, nullptr, kUsageMedia), EINVAL);
+  EXPECT_EQ(BAudioManager_setOutputDevice(nullptr, device, kUsageMedia),
+            EINVAL);
+  BAudioDeviceInfo_delete(device);
+}
+
+TEST_F(BrilloAudioManagerTest, SetOutputDeviceWiredHeadset) {
+  auto bam = GetValidManager();
+  auto device = BAudioDeviceInfo_new(TYPE_WIRED_HEADSET);
+  EXPECT_CALL(*bas_.get(), SetDevice(AUDIO_POLICY_FORCE_FOR_MEDIA,
+                                     AUDIO_POLICY_FORCE_HEADPHONES))
+      .WillOnce(Return(Status::ok()));
+  EXPECT_EQ(BAudioManager_setOutputDevice(bam, device, kUsageMedia), 0);
+  BAudioDeviceInfo_delete(device);
+}
+
+TEST_F(BrilloAudioManagerTest, SetOutputDeviceBuiltinSpeaker) {
+  auto bam = GetValidManager();
+  auto device = BAudioDeviceInfo_new(TYPE_BUILTIN_SPEAKER);
+  EXPECT_CALL(*bas_.get(), SetDevice(AUDIO_POLICY_FORCE_FOR_SYSTEM,
+                                     AUDIO_POLICY_FORCE_SPEAKER))
+      .WillOnce(Return(Status::ok()));
+  EXPECT_EQ(BAudioManager_setOutputDevice(bam, device, kUsageSystem), 0);
+  BAudioDeviceInfo_delete(device);
+}
+
+TEST_F(BrilloAudioManagerTest, SetOutputDeviceWiredHeadphoneNotification) {
+  auto bam = GetValidManager();
+  auto device = BAudioDeviceInfo_new(TYPE_WIRED_HEADPHONES);
+  EXPECT_CALL(*bas_.get(), SetDevice(AUDIO_POLICY_FORCE_FOR_SYSTEM,
+                                     AUDIO_POLICY_FORCE_HEADPHONES))
+      .WillOnce(Return(Status::ok()));
+  EXPECT_EQ(BAudioManager_setOutputDevice(bam, device, kUsageNotifications), 0);
+  BAudioDeviceInfo_delete(device);
+}
+
+TEST_F(BrilloAudioManagerTest, SetOutputDeviceWiredHeadphoneAlarm) {
+  auto bam = GetValidManager();
+  auto device = BAudioDeviceInfo_new(TYPE_WIRED_HEADPHONES);
+  EXPECT_CALL(*bas_.get(), SetDevice(AUDIO_POLICY_FORCE_FOR_SYSTEM,
+                                     AUDIO_POLICY_FORCE_HEADPHONES))
+      .WillOnce(Return(Status::ok()));
+  EXPECT_EQ(BAudioManager_setOutputDevice(bam, device, kUsageAlarm), 0);
+  BAudioDeviceInfo_delete(device);
+}
+
+TEST_F(BrilloAudioManagerTest, RegisterCallbackInvalidParams) {
+  auto bam = GetValidManager();
+  BAudioCallback callback;
+  int callback_id;
+  EXPECT_EQ(
+      BAudioManager_registerAudioCallback(nullptr, nullptr, nullptr, nullptr),
+      EINVAL);
+  EXPECT_EQ(BAudioManager_registerAudioCallback(bam, nullptr, nullptr, nullptr),
+            EINVAL);
+  EXPECT_EQ(
+      BAudioManager_registerAudioCallback(bam, &callback, nullptr, nullptr),
+      EINVAL);
+  EXPECT_EQ(
+      BAudioManager_registerAudioCallback(bam, nullptr, nullptr, &callback_id),
+      EINVAL);
+}
+
+TEST_F(BrilloAudioManagerTest, RegisterCallbackOnStack) {
+  auto bam = GetValidManager();
+  BAudioCallback callback;
+  callback.OnAudioDeviceAdded = nullptr;
+  callback.OnAudioDeviceRemoved = nullptr;
+  int callback_id = 0;
+  EXPECT_CALL(*bas_.get(), RegisterServiceCallback(_))
+      .WillOnce(Return(Status::ok()));
+  EXPECT_EQ(BAudioManager_registerAudioCallback(bam, &callback, nullptr,
+                                                &callback_id),
+            0);
+  EXPECT_NE(callback_id, 0);
+}
+
+TEST_F(BrilloAudioManagerTest, RegisterCallbackOnHeap) {
+  auto bam = GetValidManager();
+  BAudioCallback* callback = new BAudioCallback;
+  callback->OnAudioDeviceAdded = nullptr;
+  callback->OnAudioDeviceRemoved = nullptr;
+  int callback_id = 0;
+  EXPECT_CALL(*bas_.get(), RegisterServiceCallback(_))
+      .WillOnce(Return(Status::ok()));
+  EXPECT_EQ(
+      BAudioManager_registerAudioCallback(bam, callback, nullptr, &callback_id),
+      0);
+  EXPECT_NE(callback_id, 0);
+  delete callback;
+}
+
+TEST_F(BrilloAudioManagerTest, UnregisterCallbackInvalidParams) {
+  auto bam = GetValidManager();
+  EXPECT_EQ(BAudioManager_unregisterAudioCallback(nullptr, 1), EINVAL);
+  EXPECT_EQ(BAudioManager_unregisterAudioCallback(bam, 1), EINVAL);
+}
+
+TEST_F(BrilloAudioManagerTest, UnregisterCallback) {
+  auto bam = GetValidManager();
+  BAudioCallback callback;
+  callback.OnAudioDeviceAdded = nullptr;
+  callback.OnAudioDeviceRemoved = nullptr;
+  int callback_id = 0;
+  EXPECT_CALL(*bas_.get(), RegisterServiceCallback(_))
+      .WillOnce(Return(Status::ok()));
+  EXPECT_EQ(BAudioManager_registerAudioCallback(bam, &callback, nullptr,
+                                                &callback_id),
+            0);
+  EXPECT_NE(callback_id, 0);
+  EXPECT_CALL(*bas_.get(), UnregisterServiceCallback(_))
+      .WillOnce(Return(Status::ok()));
+  EXPECT_EQ(BAudioManager_unregisterAudioCallback(bam, callback_id), 0);
+  // 2nd call shouldn't result in a call to BAS.
+  EXPECT_EQ(BAudioManager_unregisterAudioCallback(bam, callback_id), EINVAL);
+}
+
+TEST_F(BrilloAudioManagerTest, GetDevicesBASDies) {
+  auto bam = GetValidManager();
+  unsigned int num_devices = -1;
+  binder_wrapper()->NotifyAboutBinderDeath(bas_);
+  EXPECT_EQ(BAudioManager_getDevices(bam, 1, nullptr, 0, &num_devices),
+            ECONNABORTED);
+}
+
+TEST_F(BrilloAudioManagerTest, SetInputDeviceBASDies) {
+  auto bam = GetValidManager();
+  auto device = BAudioDeviceInfo_new(TYPE_WIRED_HEADSET_MIC);
+  binder_wrapper()->NotifyAboutBinderDeath(bas_);
+  EXPECT_EQ(BAudioManager_setInputDevice(bam, device), ECONNABORTED);
+  BAudioDeviceInfo_delete(device);
+}
+
+TEST_F(BrilloAudioManagerTest, SetOutputDeviceBASDies) {
+  auto bam = GetValidManager();
+  auto device = BAudioDeviceInfo_new(TYPE_WIRED_HEADPHONES);
+  binder_wrapper()->NotifyAboutBinderDeath(bas_);
+  EXPECT_EQ(BAudioManager_setOutputDevice(bam, device, kUsageNotifications),
+            ECONNABORTED);
+  BAudioDeviceInfo_delete(device);
+}
+
+TEST_F(BrilloAudioManagerTest, RegisterServiceCallbackBASDies) {
+  auto bam = GetValidManager();
+  BAudioCallback callback;
+  callback.OnAudioDeviceAdded = nullptr;
+  callback.OnAudioDeviceRemoved = nullptr;
+  int callback_id = 1;
+  binder_wrapper()->NotifyAboutBinderDeath(bas_);
+  EXPECT_EQ(BAudioManager_registerAudioCallback(bam, &callback, nullptr,
+                                                &callback_id),
+            ECONNABORTED);
+  EXPECT_EQ(callback_id, 0);
+}
+
+TEST_F(BrilloAudioManagerTest, UnregisterCallbackBASDies) {
+  auto bam = GetValidManager();
+  BAudioCallback callback;
+  callback.OnAudioDeviceAdded = nullptr;
+  callback.OnAudioDeviceRemoved = nullptr;
+  int callback_id = 0;
+  EXPECT_CALL(*bas_.get(), RegisterServiceCallback(_))
+      .WillOnce(Return(Status::ok()));
+  EXPECT_EQ(BAudioManager_registerAudioCallback(bam, &callback, nullptr,
+                                                &callback_id),
+            0);
+  EXPECT_NE(callback_id, 0);
+  binder_wrapper()->NotifyAboutBinderDeath(bas_);
+  EXPECT_EQ(BAudioManager_unregisterAudioCallback(bam, callback_id),
+            ECONNABORTED);
+}
+
+TEST_F(BrilloAudioManagerTest, GetMaxVolumeStepsInvalidParams) {
+  auto bam = GetValidManager();
+  int foo;
+  EXPECT_EQ(BAudioManager_getMaxVolumeSteps(
+                nullptr, BAudioUsage::kUsageMedia, nullptr),
+            EINVAL);
+  EXPECT_EQ(
+      BAudioManager_getMaxVolumeSteps(nullptr, BAudioUsage::kUsageMedia, &foo),
+      EINVAL);
+  EXPECT_EQ(
+      BAudioManager_getMaxVolumeSteps(bam, BAudioUsage::kUsageMedia, nullptr),
+      EINVAL);
+}
+
+TEST_F(BrilloAudioManagerTest, GetMaxVolStepsWithBAS) {
+  auto bam = GetValidManager();
+  int foo;
+  EXPECT_CALL(*bas_.get(), GetMaxVolumeSteps(AUDIO_STREAM_MUSIC, &foo))
+      .WillOnce(Return(Status::ok()));
+  EXPECT_EQ(
+      BAudioManager_getMaxVolumeSteps(bam, BAudioUsage::kUsageMedia, &foo), 0);
+}
+
+TEST_F(BrilloAudioManagerTest, GetMaxVolStepsBASDies) {
+  auto bam = GetValidManager();
+  int foo;
+  binder_wrapper()->NotifyAboutBinderDeath(bas_);
+  EXPECT_EQ(
+      BAudioManager_getMaxVolumeSteps(bam, BAudioUsage::kUsageMedia, &foo),
+      ECONNABORTED);
+}
+
+TEST_F(BrilloAudioManagerTest, SetMaxVolumeStepsInvalidParams) {
+  EXPECT_EQ(
+      BAudioManager_setMaxVolumeSteps(nullptr, BAudioUsage::kUsageMedia, 100),
+      EINVAL);
+}
+
+TEST_F(BrilloAudioManagerTest, SetMaxVolStepsWithBAS) {
+  auto bam = GetValidManager();
+  EXPECT_CALL(*bas_.get(), SetMaxVolumeSteps(AUDIO_STREAM_MUSIC, 100))
+      .WillOnce(Return(Status::ok()));
+  EXPECT_EQ(BAudioManager_setMaxVolumeSteps(bam, BAudioUsage::kUsageMedia, 100),
+            0);
+}
+
+TEST_F(BrilloAudioManagerTest, SetMaxVolStepsBASDies) {
+  auto bam = GetValidManager();
+  binder_wrapper()->NotifyAboutBinderDeath(bas_);
+  EXPECT_EQ(BAudioManager_setMaxVolumeSteps(bam, BAudioUsage::kUsageMedia, 100),
+            ECONNABORTED);
+}
+
+TEST_F(BrilloAudioManagerTest, SetVolIndexInvalidParams) {
+  auto bam = GetValidManager();
+  EXPECT_EQ(BAudioManager_setVolumeIndex(
+                nullptr, BAudioUsage::kUsageMedia, nullptr, 100),
+            EINVAL);
+  EXPECT_EQ(
+      BAudioManager_setVolumeIndex(bam, BAudioUsage::kUsageMedia, nullptr, 100),
+      EINVAL);
+}
+
+TEST_F(BrilloAudioManagerTest, SetVolIndexWithBAS) {
+  auto bam = GetValidManager();
+  auto device = BAudioDeviceInfo_new(TYPE_WIRED_HEADPHONES);
+  EXPECT_CALL(
+      *bas_.get(),
+      SetVolumeIndex(AUDIO_STREAM_MUSIC, AUDIO_DEVICE_OUT_WIRED_HEADPHONE, 100))
+      .WillOnce(Return(Status::ok()));
+  EXPECT_EQ(
+      BAudioManager_setVolumeIndex(bam, BAudioUsage::kUsageMedia, device, 100),
+      0);
+  BAudioDeviceInfo_delete(device);
+}
+
+TEST_F(BrilloAudioManagerTest, SetVolIndexBASDies) {
+  auto bam = GetValidManager();
+  auto device = BAudioDeviceInfo_new(TYPE_WIRED_HEADPHONES);
+  binder_wrapper()->NotifyAboutBinderDeath(bas_);
+  EXPECT_EQ(
+      BAudioManager_setVolumeIndex(bam, BAudioUsage::kUsageMedia, device, 100),
+      ECONNABORTED);
+  BAudioDeviceInfo_delete(device);
+}
+
+TEST_F(BrilloAudioManagerTest, GetVolIndexInvalidParams) {
+  auto bam = GetValidManager();
+  int foo;
+  EXPECT_EQ(BAudioManager_getVolumeIndex(
+                nullptr, BAudioUsage::kUsageMedia, nullptr, nullptr),
+            EINVAL);
+  auto device = BAudioDeviceInfo_new(TYPE_WIRED_HEADPHONES);
+  EXPECT_EQ(BAudioManager_getVolumeIndex(
+                bam, BAudioUsage::kUsageMedia, device, nullptr),
+            EINVAL);
+  EXPECT_EQ(BAudioManager_getVolumeIndex(
+                nullptr, BAudioUsage::kUsageMedia, device, &foo),
+            EINVAL);
+  EXPECT_EQ(BAudioManager_getVolumeIndex(
+                bam, BAudioUsage::kUsageMedia, nullptr, &foo),
+            EINVAL);
+}
+
+TEST_F(BrilloAudioManagerTest, GetVolIndexWithBAS) {
+  auto bam = GetValidManager();
+  auto device = BAudioDeviceInfo_new(TYPE_WIRED_HEADPHONES);
+  int foo;
+  EXPECT_CALL(*bas_.get(),
+              GetVolumeIndex(
+                  AUDIO_STREAM_MUSIC, AUDIO_DEVICE_OUT_WIRED_HEADPHONE, &foo))
+      .WillOnce(Return(Status::ok()));
+  EXPECT_EQ(
+      BAudioManager_getVolumeIndex(bam, BAudioUsage::kUsageMedia, device, &foo),
+      0);
+  BAudioDeviceInfo_delete(device);
+}
+
+TEST_F(BrilloAudioManagerTest, GetVolIndexBASDies) {
+  auto bam = GetValidManager();
+  auto device = BAudioDeviceInfo_new(TYPE_WIRED_HEADPHONES);
+  int foo;
+  binder_wrapper()->NotifyAboutBinderDeath(bas_);
+  EXPECT_EQ(
+      BAudioManager_getVolumeIndex(bam, BAudioUsage::kUsageMedia, device, &foo),
+      ECONNABORTED);
+  BAudioDeviceInfo_delete(device);
+}
+
+TEST_F(BrilloAudioManagerTest, GetVolumeControlUsageInvalidParams) {
+  auto bam = GetValidManager();
+  BAudioUsage foo;
+  EXPECT_EQ(BAudioManager_getVolumeControlUsage(nullptr, nullptr), EINVAL);
+  EXPECT_EQ(BAudioManager_getVolumeControlUsage(nullptr, &foo), EINVAL);
+  EXPECT_EQ(BAudioManager_getVolumeControlUsage(bam, nullptr), EINVAL);
+}
+
+TEST_F(BrilloAudioManagerTest, GetVolumeControlStreamWithBAS) {
+  auto bam = GetValidManager();
+  BAudioUsage foo;
+  EXPECT_CALL(*bas_.get(), GetVolumeControlStream(_))
+      .WillOnce(Return(Status::ok()));
+  EXPECT_EQ(BAudioManager_getVolumeControlUsage(bam, &foo), 0);
+}
+
+TEST_F(BrilloAudioManagerTest, GetVolumeControlStreamBASDies) {
+  auto bam = GetValidManager();
+  BAudioUsage foo;
+  binder_wrapper()->NotifyAboutBinderDeath(bas_);
+  EXPECT_EQ(BAudioManager_getVolumeControlUsage(bam, &foo), ECONNABORTED);
+}
+
+TEST_F(BrilloAudioManagerTest, SetVolumeControlUsageInvalidParams) {
+  EXPECT_EQ(
+      BAudioManager_setVolumeControlUsage(nullptr, BAudioUsage::kUsageMedia),
+      EINVAL);
+}
+
+TEST_F(BrilloAudioManagerTest, SetVolumeControlStreamWithBAS) {
+  auto bam = GetValidManager();
+  EXPECT_CALL(*bas_.get(), SetVolumeControlStream(AUDIO_STREAM_MUSIC))
+      .WillOnce(Return(Status::ok()));
+  EXPECT_EQ(BAudioManager_setVolumeControlUsage(bam, BAudioUsage::kUsageMedia),
+            0);
+}
+
+TEST_F(BrilloAudioManagerTest, SetVolumeControlStreamBASDies) {
+  auto bam = GetValidManager();
+  binder_wrapper()->NotifyAboutBinderDeath(bas_);
+  EXPECT_EQ(BAudioManager_setVolumeControlUsage(bam, BAudioUsage::kUsageMedia),
+            ECONNABORTED);
+}
+
+TEST_F(BrilloAudioManagerTest, DecIncInvalidParams) {
+  EXPECT_EQ(BAudioManager_decrementVolume(nullptr), EINVAL);
+  EXPECT_EQ(BAudioManager_incrementVolume(nullptr), EINVAL);
+}
+
+TEST_F(BrilloAudioManagerTest, IncVolWithBAS) {
+  auto bam = GetValidManager();
+  EXPECT_CALL(*bas_.get(), IncrementVolume()).WillOnce(Return(Status::ok()));
+  EXPECT_EQ(BAudioManager_incrementVolume(bam), 0);
+}
+
+TEST_F(BrilloAudioManagerTest, IncVolBASDies) {
+  auto bam = GetValidManager();
+  binder_wrapper()->NotifyAboutBinderDeath(bas_);
+  EXPECT_EQ(BAudioManager_incrementVolume(bam), ECONNABORTED);
+}
+
+TEST_F(BrilloAudioManagerTest, DecVolWithBAS) {
+  auto bam = GetValidManager();
+  EXPECT_CALL(*bas_.get(), DecrementVolume()).WillOnce(Return(Status::ok()));
+  EXPECT_EQ(BAudioManager_decrementVolume(bam), 0);
+}
+
+TEST_F(BrilloAudioManagerTest, DecVolBASDies) {
+  auto bam = GetValidManager();
+  binder_wrapper()->NotifyAboutBinderDeath(bas_);
+  EXPECT_EQ(BAudioManager_decrementVolume(bam), ECONNABORTED);
+}
+
+}  // namespace brillo
diff --git a/media/brillo/audio/audioservice/test/brillo_audio_service_mock.h b/media/brillo/audio/audioservice/test/brillo_audio_service_mock.h
new file mode 100644
index 0000000..4b52ef1
--- /dev/null
+++ b/media/brillo/audio/audioservice/test/brillo_audio_service_mock.h
@@ -0,0 +1,58 @@
+// Copyright 2016 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+#ifndef BRILLO_AUDIO_AUDIOSERVICE_BRILLO_AUDIO_SERVICE_MOCK_H_
+#define BRILLO_AUDIO_AUDIOSERVICE_BRILLO_AUDIO_SERVICE_MOCK_H_
+
+#include <vector>
+
+#include <gmock/gmock.h>
+#include <gtest/gtest_prod.h>
+
+#include "brillo_audio_service.h"
+
+namespace brillo {
+
+class BrilloAudioServiceMock : public BrilloAudioService {
+ public:
+  BrilloAudioServiceMock() = default;
+  ~BrilloAudioServiceMock() {}
+
+  MOCK_METHOD2(GetDevices, Status(int flag, std::vector<int>* _aidl_return));
+  MOCK_METHOD2(SetDevice, Status(int usage, int config));
+  MOCK_METHOD2(GetMaxVolumeSteps, Status(int stream, int* _aidl_return));
+  MOCK_METHOD2(SetMaxVolumeSteps, Status(int stream, int max_steps));
+  MOCK_METHOD3(SetVolumeIndex, Status(int stream, int device, int index));
+  MOCK_METHOD3(GetVolumeIndex,
+               Status(int stream, int device, int* _aidl_return));
+  MOCK_METHOD1(GetVolumeControlStream, Status(int* _aidl_return));
+  MOCK_METHOD1(SetVolumeControlStream, Status(int stream));
+  MOCK_METHOD0(IncrementVolume, Status());
+  MOCK_METHOD0(DecrementVolume, Status());
+  MOCK_METHOD1(RegisterServiceCallback,
+               Status(const android::sp<IAudioServiceCallback>& callback));
+  MOCK_METHOD1(UnregisterServiceCallback,
+               Status(const android::sp<IAudioServiceCallback>& callback));
+
+  void RegisterHandlers(std::weak_ptr<AudioDeviceHandler>,
+                        std::weak_ptr<AudioVolumeHandler>){};
+  void OnDevicesConnected(const std::vector<int>&) {}
+  void OnDevicesDisconnected(const std::vector<int>&) {}
+  void OnVolumeChanged(audio_stream_type_t, int, int){};
+};
+
+}  // namespace brillo
+
+#endif  // BRILLO_AUDIO_AUDIOSERVICE_BRILLO_AUDIO_SERVICE_MOCK_H_
diff --git a/media/camera/docs/CameraCharacteristicsKeys.mako b/media/camera/docs/CameraCharacteristicsKeys.mako
new file mode 100644
index 0000000..5d2d7e4
--- /dev/null
+++ b/media/camera/docs/CameraCharacteristicsKeys.mako
@@ -0,0 +1,17 @@
+## -*- coding: utf-8 -*-
+##
+## Copyright (C) 2013 The Android Open Source Project
+##
+## Licensed under the Apache License, Version 2.0 (the "License");
+## you may not use this file except in compliance with the License.
+## You may obtain a copy of the License at
+##
+##      http://www.apache.org/licenses/LICENSE-2.0
+##
+## Unless required by applicable law or agreed to in writing, software
+## distributed under the License is distributed on an "AS IS" BASIS,
+## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+## See the License for the specific language governing permissions and
+## limitations under the License.
+##
+<%include file="CameraMetadataKeys.mako" args="java_class='CameraCharacteristics', xml_kind='static'" />
diff --git a/media/camera/docs/CameraMetadataEnums.mako b/media/camera/docs/CameraMetadataEnums.mako
new file mode 100644
index 0000000..eb4b1b0
--- /dev/null
+++ b/media/camera/docs/CameraMetadataEnums.mako
@@ -0,0 +1,92 @@
+## -*- coding: utf-8 -*-
+##
+## Copyright (C) 2013 The Android Open Source Project
+##
+## Licensed under the Apache License, Version 2.0 (the "License");
+## you may not use this file except in compliance with the License.
+## You may obtain a copy of the License at
+##
+##      http://www.apache.org/licenses/LICENSE-2.0
+##
+## Unless required by applicable law or agreed to in writing, software
+## distributed under the License is distributed on an "AS IS" BASIS,
+## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+## See the License for the specific language governing permissions and
+## limitations under the License.
+##
+\
+## This section of enum integer definitions is inserted into
+## android.hardware.camera2.CameraMetadata.
+    /*@O~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~
+     * The enum values below this point are generated from metadata
+     * definitions in /system/media/camera/docs. Do not modify by hand or
+     * modify the comment blocks at the start or end.
+     *~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~*/
+##
+## Generate an enum's integers
+<%def name="generate_enum(entry, target_class)">\
+    //
+    // Enumeration values for ${target_class}#${entry.name | jkey_identifier}
+    //
+
+  % for value in entry.enum.values:
+    /**
+    % if value.notes:
+${value.notes | javadoc(metadata)}\
+    % endif
+     * @see ${target_class}#${entry.name | jkey_identifier}
+    % if entry.applied_visibility == 'hidden' or value.hidden:
+     * @hide
+    %endif
+    % if value.deprecated:
+     * @deprecated Please refer to this API documentation to find the alternatives
+    % endif
+     */
+    public static final int ${jenum_value(entry, value)} = ${enum_calculate_value_string(value)};
+
+  % endfor
+</%def>\
+##
+## Generate a list of only Static, Controls, or Dynamic properties.
+<%def name="single_kind_keys(xml_name, target_class)">\
+% for outer_namespace in metadata.outer_namespaces: ## assumes single 'android' namespace
+  % for section in outer_namespace.sections:
+    % if section.find_first(lambda x: isinstance(x, metadata_model.Entry) and x.kind == xml_name) and \
+         any_visible(section, xml_name, ('public','hidden') ):
+      % for inner_namespace in get_children_by_filtering_kind(section, xml_name, 'namespaces'):
+## We only support 1 level of inner namespace, i.e. android.a.b and android.a.b.c works, but not android.a.b.c.d
+## If we need to support more, we should use a recursive function here instead.. but the indentation gets trickier.
+        % for entry in filter_visibility(inner_namespace.entries, ('hidden','public')):
+          % if entry.enum \
+              and not (entry.typedef and entry.typedef.languages.get('java')) \
+              and not entry.is_clone():
+${generate_enum(entry, target_class)}\
+          % endif
+        % endfor
+      % endfor
+      % for entry in filter_visibility( \
+          get_children_by_filtering_kind(section, xml_name, 'entries'), \
+                                         ('hidden', 'public')):
+        % if entry.enum \
+             and not (entry.typedef and entry.typedef.languages.get('java')) \
+             and not entry.is_clone():
+${generate_enum(entry, target_class)}\
+        % endif
+      % endfor
+    % endif
+  % endfor
+% endfor
+</%def>\
+
+##
+## Static properties only
+${single_kind_keys('static','CameraCharacteristics')}\
+##
+## Controls properties only
+${single_kind_keys('controls','CaptureRequest')}\
+##
+## Dynamic properties only
+${single_kind_keys('dynamic','CaptureResult')}\
+    /*~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~
+     * End generated code
+     *~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~O@*/
diff --git a/media/camera/docs/CameraMetadataKeys.mako b/media/camera/docs/CameraMetadataKeys.mako
new file mode 100644
index 0000000..f9286fa
--- /dev/null
+++ b/media/camera/docs/CameraMetadataKeys.mako
@@ -0,0 +1,107 @@
+## -*- coding: utf-8 -*-
+##
+## Copyright (C) 2013 The Android Open Source Project
+##
+## Licensed under the Apache License, Version 2.0 (the "License");
+## you may not use this file except in compliance with the License.
+## You may obtain a copy of the License at
+##
+##      http://www.apache.org/licenses/LICENSE-2.0
+##
+## Unless required by applicable law or agreed to in writing, software
+## distributed under the License is distributed on an "AS IS" BASIS,
+## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+## See the License for the specific language governing permissions and
+## limitations under the License.
+##
+\
+## These sections of metadata Key definitions are inserted into the middle of
+## android.hardware.camera2.CameraCharacteristics, CaptureRequest, and CaptureResult.
+<%page args="java_class, xml_kind" />\
+    /*@O~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~
+     * The key entries below this point are generated from metadata
+     * definitions in /system/media/camera/docs. Do not modify by hand or
+     * modify the comment blocks at the start or end.
+     *~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~*/
+
+##
+## Generate a single key and docs
+<%def name="generate_key(entry)">\
+    /**
+<%
+    # Dedent fixes markdown not to generate code blocks. Then do the rest.
+    description = ""
+    if entry.description:
+        description = dedent(entry.description) + "\n\n"
+    details = ""
+    if entry.details:
+        details = dedent(entry.details)
+    # Unconditionally add extra information if necessary
+    extra_detail = generate_extra_javadoc_detail(entry)("")
+
+    concatenated_info = description + details + extra_detail
+%>\
+## Glue description and details together before javadoc-izing. Otherwise @see in middle of javadoc.
+${concatenated_info | javadoc(metadata)}\
+  % if entry.enum and not (entry.typedef and entry.typedef.languages.get('java')):
+    % for value in entry.enum.values:
+     % if not value.hidden:
+     * @see #${jenum_value(entry, value)}
+     % endif
+    % endfor
+  % endif
+  % if entry.deprecated:
+     * @deprecated
+  % endif
+  % if entry.applied_visibility == 'hidden':
+     * @hide
+  % endif
+     */
+  % if entry.deprecated:
+    @Deprecated
+  % endif
+  % if entry.applied_visibility == 'public':
+    @PublicKey
+  % endif
+  % if entry.synthetic:
+    @SyntheticKey
+  % endif
+    public static final Key<${jtype_boxed(entry)}> ${entry.name | jkey_identifier} =
+            new Key<${jtype_boxed(entry)}>("${entry.name}", ${jkey_type_token(entry)});
+</%def>\
+##
+## Generate a list of only Static, Controls, or Dynamic properties.
+<%def name="single_kind_keys(java_name, xml_name)">\
+% for outer_namespace in metadata.outer_namespaces: ## assumes single 'android' namespace
+  % for section in outer_namespace.sections:
+    % if section.find_first(lambda x: isinstance(x, metadata_model.Entry) and x.kind == xml_name) and \
+         any_visible(section, xml_name, ('public','hidden') ):
+      % for inner_namespace in get_children_by_filtering_kind(section, xml_name, 'namespaces'):
+## We only support 1 level of inner namespace, i.e. android.a.b and android.a.b.c works, but not android.a.b.c.d
+## If we need to support more, we should use a recursive function here instead.. but the indentation gets trickier.
+        % for entry in filter_visibility(inner_namespace.merged_entries, ('hidden','public')):
+${generate_key(entry)}
+       % endfor
+    % endfor
+    % for entry in filter_visibility( \
+        get_children_by_filtering_kind(section, xml_name, 'merged_entries'), \
+                                         ('hidden', 'public')):
+${generate_key(entry)}
+    % endfor
+    % endif
+  % endfor
+% endfor
+</%def>\
+##
+## Static properties only
+##${single_kind_keys('CameraCharacteristicsKeys', 'static')}
+##
+## Controls properties only
+##${single_kind_keys('CaptureRequestKeys', 'controls')}
+##
+## Dynamic properties only
+##${single_kind_keys('CaptureResultKeys', 'dynamic')}
+${single_kind_keys(java_class, xml_kind)}\
+    /*~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~
+     * End generated code
+     *~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~O@*/
\ No newline at end of file
diff --git a/media/camera/docs/CaptureRequestKeys.mako b/media/camera/docs/CaptureRequestKeys.mako
new file mode 100644
index 0000000..bb8910f
--- /dev/null
+++ b/media/camera/docs/CaptureRequestKeys.mako
@@ -0,0 +1,17 @@
+## -*- coding: utf-8 -*-
+##
+## Copyright (C) 2013 The Android Open Source Project
+##
+## Licensed under the Apache License, Version 2.0 (the "License");
+## you may not use this file except in compliance with the License.
+## You may obtain a copy of the License at
+##
+##      http://www.apache.org/licenses/LICENSE-2.0
+##
+## Unless required by applicable law or agreed to in writing, software
+## distributed under the License is distributed on an "AS IS" BASIS,
+## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+## See the License for the specific language governing permissions and
+## limitations under the License.
+##
+<%include file="CameraMetadataKeys.mako" args="java_class='CaptureRequest', xml_kind='controls'" />
diff --git a/media/camera/docs/CaptureResultKeys.mako b/media/camera/docs/CaptureResultKeys.mako
new file mode 100644
index 0000000..07bb139
--- /dev/null
+++ b/media/camera/docs/CaptureResultKeys.mako
@@ -0,0 +1,17 @@
+## -*- coding: utf-8 -*-
+##
+## Copyright (C) 2013 The Android Open Source Project
+##
+## Licensed under the Apache License, Version 2.0 (the "License");
+## you may not use this file except in compliance with the License.
+## You may obtain a copy of the License at
+##
+##      http://www.apache.org/licenses/LICENSE-2.0
+##
+## Unless required by applicable law or agreed to in writing, software
+## distributed under the License is distributed on an "AS IS" BASIS,
+## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+## See the License for the specific language governing permissions and
+## limitations under the License.
+##
+<%include file="CameraMetadataKeys.mako" args="java_class='CaptureResult', xml_kind='dynamic'" />
diff --git a/media/camera/docs/CaptureResultTest.mako b/media/camera/docs/CaptureResultTest.mako
new file mode 100644
index 0000000..6fb4905
--- /dev/null
+++ b/media/camera/docs/CaptureResultTest.mako
@@ -0,0 +1,38 @@
+## -*- coding: utf-8 -*-
+##
+## Copyright (C) 2013 The Android Open Source Project
+##
+## Licensed under the Apache License, Version 2.0 (the "License");
+## you may not use this file except in compliance with the License.
+## You may obtain a copy of the License at
+##
+##      http://www.apache.org/licenses/LICENSE-2.0
+##
+## Unless required by applicable law or agreed to in writing, software
+## distributed under the License is distributed on an "AS IS" BASIS,
+## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+## See the License for the specific language governing permissions and
+## limitations under the License.
+##
+    /*@O~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~
+     * The key entries below this point are generated from metadata
+     * definitions in /system/media/camera/docs. Do not modify by hand or
+     * modify the comment blocks at the start or end.
+     *~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~*/
+
+    private static List<CaptureResult.Key<?>> getAllCaptureResultKeys() {
+        ArrayList<CaptureResult.Key<?>> resultKeys = new ArrayList<CaptureResult.Key<?>>();
+% for sec in find_all_sections(metadata):
+  % for entry in find_unique_entries(sec):
+    % if entry.kind == 'dynamic' and entry.visibility == "public":
+        resultKeys.add(CaptureResult.${jkey_identifier(entry.name)});
+    % endif
+  % endfor
+% endfor
+
+        return resultKeys;
+    }
+
+    /*~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~
+     * End generated code
+     *~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~O@*/
diff --git a/media/camera/docs/README.md b/media/camera/docs/README.md
new file mode 100644
index 0000000..c720455
--- /dev/null
+++ b/media/camera/docs/README.md
@@ -0,0 +1,29 @@
+# Camera Metadata XML
+## Introduction
+This is a set of scripts to manipulate the camera metadata in an XML form.
+
+## Generated Files
+Many files can be generated from XML, such as the documentation (html/pdf),
+C code, Java code, and even XML itself (as a sanity check).
+
+## Dependencies
+* Python 2.7.x+
+* Beautiful Soup 4+ - HTML/XML parser, used to parse `metadata_properties.xml`
+* Mako 0.7+         - Template engine, needed to do file generation.
+* Markdown 2.1+     - Plain text to HTML converter, for docs formatting.
+* Tidy              - Cleans up the XML/HTML files.
+* XML Lint          - Validates XML against XSD schema.
+
+## Quick Setup (Ubuntu Precise):
+sudo apt-get install python-mako
+sudo apt-get install python-bs4
+sudo apt-get install python-markdown
+sudo apt-get install tidy
+sudo apt-get install libxml2-utils #xmllint
+
+## Quick Setup (MacPorts)
+sudo port install py27-beautifulsoup4
+sudo port install py27-mako
+sudo port install py27-markdown
+sudo port install tidy
+sudo port install libxml2 #xmllint
diff --git a/media/camera/docs/__init__.py b/media/camera/docs/__init__.py
new file mode 100644
index 0000000..e18dba4
--- /dev/null
+++ b/media/camera/docs/__init__.py
@@ -0,0 +1,19 @@
+#!/usr/bin/python
+
+#
+# Copyright (C) 2012 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# This file is intentionally left empty
diff --git a/media/camera/docs/camera_metadata_tag_info.mako b/media/camera/docs/camera_metadata_tag_info.mako
new file mode 100644
index 0000000..9dde7bf
--- /dev/null
+++ b/media/camera/docs/camera_metadata_tag_info.mako
@@ -0,0 +1,105 @@
+## -*- coding: utf-8 -*-
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * !! Do not reference this file directly !!
+ *
+ * It is logically a part of camera_metadata.c.  It is broken out for ease of
+ * maintaining the tag info.
+ *
+ * Array assignments are done using specified-index syntax to keep things in
+ * sync with camera_metadata_tags.h
+ */
+
+/**
+ * ! Do not edit this file directly !
+ *
+ * Generated automatically from camera_metadata_tag_info.mako
+ */
+
+const char *camera_metadata_section_names[ANDROID_SECTION_COUNT] = {
+  % for i in find_all_sections(metadata):
+    ${"[%s]" %(path_name(i)) | csym,pad(36)} = "${path_name(i)}",
+  % endfor
+};
+
+unsigned int camera_metadata_section_bounds[ANDROID_SECTION_COUNT][2] = {
+  % for i in find_all_sections(metadata):
+    ${"[%s]" %(path_name(i)) | csym,pad(36)} = { ${path_name(i) | csym}_START,
+                                       ${path_name(i) | csym}_END },
+  % endfor
+};
+
+% for sec in find_all_sections(metadata):
+static tag_info_t ${path_name(sec) | csyml}[${path_name(sec) | csym}_END -
+        ${path_name(sec) | csym}_START] = {
+  % for entry in remove_synthetic(find_unique_entries(sec)):
+    [ ${entry.name | csym} - ${path_name(sec) | csym}_START ] =
+    { ${'"%s",' %(entry.name_short) | pad(40)} ${entry.type | ctype_enum,ljust(11)} },
+  % endfor
+};
+
+% endfor
+
+tag_info_t *tag_info[ANDROID_SECTION_COUNT] = {
+  % for i in find_all_sections(metadata):
+    ${path_name(i) | csyml},
+  % endfor
+};
+
+int camera_metadata_enum_snprint(uint32_t tag,
+                                 uint32_t value,
+                                 char *dst,
+                                 size_t size) {
+    const char *msg = "error: not an enum";
+    int ret = -1;
+
+    switch(tag) {
+    % for sec in find_all_sections(metadata):
+      % for idx,entry in enumerate(remove_synthetic(find_unique_entries(sec))):
+        case ${entry.name | csym}: {
+          % if entry.enum:
+            switch (value) {
+              % for val in entry.enum.values:
+                case ${entry.name | csym}_${val.name}:
+                    msg = "${val.name}";
+                    ret = 0;
+                    break;
+              % endfor
+                default:
+                    msg = "error: enum value out of range";
+            }
+          % endif
+            break;
+        }
+      % endfor
+
+    %endfor
+    }
+
+    strncpy(dst, msg, size - 1);
+    dst[size - 1] = '\0';
+
+    return ret;
+}
+
+<%
+  find_values = lambda x: isinstance(x, metadata_model.EnumValue)
+  enum_values = metadata.find_all(find_values)
+  enum_value_max_len = max([len(value.name) for value in enum_values]) + 1
+%>
+#define CAMERA_METADATA_ENUM_STRING_MAX_SIZE ${enum_value_max_len}
diff --git a/media/camera/docs/camera_metadata_tags.mako b/media/camera/docs/camera_metadata_tags.mako
new file mode 100644
index 0000000..b950c27
--- /dev/null
+++ b/media/camera/docs/camera_metadata_tags.mako
@@ -0,0 +1,112 @@
+## -*- coding: utf-8 -*-
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * !! Do not include this file directly !!
+ *
+ * Include camera_metadata.h instead.
+ */
+
+/**
+ * ! Do not edit this file directly !
+ *
+ * Generated automatically from camera_metadata_tags.mako
+ */
+
+<%!
+  def annotated_type(entry):
+    if entry.enum:
+       type = 'enum'
+    else:
+       type = entry.type
+    if entry.container == 'array':
+       type += '[]'
+
+    return type
+%>\
+\
+/** TODO: Nearly every enum in this file needs a description */
+
+/**
+ * Top level hierarchy definitions for camera metadata. *_INFO sections are for
+ * the static metadata that can be retrived without opening the camera device.
+ * New sections must be added right before ANDROID_SECTION_COUNT to maintain
+ * existing enumerations.
+ */
+typedef enum camera_metadata_section {
+  % for i in find_all_sections(metadata):
+    ${path_name(i) | csym},
+  % endfor
+    ANDROID_SECTION_COUNT,
+
+    VENDOR_SECTION = 0x8000
+} camera_metadata_section_t;
+
+/**
+ * Hierarchy positions in enum space. All vendor extension tags must be
+ * defined with tag >= VENDOR_SECTION_START
+ */
+typedef enum camera_metadata_section_start {
+  % for i in find_all_sections(metadata):
+    ${path_name(i) + '.start' | csym,ljust(30)} = ${path_name(i) | csym,pad(64)} << 16,
+  % endfor
+    VENDOR_SECTION_START           = VENDOR_SECTION            << 16
+} camera_metadata_section_start_t;
+
+/**
+ * Main enum for defining camera metadata tags.  New entries must always go
+ * before the section _END tag to preserve existing enumeration values.  In
+ * addition, the name and type of the tag needs to be added to
+ * system/media/camera/src/camera_metadata_tag_info.c
+ */
+typedef enum camera_metadata_tag {
+    % for sec in find_all_sections(metadata):
+      % for idx,entry in enumerate(remove_synthetic(find_unique_entries(sec))):
+        % if idx == 0:
+    ${entry.name + " = " | csym,ljust(50)}// ${annotated_type(entry) | ljust(12)} | ${entry.applied_visibility}
+            ${path_name(find_parent_section(entry)) | csym}_START,
+        % else:
+    ${entry.name + "," | csym,ljust(50)}// ${annotated_type(entry) | ljust(12)} | ${entry.applied_visibility}
+        % endif
+      % endfor
+    ${path_name(sec) | csym}_END,
+
+    %endfor
+} camera_metadata_tag_t;
+
+/**
+ * Enumeration definitions for the various entries that need them
+ */
+
+% for sec in find_all_sections(metadata):
+  % for entry in remove_synthetic(find_unique_entries(sec)):
+    % if entry.enum:
+// ${entry.name | csym}
+typedef enum camera_metadata_enum_${csym(entry.name).lower()} {
+      % for val in entry.enum.values:
+        % if val.id is None:
+    ${entry.name | csym}_${val.name},
+        % else:
+    ${'%s_%s'%(csym(entry.name), val.name) | pad(65)} = ${val.id},
+        % endif
+      % endfor
+} camera_metadata_enum_${csym(entry.name).lower()}_t;
+
+    % endif
+  % endfor
+
+%endfor
diff --git a/media/camera/docs/docs.html b/media/camera/docs/docs.html
new file mode 100644
index 0000000..68fac79
--- /dev/null
+++ b/media/camera/docs/docs.html
@@ -0,0 +1,26648 @@
+<!DOCTYPE html>
+<html>
+<!-- Copyright (C) 2012 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<head>
+  <!-- automatically generated from html.mako. do NOT edit directly -->
+  <meta charset="utf-8" />
+  <title>Android Camera HAL3.2 Properties</title>
+  <style type="text/css">
+     body { background-color: #f7f7f7; font-family: Roboto, sans-serif;}
+     h1 { color: #333333; }
+     h2 { color: #333333; }
+     a:link { color: #258aaf; text-decoration: none}
+     a:hover { color: #459aaf; text-decoration: underline }
+     a:visited { color: #154a5f; text-decoration: none}
+    .section { color: #eeeeee; font-size: 1.5em; font-weight: bold; background-color: #888888; padding: 0.5em 0em 0.5em 0.5em; border-width: thick thin thin thin; border-color: #111111 #777777 #777777 #777777}
+    .kind { color: #eeeeee; font-size: 1.2em; font-weight: bold; padding-left: 1.5em; background-color: #aaaaaa }
+    .entry { background-color: #f0f0f0 }
+    .entry_cont { background-color: #f0f0f0 }
+    .entries_header { background-color: #dddddd; text-align: center}
+
+    /* toc style */
+    .toc_section_header { font-size:1.3em;  }
+    .toc_kind_header { font-size:1.2em;  }
+    .toc_deprecated { text-decoration:line-through; }
+
+    /* table column sizes */
+    table { border-collapse:collapse; table-layout: fixed; width: 100%; word-wrap: break-word }
+    td,th { border: 1px solid; border-color: #aaaaaa; padding-left: 0.5em; padding-right: 0.5em }
+    .th_name { width: 20% }
+    .th_units { width: 10% }
+    .th_tags { width: 5% }
+    .th_details { width: 25% }
+    .th_type { width: 20% }
+    .th_description { width: 20% }
+    .th_range { width: 10% }
+    td { font-size: 0.9em; }
+
+    /* hide the first thead, we need it there only to enforce column sizes */
+    .thead_dummy { visibility: hidden; }
+
+    /* Entry flair */
+    .entry_name { color: #333333; padding-left:1.0em; font-size:1.1em; font-family: monospace; vertical-align:top; }
+    .entry_name_deprecated { text-decoration:line-through; }
+
+    /* Entry type flair */
+    .entry_type_name { font-size:1.1em; color: #669900; font-weight: bold;}
+    .entry_type_name_enum:after { color: #669900; font-weight: bold; content:" (enum)" }
+    .entry_type_visibility { font-weight: bolder; padding-left:1em}
+    .entry_type_synthetic { font-weight: bolder; color: #996600; }
+    .entry_type_hwlevel { font-weight: bolder; color: #000066; }
+    .entry_type_deprecated { font-weight: bolder; color: #4D4D4D; }
+    .entry_type_enum_name { font-family: monospace; font-weight: bolder; }
+    .entry_type_enum_notes:before { content:" - " }
+    .entry_type_enum_notes>p:first-child { display:inline; }
+    .entry_type_enum_value:before { content:" = " }
+    .entry_type_enum_value { font-family: monospace; }
+    .entry ul { margin: 0 0 0 0; list-style-position: inside; padding-left: 0.5em; }
+    .entry ul li { padding: 0 0 0 0; margin: 0 0 0 0;}
+    .entry_range_deprecated { font-weight: bolder; }
+
+    /* Entry tags flair */
+    .entry_tags ul { list-style-type: none; }
+
+    /* Entry details (full docs) flair */
+    .entry_details_header { font-weight: bold; background-color: #dddddd;
+      text-align: center; font-size: 1.1em; margin-left: 0em; margin-right: 0em; }
+
+    /* Entry spacer flair */
+    .entry_spacer { background-color: transparent; border-style: none; height: 0.5em; }
+
+    /* TODO: generate abbr element for each tag link? */
+    /* TODO for each x.y.z try to link it to the entry */
+
+  </style>
+
+  <style>
+
+    @media print {
+      /* NOTE: reportedly there is a bug in Chrome where it lays out tables
+         before it knows the page is being printed, so the page-break-*
+         styles below may be ignored
+         */
+        tr { page-break-after: always; page-break-inside: avoid; }
+    }
+
+  </style>
+</head>
+
+
+
+<body>
+  <h1>Android Camera HAL3.2 Properties</h1>
+
+
+  <h2>Table of Contents</h2>
+  <ul class="toc">
+    <li><a href="#tag_index" class="toc_section_header">Tags</a></li>
+    <li>
+      <span class="toc_section_header"><a href="#section_colorCorrection">colorCorrection</a></span>
+      <ul class="toc_section">
+        <li>
+          <span class="toc_kind_header">controls</span>
+          <ul class="toc_section">
+            <li
+            ><a href="#controls_android.colorCorrection.mode">android.colorCorrection.mode</a></li>
+            <li
+            ><a href="#controls_android.colorCorrection.transform">android.colorCorrection.transform</a></li>
+            <li
+            ><a href="#controls_android.colorCorrection.gains">android.colorCorrection.gains</a></li>
+            <li
+            ><a href="#controls_android.colorCorrection.aberrationMode">android.colorCorrection.aberrationMode</a></li>
+          </ul>
+        </li>
+        <li>
+          <span class="toc_kind_header">dynamic</span>
+          <ul class="toc_section">
+            <li
+            ><a href="#dynamic_android.colorCorrection.mode">android.colorCorrection.mode</a></li>
+            <li
+            ><a href="#dynamic_android.colorCorrection.transform">android.colorCorrection.transform</a></li>
+            <li
+            ><a href="#dynamic_android.colorCorrection.gains">android.colorCorrection.gains</a></li>
+            <li
+            ><a href="#dynamic_android.colorCorrection.aberrationMode">android.colorCorrection.aberrationMode</a></li>
+          </ul>
+        </li>
+        <li>
+          <span class="toc_kind_header">static</span>
+          <ul class="toc_section">
+            <li
+            ><a href="#static_android.colorCorrection.availableAberrationModes">android.colorCorrection.availableAberrationModes</a></li>
+          </ul>
+        </li>
+      </ul> <!-- toc_section -->
+    </li>
+    <li>
+      <span class="toc_section_header"><a href="#section_control">control</a></span>
+      <ul class="toc_section">
+        <li>
+          <span class="toc_kind_header">controls</span>
+          <ul class="toc_section">
+            <li
+            ><a href="#controls_android.control.aeAntibandingMode">android.control.aeAntibandingMode</a></li>
+            <li
+            ><a href="#controls_android.control.aeExposureCompensation">android.control.aeExposureCompensation</a></li>
+            <li
+            ><a href="#controls_android.control.aeLock">android.control.aeLock</a></li>
+            <li
+            ><a href="#controls_android.control.aeMode">android.control.aeMode</a></li>
+            <li
+            ><a href="#controls_android.control.aeRegions">android.control.aeRegions</a></li>
+            <li
+            ><a href="#controls_android.control.aeTargetFpsRange">android.control.aeTargetFpsRange</a></li>
+            <li
+            ><a href="#controls_android.control.aePrecaptureTrigger">android.control.aePrecaptureTrigger</a></li>
+            <li
+            ><a href="#controls_android.control.afMode">android.control.afMode</a></li>
+            <li
+            ><a href="#controls_android.control.afRegions">android.control.afRegions</a></li>
+            <li
+            ><a href="#controls_android.control.afTrigger">android.control.afTrigger</a></li>
+            <li
+            ><a href="#controls_android.control.awbLock">android.control.awbLock</a></li>
+            <li
+            ><a href="#controls_android.control.awbMode">android.control.awbMode</a></li>
+            <li
+            ><a href="#controls_android.control.awbRegions">android.control.awbRegions</a></li>
+            <li
+            ><a href="#controls_android.control.captureIntent">android.control.captureIntent</a></li>
+            <li
+            ><a href="#controls_android.control.effectMode">android.control.effectMode</a></li>
+            <li
+            ><a href="#controls_android.control.mode">android.control.mode</a></li>
+            <li
+            ><a href="#controls_android.control.sceneMode">android.control.sceneMode</a></li>
+            <li
+            ><a href="#controls_android.control.videoStabilizationMode">android.control.videoStabilizationMode</a></li>
+          </ul>
+        </li>
+        <li>
+          <span class="toc_kind_header">static</span>
+          <ul class="toc_section">
+            <li
+            ><a href="#static_android.control.aeAvailableAntibandingModes">android.control.aeAvailableAntibandingModes</a></li>
+            <li
+            ><a href="#static_android.control.aeAvailableModes">android.control.aeAvailableModes</a></li>
+            <li
+            ><a href="#static_android.control.aeAvailableTargetFpsRanges">android.control.aeAvailableTargetFpsRanges</a></li>
+            <li
+            ><a href="#static_android.control.aeCompensationRange">android.control.aeCompensationRange</a></li>
+            <li
+            ><a href="#static_android.control.aeCompensationStep">android.control.aeCompensationStep</a></li>
+            <li
+            ><a href="#static_android.control.afAvailableModes">android.control.afAvailableModes</a></li>
+            <li
+            ><a href="#static_android.control.availableEffects">android.control.availableEffects</a></li>
+            <li
+            ><a href="#static_android.control.availableSceneModes">android.control.availableSceneModes</a></li>
+            <li
+            ><a href="#static_android.control.availableVideoStabilizationModes">android.control.availableVideoStabilizationModes</a></li>
+            <li
+            ><a href="#static_android.control.awbAvailableModes">android.control.awbAvailableModes</a></li>
+            <li
+            ><a href="#static_android.control.maxRegions">android.control.maxRegions</a></li>
+            <li
+            ><a href="#static_android.control.maxRegionsAe">android.control.maxRegionsAe</a></li>
+            <li
+            ><a href="#static_android.control.maxRegionsAwb">android.control.maxRegionsAwb</a></li>
+            <li
+            ><a href="#static_android.control.maxRegionsAf">android.control.maxRegionsAf</a></li>
+            <li
+            ><a href="#static_android.control.sceneModeOverrides">android.control.sceneModeOverrides</a></li>
+            <li
+            ><a href="#static_android.control.availableHighSpeedVideoConfigurations">android.control.availableHighSpeedVideoConfigurations</a></li>
+            <li
+            ><a href="#static_android.control.aeLockAvailable">android.control.aeLockAvailable</a></li>
+            <li
+            ><a href="#static_android.control.awbLockAvailable">android.control.awbLockAvailable</a></li>
+            <li
+            ><a href="#static_android.control.availableModes">android.control.availableModes</a></li>
+          </ul>
+        </li>
+        <li>
+          <span class="toc_kind_header">dynamic</span>
+          <ul class="toc_section">
+            <li
+                class="toc_deprecated"
+            ><a href="#dynamic_android.control.aePrecaptureId">android.control.aePrecaptureId</a></li>
+            <li
+            ><a href="#dynamic_android.control.aeAntibandingMode">android.control.aeAntibandingMode</a></li>
+            <li
+            ><a href="#dynamic_android.control.aeExposureCompensation">android.control.aeExposureCompensation</a></li>
+            <li
+            ><a href="#dynamic_android.control.aeLock">android.control.aeLock</a></li>
+            <li
+            ><a href="#dynamic_android.control.aeMode">android.control.aeMode</a></li>
+            <li
+            ><a href="#dynamic_android.control.aeRegions">android.control.aeRegions</a></li>
+            <li
+            ><a href="#dynamic_android.control.aeTargetFpsRange">android.control.aeTargetFpsRange</a></li>
+            <li
+            ><a href="#dynamic_android.control.aePrecaptureTrigger">android.control.aePrecaptureTrigger</a></li>
+            <li
+            ><a href="#dynamic_android.control.aeState">android.control.aeState</a></li>
+            <li
+            ><a href="#dynamic_android.control.afMode">android.control.afMode</a></li>
+            <li
+            ><a href="#dynamic_android.control.afRegions">android.control.afRegions</a></li>
+            <li
+            ><a href="#dynamic_android.control.afTrigger">android.control.afTrigger</a></li>
+            <li
+            ><a href="#dynamic_android.control.afState">android.control.afState</a></li>
+            <li
+                class="toc_deprecated"
+            ><a href="#dynamic_android.control.afTriggerId">android.control.afTriggerId</a></li>
+            <li
+            ><a href="#dynamic_android.control.awbLock">android.control.awbLock</a></li>
+            <li
+            ><a href="#dynamic_android.control.awbMode">android.control.awbMode</a></li>
+            <li
+            ><a href="#dynamic_android.control.awbRegions">android.control.awbRegions</a></li>
+            <li
+            ><a href="#dynamic_android.control.captureIntent">android.control.captureIntent</a></li>
+            <li
+            ><a href="#dynamic_android.control.awbState">android.control.awbState</a></li>
+            <li
+            ><a href="#dynamic_android.control.effectMode">android.control.effectMode</a></li>
+            <li
+            ><a href="#dynamic_android.control.mode">android.control.mode</a></li>
+            <li
+            ><a href="#dynamic_android.control.sceneMode">android.control.sceneMode</a></li>
+            <li
+            ><a href="#dynamic_android.control.videoStabilizationMode">android.control.videoStabilizationMode</a></li>
+          </ul>
+        </li>
+      </ul> <!-- toc_section -->
+    </li>
+    <li>
+      <span class="toc_section_header"><a href="#section_demosaic">demosaic</a></span>
+      <ul class="toc_section">
+        <li>
+          <span class="toc_kind_header">controls</span>
+          <ul class="toc_section">
+            <li
+            ><a href="#controls_android.demosaic.mode">android.demosaic.mode</a></li>
+          </ul>
+        </li>
+      </ul> <!-- toc_section -->
+    </li>
+    <li>
+      <span class="toc_section_header"><a href="#section_edge">edge</a></span>
+      <ul class="toc_section">
+        <li>
+          <span class="toc_kind_header">controls</span>
+          <ul class="toc_section">
+            <li
+            ><a href="#controls_android.edge.mode">android.edge.mode</a></li>
+            <li
+            ><a href="#controls_android.edge.strength">android.edge.strength</a></li>
+          </ul>
+        </li>
+        <li>
+          <span class="toc_kind_header">static</span>
+          <ul class="toc_section">
+            <li
+            ><a href="#static_android.edge.availableEdgeModes">android.edge.availableEdgeModes</a></li>
+          </ul>
+        </li>
+        <li>
+          <span class="toc_kind_header">dynamic</span>
+          <ul class="toc_section">
+            <li
+            ><a href="#dynamic_android.edge.mode">android.edge.mode</a></li>
+          </ul>
+        </li>
+      </ul> <!-- toc_section -->
+    </li>
+    <li>
+      <span class="toc_section_header"><a href="#section_flash">flash</a></span>
+      <ul class="toc_section">
+        <li>
+          <span class="toc_kind_header">controls</span>
+          <ul class="toc_section">
+            <li
+            ><a href="#controls_android.flash.firingPower">android.flash.firingPower</a></li>
+            <li
+            ><a href="#controls_android.flash.firingTime">android.flash.firingTime</a></li>
+            <li
+            ><a href="#controls_android.flash.mode">android.flash.mode</a></li>
+          </ul>
+        </li>
+        <li>
+          <span class="toc_kind_header">static</span>
+          <ul class="toc_section">
+
+            <li
+            ><a href="#static_android.flash.info.available">android.flash.info.available</a></li>
+            <li
+            ><a href="#static_android.flash.info.chargeDuration">android.flash.info.chargeDuration</a></li>
+
+            <li
+            ><a href="#static_android.flash.colorTemperature">android.flash.colorTemperature</a></li>
+            <li
+            ><a href="#static_android.flash.maxEnergy">android.flash.maxEnergy</a></li>
+          </ul>
+        </li>
+        <li>
+          <span class="toc_kind_header">dynamic</span>
+          <ul class="toc_section">
+            <li
+            ><a href="#dynamic_android.flash.firingPower">android.flash.firingPower</a></li>
+            <li
+            ><a href="#dynamic_android.flash.firingTime">android.flash.firingTime</a></li>
+            <li
+            ><a href="#dynamic_android.flash.mode">android.flash.mode</a></li>
+            <li
+            ><a href="#dynamic_android.flash.state">android.flash.state</a></li>
+          </ul>
+        </li>
+      </ul> <!-- toc_section -->
+    </li>
+    <li>
+      <span class="toc_section_header"><a href="#section_hotPixel">hotPixel</a></span>
+      <ul class="toc_section">
+        <li>
+          <span class="toc_kind_header">controls</span>
+          <ul class="toc_section">
+            <li
+            ><a href="#controls_android.hotPixel.mode">android.hotPixel.mode</a></li>
+          </ul>
+        </li>
+        <li>
+          <span class="toc_kind_header">static</span>
+          <ul class="toc_section">
+            <li
+            ><a href="#static_android.hotPixel.availableHotPixelModes">android.hotPixel.availableHotPixelModes</a></li>
+          </ul>
+        </li>
+        <li>
+          <span class="toc_kind_header">dynamic</span>
+          <ul class="toc_section">
+            <li
+            ><a href="#dynamic_android.hotPixel.mode">android.hotPixel.mode</a></li>
+          </ul>
+        </li>
+      </ul> <!-- toc_section -->
+    </li>
+    <li>
+      <span class="toc_section_header"><a href="#section_jpeg">jpeg</a></span>
+      <ul class="toc_section">
+        <li>
+          <span class="toc_kind_header">controls</span>
+          <ul class="toc_section">
+            <li
+            ><a href="#controls_android.jpeg.gpsLocation">android.jpeg.gpsLocation</a></li>
+            <li
+            ><a href="#controls_android.jpeg.gpsCoordinates">android.jpeg.gpsCoordinates</a></li>
+            <li
+            ><a href="#controls_android.jpeg.gpsProcessingMethod">android.jpeg.gpsProcessingMethod</a></li>
+            <li
+            ><a href="#controls_android.jpeg.gpsTimestamp">android.jpeg.gpsTimestamp</a></li>
+            <li
+            ><a href="#controls_android.jpeg.orientation">android.jpeg.orientation</a></li>
+            <li
+            ><a href="#controls_android.jpeg.quality">android.jpeg.quality</a></li>
+            <li
+            ><a href="#controls_android.jpeg.thumbnailQuality">android.jpeg.thumbnailQuality</a></li>
+            <li
+            ><a href="#controls_android.jpeg.thumbnailSize">android.jpeg.thumbnailSize</a></li>
+          </ul>
+        </li>
+        <li>
+          <span class="toc_kind_header">static</span>
+          <ul class="toc_section">
+            <li
+            ><a href="#static_android.jpeg.availableThumbnailSizes">android.jpeg.availableThumbnailSizes</a></li>
+            <li
+            ><a href="#static_android.jpeg.maxSize">android.jpeg.maxSize</a></li>
+          </ul>
+        </li>
+        <li>
+          <span class="toc_kind_header">dynamic</span>
+          <ul class="toc_section">
+            <li
+            ><a href="#dynamic_android.jpeg.gpsLocation">android.jpeg.gpsLocation</a></li>
+            <li
+            ><a href="#dynamic_android.jpeg.gpsCoordinates">android.jpeg.gpsCoordinates</a></li>
+            <li
+            ><a href="#dynamic_android.jpeg.gpsProcessingMethod">android.jpeg.gpsProcessingMethod</a></li>
+            <li
+            ><a href="#dynamic_android.jpeg.gpsTimestamp">android.jpeg.gpsTimestamp</a></li>
+            <li
+            ><a href="#dynamic_android.jpeg.orientation">android.jpeg.orientation</a></li>
+            <li
+            ><a href="#dynamic_android.jpeg.quality">android.jpeg.quality</a></li>
+            <li
+            ><a href="#dynamic_android.jpeg.size">android.jpeg.size</a></li>
+            <li
+            ><a href="#dynamic_android.jpeg.thumbnailQuality">android.jpeg.thumbnailQuality</a></li>
+            <li
+            ><a href="#dynamic_android.jpeg.thumbnailSize">android.jpeg.thumbnailSize</a></li>
+          </ul>
+        </li>
+      </ul> <!-- toc_section -->
+    </li>
+    <li>
+      <span class="toc_section_header"><a href="#section_lens">lens</a></span>
+      <ul class="toc_section">
+        <li>
+          <span class="toc_kind_header">controls</span>
+          <ul class="toc_section">
+            <li
+            ><a href="#controls_android.lens.aperture">android.lens.aperture</a></li>
+            <li
+            ><a href="#controls_android.lens.filterDensity">android.lens.filterDensity</a></li>
+            <li
+            ><a href="#controls_android.lens.focalLength">android.lens.focalLength</a></li>
+            <li
+            ><a href="#controls_android.lens.focusDistance">android.lens.focusDistance</a></li>
+            <li
+            ><a href="#controls_android.lens.opticalStabilizationMode">android.lens.opticalStabilizationMode</a></li>
+          </ul>
+        </li>
+        <li>
+          <span class="toc_kind_header">static</span>
+          <ul class="toc_section">
+
+            <li
+            ><a href="#static_android.lens.info.availableApertures">android.lens.info.availableApertures</a></li>
+            <li
+            ><a href="#static_android.lens.info.availableFilterDensities">android.lens.info.availableFilterDensities</a></li>
+            <li
+            ><a href="#static_android.lens.info.availableFocalLengths">android.lens.info.availableFocalLengths</a></li>
+            <li
+            ><a href="#static_android.lens.info.availableOpticalStabilization">android.lens.info.availableOpticalStabilization</a></li>
+            <li
+            ><a href="#static_android.lens.info.hyperfocalDistance">android.lens.info.hyperfocalDistance</a></li>
+            <li
+            ><a href="#static_android.lens.info.minimumFocusDistance">android.lens.info.minimumFocusDistance</a></li>
+            <li
+            ><a href="#static_android.lens.info.shadingMapSize">android.lens.info.shadingMapSize</a></li>
+            <li
+            ><a href="#static_android.lens.info.focusDistanceCalibration">android.lens.info.focusDistanceCalibration</a></li>
+
+            <li
+            ><a href="#static_android.lens.facing">android.lens.facing</a></li>
+            <li
+            ><a href="#static_android.lens.poseRotation">android.lens.poseRotation</a></li>
+            <li
+            ><a href="#static_android.lens.poseTranslation">android.lens.poseTranslation</a></li>
+            <li
+            ><a href="#static_android.lens.intrinsicCalibration">android.lens.intrinsicCalibration</a></li>
+            <li
+            ><a href="#static_android.lens.radialDistortion">android.lens.radialDistortion</a></li>
+          </ul>
+        </li>
+        <li>
+          <span class="toc_kind_header">dynamic</span>
+          <ul class="toc_section">
+            <li
+            ><a href="#dynamic_android.lens.aperture">android.lens.aperture</a></li>
+            <li
+            ><a href="#dynamic_android.lens.filterDensity">android.lens.filterDensity</a></li>
+            <li
+            ><a href="#dynamic_android.lens.focalLength">android.lens.focalLength</a></li>
+            <li
+            ><a href="#dynamic_android.lens.focusDistance">android.lens.focusDistance</a></li>
+            <li
+            ><a href="#dynamic_android.lens.focusRange">android.lens.focusRange</a></li>
+            <li
+            ><a href="#dynamic_android.lens.opticalStabilizationMode">android.lens.opticalStabilizationMode</a></li>
+            <li
+            ><a href="#dynamic_android.lens.state">android.lens.state</a></li>
+            <li
+            ><a href="#dynamic_android.lens.poseRotation">android.lens.poseRotation</a></li>
+            <li
+            ><a href="#dynamic_android.lens.poseTranslation">android.lens.poseTranslation</a></li>
+            <li
+            ><a href="#dynamic_android.lens.intrinsicCalibration">android.lens.intrinsicCalibration</a></li>
+            <li
+            ><a href="#dynamic_android.lens.radialDistortion">android.lens.radialDistortion</a></li>
+          </ul>
+        </li>
+      </ul> <!-- toc_section -->
+    </li>
+    <li>
+      <span class="toc_section_header"><a href="#section_noiseReduction">noiseReduction</a></span>
+      <ul class="toc_section">
+        <li>
+          <span class="toc_kind_header">controls</span>
+          <ul class="toc_section">
+            <li
+            ><a href="#controls_android.noiseReduction.mode">android.noiseReduction.mode</a></li>
+            <li
+            ><a href="#controls_android.noiseReduction.strength">android.noiseReduction.strength</a></li>
+          </ul>
+        </li>
+        <li>
+          <span class="toc_kind_header">static</span>
+          <ul class="toc_section">
+            <li
+            ><a href="#static_android.noiseReduction.availableNoiseReductionModes">android.noiseReduction.availableNoiseReductionModes</a></li>
+          </ul>
+        </li>
+        <li>
+          <span class="toc_kind_header">dynamic</span>
+          <ul class="toc_section">
+            <li
+            ><a href="#dynamic_android.noiseReduction.mode">android.noiseReduction.mode</a></li>
+          </ul>
+        </li>
+      </ul> <!-- toc_section -->
+    </li>
+    <li>
+      <span class="toc_section_header"><a href="#section_quirks">quirks</a></span>
+      <ul class="toc_section">
+        <li>
+          <span class="toc_kind_header">static</span>
+          <ul class="toc_section">
+            <li
+                class="toc_deprecated"
+            ><a href="#static_android.quirks.meteringCropRegion">android.quirks.meteringCropRegion</a></li>
+            <li
+                class="toc_deprecated"
+            ><a href="#static_android.quirks.triggerAfWithAuto">android.quirks.triggerAfWithAuto</a></li>
+            <li
+                class="toc_deprecated"
+            ><a href="#static_android.quirks.useZslFormat">android.quirks.useZslFormat</a></li>
+            <li
+                class="toc_deprecated"
+            ><a href="#static_android.quirks.usePartialResult">android.quirks.usePartialResult</a></li>
+          </ul>
+        </li>
+        <li>
+          <span class="toc_kind_header">dynamic</span>
+          <ul class="toc_section">
+            <li
+                class="toc_deprecated"
+            ><a href="#dynamic_android.quirks.partialResult">android.quirks.partialResult</a></li>
+          </ul>
+        </li>
+      </ul> <!-- toc_section -->
+    </li>
+    <li>
+      <span class="toc_section_header"><a href="#section_request">request</a></span>
+      <ul class="toc_section">
+        <li>
+          <span class="toc_kind_header">controls</span>
+          <ul class="toc_section">
+            <li
+                class="toc_deprecated"
+            ><a href="#controls_android.request.frameCount">android.request.frameCount</a></li>
+            <li
+            ><a href="#controls_android.request.id">android.request.id</a></li>
+            <li
+                class="toc_deprecated"
+            ><a href="#controls_android.request.inputStreams">android.request.inputStreams</a></li>
+            <li
+            ><a href="#controls_android.request.metadataMode">android.request.metadataMode</a></li>
+            <li
+                class="toc_deprecated"
+            ><a href="#controls_android.request.outputStreams">android.request.outputStreams</a></li>
+            <li
+                class="toc_deprecated"
+            ><a href="#controls_android.request.type">android.request.type</a></li>
+          </ul>
+        </li>
+        <li>
+          <span class="toc_kind_header">static</span>
+          <ul class="toc_section">
+            <li
+            ><a href="#static_android.request.maxNumOutputStreams">android.request.maxNumOutputStreams</a></li>
+            <li
+            ><a href="#static_android.request.maxNumOutputRaw">android.request.maxNumOutputRaw</a></li>
+            <li
+            ><a href="#static_android.request.maxNumOutputProc">android.request.maxNumOutputProc</a></li>
+            <li
+            ><a href="#static_android.request.maxNumOutputProcStalling">android.request.maxNumOutputProcStalling</a></li>
+            <li
+                class="toc_deprecated"
+            ><a href="#static_android.request.maxNumReprocessStreams">android.request.maxNumReprocessStreams</a></li>
+            <li
+            ><a href="#static_android.request.maxNumInputStreams">android.request.maxNumInputStreams</a></li>
+            <li
+            ><a href="#static_android.request.pipelineMaxDepth">android.request.pipelineMaxDepth</a></li>
+            <li
+            ><a href="#static_android.request.partialResultCount">android.request.partialResultCount</a></li>
+            <li
+            ><a href="#static_android.request.availableCapabilities">android.request.availableCapabilities</a></li>
+            <li
+            ><a href="#static_android.request.availableRequestKeys">android.request.availableRequestKeys</a></li>
+            <li
+            ><a href="#static_android.request.availableResultKeys">android.request.availableResultKeys</a></li>
+            <li
+            ><a href="#static_android.request.availableCharacteristicsKeys">android.request.availableCharacteristicsKeys</a></li>
+          </ul>
+        </li>
+        <li>
+          <span class="toc_kind_header">dynamic</span>
+          <ul class="toc_section">
+            <li
+                class="toc_deprecated"
+            ><a href="#dynamic_android.request.frameCount">android.request.frameCount</a></li>
+            <li
+            ><a href="#dynamic_android.request.id">android.request.id</a></li>
+            <li
+            ><a href="#dynamic_android.request.metadataMode">android.request.metadataMode</a></li>
+            <li
+                class="toc_deprecated"
+            ><a href="#dynamic_android.request.outputStreams">android.request.outputStreams</a></li>
+            <li
+            ><a href="#dynamic_android.request.pipelineDepth">android.request.pipelineDepth</a></li>
+          </ul>
+        </li>
+      </ul> <!-- toc_section -->
+    </li>
+    <li>
+      <span class="toc_section_header"><a href="#section_scaler">scaler</a></span>
+      <ul class="toc_section">
+        <li>
+          <span class="toc_kind_header">controls</span>
+          <ul class="toc_section">
+            <li
+            ><a href="#controls_android.scaler.cropRegion">android.scaler.cropRegion</a></li>
+          </ul>
+        </li>
+        <li>
+          <span class="toc_kind_header">static</span>
+          <ul class="toc_section">
+            <li
+                class="toc_deprecated"
+            ><a href="#static_android.scaler.availableFormats">android.scaler.availableFormats</a></li>
+            <li
+                class="toc_deprecated"
+            ><a href="#static_android.scaler.availableJpegMinDurations">android.scaler.availableJpegMinDurations</a></li>
+            <li
+                class="toc_deprecated"
+            ><a href="#static_android.scaler.availableJpegSizes">android.scaler.availableJpegSizes</a></li>
+            <li
+            ><a href="#static_android.scaler.availableMaxDigitalZoom">android.scaler.availableMaxDigitalZoom</a></li>
+            <li
+                class="toc_deprecated"
+            ><a href="#static_android.scaler.availableProcessedMinDurations">android.scaler.availableProcessedMinDurations</a></li>
+            <li
+                class="toc_deprecated"
+            ><a href="#static_android.scaler.availableProcessedSizes">android.scaler.availableProcessedSizes</a></li>
+            <li
+                class="toc_deprecated"
+            ><a href="#static_android.scaler.availableRawMinDurations">android.scaler.availableRawMinDurations</a></li>
+            <li
+                class="toc_deprecated"
+            ><a href="#static_android.scaler.availableRawSizes">android.scaler.availableRawSizes</a></li>
+            <li
+            ><a href="#static_android.scaler.availableInputOutputFormatsMap">android.scaler.availableInputOutputFormatsMap</a></li>
+            <li
+            ><a href="#static_android.scaler.availableStreamConfigurations">android.scaler.availableStreamConfigurations</a></li>
+            <li
+            ><a href="#static_android.scaler.availableMinFrameDurations">android.scaler.availableMinFrameDurations</a></li>
+            <li
+            ><a href="#static_android.scaler.availableStallDurations">android.scaler.availableStallDurations</a></li>
+            <li
+            ><a href="#static_android.scaler.streamConfigurationMap">android.scaler.streamConfigurationMap</a></li>
+            <li
+            ><a href="#static_android.scaler.croppingType">android.scaler.croppingType</a></li>
+          </ul>
+        </li>
+        <li>
+          <span class="toc_kind_header">dynamic</span>
+          <ul class="toc_section">
+            <li
+            ><a href="#dynamic_android.scaler.cropRegion">android.scaler.cropRegion</a></li>
+          </ul>
+        </li>
+      </ul> <!-- toc_section -->
+    </li>
+    <li>
+      <span class="toc_section_header"><a href="#section_sensor">sensor</a></span>
+      <ul class="toc_section">
+        <li>
+          <span class="toc_kind_header">controls</span>
+          <ul class="toc_section">
+            <li
+            ><a href="#controls_android.sensor.exposureTime">android.sensor.exposureTime</a></li>
+            <li
+            ><a href="#controls_android.sensor.frameDuration">android.sensor.frameDuration</a></li>
+            <li
+            ><a href="#controls_android.sensor.sensitivity">android.sensor.sensitivity</a></li>
+            <li
+            ><a href="#controls_android.sensor.testPatternData">android.sensor.testPatternData</a></li>
+            <li
+            ><a href="#controls_android.sensor.testPatternMode">android.sensor.testPatternMode</a></li>
+          </ul>
+        </li>
+        <li>
+          <span class="toc_kind_header">static</span>
+          <ul class="toc_section">
+
+            <li
+            ><a href="#static_android.sensor.info.activeArraySize">android.sensor.info.activeArraySize</a></li>
+            <li
+            ><a href="#static_android.sensor.info.sensitivityRange">android.sensor.info.sensitivityRange</a></li>
+            <li
+            ><a href="#static_android.sensor.info.colorFilterArrangement">android.sensor.info.colorFilterArrangement</a></li>
+            <li
+            ><a href="#static_android.sensor.info.exposureTimeRange">android.sensor.info.exposureTimeRange</a></li>
+            <li
+            ><a href="#static_android.sensor.info.maxFrameDuration">android.sensor.info.maxFrameDuration</a></li>
+            <li
+            ><a href="#static_android.sensor.info.physicalSize">android.sensor.info.physicalSize</a></li>
+            <li
+            ><a href="#static_android.sensor.info.pixelArraySize">android.sensor.info.pixelArraySize</a></li>
+            <li
+            ><a href="#static_android.sensor.info.whiteLevel">android.sensor.info.whiteLevel</a></li>
+            <li
+            ><a href="#static_android.sensor.info.timestampSource">android.sensor.info.timestampSource</a></li>
+            <li
+            ><a href="#static_android.sensor.info.lensShadingApplied">android.sensor.info.lensShadingApplied</a></li>
+            <li
+            ><a href="#static_android.sensor.info.preCorrectionActiveArraySize">android.sensor.info.preCorrectionActiveArraySize</a></li>
+
+            <li
+            ><a href="#static_android.sensor.referenceIlluminant1">android.sensor.referenceIlluminant1</a></li>
+            <li
+            ><a href="#static_android.sensor.referenceIlluminant2">android.sensor.referenceIlluminant2</a></li>
+            <li
+            ><a href="#static_android.sensor.calibrationTransform1">android.sensor.calibrationTransform1</a></li>
+            <li
+            ><a href="#static_android.sensor.calibrationTransform2">android.sensor.calibrationTransform2</a></li>
+            <li
+            ><a href="#static_android.sensor.colorTransform1">android.sensor.colorTransform1</a></li>
+            <li
+            ><a href="#static_android.sensor.colorTransform2">android.sensor.colorTransform2</a></li>
+            <li
+            ><a href="#static_android.sensor.forwardMatrix1">android.sensor.forwardMatrix1</a></li>
+            <li
+            ><a href="#static_android.sensor.forwardMatrix2">android.sensor.forwardMatrix2</a></li>
+            <li
+            ><a href="#static_android.sensor.baseGainFactor">android.sensor.baseGainFactor</a></li>
+            <li
+            ><a href="#static_android.sensor.blackLevelPattern">android.sensor.blackLevelPattern</a></li>
+            <li
+            ><a href="#static_android.sensor.maxAnalogSensitivity">android.sensor.maxAnalogSensitivity</a></li>
+            <li
+            ><a href="#static_android.sensor.orientation">android.sensor.orientation</a></li>
+            <li
+            ><a href="#static_android.sensor.profileHueSatMapDimensions">android.sensor.profileHueSatMapDimensions</a></li>
+            <li
+            ><a href="#static_android.sensor.availableTestPatternModes">android.sensor.availableTestPatternModes</a></li>
+          </ul>
+        </li>
+        <li>
+          <span class="toc_kind_header">dynamic</span>
+          <ul class="toc_section">
+            <li
+            ><a href="#dynamic_android.sensor.exposureTime">android.sensor.exposureTime</a></li>
+            <li
+            ><a href="#dynamic_android.sensor.frameDuration">android.sensor.frameDuration</a></li>
+            <li
+            ><a href="#dynamic_android.sensor.sensitivity">android.sensor.sensitivity</a></li>
+            <li
+            ><a href="#dynamic_android.sensor.timestamp">android.sensor.timestamp</a></li>
+            <li
+            ><a href="#dynamic_android.sensor.temperature">android.sensor.temperature</a></li>
+            <li
+            ><a href="#dynamic_android.sensor.neutralColorPoint">android.sensor.neutralColorPoint</a></li>
+            <li
+            ><a href="#dynamic_android.sensor.noiseProfile">android.sensor.noiseProfile</a></li>
+            <li
+            ><a href="#dynamic_android.sensor.profileHueSatMap">android.sensor.profileHueSatMap</a></li>
+            <li
+            ><a href="#dynamic_android.sensor.profileToneCurve">android.sensor.profileToneCurve</a></li>
+            <li
+            ><a href="#dynamic_android.sensor.greenSplit">android.sensor.greenSplit</a></li>
+            <li
+            ><a href="#dynamic_android.sensor.testPatternData">android.sensor.testPatternData</a></li>
+            <li
+            ><a href="#dynamic_android.sensor.testPatternMode">android.sensor.testPatternMode</a></li>
+            <li
+            ><a href="#dynamic_android.sensor.rollingShutterSkew">android.sensor.rollingShutterSkew</a></li>
+          </ul>
+        </li>
+      </ul> <!-- toc_section -->
+    </li>
+    <li>
+      <span class="toc_section_header"><a href="#section_shading">shading</a></span>
+      <ul class="toc_section">
+        <li>
+          <span class="toc_kind_header">controls</span>
+          <ul class="toc_section">
+            <li
+            ><a href="#controls_android.shading.mode">android.shading.mode</a></li>
+            <li
+            ><a href="#controls_android.shading.strength">android.shading.strength</a></li>
+          </ul>
+        </li>
+        <li>
+          <span class="toc_kind_header">dynamic</span>
+          <ul class="toc_section">
+            <li
+            ><a href="#dynamic_android.shading.mode">android.shading.mode</a></li>
+          </ul>
+        </li>
+        <li>
+          <span class="toc_kind_header">static</span>
+          <ul class="toc_section">
+            <li
+            ><a href="#static_android.shading.availableModes">android.shading.availableModes</a></li>
+          </ul>
+        </li>
+      </ul> <!-- toc_section -->
+    </li>
+    <li>
+      <span class="toc_section_header"><a href="#section_statistics">statistics</a></span>
+      <ul class="toc_section">
+        <li>
+          <span class="toc_kind_header">controls</span>
+          <ul class="toc_section">
+            <li
+            ><a href="#controls_android.statistics.faceDetectMode">android.statistics.faceDetectMode</a></li>
+            <li
+            ><a href="#controls_android.statistics.histogramMode">android.statistics.histogramMode</a></li>
+            <li
+            ><a href="#controls_android.statistics.sharpnessMapMode">android.statistics.sharpnessMapMode</a></li>
+            <li
+            ><a href="#controls_android.statistics.hotPixelMapMode">android.statistics.hotPixelMapMode</a></li>
+            <li
+            ><a href="#controls_android.statistics.lensShadingMapMode">android.statistics.lensShadingMapMode</a></li>
+          </ul>
+        </li>
+        <li>
+          <span class="toc_kind_header">static</span>
+          <ul class="toc_section">
+
+            <li
+            ><a href="#static_android.statistics.info.availableFaceDetectModes">android.statistics.info.availableFaceDetectModes</a></li>
+            <li
+            ><a href="#static_android.statistics.info.histogramBucketCount">android.statistics.info.histogramBucketCount</a></li>
+            <li
+            ><a href="#static_android.statistics.info.maxFaceCount">android.statistics.info.maxFaceCount</a></li>
+            <li
+            ><a href="#static_android.statistics.info.maxHistogramCount">android.statistics.info.maxHistogramCount</a></li>
+            <li
+            ><a href="#static_android.statistics.info.maxSharpnessMapValue">android.statistics.info.maxSharpnessMapValue</a></li>
+            <li
+            ><a href="#static_android.statistics.info.sharpnessMapSize">android.statistics.info.sharpnessMapSize</a></li>
+            <li
+            ><a href="#static_android.statistics.info.availableHotPixelMapModes">android.statistics.info.availableHotPixelMapModes</a></li>
+            <li
+            ><a href="#static_android.statistics.info.availableLensShadingMapModes">android.statistics.info.availableLensShadingMapModes</a></li>
+
+          </ul>
+        </li>
+        <li>
+          <span class="toc_kind_header">dynamic</span>
+          <ul class="toc_section">
+            <li
+            ><a href="#dynamic_android.statistics.faceDetectMode">android.statistics.faceDetectMode</a></li>
+            <li
+            ><a href="#dynamic_android.statistics.faceIds">android.statistics.faceIds</a></li>
+            <li
+            ><a href="#dynamic_android.statistics.faceLandmarks">android.statistics.faceLandmarks</a></li>
+            <li
+            ><a href="#dynamic_android.statistics.faceRectangles">android.statistics.faceRectangles</a></li>
+            <li
+            ><a href="#dynamic_android.statistics.faceScores">android.statistics.faceScores</a></li>
+            <li
+            ><a href="#dynamic_android.statistics.faces">android.statistics.faces</a></li>
+            <li
+            ><a href="#dynamic_android.statistics.histogram">android.statistics.histogram</a></li>
+            <li
+            ><a href="#dynamic_android.statistics.histogramMode">android.statistics.histogramMode</a></li>
+            <li
+            ><a href="#dynamic_android.statistics.sharpnessMap">android.statistics.sharpnessMap</a></li>
+            <li
+            ><a href="#dynamic_android.statistics.sharpnessMapMode">android.statistics.sharpnessMapMode</a></li>
+            <li
+            ><a href="#dynamic_android.statistics.lensShadingCorrectionMap">android.statistics.lensShadingCorrectionMap</a></li>
+            <li
+            ><a href="#dynamic_android.statistics.lensShadingMap">android.statistics.lensShadingMap</a></li>
+            <li
+                class="toc_deprecated"
+            ><a href="#dynamic_android.statistics.predictedColorGains">android.statistics.predictedColorGains</a></li>
+            <li
+                class="toc_deprecated"
+            ><a href="#dynamic_android.statistics.predictedColorTransform">android.statistics.predictedColorTransform</a></li>
+            <li
+            ><a href="#dynamic_android.statistics.sceneFlicker">android.statistics.sceneFlicker</a></li>
+            <li
+            ><a href="#dynamic_android.statistics.hotPixelMapMode">android.statistics.hotPixelMapMode</a></li>
+            <li
+            ><a href="#dynamic_android.statistics.hotPixelMap">android.statistics.hotPixelMap</a></li>
+            <li
+            ><a href="#dynamic_android.statistics.lensShadingMapMode">android.statistics.lensShadingMapMode</a></li>
+          </ul>
+        </li>
+      </ul> <!-- toc_section -->
+    </li>
+    <li>
+      <span class="toc_section_header"><a href="#section_tonemap">tonemap</a></span>
+      <ul class="toc_section">
+        <li>
+          <span class="toc_kind_header">controls</span>
+          <ul class="toc_section">
+            <li
+            ><a href="#controls_android.tonemap.curveBlue">android.tonemap.curveBlue</a></li>
+            <li
+            ><a href="#controls_android.tonemap.curveGreen">android.tonemap.curveGreen</a></li>
+            <li
+            ><a href="#controls_android.tonemap.curveRed">android.tonemap.curveRed</a></li>
+            <li
+            ><a href="#controls_android.tonemap.curve">android.tonemap.curve</a></li>
+            <li
+            ><a href="#controls_android.tonemap.mode">android.tonemap.mode</a></li>
+            <li
+            ><a href="#controls_android.tonemap.gamma">android.tonemap.gamma</a></li>
+            <li
+            ><a href="#controls_android.tonemap.presetCurve">android.tonemap.presetCurve</a></li>
+          </ul>
+        </li>
+        <li>
+          <span class="toc_kind_header">static</span>
+          <ul class="toc_section">
+            <li
+            ><a href="#static_android.tonemap.maxCurvePoints">android.tonemap.maxCurvePoints</a></li>
+            <li
+            ><a href="#static_android.tonemap.availableToneMapModes">android.tonemap.availableToneMapModes</a></li>
+          </ul>
+        </li>
+        <li>
+          <span class="toc_kind_header">dynamic</span>
+          <ul class="toc_section">
+            <li
+            ><a href="#dynamic_android.tonemap.curveBlue">android.tonemap.curveBlue</a></li>
+            <li
+            ><a href="#dynamic_android.tonemap.curveGreen">android.tonemap.curveGreen</a></li>
+            <li
+            ><a href="#dynamic_android.tonemap.curveRed">android.tonemap.curveRed</a></li>
+            <li
+            ><a href="#dynamic_android.tonemap.curve">android.tonemap.curve</a></li>
+            <li
+            ><a href="#dynamic_android.tonemap.mode">android.tonemap.mode</a></li>
+            <li
+            ><a href="#dynamic_android.tonemap.gamma">android.tonemap.gamma</a></li>
+            <li
+            ><a href="#dynamic_android.tonemap.presetCurve">android.tonemap.presetCurve</a></li>
+          </ul>
+        </li>
+      </ul> <!-- toc_section -->
+    </li>
+    <li>
+      <span class="toc_section_header"><a href="#section_led">led</a></span>
+      <ul class="toc_section">
+        <li>
+          <span class="toc_kind_header">controls</span>
+          <ul class="toc_section">
+            <li
+            ><a href="#controls_android.led.transmit">android.led.transmit</a></li>
+          </ul>
+        </li>
+        <li>
+          <span class="toc_kind_header">dynamic</span>
+          <ul class="toc_section">
+            <li
+            ><a href="#dynamic_android.led.transmit">android.led.transmit</a></li>
+          </ul>
+        </li>
+        <li>
+          <span class="toc_kind_header">static</span>
+          <ul class="toc_section">
+            <li
+            ><a href="#static_android.led.availableLeds">android.led.availableLeds</a></li>
+          </ul>
+        </li>
+      </ul> <!-- toc_section -->
+    </li>
+    <li>
+      <span class="toc_section_header"><a href="#section_info">info</a></span>
+      <ul class="toc_section">
+        <li>
+          <span class="toc_kind_header">static</span>
+          <ul class="toc_section">
+            <li
+            ><a href="#static_android.info.supportedHardwareLevel">android.info.supportedHardwareLevel</a></li>
+          </ul>
+        </li>
+      </ul> <!-- toc_section -->
+    </li>
+    <li>
+      <span class="toc_section_header"><a href="#section_blackLevel">blackLevel</a></span>
+      <ul class="toc_section">
+        <li>
+          <span class="toc_kind_header">controls</span>
+          <ul class="toc_section">
+            <li
+            ><a href="#controls_android.blackLevel.lock">android.blackLevel.lock</a></li>
+          </ul>
+        </li>
+        <li>
+          <span class="toc_kind_header">dynamic</span>
+          <ul class="toc_section">
+            <li
+            ><a href="#dynamic_android.blackLevel.lock">android.blackLevel.lock</a></li>
+          </ul>
+        </li>
+      </ul> <!-- toc_section -->
+    </li>
+    <li>
+      <span class="toc_section_header"><a href="#section_sync">sync</a></span>
+      <ul class="toc_section">
+        <li>
+          <span class="toc_kind_header">dynamic</span>
+          <ul class="toc_section">
+            <li
+            ><a href="#dynamic_android.sync.frameNumber">android.sync.frameNumber</a></li>
+          </ul>
+        </li>
+        <li>
+          <span class="toc_kind_header">static</span>
+          <ul class="toc_section">
+            <li
+            ><a href="#static_android.sync.maxLatency">android.sync.maxLatency</a></li>
+          </ul>
+        </li>
+      </ul> <!-- toc_section -->
+    </li>
+    <li>
+      <span class="toc_section_header"><a href="#section_reprocess">reprocess</a></span>
+      <ul class="toc_section">
+        <li>
+          <span class="toc_kind_header">controls</span>
+          <ul class="toc_section">
+            <li
+            ><a href="#controls_android.reprocess.effectiveExposureFactor">android.reprocess.effectiveExposureFactor</a></li>
+          </ul>
+        </li>
+        <li>
+          <span class="toc_kind_header">dynamic</span>
+          <ul class="toc_section">
+            <li
+            ><a href="#dynamic_android.reprocess.effectiveExposureFactor">android.reprocess.effectiveExposureFactor</a></li>
+          </ul>
+        </li>
+        <li>
+          <span class="toc_kind_header">static</span>
+          <ul class="toc_section">
+            <li
+            ><a href="#static_android.reprocess.maxCaptureStall">android.reprocess.maxCaptureStall</a></li>
+          </ul>
+        </li>
+      </ul> <!-- toc_section -->
+    </li>
+    <li>
+      <span class="toc_section_header"><a href="#section_depth">depth</a></span>
+      <ul class="toc_section">
+        <li>
+          <span class="toc_kind_header">static</span>
+          <ul class="toc_section">
+            <li
+            ><a href="#static_android.depth.maxDepthSamples">android.depth.maxDepthSamples</a></li>
+            <li
+            ><a href="#static_android.depth.availableDepthStreamConfigurations">android.depth.availableDepthStreamConfigurations</a></li>
+            <li
+            ><a href="#static_android.depth.availableDepthMinFrameDurations">android.depth.availableDepthMinFrameDurations</a></li>
+            <li
+            ><a href="#static_android.depth.availableDepthStallDurations">android.depth.availableDepthStallDurations</a></li>
+            <li
+            ><a href="#static_android.depth.depthIsExclusive">android.depth.depthIsExclusive</a></li>
+          </ul>
+        </li>
+      </ul> <!-- toc_section -->
+    </li>
+  </ul>
+
+
+  <h1>Properties</h1>
+  <table class="properties">
+
+    <thead class="thead_dummy">
+      <tr>
+        <th class="th_name">Property Name</th>
+        <th class="th_type">Type</th>
+        <th class="th_description">Description</th>
+        <th class="th_units">Units</th>
+        <th class="th_range">Range</th>
+        <th class="th_tags">Tags</th>
+      </tr>
+    </thead> <!-- so that the first occurrence of thead is not
+                         above the first occurrence of tr -->
+<!-- <namespace name="android"> -->
+  <tr><td colspan="6" id="section_colorCorrection" class="section">colorCorrection</td></tr>
+
+
+      <tr><td colspan="6" class="kind">controls</td></tr>
+
+      <thead class="entries_header">
+        <tr>
+          <th class="th_name">Property Name</th>
+          <th class="th_type">Type</th>
+          <th class="th_description">Description</th>
+          <th class="th_units">Units</th>
+          <th class="th_range">Range</th>
+          <th class="th_tags">Tags</th>
+        </tr>
+      </thead>
+
+      <tbody>
+
+        
+
+        
+
+        
+
+        
+
+                
+          <tr class="entry" id="controls_android.colorCorrection.mode">
+            <td class="entry_name
+             " rowspan="5">
+              android.<wbr/>color<wbr/>Correction.<wbr/>mode
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">byte</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+              <span class="entry_type_hwlevel">[full] </span>
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">TRANSFORM_MATRIX</span>
+                    <span class="entry_type_enum_notes"><p>Use the <a href="#controls_android.colorCorrection.transform">android.<wbr/>color<wbr/>Correction.<wbr/>transform</a> matrix
+and <a href="#controls_android.colorCorrection.gains">android.<wbr/>color<wbr/>Correction.<wbr/>gains</a> to do color conversion.<wbr/></p>
+<p>All advanced white balance adjustments (not specified
+by our white balance pipeline) must be disabled.<wbr/></p>
+<p>If AWB is enabled with <code><a href="#controls_android.control.awbMode">android.<wbr/>control.<wbr/>awb<wbr/>Mode</a> != OFF</code>,<wbr/> then
+TRANSFORM_<wbr/>MATRIX is ignored.<wbr/> The camera device will override
+this value to either FAST or HIGH_<wbr/>QUALITY.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">FAST</span>
+                    <span class="entry_type_enum_notes"><p>Color correction processing must not slow down
+capture rate relative to sensor raw output.<wbr/></p>
+<p>Advanced white balance adjustments above and beyond
+the specified white balance pipeline may be applied.<wbr/></p>
+<p>If AWB is enabled with <code><a href="#controls_android.control.awbMode">android.<wbr/>control.<wbr/>awb<wbr/>Mode</a> != OFF</code>,<wbr/> then
+the camera device uses the last frame's AWB values
+(or defaults if AWB has never been run).<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">HIGH_QUALITY</span>
+                    <span class="entry_type_enum_notes"><p>Color correction processing operates at improved
+quality but the capture rate might be reduced (relative to sensor
+raw output rate).<wbr/></p>
+<p>Advanced white balance adjustments above and beyond
+the specified white balance pipeline may be applied.<wbr/></p>
+<p>If AWB is enabled with <code><a href="#controls_android.control.awbMode">android.<wbr/>control.<wbr/>awb<wbr/>Mode</a> != OFF</code>,<wbr/> then
+the camera device uses the last frame's AWB values
+(or defaults if AWB has never been run).<wbr/></p></span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>The mode control selects how the image data is converted from the
+sensor's native color into linear sRGB color.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>When auto-white balance (AWB) is enabled with <a href="#controls_android.control.awbMode">android.<wbr/>control.<wbr/>awb<wbr/>Mode</a>,<wbr/> this
+control is overridden by the AWB routine.<wbr/> When AWB is disabled,<wbr/> the
+application controls how the color mapping is performed.<wbr/></p>
+<p>We define the expected processing pipeline below.<wbr/> For consistency
+across devices,<wbr/> this is always the case with TRANSFORM_<wbr/>MATRIX.<wbr/></p>
+<p>When either FAST or HIGH_<wbr/>QUALITY is used,<wbr/> the camera device may
+do additional processing but <a href="#controls_android.colorCorrection.gains">android.<wbr/>color<wbr/>Correction.<wbr/>gains</a> and
+<a href="#controls_android.colorCorrection.transform">android.<wbr/>color<wbr/>Correction.<wbr/>transform</a> will still be provided by the
+camera device (in the results) and be roughly correct.<wbr/></p>
+<p>Switching to TRANSFORM_<wbr/>MATRIX and using the data provided from
+FAST or HIGH_<wbr/>QUALITY will yield a picture with the same white point
+as what was produced by the camera device in the earlier frame.<wbr/></p>
+<p>The expected processing pipeline is as follows:</p>
+<p><img alt="White balance processing pipeline" src="images/camera2/metadata/android.colorCorrection.mode/processing_pipeline.png"/></p>
+<p>The white balance is encoded by two values,<wbr/> a 4-channel white-balance
+gain vector (applied in the Bayer domain),<wbr/> and a 3x3 color transform
+matrix (applied after demosaic).<wbr/></p>
+<p>The 4-channel white-balance gains are defined as:</p>
+<pre><code><a href="#controls_android.colorCorrection.gains">android.<wbr/>color<wbr/>Correction.<wbr/>gains</a> = [ R G_<wbr/>even G_<wbr/>odd B ]
+</code></pre>
+<p>where <code>G_<wbr/>even</code> is the gain for green pixels on even rows of the
+output,<wbr/> and <code>G_<wbr/>odd</code> is the gain for green pixels on the odd rows.<wbr/>
+These may be identical for a given camera device implementation; if
+the camera device does not support a separate gain for even/<wbr/>odd green
+channels,<wbr/> it will use the <code>G_<wbr/>even</code> value,<wbr/> and write <code>G_<wbr/>odd</code> equal to
+<code>G_<wbr/>even</code> in the output result metadata.<wbr/></p>
+<p>The matrices for color transforms are defined as a 9-entry vector:</p>
+<pre><code><a href="#controls_android.colorCorrection.transform">android.<wbr/>color<wbr/>Correction.<wbr/>transform</a> = [ I0 I1 I2 I3 I4 I5 I6 I7 I8 ]
+</code></pre>
+<p>which define a transform from input sensor colors,<wbr/> <code>P_<wbr/>in = [ r g b ]</code>,<wbr/>
+to output linear sRGB,<wbr/> <code>P_<wbr/>out = [ r' g' b' ]</code>,<wbr/></p>
+<p>with colors as follows:</p>
+<pre><code>r' = I0r + I1g + I2b
+g' = I3r + I4g + I5b
+b' = I6r + I7g + I8b
+</code></pre>
+<p>Both the input and output value ranges must match.<wbr/> Overflow/<wbr/>underflow
+values are clipped to fit within the range.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>HAL must support both FAST and HIGH_<wbr/>QUALITY if color correction control is available
+on the camera device,<wbr/> but the underlying implementation can be the same for both modes.<wbr/>
+That is,<wbr/> if the highest quality implementation on the camera device does not slow down
+capture rate,<wbr/> then FAST and HIGH_<wbr/>QUALITY should generate the same output.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="controls_android.colorCorrection.transform">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>color<wbr/>Correction.<wbr/>transform
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">rational</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  3 x 3
+                </span>
+              <span class="entry_type_visibility"> [public as colorSpaceTransform]</span>
+
+
+              <span class="entry_type_hwlevel">[full] </span>
+
+
+                <div class="entry_type_notes">3x3 rational matrix in row-major order</div>
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>A color transform matrix to use to transform
+from sensor RGB color space to output linear sRGB color space.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+              Unitless scale factors
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>This matrix is either set by the camera device when the request
+<a href="#controls_android.colorCorrection.mode">android.<wbr/>color<wbr/>Correction.<wbr/>mode</a> is not TRANSFORM_<wbr/>MATRIX,<wbr/> or
+directly by the application in the request when the
+<a href="#controls_android.colorCorrection.mode">android.<wbr/>color<wbr/>Correction.<wbr/>mode</a> is TRANSFORM_<wbr/>MATRIX.<wbr/></p>
+<p>In the latter case,<wbr/> the camera device may round the matrix to account
+for precision issues; the final rounded matrix should be reported back
+in this matrix result metadata.<wbr/> The transform should keep the magnitude
+of the output color values within <code>[0,<wbr/> 1.<wbr/>0]</code> (assuming input color
+values is within the normalized range <code>[0,<wbr/> 1.<wbr/>0]</code>),<wbr/> or clipping may occur.<wbr/></p>
+<p>The valid range of each matrix element varies on different devices,<wbr/> but
+values within [-1.<wbr/>5,<wbr/> 3.<wbr/>0] are guaranteed not to be clipped.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="controls_android.colorCorrection.gains">
+            <td class="entry_name
+             " rowspan="5">
+              android.<wbr/>color<wbr/>Correction.<wbr/>gains
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">float</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  4
+                </span>
+              <span class="entry_type_visibility"> [public as rggbChannelVector]</span>
+
+
+              <span class="entry_type_hwlevel">[full] </span>
+
+
+                <div class="entry_type_notes">A 1D array of floats for 4 color channel gains</div>
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Gains applying to Bayer raw color channels for
+white-balance.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+              Unitless gain factors
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>These per-channel gains are either set by the camera device
+when the request <a href="#controls_android.colorCorrection.mode">android.<wbr/>color<wbr/>Correction.<wbr/>mode</a> is not
+TRANSFORM_<wbr/>MATRIX,<wbr/> or directly by the application in the
+request when the <a href="#controls_android.colorCorrection.mode">android.<wbr/>color<wbr/>Correction.<wbr/>mode</a> is
+TRANSFORM_<wbr/>MATRIX.<wbr/></p>
+<p>The gains in the result metadata are the gains actually
+applied by the camera device to the current frame.<wbr/></p>
+<p>The valid range of gains varies on different devices,<wbr/> but gains
+between [1.<wbr/>0,<wbr/> 3.<wbr/>0] are guaranteed not to be clipped.<wbr/> Even if a given
+device allows gains below 1.<wbr/>0,<wbr/> this is usually not recommended because
+this can create color artifacts.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>The 4-channel white-balance gains are defined in
+the order of <code>[R G_<wbr/>even G_<wbr/>odd B]</code>,<wbr/> where <code>G_<wbr/>even</code> is the gain
+for green pixels on even rows of the output,<wbr/> and <code>G_<wbr/>odd</code>
+is the gain for green pixels on the odd rows.<wbr/></p>
+<p>If a HAL does not support a separate gain for even/<wbr/>odd green
+channels,<wbr/> it must use the <code>G_<wbr/>even</code> value,<wbr/> and write
+<code>G_<wbr/>odd</code> equal to <code>G_<wbr/>even</code> in the output result metadata.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="controls_android.colorCorrection.aberrationMode">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>color<wbr/>Correction.<wbr/>aberration<wbr/>Mode
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">byte</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">OFF</span>
+                    <span class="entry_type_enum_notes"><p>No aberration correction is applied.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">FAST</span>
+                    <span class="entry_type_enum_notes"><p>Aberration correction will not slow down capture rate
+relative to sensor raw output.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">HIGH_QUALITY</span>
+                    <span class="entry_type_enum_notes"><p>Aberration correction operates at improved quality but the capture rate might be
+reduced (relative to sensor raw output rate)</p></span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Mode of operation for the chromatic aberration correction algorithm.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p><a href="#static_android.colorCorrection.availableAberrationModes">android.<wbr/>color<wbr/>Correction.<wbr/>available<wbr/>Aberration<wbr/>Modes</a></p>
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>Chromatic (color) aberration is caused by the fact that different wavelengths of light
+cannot focus on the same point after exiting from the lens.<wbr/> This metadata defines
+the high level control of chromatic aberration correction algorithm,<wbr/> which aims to
+minimize the chromatic artifacts that may occur along the object boundaries in an
+image.<wbr/></p>
+<p>FAST/<wbr/>HIGH_<wbr/>QUALITY both mean that camera device determined aberration
+correction will be applied.<wbr/> HIGH_<wbr/>QUALITY mode indicates that the camera device will
+use the highest-quality aberration correction algorithms,<wbr/> even if it slows down
+capture rate.<wbr/> FAST means the camera device will not slow down capture rate when
+applying aberration correction.<wbr/></p>
+<p>LEGACY devices will always be in FAST mode.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+        
+
+      <!-- end of kind -->
+      </tbody>
+      <tr><td colspan="6" class="kind">dynamic</td></tr>
+
+      <thead class="entries_header">
+        <tr>
+          <th class="th_name">Property Name</th>
+          <th class="th_type">Type</th>
+          <th class="th_description">Description</th>
+          <th class="th_units">Units</th>
+          <th class="th_range">Range</th>
+          <th class="th_tags">Tags</th>
+        </tr>
+      </thead>
+
+      <tbody>
+
+        
+
+        
+
+        
+
+        
+
+                
+          <tr class="entry" id="dynamic_android.colorCorrection.mode">
+            <td class="entry_name
+             " rowspan="5">
+              android.<wbr/>color<wbr/>Correction.<wbr/>mode
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">byte</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+              <span class="entry_type_hwlevel">[full] </span>
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">TRANSFORM_MATRIX</span>
+                    <span class="entry_type_enum_notes"><p>Use the <a href="#controls_android.colorCorrection.transform">android.<wbr/>color<wbr/>Correction.<wbr/>transform</a> matrix
+and <a href="#controls_android.colorCorrection.gains">android.<wbr/>color<wbr/>Correction.<wbr/>gains</a> to do color conversion.<wbr/></p>
+<p>All advanced white balance adjustments (not specified
+by our white balance pipeline) must be disabled.<wbr/></p>
+<p>If AWB is enabled with <code><a href="#controls_android.control.awbMode">android.<wbr/>control.<wbr/>awb<wbr/>Mode</a> != OFF</code>,<wbr/> then
+TRANSFORM_<wbr/>MATRIX is ignored.<wbr/> The camera device will override
+this value to either FAST or HIGH_<wbr/>QUALITY.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">FAST</span>
+                    <span class="entry_type_enum_notes"><p>Color correction processing must not slow down
+capture rate relative to sensor raw output.<wbr/></p>
+<p>Advanced white balance adjustments above and beyond
+the specified white balance pipeline may be applied.<wbr/></p>
+<p>If AWB is enabled with <code><a href="#controls_android.control.awbMode">android.<wbr/>control.<wbr/>awb<wbr/>Mode</a> != OFF</code>,<wbr/> then
+the camera device uses the last frame's AWB values
+(or defaults if AWB has never been run).<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">HIGH_QUALITY</span>
+                    <span class="entry_type_enum_notes"><p>Color correction processing operates at improved
+quality but the capture rate might be reduced (relative to sensor
+raw output rate)</p>
+<p>Advanced white balance adjustments above and beyond
+the specified white balance pipeline may be applied.<wbr/></p>
+<p>If AWB is enabled with <code><a href="#controls_android.control.awbMode">android.<wbr/>control.<wbr/>awb<wbr/>Mode</a> != OFF</code>,<wbr/> then
+the camera device uses the last frame's AWB values
+(or defaults if AWB has never been run).<wbr/></p></span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>The mode control selects how the image data is converted from the
+sensor's native color into linear sRGB color.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>When auto-white balance (AWB) is enabled with <a href="#controls_android.control.awbMode">android.<wbr/>control.<wbr/>awb<wbr/>Mode</a>,<wbr/> this
+control is overridden by the AWB routine.<wbr/> When AWB is disabled,<wbr/> the
+application controls how the color mapping is performed.<wbr/></p>
+<p>We define the expected processing pipeline below.<wbr/> For consistency
+across devices,<wbr/> this is always the case with TRANSFORM_<wbr/>MATRIX.<wbr/></p>
+<p>When either FAST or HIGH_<wbr/>QUALITY is used,<wbr/> the camera device may
+do additional processing but <a href="#controls_android.colorCorrection.gains">android.<wbr/>color<wbr/>Correction.<wbr/>gains</a> and
+<a href="#controls_android.colorCorrection.transform">android.<wbr/>color<wbr/>Correction.<wbr/>transform</a> will still be provided by the
+camera device (in the results) and be roughly correct.<wbr/></p>
+<p>Switching to TRANSFORM_<wbr/>MATRIX and using the data provided from
+FAST or HIGH_<wbr/>QUALITY will yield a picture with the same white point
+as what was produced by the camera device in the earlier frame.<wbr/></p>
+<p>The expected processing pipeline is as follows:</p>
+<p><img alt="White balance processing pipeline" src="images/camera2/metadata/android.colorCorrection.mode/processing_pipeline.png"/></p>
+<p>The white balance is encoded by two values,<wbr/> a 4-channel white-balance
+gain vector (applied in the Bayer domain),<wbr/> and a 3x3 color transform
+matrix (applied after demosaic).<wbr/></p>
+<p>The 4-channel white-balance gains are defined as:</p>
+<pre><code><a href="#controls_android.colorCorrection.gains">android.<wbr/>color<wbr/>Correction.<wbr/>gains</a> = [ R G_<wbr/>even G_<wbr/>odd B ]
+</code></pre>
+<p>where <code>G_<wbr/>even</code> is the gain for green pixels on even rows of the
+output,<wbr/> and <code>G_<wbr/>odd</code> is the gain for green pixels on the odd rows.<wbr/>
+These may be identical for a given camera device implementation; if
+the camera device does not support a separate gain for even/<wbr/>odd green
+channels,<wbr/> it will use the <code>G_<wbr/>even</code> value,<wbr/> and write <code>G_<wbr/>odd</code> equal to
+<code>G_<wbr/>even</code> in the output result metadata.<wbr/></p>
+<p>The matrices for color transforms are defined as a 9-entry vector:</p>
+<pre><code><a href="#controls_android.colorCorrection.transform">android.<wbr/>color<wbr/>Correction.<wbr/>transform</a> = [ I0 I1 I2 I3 I4 I5 I6 I7 I8 ]
+</code></pre>
+<p>which define a transform from input sensor colors,<wbr/> <code>P_<wbr/>in = [ r g b ]</code>,<wbr/>
+to output linear sRGB,<wbr/> <code>P_<wbr/>out = [ r' g' b' ]</code>,<wbr/></p>
+<p>with colors as follows:</p>
+<pre><code>r' = I0r + I1g + I2b
+g' = I3r + I4g + I5b
+b' = I6r + I7g + I8b
+</code></pre>
+<p>Both the input and output value ranges must match.<wbr/> Overflow/<wbr/>underflow
+values are clipped to fit within the range.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>HAL must support both FAST and HIGH_<wbr/>QUALITY if color correction control is available
+on the camera device,<wbr/> but the underlying implementation can be the same for both modes.<wbr/>
+That is,<wbr/> if the highest quality implementation on the camera device does not slow down
+capture rate,<wbr/> then FAST and HIGH_<wbr/>QUALITY should generate the same output.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="dynamic_android.colorCorrection.transform">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>color<wbr/>Correction.<wbr/>transform
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">rational</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  3 x 3
+                </span>
+              <span class="entry_type_visibility"> [public as colorSpaceTransform]</span>
+
+
+              <span class="entry_type_hwlevel">[full] </span>
+
+
+                <div class="entry_type_notes">3x3 rational matrix in row-major order</div>
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>A color transform matrix to use to transform
+from sensor RGB color space to output linear sRGB color space.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+              Unitless scale factors
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>This matrix is either set by the camera device when the request
+<a href="#controls_android.colorCorrection.mode">android.<wbr/>color<wbr/>Correction.<wbr/>mode</a> is not TRANSFORM_<wbr/>MATRIX,<wbr/> or
+directly by the application in the request when the
+<a href="#controls_android.colorCorrection.mode">android.<wbr/>color<wbr/>Correction.<wbr/>mode</a> is TRANSFORM_<wbr/>MATRIX.<wbr/></p>
+<p>In the latter case,<wbr/> the camera device may round the matrix to account
+for precision issues; the final rounded matrix should be reported back
+in this matrix result metadata.<wbr/> The transform should keep the magnitude
+of the output color values within <code>[0,<wbr/> 1.<wbr/>0]</code> (assuming input color
+values are within the normalized range <code>[0,<wbr/> 1.<wbr/>0]</code>),<wbr/> or clipping may occur.<wbr/></p>
+<p>The valid range of each matrix element varies on different devices,<wbr/> but
+values within [-1.<wbr/>5,<wbr/> 3.<wbr/>0] are guaranteed not to be clipped.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="dynamic_android.colorCorrection.gains">
+            <td class="entry_name
+             " rowspan="5">
+              android.<wbr/>color<wbr/>Correction.<wbr/>gains
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">float</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  4
+                </span>
+              <span class="entry_type_visibility"> [public as rggbChannelVector]</span>
+
+
+              <span class="entry_type_hwlevel">[full] </span>
+
+
+                <div class="entry_type_notes">A 1D array of floats for 4 color channel gains</div>
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Gains applying to Bayer raw color channels for
+white-balance.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+              Unitless gain factors
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>These per-channel gains are either set by the camera device
+when the request <a href="#controls_android.colorCorrection.mode">android.<wbr/>color<wbr/>Correction.<wbr/>mode</a> is not
+TRANSFORM_<wbr/>MATRIX,<wbr/> or directly by the application in the
+request when the <a href="#controls_android.colorCorrection.mode">android.<wbr/>color<wbr/>Correction.<wbr/>mode</a> is
+TRANSFORM_<wbr/>MATRIX.<wbr/></p>
+<p>The gains in the result metadata are the gains actually
+applied by the camera device to the current frame.<wbr/></p>
+<p>The valid range of gains varies on different devices,<wbr/> but gains
+between [1.<wbr/>0,<wbr/> 3.<wbr/>0] are guaranteed not to be clipped.<wbr/> Even if a given
+device allows gains below 1.<wbr/>0,<wbr/> this is usually not recommended because
+this can create color artifacts.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>The 4-channel white-balance gains are defined in
+the order of <code>[R G_<wbr/>even G_<wbr/>odd B]</code>,<wbr/> where <code>G_<wbr/>even</code> is the gain
+for green pixels on even rows of the output,<wbr/> and <code>G_<wbr/>odd</code>
+is the gain for green pixels on the odd rows.<wbr/></p>
+<p>If a HAL does not support a separate gain for even/<wbr/>odd green
+channels,<wbr/> it must use the <code>G_<wbr/>even</code> value,<wbr/> and write
+<code>G_<wbr/>odd</code> equal to <code>G_<wbr/>even</code> in the output result metadata.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="dynamic_android.colorCorrection.aberrationMode">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>color<wbr/>Correction.<wbr/>aberration<wbr/>Mode
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">byte</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">OFF</span>
+                    <span class="entry_type_enum_notes"><p>No aberration correction is applied.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">FAST</span>
+                    <span class="entry_type_enum_notes"><p>Aberration correction will not slow down capture rate
+relative to sensor raw output.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">HIGH_QUALITY</span>
+                    <span class="entry_type_enum_notes"><p>Aberration correction operates at improved quality but the capture rate might be
+reduced (relative to sensor raw output rate)</p></span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Mode of operation for the chromatic aberration correction algorithm.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p><a href="#static_android.colorCorrection.availableAberrationModes">android.<wbr/>color<wbr/>Correction.<wbr/>available<wbr/>Aberration<wbr/>Modes</a></p>
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>Chromatic (color) aberration is caused by the fact that different wavelengths of light
+cannot focus on the same point after exiting from the lens.<wbr/> This metadata defines
+the high level control of chromatic aberration correction algorithm,<wbr/> which aims to
+minimize the chromatic artifacts that may occur along the object boundaries in an
+image.<wbr/></p>
+<p>FAST/<wbr/>HIGH_<wbr/>QUALITY both mean that camera device determined aberration
+correction will be applied.<wbr/> HIGH_<wbr/>QUALITY mode indicates that the camera device will
+use the highest-quality aberration correction algorithms,<wbr/> even if it slows down
+capture rate.<wbr/> FAST means the camera device will not slow down capture rate when
+applying aberration correction.<wbr/></p>
+<p>LEGACY devices will always be in FAST mode.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+        
+
+      <!-- end of kind -->
+      </tbody>
+      <tr><td colspan="6" class="kind">static</td></tr>
+
+      <thead class="entries_header">
+        <tr>
+          <th class="th_name">Property Name</th>
+          <th class="th_type">Type</th>
+          <th class="th_description">Description</th>
+          <th class="th_units">Units</th>
+          <th class="th_range">Range</th>
+          <th class="th_tags">Tags</th>
+        </tr>
+      </thead>
+
+      <tbody>
+
+        
+
+        
+
+        
+
+        
+
+                
+          <tr class="entry" id="static_android.colorCorrection.availableAberrationModes">
+            <td class="entry_name
+             " rowspan="5">
+              android.<wbr/>color<wbr/>Correction.<wbr/>available<wbr/>Aberration<wbr/>Modes
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">byte</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  n
+                </span>
+              <span class="entry_type_visibility"> [public as enumList]</span>
+
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+                <div class="entry_type_notes">list of enums</div>
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>List of aberration correction modes for <a href="#controls_android.colorCorrection.aberrationMode">android.<wbr/>color<wbr/>Correction.<wbr/>aberration<wbr/>Mode</a> that are
+supported by this camera device.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p>Any value listed in <a href="#controls_android.colorCorrection.aberrationMode">android.<wbr/>color<wbr/>Correction.<wbr/>aberration<wbr/>Mode</a></p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_V1">V1</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>This key lists the valid modes for <a href="#controls_android.colorCorrection.aberrationMode">android.<wbr/>color<wbr/>Correction.<wbr/>aberration<wbr/>Mode</a>.<wbr/>  If no
+aberration correction modes are available for a device,<wbr/> this list will solely include
+OFF mode.<wbr/> All camera devices will support either OFF or FAST mode.<wbr/></p>
+<p>Camera devices that support the MANUAL_<wbr/>POST_<wbr/>PROCESSING capability will always list
+OFF mode.<wbr/> This includes all FULL level devices.<wbr/></p>
+<p>LEGACY devices will always only support FAST mode.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>HAL must support both FAST and HIGH_<wbr/>QUALITY if chromatic aberration control is available
+on the camera device,<wbr/> but the underlying implementation can be the same for both modes.<wbr/>
+That is,<wbr/> if the highest quality implementation on the camera device does not slow down
+capture rate,<wbr/> then FAST and HIGH_<wbr/>QUALITY will generate the same output.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+        
+
+      <!-- end of kind -->
+      </tbody>
+
+  <!-- end of section -->
+  <tr><td colspan="6" id="section_control" class="section">control</td></tr>
+
+
+      <tr><td colspan="6" class="kind">controls</td></tr>
+
+      <thead class="entries_header">
+        <tr>
+          <th class="th_name">Property Name</th>
+          <th class="th_type">Type</th>
+          <th class="th_description">Description</th>
+          <th class="th_units">Units</th>
+          <th class="th_range">Range</th>
+          <th class="th_tags">Tags</th>
+        </tr>
+      </thead>
+
+      <tbody>
+
+        
+
+        
+
+        
+
+        
+
+                
+          <tr class="entry" id="controls_android.control.aeAntibandingMode">
+            <td class="entry_name
+             " rowspan="5">
+              android.<wbr/>control.<wbr/>ae<wbr/>Antibanding<wbr/>Mode
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">byte</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">OFF</span>
+                    <span class="entry_type_enum_notes"><p>The camera device will not adjust exposure duration to
+avoid banding problems.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">50HZ</span>
+                    <span class="entry_type_enum_notes"><p>The camera device will adjust exposure duration to
+avoid banding problems with 50Hz illumination sources.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">60HZ</span>
+                    <span class="entry_type_enum_notes"><p>The camera device will adjust exposure duration to
+avoid banding problems with 60Hz illumination
+sources.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">AUTO</span>
+                    <span class="entry_type_enum_notes"><p>The camera device will automatically adapt its
+antibanding routine to the current illumination
+condition.<wbr/> This is the default mode if AUTO is
+available on given camera device.<wbr/></p></span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>The desired setting for the camera device's auto-exposure
+algorithm's antibanding compensation.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p><a href="#static_android.control.aeAvailableAntibandingModes">android.<wbr/>control.<wbr/>ae<wbr/>Available<wbr/>Antibanding<wbr/>Modes</a></p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_BC">BC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>Some kinds of lighting fixtures,<wbr/> such as some fluorescent
+lights,<wbr/> flicker at the rate of the power supply frequency
+(60Hz or 50Hz,<wbr/> depending on country).<wbr/> While this is
+typically not noticeable to a person,<wbr/> it can be visible to
+a camera device.<wbr/> If a camera sets its exposure time to the
+wrong value,<wbr/> the flicker may become visible in the
+viewfinder as flicker or in a final captured image,<wbr/> as a
+set of variable-brightness bands across the image.<wbr/></p>
+<p>Therefore,<wbr/> the auto-exposure routines of camera devices
+include antibanding routines that ensure that the chosen
+exposure value will not cause such banding.<wbr/> The choice of
+exposure time depends on the rate of flicker,<wbr/> which the
+camera device can detect automatically,<wbr/> or the expected
+rate can be selected by the application using this
+control.<wbr/></p>
+<p>A given camera device may not support all of the possible
+options for the antibanding mode.<wbr/> The
+<a href="#static_android.control.aeAvailableAntibandingModes">android.<wbr/>control.<wbr/>ae<wbr/>Available<wbr/>Antibanding<wbr/>Modes</a> key contains
+the available modes for a given camera device.<wbr/></p>
+<p>AUTO mode is the default if it is available on given
+camera device.<wbr/> When AUTO mode is not available,<wbr/> the
+default will be either 50HZ or 60HZ,<wbr/> and both 50HZ
+and 60HZ will be available.<wbr/></p>
+<p>If manual exposure control is enabled (by setting
+<a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a> or <a href="#controls_android.control.mode">android.<wbr/>control.<wbr/>mode</a> to OFF),<wbr/>
+then this setting has no effect,<wbr/> and the application must
+ensure it selects exposure times that do not cause banding
+issues.<wbr/> The <a href="#dynamic_android.statistics.sceneFlicker">android.<wbr/>statistics.<wbr/>scene<wbr/>Flicker</a> key can assist
+the application in this.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>For all capture request templates,<wbr/> this field must be set
+to AUTO if AUTO mode is available.<wbr/> If AUTO is not available,<wbr/>
+the default must be either 50HZ or 60HZ,<wbr/> and both 50HZ and
+60HZ must be available.<wbr/></p>
+<p>If manual exposure control is enabled (by setting
+<a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a> or <a href="#controls_android.control.mode">android.<wbr/>control.<wbr/>mode</a> to OFF),<wbr/>
+then the exposure values provided by the application must not be
+adjusted for antibanding.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="controls_android.control.aeExposureCompensation">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>control.<wbr/>ae<wbr/>Exposure<wbr/>Compensation
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int32</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Adjustment to auto-exposure (AE) target image
+brightness.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+              Compensation steps
+            </td>
+
+            <td class="entry_range">
+              <p><a href="#static_android.control.aeCompensationRange">android.<wbr/>control.<wbr/>ae<wbr/>Compensation<wbr/>Range</a></p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_BC">BC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>The adjustment is measured as a count of steps,<wbr/> with the
+step size defined by <a href="#static_android.control.aeCompensationStep">android.<wbr/>control.<wbr/>ae<wbr/>Compensation<wbr/>Step</a> and the
+allowed range by <a href="#static_android.control.aeCompensationRange">android.<wbr/>control.<wbr/>ae<wbr/>Compensation<wbr/>Range</a>.<wbr/></p>
+<p>For example,<wbr/> if the exposure value (EV) step is 0.<wbr/>333,<wbr/> '6'
+will mean an exposure compensation of +2 EV; -3 will mean an
+exposure compensation of -1 EV.<wbr/> One EV represents a doubling
+of image brightness.<wbr/> Note that this control will only be
+effective if <a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a> <code>!=</code> OFF.<wbr/> This control
+will take effect even when <a href="#controls_android.control.aeLock">android.<wbr/>control.<wbr/>ae<wbr/>Lock</a> <code>== true</code>.<wbr/></p>
+<p>In the event of exposure compensation value being changed,<wbr/> camera device
+may take several frames to reach the newly requested exposure target.<wbr/>
+During that time,<wbr/> <a href="#dynamic_android.control.aeState">android.<wbr/>control.<wbr/>ae<wbr/>State</a> field will be in the SEARCHING
+state.<wbr/> Once the new exposure target is reached,<wbr/> <a href="#dynamic_android.control.aeState">android.<wbr/>control.<wbr/>ae<wbr/>State</a> will
+change from SEARCHING to either CONVERGED,<wbr/> LOCKED (if AE lock is enabled),<wbr/> or
+FLASH_<wbr/>REQUIRED (if the scene is too dark for still capture).<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="controls_android.control.aeLock">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>control.<wbr/>ae<wbr/>Lock
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">byte</span>
+
+              <span class="entry_type_visibility"> [public as boolean]</span>
+
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">OFF</span>
+                    <span class="entry_type_enum_notes"><p>Auto-exposure lock is disabled; the AE algorithm
+is free to update its parameters.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">ON</span>
+                    <span class="entry_type_enum_notes"><p>Auto-exposure lock is enabled; the AE algorithm
+must not update the exposure and sensitivity parameters
+while the lock is active.<wbr/></p>
+<p><a href="#controls_android.control.aeExposureCompensation">android.<wbr/>control.<wbr/>ae<wbr/>Exposure<wbr/>Compensation</a> setting changes
+will still take effect while auto-exposure is locked.<wbr/></p>
+<p>Some rare LEGACY devices may not support
+this,<wbr/> in which case the value will always be overridden to OFF.<wbr/></p></span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Whether auto-exposure (AE) is currently locked to its latest
+calculated values.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_BC">BC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>When set to <code>true</code> (ON),<wbr/> the AE algorithm is locked to its latest parameters,<wbr/>
+and will not change exposure settings until the lock is set to <code>false</code> (OFF).<wbr/></p>
+<p>Note that even when AE is locked,<wbr/> the flash may be fired if
+the <a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a> is ON_<wbr/>AUTO_<wbr/>FLASH /<wbr/>
+ON_<wbr/>ALWAYS_<wbr/>FLASH /<wbr/> ON_<wbr/>AUTO_<wbr/>FLASH_<wbr/>REDEYE.<wbr/></p>
+<p>When <a href="#controls_android.control.aeExposureCompensation">android.<wbr/>control.<wbr/>ae<wbr/>Exposure<wbr/>Compensation</a> is changed,<wbr/> even if the AE lock
+is ON,<wbr/> the camera device will still adjust its exposure value.<wbr/></p>
+<p>If AE precapture is triggered (see <a href="#controls_android.control.aePrecaptureTrigger">android.<wbr/>control.<wbr/>ae<wbr/>Precapture<wbr/>Trigger</a>)
+when AE is already locked,<wbr/> the camera device will not change the exposure time
+(<a href="#controls_android.sensor.exposureTime">android.<wbr/>sensor.<wbr/>exposure<wbr/>Time</a>) and sensitivity (<a href="#controls_android.sensor.sensitivity">android.<wbr/>sensor.<wbr/>sensitivity</a>)
+parameters.<wbr/> The flash may be fired if the <a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a>
+is ON_<wbr/>AUTO_<wbr/>FLASH/<wbr/>ON_<wbr/>AUTO_<wbr/>FLASH_<wbr/>REDEYE and the scene is too dark.<wbr/> If the
+<a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a> is ON_<wbr/>ALWAYS_<wbr/>FLASH,<wbr/> the scene may become overexposed.<wbr/>
+Similarly,<wbr/> AE precapture trigger CANCEL has no effect when AE is already locked.<wbr/></p>
+<p>When an AE precapture sequence is triggered,<wbr/> AE unlock will not be able to unlock
+the AE if AE is locked by the camera device internally during precapture metering
+sequence.<wbr/> In other words,<wbr/> submitting requests with AE unlock has no effect for an
+ongoing precapture metering sequence.<wbr/> Otherwise,<wbr/> the precapture metering sequence
+will never succeed in a sequence of preview requests where AE lock is always set
+to <code>false</code>.<wbr/></p>
+<p>Since the camera device has a pipeline of in-flight requests,<wbr/> the settings that
+get locked do not necessarily correspond to the settings that were present in the
+latest capture result received from the camera device,<wbr/> since additional captures
+and AE updates may have occurred even before the result was sent out.<wbr/> If an
+application is switching between automatic and manual control and wishes to eliminate
+any flicker during the switch,<wbr/> the following procedure is recommended:</p>
+<ol>
+<li>Starting in auto-AE mode:</li>
+<li>Lock AE</li>
+<li>Wait for the first result to be output that has the AE locked</li>
+<li>Copy exposure settings from that result into a request,<wbr/> set the request to manual AE</li>
+<li>Submit the capture request,<wbr/> proceed to run manual AE as desired.<wbr/></li>
+</ol>
+<p>See <a href="#dynamic_android.control.aeState">android.<wbr/>control.<wbr/>ae<wbr/>State</a> for AE lock related state transition details.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="controls_android.control.aeMode">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>control.<wbr/>ae<wbr/>Mode
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">byte</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">OFF</span>
+                    <span class="entry_type_enum_notes"><p>The camera device's autoexposure routine is disabled.<wbr/></p>
+<p>The application-selected <a href="#controls_android.sensor.exposureTime">android.<wbr/>sensor.<wbr/>exposure<wbr/>Time</a>,<wbr/>
+<a href="#controls_android.sensor.sensitivity">android.<wbr/>sensor.<wbr/>sensitivity</a> and
+<a href="#controls_android.sensor.frameDuration">android.<wbr/>sensor.<wbr/>frame<wbr/>Duration</a> are used by the camera
+device,<wbr/> along with android.<wbr/>flash.<wbr/>* fields,<wbr/> if there's
+a flash unit for this camera device.<wbr/></p>
+<p>Note that auto-white balance (AWB) and auto-focus (AF)
+behavior is device dependent when AE is in OFF mode.<wbr/>
+To have consistent behavior across different devices,<wbr/>
+it is recommended to either set AWB and AF to OFF mode
+or lock AWB and AF before setting AE to OFF.<wbr/>
+See <a href="#controls_android.control.awbMode">android.<wbr/>control.<wbr/>awb<wbr/>Mode</a>,<wbr/> <a href="#controls_android.control.afMode">android.<wbr/>control.<wbr/>af<wbr/>Mode</a>,<wbr/>
+<a href="#controls_android.control.awbLock">android.<wbr/>control.<wbr/>awb<wbr/>Lock</a>,<wbr/> and <a href="#controls_android.control.afTrigger">android.<wbr/>control.<wbr/>af<wbr/>Trigger</a>
+for more details.<wbr/></p>
+<p>LEGACY devices do not support the OFF mode and will
+override attempts to use this value to ON.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">ON</span>
+                    <span class="entry_type_enum_notes"><p>The camera device's autoexposure routine is active,<wbr/>
+with no flash control.<wbr/></p>
+<p>The application's values for
+<a href="#controls_android.sensor.exposureTime">android.<wbr/>sensor.<wbr/>exposure<wbr/>Time</a>,<wbr/>
+<a href="#controls_android.sensor.sensitivity">android.<wbr/>sensor.<wbr/>sensitivity</a>,<wbr/> and
+<a href="#controls_android.sensor.frameDuration">android.<wbr/>sensor.<wbr/>frame<wbr/>Duration</a> are ignored.<wbr/> The
+application has control over the various
+android.<wbr/>flash.<wbr/>* fields.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">ON_AUTO_FLASH</span>
+                    <span class="entry_type_enum_notes"><p>Like ON,<wbr/> except that the camera device also controls
+the camera's flash unit,<wbr/> firing it in low-light
+conditions.<wbr/></p>
+<p>The flash may be fired during a precapture sequence
+(triggered by <a href="#controls_android.control.aePrecaptureTrigger">android.<wbr/>control.<wbr/>ae<wbr/>Precapture<wbr/>Trigger</a>) and
+may be fired for captures for which the
+<a href="#controls_android.control.captureIntent">android.<wbr/>control.<wbr/>capture<wbr/>Intent</a> field is set to
+STILL_<wbr/>CAPTURE.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">ON_ALWAYS_FLASH</span>
+                    <span class="entry_type_enum_notes"><p>Like ON,<wbr/> except that the camera device also controls
+the camera's flash unit,<wbr/> always firing it for still
+captures.<wbr/></p>
+<p>The flash may be fired during a precapture sequence
+(triggered by <a href="#controls_android.control.aePrecaptureTrigger">android.<wbr/>control.<wbr/>ae<wbr/>Precapture<wbr/>Trigger</a>) and
+will always be fired for captures for which the
+<a href="#controls_android.control.captureIntent">android.<wbr/>control.<wbr/>capture<wbr/>Intent</a> field is set to
+STILL_<wbr/>CAPTURE.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">ON_AUTO_FLASH_REDEYE</span>
+                    <span class="entry_type_enum_notes"><p>Like ON_<wbr/>AUTO_<wbr/>FLASH,<wbr/> but with automatic red eye
+reduction.<wbr/></p>
+<p>If deemed necessary by the camera device,<wbr/> a red eye
+reduction flash will fire during the precapture
+sequence.<wbr/></p></span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>The desired mode for the camera device's
+auto-exposure routine.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p><a href="#static_android.control.aeAvailableModes">android.<wbr/>control.<wbr/>ae<wbr/>Available<wbr/>Modes</a></p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_BC">BC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>This control is only effective if <a href="#controls_android.control.mode">android.<wbr/>control.<wbr/>mode</a> is
+AUTO.<wbr/></p>
+<p>When set to any of the ON modes,<wbr/> the camera device's
+auto-exposure routine is enabled,<wbr/> overriding the
+application's selected exposure time,<wbr/> sensor sensitivity,<wbr/>
+and frame duration (<a href="#controls_android.sensor.exposureTime">android.<wbr/>sensor.<wbr/>exposure<wbr/>Time</a>,<wbr/>
+<a href="#controls_android.sensor.sensitivity">android.<wbr/>sensor.<wbr/>sensitivity</a>,<wbr/> and
+<a href="#controls_android.sensor.frameDuration">android.<wbr/>sensor.<wbr/>frame<wbr/>Duration</a>).<wbr/> If one of the FLASH modes
+is selected,<wbr/> the camera device's flash unit controls are
+also overridden.<wbr/></p>
+<p>The FLASH modes are only available if the camera device
+has a flash unit (<a href="#static_android.flash.info.available">android.<wbr/>flash.<wbr/>info.<wbr/>available</a> is <code>true</code>).<wbr/></p>
+<p>If flash TORCH mode is desired,<wbr/> this field must be set to
+ON or OFF,<wbr/> and <a href="#controls_android.flash.mode">android.<wbr/>flash.<wbr/>mode</a> set to TORCH.<wbr/></p>
+<p>When set to any of the ON modes,<wbr/> the values chosen by the
+camera device auto-exposure routine for the overridden
+fields for a given capture will be available in its
+CaptureResult.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="controls_android.control.aeRegions">
+            <td class="entry_name
+             " rowspan="5">
+              android.<wbr/>control.<wbr/>ae<wbr/>Regions
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int32</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  5 x area_count
+                </span>
+              <span class="entry_type_visibility"> [public as meteringRectangle]</span>
+
+
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>List of metering areas to use for auto-exposure adjustment.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+              Pixel coordinates within android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size
+            </td>
+
+            <td class="entry_range">
+              <p>Coordinates must be between <code>[(0,<wbr/>0),<wbr/> (width,<wbr/> height))</code> of
+<a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a></p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_BC">BC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>Not available if <a href="#static_android.control.maxRegionsAe">android.<wbr/>control.<wbr/>max<wbr/>Regions<wbr/>Ae</a> is 0.<wbr/>
+Otherwise will always be present.<wbr/></p>
+<p>The maximum number of regions supported by the device is determined by the value
+of <a href="#static_android.control.maxRegionsAe">android.<wbr/>control.<wbr/>max<wbr/>Regions<wbr/>Ae</a>.<wbr/></p>
+<p>The coordinate system is based on the active pixel array,<wbr/>
+with (0,<wbr/>0) being the top-left pixel in the active pixel array,<wbr/> and
+(<a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a>.<wbr/>width - 1,<wbr/>
+<a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a>.<wbr/>height - 1) being the
+bottom-right pixel in the active pixel array.<wbr/></p>
+<p>The weight must be within <code>[0,<wbr/> 1000]</code>,<wbr/> and represents a weight
+for every pixel in the area.<wbr/> This means that a large metering area
+with the same weight as a smaller area will have more effect in
+the metering result.<wbr/> Metering areas can partially overlap and the
+camera device will add the weights in the overlap region.<wbr/></p>
+<p>The weights are relative to weights of other exposure metering regions,<wbr/> so if only one
+region is used,<wbr/> all non-zero weights will have the same effect.<wbr/> A region with 0
+weight is ignored.<wbr/></p>
+<p>If all regions have 0 weight,<wbr/> then no specific metering area needs to be used by the
+camera device.<wbr/></p>
+<p>If the metering region is outside the used <a href="#controls_android.scaler.cropRegion">android.<wbr/>scaler.<wbr/>crop<wbr/>Region</a> returned in
+capture result metadata,<wbr/> the camera device will ignore the sections outside the crop
+region and output only the intersection rectangle as the metering region in the result
+metadata.<wbr/>  If the region is entirely outside the crop region,<wbr/> it will be ignored and
+not reported in the result metadata.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>The HAL level representation of MeteringRectangle[] is a
+int[5 * area_<wbr/>count].<wbr/>
+Every five elements represent a metering region of
+(xmin,<wbr/> ymin,<wbr/> xmax,<wbr/> ymax,<wbr/> weight).<wbr/>
+The rectangle is defined to be inclusive on xmin and ymin,<wbr/> but
+exclusive on xmax and ymax.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="controls_android.control.aeTargetFpsRange">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>control.<wbr/>ae<wbr/>Target<wbr/>Fps<wbr/>Range
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int32</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  2
+                </span>
+              <span class="entry_type_visibility"> [public as rangeInt]</span>
+
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Range over which the auto-exposure routine can
+adjust the capture frame rate to maintain good
+exposure.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+              Frames per second (FPS)
+            </td>
+
+            <td class="entry_range">
+              <p>Any of the entries in <a href="#static_android.control.aeAvailableTargetFpsRanges">android.<wbr/>control.<wbr/>ae<wbr/>Available<wbr/>Target<wbr/>Fps<wbr/>Ranges</a></p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_BC">BC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>Only constrains auto-exposure (AE) algorithm,<wbr/> not
+manual control of <a href="#controls_android.sensor.exposureTime">android.<wbr/>sensor.<wbr/>exposure<wbr/>Time</a> and
+<a href="#controls_android.sensor.frameDuration">android.<wbr/>sensor.<wbr/>frame<wbr/>Duration</a>.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="controls_android.control.aePrecaptureTrigger">
+            <td class="entry_name
+             " rowspan="5">
+              android.<wbr/>control.<wbr/>ae<wbr/>Precapture<wbr/>Trigger
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">byte</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+              <span class="entry_type_hwlevel">[limited] </span>
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">IDLE</span>
+                    <span class="entry_type_enum_notes"><p>The trigger is idle.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">START</span>
+                    <span class="entry_type_enum_notes"><p>The precapture metering sequence will be started
+by the camera device.<wbr/></p>
+<p>The exact effect of the precapture trigger depends on
+the current AE mode and state.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">CANCEL</span>
+                    <span class="entry_type_enum_notes"><p>The camera device will cancel any currently active or completed
+precapture metering sequence,<wbr/> the auto-exposure routine will return to its
+initial state.<wbr/></p></span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Whether the camera device will trigger a precapture
+metering sequence when it processes this request.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_BC">BC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>This entry is normally set to IDLE,<wbr/> or is not
+included at all in the request settings.<wbr/> When included and
+set to START,<wbr/> the camera device will trigger the auto-exposure (AE)
+precapture metering sequence.<wbr/></p>
+<p>When set to CANCEL,<wbr/> the camera device will cancel any active
+precapture metering trigger,<wbr/> and return to its initial AE state.<wbr/>
+If a precapture metering sequence is already completed,<wbr/> and the camera
+device has implicitly locked the AE for subsequent still capture,<wbr/> the
+CANCEL trigger will unlock the AE and return to its initial AE state.<wbr/></p>
+<p>The precapture sequence should be triggered before starting a
+high-quality still capture for final metering decisions to
+be made,<wbr/> and for firing pre-capture flash pulses to estimate
+scene brightness and required final capture flash power,<wbr/> when
+the flash is enabled.<wbr/></p>
+<p>Normally,<wbr/> this entry should be set to START for only a
+single request,<wbr/> and the application should wait until the
+sequence completes before starting a new one.<wbr/></p>
+<p>When a precapture metering sequence is finished,<wbr/> the camera device
+may lock the auto-exposure routine internally to be able to accurately expose the
+subsequent still capture image (<code><a href="#controls_android.control.captureIntent">android.<wbr/>control.<wbr/>capture<wbr/>Intent</a> == STILL_<wbr/>CAPTURE</code>).<wbr/>
+For this case,<wbr/> the AE may not resume normal scan if no subsequent still capture is
+submitted.<wbr/> To ensure that the AE routine restarts normal scan,<wbr/> the application should
+submit a request with <code><a href="#controls_android.control.aeLock">android.<wbr/>control.<wbr/>ae<wbr/>Lock</a> == true</code>,<wbr/> followed by a request
+with <code><a href="#controls_android.control.aeLock">android.<wbr/>control.<wbr/>ae<wbr/>Lock</a> == false</code>,<wbr/> if the application decides not to submit a
+still capture request after the precapture sequence completes.<wbr/> Alternatively,<wbr/> for
+API level 23 or newer devices,<wbr/> the CANCEL can be used to unlock the camera device
+internally locked AE if the application doesn't submit a still capture request after
+the AE precapture trigger.<wbr/> Note that,<wbr/> the CANCEL was added in API level 23,<wbr/> and must not
+be used in devices that have earlier API levels.<wbr/></p>
+<p>The exact effect of auto-exposure (AE) precapture trigger
+depends on the current AE mode and state; see
+<a href="#dynamic_android.control.aeState">android.<wbr/>control.<wbr/>ae<wbr/>State</a> for AE precapture state transition
+details.<wbr/></p>
+<p>On LEGACY-level devices,<wbr/> the precapture trigger is not supported;
+capturing a high-resolution JPEG image will automatically trigger a
+precapture sequence before the high-resolution capture,<wbr/> including
+potentially firing a pre-capture flash.<wbr/></p>
+<p>Using the precapture trigger and the auto-focus trigger <a href="#controls_android.control.afTrigger">android.<wbr/>control.<wbr/>af<wbr/>Trigger</a>
+simultaneously is allowed.<wbr/> However,<wbr/> since these triggers often require cooperation between
+the auto-focus and auto-exposure routines (for example,<wbr/> the flash may need to be enabled for a
+focus sweep),<wbr/> the camera device may delay acting on a later trigger until the previous
+trigger has been fully handled.<wbr/> This may lead to longer intervals between the trigger and
+changes to <a href="#dynamic_android.control.aeState">android.<wbr/>control.<wbr/>ae<wbr/>State</a> indicating the start of the precapture sequence,<wbr/> for
+example.<wbr/></p>
+<p>If both the precapture and the auto-focus trigger are activated on the same request,<wbr/> then
+the camera device will complete them in the optimal order for that device.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>The HAL must support triggering the AE precapture trigger while an AF trigger is active
+(and vice versa),<wbr/> or at the same time as the AF trigger.<wbr/>  It is acceptable for the HAL to
+treat these as two consecutive triggers,<wbr/> for example handling the AF trigger and then the
+AE trigger.<wbr/>  Or the HAL may choose to optimize the case with both triggers fired at once,<wbr/>
+to minimize the latency for converging both focus and exposure/<wbr/>flash usage.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="controls_android.control.afMode">
+            <td class="entry_name
+             " rowspan="5">
+              android.<wbr/>control.<wbr/>af<wbr/>Mode
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">byte</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">OFF</span>
+                    <span class="entry_type_enum_notes"><p>The auto-focus routine does not control the lens;
+<a href="#controls_android.lens.focusDistance">android.<wbr/>lens.<wbr/>focus<wbr/>Distance</a> is controlled by the
+application.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">AUTO</span>
+                    <span class="entry_type_enum_notes"><p>Basic automatic focus mode.<wbr/></p>
+<p>In this mode,<wbr/> the lens does not move unless
+the autofocus trigger action is called.<wbr/> When that trigger
+is activated,<wbr/> AF will transition to ACTIVE_<wbr/>SCAN,<wbr/> then to
+the outcome of the scan (FOCUSED or NOT_<wbr/>FOCUSED).<wbr/></p>
+<p>Always supported if lens is not fixed focus.<wbr/></p>
+<p>Use <a href="#static_android.lens.info.minimumFocusDistance">android.<wbr/>lens.<wbr/>info.<wbr/>minimum<wbr/>Focus<wbr/>Distance</a> to determine if lens
+is fixed-focus.<wbr/></p>
+<p>Triggering AF_<wbr/>CANCEL resets the lens position to default,<wbr/>
+and sets the AF state to INACTIVE.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">MACRO</span>
+                    <span class="entry_type_enum_notes"><p>Close-up focusing mode.<wbr/></p>
+<p>In this mode,<wbr/> the lens does not move unless the
+autofocus trigger action is called.<wbr/> When that trigger is
+activated,<wbr/> AF will transition to ACTIVE_<wbr/>SCAN,<wbr/> then to
+the outcome of the scan (FOCUSED or NOT_<wbr/>FOCUSED).<wbr/> This
+mode is optimized for focusing on objects very close to
+the camera.<wbr/></p>
+<p>When that trigger is activated,<wbr/> AF will transition to
+ACTIVE_<wbr/>SCAN,<wbr/> then to the outcome of the scan (FOCUSED or
+NOT_<wbr/>FOCUSED).<wbr/> Triggering cancel AF resets the lens
+position to default,<wbr/> and sets the AF state to
+INACTIVE.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">CONTINUOUS_VIDEO</span>
+                    <span class="entry_type_enum_notes"><p>In this mode,<wbr/> the AF algorithm modifies the lens
+position continually to attempt to provide a
+constantly-in-focus image stream.<wbr/></p>
+<p>The focusing behavior should be suitable for good quality
+video recording; typically this means slower focus
+movement and no overshoots.<wbr/> When the AF trigger is not
+involved,<wbr/> the AF algorithm should start in INACTIVE state,<wbr/>
+and then transition into PASSIVE_<wbr/>SCAN and PASSIVE_<wbr/>FOCUSED
+states as appropriate.<wbr/> When the AF trigger is activated,<wbr/>
+the algorithm should immediately transition into
+AF_<wbr/>FOCUSED or AF_<wbr/>NOT_<wbr/>FOCUSED as appropriate,<wbr/> and lock the
+lens position until a cancel AF trigger is received.<wbr/></p>
+<p>Once cancel is received,<wbr/> the algorithm should transition
+back to INACTIVE and resume passive scan.<wbr/> Note that this
+behavior is not identical to CONTINUOUS_<wbr/>PICTURE,<wbr/> since an
+ongoing PASSIVE_<wbr/>SCAN must immediately be
+canceled.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">CONTINUOUS_PICTURE</span>
+                    <span class="entry_type_enum_notes"><p>In this mode,<wbr/> the AF algorithm modifies the lens
+position continually to attempt to provide a
+constantly-in-focus image stream.<wbr/></p>
+<p>The focusing behavior should be suitable for still image
+capture; typically this means focusing as fast as
+possible.<wbr/> When the AF trigger is not involved,<wbr/> the AF
+algorithm should start in INACTIVE state,<wbr/> and then
+transition into PASSIVE_<wbr/>SCAN and PASSIVE_<wbr/>FOCUSED states as
+appropriate as it attempts to maintain focus.<wbr/> When the AF
+trigger is activated,<wbr/> the algorithm should finish its
+PASSIVE_<wbr/>SCAN if active,<wbr/> and then transition into
+AF_<wbr/>FOCUSED or AF_<wbr/>NOT_<wbr/>FOCUSED as appropriate,<wbr/> and lock the
+lens position until a cancel AF trigger is received.<wbr/></p>
+<p>When the AF cancel trigger is activated,<wbr/> the algorithm
+should transition back to INACTIVE and then act as if it
+has just been started.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">EDOF</span>
+                    <span class="entry_type_enum_notes"><p>Extended depth of field (digital focus) mode.<wbr/></p>
+<p>The camera device will produce images with an extended
+depth of field automatically; no special focusing
+operations need to be done before taking a picture.<wbr/></p>
+<p>AF triggers are ignored,<wbr/> and the AF state will always be
+INACTIVE.<wbr/></p></span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Whether auto-focus (AF) is currently enabled,<wbr/> and what
+mode it is set to.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p><a href="#static_android.control.afAvailableModes">android.<wbr/>control.<wbr/>af<wbr/>Available<wbr/>Modes</a></p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_BC">BC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>Only effective if <a href="#controls_android.control.mode">android.<wbr/>control.<wbr/>mode</a> = AUTO and the lens is not fixed focus
+(i.<wbr/>e.<wbr/> <code><a href="#static_android.lens.info.minimumFocusDistance">android.<wbr/>lens.<wbr/>info.<wbr/>minimum<wbr/>Focus<wbr/>Distance</a> &gt; 0</code>).<wbr/> Also note that
+when <a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a> is OFF,<wbr/> the behavior of AF is device
+dependent.<wbr/> It is recommended to lock AF by using <a href="#controls_android.control.afTrigger">android.<wbr/>control.<wbr/>af<wbr/>Trigger</a> before
+setting <a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a> to OFF,<wbr/> or set AF mode to OFF when AE is OFF.<wbr/></p>
+<p>If the lens is controlled by the camera device auto-focus algorithm,<wbr/>
+the camera device will report the current AF status in <a href="#dynamic_android.control.afState">android.<wbr/>control.<wbr/>af<wbr/>State</a>
+in result metadata.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>When afMode is AUTO or MACRO,<wbr/> the lens must not move until an AF trigger is sent in a
+request (<a href="#controls_android.control.afTrigger">android.<wbr/>control.<wbr/>af<wbr/>Trigger</a> <code>==</code> START).<wbr/> After an AF trigger,<wbr/> the afState will end
+up with either FOCUSED_<wbr/>LOCKED or NOT_<wbr/>FOCUSED_<wbr/>LOCKED state (see
+<a href="#dynamic_android.control.afState">android.<wbr/>control.<wbr/>af<wbr/>State</a> for detailed state transitions),<wbr/> which indicates that the lens is
+locked and will not move.<wbr/> If camera movement (e.<wbr/>g.<wbr/> tilting camera) causes the lens to move
+after the lens is locked,<wbr/> the HAL must compensate this movement appropriately such that
+the same focal plane remains in focus.<wbr/></p>
+<p>When afMode is one of the continuous auto focus modes,<wbr/> the HAL is free to start an AF
+scan whenever it's not locked.<wbr/> When the lens is locked after an AF trigger
+(see <a href="#dynamic_android.control.afState">android.<wbr/>control.<wbr/>af<wbr/>State</a> for detailed state transitions),<wbr/> the HAL should maintain the
+same lock behavior as above.<wbr/></p>
+<p>When afMode is OFF,<wbr/> the application controls focus manually.<wbr/> The accuracy of the
+focus distance control depends on the <a href="#static_android.lens.info.focusDistanceCalibration">android.<wbr/>lens.<wbr/>info.<wbr/>focus<wbr/>Distance<wbr/>Calibration</a>.<wbr/>
+However,<wbr/> the lens must not move regardless of the camera movement for any focus distance
+manual control.<wbr/></p>
+<p>To put this in concrete terms,<wbr/> if the camera has lens elements which may move based on
+camera orientation or motion (e.<wbr/>g.<wbr/> due to gravity),<wbr/> then the HAL must drive the lens to
+remain in a fixed position invariant to the camera's orientation or motion,<wbr/> for example,<wbr/>
+by using accelerometer measurements in the lens control logic.<wbr/> This is a typical issue
+that will arise on camera modules with open-loop VCMs.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="controls_android.control.afRegions">
+            <td class="entry_name
+             " rowspan="5">
+              android.<wbr/>control.<wbr/>af<wbr/>Regions
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int32</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  5 x area_count
+                </span>
+              <span class="entry_type_visibility"> [public as meteringRectangle]</span>
+
+
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>List of metering areas to use for auto-focus.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+              Pixel coordinates within android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size
+            </td>
+
+            <td class="entry_range">
+              <p>Coordinates must be between <code>[(0,<wbr/>0),<wbr/> (width,<wbr/> height))</code> of
+<a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a></p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_BC">BC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>Not available if <a href="#static_android.control.maxRegionsAf">android.<wbr/>control.<wbr/>max<wbr/>Regions<wbr/>Af</a> is 0.<wbr/>
+Otherwise will always be present.<wbr/></p>
+<p>The maximum number of focus areas supported by the device is determined by the value
+of <a href="#static_android.control.maxRegionsAf">android.<wbr/>control.<wbr/>max<wbr/>Regions<wbr/>Af</a>.<wbr/></p>
+<p>The coordinate system is based on the active pixel array,<wbr/>
+with (0,<wbr/>0) being the top-left pixel in the active pixel array,<wbr/> and
+(<a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a>.<wbr/>width - 1,<wbr/>
+<a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a>.<wbr/>height - 1) being the
+bottom-right pixel in the active pixel array.<wbr/></p>
+<p>The weight must be within <code>[0,<wbr/> 1000]</code>,<wbr/> and represents a weight
+for every pixel in the area.<wbr/> This means that a large metering area
+with the same weight as a smaller area will have more effect in
+the metering result.<wbr/> Metering areas can partially overlap and the
+camera device will add the weights in the overlap region.<wbr/></p>
+<p>The weights are relative to weights of other metering regions,<wbr/> so if only one region
+is used,<wbr/> all non-zero weights will have the same effect.<wbr/> A region with 0 weight is
+ignored.<wbr/></p>
+<p>If all regions have 0 weight,<wbr/> then no specific metering area needs to be used by the
+camera device.<wbr/></p>
+<p>If the metering region is outside the used <a href="#controls_android.scaler.cropRegion">android.<wbr/>scaler.<wbr/>crop<wbr/>Region</a> returned in
+capture result metadata,<wbr/> the camera device will ignore the sections outside the crop
+region and output only the intersection rectangle as the metering region in the result
+metadata.<wbr/> If the region is entirely outside the crop region,<wbr/> it will be ignored and
+not reported in the result metadata.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>The HAL level representation of MeteringRectangle[] is a
+int[5 * area_<wbr/>count].<wbr/>
+Every five elements represent a metering region of
+(xmin,<wbr/> ymin,<wbr/> xmax,<wbr/> ymax,<wbr/> weight).<wbr/>
+The rectangle is defined to be inclusive on xmin and ymin,<wbr/> but
+exclusive on xmax and ymax.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="controls_android.control.afTrigger">
+            <td class="entry_name
+             " rowspan="5">
+              android.<wbr/>control.<wbr/>af<wbr/>Trigger
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">byte</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">IDLE</span>
+                    <span class="entry_type_enum_notes"><p>The trigger is idle.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">START</span>
+                    <span class="entry_type_enum_notes"><p>Autofocus will trigger now.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">CANCEL</span>
+                    <span class="entry_type_enum_notes"><p>Autofocus will return to its initial
+state,<wbr/> and cancel any currently active trigger.<wbr/></p></span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Whether the camera device will trigger autofocus for this request.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_BC">BC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>This entry is normally set to IDLE,<wbr/> or is not
+included at all in the request settings.<wbr/></p>
+<p>When included and set to START,<wbr/> the camera device will trigger the
+autofocus algorithm.<wbr/> If autofocus is disabled,<wbr/> this trigger has no effect.<wbr/></p>
+<p>When set to CANCEL,<wbr/> the camera device will cancel any active trigger,<wbr/>
+and return to its initial AF state.<wbr/></p>
+<p>Generally,<wbr/> applications should set this entry to START or CANCEL for only a
+single capture,<wbr/> and then return it to IDLE (or not set at all).<wbr/> Specifying
+START for multiple captures in a row means restarting the AF operation over
+and over again.<wbr/></p>
+<p>See <a href="#dynamic_android.control.afState">android.<wbr/>control.<wbr/>af<wbr/>State</a> for what the trigger means for each AF mode.<wbr/></p>
+<p>Using the autofocus trigger and the precapture trigger <a href="#controls_android.control.aePrecaptureTrigger">android.<wbr/>control.<wbr/>ae<wbr/>Precapture<wbr/>Trigger</a>
+simultaneously is allowed.<wbr/> However,<wbr/> since these triggers often require cooperation between
+the auto-focus and auto-exposure routines (for example,<wbr/> the flash may need to be enabled for a
+focus sweep),<wbr/> the camera device may delay acting on a later trigger until the previous
+trigger has been fully handled.<wbr/> This may lead to longer intervals between the trigger and
+changes to <a href="#dynamic_android.control.afState">android.<wbr/>control.<wbr/>af<wbr/>State</a>,<wbr/> for example.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>The HAL must support triggering the AF trigger while an AE precapture trigger is active
+(and vice versa),<wbr/> or at the same time as the AE trigger.<wbr/>  It is acceptable for the HAL to
+treat these as two consecutive triggers,<wbr/> for example handling the AF trigger and then the
+AE trigger.<wbr/>  Or the HAL may choose to optimize the case with both triggers fired at once,<wbr/>
+to minimize the latency for converging both focus and exposure/<wbr/>flash usage.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="controls_android.control.awbLock">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>control.<wbr/>awb<wbr/>Lock
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">byte</span>
+
+              <span class="entry_type_visibility"> [public as boolean]</span>
+
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">OFF</span>
+                    <span class="entry_type_enum_notes"><p>Auto-white balance lock is disabled; the AWB
+algorithm is free to update its parameters if in AUTO
+mode.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">ON</span>
+                    <span class="entry_type_enum_notes"><p>Auto-white balance lock is enabled; the AWB
+algorithm will not update its parameters while the lock
+is active.<wbr/></p></span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Whether auto-white balance (AWB) is currently locked to its
+latest calculated values.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_BC">BC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>When set to <code>true</code> (ON),<wbr/> the AWB algorithm is locked to its latest parameters,<wbr/>
+and will not change color balance settings until the lock is set to <code>false</code> (OFF).<wbr/></p>
+<p>Since the camera device has a pipeline of in-flight requests,<wbr/> the settings that
+get locked do not necessarily correspond to the settings that were present in the
+latest capture result received from the camera device,<wbr/> since additional captures
+and AWB updates may have occurred even before the result was sent out.<wbr/> If an
+application is switching between automatic and manual control and wishes to eliminate
+any flicker during the switch,<wbr/> the following procedure is recommended:</p>
+<ol>
+<li>Starting in auto-AWB mode:</li>
+<li>Lock AWB</li>
+<li>Wait for the first result to be output that has the AWB locked</li>
+<li>Copy AWB settings from that result into a request,<wbr/> set the request to manual AWB</li>
+<li>Submit the capture request,<wbr/> proceed to run manual AWB as desired.<wbr/></li>
+</ol>
+<p>Note that AWB lock is only meaningful when
+<a href="#controls_android.control.awbMode">android.<wbr/>control.<wbr/>awb<wbr/>Mode</a> is in the AUTO mode; in other modes,<wbr/>
+AWB is already fixed to a specific setting.<wbr/></p>
+<p>Some LEGACY devices may not support ON; the value is then overridden to OFF.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="controls_android.control.awbMode">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>control.<wbr/>awb<wbr/>Mode
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">byte</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">OFF</span>
+                    <span class="entry_type_enum_notes"><p>The camera device's auto-white balance routine is disabled.<wbr/></p>
+<p>The application-selected color transform matrix
+(<a href="#controls_android.colorCorrection.transform">android.<wbr/>color<wbr/>Correction.<wbr/>transform</a>) and gains
+(<a href="#controls_android.colorCorrection.gains">android.<wbr/>color<wbr/>Correction.<wbr/>gains</a>) are used by the camera
+device for manual white balance control.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">AUTO</span>
+                    <span class="entry_type_enum_notes"><p>The camera device's auto-white balance routine is active.<wbr/></p>
+<p>The application's values for <a href="#controls_android.colorCorrection.transform">android.<wbr/>color<wbr/>Correction.<wbr/>transform</a>
+and <a href="#controls_android.colorCorrection.gains">android.<wbr/>color<wbr/>Correction.<wbr/>gains</a> are ignored.<wbr/>
+For devices that support the MANUAL_<wbr/>POST_<wbr/>PROCESSING capability,<wbr/> the
+values used by the camera device for the transform and gains
+will be available in the capture result for this request.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">INCANDESCENT</span>
+                    <span class="entry_type_enum_notes"><p>The camera device's auto-white balance routine is disabled;
+the camera device uses incandescent light as the assumed scene
+illumination for white balance.<wbr/></p>
+<p>While the exact white balance transforms are up to the
+camera device,<wbr/> they will approximately match the CIE
+standard illuminant A.<wbr/></p>
+<p>The application's values for <a href="#controls_android.colorCorrection.transform">android.<wbr/>color<wbr/>Correction.<wbr/>transform</a>
+and <a href="#controls_android.colorCorrection.gains">android.<wbr/>color<wbr/>Correction.<wbr/>gains</a> are ignored.<wbr/>
+For devices that support the MANUAL_<wbr/>POST_<wbr/>PROCESSING capability,<wbr/> the
+values used by the camera device for the transform and gains
+will be available in the capture result for this request.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">FLUORESCENT</span>
+                    <span class="entry_type_enum_notes"><p>The camera device's auto-white balance routine is disabled;
+the camera device uses fluorescent light as the assumed scene
+illumination for white balance.<wbr/></p>
+<p>While the exact white balance transforms are up to the
+camera device,<wbr/> they will approximately match the CIE
+standard illuminant F2.<wbr/></p>
+<p>The application's values for <a href="#controls_android.colorCorrection.transform">android.<wbr/>color<wbr/>Correction.<wbr/>transform</a>
+and <a href="#controls_android.colorCorrection.gains">android.<wbr/>color<wbr/>Correction.<wbr/>gains</a> are ignored.<wbr/>
+For devices that support the MANUAL_<wbr/>POST_<wbr/>PROCESSING capability,<wbr/> the
+values used by the camera device for the transform and gains
+will be available in the capture result for this request.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">WARM_FLUORESCENT</span>
+                    <span class="entry_type_enum_notes"><p>The camera device's auto-white balance routine is disabled;
+the camera device uses warm fluorescent light as the assumed scene
+illumination for white balance.<wbr/></p>
+<p>While the exact white balance transforms are up to the
+camera device,<wbr/> they will approximately match the CIE
+standard illuminant F4.<wbr/></p>
+<p>The application's values for <a href="#controls_android.colorCorrection.transform">android.<wbr/>color<wbr/>Correction.<wbr/>transform</a>
+and <a href="#controls_android.colorCorrection.gains">android.<wbr/>color<wbr/>Correction.<wbr/>gains</a> are ignored.<wbr/>
+For devices that support the MANUAL_<wbr/>POST_<wbr/>PROCESSING capability,<wbr/> the
+values used by the camera device for the transform and gains
+will be available in the capture result for this request.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">DAYLIGHT</span>
+                    <span class="entry_type_enum_notes"><p>The camera device's auto-white balance routine is disabled;
+the camera device uses daylight light as the assumed scene
+illumination for white balance.<wbr/></p>
+<p>While the exact white balance transforms are up to the
+camera device,<wbr/> they will approximately match the CIE
+standard illuminant D65.<wbr/></p>
+<p>The application's values for <a href="#controls_android.colorCorrection.transform">android.<wbr/>color<wbr/>Correction.<wbr/>transform</a>
+and <a href="#controls_android.colorCorrection.gains">android.<wbr/>color<wbr/>Correction.<wbr/>gains</a> are ignored.<wbr/>
+For devices that support the MANUAL_<wbr/>POST_<wbr/>PROCESSING capability,<wbr/> the
+values used by the camera device for the transform and gains
+will be available in the capture result for this request.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">CLOUDY_DAYLIGHT</span>
+                    <span class="entry_type_enum_notes"><p>The camera device's auto-white balance routine is disabled;
+the camera device uses cloudy daylight light as the assumed scene
+illumination for white balance.<wbr/></p>
+<p>The application's values for <a href="#controls_android.colorCorrection.transform">android.<wbr/>color<wbr/>Correction.<wbr/>transform</a>
+and <a href="#controls_android.colorCorrection.gains">android.<wbr/>color<wbr/>Correction.<wbr/>gains</a> are ignored.<wbr/>
+For devices that support the MANUAL_<wbr/>POST_<wbr/>PROCESSING capability,<wbr/> the
+values used by the camera device for the transform and gains
+will be available in the capture result for this request.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">TWILIGHT</span>
+                    <span class="entry_type_enum_notes"><p>The camera device's auto-white balance routine is disabled;
+the camera device uses twilight light as the assumed scene
+illumination for white balance.<wbr/></p>
+<p>The application's values for <a href="#controls_android.colorCorrection.transform">android.<wbr/>color<wbr/>Correction.<wbr/>transform</a>
+and <a href="#controls_android.colorCorrection.gains">android.<wbr/>color<wbr/>Correction.<wbr/>gains</a> are ignored.<wbr/>
+For devices that support the MANUAL_<wbr/>POST_<wbr/>PROCESSING capability,<wbr/> the
+values used by the camera device for the transform and gains
+will be available in the capture result for this request.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">SHADE</span>
+                    <span class="entry_type_enum_notes"><p>The camera device's auto-white balance routine is disabled;
+the camera device uses shade light as the assumed scene
+illumination for white balance.<wbr/></p>
+<p>The application's values for <a href="#controls_android.colorCorrection.transform">android.<wbr/>color<wbr/>Correction.<wbr/>transform</a>
+and <a href="#controls_android.colorCorrection.gains">android.<wbr/>color<wbr/>Correction.<wbr/>gains</a> are ignored.<wbr/>
+For devices that support the MANUAL_<wbr/>POST_<wbr/>PROCESSING capability,<wbr/> the
+values used by the camera device for the transform and gains
+will be available in the capture result for this request.<wbr/></p></span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Whether auto-white balance (AWB) is currently setting the color
+transform fields,<wbr/> and what its illumination target
+is.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p><a href="#static_android.control.awbAvailableModes">android.<wbr/>control.<wbr/>awb<wbr/>Available<wbr/>Modes</a></p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_BC">BC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>This control is only effective if <a href="#controls_android.control.mode">android.<wbr/>control.<wbr/>mode</a> is AUTO.<wbr/></p>
+<p>When set to the ON mode,<wbr/> the camera device's auto-white balance
+routine is enabled,<wbr/> overriding the application's selected
+<a href="#controls_android.colorCorrection.transform">android.<wbr/>color<wbr/>Correction.<wbr/>transform</a>,<wbr/> <a href="#controls_android.colorCorrection.gains">android.<wbr/>color<wbr/>Correction.<wbr/>gains</a> and
+<a href="#controls_android.colorCorrection.mode">android.<wbr/>color<wbr/>Correction.<wbr/>mode</a>.<wbr/> Note that when <a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a>
+is OFF,<wbr/> the behavior of AWB is device dependent.<wbr/> It is recommended to
+also set AWB mode to OFF or lock AWB by using <a href="#controls_android.control.awbLock">android.<wbr/>control.<wbr/>awb<wbr/>Lock</a> before
+setting AE mode to OFF.<wbr/></p>
+<p>When set to the OFF mode,<wbr/> the camera device's auto-white balance
+routine is disabled.<wbr/> The application manually controls the white
+balance by <a href="#controls_android.colorCorrection.transform">android.<wbr/>color<wbr/>Correction.<wbr/>transform</a>,<wbr/> <a href="#controls_android.colorCorrection.gains">android.<wbr/>color<wbr/>Correction.<wbr/>gains</a>
+and <a href="#controls_android.colorCorrection.mode">android.<wbr/>color<wbr/>Correction.<wbr/>mode</a>.<wbr/></p>
+<p>When set to any other modes,<wbr/> the camera device's auto-white
+balance routine is disabled.<wbr/> The camera device uses each
+particular illumination target for white balance
+adjustment.<wbr/> The application's values for
+<a href="#controls_android.colorCorrection.transform">android.<wbr/>color<wbr/>Correction.<wbr/>transform</a>,<wbr/>
+<a href="#controls_android.colorCorrection.gains">android.<wbr/>color<wbr/>Correction.<wbr/>gains</a> and
+<a href="#controls_android.colorCorrection.mode">android.<wbr/>color<wbr/>Correction.<wbr/>mode</a> are ignored.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="controls_android.control.awbRegions">
+            <td class="entry_name
+             " rowspan="5">
+              android.<wbr/>control.<wbr/>awb<wbr/>Regions
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int32</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  5 x area_count
+                </span>
+              <span class="entry_type_visibility"> [public as meteringRectangle]</span>
+
+
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>List of metering areas to use for auto-white-balance illuminant
+estimation.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+              Pixel coordinates within android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size
+            </td>
+
+            <td class="entry_range">
+              <p>Coordinates must be between <code>[(0,<wbr/>0),<wbr/> (width,<wbr/> height))</code> of
+<a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a></p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_BC">BC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>Not available if <a href="#static_android.control.maxRegionsAwb">android.<wbr/>control.<wbr/>max<wbr/>Regions<wbr/>Awb</a> is 0.<wbr/>
+Otherwise will always be present.<wbr/></p>
+<p>The maximum number of regions supported by the device is determined by the value
+of <a href="#static_android.control.maxRegionsAwb">android.<wbr/>control.<wbr/>max<wbr/>Regions<wbr/>Awb</a>.<wbr/></p>
+<p>The coordinate system is based on the active pixel array,<wbr/>
+with (0,<wbr/>0) being the top-left pixel in the active pixel array,<wbr/> and
+(<a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a>.<wbr/>width - 1,<wbr/>
+<a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a>.<wbr/>height - 1) being the
+bottom-right pixel in the active pixel array.<wbr/></p>
+<p>The weight must range from 0 to 1000,<wbr/> and represents a weight
+for every pixel in the area.<wbr/> This means that a large metering area
+with the same weight as a smaller area will have more effect in
+the metering result.<wbr/> Metering areas can partially overlap and the
+camera device will add the weights in the overlap region.<wbr/></p>
+<p>The weights are relative to weights of other white balance metering regions,<wbr/> so if
+only one region is used,<wbr/> all non-zero weights will have the same effect.<wbr/> A region with
+0 weight is ignored.<wbr/></p>
+<p>If all regions have 0 weight,<wbr/> then no specific metering area needs to be used by the
+camera device.<wbr/></p>
+<p>If the metering region is outside the used <a href="#controls_android.scaler.cropRegion">android.<wbr/>scaler.<wbr/>crop<wbr/>Region</a> returned in
+capture result metadata,<wbr/> the camera device will ignore the sections outside the crop
+region and output only the intersection rectangle as the metering region in the result
+metadata.<wbr/>  If the region is entirely outside the crop region,<wbr/> it will be ignored and
+not reported in the result metadata.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>The HAL level representation of MeteringRectangle[] is a
+int[5 * area_<wbr/>count].<wbr/>
+Every five elements represent a metering region of
+(xmin,<wbr/> ymin,<wbr/> xmax,<wbr/> ymax,<wbr/> weight).<wbr/>
+The rectangle is defined to be inclusive on xmin and ymin,<wbr/> but
+exclusive on xmax and ymax.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="controls_android.control.captureIntent">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>control.<wbr/>capture<wbr/>Intent
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">byte</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">CUSTOM</span>
+                    <span class="entry_type_enum_notes"><p>The goal of this request doesn't fall into the other
+categories.<wbr/> The camera device will default to preview-like
+behavior.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">PREVIEW</span>
+                    <span class="entry_type_enum_notes"><p>This request is for a preview-like use case.<wbr/></p>
+<p>The precapture trigger may be used to start off a metering
+w/<wbr/>flash sequence.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">STILL_CAPTURE</span>
+                    <span class="entry_type_enum_notes"><p>This request is for a still capture-type
+use case.<wbr/></p>
+<p>If the flash unit is under automatic control,<wbr/> it may fire as needed.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">VIDEO_RECORD</span>
+                    <span class="entry_type_enum_notes"><p>This request is for a video recording
+use case.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">VIDEO_SNAPSHOT</span>
+                    <span class="entry_type_enum_notes"><p>This request is for a video snapshot (still
+image while recording video) use case.<wbr/></p>
+<p>The camera device should take the highest-quality image
+possible (given the other settings) without disrupting the
+frame rate of video recording.<wbr/>  </p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">ZERO_SHUTTER_LAG</span>
+                    <span class="entry_type_enum_notes"><p>This request is for a ZSL use case; the
+application will stream full-resolution images and
+reprocess one or several later for a final
+capture.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">MANUAL</span>
+                    <span class="entry_type_enum_notes"><p>This request is for manual capture use case where
+the applications want to directly control the capture parameters.<wbr/></p>
+<p>For example,<wbr/> the application may wish to manually control
+<a href="#controls_android.sensor.exposureTime">android.<wbr/>sensor.<wbr/>exposure<wbr/>Time</a>,<wbr/> <a href="#controls_android.sensor.sensitivity">android.<wbr/>sensor.<wbr/>sensitivity</a>,<wbr/> etc.<wbr/></p></span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Information to the camera device 3A (auto-exposure,<wbr/>
+auto-focus,<wbr/> auto-white balance) routines about the purpose
+of this capture,<wbr/> to help the camera device to decide optimal 3A
+strategy.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_BC">BC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>This control (except for MANUAL) is only effective if
+<code><a href="#controls_android.control.mode">android.<wbr/>control.<wbr/>mode</a> != OFF</code> and any 3A routine is active.<wbr/></p>
+<p>ZERO_<wbr/>SHUTTER_<wbr/>LAG will be supported if <a href="#static_android.request.availableCapabilities">android.<wbr/>request.<wbr/>available<wbr/>Capabilities</a>
+contains PRIVATE_<wbr/>REPROCESSING or YUV_<wbr/>REPROCESSING.<wbr/> MANUAL will be supported if
+<a href="#static_android.request.availableCapabilities">android.<wbr/>request.<wbr/>available<wbr/>Capabilities</a> contains MANUAL_<wbr/>SENSOR.<wbr/> Other intent values are
+always supported.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="controls_android.control.effectMode">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>control.<wbr/>effect<wbr/>Mode
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">byte</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">OFF</span>
+                    <span class="entry_type_enum_notes"><p>No color effect will be applied.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">MONO</span>
+                    <span class="entry_type_enum_optional">[optional]</span>
+                    <span class="entry_type_enum_notes"><p>A "monocolor" effect where the image is mapped into
+a single color.<wbr/></p>
+<p>This will typically be grayscale.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">NEGATIVE</span>
+                    <span class="entry_type_enum_optional">[optional]</span>
+                    <span class="entry_type_enum_notes"><p>A "photo-negative" effect where the image's colors
+are inverted.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">SOLARIZE</span>
+                    <span class="entry_type_enum_optional">[optional]</span>
+                    <span class="entry_type_enum_notes"><p>A "solarisation" effect (Sabattier effect) where the
+image is wholly or partially reversed in
+tone.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">SEPIA</span>
+                    <span class="entry_type_enum_optional">[optional]</span>
+                    <span class="entry_type_enum_notes"><p>A "sepia" effect where the image is mapped into warm
+gray,<wbr/> red,<wbr/> and brown tones.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">POSTERIZE</span>
+                    <span class="entry_type_enum_optional">[optional]</span>
+                    <span class="entry_type_enum_notes"><p>A "posterization" effect where the image uses
+discrete regions of tone rather than a continuous
+gradient of tones.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">WHITEBOARD</span>
+                    <span class="entry_type_enum_optional">[optional]</span>
+                    <span class="entry_type_enum_notes"><p>A "whiteboard" effect where the image is typically displayed
+as regions of white,<wbr/> with black or grey details.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">BLACKBOARD</span>
+                    <span class="entry_type_enum_optional">[optional]</span>
+                    <span class="entry_type_enum_notes"><p>A "blackboard" effect where the image is typically displayed
+as regions of black,<wbr/> with white or grey details.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">AQUA</span>
+                    <span class="entry_type_enum_optional">[optional]</span>
+                    <span class="entry_type_enum_notes"><p>An "aqua" effect where a blue hue is added to the image.<wbr/></p></span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>A special color effect to apply.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p><a href="#static_android.control.availableEffects">android.<wbr/>control.<wbr/>available<wbr/>Effects</a></p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_BC">BC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>When this mode is set,<wbr/> a color effect will be applied
+to images produced by the camera device.<wbr/> The interpretation
+and implementation of these color effects is left to the
+implementor of the camera device,<wbr/> and should not be
+depended on to be consistent (or present) across all
+devices.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="controls_android.control.mode">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>control.<wbr/>mode
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">byte</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">OFF</span>
+                    <span class="entry_type_enum_notes"><p>Full application control of pipeline.<wbr/></p>
+<p>All control by the device's metering and focusing (3A)
+routines is disabled,<wbr/> and no other settings in
+android.<wbr/>control.<wbr/>* have any effect,<wbr/> except that
+<a href="#controls_android.control.captureIntent">android.<wbr/>control.<wbr/>capture<wbr/>Intent</a> may be used by the camera
+device to select post-processing values for processing
+blocks that do not allow for manual control,<wbr/> or are not
+exposed by the camera API.<wbr/></p>
+<p>However,<wbr/> the camera device's 3A routines may continue to
+collect statistics and update their internal state so that
+when control is switched to AUTO mode,<wbr/> good control values
+can be immediately applied.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">AUTO</span>
+                    <span class="entry_type_enum_notes"><p>Use settings for each individual 3A routine.<wbr/></p>
+<p>Manual control of capture parameters is disabled.<wbr/> All
+controls in android.<wbr/>control.<wbr/>* besides sceneMode take
+effect.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">USE_SCENE_MODE</span>
+                    <span class="entry_type_enum_optional">[optional]</span>
+                    <span class="entry_type_enum_notes"><p>Use a specific scene mode.<wbr/></p>
+<p>Enabling this disables control.<wbr/>aeMode,<wbr/> control.<wbr/>awbMode and
+control.<wbr/>afMode controls; the camera device will ignore
+those settings while USE_<wbr/>SCENE_<wbr/>MODE is active (except for
+FACE_<wbr/>PRIORITY scene mode).<wbr/> Other control entries are still active.<wbr/>
+This setting can only be used if scene mode is supported (i.<wbr/>e.<wbr/>
+<a href="#static_android.control.availableSceneModes">android.<wbr/>control.<wbr/>available<wbr/>Scene<wbr/>Modes</a>
+contain some modes other than DISABLED).<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">OFF_KEEP_STATE</span>
+                    <span class="entry_type_enum_optional">[optional]</span>
+                    <span class="entry_type_enum_notes"><p>Same as OFF mode,<wbr/> except that this capture will not be
+used by camera device background auto-exposure,<wbr/> auto-white balance and
+auto-focus algorithms (3A) to update their statistics.<wbr/></p>
+<p>Specifically,<wbr/> the 3A routines are locked to the last
+values set from a request with AUTO,<wbr/> OFF,<wbr/> or
+USE_<wbr/>SCENE_<wbr/>MODE,<wbr/> and any statistics or state updates
+collected from manual captures with OFF_<wbr/>KEEP_<wbr/>STATE will be
+discarded by the camera device.<wbr/></p></span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Overall mode of 3A (auto-exposure,<wbr/> auto-white-balance,<wbr/> auto-focus) control
+routines.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p><a href="#static_android.control.availableModes">android.<wbr/>control.<wbr/>available<wbr/>Modes</a></p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_BC">BC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>This is a top-level 3A control switch.<wbr/> When set to OFF,<wbr/> all 3A control
+by the camera device is disabled.<wbr/> The application must set the fields for
+capture parameters itself.<wbr/></p>
+<p>When set to AUTO,<wbr/> the individual algorithm controls in
+android.<wbr/>control.<wbr/>* are in effect,<wbr/> such as <a href="#controls_android.control.afMode">android.<wbr/>control.<wbr/>af<wbr/>Mode</a>.<wbr/></p>
+<p>When set to USE_<wbr/>SCENE_<wbr/>MODE,<wbr/> the individual controls in
+android.<wbr/>control.<wbr/>* are mostly disabled,<wbr/> and the camera device implements
+one of the scene mode settings (such as ACTION,<wbr/> SUNSET,<wbr/> or PARTY)
+as it wishes.<wbr/> The camera device scene mode 3A settings are provided by
+<a href="https://developer.android.com/reference/android/hardware/camera2/CaptureResult.html">capture results</a>.<wbr/></p>
+<p>When set to OFF_<wbr/>KEEP_<wbr/>STATE,<wbr/> it is similar to OFF mode,<wbr/> the only difference
+is that this frame will not be used by camera device background 3A statistics
+update,<wbr/> as if this frame is never captured.<wbr/> This mode can be used in the scenario
+where the application doesn't want a 3A manual control capture to affect
+the subsequent auto 3A capture results.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="controls_android.control.sceneMode">
+            <td class="entry_name
+             " rowspan="5">
+              android.<wbr/>control.<wbr/>scene<wbr/>Mode
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">byte</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">DISABLED</span>
+                    <span class="entry_type_enum_value">0</span>
+                    <span class="entry_type_enum_notes"><p>Indicates that no scene modes are set for a given capture request.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">FACE_PRIORITY</span>
+                    <span class="entry_type_enum_notes"><p>If face detection support exists,<wbr/> use face
+detection data for auto-focus,<wbr/> auto-white balance,<wbr/> and
+auto-exposure routines.<wbr/></p>
+<p>If face detection statistics are disabled
+(i.<wbr/>e.<wbr/> <a href="#controls_android.statistics.faceDetectMode">android.<wbr/>statistics.<wbr/>face<wbr/>Detect<wbr/>Mode</a> is set to OFF),<wbr/>
+this should still operate correctly (but will not return
+face detection statistics to the framework).<wbr/></p>
+<p>Unlike the other scene modes,<wbr/> <a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a>,<wbr/>
+<a href="#controls_android.control.awbMode">android.<wbr/>control.<wbr/>awb<wbr/>Mode</a>,<wbr/> and <a href="#controls_android.control.afMode">android.<wbr/>control.<wbr/>af<wbr/>Mode</a>
+remain active when FACE_<wbr/>PRIORITY is set.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">ACTION</span>
+                    <span class="entry_type_enum_optional">[optional]</span>
+                    <span class="entry_type_enum_notes"><p>Optimized for photos of quickly moving objects.<wbr/></p>
+<p>Similar to SPORTS.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">PORTRAIT</span>
+                    <span class="entry_type_enum_optional">[optional]</span>
+                    <span class="entry_type_enum_notes"><p>Optimized for still photos of people.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">LANDSCAPE</span>
+                    <span class="entry_type_enum_optional">[optional]</span>
+                    <span class="entry_type_enum_notes"><p>Optimized for photos of distant macroscopic objects.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">NIGHT</span>
+                    <span class="entry_type_enum_optional">[optional]</span>
+                    <span class="entry_type_enum_notes"><p>Optimized for low-light settings.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">NIGHT_PORTRAIT</span>
+                    <span class="entry_type_enum_optional">[optional]</span>
+                    <span class="entry_type_enum_notes"><p>Optimized for still photos of people in low-light
+settings.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">THEATRE</span>
+                    <span class="entry_type_enum_optional">[optional]</span>
+                    <span class="entry_type_enum_notes"><p>Optimized for dim,<wbr/> indoor settings where flash must
+remain off.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">BEACH</span>
+                    <span class="entry_type_enum_optional">[optional]</span>
+                    <span class="entry_type_enum_notes"><p>Optimized for bright,<wbr/> outdoor beach settings.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">SNOW</span>
+                    <span class="entry_type_enum_optional">[optional]</span>
+                    <span class="entry_type_enum_notes"><p>Optimized for bright,<wbr/> outdoor settings containing snow.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">SUNSET</span>
+                    <span class="entry_type_enum_optional">[optional]</span>
+                    <span class="entry_type_enum_notes"><p>Optimized for scenes of the setting sun.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">STEADYPHOTO</span>
+                    <span class="entry_type_enum_optional">[optional]</span>
+                    <span class="entry_type_enum_notes"><p>Optimized to avoid blurry photos due to small amounts of
+device motion (for example: due to hand shake).<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">FIREWORKS</span>
+                    <span class="entry_type_enum_optional">[optional]</span>
+                    <span class="entry_type_enum_notes"><p>Optimized for nighttime photos of fireworks.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">SPORTS</span>
+                    <span class="entry_type_enum_optional">[optional]</span>
+                    <span class="entry_type_enum_notes"><p>Optimized for photos of quickly moving people.<wbr/></p>
+<p>Similar to ACTION.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">PARTY</span>
+                    <span class="entry_type_enum_optional">[optional]</span>
+                    <span class="entry_type_enum_notes"><p>Optimized for dim,<wbr/> indoor settings with multiple moving
+people.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">CANDLELIGHT</span>
+                    <span class="entry_type_enum_optional">[optional]</span>
+                    <span class="entry_type_enum_notes"><p>Optimized for dim settings where the main light source
+is a flame.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">BARCODE</span>
+                    <span class="entry_type_enum_optional">[optional]</span>
+                    <span class="entry_type_enum_notes"><p>Optimized for accurately capturing a photo of barcode
+for use by camera applications that wish to read the
+barcode value.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">HIGH_SPEED_VIDEO</span>
+                    <span class="entry_type_enum_deprecated">[deprecated]</span>
+                    <span class="entry_type_enum_optional">[optional]</span>
+                    <span class="entry_type_enum_notes"><p>This is deprecated,<wbr/> please use <a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#createConstrainedHighSpeedCaptureSession">CameraDevice#createConstrainedHighSpeedCaptureSession</a>
+and <a href="https://developer.android.com/reference/android/hardware/camera2/CameraConstrainedHighSpeedCaptureSession.html#createHighSpeedRequestList">CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList</a>
+for high speed video recording.<wbr/></p>
+<p>Optimized for high speed video recording (frame rate &gt;=60fps) use case.<wbr/></p>
+<p>The supported high speed video sizes and fps ranges are specified in
+<a href="#static_android.control.availableHighSpeedVideoConfigurations">android.<wbr/>control.<wbr/>available<wbr/>High<wbr/>Speed<wbr/>Video<wbr/>Configurations</a>.<wbr/> To get desired
+output frame rates,<wbr/> the application is only allowed to select video size
+and fps range combinations listed in this static metadata.<wbr/> The fps range
+can be controlled via <a href="#controls_android.control.aeTargetFpsRange">android.<wbr/>control.<wbr/>ae<wbr/>Target<wbr/>Fps<wbr/>Range</a>.<wbr/></p>
+<p>In this mode,<wbr/> the camera device will override aeMode,<wbr/> awbMode,<wbr/> and afMode to
+ON,<wbr/> ON,<wbr/> and CONTINUOUS_<wbr/>VIDEO,<wbr/> respectively.<wbr/> All post-processing block mode
+controls will be overridden to be FAST.<wbr/> Therefore,<wbr/> no manual control of capture
+and post-processing parameters is possible.<wbr/> All other controls operate the
+same as when <a href="#controls_android.control.mode">android.<wbr/>control.<wbr/>mode</a> == AUTO.<wbr/> This means that all other
+android.<wbr/>control.<wbr/>* fields continue to work,<wbr/> such as</p>
+<ul>
+<li><a href="#controls_android.control.aeTargetFpsRange">android.<wbr/>control.<wbr/>ae<wbr/>Target<wbr/>Fps<wbr/>Range</a></li>
+<li><a href="#controls_android.control.aeExposureCompensation">android.<wbr/>control.<wbr/>ae<wbr/>Exposure<wbr/>Compensation</a></li>
+<li><a href="#controls_android.control.aeLock">android.<wbr/>control.<wbr/>ae<wbr/>Lock</a></li>
+<li><a href="#controls_android.control.awbLock">android.<wbr/>control.<wbr/>awb<wbr/>Lock</a></li>
+<li><a href="#controls_android.control.effectMode">android.<wbr/>control.<wbr/>effect<wbr/>Mode</a></li>
+<li><a href="#controls_android.control.aeRegions">android.<wbr/>control.<wbr/>ae<wbr/>Regions</a></li>
+<li><a href="#controls_android.control.afRegions">android.<wbr/>control.<wbr/>af<wbr/>Regions</a></li>
+<li><a href="#controls_android.control.awbRegions">android.<wbr/>control.<wbr/>awb<wbr/>Regions</a></li>
+<li><a href="#controls_android.control.afTrigger">android.<wbr/>control.<wbr/>af<wbr/>Trigger</a></li>
+<li><a href="#controls_android.control.aePrecaptureTrigger">android.<wbr/>control.<wbr/>ae<wbr/>Precapture<wbr/>Trigger</a></li>
+</ul>
+<p>Outside of android.<wbr/>control.<wbr/>*,<wbr/> the following controls will work:</p>
+<ul>
+<li><a href="#controls_android.flash.mode">android.<wbr/>flash.<wbr/>mode</a> (automatic flash for still capture will not work since aeMode is ON)</li>
+<li><a href="#controls_android.lens.opticalStabilizationMode">android.<wbr/>lens.<wbr/>optical<wbr/>Stabilization<wbr/>Mode</a> (if it is supported)</li>
+<li><a href="#controls_android.scaler.cropRegion">android.<wbr/>scaler.<wbr/>crop<wbr/>Region</a></li>
+<li><a href="#controls_android.statistics.faceDetectMode">android.<wbr/>statistics.<wbr/>face<wbr/>Detect<wbr/>Mode</a></li>
+</ul>
+<p>For high speed recording use case,<wbr/> the actual maximum supported frame rate may
+be lower than what camera can output,<wbr/> depending on the destination Surfaces for
+the image data.<wbr/> For example,<wbr/> if the destination surface is from video encoder,<wbr/>
+the application needs to check if the video encoder is capable of supporting the
+high frame rate for a given video size,<wbr/> or it will end up with lower recording
+frame rate.<wbr/> If the destination surface is from preview window,<wbr/> the preview frame
+rate will be bounded by the screen refresh rate.<wbr/></p>
+<p>The camera device will only support up to 2 output high speed streams
+(processed non-stalling format defined in <a href="#static_android.request.maxNumOutputStreams">android.<wbr/>request.<wbr/>max<wbr/>Num<wbr/>Output<wbr/>Streams</a>)
+in this mode.<wbr/> This control will be effective only if all of below conditions are true:</p>
+<ul>
+<li>The application created no more than maxNumHighSpeedStreams processed non-stalling
+format output streams,<wbr/> where maxNumHighSpeedStreams is calculated as
+min(2,<wbr/> <a href="#static_android.request.maxNumOutputStreams">android.<wbr/>request.<wbr/>max<wbr/>Num<wbr/>Output<wbr/>Streams</a>[Processed (but not-stalling)]).<wbr/></li>
+<li>The stream sizes are selected from the sizes reported by
+<a href="#static_android.control.availableHighSpeedVideoConfigurations">android.<wbr/>control.<wbr/>available<wbr/>High<wbr/>Speed<wbr/>Video<wbr/>Configurations</a>.<wbr/></li>
+<li>No processed non-stalling or raw streams are configured.<wbr/></li>
+</ul>
+<p>When the above conditions are NOT satisfied,<wbr/> the controls of this mode and
+<a href="#controls_android.control.aeTargetFpsRange">android.<wbr/>control.<wbr/>ae<wbr/>Target<wbr/>Fps<wbr/>Range</a> will be ignored by the camera device,<wbr/>
+the camera device will fall back to <a href="#controls_android.control.mode">android.<wbr/>control.<wbr/>mode</a> <code>==</code> AUTO,<wbr/>
+and the returned capture result metadata will give the fps range chosen
+by the camera device.<wbr/></p>
+<p>Switching into or out of this mode may trigger some camera ISP/<wbr/>sensor
+reconfigurations,<wbr/> which may introduce extra latency.<wbr/> It is recommended that
+the application avoids unnecessary scene mode switch as much as possible.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">HDR</span>
+                    <span class="entry_type_enum_optional">[optional]</span>
+                    <span class="entry_type_enum_notes"><p>Turn on a device-specific high dynamic range (HDR) mode.<wbr/></p>
+<p>In this scene mode,<wbr/> the camera device captures images
+that keep a larger range of scene illumination levels
+visible in the final image.<wbr/> For example,<wbr/> when taking a
+picture of an object in front of a bright window,<wbr/> both
+the object and the scene through the window may be
+visible when using HDR mode,<wbr/> while in normal AUTO mode,<wbr/>
+one or the other may be poorly exposed.<wbr/> As a tradeoff,<wbr/>
+HDR mode generally takes much longer to capture a single
+image,<wbr/> has no user control,<wbr/> and may have other artifacts
+depending on the HDR method used.<wbr/></p>
+<p>Therefore,<wbr/> HDR captures operate at a much slower rate
+than regular captures.<wbr/></p>
+<p>In this mode,<wbr/> on LIMITED or FULL devices,<wbr/> when a request
+is made with a <a href="#controls_android.control.captureIntent">android.<wbr/>control.<wbr/>capture<wbr/>Intent</a> of
+STILL_<wbr/>CAPTURE,<wbr/> the camera device will capture an image
+using a high dynamic range capture technique.<wbr/>  On LEGACY
+devices,<wbr/> captures that target a JPEG-format output will
+be captured with HDR,<wbr/> and the capture intent is not
+relevant.<wbr/></p>
+<p>The HDR capture may involve the device capturing a burst
+of images internally and combining them into one,<wbr/> or it
+may involve the device using specialized high dynamic
+range capture hardware.<wbr/> In all cases,<wbr/> a single image is
+produced in response to a capture request submitted
+while in HDR mode.<wbr/></p>
+<p>Since substantial post-processing is generally needed to
+produce an HDR image,<wbr/> only YUV and JPEG outputs are
+supported for LIMITED/<wbr/>FULL device HDR captures,<wbr/> and only
+JPEG outputs are supported for LEGACY HDR
+captures.<wbr/> Using a RAW output for HDR capture is not
+supported.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">FACE_PRIORITY_LOW_LIGHT</span>
+                    <span class="entry_type_enum_optional">[optional]</span>
+                    <span class="entry_type_enum_hidden">[hidden]</span>
+                    <span class="entry_type_enum_notes"><p>Same as FACE_<wbr/>PRIORITY scene mode,<wbr/> except that the camera
+device will choose higher sensitivity values (<a href="#controls_android.sensor.sensitivity">android.<wbr/>sensor.<wbr/>sensitivity</a>)
+under low light conditions.<wbr/></p>
+<p>The camera device may be tuned to expose the images in a reduced
+sensitivity range to produce the best quality images.<wbr/> For example,<wbr/>
+if the <a href="#static_android.sensor.info.sensitivityRange">android.<wbr/>sensor.<wbr/>info.<wbr/>sensitivity<wbr/>Range</a> gives range of [100,<wbr/> 1600],<wbr/>
+the camera device auto-exposure routine tuning process may limit the actual
+exposure sensitivity range to [100,<wbr/> 1200] to ensure that the noise level isn't
+excessive in order to preserve the image quality.<wbr/> Under this situation,<wbr/> the image under
+low light may be under-exposed when the sensor max exposure time (bounded by the
+<a href="#controls_android.control.aeTargetFpsRange">android.<wbr/>control.<wbr/>ae<wbr/>Target<wbr/>Fps<wbr/>Range</a> when <a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a> is one of the
+ON_<wbr/>* modes) and effective max sensitivity are reached.<wbr/> This scene mode allows the
+camera device auto-exposure routine to increase the sensitivity up to the max
+sensitivity specified by <a href="#static_android.sensor.info.sensitivityRange">android.<wbr/>sensor.<wbr/>info.<wbr/>sensitivity<wbr/>Range</a> when the scene is too
+dark and the max exposure time is reached.<wbr/> The captured images may be noisier
+compared with the images captured in normal FACE_<wbr/>PRIORITY mode; therefore,<wbr/> it is
+recommended that the application only use this scene mode when it is capable of
+reducing the noise level of the captured images.<wbr/></p>
+<p>Unlike the other scene modes,<wbr/> <a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a>,<wbr/>
+<a href="#controls_android.control.awbMode">android.<wbr/>control.<wbr/>awb<wbr/>Mode</a>,<wbr/> and <a href="#controls_android.control.afMode">android.<wbr/>control.<wbr/>af<wbr/>Mode</a>
+remain active when FACE_<wbr/>PRIORITY_<wbr/>LOW_<wbr/>LIGHT is set.<wbr/></p></span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Control for which scene mode is currently active.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p><a href="#static_android.control.availableSceneModes">android.<wbr/>control.<wbr/>available<wbr/>Scene<wbr/>Modes</a></p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_BC">BC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>Scene modes are custom camera modes optimized for a certain set of conditions and
+capture settings.<wbr/></p>
+<p>This is the mode that is active when
+<code><a href="#controls_android.control.mode">android.<wbr/>control.<wbr/>mode</a> == USE_<wbr/>SCENE_<wbr/>MODE</code>.<wbr/> Aside from FACE_<wbr/>PRIORITY,<wbr/> these modes will
+disable <a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a>,<wbr/> <a href="#controls_android.control.awbMode">android.<wbr/>control.<wbr/>awb<wbr/>Mode</a>,<wbr/> and <a href="#controls_android.control.afMode">android.<wbr/>control.<wbr/>af<wbr/>Mode</a>
+while in use.<wbr/></p>
+<p>The interpretation and implementation of these scene modes is left
+to the implementor of the camera device.<wbr/> Their behavior will not be
+consistent across all devices,<wbr/> and any given device may only implement
+a subset of these modes.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>HAL implementations that include scene modes are expected to provide
+the per-scene settings to use for <a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a>,<wbr/>
+<a href="#controls_android.control.awbMode">android.<wbr/>control.<wbr/>awb<wbr/>Mode</a>,<wbr/> and <a href="#controls_android.control.afMode">android.<wbr/>control.<wbr/>af<wbr/>Mode</a> in
+<a href="#static_android.control.sceneModeOverrides">android.<wbr/>control.<wbr/>scene<wbr/>Mode<wbr/>Overrides</a>.<wbr/></p>
+<p>For HIGH_<wbr/>SPEED_<wbr/>VIDEO mode,<wbr/> if it is included in <a href="#static_android.control.availableSceneModes">android.<wbr/>control.<wbr/>available<wbr/>Scene<wbr/>Modes</a>,<wbr/>
+the HAL must list supported video size and fps range in
+<a href="#static_android.control.availableHighSpeedVideoConfigurations">android.<wbr/>control.<wbr/>available<wbr/>High<wbr/>Speed<wbr/>Video<wbr/>Configurations</a>.<wbr/> For a given size,<wbr/> e.<wbr/>g.<wbr/>
+1280x720,<wbr/> if the HAL has two different sensor configurations for normal streaming
+mode and high speed streaming,<wbr/> when this scene mode is set/<wbr/>reset in a sequence of capture
+requests,<wbr/> the HAL may have to switch between different sensor modes.<wbr/>
+This mode is deprecated in HAL3.<wbr/>3,<wbr/> to support high speed video recording,<wbr/> please implement
+<a href="#static_android.control.availableHighSpeedVideoConfigurations">android.<wbr/>control.<wbr/>available<wbr/>High<wbr/>Speed<wbr/>Video<wbr/>Configurations</a> and CONSTRAINED_<wbr/>HIGH_<wbr/>SPEED_<wbr/>VIDEO
+capability defined in <a href="#static_android.request.availableCapabilities">android.<wbr/>request.<wbr/>available<wbr/>Capabilities</a>.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="controls_android.control.videoStabilizationMode">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>control.<wbr/>video<wbr/>Stabilization<wbr/>Mode
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">byte</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">OFF</span>
+                    <span class="entry_type_enum_notes"><p>Video stabilization is disabled.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">ON</span>
+                    <span class="entry_type_enum_notes"><p>Video stabilization is enabled.<wbr/></p></span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Whether video stabilization is
+active.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_BC">BC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>Video stabilization automatically warps images from
+the camera in order to stabilize motion between consecutive frames.<wbr/></p>
+<p>If enabled,<wbr/> video stabilization can modify the
+<a href="#controls_android.scaler.cropRegion">android.<wbr/>scaler.<wbr/>crop<wbr/>Region</a> to keep the video stream stabilized.<wbr/></p>
+<p>Switching between different video stabilization modes may take several
+frames to initialize,<wbr/> the camera device will report the current mode
+in capture result metadata.<wbr/> For example,<wbr/> When "ON" mode is requested,<wbr/>
+the video stabilization modes in the first several capture results may
+still be "OFF",<wbr/> and it will become "ON" when the initialization is
+done.<wbr/></p>
+<p>In addition,<wbr/> not all recording sizes or frame rates may be supported for
+stabilization by a device that reports stabilization support.<wbr/> It is guaranteed
+that an output targeting a MediaRecorder or MediaCodec will be stabilized if
+the recording resolution is less than or equal to 1920 x 1080 (width less than
+or equal to 1920,<wbr/> height less than or equal to 1080),<wbr/> and the recording
+frame rate is less than or equal to 30fps.<wbr/>  At other sizes,<wbr/> the CaptureResult
+<a href="#controls_android.control.videoStabilizationMode">android.<wbr/>control.<wbr/>video<wbr/>Stabilization<wbr/>Mode</a> field will return
+OFF if the recording output is not stabilized,<wbr/> or if there are no output
+Surface types that can be stabilized.<wbr/></p>
+<p>If a camera device supports both this mode and OIS
+(<a href="#controls_android.lens.opticalStabilizationMode">android.<wbr/>lens.<wbr/>optical<wbr/>Stabilization<wbr/>Mode</a>),<wbr/> turning both modes on may
+produce undesirable interaction,<wbr/> so it is recommended not to enable
+both at the same time.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+        
+
+      <!-- end of kind -->
+      </tbody>
+      <tr><td colspan="6" class="kind">static</td></tr>
+
+      <thead class="entries_header">
+        <tr>
+          <th class="th_name">Property Name</th>
+          <th class="th_type">Type</th>
+          <th class="th_description">Description</th>
+          <th class="th_units">Units</th>
+          <th class="th_range">Range</th>
+          <th class="th_tags">Tags</th>
+        </tr>
+      </thead>
+
+      <tbody>
+
+        
+
+        
+
+        
+
+        
+
+                
+          <tr class="entry" id="static_android.control.aeAvailableAntibandingModes">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>control.<wbr/>ae<wbr/>Available<wbr/>Antibanding<wbr/>Modes
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">byte</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  n
+                </span>
+              <span class="entry_type_visibility"> [public as enumList]</span>
+
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+                <div class="entry_type_notes">list of enums</div>
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>List of auto-exposure antibanding modes for <a href="#controls_android.control.aeAntibandingMode">android.<wbr/>control.<wbr/>ae<wbr/>Antibanding<wbr/>Mode</a> that are
+supported by this camera device.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p>Any value listed in <a href="#controls_android.control.aeAntibandingMode">android.<wbr/>control.<wbr/>ae<wbr/>Antibanding<wbr/>Mode</a></p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_BC">BC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>Not all of the auto-exposure anti-banding modes may be
+supported by a given camera device.<wbr/> This field lists the
+valid anti-banding modes that the application may request
+for this camera device with the
+<a href="#controls_android.control.aeAntibandingMode">android.<wbr/>control.<wbr/>ae<wbr/>Antibanding<wbr/>Mode</a> control.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.control.aeAvailableModes">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>control.<wbr/>ae<wbr/>Available<wbr/>Modes
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">byte</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  n
+                </span>
+              <span class="entry_type_visibility"> [public as enumList]</span>
+
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+                <div class="entry_type_notes">list of enums</div>
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>List of auto-exposure modes for <a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a> that are supported by this camera
+device.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p>Any value listed in <a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a></p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_BC">BC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>Not all the auto-exposure modes may be supported by a
+given camera device,<wbr/> especially if no flash unit is
+available.<wbr/> This entry lists the valid modes for
+<a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a> for this camera device.<wbr/></p>
+<p>All camera devices support ON,<wbr/> and all camera devices with flash
+units support ON_<wbr/>AUTO_<wbr/>FLASH and ON_<wbr/>ALWAYS_<wbr/>FLASH.<wbr/></p>
+<p>FULL mode camera devices always support OFF mode,<wbr/>
+which enables application control of camera exposure time,<wbr/>
+sensitivity,<wbr/> and frame duration.<wbr/></p>
+<p>LEGACY mode camera devices never support OFF mode.<wbr/>
+LIMITED mode devices support OFF if they support the MANUAL_<wbr/>SENSOR
+capability.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.control.aeAvailableTargetFpsRanges">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>control.<wbr/>ae<wbr/>Available<wbr/>Target<wbr/>Fps<wbr/>Ranges
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int32</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  2 x n
+                </span>
+              <span class="entry_type_visibility"> [public as rangeInt]</span>
+
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+                <div class="entry_type_notes">list of pairs of frame rates</div>
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>List of frame rate ranges for <a href="#controls_android.control.aeTargetFpsRange">android.<wbr/>control.<wbr/>ae<wbr/>Target<wbr/>Fps<wbr/>Range</a> supported by
+this camera device.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+              Frames per second (FPS)
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_BC">BC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>For devices at the LEGACY level or above:</p>
+<ul>
+<li>
+<p>For constant-framerate recording,<wbr/> for each normal
+<a href="https://developer.android.com/reference/android/media/CamcorderProfile.html">CamcorderProfile</a>,<wbr/> that is,<wbr/> a
+<a href="https://developer.android.com/reference/android/media/CamcorderProfile.html">CamcorderProfile</a> that has
+<a href="https://developer.android.com/reference/android/media/CamcorderProfile.html#quality">quality</a> in
+the range [<a href="https://developer.android.com/reference/android/media/CamcorderProfile.html#QUALITY_LOW">QUALITY_<wbr/>LOW</a>,<wbr/>
+<a href="https://developer.android.com/reference/android/media/CamcorderProfile.html#QUALITY_2160P">QUALITY_<wbr/>2160P</a>],<wbr/> if the profile is
+supported by the device and has
+<a href="https://developer.android.com/reference/android/media/CamcorderProfile.html#videoFrameRate">videoFrameRate</a> <code>x</code>,<wbr/> this list will
+always include (<code>x</code>,<wbr/><code>x</code>).<wbr/></p>
+</li>
+<li>
+<p>Also,<wbr/> a camera device must either not support any
+<a href="https://developer.android.com/reference/android/media/CamcorderProfile.html">CamcorderProfile</a>,<wbr/>
+or support at least one
+normal <a href="https://developer.android.com/reference/android/media/CamcorderProfile.html">CamcorderProfile</a> that has
+<a href="https://developer.android.com/reference/android/media/CamcorderProfile.html#videoFrameRate">videoFrameRate</a> <code>x</code> &gt;= 24.<wbr/></p>
+</li>
+</ul>
+<p>For devices at the LIMITED level or above:</p>
+<ul>
+<li>For YUV_<wbr/>420_<wbr/>888 burst capture use case,<wbr/> this list will always include (<code>min</code>,<wbr/> <code>max</code>)
+and (<code>max</code>,<wbr/> <code>max</code>) where <code>min</code> &lt;= 15 and <code>max</code> = the maximum output frame rate of the
+maximum YUV_<wbr/>420_<wbr/>888 output size.<wbr/></li>
+</ul>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.control.aeCompensationRange">
+            <td class="entry_name
+             " rowspan="1">
+              android.<wbr/>control.<wbr/>ae<wbr/>Compensation<wbr/>Range
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int32</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  2
+                </span>
+              <span class="entry_type_visibility"> [public as rangeInt]</span>
+
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Maximum and minimum exposure compensation values for
+<a href="#controls_android.control.aeExposureCompensation">android.<wbr/>control.<wbr/>ae<wbr/>Exposure<wbr/>Compensation</a>,<wbr/> in counts of <a href="#static_android.control.aeCompensationStep">android.<wbr/>control.<wbr/>ae<wbr/>Compensation<wbr/>Step</a>,<wbr/>
+that are supported by this camera device.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p>Range [0,<wbr/>0] indicates that exposure compensation is not supported.<wbr/></p>
+<p>For LIMITED and FULL devices,<wbr/> range must follow below requirements if exposure
+compensation is supported (<code>range != [0,<wbr/> 0]</code>):</p>
+<p><code>Min.<wbr/>exposure compensation * <a href="#static_android.control.aeCompensationStep">android.<wbr/>control.<wbr/>ae<wbr/>Compensation<wbr/>Step</a> &lt;= -2 EV</code></p>
+<p><code>Max.<wbr/>exposure compensation * <a href="#static_android.control.aeCompensationStep">android.<wbr/>control.<wbr/>ae<wbr/>Compensation<wbr/>Step</a> &gt;= 2 EV</code></p>
+<p>LEGACY devices may support a smaller range than this.<wbr/></p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_BC">BC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.control.aeCompensationStep">
+            <td class="entry_name
+             " rowspan="5">
+              android.<wbr/>control.<wbr/>ae<wbr/>Compensation<wbr/>Step
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">rational</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Smallest step by which the exposure compensation
+can be changed.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+              Exposure Value (EV)
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_BC">BC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>This is the unit for <a href="#controls_android.control.aeExposureCompensation">android.<wbr/>control.<wbr/>ae<wbr/>Exposure<wbr/>Compensation</a>.<wbr/> For example,<wbr/> if this key has
+a value of <code>1/<wbr/>2</code>,<wbr/> then a setting of <code>-2</code> for <a href="#controls_android.control.aeExposureCompensation">android.<wbr/>control.<wbr/>ae<wbr/>Exposure<wbr/>Compensation</a> means
+that the target EV offset for the auto-exposure routine is -1 EV.<wbr/></p>
+<p>One unit of EV compensation changes the brightness of the captured image by a factor
+of two.<wbr/> +1 EV doubles the image brightness,<wbr/> while -1 EV halves the image brightness.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>This must be less than or equal to 1/<wbr/>2.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.control.afAvailableModes">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>control.<wbr/>af<wbr/>Available<wbr/>Modes
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">byte</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  n
+                </span>
+              <span class="entry_type_visibility"> [public as enumList]</span>
+
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+                <div class="entry_type_notes">List of enums</div>
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>List of auto-focus (AF) modes for <a href="#controls_android.control.afMode">android.<wbr/>control.<wbr/>af<wbr/>Mode</a> that are
+supported by this camera device.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p>Any value listed in <a href="#controls_android.control.afMode">android.<wbr/>control.<wbr/>af<wbr/>Mode</a></p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_BC">BC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>Not all the auto-focus modes may be supported by a
+given camera device.<wbr/> This entry lists the valid modes for
+<a href="#controls_android.control.afMode">android.<wbr/>control.<wbr/>af<wbr/>Mode</a> for this camera device.<wbr/></p>
+<p>All LIMITED and FULL mode camera devices will support OFF mode,<wbr/> and all
+camera devices with adjustable focuser units
+(<code><a href="#static_android.lens.info.minimumFocusDistance">android.<wbr/>lens.<wbr/>info.<wbr/>minimum<wbr/>Focus<wbr/>Distance</a> &gt; 0</code>) will support AUTO mode.<wbr/></p>
+<p>LEGACY devices will support OFF mode only if they support
+focusing to infinity (by also setting <a href="#controls_android.lens.focusDistance">android.<wbr/>lens.<wbr/>focus<wbr/>Distance</a> to
+<code>0.<wbr/>0f</code>).<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.control.availableEffects">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>control.<wbr/>available<wbr/>Effects
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">byte</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  n
+                </span>
+              <span class="entry_type_visibility"> [public as enumList]</span>
+
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+                <div class="entry_type_notes">List of enums (android.<wbr/>control.<wbr/>effect<wbr/>Mode).<wbr/></div>
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>List of color effects for <a href="#controls_android.control.effectMode">android.<wbr/>control.<wbr/>effect<wbr/>Mode</a> that are supported by this camera
+device.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p>Any value listed in <a href="#controls_android.control.effectMode">android.<wbr/>control.<wbr/>effect<wbr/>Mode</a></p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_BC">BC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>This list contains the color effect modes that can be applied to
+images produced by the camera device.<wbr/>
+Implementations are not expected to be consistent across all devices.<wbr/>
+If no color effect modes are available for a device,<wbr/> this will only list
+OFF.<wbr/></p>
+<p>A color effect will only be applied if
+<a href="#controls_android.control.mode">android.<wbr/>control.<wbr/>mode</a> != OFF.<wbr/>  OFF is always included in this list.<wbr/></p>
+<p>This control has no effect on the operation of other control routines such
+as auto-exposure,<wbr/> white balance,<wbr/> or focus.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.control.availableSceneModes">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>control.<wbr/>available<wbr/>Scene<wbr/>Modes
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">byte</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  n
+                </span>
+              <span class="entry_type_visibility"> [public as enumList]</span>
+
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+                <div class="entry_type_notes">List of enums (android.<wbr/>control.<wbr/>scene<wbr/>Mode).<wbr/></div>
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>List of scene modes for <a href="#controls_android.control.sceneMode">android.<wbr/>control.<wbr/>scene<wbr/>Mode</a> that are supported by this camera
+device.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p>Any value listed in <a href="#controls_android.control.sceneMode">android.<wbr/>control.<wbr/>scene<wbr/>Mode</a></p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_BC">BC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>This list contains scene modes that can be set for the camera device.<wbr/>
+Only scene modes that have been fully implemented for the
+camera device may be included here.<wbr/> Implementations are not expected
+to be consistent across all devices.<wbr/></p>
+<p>If no scene modes are supported by the camera device,<wbr/> this
+will be set to DISABLED.<wbr/> Otherwise DISABLED will not be listed.<wbr/></p>
+<p>FACE_<wbr/>PRIORITY is always listed if face detection is
+supported (i.<wbr/>e.<wbr/><code><a href="#static_android.statistics.info.maxFaceCount">android.<wbr/>statistics.<wbr/>info.<wbr/>max<wbr/>Face<wbr/>Count</a> &gt;
+0</code>).<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.control.availableVideoStabilizationModes">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>control.<wbr/>available<wbr/>Video<wbr/>Stabilization<wbr/>Modes
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">byte</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  n
+                </span>
+              <span class="entry_type_visibility"> [public as enumList]</span>
+
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+                <div class="entry_type_notes">List of enums.<wbr/></div>
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>List of video stabilization modes for <a href="#controls_android.control.videoStabilizationMode">android.<wbr/>control.<wbr/>video<wbr/>Stabilization<wbr/>Mode</a>
+that are supported by this camera device.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p>Any value listed in <a href="#controls_android.control.videoStabilizationMode">android.<wbr/>control.<wbr/>video<wbr/>Stabilization<wbr/>Mode</a></p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_BC">BC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>OFF will always be listed.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.control.awbAvailableModes">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>control.<wbr/>awb<wbr/>Available<wbr/>Modes
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">byte</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  n
+                </span>
+              <span class="entry_type_visibility"> [public as enumList]</span>
+
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+                <div class="entry_type_notes">List of enums</div>
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>List of auto-white-balance modes for <a href="#controls_android.control.awbMode">android.<wbr/>control.<wbr/>awb<wbr/>Mode</a> that are supported by this
+camera device.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p>Any value listed in <a href="#controls_android.control.awbMode">android.<wbr/>control.<wbr/>awb<wbr/>Mode</a></p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_BC">BC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>Not all the auto-white-balance modes may be supported by a
+given camera device.<wbr/> This entry lists the valid modes for
+<a href="#controls_android.control.awbMode">android.<wbr/>control.<wbr/>awb<wbr/>Mode</a> for this camera device.<wbr/></p>
+<p>All camera devices will support ON mode.<wbr/></p>
+<p>Camera devices that support the MANUAL_<wbr/>POST_<wbr/>PROCESSING capability will always support OFF
+mode,<wbr/> which enables application control of white balance,<wbr/> by using
+<a href="#controls_android.colorCorrection.transform">android.<wbr/>color<wbr/>Correction.<wbr/>transform</a> and <a href="#controls_android.colorCorrection.gains">android.<wbr/>color<wbr/>Correction.<wbr/>gains</a>(<a href="#controls_android.colorCorrection.mode">android.<wbr/>color<wbr/>Correction.<wbr/>mode</a> must be set to TRANSFORM_<wbr/>MATRIX).<wbr/> This includes all FULL
+mode camera devices.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.control.maxRegions">
+            <td class="entry_name
+             " rowspan="1">
+              android.<wbr/>control.<wbr/>max<wbr/>Regions
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int32</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  3
+                </span>
+              <span class="entry_type_visibility"> [hidden]</span>
+
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>List of the maximum number of regions that can be used for metering in
+auto-exposure (AE),<wbr/> auto-white balance (AWB),<wbr/> and auto-focus (AF);
+this corresponds to the maximum number of elements in
+<a href="#controls_android.control.aeRegions">android.<wbr/>control.<wbr/>ae<wbr/>Regions</a>,<wbr/> <a href="#controls_android.control.awbRegions">android.<wbr/>control.<wbr/>awb<wbr/>Regions</a>,<wbr/>
+and <a href="#controls_android.control.afRegions">android.<wbr/>control.<wbr/>af<wbr/>Regions</a>.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p>Value must be &gt;= 0 for each element.<wbr/> For full-capability devices
+this value must be &gt;= 1 for AE and AF.<wbr/> The order of the elements is:
+<code>(AE,<wbr/> AWB,<wbr/> AF)</code>.<wbr/></p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_BC">BC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.control.maxRegionsAe">
+            <td class="entry_name
+             " rowspan="5">
+              android.<wbr/>control.<wbr/>max<wbr/>Regions<wbr/>Ae
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int32</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+              <span class="entry_type_synthetic">[synthetic] </span>
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>The maximum number of metering regions that can be used by the auto-exposure (AE)
+routine.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p>Value will be &gt;= 0.<wbr/> For FULL-capability devices,<wbr/> this
+value will be &gt;= 1.<wbr/></p>
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>This corresponds to the maximum allowed number of elements in
+<a href="#controls_android.control.aeRegions">android.<wbr/>control.<wbr/>ae<wbr/>Regions</a>.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>This entry is private to the framework.<wbr/> Fill in
+maxRegions to have this entry be automatically populated.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.control.maxRegionsAwb">
+            <td class="entry_name
+             " rowspan="5">
+              android.<wbr/>control.<wbr/>max<wbr/>Regions<wbr/>Awb
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int32</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+              <span class="entry_type_synthetic">[synthetic] </span>
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>The maximum number of metering regions that can be used by the auto-white balance (AWB)
+routine.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p>Value will be &gt;= 0.<wbr/></p>
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>This corresponds to the maximum allowed number of elements in
+<a href="#controls_android.control.awbRegions">android.<wbr/>control.<wbr/>awb<wbr/>Regions</a>.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>This entry is private to the framework.<wbr/> Fill in
+maxRegions to have this entry be automatically populated.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.control.maxRegionsAf">
+            <td class="entry_name
+             " rowspan="5">
+              android.<wbr/>control.<wbr/>max<wbr/>Regions<wbr/>Af
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int32</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+              <span class="entry_type_synthetic">[synthetic] </span>
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>The maximum number of metering regions that can be used by the auto-focus (AF) routine.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p>Value will be &gt;= 0.<wbr/> For FULL-capability devices,<wbr/> this
+value will be &gt;= 1.<wbr/></p>
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>This corresponds to the maximum allowed number of elements in
+<a href="#controls_android.control.afRegions">android.<wbr/>control.<wbr/>af<wbr/>Regions</a>.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>This entry is private to the framework.<wbr/> Fill in
+maxRegions to have this entry be automatically populated.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.control.sceneModeOverrides">
+            <td class="entry_name
+             " rowspan="5">
+              android.<wbr/>control.<wbr/>scene<wbr/>Mode<wbr/>Overrides
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">byte</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  3 x length(availableSceneModes)
+                </span>
+              <span class="entry_type_visibility"> [system]</span>
+
+
+              <span class="entry_type_hwlevel">[limited] </span>
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Ordered list of auto-exposure,<wbr/> auto-white balance,<wbr/> and auto-focus
+settings to use with each available scene mode.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p>For each available scene mode,<wbr/> the list must contain three
+entries containing the <a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a>,<wbr/>
+<a href="#controls_android.control.awbMode">android.<wbr/>control.<wbr/>awb<wbr/>Mode</a>,<wbr/> and <a href="#controls_android.control.afMode">android.<wbr/>control.<wbr/>af<wbr/>Mode</a> values used
+by the camera device.<wbr/> The entry order is <code>(aeMode,<wbr/> awbMode,<wbr/> afMode)</code>
+where aeMode has the lowest index position.<wbr/></p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_BC">BC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>When a scene mode is enabled,<wbr/> the camera device is expected
+to override <a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a>,<wbr/> <a href="#controls_android.control.awbMode">android.<wbr/>control.<wbr/>awb<wbr/>Mode</a>,<wbr/>
+and <a href="#controls_android.control.afMode">android.<wbr/>control.<wbr/>af<wbr/>Mode</a> with its preferred settings for
+that scene mode.<wbr/></p>
+<p>The order of this list matches that of availableSceneModes,<wbr/>
+with 3 entries for each mode.<wbr/>  The overrides listed
+for FACE_<wbr/>PRIORITY and FACE_<wbr/>PRIORITY_<wbr/>LOW_<wbr/>LIGHT (if supported) are ignored,<wbr/>
+since for that mode the application-set <a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a>,<wbr/>
+<a href="#controls_android.control.awbMode">android.<wbr/>control.<wbr/>awb<wbr/>Mode</a>,<wbr/> and <a href="#controls_android.control.afMode">android.<wbr/>control.<wbr/>af<wbr/>Mode</a> values are
+used instead,<wbr/> matching the behavior when <a href="#controls_android.control.mode">android.<wbr/>control.<wbr/>mode</a>
+is set to AUTO.<wbr/> It is recommended that the FACE_<wbr/>PRIORITY and
+FACE_<wbr/>PRIORITY_<wbr/>LOW_<wbr/>LIGHT (if supported) overrides should be set to 0.<wbr/></p>
+<p>For example,<wbr/> if availableSceneModes contains
+<code>(FACE_<wbr/>PRIORITY,<wbr/> ACTION,<wbr/> NIGHT)</code>,<wbr/>  then the camera framework
+expects sceneModeOverrides to have 9 entries formatted like:
+<code>(0,<wbr/> 0,<wbr/> 0,<wbr/> ON_<wbr/>AUTO_<wbr/>FLASH,<wbr/> AUTO,<wbr/> CONTINUOUS_<wbr/>PICTURE,<wbr/>
+ON_<wbr/>AUTO_<wbr/>FLASH,<wbr/> INCANDESCENT,<wbr/> AUTO)</code>.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>To maintain backward compatibility,<wbr/> this list will be made available
+in the static metadata of the camera service.<wbr/>  The camera service will
+use these values to set <a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a>,<wbr/>
+<a href="#controls_android.control.awbMode">android.<wbr/>control.<wbr/>awb<wbr/>Mode</a>,<wbr/> and <a href="#controls_android.control.afMode">android.<wbr/>control.<wbr/>af<wbr/>Mode</a> when using a scene
+mode other than FACE_<wbr/>PRIORITY and FACE_<wbr/>PRIORITY_<wbr/>LOW_<wbr/>LIGHT (if supported).<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.control.availableHighSpeedVideoConfigurations">
+            <td class="entry_name
+             " rowspan="5">
+              android.<wbr/>control.<wbr/>available<wbr/>High<wbr/>Speed<wbr/>Video<wbr/>Configurations
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int32</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  5 x n
+                </span>
+              <span class="entry_type_visibility"> [hidden as highSpeedVideoConfiguration]</span>
+
+
+              <span class="entry_type_hwlevel">[limited] </span>
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>List of available high speed video size,<wbr/> fps range and max batch size configurations
+supported by the camera device,<wbr/> in the format of (width,<wbr/> height,<wbr/> fps_<wbr/>min,<wbr/> fps_<wbr/>max,<wbr/> batch_<wbr/>size_<wbr/>max).<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p>For each configuration,<wbr/> the fps_<wbr/>max &gt;= 120fps.<wbr/></p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_V1">V1</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>When CONSTRAINED_<wbr/>HIGH_<wbr/>SPEED_<wbr/>VIDEO is supported in <a href="#static_android.request.availableCapabilities">android.<wbr/>request.<wbr/>available<wbr/>Capabilities</a>,<wbr/>
+this metadata will list the supported high speed video size,<wbr/> fps range and max batch size
+configurations.<wbr/> All the sizes listed in this configuration will be a subset of the sizes
+reported by <a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getOutputSizes">StreamConfigurationMap#getOutputSizes</a>
+for processed non-stalling formats.<wbr/></p>
+<p>For the high speed video use case,<wbr/> the application must
+select the video size and fps range from this metadata to configure the recording and
+preview streams and setup the recording requests.<wbr/> For example,<wbr/> if the application intends
+to do high speed recording,<wbr/> it can select the maximum size reported by this metadata to
+configure output streams.<wbr/> Once the size is selected,<wbr/> application can filter this metadata
+by selected size and get the supported fps ranges,<wbr/> and use these fps ranges to setup the
+recording requests.<wbr/> Note that for the use case of multiple output streams,<wbr/> application
+must select one unique size from this metadata to use (e.<wbr/>g.,<wbr/> preview and recording streams
+must have the same size).<wbr/> Otherwise,<wbr/> the high speed capture session creation will fail.<wbr/></p>
+<p>The min and max fps will be multiple times of 30fps.<wbr/></p>
+<p>High speed video streaming exerts significant performance pressure on camera hardware,<wbr/>
+to achieve efficient high speed streaming,<wbr/> the camera device may have to aggregate
+multiple frames together and send to camera device for processing where the request
+controls are same for all the frames in this batch.<wbr/> Max batch size indicates
+the max possible number of frames the camera device will group together for this high
+speed stream configuration.<wbr/> This max batch size will be used to generate a high speed
+recording request list by
+<a href="https://developer.android.com/reference/android/hardware/camera2/CameraConstrainedHighSpeedCaptureSession.html#createHighSpeedRequestList">CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList</a>.<wbr/>
+The max batch size for each configuration will satisfy below conditions:</p>
+<ul>
+<li>Each max batch size will be a divisor of its corresponding fps_<wbr/>max /<wbr/> 30.<wbr/> For example,<wbr/>
+if max_<wbr/>fps is 300,<wbr/> max batch size will only be 1,<wbr/> 2,<wbr/> 5,<wbr/> or 10.<wbr/></li>
+<li>The camera device may choose smaller internal batch size for each configuration,<wbr/> but
+the actual batch size will be a divisor of max batch size.<wbr/> For example,<wbr/> if the max batch
+size is 8,<wbr/> the actual batch size used by camera device will only be 1,<wbr/> 2,<wbr/> 4,<wbr/> or 8.<wbr/></li>
+<li>The max batch size in each configuration entry must be no larger than 32.<wbr/></li>
+</ul>
+<p>The camera device doesn't have to support batch mode to achieve high speed video recording,<wbr/>
+in such case,<wbr/> batch_<wbr/>size_<wbr/>max will be reported as 1 in each configuration entry.<wbr/></p>
+<p>The fps ranges in this configuration list can only be used to create requests
+that are submitted to a high speed camera capture session created by
+<a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#createConstrainedHighSpeedCaptureSession">CameraDevice#createConstrainedHighSpeedCaptureSession</a>.<wbr/>
+The fps ranges reported in this metadata must not be used to setup capture requests for
+normal capture session,<wbr/> or it will cause request error.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>All the sizes listed in this configuration will be a subset of the sizes reported by
+<a href="#static_android.scaler.availableStreamConfigurations">android.<wbr/>scaler.<wbr/>available<wbr/>Stream<wbr/>Configurations</a> for processed non-stalling output formats.<wbr/>
+Note that for all high speed video configurations,<wbr/> HAL must be able to support a minimum
+of two streams,<wbr/> though the application might choose to configure just one stream.<wbr/></p>
+<p>The HAL may support multiple sensor modes for high speed outputs,<wbr/> for example,<wbr/> 120fps
+sensor mode and 120fps recording,<wbr/> 240fps sensor mode for 240fps recording.<wbr/> The application
+usually starts preview first,<wbr/> then starts recording.<wbr/> To avoid sensor mode switch caused
+stutter when starting recording as much as possible,<wbr/> the application may want to ensure
+the same sensor mode is used for preview and recording.<wbr/> Therefore,<wbr/> the HAL must advertise
+the variable fps range [30,<wbr/> fps_<wbr/>max] for each fixed fps range in this configuration list.<wbr/>
+For example,<wbr/> if the HAL advertises [120,<wbr/> 120] and [240,<wbr/> 240],<wbr/> the HAL must also advertise
+[30,<wbr/> 120] and [30,<wbr/> 240] for each configuration.<wbr/> In doing so,<wbr/> if the application intends to
+do 120fps recording,<wbr/> it can select [30,<wbr/> 120] to start preview,<wbr/> and [120,<wbr/> 120] to start
+recording.<wbr/> For these variable fps ranges,<wbr/> it's up to the HAL to decide the actual fps
+values that are suitable for smooth preview streaming.<wbr/> If the HAL sees different max_<wbr/>fps
+values that fall into different sensor modes in a sequence of requests,<wbr/> the HAL must
+switch the sensor mode as quickly as possible to minimize the mode switch caused stutter.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.control.aeLockAvailable">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>control.<wbr/>ae<wbr/>Lock<wbr/>Available
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">byte</span>
+
+              <span class="entry_type_visibility"> [public as boolean]</span>
+
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">FALSE</span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">TRUE</span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Whether the camera device supports <a href="#controls_android.control.aeLock">android.<wbr/>control.<wbr/>ae<wbr/>Lock</a></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_BC">BC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>Devices with MANUAL_<wbr/>SENSOR capability or BURST_<wbr/>CAPTURE capability will always
+list <code>true</code>.<wbr/> This includes FULL devices.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.control.awbLockAvailable">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>control.<wbr/>awb<wbr/>Lock<wbr/>Available
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">byte</span>
+
+              <span class="entry_type_visibility"> [public as boolean]</span>
+
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">FALSE</span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">TRUE</span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Whether the camera device supports <a href="#controls_android.control.awbLock">android.<wbr/>control.<wbr/>awb<wbr/>Lock</a></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_BC">BC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>Devices with MANUAL_<wbr/>POST_<wbr/>PROCESSING capability or BURST_<wbr/>CAPTURE capability will
+always list <code>true</code>.<wbr/> This includes FULL devices.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.control.availableModes">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>control.<wbr/>available<wbr/>Modes
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">byte</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  n
+                </span>
+              <span class="entry_type_visibility"> [public as enumList]</span>
+
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+                <div class="entry_type_notes">List of enums (android.<wbr/>control.<wbr/>mode).<wbr/></div>
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>List of control modes for <a href="#controls_android.control.mode">android.<wbr/>control.<wbr/>mode</a> that are supported by this camera
+device.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p>Any value listed in <a href="#controls_android.control.mode">android.<wbr/>control.<wbr/>mode</a></p>
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>This list contains control modes that can be set for the camera device.<wbr/>
+LEGACY mode devices will always support AUTO mode.<wbr/> LIMITED and FULL
+devices will always support OFF,<wbr/> AUTO modes.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+        
+
+      <!-- end of kind -->
+      </tbody>
+      <tr><td colspan="6" class="kind">dynamic</td></tr>
+
+      <thead class="entries_header">
+        <tr>
+          <th class="th_name">Property Name</th>
+          <th class="th_type">Type</th>
+          <th class="th_description">Description</th>
+          <th class="th_units">Units</th>
+          <th class="th_range">Range</th>
+          <th class="th_tags">Tags</th>
+        </tr>
+      </thead>
+
+      <tbody>
+
+        
+
+        
+
+        
+
+        
+
+                
+          <tr class="entry" id="dynamic_android.control.aePrecaptureId">
+            <td class="entry_name
+                entry_name_deprecated
+             " rowspan="3">
+              android.<wbr/>control.<wbr/>ae<wbr/>Precapture<wbr/>Id
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int32</span>
+
+              <span class="entry_type_visibility"> [system]</span>
+
+
+
+              <span class="entry_type_deprecated">[deprecated] </span>
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>The ID sent with the latest
+CAMERA2_<wbr/>TRIGGER_<wbr/>PRECAPTURE_<wbr/>METERING call</p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p><span class="entry_range_deprecated">Deprecated</span>. Do not use.</p>
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>Must be 0 if no
+CAMERA2_<wbr/>TRIGGER_<wbr/>PRECAPTURE_<wbr/>METERING trigger received yet
+by HAL.<wbr/> Always updated even if AE algorithm ignores the
+trigger</p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="dynamic_android.control.aeAntibandingMode">
+            <td class="entry_name
+             " rowspan="5">
+              android.<wbr/>control.<wbr/>ae<wbr/>Antibanding<wbr/>Mode
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">byte</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">OFF</span>
+                    <span class="entry_type_enum_notes"><p>The camera device will not adjust exposure duration to
+avoid banding problems.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">50HZ</span>
+                    <span class="entry_type_enum_notes"><p>The camera device will adjust exposure duration to
+avoid banding problems with 50Hz illumination sources.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">60HZ</span>
+                    <span class="entry_type_enum_notes"><p>The camera device will adjust exposure duration to
+avoid banding problems with 60Hz illumination
+sources.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">AUTO</span>
+                    <span class="entry_type_enum_notes"><p>The camera device will automatically adapt its
+antibanding routine to the current illumination
+condition.<wbr/> This is the default mode if AUTO is
+available on given camera device.<wbr/></p></span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>The desired setting for the camera device's auto-exposure
+algorithm's antibanding compensation.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p><a href="#static_android.control.aeAvailableAntibandingModes">android.<wbr/>control.<wbr/>ae<wbr/>Available<wbr/>Antibanding<wbr/>Modes</a></p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_BC">BC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>Some kinds of lighting fixtures,<wbr/> such as some fluorescent
+lights,<wbr/> flicker at the rate of the power supply frequency
+(60Hz or 50Hz,<wbr/> depending on country).<wbr/> While this is
+typically not noticeable to a person,<wbr/> it can be visible to
+a camera device.<wbr/> If a camera sets its exposure time to the
+wrong value,<wbr/> the flicker may become visible in the
+viewfinder as flicker or in a final captured image,<wbr/> as a
+set of variable-brightness bands across the image.<wbr/></p>
+<p>Therefore,<wbr/> the auto-exposure routines of camera devices
+include antibanding routines that ensure that the chosen
+exposure value will not cause such banding.<wbr/> The choice of
+exposure time depends on the rate of flicker,<wbr/> which the
+camera device can detect automatically,<wbr/> or the expected
+rate can be selected by the application using this
+control.<wbr/></p>
+<p>A given camera device may not support all of the possible
+options for the antibanding mode.<wbr/> The
+<a href="#static_android.control.aeAvailableAntibandingModes">android.<wbr/>control.<wbr/>ae<wbr/>Available<wbr/>Antibanding<wbr/>Modes</a> key contains
+the available modes for a given camera device.<wbr/></p>
+<p>AUTO mode is the default if it is available on given
+camera device.<wbr/> When AUTO mode is not available,<wbr/> the
+default will be either 50HZ or 60HZ,<wbr/> and both 50HZ
+and 60HZ will be available.<wbr/></p>
+<p>If manual exposure control is enabled (by setting
+<a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a> or <a href="#controls_android.control.mode">android.<wbr/>control.<wbr/>mode</a> to OFF),<wbr/>
+then this setting has no effect,<wbr/> and the application must
+ensure it selects exposure times that do not cause banding
+issues.<wbr/> The <a href="#dynamic_android.statistics.sceneFlicker">android.<wbr/>statistics.<wbr/>scene<wbr/>Flicker</a> key can assist
+the application in this.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>For all capture request templates,<wbr/> this field must be set
+to AUTO if AUTO mode is available.<wbr/> If AUTO is not available,<wbr/>
+the default must be either 50HZ or 60HZ,<wbr/> and both 50HZ and
+60HZ must be available.<wbr/></p>
+<p>If manual exposure control is enabled (by setting
+<a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a> or <a href="#controls_android.control.mode">android.<wbr/>control.<wbr/>mode</a> to OFF),<wbr/>
+then the exposure values provided by the application must not be
+adjusted for antibanding.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="dynamic_android.control.aeExposureCompensation">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>control.<wbr/>ae<wbr/>Exposure<wbr/>Compensation
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int32</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Adjustment to auto-exposure (AE) target image
+brightness.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+              Compensation steps
+            </td>
+
+            <td class="entry_range">
+              <p><a href="#static_android.control.aeCompensationRange">android.<wbr/>control.<wbr/>ae<wbr/>Compensation<wbr/>Range</a></p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_BC">BC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>The adjustment is measured as a count of steps,<wbr/> with the
+step size defined by <a href="#static_android.control.aeCompensationStep">android.<wbr/>control.<wbr/>ae<wbr/>Compensation<wbr/>Step</a> and the
+allowed range by <a href="#static_android.control.aeCompensationRange">android.<wbr/>control.<wbr/>ae<wbr/>Compensation<wbr/>Range</a>.<wbr/></p>
+<p>For example,<wbr/> if the exposure value (EV) step is 0.<wbr/>333,<wbr/> '6'
+will mean an exposure compensation of +2 EV; -3 will mean an
+exposure compensation of -1 EV.<wbr/> One EV represents a doubling
+of image brightness.<wbr/> Note that this control will only be
+effective if <a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a> <code>!=</code> OFF.<wbr/> This control
+will take effect even when <a href="#controls_android.control.aeLock">android.<wbr/>control.<wbr/>ae<wbr/>Lock</a> <code>== true</code>.<wbr/></p>
+<p>In the event of exposure compensation value being changed,<wbr/> camera device
+may take several frames to reach the newly requested exposure target.<wbr/>
+During that time,<wbr/> <a href="#dynamic_android.control.aeState">android.<wbr/>control.<wbr/>ae<wbr/>State</a> field will be in the SEARCHING
+state.<wbr/> Once the new exposure target is reached,<wbr/> <a href="#dynamic_android.control.aeState">android.<wbr/>control.<wbr/>ae<wbr/>State</a> will
+change from SEARCHING to either CONVERGED,<wbr/> LOCKED (if AE lock is enabled),<wbr/> or
+FLASH_<wbr/>REQUIRED (if the scene is too dark for still capture).<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="dynamic_android.control.aeLock">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>control.<wbr/>ae<wbr/>Lock
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">byte</span>
+
+              <span class="entry_type_visibility"> [public as boolean]</span>
+
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">OFF</span>
+                    <span class="entry_type_enum_notes"><p>Auto-exposure lock is disabled; the AE algorithm
+is free to update its parameters.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">ON</span>
+                    <span class="entry_type_enum_notes"><p>Auto-exposure lock is enabled; the AE algorithm
+must not update the exposure and sensitivity parameters
+while the lock is active.<wbr/></p>
+<p><a href="#controls_android.control.aeExposureCompensation">android.<wbr/>control.<wbr/>ae<wbr/>Exposure<wbr/>Compensation</a> setting changes
+will still take effect while auto-exposure is locked.<wbr/></p>
+<p>Some rare LEGACY devices may not support
+this,<wbr/> in which case the value will always be overridden to OFF.<wbr/></p></span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Whether auto-exposure (AE) is currently locked to its latest
+calculated values.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_BC">BC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>When set to <code>true</code> (ON),<wbr/> the AE algorithm is locked to its latest parameters,<wbr/>
+and will not change exposure settings until the lock is set to <code>false</code> (OFF).<wbr/></p>
+<p>Note that even when AE is locked,<wbr/> the flash may be fired if
+the <a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a> is ON_<wbr/>AUTO_<wbr/>FLASH /<wbr/>
+ON_<wbr/>ALWAYS_<wbr/>FLASH /<wbr/> ON_<wbr/>AUTO_<wbr/>FLASH_<wbr/>REDEYE.<wbr/></p>
+<p>When <a href="#controls_android.control.aeExposureCompensation">android.<wbr/>control.<wbr/>ae<wbr/>Exposure<wbr/>Compensation</a> is changed,<wbr/> even if the AE lock
+is ON,<wbr/> the camera device will still adjust its exposure value.<wbr/></p>
+<p>If AE precapture is triggered (see <a href="#controls_android.control.aePrecaptureTrigger">android.<wbr/>control.<wbr/>ae<wbr/>Precapture<wbr/>Trigger</a>)
+when AE is already locked,<wbr/> the camera device will not change the exposure time
+(<a href="#controls_android.sensor.exposureTime">android.<wbr/>sensor.<wbr/>exposure<wbr/>Time</a>) and sensitivity (<a href="#controls_android.sensor.sensitivity">android.<wbr/>sensor.<wbr/>sensitivity</a>)
+parameters.<wbr/> The flash may be fired if the <a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a>
+is ON_<wbr/>AUTO_<wbr/>FLASH/<wbr/>ON_<wbr/>AUTO_<wbr/>FLASH_<wbr/>REDEYE and the scene is too dark.<wbr/> If the
+<a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a> is ON_<wbr/>ALWAYS_<wbr/>FLASH,<wbr/> the scene may become overexposed.<wbr/>
+Similarly,<wbr/> AE precapture trigger CANCEL has no effect when AE is already locked.<wbr/></p>
+<p>When an AE precapture sequence is triggered,<wbr/> AE unlock will not be able to unlock
+the AE if AE is locked by the camera device internally during precapture metering
+sequence.<wbr/> In other words,<wbr/> submitting requests with AE unlock has no effect for an
+ongoing precapture metering sequence.<wbr/> Otherwise,<wbr/> the precapture metering sequence
+will never succeed in a sequence of preview requests where AE lock is always set
+to <code>false</code>.<wbr/></p>
+<p>Since the camera device has a pipeline of in-flight requests,<wbr/> the settings that
+get locked do not necessarily correspond to the settings that were present in the
+latest capture result received from the camera device,<wbr/> since additional captures
+and AE updates may have occurred even before the result was sent out.<wbr/> If an
+application is switching between automatic and manual control and wishes to eliminate
+any flicker during the switch,<wbr/> the following procedure is recommended:</p>
+<ol>
+<li>Starting in auto-AE mode:</li>
+<li>Lock AE</li>
+<li>Wait for the first result to be output that has the AE locked</li>
+<li>Copy exposure settings from that result into a request,<wbr/> set the request to manual AE</li>
+<li>Submit the capture request,<wbr/> proceed to run manual AE as desired.<wbr/></li>
+</ol>
+<p>See <a href="#dynamic_android.control.aeState">android.<wbr/>control.<wbr/>ae<wbr/>State</a> for AE lock related state transition details.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="dynamic_android.control.aeMode">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>control.<wbr/>ae<wbr/>Mode
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">byte</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">OFF</span>
+                    <span class="entry_type_enum_notes"><p>The camera device's autoexposure routine is disabled.<wbr/></p>
+<p>The application-selected <a href="#controls_android.sensor.exposureTime">android.<wbr/>sensor.<wbr/>exposure<wbr/>Time</a>,<wbr/>
+<a href="#controls_android.sensor.sensitivity">android.<wbr/>sensor.<wbr/>sensitivity</a> and
+<a href="#controls_android.sensor.frameDuration">android.<wbr/>sensor.<wbr/>frame<wbr/>Duration</a> are used by the camera
+device,<wbr/> along with android.<wbr/>flash.<wbr/>* fields,<wbr/> if there's
+a flash unit for this camera device.<wbr/></p>
+<p>Note that auto-white balance (AWB) and auto-focus (AF)
+behavior is device dependent when AE is in OFF mode.<wbr/>
+To have consistent behavior across different devices,<wbr/>
+it is recommended to either set AWB and AF to OFF mode
+or lock AWB and AF before setting AE to OFF.<wbr/>
+See <a href="#controls_android.control.awbMode">android.<wbr/>control.<wbr/>awb<wbr/>Mode</a>,<wbr/> <a href="#controls_android.control.afMode">android.<wbr/>control.<wbr/>af<wbr/>Mode</a>,<wbr/>
+<a href="#controls_android.control.awbLock">android.<wbr/>control.<wbr/>awb<wbr/>Lock</a>,<wbr/> and <a href="#controls_android.control.afTrigger">android.<wbr/>control.<wbr/>af<wbr/>Trigger</a>
+for more details.<wbr/></p>
+<p>LEGACY devices do not support the OFF mode and will
+override attempts to use this value to ON.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">ON</span>
+                    <span class="entry_type_enum_notes"><p>The camera device's autoexposure routine is active,<wbr/>
+with no flash control.<wbr/></p>
+<p>The application's values for
+<a href="#controls_android.sensor.exposureTime">android.<wbr/>sensor.<wbr/>exposure<wbr/>Time</a>,<wbr/>
+<a href="#controls_android.sensor.sensitivity">android.<wbr/>sensor.<wbr/>sensitivity</a>,<wbr/> and
+<a href="#controls_android.sensor.frameDuration">android.<wbr/>sensor.<wbr/>frame<wbr/>Duration</a> are ignored.<wbr/> The
+application has control over the various
+android.<wbr/>flash.<wbr/>* fields.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">ON_AUTO_FLASH</span>
+                    <span class="entry_type_enum_notes"><p>Like ON,<wbr/> except that the camera device also controls
+the camera's flash unit,<wbr/> firing it in low-light
+conditions.<wbr/></p>
+<p>The flash may be fired during a precapture sequence
+(triggered by <a href="#controls_android.control.aePrecaptureTrigger">android.<wbr/>control.<wbr/>ae<wbr/>Precapture<wbr/>Trigger</a>) and
+may be fired for captures for which the
+<a href="#controls_android.control.captureIntent">android.<wbr/>control.<wbr/>capture<wbr/>Intent</a> field is set to
+STILL_<wbr/>CAPTURE</p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">ON_ALWAYS_FLASH</span>
+                    <span class="entry_type_enum_notes"><p>Like ON,<wbr/> except that the camera device also controls
+the camera's flash unit,<wbr/> always firing it for still
+captures.<wbr/></p>
+<p>The flash may be fired during a precapture sequence
+(triggered by <a href="#controls_android.control.aePrecaptureTrigger">android.<wbr/>control.<wbr/>ae<wbr/>Precapture<wbr/>Trigger</a>) and
+will always be fired for captures for which the
+<a href="#controls_android.control.captureIntent">android.<wbr/>control.<wbr/>capture<wbr/>Intent</a> field is set to
+STILL_<wbr/>CAPTURE</p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">ON_AUTO_FLASH_REDEYE</span>
+                    <span class="entry_type_enum_notes"><p>Like ON_<wbr/>AUTO_<wbr/>FLASH,<wbr/> but with automatic red eye
+reduction.<wbr/></p>
+<p>If deemed necessary by the camera device,<wbr/> a red eye
+reduction flash will fire during the precapture
+sequence.<wbr/></p></span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>The desired mode for the camera device's
+auto-exposure routine.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p><a href="#static_android.control.aeAvailableModes">android.<wbr/>control.<wbr/>ae<wbr/>Available<wbr/>Modes</a></p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_BC">BC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>This control is only effective if <a href="#controls_android.control.mode">android.<wbr/>control.<wbr/>mode</a> is
+AUTO.<wbr/></p>
+<p>When set to any of the ON modes,<wbr/> the camera device's
+auto-exposure routine is enabled,<wbr/> overriding the
+application's selected exposure time,<wbr/> sensor sensitivity,<wbr/>
+and frame duration (<a href="#controls_android.sensor.exposureTime">android.<wbr/>sensor.<wbr/>exposure<wbr/>Time</a>,<wbr/>
+<a href="#controls_android.sensor.sensitivity">android.<wbr/>sensor.<wbr/>sensitivity</a>,<wbr/> and
+<a href="#controls_android.sensor.frameDuration">android.<wbr/>sensor.<wbr/>frame<wbr/>Duration</a>).<wbr/> If one of the FLASH modes
+is selected,<wbr/> the camera device's flash unit controls are
+also overridden.<wbr/></p>
+<p>The FLASH modes are only available if the camera device
+has a flash unit (<a href="#static_android.flash.info.available">android.<wbr/>flash.<wbr/>info.<wbr/>available</a> is <code>true</code>).<wbr/></p>
+<p>If flash TORCH mode is desired,<wbr/> this field must be set to
+ON or OFF,<wbr/> and <a href="#controls_android.flash.mode">android.<wbr/>flash.<wbr/>mode</a> set to TORCH.<wbr/></p>
+<p>When set to any of the ON modes,<wbr/> the values chosen by the
+camera device auto-exposure routine for the overridden
+fields for a given capture will be available in its
+CaptureResult.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="dynamic_android.control.aeRegions">
+            <td class="entry_name
+             " rowspan="5">
+              android.<wbr/>control.<wbr/>ae<wbr/>Regions
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int32</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  5 x area_count
+                </span>
+              <span class="entry_type_visibility"> [public as meteringRectangle]</span>
+
+
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>List of metering areas to use for auto-exposure adjustment.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+              Pixel coordinates within android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size
+            </td>
+
+            <td class="entry_range">
+              <p>Coordinates must be between <code>[(0,<wbr/>0),<wbr/> (width,<wbr/> height))</code> of
+<a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a></p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_BC">BC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>Not available if <a href="#static_android.control.maxRegionsAe">android.<wbr/>control.<wbr/>max<wbr/>Regions<wbr/>Ae</a> is 0.<wbr/>
+Otherwise will always be present.<wbr/></p>
+<p>The maximum number of regions supported by the device is determined by the value
+of <a href="#static_android.control.maxRegionsAe">android.<wbr/>control.<wbr/>max<wbr/>Regions<wbr/>Ae</a>.<wbr/></p>
+<p>The coordinate system is based on the active pixel array,<wbr/>
+with (0,<wbr/>0) being the top-left pixel in the active pixel array,<wbr/> and
+(<a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a>.<wbr/>width - 1,<wbr/>
+<a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a>.<wbr/>height - 1) being the
+bottom-right pixel in the active pixel array.<wbr/></p>
+<p>The weight must be within <code>[0,<wbr/> 1000]</code>,<wbr/> and represents a weight
+for every pixel in the area.<wbr/> This means that a large metering area
+with the same weight as a smaller area will have more effect in
+the metering result.<wbr/> Metering areas can partially overlap and the
+camera device will add the weights in the overlap region.<wbr/></p>
+<p>The weights are relative to weights of other exposure metering regions,<wbr/> so if only one
+region is used,<wbr/> all non-zero weights will have the same effect.<wbr/> A region with 0
+weight is ignored.<wbr/></p>
+<p>If all regions have 0 weight,<wbr/> then no specific metering area needs to be used by the
+camera device.<wbr/></p>
+<p>If the metering region is outside the used <a href="#controls_android.scaler.cropRegion">android.<wbr/>scaler.<wbr/>crop<wbr/>Region</a> returned in
+capture result metadata,<wbr/> the camera device will ignore the sections outside the crop
+region and output only the intersection rectangle as the metering region in the result
+metadata.<wbr/>  If the region is entirely outside the crop region,<wbr/> it will be ignored and
+not reported in the result metadata.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>The HAL level representation of MeteringRectangle[] is a
+int[5 * area_<wbr/>count].<wbr/>
+Every five elements represent a metering region of
+(xmin,<wbr/> ymin,<wbr/> xmax,<wbr/> ymax,<wbr/> weight).<wbr/>
+The rectangle is defined to be inclusive on xmin and ymin,<wbr/> but
+exclusive on xmax and ymax.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="dynamic_android.control.aeTargetFpsRange">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>control.<wbr/>ae<wbr/>Target<wbr/>Fps<wbr/>Range
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int32</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  2
+                </span>
+              <span class="entry_type_visibility"> [public as rangeInt]</span>
+
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Range over which the auto-exposure routine can
+adjust the capture frame rate to maintain good
+exposure.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+              Frames per second (FPS)
+            </td>
+
+            <td class="entry_range">
+              <p>Any of the entries in <a href="#static_android.control.aeAvailableTargetFpsRanges">android.<wbr/>control.<wbr/>ae<wbr/>Available<wbr/>Target<wbr/>Fps<wbr/>Ranges</a></p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_BC">BC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>Only constrains auto-exposure (AE) algorithm,<wbr/> not
+manual control of <a href="#controls_android.sensor.exposureTime">android.<wbr/>sensor.<wbr/>exposure<wbr/>Time</a> and
+<a href="#controls_android.sensor.frameDuration">android.<wbr/>sensor.<wbr/>frame<wbr/>Duration</a>.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="dynamic_android.control.aePrecaptureTrigger">
+            <td class="entry_name
+             " rowspan="5">
+              android.<wbr/>control.<wbr/>ae<wbr/>Precapture<wbr/>Trigger
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">byte</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+              <span class="entry_type_hwlevel">[limited] </span>
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">IDLE</span>
+                    <span class="entry_type_enum_notes"><p>The trigger is idle.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">START</span>
+                    <span class="entry_type_enum_notes"><p>The precapture metering sequence will be started
+by the camera device.<wbr/></p>
+<p>The exact effect of the precapture trigger depends on
+the current AE mode and state.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">CANCEL</span>
+                    <span class="entry_type_enum_notes"><p>The camera device will cancel any currently active or completed
+precapture metering sequence,<wbr/> the auto-exposure routine will return to its
+initial state.<wbr/></p></span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Whether the camera device will trigger a precapture
+metering sequence when it processes this request.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_BC">BC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>This entry is normally set to IDLE,<wbr/> or is not
+included at all in the request settings.<wbr/> When included and
+set to START,<wbr/> the camera device will trigger the auto-exposure (AE)
+precapture metering sequence.<wbr/></p>
+<p>When set to CANCEL,<wbr/> the camera device will cancel any active
+precapture metering trigger,<wbr/> and return to its initial AE state.<wbr/>
+If a precapture metering sequence is already completed,<wbr/> and the camera
+device has implicitly locked the AE for subsequent still capture,<wbr/> the
+CANCEL trigger will unlock the AE and return to its initial AE state.<wbr/></p>
+<p>The precapture sequence should be triggered before starting a
+high-quality still capture for final metering decisions to
+be made,<wbr/> and for firing pre-capture flash pulses to estimate
+scene brightness and required final capture flash power,<wbr/> when
+the flash is enabled.<wbr/></p>
+<p>Normally,<wbr/> this entry should be set to START for only a
+single request,<wbr/> and the application should wait until the
+sequence completes before starting a new one.<wbr/></p>
+<p>When a precapture metering sequence is finished,<wbr/> the camera device
+may lock the auto-exposure routine internally to be able to accurately expose the
+subsequent still capture image (<code><a href="#controls_android.control.captureIntent">android.<wbr/>control.<wbr/>capture<wbr/>Intent</a> == STILL_<wbr/>CAPTURE</code>).<wbr/>
+For this case,<wbr/> the AE may not resume normal scan if no subsequent still capture is
+submitted.<wbr/> To ensure that the AE routine restarts normal scan,<wbr/> the application should
+submit a request with <code><a href="#controls_android.control.aeLock">android.<wbr/>control.<wbr/>ae<wbr/>Lock</a> == true</code>,<wbr/> followed by a request
+with <code><a href="#controls_android.control.aeLock">android.<wbr/>control.<wbr/>ae<wbr/>Lock</a> == false</code>,<wbr/> if the application decides not to submit a
+still capture request after the precapture sequence completes.<wbr/> Alternatively,<wbr/> for
+API level 23 or newer devices,<wbr/> the CANCEL can be used to unlock the camera device
+internally locked AE if the application doesn't submit a still capture request after
+the AE precapture trigger.<wbr/> Note that,<wbr/> the CANCEL was added in API level 23,<wbr/> and must not
+be used in devices that have earlier API levels.<wbr/></p>
+<p>The exact effect of auto-exposure (AE) precapture trigger
+depends on the current AE mode and state; see
+<a href="#dynamic_android.control.aeState">android.<wbr/>control.<wbr/>ae<wbr/>State</a> for AE precapture state transition
+details.<wbr/></p>
+<p>On LEGACY-level devices,<wbr/> the precapture trigger is not supported;
+capturing a high-resolution JPEG image will automatically trigger a
+precapture sequence before the high-resolution capture,<wbr/> including
+potentially firing a pre-capture flash.<wbr/></p>
+<p>Using the precapture trigger and the auto-focus trigger <a href="#controls_android.control.afTrigger">android.<wbr/>control.<wbr/>af<wbr/>Trigger</a>
+simultaneously is allowed.<wbr/> However,<wbr/> since these triggers often require cooperation between
+the auto-focus and auto-exposure routines (for example,<wbr/> the flash may need to be enabled for a
+focus sweep),<wbr/> the camera device may delay acting on a later trigger until the previous
+trigger has been fully handled.<wbr/> This may lead to longer intervals between the trigger and
+changes to <a href="#dynamic_android.control.aeState">android.<wbr/>control.<wbr/>ae<wbr/>State</a> indicating the start of the precapture sequence,<wbr/> for
+example.<wbr/></p>
+<p>If both the precapture and the auto-focus trigger are activated on the same request,<wbr/> then
+the camera device will complete them in the optimal order for that device.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>The HAL must support triggering the AE precapture trigger while an AF trigger is active
+(and vice versa),<wbr/> or at the same time as the AF trigger.<wbr/>  It is acceptable for the HAL to
+treat these as two consecutive triggers,<wbr/> for example handling the AF trigger and then the
+AE trigger.<wbr/>  Or the HAL may choose to optimize the case with both triggers fired at once,<wbr/>
+to minimize the latency for converging both focus and exposure/<wbr/>flash usage.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="dynamic_android.control.aeState">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>control.<wbr/>ae<wbr/>State
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">byte</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+              <span class="entry_type_hwlevel">[limited] </span>
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">INACTIVE</span>
+                    <span class="entry_type_enum_notes"><p>AE is off or recently reset.<wbr/></p>
+<p>When a camera device is opened,<wbr/> it starts in
+this state.<wbr/> This is a transient state,<wbr/> the camera device may skip reporting
+this state in capture result.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">SEARCHING</span>
+                    <span class="entry_type_enum_notes"><p>AE doesn't yet have a good set of control values
+for the current scene.<wbr/></p>
+<p>This is a transient state,<wbr/> the camera device may skip
+reporting this state in capture result.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">CONVERGED</span>
+                    <span class="entry_type_enum_notes"><p>AE has a good set of control values for the
+current scene.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">LOCKED</span>
+                    <span class="entry_type_enum_notes"><p>AE has been locked.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">FLASH_REQUIRED</span>
+                    <span class="entry_type_enum_notes"><p>AE has a good set of control values,<wbr/> but flash
+needs to be fired for good quality still
+capture.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">PRECAPTURE</span>
+                    <span class="entry_type_enum_notes"><p>AE has been asked to do a precapture sequence
+and is currently executing it.<wbr/></p>
+<p>Precapture can be triggered through setting
+<a href="#controls_android.control.aePrecaptureTrigger">android.<wbr/>control.<wbr/>ae<wbr/>Precapture<wbr/>Trigger</a> to START.<wbr/> Currently
+active and completed (if it causes camera device internal AE lock) precapture
+metering sequence can be canceled through setting
+<a href="#controls_android.control.aePrecaptureTrigger">android.<wbr/>control.<wbr/>ae<wbr/>Precapture<wbr/>Trigger</a> to CANCEL.<wbr/></p>
+<p>Once PRECAPTURE completes,<wbr/> AE will transition to CONVERGED
+or FLASH_<wbr/>REQUIRED as appropriate.<wbr/> This is a transient
+state,<wbr/> the camera device may skip reporting this state in
+capture result.<wbr/></p></span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Current state of the auto-exposure (AE) algorithm.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>Switching between or enabling AE modes (<a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a>) always
+resets the AE state to INACTIVE.<wbr/> Similarly,<wbr/> switching between <a href="#controls_android.control.mode">android.<wbr/>control.<wbr/>mode</a>,<wbr/>
+or <a href="#controls_android.control.sceneMode">android.<wbr/>control.<wbr/>scene<wbr/>Mode</a> if <code><a href="#controls_android.control.mode">android.<wbr/>control.<wbr/>mode</a> == USE_<wbr/>SCENE_<wbr/>MODE</code> resets all
+the algorithm states to INACTIVE.<wbr/></p>
+<p>The camera device can do several state transitions between two results,<wbr/> if it is
+allowed by the state transition table.<wbr/> For example: INACTIVE may never actually be
+seen in a result.<wbr/></p>
+<p>The state in the result is the state for this image (in sync with this image): if
+AE state becomes CONVERGED,<wbr/> then the image data associated with this result should
+be good to use.<wbr/></p>
+<p>Below are state transition tables for different AE modes.<wbr/></p>
+<table>
+<thead>
+<tr>
+<th align="center">State</th>
+<th align="center">Transition Cause</th>
+<th align="center">New State</th>
+<th align="center">Notes</th>
+</tr>
+</thead>
+<tbody>
+<tr>
+<td align="center">INACTIVE</td>
+<td align="center"></td>
+<td align="center">INACTIVE</td>
+<td align="center">Camera device auto exposure algorithm is disabled</td>
+</tr>
+</tbody>
+</table>
+<p>When <a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a> is AE_<wbr/>MODE_<wbr/>ON_<wbr/>*:</p>
+<table>
+<thead>
+<tr>
+<th align="center">State</th>
+<th align="center">Transition Cause</th>
+<th align="center">New State</th>
+<th align="center">Notes</th>
+</tr>
+</thead>
+<tbody>
+<tr>
+<td align="center">INACTIVE</td>
+<td align="center">Camera device initiates AE scan</td>
+<td align="center">SEARCHING</td>
+<td align="center">Values changing</td>
+</tr>
+<tr>
+<td align="center">INACTIVE</td>
+<td align="center"><a href="#controls_android.control.aeLock">android.<wbr/>control.<wbr/>ae<wbr/>Lock</a> is ON</td>
+<td align="center">LOCKED</td>
+<td align="center">Values locked</td>
+</tr>
+<tr>
+<td align="center">SEARCHING</td>
+<td align="center">Camera device finishes AE scan</td>
+<td align="center">CONVERGED</td>
+<td align="center">Good values,<wbr/> not changing</td>
+</tr>
+<tr>
+<td align="center">SEARCHING</td>
+<td align="center">Camera device finishes AE scan</td>
+<td align="center">FLASH_<wbr/>REQUIRED</td>
+<td align="center">Converged but too dark w/<wbr/>o flash</td>
+</tr>
+<tr>
+<td align="center">SEARCHING</td>
+<td align="center"><a href="#controls_android.control.aeLock">android.<wbr/>control.<wbr/>ae<wbr/>Lock</a> is ON</td>
+<td align="center">LOCKED</td>
+<td align="center">Values locked</td>
+</tr>
+<tr>
+<td align="center">CONVERGED</td>
+<td align="center">Camera device initiates AE scan</td>
+<td align="center">SEARCHING</td>
+<td align="center">Values changing</td>
+</tr>
+<tr>
+<td align="center">CONVERGED</td>
+<td align="center"><a href="#controls_android.control.aeLock">android.<wbr/>control.<wbr/>ae<wbr/>Lock</a> is ON</td>
+<td align="center">LOCKED</td>
+<td align="center">Values locked</td>
+</tr>
+<tr>
+<td align="center">FLASH_<wbr/>REQUIRED</td>
+<td align="center">Camera device initiates AE scan</td>
+<td align="center">SEARCHING</td>
+<td align="center">Values changing</td>
+</tr>
+<tr>
+<td align="center">FLASH_<wbr/>REQUIRED</td>
+<td align="center"><a href="#controls_android.control.aeLock">android.<wbr/>control.<wbr/>ae<wbr/>Lock</a> is ON</td>
+<td align="center">LOCKED</td>
+<td align="center">Values locked</td>
+</tr>
+<tr>
+<td align="center">LOCKED</td>
+<td align="center"><a href="#controls_android.control.aeLock">android.<wbr/>control.<wbr/>ae<wbr/>Lock</a> is OFF</td>
+<td align="center">SEARCHING</td>
+<td align="center">Values not good after unlock</td>
+</tr>
+<tr>
+<td align="center">LOCKED</td>
+<td align="center"><a href="#controls_android.control.aeLock">android.<wbr/>control.<wbr/>ae<wbr/>Lock</a> is OFF</td>
+<td align="center">CONVERGED</td>
+<td align="center">Values good after unlock</td>
+</tr>
+<tr>
+<td align="center">LOCKED</td>
+<td align="center"><a href="#controls_android.control.aeLock">android.<wbr/>control.<wbr/>ae<wbr/>Lock</a> is OFF</td>
+<td align="center">FLASH_<wbr/>REQUIRED</td>
+<td align="center">Exposure good,<wbr/> but too dark</td>
+</tr>
+<tr>
+<td align="center">PRECAPTURE</td>
+<td align="center">Sequence done.<wbr/> <a href="#controls_android.control.aeLock">android.<wbr/>control.<wbr/>ae<wbr/>Lock</a> is OFF</td>
+<td align="center">CONVERGED</td>
+<td align="center">Ready for high-quality capture</td>
+</tr>
+<tr>
+<td align="center">PRECAPTURE</td>
+<td align="center">Sequence done.<wbr/> <a href="#controls_android.control.aeLock">android.<wbr/>control.<wbr/>ae<wbr/>Lock</a> is ON</td>
+<td align="center">LOCKED</td>
+<td align="center">Ready for high-quality capture</td>
+</tr>
+<tr>
+<td align="center">LOCKED</td>
+<td align="center">aeLock is ON and aePrecaptureTrigger is START</td>
+<td align="center">LOCKED</td>
+<td align="center">Precapture trigger is ignored when AE is already locked</td>
+</tr>
+<tr>
+<td align="center">LOCKED</td>
+<td align="center">aeLock is ON and aePrecaptureTrigger is CANCEL</td>
+<td align="center">LOCKED</td>
+<td align="center">Precapture trigger is ignored when AE is already locked</td>
+</tr>
+<tr>
+<td align="center">Any state (excluding LOCKED)</td>
+<td align="center"><a href="#controls_android.control.aePrecaptureTrigger">android.<wbr/>control.<wbr/>ae<wbr/>Precapture<wbr/>Trigger</a> is START</td>
+<td align="center">PRECAPTURE</td>
+<td align="center">Start AE precapture metering sequence</td>
+</tr>
+<tr>
+<td align="center">Any state (excluding LOCKED)</td>
+<td align="center"><a href="#controls_android.control.aePrecaptureTrigger">android.<wbr/>control.<wbr/>ae<wbr/>Precapture<wbr/>Trigger</a> is CANCEL</td>
+<td align="center">INACTIVE</td>
+<td align="center">Currently active precapture metering sequence is canceled</td>
+</tr>
+</tbody>
+</table>
+<p>For the above table,<wbr/> the camera device may skip reporting any state changes that happen
+without application intervention (i.<wbr/>e.<wbr/> mode switch,<wbr/> trigger,<wbr/> locking).<wbr/> Any state that
+can be skipped in that manner is called a transient state.<wbr/></p>
+<p>For example,<wbr/> for above AE modes (AE_<wbr/>MODE_<wbr/>ON_<wbr/>*),<wbr/> in addition to the state transitions
+listed in above table,<wbr/> it is also legal for the camera device to skip one or more
+transient states between two results.<wbr/> See below table for examples:</p>
+<table>
+<thead>
+<tr>
+<th align="center">State</th>
+<th align="center">Transition Cause</th>
+<th align="center">New State</th>
+<th align="center">Notes</th>
+</tr>
+</thead>
+<tbody>
+<tr>
+<td align="center">INACTIVE</td>
+<td align="center">Camera device finished AE scan</td>
+<td align="center">CONVERGED</td>
+<td align="center">Values are already good,<wbr/> transient states are skipped by camera device.<wbr/></td>
+</tr>
+<tr>
+<td align="center">Any state (excluding LOCKED)</td>
+<td align="center"><a href="#controls_android.control.aePrecaptureTrigger">android.<wbr/>control.<wbr/>ae<wbr/>Precapture<wbr/>Trigger</a> is START,<wbr/> sequence done</td>
+<td align="center">FLASH_<wbr/>REQUIRED</td>
+<td align="center">Converged but too dark w/<wbr/>o flash after a precapture sequence,<wbr/> transient states are skipped by camera device.<wbr/></td>
+</tr>
+<tr>
+<td align="center">Any state (excluding LOCKED)</td>
+<td align="center"><a href="#controls_android.control.aePrecaptureTrigger">android.<wbr/>control.<wbr/>ae<wbr/>Precapture<wbr/>Trigger</a> is START,<wbr/> sequence done</td>
+<td align="center">CONVERGED</td>
+<td align="center">Converged after a precapture sequence,<wbr/> transient states are skipped by camera device.<wbr/></td>
+</tr>
+<tr>
+<td align="center">Any state (excluding LOCKED)</td>
+<td align="center"><a href="#controls_android.control.aePrecaptureTrigger">android.<wbr/>control.<wbr/>ae<wbr/>Precapture<wbr/>Trigger</a> is CANCEL,<wbr/> converged</td>
+<td align="center">FLASH_<wbr/>REQUIRED</td>
+<td align="center">Converged but too dark w/<wbr/>o flash after a precapture sequence is canceled,<wbr/> transient states are skipped by camera device.<wbr/></td>
+</tr>
+<tr>
+<td align="center">Any state (excluding LOCKED)</td>
+<td align="center"><a href="#controls_android.control.aePrecaptureTrigger">android.<wbr/>control.<wbr/>ae<wbr/>Precapture<wbr/>Trigger</a> is CANCEL,<wbr/> converged</td>
+<td align="center">CONVERGED</td>
+<td align="center">Converged after a precapture sequence is canceled,<wbr/> transient states are skipped by camera device.<wbr/></td>
+</tr>
+<tr>
+<td align="center">CONVERGED</td>
+<td align="center">Camera device finished AE scan</td>
+<td align="center">FLASH_<wbr/>REQUIRED</td>
+<td align="center">Converged but too dark w/<wbr/>o flash after a new scan,<wbr/> transient states are skipped by camera device.<wbr/></td>
+</tr>
+<tr>
+<td align="center">FLASH_<wbr/>REQUIRED</td>
+<td align="center">Camera device finished AE scan</td>
+<td align="center">CONVERGED</td>
+<td align="center">Converged after a new scan,<wbr/> transient states are skipped by camera device.<wbr/></td>
+</tr>
+</tbody>
+</table>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="dynamic_android.control.afMode">
+            <td class="entry_name
+             " rowspan="5">
+              android.<wbr/>control.<wbr/>af<wbr/>Mode
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">byte</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">OFF</span>
+                    <span class="entry_type_enum_notes"><p>The auto-focus routine does not control the lens;
+<a href="#controls_android.lens.focusDistance">android.<wbr/>lens.<wbr/>focus<wbr/>Distance</a> is controlled by the
+application.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">AUTO</span>
+                    <span class="entry_type_enum_notes"><p>Basic automatic focus mode.<wbr/></p>
+<p>In this mode,<wbr/> the lens does not move unless
+the autofocus trigger action is called.<wbr/> When that trigger
+is activated,<wbr/> AF will transition to ACTIVE_<wbr/>SCAN,<wbr/> then to
+the outcome of the scan (FOCUSED or NOT_<wbr/>FOCUSED).<wbr/></p>
+<p>Always supported if lens is not fixed focus.<wbr/></p>
+<p>Use <a href="#static_android.lens.info.minimumFocusDistance">android.<wbr/>lens.<wbr/>info.<wbr/>minimum<wbr/>Focus<wbr/>Distance</a> to determine if lens
+is fixed-focus.<wbr/></p>
+<p>Triggering AF_<wbr/>CANCEL resets the lens position to default,<wbr/>
+and sets the AF state to INACTIVE.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">MACRO</span>
+                    <span class="entry_type_enum_notes"><p>Close-up focusing mode.<wbr/></p>
+<p>In this mode,<wbr/> the lens does not move unless the
+autofocus trigger action is called.<wbr/> When that trigger is
+activated,<wbr/> AF will transition to ACTIVE_<wbr/>SCAN,<wbr/> then to
+the outcome of the scan (FOCUSED or NOT_<wbr/>FOCUSED).<wbr/> This
+mode is optimized for focusing on objects very close to
+the camera.<wbr/></p>
+<p>When that trigger is activated,<wbr/> AF will transition to
+ACTIVE_<wbr/>SCAN,<wbr/> then to the outcome of the scan (FOCUSED or
+NOT_<wbr/>FOCUSED).<wbr/> Triggering cancel AF resets the lens
+position to default,<wbr/> and sets the AF state to
+INACTIVE.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">CONTINUOUS_VIDEO</span>
+                    <span class="entry_type_enum_notes"><p>In this mode,<wbr/> the AF algorithm modifies the lens
+position continually to attempt to provide a
+constantly-in-focus image stream.<wbr/></p>
+<p>The focusing behavior should be suitable for good quality
+video recording; typically this means slower focus
+movement and no overshoots.<wbr/> When the AF trigger is not
+involved,<wbr/> the AF algorithm should start in INACTIVE state,<wbr/>
+and then transition into PASSIVE_<wbr/>SCAN and PASSIVE_<wbr/>FOCUSED
+states as appropriate.<wbr/> When the AF trigger is activated,<wbr/>
+the algorithm should immediately transition into
+AF_<wbr/>FOCUSED or AF_<wbr/>NOT_<wbr/>FOCUSED as appropriate,<wbr/> and lock the
+lens position until a cancel AF trigger is received.<wbr/></p>
+<p>Once cancel is received,<wbr/> the algorithm should transition
+back to INACTIVE and resume passive scan.<wbr/> Note that this
+behavior is not identical to CONTINUOUS_<wbr/>PICTURE,<wbr/> since an
+ongoing PASSIVE_<wbr/>SCAN must immediately be
+canceled.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">CONTINUOUS_PICTURE</span>
+                    <span class="entry_type_enum_notes"><p>In this mode,<wbr/> the AF algorithm modifies the lens
+position continually to attempt to provide a
+constantly-in-focus image stream.<wbr/></p>
+<p>The focusing behavior should be suitable for still image
+capture; typically this means focusing as fast as
+possible.<wbr/> When the AF trigger is not involved,<wbr/> the AF
+algorithm should start in INACTIVE state,<wbr/> and then
+transition into PASSIVE_<wbr/>SCAN and PASSIVE_<wbr/>FOCUSED states as
+appropriate as it attempts to maintain focus.<wbr/> When the AF
+trigger is activated,<wbr/> the algorithm should finish its
+PASSIVE_<wbr/>SCAN if active,<wbr/> and then transition into
+AF_<wbr/>FOCUSED or AF_<wbr/>NOT_<wbr/>FOCUSED as appropriate,<wbr/> and lock the
+lens position until a cancel AF trigger is received.<wbr/></p>
+<p>When the AF cancel trigger is activated,<wbr/> the algorithm
+should transition back to INACTIVE and then act as if it
+has just been started.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">EDOF</span>
+                    <span class="entry_type_enum_notes"><p>Extended depth of field (digital focus) mode.<wbr/></p>
+<p>The camera device will produce images with an extended
+depth of field automatically; no special focusing
+operations need to be done before taking a picture.<wbr/></p>
+<p>AF triggers are ignored,<wbr/> and the AF state will always be
+INACTIVE.<wbr/></p></span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Whether auto-focus (AF) is currently enabled,<wbr/> and what
+mode it is set to.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p><a href="#static_android.control.afAvailableModes">android.<wbr/>control.<wbr/>af<wbr/>Available<wbr/>Modes</a></p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_BC">BC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>Only effective if <a href="#controls_android.control.mode">android.<wbr/>control.<wbr/>mode</a> = AUTO and the lens is not fixed focus
+(i.<wbr/>e.<wbr/> <code><a href="#static_android.lens.info.minimumFocusDistance">android.<wbr/>lens.<wbr/>info.<wbr/>minimum<wbr/>Focus<wbr/>Distance</a> &gt; 0</code>).<wbr/> Also note that
+when <a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a> is OFF,<wbr/> the behavior of AF is device
+dependent.<wbr/> It is recommended to lock AF by using <a href="#controls_android.control.afTrigger">android.<wbr/>control.<wbr/>af<wbr/>Trigger</a> before
+setting <a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a> to OFF,<wbr/> or set AF mode to OFF when AE is OFF.<wbr/></p>
+<p>If the lens is controlled by the camera device auto-focus algorithm,<wbr/>
+the camera device will report the current AF status in <a href="#dynamic_android.control.afState">android.<wbr/>control.<wbr/>af<wbr/>State</a>
+in result metadata.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>When afMode is AUTO or MACRO,<wbr/> the lens must not move until an AF trigger is sent in a
+request (<a href="#controls_android.control.afTrigger">android.<wbr/>control.<wbr/>af<wbr/>Trigger</a> <code>==</code> START).<wbr/> After an AF trigger,<wbr/> the afState will end
+up with either FOCUSED_<wbr/>LOCKED or NOT_<wbr/>FOCUSED_<wbr/>LOCKED state (see
+<a href="#dynamic_android.control.afState">android.<wbr/>control.<wbr/>af<wbr/>State</a> for detailed state transitions),<wbr/> which indicates that the lens is
+locked and will not move.<wbr/> If camera movement (e.<wbr/>g.<wbr/> tilting camera) causes the lens to move
+after the lens is locked,<wbr/> the HAL must compensate this movement appropriately such that
+the same focal plane remains in focus.<wbr/></p>
+<p>When afMode is one of the continuous auto focus modes,<wbr/> the HAL is free to start a AF
+scan whenever it's not locked.<wbr/> When the lens is locked after an AF trigger
+(see <a href="#dynamic_android.control.afState">android.<wbr/>control.<wbr/>af<wbr/>State</a> for detailed state transitions),<wbr/> the HAL should maintain the
+same lock behavior as above.<wbr/></p>
+<p>When afMode is OFF,<wbr/> the application controls focus manually.<wbr/> The accuracy of the
+focus distance control depends on the <a href="#static_android.lens.info.focusDistanceCalibration">android.<wbr/>lens.<wbr/>info.<wbr/>focus<wbr/>Distance<wbr/>Calibration</a>.<wbr/>
+However,<wbr/> the lens must not move regardless of the camera movement for any focus distance
+manual control.<wbr/></p>
+<p>To put this in concrete terms,<wbr/> if the camera has lens elements which may move based on
+camera orientation or motion (e.<wbr/>g.<wbr/> due to gravity),<wbr/> then the HAL must drive the lens to
+remain in a fixed position invariant to the camera's orientation or motion,<wbr/> for example,<wbr/>
+by using accelerometer measurements in the lens control logic.<wbr/> This is a typical issue
+that will arise on camera modules with open-loop VCMs.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="dynamic_android.control.afRegions">
+            <td class="entry_name
+             " rowspan="5">
+              android.<wbr/>control.<wbr/>af<wbr/>Regions
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int32</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  5 x area_count
+                </span>
+              <span class="entry_type_visibility"> [public as meteringRectangle]</span>
+
+
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>List of metering areas to use for auto-focus.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+              Pixel coordinates within android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size
+            </td>
+
+            <td class="entry_range">
+              <p>Coordinates must be between <code>[(0,<wbr/>0),<wbr/> (width,<wbr/> height))</code> of
+<a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a></p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_BC">BC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>Not available if <a href="#static_android.control.maxRegionsAf">android.<wbr/>control.<wbr/>max<wbr/>Regions<wbr/>Af</a> is 0.<wbr/>
+Otherwise will always be present.<wbr/></p>
+<p>The maximum number of focus areas supported by the device is determined by the value
+of <a href="#static_android.control.maxRegionsAf">android.<wbr/>control.<wbr/>max<wbr/>Regions<wbr/>Af</a>.<wbr/></p>
+<p>The coordinate system is based on the active pixel array,<wbr/>
+with (0,<wbr/>0) being the top-left pixel in the active pixel array,<wbr/> and
+(<a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a>.<wbr/>width - 1,<wbr/>
+<a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a>.<wbr/>height - 1) being the
+bottom-right pixel in the active pixel array.<wbr/></p>
+<p>The weight must be within <code>[0,<wbr/> 1000]</code>,<wbr/> and represents a weight
+for every pixel in the area.<wbr/> This means that a large metering area
+with the same weight as a smaller area will have more effect in
+the metering result.<wbr/> Metering areas can partially overlap and the
+camera device will add the weights in the overlap region.<wbr/></p>
+<p>The weights are relative to weights of other metering regions,<wbr/> so if only one region
+is used,<wbr/> all non-zero weights will have the same effect.<wbr/> A region with 0 weight is
+ignored.<wbr/></p>
+<p>If all regions have 0 weight,<wbr/> then no specific metering area needs to be used by the
+camera device.<wbr/></p>
+<p>If the metering region is outside the used <a href="#controls_android.scaler.cropRegion">android.<wbr/>scaler.<wbr/>crop<wbr/>Region</a> returned in
+capture result metadata,<wbr/> the camera device will ignore the sections outside the crop
+region and output only the intersection rectangle as the metering region in the result
+metadata.<wbr/> If the region is entirely outside the crop region,<wbr/> it will be ignored and
+not reported in the result metadata.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>The HAL level representation of MeteringRectangle[] is a
+int[5 * area_<wbr/>count].<wbr/>
+Every five elements represent a metering region of
+(xmin,<wbr/> ymin,<wbr/> xmax,<wbr/> ymax,<wbr/> weight).<wbr/>
+The rectangle is defined to be inclusive on xmin and ymin,<wbr/> but
+exclusive on xmax and ymax.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="dynamic_android.control.afTrigger">
+            <td class="entry_name
+             " rowspan="5">
+              android.<wbr/>control.<wbr/>af<wbr/>Trigger
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">byte</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">IDLE</span>
+                    <span class="entry_type_enum_notes"><p>The trigger is idle.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">START</span>
+                    <span class="entry_type_enum_notes"><p>Autofocus will trigger now.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">CANCEL</span>
+                    <span class="entry_type_enum_notes"><p>Autofocus will return to its initial
+state,<wbr/> and cancel any currently active trigger.<wbr/></p></span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Whether the camera device will trigger autofocus for this request.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_BC">BC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>This entry is normally set to IDLE,<wbr/> or is not
+included at all in the request settings.<wbr/></p>
+<p>When included and set to START,<wbr/> the camera device will trigger the
+autofocus algorithm.<wbr/> If autofocus is disabled,<wbr/> this trigger has no effect.<wbr/></p>
+<p>When set to CANCEL,<wbr/> the camera device will cancel any active trigger,<wbr/>
+and return to its initial AF state.<wbr/></p>
+<p>Generally,<wbr/> applications should set this entry to START or CANCEL for only a
+single capture,<wbr/> and then return it to IDLE (or not set at all).<wbr/> Specifying
+START for multiple captures in a row means restarting the AF operation over
+and over again.<wbr/></p>
+<p>See <a href="#dynamic_android.control.afState">android.<wbr/>control.<wbr/>af<wbr/>State</a> for what the trigger means for each AF mode.<wbr/></p>
+<p>Using the autofocus trigger and the precapture trigger <a href="#controls_android.control.aePrecaptureTrigger">android.<wbr/>control.<wbr/>ae<wbr/>Precapture<wbr/>Trigger</a>
+simultaneously is allowed.<wbr/> However,<wbr/> since these triggers often require cooperation between
+the auto-focus and auto-exposure routines (for example,<wbr/> the flash may need to be enabled for a
+focus sweep),<wbr/> the camera device may delay acting on a later trigger until the previous
+trigger has been fully handled.<wbr/> This may lead to longer intervals between the trigger and
+changes to <a href="#dynamic_android.control.afState">android.<wbr/>control.<wbr/>af<wbr/>State</a>,<wbr/> for example.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>The HAL must support triggering the AF trigger while an AE precapture trigger is active
+(and vice versa),<wbr/> or at the same time as the AE trigger.<wbr/>  It is acceptable for the HAL to
+treat these as two consecutive triggers,<wbr/> for example handling the AF trigger and then the
+AE trigger.<wbr/>  Or the HAL may choose to optimize the case with both triggers fired at once,<wbr/>
+to minimize the latency for converging both focus and exposure/<wbr/>flash usage.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="dynamic_android.control.afState">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>control.<wbr/>af<wbr/>State
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">byte</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">INACTIVE</span>
+                    <span class="entry_type_enum_notes"><p>AF is off or has not yet tried to scan/<wbr/>been asked
+to scan.<wbr/></p>
+<p>When a camera device is opened,<wbr/> it starts in this
+state.<wbr/> This is a transient state,<wbr/> the camera device may
+skip reporting this state in capture
+result.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">PASSIVE_SCAN</span>
+                    <span class="entry_type_enum_notes"><p>AF is currently performing an AF scan initiated by the
+camera device in a continuous autofocus mode.<wbr/></p>
+<p>Only used by CONTINUOUS_<wbr/>* AF modes.<wbr/> This is a transient
+state,<wbr/> the camera device may skip reporting this state in
+capture result.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">PASSIVE_FOCUSED</span>
+                    <span class="entry_type_enum_notes"><p>AF currently believes it is in focus,<wbr/> but may
+restart scanning at any time.<wbr/></p>
+<p>Only used by CONTINUOUS_<wbr/>* AF modes.<wbr/> This is a transient
+state,<wbr/> the camera device may skip reporting this state in
+capture result.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">ACTIVE_SCAN</span>
+                    <span class="entry_type_enum_notes"><p>AF is performing an AF scan because it was
+triggered by AF trigger.<wbr/></p>
+<p>Only used by AUTO or MACRO AF modes.<wbr/> This is a transient
+state,<wbr/> the camera device may skip reporting this state in
+capture result.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">FOCUSED_LOCKED</span>
+                    <span class="entry_type_enum_notes"><p>AF believes it is focused correctly and has locked
+focus.<wbr/></p>
+<p>This state is reached only after an explicit START AF trigger has been
+sent (<a href="#controls_android.control.afTrigger">android.<wbr/>control.<wbr/>af<wbr/>Trigger</a>),<wbr/> when good focus has been obtained.<wbr/></p>
+<p>The lens will remain stationary until the AF mode (<a href="#controls_android.control.afMode">android.<wbr/>control.<wbr/>af<wbr/>Mode</a>) is changed or
+a new AF trigger is sent to the camera device (<a href="#controls_android.control.afTrigger">android.<wbr/>control.<wbr/>af<wbr/>Trigger</a>).<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">NOT_FOCUSED_LOCKED</span>
+                    <span class="entry_type_enum_notes"><p>AF has failed to focus successfully and has locked
+focus.<wbr/></p>
+<p>This state is reached only after an explicit START AF trigger has been
+sent (<a href="#controls_android.control.afTrigger">android.<wbr/>control.<wbr/>af<wbr/>Trigger</a>),<wbr/> when good focus cannot be obtained.<wbr/></p>
+<p>The lens will remain stationary until the AF mode (<a href="#controls_android.control.afMode">android.<wbr/>control.<wbr/>af<wbr/>Mode</a>) is changed or
+a new AF trigger is sent to the camera device (<a href="#controls_android.control.afTrigger">android.<wbr/>control.<wbr/>af<wbr/>Trigger</a>).<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">PASSIVE_UNFOCUSED</span>
+                    <span class="entry_type_enum_notes"><p>AF finished a passive scan without finding focus,<wbr/>
+and may restart scanning at any time.<wbr/></p>
+<p>Only used by CONTINUOUS_<wbr/>* AF modes.<wbr/> This is a transient state,<wbr/> the camera
+device may skip reporting this state in capture result.<wbr/></p>
+<p>LEGACY camera devices do not support this state.<wbr/> When a passive
+scan has finished,<wbr/> it will always go to PASSIVE_<wbr/>FOCUSED.<wbr/></p></span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Current state of auto-focus (AF) algorithm.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>Switching between or enabling AF modes (<a href="#controls_android.control.afMode">android.<wbr/>control.<wbr/>af<wbr/>Mode</a>) always
+resets the AF state to INACTIVE.<wbr/> Similarly,<wbr/> switching between <a href="#controls_android.control.mode">android.<wbr/>control.<wbr/>mode</a>,<wbr/>
+or <a href="#controls_android.control.sceneMode">android.<wbr/>control.<wbr/>scene<wbr/>Mode</a> if <code><a href="#controls_android.control.mode">android.<wbr/>control.<wbr/>mode</a> == USE_<wbr/>SCENE_<wbr/>MODE</code> resets all
+the algorithm states to INACTIVE.<wbr/></p>
+<p>The camera device can do several state transitions between two results,<wbr/> if it is
+allowed by the state transition table.<wbr/> For example: INACTIVE may never actually be
+seen in a result.<wbr/></p>
+<p>The state in the result is the state for this image (in sync with this image): if
+AF state becomes FOCUSED,<wbr/> then the image data associated with this result should
+be sharp.<wbr/></p>
+<p>Below are state transition tables for different AF modes.<wbr/></p>
+<p>When <a href="#controls_android.control.afMode">android.<wbr/>control.<wbr/>af<wbr/>Mode</a> is AF_<wbr/>MODE_<wbr/>OFF or AF_<wbr/>MODE_<wbr/>EDOF:</p>
+<table>
+<thead>
+<tr>
+<th align="center">State</th>
+<th align="center">Transition Cause</th>
+<th align="center">New State</th>
+<th align="center">Notes</th>
+</tr>
+</thead>
+<tbody>
+<tr>
+<td align="center">INACTIVE</td>
+<td align="center"></td>
+<td align="center">INACTIVE</td>
+<td align="center">Never changes</td>
+</tr>
+</tbody>
+</table>
+<p>When <a href="#controls_android.control.afMode">android.<wbr/>control.<wbr/>af<wbr/>Mode</a> is AF_<wbr/>MODE_<wbr/>AUTO or AF_<wbr/>MODE_<wbr/>MACRO:</p>
+<table>
+<thead>
+<tr>
+<th align="center">State</th>
+<th align="center">Transition Cause</th>
+<th align="center">New State</th>
+<th align="center">Notes</th>
+</tr>
+</thead>
+<tbody>
+<tr>
+<td align="center">INACTIVE</td>
+<td align="center">AF_<wbr/>TRIGGER</td>
+<td align="center">ACTIVE_<wbr/>SCAN</td>
+<td align="center">Start AF sweep,<wbr/> Lens now moving</td>
+</tr>
+<tr>
+<td align="center">ACTIVE_<wbr/>SCAN</td>
+<td align="center">AF sweep done</td>
+<td align="center">FOCUSED_<wbr/>LOCKED</td>
+<td align="center">Focused,<wbr/> Lens now locked</td>
+</tr>
+<tr>
+<td align="center">ACTIVE_<wbr/>SCAN</td>
+<td align="center">AF sweep done</td>
+<td align="center">NOT_<wbr/>FOCUSED_<wbr/>LOCKED</td>
+<td align="center">Not focused,<wbr/> Lens now locked</td>
+</tr>
+<tr>
+<td align="center">ACTIVE_<wbr/>SCAN</td>
+<td align="center">AF_<wbr/>CANCEL</td>
+<td align="center">INACTIVE</td>
+<td align="center">Cancel/<wbr/>reset AF,<wbr/> Lens now locked</td>
+</tr>
+<tr>
+<td align="center">FOCUSED_<wbr/>LOCKED</td>
+<td align="center">AF_<wbr/>CANCEL</td>
+<td align="center">INACTIVE</td>
+<td align="center">Cancel/<wbr/>reset AF</td>
+</tr>
+<tr>
+<td align="center">FOCUSED_<wbr/>LOCKED</td>
+<td align="center">AF_<wbr/>TRIGGER</td>
+<td align="center">ACTIVE_<wbr/>SCAN</td>
+<td align="center">Start new sweep,<wbr/> Lens now moving</td>
+</tr>
+<tr>
+<td align="center">NOT_<wbr/>FOCUSED_<wbr/>LOCKED</td>
+<td align="center">AF_<wbr/>CANCEL</td>
+<td align="center">INACTIVE</td>
+<td align="center">Cancel/<wbr/>reset AF</td>
+</tr>
+<tr>
+<td align="center">NOT_<wbr/>FOCUSED_<wbr/>LOCKED</td>
+<td align="center">AF_<wbr/>TRIGGER</td>
+<td align="center">ACTIVE_<wbr/>SCAN</td>
+<td align="center">Start new sweep,<wbr/> Lens now moving</td>
+</tr>
+<tr>
+<td align="center">Any state</td>
+<td align="center">Mode change</td>
+<td align="center">INACTIVE</td>
+<td align="center"></td>
+</tr>
+</tbody>
+</table>
+<p>For the above table,<wbr/> the camera device may skip reporting any state changes that happen
+without application intervention (i.<wbr/>e.<wbr/> mode switch,<wbr/> trigger,<wbr/> locking).<wbr/> Any state that
+can be skipped in that manner is called a transient state.<wbr/></p>
+<p>For example,<wbr/> for these AF modes (AF_<wbr/>MODE_<wbr/>AUTO and AF_<wbr/>MODE_<wbr/>MACRO),<wbr/> in addition to the
+state transitions listed in above table,<wbr/> it is also legal for the camera device to skip
+one or more transient states between two results.<wbr/> See below table for examples:</p>
+<table>
+<thead>
+<tr>
+<th align="center">State</th>
+<th align="center">Transition Cause</th>
+<th align="center">New State</th>
+<th align="center">Notes</th>
+</tr>
+</thead>
+<tbody>
+<tr>
+<td align="center">INACTIVE</td>
+<td align="center">AF_<wbr/>TRIGGER</td>
+<td align="center">FOCUSED_<wbr/>LOCKED</td>
+<td align="center">Focus is already good or good after a scan,<wbr/> lens is now locked.<wbr/></td>
+</tr>
+<tr>
+<td align="center">INACTIVE</td>
+<td align="center">AF_<wbr/>TRIGGER</td>
+<td align="center">NOT_<wbr/>FOCUSED_<wbr/>LOCKED</td>
+<td align="center">Focus failed after a scan,<wbr/> lens is now locked.<wbr/></td>
+</tr>
+<tr>
+<td align="center">FOCUSED_<wbr/>LOCKED</td>
+<td align="center">AF_<wbr/>TRIGGER</td>
+<td align="center">FOCUSED_<wbr/>LOCKED</td>
+<td align="center">Focus is already good or good after a scan,<wbr/> lens is now locked.<wbr/></td>
+</tr>
+<tr>
+<td align="center">NOT_<wbr/>FOCUSED_<wbr/>LOCKED</td>
+<td align="center">AF_<wbr/>TRIGGER</td>
+<td align="center">FOCUSED_<wbr/>LOCKED</td>
+<td align="center">Focus is good after a scan,<wbr/> lens is now locked.<wbr/></td>
+</tr>
+</tbody>
+</table>
+<p>When <a href="#controls_android.control.afMode">android.<wbr/>control.<wbr/>af<wbr/>Mode</a> is AF_<wbr/>MODE_<wbr/>CONTINUOUS_<wbr/>VIDEO:</p>
+<table>
+<thead>
+<tr>
+<th align="center">State</th>
+<th align="center">Transition Cause</th>
+<th align="center">New State</th>
+<th align="center">Notes</th>
+</tr>
+</thead>
+<tbody>
+<tr>
+<td align="center">INACTIVE</td>
+<td align="center">Camera device initiates new scan</td>
+<td align="center">PASSIVE_<wbr/>SCAN</td>
+<td align="center">Start AF scan,<wbr/> Lens now moving</td>
+</tr>
+<tr>
+<td align="center">INACTIVE</td>
+<td align="center">AF_<wbr/>TRIGGER</td>
+<td align="center">NOT_<wbr/>FOCUSED_<wbr/>LOCKED</td>
+<td align="center">AF state query,<wbr/> Lens now locked</td>
+</tr>
+<tr>
+<td align="center">PASSIVE_<wbr/>SCAN</td>
+<td align="center">Camera device completes current scan</td>
+<td align="center">PASSIVE_<wbr/>FOCUSED</td>
+<td align="center">End AF scan,<wbr/> Lens now locked</td>
+</tr>
+<tr>
+<td align="center">PASSIVE_<wbr/>SCAN</td>
+<td align="center">Camera device fails current scan</td>
+<td align="center">PASSIVE_<wbr/>UNFOCUSED</td>
+<td align="center">End AF scan,<wbr/> Lens now locked</td>
+</tr>
+<tr>
+<td align="center">PASSIVE_<wbr/>SCAN</td>
+<td align="center">AF_<wbr/>TRIGGER</td>
+<td align="center">FOCUSED_<wbr/>LOCKED</td>
+<td align="center">Immediate transition,<wbr/> if focus is good.<wbr/> Lens now locked</td>
+</tr>
+<tr>
+<td align="center">PASSIVE_<wbr/>SCAN</td>
+<td align="center">AF_<wbr/>TRIGGER</td>
+<td align="center">NOT_<wbr/>FOCUSED_<wbr/>LOCKED</td>
+<td align="center">Immediate transition,<wbr/> if focus is bad.<wbr/> Lens now locked</td>
+</tr>
+<tr>
+<td align="center">PASSIVE_<wbr/>SCAN</td>
+<td align="center">AF_<wbr/>CANCEL</td>
+<td align="center">INACTIVE</td>
+<td align="center">Reset lens position,<wbr/> Lens now locked</td>
+</tr>
+<tr>
+<td align="center">PASSIVE_<wbr/>FOCUSED</td>
+<td align="center">Camera device initiates new scan</td>
+<td align="center">PASSIVE_<wbr/>SCAN</td>
+<td align="center">Start AF scan,<wbr/> Lens now moving</td>
+</tr>
+<tr>
+<td align="center">PASSIVE_<wbr/>UNFOCUSED</td>
+<td align="center">Camera device initiates new scan</td>
+<td align="center">PASSIVE_<wbr/>SCAN</td>
+<td align="center">Start AF scan,<wbr/> Lens now moving</td>
+</tr>
+<tr>
+<td align="center">PASSIVE_<wbr/>FOCUSED</td>
+<td align="center">AF_<wbr/>TRIGGER</td>
+<td align="center">FOCUSED_<wbr/>LOCKED</td>
+<td align="center">Immediate transition,<wbr/> lens now locked</td>
+</tr>
+<tr>
+<td align="center">PASSIVE_<wbr/>UNFOCUSED</td>
+<td align="center">AF_<wbr/>TRIGGER</td>
+<td align="center">NOT_<wbr/>FOCUSED_<wbr/>LOCKED</td>
+<td align="center">Immediate transition,<wbr/> lens now locked</td>
+</tr>
+<tr>
+<td align="center">FOCUSED_<wbr/>LOCKED</td>
+<td align="center">AF_<wbr/>TRIGGER</td>
+<td align="center">FOCUSED_<wbr/>LOCKED</td>
+<td align="center">No effect</td>
+</tr>
+<tr>
+<td align="center">FOCUSED_<wbr/>LOCKED</td>
+<td align="center">AF_<wbr/>CANCEL</td>
+<td align="center">INACTIVE</td>
+<td align="center">Restart AF scan</td>
+</tr>
+<tr>
+<td align="center">NOT_<wbr/>FOCUSED_<wbr/>LOCKED</td>
+<td align="center">AF_<wbr/>TRIGGER</td>
+<td align="center">NOT_<wbr/>FOCUSED_<wbr/>LOCKED</td>
+<td align="center">No effect</td>
+</tr>
+<tr>
+<td align="center">NOT_<wbr/>FOCUSED_<wbr/>LOCKED</td>
+<td align="center">AF_<wbr/>CANCEL</td>
+<td align="center">INACTIVE</td>
+<td align="center">Restart AF scan</td>
+</tr>
+</tbody>
+</table>
+<p>When <a href="#controls_android.control.afMode">android.<wbr/>control.<wbr/>af<wbr/>Mode</a> is AF_<wbr/>MODE_<wbr/>CONTINUOUS_<wbr/>PICTURE:</p>
+<table>
+<thead>
+<tr>
+<th align="center">State</th>
+<th align="center">Transition Cause</th>
+<th align="center">New State</th>
+<th align="center">Notes</th>
+</tr>
+</thead>
+<tbody>
+<tr>
+<td align="center">INACTIVE</td>
+<td align="center">Camera device initiates new scan</td>
+<td align="center">PASSIVE_<wbr/>SCAN</td>
+<td align="center">Start AF scan,<wbr/> Lens now moving</td>
+</tr>
+<tr>
+<td align="center">INACTIVE</td>
+<td align="center">AF_<wbr/>TRIGGER</td>
+<td align="center">NOT_<wbr/>FOCUSED_<wbr/>LOCKED</td>
+<td align="center">AF state query,<wbr/> Lens now locked</td>
+</tr>
+<tr>
+<td align="center">PASSIVE_<wbr/>SCAN</td>
+<td align="center">Camera device completes current scan</td>
+<td align="center">PASSIVE_<wbr/>FOCUSED</td>
+<td align="center">End AF scan,<wbr/> Lens now locked</td>
+</tr>
+<tr>
+<td align="center">PASSIVE_<wbr/>SCAN</td>
+<td align="center">Camera device fails current scan</td>
+<td align="center">PASSIVE_<wbr/>UNFOCUSED</td>
+<td align="center">End AF scan,<wbr/> Lens now locked</td>
+</tr>
+<tr>
+<td align="center">PASSIVE_<wbr/>SCAN</td>
+<td align="center">AF_<wbr/>TRIGGER</td>
+<td align="center">FOCUSED_<wbr/>LOCKED</td>
+<td align="center">Eventual transition once the focus is good.<wbr/> Lens now locked</td>
+</tr>
+<tr>
+<td align="center">PASSIVE_<wbr/>SCAN</td>
+<td align="center">AF_<wbr/>TRIGGER</td>
+<td align="center">NOT_<wbr/>FOCUSED_<wbr/>LOCKED</td>
+<td align="center">Eventual transition if cannot find focus.<wbr/> Lens now locked</td>
+</tr>
+<tr>
+<td align="center">PASSIVE_<wbr/>SCAN</td>
+<td align="center">AF_<wbr/>CANCEL</td>
+<td align="center">INACTIVE</td>
+<td align="center">Reset lens position,<wbr/> Lens now locked</td>
+</tr>
+<tr>
+<td align="center">PASSIVE_<wbr/>FOCUSED</td>
+<td align="center">Camera device initiates new scan</td>
+<td align="center">PASSIVE_<wbr/>SCAN</td>
+<td align="center">Start AF scan,<wbr/> Lens now moving</td>
+</tr>
+<tr>
+<td align="center">PASSIVE_<wbr/>UNFOCUSED</td>
+<td align="center">Camera device initiates new scan</td>
+<td align="center">PASSIVE_<wbr/>SCAN</td>
+<td align="center">Start AF scan,<wbr/> Lens now moving</td>
+</tr>
+<tr>
+<td align="center">PASSIVE_<wbr/>FOCUSED</td>
+<td align="center">AF_<wbr/>TRIGGER</td>
+<td align="center">FOCUSED_<wbr/>LOCKED</td>
+<td align="center">Immediate trans.<wbr/> Lens now locked</td>
+</tr>
+<tr>
+<td align="center">PASSIVE_<wbr/>UNFOCUSED</td>
+<td align="center">AF_<wbr/>TRIGGER</td>
+<td align="center">NOT_<wbr/>FOCUSED_<wbr/>LOCKED</td>
+<td align="center">Immediate trans.<wbr/> Lens now locked</td>
+</tr>
+<tr>
+<td align="center">FOCUSED_<wbr/>LOCKED</td>
+<td align="center">AF_<wbr/>TRIGGER</td>
+<td align="center">FOCUSED_<wbr/>LOCKED</td>
+<td align="center">No effect</td>
+</tr>
+<tr>
+<td align="center">FOCUSED_<wbr/>LOCKED</td>
+<td align="center">AF_<wbr/>CANCEL</td>
+<td align="center">INACTIVE</td>
+<td align="center">Restart AF scan</td>
+</tr>
+<tr>
+<td align="center">NOT_<wbr/>FOCUSED_<wbr/>LOCKED</td>
+<td align="center">AF_<wbr/>TRIGGER</td>
+<td align="center">NOT_<wbr/>FOCUSED_<wbr/>LOCKED</td>
+<td align="center">No effect</td>
+</tr>
+<tr>
+<td align="center">NOT_<wbr/>FOCUSED_<wbr/>LOCKED</td>
+<td align="center">AF_<wbr/>CANCEL</td>
+<td align="center">INACTIVE</td>
+<td align="center">Restart AF scan</td>
+</tr>
+</tbody>
+</table>
+<p>When switching between AF_<wbr/>MODE_<wbr/>CONTINUOUS_<wbr/>* (CAF modes) and AF_<wbr/>MODE_<wbr/>AUTO/<wbr/>AF_<wbr/>MODE_<wbr/>MACRO
+(AUTO modes),<wbr/> the initial INACTIVE or PASSIVE_<wbr/>SCAN states may be skipped by the
+camera device.<wbr/> When a trigger is included in a mode switch request,<wbr/> the trigger
+will be evaluated in the context of the new mode in the request.<wbr/>
+See below table for examples:</p>
+<table>
+<thead>
+<tr>
+<th align="center">State</th>
+<th align="center">Transition Cause</th>
+<th align="center">New State</th>
+<th align="center">Notes</th>
+</tr>
+</thead>
+<tbody>
+<tr>
+<td align="center">any state</td>
+<td align="center">CAF--&gt;AUTO mode switch</td>
+<td align="center">INACTIVE</td>
+<td align="center">Mode switch without trigger,<wbr/> initial state must be INACTIVE</td>
+</tr>
+<tr>
+<td align="center">any state</td>
+<td align="center">CAF--&gt;AUTO mode switch with AF_<wbr/>TRIGGER</td>
+<td align="center">trigger-reachable states from INACTIVE</td>
+<td align="center">Mode switch with trigger,<wbr/> INACTIVE is skipped</td>
+</tr>
+<tr>
+<td align="center">any state</td>
+<td align="center">AUTO--&gt;CAF mode switch</td>
+<td align="center">passively reachable states from INACTIVE</td>
+<td align="center">Mode switch without trigger,<wbr/> passive transient state is skipped</td>
+</tr>
+</tbody>
+</table>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="dynamic_android.control.afTriggerId">
+            <td class="entry_name
+                entry_name_deprecated
+             " rowspan="3">
+              android.<wbr/>control.<wbr/>af<wbr/>Trigger<wbr/>Id
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int32</span>
+
+              <span class="entry_type_visibility"> [system]</span>
+
+
+
+              <span class="entry_type_deprecated">[deprecated] </span>
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>The ID sent with the latest
+CAMERA2_<wbr/>TRIGGER_<wbr/>AUTOFOCUS call</p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p><span class="entry_range_deprecated">Deprecated</span>. Do not use.</p>
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>Must be 0 if no CAMERA2_<wbr/>TRIGGER_<wbr/>AUTOFOCUS trigger
+received yet by HAL.<wbr/> Always updated even if AF algorithm
+ignores the trigger</p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="dynamic_android.control.awbLock">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>control.<wbr/>awb<wbr/>Lock
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">byte</span>
+
+              <span class="entry_type_visibility"> [public as boolean]</span>
+
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">OFF</span>
+                    <span class="entry_type_enum_notes"><p>Auto-white balance lock is disabled; the AWB
+algorithm is free to update its parameters if in AUTO
+mode.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">ON</span>
+                    <span class="entry_type_enum_notes"><p>Auto-white balance lock is enabled; the AWB
+algorithm will not update its parameters while the lock
+is active.<wbr/></p></span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Whether auto-white balance (AWB) is currently locked to its
+latest calculated values.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_BC">BC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>When set to <code>true</code> (ON),<wbr/> the AWB algorithm is locked to its latest parameters,<wbr/>
+and will not change color balance settings until the lock is set to <code>false</code> (OFF).<wbr/></p>
+<p>Since the camera device has a pipeline of in-flight requests,<wbr/> the settings that
+get locked do not necessarily correspond to the settings that were present in the
+latest capture result received from the camera device,<wbr/> since additional captures
+and AWB updates may have occurred even before the result was sent out.<wbr/> If an
+application is switching between automatic and manual control and wishes to eliminate
+any flicker during the switch,<wbr/> the following procedure is recommended:</p>
+<ol>
+<li>Starting in auto-AWB mode:</li>
+<li>Lock AWB</li>
+<li>Wait for the first result to be output that has the AWB locked</li>
+<li>Copy AWB settings from that result into a request,<wbr/> set the request to manual AWB</li>
+<li>Submit the capture request,<wbr/> proceed to run manual AWB as desired.<wbr/></li>
+</ol>
+<p>Note that AWB lock is only meaningful when
+<a href="#controls_android.control.awbMode">android.<wbr/>control.<wbr/>awb<wbr/>Mode</a> is in the AUTO mode; in other modes,<wbr/>
+AWB is already fixed to a specific setting.<wbr/></p>
+<p>Some LEGACY devices may not support ON; the value is then overridden to OFF.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="dynamic_android.control.awbMode">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>control.<wbr/>awb<wbr/>Mode
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">byte</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">OFF</span>
+                    <span class="entry_type_enum_notes"><p>The camera device's auto-white balance routine is disabled.<wbr/></p>
+<p>The application-selected color transform matrix
+(<a href="#controls_android.colorCorrection.transform">android.<wbr/>color<wbr/>Correction.<wbr/>transform</a>) and gains
+(<a href="#controls_android.colorCorrection.gains">android.<wbr/>color<wbr/>Correction.<wbr/>gains</a>) are used by the camera
+device for manual white balance control.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">AUTO</span>
+                    <span class="entry_type_enum_notes"><p>The camera device's auto-white balance routine is active.<wbr/></p>
+<p>The application's values for <a href="#controls_android.colorCorrection.transform">android.<wbr/>color<wbr/>Correction.<wbr/>transform</a>
+and <a href="#controls_android.colorCorrection.gains">android.<wbr/>color<wbr/>Correction.<wbr/>gains</a> are ignored.<wbr/>
+For devices that support the MANUAL_<wbr/>POST_<wbr/>PROCESSING capability,<wbr/> the
+values used by the camera device for the transform and gains
+will be available in the capture result for this request.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">INCANDESCENT</span>
+                    <span class="entry_type_enum_notes"><p>The camera device's auto-white balance routine is disabled;
+the camera device uses incandescent light as the assumed scene
+illumination for white balance.<wbr/></p>
+<p>While the exact white balance transforms are up to the
+camera device,<wbr/> they will approximately match the CIE
+standard illuminant A.<wbr/></p>
+<p>The application's values for <a href="#controls_android.colorCorrection.transform">android.<wbr/>color<wbr/>Correction.<wbr/>transform</a>
+and <a href="#controls_android.colorCorrection.gains">android.<wbr/>color<wbr/>Correction.<wbr/>gains</a> are ignored.<wbr/>
+For devices that support the MANUAL_<wbr/>POST_<wbr/>PROCESSING capability,<wbr/> the
+values used by the camera device for the transform and gains
+will be available in the capture result for this request.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">FLUORESCENT</span>
+                    <span class="entry_type_enum_notes"><p>The camera device's auto-white balance routine is disabled;
+the camera device uses fluorescent light as the assumed scene
+illumination for white balance.<wbr/></p>
+<p>While the exact white balance transforms are up to the
+camera device,<wbr/> they will approximately match the CIE
+standard illuminant F2.<wbr/></p>
+<p>The application's values for <a href="#controls_android.colorCorrection.transform">android.<wbr/>color<wbr/>Correction.<wbr/>transform</a>
+and <a href="#controls_android.colorCorrection.gains">android.<wbr/>color<wbr/>Correction.<wbr/>gains</a> are ignored.<wbr/>
+For devices that support the MANUAL_<wbr/>POST_<wbr/>PROCESSING capability,<wbr/> the
+values used by the camera device for the transform and gains
+will be available in the capture result for this request.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">WARM_FLUORESCENT</span>
+                    <span class="entry_type_enum_notes"><p>The camera device's auto-white balance routine is disabled;
+the camera device uses warm fluorescent light as the assumed scene
+illumination for white balance.<wbr/></p>
+<p>While the exact white balance transforms are up to the
+camera device,<wbr/> they will approximately match the CIE
+standard illuminant F4.<wbr/></p>
+<p>The application's values for <a href="#controls_android.colorCorrection.transform">android.<wbr/>color<wbr/>Correction.<wbr/>transform</a>
+and <a href="#controls_android.colorCorrection.gains">android.<wbr/>color<wbr/>Correction.<wbr/>gains</a> are ignored.<wbr/>
+For devices that support the MANUAL_<wbr/>POST_<wbr/>PROCESSING capability,<wbr/> the
+values used by the camera device for the transform and gains
+will be available in the capture result for this request.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">DAYLIGHT</span>
+                    <span class="entry_type_enum_notes"><p>The camera device's auto-white balance routine is disabled;
+the camera device uses daylight light as the assumed scene
+illumination for white balance.<wbr/></p>
+<p>While the exact white balance transforms are up to the
+camera device,<wbr/> they will approximately match the CIE
+standard illuminant D65.<wbr/></p>
+<p>The application's values for <a href="#controls_android.colorCorrection.transform">android.<wbr/>color<wbr/>Correction.<wbr/>transform</a>
+and <a href="#controls_android.colorCorrection.gains">android.<wbr/>color<wbr/>Correction.<wbr/>gains</a> are ignored.<wbr/>
+For devices that support the MANUAL_<wbr/>POST_<wbr/>PROCESSING capability,<wbr/> the
+values used by the camera device for the transform and gains
+will be available in the capture result for this request.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">CLOUDY_DAYLIGHT</span>
+                    <span class="entry_type_enum_notes"><p>The camera device's auto-white balance routine is disabled;
+the camera device uses cloudy daylight light as the assumed scene
+illumination for white balance.<wbr/></p>
+<p>The application's values for <a href="#controls_android.colorCorrection.transform">android.<wbr/>color<wbr/>Correction.<wbr/>transform</a>
+and <a href="#controls_android.colorCorrection.gains">android.<wbr/>color<wbr/>Correction.<wbr/>gains</a> are ignored.<wbr/>
+For devices that support the MANUAL_<wbr/>POST_<wbr/>PROCESSING capability,<wbr/> the
+values used by the camera device for the transform and gains
+will be available in the capture result for this request.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">TWILIGHT</span>
+                    <span class="entry_type_enum_notes"><p>The camera device's auto-white balance routine is disabled;
+the camera device uses twilight light as the assumed scene
+illumination for white balance.<wbr/></p>
+<p>The application's values for <a href="#controls_android.colorCorrection.transform">android.<wbr/>color<wbr/>Correction.<wbr/>transform</a>
+and <a href="#controls_android.colorCorrection.gains">android.<wbr/>color<wbr/>Correction.<wbr/>gains</a> are ignored.<wbr/>
+For devices that support the MANUAL_<wbr/>POST_<wbr/>PROCESSING capability,<wbr/> the
+values used by the camera device for the transform and gains
+will be available in the capture result for this request.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">SHADE</span>
+                    <span class="entry_type_enum_notes"><p>The camera device's auto-white balance routine is disabled;
+the camera device uses shade light as the assumed scene
+illumination for white balance.<wbr/></p>
+<p>The application's values for <a href="#controls_android.colorCorrection.transform">android.<wbr/>color<wbr/>Correction.<wbr/>transform</a>
+and <a href="#controls_android.colorCorrection.gains">android.<wbr/>color<wbr/>Correction.<wbr/>gains</a> are ignored.<wbr/>
+For devices that support the MANUAL_<wbr/>POST_<wbr/>PROCESSING capability,<wbr/> the
+values used by the camera device for the transform and gains
+will be available in the capture result for this request.<wbr/></p></span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Whether auto-white balance (AWB) is currently setting the color
+transform fields,<wbr/> and what its illumination target
+is.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p><a href="#static_android.control.awbAvailableModes">android.<wbr/>control.<wbr/>awb<wbr/>Available<wbr/>Modes</a></p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_BC">BC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>This control is only effective if <a href="#controls_android.control.mode">android.<wbr/>control.<wbr/>mode</a> is AUTO.<wbr/></p>
+<p>When set to the AUTO mode,<wbr/> the camera device's auto-white balance
+routine is enabled,<wbr/> overriding the application's selected
+<a href="#controls_android.colorCorrection.transform">android.<wbr/>color<wbr/>Correction.<wbr/>transform</a>,<wbr/> <a href="#controls_android.colorCorrection.gains">android.<wbr/>color<wbr/>Correction.<wbr/>gains</a> and
+<a href="#controls_android.colorCorrection.mode">android.<wbr/>color<wbr/>Correction.<wbr/>mode</a>.<wbr/> Note that when <a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a>
+is OFF,<wbr/> the behavior of AWB is device dependent.<wbr/> It is recommended to
+also set AWB mode to OFF or lock AWB by using <a href="#controls_android.control.awbLock">android.<wbr/>control.<wbr/>awb<wbr/>Lock</a> before
+setting AE mode to OFF.<wbr/></p>
+<p>When set to the OFF mode,<wbr/> the camera device's auto-white balance
+routine is disabled.<wbr/> The application manually controls the white
+balance by <a href="#controls_android.colorCorrection.transform">android.<wbr/>color<wbr/>Correction.<wbr/>transform</a>,<wbr/> <a href="#controls_android.colorCorrection.gains">android.<wbr/>color<wbr/>Correction.<wbr/>gains</a>
+and <a href="#controls_android.colorCorrection.mode">android.<wbr/>color<wbr/>Correction.<wbr/>mode</a>.<wbr/></p>
+<p>When set to any other modes,<wbr/> the camera device's auto-white
+balance routine is disabled.<wbr/> The camera device uses each
+particular illumination target for white balance
+adjustment.<wbr/> The application's values for
+<a href="#controls_android.colorCorrection.transform">android.<wbr/>color<wbr/>Correction.<wbr/>transform</a>,<wbr/>
+<a href="#controls_android.colorCorrection.gains">android.<wbr/>color<wbr/>Correction.<wbr/>gains</a> and
+<a href="#controls_android.colorCorrection.mode">android.<wbr/>color<wbr/>Correction.<wbr/>mode</a> are ignored.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="dynamic_android.control.awbRegions">
+            <td class="entry_name
+             " rowspan="5">
+              android.<wbr/>control.<wbr/>awb<wbr/>Regions
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int32</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  5 x area_count
+                </span>
+              <span class="entry_type_visibility"> [public as meteringRectangle]</span>
+
+
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>List of metering areas to use for auto-white-balance illuminant
+estimation.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+              Pixel coordinates within android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size
+            </td>
+
+            <td class="entry_range">
+              <p>Coordinates must be between <code>[(0,<wbr/>0),<wbr/> (width,<wbr/> height))</code> of
+<a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a></p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_BC">BC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>Not available if <a href="#static_android.control.maxRegionsAwb">android.<wbr/>control.<wbr/>max<wbr/>Regions<wbr/>Awb</a> is 0.<wbr/>
+Otherwise will always be present.<wbr/></p>
+<p>The maximum number of regions supported by the device is determined by the value
+of <a href="#static_android.control.maxRegionsAwb">android.<wbr/>control.<wbr/>max<wbr/>Regions<wbr/>Awb</a>.<wbr/></p>
+<p>The coordinate system is based on the active pixel array,<wbr/>
+with (0,<wbr/>0) being the top-left pixel in the active pixel array,<wbr/> and
+(<a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a>.<wbr/>width - 1,<wbr/>
+<a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a>.<wbr/>height - 1) being the
+bottom-right pixel in the active pixel array.<wbr/></p>
+<p>The weight must range from 0 to 1000,<wbr/> and represents a weight
+for every pixel in the area.<wbr/> This means that a large metering area
+with the same weight as a smaller area will have more effect in
+the metering result.<wbr/> Metering areas can partially overlap and the
+camera device will add the weights in the overlap region.<wbr/></p>
+<p>The weights are relative to weights of other white balance metering regions,<wbr/> so if
+only one region is used,<wbr/> all non-zero weights will have the same effect.<wbr/> A region with
+0 weight is ignored.<wbr/></p>
+<p>If all regions have 0 weight,<wbr/> then no specific metering area needs to be used by the
+camera device.<wbr/></p>
+<p>If the metering region is outside the used <a href="#controls_android.scaler.cropRegion">android.<wbr/>scaler.<wbr/>crop<wbr/>Region</a> returned in
+capture result metadata,<wbr/> the camera device will ignore the sections outside the crop
+region and output only the intersection rectangle as the metering region in the result
+metadata.<wbr/>  If the region is entirely outside the crop region,<wbr/> it will be ignored and
+not reported in the result metadata.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>The HAL level representation of MeteringRectangle[] is a
+int[5 * area_<wbr/>count].<wbr/>
+Every five elements represent a metering region of
+(xmin,<wbr/> ymin,<wbr/> xmax,<wbr/> ymax,<wbr/> weight).<wbr/>
+The rectangle is defined to be inclusive on xmin and ymin,<wbr/> but
+exclusive on xmax and ymax.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="dynamic_android.control.captureIntent">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>control.<wbr/>capture<wbr/>Intent
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">byte</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">CUSTOM</span>
+                    <span class="entry_type_enum_notes"><p>The goal of this request doesn't fall into the other
+categories.<wbr/> The camera device will default to preview-like
+behavior.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">PREVIEW</span>
+                    <span class="entry_type_enum_notes"><p>This request is for a preview-like use case.<wbr/></p>
+<p>The precapture trigger may be used to start off a metering
+w/<wbr/>flash sequence.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">STILL_CAPTURE</span>
+                    <span class="entry_type_enum_notes"><p>This request is for a still capture-type
+use case.<wbr/></p>
+<p>If the flash unit is under automatic control,<wbr/> it may fire as needed.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">VIDEO_RECORD</span>
+                    <span class="entry_type_enum_notes"><p>This request is for a video recording
+use case.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">VIDEO_SNAPSHOT</span>
+                    <span class="entry_type_enum_notes"><p>This request is for a video snapshot (still
+image while recording video) use case.<wbr/></p>
+<p>The camera device should take the highest-quality image
+possible (given the other settings) without disrupting the
+frame rate of video recording.<wbr/>  </p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">ZERO_SHUTTER_LAG</span>
+                    <span class="entry_type_enum_notes"><p>This request is for a ZSL use case; the
+application will stream full-resolution images and
+reprocess one or several later for a final
+capture.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">MANUAL</span>
+                    <span class="entry_type_enum_notes"><p>This request is for manual capture use case where
+the applications want to directly control the capture parameters.<wbr/></p>
+<p>For example,<wbr/> the application may wish to manually control
+<a href="#controls_android.sensor.exposureTime">android.<wbr/>sensor.<wbr/>exposure<wbr/>Time</a>,<wbr/> <a href="#controls_android.sensor.sensitivity">android.<wbr/>sensor.<wbr/>sensitivity</a>,<wbr/> etc.<wbr/></p></span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Information to the camera device 3A (auto-exposure,<wbr/>
+auto-focus,<wbr/> auto-white balance) routines about the purpose
+of this capture,<wbr/> to help the camera device to decide optimal 3A
+strategy.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_BC">BC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>This control (except for MANUAL) is only effective if
+<code><a href="#controls_android.control.mode">android.<wbr/>control.<wbr/>mode</a> != OFF</code> and any 3A routine is active.<wbr/></p>
+<p>ZERO_<wbr/>SHUTTER_<wbr/>LAG will be supported if <a href="#static_android.request.availableCapabilities">android.<wbr/>request.<wbr/>available<wbr/>Capabilities</a>
+contains PRIVATE_<wbr/>REPROCESSING or YUV_<wbr/>REPROCESSING.<wbr/> MANUAL will be supported if
+<a href="#static_android.request.availableCapabilities">android.<wbr/>request.<wbr/>available<wbr/>Capabilities</a> contains MANUAL_<wbr/>SENSOR.<wbr/> Other intent values are
+always supported.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="dynamic_android.control.awbState">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>control.<wbr/>awb<wbr/>State
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">byte</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+              <span class="entry_type_hwlevel">[limited] </span>
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">INACTIVE</span>
+                    <span class="entry_type_enum_notes"><p>AWB is not in auto mode,<wbr/> or has not yet started metering.<wbr/></p>
+<p>When a camera device is opened,<wbr/> it starts in this
+state.<wbr/> This is a transient state,<wbr/> the camera device may
+skip reporting this state in capture
+result.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">SEARCHING</span>
+                    <span class="entry_type_enum_notes"><p>AWB doesn't yet have a good set of control
+values for the current scene.<wbr/></p>
+<p>This is a transient state,<wbr/> the camera device
+may skip reporting this state in capture result.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">CONVERGED</span>
+                    <span class="entry_type_enum_notes"><p>AWB has a good set of control values for the
+current scene.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">LOCKED</span>
+                    <span class="entry_type_enum_notes"><p>AWB has been locked.<wbr/></p></span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Current state of auto-white balance (AWB) algorithm.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>Switching between or enabling AWB modes (<a href="#controls_android.control.awbMode">android.<wbr/>control.<wbr/>awb<wbr/>Mode</a>) always
+resets the AWB state to INACTIVE.<wbr/> Similarly,<wbr/> switching between <a href="#controls_android.control.mode">android.<wbr/>control.<wbr/>mode</a>,<wbr/>
+or <a href="#controls_android.control.sceneMode">android.<wbr/>control.<wbr/>scene<wbr/>Mode</a> if <code><a href="#controls_android.control.mode">android.<wbr/>control.<wbr/>mode</a> == USE_<wbr/>SCENE_<wbr/>MODE</code> resets all
+the algorithm states to INACTIVE.<wbr/></p>
+<p>The camera device can do several state transitions between two results,<wbr/> if it is
+allowed by the state transition table.<wbr/> So INACTIVE may never actually be seen in
+a result.<wbr/></p>
+<p>The state in the result is the state for this image (in sync with this image): if
+AWB state becomes CONVERGED,<wbr/> then the image data associated with this result should
+be good to use.<wbr/></p>
+<p>Below are state transition tables for different AWB modes.<wbr/></p>
+<p>When <code><a href="#controls_android.control.awbMode">android.<wbr/>control.<wbr/>awb<wbr/>Mode</a> != AWB_<wbr/>MODE_<wbr/>AUTO</code>:</p>
+<table>
+<thead>
+<tr>
+<th align="center">State</th>
+<th align="center">Transition Cause</th>
+<th align="center">New State</th>
+<th align="center">Notes</th>
+</tr>
+</thead>
+<tbody>
+<tr>
+<td align="center">INACTIVE</td>
+<td align="center"></td>
+<td align="center">INACTIVE</td>
+<td align="center">Camera device auto white balance algorithm is disabled</td>
+</tr>
+</tbody>
+</table>
+<p>When <a href="#controls_android.control.awbMode">android.<wbr/>control.<wbr/>awb<wbr/>Mode</a> is AWB_<wbr/>MODE_<wbr/>AUTO:</p>
+<table>
+<thead>
+<tr>
+<th align="center">State</th>
+<th align="center">Transition Cause</th>
+<th align="center">New State</th>
+<th align="center">Notes</th>
+</tr>
+</thead>
+<tbody>
+<tr>
+<td align="center">INACTIVE</td>
+<td align="center">Camera device initiates AWB scan</td>
+<td align="center">SEARCHING</td>
+<td align="center">Values changing</td>
+</tr>
+<tr>
+<td align="center">INACTIVE</td>
+<td align="center"><a href="#controls_android.control.awbLock">android.<wbr/>control.<wbr/>awb<wbr/>Lock</a> is ON</td>
+<td align="center">LOCKED</td>
+<td align="center">Values locked</td>
+</tr>
+<tr>
+<td align="center">SEARCHING</td>
+<td align="center">Camera device finishes AWB scan</td>
+<td align="center">CONVERGED</td>
+<td align="center">Good values,<wbr/> not changing</td>
+</tr>
+<tr>
+<td align="center">SEARCHING</td>
+<td align="center"><a href="#controls_android.control.awbLock">android.<wbr/>control.<wbr/>awb<wbr/>Lock</a> is ON</td>
+<td align="center">LOCKED</td>
+<td align="center">Values locked</td>
+</tr>
+<tr>
+<td align="center">CONVERGED</td>
+<td align="center">Camera device initiates AWB scan</td>
+<td align="center">SEARCHING</td>
+<td align="center">Values changing</td>
+</tr>
+<tr>
+<td align="center">CONVERGED</td>
+<td align="center"><a href="#controls_android.control.awbLock">android.<wbr/>control.<wbr/>awb<wbr/>Lock</a> is ON</td>
+<td align="center">LOCKED</td>
+<td align="center">Values locked</td>
+</tr>
+<tr>
+<td align="center">LOCKED</td>
+<td align="center"><a href="#controls_android.control.awbLock">android.<wbr/>control.<wbr/>awb<wbr/>Lock</a> is OFF</td>
+<td align="center">SEARCHING</td>
+<td align="center">Values not good after unlock</td>
+</tr>
+</tbody>
+</table>
+<p>For the above table,<wbr/> the camera device may skip reporting any state changes that happen
+without application intervention (i.<wbr/>e.<wbr/> mode switch,<wbr/> trigger,<wbr/> locking).<wbr/> Any state that
+can be skipped in that manner is called a transient state.<wbr/></p>
+<p>For example,<wbr/> for this AWB mode (AWB_<wbr/>MODE_<wbr/>AUTO),<wbr/> in addition to the state transitions
+listed in above table,<wbr/> it is also legal for the camera device to skip one or more
+transient states between two results.<wbr/> See below table for examples:</p>
+<table>
+<thead>
+<tr>
+<th align="center">State</th>
+<th align="center">Transition Cause</th>
+<th align="center">New State</th>
+<th align="center">Notes</th>
+</tr>
+</thead>
+<tbody>
+<tr>
+<td align="center">INACTIVE</td>
+<td align="center">Camera device finished AWB scan</td>
+<td align="center">CONVERGED</td>
+<td align="center">Values are already good,<wbr/> transient states are skipped by camera device.<wbr/></td>
+</tr>
+<tr>
+<td align="center">LOCKED</td>
+<td align="center"><a href="#controls_android.control.awbLock">android.<wbr/>control.<wbr/>awb<wbr/>Lock</a> is OFF</td>
+<td align="center">CONVERGED</td>
+<td align="center">Values good after unlock,<wbr/> transient states are skipped by camera device.<wbr/></td>
+</tr>
+</tbody>
+</table>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="dynamic_android.control.effectMode">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>control.<wbr/>effect<wbr/>Mode
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">byte</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">OFF</span>
+                    <span class="entry_type_enum_notes"><p>No color effect will be applied.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">MONO</span>
+                    <span class="entry_type_enum_optional">[optional]</span>
+                    <span class="entry_type_enum_notes"><p>A "monocolor" effect where the image is mapped into
+a single color.<wbr/></p>
+<p>This will typically be grayscale.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">NEGATIVE</span>
+                    <span class="entry_type_enum_optional">[optional]</span>
+                    <span class="entry_type_enum_notes"><p>A "photo-negative" effect where the image's colors
+are inverted.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">SOLARIZE</span>
+                    <span class="entry_type_enum_optional">[optional]</span>
+                    <span class="entry_type_enum_notes"><p>A "solarisation" effect (Sabattier effect) where the
+image is wholly or partially reversed in
+tone.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">SEPIA</span>
+                    <span class="entry_type_enum_optional">[optional]</span>
+                    <span class="entry_type_enum_notes"><p>A "sepia" effect where the image is mapped into warm
+gray,<wbr/> red,<wbr/> and brown tones.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">POSTERIZE</span>
+                    <span class="entry_type_enum_optional">[optional]</span>
+                    <span class="entry_type_enum_notes"><p>A "posterization" effect where the image uses
+discrete regions of tone rather than a continuous
+gradient of tones.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">WHITEBOARD</span>
+                    <span class="entry_type_enum_optional">[optional]</span>
+                    <span class="entry_type_enum_notes"><p>A "whiteboard" effect where the image is typically displayed
+as regions of white,<wbr/> with black or grey details.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">BLACKBOARD</span>
+                    <span class="entry_type_enum_optional">[optional]</span>
+                    <span class="entry_type_enum_notes"><p>A "blackboard" effect where the image is typically displayed
+as regions of black,<wbr/> with white or grey details.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">AQUA</span>
+                    <span class="entry_type_enum_optional">[optional]</span>
+                    <span class="entry_type_enum_notes"><p>An "aqua" effect where a blue hue is added to the image.<wbr/></p></span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>A special color effect to apply.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p><a href="#static_android.control.availableEffects">android.<wbr/>control.<wbr/>available<wbr/>Effects</a></p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_BC">BC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>When this mode is set,<wbr/> a color effect will be applied
+to images produced by the camera device.<wbr/> The interpretation
+and implementation of these color effects is left to the
+implementor of the camera device,<wbr/> and should not be
+depended on to be consistent (or present) across all
+devices.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="dynamic_android.control.mode">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>control.<wbr/>mode
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">byte</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">OFF</span>
+                    <span class="entry_type_enum_notes"><p>Full application control of pipeline.<wbr/></p>
+<p>All control by the device's metering and focusing (3A)
+routines is disabled,<wbr/> and no other settings in
+android.<wbr/>control.<wbr/>* have any effect,<wbr/> except that
+<a href="#controls_android.control.captureIntent">android.<wbr/>control.<wbr/>capture<wbr/>Intent</a> may be used by the camera
+device to select post-processing values for processing
+blocks that do not allow for manual control,<wbr/> or are not
+exposed by the camera API.<wbr/></p>
+<p>However,<wbr/> the camera device's 3A routines may continue to
+collect statistics and update their internal state so that
+when control is switched to AUTO mode,<wbr/> good control values
+can be immediately applied.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">AUTO</span>
+                    <span class="entry_type_enum_notes"><p>Use settings for each individual 3A routine.<wbr/></p>
+<p>Manual control of capture parameters is disabled.<wbr/> All
+controls in android.<wbr/>control.<wbr/>* besides sceneMode take
+effect.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">USE_SCENE_MODE</span>
+                    <span class="entry_type_enum_optional">[optional]</span>
+                    <span class="entry_type_enum_notes"><p>Use a specific scene mode.<wbr/></p>
+<p>Enabling this disables control.<wbr/>aeMode,<wbr/> control.<wbr/>awbMode and
+control.<wbr/>afMode controls; the camera device will ignore
+those settings while USE_<wbr/>SCENE_<wbr/>MODE is active (except for
+FACE_<wbr/>PRIORITY scene mode).<wbr/> Other control entries are still active.<wbr/>
+This setting can only be used if scene mode is supported (i.<wbr/>e.<wbr/>
+<a href="#static_android.control.availableSceneModes">android.<wbr/>control.<wbr/>available<wbr/>Scene<wbr/>Modes</a>
+contain some modes other than DISABLED).<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">OFF_KEEP_STATE</span>
+                    <span class="entry_type_enum_optional">[optional]</span>
+                    <span class="entry_type_enum_notes"><p>Same as OFF mode,<wbr/> except that this capture will not be
+used by camera device background auto-exposure,<wbr/> auto-white balance and
+auto-focus algorithms (3A) to update their statistics.<wbr/></p>
+<p>Specifically,<wbr/> the 3A routines are locked to the last
+values set from a request with AUTO,<wbr/> OFF,<wbr/> or
+USE_<wbr/>SCENE_<wbr/>MODE,<wbr/> and any statistics or state updates
+collected from manual captures with OFF_<wbr/>KEEP_<wbr/>STATE will be
+discarded by the camera device.<wbr/></p></span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Overall mode of 3A (auto-exposure,<wbr/> auto-white-balance,<wbr/> auto-focus) control
+routines.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p><a href="#static_android.control.availableModes">android.<wbr/>control.<wbr/>available<wbr/>Modes</a></p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_BC">BC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>This is a top-level 3A control switch.<wbr/> When set to OFF,<wbr/> all 3A control
+by the camera device is disabled.<wbr/> The application must set the fields for
+capture parameters itself.<wbr/></p>
+<p>When set to AUTO,<wbr/> the individual algorithm controls in
+android.<wbr/>control.<wbr/>* are in effect,<wbr/> such as <a href="#controls_android.control.afMode">android.<wbr/>control.<wbr/>af<wbr/>Mode</a>.<wbr/></p>
+<p>When set to USE_<wbr/>SCENE_<wbr/>MODE,<wbr/> the individual controls in
+android.<wbr/>control.<wbr/>* are mostly disabled,<wbr/> and the camera device implements
+one of the scene mode settings (such as ACTION,<wbr/> SUNSET,<wbr/> or PARTY)
+as it wishes.<wbr/> The camera device scene mode 3A settings are provided by
+<a href="https://developer.android.com/reference/android/hardware/camera2/CaptureResult.html">capture results</a>.<wbr/></p>
+<p>When set to OFF_<wbr/>KEEP_<wbr/>STATE,<wbr/> it is similar to OFF mode,<wbr/> the only difference
+is that this frame will not be used by camera device background 3A statistics
+update,<wbr/> as if this frame is never captured.<wbr/> This mode can be used in the scenario
+where the application doesn't want a 3A manual control capture to affect
+the subsequent auto 3A capture results.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="dynamic_android.control.sceneMode">
+            <td class="entry_name
+             " rowspan="5">
+              android.<wbr/>control.<wbr/>scene<wbr/>Mode
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">byte</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">DISABLED</span>
+                    <span class="entry_type_enum_value">0</span>
+                    <span class="entry_type_enum_notes"><p>Indicates that no scene modes are set for a given capture request.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">FACE_PRIORITY</span>
+                    <span class="entry_type_enum_notes"><p>If face detection support exists,<wbr/> use face
+detection data for auto-focus,<wbr/> auto-white balance,<wbr/> and
+auto-exposure routines.<wbr/></p>
+<p>If face detection statistics are disabled
+(i.<wbr/>e.<wbr/> <a href="#controls_android.statistics.faceDetectMode">android.<wbr/>statistics.<wbr/>face<wbr/>Detect<wbr/>Mode</a> is set to OFF),<wbr/>
+this should still operate correctly (but will not return
+face detection statistics to the framework).<wbr/></p>
+<p>Unlike the other scene modes,<wbr/> <a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a>,<wbr/>
+<a href="#controls_android.control.awbMode">android.<wbr/>control.<wbr/>awb<wbr/>Mode</a>,<wbr/> and <a href="#controls_android.control.afMode">android.<wbr/>control.<wbr/>af<wbr/>Mode</a>
+remain active when FACE_<wbr/>PRIORITY is set.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">ACTION</span>
+                    <span class="entry_type_enum_optional">[optional]</span>
+                    <span class="entry_type_enum_notes"><p>Optimized for photos of quickly moving objects.<wbr/></p>
+<p>Similar to SPORTS.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">PORTRAIT</span>
+                    <span class="entry_type_enum_optional">[optional]</span>
+                    <span class="entry_type_enum_notes"><p>Optimized for still photos of people.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">LANDSCAPE</span>
+                    <span class="entry_type_enum_optional">[optional]</span>
+                    <span class="entry_type_enum_notes"><p>Optimized for photos of distant macroscopic objects.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">NIGHT</span>
+                    <span class="entry_type_enum_optional">[optional]</span>
+                    <span class="entry_type_enum_notes"><p>Optimized for low-light settings.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">NIGHT_PORTRAIT</span>
+                    <span class="entry_type_enum_optional">[optional]</span>
+                    <span class="entry_type_enum_notes"><p>Optimized for still photos of people in low-light
+settings.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">THEATRE</span>
+                    <span class="entry_type_enum_optional">[optional]</span>
+                    <span class="entry_type_enum_notes"><p>Optimized for dim,<wbr/> indoor settings where flash must
+remain off.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">BEACH</span>
+                    <span class="entry_type_enum_optional">[optional]</span>
+                    <span class="entry_type_enum_notes"><p>Optimized for bright,<wbr/> outdoor beach settings.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">SNOW</span>
+                    <span class="entry_type_enum_optional">[optional]</span>
+                    <span class="entry_type_enum_notes"><p>Optimized for bright,<wbr/> outdoor settings containing snow.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">SUNSET</span>
+                    <span class="entry_type_enum_optional">[optional]</span>
+                    <span class="entry_type_enum_notes"><p>Optimized for scenes of the setting sun.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">STEADYPHOTO</span>
+                    <span class="entry_type_enum_optional">[optional]</span>
+                    <span class="entry_type_enum_notes"><p>Optimized to avoid blurry photos due to small amounts of
+device motion (for example: due to hand shake).<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">FIREWORKS</span>
+                    <span class="entry_type_enum_optional">[optional]</span>
+                    <span class="entry_type_enum_notes"><p>Optimized for nighttime photos of fireworks.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">SPORTS</span>
+                    <span class="entry_type_enum_optional">[optional]</span>
+                    <span class="entry_type_enum_notes"><p>Optimized for photos of quickly moving people.<wbr/></p>
+<p>Similar to ACTION.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">PARTY</span>
+                    <span class="entry_type_enum_optional">[optional]</span>
+                    <span class="entry_type_enum_notes"><p>Optimized for dim,<wbr/> indoor settings with multiple moving
+people.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">CANDLELIGHT</span>
+                    <span class="entry_type_enum_optional">[optional]</span>
+                    <span class="entry_type_enum_notes"><p>Optimized for dim settings where the main light source
+is a flame.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">BARCODE</span>
+                    <span class="entry_type_enum_optional">[optional]</span>
+                    <span class="entry_type_enum_notes"><p>Optimized for accurately capturing a photo of a barcode
+for use by camera applications that wish to read the
+barcode value.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">HIGH_SPEED_VIDEO</span>
+                    <span class="entry_type_enum_deprecated">[deprecated]</span>
+                    <span class="entry_type_enum_optional">[optional]</span>
+                    <span class="entry_type_enum_notes"><p>This is deprecated,<wbr/> please use <a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#createConstrainedHighSpeedCaptureSession">CameraDevice#createConstrainedHighSpeedCaptureSession</a>
+and <a href="https://developer.android.com/reference/android/hardware/camera2/CameraConstrainedHighSpeedCaptureSession.html#createHighSpeedRequestList">CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList</a>
+for high speed video recording.<wbr/></p>
+<p>Optimized for high speed video recording (frame rate &gt;=60fps) use case.<wbr/></p>
+<p>The supported high speed video sizes and fps ranges are specified in
+<a href="#static_android.control.availableHighSpeedVideoConfigurations">android.<wbr/>control.<wbr/>available<wbr/>High<wbr/>Speed<wbr/>Video<wbr/>Configurations</a>.<wbr/> To get desired
+output frame rates,<wbr/> the application is only allowed to select video size
+and fps range combinations listed in this static metadata.<wbr/> The fps range
+can be controlled via <a href="#controls_android.control.aeTargetFpsRange">android.<wbr/>control.<wbr/>ae<wbr/>Target<wbr/>Fps<wbr/>Range</a>.<wbr/></p>
+<p>In this mode,<wbr/> the camera device will override aeMode,<wbr/> awbMode,<wbr/> and afMode to
+ON,<wbr/> ON,<wbr/> and CONTINUOUS_<wbr/>VIDEO,<wbr/> respectively.<wbr/> All post-processing block mode
+controls will be overridden to be FAST.<wbr/> Therefore,<wbr/> no manual control of capture
+and post-processing parameters is possible.<wbr/> All other controls operate the
+same as when <a href="#controls_android.control.mode">android.<wbr/>control.<wbr/>mode</a> == AUTO.<wbr/> This means that all other
+android.<wbr/>control.<wbr/>* fields continue to work,<wbr/> such as</p>
+<ul>
+<li><a href="#controls_android.control.aeTargetFpsRange">android.<wbr/>control.<wbr/>ae<wbr/>Target<wbr/>Fps<wbr/>Range</a></li>
+<li><a href="#controls_android.control.aeExposureCompensation">android.<wbr/>control.<wbr/>ae<wbr/>Exposure<wbr/>Compensation</a></li>
+<li><a href="#controls_android.control.aeLock">android.<wbr/>control.<wbr/>ae<wbr/>Lock</a></li>
+<li><a href="#controls_android.control.awbLock">android.<wbr/>control.<wbr/>awb<wbr/>Lock</a></li>
+<li><a href="#controls_android.control.effectMode">android.<wbr/>control.<wbr/>effect<wbr/>Mode</a></li>
+<li><a href="#controls_android.control.aeRegions">android.<wbr/>control.<wbr/>ae<wbr/>Regions</a></li>
+<li><a href="#controls_android.control.afRegions">android.<wbr/>control.<wbr/>af<wbr/>Regions</a></li>
+<li><a href="#controls_android.control.awbRegions">android.<wbr/>control.<wbr/>awb<wbr/>Regions</a></li>
+<li><a href="#controls_android.control.afTrigger">android.<wbr/>control.<wbr/>af<wbr/>Trigger</a></li>
+<li><a href="#controls_android.control.aePrecaptureTrigger">android.<wbr/>control.<wbr/>ae<wbr/>Precapture<wbr/>Trigger</a></li>
+</ul>
+<p>Outside of android.<wbr/>control.<wbr/>*,<wbr/> the following controls will work:</p>
+<ul>
+<li><a href="#controls_android.flash.mode">android.<wbr/>flash.<wbr/>mode</a> (automatic flash for still capture will not work since aeMode is ON)</li>
+<li><a href="#controls_android.lens.opticalStabilizationMode">android.<wbr/>lens.<wbr/>optical<wbr/>Stabilization<wbr/>Mode</a> (if it is supported)</li>
+<li><a href="#controls_android.scaler.cropRegion">android.<wbr/>scaler.<wbr/>crop<wbr/>Region</a></li>
+<li><a href="#controls_android.statistics.faceDetectMode">android.<wbr/>statistics.<wbr/>face<wbr/>Detect<wbr/>Mode</a></li>
+</ul>
+<p>For high speed recording use case,<wbr/> the actual maximum supported frame rate may
+be lower than what camera can output,<wbr/> depending on the destination Surfaces for
+the image data.<wbr/> For example,<wbr/> if the destination surface is from video encoder,<wbr/>
+the application needs to check if the video encoder is capable of supporting the
+high frame rate for a given video size,<wbr/> or it will end up with lower recording
+frame rate.<wbr/> If the destination surface is from preview window,<wbr/> the preview frame
+rate will be bounded by the screen refresh rate.<wbr/></p>
+<p>The camera device will only support up to 2 output high speed streams
+(processed non-stalling format defined in <a href="#static_android.request.maxNumOutputStreams">android.<wbr/>request.<wbr/>max<wbr/>Num<wbr/>Output<wbr/>Streams</a>)
+in this mode.<wbr/> This control will be effective only if all of below conditions are true:</p>
+<ul>
+<li>The application created no more than maxNumHighSpeedStreams processed non-stalling
+format output streams,<wbr/> where maxNumHighSpeedStreams is calculated as
+min(2,<wbr/> <a href="#static_android.request.maxNumOutputStreams">android.<wbr/>request.<wbr/>max<wbr/>Num<wbr/>Output<wbr/>Streams</a>[Processed (but not-stalling)]).<wbr/></li>
+<li>The stream sizes are selected from the sizes reported by
+<a href="#static_android.control.availableHighSpeedVideoConfigurations">android.<wbr/>control.<wbr/>available<wbr/>High<wbr/>Speed<wbr/>Video<wbr/>Configurations</a>.<wbr/></li>
+<li>No processed non-stalling or raw streams are configured.<wbr/></li>
+</ul>
+<p>When the above conditions are NOT satisfied,<wbr/> the controls of this mode and
+<a href="#controls_android.control.aeTargetFpsRange">android.<wbr/>control.<wbr/>ae<wbr/>Target<wbr/>Fps<wbr/>Range</a> will be ignored by the camera device,<wbr/>
+the camera device will fall back to <a href="#controls_android.control.mode">android.<wbr/>control.<wbr/>mode</a> <code>==</code> AUTO,<wbr/>
+and the returned capture result metadata will give the fps range chosen
+by the camera device.<wbr/></p>
+<p>Switching into or out of this mode may trigger some camera ISP/<wbr/>sensor
+reconfigurations,<wbr/> which may introduce extra latency.<wbr/> It is recommended that
+the application avoids unnecessary scene mode switch as much as possible.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">HDR</span>
+                    <span class="entry_type_enum_optional">[optional]</span>
+                    <span class="entry_type_enum_notes"><p>Turn on a device-specific high dynamic range (HDR) mode.<wbr/></p>
+<p>In this scene mode,<wbr/> the camera device captures images
+that keep a larger range of scene illumination levels
+visible in the final image.<wbr/> For example,<wbr/> when taking a
+picture of a object in front of a bright window,<wbr/> both
+the object and the scene through the window may be
+visible when using HDR mode,<wbr/> while in normal AUTO mode,<wbr/>
+one or the other may be poorly exposed.<wbr/> As a tradeoff,<wbr/>
+HDR mode generally takes much longer to capture a single
+image,<wbr/> has no user control,<wbr/> and may have other artifacts
+depending on the HDR method used.<wbr/></p>
+<p>Therefore,<wbr/> HDR captures operate at a much slower rate
+than regular captures.<wbr/></p>
+<p>In this mode,<wbr/> on LIMITED or FULL devices,<wbr/> when a request
+is made with a <a href="#controls_android.control.captureIntent">android.<wbr/>control.<wbr/>capture<wbr/>Intent</a> of
+STILL_<wbr/>CAPTURE,<wbr/> the camera device will capture an image
+using a high dynamic range capture technique.<wbr/>  On LEGACY
+devices,<wbr/> captures that target a JPEG-format output will
+be captured with HDR,<wbr/> and the capture intent is not
+relevant.<wbr/></p>
+<p>The HDR capture may involve the device capturing a burst
+of images internally and combining them into one,<wbr/> or it
+may involve the device using specialized high dynamic
+range capture hardware.<wbr/> In all cases,<wbr/> a single image is
+produced in response to a capture request submitted
+while in HDR mode.<wbr/></p>
+<p>Since substantial post-processing is generally needed to
+produce an HDR image,<wbr/> only YUV and JPEG outputs are
+supported for LIMITED/<wbr/>FULL device HDR captures,<wbr/> and only
+JPEG outputs are supported for LEGACY HDR
+captures.<wbr/> Using a RAW output for HDR capture is not
+supported.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">FACE_PRIORITY_LOW_LIGHT</span>
+                    <span class="entry_type_enum_optional">[optional]</span>
+                    <span class="entry_type_enum_hidden">[hidden]</span>
+                    <span class="entry_type_enum_notes"><p>Same as FACE_<wbr/>PRIORITY scene mode,<wbr/> except that the camera
+device will choose higher sensitivity values (<a href="#controls_android.sensor.sensitivity">android.<wbr/>sensor.<wbr/>sensitivity</a>)
+under low light conditions.<wbr/></p>
+<p>The camera device may be tuned to expose the images in a reduced
+sensitivity range to produce the best quality images.<wbr/> For example,<wbr/>
+if the <a href="#static_android.sensor.info.sensitivityRange">android.<wbr/>sensor.<wbr/>info.<wbr/>sensitivity<wbr/>Range</a> gives range of [100,<wbr/> 1600],<wbr/>
+the camera device auto-exposure routine tuning process may limit the actual
+exposure sensitivity range to [100,<wbr/> 1200] to ensure that the noise level isn't
+excessive in order to preserve the image quality.<wbr/> Under this situation,<wbr/> the image under
+low light may be under-exposed when the sensor max exposure time (bounded by the
+<a href="#controls_android.control.aeTargetFpsRange">android.<wbr/>control.<wbr/>ae<wbr/>Target<wbr/>Fps<wbr/>Range</a> when <a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a> is one of the
+ON_<wbr/>* modes) and effective max sensitivity are reached.<wbr/> This scene mode allows the
+camera device auto-exposure routine to increase the sensitivity up to the max
+sensitivity specified by <a href="#static_android.sensor.info.sensitivityRange">android.<wbr/>sensor.<wbr/>info.<wbr/>sensitivity<wbr/>Range</a> when the scene is too
+dark and the max exposure time is reached.<wbr/> The captured images may be noisier
+compared with the images captured in normal FACE_<wbr/>PRIORITY mode; therefore,<wbr/> it is
+recommended that the application only use this scene mode when it is capable of
+reducing the noise level of the captured images.<wbr/></p>
+<p>Unlike the other scene modes,<wbr/> <a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a>,<wbr/>
+<a href="#controls_android.control.awbMode">android.<wbr/>control.<wbr/>awb<wbr/>Mode</a>,<wbr/> and <a href="#controls_android.control.afMode">android.<wbr/>control.<wbr/>af<wbr/>Mode</a>
+remain active when FACE_<wbr/>PRIORITY_<wbr/>LOW_<wbr/>LIGHT is set.<wbr/></p></span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Control for which scene mode is currently active.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p><a href="#static_android.control.availableSceneModes">android.<wbr/>control.<wbr/>available<wbr/>Scene<wbr/>Modes</a></p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_BC">BC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>Scene modes are custom camera modes optimized for a certain set of conditions and
+capture settings.<wbr/></p>
+<p>This is the mode that is active when
+<code><a href="#controls_android.control.mode">android.<wbr/>control.<wbr/>mode</a> == USE_<wbr/>SCENE_<wbr/>MODE</code>.<wbr/> Aside from FACE_<wbr/>PRIORITY,<wbr/> these modes will
+disable <a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a>,<wbr/> <a href="#controls_android.control.awbMode">android.<wbr/>control.<wbr/>awb<wbr/>Mode</a>,<wbr/> and <a href="#controls_android.control.afMode">android.<wbr/>control.<wbr/>af<wbr/>Mode</a>
+while in use.<wbr/></p>
+<p>The interpretation and implementation of these scene modes is left
+to the implementor of the camera device.<wbr/> Their behavior will not be
+consistent across all devices,<wbr/> and any given device may only implement
+a subset of these modes.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>HAL implementations that include scene modes are expected to provide
+the per-scene settings to use for <a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a>,<wbr/>
+<a href="#controls_android.control.awbMode">android.<wbr/>control.<wbr/>awb<wbr/>Mode</a>,<wbr/> and <a href="#controls_android.control.afMode">android.<wbr/>control.<wbr/>af<wbr/>Mode</a> in
+<a href="#static_android.control.sceneModeOverrides">android.<wbr/>control.<wbr/>scene<wbr/>Mode<wbr/>Overrides</a>.<wbr/></p>
+<p>For HIGH_<wbr/>SPEED_<wbr/>VIDEO mode,<wbr/> if it is included in <a href="#static_android.control.availableSceneModes">android.<wbr/>control.<wbr/>available<wbr/>Scene<wbr/>Modes</a>,<wbr/>
+the HAL must list supported video size and fps range in
+<a href="#static_android.control.availableHighSpeedVideoConfigurations">android.<wbr/>control.<wbr/>available<wbr/>High<wbr/>Speed<wbr/>Video<wbr/>Configurations</a>.<wbr/> For a given size,<wbr/> e.<wbr/>g.<wbr/>
+1280x720,<wbr/> if the HAL has two different sensor configurations for normal streaming
+mode and high speed streaming,<wbr/> when this scene mode is set/<wbr/>reset in a sequence of capture
+requests,<wbr/> the HAL may have to switch between different sensor modes.<wbr/>
+This mode is deprecated in HAL3.<wbr/>3,<wbr/> to support high speed video recording,<wbr/> please implement
+<a href="#static_android.control.availableHighSpeedVideoConfigurations">android.<wbr/>control.<wbr/>available<wbr/>High<wbr/>Speed<wbr/>Video<wbr/>Configurations</a> and CONSTRAINED_<wbr/>HIGH_<wbr/>SPEED_<wbr/>VIDEO
+capability defined in <a href="#static_android.request.availableCapabilities">android.<wbr/>request.<wbr/>available<wbr/>Capabilities</a>.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="dynamic_android.control.videoStabilizationMode">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>control.<wbr/>video<wbr/>Stabilization<wbr/>Mode
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">byte</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">OFF</span>
+                    <span class="entry_type_enum_notes"><p>Video stabilization is disabled.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">ON</span>
+                    <span class="entry_type_enum_notes"><p>Video stabilization is enabled.<wbr/></p></span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Whether video stabilization is
+active.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_BC">BC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>Video stabilization automatically warps images from
+the camera in order to stabilize motion between consecutive frames.<wbr/></p>
+<p>If enabled,<wbr/> video stabilization can modify the
+<a href="#controls_android.scaler.cropRegion">android.<wbr/>scaler.<wbr/>crop<wbr/>Region</a> to keep the video stream stabilized.<wbr/></p>
+<p>Switching between different video stabilization modes may take several
+frames to initialize;<wbr/> the camera device will report the current mode
+in capture result metadata.<wbr/> For example,<wbr/> when "ON" mode is requested,<wbr/>
+the video stabilization modes in the first several capture results may
+still be "OFF",<wbr/> and it will become "ON" when the initialization is
+done.<wbr/></p>
+<p>In addition,<wbr/> not all recording sizes or frame rates may be supported for
+stabilization by a device that reports stabilization support.<wbr/> It is guaranteed
+that an output targeting a MediaRecorder or MediaCodec will be stabilized if
+the recording resolution is less than or equal to 1920 x 1080 (width less than
+or equal to 1920,<wbr/> height less than or equal to 1080),<wbr/> and the recording
+frame rate is less than or equal to 30fps.<wbr/>  At other sizes,<wbr/> the CaptureResult
+<a href="#controls_android.control.videoStabilizationMode">android.<wbr/>control.<wbr/>video<wbr/>Stabilization<wbr/>Mode</a> field will return
+OFF if the recording output is not stabilized,<wbr/> or if there are no output
+Surface types that can be stabilized.<wbr/></p>
+<p>If a camera device supports both this mode and OIS
+(<a href="#controls_android.lens.opticalStabilizationMode">android.<wbr/>lens.<wbr/>optical<wbr/>Stabilization<wbr/>Mode</a>),<wbr/> turning both modes on may
+produce undesirable interaction,<wbr/> so it is recommended not to enable
+both at the same time.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+        
+
+      <!-- end of kind -->
+      </tbody>
+
+  <!-- end of section -->
+  <tr><td colspan="6" id="section_demosaic" class="section">demosaic</td></tr>
+
+
+      <tr><td colspan="6" class="kind">controls</td></tr>
+
+      <thead class="entries_header">
+        <tr>
+          <th class="th_name">Property Name</th>
+          <th class="th_type">Type</th>
+          <th class="th_description">Description</th>
+          <th class="th_units">Units</th>
+          <th class="th_range">Range</th>
+          <th class="th_tags">Tags</th>
+        </tr>
+      </thead>
+
+      <tbody>
+
+        
+
+        
+
+        
+
+        
+
+                
+          <tr class="entry" id="controls_android.demosaic.mode">
+            <td class="entry_name
+             " rowspan="1">
+              android.<wbr/>demosaic.<wbr/>mode
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">byte</span>
+
+              <span class="entry_type_visibility"> [system]</span>
+
+
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">FAST</span>
+                    <span class="entry_type_enum_notes"><p>Minimal or no slowdown of frame rate compared to
+Bayer RAW output.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">HIGH_QUALITY</span>
+                    <span class="entry_type_enum_notes"><p>Improved processing quality but the frame rate might be slowed down
+relative to raw output.<wbr/></p></span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Controls the quality of the demosaicing
+processing.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_FUTURE">FUTURE</a></li>
+              </ul>
+            </td>
+
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+        
+
+      <!-- end of kind -->
+      </tbody>
+
+  <!-- end of section -->
+  <tr><td colspan="6" id="section_edge" class="section">edge</td></tr>
+
+
+      <tr><td colspan="6" class="kind">controls</td></tr>
+
+      <thead class="entries_header">
+        <tr>
+          <th class="th_name">Property Name</th>
+          <th class="th_type">Type</th>
+          <th class="th_description">Description</th>
+          <th class="th_units">Units</th>
+          <th class="th_range">Range</th>
+          <th class="th_tags">Tags</th>
+        </tr>
+      </thead>
+
+      <tbody>
+
+        
+
+        
+
+        
+
+        
+
+                
+          <tr class="entry" id="controls_android.edge.mode">
+            <td class="entry_name
+             " rowspan="5">
+              android.<wbr/>edge.<wbr/>mode
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">byte</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+              <span class="entry_type_hwlevel">[full] </span>
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">OFF</span>
+                    <span class="entry_type_enum_notes"><p>No edge enhancement is applied.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">FAST</span>
+                    <span class="entry_type_enum_notes"><p>Apply edge enhancement at a quality level that does not slow down frame rate
+relative to sensor output.<wbr/> It may be the same as OFF if edge enhancement will
+slow down frame rate relative to sensor.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">HIGH_QUALITY</span>
+                    <span class="entry_type_enum_notes"><p>Apply high-quality edge enhancement,<wbr/> at a cost of possibly reduced output frame rate.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">ZERO_SHUTTER_LAG</span>
+                    <span class="entry_type_enum_optional">[optional]</span>
+                    <span class="entry_type_enum_notes"><p>Edge enhancement is applied at different levels for different output streams,<wbr/>
+based on resolution.<wbr/> Streams at maximum recording resolution (see <a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#createCaptureSession">CameraDevice#createCaptureSession</a>) or below have
+edge enhancement applied,<wbr/> while higher-resolution streams have no edge enhancement
+applied.<wbr/> The level of edge enhancement for low-resolution streams is tuned so that
+frame rate is not impacted,<wbr/> and the quality is equal to or better than FAST (since it
+is only applied to lower-resolution outputs,<wbr/> quality may improve from FAST).<wbr/></p>
+<p>This mode is intended to be used by applications operating in a zero-shutter-lag mode
+with YUV or PRIVATE reprocessing,<wbr/> where the application continuously captures
+high-resolution intermediate buffers into a circular buffer,<wbr/> from which a final image is
+produced via reprocessing when a user takes a picture.<wbr/>  For such a use case,<wbr/> the
+high-resolution buffers must not have edge enhancement applied to maximize efficiency of
+preview and to avoid double-applying enhancement when reprocessed,<wbr/> while low-resolution
+buffers (used for recording or preview,<wbr/> generally) need edge enhancement applied for
+reasonable preview quality.<wbr/></p>
+<p>This mode is guaranteed to be supported by devices that support either the
+YUV_<wbr/>REPROCESSING or PRIVATE_<wbr/>REPROCESSING capabilities
+(<a href="#static_android.request.availableCapabilities">android.<wbr/>request.<wbr/>available<wbr/>Capabilities</a> lists either of those capabilities) and it will
+be the default mode for CAMERA3_<wbr/>TEMPLATE_<wbr/>ZERO_<wbr/>SHUTTER_<wbr/>LAG template.<wbr/></p></span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Operation mode for edge
+enhancement.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p><a href="#static_android.edge.availableEdgeModes">android.<wbr/>edge.<wbr/>available<wbr/>Edge<wbr/>Modes</a></p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_V1">V1</a></li>
+                  <li><a href="#tag_REPROC">REPROC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>Edge enhancement improves sharpness and details in the captured image.<wbr/> OFF means
+no enhancement will be applied by the camera device.<wbr/></p>
+<p>FAST/<wbr/>HIGH_<wbr/>QUALITY both mean camera device determined enhancement
+will be applied.<wbr/> HIGH_<wbr/>QUALITY mode indicates that the
+camera device will use the highest-quality enhancement algorithms,<wbr/>
+even if it slows down capture rate.<wbr/> FAST means the camera device will
+not slow down capture rate when applying edge enhancement.<wbr/> FAST may be the same as OFF if
+edge enhancement will slow down capture rate.<wbr/> Every output stream will have a similar
+amount of enhancement applied.<wbr/></p>
+<p>ZERO_<wbr/>SHUTTER_<wbr/>LAG is meant to be used by applications that maintain a continuous circular
+buffer of high-resolution images during preview and reprocess image(s) from that buffer
+into a final capture when triggered by the user.<wbr/> In this mode,<wbr/> the camera device applies
+edge enhancement to low-resolution streams (below maximum recording resolution) to
+maximize preview quality,<wbr/> but does not apply edge enhancement to high-resolution streams,<wbr/>
+since those will be reprocessed later if necessary.<wbr/></p>
+<p>For YUV_<wbr/>REPROCESSING,<wbr/> these FAST/<wbr/>HIGH_<wbr/>QUALITY modes both mean that the camera
+device will apply FAST/<wbr/>HIGH_<wbr/>QUALITY YUV-domain edge enhancement,<wbr/> respectively.<wbr/>
+The camera device may adjust its internal edge enhancement parameters for best
+image quality based on the <a href="#controls_android.reprocess.effectiveExposureFactor">android.<wbr/>reprocess.<wbr/>effective<wbr/>Exposure<wbr/>Factor</a>,<wbr/> if it is set.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>For YUV_<wbr/>REPROCESSING,<wbr/> the HAL can use <a href="#controls_android.reprocess.effectiveExposureFactor">android.<wbr/>reprocess.<wbr/>effective<wbr/>Exposure<wbr/>Factor</a> to
+adjust the internal edge enhancement reduction parameters appropriately to get the best
+quality images.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="controls_android.edge.strength">
+            <td class="entry_name
+             " rowspan="1">
+              android.<wbr/>edge.<wbr/>strength
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">byte</span>
+
+              <span class="entry_type_visibility"> [system]</span>
+
+
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Control the amount of edge enhancement
+applied to the images</p>
+            </td>
+
+            <td class="entry_units">
+              1-10; 10 is maximum sharpening
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_FUTURE">FUTURE</a></li>
+              </ul>
+            </td>
+
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+        
+
+      <!-- end of kind -->
+      </tbody>
+      <tr><td colspan="6" class="kind">static</td></tr>
+
+      <thead class="entries_header">
+        <tr>
+          <th class="th_name">Property Name</th>
+          <th class="th_type">Type</th>
+          <th class="th_description">Description</th>
+          <th class="th_units">Units</th>
+          <th class="th_range">Range</th>
+          <th class="th_tags">Tags</th>
+        </tr>
+      </thead>
+
+      <tbody>
+
+        
+
+        
+
+        
+
+        
+
+                
+          <tr class="entry" id="static_android.edge.availableEdgeModes">
+            <td class="entry_name
+             " rowspan="5">
+              android.<wbr/>edge.<wbr/>available<wbr/>Edge<wbr/>Modes
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">byte</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  n
+                </span>
+              <span class="entry_type_visibility"> [public as enumList]</span>
+
+
+              <span class="entry_type_hwlevel">[full] </span>
+
+
+                <div class="entry_type_notes">list of enums</div>
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>List of edge enhancement modes for <a href="#controls_android.edge.mode">android.<wbr/>edge.<wbr/>mode</a> that are supported by this camera
+device.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p>Any value listed in <a href="#controls_android.edge.mode">android.<wbr/>edge.<wbr/>mode</a></p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_V1">V1</a></li>
+                  <li><a href="#tag_REPROC">REPROC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>Full-capability camera devices must always support OFF; camera devices that support
+YUV_<wbr/>REPROCESSING or PRIVATE_<wbr/>REPROCESSING will list ZERO_<wbr/>SHUTTER_<wbr/>LAG; all devices will
+list FAST.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>HAL must support both FAST and HIGH_<wbr/>QUALITY if edge enhancement control is available
+on the camera device,<wbr/> but the underlying implementation can be the same for both modes.<wbr/>
+That is,<wbr/> if the highest quality implementation on the camera device does not slow down
+capture rate,<wbr/> then FAST and HIGH_<wbr/>QUALITY will generate the same output.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+        
+
+      <!-- end of kind -->
+      </tbody>
+      <tr><td colspan="6" class="kind">dynamic</td></tr>
+
+      <thead class="entries_header">
+        <tr>
+          <th class="th_name">Property Name</th>
+          <th class="th_type">Type</th>
+          <th class="th_description">Description</th>
+          <th class="th_units">Units</th>
+          <th class="th_range">Range</th>
+          <th class="th_tags">Tags</th>
+        </tr>
+      </thead>
+
+      <tbody>
+
+        
+
+        
+
+        
+
+        
+
+                
+          <tr class="entry" id="dynamic_android.edge.mode">
+            <td class="entry_name
+             " rowspan="5">
+              android.<wbr/>edge.<wbr/>mode
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">byte</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+              <span class="entry_type_hwlevel">[full] </span>
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">OFF</span>
+                    <span class="entry_type_enum_notes"><p>No edge enhancement is applied.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">FAST</span>
+                    <span class="entry_type_enum_notes"><p>Apply edge enhancement at a quality level that does not slow down frame rate
+relative to sensor output.<wbr/> It may be the same as OFF if edge enhancement will
+slow down frame rate relative to sensor.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">HIGH_QUALITY</span>
+                    <span class="entry_type_enum_notes"><p>Apply high-quality edge enhancement,<wbr/> at a cost of possibly reduced output frame rate.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">ZERO_SHUTTER_LAG</span>
+                    <span class="entry_type_enum_optional">[optional]</span>
+                    <span class="entry_type_enum_notes"><p>Edge enhancement is applied at different levels for different output streams,<wbr/>
+based on resolution.<wbr/> Streams at maximum recording resolution (see <a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#createCaptureSession">CameraDevice#createCaptureSession</a>) or below have
+edge enhancement applied,<wbr/> while higher-resolution streams have no edge enhancement
+applied.<wbr/> The level of edge enhancement for low-resolution streams is tuned so that
+frame rate is not impacted,<wbr/> and the quality is equal to or better than FAST (since it
+is only applied to lower-resolution outputs,<wbr/> quality may improve from FAST).<wbr/></p>
+<p>This mode is intended to be used by applications operating in a zero-shutter-lag mode
+with YUV or PRIVATE reprocessing,<wbr/> where the application continuously captures
+high-resolution intermediate buffers into a circular buffer,<wbr/> from which a final image is
+produced via reprocessing when a user takes a picture.<wbr/>  For such a use case,<wbr/> the
+high-resolution buffers must not have edge enhancement applied to maximize efficiency of
+preview and to avoid double-applying enhancement when reprocessed,<wbr/> while low-resolution
+buffers (used for recording or preview,<wbr/> generally) need edge enhancement applied for
+reasonable preview quality.<wbr/></p>
+<p>This mode is guaranteed to be supported by devices that support either the
+YUV_<wbr/>REPROCESSING or PRIVATE_<wbr/>REPROCESSING capabilities
+(<a href="#static_android.request.availableCapabilities">android.<wbr/>request.<wbr/>available<wbr/>Capabilities</a> lists either of those capabilities) and it will
+be the default mode for CAMERA3_<wbr/>TEMPLATE_<wbr/>ZERO_<wbr/>SHUTTER_<wbr/>LAG template.<wbr/></p></span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Operation mode for edge
+enhancement.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p><a href="#static_android.edge.availableEdgeModes">android.<wbr/>edge.<wbr/>available<wbr/>Edge<wbr/>Modes</a></p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_V1">V1</a></li>
+                  <li><a href="#tag_REPROC">REPROC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>Edge enhancement improves sharpness and details in the captured image.<wbr/> OFF means
+no enhancement will be applied by the camera device.<wbr/></p>
+<p>FAST/<wbr/>HIGH_<wbr/>QUALITY both mean camera device determined enhancement
+will be applied.<wbr/> HIGH_<wbr/>QUALITY mode indicates that the
+camera device will use the highest-quality enhancement algorithms,<wbr/>
+even if it slows down capture rate.<wbr/> FAST means the camera device will
+not slow down capture rate when applying edge enhancement.<wbr/> FAST may be the same as OFF if
+edge enhancement will slow down capture rate.<wbr/> Every output stream will have a similar
+amount of enhancement applied.<wbr/></p>
+<p>ZERO_<wbr/>SHUTTER_<wbr/>LAG is meant to be used by applications that maintain a continuous circular
+buffer of high-resolution images during preview and reprocess image(s) from that buffer
+into a final capture when triggered by the user.<wbr/> In this mode,<wbr/> the camera device applies
+edge enhancement to low-resolution streams (below maximum recording resolution) to
+maximize preview quality,<wbr/> but does not apply edge enhancement to high-resolution streams,<wbr/>
+since those will be reprocessed later if necessary.<wbr/></p>
+<p>For YUV_<wbr/>REPROCESSING,<wbr/> these FAST/<wbr/>HIGH_<wbr/>QUALITY modes both mean that the camera
+device will apply FAST/<wbr/>HIGH_<wbr/>QUALITY YUV-domain edge enhancement,<wbr/> respectively.<wbr/>
+The camera device may adjust its internal edge enhancement parameters for best
+image quality based on the <a href="#controls_android.reprocess.effectiveExposureFactor">android.<wbr/>reprocess.<wbr/>effective<wbr/>Exposure<wbr/>Factor</a>,<wbr/> if it is set.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>For YUV_<wbr/>REPROCESSING,<wbr/> the HAL can use <a href="#controls_android.reprocess.effectiveExposureFactor">android.<wbr/>reprocess.<wbr/>effective<wbr/>Exposure<wbr/>Factor</a> to
+adjust the internal edge enhancement reduction parameters appropriately to get the best
+quality images.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+        
+
+      <!-- end of kind -->
+      </tbody>
+
+  <!-- end of section -->
+  <tr><td colspan="6" id="section_flash" class="section">flash</td></tr>
+
+
+      <tr><td colspan="6" class="kind">controls</td></tr>
+
+      <thead class="entries_header">
+        <tr>
+          <th class="th_name">Property Name</th>
+          <th class="th_type">Type</th>
+          <th class="th_description">Description</th>
+          <th class="th_units">Units</th>
+          <th class="th_range">Range</th>
+          <th class="th_tags">Tags</th>
+        </tr>
+      </thead>
+
+      <tbody>
+
+        
+
+        
+
+        
+
+        
+
+                
+          <tr class="entry" id="controls_android.flash.firingPower">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>flash.<wbr/>firing<wbr/>Power
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">byte</span>
+
+              <span class="entry_type_visibility"> [system]</span>
+
+
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Power for flash firing/<wbr/>torch</p>
+            </td>
+
+            <td class="entry_units">
+              10 is max power; 0 is no flash.<wbr/> Linear
+            </td>
+
+            <td class="entry_range">
+              <p>0 - 10</p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_FUTURE">FUTURE</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>Power for snapshot may use a different scale than
+for torch mode.<wbr/> Only one entry for torch mode will be
+used</p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="controls_android.flash.firingTime">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>flash.<wbr/>firing<wbr/>Time
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int64</span>
+
+              <span class="entry_type_visibility"> [system]</span>
+
+
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Firing time of flash relative to start of
+exposure</p>
+            </td>
+
+            <td class="entry_units">
+              nanoseconds
+            </td>
+
+            <td class="entry_range">
+              <p>0-(exposure time-flash duration)</p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_FUTURE">FUTURE</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>Clamped to (0,<wbr/> exposure time - flash
+duration).<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="controls_android.flash.mode">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>flash.<wbr/>mode
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">byte</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">OFF</span>
+                    <span class="entry_type_enum_notes"><p>Do not fire the flash for this capture.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">SINGLE</span>
+                    <span class="entry_type_enum_notes"><p>If the flash is available and charged,<wbr/> fire flash
+for this capture.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">TORCH</span>
+                    <span class="entry_type_enum_notes"><p>Transition flash to continuously on.<wbr/></p></span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>The desired mode for the camera device's flash control.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_BC">BC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>This control is only effective when flash unit is available
+(<code><a href="#static_android.flash.info.available">android.<wbr/>flash.<wbr/>info.<wbr/>available</a> == true</code>).<wbr/></p>
+<p>When this control is used,<wbr/> the <a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a> must be set to ON or OFF.<wbr/>
+Otherwise,<wbr/> the camera device auto-exposure related flash control (ON_<wbr/>AUTO_<wbr/>FLASH,<wbr/>
+ON_<wbr/>ALWAYS_<wbr/>FLASH,<wbr/> or ON_<wbr/>AUTO_<wbr/>FLASH_<wbr/>REDEYE) will override this control.<wbr/></p>
+<p>When set to OFF,<wbr/> the camera device will not fire flash for this capture.<wbr/></p>
+<p>When set to SINGLE,<wbr/> the camera device will fire flash regardless of the camera
+device's auto-exposure routine's result.<wbr/> When used in still capture case,<wbr/> this
+control should be used along with auto-exposure (AE) precapture metering sequence
+(<a href="#controls_android.control.aePrecaptureTrigger">android.<wbr/>control.<wbr/>ae<wbr/>Precapture<wbr/>Trigger</a>),<wbr/> otherwise,<wbr/> the image may be incorrectly exposed.<wbr/></p>
+<p>When set to TORCH,<wbr/> the flash will be on continuously.<wbr/> This mode can be used
+for use cases such as preview,<wbr/> auto-focus assist,<wbr/> still capture,<wbr/> or video recording.<wbr/></p>
+<p>The flash status will be reported by <a href="#dynamic_android.flash.state">android.<wbr/>flash.<wbr/>state</a> in the capture result metadata.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+        
+
+      <!-- end of kind -->
+      </tbody>
+      <tr><td colspan="6" class="kind">static</td></tr>
+
+      <thead class="entries_header">
+        <tr>
+          <th class="th_name">Property Name</th>
+          <th class="th_type">Type</th>
+          <th class="th_description">Description</th>
+          <th class="th_units">Units</th>
+          <th class="th_range">Range</th>
+          <th class="th_tags">Tags</th>
+        </tr>
+      </thead>
+
+      <tbody>
+
+        
+
+        
+
+        
+
+        
+                
+            
+
+                
+          <tr class="entry" id="static_android.flash.info.available">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>flash.<wbr/>info.<wbr/>available
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">byte</span>
+
+              <span class="entry_type_visibility"> [public as boolean]</span>
+
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">FALSE</span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">TRUE</span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Whether this camera device has a
+flash unit.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_BC">BC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>Will be <code>false</code> if no flash is available.<wbr/></p>
+<p>If there is no flash unit,<wbr/> none of the flash controls do
+anything.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.flash.info.chargeDuration">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>flash.<wbr/>info.<wbr/>charge<wbr/>Duration
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int64</span>
+
+              <span class="entry_type_visibility"> [system]</span>
+
+
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Time taken before flash can fire
+again</p>
+            </td>
+
+            <td class="entry_units">
+              nanoseconds
+            </td>
+
+            <td class="entry_range">
+              <p>0-1e9</p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_FUTURE">FUTURE</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>1 second too long/<wbr/>too short for recharge? Should
+this be power-dependent?</p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+        
+        
+
+                
+          <tr class="entry" id="static_android.flash.colorTemperature">
+            <td class="entry_name
+             " rowspan="1">
+              android.<wbr/>flash.<wbr/>color<wbr/>Temperature
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">byte</span>
+
+              <span class="entry_type_visibility"> [system]</span>
+
+
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>The x,<wbr/>y whitepoint of the
+flash</p>
+            </td>
+
+            <td class="entry_units">
+              pair of floats
+            </td>
+
+            <td class="entry_range">
+              <p>0-1 for both</p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_FUTURE">FUTURE</a></li>
+              </ul>
+            </td>
+
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.flash.maxEnergy">
+            <td class="entry_name
+             " rowspan="1">
+              android.<wbr/>flash.<wbr/>max<wbr/>Energy
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">byte</span>
+
+              <span class="entry_type_visibility"> [system]</span>
+
+
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Max energy output of the flash for a full
+power single flash</p>
+            </td>
+
+            <td class="entry_units">
+              lumen-seconds
+            </td>
+
+            <td class="entry_range">
+              <p>&gt;= 0</p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_FUTURE">FUTURE</a></li>
+              </ul>
+            </td>
+
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+        
+
+      <!-- end of kind -->
+      </tbody>
+      <tr><td colspan="6" class="kind">dynamic</td></tr>
+
+      <thead class="entries_header">
+        <tr>
+          <th class="th_name">Property Name</th>
+          <th class="th_type">Type</th>
+          <th class="th_description">Description</th>
+          <th class="th_units">Units</th>
+          <th class="th_range">Range</th>
+          <th class="th_tags">Tags</th>
+        </tr>
+      </thead>
+
+      <tbody>
+
+        
+
+        
+
+        
+
+        
+
+                
+          <tr class="entry" id="dynamic_android.flash.firingPower">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>flash.<wbr/>firing<wbr/>Power
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">byte</span>
+
+              <span class="entry_type_visibility"> [system]</span>
+
+
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Power for flash firing/<wbr/>torch</p>
+            </td>
+
+            <td class="entry_units">
+              10 is max power; 0 is no flash.<wbr/> Linear
+            </td>
+
+            <td class="entry_range">
+              <p>0 - 10</p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_FUTURE">FUTURE</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>Power for snapshot may use a different scale than
+for torch mode.<wbr/> Only one entry for torch mode will be
+used</p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="dynamic_android.flash.firingTime">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>flash.<wbr/>firing<wbr/>Time
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int64</span>
+
+              <span class="entry_type_visibility"> [system]</span>
+
+
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Firing time of flash relative to start of
+exposure</p>
+            </td>
+
+            <td class="entry_units">
+              nanoseconds
+            </td>
+
+            <td class="entry_range">
+              <p>0-(exposure time-flash duration)</p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_FUTURE">FUTURE</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>Clamped to (0,<wbr/> exposure time - flash
+duration).<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="dynamic_android.flash.mode">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>flash.<wbr/>mode
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">byte</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">OFF</span>
+                    <span class="entry_type_enum_notes"><p>Do not fire the flash for this capture.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">SINGLE</span>
+                    <span class="entry_type_enum_notes"><p>If the flash is available and charged,<wbr/> fire flash
+for this capture.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">TORCH</span>
+                    <span class="entry_type_enum_notes"><p>Transition flash to continuously on.<wbr/></p></span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>The desired mode for for the camera device's flash control.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_BC">BC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>This control is only effective when flash unit is available
+(<code><a href="#static_android.flash.info.available">android.<wbr/>flash.<wbr/>info.<wbr/>available</a> == true</code>).<wbr/></p>
+<p>When this control is used,<wbr/> the <a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a> must be set to ON or OFF.<wbr/>
+Otherwise,<wbr/> the camera device auto-exposure related flash control (ON_<wbr/>AUTO_<wbr/>FLASH,<wbr/>
+ON_<wbr/>ALWAYS_<wbr/>FLASH,<wbr/> or ON_<wbr/>AUTO_<wbr/>FLASH_<wbr/>REDEYE) will override this control.<wbr/></p>
+<p>When set to OFF,<wbr/> the camera device will not fire flash for this capture.<wbr/></p>
+<p>When set to SINGLE,<wbr/> the camera device will fire flash regardless of the camera
+device's auto-exposure routine's result.<wbr/> When used in still capture case,<wbr/> this
+control should be used along with auto-exposure (AE) precapture metering sequence
+(<a href="#controls_android.control.aePrecaptureTrigger">android.<wbr/>control.<wbr/>ae<wbr/>Precapture<wbr/>Trigger</a>),<wbr/> otherwise,<wbr/> the image may be incorrectly exposed.<wbr/></p>
+<p>When set to TORCH,<wbr/> the flash will be on continuously.<wbr/> This mode can be used
+for use cases such as preview,<wbr/> auto-focus assist,<wbr/> still capture,<wbr/> or video recording.<wbr/></p>
+<p>The flash status will be reported by <a href="#dynamic_android.flash.state">android.<wbr/>flash.<wbr/>state</a> in the capture result metadata.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="dynamic_android.flash.state">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>flash.<wbr/>state
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">byte</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+              <span class="entry_type_hwlevel">[limited] </span>
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">UNAVAILABLE</span>
+                    <span class="entry_type_enum_notes"><p>No flash on camera.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">CHARGING</span>
+                    <span class="entry_type_enum_notes"><p>Flash is charging and cannot be fired.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">READY</span>
+                    <span class="entry_type_enum_notes"><p>Flash is ready to fire.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">FIRED</span>
+                    <span class="entry_type_enum_notes"><p>Flash fired for this capture.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">PARTIAL</span>
+                    <span class="entry_type_enum_notes"><p>Flash partially illuminated this frame.<wbr/></p>
+<p>This is usually due to the next or previous frame having
+the flash fire,<wbr/> and the flash spilling into this capture
+due to hardware limitations.<wbr/></p></span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Current state of the flash
+unit.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>When the camera device doesn't have flash unit
+(i.<wbr/>e.<wbr/> <code><a href="#static_android.flash.info.available">android.<wbr/>flash.<wbr/>info.<wbr/>available</a> == false</code>),<wbr/> this state will always be UNAVAILABLE.<wbr/>
+Other states indicate the current flash status.<wbr/></p>
+<p>In certain conditions,<wbr/> this will be available on LEGACY devices:</p>
+<ul>
+<li>Flash-less cameras always return UNAVAILABLE.<wbr/></li>
+<li>Using <a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a> <code>==</code> ON_<wbr/>ALWAYS_<wbr/>FLASH
+   will always return FIRED.<wbr/></li>
+<li>Using <a href="#controls_android.flash.mode">android.<wbr/>flash.<wbr/>mode</a> <code>==</code> TORCH
+   will always return FIRED.<wbr/></li>
+</ul>
+<p>In all other conditions the state will not be available on
+LEGACY devices (i.<wbr/>e.<wbr/> it will be <code>null</code>).<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+        
+
+      <!-- end of kind -->
+      </tbody>
+
+  <!-- end of section -->
+  <tr><td colspan="6" id="section_hotPixel" class="section">hotPixel</td></tr>
+
+
+      <tr><td colspan="6" class="kind">controls</td></tr>
+
+      <thead class="entries_header">
+        <tr>
+          <th class="th_name">Property Name</th>
+          <th class="th_type">Type</th>
+          <th class="th_description">Description</th>
+          <th class="th_units">Units</th>
+          <th class="th_range">Range</th>
+          <th class="th_tags">Tags</th>
+        </tr>
+      </thead>
+
+      <tbody>
+
+        
+
+        
+
+        
+
+        
+
+                
+          <tr class="entry" id="controls_android.hotPixel.mode">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>hot<wbr/>Pixel.<wbr/>mode
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">byte</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">OFF</span>
+                    <span class="entry_type_enum_notes"><p>No hot pixel correction is applied.<wbr/></p>
+<p>The frame rate must not be reduced relative to sensor raw output
+for this option.<wbr/></p>
+<p>The hotpixel map may be returned in <a href="#dynamic_android.statistics.hotPixelMap">android.<wbr/>statistics.<wbr/>hot<wbr/>Pixel<wbr/>Map</a>.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">FAST</span>
+                    <span class="entry_type_enum_notes"><p>Hot pixel correction is applied,<wbr/> without reducing frame
+rate relative to sensor raw output.<wbr/></p>
+<p>The hotpixel map may be returned in <a href="#dynamic_android.statistics.hotPixelMap">android.<wbr/>statistics.<wbr/>hot<wbr/>Pixel<wbr/>Map</a>.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">HIGH_QUALITY</span>
+                    <span class="entry_type_enum_notes"><p>High-quality hot pixel correction is applied,<wbr/> at a cost
+of possibly reduced frame rate relative to sensor raw output.<wbr/></p>
+<p>The hotpixel map may be returned in <a href="#dynamic_android.statistics.hotPixelMap">android.<wbr/>statistics.<wbr/>hot<wbr/>Pixel<wbr/>Map</a>.<wbr/></p></span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Operational mode for hot pixel correction.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p><a href="#static_android.hotPixel.availableHotPixelModes">android.<wbr/>hot<wbr/>Pixel.<wbr/>available<wbr/>Hot<wbr/>Pixel<wbr/>Modes</a></p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_V1">V1</a></li>
+                  <li><a href="#tag_RAW">RAW</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>Hotpixel correction interpolates out,<wbr/> or otherwise removes,<wbr/> pixels
+that do not accurately measure the incoming light (i.<wbr/>e.<wbr/> pixels that
+are stuck at an arbitrary value or are oversensitive).<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+        
+
+      <!-- end of kind -->
+      </tbody>
+      <tr><td colspan="6" class="kind">static</td></tr>
+
+      <thead class="entries_header">
+        <tr>
+          <th class="th_name">Property Name</th>
+          <th class="th_type">Type</th>
+          <th class="th_description">Description</th>
+          <th class="th_units">Units</th>
+          <th class="th_range">Range</th>
+          <th class="th_tags">Tags</th>
+        </tr>
+      </thead>
+
+      <tbody>
+
+        
+
+        
+
+        
+
+        
+
+                
+          <tr class="entry" id="static_android.hotPixel.availableHotPixelModes">
+            <td class="entry_name
+             " rowspan="5">
+              android.<wbr/>hot<wbr/>Pixel.<wbr/>available<wbr/>Hot<wbr/>Pixel<wbr/>Modes
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">byte</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  n
+                </span>
+              <span class="entry_type_visibility"> [public as enumList]</span>
+
+
+
+
+                <div class="entry_type_notes">list of enums</div>
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>List of hot pixel correction modes for <a href="#controls_android.hotPixel.mode">android.<wbr/>hot<wbr/>Pixel.<wbr/>mode</a> that are supported by this
+camera device.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p>Any value listed in <a href="#controls_android.hotPixel.mode">android.<wbr/>hot<wbr/>Pixel.<wbr/>mode</a></p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_V1">V1</a></li>
+                  <li><a href="#tag_RAW">RAW</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>FULL mode camera devices will always support FAST.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>To avoid performance issues,<wbr/> there will be significantly fewer hot
+pixels than actual pixels on the camera sensor.<wbr/>
+HAL must support both FAST and HIGH_<wbr/>QUALITY if hot pixel correction control is available
+on the camera device,<wbr/> but the underlying implementation can be the same for both modes.<wbr/>
+That is,<wbr/> if the highest quality implementation on the camera device does not slow down
+capture rate,<wbr/> then FAST and HIGH_<wbr/>QUALITY will generate the same output.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+        
+
+      <!-- end of kind -->
+      </tbody>
+      <tr><td colspan="6" class="kind">dynamic</td></tr>
+
+      <thead class="entries_header">
+        <tr>
+          <th class="th_name">Property Name</th>
+          <th class="th_type">Type</th>
+          <th class="th_description">Description</th>
+          <th class="th_units">Units</th>
+          <th class="th_range">Range</th>
+          <th class="th_tags">Tags</th>
+        </tr>
+      </thead>
+
+      <tbody>
+
+        
+
+        
+
+        
+
+        
+
+                
+          <tr class="entry" id="dynamic_android.hotPixel.mode">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>hot<wbr/>Pixel.<wbr/>mode
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">byte</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">OFF</span>
+                    <span class="entry_type_enum_notes"><p>No hot pixel correction is applied.<wbr/></p>
+<p>The frame rate must not be reduced relative to sensor raw output
+for this option.<wbr/></p>
+<p>The hotpixel map may be returned in <a href="#dynamic_android.statistics.hotPixelMap">android.<wbr/>statistics.<wbr/>hot<wbr/>Pixel<wbr/>Map</a>.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">FAST</span>
+                    <span class="entry_type_enum_notes"><p>Hot pixel correction is applied,<wbr/> without reducing frame
+rate relative to sensor raw output.<wbr/></p>
+<p>The hotpixel map may be returned in <a href="#dynamic_android.statistics.hotPixelMap">android.<wbr/>statistics.<wbr/>hot<wbr/>Pixel<wbr/>Map</a>.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">HIGH_QUALITY</span>
+                    <span class="entry_type_enum_notes"><p>High-quality hot pixel correction is applied,<wbr/> at a cost
+of possibly reduced frame rate relative to sensor raw output.<wbr/></p>
+<p>The hotpixel map may be returned in <a href="#dynamic_android.statistics.hotPixelMap">android.<wbr/>statistics.<wbr/>hot<wbr/>Pixel<wbr/>Map</a>.<wbr/></p></span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Operational mode for hot pixel correction.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p><a href="#static_android.hotPixel.availableHotPixelModes">android.<wbr/>hot<wbr/>Pixel.<wbr/>available<wbr/>Hot<wbr/>Pixel<wbr/>Modes</a></p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_V1">V1</a></li>
+                  <li><a href="#tag_RAW">RAW</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>Hotpixel correction interpolates out,<wbr/> or otherwise removes,<wbr/> pixels
+that do not accurately measure the incoming light (i.<wbr/>e.<wbr/> pixels that
+are stuck at an arbitrary value or are oversensitive).<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+        
+
+      <!-- end of kind -->
+      </tbody>
+
+  <!-- end of section -->
+  <tr><td colspan="6" id="section_jpeg" class="section">jpeg</td></tr>
+
+
+      <tr><td colspan="6" class="kind">controls</td></tr>
+
+      <thead class="entries_header">
+        <tr>
+          <th class="th_name">Property Name</th>
+          <th class="th_type">Type</th>
+          <th class="th_description">Description</th>
+          <th class="th_units">Units</th>
+          <th class="th_range">Range</th>
+          <th class="th_tags">Tags</th>
+        </tr>
+      </thead>
+
+      <tbody>
+
+        
+
+        
+
+        
+
+        
+
+                
+          <tr class="entry" id="controls_android.jpeg.gpsLocation">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>jpeg.<wbr/>gps<wbr/>Location
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">byte</span>
+
+              <span class="entry_type_visibility"> [public as location]</span>
+
+              <span class="entry_type_synthetic">[synthetic] </span>
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>A location object to use when generating image GPS metadata.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>Setting a location object in a request will include the GPS coordinates of the location
+into any JPEG images captured based on the request.<wbr/> These coordinates can then be
+viewed by anyone who receives the JPEG image.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="controls_android.jpeg.gpsCoordinates">
+            <td class="entry_name
+             " rowspan="1">
+              android.<wbr/>jpeg.<wbr/>gps<wbr/>Coordinates
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">double</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  3
+                </span>
+              <span class="entry_type_visibility"> [hidden]</span>
+
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+                <div class="entry_type_notes">latitude,<wbr/> longitude,<wbr/> altitude.<wbr/> First two in degrees,<wbr/> the third in meters</div>
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>GPS coordinates to include in output JPEG
+EXIF.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p>(-180 - 180],<wbr/> [-90,<wbr/>90],<wbr/> [-inf,<wbr/> inf]</p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_BC">BC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="controls_android.jpeg.gpsProcessingMethod">
+            <td class="entry_name
+             " rowspan="1">
+              android.<wbr/>jpeg.<wbr/>gps<wbr/>Processing<wbr/>Method
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">byte</span>
+
+              <span class="entry_type_visibility"> [hidden as string]</span>
+
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>32 characters describing GPS algorithm to
+include in EXIF.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+              UTF-8 null-terminated string
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_BC">BC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="controls_android.jpeg.gpsTimestamp">
+            <td class="entry_name
+             " rowspan="1">
+              android.<wbr/>jpeg.<wbr/>gps<wbr/>Timestamp
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int64</span>
+
+              <span class="entry_type_visibility"> [hidden]</span>
+
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Time GPS fix was made to include in
+EXIF.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+              UTC in seconds since January 1,<wbr/> 1970
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_BC">BC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="controls_android.jpeg.orientation">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>jpeg.<wbr/>orientation
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int32</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>The orientation for a JPEG image.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+              Degrees in multiples of 90
+            </td>
+
+            <td class="entry_range">
+              <p>0,<wbr/> 90,<wbr/> 180,<wbr/> 270</p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_BC">BC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>The clockwise rotation angle in degrees,<wbr/> relative to the orientation
+of the camera,<wbr/> that the JPEG picture needs to be rotated by,<wbr/> to be viewed
+upright.<wbr/></p>
+<p>Camera devices may either encode this value into the JPEG EXIF header,<wbr/> or
+rotate the image data to match this orientation.<wbr/> When the image data is rotated,<wbr/>
+the thumbnail data will also be rotated.<wbr/></p>
+<p>Note that this orientation is relative to the orientation of the camera sensor,<wbr/> given
+by <a href="#static_android.sensor.orientation">android.<wbr/>sensor.<wbr/>orientation</a>.<wbr/></p>
+<p>To translate from the device orientation given by the Android sensor APIs,<wbr/> the following
+sample code may be used:</p>
+<pre><code>private int getJpegOrientation(CameraCharacteristics c,<wbr/> int deviceOrientation) {
+    if (deviceOrientation == android.<wbr/>view.<wbr/>Orientation<wbr/>Event<wbr/>Listener.<wbr/>ORIENTATION_<wbr/>UNKNOWN) return 0;
+    int sensorOrientation = c.<wbr/>get(Camera<wbr/>Characteristics.<wbr/>SENSOR_<wbr/>ORIENTATION);
+
+    //<wbr/> Round device orientation to a multiple of 90
+    deviceOrientation = (deviceOrientation + 45) /<wbr/> 90 * 90;
+
+    //<wbr/> Reverse device orientation for front-facing cameras
+    boolean facingFront = c.<wbr/>get(Camera<wbr/>Characteristics.<wbr/>LENS_<wbr/>FACING) == Camera<wbr/>Characteristics.<wbr/>LENS_<wbr/>FACING_<wbr/>FRONT;
+    if (facingFront) deviceOrientation = -deviceOrientation;
+
+    //<wbr/> Calculate desired JPEG orientation relative to camera orientation to make
+    //<wbr/> the image upright relative to the device orientation
+    int jpegOrientation = (sensorOrientation + deviceOrientation + 360) % 360;
+
+    return jpegOrientation;
+}
+</code></pre>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="controls_android.jpeg.quality">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>jpeg.<wbr/>quality
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">byte</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Compression quality of the final JPEG
+image.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p>1-100; larger is higher quality</p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_BC">BC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>85-95 is typical usage range.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="controls_android.jpeg.thumbnailQuality">
+            <td class="entry_name
+             " rowspan="1">
+              android.<wbr/>jpeg.<wbr/>thumbnail<wbr/>Quality
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">byte</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Compression quality of JPEG
+thumbnail.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p>1-100; larger is higher quality</p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_BC">BC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="controls_android.jpeg.thumbnailSize">
+            <td class="entry_name
+             " rowspan="5">
+              android.<wbr/>jpeg.<wbr/>thumbnail<wbr/>Size
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int32</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  2
+                </span>
+              <span class="entry_type_visibility"> [public as size]</span>
+
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Resolution of embedded JPEG thumbnail.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p><a href="#static_android.jpeg.availableThumbnailSizes">android.<wbr/>jpeg.<wbr/>available<wbr/>Thumbnail<wbr/>Sizes</a></p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_BC">BC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>When set to (0,<wbr/> 0) value,<wbr/> the JPEG EXIF will not contain thumbnail,<wbr/>
+but the captured JPEG will still be a valid image.<wbr/></p>
+<p>For best results,<wbr/> when issuing a request for a JPEG image,<wbr/> the thumbnail size selected
+should have the same aspect ratio as the main JPEG output.<wbr/></p>
+<p>If the thumbnail image aspect ratio differs from the JPEG primary image aspect
+ratio,<wbr/> the camera device creates the thumbnail by cropping it from the primary image.<wbr/>
+For example,<wbr/> if the primary image has 4:3 aspect ratio,<wbr/> the thumbnail image has
+16:9 aspect ratio,<wbr/> the primary image will be cropped vertically (letterbox) to
+generate the thumbnail image.<wbr/> The thumbnail image will always have a smaller Field
+Of View (FOV) than the primary image when aspect ratios differ.<wbr/></p>
+<p>When an <a href="#controls_android.jpeg.orientation">android.<wbr/>jpeg.<wbr/>orientation</a> of non-zero degree is requested,<wbr/>
+the camera device will handle thumbnail rotation in one of the following ways:</p>
+<ul>
+<li>Set the <a href="https://developer.android.com/reference/android/media/ExifInterface.html#TAG_ORIENTATION">EXIF orientation flag</a>
+  and keep jpeg and thumbnail image data unrotated.<wbr/></li>
+<li>Rotate the jpeg and thumbnail image data and not set
+  <a href="https://developer.android.com/reference/android/media/ExifInterface.html#TAG_ORIENTATION">EXIF orientation flag</a>.<wbr/> In this
+  case,<wbr/> LIMITED or FULL hardware level devices will report rotated thumbnail size in
+  capture result,<wbr/> so the width and height will be interchanged if 90 or 270 degree
+  orientation is requested.<wbr/> LEGACY device will always report unrotated thumbnail
+  size.<wbr/></li>
+</ul>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>The HAL must not squeeze or stretch the downscaled primary image to generate thumbnail.<wbr/>
+The cropping must be done on the primary jpeg image rather than the sensor active array.<wbr/>
+The stream cropping rule specified by "S5.<wbr/> Cropping" in camera3.<wbr/>h doesn't apply to the
+thumbnail image cropping.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+        
+
+      <!-- end of kind -->
+      </tbody>
+      <tr><td colspan="6" class="kind">static</td></tr>
+
+      <thead class="entries_header">
+        <tr>
+          <th class="th_name">Property Name</th>
+          <th class="th_type">Type</th>
+          <th class="th_description">Description</th>
+          <th class="th_units">Units</th>
+          <th class="th_range">Range</th>
+          <th class="th_tags">Tags</th>
+        </tr>
+      </thead>
+
+      <tbody>
+
+        
+
+        
+
+        
+
+        
+
+                
+          <tr class="entry" id="static_android.jpeg.availableThumbnailSizes">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>jpeg.<wbr/>available<wbr/>Thumbnail<wbr/>Sizes
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int32</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  2 x n
+                </span>
+              <span class="entry_type_visibility"> [public as size]</span>
+
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>List of JPEG thumbnail sizes for <a href="#controls_android.jpeg.thumbnailSize">android.<wbr/>jpeg.<wbr/>thumbnail<wbr/>Size</a> supported by this
+camera device.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_BC">BC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>This list will include at least one non-zero resolution,<wbr/> plus <code>(0,<wbr/>0)</code> for indicating no
+thumbnail should be generated.<wbr/></p>
+<p>The following conditions will be satisfied for this size list:</p>
+<ul>
+<li>The sizes will be sorted by increasing pixel area (width x height).<wbr/>
+If several resolutions have the same area,<wbr/> they will be sorted by increasing width.<wbr/></li>
+<li>The aspect ratio of the largest thumbnail size will be same as the
+aspect ratio of largest JPEG output size in <a href="#static_android.scaler.availableStreamConfigurations">android.<wbr/>scaler.<wbr/>available<wbr/>Stream<wbr/>Configurations</a>.<wbr/>
+The largest size is defined as the size that has the largest pixel area
+in a given size list.<wbr/></li>
+<li>Each output JPEG size in <a href="#static_android.scaler.availableStreamConfigurations">android.<wbr/>scaler.<wbr/>available<wbr/>Stream<wbr/>Configurations</a> will have at least
+one corresponding size that has the same aspect ratio in availableThumbnailSizes,<wbr/>
+and vice versa.<wbr/></li>
+<li>All non-<code>(0,<wbr/> 0)</code> sizes will have non-zero widths and heights.<wbr/></li>
+</ul>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.jpeg.maxSize">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>jpeg.<wbr/>max<wbr/>Size
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int32</span>
+
+              <span class="entry_type_visibility"> [system]</span>
+
+
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Maximum size in bytes for the compressed
+JPEG buffer</p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p>Must be large enough to fit any JPEG produced by
+the camera</p>
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>This is used for sizing the gralloc buffers for
+JPEG</p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+        
+
+      <!-- end of kind -->
+      </tbody>
+      <tr><td colspan="6" class="kind">dynamic</td></tr>
+
+      <thead class="entries_header">
+        <tr>
+          <th class="th_name">Property Name</th>
+          <th class="th_type">Type</th>
+          <th class="th_description">Description</th>
+          <th class="th_units">Units</th>
+          <th class="th_range">Range</th>
+          <th class="th_tags">Tags</th>
+        </tr>
+      </thead>
+
+      <tbody>
+
+        
+
+        
+
+        
+
+        
+
+                
+          <tr class="entry" id="dynamic_android.jpeg.gpsLocation">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>jpeg.<wbr/>gps<wbr/>Location
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">byte</span>
+
+              <span class="entry_type_visibility"> [public as location]</span>
+
+              <span class="entry_type_synthetic">[synthetic] </span>
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>A location object to use when generating image GPS metadata.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>Setting a location object in a request will include the GPS coordinates of the location
+into any JPEG images captured based on the request.<wbr/> These coordinates can then be
+viewed by anyone who receives the JPEG image.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="dynamic_android.jpeg.gpsCoordinates">
+            <td class="entry_name
+             " rowspan="1">
+              android.<wbr/>jpeg.<wbr/>gps<wbr/>Coordinates
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">double</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  3
+                </span>
+              <span class="entry_type_visibility"> [hidden]</span>
+
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+                <div class="entry_type_notes">latitude,<wbr/> longitude,<wbr/> altitude.<wbr/> First two in degrees,<wbr/> the third in meters</div>
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>GPS coordinates to include in output JPEG
+EXIF.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p>[-90,<wbr/> 90],<wbr/> (-180,<wbr/> 180],<wbr/> [-inf,<wbr/> inf]</p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_BC">BC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="dynamic_android.jpeg.gpsProcessingMethod">
+            <td class="entry_name
+             " rowspan="1">
+              android.<wbr/>jpeg.<wbr/>gps<wbr/>Processing<wbr/>Method
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">byte</span>
+
+              <span class="entry_type_visibility"> [hidden as string]</span>
+
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>32 characters describing GPS algorithm to
+include in EXIF.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+              UTF-8 null-terminated string
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_BC">BC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="dynamic_android.jpeg.gpsTimestamp">
+            <td class="entry_name
+             " rowspan="1">
+              android.<wbr/>jpeg.<wbr/>gps<wbr/>Timestamp
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int64</span>
+
+              <span class="entry_type_visibility"> [hidden]</span>
+
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Time GPS fix was made to include in
+EXIF.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+              UTC in seconds since January 1,<wbr/> 1970
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_BC">BC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="dynamic_android.jpeg.orientation">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>jpeg.<wbr/>orientation
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int32</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>The orientation for a JPEG image.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+              Degrees in multiples of 90
+            </td>
+
+            <td class="entry_range">
+              <p>0,<wbr/> 90,<wbr/> 180,<wbr/> 270</p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_BC">BC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>The clockwise rotation angle in degrees,<wbr/> relative to the orientation
+of the camera,<wbr/> that the JPEG picture needs to be rotated by,<wbr/> to be viewed
+upright.<wbr/></p>
+<p>Camera devices may either encode this value into the JPEG EXIF header,<wbr/> or
+rotate the image data to match this orientation.<wbr/> When the image data is rotated,<wbr/>
+the thumbnail data will also be rotated.<wbr/></p>
+<p>Note that this orientation is relative to the orientation of the camera sensor,<wbr/> given
+by <a href="#static_android.sensor.orientation">android.<wbr/>sensor.<wbr/>orientation</a>.<wbr/></p>
+<p>To translate from the device orientation given by the Android sensor APIs,<wbr/> the following
+sample code may be used:</p>
+<pre><code>private int getJpegOrientation(CameraCharacteristics c,<wbr/> int deviceOrientation) {
+    if (deviceOrientation == android.<wbr/>view.<wbr/>Orientation<wbr/>Event<wbr/>Listener.<wbr/>ORIENTATION_<wbr/>UNKNOWN) return 0;
+    int sensorOrientation = c.<wbr/>get(Camera<wbr/>Characteristics.<wbr/>SENSOR_<wbr/>ORIENTATION);
+
+    //<wbr/> Round device orientation to a multiple of 90
+    deviceOrientation = (deviceOrientation + 45) /<wbr/> 90 * 90;
+
+    //<wbr/> Reverse device orientation for front-facing cameras
+    boolean facingFront = c.<wbr/>get(Camera<wbr/>Characteristics.<wbr/>LENS_<wbr/>FACING) == Camera<wbr/>Characteristics.<wbr/>LENS_<wbr/>FACING_<wbr/>FRONT;
+    if (facingFront) deviceOrientation = -deviceOrientation;
+
+    //<wbr/> Calculate desired JPEG orientation relative to camera orientation to make
+    //<wbr/> the image upright relative to the device orientation
+    int jpegOrientation = (sensorOrientation + deviceOrientation + 360) % 360;
+
+    return jpegOrientation;
+}
+</code></pre>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="dynamic_android.jpeg.quality">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>jpeg.<wbr/>quality
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">byte</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Compression quality of the final JPEG
+image.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p>1-100; larger is higher quality</p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_BC">BC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>85-95 is typical usage range.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="dynamic_android.jpeg.size">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>jpeg.<wbr/>size
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int32</span>
+
+              <span class="entry_type_visibility"> [system]</span>
+
+
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>The size of the compressed JPEG image,<wbr/> in
+bytes</p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p>&gt;= 0</p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_FUTURE">FUTURE</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>If no JPEG output is produced for the request,<wbr/>
+this must be 0.<wbr/></p>
+<p>Otherwise,<wbr/> this describes the real size of the compressed
+JPEG image placed in the output stream.<wbr/>  More specifically,<wbr/>
+if <a href="#static_android.jpeg.maxSize">android.<wbr/>jpeg.<wbr/>max<wbr/>Size</a> = 1000000,<wbr/> and a specific capture
+has <a href="#dynamic_android.jpeg.size">android.<wbr/>jpeg.<wbr/>size</a> = 500000,<wbr/> then the output buffer from
+the JPEG stream will be 1000000 bytes,<wbr/> of which the first
+500000 make up the real data.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="dynamic_android.jpeg.thumbnailQuality">
+            <td class="entry_name
+             " rowspan="1">
+              android.<wbr/>jpeg.<wbr/>thumbnail<wbr/>Quality
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">byte</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Compression quality of JPEG
+thumbnail.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p>1-100; larger is higher quality</p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_BC">BC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="dynamic_android.jpeg.thumbnailSize">
+            <td class="entry_name
+             " rowspan="5">
+              android.<wbr/>jpeg.<wbr/>thumbnail<wbr/>Size
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int32</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  2
+                </span>
+              <span class="entry_type_visibility"> [public as size]</span>
+
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Resolution of embedded JPEG thumbnail.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p><a href="#static_android.jpeg.availableThumbnailSizes">android.<wbr/>jpeg.<wbr/>available<wbr/>Thumbnail<wbr/>Sizes</a></p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_BC">BC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>When set to (0,<wbr/> 0) value,<wbr/> the JPEG EXIF will not contain thumbnail,<wbr/>
+but the captured JPEG will still be a valid image.<wbr/></p>
+<p>For best results,<wbr/> when issuing a request for a JPEG image,<wbr/> the thumbnail size selected
+should have the same aspect ratio as the main JPEG output.<wbr/></p>
+<p>If the thumbnail image aspect ratio differs from the JPEG primary image aspect
+ratio,<wbr/> the camera device creates the thumbnail by cropping it from the primary image.<wbr/>
+For example,<wbr/> if the primary image has 4:3 aspect ratio,<wbr/> the thumbnail image has
+16:9 aspect ratio,<wbr/> the primary image will be cropped vertically (letterbox) to
+generate the thumbnail image.<wbr/> The thumbnail image will always have a smaller Field
+Of View (FOV) than the primary image when aspect ratios differ.<wbr/></p>
+<p>When an <a href="#controls_android.jpeg.orientation">android.<wbr/>jpeg.<wbr/>orientation</a> of non-zero degree is requested,<wbr/>
+the camera device will handle thumbnail rotation in one of the following ways:</p>
+<ul>
+<li>Set the <a href="https://developer.android.com/reference/android/media/ExifInterface.html#TAG_ORIENTATION">EXIF orientation flag</a>
+  and keep jpeg and thumbnail image data unrotated.<wbr/></li>
+<li>Rotate the jpeg and thumbnail image data and not set
+  <a href="https://developer.android.com/reference/android/media/ExifInterface.html#TAG_ORIENTATION">EXIF orientation flag</a>.<wbr/> In this
+  case,<wbr/> LIMITED or FULL hardware level devices will report rotated thumbnail size in
+  capture result,<wbr/> so the width and height will be interchanged if 90 or 270 degree
+  orientation is requested.<wbr/> LEGACY device will always report unrotated thumbnail
+  size.<wbr/></li>
+</ul>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>The HAL must not squeeze or stretch the downscaled primary image to generate thumbnail.<wbr/>
+The cropping must be done on the primary jpeg image rather than the sensor active array.<wbr/>
+The stream cropping rule specified by "S5.<wbr/> Cropping" in camera3.<wbr/>h doesn't apply to the
+thumbnail image cropping.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+        
+
+      <!-- end of kind -->
+      </tbody>
+
+  <!-- end of section -->
+  <tr><td colspan="6" id="section_lens" class="section">lens</td></tr>
+
+
+      <tr><td colspan="6" class="kind">controls</td></tr>
+
+      <thead class="entries_header">
+        <tr>
+          <th class="th_name">Property Name</th>
+          <th class="th_type">Type</th>
+          <th class="th_description">Description</th>
+          <th class="th_units">Units</th>
+          <th class="th_range">Range</th>
+          <th class="th_tags">Tags</th>
+        </tr>
+      </thead>
+
+      <tbody>
+
+        
+
+        
+
+        
+
+        
+
+                
+          <tr class="entry" id="controls_android.lens.aperture">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>lens.<wbr/>aperture
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">float</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+              <span class="entry_type_hwlevel">[full] </span>
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>The desired lens aperture size,<wbr/> as a ratio of lens focal length to the
+effective aperture diameter.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+              The f-number (f/<wbr/>N)
+            </td>
+
+            <td class="entry_range">
+              <p><a href="#static_android.lens.info.availableApertures">android.<wbr/>lens.<wbr/>info.<wbr/>available<wbr/>Apertures</a></p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_V1">V1</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>Setting this value is only supported on the camera devices that have a variable
+aperture lens.<wbr/></p>
+<p>When this is supported and <a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a> is OFF,<wbr/>
+this can be set along with <a href="#controls_android.sensor.exposureTime">android.<wbr/>sensor.<wbr/>exposure<wbr/>Time</a>,<wbr/>
+<a href="#controls_android.sensor.sensitivity">android.<wbr/>sensor.<wbr/>sensitivity</a>,<wbr/> and <a href="#controls_android.sensor.frameDuration">android.<wbr/>sensor.<wbr/>frame<wbr/>Duration</a>
+to achieve manual exposure control.<wbr/></p>
+<p>The requested aperture value may take several frames to reach the
+requested value; the camera device will report the current (intermediate)
+aperture size in capture result metadata while the aperture is changing.<wbr/>
+While the aperture is still changing,<wbr/> <a href="#dynamic_android.lens.state">android.<wbr/>lens.<wbr/>state</a> will be set to MOVING.<wbr/></p>
+<p>When this is supported and <a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a> is one of
+the ON modes,<wbr/> this will be overridden by the camera device
+auto-exposure algorithm,<wbr/> the overridden values are then provided
+back to the user in the corresponding result.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="controls_android.lens.filterDensity">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>lens.<wbr/>filter<wbr/>Density
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">float</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+              <span class="entry_type_hwlevel">[full] </span>
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>The desired setting for the lens neutral density filter(s).<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+              Exposure Value (EV)
+            </td>
+
+            <td class="entry_range">
+              <p><a href="#static_android.lens.info.availableFilterDensities">android.<wbr/>lens.<wbr/>info.<wbr/>available<wbr/>Filter<wbr/>Densities</a></p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_V1">V1</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>This control will not be supported on most camera devices.<wbr/></p>
+<p>Lens filters are typically used to lower the amount of light the
+sensor is exposed to (measured in steps of EV).<wbr/> As used here,<wbr/> an EV
+step is the standard logarithmic representation,<wbr/> which are
+non-negative,<wbr/> and inversely proportional to the amount of light
+hitting the sensor.<wbr/>  For example,<wbr/> setting this to 0 would result
+in no reduction of the incoming light,<wbr/> and setting this to 2 would
+mean that the filter is set to reduce incoming light by two stops
+(allowing 1/<wbr/>4 of the prior amount of light to the sensor).<wbr/></p>
+<p>It may take several frames before the lens filter density changes
+to the requested value.<wbr/> While the filter density is still changing,<wbr/>
+<a href="#dynamic_android.lens.state">android.<wbr/>lens.<wbr/>state</a> will be set to MOVING.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="controls_android.lens.focalLength">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>lens.<wbr/>focal<wbr/>Length
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">float</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>The desired lens focal length; used for optical zoom.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+              Millimeters
+            </td>
+
+            <td class="entry_range">
+              <p><a href="#static_android.lens.info.availableFocalLengths">android.<wbr/>lens.<wbr/>info.<wbr/>available<wbr/>Focal<wbr/>Lengths</a></p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_V1">V1</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>This setting controls the physical focal length of the camera
+device's lens.<wbr/> Changing the focal length changes the field of
+view of the camera device,<wbr/> and is usually used for optical zoom.<wbr/></p>
+<p>Like <a href="#controls_android.lens.focusDistance">android.<wbr/>lens.<wbr/>focus<wbr/>Distance</a> and <a href="#controls_android.lens.aperture">android.<wbr/>lens.<wbr/>aperture</a>,<wbr/> this
+setting won't be applied instantaneously,<wbr/> and it may take several
+frames before the lens can change to the requested focal length.<wbr/>
+While the focal length is still changing,<wbr/> <a href="#dynamic_android.lens.state">android.<wbr/>lens.<wbr/>state</a> will
+be set to MOVING.<wbr/></p>
+<p>Optical zoom will not be supported on most devices.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="controls_android.lens.focusDistance">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>lens.<wbr/>focus<wbr/>Distance
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">float</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+              <span class="entry_type_hwlevel">[full] </span>
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Desired distance to plane of sharpest focus,<wbr/>
+measured from frontmost surface of the lens.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+              See android.<wbr/>lens.<wbr/>info.<wbr/>focus<wbr/>Distance<wbr/>Calibration for details
+            </td>
+
+            <td class="entry_range">
+              <p>&gt;= 0</p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_BC">BC</a></li>
+                  <li><a href="#tag_V1">V1</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>This control can be used for setting manual focus,<wbr/> on devices that support
+the MANUAL_<wbr/>SENSOR capability and have a variable-focus lens (see
+<a href="#static_android.lens.info.minimumFocusDistance">android.<wbr/>lens.<wbr/>info.<wbr/>minimum<wbr/>Focus<wbr/>Distance</a>).<wbr/></p>
+<p>A value of <code>0.<wbr/>0f</code> means infinity focus.<wbr/> The value set will be clamped to
+<code>[0.<wbr/>0f,<wbr/> <a href="#static_android.lens.info.minimumFocusDistance">android.<wbr/>lens.<wbr/>info.<wbr/>minimum<wbr/>Focus<wbr/>Distance</a>]</code>.<wbr/></p>
+<p>Like <a href="#controls_android.lens.focalLength">android.<wbr/>lens.<wbr/>focal<wbr/>Length</a>,<wbr/> this setting won't be applied
+instantaneously,<wbr/> and it may take several frames before the lens
+can move to the requested focus distance.<wbr/> While the lens is still moving,<wbr/>
+<a href="#dynamic_android.lens.state">android.<wbr/>lens.<wbr/>state</a> will be set to MOVING.<wbr/></p>
+<p>LEGACY devices support at most setting this to <code>0.<wbr/>0f</code>
+for infinity focus.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="controls_android.lens.opticalStabilizationMode">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>lens.<wbr/>optical<wbr/>Stabilization<wbr/>Mode
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">byte</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+              <span class="entry_type_hwlevel">[limited] </span>
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">OFF</span>
+                    <span class="entry_type_enum_notes"><p>Optical stabilization is unavailable.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">ON</span>
+                    <span class="entry_type_enum_optional">[optional]</span>
+                    <span class="entry_type_enum_notes"><p>Optical stabilization is enabled.<wbr/></p></span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Sets whether the camera device uses optical image stabilization (OIS)
+when capturing images.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p><a href="#static_android.lens.info.availableOpticalStabilization">android.<wbr/>lens.<wbr/>info.<wbr/>available<wbr/>Optical<wbr/>Stabilization</a></p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_V1">V1</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>OIS is used to compensate for motion blur due to small
+movements of the camera during capture.<wbr/> Unlike digital image
+stabilization (<a href="#controls_android.control.videoStabilizationMode">android.<wbr/>control.<wbr/>video<wbr/>Stabilization<wbr/>Mode</a>),<wbr/> OIS
+makes use of mechanical elements to stabilize the camera
+sensor,<wbr/> and thus allows for longer exposure times before
+camera shake becomes apparent.<wbr/></p>
+<p>Switching between different optical stabilization modes may take several
+frames to initialize; the camera device will report the current mode in
+capture result metadata.<wbr/> For example,<wbr/> when "ON" mode is requested,<wbr/> the
+optical stabilization modes in the first several capture results may still
+be "OFF",<wbr/> and it will become "ON" when the initialization is done.<wbr/></p>
+<p>If a camera device supports both OIS and digital image stabilization
+(<a href="#controls_android.control.videoStabilizationMode">android.<wbr/>control.<wbr/>video<wbr/>Stabilization<wbr/>Mode</a>),<wbr/> turning both modes on may produce undesirable
+interaction,<wbr/> so it is recommended not to enable both at the same time.<wbr/></p>
+<p>Not all devices will support OIS; see
+<a href="#static_android.lens.info.availableOpticalStabilization">android.<wbr/>lens.<wbr/>info.<wbr/>available<wbr/>Optical<wbr/>Stabilization</a> for
+available controls.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+        
+
+      <!-- end of kind -->
+      </tbody>
+      <tr><td colspan="6" class="kind">static</td></tr>
+
+      <thead class="entries_header">
+        <tr>
+          <th class="th_name">Property Name</th>
+          <th class="th_type">Type</th>
+          <th class="th_description">Description</th>
+          <th class="th_units">Units</th>
+          <th class="th_range">Range</th>
+          <th class="th_tags">Tags</th>
+        </tr>
+      </thead>
+
+      <tbody>
+
+        
+
+        
+
+        
+
+        
+                
+            
+
+                
+          <tr class="entry" id="static_android.lens.info.availableApertures">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>lens.<wbr/>info.<wbr/>available<wbr/>Apertures
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">float</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  n
+                </span>
+              <span class="entry_type_visibility"> [public]</span>
+
+
+              <span class="entry_type_hwlevel">[full] </span>
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>List of aperture size values for <a href="#controls_android.lens.aperture">android.<wbr/>lens.<wbr/>aperture</a> that are
+supported by this camera device.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+              The aperture f-number
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_V1">V1</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>If the camera device doesn't support a variable lens aperture,<wbr/>
+this list will contain only one value,<wbr/> which is the fixed aperture size.<wbr/></p>
+<p>If the camera device supports a variable aperture,<wbr/> the aperture values
+in this list will be sorted in ascending order.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.lens.info.availableFilterDensities">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>lens.<wbr/>info.<wbr/>available<wbr/>Filter<wbr/>Densities
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">float</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  n
+                </span>
+              <span class="entry_type_visibility"> [public]</span>
+
+
+              <span class="entry_type_hwlevel">[full] </span>
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>List of neutral density filter values for
+<a href="#controls_android.lens.filterDensity">android.<wbr/>lens.<wbr/>filter<wbr/>Density</a> that are supported by this camera device.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+              Exposure value (EV)
+            </td>
+
+            <td class="entry_range">
+              <p>Values are &gt;= 0</p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_V1">V1</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>If a neutral density filter is not supported by this camera device,<wbr/>
+this list will contain only 0.<wbr/> Otherwise,<wbr/> this list will include every
+filter density supported by the camera device,<wbr/> in ascending order.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.lens.info.availableFocalLengths">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>lens.<wbr/>info.<wbr/>available<wbr/>Focal<wbr/>Lengths
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">float</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  n
+                </span>
+              <span class="entry_type_visibility"> [public]</span>
+
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+                <div class="entry_type_notes">The list of available focal lengths</div>
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>List of focal lengths for <a href="#controls_android.lens.focalLength">android.<wbr/>lens.<wbr/>focal<wbr/>Length</a> that are supported by this camera
+device.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+              Millimeters
+            </td>
+
+            <td class="entry_range">
+              <p>Values are &gt; 0</p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_BC">BC</a></li>
+                  <li><a href="#tag_V1">V1</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>If optical zoom is not supported,<wbr/> this list will only contain
+a single value corresponding to the fixed focal length of the
+device.<wbr/> Otherwise,<wbr/> this list will include every focal length supported
+by the camera device,<wbr/> in ascending order.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.lens.info.availableOpticalStabilization">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>lens.<wbr/>info.<wbr/>available<wbr/>Optical<wbr/>Stabilization
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">byte</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  n
+                </span>
+              <span class="entry_type_visibility"> [public as enumList]</span>
+
+
+              <span class="entry_type_hwlevel">[limited] </span>
+
+
+                <div class="entry_type_notes">list of enums</div>
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>List of optical image stabilization (OIS) modes for
+<a href="#controls_android.lens.opticalStabilizationMode">android.<wbr/>lens.<wbr/>optical<wbr/>Stabilization<wbr/>Mode</a> that are supported by this camera device.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p>Any value listed in <a href="#controls_android.lens.opticalStabilizationMode">android.<wbr/>lens.<wbr/>optical<wbr/>Stabilization<wbr/>Mode</a></p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_V1">V1</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>If OIS is not supported by a given camera device,<wbr/> this list will
+contain only OFF.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.lens.info.hyperfocalDistance">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>lens.<wbr/>info.<wbr/>hyperfocal<wbr/>Distance
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">float</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+              <span class="entry_type_hwlevel">[limited] </span>
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Hyperfocal distance for this lens.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+              See android.<wbr/>lens.<wbr/>info.<wbr/>focus<wbr/>Distance<wbr/>Calibration for details
+            </td>
+
+            <td class="entry_range">
+              <p>If lens is fixed focus,<wbr/> &gt;= 0.<wbr/> If lens has focuser unit,<wbr/> the value is
+within <code>(0.<wbr/>0f,<wbr/> <a href="#static_android.lens.info.minimumFocusDistance">android.<wbr/>lens.<wbr/>info.<wbr/>minimum<wbr/>Focus<wbr/>Distance</a>]</code></p>
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>If the lens is not fixed focus,<wbr/> the camera device will report this
+field when <a href="#static_android.lens.info.focusDistanceCalibration">android.<wbr/>lens.<wbr/>info.<wbr/>focus<wbr/>Distance<wbr/>Calibration</a> is APPROXIMATE or CALIBRATED.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.lens.info.minimumFocusDistance">
+            <td class="entry_name
+             " rowspan="5">
+              android.<wbr/>lens.<wbr/>info.<wbr/>minimum<wbr/>Focus<wbr/>Distance
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">float</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+              <span class="entry_type_hwlevel">[limited] </span>
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Shortest distance from frontmost surface
+of the lens that can be brought into sharp focus.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+              See android.<wbr/>lens.<wbr/>info.<wbr/>focus<wbr/>Distance<wbr/>Calibration for details
+            </td>
+
+            <td class="entry_range">
+              <p>&gt;= 0</p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_V1">V1</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>If the lens is fixed-focus,<wbr/> this will be
+0.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>Mandatory for FULL devices; LIMITED devices
+must always set this value to 0 for fixed-focus; and may omit
+the minimum focus distance otherwise.<wbr/></p>
+<p>This field is also mandatory for all devices advertising
+the MANUAL_<wbr/>SENSOR capability.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.lens.info.shadingMapSize">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>lens.<wbr/>info.<wbr/>shading<wbr/>Map<wbr/>Size
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int32</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  2
+                </span>
+              <span class="entry_type_visibility"> [hidden as size]</span>
+
+
+              <span class="entry_type_hwlevel">[full] </span>
+
+
+                <div class="entry_type_notes">width and height (N,<wbr/> M) of lens shading map provided by the camera device.<wbr/></div>
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Dimensions of lens shading map.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p>Both values &gt;= 1</p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_V1">V1</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>The map should be on the order of 30-40 rows and columns,<wbr/> and
+must be smaller than 64x64.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.lens.info.focusDistanceCalibration">
+            <td class="entry_name
+             " rowspan="5">
+              android.<wbr/>lens.<wbr/>info.<wbr/>focus<wbr/>Distance<wbr/>Calibration
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">byte</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+              <span class="entry_type_hwlevel">[limited] </span>
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">UNCALIBRATED</span>
+                    <span class="entry_type_enum_notes"><p>The lens focus distance is not accurate,<wbr/> and the units used for
+<a href="#controls_android.lens.focusDistance">android.<wbr/>lens.<wbr/>focus<wbr/>Distance</a> do not correspond to any physical units.<wbr/></p>
+<p>Setting the lens to the same focus distance on separate occasions may
+result in a different real focus distance,<wbr/> depending on factors such
+as the orientation of the device,<wbr/> the age of the focusing mechanism,<wbr/>
+and the device temperature.<wbr/> The focus distance value will still be
+in the range of <code>[0,<wbr/> <a href="#static_android.lens.info.minimumFocusDistance">android.<wbr/>lens.<wbr/>info.<wbr/>minimum<wbr/>Focus<wbr/>Distance</a>]</code>,<wbr/> where 0
+represents the farthest focus.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">APPROXIMATE</span>
+                    <span class="entry_type_enum_notes"><p>The lens focus distance is measured in diopters.<wbr/></p>
+<p>However,<wbr/> setting the lens to the same focus distance
+on separate occasions may result in a different real
+focus distance,<wbr/> depending on factors such as the
+orientation of the device,<wbr/> the age of the focusing
+mechanism,<wbr/> and the device temperature.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">CALIBRATED</span>
+                    <span class="entry_type_enum_notes"><p>The lens focus distance is measured in diopters,<wbr/> and
+is calibrated.<wbr/></p>
+<p>The lens mechanism is calibrated so that setting the
+same focus distance is repeatable on multiple
+occasions with good accuracy,<wbr/> and the focus distance
+corresponds to the real physical distance to the plane
+of best focus.<wbr/></p></span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>The lens focus distance calibration quality.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_V1">V1</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>The lens focus distance calibration quality determines the reliability of
+focus related metadata entries,<wbr/> i.<wbr/>e.<wbr/> <a href="#controls_android.lens.focusDistance">android.<wbr/>lens.<wbr/>focus<wbr/>Distance</a>,<wbr/>
+<a href="#dynamic_android.lens.focusRange">android.<wbr/>lens.<wbr/>focus<wbr/>Range</a>,<wbr/> <a href="#static_android.lens.info.hyperfocalDistance">android.<wbr/>lens.<wbr/>info.<wbr/>hyperfocal<wbr/>Distance</a>,<wbr/> and
+<a href="#static_android.lens.info.minimumFocusDistance">android.<wbr/>lens.<wbr/>info.<wbr/>minimum<wbr/>Focus<wbr/>Distance</a>.<wbr/></p>
+<p>APPROXIMATE and CALIBRATED devices report the focus metadata in
+units of diopters (1/<wbr/>meter),<wbr/> so <code>0.<wbr/>0f</code> represents focusing at infinity,<wbr/>
+and increasing positive numbers represent focusing closer and closer
+to the camera device.<wbr/> The focus distance control also uses diopters
+on these devices.<wbr/></p>
+<p>UNCALIBRATED devices do not use units that are directly comparable
+to any real physical measurement,<wbr/> but <code>0.<wbr/>0f</code> still represents farthest
+focus,<wbr/> and <a href="#static_android.lens.info.minimumFocusDistance">android.<wbr/>lens.<wbr/>info.<wbr/>minimum<wbr/>Focus<wbr/>Distance</a> represents the
+nearest focus the device can achieve.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>For devices that advertise APPROXIMATE quality or higher,<wbr/> diopters 0 (infinity
+focus) must work.<wbr/> When autofocus is disabled (<a href="#controls_android.control.afMode">android.<wbr/>control.<wbr/>af<wbr/>Mode</a> == OFF)
+and the lens focus distance is set to 0 diopters
+(<a href="#controls_android.lens.focusDistance">android.<wbr/>lens.<wbr/>focus<wbr/>Distance</a> == 0),<wbr/> the lens will move to focus at infinity
+and is stably focused at infinity even if the device tilts.<wbr/> It may take the
+lens some time to move; during the move the lens state should be MOVING and
+the output diopter value should be changing toward 0.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+        
+        
+
+                
+          <tr class="entry" id="static_android.lens.facing">
+            <td class="entry_name
+             " rowspan="1">
+              android.<wbr/>lens.<wbr/>facing
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">byte</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">FRONT</span>
+                    <span class="entry_type_enum_notes"><p>The camera device faces the same direction as the device's screen.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">BACK</span>
+                    <span class="entry_type_enum_notes"><p>The camera device faces the opposite direction as the device's screen.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">EXTERNAL</span>
+                    <span class="entry_type_enum_notes"><p>The camera device is an external camera,<wbr/> and has no fixed facing relative to the
+device's screen.<wbr/></p></span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Direction the camera faces relative to
+device screen.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.lens.poseRotation">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>lens.<wbr/>pose<wbr/>Rotation
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">float</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  4
+                </span>
+              <span class="entry_type_visibility"> [public]</span>
+
+
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>The orientation of the camera relative to the sensor
+coordinate system.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+              
+            Quaternion coefficients
+          
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_DEPTH">DEPTH</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>The four coefficients that describe the quaternion
+rotation from the Android sensor coordinate system to a
+camera-aligned coordinate system where the X-axis is
+aligned with the long side of the image sensor,<wbr/> the Y-axis
+is aligned with the short side of the image sensor,<wbr/> and
+the Z-axis is aligned with the optical axis of the sensor.<wbr/></p>
+<p>To convert from the quaternion coefficients <code>(x,<wbr/>y,<wbr/>z,<wbr/>w)</code>
+to the axis of rotation <code>(a_<wbr/>x,<wbr/> a_<wbr/>y,<wbr/> a_<wbr/>z)</code> and rotation
+amount <code>theta</code>,<wbr/> the following formulas can be used:</p>
+<pre><code> theta = 2 * acos(w)
+a_<wbr/>x = x /<wbr/> sin(theta/<wbr/>2)
+a_<wbr/>y = y /<wbr/> sin(theta/<wbr/>2)
+a_<wbr/>z = z /<wbr/> sin(theta/<wbr/>2)
+</code></pre>
+<p>To create a 3x3 rotation matrix that applies the rotation
+defined by this quaternion,<wbr/> the following matrix can be
+used:</p>
+<pre><code>R = [ 1 - 2y^2 - 2z^2,<wbr/>       2xy - 2zw,<wbr/>       2xz + 2yw,<wbr/>
+           2xy + 2zw,<wbr/> 1 - 2x^2 - 2z^2,<wbr/>       2yz - 2xw,<wbr/>
+           2xz - 2yw,<wbr/>       2yz + 2xw,<wbr/> 1 - 2x^2 - 2y^2 ]
+</code></pre>
+<p>This matrix can then be used to apply the rotation to a
+ column vector point with</p>
+<p><code>p' = Rp</code></p>
+<p>where <code>p</code> is in the device sensor coordinate system,<wbr/> and
+ <code>p'</code> is in the camera-oriented coordinate system.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.lens.poseTranslation">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>lens.<wbr/>pose<wbr/>Translation
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">float</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  3
+                </span>
+              <span class="entry_type_visibility"> [public]</span>
+
+
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Position of the camera optical center.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+              Meters
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_DEPTH">DEPTH</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>The position of the camera device's lens optical center,<wbr/>
+as a three-dimensional vector <code>(x,<wbr/>y,<wbr/>z)</code>,<wbr/> relative to the
+optical center of the largest camera device facing in the
+same direction as this camera,<wbr/> in the <a href="https://developer.android.com/reference/android/hardware/SensorEvent.html">Android sensor coordinate
+axes</a>.<wbr/> Note that only the axis definitions are shared with
+the sensor coordinate system,<wbr/> but not the origin.<wbr/></p>
+<p>If this device is the largest or only camera device with a
+given facing,<wbr/> then this position will be <code>(0,<wbr/> 0,<wbr/> 0)</code>; a
+camera device with a lens optical center located 3 cm from
+the main sensor along the +X axis (to the right from the
+user's perspective) will report <code>(0.<wbr/>03,<wbr/> 0,<wbr/> 0)</code>.<wbr/></p>
+<p>To transform pixel coordinates between two cameras
+facing the same direction,<wbr/> first the source camera
+<a href="#static_android.lens.radialDistortion">android.<wbr/>lens.<wbr/>radial<wbr/>Distortion</a> must be corrected for.<wbr/>  Then
+the source camera <a href="#static_android.lens.intrinsicCalibration">android.<wbr/>lens.<wbr/>intrinsic<wbr/>Calibration</a> needs
+to be applied,<wbr/> followed by the <a href="#static_android.lens.poseRotation">android.<wbr/>lens.<wbr/>pose<wbr/>Rotation</a>
+of the source camera,<wbr/> the translation of the source camera
+relative to the destination camera,<wbr/> the
+<a href="#static_android.lens.poseRotation">android.<wbr/>lens.<wbr/>pose<wbr/>Rotation</a> of the destination camera,<wbr/> and
+finally the inverse of <a href="#static_android.lens.intrinsicCalibration">android.<wbr/>lens.<wbr/>intrinsic<wbr/>Calibration</a>
+of the destination camera.<wbr/> This obtains a
+radial-distortion-free coordinate in the destination
+camera pixel coordinates.<wbr/></p>
+<p>To compare this against a real image from the destination
+camera,<wbr/> the destination camera image then needs to be
+corrected for radial distortion before comparison or
+sampling.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.lens.intrinsicCalibration">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>lens.<wbr/>intrinsic<wbr/>Calibration
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">float</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  5
+                </span>
+              <span class="entry_type_visibility"> [public]</span>
+
+
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>The parameters for this camera device's intrinsic
+calibration.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+              
+            Pixels in the
+            android.<wbr/>sensor.<wbr/>info.<wbr/>pre<wbr/>Correction<wbr/>Active<wbr/>Array<wbr/>Size
+            coordinate system.<wbr/>
+          
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_DEPTH">DEPTH</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>The five calibration parameters that describe the
+transform from camera-centric 3D coordinates to sensor
+pixel coordinates:</p>
+<pre><code>[f_<wbr/>x,<wbr/> f_<wbr/>y,<wbr/> c_<wbr/>x,<wbr/> c_<wbr/>y,<wbr/> s]
+</code></pre>
+<p>Where <code>f_<wbr/>x</code> and <code>f_<wbr/>y</code> are the horizontal and vertical
+focal lengths,<wbr/> <code>[c_<wbr/>x,<wbr/> c_<wbr/>y]</code> is the position of the optical
+axis,<wbr/> and <code>s</code> is a skew parameter for the sensor plane not
+being aligned with the lens plane.<wbr/></p>
+<p>These are typically used within a transformation matrix K:</p>
+<pre><code>K = [ f_<wbr/>x,<wbr/>   s,<wbr/> c_<wbr/>x,<wbr/>
+       0,<wbr/> f_<wbr/>y,<wbr/> c_<wbr/>y,<wbr/>
+       0,<wbr/>    0,<wbr/>   1 ]
+</code></pre>
+<p>which can then be combined with the camera pose rotation
+<code>R</code> and translation <code>t</code> (<a href="#static_android.lens.poseRotation">android.<wbr/>lens.<wbr/>pose<wbr/>Rotation</a> and
+<a href="#static_android.lens.poseTranslation">android.<wbr/>lens.<wbr/>pose<wbr/>Translation</a>,<wbr/> respectively) to calculate the
+complete transform from world coordinates to pixel
+coordinates:</p>
+<pre><code>P = [ K 0   * [ R t
+     0 1 ]     0 1 ]
+</code></pre>
+<p>and with <code>p_<wbr/>w</code> being a point in the world coordinate system
+and <code>p_<wbr/>s</code> being a point in the camera active pixel array
+coordinate system,<wbr/> and with the mapping including the
+homogeneous division by z:</p>
+<pre><code> p_<wbr/>h = (x_<wbr/>h,<wbr/> y_<wbr/>h,<wbr/> z_<wbr/>h) = P p_<wbr/>w
+p_<wbr/>s = p_<wbr/>h /<wbr/> z_<wbr/>h
+</code></pre>
+<p>so <code>[x_<wbr/>s,<wbr/> y_<wbr/>s]</code> are the pixel coordinates of the world
+point,<wbr/> <code>z_<wbr/>s = 1</code>,<wbr/> and <code>w_<wbr/>s</code> is a measurement of disparity
+(depth) in pixel coordinates.<wbr/></p>
+<p>Note that the coordinate system for this transform is the
+<a href="#static_android.sensor.info.preCorrectionActiveArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>pre<wbr/>Correction<wbr/>Active<wbr/>Array<wbr/>Size</a> system,<wbr/>
+where <code>(0,<wbr/>0)</code> is the top-left of the
+preCorrectionActiveArraySize rectangle.<wbr/> Once the pose and
+intrinsic calibration transforms have been applied to a
+world point,<wbr/> then the <a href="#static_android.lens.radialDistortion">android.<wbr/>lens.<wbr/>radial<wbr/>Distortion</a>
+transform needs to be applied,<wbr/> and the result adjusted to
+be in the <a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a> coordinate
+system (where <code>(0,<wbr/> 0)</code> is the top-left of the
+activeArraySize rectangle),<wbr/> to determine the final pixel
+coordinate of the world point for processed (non-RAW)
+output buffers.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.lens.radialDistortion">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>lens.<wbr/>radial<wbr/>Distortion
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">float</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  6
+                </span>
+              <span class="entry_type_visibility"> [public]</span>
+
+
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>The correction coefficients to correct for this camera device's
+radial and tangential lens distortion.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+              
+            Unitless coefficients.<wbr/>
+          
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_DEPTH">DEPTH</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>Four radial distortion coefficients <code>[kappa_<wbr/>0,<wbr/> kappa_<wbr/>1,<wbr/> kappa_<wbr/>2,<wbr/>
+kappa_<wbr/>3]</code> and two tangential distortion coefficients
+<code>[kappa_<wbr/>4,<wbr/> kappa_<wbr/>5]</code> that can be used to correct the
+lens's geometric distortion with the mapping equations:</p>
+<pre><code> x_<wbr/>c = x_<wbr/>i * ( kappa_<wbr/>0 + kappa_<wbr/>1 * r^2 + kappa_<wbr/>2 * r^4 + kappa_<wbr/>3 * r^6 ) +
+       kappa_<wbr/>4 * (2 * x_<wbr/>i * y_<wbr/>i) + kappa_<wbr/>5 * ( r^2 + 2 * x_<wbr/>i^2 )
+ y_<wbr/>c = y_<wbr/>i * ( kappa_<wbr/>0 + kappa_<wbr/>1 * r^2 + kappa_<wbr/>2 * r^4 + kappa_<wbr/>3 * r^6 ) +
+       kappa_<wbr/>5 * (2 * x_<wbr/>i * y_<wbr/>i) + kappa_<wbr/>4 * ( r^2 + 2 * y_<wbr/>i^2 )
+</code></pre>
+<p>Here,<wbr/> <code>[x_<wbr/>c,<wbr/> y_<wbr/>c]</code> are the coordinates to sample in the
+input image that correspond to the pixel values in the
+corrected image at the coordinate <code>[x_<wbr/>i,<wbr/> y_<wbr/>i]</code>:</p>
+<pre><code> correctedImage(x_<wbr/>i,<wbr/> y_<wbr/>i) = sample_<wbr/>at(x_<wbr/>c,<wbr/> y_<wbr/>c,<wbr/> inputImage)
+</code></pre>
+<p>The pixel coordinates are defined in a normalized
+coordinate system related to the
+<a href="#static_android.lens.intrinsicCalibration">android.<wbr/>lens.<wbr/>intrinsic<wbr/>Calibration</a> calibration fields.<wbr/>
+Both <code>[x_<wbr/>i,<wbr/> y_<wbr/>i]</code> and <code>[x_<wbr/>c,<wbr/> y_<wbr/>c]</code> have <code>(0,<wbr/>0)</code> at the
+lens optical center <code>[c_<wbr/>x,<wbr/> c_<wbr/>y]</code>.<wbr/> The maximum magnitudes
+of both x and y coordinates are normalized to be 1 at the
+edge further from the optical center,<wbr/> so the range
+for both dimensions is <code>-1 &lt;= x &lt;= 1</code>.<wbr/></p>
+<p>Finally,<wbr/> <code>r</code> represents the radial distance from the
+optical center,<wbr/> <code>r^2 = x_<wbr/>i^2 + y_<wbr/>i^2</code>,<wbr/> and its magnitude
+is therefore no larger than <code>|<wbr/>r|<wbr/> &lt;= sqrt(2)</code>.<wbr/></p>
+<p>The distortion model used is the Brown-Conrady model.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+        
+
+      <!-- end of kind -->
+      </tbody>
+      <tr><td colspan="6" class="kind">dynamic</td></tr>
+
+      <thead class="entries_header">
+        <tr>
+          <th class="th_name">Property Name</th>
+          <th class="th_type">Type</th>
+          <th class="th_description">Description</th>
+          <th class="th_units">Units</th>
+          <th class="th_range">Range</th>
+          <th class="th_tags">Tags</th>
+        </tr>
+      </thead>
+
+      <tbody>
+
+        
+
+        
+
+        
+
+        
+
+                
+          <tr class="entry" id="dynamic_android.lens.aperture">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>lens.<wbr/>aperture
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">float</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+              <span class="entry_type_hwlevel">[full] </span>
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>The desired lens aperture size,<wbr/> as a ratio of lens focal length to the
+effective aperture diameter.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+              The f-number (f/<wbr/>N)
+            </td>
+
+            <td class="entry_range">
+              <p><a href="#static_android.lens.info.availableApertures">android.<wbr/>lens.<wbr/>info.<wbr/>available<wbr/>Apertures</a></p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_V1">V1</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>Setting this value is only supported on the camera devices that have a variable
+aperture lens.<wbr/></p>
+<p>When this is supported and <a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a> is OFF,<wbr/>
+this can be set along with <a href="#controls_android.sensor.exposureTime">android.<wbr/>sensor.<wbr/>exposure<wbr/>Time</a>,<wbr/>
+<a href="#controls_android.sensor.sensitivity">android.<wbr/>sensor.<wbr/>sensitivity</a>,<wbr/> and <a href="#controls_android.sensor.frameDuration">android.<wbr/>sensor.<wbr/>frame<wbr/>Duration</a>
+to achieve manual exposure control.<wbr/></p>
+<p>The requested aperture value may take several frames to reach the
+requested value; the camera device will report the current (intermediate)
+aperture size in capture result metadata while the aperture is changing.<wbr/>
+While the aperture is still changing,<wbr/> <a href="#dynamic_android.lens.state">android.<wbr/>lens.<wbr/>state</a> will be set to MOVING.<wbr/></p>
+<p>When this is supported and <a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a> is one of
+the ON modes,<wbr/> this will be overridden by the camera device
+auto-exposure algorithm,<wbr/> the overridden values are then provided
+back to the user in the corresponding result.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="dynamic_android.lens.filterDensity">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>lens.<wbr/>filter<wbr/>Density
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">float</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+              <span class="entry_type_hwlevel">[full] </span>
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>The desired setting for the lens neutral density filter(s).<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+              Exposure Value (EV)
+            </td>
+
+            <td class="entry_range">
+              <p><a href="#static_android.lens.info.availableFilterDensities">android.<wbr/>lens.<wbr/>info.<wbr/>available<wbr/>Filter<wbr/>Densities</a></p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_V1">V1</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>This control will not be supported on most camera devices.<wbr/></p>
+<p>Lens filters are typically used to lower the amount of light the
+sensor is exposed to (measured in steps of EV).<wbr/> As used here,<wbr/> an EV
+step is the standard logarithmic representation,<wbr/> which are
+non-negative,<wbr/> and inversely proportional to the amount of light
+hitting the sensor.<wbr/>  For example,<wbr/> setting this to 0 would result
+in no reduction of the incoming light,<wbr/> and setting this to 2 would
+mean that the filter is set to reduce incoming light by two stops
+(allowing 1/<wbr/>4 of the prior amount of light to the sensor).<wbr/></p>
+<p>It may take several frames before the lens filter density changes
+to the requested value.<wbr/> While the filter density is still changing,<wbr/>
+<a href="#dynamic_android.lens.state">android.<wbr/>lens.<wbr/>state</a> will be set to MOVING.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="dynamic_android.lens.focalLength">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>lens.<wbr/>focal<wbr/>Length
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">float</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>The desired lens focal length; used for optical zoom.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+              Millimeters
+            </td>
+
+            <td class="entry_range">
+              <p><a href="#static_android.lens.info.availableFocalLengths">android.<wbr/>lens.<wbr/>info.<wbr/>available<wbr/>Focal<wbr/>Lengths</a></p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_BC">BC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>This setting controls the physical focal length of the camera
+device's lens.<wbr/> Changing the focal length changes the field of
+view of the camera device,<wbr/> and is usually used for optical zoom.<wbr/></p>
+<p>Like <a href="#controls_android.lens.focusDistance">android.<wbr/>lens.<wbr/>focus<wbr/>Distance</a> and <a href="#controls_android.lens.aperture">android.<wbr/>lens.<wbr/>aperture</a>,<wbr/> this
+setting won't be applied instantaneously,<wbr/> and it may take several
+frames before the lens can change to the requested focal length.<wbr/>
+While the focal length is still changing,<wbr/> <a href="#dynamic_android.lens.state">android.<wbr/>lens.<wbr/>state</a> will
+be set to MOVING.<wbr/></p>
+<p>Optical zoom will not be supported on most devices.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="dynamic_android.lens.focusDistance">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>lens.<wbr/>focus<wbr/>Distance
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">float</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+              <span class="entry_type_hwlevel">[full] </span>
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Desired distance to plane of sharpest focus,<wbr/>
+measured from frontmost surface of the lens.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+              See android.<wbr/>lens.<wbr/>info.<wbr/>focus<wbr/>Distance<wbr/>Calibration for details
+            </td>
+
+            <td class="entry_range">
+              <p>&gt;= 0</p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_BC">BC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>Should be zero for fixed-focus cameras.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="dynamic_android.lens.focusRange">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>lens.<wbr/>focus<wbr/>Range
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">float</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  2
+                </span>
+              <span class="entry_type_visibility"> [public as pairFloatFloat]</span>
+
+
+              <span class="entry_type_hwlevel">[limited] </span>
+
+
+                <div class="entry_type_notes">Range of scene distances that are in focus</div>
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>The range of scene distances that are in
+sharp focus (depth of field).<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+              A pair of focus distances in diopters: (near,<wbr/>
+          far); see android.<wbr/>lens.<wbr/>info.<wbr/>focus<wbr/>Distance<wbr/>Calibration for details.<wbr/>
+            </td>
+
+            <td class="entry_range">
+              <p>&gt;=0</p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_BC">BC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>If variable focus is not supported,<wbr/> the camera device can still report a
+fixed depth of field range.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="dynamic_android.lens.opticalStabilizationMode">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>lens.<wbr/>optical<wbr/>Stabilization<wbr/>Mode
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">byte</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+              <span class="entry_type_hwlevel">[limited] </span>
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">OFF</span>
+                    <span class="entry_type_enum_notes"><p>Optical stabilization is unavailable.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">ON</span>
+                    <span class="entry_type_enum_optional">[optional]</span>
+                    <span class="entry_type_enum_notes"><p>Optical stabilization is enabled.<wbr/></p></span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Sets whether the camera device uses optical image stabilization (OIS)
+when capturing images.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p><a href="#static_android.lens.info.availableOpticalStabilization">android.<wbr/>lens.<wbr/>info.<wbr/>available<wbr/>Optical<wbr/>Stabilization</a></p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_V1">V1</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>OIS is used to compensate for motion blur due to small
+movements of the camera during capture.<wbr/> Unlike digital image
+stabilization (<a href="#controls_android.control.videoStabilizationMode">android.<wbr/>control.<wbr/>video<wbr/>Stabilization<wbr/>Mode</a>),<wbr/> OIS
+makes use of mechanical elements to stabilize the camera
+sensor,<wbr/> and thus allows for longer exposure times before
+camera shake becomes apparent.<wbr/></p>
+<p>Switching between different optical stabilization modes may take several
+frames to initialize,<wbr/> the camera device will report the current mode in
+capture result metadata.<wbr/> For example,<wbr/> when "ON" mode is requested,<wbr/> the
+optical stabilization modes in the first several capture results may still
+be "OFF",<wbr/> and it will become "ON" when the initialization is done.<wbr/></p>
+<p>If a camera device supports both OIS and digital image stabilization
+(<a href="#controls_android.control.videoStabilizationMode">android.<wbr/>control.<wbr/>video<wbr/>Stabilization<wbr/>Mode</a>),<wbr/> turning both modes on may produce undesirable
+interaction,<wbr/> so it is recommended not to enable both at the same time.<wbr/></p>
+<p>Not all devices will support OIS; see
+<a href="#static_android.lens.info.availableOpticalStabilization">android.<wbr/>lens.<wbr/>info.<wbr/>available<wbr/>Optical<wbr/>Stabilization</a> for
+available controls.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="dynamic_android.lens.state">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>lens.<wbr/>state
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">byte</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+              <span class="entry_type_hwlevel">[limited] </span>
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">STATIONARY</span>
+                    <span class="entry_type_enum_notes"><p>The lens parameters (<a href="#controls_android.lens.focalLength">android.<wbr/>lens.<wbr/>focal<wbr/>Length</a>,<wbr/> <a href="#controls_android.lens.focusDistance">android.<wbr/>lens.<wbr/>focus<wbr/>Distance</a>,<wbr/>
+<a href="#controls_android.lens.filterDensity">android.<wbr/>lens.<wbr/>filter<wbr/>Density</a> and <a href="#controls_android.lens.aperture">android.<wbr/>lens.<wbr/>aperture</a>) are not changing.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">MOVING</span>
+                    <span class="entry_type_enum_notes"><p>One or several of the lens parameters
+(<a href="#controls_android.lens.focalLength">android.<wbr/>lens.<wbr/>focal<wbr/>Length</a>,<wbr/> <a href="#controls_android.lens.focusDistance">android.<wbr/>lens.<wbr/>focus<wbr/>Distance</a>,<wbr/>
+<a href="#controls_android.lens.filterDensity">android.<wbr/>lens.<wbr/>filter<wbr/>Density</a> or <a href="#controls_android.lens.aperture">android.<wbr/>lens.<wbr/>aperture</a>) is
+currently changing.<wbr/></p></span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Current lens status.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_V1">V1</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>For lens parameters <a href="#controls_android.lens.focalLength">android.<wbr/>lens.<wbr/>focal<wbr/>Length</a>,<wbr/> <a href="#controls_android.lens.focusDistance">android.<wbr/>lens.<wbr/>focus<wbr/>Distance</a>,<wbr/>
+<a href="#controls_android.lens.filterDensity">android.<wbr/>lens.<wbr/>filter<wbr/>Density</a> and <a href="#controls_android.lens.aperture">android.<wbr/>lens.<wbr/>aperture</a>,<wbr/> when changes are requested,<wbr/>
+they may take several frames to reach the requested values.<wbr/> This state indicates
+the current status of the lens parameters.<wbr/></p>
+<p>When the state is STATIONARY,<wbr/> the lens parameters are not changing.<wbr/> This could be
+either because the parameters are all fixed,<wbr/> or because the lens has had enough
+time to reach the most recently-requested values.<wbr/>
+If all these lens parameters are not changeable for a camera device,<wbr/> as listed below:</p>
+<ul>
+<li>Fixed focus (<code><a href="#static_android.lens.info.minimumFocusDistance">android.<wbr/>lens.<wbr/>info.<wbr/>minimum<wbr/>Focus<wbr/>Distance</a> == 0</code>),<wbr/> which means
+<a href="#controls_android.lens.focusDistance">android.<wbr/>lens.<wbr/>focus<wbr/>Distance</a> parameter will always be 0.<wbr/></li>
+<li>Fixed focal length (<a href="#static_android.lens.info.availableFocalLengths">android.<wbr/>lens.<wbr/>info.<wbr/>available<wbr/>Focal<wbr/>Lengths</a> contains single value),<wbr/>
+which means the optical zoom is not supported.<wbr/></li>
+<li>No ND filter (<a href="#static_android.lens.info.availableFilterDensities">android.<wbr/>lens.<wbr/>info.<wbr/>available<wbr/>Filter<wbr/>Densities</a> contains only 0).<wbr/></li>
+<li>Fixed aperture (<a href="#static_android.lens.info.availableApertures">android.<wbr/>lens.<wbr/>info.<wbr/>available<wbr/>Apertures</a> contains single value).<wbr/></li>
+</ul>
+<p>Then this state will always be STATIONARY.<wbr/></p>
+<p>When the state is MOVING,<wbr/> it indicates that at least one of the lens parameters
+is changing.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="dynamic_android.lens.poseRotation">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>lens.<wbr/>pose<wbr/>Rotation
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">float</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  4
+                </span>
+              <span class="entry_type_visibility"> [public]</span>
+
+
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>The orientation of the camera relative to the sensor
+coordinate system.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+              
+            Quaternion coefficients
+          
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_DEPTH">DEPTH</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>The four coefficients that describe the quaternion
+rotation from the Android sensor coordinate system to a
+camera-aligned coordinate system where the X-axis is
+aligned with the long side of the image sensor,<wbr/> the Y-axis
+is aligned with the short side of the image sensor,<wbr/> and
+the Z-axis is aligned with the optical axis of the sensor.<wbr/></p>
+<p>To convert from the quaternion coefficients <code>(x,<wbr/>y,<wbr/>z,<wbr/>w)</code>
+to the axis of rotation <code>(a_<wbr/>x,<wbr/> a_<wbr/>y,<wbr/> a_<wbr/>z)</code> and rotation
+amount <code>theta</code>,<wbr/> the following formulas can be used:</p>
+<pre><code> theta = 2 * acos(w)
+a_<wbr/>x = x /<wbr/> sin(theta/<wbr/>2)
+a_<wbr/>y = y /<wbr/> sin(theta/<wbr/>2)
+a_<wbr/>z = z /<wbr/> sin(theta/<wbr/>2)
+</code></pre>
+<p>To create a 3x3 rotation matrix that applies the rotation
+defined by this quaternion,<wbr/> the following matrix can be
+used:</p>
+<pre><code>R = [ 1 - 2y^2 - 2z^2,<wbr/>       2xy - 2zw,<wbr/>       2xz + 2yw,<wbr/>
+           2xy + 2zw,<wbr/> 1 - 2x^2 - 2z^2,<wbr/>       2yz - 2xw,<wbr/>
+           2xz - 2yw,<wbr/>       2yz + 2xw,<wbr/> 1 - 2x^2 - 2y^2 ]
+</code></pre>
+<p>This matrix can then be used to apply the rotation to a
+ column vector point with</p>
+<p><code>p' = Rp</code></p>
+<p>where <code>p</code> is in the device sensor coordinate system,<wbr/> and
+ <code>p'</code> is in the camera-oriented coordinate system.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="dynamic_android.lens.poseTranslation">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>lens.<wbr/>pose<wbr/>Translation
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">float</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  3
+                </span>
+              <span class="entry_type_visibility"> [public]</span>
+
+
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Position of the camera optical center.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+              Meters
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_DEPTH">DEPTH</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>The position of the camera device's lens optical center,<wbr/>
+as a three-dimensional vector <code>(x,<wbr/>y,<wbr/>z)</code>,<wbr/> relative to the
+optical center of the largest camera device facing in the
+same direction as this camera,<wbr/> in the <a href="https://developer.android.com/reference/android/hardware/SensorEvent.html">Android sensor coordinate
+axes</a>.<wbr/> Note that only the axis definitions are shared with
+the sensor coordinate system,<wbr/> but not the origin.<wbr/></p>
+<p>If this device is the largest or only camera device with a
+given facing,<wbr/> then this position will be <code>(0,<wbr/> 0,<wbr/> 0)</code>; a
+camera device with a lens optical center located 3 cm from
+the main sensor along the +X axis (to the right from the
+user's perspective) will report <code>(0.<wbr/>03,<wbr/> 0,<wbr/> 0)</code>.<wbr/></p>
+<p>To transform pixel coordinates between two cameras
+facing the same direction,<wbr/> first the source camera
+<a href="#static_android.lens.radialDistortion">android.<wbr/>lens.<wbr/>radial<wbr/>Distortion</a> must be corrected for.<wbr/>  Then
+the source camera <a href="#static_android.lens.intrinsicCalibration">android.<wbr/>lens.<wbr/>intrinsic<wbr/>Calibration</a> needs
+to be applied,<wbr/> followed by the <a href="#static_android.lens.poseRotation">android.<wbr/>lens.<wbr/>pose<wbr/>Rotation</a>
+of the source camera,<wbr/> the translation of the source camera
+relative to the destination camera,<wbr/> the
+<a href="#static_android.lens.poseRotation">android.<wbr/>lens.<wbr/>pose<wbr/>Rotation</a> of the destination camera,<wbr/> and
+finally the inverse of <a href="#static_android.lens.intrinsicCalibration">android.<wbr/>lens.<wbr/>intrinsic<wbr/>Calibration</a>
+of the destination camera.<wbr/> This obtains a
+radial-distortion-free coordinate in the destination
+camera pixel coordinates.<wbr/></p>
+<p>To compare this against a real image from the destination
+camera,<wbr/> the destination camera image then needs to be
+corrected for radial distortion before comparison or
+sampling.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="dynamic_android.lens.intrinsicCalibration">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>lens.<wbr/>intrinsic<wbr/>Calibration
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">float</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  5
+                </span>
+              <span class="entry_type_visibility"> [public]</span>
+
+
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>The parameters for this camera device's intrinsic
+calibration.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+              
+            Pixels in the
+            android.<wbr/>sensor.<wbr/>info.<wbr/>pre<wbr/>Correction<wbr/>Active<wbr/>Array<wbr/>Size
+            coordinate system.<wbr/>
+          
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_DEPTH">DEPTH</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>The five calibration parameters that describe the
+transform from camera-centric 3D coordinates to sensor
+pixel coordinates:</p>
+<pre><code>[f_<wbr/>x,<wbr/> f_<wbr/>y,<wbr/> c_<wbr/>x,<wbr/> c_<wbr/>y,<wbr/> s]
+</code></pre>
+<p>Where <code>f_<wbr/>x</code> and <code>f_<wbr/>y</code> are the horizontal and vertical
+focal lengths,<wbr/> <code>[c_<wbr/>x,<wbr/> c_<wbr/>y]</code> is the position of the optical
+axis,<wbr/> and <code>s</code> is a skew parameter for the sensor plane not
+being aligned with the lens plane.<wbr/></p>
+<p>These are typically used within a transformation matrix K:</p>
+<pre><code>K = [ f_<wbr/>x,<wbr/>   s,<wbr/> c_<wbr/>x,<wbr/>
+       0,<wbr/> f_<wbr/>y,<wbr/> c_<wbr/>y,<wbr/>
+       0,<wbr/>   0,<wbr/>   1 ]
+</code></pre>
+<p>which can then be combined with the camera pose rotation
+<code>R</code> and translation <code>t</code> (<a href="#static_android.lens.poseRotation">android.<wbr/>lens.<wbr/>pose<wbr/>Rotation</a> and
+<a href="#static_android.lens.poseTranslation">android.<wbr/>lens.<wbr/>pose<wbr/>Translation</a>,<wbr/> respectively) to calculate the
+complete transform from world coordinates to pixel
+coordinates:</p>
+<pre><code>P = [ K 0   * [ R t
+     0 1 ]     0 1 ]
+</code></pre>
+<p>and with <code>p_<wbr/>w</code> being a point in the world coordinate system
+and <code>p_<wbr/>s</code> being a point in the camera active pixel array
+coordinate system,<wbr/> and with the mapping including the
+homogeneous division by z:</p>
+<pre><code> p_<wbr/>h = (x_<wbr/>h,<wbr/> y_<wbr/>h,<wbr/> z_<wbr/>h) = P p_<wbr/>w
+p_<wbr/>s = p_<wbr/>h /<wbr/> z_<wbr/>h
+</code></pre>
+<p>so <code>[x_<wbr/>s,<wbr/> y_<wbr/>s]</code> is the pixel coordinates of the world
+point,<wbr/> <code>z_<wbr/>s = 1</code>,<wbr/> and <code>w_<wbr/>s</code> is a measurement of disparity
+(depth) in pixel coordinates.<wbr/></p>
+<p>Note that the coordinate system for this transform is the
+<a href="#static_android.sensor.info.preCorrectionActiveArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>pre<wbr/>Correction<wbr/>Active<wbr/>Array<wbr/>Size</a> system,<wbr/>
+where <code>(0,<wbr/>0)</code> is the top-left of the
+preCorrectionActiveArraySize rectangle.<wbr/> Once the pose and
+intrinsic calibration transforms have been applied to a
+world point,<wbr/> then the <a href="#static_android.lens.radialDistortion">android.<wbr/>lens.<wbr/>radial<wbr/>Distortion</a>
+transform needs to be applied,<wbr/> and the result adjusted to
+be in the <a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a> coordinate
+system (where <code>(0,<wbr/> 0)</code> is the top-left of the
+activeArraySize rectangle),<wbr/> to determine the final pixel
+coordinate of the world point for processed (non-RAW)
+output buffers.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="dynamic_android.lens.radialDistortion">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>lens.<wbr/>radial<wbr/>Distortion
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">float</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  6
+                </span>
+              <span class="entry_type_visibility"> [public]</span>
+
+
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>The correction coefficients to correct for this camera device's
+radial and tangential lens distortion.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+              
+            Unitless coefficients.<wbr/>
+          
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_DEPTH">DEPTH</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>Four radial distortion coefficients <code>[kappa_<wbr/>0,<wbr/> kappa_<wbr/>1,<wbr/> kappa_<wbr/>2,<wbr/>
+kappa_<wbr/>3]</code> and two tangential distortion coefficients
+<code>[kappa_<wbr/>4,<wbr/> kappa_<wbr/>5]</code> that can be used to correct the
+lens's geometric distortion with the mapping equations:</p>
+<pre><code> x_<wbr/>c = x_<wbr/>i * ( kappa_<wbr/>0 + kappa_<wbr/>1 * r^2 + kappa_<wbr/>2 * r^4 + kappa_<wbr/>3 * r^6 ) +
+       kappa_<wbr/>4 * (2 * x_<wbr/>i * y_<wbr/>i) + kappa_<wbr/>5 * ( r^2 + 2 * x_<wbr/>i^2 )
+ y_<wbr/>c = y_<wbr/>i * ( kappa_<wbr/>0 + kappa_<wbr/>1 * r^2 + kappa_<wbr/>2 * r^4 + kappa_<wbr/>3 * r^6 ) +
+       kappa_<wbr/>5 * (2 * x_<wbr/>i * y_<wbr/>i) + kappa_<wbr/>4 * ( r^2 + 2 * y_<wbr/>i^2 )
+</code></pre>
+<p>Here,<wbr/> <code>[x_<wbr/>c,<wbr/> y_<wbr/>c]</code> are the coordinates to sample in the
+input image that correspond to the pixel values in the
+corrected image at the coordinate <code>[x_<wbr/>i,<wbr/> y_<wbr/>i]</code>:</p>
+<pre><code> correctedImage(x_<wbr/>i,<wbr/> y_<wbr/>i) = sample_<wbr/>at(x_<wbr/>c,<wbr/> y_<wbr/>c,<wbr/> inputImage)
+</code></pre>
+<p>The pixel coordinates are defined in a normalized
+coordinate system related to the
+<a href="#static_android.lens.intrinsicCalibration">android.<wbr/>lens.<wbr/>intrinsic<wbr/>Calibration</a> calibration fields.<wbr/>
+Both <code>[x_<wbr/>i,<wbr/> y_<wbr/>i]</code> and <code>[x_<wbr/>c,<wbr/> y_<wbr/>c]</code> have <code>(0,<wbr/>0)</code> at the
+lens optical center <code>[c_<wbr/>x,<wbr/> c_<wbr/>y]</code>.<wbr/> The maximum magnitudes
+of both x and y coordinates are normalized to be 1 at the
+edge further from the optical center,<wbr/> so the range
+for both dimensions is <code>-1 &lt;= x &lt;= 1</code>.<wbr/></p>
+<p>Finally,<wbr/> <code>r</code> represents the radial distance from the
+optical center,<wbr/> <code>r^2 = x_<wbr/>i^2 + y_<wbr/>i^2</code>,<wbr/> and its magnitude
+is therefore no larger than <code>|<wbr/>r|<wbr/> &lt;= sqrt(2)</code>.<wbr/></p>
+<p>The distortion model used is the Brown-Conrady model.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+        
+
+      <!-- end of kind -->
+      </tbody>
+
+  <!-- end of section -->
+  <tr><td colspan="6" id="section_noiseReduction" class="section">noiseReduction</td></tr>
+
+
+      <tr><td colspan="6" class="kind">controls</td></tr>
+
+      <thead class="entries_header">
+        <tr>
+          <th class="th_name">Property Name</th>
+          <th class="th_type">Type</th>
+          <th class="th_description">Description</th>
+          <th class="th_units">Units</th>
+          <th class="th_range">Range</th>
+          <th class="th_tags">Tags</th>
+        </tr>
+      </thead>
+
+      <tbody>
+
+        
+
+        
+
+        
+
+        
+
+                
+          <tr class="entry" id="controls_android.noiseReduction.mode">
+            <td class="entry_name
+             " rowspan="5">
+              android.<wbr/>noise<wbr/>Reduction.<wbr/>mode
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">byte</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+              <span class="entry_type_hwlevel">[full] </span>
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">OFF</span>
+                    <span class="entry_type_enum_notes"><p>No noise reduction is applied.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">FAST</span>
+                    <span class="entry_type_enum_notes"><p>Noise reduction is applied without reducing frame rate relative to sensor
+output.<wbr/> It may be the same as OFF if noise reduction will reduce frame rate
+relative to sensor.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">HIGH_QUALITY</span>
+                    <span class="entry_type_enum_notes"><p>High-quality noise reduction is applied,<wbr/> at the cost of possibly reduced frame
+rate relative to sensor output.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">MINIMAL</span>
+                    <span class="entry_type_enum_optional">[optional]</span>
+                    <span class="entry_type_enum_notes"><p>MINIMAL noise reduction is applied without reducing frame rate relative to
+sensor output.<wbr/> </p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">ZERO_SHUTTER_LAG</span>
+                    <span class="entry_type_enum_optional">[optional]</span>
+                    <span class="entry_type_enum_notes"><p>Noise reduction is applied at different levels for different output streams,<wbr/>
+based on resolution.<wbr/> Streams at maximum recording resolution (see <a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#createCaptureSession">CameraDevice#createCaptureSession</a>) or below have noise
+reduction applied,<wbr/> while higher-resolution streams have MINIMAL (if supported) or no
+noise reduction applied (if MINIMAL is not supported.<wbr/>) The degree of noise reduction
+for low-resolution streams is tuned so that frame rate is not impacted,<wbr/> and the quality
+is equal to or better than FAST (since it is only applied to lower-resolution outputs,<wbr/>
+quality may improve from FAST).<wbr/></p>
+<p>This mode is intended to be used by applications operating in a zero-shutter-lag mode
+with YUV or PRIVATE reprocessing,<wbr/> where the application continuously captures
+high-resolution intermediate buffers into a circular buffer,<wbr/> from which a final image is
+produced via reprocessing when a user takes a picture.<wbr/>  For such a use case,<wbr/> the
+high-resolution buffers must not have noise reduction applied to maximize efficiency of
+preview and to avoid over-applying noise filtering when reprocessing,<wbr/> while
+low-resolution buffers (used for recording or preview,<wbr/> generally) need noise reduction
+applied for reasonable preview quality.<wbr/></p>
+<p>This mode is guaranteed to be supported by devices that support either the
+YUV_<wbr/>REPROCESSING or PRIVATE_<wbr/>REPROCESSING capabilities
+(<a href="#static_android.request.availableCapabilities">android.<wbr/>request.<wbr/>available<wbr/>Capabilities</a> lists either of those capabilities) and it will
+be the default mode for CAMERA3_<wbr/>TEMPLATE_<wbr/>ZERO_<wbr/>SHUTTER_<wbr/>LAG template.<wbr/></p></span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Mode of operation for the noise reduction algorithm.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p><a href="#static_android.noiseReduction.availableNoiseReductionModes">android.<wbr/>noise<wbr/>Reduction.<wbr/>available<wbr/>Noise<wbr/>Reduction<wbr/>Modes</a></p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_V1">V1</a></li>
+                  <li><a href="#tag_REPROC">REPROC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>The noise reduction algorithm attempts to improve image quality by removing
+excessive noise added by the capture process,<wbr/> especially in dark conditions.<wbr/></p>
+<p>OFF means no noise reduction will be applied by the camera device,<wbr/> for both raw and
+YUV domain.<wbr/></p>
+<p>MINIMAL means that only sensor raw domain basic noise reduction is enabled,<wbr/> to remove
+demosaicing or other processing artifacts.<wbr/> For YUV_<wbr/>REPROCESSING,<wbr/> MINIMAL is same as OFF.<wbr/>
+This mode is optional,<wbr/> and may not be supported by all devices.<wbr/> The application should check
+<a href="#static_android.noiseReduction.availableNoiseReductionModes">android.<wbr/>noise<wbr/>Reduction.<wbr/>available<wbr/>Noise<wbr/>Reduction<wbr/>Modes</a> before using it.<wbr/></p>
+<p>FAST/<wbr/>HIGH_<wbr/>QUALITY both mean camera device determined noise filtering
+will be applied.<wbr/> HIGH_<wbr/>QUALITY mode indicates that the camera device
+will use the highest-quality noise filtering algorithms,<wbr/>
+even if it slows down capture rate.<wbr/> FAST means the camera device will not
+slow down capture rate when applying noise filtering.<wbr/> FAST may be the same as MINIMAL if
+MINIMAL is listed,<wbr/> or the same as OFF if any noise filtering will slow down capture rate.<wbr/>
+Every output stream will have a similar amount of enhancement applied.<wbr/></p>
+<p>ZERO_<wbr/>SHUTTER_<wbr/>LAG is meant to be used by applications that maintain a continuous circular
+buffer of high-resolution images during preview and reprocess image(s) from that buffer
+into a final capture when triggered by the user.<wbr/> In this mode,<wbr/> the camera device applies
+noise reduction to low-resolution streams (below maximum recording resolution) to maximize
+preview quality,<wbr/> but does not apply noise reduction to high-resolution streams,<wbr/> since
+those will be reprocessed later if necessary.<wbr/></p>
+<p>For YUV_<wbr/>REPROCESSING,<wbr/> these FAST/<wbr/>HIGH_<wbr/>QUALITY modes both mean that the camera device
+will apply FAST/<wbr/>HIGH_<wbr/>QUALITY YUV domain noise reduction,<wbr/> respectively.<wbr/> The camera device
+may adjust the noise reduction parameters for best image quality based on the
+<a href="#controls_android.reprocess.effectiveExposureFactor">android.<wbr/>reprocess.<wbr/>effective<wbr/>Exposure<wbr/>Factor</a> if it is set.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>For YUV_<wbr/>REPROCESSING,<wbr/> the HAL can use <a href="#controls_android.reprocess.effectiveExposureFactor">android.<wbr/>reprocess.<wbr/>effective<wbr/>Exposure<wbr/>Factor</a> to
+adjust the internal noise reduction parameters appropriately to get the best quality
+images.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="controls_android.noiseReduction.strength">
+            <td class="entry_name
+             " rowspan="1">
+              android.<wbr/>noise<wbr/>Reduction.<wbr/>strength
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">byte</span>
+
+              <span class="entry_type_visibility"> [system]</span>
+
+
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Control the amount of noise reduction
+applied to the images</p>
+            </td>
+
+            <td class="entry_units">
+              1-10; 10 is max noise reduction
+            </td>
+
+            <td class="entry_range">
+              <p>1 - 10</p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_FUTURE">FUTURE</a></li>
+              </ul>
+            </td>
+
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+        
+
+      <!-- end of kind -->
+      </tbody>
+      <tr><td colspan="6" class="kind">static</td></tr>
+
+      <thead class="entries_header">
+        <tr>
+          <th class="th_name">Property Name</th>
+          <th class="th_type">Type</th>
+          <th class="th_description">Description</th>
+          <th class="th_units">Units</th>
+          <th class="th_range">Range</th>
+          <th class="th_tags">Tags</th>
+        </tr>
+      </thead>
+
+      <tbody>
+
+        
+
+        
+
+        
+
+        
+
+                
+          <tr class="entry" id="static_android.noiseReduction.availableNoiseReductionModes">
+            <td class="entry_name
+             " rowspan="5">
+              android.<wbr/>noise<wbr/>Reduction.<wbr/>available<wbr/>Noise<wbr/>Reduction<wbr/>Modes
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">byte</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  n
+                </span>
+              <span class="entry_type_visibility"> [public as enumList]</span>
+
+
+              <span class="entry_type_hwlevel">[limited] </span>
+
+
+                <div class="entry_type_notes">list of enums</div>
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>List of noise reduction modes for <a href="#controls_android.noiseReduction.mode">android.<wbr/>noise<wbr/>Reduction.<wbr/>mode</a> that are supported
+by this camera device.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p>Any value listed in <a href="#controls_android.noiseReduction.mode">android.<wbr/>noise<wbr/>Reduction.<wbr/>mode</a></p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_V1">V1</a></li>
+                  <li><a href="#tag_REPROC">REPROC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>Full-capability camera devices will always support OFF and FAST.<wbr/></p>
+<p>Camera devices that support YUV_<wbr/>REPROCESSING or PRIVATE_<wbr/>REPROCESSING will support
+ZERO_<wbr/>SHUTTER_<wbr/>LAG.<wbr/></p>
+<p>Legacy-capability camera devices will only support FAST mode.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>HAL must support both FAST and HIGH_<wbr/>QUALITY if noise reduction control is available
+on the camera device,<wbr/> but the underlying implementation can be the same for both modes.<wbr/>
+That is,<wbr/> if the highest quality implementation on the camera device does not slow down
+capture rate,<wbr/> then FAST and HIGH_<wbr/>QUALITY will generate the same output.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+        
+
+      <!-- end of kind -->
+      </tbody>
+      <tr><td colspan="6" class="kind">dynamic</td></tr>
+
+      <thead class="entries_header">
+        <tr>
+          <th class="th_name">Property Name</th>
+          <th class="th_type">Type</th>
+          <th class="th_description">Description</th>
+          <th class="th_units">Units</th>
+          <th class="th_range">Range</th>
+          <th class="th_tags">Tags</th>
+        </tr>
+      </thead>
+
+      <tbody>
+
+        
+
+        
+
+        
+
+        
+
+                
+          <tr class="entry" id="dynamic_android.noiseReduction.mode">
+            <td class="entry_name
+             " rowspan="5">
+              android.<wbr/>noise<wbr/>Reduction.<wbr/>mode
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">byte</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+              <span class="entry_type_hwlevel">[full] </span>
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">OFF</span>
+                    <span class="entry_type_enum_notes"><p>No noise reduction is applied.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">FAST</span>
+                    <span class="entry_type_enum_notes"><p>Noise reduction is applied without reducing frame rate relative to sensor
+output.<wbr/> It may be the same as OFF if noise reduction will reduce frame rate
+relative to sensor.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">HIGH_QUALITY</span>
+                    <span class="entry_type_enum_notes"><p>High-quality noise reduction is applied,<wbr/> at the cost of possibly reduced frame
+rate relative to sensor output.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">MINIMAL</span>
+                    <span class="entry_type_enum_optional">[optional]</span>
+                    <span class="entry_type_enum_notes"><p>MINIMAL noise reduction is applied without reducing frame rate relative to
+sensor output.<wbr/> </p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">ZERO_SHUTTER_LAG</span>
+                    <span class="entry_type_enum_optional">[optional]</span>
+                    <span class="entry_type_enum_notes"><p>Noise reduction is applied at different levels for different output streams,<wbr/>
+based on resolution.<wbr/> Streams at maximum recording resolution (see <a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#createCaptureSession">CameraDevice#createCaptureSession</a>) or below have noise
+reduction applied,<wbr/> while higher-resolution streams have MINIMAL (if supported) or no
+noise reduction applied (if MINIMAL is not supported).<wbr/> The degree of noise reduction
+for low-resolution streams is tuned so that frame rate is not impacted,<wbr/> and the quality
+is equal to or better than FAST (since it is only applied to lower-resolution outputs,<wbr/>
+quality may improve from FAST).<wbr/></p>
+<p>This mode is intended to be used by applications operating in a zero-shutter-lag mode
+with YUV or PRIVATE reprocessing,<wbr/> where the application continuously captures
+high-resolution intermediate buffers into a circular buffer,<wbr/> from which a final image is
+produced via reprocessing when a user takes a picture.<wbr/>  For such a use case,<wbr/> the
+high-resolution buffers must not have noise reduction applied to maximize efficiency of
+preview and to avoid over-applying noise filtering when reprocessing,<wbr/> while
+low-resolution buffers (used for recording or preview,<wbr/> generally) need noise reduction
+applied for reasonable preview quality.<wbr/></p>
+<p>This mode is guaranteed to be supported by devices that support either the
+YUV_<wbr/>REPROCESSING or PRIVATE_<wbr/>REPROCESSING capabilities
+(<a href="#static_android.request.availableCapabilities">android.<wbr/>request.<wbr/>available<wbr/>Capabilities</a> lists either of those capabilities) and it will
+be the default mode for CAMERA3_<wbr/>TEMPLATE_<wbr/>ZERO_<wbr/>SHUTTER_<wbr/>LAG template.<wbr/></p></span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Mode of operation for the noise reduction algorithm.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p><a href="#static_android.noiseReduction.availableNoiseReductionModes">android.<wbr/>noise<wbr/>Reduction.<wbr/>available<wbr/>Noise<wbr/>Reduction<wbr/>Modes</a></p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_V1">V1</a></li>
+                  <li><a href="#tag_REPROC">REPROC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>The noise reduction algorithm attempts to improve image quality by removing
+excessive noise added by the capture process,<wbr/> especially in dark conditions.<wbr/></p>
+<p>OFF means no noise reduction will be applied by the camera device,<wbr/> for both raw and
+YUV domain.<wbr/></p>
+<p>MINIMAL means that only sensor raw domain basic noise reduction is enabled,<wbr/> to remove
+demosaicing or other processing artifacts.<wbr/> For YUV_<wbr/>REPROCESSING,<wbr/> MINIMAL is the same as OFF.<wbr/>
+This mode is optional,<wbr/> and may not be supported by all devices.<wbr/> The application should check
+<a href="#static_android.noiseReduction.availableNoiseReductionModes">android.<wbr/>noise<wbr/>Reduction.<wbr/>available<wbr/>Noise<wbr/>Reduction<wbr/>Modes</a> before using it.<wbr/></p>
+<p>FAST/<wbr/>HIGH_<wbr/>QUALITY both mean camera device determined noise filtering
+will be applied.<wbr/> HIGH_<wbr/>QUALITY mode indicates that the camera device
+will use the highest-quality noise filtering algorithms,<wbr/>
+even if it slows down capture rate.<wbr/> FAST means the camera device will not
+slow down capture rate when applying noise filtering.<wbr/> FAST may be the same as MINIMAL if
+MINIMAL is listed,<wbr/> or the same as OFF if any noise filtering will slow down capture rate.<wbr/>
+Every output stream will have a similar amount of enhancement applied.<wbr/></p>
+<p>ZERO_<wbr/>SHUTTER_<wbr/>LAG is meant to be used by applications that maintain a continuous circular
+buffer of high-resolution images during preview and reprocess image(s) from that buffer
+into a final capture when triggered by the user.<wbr/> In this mode,<wbr/> the camera device applies
+noise reduction to low-resolution streams (below maximum recording resolution) to maximize
+preview quality,<wbr/> but does not apply noise reduction to high-resolution streams,<wbr/> since
+those will be reprocessed later if necessary.<wbr/></p>
+<p>For YUV_<wbr/>REPROCESSING,<wbr/> these FAST/<wbr/>HIGH_<wbr/>QUALITY modes both mean that the camera device
+will apply FAST/<wbr/>HIGH_<wbr/>QUALITY YUV domain noise reduction,<wbr/> respectively.<wbr/> The camera device
+may adjust the noise reduction parameters for best image quality based on the
+<a href="#controls_android.reprocess.effectiveExposureFactor">android.<wbr/>reprocess.<wbr/>effective<wbr/>Exposure<wbr/>Factor</a> if it is set.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>For YUV_<wbr/>REPROCESSING,<wbr/> the HAL can use <a href="#controls_android.reprocess.effectiveExposureFactor">android.<wbr/>reprocess.<wbr/>effective<wbr/>Exposure<wbr/>Factor</a> to
+adjust the internal noise reduction parameters appropriately to get the best quality
+images.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+        
+
+      <!-- end of kind -->
+      </tbody>
+
+  <!-- end of section -->
+  <tr><td colspan="6" id="section_quirks" class="section">quirks</td></tr>
+
+
+      <tr><td colspan="6" class="kind">static</td></tr>
+
+      <thead class="entries_header">
+        <tr>
+          <th class="th_name">Property Name</th>
+          <th class="th_type">Type</th>
+          <th class="th_description">Description</th>
+          <th class="th_units">Units</th>
+          <th class="th_range">Range</th>
+          <th class="th_tags">Tags</th>
+        </tr>
+      </thead>
+
+      <tbody>
+
+        
+
+        
+
+        
+
+        
+
+                
+          <tr class="entry" id="static_android.quirks.meteringCropRegion">
+            <td class="entry_name
+                entry_name_deprecated
+             " rowspan="3">
+              android.<wbr/>quirks.<wbr/>metering<wbr/>Crop<wbr/>Region
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">byte</span>
+
+              <span class="entry_type_visibility"> [system]</span>
+
+
+
+              <span class="entry_type_deprecated">[deprecated] </span>
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>If set to 1,<wbr/> the camera service does not
+scale 'normalized' coordinates with respect to the crop
+region.<wbr/> This applies to metering input (a{e,<wbr/>f,<wbr/>wb}Region)
+and output (face rectangles).<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p><span class="entry_range_deprecated">Deprecated</span>. Do not use.</p>
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>Normalized coordinates refer to those in the
+(-1000,<wbr/>1000) range mentioned in the
+android.<wbr/>hardware.<wbr/>Camera API.<wbr/></p>
+<p>HAL implementations should instead always use and emit
+sensor array-relative coordinates for all region data.<wbr/> Does
+not need to be listed in static metadata.<wbr/> Support will be
+removed in future versions of camera service.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.quirks.triggerAfWithAuto">
+            <td class="entry_name
+                entry_name_deprecated
+             " rowspan="3">
+              android.<wbr/>quirks.<wbr/>trigger<wbr/>Af<wbr/>With<wbr/>Auto
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">byte</span>
+
+              <span class="entry_type_visibility"> [system]</span>
+
+
+
+              <span class="entry_type_deprecated">[deprecated] </span>
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>If set to 1,<wbr/> then the camera service always
+switches to FOCUS_<wbr/>MODE_<wbr/>AUTO before issuing a AF
+trigger.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p><span class="entry_range_deprecated">Deprecated</span>. Do not use.</p>
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>HAL implementations should implement AF trigger
+modes for AUTO,<wbr/> MACRO,<wbr/> CONTINUOUS_<wbr/>FOCUS,<wbr/> and
+CONTINUOUS_<wbr/>PICTURE modes instead of using this flag.<wbr/> Does
+not need to be listed in static metadata.<wbr/> Support will be
+removed in future versions of camera service.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.quirks.useZslFormat">
+            <td class="entry_name
+                entry_name_deprecated
+             " rowspan="3">
+              android.<wbr/>quirks.<wbr/>use<wbr/>Zsl<wbr/>Format
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">byte</span>
+
+              <span class="entry_type_visibility"> [system]</span>
+
+
+
+              <span class="entry_type_deprecated">[deprecated] </span>
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>If set to 1,<wbr/> the camera service uses
+CAMERA2_<wbr/>PIXEL_<wbr/>FORMAT_<wbr/>ZSL instead of
+HAL_<wbr/>PIXEL_<wbr/>FORMAT_<wbr/>IMPLEMENTATION_<wbr/>DEFINED for the zero
+shutter lag stream</p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p><span class="entry_range_deprecated">Deprecated</span>. Do not use.</p>
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>HAL implementations should use gralloc usage flags
+to determine that a stream will be used for
+zero-shutter-lag,<wbr/> instead of relying on an explicit
+format setting.<wbr/> Does not need to be listed in static
+metadata.<wbr/> Support will be removed in future versions of
+camera service.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.quirks.usePartialResult">
+            <td class="entry_name
+                entry_name_deprecated
+             " rowspan="5">
+              android.<wbr/>quirks.<wbr/>use<wbr/>Partial<wbr/>Result
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">byte</span>
+
+              <span class="entry_type_visibility"> [hidden]</span>
+
+
+
+              <span class="entry_type_deprecated">[deprecated] </span>
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>If set to 1,<wbr/> the HAL will always split result
+metadata for a single capture into multiple buffers,<wbr/>
+returned using multiple process_<wbr/>capture_<wbr/>result calls.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p><span class="entry_range_deprecated">Deprecated</span>. Do not use.</p>
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>Does not need to be listed in static
+metadata.<wbr/> Support for partial results will be reworked in
+future versions of camera service.<wbr/> This quirk will stop
+working at that point; DO NOT USE without careful
+consideration of future support.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>Refer to <code>camera3_<wbr/>capture_<wbr/>result::partial_<wbr/>result</code>
+for information on how to implement partial results.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+        
+
+      <!-- end of kind -->
+      </tbody>
+      <tr><td colspan="6" class="kind">dynamic</td></tr>
+
+      <thead class="entries_header">
+        <tr>
+          <th class="th_name">Property Name</th>
+          <th class="th_type">Type</th>
+          <th class="th_description">Description</th>
+          <th class="th_units">Units</th>
+          <th class="th_range">Range</th>
+          <th class="th_tags">Tags</th>
+        </tr>
+      </thead>
+
+      <tbody>
+
+        
+
+        
+
+        
+
+        
+
+                
+          <tr class="entry" id="dynamic_android.quirks.partialResult">
+            <td class="entry_name
+                entry_name_deprecated
+             " rowspan="5">
+              android.<wbr/>quirks.<wbr/>partial<wbr/>Result
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">byte</span>
+
+              <span class="entry_type_visibility"> [hidden as boolean]</span>
+
+
+
+              <span class="entry_type_deprecated">[deprecated] </span>
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">FINAL</span>
+                    <span class="entry_type_enum_notes"><p>The last or only metadata result buffer
+for this capture.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">PARTIAL</span>
+                    <span class="entry_type_enum_notes"><p>A partial buffer of result metadata for this
+capture.<wbr/> More result buffers for this capture will be sent
+by the camera device,<wbr/> the last of which will be marked
+FINAL.<wbr/></p></span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Whether a result given to the framework is the
+final one for the capture,<wbr/> or only a partial that contains a
+subset of the full set of dynamic metadata
+values.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p><span class="entry_range_deprecated">Deprecated</span>. Do not use.</p>
+              <p>Optional.<wbr/> Default value is FINAL.<wbr/></p>
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>The entries in the result metadata buffers for a
+single capture may not overlap,<wbr/> except for this entry.<wbr/> The
+FINAL buffers must retain FIFO ordering relative to the
+requests that generate them,<wbr/> so the FINAL buffer for frame 3 must
+always be sent to the framework after the FINAL buffer for frame 2,<wbr/> and
+before the FINAL buffer for frame 4.<wbr/> PARTIAL buffers may be returned
+in any order relative to other frames,<wbr/> but all PARTIAL buffers for a given
+capture must arrive before the FINAL buffer for that capture.<wbr/> This entry may
+only be used by the camera device if quirks.<wbr/>usePartialResult is set to 1.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>Refer to <code>camera3_<wbr/>capture_<wbr/>result::partial_<wbr/>result</code>
+for information on how to implement partial results.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+        
+
+      <!-- end of kind -->
+      </tbody>
+
+  <!-- end of section -->
+  <tr><td colspan="6" id="section_request" class="section">request</td></tr>
+
+
+      <tr><td colspan="6" class="kind">controls</td></tr>
+
+      <thead class="entries_header">
+        <tr>
+          <th class="th_name">Property Name</th>
+          <th class="th_type">Type</th>
+          <th class="th_description">Description</th>
+          <th class="th_units">Units</th>
+          <th class="th_range">Range</th>
+          <th class="th_tags">Tags</th>
+        </tr>
+      </thead>
+
+      <tbody>
+
+        
+
+        
+
+        
+
+        
+
+                
+          <tr class="entry" id="controls_android.request.frameCount">
+            <td class="entry_name
+                entry_name_deprecated
+             " rowspan="1">
+              android.<wbr/>request.<wbr/>frame<wbr/>Count
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int32</span>
+
+              <span class="entry_type_visibility"> [system]</span>
+
+
+
+              <span class="entry_type_deprecated">[deprecated] </span>
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>A frame counter set by the framework.<wbr/> Must
+be maintained unchanged in output frame.<wbr/> This value monotonically
+increases with every new result (that is,<wbr/> each new result has a unique
+frameCount value).<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+              incrementing integer
+            </td>
+
+            <td class="entry_range">
+              <p><span class="entry_range_deprecated">Deprecated</span>. Do not use.</p>
+              <p>Any int.<wbr/></p>
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="controls_android.request.id">
+            <td class="entry_name
+             " rowspan="1">
+              android.<wbr/>request.<wbr/>id
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int32</span>
+
+              <span class="entry_type_visibility"> [hidden]</span>
+
+
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>An application-specified ID for the current
+request.<wbr/> Must be maintained unchanged in output
+frame</p>
+            </td>
+
+            <td class="entry_units">
+              arbitrary integer assigned by application
+            </td>
+
+            <td class="entry_range">
+              <p>Any int</p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_V1">V1</a></li>
+              </ul>
+            </td>
+
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="controls_android.request.inputStreams">
+            <td class="entry_name
+                entry_name_deprecated
+             " rowspan="3">
+              android.<wbr/>request.<wbr/>input<wbr/>Streams
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int32</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  n
+                </span>
+              <span class="entry_type_visibility"> [system]</span>
+
+
+
+              <span class="entry_type_deprecated">[deprecated] </span>
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>List which camera reprocess stream is used
+for the source of reprocessing data.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+              List of camera reprocess stream IDs
+            </td>
+
+            <td class="entry_range">
+              <p><span class="entry_range_deprecated">Deprecated</span>. Do not use.</p>
+              <p>Typically,<wbr/> only one entry allowed,<wbr/> must be a valid reprocess stream ID.<wbr/></p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_HAL2">HAL2</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>Only meaningful when <a href="#controls_android.request.type">android.<wbr/>request.<wbr/>type</a> ==
+REPROCESS.<wbr/> Ignored otherwise</p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="controls_android.request.metadataMode">
+            <td class="entry_name
+             " rowspan="1">
+              android.<wbr/>request.<wbr/>metadata<wbr/>Mode
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">byte</span>
+
+              <span class="entry_type_visibility"> [system]</span>
+
+
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">NONE</span>
+                    <span class="entry_type_enum_notes"><p>No metadata should be produced on output,<wbr/> except
+for application-bound buffer data.<wbr/> If no
+application-bound streams exist,<wbr/> no frame should be
+placed in the output frame queue.<wbr/> If such streams
+exist,<wbr/> a frame should be placed on the output queue
+with null metadata but with the necessary output buffer
+information.<wbr/> Timestamp information should still be
+included with any output stream buffers</p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">FULL</span>
+                    <span class="entry_type_enum_notes"><p>All metadata should be produced.<wbr/> Statistics will
+only be produced if they are separately
+enabled</p></span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>How much metadata to produce on
+output</p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_FUTURE">FUTURE</a></li>
+              </ul>
+            </td>
+
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="controls_android.request.outputStreams">
+            <td class="entry_name
+                entry_name_deprecated
+             " rowspan="3">
+              android.<wbr/>request.<wbr/>output<wbr/>Streams
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int32</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  n
+                </span>
+              <span class="entry_type_visibility"> [system]</span>
+
+
+
+              <span class="entry_type_deprecated">[deprecated] </span>
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Lists which camera output streams image data
+from this capture must be sent to</p>
+            </td>
+
+            <td class="entry_units">
+              List of camera stream IDs
+            </td>
+
+            <td class="entry_range">
+              <p><span class="entry_range_deprecated">Deprecated</span>. Do not use.</p>
+              <p>List must only include streams that have been
+created</p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_HAL2">HAL2</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>If no output streams are listed,<wbr/> then the image
+data should simply be discarded.<wbr/> The image data must
+still be captured for metadata and statistics production,<wbr/>
+and the lens and flash must operate as requested.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="controls_android.request.type">
+            <td class="entry_name
+                entry_name_deprecated
+             " rowspan="1">
+              android.<wbr/>request.<wbr/>type
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">byte</span>
+
+              <span class="entry_type_visibility"> [system]</span>
+
+
+
+              <span class="entry_type_deprecated">[deprecated] </span>
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">CAPTURE</span>
+                    <span class="entry_type_enum_notes"><p>Capture a new image from the imaging hardware,<wbr/>
+and process it according to the
+settings</p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">REPROCESS</span>
+                    <span class="entry_type_enum_notes"><p>Process previously captured data; the
+<a href="#controls_android.request.inputStreams">android.<wbr/>request.<wbr/>input<wbr/>Streams</a> parameter determines the
+source reprocessing stream.<wbr/> TODO: Mark dynamic metadata
+needed for reprocessing with [RP]</p></span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>The type of the request; either CAPTURE or
+REPROCESS.<wbr/> For HAL3,<wbr/> this tag is redundant.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p><span class="entry_range_deprecated">Deprecated</span>. Do not use.</p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_HAL2">HAL2</a></li>
+              </ul>
+            </td>
+
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+        
+
+      <!-- end of kind -->
+      </tbody>
+      <tr><td colspan="6" class="kind">static</td></tr>
+
+      <thead class="entries_header">
+        <tr>
+          <th class="th_name">Property Name</th>
+          <th class="th_type">Type</th>
+          <th class="th_description">Description</th>
+          <th class="th_units">Units</th>
+          <th class="th_range">Range</th>
+          <th class="th_tags">Tags</th>
+        </tr>
+      </thead>
+
+      <tbody>
+
+        
+
+        
+
+        
+
+        
+
+                
+          <tr class="entry" id="static_android.request.maxNumOutputStreams">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>request.<wbr/>max<wbr/>Num<wbr/>Output<wbr/>Streams
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int32</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  3
+                </span>
+              <span class="entry_type_visibility"> [hidden]</span>
+
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>The maximum numbers of different types of output streams
+that can be configured and used simultaneously by a camera device.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p>For processed (and stalling) format streams,<wbr/> &gt;= 1.<wbr/></p>
+<p>For Raw format (either stalling or non-stalling) streams,<wbr/> &gt;= 0.<wbr/></p>
+<p>For processed (but not stalling) format streams,<wbr/> &gt;= 3
+for FULL mode devices (<code><a href="#static_android.info.supportedHardwareLevel">android.<wbr/>info.<wbr/>supported<wbr/>Hardware<wbr/>Level</a> == FULL</code>);
+&gt;= 2 for LIMITED mode devices (<code><a href="#static_android.info.supportedHardwareLevel">android.<wbr/>info.<wbr/>supported<wbr/>Hardware<wbr/>Level</a> == LIMITED</code>).<wbr/></p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_BC">BC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>This is a 3 element tuple that contains the max number of output simultaneous
+streams for raw sensor,<wbr/> processed (but not stalling),<wbr/> and processed (and stalling)
+formats respectively.<wbr/> For example,<wbr/> assuming that JPEG is typically a processed and
+stalling stream,<wbr/> if max raw sensor format output stream number is 1,<wbr/> max YUV streams
+number is 3,<wbr/> and max JPEG stream number is 2,<wbr/> then this tuple should be <code>(1,<wbr/> 3,<wbr/> 2)</code>.<wbr/></p>
+<p>This lists the upper bound of the number of output streams supported by
+the camera device.<wbr/> Using more streams simultaneously may require more hardware and
+CPU resources that will consume more power.<wbr/> The image format for an output stream can
+be any supported format provided by <a href="#static_android.scaler.availableStreamConfigurations">android.<wbr/>scaler.<wbr/>available<wbr/>Stream<wbr/>Configurations</a>.<wbr/>
+The formats defined in <a href="#static_android.scaler.availableStreamConfigurations">android.<wbr/>scaler.<wbr/>available<wbr/>Stream<wbr/>Configurations</a> can be categorized
+into the 3 stream types as below:</p>
+<ul>
+<li>Processed (and stalling): any non-RAW format with a stallDurations &gt; 0.<wbr/>
+  Typically <a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#JPEG">JPEG format</a>.<wbr/></li>
+<li>Raw formats: <a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#RAW_SENSOR">RAW_<wbr/>SENSOR</a>,<wbr/> <a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#RAW10">RAW10</a>,<wbr/> or <a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#RAW12">RAW12</a>.<wbr/></li>
+<li>Processed (but not-stalling): any non-RAW format without a stall duration.<wbr/>
+  Typically <a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#YUV_420_888">YUV_<wbr/>420_<wbr/>888</a>,<wbr/>
+  <a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#NV21">NV21</a>,<wbr/> or
+  <a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#YV12">YV12</a>.<wbr/></li>
+</ul>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.request.maxNumOutputRaw">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>request.<wbr/>max<wbr/>Num<wbr/>Output<wbr/>Raw
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int32</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+              <span class="entry_type_synthetic">[synthetic] </span>
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>The maximum numbers of different types of output streams
+that can be configured and used simultaneously by a camera device
+for any <code>RAW</code> formats.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p>&gt;= 0</p>
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>This value contains the max number of output simultaneous
+streams from the raw sensor.<wbr/></p>
+<p>This lists the upper bound of the number of output streams supported by
+the camera device.<wbr/> Using more streams simultaneously may require more hardware and
+CPU resources that will consume more power.<wbr/> The image format for this kind of an output stream can
+be any <code>RAW</code> and supported format provided by <a href="#static_android.scaler.streamConfigurationMap">android.<wbr/>scaler.<wbr/>stream<wbr/>Configuration<wbr/>Map</a>.<wbr/></p>
+<p>In particular,<wbr/> a <code>RAW</code> format is typically one of:</p>
+<ul>
+<li><a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#RAW_SENSOR">RAW_<wbr/>SENSOR</a></li>
+<li><a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#RAW10">RAW10</a></li>
+<li><a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#RAW12">RAW12</a></li>
+</ul>
+<p>LEGACY mode devices (<a href="#static_android.info.supportedHardwareLevel">android.<wbr/>info.<wbr/>supported<wbr/>Hardware<wbr/>Level</a> <code>==</code> LEGACY)
+never support raw streams.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.request.maxNumOutputProc">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>request.<wbr/>max<wbr/>Num<wbr/>Output<wbr/>Proc
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int32</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+              <span class="entry_type_synthetic">[synthetic] </span>
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>The maximum numbers of different types of output streams
+that can be configured and used simultaneously by a camera device
+for any processed (but not-stalling) formats.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p>&gt;= 3
+for FULL mode devices (<code><a href="#static_android.info.supportedHardwareLevel">android.<wbr/>info.<wbr/>supported<wbr/>Hardware<wbr/>Level</a> == FULL</code>);
+&gt;= 2 for LIMITED mode devices (<code><a href="#static_android.info.supportedHardwareLevel">android.<wbr/>info.<wbr/>supported<wbr/>Hardware<wbr/>Level</a> == LIMITED</code>).<wbr/></p>
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>This value contains the max number of output simultaneous
+streams for any processed (but not-stalling) formats.<wbr/></p>
+<p>This lists the upper bound of the number of output streams supported by
+the camera device.<wbr/> Using more streams simultaneously may require more hardware and
+CPU resources that will consume more power.<wbr/> The image format for this kind of an output stream can
+be any non-<code>RAW</code> and supported format provided by <a href="#static_android.scaler.streamConfigurationMap">android.<wbr/>scaler.<wbr/>stream<wbr/>Configuration<wbr/>Map</a>.<wbr/></p>
+<p>Processed (but not-stalling) is defined as any non-RAW format without a stall duration.<wbr/>
+Typically:</p>
+<ul>
+<li><a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#YUV_420_888">YUV_<wbr/>420_<wbr/>888</a></li>
+<li><a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#NV21">NV21</a></li>
+<li><a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#YV12">YV12</a></li>
+<li>Implementation-defined formats,<wbr/> i.<wbr/>e.<wbr/> <a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#isOutputSupportedFor(Class)">StreamConfigurationMap#isOutputSupportedFor(Class)</a></li>
+</ul>
+<p>For full guarantees,<wbr/> query <a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getOutputStallDuration">StreamConfigurationMap#getOutputStallDuration</a> with a
+processed format -- it will return 0 for a non-stalling stream.<wbr/></p>
+<p>LEGACY devices will support at least 2 processing/<wbr/>non-stalling streams.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.request.maxNumOutputProcStalling">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>request.<wbr/>max<wbr/>Num<wbr/>Output<wbr/>Proc<wbr/>Stalling
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int32</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+              <span class="entry_type_synthetic">[synthetic] </span>
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>The maximum numbers of different types of output streams
+that can be configured and used simultaneously by a camera device
+for any processed (and stalling) formats.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p>&gt;= 1</p>
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>This value contains the max number of output simultaneous
+streams for any processed (and stalling) formats.<wbr/></p>
+<p>This lists the upper bound of the number of output streams supported by
+the camera device.<wbr/> Using more streams simultaneously may require more hardware and
+CPU resources that will consume more power.<wbr/> The image format for this kind of an output stream can
+be any non-<code>RAW</code> and supported format provided by <a href="#static_android.scaler.streamConfigurationMap">android.<wbr/>scaler.<wbr/>stream<wbr/>Configuration<wbr/>Map</a>.<wbr/></p>
+<p>A processed and stalling format is defined as any non-RAW format with a stallDurations
+&gt; 0.<wbr/>  Typically only the <a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#JPEG">JPEG format</a> is a
+stalling format.<wbr/></p>
+<p>For full guarantees,<wbr/> query <a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getOutputStallDuration">StreamConfigurationMap#getOutputStallDuration</a> with a
+processed format -- it will return a non-0 value for a stalling stream.<wbr/></p>
+<p>LEGACY devices will support up to 1 processing/<wbr/>stalling stream.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.request.maxNumReprocessStreams">
+            <td class="entry_name
+                entry_name_deprecated
+             " rowspan="3">
+              android.<wbr/>request.<wbr/>max<wbr/>Num<wbr/>Reprocess<wbr/>Streams
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int32</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  1
+                </span>
+              <span class="entry_type_visibility"> [system]</span>
+
+
+
+              <span class="entry_type_deprecated">[deprecated] </span>
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>How many reprocessing streams of any type
+can be allocated at the same time.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p><span class="entry_range_deprecated">Deprecated</span>. Do not use.</p>
+              <p>&gt;= 0</p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_HAL2">HAL2</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>Only used by HAL2.<wbr/>x.<wbr/></p>
+<p>When set to 0,<wbr/> it means no reprocess stream is supported.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.request.maxNumInputStreams">
+            <td class="entry_name
+             " rowspan="5">
+              android.<wbr/>request.<wbr/>max<wbr/>Num<wbr/>Input<wbr/>Streams
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int32</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+              <span class="entry_type_hwlevel">[full] </span>
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>The maximum numbers of any type of input streams
+that can be configured and used simultaneously by a camera device.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p>0 or 1.<wbr/></p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_REPROC">REPROC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>When set to 0,<wbr/> it means no input stream is supported.<wbr/></p>
+<p>The image format for a input stream can be any supported format returned by <a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getInputFormats">StreamConfigurationMap#getInputFormats</a>.<wbr/> When using an
+input stream,<wbr/> there must be at least one output stream configured to receive the
+reprocessed images.<wbr/></p>
+<p>When an input stream and some output streams are used in a reprocessing request,<wbr/>
+only the input buffer will be used to produce these output stream buffers,<wbr/> and a
+new sensor image will not be captured.<wbr/></p>
+<p>For example,<wbr/> for Zero Shutter Lag (ZSL) still capture use case,<wbr/> the input
+stream image format will be PRIVATE,<wbr/> the associated output stream image format
+should be JPEG.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>For the reprocessing flow and controls,<wbr/> see
+hardware/<wbr/>libhardware/<wbr/>include/<wbr/>hardware/<wbr/>camera3.<wbr/>h Section 10 for more details.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.request.pipelineMaxDepth">
+            <td class="entry_name
+             " rowspan="5">
+              android.<wbr/>request.<wbr/>pipeline<wbr/>Max<wbr/>Depth
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">byte</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Specifies the number of maximum pipeline stages a frame
+has to go through from when it's exposed to when it's available
+to the framework.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>A typical minimum value for this is 2 (one stage to expose,<wbr/>
+one stage to readout) from the sensor.<wbr/> The ISP then usually adds
+its own stages to do custom HW processing.<wbr/> Further stages may be
+added by SW processing.<wbr/></p>
+<p>Depending on what settings are used (e.<wbr/>g.<wbr/> YUV,<wbr/> JPEG) and what
+processing is enabled (e.<wbr/>g.<wbr/> face detection),<wbr/> the actual pipeline
+depth (specified by <a href="#dynamic_android.request.pipelineDepth">android.<wbr/>request.<wbr/>pipeline<wbr/>Depth</a>) may be less than
+the max pipeline depth.<wbr/></p>
+<p>A pipeline depth of X stages is equivalent to a pipeline latency of
+X frame intervals.<wbr/></p>
+<p>This value will normally be 8 or less,<wbr/> however,<wbr/> for high speed capture session,<wbr/>
+the max pipeline depth will be up to 8 x size of high speed capture request list.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>This value should be 4 or less,<wbr/> except for the high speed recording session,<wbr/> where the
+max batch sizes may be larger than 1.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.request.partialResultCount">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>request.<wbr/>partial<wbr/>Result<wbr/>Count
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int32</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Defines how many sub-components
+a result will be composed of.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p>&gt;= 1</p>
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>In order to combat the pipeline latency,<wbr/> partial results
+may be delivered to the application layer from the camera device as
+soon as they are available.<wbr/></p>
+<p>Optional; defaults to 1.<wbr/> A value of 1 means that partial
+results are not supported,<wbr/> and only the final TotalCaptureResult will
+be produced by the camera device.<wbr/></p>
+<p>A typical use case for this might be: after requesting an
+auto-focus (AF) lock the new AF state might be available 50%
+of the way through the pipeline.<wbr/>  The camera device could
+then immediately dispatch this state via a partial result to
+the application,<wbr/> and the rest of the metadata via later
+partial results.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.request.availableCapabilities">
+            <td class="entry_name
+             " rowspan="5">
+              android.<wbr/>request.<wbr/>available<wbr/>Capabilities
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">byte</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  n
+                </span>
+              <span class="entry_type_visibility"> [public]</span>
+
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">BACKWARD_COMPATIBLE</span>
+                    <span class="entry_type_enum_notes"><p>The minimal set of capabilities that every camera
+device (regardless of <a href="#static_android.info.supportedHardwareLevel">android.<wbr/>info.<wbr/>supported<wbr/>Hardware<wbr/>Level</a>)
+supports.<wbr/></p>
+<p>This capability is listed by all normal devices,<wbr/> and
+indicates that the camera device has a feature set
+that's comparable to the baseline requirements for the
+older android.<wbr/>hardware.<wbr/>Camera API.<wbr/></p>
+<p>Devices with the DEPTH_<wbr/>OUTPUT capability might not list this
+capability,<wbr/> indicating that they support only depth measurement,<wbr/>
+not standard color output.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">MANUAL_SENSOR</span>
+                    <span class="entry_type_enum_optional">[optional]</span>
+                    <span class="entry_type_enum_notes"><p>The camera device can be manually controlled (3A algorithms such
+as auto-exposure,<wbr/> and auto-focus can be bypassed).<wbr/>
+The camera device supports basic manual control of the sensor image
+acquisition related stages.<wbr/> This means the following controls are
+guaranteed to be supported:</p>
+<ul>
+<li>Manual frame duration control<ul>
+<li><a href="#controls_android.sensor.frameDuration">android.<wbr/>sensor.<wbr/>frame<wbr/>Duration</a></li>
+<li><a href="#static_android.sensor.info.maxFrameDuration">android.<wbr/>sensor.<wbr/>info.<wbr/>max<wbr/>Frame<wbr/>Duration</a></li>
+</ul>
+</li>
+<li>Manual exposure control<ul>
+<li><a href="#controls_android.sensor.exposureTime">android.<wbr/>sensor.<wbr/>exposure<wbr/>Time</a></li>
+<li><a href="#static_android.sensor.info.exposureTimeRange">android.<wbr/>sensor.<wbr/>info.<wbr/>exposure<wbr/>Time<wbr/>Range</a></li>
+</ul>
+</li>
+<li>Manual sensitivity control<ul>
+<li><a href="#controls_android.sensor.sensitivity">android.<wbr/>sensor.<wbr/>sensitivity</a></li>
+<li><a href="#static_android.sensor.info.sensitivityRange">android.<wbr/>sensor.<wbr/>info.<wbr/>sensitivity<wbr/>Range</a></li>
+</ul>
+</li>
+<li>Manual lens control (if the lens is adjustable)<ul>
+<li>android.<wbr/>lens.<wbr/>*</li>
+</ul>
+</li>
+<li>Manual flash control (if a flash unit is present)<ul>
+<li>android.<wbr/>flash.<wbr/>*</li>
+</ul>
+</li>
+<li>Manual black level locking<ul>
+<li><a href="#controls_android.blackLevel.lock">android.<wbr/>black<wbr/>Level.<wbr/>lock</a></li>
+</ul>
+</li>
+<li>Auto exposure lock<ul>
+<li><a href="#controls_android.control.aeLock">android.<wbr/>control.<wbr/>ae<wbr/>Lock</a></li>
+</ul>
+</li>
+</ul>
+<p>If any of the above 3A algorithms are enabled,<wbr/> then the camera
+device will accurately report the values applied by 3A in the
+result.<wbr/></p>
+<p>A given camera device may also support additional manual sensor controls,<wbr/>
+but this capability only covers the above list of controls.<wbr/></p>
+<p>If this is supported,<wbr/> <a href="#static_android.scaler.streamConfigurationMap">android.<wbr/>scaler.<wbr/>stream<wbr/>Configuration<wbr/>Map</a> will
+additionally return a min frame duration that is greater than
+zero for each supported size-format combination.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">MANUAL_POST_PROCESSING</span>
+                    <span class="entry_type_enum_optional">[optional]</span>
+                    <span class="entry_type_enum_notes"><p>The camera device post-processing stages can be manually controlled.<wbr/>
+The camera device supports basic manual control of the image post-processing
+stages.<wbr/> This means the following controls are guaranteed to be supported:</p>
+<ul>
+<li>
+<p>Manual tonemap control</p>
+<ul>
+<li><a href="#controls_android.tonemap.curve">android.<wbr/>tonemap.<wbr/>curve</a></li>
+<li><a href="#controls_android.tonemap.mode">android.<wbr/>tonemap.<wbr/>mode</a></li>
+<li><a href="#static_android.tonemap.maxCurvePoints">android.<wbr/>tonemap.<wbr/>max<wbr/>Curve<wbr/>Points</a></li>
+<li><a href="#controls_android.tonemap.gamma">android.<wbr/>tonemap.<wbr/>gamma</a></li>
+<li><a href="#controls_android.tonemap.presetCurve">android.<wbr/>tonemap.<wbr/>preset<wbr/>Curve</a></li>
+</ul>
+</li>
+<li>
+<p>Manual white balance control</p>
+<ul>
+<li><a href="#controls_android.colorCorrection.transform">android.<wbr/>color<wbr/>Correction.<wbr/>transform</a></li>
+<li><a href="#controls_android.colorCorrection.gains">android.<wbr/>color<wbr/>Correction.<wbr/>gains</a></li>
+</ul>
+</li>
+<li>Manual lens shading map control<ul>
+<li><a href="#controls_android.shading.mode">android.<wbr/>shading.<wbr/>mode</a></li>
+<li><a href="#controls_android.statistics.lensShadingMapMode">android.<wbr/>statistics.<wbr/>lens<wbr/>Shading<wbr/>Map<wbr/>Mode</a></li>
+<li><a href="#dynamic_android.statistics.lensShadingMap">android.<wbr/>statistics.<wbr/>lens<wbr/>Shading<wbr/>Map</a></li>
+<li><a href="#static_android.lens.info.shadingMapSize">android.<wbr/>lens.<wbr/>info.<wbr/>shading<wbr/>Map<wbr/>Size</a></li>
+</ul>
+</li>
+<li>Manual aberration correction control (if aberration correction is supported)<ul>
+<li><a href="#controls_android.colorCorrection.aberrationMode">android.<wbr/>color<wbr/>Correction.<wbr/>aberration<wbr/>Mode</a></li>
+<li><a href="#static_android.colorCorrection.availableAberrationModes">android.<wbr/>color<wbr/>Correction.<wbr/>available<wbr/>Aberration<wbr/>Modes</a></li>
+</ul>
+</li>
+<li>Auto white balance lock<ul>
+<li><a href="#controls_android.control.awbLock">android.<wbr/>control.<wbr/>awb<wbr/>Lock</a></li>
+</ul>
+</li>
+</ul>
+<p>If auto white balance is enabled,<wbr/> then the camera device
+will accurately report the values applied by AWB in the result.<wbr/></p>
+<p>A given camera device may also support additional post-processing
+controls,<wbr/> but this capability only covers the above list of controls.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">RAW</span>
+                    <span class="entry_type_enum_optional">[optional]</span>
+                    <span class="entry_type_enum_notes"><p>The camera device supports outputting RAW buffers and
+metadata for interpreting them.<wbr/></p>
+<p>Devices supporting the RAW capability allow both for
+saving DNG files,<wbr/> and for direct application processing of
+raw sensor images.<wbr/></p>
+<ul>
+<li>RAW_<wbr/>SENSOR is supported as an output format.<wbr/></li>
+<li>The maximum available resolution for RAW_<wbr/>SENSOR streams
+  will match either the value in
+  <a href="#static_android.sensor.info.pixelArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>pixel<wbr/>Array<wbr/>Size</a> or
+  <a href="#static_android.sensor.info.preCorrectionActiveArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>pre<wbr/>Correction<wbr/>Active<wbr/>Array<wbr/>Size</a>.<wbr/></li>
+<li>All DNG-related optional metadata entries are provided
+  by the camera device.<wbr/></li>
+</ul></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">PRIVATE_REPROCESSING</span>
+                    <span class="entry_type_enum_optional">[optional]</span>
+                    <span class="entry_type_enum_notes"><p>The camera device supports the Zero Shutter Lag reprocessing use case.<wbr/></p>
+<ul>
+<li>One input stream is supported,<wbr/> that is,<wbr/> <code><a href="#static_android.request.maxNumInputStreams">android.<wbr/>request.<wbr/>max<wbr/>Num<wbr/>Input<wbr/>Streams</a> == 1</code>.<wbr/></li>
+<li><a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#PRIVATE">ImageFormat#PRIVATE</a> is supported as an output/<wbr/>input format,<wbr/>
+  that is,<wbr/> <a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#PRIVATE">ImageFormat#PRIVATE</a> is included in the lists of
+  formats returned by <a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getInputFormats">StreamConfigurationMap#getInputFormats</a> and <a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getOutputFormats">StreamConfigurationMap#getOutputFormats</a>.<wbr/></li>
+<li><a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getValidOutputFormatsForInput">StreamConfigurationMap#getValidOutputFormatsForInput</a>
+  returns non-empty int[] for each supported input format returned by <a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getInputFormats">StreamConfigurationMap#getInputFormats</a>.<wbr/></li>
+<li>Each size returned by <a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getInputSizes">getInputSizes(ImageFormat.<wbr/>PRIVATE)</a> is also included in <a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getOutputSizes">getOutputSizes(ImageFormat.<wbr/>PRIVATE)</a></li>
+<li>Using <a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#PRIVATE">ImageFormat#PRIVATE</a> does not cause a frame rate drop
+  relative to the sensor's maximum capture rate (at that resolution).<wbr/></li>
+<li><a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#PRIVATE">ImageFormat#PRIVATE</a> will be reprocessable into both
+  <a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#YUV_420_888">Image<wbr/>Format#YUV_<wbr/>420_<wbr/>888</a> and
+  <a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#JPEG">ImageFormat#JPEG</a> formats.<wbr/></li>
+<li>The maximum available resolution for PRIVATE streams
+  (both input/<wbr/>output) will match the maximum available
+  resolution of JPEG streams.<wbr/></li>
+<li>Static metadata <a href="#static_android.reprocess.maxCaptureStall">android.<wbr/>reprocess.<wbr/>max<wbr/>Capture<wbr/>Stall</a>.<wbr/></li>
+<li>Only below controls are effective for reprocessing requests and
+  will be present in capture results,<wbr/> other controls in reprocess
+  requests will be ignored by the camera device.<wbr/><ul>
+<li>android.<wbr/>jpeg.<wbr/>*</li>
+<li><a href="#controls_android.noiseReduction.mode">android.<wbr/>noise<wbr/>Reduction.<wbr/>mode</a></li>
+<li><a href="#controls_android.edge.mode">android.<wbr/>edge.<wbr/>mode</a></li>
+</ul>
+</li>
+<li><a href="#static_android.noiseReduction.availableNoiseReductionModes">android.<wbr/>noise<wbr/>Reduction.<wbr/>available<wbr/>Noise<wbr/>Reduction<wbr/>Modes</a> and
+  <a href="#static_android.edge.availableEdgeModes">android.<wbr/>edge.<wbr/>available<wbr/>Edge<wbr/>Modes</a> will both list ZERO_<wbr/>SHUTTER_<wbr/>LAG as a supported mode.<wbr/></li>
+</ul></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">READ_SENSOR_SETTINGS</span>
+                    <span class="entry_type_enum_optional">[optional]</span>
+                    <span class="entry_type_enum_notes"><p>The camera device supports accurately reporting the sensor settings for many of
+the sensor controls while the built-in 3A algorithm is running.<wbr/>  This allows
+reporting of sensor settings even when these settings cannot be manually changed.<wbr/></p>
+<p>The values reported for the following controls are guaranteed to be available
+in the CaptureResult,<wbr/> including when 3A is enabled:</p>
+<ul>
+<li>Exposure control<ul>
+<li><a href="#controls_android.sensor.exposureTime">android.<wbr/>sensor.<wbr/>exposure<wbr/>Time</a></li>
+</ul>
+</li>
+<li>Sensitivity control<ul>
+<li><a href="#controls_android.sensor.sensitivity">android.<wbr/>sensor.<wbr/>sensitivity</a></li>
+</ul>
+</li>
+<li>Lens controls (if the lens is adjustable)<ul>
+<li><a href="#controls_android.lens.focusDistance">android.<wbr/>lens.<wbr/>focus<wbr/>Distance</a></li>
+<li><a href="#controls_android.lens.aperture">android.<wbr/>lens.<wbr/>aperture</a></li>
+</ul>
+</li>
+</ul>
+<p>This capability is a subset of the MANUAL_<wbr/>SENSOR control capability,<wbr/> and will
+always be included if the MANUAL_<wbr/>SENSOR capability is available.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">BURST_CAPTURE</span>
+                    <span class="entry_type_enum_optional">[optional]</span>
+                    <span class="entry_type_enum_notes"><p>The camera device supports capturing high-resolution images at &gt;= 20 frames per
+second,<wbr/> in at least the uncompressed YUV format,<wbr/> when post-processing settings are set
+to FAST.<wbr/> Additionally,<wbr/> maximum-resolution images can be captured at &gt;= 10 frames
+per second.<wbr/>  Here,<wbr/> 'high resolution' means at least 8 megapixels,<wbr/> or the maximum
+resolution of the device,<wbr/> whichever is smaller.<wbr/></p>
+<p>More specifically,<wbr/> this means that a size matching the camera device's active array
+size is listed as a supported size for the <a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#YUV_420_888">Image<wbr/>Format#YUV_<wbr/>420_<wbr/>888</a> format in either <a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getOutputSizes">StreamConfigurationMap#getOutputSizes</a> or <a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getHighResolutionOutputSizes">StreamConfigurationMap#getHighResolutionOutputSizes</a>,<wbr/>
+with a minimum frame duration for that format and size of either &lt;= 1/<wbr/>20 s,<wbr/> or
+&lt;= 1/<wbr/>10 s,<wbr/> respectively; and the <a href="#static_android.control.aeAvailableTargetFpsRanges">android.<wbr/>control.<wbr/>ae<wbr/>Available<wbr/>Target<wbr/>Fps<wbr/>Ranges</a> entry
+lists at least one FPS range where the minimum FPS is &gt;= 1 /<wbr/> minimumFrameDuration
+for the maximum-size YUV_<wbr/>420_<wbr/>888 format.<wbr/>  If that maximum size is listed in <a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getHighResolutionOutputSizes">StreamConfigurationMap#getHighResolutionOutputSizes</a>,<wbr/>
+then the list of resolutions for YUV_<wbr/>420_<wbr/>888 from <a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getOutputSizes">StreamConfigurationMap#getOutputSizes</a> contains at
+least one resolution &gt;= 8 megapixels,<wbr/> with a minimum frame duration of &lt;= 1/<wbr/>20
+s.<wbr/></p>
+<p>If the device supports the <a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#RAW10">ImageFormat#RAW10</a>,<wbr/> <a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#RAW12">ImageFormat#RAW12</a>,<wbr/> then those can also be captured at the same rate
+as the maximum-size YUV_<wbr/>420_<wbr/>888 resolution is.<wbr/></p>
+<p>If the device supports the PRIVATE_<wbr/>REPROCESSING capability,<wbr/> then the same guarantees
+as for the YUV_<wbr/>420_<wbr/>888 format also apply to the <a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#PRIVATE">ImageFormat#PRIVATE</a> format.<wbr/></p>
+<p>In addition,<wbr/> the <a href="#static_android.sync.maxLatency">android.<wbr/>sync.<wbr/>max<wbr/>Latency</a> field is guaranteed to have a value between 0
+and 4,<wbr/> inclusive.<wbr/> <a href="#static_android.control.aeLockAvailable">android.<wbr/>control.<wbr/>ae<wbr/>Lock<wbr/>Available</a> and <a href="#static_android.control.awbLockAvailable">android.<wbr/>control.<wbr/>awb<wbr/>Lock<wbr/>Available</a>
+are also guaranteed to be <code>true</code> so burst capture with these two locks ON yields
+consistent image output.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">YUV_REPROCESSING</span>
+                    <span class="entry_type_enum_optional">[optional]</span>
+                    <span class="entry_type_enum_notes"><p>The camera device supports the YUV_<wbr/>420_<wbr/>888 reprocessing use case,<wbr/> similar to
+PRIVATE_<wbr/>REPROCESSING.<wbr/> This capability requires the camera device to support the
+following:</p>
+<ul>
+<li>One input stream is supported,<wbr/> that is,<wbr/> <code><a href="#static_android.request.maxNumInputStreams">android.<wbr/>request.<wbr/>max<wbr/>Num<wbr/>Input<wbr/>Streams</a> == 1</code>.<wbr/></li>
+<li><a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#YUV_420_888">Image<wbr/>Format#YUV_<wbr/>420_<wbr/>888</a> is supported as an output/<wbr/>input format,<wbr/> that is,<wbr/>
+  YUV_<wbr/>420_<wbr/>888 is included in the lists of formats returned by
+  <a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getInputFormats">StreamConfigurationMap#getInputFormats</a> and
+  <a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getOutputFormats">StreamConfigurationMap#getOutputFormats</a>.<wbr/></li>
+<li><a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getValidOutputFormatsForInput">StreamConfigurationMap#getValidOutputFormatsForInput</a>
+  returns non-empty int[] for each supported input format returned by <a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getInputFormats">StreamConfigurationMap#getInputFormats</a>.<wbr/></li>
+<li>Each size returned by <a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getInputSizes">get<wbr/>Input<wbr/>Sizes(YUV_<wbr/>420_<wbr/>888)</a> is also included in <a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getOutputSizes">get<wbr/>Output<wbr/>Sizes(YUV_<wbr/>420_<wbr/>888)</a></li>
+<li>Using <a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#YUV_420_888">Image<wbr/>Format#YUV_<wbr/>420_<wbr/>888</a> does not cause a frame rate drop
+  relative to the sensor's maximum capture rate (at that resolution).<wbr/></li>
+<li><a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#YUV_420_888">Image<wbr/>Format#YUV_<wbr/>420_<wbr/>888</a> will be reprocessable into both
+  <a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#YUV_420_888">Image<wbr/>Format#YUV_<wbr/>420_<wbr/>888</a> and <a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#JPEG">ImageFormat#JPEG</a> formats.<wbr/></li>
+<li>The maximum available resolution for <a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#YUV_420_888">Image<wbr/>Format#YUV_<wbr/>420_<wbr/>888</a> streams (both input/<wbr/>output) will match the
+  maximum available resolution of <a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#JPEG">ImageFormat#JPEG</a> streams.<wbr/></li>
+<li>Static metadata <a href="#static_android.reprocess.maxCaptureStall">android.<wbr/>reprocess.<wbr/>max<wbr/>Capture<wbr/>Stall</a>.<wbr/></li>
+<li>Only the below controls are effective for reprocessing requests and will be present
+  in capture results.<wbr/> The reprocess requests are from the original capture results that
+  are associated with the intermediate <a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#YUV_420_888">Image<wbr/>Format#YUV_<wbr/>420_<wbr/>888</a>
+  output buffers.<wbr/>  All other controls in the reprocess requests will be ignored by the
+  camera device.<wbr/><ul>
+<li>android.<wbr/>jpeg.<wbr/>*</li>
+<li><a href="#controls_android.noiseReduction.mode">android.<wbr/>noise<wbr/>Reduction.<wbr/>mode</a></li>
+<li><a href="#controls_android.edge.mode">android.<wbr/>edge.<wbr/>mode</a></li>
+<li><a href="#controls_android.reprocess.effectiveExposureFactor">android.<wbr/>reprocess.<wbr/>effective<wbr/>Exposure<wbr/>Factor</a></li>
+</ul>
+</li>
+<li><a href="#static_android.noiseReduction.availableNoiseReductionModes">android.<wbr/>noise<wbr/>Reduction.<wbr/>available<wbr/>Noise<wbr/>Reduction<wbr/>Modes</a> and
+  <a href="#static_android.edge.availableEdgeModes">android.<wbr/>edge.<wbr/>available<wbr/>Edge<wbr/>Modes</a> will both list ZERO_<wbr/>SHUTTER_<wbr/>LAG as a supported mode.<wbr/></li>
+</ul></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">DEPTH_OUTPUT</span>
+                    <span class="entry_type_enum_optional">[optional]</span>
+                    <span class="entry_type_enum_notes"><p>The camera device can produce depth measurements from its field of view.<wbr/></p>
+<p>This capability requires the camera device to support the following:</p>
+<ul>
+<li><a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#DEPTH16">ImageFormat#DEPTH16</a> is supported as an output format.<wbr/></li>
+<li><a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#DEPTH_POINT_CLOUD">Image<wbr/>Format#DEPTH_<wbr/>POINT_<wbr/>CLOUD</a> is optionally supported as an
+  output format.<wbr/></li>
+<li>This camera device,<wbr/> and all camera devices with the same <a href="#static_android.lens.facing">android.<wbr/>lens.<wbr/>facing</a>,<wbr/>
+  will list the following calibration entries in both
+  <a href="https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics.html">CameraCharacteristics</a> and
+  <a href="https://developer.android.com/reference/android/hardware/camera2/CaptureResult.html">CaptureResult</a>:<ul>
+<li><a href="#static_android.lens.poseTranslation">android.<wbr/>lens.<wbr/>pose<wbr/>Translation</a></li>
+<li><a href="#static_android.lens.poseRotation">android.<wbr/>lens.<wbr/>pose<wbr/>Rotation</a></li>
+<li><a href="#static_android.lens.intrinsicCalibration">android.<wbr/>lens.<wbr/>intrinsic<wbr/>Calibration</a></li>
+<li><a href="#static_android.lens.radialDistortion">android.<wbr/>lens.<wbr/>radial<wbr/>Distortion</a></li>
+</ul>
+</li>
+<li>The <a href="#static_android.depth.depthIsExclusive">android.<wbr/>depth.<wbr/>depth<wbr/>Is<wbr/>Exclusive</a> entry is listed by this device.<wbr/></li>
+<li>A LIMITED camera with only the DEPTH_<wbr/>OUTPUT capability does not have to support
+  normal YUV_<wbr/>420_<wbr/>888,<wbr/> JPEG,<wbr/> and PRIV-format outputs.<wbr/> It only has to support the DEPTH16
+  format.<wbr/></li>
+</ul>
+<p>Generally,<wbr/> depth output operates at a slower frame rate than standard color capture,<wbr/>
+so the DEPTH16 and DEPTH_<wbr/>POINT_<wbr/>CLOUD formats will commonly have a stall duration that
+should be accounted for (see
+<a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getOutputStallDuration">StreamConfigurationMap#getOutputStallDuration</a>).<wbr/>
+On a device that supports both depth and color-based output,<wbr/> to enable smooth preview,<wbr/>
+using a repeating burst is recommended,<wbr/> where a depth-output target is only included
+once every N frames,<wbr/> where N is the ratio between preview output rate and depth output
+rate,<wbr/> including depth stall time.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">CONSTRAINED_HIGH_SPEED_VIDEO</span>
+                    <span class="entry_type_enum_optional">[optional]</span>
+                    <span class="entry_type_enum_notes"><p>The device supports constrained high speed video recording (frame rate &gt;=120fps)
+use case.<wbr/> The camera device will support high speed capture session created by
+<a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#createConstrainedHighSpeedCaptureSession">CameraDevice#createConstrainedHighSpeedCaptureSession</a>,<wbr/> which
+only accepts high speed request lists created by
+<a href="https://developer.android.com/reference/android/hardware/camera2/CameraConstrainedHighSpeedCaptureSession.html#createHighSpeedRequestList">CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList</a>.<wbr/></p>
+<p>A camera device can still support high speed video streaming by advertising the high speed
+FPS ranges in <a href="#static_android.control.aeAvailableTargetFpsRanges">android.<wbr/>control.<wbr/>ae<wbr/>Available<wbr/>Target<wbr/>Fps<wbr/>Ranges</a>.<wbr/> For this case,<wbr/> all normal
+capture request per frame control and synchronization requirements will apply to
+the high speed fps ranges,<wbr/> the same as all other fps ranges.<wbr/> This capability describes
+the capability of a specialized operating mode with many limitations (see below),<wbr/> which
+is only targeted at high speed video recording.<wbr/></p>
+<p>The supported high speed video sizes and fps ranges are specified in
+<a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getHighSpeedVideoFpsRanges">StreamConfigurationMap#getHighSpeedVideoFpsRanges</a>.<wbr/>
+To get desired output frame rates,<wbr/> the application is only allowed to select video size
+and FPS range combinations provided by
+<a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getHighSpeedVideoSizes">StreamConfigurationMap#getHighSpeedVideoSizes</a>.<wbr/>
+The fps range can be controlled via <a href="#controls_android.control.aeTargetFpsRange">android.<wbr/>control.<wbr/>ae<wbr/>Target<wbr/>Fps<wbr/>Range</a>.<wbr/></p>
+<p>In this capability,<wbr/> the camera device will override aeMode,<wbr/> awbMode,<wbr/> and afMode to
+ON,<wbr/> AUTO,<wbr/> and CONTINUOUS_<wbr/>VIDEO,<wbr/> respectively.<wbr/> All post-processing block mode
+controls will be overridden to be FAST.<wbr/> Therefore,<wbr/> no manual control of capture
+and post-processing parameters is possible.<wbr/> All other controls operate the
+same as when <a href="#controls_android.control.mode">android.<wbr/>control.<wbr/>mode</a> == AUTO.<wbr/> This means that all other
+android.<wbr/>control.<wbr/>* fields continue to work,<wbr/> such as</p>
+<ul>
+<li><a href="#controls_android.control.aeTargetFpsRange">android.<wbr/>control.<wbr/>ae<wbr/>Target<wbr/>Fps<wbr/>Range</a></li>
+<li><a href="#controls_android.control.aeExposureCompensation">android.<wbr/>control.<wbr/>ae<wbr/>Exposure<wbr/>Compensation</a></li>
+<li><a href="#controls_android.control.aeLock">android.<wbr/>control.<wbr/>ae<wbr/>Lock</a></li>
+<li><a href="#controls_android.control.awbLock">android.<wbr/>control.<wbr/>awb<wbr/>Lock</a></li>
+<li><a href="#controls_android.control.effectMode">android.<wbr/>control.<wbr/>effect<wbr/>Mode</a></li>
+<li><a href="#controls_android.control.aeRegions">android.<wbr/>control.<wbr/>ae<wbr/>Regions</a></li>
+<li><a href="#controls_android.control.afRegions">android.<wbr/>control.<wbr/>af<wbr/>Regions</a></li>
+<li><a href="#controls_android.control.awbRegions">android.<wbr/>control.<wbr/>awb<wbr/>Regions</a></li>
+<li><a href="#controls_android.control.afTrigger">android.<wbr/>control.<wbr/>af<wbr/>Trigger</a></li>
+<li><a href="#controls_android.control.aePrecaptureTrigger">android.<wbr/>control.<wbr/>ae<wbr/>Precapture<wbr/>Trigger</a></li>
+</ul>
+<p>Outside of android.<wbr/>control.<wbr/>*,<wbr/> the following controls will work:</p>
+<ul>
+<li><a href="#controls_android.flash.mode">android.<wbr/>flash.<wbr/>mode</a> (TORCH mode only,<wbr/> automatic flash for still capture will not
+work since aeMode is ON)</li>
+<li><a href="#controls_android.lens.opticalStabilizationMode">android.<wbr/>lens.<wbr/>optical<wbr/>Stabilization<wbr/>Mode</a> (if it is supported)</li>
+<li><a href="#controls_android.scaler.cropRegion">android.<wbr/>scaler.<wbr/>crop<wbr/>Region</a></li>
+<li><a href="#controls_android.statistics.faceDetectMode">android.<wbr/>statistics.<wbr/>face<wbr/>Detect<wbr/>Mode</a> (if it is supported)</li>
+</ul>
+<p>For high speed recording use case,<wbr/> the actual maximum supported frame rate may
+be lower than what camera can output,<wbr/> depending on the destination Surfaces for
+the image data.<wbr/> For example,<wbr/> if the destination surface is from a video encoder,<wbr/>
+the application needs to check if the video encoder is capable of supporting the
+high frame rate for a given video size,<wbr/> or it will end up with a lower recording
+frame rate.<wbr/> If the destination surface is from the preview window,<wbr/> the actual preview frame
+rate will be bounded by the screen refresh rate.<wbr/></p>
+<p>The camera device will only support up to 2 high speed simultaneous output surfaces
+(preview and recording surfaces)
+in this mode.<wbr/> Above controls will be effective only if all of below conditions are true:</p>
+<ul>
+<li>The application creates a camera capture session with no more than 2 surfaces via
+<a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#createConstrainedHighSpeedCaptureSession">CameraDevice#createConstrainedHighSpeedCaptureSession</a>.<wbr/> The
+targeted surfaces must be preview surface (either from
+<a href="https://developer.android.com/reference/android/view/SurfaceView.html">SurfaceView</a> or <a href="https://developer.android.com/reference/android/graphics/SurfaceTexture.html">SurfaceTexture</a>) or
+recording surface (either from <a href="https://developer.android.com/reference/android/media/MediaRecorder.html#getSurface">MediaRecorder#getSurface</a> or
+<a href="https://developer.android.com/reference/android/media/MediaCodec.html#createInputSurface">MediaCodec#createInputSurface</a>).<wbr/></li>
+<li>The stream sizes are selected from the sizes reported by
+<a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getHighSpeedVideoSizes">StreamConfigurationMap#getHighSpeedVideoSizes</a>.<wbr/></li>
+<li>The FPS ranges are selected from
+<a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getHighSpeedVideoFpsRanges">StreamConfigurationMap#getHighSpeedVideoFpsRanges</a>.<wbr/></li>
+</ul>
+<p>When the above conditions are NOT satisfied,<wbr/>
+<a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#createConstrainedHighSpeedCaptureSession">CameraDevice#createConstrainedHighSpeedCaptureSession</a>
+will fail.<wbr/></p>
+<p>Switching to a FPS range that has different maximum FPS may trigger some camera device
+reconfigurations,<wbr/> which may introduce extra latency.<wbr/> It is recommended that
+the application avoids unnecessary maximum target FPS changes as much as possible
+during high speed streaming.<wbr/></p></span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>List of capabilities that this camera device
+advertises as fully supporting.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>A capability is a contract that the camera device makes in order
+to be able to satisfy one or more use cases.<wbr/></p>
+<p>Listing a capability guarantees that the whole set of features
+required to support a common use case will all be available.<wbr/></p>
+<p>Using a subset of the functionality provided by an unsupported
+capability may be possible on a specific camera device implementation;
+to do this query each of <a href="#static_android.request.availableRequestKeys">android.<wbr/>request.<wbr/>available<wbr/>Request<wbr/>Keys</a>,<wbr/>
+<a href="#static_android.request.availableResultKeys">android.<wbr/>request.<wbr/>available<wbr/>Result<wbr/>Keys</a>,<wbr/>
+<a href="#static_android.request.availableCharacteristicsKeys">android.<wbr/>request.<wbr/>available<wbr/>Characteristics<wbr/>Keys</a>.<wbr/></p>
+<p>The following capabilities are guaranteed to be available on
+<a href="#static_android.info.supportedHardwareLevel">android.<wbr/>info.<wbr/>supported<wbr/>Hardware<wbr/>Level</a> <code>==</code> FULL devices:</p>
+<ul>
+<li>MANUAL_<wbr/>SENSOR</li>
+<li>MANUAL_<wbr/>POST_<wbr/>PROCESSING</li>
+</ul>
+<p>Other capabilities may be available on either FULL or LIMITED
+devices,<wbr/> but the application should query this key to be sure.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>Additional constraint details per-capability will be available
+in the Compatibility Test Suite.<wbr/></p>
+<p>Minimum baseline requirements required for the
+BACKWARD_<wbr/>COMPATIBLE capability are not explicitly listed.<wbr/>
+Instead refer to "BC" tags and the camera CTS tests in the
+android.<wbr/>hardware.<wbr/>camera2.<wbr/>cts package.<wbr/></p>
+<p>Listed controls that can be either request or result (e.<wbr/>g.<wbr/>
+<a href="#controls_android.sensor.exposureTime">android.<wbr/>sensor.<wbr/>exposure<wbr/>Time</a>) must be available both in the
+request and the result in order to be considered to be
+capability-compliant.<wbr/></p>
+<p>For example,<wbr/> if the HAL claims to support MANUAL control,<wbr/>
+then exposure time must be configurable via the request <em>and</em>
+the actual exposure applied must be available via
+the result.<wbr/></p>
+<p>If MANUAL_<wbr/>SENSOR is omitted,<wbr/> the HAL may choose to omit the
+<a href="#static_android.scaler.availableMinFrameDurations">android.<wbr/>scaler.<wbr/>available<wbr/>Min<wbr/>Frame<wbr/>Durations</a> static property entirely.<wbr/></p>
+<p>For PRIVATE_<wbr/>REPROCESSING and YUV_<wbr/>REPROCESSING capabilities,<wbr/> see
+hardware/<wbr/>libhardware/<wbr/>include/<wbr/>hardware/<wbr/>camera3.<wbr/>h Section 10 for more information.<wbr/></p>
+<p>Devices that support the MANUAL_<wbr/>SENSOR capability must support the
+CAMERA3_<wbr/>TEMPLATE_<wbr/>MANUAL template defined in camera3.<wbr/>h.<wbr/></p>
+<p>Devices that support the PRIVATE_<wbr/>REPROCESSING capability or the
+YUV_<wbr/>REPROCESSING capability must support the
+CAMERA3_<wbr/>TEMPLATE_<wbr/>ZERO_<wbr/>SHUTTER_<wbr/>LAG template defined in camera3.<wbr/>h.<wbr/></p>
+<p>For DEPTH_<wbr/>OUTPUT,<wbr/> the depth-format keys
+<a href="#static_android.depth.availableDepthStreamConfigurations">android.<wbr/>depth.<wbr/>available<wbr/>Depth<wbr/>Stream<wbr/>Configurations</a>,<wbr/>
+<a href="#static_android.depth.availableDepthMinFrameDurations">android.<wbr/>depth.<wbr/>available<wbr/>Depth<wbr/>Min<wbr/>Frame<wbr/>Durations</a>,<wbr/>
+<a href="#static_android.depth.availableDepthStallDurations">android.<wbr/>depth.<wbr/>available<wbr/>Depth<wbr/>Stall<wbr/>Durations</a> must be available,<wbr/> in
+addition to the other keys explicitly mentioned in the DEPTH_<wbr/>OUTPUT
+enum notes.<wbr/> The entry <a href="#static_android.depth.maxDepthSamples">android.<wbr/>depth.<wbr/>max<wbr/>Depth<wbr/>Samples</a> must be available
+if the DEPTH_<wbr/>POINT_<wbr/>CLOUD format is supported (HAL pixel format BLOB,<wbr/> dataspace
+DEPTH).<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.request.availableRequestKeys">
+            <td class="entry_name
+             " rowspan="5">
+              android.<wbr/>request.<wbr/>available<wbr/>Request<wbr/>Keys
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int32</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  n
+                </span>
+              <span class="entry_type_visibility"> [hidden]</span>
+
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>A list of all keys that the camera device has available
+to use with <a href="https://developer.android.com/reference/android/hardware/camera2/CaptureRequest.html">CaptureRequest</a>.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>Attempting to set a key into a CaptureRequest that is not
+listed here will result in an invalid request and will be rejected
+by the camera device.<wbr/></p>
+<p>This field can be used to query the feature set of a camera device
+at a more granular level than capabilities.<wbr/> This is especially
+important for optional keys that are not listed under any capability
+in <a href="#static_android.request.availableCapabilities">android.<wbr/>request.<wbr/>available<wbr/>Capabilities</a>.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>Vendor tags must not be listed here.<wbr/> Use the vendor tag metadata
+extensions C api instead (refer to camera3.<wbr/>h for more details).<wbr/></p>
+<p>Setting/<wbr/>getting vendor tags will be checked against the metadata
+vendor extensions API and not against this field.<wbr/></p>
+<p>The HAL must not consume any request tags that are not listed either
+here or in the vendor tag list.<wbr/></p>
+<p>The public camera2 API will always make the vendor tags visible
+via
+<a href="https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics.html#getAvailableCaptureRequestKeys">CameraCharacteristics#getAvailableCaptureRequestKeys</a>.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.request.availableResultKeys">
+            <td class="entry_name
+             " rowspan="5">
+              android.<wbr/>request.<wbr/>available<wbr/>Result<wbr/>Keys
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int32</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  n
+                </span>
+              <span class="entry_type_visibility"> [hidden]</span>
+
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>A list of all keys that the camera device has available
+to use with <a href="https://developer.android.com/reference/android/hardware/camera2/CaptureResult.html">CaptureResult</a>.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>Attempting to get a key from a CaptureResult that is not
+listed here will always return a <code>null</code> value.<wbr/> Getting a key from
+a CaptureResult that is listed here will generally never return a <code>null</code>
+value.<wbr/></p>
+<p>The following keys may return <code>null</code> unless they are enabled:</p>
+<ul>
+<li><a href="#dynamic_android.statistics.lensShadingMap">android.<wbr/>statistics.<wbr/>lens<wbr/>Shading<wbr/>Map</a> (non-null iff <a href="#controls_android.statistics.lensShadingMapMode">android.<wbr/>statistics.<wbr/>lens<wbr/>Shading<wbr/>Map<wbr/>Mode</a> == ON)</li>
+</ul>
+<p>(Those sometimes-null keys will nevertheless be listed here
+if they are available.<wbr/>)</p>
+<p>This field can be used to query the feature set of a camera device
+at a more granular level than capabilities.<wbr/> This is especially
+important for optional keys that are not listed under any capability
+in <a href="#static_android.request.availableCapabilities">android.<wbr/>request.<wbr/>available<wbr/>Capabilities</a>.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>Tags listed here must always have an entry in the result metadata,<wbr/>
+even if that size is 0 elements.<wbr/> Only array-type tags (e.<wbr/>g.<wbr/> lists,<wbr/>
+matrices,<wbr/> strings) are allowed to have 0 elements.<wbr/></p>
+<p>Vendor tags must not be listed here.<wbr/> Use the vendor tag metadata
+extensions C api instead (refer to camera3.<wbr/>h for more details).<wbr/></p>
+<p>Setting/<wbr/>getting vendor tags will be checked against the metadata
+vendor extensions API and not against this field.<wbr/></p>
+<p>The HAL must not produce any result tags that are not listed either
+here or in the vendor tag list.<wbr/></p>
+<p>The public camera2 API will always make the vendor tags visible via <a href="https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics.html#getAvailableCaptureResultKeys">CameraCharacteristics#getAvailableCaptureResultKeys</a>.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.request.availableCharacteristicsKeys">
+            <td class="entry_name
+             " rowspan="5">
+              android.<wbr/>request.<wbr/>available<wbr/>Characteristics<wbr/>Keys
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int32</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  n
+                </span>
+              <span class="entry_type_visibility"> [hidden]</span>
+
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>A list of all keys that the camera device has available
+to use with <a href="https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics.html">CameraCharacteristics</a>.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>This entry follows the same rules as
+<a href="#static_android.request.availableResultKeys">android.<wbr/>request.<wbr/>available<wbr/>Result<wbr/>Keys</a> (except that it applies for
+CameraCharacteristics instead of CaptureResult).<wbr/> See above for more
+details.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>Keys listed here must always have an entry in the static info metadata,<wbr/>
+even if that size is 0 elements.<wbr/> Only array-type tags (e.<wbr/>g.<wbr/> lists,<wbr/>
+matrices,<wbr/> strings) are allowed to have 0 elements.<wbr/></p>
+<p>Vendor tags must not be listed here.<wbr/> Use the vendor tag metadata
+extensions C api instead (refer to camera3.<wbr/>h for more details).<wbr/></p>
+<p>Setting/<wbr/>getting vendor tags will be checked against the metadata
+vendor extensions API and not against this field.<wbr/></p>
+<p>The HAL must not have any tags in its static info that are not listed
+either here or in the vendor tag list.<wbr/></p>
+<p>The public camera2 API will always make the vendor tags visible
+via <a href="https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics.html#getKeys">CameraCharacteristics#getKeys</a>.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+        
+
+      <!-- end of kind -->
+      </tbody>
+      <tr><td colspan="6" class="kind">dynamic</td></tr>
+
+      <thead class="entries_header">
+        <tr>
+          <th class="th_name">Property Name</th>
+          <th class="th_type">Type</th>
+          <th class="th_description">Description</th>
+          <th class="th_units">Units</th>
+          <th class="th_range">Range</th>
+          <th class="th_tags">Tags</th>
+        </tr>
+      </thead>
+
+      <tbody>
+
+        
+
+        
+
+        
+
+        
+
+                
+          <tr class="entry" id="dynamic_android.request.frameCount">
+            <td class="entry_name
+                entry_name_deprecated
+             " rowspan="3">
+              android.<wbr/>request.<wbr/>frame<wbr/>Count
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int32</span>
+
+              <span class="entry_type_visibility"> [hidden]</span>
+
+
+
+              <span class="entry_type_deprecated">[deprecated] </span>
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>A frame counter set by the framework.<wbr/> This value monotonically
+increases with every new result (that is,<wbr/> each new result has a unique
+frameCount value).<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+              count of frames
+            </td>
+
+            <td class="entry_range">
+              <p><span class="entry_range_deprecated">Deprecated</span>. Do not use.</p>
+              <p>&gt; 0</p>
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>Reset on release()</p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="dynamic_android.request.id">
+            <td class="entry_name
+             " rowspan="1">
+              android.<wbr/>request.<wbr/>id
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int32</span>
+
+              <span class="entry_type_visibility"> [hidden]</span>
+
+
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>An application-specified ID for the current
+request.<wbr/> Must be maintained unchanged in output
+frame</p>
+            </td>
+
+            <td class="entry_units">
+              arbitrary integer assigned by application
+            </td>
+
+            <td class="entry_range">
+              <p>Any int</p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_V1">V1</a></li>
+              </ul>
+            </td>
+
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="dynamic_android.request.metadataMode">
+            <td class="entry_name
+             " rowspan="1">
+              android.<wbr/>request.<wbr/>metadata<wbr/>Mode
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">byte</span>
+
+              <span class="entry_type_visibility"> [system]</span>
+
+
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">NONE</span>
+                    <span class="entry_type_enum_notes"><p>No metadata should be produced on output,<wbr/> except
+for application-bound buffer data.<wbr/> If no
+application-bound streams exist,<wbr/> no frame should be
+placed in the output frame queue.<wbr/> If such streams
+exist,<wbr/> a frame should be placed on the output queue
+with null metadata but with the necessary output buffer
+information.<wbr/> Timestamp information should still be
+included with any output stream buffers</p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">FULL</span>
+                    <span class="entry_type_enum_notes"><p>All metadata should be produced.<wbr/> Statistics will
+only be produced if they are separately
+enabled</p></span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>How much metadata to produce on
+output</p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_FUTURE">FUTURE</a></li>
+              </ul>
+            </td>
+
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="dynamic_android.request.outputStreams">
+            <td class="entry_name
+                entry_name_deprecated
+             " rowspan="3">
+              android.<wbr/>request.<wbr/>output<wbr/>Streams
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int32</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  n
+                </span>
+              <span class="entry_type_visibility"> [system]</span>
+
+
+
+              <span class="entry_type_deprecated">[deprecated] </span>
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Lists which camera output streams image data
+from this capture must be sent to</p>
+            </td>
+
+            <td class="entry_units">
+              List of camera stream IDs
+            </td>
+
+            <td class="entry_range">
+              <p><span class="entry_range_deprecated">Deprecated</span>. Do not use.</p>
+              <p>List must only include streams that have been
+created</p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_HAL2">HAL2</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>If no output streams are listed,<wbr/> then the image
+data should simply be discarded.<wbr/> The image data must
+still be captured for metadata and statistics production,<wbr/>
+and the lens and flash must operate as requested.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="dynamic_android.request.pipelineDepth">
+            <td class="entry_name
+             " rowspan="5">
+              android.<wbr/>request.<wbr/>pipeline<wbr/>Depth
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">byte</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Specifies the number of pipeline stages the frame went
+through from when it was exposed to when the final completed result
+was available to the framework.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p>&lt;= <a href="#static_android.request.pipelineMaxDepth">android.<wbr/>request.<wbr/>pipeline<wbr/>Max<wbr/>Depth</a></p>
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>Depending on what settings are used in the request,<wbr/> and
+what streams are configured,<wbr/> the data may undergo less processing,<wbr/>
+and some pipeline stages skipped.<wbr/></p>
+<p>See <a href="#static_android.request.pipelineMaxDepth">android.<wbr/>request.<wbr/>pipeline<wbr/>Max<wbr/>Depth</a> for more details.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>This value must always represent the accurate count of how many
+pipeline stages were actually used.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+        
+
+      <!-- end of kind -->
+      </tbody>
+
+  <!-- end of section -->
+  <tr><td colspan="6" id="section_scaler" class="section">scaler</td></tr>
+
+
+      <tr><td colspan="6" class="kind">controls</td></tr>
+
+      <thead class="entries_header">
+        <tr>
+          <th class="th_name">Property Name</th>
+          <th class="th_type">Type</th>
+          <th class="th_description">Description</th>
+          <th class="th_units">Units</th>
+          <th class="th_range">Range</th>
+          <th class="th_tags">Tags</th>
+        </tr>
+      </thead>
+
+      <tbody>
+
+        
+
+        
+
+        
+
+        
+
+                
+          <tr class="entry" id="controls_android.scaler.cropRegion">
+            <td class="entry_name
+             " rowspan="5">
+              android.<wbr/>scaler.<wbr/>crop<wbr/>Region
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int32</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  4
+                </span>
+              <span class="entry_type_visibility"> [public as rectangle]</span>
+
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>The desired region of the sensor to read out for this capture.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+              Pixel coordinates relative to
+          android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_BC">BC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>This control can be used to implement digital zoom.<wbr/></p>
+<p>The crop region coordinate system is based off
+<a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a>,<wbr/> with <code>(0,<wbr/> 0)</code> being the
+top-left corner of the sensor active array.<wbr/></p>
+<p>Output streams use this rectangle to produce their output,<wbr/>
+cropping to a smaller region if necessary to maintain the
+stream's aspect ratio,<wbr/> then scaling the sensor input to
+match the output's configured resolution.<wbr/></p>
+<p>The crop region is applied after the RAW to other color
+space (e.<wbr/>g.<wbr/> YUV) conversion.<wbr/> Since raw streams
+(e.<wbr/>g.<wbr/> RAW16) don't have the conversion stage,<wbr/> they are not
+croppable.<wbr/> The crop region will be ignored by raw streams.<wbr/></p>
+<p>For non-raw streams,<wbr/> any additional per-stream cropping will
+be done to maximize the final pixel area of the stream.<wbr/></p>
+<p>For example,<wbr/> if the crop region is set to a 4:3 aspect
+ratio,<wbr/> then 4:3 streams will use the exact crop
+region.<wbr/> 16:9 streams will further crop vertically
+(letterbox).<wbr/></p>
+<p>Conversely,<wbr/> if the crop region is set to a 16:9,<wbr/> then 4:3
+outputs will crop horizontally (pillarbox),<wbr/> and 16:9
+streams will match exactly.<wbr/> These additional crops will
+be centered within the crop region.<wbr/></p>
+<p>The width and height of the crop region cannot
+be set to be smaller than
+<code>floor( activeArraySize.<wbr/>width /<wbr/> <a href="#static_android.scaler.availableMaxDigitalZoom">android.<wbr/>scaler.<wbr/>available<wbr/>Max<wbr/>Digital<wbr/>Zoom</a> )</code> and
+<code>floor( activeArraySize.<wbr/>height /<wbr/> <a href="#static_android.scaler.availableMaxDigitalZoom">android.<wbr/>scaler.<wbr/>available<wbr/>Max<wbr/>Digital<wbr/>Zoom</a> )</code>,<wbr/> respectively.<wbr/></p>
+<p>The camera device may adjust the crop region to account
+for rounding and other hardware requirements; the final
+crop region used will be included in the output capture
+result.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>The output streams must maintain square pixels at all
+times,<wbr/> no matter what the relative aspect ratios of the
+crop region and the stream are.<wbr/>  Negative values for
+corner are allowed for raw output if full pixel array is
+larger than active pixel array.<wbr/> Width and height may be
+rounded to nearest larger supportable width,<wbr/> especially
+for raw output,<wbr/> where only a few fixed scales may be
+possible.<wbr/></p>
+<p>For a set of output streams configured,<wbr/> if the sensor output is cropped to a smaller
+size than active array size,<wbr/> the HAL needs to follow the below cropping rules:</p>
+<ul>
+<li>
+<p>The HAL needs to handle the cropRegion as if the sensor crop size is the effective active
+array size.<wbr/> More specifically,<wbr/> the HAL must transform the request cropRegion from
+<a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a> to the sensor cropped pixel area size in this way:</p>
+<ol>
+<li>Translate the requested cropRegion w.<wbr/>r.<wbr/>t.,<wbr/> the left top corner of the sensor
+cropped pixel area by (tx,<wbr/> ty),<wbr/>
+where <code>ty = sensorCrop.<wbr/>top * (sensorCrop.<wbr/>height /<wbr/> activeArraySize.<wbr/>height)</code>
+and <code>tx = sensorCrop.<wbr/>left * (sensorCrop.<wbr/>width /<wbr/> activeArraySize.<wbr/>width)</code>.<wbr/> The
+(sensorCrop.<wbr/>top,<wbr/> sensorCrop.<wbr/>left) is the coordinate based off the
+<a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a>.<wbr/></li>
+<li>Scale the width and height of requested cropRegion with scaling factor of
+sensor<wbr/>Crop.<wbr/>width/<wbr/>active<wbr/>Array<wbr/>Size.<wbr/>width and sensor<wbr/>Crop.<wbr/>height/<wbr/>active<wbr/>Array<wbr/>Size.<wbr/>height
+respectively.<wbr/>
+Once this new cropRegion is calculated,<wbr/> the HAL must use this region to crop the image
+with regard to the sensor crop size (effective active array size).<wbr/> The HAL still needs to
+follow the general cropping rule for this new cropRegion and effective active
+array size.<wbr/></li>
+</ol>
+</li>
+<li>
+<p>The HAL must report the cropRegion with regard to <a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a>.<wbr/>
+The HAL needs to convert the new cropRegion generated above w.<wbr/>r.<wbr/>t.,<wbr/> full active array size.<wbr/>
+The reported cropRegion may be slightly different from the requested cropRegion since
+the HAL may adjust the crop region to account for rounding,<wbr/> conversion error,<wbr/> or other
+hardware limitations.<wbr/></p>
+</li>
+</ul>
+<p>HAL2.<wbr/>x uses only (x,<wbr/> y,<wbr/> width)</p>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+        
+
+      <!-- end of kind -->
+      </tbody>
+      <tr><td colspan="6" class="kind">static</td></tr>
+
+      <thead class="entries_header">
+        <tr>
+          <th class="th_name">Property Name</th>
+          <th class="th_type">Type</th>
+          <th class="th_description">Description</th>
+          <th class="th_units">Units</th>
+          <th class="th_range">Range</th>
+          <th class="th_tags">Tags</th>
+        </tr>
+      </thead>
+
+      <tbody>
+
+        
+
+        
+
+        
+
+        
+
+                
+          <tr class="entry" id="static_android.scaler.availableFormats">
+            <td class="entry_name
+                entry_name_deprecated
+             " rowspan="5">
+              android.<wbr/>scaler.<wbr/>available<wbr/>Formats
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">int32</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  n
+                </span>
+              <span class="entry_type_visibility"> [hidden as imageFormat]</span>
+
+
+
+              <span class="entry_type_deprecated">[deprecated] </span>
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">RAW16</span>
+                    <span class="entry_type_enum_optional">[optional]</span>
+                    <span class="entry_type_enum_value">0x20</span>
+                    <span class="entry_type_enum_notes"><p>RAW16 is a standard,<wbr/> cross-platform format for raw image
+buffers with 16-bit pixels.<wbr/></p>
+<p>Buffers of this format are typically expected to have a
+Bayer Color Filter Array (CFA) layout,<wbr/> which is given in
+<a href="#static_android.sensor.info.colorFilterArrangement">android.<wbr/>sensor.<wbr/>info.<wbr/>color<wbr/>Filter<wbr/>Arrangement</a>.<wbr/> Sensors with
+CFAs that are not representable by a format in
+<a href="#static_android.sensor.info.colorFilterArrangement">android.<wbr/>sensor.<wbr/>info.<wbr/>color<wbr/>Filter<wbr/>Arrangement</a> should not
+use this format.<wbr/></p>
+<p>Buffers of this format will also follow the constraints given for
+RAW_<wbr/>OPAQUE buffers,<wbr/> but with relaxed performance constraints.<wbr/></p>
+<p>This format is intended to give users access to the full contents
+of the buffers coming directly from the image sensor prior to any
+cropping or scaling operations,<wbr/> and all coordinate systems for
+metadata used for this format are relative to the size of the
+active region of the image sensor before any geometric distortion
+correction has been applied (i.<wbr/>e.<wbr/>
+<a href="#static_android.sensor.info.preCorrectionActiveArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>pre<wbr/>Correction<wbr/>Active<wbr/>Array<wbr/>Size</a>).<wbr/> Supported
+dimensions for this format are limited to the full dimensions of
+the sensor (e.<wbr/>g.<wbr/> either <a href="#static_android.sensor.info.pixelArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>pixel<wbr/>Array<wbr/>Size</a> or
+<a href="#static_android.sensor.info.preCorrectionActiveArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>pre<wbr/>Correction<wbr/>Active<wbr/>Array<wbr/>Size</a> will be the
+only supported output size).<wbr/></p>
+<p>See <a href="#static_android.scaler.availableInputOutputFormatsMap">android.<wbr/>scaler.<wbr/>available<wbr/>Input<wbr/>Output<wbr/>Formats<wbr/>Map</a> for
+the full set of performance guarantees.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">RAW_OPAQUE</span>
+                    <span class="entry_type_enum_optional">[optional]</span>
+                    <span class="entry_type_enum_value">0x24</span>
+                    <span class="entry_type_enum_notes"><p>RAW_<wbr/>OPAQUE is a format for raw image buffers coming from an
+image sensor.<wbr/></p>
+<p>The actual structure of buffers of this format is
+platform-specific,<wbr/> but must follow several constraints:</p>
+<ol>
+<li>No image post-processing operations may have been applied to
+buffers of this type.<wbr/> These buffers contain raw image data coming
+directly from the image sensor.<wbr/></li>
+<li>If a buffer of this format is passed to the camera device for
+reprocessing,<wbr/> the resulting images will be identical to the images
+produced if the buffer had come directly from the sensor and was
+processed with the same settings.<wbr/></li>
+</ol>
+<p>The intended use for this format is to allow access to the native
+raw format buffers coming directly from the camera sensor without
+any additional conversions or decrease in framerate.<wbr/></p>
+<p>See <a href="#static_android.scaler.availableInputOutputFormatsMap">android.<wbr/>scaler.<wbr/>available<wbr/>Input<wbr/>Output<wbr/>Formats<wbr/>Map</a> for the full set of
+performance guarantees.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">YV12</span>
+                    <span class="entry_type_enum_optional">[optional]</span>
+                    <span class="entry_type_enum_value">0x32315659</span>
+                    <span class="entry_type_enum_notes"><p>YCrCb 4:2:0 Planar</p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">YCrCb_420_SP</span>
+                    <span class="entry_type_enum_optional">[optional]</span>
+                    <span class="entry_type_enum_value">0x11</span>
+                    <span class="entry_type_enum_notes"><p>NV21</p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">IMPLEMENTATION_DEFINED</span>
+                    <span class="entry_type_enum_value">0x22</span>
+                    <span class="entry_type_enum_notes"><p>System internal format,<wbr/> not application-accessible</p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">YCbCr_420_888</span>
+                    <span class="entry_type_enum_value">0x23</span>
+                    <span class="entry_type_enum_notes"><p>Flexible YUV420 Format</p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">BLOB</span>
+                    <span class="entry_type_enum_value">0x21</span>
+                    <span class="entry_type_enum_notes"><p>JPEG format</p></span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>The list of image formats that are supported by this
+camera device for output streams.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p><span class="entry_range_deprecated">Deprecated</span>. Do not use.</p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_BC">BC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>All camera devices will support JPEG and YUV_<wbr/>420_<wbr/>888 formats.<wbr/></p>
+<p>When set to YUV_<wbr/>420_<wbr/>888,<wbr/> the application can access the YUV420 data directly.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>These format values are from HAL_<wbr/>PIXEL_<wbr/>FORMAT_<wbr/>* in
+system/<wbr/>core/<wbr/>include/<wbr/>system/<wbr/>graphics.<wbr/>h.<wbr/></p>
+<p>When IMPLEMENTATION_<wbr/>DEFINED is used,<wbr/> the platform
+gralloc module will select a format based on the usage flags provided
+by the camera HAL device and the other endpoint of the stream.<wbr/> It is
+usually used by preview and recording streams,<wbr/> where the application doesn't
+need to access the image data.<wbr/></p>
+<p>YCb<wbr/>Cr_<wbr/>420_<wbr/>888 format must be supported by the HAL.<wbr/> When an image stream
+needs CPU/<wbr/>application direct access,<wbr/> this format will be used.<wbr/></p>
+<p>The BLOB format must be supported by the HAL.<wbr/> This is used for the JPEG stream.<wbr/></p>
+<p>A RAW_<wbr/>OPAQUE buffer should contain only pixel data.<wbr/> It is strongly
+recommended that any information used by the camera device when
+processing images is fully expressed by the result metadata
+for that image buffer.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.scaler.availableJpegMinDurations">
+            <td class="entry_name
+                entry_name_deprecated
+             " rowspan="3">
+              android.<wbr/>scaler.<wbr/>available<wbr/>Jpeg<wbr/>Min<wbr/>Durations
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int64</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  n
+                </span>
+              <span class="entry_type_visibility"> [hidden]</span>
+
+
+
+              <span class="entry_type_deprecated">[deprecated] </span>
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>The minimum frame duration that is supported
+for each resolution in <a href="#static_android.scaler.availableJpegSizes">android.<wbr/>scaler.<wbr/>available<wbr/>Jpeg<wbr/>Sizes</a>.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+              Nanoseconds
+            </td>
+
+            <td class="entry_range">
+              <p><span class="entry_range_deprecated">Deprecated</span>. Do not use.</p>
+              <p>TODO: Remove property.<wbr/></p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_BC">BC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>This corresponds to the minimum steady-state frame duration when only
+that JPEG stream is active and captured in a burst,<wbr/> with all
+processing (typically in android.<wbr/>*.<wbr/>mode) set to FAST.<wbr/></p>
+<p>When multiple streams are configured,<wbr/> the minimum
+frame duration will be &gt;= max(individual stream min
+durations)</p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.scaler.availableJpegSizes">
+            <td class="entry_name
+                entry_name_deprecated
+             " rowspan="5">
+              android.<wbr/>scaler.<wbr/>available<wbr/>Jpeg<wbr/>Sizes
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int32</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  n x 2
+                </span>
+              <span class="entry_type_visibility"> [hidden as size]</span>
+
+
+
+              <span class="entry_type_deprecated">[deprecated] </span>
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>The JPEG resolutions that are supported by this camera device.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p><span class="entry_range_deprecated">Deprecated</span>. Do not use.</p>
+              <p>TODO: Remove property.<wbr/></p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_BC">BC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>The resolutions are listed as <code>(width,<wbr/> height)</code> pairs.<wbr/> All camera devices will support
+sensor maximum resolution (defined by <a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a>).<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>The HAL must include sensor maximum resolution
+(defined by <a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a>),<wbr/>
+and should include half/<wbr/>quarter of sensor maximum resolution.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.scaler.availableMaxDigitalZoom">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>scaler.<wbr/>available<wbr/>Max<wbr/>Digital<wbr/>Zoom
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">float</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>The maximum ratio between both active area width
+and crop region width,<wbr/> and active area height and
+crop region height,<wbr/> for <a href="#controls_android.scaler.cropRegion">android.<wbr/>scaler.<wbr/>crop<wbr/>Region</a>.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+              Zoom scale factor
+            </td>
+
+            <td class="entry_range">
+              <p>&gt;=1</p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_BC">BC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>This represents the maximum amount of zooming possible by
+the camera device,<wbr/> or equivalently,<wbr/> the minimum cropping
+window size.<wbr/></p>
+<p>Crop regions that have a width or height that is smaller
+than this ratio allows will be rounded up to the minimum
+allowed size by the camera device.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.scaler.availableProcessedMinDurations">
+            <td class="entry_name
+                entry_name_deprecated
+             " rowspan="3">
+              android.<wbr/>scaler.<wbr/>available<wbr/>Processed<wbr/>Min<wbr/>Durations
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int64</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  n
+                </span>
+              <span class="entry_type_visibility"> [hidden]</span>
+
+
+
+              <span class="entry_type_deprecated">[deprecated] </span>
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>For each available processed output size (defined in
+<a href="#static_android.scaler.availableProcessedSizes">android.<wbr/>scaler.<wbr/>available<wbr/>Processed<wbr/>Sizes</a>),<wbr/> this property lists the
+minimum supportable frame duration for that size.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+              Nanoseconds
+            </td>
+
+            <td class="entry_range">
+              <p><span class="entry_range_deprecated">Deprecated</span>. Do not use.</p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_BC">BC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>This should correspond to the frame duration when only that processed
+stream is active,<wbr/> with all processing (typically in android.<wbr/>*.<wbr/>mode)
+set to FAST.<wbr/></p>
+<p>When multiple streams are configured,<wbr/> the minimum frame duration will
+be &gt;= max(individual stream min durations).<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.scaler.availableProcessedSizes">
+            <td class="entry_name
+                entry_name_deprecated
+             " rowspan="5">
+              android.<wbr/>scaler.<wbr/>available<wbr/>Processed<wbr/>Sizes
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int32</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  n x 2
+                </span>
+              <span class="entry_type_visibility"> [hidden as size]</span>
+
+
+
+              <span class="entry_type_deprecated">[deprecated] </span>
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>The resolutions available for use with
+processed output streams,<wbr/> such as YV12,<wbr/> NV12,<wbr/> and
+platform opaque YUV/<wbr/>RGB streams to the GPU or video
+encoders.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p><span class="entry_range_deprecated">Deprecated</span>. Do not use.</p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_BC">BC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>The resolutions are listed as <code>(width,<wbr/> height)</code> pairs.<wbr/></p>
+<p>For a given use case,<wbr/> the actual maximum supported resolution
+may be lower than what is listed here,<wbr/> depending on the destination
+Surface for the image data.<wbr/> For example,<wbr/> for recording video,<wbr/>
+the video encoder chosen may have a maximum size limit (e.<wbr/>g.<wbr/> 1080p)
+smaller than what the camera (e.<wbr/>g.<wbr/> maximum resolution is 3264x2448)
+can provide.<wbr/></p>
+<p>Please reference the documentation for the image data destination to
+check if it limits the maximum size for image data.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>For FULL capability devices (<code><a href="#static_android.info.supportedHardwareLevel">android.<wbr/>info.<wbr/>supported<wbr/>Hardware<wbr/>Level</a> == FULL</code>),<wbr/>
+the HAL must include all JPEG sizes listed in <a href="#static_android.scaler.availableJpegSizes">android.<wbr/>scaler.<wbr/>available<wbr/>Jpeg<wbr/>Sizes</a>
+and each below resolution if it is smaller than or equal to the sensor
+maximum resolution (if they are not listed in JPEG sizes already):</p>
+<ul>
+<li>240p (320 x 240)</li>
+<li>480p (640 x 480)</li>
+<li>720p (1280 x 720)</li>
+<li>1080p (1920 x 1080)</li>
+</ul>
+<p>For LIMITED capability devices (<code><a href="#static_android.info.supportedHardwareLevel">android.<wbr/>info.<wbr/>supported<wbr/>Hardware<wbr/>Level</a> == LIMITED</code>),<wbr/>
+the HAL only has to list up to the maximum video size supported by the device.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.scaler.availableRawMinDurations">
+            <td class="entry_name
+                entry_name_deprecated
+             " rowspan="3">
+              android.<wbr/>scaler.<wbr/>available<wbr/>Raw<wbr/>Min<wbr/>Durations
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int64</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  n
+                </span>
+              <span class="entry_type_visibility"> [system]</span>
+
+
+
+              <span class="entry_type_deprecated">[deprecated] </span>
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>For each available raw output size (defined in
+<a href="#static_android.scaler.availableRawSizes">android.<wbr/>scaler.<wbr/>available<wbr/>Raw<wbr/>Sizes</a>),<wbr/> this property lists the minimum
+supportable frame duration for that size.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+              Nanoseconds
+            </td>
+
+            <td class="entry_range">
+              <p><span class="entry_range_deprecated">Deprecated</span>. Do not use.</p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_BC">BC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>Should correspond to the frame duration when only the raw stream is
+active.<wbr/></p>
+<p>When multiple streams are configured,<wbr/> the minimum
+frame duration will be &gt;= max(individual stream min
+durations)</p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.scaler.availableRawSizes">
+            <td class="entry_name
+                entry_name_deprecated
+             " rowspan="1">
+              android.<wbr/>scaler.<wbr/>available<wbr/>Raw<wbr/>Sizes
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int32</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  n x 2
+                </span>
+              <span class="entry_type_visibility"> [system as size]</span>
+
+
+
+              <span class="entry_type_deprecated">[deprecated] </span>
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>The resolutions available for use with raw
+sensor output streams,<wbr/> listed as width,<wbr/>
+height</p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p><span class="entry_range_deprecated">Deprecated</span>. Do not use.</p>
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.scaler.availableInputOutputFormatsMap">
+            <td class="entry_name
+             " rowspan="5">
+              android.<wbr/>scaler.<wbr/>available<wbr/>Input<wbr/>Output<wbr/>Formats<wbr/>Map
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int32</span>
+
+              <span class="entry_type_visibility"> [hidden as reprocessFormatsMap]</span>
+
+
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>The mapping of image formats that are supported by this
+camera device for input streams,<wbr/> to their corresponding output formats.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_REPROC">REPROC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>All camera devices with at least 1
+<a href="#static_android.request.maxNumInputStreams">android.<wbr/>request.<wbr/>max<wbr/>Num<wbr/>Input<wbr/>Streams</a> will have at least one
+available input format.<wbr/></p>
+<p>The camera device will support the following map of formats,<wbr/>
+if its dependent capability (<a href="#static_android.request.availableCapabilities">android.<wbr/>request.<wbr/>available<wbr/>Capabilities</a>) is supported:</p>
+<table>
+<thead>
+<tr>
+<th align="left">Input Format</th>
+<th align="left">Output Format</th>
+<th align="left">Capability</th>
+</tr>
+</thead>
+<tbody>
+<tr>
+<td align="left"><a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#PRIVATE">ImageFormat#PRIVATE</a></td>
+<td align="left"><a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#JPEG">ImageFormat#JPEG</a></td>
+<td align="left">PRIVATE_<wbr/>REPROCESSING</td>
+</tr>
+<tr>
+<td align="left"><a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#PRIVATE">ImageFormat#PRIVATE</a></td>
+<td align="left"><a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#YUV_420_888">Image<wbr/>Format#YUV_<wbr/>420_<wbr/>888</a></td>
+<td align="left">PRIVATE_<wbr/>REPROCESSING</td>
+</tr>
+<tr>
+<td align="left"><a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#YUV_420_888">Image<wbr/>Format#YUV_<wbr/>420_<wbr/>888</a></td>
+<td align="left"><a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#JPEG">ImageFormat#JPEG</a></td>
+<td align="left">YUV_<wbr/>REPROCESSING</td>
+</tr>
+<tr>
+<td align="left"><a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#YUV_420_888">Image<wbr/>Format#YUV_<wbr/>420_<wbr/>888</a></td>
+<td align="left"><a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#YUV_420_888">Image<wbr/>Format#YUV_<wbr/>420_<wbr/>888</a></td>
+<td align="left">YUV_<wbr/>REPROCESSING</td>
+</tr>
+</tbody>
+</table>
+<p>PRIVATE refers to a device-internal format that is not directly application-visible.<wbr/>  A
+PRIVATE input surface can be acquired by <a href="https://developer.android.com/reference/android/media/ImageReader.html#newInstance">ImageReader#newInstance</a>
+with <a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#PRIVATE">ImageFormat#PRIVATE</a> as the format.<wbr/></p>
+<p>For a PRIVATE_<wbr/>REPROCESSING-capable camera device,<wbr/> using the PRIVATE format as either input
+or output will never hurt maximum frame rate (i.<wbr/>e.<wbr/>  <a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getOutputStallDuration">getOutputStallDuration(ImageFormat.<wbr/>PRIVATE,<wbr/> size)</a> is always 0).<wbr/></p>
+<p>Attempting to configure an input stream with output streams not
+listed as available in this map is not valid.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>For the formats,<wbr/> see <code>system/<wbr/>core/<wbr/>include/<wbr/>system/<wbr/>graphics.<wbr/>h</code> for a definition
+of the image format enumerations.<wbr/> The PRIVATE format refers to the
+HAL_<wbr/>PIXEL_<wbr/>FORMAT_<wbr/>IMPLEMENTATION_<wbr/>DEFINED format.<wbr/> The HAL could determine
+the actual format by using the gralloc usage flags.<wbr/>
+For ZSL use case in particular,<wbr/> the HAL could choose appropriate format (partially
+processed YUV or RAW based format) by checking the format and GRALLOC_<wbr/>USAGE_<wbr/>HW_<wbr/>CAMERA_<wbr/>ZSL.<wbr/>
+See camera3.<wbr/>h for more details.<wbr/></p>
+<p>This value is encoded as a variable-size array-of-arrays.<wbr/>
+The inner array always contains <code>[format,<wbr/> length,<wbr/> ...<wbr/>]</code> where
+<code>...<wbr/></code> has <code>length</code> elements.<wbr/> An inner array is followed by another
+inner array if the total metadata entry size hasn't yet been exceeded.<wbr/></p>
+<p>A code sample to read/<wbr/>write this encoding (with a device that
+supports reprocessing IMPLEMENTATION_<wbr/>DEFINED to YUV_<wbr/>420_<wbr/>888,<wbr/> and JPEG,<wbr/>
+and reprocessing YUV_<wbr/>420_<wbr/>888 to YUV_<wbr/>420_<wbr/>888 and JPEG):</p>
+<pre><code>//<wbr/> reading
+int32_<wbr/>t* contents = &amp;entry.<wbr/>i32[0];
+for (size_<wbr/>t i = 0; i &lt; entry.<wbr/>count; ) {
+    int32_<wbr/>t format = contents[i++];
+    int32_<wbr/>t length = contents[i++];
+    int32_<wbr/>t output_<wbr/>formats[length];
+    memcpy(&amp;output_<wbr/>formats[0],<wbr/> &amp;contents[i],<wbr/>
+           length * sizeof(int32_<wbr/>t));
+    i += length;
+}
+
+//<wbr/> writing (static example,<wbr/> PRIVATE_<wbr/>REPROCESSING + YUV_<wbr/>REPROCESSING)
+int32_<wbr/>t[] contents = {
+  IMPLEMENTATION_<wbr/>DEFINED,<wbr/> 2,<wbr/> YUV_<wbr/>420_<wbr/>888,<wbr/> BLOB,<wbr/>
+  YUV_<wbr/>420_<wbr/>888,<wbr/> 2,<wbr/> YUV_<wbr/>420_<wbr/>888,<wbr/> BLOB,<wbr/>
+};
+update_<wbr/>camera_<wbr/>metadata_<wbr/>entry(metadata,<wbr/> index,<wbr/> &amp;contents[0],<wbr/>
+      sizeof(contents)/<wbr/>sizeof(contents[0]),<wbr/> &amp;updated_<wbr/>entry);
+</code></pre>
+<p>If the HAL claims to support any of the capabilities listed in the
+above details,<wbr/> then it must also support all the input-output
+combinations listed for that capability.<wbr/> It can optionally support
+additional formats if it so chooses.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.scaler.availableStreamConfigurations">
+            <td class="entry_name
+             " rowspan="5">
+              android.<wbr/>scaler.<wbr/>available<wbr/>Stream<wbr/>Configurations
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">int32</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  n x 4
+                </span>
+              <span class="entry_type_visibility"> [hidden as streamConfiguration]</span>
+
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">OUTPUT</span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">INPUT</span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>The available stream configurations that this
+camera device supports
+(i.<wbr/>e.<wbr/> format,<wbr/> width,<wbr/> height,<wbr/> output/<wbr/>input stream).<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>The configurations are listed as <code>(format,<wbr/> width,<wbr/> height,<wbr/> input?)</code>
+tuples.<wbr/></p>
+<p>For a given use case,<wbr/> the actual maximum supported resolution
+may be lower than what is listed here,<wbr/> depending on the destination
+Surface for the image data.<wbr/> For example,<wbr/> for recording video,<wbr/>
+the video encoder chosen may have a maximum size limit (e.<wbr/>g.<wbr/> 1080p)
+smaller than what the camera (e.<wbr/>g.<wbr/> maximum resolution is 3264x2448)
+can provide.<wbr/></p>
+<p>Please reference the documentation for the image data destination to
+check if it limits the maximum size for image data.<wbr/></p>
+<p>Not all output formats may be supported in a configuration with
+an input stream of a particular format.<wbr/> For more details,<wbr/> see
+<a href="#static_android.scaler.availableInputOutputFormatsMap">android.<wbr/>scaler.<wbr/>available<wbr/>Input<wbr/>Output<wbr/>Formats<wbr/>Map</a>.<wbr/></p>
+<p>The following table describes the minimum required output stream
+configurations based on the hardware level
+(<a href="#static_android.info.supportedHardwareLevel">android.<wbr/>info.<wbr/>supported<wbr/>Hardware<wbr/>Level</a>):</p>
+<table>
+<thead>
+<tr>
+<th align="center">Format</th>
+<th align="center">Size</th>
+<th align="center">Hardware Level</th>
+<th align="center">Notes</th>
+</tr>
+</thead>
+<tbody>
+<tr>
+<td align="center">JPEG</td>
+<td align="center"><a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a></td>
+<td align="center">Any</td>
+<td align="center"></td>
+</tr>
+<tr>
+<td align="center">JPEG</td>
+<td align="center">1920x1080 (1080p)</td>
+<td align="center">Any</td>
+<td align="center">if 1080p &lt;= activeArraySize</td>
+</tr>
+<tr>
+<td align="center">JPEG</td>
+<td align="center">1280x720 (720p)</td>
+<td align="center">Any</td>
+<td align="center">if 720p &lt;= activeArraySize</td>
+</tr>
+<tr>
+<td align="center">JPEG</td>
+<td align="center">640x480 (480p)</td>
+<td align="center">Any</td>
+<td align="center">if 480p &lt;= activeArraySize</td>
+</tr>
+<tr>
+<td align="center">JPEG</td>
+<td align="center">320x240 (240p)</td>
+<td align="center">Any</td>
+<td align="center">if 240p &lt;= activeArraySize</td>
+</tr>
+<tr>
+<td align="center">YUV_<wbr/>420_<wbr/>888</td>
+<td align="center">all output sizes available for JPEG</td>
+<td align="center">FULL</td>
+<td align="center"></td>
+</tr>
+<tr>
+<td align="center">YUV_<wbr/>420_<wbr/>888</td>
+<td align="center">all output sizes available for JPEG,<wbr/> up to the maximum video size</td>
+<td align="center">LIMITED</td>
+<td align="center"></td>
+</tr>
+<tr>
+<td align="center">IMPLEMENTATION_<wbr/>DEFINED</td>
+<td align="center">same as YUV_<wbr/>420_<wbr/>888</td>
+<td align="center">Any</td>
+<td align="center"></td>
+</tr>
+</tbody>
+</table>
+<p>Refer to <a href="#static_android.request.availableCapabilities">android.<wbr/>request.<wbr/>available<wbr/>Capabilities</a> for additional
+mandatory stream configurations on a per-capability basis.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>It is recommended (but not mandatory) to also include half/<wbr/>quarter
+of sensor maximum resolution for JPEG formats (regardless of hardware
+level).<wbr/></p>
+<p>(The following is a rewording of the above required table):</p>
+<p>For JPEG format,<wbr/> the sizes may be restricted by below conditions:</p>
+<ul>
+<li>The HAL may choose the aspect ratio of each Jpeg size to be one of well known ones
+(e.<wbr/>g.<wbr/> 4:3,<wbr/> 16:9,<wbr/> 3:2 etc.<wbr/>).<wbr/> If the sensor maximum resolution
+(defined by <a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a>) has an aspect ratio other than these,<wbr/>
+it does not have to be included in the supported JPEG sizes.<wbr/></li>
+<li>Some hardware JPEG encoders may have pixel boundary alignment requirements,<wbr/> such as
+the dimensions being a multiple of 16.<wbr/></li>
+</ul>
+<p>Therefore,<wbr/> the maximum JPEG size may be smaller than sensor maximum resolution.<wbr/>
+However,<wbr/> the largest JPEG size must be as close as possible to the sensor maximum
+resolution given above constraints.<wbr/> It is required that after aspect ratio adjustments,<wbr/>
+additional size reduction due to other issues must be less than 3% in area.<wbr/> For example,<wbr/>
+if the sensor maximum resolution is 3280x2464,<wbr/> if the maximum JPEG size has aspect
+ratio 4:3,<wbr/> the JPEG encoder alignment requirement is 16,<wbr/> the maximum JPEG size will be
+3264x2448.<wbr/></p>
+<p>For FULL capability devices (<code><a href="#static_android.info.supportedHardwareLevel">android.<wbr/>info.<wbr/>supported<wbr/>Hardware<wbr/>Level</a> == FULL</code>),<wbr/>
+the HAL must include all YUV_<wbr/>420_<wbr/>888 sizes that have JPEG sizes listed
+here as output streams.<wbr/></p>
+<p>It must also include each below resolution if it is smaller than or
+equal to the sensor maximum resolution (for both YUV_<wbr/>420_<wbr/>888 and JPEG
+formats),<wbr/> as output streams:</p>
+<ul>
+<li>240p (320 x 240)</li>
+<li>480p (640 x 480)</li>
+<li>720p (1280 x 720)</li>
+<li>1080p (1920 x 1080)</li>
+</ul>
+<p>For LIMITED capability devices
+(<code><a href="#static_android.info.supportedHardwareLevel">android.<wbr/>info.<wbr/>supported<wbr/>Hardware<wbr/>Level</a> == LIMITED</code>),<wbr/>
+the HAL only has to list up to the maximum video size
+supported by the device.<wbr/></p>
+<p>Regardless of hardware level,<wbr/> every output resolution available for
+YUV_<wbr/>420_<wbr/>888 must also be available for IMPLEMENTATION_<wbr/>DEFINED.<wbr/></p>
+<p>This supersedes the following fields,<wbr/> which are now deprecated:</p>
+<ul>
+<li>availableFormats</li>
+<li>available[Processed,<wbr/>Raw,<wbr/>Jpeg]Sizes</li>
+</ul>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.scaler.availableMinFrameDurations">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>scaler.<wbr/>available<wbr/>Min<wbr/>Frame<wbr/>Durations
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int64</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  4 x n
+                </span>
+              <span class="entry_type_visibility"> [hidden as streamConfigurationDuration]</span>
+
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>This lists the minimum frame duration for each
+format/<wbr/>size combination.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+              (format,<wbr/> width,<wbr/> height,<wbr/> ns) x n
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_V1">V1</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>This should correspond to the frame duration when only that
+stream is active,<wbr/> with all processing (typically in android.<wbr/>*.<wbr/>mode)
+set to either OFF or FAST.<wbr/></p>
+<p>When multiple streams are used in a request,<wbr/> the minimum frame
+duration will be max(individual stream min durations).<wbr/></p>
+<p>The minimum frame duration of a stream (of a particular format,<wbr/> size)
+is the same regardless of whether the stream is input or output.<wbr/></p>
+<p>See <a href="#controls_android.sensor.frameDuration">android.<wbr/>sensor.<wbr/>frame<wbr/>Duration</a> and
+<a href="#static_android.scaler.availableStallDurations">android.<wbr/>scaler.<wbr/>available<wbr/>Stall<wbr/>Durations</a> for more details about
+calculating the max frame rate.<wbr/></p>
+<p>(Keep in sync with
+<a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getOutputMinFrameDuration">StreamConfigurationMap#getOutputMinFrameDuration</a>)</p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.scaler.availableStallDurations">
+            <td class="entry_name
+             " rowspan="5">
+              android.<wbr/>scaler.<wbr/>available<wbr/>Stall<wbr/>Durations
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int64</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  4 x n
+                </span>
+              <span class="entry_type_visibility"> [hidden as streamConfigurationDuration]</span>
+
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>This lists the maximum stall duration for each
+output format/<wbr/>size combination.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+              (format,<wbr/> width,<wbr/> height,<wbr/> ns) x n
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_V1">V1</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>A stall duration is how much extra time would get added
+to the normal minimum frame duration for a repeating request
+that has streams with non-zero stall.<wbr/></p>
+<p>For example,<wbr/> consider JPEG captures which have the following
+characteristics:</p>
+<ul>
+<li>JPEG streams act like processed YUV streams in requests for which
+they are not included; in requests in which they are directly
+referenced,<wbr/> they act as JPEG streams.<wbr/> This is because supporting a
+JPEG stream requires the underlying YUV data to always be ready for
+use by a JPEG encoder,<wbr/> but the encoder will only be used (and impact
+frame duration) on requests that actually reference a JPEG stream.<wbr/></li>
+<li>The JPEG processor can run concurrently to the rest of the camera
+pipeline,<wbr/> but cannot process more than 1 capture at a time.<wbr/></li>
+</ul>
+<p>In other words,<wbr/> using a repeating YUV request would result
+in a steady frame rate (let's say it's 30 FPS).<wbr/> If a single
+JPEG request is submitted periodically,<wbr/> the frame rate will stay
+at 30 FPS (as long as we wait for the previous JPEG to return each
+time).<wbr/> If we try to submit a repeating YUV + JPEG request,<wbr/> then
+the frame rate will drop from 30 FPS.<wbr/></p>
+<p>In general,<wbr/> submitting a new request with a non-0 stall time
+stream will <em>not</em> cause a frame rate drop unless there are still
+outstanding buffers for that stream from previous requests.<wbr/></p>
+<p>Submitting a repeating request with streams (call this <code>S</code>)
+is the same as setting the minimum frame duration from
+the normal minimum frame duration corresponding to <code>S</code>,<wbr/> added with
+the maximum stall duration for <code>S</code>.<wbr/></p>
+<p>If interleaving requests with and without a stall duration,<wbr/>
+a request will stall by the maximum of the remaining times
+for each can-stall stream with outstanding buffers.<wbr/></p>
+<p>This means that a stalling request will not have an exposure start
+until the stall has completed.<wbr/></p>
+<p>This should correspond to the stall duration when only that stream is
+active,<wbr/> with all processing (typically in android.<wbr/>*.<wbr/>mode) set to FAST
+or OFF.<wbr/> Setting any of the processing modes to HIGH_<wbr/>QUALITY
+effectively results in an indeterminate stall duration for all
+streams in a request (the regular stall calculation rules are
+ignored).<wbr/></p>
+<p>The following formats may always have a stall duration:</p>
+<ul>
+<li><a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#JPEG">ImageFormat#JPEG</a></li>
+<li><a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#RAW_SENSOR">ImageFormat#RAW_<wbr/>SENSOR</a></li>
+</ul>
+<p>The following formats will never have a stall duration:</p>
+<ul>
+<li><a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#YUV_420_888">Image<wbr/>Format#YUV_<wbr/>420_<wbr/>888</a></li>
+<li><a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#RAW10">ImageFormat#RAW10</a></li>
+</ul>
+<p>All other formats may or may not have an allowed stall duration on
+a per-capability basis; refer to <a href="#static_android.request.availableCapabilities">android.<wbr/>request.<wbr/>available<wbr/>Capabilities</a>
+for more details.<wbr/></p>
+<p>See <a href="#controls_android.sensor.frameDuration">android.<wbr/>sensor.<wbr/>frame<wbr/>Duration</a> for more information about
+calculating the max frame rate (absent stalls).<wbr/></p>
+<p>(Keep up to date with
+<a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getOutputStallDuration">StreamConfigurationMap#getOutputStallDuration</a> )</p>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>If possible,<wbr/> it is recommended that all non-JPEG formats
+(such as RAW16) should not have a stall duration.<wbr/> RAW10,<wbr/> RAW12,<wbr/> RAW_<wbr/>OPAQUE
+and IMPLEMENTATION_<wbr/>DEFINED must not have stall durations.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.scaler.streamConfigurationMap">
+            <td class="entry_name
+             " rowspan="5">
+              android.<wbr/>scaler.<wbr/>stream<wbr/>Configuration<wbr/>Map
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int32</span>
+
+              <span class="entry_type_visibility"> [public as streamConfigurationMap]</span>
+
+              <span class="entry_type_synthetic">[synthetic] </span>
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>The available stream configurations that this
+camera device supports; also includes the minimum frame durations
+and the stall durations for each format/<wbr/>size combination.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>All camera devices will support sensor maximum resolution (defined by
+<a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a>) for the JPEG format.<wbr/></p>
+<p>For a given use case,<wbr/> the actual maximum supported resolution
+may be lower than what is listed here,<wbr/> depending on the destination
+Surface for the image data.<wbr/> For example,<wbr/> for recording video,<wbr/>
+the video encoder chosen may have a maximum size limit (e.<wbr/>g.<wbr/> 1080p)
+smaller than what the camera (e.<wbr/>g.<wbr/> maximum resolution is 3264x2448)
+can provide.<wbr/></p>
+<p>Please reference the documentation for the image data destination to
+check if it limits the maximum size for image data.<wbr/></p>
+<p>The following table describes the minimum required output stream
+configurations based on the hardware level
+(<a href="#static_android.info.supportedHardwareLevel">android.<wbr/>info.<wbr/>supported<wbr/>Hardware<wbr/>Level</a>):</p>
+<table>
+<thead>
+<tr>
+<th align="center">Format</th>
+<th align="center">Size</th>
+<th align="center">Hardware Level</th>
+<th align="center">Notes</th>
+</tr>
+</thead>
+<tbody>
+<tr>
+<td align="center"><a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#JPEG">ImageFormat#JPEG</a></td>
+<td align="center"><a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a> (*1)</td>
+<td align="center">Any</td>
+<td align="center"></td>
+</tr>
+<tr>
+<td align="center"><a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#JPEG">ImageFormat#JPEG</a></td>
+<td align="center">1920x1080 (1080p)</td>
+<td align="center">Any</td>
+<td align="center">if 1080p &lt;= activeArraySize</td>
+</tr>
+<tr>
+<td align="center"><a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#JPEG">ImageFormat#JPEG</a></td>
+<td align="center">1280x720 (720p)</td>
+<td align="center">Any</td>
+<td align="center">if 720p &lt;= activeArraySize</td>
+</tr>
+<tr>
+<td align="center"><a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#JPEG">ImageFormat#JPEG</a></td>
+<td align="center">640x480 (480p)</td>
+<td align="center">Any</td>
+<td align="center">if 480p &lt;= activeArraySize</td>
+</tr>
+<tr>
+<td align="center"><a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#JPEG">ImageFormat#JPEG</a></td>
+<td align="center">320x240 (240p)</td>
+<td align="center">Any</td>
+<td align="center">if 240p &lt;= activeArraySize</td>
+</tr>
+<tr>
+<td align="center"><a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#YUV_420_888">Image<wbr/>Format#YUV_<wbr/>420_<wbr/>888</a></td>
+<td align="center">all output sizes available for JPEG</td>
+<td align="center">FULL</td>
+<td align="center"></td>
+</tr>
+<tr>
+<td align="center"><a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#YUV_420_888">Image<wbr/>Format#YUV_<wbr/>420_<wbr/>888</a></td>
+<td align="center">all output sizes available for JPEG,<wbr/> up to the maximum video size</td>
+<td align="center">LIMITED</td>
+<td align="center"></td>
+</tr>
+<tr>
+<td align="center"><a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#PRIVATE">ImageFormat#PRIVATE</a></td>
+<td align="center">same as YUV_<wbr/>420_<wbr/>888</td>
+<td align="center">Any</td>
+<td align="center"></td>
+</tr>
+</tbody>
+</table>
+<p>Refer to <a href="#static_android.request.availableCapabilities">android.<wbr/>request.<wbr/>available<wbr/>Capabilities</a> and <a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#createCaptureSession">CameraDevice#createCaptureSession</a> for additional mandatory
+stream configurations on a per-capability basis.<wbr/></p>
+<p>*1: For JPEG format,<wbr/> the sizes may be restricted by below conditions:</p>
+<ul>
+<li>The HAL may choose the aspect ratio of each Jpeg size to be one of well known ones
+(e.<wbr/>g.<wbr/> 4:3,<wbr/> 16:9,<wbr/> 3:2 etc.<wbr/>).<wbr/> If the sensor maximum resolution
+(defined by <a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a>) has an aspect ratio other than these,<wbr/>
+it does not have to be included in the supported JPEG sizes.<wbr/></li>
+<li>Some hardware JPEG encoders may have pixel boundary alignment requirements,<wbr/> such as
+the dimensions being a multiple of 16.<wbr/>
+Therefore,<wbr/> the maximum JPEG size may be smaller than sensor maximum resolution.<wbr/>
+However,<wbr/> the largest JPEG size will be as close as possible to the sensor maximum
+resolution given above constraints.<wbr/> It is required that after aspect ratio adjustments,<wbr/>
+additional size reduction due to other issues must be less than 3% in area.<wbr/> For example,<wbr/>
+if the sensor maximum resolution is 3280x2464,<wbr/> if the maximum JPEG size has aspect
+ratio 4:3,<wbr/> and the JPEG encoder alignment requirement is 16,<wbr/> the maximum JPEG size will be
+3264x2448.<wbr/></li>
+</ul>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>Do not set this property directly
+(it is synthetic and will not be available at the HAL layer);
+set the <a href="#static_android.scaler.availableStreamConfigurations">android.<wbr/>scaler.<wbr/>available<wbr/>Stream<wbr/>Configurations</a> instead.<wbr/></p>
+<p>Not all output formats may be supported in a configuration with
+an input stream of a particular format.<wbr/> For more details,<wbr/> see
+<a href="#static_android.scaler.availableInputOutputFormatsMap">android.<wbr/>scaler.<wbr/>available<wbr/>Input<wbr/>Output<wbr/>Formats<wbr/>Map</a>.<wbr/></p>
+<p>It is recommended (but not mandatory) to also include half/<wbr/>quarter
+of sensor maximum resolution for JPEG formats (regardless of hardware
+level).<wbr/></p>
+<p>(The following is a rewording of the above required table):</p>
+<p>The HAL must include sensor maximum resolution (defined by
+<a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a>).<wbr/></p>
+<p>For FULL capability devices (<code><a href="#static_android.info.supportedHardwareLevel">android.<wbr/>info.<wbr/>supported<wbr/>Hardware<wbr/>Level</a> == FULL</code>),<wbr/>
+the HAL must include all YUV_<wbr/>420_<wbr/>888 sizes that have JPEG sizes listed
+here as output streams.<wbr/></p>
+<p>It must also include each below resolution if it is smaller than or
+equal to the sensor maximum resolution (for both YUV_<wbr/>420_<wbr/>888 and JPEG
+formats),<wbr/> as output streams:</p>
+<ul>
+<li>240p (320 x 240)</li>
+<li>480p (640 x 480)</li>
+<li>720p (1280 x 720)</li>
+<li>1080p (1920 x 1080)</li>
+</ul>
+<p>For LIMITED capability devices
+(<code><a href="#static_android.info.supportedHardwareLevel">android.<wbr/>info.<wbr/>supported<wbr/>Hardware<wbr/>Level</a> == LIMITED</code>),<wbr/>
+the HAL only has to list up to the maximum video size
+supported by the device.<wbr/></p>
+<p>Regardless of hardware level,<wbr/> every output resolution available for
+YUV_<wbr/>420_<wbr/>888 must also be available for IMPLEMENTATION_<wbr/>DEFINED.<wbr/></p>
+<p>This supersedes the following fields,<wbr/> which are now deprecated:</p>
+<ul>
+<li>availableFormats</li>
+<li>available[Processed,<wbr/>Raw,<wbr/>Jpeg]Sizes</li>
+</ul>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.scaler.croppingType">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>scaler.<wbr/>cropping<wbr/>Type
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">byte</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">CENTER_ONLY</span>
+                    <span class="entry_type_enum_notes"><p>The camera device only supports centered crop regions.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">FREEFORM</span>
+                    <span class="entry_type_enum_notes"><p>The camera device supports arbitrarily chosen crop regions.<wbr/></p></span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>The crop type that this camera device supports.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>When passing a non-centered crop region (<a href="#controls_android.scaler.cropRegion">android.<wbr/>scaler.<wbr/>crop<wbr/>Region</a>) to a camera
+device that only supports CENTER_<wbr/>ONLY cropping,<wbr/> the camera device will move the
+crop region to the center of the sensor active array (<a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a>)
+and keep the crop region width and height unchanged.<wbr/> The camera device will return the
+final used crop region in metadata result <a href="#controls_android.scaler.cropRegion">android.<wbr/>scaler.<wbr/>crop<wbr/>Region</a>.<wbr/></p>
+<p>Camera devices that support FREEFORM cropping will support any crop region that
+is inside of the active array.<wbr/> The camera device will apply the same crop region and
+return the final used crop region in capture result metadata <a href="#controls_android.scaler.cropRegion">android.<wbr/>scaler.<wbr/>crop<wbr/>Region</a>.<wbr/></p>
+<p>LEGACY capability devices will only support CENTER_<wbr/>ONLY cropping.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+        
+
+      <!-- end of kind -->
+      </tbody>
+      <tr><td colspan="6" class="kind">dynamic</td></tr>
+
+      <thead class="entries_header">
+        <tr>
+          <th class="th_name">Property Name</th>
+          <th class="th_type">Type</th>
+          <th class="th_description">Description</th>
+          <th class="th_units">Units</th>
+          <th class="th_range">Range</th>
+          <th class="th_tags">Tags</th>
+        </tr>
+      </thead>
+
+      <tbody>
+
+        
+
+        
+
+        
+
+        
+
+                
+          <tr class="entry" id="dynamic_android.scaler.cropRegion">
+            <td class="entry_name
+             " rowspan="5">
+              android.<wbr/>scaler.<wbr/>crop<wbr/>Region
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int32</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  4
+                </span>
+              <span class="entry_type_visibility"> [public as rectangle]</span>
+
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>The desired region of the sensor to read out for this capture.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+              Pixel coordinates relative to
+          android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_BC">BC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>This control can be used to implement digital zoom.<wbr/></p>
+<p>The crop region coordinate system is based off
+<a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a>,<wbr/> with <code>(0,<wbr/> 0)</code> being the
+top-left corner of the sensor active array.<wbr/></p>
+<p>Output streams use this rectangle to produce their output,<wbr/>
+cropping to a smaller region if necessary to maintain the
+stream's aspect ratio,<wbr/> then scaling the sensor input to
+match the output's configured resolution.<wbr/></p>
+<p>The crop region is applied after the RAW to other color
+space (e.<wbr/>g.<wbr/> YUV) conversion.<wbr/> Since raw streams
+(e.<wbr/>g.<wbr/> RAW16) don't have the conversion stage,<wbr/> they are not
+croppable.<wbr/> The crop region will be ignored by raw streams.<wbr/></p>
+<p>For non-raw streams,<wbr/> any additional per-stream cropping will
+be done to maximize the final pixel area of the stream.<wbr/></p>
+<p>For example,<wbr/> if the crop region is set to a 4:3 aspect
+ratio,<wbr/> then 4:3 streams will use the exact crop
+region.<wbr/> 16:9 streams will further crop vertically
+(letterbox).<wbr/></p>
+<p>Conversely,<wbr/> if the crop region is set to a 16:9,<wbr/> then 4:3
+outputs will crop horizontally (pillarbox),<wbr/> and 16:9
+streams will match exactly.<wbr/> These additional crops will
+be centered within the crop region.<wbr/></p>
+<p>The width and height of the crop region cannot
+be set to be smaller than
+<code>floor( activeArraySize.<wbr/>width /<wbr/> <a href="#static_android.scaler.availableMaxDigitalZoom">android.<wbr/>scaler.<wbr/>available<wbr/>Max<wbr/>Digital<wbr/>Zoom</a> )</code> and
+<code>floor( activeArraySize.<wbr/>height /<wbr/> <a href="#static_android.scaler.availableMaxDigitalZoom">android.<wbr/>scaler.<wbr/>available<wbr/>Max<wbr/>Digital<wbr/>Zoom</a> )</code>,<wbr/> respectively.<wbr/></p>
+<p>The camera device may adjust the crop region to account
+for rounding and other hardware requirements; the final
+crop region used will be included in the output capture
+result.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>The output streams must maintain square pixels at all
+times,<wbr/> no matter what the relative aspect ratios of the
+crop region and the stream are.<wbr/>  Negative values for
+corner are allowed for raw output if full pixel array is
+larger than active pixel array.<wbr/> Width and height may be
+rounded to nearest larger supportable width,<wbr/> especially
+for raw output,<wbr/> where only a few fixed scales may be
+possible.<wbr/></p>
+<p>For a set of output streams configured,<wbr/> if the sensor output is cropped to a smaller
+size than active array size,<wbr/> the HAL needs to follow the cropping rules below:</p>
+<ul>
+<li>
+<p>The HAL needs to handle the cropRegion as if the sensor crop size is the effective active
+array size.<wbr/> More specifically,<wbr/> the HAL must transform the request cropRegion from
+<a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a> to the sensor cropped pixel area size in this way:</p>
+<ol>
+<li>Translate the requested cropRegion w.<wbr/>r.<wbr/>t.,<wbr/> the left top corner of the sensor
+cropped pixel area by (tx,<wbr/> ty),<wbr/>
+where <code>ty = sensorCrop.<wbr/>top * (sensorCrop.<wbr/>height /<wbr/> activeArraySize.<wbr/>height)</code>
+and <code>tx = sensorCrop.<wbr/>left * (sensorCrop.<wbr/>width /<wbr/> activeArraySize.<wbr/>width)</code>.<wbr/> The
+(sensorCrop.<wbr/>top,<wbr/> sensorCrop.<wbr/>left) is the coordinate based off the
+<a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a>.<wbr/></li>
+<li>Scale the width and height of requested cropRegion with scaling factor of
+sensor<wbr/>Crop.<wbr/>width/<wbr/>active<wbr/>Array<wbr/>Size.<wbr/>width and sensor<wbr/>Crop.<wbr/>height/<wbr/>active<wbr/>Array<wbr/>Size.<wbr/>height
+respectively.<wbr/>
+Once this new cropRegion is calculated,<wbr/> the HAL must use this region to crop the image
+with regard to the sensor crop size (effective active array size).<wbr/> The HAL still needs to
+follow the general cropping rule for this new cropRegion and effective active
+array size.<wbr/></li>
+</ol>
+</li>
+<li>
+<p>The HAL must report the cropRegion with regard to <a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a>.<wbr/>
+The HAL needs to convert the new cropRegion generated above w.<wbr/>r.<wbr/>t.,<wbr/> the full active array size.<wbr/>
+The reported cropRegion may be slightly different from the requested cropRegion since
+the HAL may adjust the crop region to account for rounding,<wbr/> conversion error,<wbr/> or other
+hardware limitations.<wbr/></p>
+</li>
+</ul>
+<p>HAL2.<wbr/>x uses only (x,<wbr/> y,<wbr/> width)</p>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+        
+
+      <!-- end of kind -->
+      </tbody>
+
+  <!-- end of section -->
+  <tr><td colspan="6" id="section_sensor" class="section">sensor</td></tr>
+
+
+      <tr><td colspan="6" class="kind">controls</td></tr>
+
+      <thead class="entries_header">
+        <tr>
+          <th class="th_name">Property Name</th>
+          <th class="th_type">Type</th>
+          <th class="th_description">Description</th>
+          <th class="th_units">Units</th>
+          <th class="th_range">Range</th>
+          <th class="th_tags">Tags</th>
+        </tr>
+      </thead>
+
+      <tbody>
+
+        
+
+        
+
+        
+
+        
+
+                
+          <tr class="entry" id="controls_android.sensor.exposureTime">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>sensor.<wbr/>exposure<wbr/>Time
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int64</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+              <span class="entry_type_hwlevel">[full] </span>
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Duration each pixel is exposed to
+light.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+              Nanoseconds
+            </td>
+
+            <td class="entry_range">
+              <p><a href="#static_android.sensor.info.exposureTimeRange">android.<wbr/>sensor.<wbr/>info.<wbr/>exposure<wbr/>Time<wbr/>Range</a></p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_V1">V1</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>If the sensor can't expose this exact duration,<wbr/> it will shorten the
+duration exposed to the nearest possible value (rather than expose longer).<wbr/>
+The final exposure time used will be available in the output capture result.<wbr/></p>
+<p>This control is only effective if <a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a> or <a href="#controls_android.control.mode">android.<wbr/>control.<wbr/>mode</a> is set to
+OFF; otherwise the auto-exposure algorithm will override this value.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="controls_android.sensor.frameDuration">
+            <td class="entry_name
+             " rowspan="5">
+              android.<wbr/>sensor.<wbr/>frame<wbr/>Duration
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int64</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+              <span class="entry_type_hwlevel">[full] </span>
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Duration from start of frame exposure to
+start of next frame exposure.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+              Nanoseconds
+            </td>
+
+            <td class="entry_range">
+              <p>See <a href="#static_android.sensor.info.maxFrameDuration">android.<wbr/>sensor.<wbr/>info.<wbr/>max<wbr/>Frame<wbr/>Duration</a>,<wbr/>
+<a href="#static_android.scaler.streamConfigurationMap">android.<wbr/>scaler.<wbr/>stream<wbr/>Configuration<wbr/>Map</a>.<wbr/> The duration
+is capped to <code>max(duration,<wbr/> exposureTime + overhead)</code>.<wbr/></p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_V1">V1</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>The maximum frame rate that can be supported by a camera subsystem is
+a function of many factors:</p>
+<ul>
+<li>Requested resolutions of output image streams</li>
+<li>Availability of binning /<wbr/> skipping modes on the imager</li>
+<li>The bandwidth of the imager interface</li>
+<li>The bandwidth of the various ISP processing blocks</li>
+</ul>
+<p>Since these factors can vary greatly between different ISPs and
+sensors,<wbr/> the camera abstraction tries to represent the bandwidth
+restrictions with as simple a model as possible.<wbr/></p>
+<p>The model presented has the following characteristics:</p>
+<ul>
+<li>The image sensor is always configured to output the smallest
+resolution possible given the application's requested output stream
+sizes.<wbr/>  The smallest resolution is defined as being at least as large
+as the largest requested output stream size; the camera pipeline must
+never digitally upsample sensor data when the crop region covers the
+whole sensor.<wbr/> In general,<wbr/> this means that if only small output stream
+resolutions are configured,<wbr/> the sensor can provide a higher frame
+rate.<wbr/></li>
+<li>Since any request may use any or all the currently configured
+output streams,<wbr/> the sensor and ISP must be configured to support
+scaling a single capture to all the streams at the same time.<wbr/>  This
+means the camera pipeline must be ready to produce the largest
+requested output size without any delay.<wbr/>  Therefore,<wbr/> the overall
+frame rate of a given configured stream set is governed only by the
+largest requested stream resolution.<wbr/></li>
+<li>Using more than one output stream in a request does not affect the
+frame duration.<wbr/></li>
+<li>Certain format-streams may need to do additional background processing
+before data is consumed/<wbr/>produced by that stream.<wbr/> These processors
+can run concurrently to the rest of the camera pipeline,<wbr/> but
+cannot process more than 1 capture at a time.<wbr/></li>
+</ul>
+<p>The necessary information for the application,<wbr/> given the model above,<wbr/>
+is provided via the <a href="#static_android.scaler.streamConfigurationMap">android.<wbr/>scaler.<wbr/>stream<wbr/>Configuration<wbr/>Map</a> field using
+<a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getOutputMinFrameDuration">StreamConfigurationMap#getOutputMinFrameDuration</a>.<wbr/>
+These are used to determine the maximum frame rate /<wbr/> minimum frame
+duration that is possible for a given stream configuration.<wbr/></p>
+<p>Specifically,<wbr/> the application can use the following rules to
+determine the minimum frame duration it can request from the camera
+device:</p>
+<ol>
+<li>Let the set of currently configured input/<wbr/>output streams
+be called <code>S</code>.<wbr/></li>
+<li>Find the minimum frame durations for each stream in <code>S</code>,<wbr/> by looking
+it up in <a href="#static_android.scaler.streamConfigurationMap">android.<wbr/>scaler.<wbr/>stream<wbr/>Configuration<wbr/>Map</a> using <a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getOutputMinFrameDuration">StreamConfigurationMap#getOutputMinFrameDuration</a>
+(with its respective size/<wbr/>format).<wbr/> Let this set of frame durations be
+called <code>F</code>.<wbr/></li>
+<li>For any given request <code>R</code>,<wbr/> the minimum frame duration allowed
+for <code>R</code> is the maximum out of all values in <code>F</code>.<wbr/> Let the streams
+used in <code>R</code> be called <code>S_<wbr/>r</code>.<wbr/></li>
+</ol>
+<p>If none of the streams in <code>S_<wbr/>r</code> have a stall time (listed in <a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getOutputStallDuration">StreamConfigurationMap#getOutputStallDuration</a>
+using its respective size/<wbr/>format),<wbr/> then the frame duration in <code>F</code>
+determines the steady state frame rate that the application will get
+if it uses <code>R</code> as a repeating request.<wbr/> Let this special kind of
+request be called <code>Rsimple</code>.<wbr/></p>
+<p>A repeating request <code>Rsimple</code> can be <em>occasionally</em> interleaved
+by a single capture of a new request <code>Rstall</code> (which has at least
+one in-use stream with a non-0 stall time) and if <code>Rstall</code> has the
+same minimum frame duration this will not cause a frame rate loss
+if all buffers from the previous <code>Rstall</code> have already been
+delivered.<wbr/></p>
+<p>For more details about stalling,<wbr/> see
+<a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getOutputStallDuration">StreamConfigurationMap#getOutputStallDuration</a>.<wbr/></p>
+<p>This control is only effective if <a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a> or <a href="#controls_android.control.mode">android.<wbr/>control.<wbr/>mode</a> is set to
+OFF; otherwise the auto-exposure algorithm will override this value.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>For more details about stalling,<wbr/> see
+<a href="#static_android.scaler.availableStallDurations">android.<wbr/>scaler.<wbr/>available<wbr/>Stall<wbr/>Durations</a>.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="controls_android.sensor.sensitivity">
+            <td class="entry_name
+             " rowspan="5">
+              android.<wbr/>sensor.<wbr/>sensitivity
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int32</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+              <span class="entry_type_hwlevel">[full] </span>
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>The amount of gain applied to sensor data
+before processing.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+              ISO arithmetic units
+            </td>
+
+            <td class="entry_range">
+              <p><a href="#static_android.sensor.info.sensitivityRange">android.<wbr/>sensor.<wbr/>info.<wbr/>sensitivity<wbr/>Range</a></p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_V1">V1</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>The sensitivity is the standard ISO sensitivity value,<wbr/>
+as defined in ISO 12232:2006.<wbr/></p>
+<p>The sensitivity must be within <a href="#static_android.sensor.info.sensitivityRange">android.<wbr/>sensor.<wbr/>info.<wbr/>sensitivity<wbr/>Range</a>,<wbr/> and
+if it is less than <a href="#static_android.sensor.maxAnalogSensitivity">android.<wbr/>sensor.<wbr/>max<wbr/>Analog<wbr/>Sensitivity</a>,<wbr/> the camera device
+is guaranteed to use only analog amplification for applying the gain.<wbr/></p>
+<p>If the camera device cannot apply the exact sensitivity
+requested,<wbr/> it will reduce the gain to the nearest supported
+value.<wbr/> The final sensitivity used will be available in the
+output capture result.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>ISO 12232:2006 REI method is acceptable.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="controls_android.sensor.testPatternData">
+            <td class="entry_name
+             " rowspan="5">
+              android.<wbr/>sensor.<wbr/>test<wbr/>Pattern<wbr/>Data
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int32</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  4
+                </span>
+              <span class="entry_type_visibility"> [public]</span>
+
+
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>A pixel <code>[R,<wbr/> G_<wbr/>even,<wbr/> G_<wbr/>odd,<wbr/> B]</code> that supplies the test pattern
+when <a href="#controls_android.sensor.testPatternMode">android.<wbr/>sensor.<wbr/>test<wbr/>Pattern<wbr/>Mode</a> is SOLID_<wbr/>COLOR.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>Each color channel is treated as an unsigned 32-bit integer.<wbr/>
+The camera device then uses the most significant X bits
+that correspond to how many bits are in its Bayer raw sensor
+output.<wbr/></p>
+<p>For example,<wbr/> a sensor with RAW10 Bayer output would use the
+10 most significant bits from each color channel.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="controls_android.sensor.testPatternMode">
+            <td class="entry_name
+             " rowspan="5">
+              android.<wbr/>sensor.<wbr/>test<wbr/>Pattern<wbr/>Mode
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">int32</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">OFF</span>
+                    <span class="entry_type_enum_notes"><p>No test pattern mode is used,<wbr/> and the camera
+device returns captures from the image sensor.<wbr/></p>
+<p>This is the default if the key is not set.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">SOLID_COLOR</span>
+                    <span class="entry_type_enum_notes"><p>Each pixel in <code>[R,<wbr/> G_<wbr/>even,<wbr/> G_<wbr/>odd,<wbr/> B]</code> is replaced by its
+respective color channel provided in
+<a href="#controls_android.sensor.testPatternData">android.<wbr/>sensor.<wbr/>test<wbr/>Pattern<wbr/>Data</a>.<wbr/></p>
+<p>For example:</p>
+<pre><code>android.<wbr/>testPatternData = [0,<wbr/> 0xFFFFFFFF,<wbr/> 0xFFFFFFFF,<wbr/> 0]
+</code></pre>
+<p>All green pixels are 100% green.<wbr/> All red/<wbr/>blue pixels are black.<wbr/></p>
+<pre><code>android.<wbr/>testPatternData = [0xFFFFFFFF,<wbr/> 0,<wbr/> 0xFFFFFFFF,<wbr/> 0]
+</code></pre>
+<p>All red pixels are 100% red.<wbr/> Only the odd green pixels
+are 100% green.<wbr/> All blue pixels are 100% black.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">COLOR_BARS</span>
+                    <span class="entry_type_enum_notes"><p>All pixel data is replaced with an 8-bar color pattern.<wbr/></p>
+<p>The vertical bars (left-to-right) are as follows:</p>
+<ul>
+<li>100% white</li>
+<li>yellow</li>
+<li>cyan</li>
+<li>green</li>
+<li>magenta</li>
+<li>red</li>
+<li>blue</li>
+<li>black</li>
+</ul>
+<p>In general the image would look like the following:</p>
+<pre><code>W Y C G M R B K
+W Y C G M R B K
+W Y C G M R B K
+W Y C G M R B K
+W Y C G M R B K
+.<wbr/> .<wbr/> .<wbr/> .<wbr/> .<wbr/> .<wbr/> .<wbr/> .<wbr/>
+.<wbr/> .<wbr/> .<wbr/> .<wbr/> .<wbr/> .<wbr/> .<wbr/> .<wbr/>
+.<wbr/> .<wbr/> .<wbr/> .<wbr/> .<wbr/> .<wbr/> .<wbr/> .<wbr/>
+
+(B = Blue,<wbr/> K = Black)
+</code></pre>
+<p>Each bar should take up 1/<wbr/>8 of the sensor pixel array width.<wbr/>
+When this is not possible,<wbr/> the bar size should be rounded
+down to the nearest integer and the pattern can repeat
+on the right side.<wbr/></p>
+<p>Each bar's height must always take up the full sensor
+pixel array height.<wbr/></p>
+<p>Each pixel in this test pattern must be set to either
+0% intensity or 100% intensity.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">COLOR_BARS_FADE_TO_GRAY</span>
+                    <span class="entry_type_enum_notes"><p>The test pattern is similar to COLOR_<wbr/>BARS,<wbr/> except that
+each bar should start at its specified color at the top,<wbr/>
+and fade to gray at the bottom.<wbr/></p>
+<p>Furthermore each bar is further subdivided into a left and
+right half.<wbr/> The left half should have a smooth gradient,<wbr/>
+and the right half should have a quantized gradient.<wbr/></p>
+<p>In particular,<wbr/> the right half should consist of blocks of the
+same color for 1/<wbr/>16th active sensor pixel array width.<wbr/></p>
+<p>The least significant bits in the quantized gradient should
+be copied from the most significant bits of the smooth gradient.<wbr/></p>
+<p>The height of each bar should always be a multiple of 128.<wbr/>
+When this is not the case,<wbr/> the pattern should repeat at the bottom
+of the image.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">PN9</span>
+                    <span class="entry_type_enum_notes"><p>All pixel data is replaced by a pseudo-random sequence
+generated from a PN9 512-bit sequence (typically implemented
+in hardware with a linear feedback shift register).<wbr/></p>
+<p>The generator should be reset at the beginning of each frame,<wbr/>
+and thus each subsequent raw frame with this test pattern should
+be exactly the same as the last.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">CUSTOM1</span>
+                    <span class="entry_type_enum_value">256</span>
+                    <span class="entry_type_enum_notes"><p>The first custom test pattern.<wbr/> All custom patterns that are
+available only on this camera device are at least this numeric
+value.<wbr/></p>
+<p>All of the custom test patterns will be static
+(that is the raw image must not vary from frame to frame).<wbr/></p></span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>When enabled,<wbr/> the sensor sends a test pattern instead of
+doing a real exposure from the camera.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p><a href="#static_android.sensor.availableTestPatternModes">android.<wbr/>sensor.<wbr/>available<wbr/>Test<wbr/>Pattern<wbr/>Modes</a></p>
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>When a test pattern is enabled,<wbr/> all manual sensor controls specified
+by android.<wbr/>sensor.<wbr/>* will be ignored.<wbr/> All other controls should
+work as normal.<wbr/></p>
+<p>For example,<wbr/> if manual flash is enabled,<wbr/> flash firing should still
+occur (and that the test pattern remain unmodified,<wbr/> since the flash
+would not actually affect it).<wbr/></p>
+<p>Defaults to OFF.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>All test patterns are specified in the Bayer domain.<wbr/></p>
+<p>The HAL may choose to substitute test patterns from the sensor
+with test patterns from on-device memory.<wbr/> In that case,<wbr/> it should be
+indistinguishable to the ISP whether the data came from the
+sensor interconnect bus (such as CSI2) or memory.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+        
+
+      <!-- end of kind -->
+      </tbody>
+      <tr><td colspan="6" class="kind">static</td></tr>
+
+      <thead class="entries_header">
+        <tr>
+          <th class="th_name">Property Name</th>
+          <th class="th_type">Type</th>
+          <th class="th_description">Description</th>
+          <th class="th_units">Units</th>
+          <th class="th_range">Range</th>
+          <th class="th_tags">Tags</th>
+        </tr>
+      </thead>
+
+      <tbody>
+
+        
+
+        
+
+        
+
+        
+                
+            
+
+                
+          <tr class="entry" id="static_android.sensor.info.activeArraySize">
+            <td class="entry_name
+             " rowspan="5">
+              android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int32</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  4
+                </span>
+              <span class="entry_type_visibility"> [public as rectangle]</span>
+
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+                <div class="entry_type_notes">Four ints defining the active pixel rectangle</div>
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>The area of the image sensor which corresponds to active pixels after any geometric
+distortion correction has been applied.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+              Pixel coordinates on the image sensor
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_RAW">RAW</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>This is the rectangle representing the size of the active region of the sensor (i.<wbr/>e.<wbr/>
+the region that actually receives light from the scene) after any geometric correction
+has been applied,<wbr/> and should be treated as the maximum size in pixels of any of the
+image output formats aside from the raw formats.<wbr/></p>
+<p>This rectangle is defined relative to the full pixel array; (0,<wbr/>0) is the top-left of
+the full pixel array,<wbr/> and the size of the full pixel array is given by
+<a href="#static_android.sensor.info.pixelArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>pixel<wbr/>Array<wbr/>Size</a>.<wbr/></p>
+<p>The coordinate system for most other keys that list pixel coordinates,<wbr/> including
+<a href="#controls_android.scaler.cropRegion">android.<wbr/>scaler.<wbr/>crop<wbr/>Region</a>,<wbr/> is defined relative to the active array rectangle given in
+this field,<wbr/> with <code>(0,<wbr/> 0)</code> being the top-left of this rectangle.<wbr/></p>
+<p>The active array may be smaller than the full pixel array,<wbr/> since the full array may
+include black calibration pixels or other inactive regions,<wbr/> and geometric correction
+resulting in scaling or cropping may have been applied.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>This array contains <code>(xmin,<wbr/> ymin,<wbr/> width,<wbr/> height)</code>.<wbr/> The <code>(xmin,<wbr/> ymin)</code> must be
+&gt;= <code>(0,<wbr/>0)</code>.<wbr/>
+The <code>(width,<wbr/> height)</code> must be &lt;= <code><a href="#static_android.sensor.info.pixelArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>pixel<wbr/>Array<wbr/>Size</a></code>.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.sensor.info.sensitivityRange">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>sensor.<wbr/>info.<wbr/>sensitivity<wbr/>Range
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int32</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  2
+                </span>
+              <span class="entry_type_visibility"> [public as rangeInt]</span>
+
+
+              <span class="entry_type_hwlevel">[full] </span>
+
+
+                <div class="entry_type_notes">Range of supported sensitivities</div>
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Range of sensitivities for <a href="#controls_android.sensor.sensitivity">android.<wbr/>sensor.<wbr/>sensitivity</a> supported by this
+camera device.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p>Min &lt;= 100,<wbr/> Max &gt;= 800</p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_BC">BC</a></li>
+                  <li><a href="#tag_V1">V1</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>The values are the standard ISO sensitivity values,<wbr/>
+as defined in ISO 12232:2006.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.sensor.info.colorFilterArrangement">
+            <td class="entry_name
+             " rowspan="1">
+              android.<wbr/>sensor.<wbr/>info.<wbr/>color<wbr/>Filter<wbr/>Arrangement
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">byte</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+              <span class="entry_type_hwlevel">[full] </span>
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">RGGB</span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">GRBG</span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">GBRG</span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">BGGR</span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">RGB</span>
+                    <span class="entry_type_enum_notes"><p>Sensor is not Bayer; output has 3 16-bit
+values for each pixel,<wbr/> instead of just 1 16-bit value
+per pixel.<wbr/></p></span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>The arrangement of color filters on sensor;
+represents the colors in the top-left 2x2 section of
+the sensor,<wbr/> in reading order.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_RAW">RAW</a></li>
+              </ul>
+            </td>
+
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.sensor.info.exposureTimeRange">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>sensor.<wbr/>info.<wbr/>exposure<wbr/>Time<wbr/>Range
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int64</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  2
+                </span>
+              <span class="entry_type_visibility"> [public as rangeLong]</span>
+
+
+              <span class="entry_type_hwlevel">[full] </span>
+
+
+                <div class="entry_type_notes">nanoseconds</div>
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>The range of image exposure times for <a href="#controls_android.sensor.exposureTime">android.<wbr/>sensor.<wbr/>exposure<wbr/>Time</a> supported
+by this camera device.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+              Nanoseconds
+            </td>
+
+            <td class="entry_range">
+              <p>The minimum exposure time will be less than 100 us.<wbr/> For FULL
+capability devices (<a href="#static_android.info.supportedHardwareLevel">android.<wbr/>info.<wbr/>supported<wbr/>Hardware<wbr/>Level</a> == FULL),<wbr/>
+the maximum exposure time will be greater than 100ms.<wbr/></p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_V1">V1</a></li>
+              </ul>
+            </td>
+
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>For FULL capability devices (<a href="#static_android.info.supportedHardwareLevel">android.<wbr/>info.<wbr/>supported<wbr/>Hardware<wbr/>Level</a> == FULL),<wbr/>
+the maximum of the range SHOULD be at least 1 second (1e9),<wbr/> MUST be at least
+100ms.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.sensor.info.maxFrameDuration">
+            <td class="entry_name
+             " rowspan="5">
+              android.<wbr/>sensor.<wbr/>info.<wbr/>max<wbr/>Frame<wbr/>Duration
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int64</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+              <span class="entry_type_hwlevel">[full] </span>
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>The maximum possible frame duration (minimum frame rate) for
+<a href="#controls_android.sensor.frameDuration">android.<wbr/>sensor.<wbr/>frame<wbr/>Duration</a> that is supported by this camera device.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+              Nanoseconds
+            </td>
+
+            <td class="entry_range">
+              <p>For FULL capability devices
+(<a href="#static_android.info.supportedHardwareLevel">android.<wbr/>info.<wbr/>supported<wbr/>Hardware<wbr/>Level</a> == FULL),<wbr/> at least 100ms.<wbr/></p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_V1">V1</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>Attempting to use frame durations beyond the maximum will result in the frame
+duration being clipped to the maximum.<wbr/> See that control for a full definition of frame
+durations.<wbr/></p>
+<p>Refer to <a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getOutputMinFrameDuration">StreamConfigurationMap#getOutputMinFrameDuration</a>
+for the minimum frame duration values.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>For FULL capability devices (<a href="#static_android.info.supportedHardwareLevel">android.<wbr/>info.<wbr/>supported<wbr/>Hardware<wbr/>Level</a> == FULL),<wbr/>
+the maximum of the range SHOULD be at least
+1 second (1e9),<wbr/> MUST be at least 100ms (100e6).<wbr/></p>
+<p><a href="#static_android.sensor.info.maxFrameDuration">android.<wbr/>sensor.<wbr/>info.<wbr/>max<wbr/>Frame<wbr/>Duration</a> must be greater than or
+equal to the <a href="#static_android.sensor.info.exposureTimeRange">android.<wbr/>sensor.<wbr/>info.<wbr/>exposure<wbr/>Time<wbr/>Range</a> max
+value (since exposure time overrides frame duration).<wbr/></p>
+<p>Available minimum frame durations for JPEG must be no greater
+than that of the YUV_<wbr/>420_<wbr/>888/<wbr/>IMPLEMENTATION_<wbr/>DEFINED
+minimum frame durations (for that respective size).<wbr/></p>
+<p>Since JPEG processing is considered offline and can take longer than
+a single uncompressed capture,<wbr/> refer to
+<a href="#static_android.scaler.availableStallDurations">android.<wbr/>scaler.<wbr/>available<wbr/>Stall<wbr/>Durations</a>
+for details about encoding this scenario.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.sensor.info.physicalSize">
+            <td class="entry_name
+             " rowspan="5">
+              android.<wbr/>sensor.<wbr/>info.<wbr/>physical<wbr/>Size
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">float</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  2
+                </span>
+              <span class="entry_type_visibility"> [public as sizeF]</span>
+
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+                <div class="entry_type_notes">width x height</div>
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>The physical dimensions of the full pixel
+array.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+              Millimeters
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_V1">V1</a></li>
+                  <li><a href="#tag_BC">BC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>This is the physical size of the sensor pixel
+array defined by <a href="#static_android.sensor.info.pixelArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>pixel<wbr/>Array<wbr/>Size</a>.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>Needed for FOV calculation for old API</p>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.sensor.info.pixelArraySize">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>sensor.<wbr/>info.<wbr/>pixel<wbr/>Array<wbr/>Size
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int32</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  2
+                </span>
+              <span class="entry_type_visibility"> [public as size]</span>
+
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Dimensions of the full pixel array,<wbr/> possibly
+including black calibration pixels.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+              Pixels
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_RAW">RAW</a></li>
+                  <li><a href="#tag_BC">BC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>The pixel count of the full pixel array of the image sensor,<wbr/> which covers
+<a href="#static_android.sensor.info.physicalSize">android.<wbr/>sensor.<wbr/>info.<wbr/>physical<wbr/>Size</a> area.<wbr/>  This represents the full pixel dimensions of
+the raw buffers produced by this sensor.<wbr/></p>
+<p>If a camera device supports raw sensor formats,<wbr/> either this or
+<a href="#static_android.sensor.info.preCorrectionActiveArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>pre<wbr/>Correction<wbr/>Active<wbr/>Array<wbr/>Size</a> is the maximum dimensions for the raw
+output formats listed in <a href="#static_android.scaler.streamConfigurationMap">android.<wbr/>scaler.<wbr/>stream<wbr/>Configuration<wbr/>Map</a> (this depends on
+whether or not the image sensor returns buffers containing pixels that are not
+part of the active array region for blacklevel calibration or other purposes).<wbr/></p>
+<p>Some parts of the full pixel array may not receive light from the scene,<wbr/>
+or be otherwise inactive.<wbr/>  The <a href="#static_android.sensor.info.preCorrectionActiveArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>pre<wbr/>Correction<wbr/>Active<wbr/>Array<wbr/>Size</a> key
+defines the rectangle of active pixels that will be included in processed image
+formats.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.sensor.info.whiteLevel">
+            <td class="entry_name
+             " rowspan="5">
+              android.<wbr/>sensor.<wbr/>info.<wbr/>white<wbr/>Level
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int32</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Maximum raw value output by sensor.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p>&gt; 255 (8-bit output)</p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_RAW">RAW</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>This specifies the fully-saturated encoding level for the raw
+sample values from the sensor.<wbr/>  This is typically caused by the
+sensor becoming highly non-linear or clipping.<wbr/> The minimum for
+each channel is specified by the offset in the
+<a href="#static_android.sensor.blackLevelPattern">android.<wbr/>sensor.<wbr/>black<wbr/>Level<wbr/>Pattern</a> key.<wbr/></p>
+<p>The white level is typically determined either by sensor bit depth
+(8-14 bits is expected),<wbr/> or by the point where the sensor response
+becomes too non-linear to be useful.<wbr/>  The default value for this is
+the maximum representable value for a 16-bit raw sample (2^16 - 1).<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>The full bit depth of the sensor must be available in the raw data,<wbr/>
+so the value for linear sensors should not be significantly lower
+than the maximum raw value supported,<wbr/> i.<wbr/>e.<wbr/> 2^(sensor bits per pixel).<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.sensor.info.timestampSource">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>sensor.<wbr/>info.<wbr/>timestamp<wbr/>Source
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">byte</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">UNKNOWN</span>
+                    <span class="entry_type_enum_notes"><p>Timestamps from <a href="#dynamic_android.sensor.timestamp">android.<wbr/>sensor.<wbr/>timestamp</a> are in nanoseconds and monotonic,<wbr/>
+but can not be compared to timestamps from other subsystems
+(e.<wbr/>g.<wbr/> accelerometer,<wbr/> gyro etc.<wbr/>),<wbr/> or other instances of the same or different
+camera devices in the same system.<wbr/> Timestamps between streams and results for
+a single camera instance are comparable,<wbr/> and the timestamps for all buffers
+and the result metadata generated by a single capture are identical.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">REALTIME</span>
+                    <span class="entry_type_enum_notes"><p>Timestamps from <a href="#dynamic_android.sensor.timestamp">android.<wbr/>sensor.<wbr/>timestamp</a> are in the same timebase as
+<a href="https://developer.android.com/reference/android/os/SystemClock.html#elapsedRealtimeNanos">SystemClock#elapsedRealtimeNanos</a>,<wbr/>
+and they can be compared to other timestamps using that base.<wbr/></p></span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>The time base source for sensor capture start timestamps.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_V1">V1</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>The timestamps provided for captures are always in nanoseconds and monotonic,<wbr/> but
+may not be based on a time source that can be compared to other system time sources.<wbr/></p>
+<p>This characteristic defines the source for the timestamps,<wbr/> and therefore whether they
+can be compared against other system time sources/<wbr/>timestamps.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.sensor.info.lensShadingApplied">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>sensor.<wbr/>info.<wbr/>lens<wbr/>Shading<wbr/>Applied
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">byte</span>
+
+              <span class="entry_type_visibility"> [public as boolean]</span>
+
+
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">FALSE</span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">TRUE</span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Whether the RAW images output from this camera device are subject to
+lens shading correction.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>If TRUE,<wbr/> all images produced by the camera device in the RAW image formats will
+have lens shading correction already applied to it.<wbr/> If FALSE,<wbr/> the images will
+not be adjusted for lens shading correction.<wbr/>
+See <a href="#static_android.request.maxNumOutputRaw">android.<wbr/>request.<wbr/>max<wbr/>Num<wbr/>Output<wbr/>Raw</a> for a list of RAW image formats.<wbr/></p>
+<p>This key will be <code>null</code> for all devices that do not report this information.<wbr/>
+Devices with RAW capability will always report this information in this key.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.sensor.info.preCorrectionActiveArraySize">
+            <td class="entry_name
+             " rowspan="5">
+              android.<wbr/>sensor.<wbr/>info.<wbr/>pre<wbr/>Correction<wbr/>Active<wbr/>Array<wbr/>Size
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int32</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  4
+                </span>
+              <span class="entry_type_visibility"> [public as rectangle]</span>
+
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+                <div class="entry_type_notes">Four ints defining the active pixel rectangle</div>
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>The area of the image sensor which corresponds to active pixels prior to the
+application of any geometric distortion correction.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+              Pixel coordinates on the image sensor
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_RAW">RAW</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>This is the rectangle representing the size of the active region of the sensor (i.<wbr/>e.<wbr/>
+the region that actually receives light from the scene) before any geometric correction
+has been applied,<wbr/> and should be treated as the active region rectangle for any of the
+raw formats.<wbr/>  All metadata associated with raw processing (e.<wbr/>g.<wbr/> the lens shading
+correction map,<wbr/> and radial distortion fields) treats the top,<wbr/> left of this rectangle as
+the origin,<wbr/> (0,<wbr/>0).<wbr/></p>
+<p>The size of this region determines the maximum field of view and the maximum number of
+pixels that an image from this sensor can contain,<wbr/> prior to the application of
+geometric distortion correction.<wbr/> The effective maximum pixel dimensions of a
+post-distortion-corrected image is given by the <a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a>
+field,<wbr/> and the effective maximum field of view for a post-distortion-corrected image
+can be calculated by applying the geometric distortion correction fields to this
+rectangle,<wbr/> and cropping to the rectangle given in <a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a>.<wbr/></p>
+<p>E.<wbr/>g.<wbr/> to calculate position of a pixel,<wbr/> (x,<wbr/>y),<wbr/> in a processed YUV output image with the
+dimensions in <a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a> given the position of a pixel,<wbr/>
+(x',<wbr/> y'),<wbr/> in the raw pixel array with dimensions give in
+<a href="#static_android.sensor.info.pixelArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>pixel<wbr/>Array<wbr/>Size</a>:</p>
+<ol>
+<li>Choose a pixel (x',<wbr/> y') within the active array region of the raw buffer given in
+<a href="#static_android.sensor.info.preCorrectionActiveArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>pre<wbr/>Correction<wbr/>Active<wbr/>Array<wbr/>Size</a>,<wbr/> otherwise this pixel is considered
+to be outside of the FOV,<wbr/> and will not be shown in the processed output image.<wbr/></li>
+<li>Apply geometric distortion correction to get the post-distortion pixel coordinate,<wbr/>
+(x_<wbr/>i,<wbr/> y_<wbr/>i).<wbr/> When applying geometric correction metadata,<wbr/> note that metadata for raw
+buffers is defined relative to the top,<wbr/> left of the
+<a href="#static_android.sensor.info.preCorrectionActiveArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>pre<wbr/>Correction<wbr/>Active<wbr/>Array<wbr/>Size</a> rectangle.<wbr/></li>
+<li>If the resulting corrected pixel coordinate is within the region given in
+<a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a>,<wbr/> then the position of this pixel in the
+processed output image buffer is <code>(x_<wbr/>i - activeArray.<wbr/>left,<wbr/> y_<wbr/>i - activeArray.<wbr/>top)</code>,<wbr/>
+when the top,<wbr/> left coordinate of that buffer is treated as (0,<wbr/> 0).<wbr/></li>
+</ol>
+<p>Thus,<wbr/> for pixel x',<wbr/>y' = (25,<wbr/> 25) on a sensor where <a href="#static_android.sensor.info.pixelArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>pixel<wbr/>Array<wbr/>Size</a>
+is (100,<wbr/>100),<wbr/> <a href="#static_android.sensor.info.preCorrectionActiveArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>pre<wbr/>Correction<wbr/>Active<wbr/>Array<wbr/>Size</a> is (10,<wbr/> 10,<wbr/> 100,<wbr/> 100),<wbr/>
+<a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a> is (20,<wbr/> 20,<wbr/> 80,<wbr/> 80),<wbr/> and the geometric distortion
+correction doesn't change the pixel coordinate,<wbr/> the resulting pixel selected in
+pixel coordinates would be x,<wbr/>y = (25,<wbr/> 25) relative to the top,<wbr/>left of the raw buffer
+with dimensions given in <a href="#static_android.sensor.info.pixelArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>pixel<wbr/>Array<wbr/>Size</a>,<wbr/> and would be (5,<wbr/> 5)
+relative to the top,<wbr/>left of post-processed YUV output buffer with dimensions given in
+<a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a>.<wbr/></p>
+<p>The currently supported fields that correct for geometric distortion are:</p>
+<ol>
+<li><a href="#static_android.lens.radialDistortion">android.<wbr/>lens.<wbr/>radial<wbr/>Distortion</a>.<wbr/></li>
+</ol>
+<p>If all of the geometric distortion fields are no-ops,<wbr/> this rectangle will be the same
+as the post-distortion-corrected rectangle given in
+<a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a>.<wbr/></p>
+<p>This rectangle is defined relative to the full pixel array; (0,<wbr/>0) is the top-left of
+the full pixel array,<wbr/> and the size of the full pixel array is given by
+<a href="#static_android.sensor.info.pixelArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>pixel<wbr/>Array<wbr/>Size</a>.<wbr/></p>
+<p>The pre-correction active array may be smaller than the full pixel array,<wbr/> since the
+full array may include black calibration pixels or other inactive regions.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>This array contains <code>(xmin,<wbr/> ymin,<wbr/> width,<wbr/> height)</code>.<wbr/> The <code>(xmin,<wbr/> ymin)</code> must be
+&gt;= <code>(0,<wbr/>0)</code>.<wbr/>
+The <code>(width,<wbr/> height)</code> must be &lt;= <code><a href="#static_android.sensor.info.pixelArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>pixel<wbr/>Array<wbr/>Size</a></code>.<wbr/></p>
+<p>If omitted by the HAL implementation,<wbr/> the camera framework will assume that this is
+the same as the post-correction active array region given in
+<a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a>.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+        
+        
+
+                
+          <tr class="entry" id="static_android.sensor.referenceIlluminant1">
+            <td class="entry_name
+             " rowspan="5">
+              android.<wbr/>sensor.<wbr/>reference<wbr/>Illuminant1
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">byte</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">DAYLIGHT</span>
+                    <span class="entry_type_enum_value">1</span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">FLUORESCENT</span>
+                    <span class="entry_type_enum_value">2</span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">TUNGSTEN</span>
+                    <span class="entry_type_enum_value">3</span>
+                    <span class="entry_type_enum_notes"><p>Incandescent light</p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">FLASH</span>
+                    <span class="entry_type_enum_value">4</span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">FINE_WEATHER</span>
+                    <span class="entry_type_enum_value">9</span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">CLOUDY_WEATHER</span>
+                    <span class="entry_type_enum_value">10</span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">SHADE</span>
+                    <span class="entry_type_enum_value">11</span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">DAYLIGHT_FLUORESCENT</span>
+                    <span class="entry_type_enum_value">12</span>
+                    <span class="entry_type_enum_notes"><p>D 5700 - 7100K</p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">DAY_WHITE_FLUORESCENT</span>
+                    <span class="entry_type_enum_value">13</span>
+                    <span class="entry_type_enum_notes"><p>N 4600 - 5400K</p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">COOL_WHITE_FLUORESCENT</span>
+                    <span class="entry_type_enum_value">14</span>
+                    <span class="entry_type_enum_notes"><p>W 3900 - 4500K</p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">WHITE_FLUORESCENT</span>
+                    <span class="entry_type_enum_value">15</span>
+                    <span class="entry_type_enum_notes"><p>WW 3200 - 3700K</p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">STANDARD_A</span>
+                    <span class="entry_type_enum_value">17</span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">STANDARD_B</span>
+                    <span class="entry_type_enum_value">18</span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">STANDARD_C</span>
+                    <span class="entry_type_enum_value">19</span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">D55</span>
+                    <span class="entry_type_enum_value">20</span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">D65</span>
+                    <span class="entry_type_enum_value">21</span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">D75</span>
+                    <span class="entry_type_enum_value">22</span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">D50</span>
+                    <span class="entry_type_enum_value">23</span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">ISO_STUDIO_TUNGSTEN</span>
+                    <span class="entry_type_enum_value">24</span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>The standard reference illuminant used as the scene light source when
+calculating the <a href="#static_android.sensor.colorTransform1">android.<wbr/>sensor.<wbr/>color<wbr/>Transform1</a>,<wbr/>
+<a href="#static_android.sensor.calibrationTransform1">android.<wbr/>sensor.<wbr/>calibration<wbr/>Transform1</a>,<wbr/> and
+<a href="#static_android.sensor.forwardMatrix1">android.<wbr/>sensor.<wbr/>forward<wbr/>Matrix1</a> matrices.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_RAW">RAW</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>The values in this key correspond to the values defined for the
+EXIF LightSource tag.<wbr/> These illuminants are standard light sources
+that are often used when calibrating camera devices.<wbr/></p>
+<p>If this key is present,<wbr/> then <a href="#static_android.sensor.colorTransform1">android.<wbr/>sensor.<wbr/>color<wbr/>Transform1</a>,<wbr/>
+<a href="#static_android.sensor.calibrationTransform1">android.<wbr/>sensor.<wbr/>calibration<wbr/>Transform1</a>,<wbr/> and
+<a href="#static_android.sensor.forwardMatrix1">android.<wbr/>sensor.<wbr/>forward<wbr/>Matrix1</a> will also be present.<wbr/></p>
+<p>Some devices may choose to provide a second set of calibration
+information for improved quality,<wbr/> including
+<a href="#static_android.sensor.referenceIlluminant2">android.<wbr/>sensor.<wbr/>reference<wbr/>Illuminant2</a> and its corresponding matrices.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>The first reference illuminant (<a href="#static_android.sensor.referenceIlluminant1">android.<wbr/>sensor.<wbr/>reference<wbr/>Illuminant1</a>)
+and corresponding matrices must be present to support the RAW capability
+and DNG output.<wbr/></p>
+<p>When producing raw images with a color profile that has only been
+calibrated against a single light source,<wbr/> it is valid to omit
+<a href="#static_android.sensor.referenceIlluminant2">android.<wbr/>sensor.<wbr/>reference<wbr/>Illuminant2</a> along with the
+<a href="#static_android.sensor.colorTransform2">android.<wbr/>sensor.<wbr/>color<wbr/>Transform2</a>,<wbr/> <a href="#static_android.sensor.calibrationTransform2">android.<wbr/>sensor.<wbr/>calibration<wbr/>Transform2</a>,<wbr/>
+and <a href="#static_android.sensor.forwardMatrix2">android.<wbr/>sensor.<wbr/>forward<wbr/>Matrix2</a> matrices.<wbr/></p>
+<p>If only <a href="#static_android.sensor.referenceIlluminant1">android.<wbr/>sensor.<wbr/>reference<wbr/>Illuminant1</a> is included,<wbr/> it should be
+chosen so that it is representative of typical scene lighting.<wbr/>  In
+general,<wbr/> D50 or DAYLIGHT will be chosen for this case.<wbr/></p>
+<p>If both <a href="#static_android.sensor.referenceIlluminant1">android.<wbr/>sensor.<wbr/>reference<wbr/>Illuminant1</a> and
+<a href="#static_android.sensor.referenceIlluminant2">android.<wbr/>sensor.<wbr/>reference<wbr/>Illuminant2</a> are included,<wbr/> they should be
+chosen to represent the typical range of scene lighting conditions.<wbr/>
+In general,<wbr/> a low color temperature illuminant such as Standard-A will
+be chosen for the first reference illuminant and a higher color
+temperature illuminant such as D65 will be chosen for the second
+reference illuminant.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.sensor.referenceIlluminant2">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>sensor.<wbr/>reference<wbr/>Illuminant2
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">byte</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>The standard reference illuminant used as the scene light source when
+calculating the <a href="#static_android.sensor.colorTransform2">android.<wbr/>sensor.<wbr/>color<wbr/>Transform2</a>,<wbr/>
+<a href="#static_android.sensor.calibrationTransform2">android.<wbr/>sensor.<wbr/>calibration<wbr/>Transform2</a>,<wbr/> and
+<a href="#static_android.sensor.forwardMatrix2">android.<wbr/>sensor.<wbr/>forward<wbr/>Matrix2</a> matrices.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p>Any value listed in <a href="#static_android.sensor.referenceIlluminant1">android.<wbr/>sensor.<wbr/>reference<wbr/>Illuminant1</a></p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_RAW">RAW</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>See <a href="#static_android.sensor.referenceIlluminant1">android.<wbr/>sensor.<wbr/>reference<wbr/>Illuminant1</a> for more details.<wbr/></p>
+<p>If this key is present,<wbr/> then <a href="#static_android.sensor.colorTransform2">android.<wbr/>sensor.<wbr/>color<wbr/>Transform2</a>,<wbr/>
+<a href="#static_android.sensor.calibrationTransform2">android.<wbr/>sensor.<wbr/>calibration<wbr/>Transform2</a>,<wbr/> and
+<a href="#static_android.sensor.forwardMatrix2">android.<wbr/>sensor.<wbr/>forward<wbr/>Matrix2</a> will also be present.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.sensor.calibrationTransform1">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>sensor.<wbr/>calibration<wbr/>Transform1
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">rational</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  3 x 3
+                </span>
+              <span class="entry_type_visibility"> [public as colorSpaceTransform]</span>
+
+
+
+
+                <div class="entry_type_notes">3x3 matrix in row-major-order</div>
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>A per-device calibration transform matrix that maps from the
+reference sensor colorspace to the actual device sensor colorspace.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_RAW">RAW</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>This matrix is used to correct for per-device variations in the
+sensor colorspace,<wbr/> and is used for processing raw buffer data.<wbr/></p>
+<p>The matrix is expressed as a 3x3 matrix in row-major-order,<wbr/> and
+contains a per-device calibration transform that maps colors
+from reference sensor color space (i.<wbr/>e.<wbr/> the "golden module"
+colorspace) into this camera device's native sensor color
+space under the first reference illuminant
+(<a href="#static_android.sensor.referenceIlluminant1">android.<wbr/>sensor.<wbr/>reference<wbr/>Illuminant1</a>).<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.sensor.calibrationTransform2">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>sensor.<wbr/>calibration<wbr/>Transform2
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">rational</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  3 x 3
+                </span>
+              <span class="entry_type_visibility"> [public as colorSpaceTransform]</span>
+
+
+
+
+                <div class="entry_type_notes">3x3 matrix in row-major-order</div>
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>A per-device calibration transform matrix that maps from the
+reference sensor colorspace to the actual device sensor colorspace
+(this is the colorspace of the raw buffer data).<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_RAW">RAW</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>This matrix is used to correct for per-device variations in the
+sensor colorspace,<wbr/> and is used for processing raw buffer data.<wbr/></p>
+<p>The matrix is expressed as a 3x3 matrix in row-major-order,<wbr/> and
+contains a per-device calibration transform that maps colors
+from reference sensor color space (i.<wbr/>e.<wbr/> the "golden module"
+colorspace) into this camera device's native sensor color
+space under the second reference illuminant
+(<a href="#static_android.sensor.referenceIlluminant2">android.<wbr/>sensor.<wbr/>reference<wbr/>Illuminant2</a>).<wbr/></p>
+<p>This matrix will only be present if the second reference
+illuminant is present.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.sensor.colorTransform1">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>sensor.<wbr/>color<wbr/>Transform1
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">rational</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  3 x 3
+                </span>
+              <span class="entry_type_visibility"> [public as colorSpaceTransform]</span>
+
+
+
+
+                <div class="entry_type_notes">3x3 matrix in row-major-order</div>
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>A matrix that transforms color values from CIE XYZ color space to
+reference sensor color space.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_RAW">RAW</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>This matrix is used to convert from the standard CIE XYZ color
+space to the reference sensor colorspace,<wbr/> and is used when processing
+raw buffer data.<wbr/></p>
+<p>The matrix is expressed as a 3x3 matrix in row-major-order,<wbr/> and
+contains a color transform matrix that maps colors from the CIE
+XYZ color space to the reference sensor color space (i.<wbr/>e.<wbr/> the
+"golden module" colorspace) under the first reference illuminant
+(<a href="#static_android.sensor.referenceIlluminant1">android.<wbr/>sensor.<wbr/>reference<wbr/>Illuminant1</a>).<wbr/></p>
+<p>The white points chosen in both the reference sensor color space
+and the CIE XYZ colorspace when calculating this transform will
+match the standard white point for the first reference illuminant
+(i.<wbr/>e.<wbr/> no chromatic adaptation will be applied by this transform).<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.sensor.colorTransform2">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>sensor.<wbr/>color<wbr/>Transform2
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">rational</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  3 x 3
+                </span>
+              <span class="entry_type_visibility"> [public as colorSpaceTransform]</span>
+
+
+
+
+                <div class="entry_type_notes">3x3 matrix in row-major-order</div>
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>A matrix that transforms color values from CIE XYZ color space to
+reference sensor color space.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_RAW">RAW</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>This matrix is used to convert from the standard CIE XYZ color
+space to the reference sensor colorspace,<wbr/> and is used when processing
+raw buffer data.<wbr/></p>
+<p>The matrix is expressed as a 3x3 matrix in row-major-order,<wbr/> and
+contains a color transform matrix that maps colors from the CIE
+XYZ color space to the reference sensor color space (i.<wbr/>e.<wbr/> the
+"golden module" colorspace) under the second reference illuminant
+(<a href="#static_android.sensor.referenceIlluminant2">android.<wbr/>sensor.<wbr/>reference<wbr/>Illuminant2</a>).<wbr/></p>
+<p>The white points chosen in both the reference sensor color space
+and the CIE XYZ colorspace when calculating this transform will
+match the standard white point for the second reference illuminant
+(i.<wbr/>e.<wbr/> no chromatic adaptation will be applied by this transform).<wbr/></p>
+<p>This matrix will only be present if the second reference
+illuminant is present.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.sensor.forwardMatrix1">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>sensor.<wbr/>forward<wbr/>Matrix1
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">rational</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  3 x 3
+                </span>
+              <span class="entry_type_visibility"> [public as colorSpaceTransform]</span>
+
+
+
+
+                <div class="entry_type_notes">3x3 matrix in row-major-order</div>
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>A matrix that transforms white balanced camera colors from the reference
+sensor colorspace to the CIE XYZ colorspace with a D50 whitepoint.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_RAW">RAW</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>This matrix is used to convert to the standard CIE XYZ colorspace,<wbr/> and
+is used when processing raw buffer data.<wbr/></p>
+<p>This matrix is expressed as a 3x3 matrix in row-major-order,<wbr/> and contains
+a color transform matrix that maps white balanced colors from the
+reference sensor color space to the CIE XYZ color space with a D50 white
+point.<wbr/></p>
+<p>Under the first reference illuminant (<a href="#static_android.sensor.referenceIlluminant1">android.<wbr/>sensor.<wbr/>reference<wbr/>Illuminant1</a>)
+this matrix is chosen so that the standard white point for this reference
+illuminant in the reference sensor colorspace is mapped to D50 in the
+CIE XYZ colorspace.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.sensor.forwardMatrix2">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>sensor.<wbr/>forward<wbr/>Matrix2
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">rational</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  3 x 3
+                </span>
+              <span class="entry_type_visibility"> [public as colorSpaceTransform]</span>
+
+
+
+
+                <div class="entry_type_notes">3x3 matrix in row-major-order</div>
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>A matrix that transforms white balanced camera colors from the reference
+sensor colorspace to the CIE XYZ colorspace with a D50 whitepoint.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_RAW">RAW</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>This matrix is used to convert to the standard CIE XYZ colorspace,<wbr/> and
+is used when processing raw buffer data.<wbr/></p>
+<p>This matrix is expressed as a 3x3 matrix in row-major-order,<wbr/> and contains
+a color transform matrix that maps white balanced colors from the
+reference sensor color space to the CIE XYZ color space with a D50 white
+point.<wbr/></p>
+<p>Under the second reference illuminant (<a href="#static_android.sensor.referenceIlluminant2">android.<wbr/>sensor.<wbr/>reference<wbr/>Illuminant2</a>)
+this matrix is chosen so that the standard white point for this reference
+illuminant in the reference sensor colorspace is mapped to D50 in the
+CIE XYZ colorspace.<wbr/></p>
+<p>This matrix will only be present if the second reference
+illuminant is present.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.sensor.baseGainFactor">
+            <td class="entry_name
+             " rowspan="1">
+              android.<wbr/>sensor.<wbr/>base<wbr/>Gain<wbr/>Factor
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">rational</span>
+
+              <span class="entry_type_visibility"> [system]</span>
+
+
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Gain factor from electrons to raw units when
+ISO=100</p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_FUTURE">FUTURE</a></li>
+              </ul>
+            </td>
+
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.sensor.blackLevelPattern">
+            <td class="entry_name
+             " rowspan="5">
+              android.<wbr/>sensor.<wbr/>black<wbr/>Level<wbr/>Pattern
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int32</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  4
+                </span>
+              <span class="entry_type_visibility"> [public as blackLevelPattern]</span>
+
+
+
+
+                <div class="entry_type_notes">2x2 raw count block</div>
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>A fixed black level offset for each of the color filter arrangement
+(CFA) mosaic channels.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p>&gt;= 0 for each.<wbr/></p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_RAW">RAW</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>This key specifies the zero light value for each of the CFA mosaic
+channels in the camera sensor.<wbr/>  The maximal value output by the
+sensor is represented by the value in <a href="#static_android.sensor.info.whiteLevel">android.<wbr/>sensor.<wbr/>info.<wbr/>white<wbr/>Level</a>.<wbr/></p>
+<p>The values are given in the same order as channels listed for the CFA
+layout key (see <a href="#static_android.sensor.info.colorFilterArrangement">android.<wbr/>sensor.<wbr/>info.<wbr/>color<wbr/>Filter<wbr/>Arrangement</a>),<wbr/> i.<wbr/>e.<wbr/> the
+nth value given corresponds to the black level offset for the nth
+color channel listed in the CFA.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>The values are given in row-column scan order,<wbr/> with the first value
+corresponding to the element of the CFA in row=0,<wbr/> column=0.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.sensor.maxAnalogSensitivity">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>sensor.<wbr/>max<wbr/>Analog<wbr/>Sensitivity
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int32</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+              <span class="entry_type_hwlevel">[full] </span>
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Maximum sensitivity that is implemented
+purely through analog gain.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_V1">V1</a></li>
+                  <li><a href="#tag_FULL">FULL</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>For <a href="#controls_android.sensor.sensitivity">android.<wbr/>sensor.<wbr/>sensitivity</a> values less than or
+equal to this,<wbr/> all applied gain must be analog.<wbr/> For
+values above this,<wbr/> the gain applied can be a mix of analog and
+digital.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.sensor.orientation">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>sensor.<wbr/>orientation
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int32</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Clockwise angle through which the output image needs to be rotated to be
+upright on the device screen in its native orientation.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+              Degrees of clockwise rotation; always a multiple of
+          90
+            </td>
+
+            <td class="entry_range">
+              <p>0,<wbr/> 90,<wbr/> 180,<wbr/> 270</p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_BC">BC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>Also defines the direction of rolling shutter readout,<wbr/> which is from top to bottom in
+the sensor's coordinate system.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.sensor.profileHueSatMapDimensions">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>sensor.<wbr/>profile<wbr/>Hue<wbr/>Sat<wbr/>Map<wbr/>Dimensions
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int32</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  3
+                </span>
+              <span class="entry_type_visibility"> [system]</span>
+
+
+
+
+                <div class="entry_type_notes">Number of samples for hue,<wbr/> saturation,<wbr/> and value</div>
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>The number of input samples for each dimension of
+<a href="#dynamic_android.sensor.profileHueSatMap">android.<wbr/>sensor.<wbr/>profile<wbr/>Hue<wbr/>Sat<wbr/>Map</a>.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p>Hue &gt;= 1,<wbr/>
+Saturation &gt;= 2,<wbr/>
+Value &gt;= 1</p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_RAW">RAW</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>The number of input samples for the hue,<wbr/> saturation,<wbr/> and value
+dimension of <a href="#dynamic_android.sensor.profileHueSatMap">android.<wbr/>sensor.<wbr/>profile<wbr/>Hue<wbr/>Sat<wbr/>Map</a>.<wbr/> The order of the
+dimensions given is hue,<wbr/> saturation,<wbr/> value; where hue is the 0th
+element.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.sensor.availableTestPatternModes">
+            <td class="entry_name
+             " rowspan="5">
+              android.<wbr/>sensor.<wbr/>available<wbr/>Test<wbr/>Pattern<wbr/>Modes
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int32</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  n
+                </span>
+              <span class="entry_type_visibility"> [public]</span>
+
+
+
+
+                <div class="entry_type_notes">list of enums</div>
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>List of sensor test pattern modes for <a href="#controls_android.sensor.testPatternMode">android.<wbr/>sensor.<wbr/>test<wbr/>Pattern<wbr/>Mode</a>
+supported by this camera device.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p>Any value listed in <a href="#controls_android.sensor.testPatternMode">android.<wbr/>sensor.<wbr/>test<wbr/>Pattern<wbr/>Mode</a></p>
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>Defaults to OFF,<wbr/> and always includes OFF if defined.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>All custom modes must be &gt;= CUSTOM1.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+        
+
+      <!-- end of kind -->
+      </tbody>
+      <tr><td colspan="6" class="kind">dynamic</td></tr>
+
+      <thead class="entries_header">
+        <tr>
+          <th class="th_name">Property Name</th>
+          <th class="th_type">Type</th>
+          <th class="th_description">Description</th>
+          <th class="th_units">Units</th>
+          <th class="th_range">Range</th>
+          <th class="th_tags">Tags</th>
+        </tr>
+      </thead>
+
+      <tbody>
+
+        
+
+        
+
+        
+
+        
+
+                
+          <tr class="entry" id="dynamic_android.sensor.exposureTime">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>sensor.<wbr/>exposure<wbr/>Time
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int64</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+              <span class="entry_type_hwlevel">[full] </span>
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Duration each pixel is exposed to
+light.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+              Nanoseconds
+            </td>
+
+            <td class="entry_range">
+              <p><a href="#static_android.sensor.info.exposureTimeRange">android.<wbr/>sensor.<wbr/>info.<wbr/>exposure<wbr/>Time<wbr/>Range</a></p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_V1">V1</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>If the sensor can't expose this exact duration,<wbr/> it will shorten the
+duration exposed to the nearest possible value (rather than expose longer).<wbr/>
+The final exposure time used will be available in the output capture result.<wbr/></p>
+<p>This control is only effective if <a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a> or <a href="#controls_android.control.mode">android.<wbr/>control.<wbr/>mode</a> is set to
+OFF; otherwise the auto-exposure algorithm will override this value.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="dynamic_android.sensor.frameDuration">
+            <td class="entry_name
+             " rowspan="5">
+              android.<wbr/>sensor.<wbr/>frame<wbr/>Duration
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int64</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+              <span class="entry_type_hwlevel">[full] </span>
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Duration from start of frame exposure to
+start of next frame exposure.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+              Nanoseconds
+            </td>
+
+            <td class="entry_range">
+              <p>See <a href="#static_android.sensor.info.maxFrameDuration">android.<wbr/>sensor.<wbr/>info.<wbr/>max<wbr/>Frame<wbr/>Duration</a>,<wbr/>
+<a href="#static_android.scaler.streamConfigurationMap">android.<wbr/>scaler.<wbr/>stream<wbr/>Configuration<wbr/>Map</a>.<wbr/> The duration
+is capped to <code>max(duration,<wbr/> exposureTime + overhead)</code>.<wbr/></p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_V1">V1</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>The maximum frame rate that can be supported by a camera subsystem is
+a function of many factors:</p>
+<ul>
+<li>Requested resolutions of output image streams</li>
+<li>Availability of binning /<wbr/> skipping modes on the imager</li>
+<li>The bandwidth of the imager interface</li>
+<li>The bandwidth of the various ISP processing blocks</li>
+</ul>
+<p>Since these factors can vary greatly between different ISPs and
+sensors,<wbr/> the camera abstraction tries to represent the bandwidth
+restrictions with as simple a model as possible.<wbr/></p>
+<p>The model presented has the following characteristics:</p>
+<ul>
+<li>The image sensor is always configured to output the smallest
+resolution possible given the application's requested output stream
+sizes.<wbr/>  The smallest resolution is defined as being at least as large
+as the largest requested output stream size; the camera pipeline must
+never digitally upsample sensor data when the crop region covers the
+whole sensor.<wbr/> In general,<wbr/> this means that if only small output stream
+resolutions are configured,<wbr/> the sensor can provide a higher frame
+rate.<wbr/></li>
+<li>Since any request may use any or all the currently configured
+output streams,<wbr/> the sensor and ISP must be configured to support
+scaling a single capture to all the streams at the same time.<wbr/>  This
+means the camera pipeline must be ready to produce the largest
+requested output size without any delay.<wbr/>  Therefore,<wbr/> the overall
+frame rate of a given configured stream set is governed only by the
+largest requested stream resolution.<wbr/></li>
+<li>Using more than one output stream in a request does not affect the
+frame duration.<wbr/></li>
+<li>Certain format-streams may need to do additional background processing
+before data is consumed/<wbr/>produced by that stream.<wbr/> These processors
+can run concurrently to the rest of the camera pipeline,<wbr/> but
+cannot process more than 1 capture at a time.<wbr/></li>
+</ul>
+<p>The necessary information for the application,<wbr/> given the model above,<wbr/>
+is provided via the <a href="#static_android.scaler.streamConfigurationMap">android.<wbr/>scaler.<wbr/>stream<wbr/>Configuration<wbr/>Map</a> field using
+<a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getOutputMinFrameDuration">StreamConfigurationMap#getOutputMinFrameDuration</a>.<wbr/>
+These are used to determine the maximum frame rate /<wbr/> minimum frame
+duration that is possible for a given stream configuration.<wbr/></p>
+<p>Specifically,<wbr/> the application can use the following rules to
+determine the minimum frame duration it can request from the camera
+device:</p>
+<ol>
+<li>Let the set of currently configured input/<wbr/>output streams
+be called <code>S</code>.<wbr/></li>
+<li>Find the minimum frame durations for each stream in <code>S</code>,<wbr/> by looking
+it up in <a href="#static_android.scaler.streamConfigurationMap">android.<wbr/>scaler.<wbr/>stream<wbr/>Configuration<wbr/>Map</a> using <a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getOutputMinFrameDuration">StreamConfigurationMap#getOutputMinFrameDuration</a>
+(with its respective size/<wbr/>format).<wbr/> Let this set of frame durations be
+called <code>F</code>.<wbr/></li>
+<li>For any given request <code>R</code>,<wbr/> the minimum frame duration allowed
+for <code>R</code> is the maximum out of all values in <code>F</code>.<wbr/> Let the streams
+used in <code>R</code> be called <code>S_<wbr/>r</code>.<wbr/></li>
+</ol>
+<p>If none of the streams in <code>S_<wbr/>r</code> have a stall time (listed in <a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getOutputStallDuration">StreamConfigurationMap#getOutputStallDuration</a>
+using its respective size/<wbr/>format),<wbr/> then the frame duration in <code>F</code>
+determines the steady state frame rate that the application will get
+if it uses <code>R</code> as a repeating request.<wbr/> Let this special kind of
+request be called <code>Rsimple</code>.<wbr/></p>
+<p>A repeating request <code>Rsimple</code> can be <em>occasionally</em> interleaved
+by a single capture of a new request <code>Rstall</code> (which has at least
+one in-use stream with a non-0 stall time) and if <code>Rstall</code> has the
+same minimum frame duration this will not cause a frame rate loss
+if all buffers from the previous <code>Rstall</code> have already been
+delivered.<wbr/></p>
+<p>For more details about stalling,<wbr/> see
+<a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getOutputStallDuration">StreamConfigurationMap#getOutputStallDuration</a>.<wbr/></p>
+<p>This control is only effective if <a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a> or <a href="#controls_android.control.mode">android.<wbr/>control.<wbr/>mode</a> is set to
+OFF; otherwise the auto-exposure algorithm will override this value.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>For more details about stalling,<wbr/> see
+<a href="#static_android.scaler.availableStallDurations">android.<wbr/>scaler.<wbr/>available<wbr/>Stall<wbr/>Durations</a>.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="dynamic_android.sensor.sensitivity">
+            <td class="entry_name
+             " rowspan="5">
+              android.<wbr/>sensor.<wbr/>sensitivity
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int32</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+              <span class="entry_type_hwlevel">[full] </span>
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>The amount of gain applied to sensor data
+before processing.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+              ISO arithmetic units
+            </td>
+
+            <td class="entry_range">
+              <p><a href="#static_android.sensor.info.sensitivityRange">android.<wbr/>sensor.<wbr/>info.<wbr/>sensitivity<wbr/>Range</a></p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_V1">V1</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>The sensitivity is the standard ISO sensitivity value,<wbr/>
+as defined in ISO 12232:2006.<wbr/></p>
+<p>The sensitivity must be within <a href="#static_android.sensor.info.sensitivityRange">android.<wbr/>sensor.<wbr/>info.<wbr/>sensitivity<wbr/>Range</a>,<wbr/> and
+if it is less than <a href="#static_android.sensor.maxAnalogSensitivity">android.<wbr/>sensor.<wbr/>max<wbr/>Analog<wbr/>Sensitivity</a>,<wbr/> the camera device
+is guaranteed to use only analog amplification for applying the gain.<wbr/></p>
+<p>If the camera device cannot apply the exact sensitivity
+requested,<wbr/> it will reduce the gain to the nearest supported
+value.<wbr/> The final sensitivity used will be available in the
+output capture result.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>ISO 12232:2006 REI method is acceptable.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="dynamic_android.sensor.timestamp">
+            <td class="entry_name
+             " rowspan="5">
+              android.<wbr/>sensor.<wbr/>timestamp
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int64</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Time at start of exposure of first
+row of the image sensor active array,<wbr/> in nanoseconds.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+              Nanoseconds
+            </td>
+
+            <td class="entry_range">
+              <p>&gt; 0</p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_BC">BC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>The timestamps are also included in all image
+buffers produced for the same capture,<wbr/> and will be identical
+on all the outputs.<wbr/></p>
+<p>When <a href="#static_android.sensor.info.timestampSource">android.<wbr/>sensor.<wbr/>info.<wbr/>timestamp<wbr/>Source</a> <code>==</code> UNKNOWN,<wbr/>
+the timestamps measure time since an unspecified starting point,<wbr/>
+and are monotonically increasing.<wbr/> They can be compared with the
+timestamps for other captures from the same camera device,<wbr/> but are
+not guaranteed to be comparable to any other time source.<wbr/></p>
+<p>When <a href="#static_android.sensor.info.timestampSource">android.<wbr/>sensor.<wbr/>info.<wbr/>timestamp<wbr/>Source</a> <code>==</code> REALTIME,<wbr/> the
+timestamps measure time in the same timebase as <a href="https://developer.android.com/reference/android/os/SystemClock.html#elapsedRealtimeNanos">SystemClock#elapsedRealtimeNanos</a>,<wbr/> and they can
+be compared to other timestamps from other subsystems that
+are using that base.<wbr/></p>
+<p>For reprocessing,<wbr/> the timestamp will match the start of exposure of
+the input image,<wbr/> i.<wbr/>e.<wbr/> <a href="https://developer.android.com/reference/CaptureResult.html#SENSOR_TIMESTAMP">the
+timestamp</a> in the TotalCaptureResult that was used to create the
+reprocess capture request.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>All timestamps must be in reference to the kernel's
+CLOCK_<wbr/>BOOTTIME monotonic clock,<wbr/> which properly accounts for
+time spent asleep.<wbr/> This allows for synchronization with
+sensors that continue to operate while the system is
+otherwise asleep.<wbr/></p>
+<p>If <a href="#static_android.sensor.info.timestampSource">android.<wbr/>sensor.<wbr/>info.<wbr/>timestamp<wbr/>Source</a> <code>==</code> REALTIME,<wbr/>
+The timestamp must be synchronized with the timestamps from other
+sensor subsystems that are using the same timebase.<wbr/></p>
+<p>For reprocessing,<wbr/> the input image's start of exposure can be looked up
+with <a href="#dynamic_android.sensor.timestamp">android.<wbr/>sensor.<wbr/>timestamp</a> from the metadata included in the
+capture request.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="dynamic_android.sensor.temperature">
+            <td class="entry_name
+             " rowspan="1">
+              android.<wbr/>sensor.<wbr/>temperature
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">float</span>
+
+              <span class="entry_type_visibility"> [system]</span>
+
+
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>The temperature of the sensor,<wbr/> sampled at the time
+exposure began for this frame.<wbr/></p>
+<p>The thermal diode being queried should be inside the sensor PCB,<wbr/> or
+somewhere close to it.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+              Celsius
+            </td>
+
+            <td class="entry_range">
+              <p>Optional.<wbr/> This value is missing if no temperature is available.<wbr/></p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_FUTURE">FUTURE</a></li>
+              </ul>
+            </td>
+
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="dynamic_android.sensor.neutralColorPoint">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>sensor.<wbr/>neutral<wbr/>Color<wbr/>Point
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">rational</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  3
+                </span>
+              <span class="entry_type_visibility"> [public]</span>
+
+
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>The estimated camera neutral color in the native sensor colorspace at
+the time of capture.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_RAW">RAW</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>This value gives the neutral color point encoded as an RGB value in the
+native sensor color space.<wbr/>  The neutral color point indicates the
+currently estimated white point of the scene illumination.<wbr/>  It can be
+used to interpolate between the provided color transforms when
+processing raw sensor data.<wbr/></p>
+<p>The order of the values is R,<wbr/> G,<wbr/> B; where R is in the lowest index.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="dynamic_android.sensor.noiseProfile">
+            <td class="entry_name
+             " rowspan="5">
+              android.<wbr/>sensor.<wbr/>noise<wbr/>Profile
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">double</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  2 x CFA Channels
+                </span>
+              <span class="entry_type_visibility"> [public as pairDoubleDouble]</span>
+
+
+
+
+                <div class="entry_type_notes">Pairs of noise model coefficients</div>
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Noise model coefficients for each CFA mosaic channel.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_RAW">RAW</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>This key contains two noise model coefficients for each CFA channel
+corresponding to the sensor amplification (S) and sensor readout
+noise (O).<wbr/>  These are given as pairs of coefficients for each channel
+in the same order as channels listed for the CFA layout key
+(see <a href="#static_android.sensor.info.colorFilterArrangement">android.<wbr/>sensor.<wbr/>info.<wbr/>color<wbr/>Filter<wbr/>Arrangement</a>).<wbr/>  This is
+represented as an array of Pair&lt;Double,<wbr/> Double&gt;,<wbr/> where
+the first member of the Pair at index n is the S coefficient and the
+second member is the O coefficient for the nth color channel in the CFA.<wbr/></p>
+<p>These coefficients are used in a two parameter noise model to describe
+the amount of noise present in the image for each CFA channel.<wbr/>  The
+noise model used here is:</p>
+<p>N(x) = sqrt(Sx + O)</p>
+<p>Where x represents the recorded signal of a CFA channel normalized to
+the range [0,<wbr/> 1],<wbr/> and S and O are the noise model coefficients for
+that channel.<wbr/></p>
+<p>A more detailed description of the noise model can be found in the
+Adobe DNG specification for the NoiseProfile tag.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>For a CFA layout of RGGB,<wbr/> the list of coefficients would be given as
+an array of doubles S0,<wbr/>O0,<wbr/>S1,<wbr/>O1,...,<wbr/> where S0 and O0 are the coefficients
+for the red channel,<wbr/> S1 and O1 are the coefficients for the first green
+channel,<wbr/> etc.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="dynamic_android.sensor.profileHueSatMap">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>sensor.<wbr/>profile<wbr/>Hue<wbr/>Sat<wbr/>Map
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">float</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  hue_samples x saturation_samples x value_samples x 3
+                </span>
+              <span class="entry_type_visibility"> [system]</span>
+
+
+
+
+                <div class="entry_type_notes">Mapping for hue,<wbr/> saturation,<wbr/> and value</div>
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>A mapping containing a hue shift,<wbr/> saturation scale,<wbr/> and value scale
+for each pixel.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+              
+          The hue shift is given in degrees; saturation and value scale factors are
+          unitless and are between 0 and 1 inclusive
+          
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_RAW">RAW</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>hue_<wbr/>samples,<wbr/> saturation_<wbr/>samples,<wbr/> and value_<wbr/>samples are given in
+<a href="#static_android.sensor.profileHueSatMapDimensions">android.<wbr/>sensor.<wbr/>profile<wbr/>Hue<wbr/>Sat<wbr/>Map<wbr/>Dimensions</a>.<wbr/></p>
+<p>Each entry of this map contains three floats corresponding to the
+hue shift,<wbr/> saturation scale,<wbr/> and value scale,<wbr/> respectively; where the
+hue shift has the lowest index.<wbr/> The map entries are stored in the key
+in nested loop order,<wbr/> with the value divisions in the outer loop,<wbr/> the
+hue divisions in the middle loop,<wbr/> and the saturation divisions in the
+inner loop.<wbr/> All zero input saturation entries are required to have a
+value scale factor of 1.<wbr/>0.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="dynamic_android.sensor.profileToneCurve">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>sensor.<wbr/>profile<wbr/>Tone<wbr/>Curve
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">float</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  samples x 2
+                </span>
+              <span class="entry_type_visibility"> [system]</span>
+
+
+
+
+                <div class="entry_type_notes">Samples defining a spline for a tone-mapping curve</div>
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>A list of x,<wbr/>y samples defining a tone-mapping curve for gamma adjustment.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p>Each sample has an input range of <code>[0,<wbr/> 1]</code> and an output range of
+<code>[0,<wbr/> 1]</code>.<wbr/>  The first sample is required to be <code>(0,<wbr/> 0)</code>,<wbr/> and the last
+sample is required to be <code>(1,<wbr/> 1)</code>.<wbr/></p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_RAW">RAW</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>This key contains a default tone curve that can be applied while
+processing the image as a starting point for user adjustments.<wbr/>
+The curve is specified as a list of value pairs in linear gamma.<wbr/>
+The curve is interpolated using a cubic spline.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="dynamic_android.sensor.greenSplit">
+            <td class="entry_name
+             " rowspan="5">
+              android.<wbr/>sensor.<wbr/>green<wbr/>Split
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">float</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>The worst-case divergence between Bayer green channels.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p>&gt;= 0</p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_RAW">RAW</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>This value is an estimate of the worst case split between the
+Bayer green channels in the red and blue rows in the sensor color
+filter array.<wbr/></p>
+<p>The green split is calculated as follows:</p>
+<ol>
+<li>A 5x5 pixel (or larger) window W within the active sensor array is
+chosen.<wbr/> The term 'pixel' here is taken to mean a group of 4 Bayer
+mosaic channels (R,<wbr/> Gr,<wbr/> Gb,<wbr/> B).<wbr/>  The location and size of the window
+chosen is implementation defined,<wbr/> and should be chosen to provide a
+green split estimate that is both representative of the entire image
+for this camera sensor,<wbr/> and can be calculated quickly.<wbr/></li>
+<li>The arithmetic mean of the green channels from the red
+rows (mean_<wbr/>Gr) within W is computed.<wbr/></li>
+<li>The arithmetic mean of the green channels from the blue
+rows (mean_<wbr/>Gb) within W is computed.<wbr/></li>
+<li>The maximum ratio R of the two means is computed as follows:
+<code>R = max((mean_<wbr/>Gr + 1)/<wbr/>(mean_<wbr/>Gb + 1),<wbr/> (mean_<wbr/>Gb + 1)/<wbr/>(mean_<wbr/>Gr + 1))</code></li>
+</ol>
+<p>The ratio R is the green split divergence reported for this property,<wbr/>
+which represents how much the green channels differ in the mosaic
+pattern.<wbr/>  This value is typically used to determine the treatment of
+the green mosaic channels when demosaicing.<wbr/></p>
+<p>The green split value can be roughly interpreted as follows:</p>
+<ul>
+<li>R &lt; 1.<wbr/>03 is a negligible split (&lt;3% divergence).<wbr/></li>
+<li>1.<wbr/>03 &lt;= R &lt;= 1.<wbr/>20 will require some software
+correction to avoid demosaic errors (3-20% divergence).<wbr/></li>
+<li>R &gt; 1.<wbr/>20 will require strong software correction to produce
+a usable image (&gt;20% divergence).<wbr/></li>
+</ul>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>The green split given may be a static value based on prior
+characterization of the camera sensor using the green split
+calculation method given here over a large,<wbr/> representative,<wbr/> sample
+set of images.<wbr/>  Other methods of calculation that produce equivalent
+results,<wbr/> and can be interpreted in the same manner,<wbr/> may be used.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="dynamic_android.sensor.testPatternData">
+            <td class="entry_name
+             " rowspan="5">
+              android.<wbr/>sensor.<wbr/>test<wbr/>Pattern<wbr/>Data
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int32</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  4
+                </span>
+              <span class="entry_type_visibility"> [public]</span>
+
+
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>A pixel <code>[R,<wbr/> G_<wbr/>even,<wbr/> G_<wbr/>odd,<wbr/> B]</code> that supplies the test pattern
+when <a href="#controls_android.sensor.testPatternMode">android.<wbr/>sensor.<wbr/>test<wbr/>Pattern<wbr/>Mode</a> is SOLID_<wbr/>COLOR.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>Each color channel is treated as an unsigned 32-bit integer.<wbr/>
+The camera device then uses the most significant X bits
+that correspond to how many bits are in its Bayer raw sensor
+output.<wbr/></p>
+<p>For example,<wbr/> a sensor with RAW10 Bayer output would use the
+10 most significant bits from each color channel.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="dynamic_android.sensor.testPatternMode">
+            <td class="entry_name
+             " rowspan="5">
+              android.<wbr/>sensor.<wbr/>test<wbr/>Pattern<wbr/>Mode
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">int32</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">OFF</span>
+                    <span class="entry_type_enum_notes"><p>No test pattern mode is used,<wbr/> and the camera
+device returns captures from the image sensor.<wbr/></p>
+<p>This is the default if the key is not set.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">SOLID_COLOR</span>
+                    <span class="entry_type_enum_notes"><p>Each pixel in <code>[R,<wbr/> G_<wbr/>even,<wbr/> G_<wbr/>odd,<wbr/> B]</code> is replaced by its
+respective color channel provided in
+<a href="#controls_android.sensor.testPatternData">android.<wbr/>sensor.<wbr/>test<wbr/>Pattern<wbr/>Data</a>.<wbr/></p>
+<p>For example:</p>
+<pre><code>android.<wbr/>testPatternData = [0,<wbr/> 0xFFFFFFFF,<wbr/> 0xFFFFFFFF,<wbr/> 0]
+</code></pre>
+<p>All green pixels are 100% green.<wbr/> All red/<wbr/>blue pixels are black.<wbr/></p>
+<pre><code>android.<wbr/>testPatternData = [0xFFFFFFFF,<wbr/> 0,<wbr/> 0xFFFFFFFF,<wbr/> 0]
+</code></pre>
+<p>All red pixels are 100% red.<wbr/> Only the odd green pixels
+are 100% green.<wbr/> All blue pixels are 100% black.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">COLOR_BARS</span>
+                    <span class="entry_type_enum_notes"><p>All pixel data is replaced with an 8-bar color pattern.<wbr/></p>
+<p>The vertical bars (left-to-right) are as follows:</p>
+<ul>
+<li>100% white</li>
+<li>yellow</li>
+<li>cyan</li>
+<li>green</li>
+<li>magenta</li>
+<li>red</li>
+<li>blue</li>
+<li>black</li>
+</ul>
+<p>In general the image would look like the following:</p>
+<pre><code>W Y C G M R B K
+W Y C G M R B K
+W Y C G M R B K
+W Y C G M R B K
+W Y C G M R B K
+.<wbr/> .<wbr/> .<wbr/> .<wbr/> .<wbr/> .<wbr/> .<wbr/> .<wbr/>
+.<wbr/> .<wbr/> .<wbr/> .<wbr/> .<wbr/> .<wbr/> .<wbr/> .<wbr/>
+.<wbr/> .<wbr/> .<wbr/> .<wbr/> .<wbr/> .<wbr/> .<wbr/> .<wbr/>
+
+(B = Blue,<wbr/> K = Black)
+</code></pre>
+<p>Each bar should take up 1/<wbr/>8 of the sensor pixel array width.<wbr/>
+When this is not possible,<wbr/> the bar size should be rounded
+down to the nearest integer and the pattern can repeat
+on the right side.<wbr/></p>
+<p>Each bar's height must always take up the full sensor
+pixel array height.<wbr/></p>
+<p>Each pixel in this test pattern must be set to either
+0% intensity or 100% intensity.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">COLOR_BARS_FADE_TO_GRAY</span>
+                    <span class="entry_type_enum_notes"><p>The test pattern is similar to COLOR_<wbr/>BARS,<wbr/> except that
+each bar should start at its specified color at the top,<wbr/>
+and fade to gray at the bottom.<wbr/></p>
+<p>Furthermore each bar is further subdivided into a left and
+right half.<wbr/> The left half should have a smooth gradient,<wbr/>
+and the right half should have a quantized gradient.<wbr/></p>
+<p>In particular,<wbr/> the right half should consist of blocks of the
+same color for 1/<wbr/>16th of the active sensor pixel array width.<wbr/></p>
+<p>The least significant bits in the quantized gradient should
+be copied from the most significant bits of the smooth gradient.<wbr/></p>
+<p>The height of each bar should always be a multiple of 128.<wbr/>
+When this is not the case,<wbr/> the pattern should repeat at the bottom
+of the image.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">PN9</span>
+                    <span class="entry_type_enum_notes"><p>All pixel data is replaced by a pseudo-random sequence
+generated from a PN9 512-bit sequence (typically implemented
+in hardware with a linear feedback shift register).<wbr/></p>
+<p>The generator should be reset at the beginning of each frame,<wbr/>
+and thus each subsequent raw frame with this test pattern should
+be exactly the same as the last.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">CUSTOM1</span>
+                    <span class="entry_type_enum_value">256</span>
+                    <span class="entry_type_enum_notes"><p>The first custom test pattern.<wbr/> All custom patterns that are
+available only on this camera device are at least this numeric
+value.<wbr/></p>
+<p>All of the custom test patterns will be static
+(that is the raw image must not vary from frame to frame).<wbr/></p></span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>When enabled,<wbr/> the sensor sends a test pattern instead of
+doing a real exposure from the camera.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p><a href="#static_android.sensor.availableTestPatternModes">android.<wbr/>sensor.<wbr/>available<wbr/>Test<wbr/>Pattern<wbr/>Modes</a></p>
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>When a test pattern is enabled,<wbr/> all manual sensor controls specified
+by android.<wbr/>sensor.<wbr/>* will be ignored.<wbr/> All other controls should
+work as normal.<wbr/></p>
+<p>For example,<wbr/> if manual flash is enabled,<wbr/> flash firing should still
+occur (and the test pattern should remain unmodified,<wbr/> since the flash
+would not actually affect it).<wbr/></p>
+<p>Defaults to OFF.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>All test patterns are specified in the Bayer domain.<wbr/></p>
+<p>The HAL may choose to substitute test patterns from the sensor
+with test patterns from on-device memory.<wbr/> In that case,<wbr/> it should be
+indistinguishable to the ISP whether the data came from the
+sensor interconnect bus (such as CSI2) or memory.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="dynamic_android.sensor.rollingShutterSkew">
+            <td class="entry_name
+             " rowspan="5">
+              android.<wbr/>sensor.<wbr/>rolling<wbr/>Shutter<wbr/>Skew
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int64</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+              <span class="entry_type_hwlevel">[limited] </span>
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Duration between the start of first row exposure
+and the start of last row exposure.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+              Nanoseconds
+            </td>
+
+            <td class="entry_range">
+              <p>&gt;= 0 and &lt;
+<a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getOutputMinFrameDuration">StreamConfigurationMap#getOutputMinFrameDuration</a>.<wbr/></p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_V1">V1</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>This is the exposure time skew between the first and last
+row exposure start times.<wbr/> The first row and the last row are
+the first and last rows inside of the
+<a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a>.<wbr/></p>
+<p>For typical camera sensors that use rolling shutters,<wbr/> this is also equivalent
+to the frame readout time.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>The HAL must report <code>0</code> if the sensor is using global shutter,<wbr/> where all pixels begin
+exposure at the same time.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+        
+
+      <!-- end of kind -->
+      </tbody>
+
+  <!-- end of section -->
+  <tr><td colspan="6" id="section_shading" class="section">shading</td></tr>
+
+
+      <tr><td colspan="6" class="kind">controls</td></tr>
+
+      <thead class="entries_header">
+        <tr>
+          <th class="th_name">Property Name</th>
+          <th class="th_type">Type</th>
+          <th class="th_description">Description</th>
+          <th class="th_units">Units</th>
+          <th class="th_range">Range</th>
+          <th class="th_tags">Tags</th>
+        </tr>
+      </thead>
+
+      <tbody>
+
+        
+
+        
+
+        
+
+        
+
+                
+          <tr class="entry" id="controls_android.shading.mode">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>shading.<wbr/>mode
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">byte</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+              <span class="entry_type_hwlevel">[full] </span>
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">OFF</span>
+                    <span class="entry_type_enum_notes"><p>No lens shading correction is applied.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">FAST</span>
+                    <span class="entry_type_enum_notes"><p>Apply lens shading corrections,<wbr/> without slowing
+frame rate relative to sensor raw output</p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">HIGH_QUALITY</span>
+                    <span class="entry_type_enum_notes"><p>Apply high-quality lens shading correction,<wbr/> at the
+cost of possibly reduced frame rate.<wbr/></p></span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Quality of lens shading correction applied
+to the image data.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p><a href="#static_android.shading.availableModes">android.<wbr/>shading.<wbr/>available<wbr/>Modes</a></p>
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>When set to OFF mode,<wbr/> no lens shading correction will be applied by the
+camera device,<wbr/> and an identity lens shading map data will be provided
+if <code><a href="#controls_android.statistics.lensShadingMapMode">android.<wbr/>statistics.<wbr/>lens<wbr/>Shading<wbr/>Map<wbr/>Mode</a> == ON</code>.<wbr/> For example,<wbr/> for lens
+shading map with size of <code>[ 4,<wbr/> 3 ]</code>,<wbr/>
+the output <a href="#dynamic_android.statistics.lensShadingCorrectionMap">android.<wbr/>statistics.<wbr/>lens<wbr/>Shading<wbr/>Correction<wbr/>Map</a> for this case will be an identity
+map shown below:</p>
+<pre><code>[ 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/>  1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/>
+ 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/>  1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/>
+ 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/>  1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/>
+ 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/>  1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/>
+ 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/>  1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/>
+ 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/>  1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0 ]
+</code></pre>
+<p>When set to other modes,<wbr/> lens shading correction will be applied by the camera
+device.<wbr/> Applications can request lens shading map data by setting
+<a href="#controls_android.statistics.lensShadingMapMode">android.<wbr/>statistics.<wbr/>lens<wbr/>Shading<wbr/>Map<wbr/>Mode</a> to ON,<wbr/> and then the camera device will provide lens
+shading map data in <a href="#dynamic_android.statistics.lensShadingCorrectionMap">android.<wbr/>statistics.<wbr/>lens<wbr/>Shading<wbr/>Correction<wbr/>Map</a>; the returned shading map
+data will be the one applied by the camera device for this capture request.<wbr/></p>
+<p>The shading map data may depend on the auto-exposure (AE) and AWB statistics,<wbr/> therefore
+the reliability of the map data may be affected by the AE and AWB algorithms.<wbr/> When AE and
+AWB are in AUTO modes (<a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a> <code>!=</code> OFF and <a href="#controls_android.control.awbMode">android.<wbr/>control.<wbr/>awb<wbr/>Mode</a> <code>!=</code>
+OFF),<wbr/> to get best results,<wbr/> it is recommended that the applications wait for the AE and AWB
+to be converged before using the returned shading map data.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="controls_android.shading.strength">
+            <td class="entry_name
+             " rowspan="1">
+              android.<wbr/>shading.<wbr/>strength
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">byte</span>
+
+              <span class="entry_type_visibility"> [system]</span>
+
+
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Control the amount of shading correction
+applied to the images</p>
+            </td>
+
+            <td class="entry_units">
+              unitless: 1-10; 10 is full shading
+          compensation
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_FUTURE">FUTURE</a></li>
+              </ul>
+            </td>
+
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+        
+
+      <!-- end of kind -->
+      </tbody>
+      <tr><td colspan="6" class="kind">dynamic</td></tr>
+
+      <thead class="entries_header">
+        <tr>
+          <th class="th_name">Property Name</th>
+          <th class="th_type">Type</th>
+          <th class="th_description">Description</th>
+          <th class="th_units">Units</th>
+          <th class="th_range">Range</th>
+          <th class="th_tags">Tags</th>
+        </tr>
+      </thead>
+
+      <tbody>
+
+        
+
+        
+
+        
+
+        
+
+                
+          <tr class="entry" id="dynamic_android.shading.mode">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>shading.<wbr/>mode
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">byte</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+              <span class="entry_type_hwlevel">[full] </span>
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">OFF</span>
+                    <span class="entry_type_enum_notes"><p>No lens shading correction is applied.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">FAST</span>
+                    <span class="entry_type_enum_notes"><p>Apply lens shading corrections,<wbr/> without slowing
+frame rate relative to sensor raw output</p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">HIGH_QUALITY</span>
+                    <span class="entry_type_enum_notes"><p>Apply high-quality lens shading correction,<wbr/> at the
+cost of possibly reduced frame rate.<wbr/></p></span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Quality of lens shading correction applied
+to the image data.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p><a href="#static_android.shading.availableModes">android.<wbr/>shading.<wbr/>available<wbr/>Modes</a></p>
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>When set to OFF mode,<wbr/> no lens shading correction will be applied by the
+camera device,<wbr/> and an identity lens shading map data will be provided
+if <code><a href="#controls_android.statistics.lensShadingMapMode">android.<wbr/>statistics.<wbr/>lens<wbr/>Shading<wbr/>Map<wbr/>Mode</a> == ON</code>.<wbr/> For example,<wbr/> for lens
+shading map with size of <code>[ 4,<wbr/> 3 ]</code>,<wbr/>
+the output <a href="#dynamic_android.statistics.lensShadingCorrectionMap">android.<wbr/>statistics.<wbr/>lens<wbr/>Shading<wbr/>Correction<wbr/>Map</a> for this case will be an identity
+map shown below:</p>
+<pre><code>[ 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/>  1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/>
+ 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/>  1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/>
+ 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/>  1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/>
+ 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/>  1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/>
+ 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/>  1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/>
+ 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/>  1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0 ]
+</code></pre>
+<p>When set to other modes,<wbr/> lens shading correction will be applied by the camera
+device.<wbr/> Applications can request lens shading map data by setting
+<a href="#controls_android.statistics.lensShadingMapMode">android.<wbr/>statistics.<wbr/>lens<wbr/>Shading<wbr/>Map<wbr/>Mode</a> to ON,<wbr/> and then the camera device will provide lens
+shading map data in <a href="#dynamic_android.statistics.lensShadingCorrectionMap">android.<wbr/>statistics.<wbr/>lens<wbr/>Shading<wbr/>Correction<wbr/>Map</a>; the returned shading map
+data will be the one applied by the camera device for this capture request.<wbr/></p>
+<p>The shading map data may depend on the auto-exposure (AE) and AWB statistics,<wbr/> therefore
+the reliability of the map data may be affected by the AE and AWB algorithms.<wbr/> When AE and
+AWB are in AUTO modes (<a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a> <code>!=</code> OFF and <a href="#controls_android.control.awbMode">android.<wbr/>control.<wbr/>awb<wbr/>Mode</a> <code>!=</code>
+OFF),<wbr/> to get best results,<wbr/> it is recommended that the applications wait for the AE and AWB
+to be converged before using the returned shading map data.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+        
+
+      <!-- end of kind -->
+      </tbody>
+      <tr><td colspan="6" class="kind">static</td></tr>
+
+      <thead class="entries_header">
+        <tr>
+          <th class="th_name">Property Name</th>
+          <th class="th_type">Type</th>
+          <th class="th_description">Description</th>
+          <th class="th_units">Units</th>
+          <th class="th_range">Range</th>
+          <th class="th_tags">Tags</th>
+        </tr>
+      </thead>
+
+      <tbody>
+
+        
+
+        
+
+        
+
+        
+
+                
+          <tr class="entry" id="static_android.shading.availableModes">
+            <td class="entry_name
+             " rowspan="5">
+              android.<wbr/>shading.<wbr/>available<wbr/>Modes
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">byte</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  n
+                </span>
+              <span class="entry_type_visibility"> [public as enumList]</span>
+
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+                <div class="entry_type_notes">List of enums (android.<wbr/>shading.<wbr/>mode).<wbr/></div>
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>List of lens shading modes for <a href="#controls_android.shading.mode">android.<wbr/>shading.<wbr/>mode</a> that are supported by this camera device.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p>Any value listed in <a href="#controls_android.shading.mode">android.<wbr/>shading.<wbr/>mode</a></p>
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>This list contains lens shading modes that can be set for the camera device.<wbr/>
+Camera devices that support the MANUAL_<wbr/>POST_<wbr/>PROCESSING capability will always
+list OFF and FAST mode.<wbr/> This includes all FULL level devices.<wbr/>
+LEGACY devices will always only support FAST mode.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>HAL must support both FAST and HIGH_<wbr/>QUALITY if lens shading correction control is
+available on the camera device,<wbr/> but the underlying implementation can be the same for
+both modes.<wbr/> That is,<wbr/> if the highest quality implementation on the camera device does not
+slow down capture rate,<wbr/> then FAST and HIGH_<wbr/>QUALITY will generate the same output.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+        
+
+      <!-- end of kind -->
+      </tbody>
+
+  <!-- end of section -->
+  <tr><td colspan="6" id="section_statistics" class="section">statistics</td></tr>
+
+
+      <tr><td colspan="6" class="kind">controls</td></tr>
+
+      <thead class="entries_header">
+        <tr>
+          <th class="th_name">Property Name</th>
+          <th class="th_type">Type</th>
+          <th class="th_description">Description</th>
+          <th class="th_units">Units</th>
+          <th class="th_range">Range</th>
+          <th class="th_tags">Tags</th>
+        </tr>
+      </thead>
+
+      <tbody>
+
+        
+
+        
+
+        
+
+        
+
+                
+          <tr class="entry" id="controls_android.statistics.faceDetectMode">
+            <td class="entry_name
+             " rowspan="5">
+              android.<wbr/>statistics.<wbr/>face<wbr/>Detect<wbr/>Mode
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">byte</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">OFF</span>
+                    <span class="entry_type_enum_notes"><p>Do not include face detection statistics in capture
+results.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">SIMPLE</span>
+                    <span class="entry_type_enum_optional">[optional]</span>
+                    <span class="entry_type_enum_notes"><p>Return face rectangle and confidence values only.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">FULL</span>
+                    <span class="entry_type_enum_optional">[optional]</span>
+                    <span class="entry_type_enum_notes"><p>Return all face
+metadata.<wbr/></p>
+<p>In this mode,<wbr/> face rectangles,<wbr/> scores,<wbr/> landmarks,<wbr/> and face IDs are all valid.<wbr/></p></span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Operating mode for the face detector
+unit.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p><a href="#static_android.statistics.info.availableFaceDetectModes">android.<wbr/>statistics.<wbr/>info.<wbr/>available<wbr/>Face<wbr/>Detect<wbr/>Modes</a></p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_BC">BC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>Whether face detection is enabled,<wbr/> and whether it
+should output just the basic fields or the full set of
+fields.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>SIMPLE mode must fill in <a href="#dynamic_android.statistics.faceRectangles">android.<wbr/>statistics.<wbr/>face<wbr/>Rectangles</a> and
+<a href="#dynamic_android.statistics.faceScores">android.<wbr/>statistics.<wbr/>face<wbr/>Scores</a>.<wbr/>
+FULL mode must also fill in <a href="#dynamic_android.statistics.faceIds">android.<wbr/>statistics.<wbr/>face<wbr/>Ids</a>,<wbr/> and
+<a href="#dynamic_android.statistics.faceLandmarks">android.<wbr/>statistics.<wbr/>face<wbr/>Landmarks</a>.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="controls_android.statistics.histogramMode">
+            <td class="entry_name
+             " rowspan="1">
+              android.<wbr/>statistics.<wbr/>histogram<wbr/>Mode
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">byte</span>
+
+              <span class="entry_type_visibility"> [system as boolean]</span>
+
+
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">OFF</span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">ON</span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Operating mode for histogram
+generation</p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_FUTURE">FUTURE</a></li>
+              </ul>
+            </td>
+
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="controls_android.statistics.sharpnessMapMode">
+            <td class="entry_name
+             " rowspan="1">
+              android.<wbr/>statistics.<wbr/>sharpness<wbr/>Map<wbr/>Mode
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">byte</span>
+
+              <span class="entry_type_visibility"> [system as boolean]</span>
+
+
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">OFF</span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">ON</span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Operating mode for sharpness map
+generation.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_FUTURE">FUTURE</a></li>
+              </ul>
+            </td>
+
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="controls_android.statistics.hotPixelMapMode">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>statistics.<wbr/>hot<wbr/>Pixel<wbr/>Map<wbr/>Mode
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">byte</span>
+
+              <span class="entry_type_visibility"> [public as boolean]</span>
+
+
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">OFF</span>
+                    <span class="entry_type_enum_notes"><p>Hot pixel map production is disabled.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">ON</span>
+                    <span class="entry_type_enum_notes"><p>Hot pixel map production is enabled.<wbr/></p></span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Operating mode for hot pixel map generation.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p><a href="#static_android.statistics.info.availableHotPixelMapModes">android.<wbr/>statistics.<wbr/>info.<wbr/>available<wbr/>Hot<wbr/>Pixel<wbr/>Map<wbr/>Modes</a></p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_V1">V1</a></li>
+                  <li><a href="#tag_RAW">RAW</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>If set to <code>true</code>,<wbr/> a hot pixel map is returned in <a href="#dynamic_android.statistics.hotPixelMap">android.<wbr/>statistics.<wbr/>hot<wbr/>Pixel<wbr/>Map</a>.<wbr/>
+If set to <code>false</code>,<wbr/> no hot pixel map will be returned.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="controls_android.statistics.lensShadingMapMode">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>statistics.<wbr/>lens<wbr/>Shading<wbr/>Map<wbr/>Mode
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">byte</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+              <span class="entry_type_hwlevel">[full] </span>
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">OFF</span>
+                    <span class="entry_type_enum_notes"><p>Do not include a lens shading map in the capture result.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">ON</span>
+                    <span class="entry_type_enum_notes"><p>Include a lens shading map in the capture result.<wbr/></p></span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Whether the camera device will output the lens
+shading map in output result metadata.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p><a href="#static_android.statistics.info.availableLensShadingMapModes">android.<wbr/>statistics.<wbr/>info.<wbr/>available<wbr/>Lens<wbr/>Shading<wbr/>Map<wbr/>Modes</a></p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_RAW">RAW</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>When set to ON,<wbr/>
+<a href="#dynamic_android.statistics.lensShadingMap">android.<wbr/>statistics.<wbr/>lens<wbr/>Shading<wbr/>Map</a> will be provided in
+the output result metadata.<wbr/></p>
+<p>ON is always supported on devices with the RAW capability.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+        
+
+      <!-- end of kind -->
+      </tbody>
+      <tr><td colspan="6" class="kind">static</td></tr>
+
+      <thead class="entries_header">
+        <tr>
+          <th class="th_name">Property Name</th>
+          <th class="th_type">Type</th>
+          <th class="th_description">Description</th>
+          <th class="th_units">Units</th>
+          <th class="th_range">Range</th>
+          <th class="th_tags">Tags</th>
+        </tr>
+      </thead>
+
+      <tbody>
+
+        
+
+        
+
+        
+
+        
+                
+            
+
+                
+          <tr class="entry" id="static_android.statistics.info.availableFaceDetectModes">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>statistics.<wbr/>info.<wbr/>available<wbr/>Face<wbr/>Detect<wbr/>Modes
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">byte</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  n
+                </span>
+              <span class="entry_type_visibility"> [public as enumList]</span>
+
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+                <div class="entry_type_notes">List of enums from android.<wbr/>statistics.<wbr/>face<wbr/>Detect<wbr/>Mode</div>
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>List of face detection modes for <a href="#controls_android.statistics.faceDetectMode">android.<wbr/>statistics.<wbr/>face<wbr/>Detect<wbr/>Mode</a> that are
+supported by this camera device.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p>Any value listed in <a href="#controls_android.statistics.faceDetectMode">android.<wbr/>statistics.<wbr/>face<wbr/>Detect<wbr/>Mode</a></p>
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>OFF is always supported.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.statistics.info.histogramBucketCount">
+            <td class="entry_name
+             " rowspan="1">
+              android.<wbr/>statistics.<wbr/>info.<wbr/>histogram<wbr/>Bucket<wbr/>Count
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int32</span>
+
+              <span class="entry_type_visibility"> [system]</span>
+
+
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Number of histogram buckets
+supported</p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p>&gt;= 64</p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_FUTURE">FUTURE</a></li>
+              </ul>
+            </td>
+
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.statistics.info.maxFaceCount">
+            <td class="entry_name
+             " rowspan="1">
+              android.<wbr/>statistics.<wbr/>info.<wbr/>max<wbr/>Face<wbr/>Count
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int32</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>The maximum number of simultaneously detectable
+faces.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p>0 for cameras without available face detection; otherwise:
+<code>&gt;=4</code> for LIMITED or FULL hwlevel devices or
+<code>&gt;0</code> for LEGACY devices.<wbr/></p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_BC">BC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.statistics.info.maxHistogramCount">
+            <td class="entry_name
+             " rowspan="1">
+              android.<wbr/>statistics.<wbr/>info.<wbr/>max<wbr/>Histogram<wbr/>Count
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int32</span>
+
+              <span class="entry_type_visibility"> [system]</span>
+
+
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Maximum value possible for a histogram
+bucket</p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_FUTURE">FUTURE</a></li>
+              </ul>
+            </td>
+
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.statistics.info.maxSharpnessMapValue">
+            <td class="entry_name
+             " rowspan="1">
+              android.<wbr/>statistics.<wbr/>info.<wbr/>max<wbr/>Sharpness<wbr/>Map<wbr/>Value
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int32</span>
+
+              <span class="entry_type_visibility"> [system]</span>
+
+
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Maximum value possible for a sharpness map
+region.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_FUTURE">FUTURE</a></li>
+              </ul>
+            </td>
+
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.statistics.info.sharpnessMapSize">
+            <td class="entry_name
+             " rowspan="1">
+              android.<wbr/>statistics.<wbr/>info.<wbr/>sharpness<wbr/>Map<wbr/>Size
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int32</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  2
+                </span>
+              <span class="entry_type_visibility"> [system as size]</span>
+
+
+
+
+                <div class="entry_type_notes">width x height</div>
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Dimensions of the sharpness
+map</p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p>Must be at least 32 x 32</p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_FUTURE">FUTURE</a></li>
+              </ul>
+            </td>
+
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.statistics.info.availableHotPixelMapModes">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>statistics.<wbr/>info.<wbr/>available<wbr/>Hot<wbr/>Pixel<wbr/>Map<wbr/>Modes
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">byte</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  n
+                </span>
+              <span class="entry_type_visibility"> [public as boolean]</span>
+
+
+
+
+                <div class="entry_type_notes">list of enums</div>
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>List of hot pixel map output modes for <a href="#controls_android.statistics.hotPixelMapMode">android.<wbr/>statistics.<wbr/>hot<wbr/>Pixel<wbr/>Map<wbr/>Mode</a> that are
+supported by this camera device.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p>Any value listed in <a href="#controls_android.statistics.hotPixelMapMode">android.<wbr/>statistics.<wbr/>hot<wbr/>Pixel<wbr/>Map<wbr/>Mode</a></p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_V1">V1</a></li>
+                  <li><a href="#tag_RAW">RAW</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>If no hot pixel map output is available for this camera device,<wbr/> this will contain only
+<code>false</code>.<wbr/></p>
+<p>ON is always supported on devices with the RAW capability.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.statistics.info.availableLensShadingMapModes">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>statistics.<wbr/>info.<wbr/>available<wbr/>Lens<wbr/>Shading<wbr/>Map<wbr/>Modes
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">byte</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  n
+                </span>
+              <span class="entry_type_visibility"> [public as enumList]</span>
+
+
+
+
+                <div class="entry_type_notes">list of enums</div>
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>List of lens shading map output modes for <a href="#controls_android.statistics.lensShadingMapMode">android.<wbr/>statistics.<wbr/>lens<wbr/>Shading<wbr/>Map<wbr/>Mode</a> that
+are supported by this camera device.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p>Any value listed in <a href="#controls_android.statistics.lensShadingMapMode">android.<wbr/>statistics.<wbr/>lens<wbr/>Shading<wbr/>Map<wbr/>Mode</a></p>
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>If no lens shading map output is available for this camera device,<wbr/> this key will
+contain only OFF.<wbr/></p>
+<p>ON is always supported on devices with the RAW capability.<wbr/>
+LEGACY mode devices will always only support OFF.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+        
+        
+
+        
+
+      <!-- end of kind -->
+      </tbody>
+      <tr><td colspan="6" class="kind">dynamic</td></tr>
+
+      <thead class="entries_header">
+        <tr>
+          <th class="th_name">Property Name</th>
+          <th class="th_type">Type</th>
+          <th class="th_description">Description</th>
+          <th class="th_units">Units</th>
+          <th class="th_range">Range</th>
+          <th class="th_tags">Tags</th>
+        </tr>
+      </thead>
+
+      <tbody>
+
+        
+
+        
+
+        
+
+        
+
+                
+          <tr class="entry" id="dynamic_android.statistics.faceDetectMode">
+            <td class="entry_name
+             " rowspan="5">
+              android.<wbr/>statistics.<wbr/>face<wbr/>Detect<wbr/>Mode
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">byte</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">OFF</span>
+                    <span class="entry_type_enum_notes"><p>Do not include face detection statistics in capture
+results.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">SIMPLE</span>
+                    <span class="entry_type_enum_optional">[optional]</span>
+                    <span class="entry_type_enum_notes"><p>Return face rectangle and confidence values only.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">FULL</span>
+                    <span class="entry_type_enum_optional">[optional]</span>
+                    <span class="entry_type_enum_notes"><p>Return all face
+metadata.<wbr/></p>
+<p>In this mode,<wbr/> face rectangles,<wbr/> scores,<wbr/> landmarks,<wbr/> and face IDs are all valid.<wbr/></p></span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Operating mode for the face detector
+unit.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p><a href="#static_android.statistics.info.availableFaceDetectModes">android.<wbr/>statistics.<wbr/>info.<wbr/>available<wbr/>Face<wbr/>Detect<wbr/>Modes</a></p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_BC">BC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>Whether face detection is enabled,<wbr/> and whether it
+should output just the basic fields or the full set of
+fields.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>SIMPLE mode must fill in <a href="#dynamic_android.statistics.faceRectangles">android.<wbr/>statistics.<wbr/>face<wbr/>Rectangles</a> and
+<a href="#dynamic_android.statistics.faceScores">android.<wbr/>statistics.<wbr/>face<wbr/>Scores</a>.<wbr/>
+FULL mode must also fill in <a href="#dynamic_android.statistics.faceIds">android.<wbr/>statistics.<wbr/>face<wbr/>Ids</a>,<wbr/> and
+<a href="#dynamic_android.statistics.faceLandmarks">android.<wbr/>statistics.<wbr/>face<wbr/>Landmarks</a>.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="dynamic_android.statistics.faceIds">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>statistics.<wbr/>face<wbr/>Ids
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int32</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  n
+                </span>
+              <span class="entry_type_visibility"> [hidden]</span>
+
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>List of unique IDs for detected faces.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_BC">BC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>Each detected face is given a unique ID that is valid for as long as the face is visible
+to the camera device.<wbr/>  A face that leaves the field of view and later returns may be
+assigned a new ID.<wbr/></p>
+<p>Only available if <a href="#controls_android.statistics.faceDetectMode">android.<wbr/>statistics.<wbr/>face<wbr/>Detect<wbr/>Mode</a> == FULL</p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="dynamic_android.statistics.faceLandmarks">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>statistics.<wbr/>face<wbr/>Landmarks
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int32</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  n x 6
+                </span>
+              <span class="entry_type_visibility"> [hidden]</span>
+
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+                <div class="entry_type_notes">(leftEyeX,<wbr/> leftEyeY,<wbr/> rightEyeX,<wbr/> rightEyeY,<wbr/> mouthX,<wbr/> mouthY)</div>
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>List of landmarks for detected
+faces.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_BC">BC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>The coordinate system is that of <a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a>,<wbr/> with
+<code>(0,<wbr/> 0)</code> being the top-left pixel of the active array.<wbr/></p>
+<p>Only available if <a href="#controls_android.statistics.faceDetectMode">android.<wbr/>statistics.<wbr/>face<wbr/>Detect<wbr/>Mode</a> == FULL</p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="dynamic_android.statistics.faceRectangles">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>statistics.<wbr/>face<wbr/>Rectangles
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int32</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  n x 4
+                </span>
+              <span class="entry_type_visibility"> [hidden as rectangle]</span>
+
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+                <div class="entry_type_notes">(xmin,<wbr/> ymin,<wbr/> xmax,<wbr/> ymax).<wbr/> (0,<wbr/>0) is top-left of active pixel area</div>
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>List of the bounding rectangles for detected
+faces.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_BC">BC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>The coordinate system is that of <a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a>,<wbr/> with
+<code>(0,<wbr/> 0)</code> being the top-left pixel of the active array.<wbr/></p>
+<p>Only available if <a href="#controls_android.statistics.faceDetectMode">android.<wbr/>statistics.<wbr/>face<wbr/>Detect<wbr/>Mode</a> != OFF</p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="dynamic_android.statistics.faceScores">
+            <td class="entry_name
+             " rowspan="5">
+              android.<wbr/>statistics.<wbr/>face<wbr/>Scores
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">byte</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  n
+                </span>
+              <span class="entry_type_visibility"> [hidden]</span>
+
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>List of the face confidence scores for
+detected faces</p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p>1-100</p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_BC">BC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>Only available if <a href="#controls_android.statistics.faceDetectMode">android.<wbr/>statistics.<wbr/>face<wbr/>Detect<wbr/>Mode</a> != OFF.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>The value should be meaningful (for example,<wbr/> setting 100 at
+all times is illegal).<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="dynamic_android.statistics.faces">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>statistics.<wbr/>faces
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int32</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  n
+                </span>
+              <span class="entry_type_visibility"> [public as face]</span>
+
+              <span class="entry_type_synthetic">[synthetic] </span>
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>List of the faces detected through camera face detection
+in this capture.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>Only available if <a href="#controls_android.statistics.faceDetectMode">android.<wbr/>statistics.<wbr/>face<wbr/>Detect<wbr/>Mode</a> <code>!=</code> OFF.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="dynamic_android.statistics.histogram">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>statistics.<wbr/>histogram
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int32</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  n x 3
+                </span>
+              <span class="entry_type_visibility"> [system]</span>
+
+
+
+
+                <div class="entry_type_notes">count of pixels for each color channel that fall into each histogram bucket,<wbr/> scaled to be between 0 and maxHistogramCount</div>
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>A 3-channel histogram based on the raw
+sensor data</p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_FUTURE">FUTURE</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>The k'th bucket (0-based) covers the input range
+(with w = <a href="#static_android.sensor.info.whiteLevel">android.<wbr/>sensor.<wbr/>info.<wbr/>white<wbr/>Level</a>) of [ k * w/<wbr/>N,<wbr/>
+(k + 1) * w /<wbr/> N ).<wbr/> If only a monochrome histogram is
+supported,<wbr/> all channels should have the same data</p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="dynamic_android.statistics.histogramMode">
+            <td class="entry_name
+             " rowspan="1">
+              android.<wbr/>statistics.<wbr/>histogram<wbr/>Mode
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">byte</span>
+
+              <span class="entry_type_visibility"> [system as boolean]</span>
+
+
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">OFF</span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">ON</span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Operating mode for histogram
+generation</p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_FUTURE">FUTURE</a></li>
+              </ul>
+            </td>
+
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="dynamic_android.statistics.sharpnessMap">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>statistics.<wbr/>sharpness<wbr/>Map
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int32</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  n x m x 3
+                </span>
+              <span class="entry_type_visibility"> [system]</span>
+
+
+
+
+                <div class="entry_type_notes">estimated sharpness for each region of the input image.<wbr/> Normalized to be between 0 and maxSharpnessMapValue.<wbr/> Higher values mean sharper (better focused)</div>
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>A 3-channel sharpness map,<wbr/> based on the raw
+sensor data</p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_FUTURE">FUTURE</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>If only a monochrome sharpness map is supported,<wbr/>
+all channels should have the same data</p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="dynamic_android.statistics.sharpnessMapMode">
+            <td class="entry_name
+             " rowspan="1">
+              android.<wbr/>statistics.<wbr/>sharpness<wbr/>Map<wbr/>Mode
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">byte</span>
+
+              <span class="entry_type_visibility"> [system as boolean]</span>
+
+
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">OFF</span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">ON</span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Operating mode for sharpness map
+generation</p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_FUTURE">FUTURE</a></li>
+              </ul>
+            </td>
+
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="dynamic_android.statistics.lensShadingCorrectionMap">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>statistics.<wbr/>lens<wbr/>Shading<wbr/>Correction<wbr/>Map
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">byte</span>
+
+              <span class="entry_type_visibility"> [public as lensShadingMap]</span>
+
+
+              <span class="entry_type_hwlevel">[full] </span>
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>The shading map is a low-resolution floating-point map
+that lists the coefficients used to correct for vignetting,<wbr/> for each
+Bayer color channel.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p>Each gain factor is &gt;= 1</p>
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>The least shaded section of the image should have a gain factor
+of 1; all other sections should have gains above 1.<wbr/></p>
+<p>When <a href="#controls_android.colorCorrection.mode">android.<wbr/>color<wbr/>Correction.<wbr/>mode</a> = TRANSFORM_<wbr/>MATRIX,<wbr/> the map
+must take into account the colorCorrection settings.<wbr/></p>
+<p>The shading map is for the entire active pixel array,<wbr/> and is not
+affected by the crop region specified in the request.<wbr/> Each shading map
+entry is the value of the shading compensation map over a specific
+pixel on the sensor.<wbr/>  Specifically,<wbr/> with a (N x M) resolution shading
+map,<wbr/> and an active pixel array size (W x H),<wbr/> shading map entry
+(x,<wbr/>y) ϵ (0 ...<wbr/> N-1,<wbr/> 0 ...<wbr/> M-1) is the value of the shading map at
+pixel ( ((W-1)/<wbr/>(N-1)) * x,<wbr/> ((H-1)/<wbr/>(M-1)) * y) for the four color channels.<wbr/>
+The map is assumed to be bilinearly interpolated between the sample points.<wbr/></p>
+<p>The channel order is [R,<wbr/> Geven,<wbr/> Godd,<wbr/> B],<wbr/> where Geven is the green
+channel for the even rows of a Bayer pattern,<wbr/> and Godd is the odd rows.<wbr/>
+The shading map is stored in a fully interleaved format.<wbr/></p>
+<p>The shading map should have on the order of 30-40 rows and columns,<wbr/>
+and must be smaller than 64x64.<wbr/></p>
+<p>As an example,<wbr/> given a very small map defined as:</p>
+<pre><code>width,<wbr/>height = [ 4,<wbr/> 3 ]
+values =
+[ 1.<wbr/>3,<wbr/> 1.<wbr/>2,<wbr/> 1.<wbr/>15,<wbr/> 1.<wbr/>2,<wbr/>  1.<wbr/>2,<wbr/> 1.<wbr/>2,<wbr/> 1.<wbr/>15,<wbr/> 1.<wbr/>2,<wbr/>
+    1.<wbr/>1,<wbr/> 1.<wbr/>2,<wbr/> 1.<wbr/>2,<wbr/> 1.<wbr/>2,<wbr/>  1.<wbr/>3,<wbr/> 1.<wbr/>2,<wbr/> 1.<wbr/>3,<wbr/> 1.<wbr/>3,<wbr/>
+  1.<wbr/>2,<wbr/> 1.<wbr/>2,<wbr/> 1.<wbr/>25,<wbr/> 1.<wbr/>1,<wbr/>  1.<wbr/>1,<wbr/> 1.<wbr/>1,<wbr/> 1.<wbr/>1,<wbr/> 1.<wbr/>0,<wbr/>
+    1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/>  1.<wbr/>2,<wbr/> 1.<wbr/>3,<wbr/> 1.<wbr/>25,<wbr/> 1.<wbr/>2,<wbr/>
+  1.<wbr/>3,<wbr/> 1.<wbr/>2,<wbr/> 1.<wbr/>2,<wbr/> 1.<wbr/>3,<wbr/>   1.<wbr/>2,<wbr/> 1.<wbr/>15,<wbr/> 1.<wbr/>1,<wbr/> 1.<wbr/>2,<wbr/>
+    1.<wbr/>2,<wbr/> 1.<wbr/>1,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>2,<wbr/>  1.<wbr/>3,<wbr/> 1.<wbr/>15,<wbr/> 1.<wbr/>2,<wbr/> 1.<wbr/>3 ]
+</code></pre>
+<p>The low-resolution scaling map images for each channel are
+(displayed using nearest-neighbor interpolation):</p>
+<p><img alt="Red lens shading map" src="images/camera2/metadata/android.statistics.lensShadingMap/red_shading.png"/>
+<img alt="Green (even rows) lens shading map" src="images/camera2/metadata/android.statistics.lensShadingMap/green_e_shading.png"/>
+<img alt="Green (odd rows) lens shading map" src="images/camera2/metadata/android.statistics.lensShadingMap/green_o_shading.png"/>
+<img alt="Blue lens shading map" src="images/camera2/metadata/android.statistics.lensShadingMap/blue_shading.png"/></p>
+<p>As a visualization only,<wbr/> inverting the full-color map to recover an
+image of a gray wall (using bicubic interpolation for visual quality) as captured by the sensor gives:</p>
+<p><img alt="Image of a uniform white wall (inverse shading map)" src="images/camera2/metadata/android.statistics.lensShadingMap/inv_shading.png"/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="dynamic_android.statistics.lensShadingMap">
+            <td class="entry_name
+             " rowspan="5">
+              android.<wbr/>statistics.<wbr/>lens<wbr/>Shading<wbr/>Map
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">float</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  4 x n x m
+                </span>
+              <span class="entry_type_visibility"> [hidden]</span>
+
+
+              <span class="entry_type_hwlevel">[full] </span>
+
+
+                <div class="entry_type_notes">2D array of float gain factors per channel to correct lens shading</div>
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>The shading map is a low-resolution floating-point map
+that lists the coefficients used to correct for vignetting,<wbr/> for each
+Bayer color channel of RAW image data.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p>Each gain factor is &gt;= 1</p>
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>The least shaded section of the image should have a gain factor
+of 1; all other sections should have gains above 1.<wbr/></p>
+<p>When <a href="#controls_android.colorCorrection.mode">android.<wbr/>color<wbr/>Correction.<wbr/>mode</a> = TRANSFORM_<wbr/>MATRIX,<wbr/> the map
+must take into account the colorCorrection settings.<wbr/></p>
+<p>The shading map is for the entire active pixel array,<wbr/> and is not
+affected by the crop region specified in the request.<wbr/> Each shading map
+entry is the value of the shading compensation map over a specific
+pixel on the sensor.<wbr/>  Specifically,<wbr/> with a (N x M) resolution shading
+map,<wbr/> and an active pixel array size (W x H),<wbr/> shading map entry
+(x,<wbr/>y) ϵ (0 ...<wbr/> N-1,<wbr/> 0 ...<wbr/> M-1) is the value of the shading map at
+pixel ( ((W-1)/<wbr/>(N-1)) * x,<wbr/> ((H-1)/<wbr/>(M-1)) * y) for the four color channels.<wbr/>
+The map is assumed to be bilinearly interpolated between the sample points.<wbr/></p>
+<p>The channel order is [R,<wbr/> Geven,<wbr/> Godd,<wbr/> B],<wbr/> where Geven is the green
+channel for the even rows of a Bayer pattern,<wbr/> and Godd is the odd rows.<wbr/>
+The shading map is stored in a fully interleaved format,<wbr/> and its size
+is provided in the camera static metadata by <a href="#static_android.lens.info.shadingMapSize">android.<wbr/>lens.<wbr/>info.<wbr/>shading<wbr/>Map<wbr/>Size</a>.<wbr/></p>
+<p>The shading map should have on the order of 30-40 rows and columns,<wbr/>
+and must be smaller than 64x64.<wbr/></p>
+<p>As an example,<wbr/> given a very small map defined as:</p>
+<pre><code><a href="#static_android.lens.info.shadingMapSize">android.<wbr/>lens.<wbr/>info.<wbr/>shading<wbr/>Map<wbr/>Size</a> = [ 4,<wbr/> 3 ]
+<a href="#dynamic_android.statistics.lensShadingMap">android.<wbr/>statistics.<wbr/>lens<wbr/>Shading<wbr/>Map</a> =
+[ 1.<wbr/>3,<wbr/> 1.<wbr/>2,<wbr/> 1.<wbr/>15,<wbr/> 1.<wbr/>2,<wbr/>  1.<wbr/>2,<wbr/> 1.<wbr/>2,<wbr/> 1.<wbr/>15,<wbr/> 1.<wbr/>2,<wbr/>
+    1.<wbr/>1,<wbr/> 1.<wbr/>2,<wbr/> 1.<wbr/>2,<wbr/> 1.<wbr/>2,<wbr/>  1.<wbr/>3,<wbr/> 1.<wbr/>2,<wbr/> 1.<wbr/>3,<wbr/> 1.<wbr/>3,<wbr/>
+  1.<wbr/>2,<wbr/> 1.<wbr/>2,<wbr/> 1.<wbr/>25,<wbr/> 1.<wbr/>1,<wbr/>  1.<wbr/>1,<wbr/> 1.<wbr/>1,<wbr/> 1.<wbr/>1,<wbr/> 1.<wbr/>0,<wbr/>
+    1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/>  1.<wbr/>2,<wbr/> 1.<wbr/>3,<wbr/> 1.<wbr/>25,<wbr/> 1.<wbr/>2,<wbr/>
+  1.<wbr/>3,<wbr/> 1.<wbr/>2,<wbr/> 1.<wbr/>2,<wbr/> 1.<wbr/>3,<wbr/>   1.<wbr/>2,<wbr/> 1.<wbr/>15,<wbr/> 1.<wbr/>1,<wbr/> 1.<wbr/>2,<wbr/>
+    1.<wbr/>2,<wbr/> 1.<wbr/>1,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>2,<wbr/>  1.<wbr/>3,<wbr/> 1.<wbr/>15,<wbr/> 1.<wbr/>2,<wbr/> 1.<wbr/>3 ]
+</code></pre>
+<p>The low-resolution scaling map images for each channel are
+(displayed using nearest-neighbor interpolation):</p>
+<p><img alt="Red lens shading map" src="images/camera2/metadata/android.statistics.lensShadingMap/red_shading.png"/>
+<img alt="Green (even rows) lens shading map" src="images/camera2/metadata/android.statistics.lensShadingMap/green_e_shading.png"/>
+<img alt="Green (odd rows) lens shading map" src="images/camera2/metadata/android.statistics.lensShadingMap/green_o_shading.png"/>
+<img alt="Blue lens shading map" src="images/camera2/metadata/android.statistics.lensShadingMap/blue_shading.png"/></p>
+<p>As a visualization only,<wbr/> inverting the full-color map to recover an
+image of a gray wall (using bicubic interpolation for visual quality)
+as captured by the sensor gives:</p>
+<p><img alt="Image of a uniform white wall (inverse shading map)" src="images/camera2/metadata/android.statistics.lensShadingMap/inv_shading.png"/></p>
+<p>Note that the RAW image data might be subject to lens shading
+correction not reported on this map.<wbr/> Query
+<a href="#static_android.sensor.info.lensShadingApplied">android.<wbr/>sensor.<wbr/>info.<wbr/>lens<wbr/>Shading<wbr/>Applied</a> to see if RAW image data has been subject
+to lens shading correction.<wbr/> If <a href="#static_android.sensor.info.lensShadingApplied">android.<wbr/>sensor.<wbr/>info.<wbr/>lens<wbr/>Shading<wbr/>Applied</a>
+is TRUE,<wbr/> the RAW image data is subject to partial or full lens shading
+correction.<wbr/> In the case full lens shading correction is applied to RAW
+images,<wbr/> the gain factor map reported in this key will contain all 1.<wbr/>0 gains.<wbr/>
+In other words,<wbr/> the map reported in this key is the remaining lens shading
+that needs to be applied on the RAW image to get images without lens shading
+artifacts.<wbr/> See <a href="#static_android.request.maxNumOutputRaw">android.<wbr/>request.<wbr/>max<wbr/>Num<wbr/>Output<wbr/>Raw</a> for a list of RAW image
+formats.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>The lens shading map calculation may depend on exposure and white balance statistics.<wbr/>
+When AE and AWB are in AUTO modes
+(<a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a> <code>!=</code> OFF and <a href="#controls_android.control.awbMode">android.<wbr/>control.<wbr/>awb<wbr/>Mode</a> <code>!=</code> OFF),<wbr/> the HAL
+may have all the information it needs to generate the most accurate lens shading map.<wbr/> When
+AE or AWB are in manual mode
+(<a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a> <code>==</code> OFF or <a href="#controls_android.control.awbMode">android.<wbr/>control.<wbr/>awb<wbr/>Mode</a> <code>==</code> OFF),<wbr/> the shading map
+may be adversely impacted by manual exposure or white balance parameters.<wbr/> To avoid
+generating unreliable shading map data,<wbr/> the HAL may choose to lock the shading map with
+the latest known good map generated when the AE and AWB are in AUTO modes.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="dynamic_android.statistics.predictedColorGains">
+            <td class="entry_name
+                entry_name_deprecated
+             " rowspan="3">
+              android.<wbr/>statistics.<wbr/>predicted<wbr/>Color<wbr/>Gains
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">float</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  4
+                </span>
+              <span class="entry_type_visibility"> [hidden]</span>
+
+
+
+              <span class="entry_type_deprecated">[deprecated] </span>
+
+                <div class="entry_type_notes">A 1D array of floats for 4 color channel gains</div>
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>The best-fit color channel gains calculated
+by the camera device's statistics units for the current output frame.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p><span class="entry_range_deprecated">Deprecated</span>. Do not use.</p>
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>This may be different than the gains used for this frame,<wbr/>
+since statistics processing on data from a new frame
+typically completes after the transform has already been
+applied to that frame.<wbr/></p>
+<p>The 4 channel gains are defined in Bayer domain,<wbr/>
+see <a href="#controls_android.colorCorrection.gains">android.<wbr/>color<wbr/>Correction.<wbr/>gains</a> for details.<wbr/></p>
+<p>This value should always be calculated by the auto-white balance (AWB) block,<wbr/>
+regardless of the android.<wbr/>control.<wbr/>* current values.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="dynamic_android.statistics.predictedColorTransform">
+            <td class="entry_name
+                entry_name_deprecated
+             " rowspan="3">
+              android.<wbr/>statistics.<wbr/>predicted<wbr/>Color<wbr/>Transform
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">rational</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  3 x 3
+                </span>
+              <span class="entry_type_visibility"> [hidden]</span>
+
+
+
+              <span class="entry_type_deprecated">[deprecated] </span>
+
+                <div class="entry_type_notes">3x3 rational matrix in row-major order</div>
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>The best-fit color transform matrix estimate
+calculated by the camera device's statistics units for the current
+output frame.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p><span class="entry_range_deprecated">Deprecated</span>. Do not use.</p>
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>The camera device will provide the estimate from its
+statistics unit on the white balance transforms to use
+for the next frame.<wbr/> These are the values the camera device believes
+are the best fit for the current output frame.<wbr/> This may
+be different than the transform used for this frame,<wbr/> since
+statistics processing on data from a new frame typically
+completes after the transform has already been applied to
+that frame.<wbr/></p>
+<p>These estimates must be provided for all frames,<wbr/> even if
+capture settings and color transforms are set by the application.<wbr/></p>
+<p>This value should always be calculated by the auto-white balance (AWB) block,<wbr/>
+regardless of the android.<wbr/>control.<wbr/>* current values.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="dynamic_android.statistics.sceneFlicker">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>statistics.<wbr/>scene<wbr/>Flicker
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">byte</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+              <span class="entry_type_hwlevel">[full] </span>
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">NONE</span>
+                    <span class="entry_type_enum_notes"><p>The camera device does not detect any flickering illumination
+in the current scene.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">50HZ</span>
+                    <span class="entry_type_enum_notes"><p>The camera device detects illumination flickering at 50Hz
+in the current scene.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">60HZ</span>
+                    <span class="entry_type_enum_notes"><p>The camera device detects illumination flickering at 60Hz
+in the current scene.<wbr/></p></span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>The camera device estimated scene illumination lighting
+frequency.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>Many light sources,<wbr/> such as most fluorescent lights,<wbr/> flicker at a rate
+that depends on the local utility power standards.<wbr/> This flicker must be
+accounted for by auto-exposure routines to avoid artifacts in captured images.<wbr/>
+The camera device uses this entry to tell the application what the scene
+illuminant frequency is.<wbr/></p>
+<p>When manual exposure control is enabled
+(<code><a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a> == OFF</code> or <code><a href="#controls_android.control.mode">android.<wbr/>control.<wbr/>mode</a> ==
+OFF</code>),<wbr/> the <a href="#controls_android.control.aeAntibandingMode">android.<wbr/>control.<wbr/>ae<wbr/>Antibanding<wbr/>Mode</a> doesn't perform
+antibanding,<wbr/> and the application can ensure it selects
+exposure times that do not cause banding issues by looking
+into this metadata field.<wbr/> See
+<a href="#controls_android.control.aeAntibandingMode">android.<wbr/>control.<wbr/>ae<wbr/>Antibanding<wbr/>Mode</a> for more details.<wbr/></p>
+<p>Reports NONE if there doesn't appear to be flickering illumination.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="dynamic_android.statistics.hotPixelMapMode">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>statistics.<wbr/>hot<wbr/>Pixel<wbr/>Map<wbr/>Mode
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">byte</span>
+
+              <span class="entry_type_visibility"> [public as boolean]</span>
+
+
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">OFF</span>
+                    <span class="entry_type_enum_notes"><p>Hot pixel map production is disabled.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">ON</span>
+                    <span class="entry_type_enum_notes"><p>Hot pixel map production is enabled.<wbr/></p></span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Operating mode for hot pixel map generation.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p><a href="#static_android.statistics.info.availableHotPixelMapModes">android.<wbr/>statistics.<wbr/>info.<wbr/>available<wbr/>Hot<wbr/>Pixel<wbr/>Map<wbr/>Modes</a></p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_V1">V1</a></li>
+                  <li><a href="#tag_RAW">RAW</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>If set to <code>true</code>,<wbr/> a hot pixel map is returned in <a href="#dynamic_android.statistics.hotPixelMap">android.<wbr/>statistics.<wbr/>hot<wbr/>Pixel<wbr/>Map</a>.<wbr/>
+If set to <code>false</code>,<wbr/> no hot pixel map will be returned.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="dynamic_android.statistics.hotPixelMap">
+            <td class="entry_name
+             " rowspan="5">
+              android.<wbr/>statistics.<wbr/>hot<wbr/>Pixel<wbr/>Map
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int32</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  2 x n
+                </span>
+              <span class="entry_type_visibility"> [public as point]</span>
+
+
+
+
+                <div class="entry_type_notes">list of coordinates based on android.<wbr/>sensor.<wbr/>pixel<wbr/>Array<wbr/>Size</div>
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>List of <code>(x,<wbr/> y)</code> coordinates of hot/<wbr/>defective pixels on the sensor.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p>n &lt;= number of pixels on the sensor.<wbr/>
+The <code>(x,<wbr/> y)</code> coordinates must be bounded by
+<a href="#static_android.sensor.info.pixelArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>pixel<wbr/>Array<wbr/>Size</a>.<wbr/></p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_V1">V1</a></li>
+                  <li><a href="#tag_RAW">RAW</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>A coordinate <code>(x,<wbr/> y)</code> must lie between <code>(0,<wbr/> 0)</code>,<wbr/> and
+<code>(width - 1,<wbr/> height - 1)</code> (inclusive),<wbr/> which are the top-left and
+bottom-right of the pixel array,<wbr/> respectively.<wbr/> The width and
+height dimensions are given in <a href="#static_android.sensor.info.pixelArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>pixel<wbr/>Array<wbr/>Size</a>.<wbr/>
+This may include hot pixels that lie outside of the active array
+bounds given by <a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a>.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>A hotpixel map contains the coordinates of pixels on the camera
+sensor that do not report valid values (usually due to defects in
+the camera sensor).<wbr/> This includes pixels that are stuck at certain
+values,<wbr/> or have a response that does not accurately encode the
+incoming light from the scene.<wbr/></p>
+<p>To avoid performance issues,<wbr/> there should be significantly fewer hot
+pixels than actual pixels on the camera sensor.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="dynamic_android.statistics.lensShadingMapMode">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>statistics.<wbr/>lens<wbr/>Shading<wbr/>Map<wbr/>Mode
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">byte</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+              <span class="entry_type_hwlevel">[full] </span>
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">OFF</span>
+                    <span class="entry_type_enum_notes"><p>Do not include a lens shading map in the capture result.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">ON</span>
+                    <span class="entry_type_enum_notes"><p>Include a lens shading map in the capture result.<wbr/></p></span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Whether the camera device will output the lens
+shading map in output result metadata.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p><a href="#static_android.statistics.info.availableLensShadingMapModes">android.<wbr/>statistics.<wbr/>info.<wbr/>available<wbr/>Lens<wbr/>Shading<wbr/>Map<wbr/>Modes</a></p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_RAW">RAW</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>When set to ON,<wbr/>
+<a href="#dynamic_android.statistics.lensShadingMap">android.<wbr/>statistics.<wbr/>lens<wbr/>Shading<wbr/>Map</a> will be provided in
+the output result metadata.<wbr/></p>
+<p>ON is always supported on devices with the RAW capability.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+        
+
+      <!-- end of kind -->
+      </tbody>
+
+  <!-- end of section -->
+  <tr><td colspan="6" id="section_tonemap" class="section">tonemap</td></tr>
+
+
+      <tr><td colspan="6" class="kind">controls</td></tr>
+
+      <thead class="entries_header">
+        <tr>
+          <th class="th_name">Property Name</th>
+          <th class="th_type">Type</th>
+          <th class="th_description">Description</th>
+          <th class="th_units">Units</th>
+          <th class="th_range">Range</th>
+          <th class="th_tags">Tags</th>
+        </tr>
+      </thead>
+
+      <tbody>
+
+        
+
+        
+
+        
+
+        
+
+                
+          <tr class="entry" id="controls_android.tonemap.curveBlue">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>tonemap.<wbr/>curve<wbr/>Blue
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">float</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  n x 2
+                </span>
+              <span class="entry_type_visibility"> [hidden]</span>
+
+
+              <span class="entry_type_hwlevel">[full] </span>
+
+
+                <div class="entry_type_notes">1D array of float pairs (P_<wbr/>IN,<wbr/> P_<wbr/>OUT).<wbr/> The maximum number of pairs is specified by android.<wbr/>tonemap.<wbr/>max<wbr/>Curve<wbr/>Points.<wbr/></div>
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Tonemapping /<wbr/> contrast /<wbr/> gamma curve for the blue
+channel,<wbr/> to use when <a href="#controls_android.tonemap.mode">android.<wbr/>tonemap.<wbr/>mode</a> is
+CONTRAST_<wbr/>CURVE.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>See <a href="#controls_android.tonemap.curveRed">android.<wbr/>tonemap.<wbr/>curve<wbr/>Red</a> for more details.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="controls_android.tonemap.curveGreen">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>tonemap.<wbr/>curve<wbr/>Green
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">float</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  n x 2
+                </span>
+              <span class="entry_type_visibility"> [hidden]</span>
+
+
+              <span class="entry_type_hwlevel">[full] </span>
+
+
+                <div class="entry_type_notes">1D array of float pairs (P_<wbr/>IN,<wbr/> P_<wbr/>OUT).<wbr/> The maximum number of pairs is specified by android.<wbr/>tonemap.<wbr/>max<wbr/>Curve<wbr/>Points.<wbr/></div>
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Tonemapping /<wbr/> contrast /<wbr/> gamma curve for the green
+channel,<wbr/> to use when <a href="#controls_android.tonemap.mode">android.<wbr/>tonemap.<wbr/>mode</a> is
+CONTRAST_<wbr/>CURVE.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>See <a href="#controls_android.tonemap.curveRed">android.<wbr/>tonemap.<wbr/>curve<wbr/>Red</a> for more details.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="controls_android.tonemap.curveRed">
+            <td class="entry_name
+             " rowspan="5">
+              android.<wbr/>tonemap.<wbr/>curve<wbr/>Red
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">float</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  n x 2
+                </span>
+              <span class="entry_type_visibility"> [hidden]</span>
+
+
+              <span class="entry_type_hwlevel">[full] </span>
+
+
+                <div class="entry_type_notes">1D array of float pairs (P_<wbr/>IN,<wbr/> P_<wbr/>OUT).<wbr/> The maximum number of pairs is specified by android.<wbr/>tonemap.<wbr/>max<wbr/>Curve<wbr/>Points.<wbr/></div>
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Tonemapping /<wbr/> contrast /<wbr/> gamma curve for the red
+channel,<wbr/> to use when <a href="#controls_android.tonemap.mode">android.<wbr/>tonemap.<wbr/>mode</a> is
+CONTRAST_<wbr/>CURVE.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p>0-1 on both input and output coordinates,<wbr/> normalized
+as a floating-point value such that 0 == black and 1 == white.<wbr/></p>
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>Each channel's curve is defined by an array of control points:</p>
+<pre><code><a href="#controls_android.tonemap.curveRed">android.<wbr/>tonemap.<wbr/>curve<wbr/>Red</a> =
+  [ P0in,<wbr/> P0out,<wbr/> P1in,<wbr/> P1out,<wbr/> P2in,<wbr/> P2out,<wbr/> P3in,<wbr/> P3out,<wbr/> ...,<wbr/> PNin,<wbr/> PNout ]
+2 &lt;= N &lt;= <a href="#static_android.tonemap.maxCurvePoints">android.<wbr/>tonemap.<wbr/>max<wbr/>Curve<wbr/>Points</a></code></pre>
+<p>These are sorted in order of increasing <code>Pin</code>; it is
+required that input values 0.<wbr/>0 and 1.<wbr/>0 are included in the list to
+define a complete mapping.<wbr/> For input values between control points,<wbr/>
+the camera device must linearly interpolate between the control
+points.<wbr/></p>
+<p>Each curve can have an independent number of points,<wbr/> and the number
+of points can be less than max (that is,<wbr/> the request doesn't have to
+always provide a curve with number of points equivalent to
+<a href="#static_android.tonemap.maxCurvePoints">android.<wbr/>tonemap.<wbr/>max<wbr/>Curve<wbr/>Points</a>).<wbr/></p>
+<p>A few examples,<wbr/> and their corresponding graphical mappings; these
+only specify the red channel and the precision is limited to 4
+digits,<wbr/> for conciseness.<wbr/></p>
+<p>Linear mapping:</p>
+<pre><code><a href="#controls_android.tonemap.curveRed">android.<wbr/>tonemap.<wbr/>curve<wbr/>Red</a> = [ 0,<wbr/> 0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0 ]
+</code></pre>
+<p><img alt="Linear mapping curve" src="images/camera2/metadata/android.tonemap.curveRed/linear_tonemap.png"/></p>
+<p>Invert mapping:</p>
+<pre><code><a href="#controls_android.tonemap.curveRed">android.<wbr/>tonemap.<wbr/>curve<wbr/>Red</a> = [ 0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 0 ]
+</code></pre>
+<p><img alt="Inverting mapping curve" src="images/camera2/metadata/android.tonemap.curveRed/inverse_tonemap.png"/></p>
+<p>Gamma 1/<wbr/>2.<wbr/>2 mapping,<wbr/> with 16 control points:</p>
+<pre><code><a href="#controls_android.tonemap.curveRed">android.<wbr/>tonemap.<wbr/>curve<wbr/>Red</a> = [
+  0.<wbr/>0000,<wbr/> 0.<wbr/>0000,<wbr/> 0.<wbr/>0667,<wbr/> 0.<wbr/>2920,<wbr/> 0.<wbr/>1333,<wbr/> 0.<wbr/>4002,<wbr/> 0.<wbr/>2000,<wbr/> 0.<wbr/>4812,<wbr/>
+  0.<wbr/>2667,<wbr/> 0.<wbr/>5484,<wbr/> 0.<wbr/>3333,<wbr/> 0.<wbr/>6069,<wbr/> 0.<wbr/>4000,<wbr/> 0.<wbr/>6594,<wbr/> 0.<wbr/>4667,<wbr/> 0.<wbr/>7072,<wbr/>
+  0.<wbr/>5333,<wbr/> 0.<wbr/>7515,<wbr/> 0.<wbr/>6000,<wbr/> 0.<wbr/>7928,<wbr/> 0.<wbr/>6667,<wbr/> 0.<wbr/>8317,<wbr/> 0.<wbr/>7333,<wbr/> 0.<wbr/>8685,<wbr/>
+  0.<wbr/>8000,<wbr/> 0.<wbr/>9035,<wbr/> 0.<wbr/>8667,<wbr/> 0.<wbr/>9370,<wbr/> 0.<wbr/>9333,<wbr/> 0.<wbr/>9691,<wbr/> 1.<wbr/>0000,<wbr/> 1.<wbr/>0000 ]
+</code></pre>
+<p><img alt="Gamma = 1/2.2 tonemapping curve" src="images/camera2/metadata/android.tonemap.curveRed/gamma_tonemap.png"/></p>
+<p>Standard sRGB gamma mapping,<wbr/> per IEC 61966-2-1:1999,<wbr/> with 16 control points:</p>
+<pre><code><a href="#controls_android.tonemap.curveRed">android.<wbr/>tonemap.<wbr/>curve<wbr/>Red</a> = [
+  0.<wbr/>0000,<wbr/> 0.<wbr/>0000,<wbr/> 0.<wbr/>0667,<wbr/> 0.<wbr/>2864,<wbr/> 0.<wbr/>1333,<wbr/> 0.<wbr/>4007,<wbr/> 0.<wbr/>2000,<wbr/> 0.<wbr/>4845,<wbr/>
+  0.<wbr/>2667,<wbr/> 0.<wbr/>5532,<wbr/> 0.<wbr/>3333,<wbr/> 0.<wbr/>6125,<wbr/> 0.<wbr/>4000,<wbr/> 0.<wbr/>6652,<wbr/> 0.<wbr/>4667,<wbr/> 0.<wbr/>7130,<wbr/>
+  0.<wbr/>5333,<wbr/> 0.<wbr/>7569,<wbr/> 0.<wbr/>6000,<wbr/> 0.<wbr/>7977,<wbr/> 0.<wbr/>6667,<wbr/> 0.<wbr/>8360,<wbr/> 0.<wbr/>7333,<wbr/> 0.<wbr/>8721,<wbr/>
+  0.<wbr/>8000,<wbr/> 0.<wbr/>9063,<wbr/> 0.<wbr/>8667,<wbr/> 0.<wbr/>9389,<wbr/> 0.<wbr/>9333,<wbr/> 0.<wbr/>9701,<wbr/> 1.<wbr/>0000,<wbr/> 1.<wbr/>0000 ]
+</code></pre>
+<p><img alt="sRGB tonemapping curve" src="images/camera2/metadata/android.tonemap.curveRed/srgb_tonemap.png"/></p>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>For good quality of mapping,<wbr/> at least 128 control points are
+preferred.<wbr/></p>
+<p>A typical use case of this would be a gamma-1/<wbr/>2.<wbr/>2 curve,<wbr/> with as many
+control points used as are available.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="controls_android.tonemap.curve">
+            <td class="entry_name
+             " rowspan="5">
+              android.<wbr/>tonemap.<wbr/>curve
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">float</span>
+
+              <span class="entry_type_visibility"> [public as tonemapCurve]</span>
+
+              <span class="entry_type_synthetic">[synthetic] </span>
+
+              <span class="entry_type_hwlevel">[full] </span>
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Tonemapping /<wbr/> contrast /<wbr/> gamma curve to use when <a href="#controls_android.tonemap.mode">android.<wbr/>tonemap.<wbr/>mode</a>
+is CONTRAST_<wbr/>CURVE.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>The tonemapCurve consists of three curves,<wbr/> one for each of the red,<wbr/> green,<wbr/> and blue
+channels respectively.<wbr/> The following example uses the red channel as an
+example.<wbr/> The same logic applies to the green and blue channels.<wbr/>
+Each channel's curve is defined by an array of control points:</p>
+<pre><code>curveRed =
+  [ P0(in,<wbr/> out),<wbr/> P1(in,<wbr/> out),<wbr/> P2(in,<wbr/> out),<wbr/> P3(in,<wbr/> out),<wbr/> ...,<wbr/> PN(in,<wbr/> out) ]
+2 &lt;= N &lt;= <a href="#static_android.tonemap.maxCurvePoints">android.<wbr/>tonemap.<wbr/>max<wbr/>Curve<wbr/>Points</a></code></pre>
+<p>These are sorted in order of increasing <code>Pin</code>; it is always
+guaranteed that input values 0.<wbr/>0 and 1.<wbr/>0 are included in the list to
+define a complete mapping.<wbr/> For input values between control points,<wbr/>
+the camera device must linearly interpolate between the control
+points.<wbr/></p>
+<p>Each curve can have an independent number of points,<wbr/> and the number
+of points can be less than max (that is,<wbr/> the request doesn't have to
+always provide a curve with number of points equivalent to
+<a href="#static_android.tonemap.maxCurvePoints">android.<wbr/>tonemap.<wbr/>max<wbr/>Curve<wbr/>Points</a>).<wbr/></p>
+<p>A few examples,<wbr/> and their corresponding graphical mappings; these
+only specify the red channel and the precision is limited to 4
+digits,<wbr/> for conciseness.<wbr/></p>
+<p>Linear mapping:</p>
+<pre><code>curveRed = [ (0,<wbr/> 0),<wbr/> (1.<wbr/>0,<wbr/> 1.<wbr/>0) ]
+</code></pre>
+<p><img alt="Linear mapping curve" src="images/camera2/metadata/android.tonemap.curveRed/linear_tonemap.png"/></p>
+<p>Invert mapping:</p>
+<pre><code>curveRed = [ (0,<wbr/> 1.<wbr/>0),<wbr/> (1.<wbr/>0,<wbr/> 0) ]
+</code></pre>
+<p><img alt="Inverting mapping curve" src="images/camera2/metadata/android.tonemap.curveRed/inverse_tonemap.png"/></p>
+<p>Gamma 1/<wbr/>2.<wbr/>2 mapping,<wbr/> with 16 control points:</p>
+<pre><code>curveRed = [
+  (0.<wbr/>0000,<wbr/> 0.<wbr/>0000),<wbr/> (0.<wbr/>0667,<wbr/> 0.<wbr/>2920),<wbr/> (0.<wbr/>1333,<wbr/> 0.<wbr/>4002),<wbr/> (0.<wbr/>2000,<wbr/> 0.<wbr/>4812),<wbr/>
+  (0.<wbr/>2667,<wbr/> 0.<wbr/>5484),<wbr/> (0.<wbr/>3333,<wbr/> 0.<wbr/>6069),<wbr/> (0.<wbr/>4000,<wbr/> 0.<wbr/>6594),<wbr/> (0.<wbr/>4667,<wbr/> 0.<wbr/>7072),<wbr/>
+  (0.<wbr/>5333,<wbr/> 0.<wbr/>7515),<wbr/> (0.<wbr/>6000,<wbr/> 0.<wbr/>7928),<wbr/> (0.<wbr/>6667,<wbr/> 0.<wbr/>8317),<wbr/> (0.<wbr/>7333,<wbr/> 0.<wbr/>8685),<wbr/>
+  (0.<wbr/>8000,<wbr/> 0.<wbr/>9035),<wbr/> (0.<wbr/>8667,<wbr/> 0.<wbr/>9370),<wbr/> (0.<wbr/>9333,<wbr/> 0.<wbr/>9691),<wbr/> (1.<wbr/>0000,<wbr/> 1.<wbr/>0000) ]
+</code></pre>
+<p><img alt="Gamma = 1/2.2 tonemapping curve" src="images/camera2/metadata/android.tonemap.curveRed/gamma_tonemap.png"/></p>
+<p>Standard sRGB gamma mapping,<wbr/> per IEC 61966-2-1:1999,<wbr/> with 16 control points:</p>
+<pre><code>curveRed = [
+  (0.<wbr/>0000,<wbr/> 0.<wbr/>0000),<wbr/> (0.<wbr/>0667,<wbr/> 0.<wbr/>2864),<wbr/> (0.<wbr/>1333,<wbr/> 0.<wbr/>4007),<wbr/> (0.<wbr/>2000,<wbr/> 0.<wbr/>4845),<wbr/>
+  (0.<wbr/>2667,<wbr/> 0.<wbr/>5532),<wbr/> (0.<wbr/>3333,<wbr/> 0.<wbr/>6125),<wbr/> (0.<wbr/>4000,<wbr/> 0.<wbr/>6652),<wbr/> (0.<wbr/>4667,<wbr/> 0.<wbr/>7130),<wbr/>
+  (0.<wbr/>5333,<wbr/> 0.<wbr/>7569),<wbr/> (0.<wbr/>6000,<wbr/> 0.<wbr/>7977),<wbr/> (0.<wbr/>6667,<wbr/> 0.<wbr/>8360),<wbr/> (0.<wbr/>7333,<wbr/> 0.<wbr/>8721),<wbr/>
+  (0.<wbr/>8000,<wbr/> 0.<wbr/>9063),<wbr/> (0.<wbr/>8667,<wbr/> 0.<wbr/>9389),<wbr/> (0.<wbr/>9333,<wbr/> 0.<wbr/>9701),<wbr/> (1.<wbr/>0000,<wbr/> 1.<wbr/>0000) ]
+</code></pre>
+<p><img alt="sRGB tonemapping curve" src="images/camera2/metadata/android.tonemap.curveRed/srgb_tonemap.png"/></p>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>This entry is created by the framework from the curveRed,<wbr/> curveGreen and
+curveBlue entries.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="controls_android.tonemap.mode">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>tonemap.<wbr/>mode
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">byte</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+              <span class="entry_type_hwlevel">[full] </span>
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">CONTRAST_CURVE</span>
+                    <span class="entry_type_enum_notes"><p>Use the tone mapping curve specified in
+the <a href="#controls_android.tonemap.curve">android.<wbr/>tonemap.<wbr/>curve</a>* entries.<wbr/></p>
+<p>All color enhancement and tonemapping must be disabled,<wbr/> except
+for applying the tonemapping curve specified by
+<a href="#controls_android.tonemap.curve">android.<wbr/>tonemap.<wbr/>curve</a>.<wbr/></p>
+<p>Must not slow down frame rate relative to raw
+sensor output.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">FAST</span>
+                    <span class="entry_type_enum_notes"><p>Advanced gamma mapping and color enhancement may be applied,<wbr/> without
+reducing frame rate compared to raw sensor output.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">HIGH_QUALITY</span>
+                    <span class="entry_type_enum_notes"><p>High-quality gamma mapping and color enhancement will be applied,<wbr/> at
+the cost of possibly reduced frame rate compared to raw sensor output.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">GAMMA_VALUE</span>
+                    <span class="entry_type_enum_notes"><p>Use the gamma value specified in <a href="#controls_android.tonemap.gamma">android.<wbr/>tonemap.<wbr/>gamma</a> to perform
+tonemapping.<wbr/></p>
+<p>All color enhancement and tonemapping must be disabled,<wbr/> except
+for applying the tonemapping curve specified by <a href="#controls_android.tonemap.gamma">android.<wbr/>tonemap.<wbr/>gamma</a>.<wbr/></p>
+<p>Must not slow down frame rate relative to raw sensor output.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">PRESET_CURVE</span>
+                    <span class="entry_type_enum_notes"><p>Use the preset tonemapping curve specified in
+<a href="#controls_android.tonemap.presetCurve">android.<wbr/>tonemap.<wbr/>preset<wbr/>Curve</a> to perform tonemapping.<wbr/></p>
+<p>All color enhancement and tonemapping must be disabled,<wbr/> except
+for applying the tonemapping curve specified by
+<a href="#controls_android.tonemap.presetCurve">android.<wbr/>tonemap.<wbr/>preset<wbr/>Curve</a>.<wbr/></p>
+<p>Must not slow down frame rate relative to raw sensor output.<wbr/></p></span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>High-level global contrast/<wbr/>gamma/<wbr/>tonemapping control.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p><a href="#static_android.tonemap.availableToneMapModes">android.<wbr/>tonemap.<wbr/>available<wbr/>Tone<wbr/>Map<wbr/>Modes</a></p>
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>When switching to an application-defined contrast curve by setting
+<a href="#controls_android.tonemap.mode">android.<wbr/>tonemap.<wbr/>mode</a> to CONTRAST_<wbr/>CURVE,<wbr/> the curve is defined
+per-channel with a set of <code>(in,<wbr/> out)</code> points that specify the
+mapping from input high-bit-depth pixel value to the output
+low-bit-depth value.<wbr/>  Since the actual pixel ranges of both input
+and output may change depending on the camera pipeline,<wbr/> the values
+are specified by normalized floating-point numbers.<wbr/></p>
+<p>More-complex color mapping operations such as 3D color look-up
+tables,<wbr/> selective chroma enhancement,<wbr/> or other non-linear color
+transforms will be disabled when <a href="#controls_android.tonemap.mode">android.<wbr/>tonemap.<wbr/>mode</a> is
+CONTRAST_<wbr/>CURVE.<wbr/></p>
+<p>When using either FAST or HIGH_<wbr/>QUALITY,<wbr/> the camera device will
+emit its own tonemap curve in <a href="#controls_android.tonemap.curve">android.<wbr/>tonemap.<wbr/>curve</a>.<wbr/>
+These values are always available,<wbr/> and as close as possible to the
+actually used nonlinear/<wbr/>nonglobal transforms.<wbr/></p>
+<p>If a request is sent with CONTRAST_<wbr/>CURVE with the camera device's
+provided curve in FAST or HIGH_<wbr/>QUALITY,<wbr/> the image's tonemap will be
+roughly the same.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="controls_android.tonemap.gamma">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>tonemap.<wbr/>gamma
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">float</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Tonemapping curve to use when <a href="#controls_android.tonemap.mode">android.<wbr/>tonemap.<wbr/>mode</a> is
+GAMMA_<wbr/>VALUE.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>The tonemap curve will be defined by the following formula:
+* OUT = pow(IN,<wbr/> 1.<wbr/>0 /<wbr/> gamma)
+where IN and OUT are the input and output pixel values scaled to the range [0.<wbr/>0,<wbr/> 1.<wbr/>0],<wbr/>
+pow is the power function and gamma is the gamma value specified by this
+key.<wbr/></p>
+<p>The same curve will be applied to all color channels.<wbr/> The camera device
+may clip the input gamma value to its supported range.<wbr/> The actual applied
+value will be returned in capture result.<wbr/></p>
+<p>The valid range of gamma value varies on different devices,<wbr/> but values
+within [1.<wbr/>0,<wbr/> 5.<wbr/>0] are guaranteed not to be clipped.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="controls_android.tonemap.presetCurve">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>tonemap.<wbr/>preset<wbr/>Curve
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">byte</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">SRGB</span>
+                    <span class="entry_type_enum_notes"><p>Tonemapping curve is defined by sRGB</p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">REC709</span>
+                    <span class="entry_type_enum_notes"><p>Tonemapping curve is defined by ITU-R BT.<wbr/>709</p></span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Tonemapping curve to use when <a href="#controls_android.tonemap.mode">android.<wbr/>tonemap.<wbr/>mode</a> is
+PRESET_<wbr/>CURVE.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>The tonemap curve will be defined by specified standard.<wbr/></p>
+<p>sRGB (approximated by 16 control points):</p>
+<p><img alt="sRGB tonemapping curve" src="images/camera2/metadata/android.tonemap.curveRed/srgb_tonemap.png"/></p>
+<p>Rec.<wbr/> 709 (approximated by 16 control points):</p>
+<p><img alt="Rec. 709 tonemapping curve" src="images/camera2/metadata/android.tonemap.curveRed/rec709_tonemap.png"/></p>
+<p>Note that above figures show a 16 control points approximation of preset
+curves.<wbr/> Camera devices may apply a different approximation to the curve.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+        
+
+      <!-- end of kind -->
+      </tbody>
+      <tr><td colspan="6" class="kind">static</td></tr>
+
+      <thead class="entries_header">
+        <tr>
+          <th class="th_name">Property Name</th>
+          <th class="th_type">Type</th>
+          <th class="th_description">Description</th>
+          <th class="th_units">Units</th>
+          <th class="th_range">Range</th>
+          <th class="th_tags">Tags</th>
+        </tr>
+      </thead>
+
+      <tbody>
+
+        
+
+        
+
+        
+
+        
+
+                
+          <tr class="entry" id="static_android.tonemap.maxCurvePoints">
+            <td class="entry_name
+             " rowspan="5">
+              android.<wbr/>tonemap.<wbr/>max<wbr/>Curve<wbr/>Points
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int32</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+              <span class="entry_type_hwlevel">[full] </span>
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Maximum number of supported points in the
+tonemap curve that can be used for <a href="#controls_android.tonemap.curve">android.<wbr/>tonemap.<wbr/>curve</a>.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>If the actual number of points provided by the application (in <a href="#controls_android.tonemap.curve">android.<wbr/>tonemap.<wbr/>curve</a>*) is
+less than this maximum,<wbr/> the camera device will resample the curve to its internal
+representation,<wbr/> using linear interpolation.<wbr/></p>
+<p>The output curves in the result metadata may have a different number
+of points than the input curves,<wbr/> and will represent the actual
+hardware curves used as closely as possible when linearly interpolated.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>This value must be at least 64.<wbr/> This should be at least 128.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.tonemap.availableToneMapModes">
+            <td class="entry_name
+             " rowspan="5">
+              android.<wbr/>tonemap.<wbr/>available<wbr/>Tone<wbr/>Map<wbr/>Modes
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">byte</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  n
+                </span>
+              <span class="entry_type_visibility"> [public as enumList]</span>
+
+
+              <span class="entry_type_hwlevel">[full] </span>
+
+
+                <div class="entry_type_notes">list of enums</div>
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>List of tonemapping modes for <a href="#controls_android.tonemap.mode">android.<wbr/>tonemap.<wbr/>mode</a> that are supported by this camera
+device.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p>Any value listed in <a href="#controls_android.tonemap.mode">android.<wbr/>tonemap.<wbr/>mode</a></p>
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>Camera devices that support the MANUAL_<wbr/>POST_<wbr/>PROCESSING capability will always contain
+at least one of the below mode combinations:</p>
+<ul>
+<li>CONTRAST_<wbr/>CURVE,<wbr/> FAST and HIGH_<wbr/>QUALITY</li>
+<li>GAMMA_<wbr/>VALUE,<wbr/> PRESET_<wbr/>CURVE,<wbr/> FAST and HIGH_<wbr/>QUALITY</li>
+</ul>
+<p>This includes all FULL level devices.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>HAL must support both FAST and HIGH_<wbr/>QUALITY if automatic tonemap control is available
+on the camera device,<wbr/> but the underlying implementation can be the same for both modes.<wbr/>
+That is,<wbr/> if the highest quality implementation on the camera device does not slow down
+capture rate,<wbr/> then FAST and HIGH_<wbr/>QUALITY will generate the same output.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+        
+
+      <!-- end of kind -->
+      </tbody>
+      <tr><td colspan="6" class="kind">dynamic</td></tr>
+
+      <thead class="entries_header">
+        <tr>
+          <th class="th_name">Property Name</th>
+          <th class="th_type">Type</th>
+          <th class="th_description">Description</th>
+          <th class="th_units">Units</th>
+          <th class="th_range">Range</th>
+          <th class="th_tags">Tags</th>
+        </tr>
+      </thead>
+
+      <tbody>
+
+        
+
+        
+
+        
+
+        
+
+                
+          <tr class="entry" id="dynamic_android.tonemap.curveBlue">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>tonemap.<wbr/>curve<wbr/>Blue
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">float</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  n x 2
+                </span>
+              <span class="entry_type_visibility"> [hidden]</span>
+
+
+              <span class="entry_type_hwlevel">[full] </span>
+
+
+                <div class="entry_type_notes">1D array of float pairs (P_<wbr/>IN,<wbr/> P_<wbr/>OUT).<wbr/> The maximum number of pairs is specified by android.<wbr/>tonemap.<wbr/>max<wbr/>Curve<wbr/>Points.<wbr/></div>
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Tonemapping /<wbr/> contrast /<wbr/> gamma curve for the blue
+channel,<wbr/> to use when <a href="#controls_android.tonemap.mode">android.<wbr/>tonemap.<wbr/>mode</a> is
+CONTRAST_<wbr/>CURVE.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>See <a href="#controls_android.tonemap.curveRed">android.<wbr/>tonemap.<wbr/>curve<wbr/>Red</a> for more details.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="dynamic_android.tonemap.curveGreen">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>tonemap.<wbr/>curve<wbr/>Green
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">float</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  n x 2
+                </span>
+              <span class="entry_type_visibility"> [hidden]</span>
+
+
+              <span class="entry_type_hwlevel">[full] </span>
+
+
+                <div class="entry_type_notes">1D array of float pairs (P_<wbr/>IN,<wbr/> P_<wbr/>OUT).<wbr/> The maximum number of pairs is specified by android.<wbr/>tonemap.<wbr/>max<wbr/>Curve<wbr/>Points.<wbr/></div>
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Tonemapping /<wbr/> contrast /<wbr/> gamma curve for the green
+channel,<wbr/> to use when <a href="#controls_android.tonemap.mode">android.<wbr/>tonemap.<wbr/>mode</a> is
+CONTRAST_<wbr/>CURVE.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>See <a href="#controls_android.tonemap.curveRed">android.<wbr/>tonemap.<wbr/>curve<wbr/>Red</a> for more details.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="dynamic_android.tonemap.curveRed">
+            <td class="entry_name
+             " rowspan="5">
+              android.<wbr/>tonemap.<wbr/>curve<wbr/>Red
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">float</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  n x 2
+                </span>
+              <span class="entry_type_visibility"> [hidden]</span>
+
+
+              <span class="entry_type_hwlevel">[full] </span>
+
+
+                <div class="entry_type_notes">1D array of float pairs (P_<wbr/>IN,<wbr/> P_<wbr/>OUT).<wbr/> The maximum number of pairs is specified by android.<wbr/>tonemap.<wbr/>max<wbr/>Curve<wbr/>Points.<wbr/></div>
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Tonemapping /<wbr/> contrast /<wbr/> gamma curve for the red
+channel,<wbr/> to use when <a href="#controls_android.tonemap.mode">android.<wbr/>tonemap.<wbr/>mode</a> is
+CONTRAST_<wbr/>CURVE.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p>0-1 on both input and output coordinates,<wbr/> normalized
+as a floating-point value such that 0 == black and 1 == white.<wbr/></p>
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>Each channel's curve is defined by an array of control points:</p>
+<pre><code><a href="#controls_android.tonemap.curveRed">android.<wbr/>tonemap.<wbr/>curve<wbr/>Red</a> =
+  [ P0in,<wbr/> P0out,<wbr/> P1in,<wbr/> P1out,<wbr/> P2in,<wbr/> P2out,<wbr/> P3in,<wbr/> P3out,<wbr/> ...,<wbr/> PNin,<wbr/> PNout ]
+2 &lt;= N &lt;= <a href="#static_android.tonemap.maxCurvePoints">android.<wbr/>tonemap.<wbr/>max<wbr/>Curve<wbr/>Points</a></code></pre>
+<p>These are sorted in order of increasing <code>Pin</code>; it is
+required that input values 0.<wbr/>0 and 1.<wbr/>0 are included in the list to
+define a complete mapping.<wbr/> For input values between control points,<wbr/>
+the camera device must linearly interpolate between the control
+points.<wbr/></p>
+<p>Each curve can have an independent number of points,<wbr/> and the number
+of points can be less than max (that is,<wbr/> the request doesn't have to
+always provide a curve with number of points equivalent to
+<a href="#static_android.tonemap.maxCurvePoints">android.<wbr/>tonemap.<wbr/>max<wbr/>Curve<wbr/>Points</a>).<wbr/></p>
+<p>A few examples,<wbr/> and their corresponding graphical mappings; these
+only specify the red channel and the precision is limited to 4
+digits,<wbr/> for conciseness.<wbr/></p>
+<p>Linear mapping:</p>
+<pre><code><a href="#controls_android.tonemap.curveRed">android.<wbr/>tonemap.<wbr/>curve<wbr/>Red</a> = [ 0,<wbr/> 0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0 ]
+</code></pre>
+<p><img alt="Linear mapping curve" src="images/camera2/metadata/android.tonemap.curveRed/linear_tonemap.png"/></p>
+<p>Invert mapping:</p>
+<pre><code><a href="#controls_android.tonemap.curveRed">android.<wbr/>tonemap.<wbr/>curve<wbr/>Red</a> = [ 0,<wbr/> 1.<wbr/>0,<wbr/> 1.<wbr/>0,<wbr/> 0 ]
+</code></pre>
+<p><img alt="Inverting mapping curve" src="images/camera2/metadata/android.tonemap.curveRed/inverse_tonemap.png"/></p>
+<p>Gamma 1/<wbr/>2.<wbr/>2 mapping,<wbr/> with 16 control points:</p>
+<pre><code><a href="#controls_android.tonemap.curveRed">android.<wbr/>tonemap.<wbr/>curve<wbr/>Red</a> = [
+  0.<wbr/>0000,<wbr/> 0.<wbr/>0000,<wbr/> 0.<wbr/>0667,<wbr/> 0.<wbr/>2920,<wbr/> 0.<wbr/>1333,<wbr/> 0.<wbr/>4002,<wbr/> 0.<wbr/>2000,<wbr/> 0.<wbr/>4812,<wbr/>
+  0.<wbr/>2667,<wbr/> 0.<wbr/>5484,<wbr/> 0.<wbr/>3333,<wbr/> 0.<wbr/>6069,<wbr/> 0.<wbr/>4000,<wbr/> 0.<wbr/>6594,<wbr/> 0.<wbr/>4667,<wbr/> 0.<wbr/>7072,<wbr/>
+  0.<wbr/>5333,<wbr/> 0.<wbr/>7515,<wbr/> 0.<wbr/>6000,<wbr/> 0.<wbr/>7928,<wbr/> 0.<wbr/>6667,<wbr/> 0.<wbr/>8317,<wbr/> 0.<wbr/>7333,<wbr/> 0.<wbr/>8685,<wbr/>
+  0.<wbr/>8000,<wbr/> 0.<wbr/>9035,<wbr/> 0.<wbr/>8667,<wbr/> 0.<wbr/>9370,<wbr/> 0.<wbr/>9333,<wbr/> 0.<wbr/>9691,<wbr/> 1.<wbr/>0000,<wbr/> 1.<wbr/>0000 ]
+</code></pre>
+<p><img alt="Gamma = 1/2.2 tonemapping curve" src="images/camera2/metadata/android.tonemap.curveRed/gamma_tonemap.png"/></p>
+<p>Standard sRGB gamma mapping,<wbr/> per IEC 61966-2-1:1999,<wbr/> with 16 control points:</p>
+<pre><code><a href="#controls_android.tonemap.curveRed">android.<wbr/>tonemap.<wbr/>curve<wbr/>Red</a> = [
+  0.<wbr/>0000,<wbr/> 0.<wbr/>0000,<wbr/> 0.<wbr/>0667,<wbr/> 0.<wbr/>2864,<wbr/> 0.<wbr/>1333,<wbr/> 0.<wbr/>4007,<wbr/> 0.<wbr/>2000,<wbr/> 0.<wbr/>4845,<wbr/>
+  0.<wbr/>2667,<wbr/> 0.<wbr/>5532,<wbr/> 0.<wbr/>3333,<wbr/> 0.<wbr/>6125,<wbr/> 0.<wbr/>4000,<wbr/> 0.<wbr/>6652,<wbr/> 0.<wbr/>4667,<wbr/> 0.<wbr/>7130,<wbr/>
+  0.<wbr/>5333,<wbr/> 0.<wbr/>7569,<wbr/> 0.<wbr/>6000,<wbr/> 0.<wbr/>7977,<wbr/> 0.<wbr/>6667,<wbr/> 0.<wbr/>8360,<wbr/> 0.<wbr/>7333,<wbr/> 0.<wbr/>8721,<wbr/>
+  0.<wbr/>8000,<wbr/> 0.<wbr/>9063,<wbr/> 0.<wbr/>8667,<wbr/> 0.<wbr/>9389,<wbr/> 0.<wbr/>9333,<wbr/> 0.<wbr/>9701,<wbr/> 1.<wbr/>0000,<wbr/> 1.<wbr/>0000 ]
+</code></pre>
+<p><img alt="sRGB tonemapping curve" src="images/camera2/metadata/android.tonemap.curveRed/srgb_tonemap.png"/></p>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>For good quality of mapping,<wbr/> at least 128 control points are
+preferred.<wbr/></p>
+<p>A typical use case of this would be a gamma-1/<wbr/>2.<wbr/>2 curve,<wbr/> with as many
+control points used as are available.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="dynamic_android.tonemap.curve">
+            <td class="entry_name
+             " rowspan="5">
+              android.<wbr/>tonemap.<wbr/>curve
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">float</span>
+
+              <span class="entry_type_visibility"> [public as tonemapCurve]</span>
+
+              <span class="entry_type_synthetic">[synthetic] </span>
+
+              <span class="entry_type_hwlevel">[full] </span>
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Tonemapping /<wbr/> contrast /<wbr/> gamma curve to use when <a href="#controls_android.tonemap.mode">android.<wbr/>tonemap.<wbr/>mode</a>
+is CONTRAST_<wbr/>CURVE.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>The tonemapCurve consists of three curves for each of the red,<wbr/> green,<wbr/> and blue
+channels respectively.<wbr/> The following example uses the red channel as an
+example.<wbr/> The same logic applies to the green and blue channels.<wbr/>
+Each channel's curve is defined by an array of control points:</p>
+<pre><code>curveRed =
+  [ P0(in,<wbr/> out),<wbr/> P1(in,<wbr/> out),<wbr/> P2(in,<wbr/> out),<wbr/> P3(in,<wbr/> out),<wbr/> ...,<wbr/> PN(in,<wbr/> out) ]
+2 &lt;= N &lt;= <a href="#static_android.tonemap.maxCurvePoints">android.<wbr/>tonemap.<wbr/>max<wbr/>Curve<wbr/>Points</a></code></pre>
+<p>These are sorted in order of increasing <code>Pin</code>; it is always
+guaranteed that input values 0.<wbr/>0 and 1.<wbr/>0 are included in the list to
+define a complete mapping.<wbr/> For input values between control points,<wbr/>
+the camera device must linearly interpolate between the control
+points.<wbr/></p>
+<p>Each curve can have an independent number of points,<wbr/> and the number
+of points can be less than max (that is,<wbr/> the request doesn't have to
+always provide a curve with number of points equivalent to
+<a href="#static_android.tonemap.maxCurvePoints">android.<wbr/>tonemap.<wbr/>max<wbr/>Curve<wbr/>Points</a>).<wbr/></p>
+<p>A few examples,<wbr/> and their corresponding graphical mappings; these
+only specify the red channel and the precision is limited to 4
+digits,<wbr/> for conciseness.<wbr/></p>
+<p>Linear mapping:</p>
+<pre><code>curveRed = [ (0,<wbr/> 0),<wbr/> (1.<wbr/>0,<wbr/> 1.<wbr/>0) ]
+</code></pre>
+<p><img alt="Linear mapping curve" src="images/camera2/metadata/android.tonemap.curveRed/linear_tonemap.png"/></p>
+<p>Invert mapping:</p>
+<pre><code>curveRed = [ (0,<wbr/> 1.<wbr/>0),<wbr/> (1.<wbr/>0,<wbr/> 0) ]
+</code></pre>
+<p><img alt="Inverting mapping curve" src="images/camera2/metadata/android.tonemap.curveRed/inverse_tonemap.png"/></p>
+<p>Gamma 1/<wbr/>2.<wbr/>2 mapping,<wbr/> with 16 control points:</p>
+<pre><code>curveRed = [
+  (0.<wbr/>0000,<wbr/> 0.<wbr/>0000),<wbr/> (0.<wbr/>0667,<wbr/> 0.<wbr/>2920),<wbr/> (0.<wbr/>1333,<wbr/> 0.<wbr/>4002),<wbr/> (0.<wbr/>2000,<wbr/> 0.<wbr/>4812),<wbr/>
+  (0.<wbr/>2667,<wbr/> 0.<wbr/>5484),<wbr/> (0.<wbr/>3333,<wbr/> 0.<wbr/>6069),<wbr/> (0.<wbr/>4000,<wbr/> 0.<wbr/>6594),<wbr/> (0.<wbr/>4667,<wbr/> 0.<wbr/>7072),<wbr/>
+  (0.<wbr/>5333,<wbr/> 0.<wbr/>7515),<wbr/> (0.<wbr/>6000,<wbr/> 0.<wbr/>7928),<wbr/> (0.<wbr/>6667,<wbr/> 0.<wbr/>8317),<wbr/> (0.<wbr/>7333,<wbr/> 0.<wbr/>8685),<wbr/>
+  (0.<wbr/>8000,<wbr/> 0.<wbr/>9035),<wbr/> (0.<wbr/>8667,<wbr/> 0.<wbr/>9370),<wbr/> (0.<wbr/>9333,<wbr/> 0.<wbr/>9691),<wbr/> (1.<wbr/>0000,<wbr/> 1.<wbr/>0000) ]
+</code></pre>
+<p><img alt="Gamma = 1/2.2 tonemapping curve" src="images/camera2/metadata/android.tonemap.curveRed/gamma_tonemap.png"/></p>
+<p>Standard sRGB gamma mapping,<wbr/> per IEC 61966-2-1:1999,<wbr/> with 16 control points:</p>
+<pre><code>curveRed = [
+  (0.<wbr/>0000,<wbr/> 0.<wbr/>0000),<wbr/> (0.<wbr/>0667,<wbr/> 0.<wbr/>2864),<wbr/> (0.<wbr/>1333,<wbr/> 0.<wbr/>4007),<wbr/> (0.<wbr/>2000,<wbr/> 0.<wbr/>4845),<wbr/>
+  (0.<wbr/>2667,<wbr/> 0.<wbr/>5532),<wbr/> (0.<wbr/>3333,<wbr/> 0.<wbr/>6125),<wbr/> (0.<wbr/>4000,<wbr/> 0.<wbr/>6652),<wbr/> (0.<wbr/>4667,<wbr/> 0.<wbr/>7130),<wbr/>
+  (0.<wbr/>5333,<wbr/> 0.<wbr/>7569),<wbr/> (0.<wbr/>6000,<wbr/> 0.<wbr/>7977),<wbr/> (0.<wbr/>6667,<wbr/> 0.<wbr/>8360),<wbr/> (0.<wbr/>7333,<wbr/> 0.<wbr/>8721),<wbr/>
+  (0.<wbr/>8000,<wbr/> 0.<wbr/>9063),<wbr/> (0.<wbr/>8667,<wbr/> 0.<wbr/>9389),<wbr/> (0.<wbr/>9333,<wbr/> 0.<wbr/>9701),<wbr/> (1.<wbr/>0000,<wbr/> 1.<wbr/>0000) ]
+</code></pre>
+<p><img alt="sRGB tonemapping curve" src="images/camera2/metadata/android.tonemap.curveRed/srgb_tonemap.png"/></p>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>This entry is created by the framework from the curveRed,<wbr/> curveGreen and
+curveBlue entries.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="dynamic_android.tonemap.mode">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>tonemap.<wbr/>mode
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">byte</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+              <span class="entry_type_hwlevel">[full] </span>
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">CONTRAST_CURVE</span>
+                    <span class="entry_type_enum_notes"><p>Use the tone mapping curve specified in
+the <a href="#controls_android.tonemap.curve">android.<wbr/>tonemap.<wbr/>curve</a>* entries.<wbr/></p>
+<p>All color enhancement and tonemapping must be disabled,<wbr/> except
+for applying the tonemapping curve specified by
+<a href="#controls_android.tonemap.curve">android.<wbr/>tonemap.<wbr/>curve</a>.<wbr/></p>
+<p>Must not slow down frame rate relative to raw
+sensor output.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">FAST</span>
+                    <span class="entry_type_enum_notes"><p>Advanced gamma mapping and color enhancement may be applied,<wbr/> without
+reducing frame rate compared to raw sensor output.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">HIGH_QUALITY</span>
+                    <span class="entry_type_enum_notes"><p>High-quality gamma mapping and color enhancement will be applied,<wbr/> at
+the cost of possibly reduced frame rate compared to raw sensor output.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">GAMMA_VALUE</span>
+                    <span class="entry_type_enum_notes"><p>Use the gamma value specified in <a href="#controls_android.tonemap.gamma">android.<wbr/>tonemap.<wbr/>gamma</a> to perform
+tonemapping.<wbr/></p>
+<p>All color enhancement and tonemapping must be disabled,<wbr/> except
+for applying the tonemapping curve specified by <a href="#controls_android.tonemap.gamma">android.<wbr/>tonemap.<wbr/>gamma</a>.<wbr/></p>
+<p>Must not slow down frame rate relative to raw sensor output.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">PRESET_CURVE</span>
+                    <span class="entry_type_enum_notes"><p>Use the preset tonemapping curve specified in
+<a href="#controls_android.tonemap.presetCurve">android.<wbr/>tonemap.<wbr/>preset<wbr/>Curve</a> to perform tonemapping.<wbr/></p>
+<p>All color enhancement and tonemapping must be disabled,<wbr/> except
+for applying the tonemapping curve specified by
+<a href="#controls_android.tonemap.presetCurve">android.<wbr/>tonemap.<wbr/>preset<wbr/>Curve</a>.<wbr/></p>
+<p>Must not slow down frame rate relative to raw sensor output.<wbr/></p></span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>High-level global contrast/<wbr/>gamma/<wbr/>tonemapping control.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p><a href="#static_android.tonemap.availableToneMapModes">android.<wbr/>tonemap.<wbr/>available<wbr/>Tone<wbr/>Map<wbr/>Modes</a></p>
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>When switching to an application-defined contrast curve by setting
+<a href="#controls_android.tonemap.mode">android.<wbr/>tonemap.<wbr/>mode</a> to CONTRAST_<wbr/>CURVE,<wbr/> the curve is defined
+per-channel with a set of <code>(in,<wbr/> out)</code> points that specify the
+mapping from input high-bit-depth pixel value to the output
+low-bit-depth value.<wbr/>  Since the actual pixel ranges of both input
+and output may change depending on the camera pipeline,<wbr/> the values
+are specified by normalized floating-point numbers.<wbr/></p>
+<p>More-complex color mapping operations such as 3D color look-up
+tables,<wbr/> selective chroma enhancement,<wbr/> or other non-linear color
+transforms will be disabled when <a href="#controls_android.tonemap.mode">android.<wbr/>tonemap.<wbr/>mode</a> is
+CONTRAST_<wbr/>CURVE.<wbr/></p>
+<p>When using either FAST or HIGH_<wbr/>QUALITY,<wbr/> the camera device will
+emit its own tonemap curve in <a href="#controls_android.tonemap.curve">android.<wbr/>tonemap.<wbr/>curve</a>.<wbr/>
+These values are always available,<wbr/> and as close as possible to the
+actually used nonlinear/<wbr/>nonglobal transforms.<wbr/></p>
+<p>If a request is sent with CONTRAST_<wbr/>CURVE with the camera device's
+provided curve in FAST or HIGH_<wbr/>QUALITY,<wbr/> the image's tonemap will be
+roughly the same.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="dynamic_android.tonemap.gamma">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>tonemap.<wbr/>gamma
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">float</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Tonemapping curve to use when <a href="#controls_android.tonemap.mode">android.<wbr/>tonemap.<wbr/>mode</a> is
+GAMMA_<wbr/>VALUE</p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>The tonemap curve will be defined by the following formula:
+* OUT = pow(IN,<wbr/> 1.<wbr/>0 /<wbr/> gamma)
+where IN and OUT are the input and output pixel values scaled to the range [0.<wbr/>0,<wbr/> 1.<wbr/>0],<wbr/>
+pow is the power function and gamma is the gamma value specified by this
+key.<wbr/></p>
+<p>The same curve will be applied to all color channels.<wbr/> The camera device
+may clip the input gamma value to its supported range.<wbr/> The actual applied
+value will be returned in capture result.<wbr/></p>
+<p>The valid range of gamma value varies on different devices,<wbr/> but values
+within [1.<wbr/>0,<wbr/> 5.<wbr/>0] are guaranteed not to be clipped.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="dynamic_android.tonemap.presetCurve">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>tonemap.<wbr/>preset<wbr/>Curve
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">byte</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">SRGB</span>
+                    <span class="entry_type_enum_notes"><p>Tonemapping curve is defined by sRGB</p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">REC709</span>
+                    <span class="entry_type_enum_notes"><p>Tonemapping curve is defined by ITU-R BT.<wbr/>709</p></span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Tonemapping curve to use when <a href="#controls_android.tonemap.mode">android.<wbr/>tonemap.<wbr/>mode</a> is
+PRESET_<wbr/>CURVE</p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>The tonemap curve will be defined by the specified standard.<wbr/></p>
+<p>sRGB (approximated by 16 control points):</p>
+<p><img alt="sRGB tonemapping curve" src="images/camera2/metadata/android.tonemap.curveRed/srgb_tonemap.png"/></p>
+<p>Rec.<wbr/> 709 (approximated by 16 control points):</p>
+<p><img alt="Rec. 709 tonemapping curve" src="images/camera2/metadata/android.tonemap.curveRed/rec709_tonemap.png"/></p>
+<p>Note that the above figures show a 16 control point approximation of the preset
+curves.<wbr/> Camera devices may apply a different approximation to the curve.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+        
+
+      <!-- end of kind -->
+      </tbody>
+
+  <!-- end of section -->
+  <tr><td colspan="6" id="section_led" class="section">led</td></tr>
+
+
+      <tr><td colspan="6" class="kind">controls</td></tr>
+
+      <thead class="entries_header">
+        <tr>
+          <th class="th_name">Property Name</th>
+          <th class="th_type">Type</th>
+          <th class="th_description">Description</th>
+          <th class="th_units">Units</th>
+          <th class="th_range">Range</th>
+          <th class="th_tags">Tags</th>
+        </tr>
+      </thead>
+
+      <tbody>
+
+        
+
+        
+
+        
+
+        
+
+                
+          <tr class="entry" id="controls_android.led.transmit">
+            <td class="entry_name
+             " rowspan="1">
+              android.<wbr/>led.<wbr/>transmit
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">byte</span>
+
+              <span class="entry_type_visibility"> [hidden as boolean]</span>
+
+
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">OFF</span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">ON</span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>This LED is nominally used to indicate to the user
+that the camera is powered on and may be streaming images back to the
+Application Processor.<wbr/> In certain rare circumstances,<wbr/> the OS may
+disable this when video is processed locally and not transmitted to
+any untrusted applications.<wbr/></p>
+<p>In particular,<wbr/> the LED <em>must</em> always be on when the data could be
+transmitted off the device.<wbr/> The LED <em>should</em> always be on whenever
+data is stored locally on the device.<wbr/></p>
+<p>The LED <em>may</em> be off if a trusted application is using the data that
+doesn't violate the above rules.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+        
+
+      <!-- end of kind -->
+      </tbody>
+      <tr><td colspan="6" class="kind">dynamic</td></tr>
+
+      <thead class="entries_header">
+        <tr>
+          <th class="th_name">Property Name</th>
+          <th class="th_type">Type</th>
+          <th class="th_description">Description</th>
+          <th class="th_units">Units</th>
+          <th class="th_range">Range</th>
+          <th class="th_tags">Tags</th>
+        </tr>
+      </thead>
+
+      <tbody>
+
+        
+
+        
+
+        
+
+        
+
+                
+          <tr class="entry" id="dynamic_android.led.transmit">
+            <td class="entry_name
+             " rowspan="1">
+              android.<wbr/>led.<wbr/>transmit
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">byte</span>
+
+              <span class="entry_type_visibility"> [hidden as boolean]</span>
+
+
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">OFF</span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">ON</span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>This LED is nominally used to indicate to the user
+that the camera is powered on and may be streaming images back to the
+Application Processor.<wbr/> In certain rare circumstances,<wbr/> the OS may
+disable this when video is processed locally and not transmitted to
+any untrusted applications.<wbr/></p>
+<p>In particular,<wbr/> the LED <em>must</em> always be on when the data could be
+transmitted off the device.<wbr/> The LED <em>should</em> always be on whenever
+data is stored locally on the device.<wbr/></p>
+<p>The LED <em>may</em> be off if a trusted application is using the data that
+doesn't violate the above rules.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+        
+
+      <!-- end of kind -->
+      </tbody>
+      <tr><td colspan="6" class="kind">static</td></tr>
+
+      <thead class="entries_header">
+        <tr>
+          <th class="th_name">Property Name</th>
+          <th class="th_type">Type</th>
+          <th class="th_description">Description</th>
+          <th class="th_units">Units</th>
+          <th class="th_range">Range</th>
+          <th class="th_tags">Tags</th>
+        </tr>
+      </thead>
+
+      <tbody>
+
+        
+
+        
+
+        
+
+        
+
+                
+          <tr class="entry" id="static_android.led.availableLeds">
+            <td class="entry_name
+             " rowspan="1">
+              android.<wbr/>led.<wbr/>available<wbr/>Leds
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">byte</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  n
+                </span>
+              <span class="entry_type_visibility"> [hidden]</span>
+
+
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">TRANSMIT</span>
+                    <span class="entry_type_enum_notes"><p><a href="#controls_android.led.transmit">android.<wbr/>led.<wbr/>transmit</a> control is used.<wbr/></p></span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>A list of camera LEDs that are available on this system.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+        
+
+      <!-- end of kind -->
+      </tbody>
+
+  <!-- end of section -->
+  <tr><td colspan="6" id="section_info" class="section">info</td></tr>
+
+
+      <tr><td colspan="6" class="kind">static</td></tr>
+
+      <thead class="entries_header">
+        <tr>
+          <th class="th_name">Property Name</th>
+          <th class="th_type">Type</th>
+          <th class="th_description">Description</th>
+          <th class="th_units">Units</th>
+          <th class="th_range">Range</th>
+          <th class="th_tags">Tags</th>
+        </tr>
+      </thead>
+
+      <tbody>
+
+        
+
+        
+
+        
+
+        
+
+                
+          <tr class="entry" id="static_android.info.supportedHardwareLevel">
+            <td class="entry_name
+             " rowspan="5">
+              android.<wbr/>info.<wbr/>supported<wbr/>Hardware<wbr/>Level
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">byte</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">LIMITED</span>
+                    <span class="entry_type_enum_notes"><p>This camera device has only limited capabilities.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">FULL</span>
+                    <span class="entry_type_enum_notes"><p>This camera device is capable of supporting advanced imaging applications.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">LEGACY</span>
+                    <span class="entry_type_enum_notes"><p>This camera device is running in backward compatibility mode.<wbr/></p></span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Generally classifies the overall set of the camera device functionality.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>Camera devices will come in three flavors: LEGACY,<wbr/> LIMITED and FULL.<wbr/></p>
+<p>A FULL device will support below capabilities:</p>
+<ul>
+<li>BURST_<wbr/>CAPTURE capability (<a href="#static_android.request.availableCapabilities">android.<wbr/>request.<wbr/>available<wbr/>Capabilities</a> contains BURST_<wbr/>CAPTURE)</li>
+<li>Per frame control (<a href="#static_android.sync.maxLatency">android.<wbr/>sync.<wbr/>max<wbr/>Latency</a> <code>==</code> PER_<wbr/>FRAME_<wbr/>CONTROL)</li>
+<li>Manual sensor control (<a href="#static_android.request.availableCapabilities">android.<wbr/>request.<wbr/>available<wbr/>Capabilities</a> contains MANUAL_<wbr/>SENSOR)</li>
+<li>Manual post-processing control (<a href="#static_android.request.availableCapabilities">android.<wbr/>request.<wbr/>available<wbr/>Capabilities</a> contains
+  MANUAL_<wbr/>POST_<wbr/>PROCESSING)</li>
+<li>At least 3 processed (but not stalling) format output streams
+  (<a href="#static_android.request.maxNumOutputProc">android.<wbr/>request.<wbr/>max<wbr/>Num<wbr/>Output<wbr/>Proc</a> <code>&gt;=</code> 3)</li>
+<li>The required stream configurations defined in <a href="#static_android.scaler.availableStreamConfigurations">android.<wbr/>scaler.<wbr/>available<wbr/>Stream<wbr/>Configurations</a></li>
+<li>The required exposure time range defined in <a href="#static_android.sensor.info.exposureTimeRange">android.<wbr/>sensor.<wbr/>info.<wbr/>exposure<wbr/>Time<wbr/>Range</a></li>
+<li>The required maxFrameDuration defined in <a href="#static_android.sensor.info.maxFrameDuration">android.<wbr/>sensor.<wbr/>info.<wbr/>max<wbr/>Frame<wbr/>Duration</a></li>
+</ul>
+<p>A LIMITED device may have some or none of the above characteristics.<wbr/>
+To find out more refer to <a href="#static_android.request.availableCapabilities">android.<wbr/>request.<wbr/>available<wbr/>Capabilities</a>.<wbr/></p>
+<p>Some features are not part of any particular hardware level or capability and must be
+queried separately.<wbr/> These include:</p>
+<ul>
+<li>Calibrated timestamps (<a href="#static_android.sensor.info.timestampSource">android.<wbr/>sensor.<wbr/>info.<wbr/>timestamp<wbr/>Source</a> <code>==</code> REALTIME)</li>
+<li>Precision lens control (<a href="#static_android.lens.info.focusDistanceCalibration">android.<wbr/>lens.<wbr/>info.<wbr/>focus<wbr/>Distance<wbr/>Calibration</a> <code>==</code> CALIBRATED)</li>
+<li>Face detection (<a href="#static_android.statistics.info.availableFaceDetectModes">android.<wbr/>statistics.<wbr/>info.<wbr/>available<wbr/>Face<wbr/>Detect<wbr/>Modes</a>)</li>
+<li>Optical or electrical image stabilization
+  (<a href="#static_android.lens.info.availableOpticalStabilization">android.<wbr/>lens.<wbr/>info.<wbr/>available<wbr/>Optical<wbr/>Stabilization</a>,<wbr/>
+   <a href="#static_android.control.availableVideoStabilizationModes">android.<wbr/>control.<wbr/>available<wbr/>Video<wbr/>Stabilization<wbr/>Modes</a>)</li>
+</ul>
+<p>A LEGACY device does not support per-frame control,<wbr/> manual sensor control,<wbr/> manual
+post-processing,<wbr/> arbitrary cropping regions,<wbr/> and has relaxed performance constraints.<wbr/></p>
+<p>Each higher level supports everything the lower level supports
+in this order: FULL <code>&gt;</code> LIMITED <code>&gt;</code> LEGACY.<wbr/></p>
+<p>Note:
+Pre-API level 23,<wbr/> FULL devices also supported arbitrary cropping region
+(<a href="#static_android.scaler.croppingType">android.<wbr/>scaler.<wbr/>cropping<wbr/>Type</a> <code>==</code> FREEFORM); this requirement was relaxed in API level 23,<wbr/>
+and FULL devices may only support CENTERED cropping.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>The camera 3 HAL device can implement one of two possible
+operational modes: limited and full.<wbr/> Full support is
+expected from new higher-end devices.<wbr/> Limited mode has
+hardware requirements roughly in line with those for a
+camera HAL device v1 implementation,<wbr/> and is expected from
+older or inexpensive devices.<wbr/> Full is a strict superset of
+limited,<wbr/> and they share the same essential operational flow.<wbr/></p>
+<p>For full details refer to "S3.<wbr/> Operational Modes" in camera3.<wbr/>h</p>
+<p>Camera HAL3+ must not implement LEGACY mode.<wbr/> It is there
+for backwards compatibility in the <code>android.<wbr/>hardware.<wbr/>camera2</code>
+user-facing API only.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+        
+
+      <!-- end of kind -->
+      </tbody>
+
+  <!-- end of section -->
+  <tr><td colspan="6" id="section_blackLevel" class="section">blackLevel</td></tr>
+
+
+      <tr><td colspan="6" class="kind">controls</td></tr>
+
+      <thead class="entries_header">
+        <tr>
+          <th class="th_name">Property Name</th>
+          <th class="th_type">Type</th>
+          <th class="th_description">Description</th>
+          <th class="th_units">Units</th>
+          <th class="th_range">Range</th>
+          <th class="th_tags">Tags</th>
+        </tr>
+      </thead>
+
+      <tbody>
+
+        
+
+        
+
+        
+
+        
+
+                
+          <tr class="entry" id="controls_android.blackLevel.lock">
+            <td class="entry_name
+             " rowspan="5">
+              android.<wbr/>black<wbr/>Level.<wbr/>lock
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">byte</span>
+
+              <span class="entry_type_visibility"> [public as boolean]</span>
+
+
+              <span class="entry_type_hwlevel">[full] </span>
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">OFF</span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">ON</span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Whether black-level compensation is locked
+to its current values,<wbr/> or is free to vary.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_HAL2">HAL2</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>When set to <code>true</code> (ON),<wbr/> the values used for black-level
+compensation will not change until the lock is set to
+<code>false</code> (OFF).<wbr/></p>
+<p>Since changes to certain capture parameters (such as
+exposure time) may require resetting of black level
+compensation,<wbr/> the camera device must report whether setting
+the black level lock was successful in the output result
+metadata.<wbr/></p>
+<p>For example,<wbr/> if a sequence of requests is as follows:</p>
+<ul>
+<li>Request 1: Exposure = 10ms,<wbr/> Black level lock = OFF</li>
+<li>Request 2: Exposure = 10ms,<wbr/> Black level lock = ON</li>
+<li>Request 3: Exposure = 10ms,<wbr/> Black level lock = ON</li>
+<li>Request 4: Exposure = 20ms,<wbr/> Black level lock = ON</li>
+<li>Request 5: Exposure = 20ms,<wbr/> Black level lock = ON</li>
+<li>Request 6: Exposure = 20ms,<wbr/> Black level lock = ON</li>
+</ul>
+<p>And the exposure change in Request 4 requires the camera
+device to reset the black level offsets,<wbr/> then the output
+result metadata is expected to be:</p>
+<ul>
+<li>Result 1: Exposure = 10ms,<wbr/> Black level lock = OFF</li>
+<li>Result 2: Exposure = 10ms,<wbr/> Black level lock = ON</li>
+<li>Result 3: Exposure = 10ms,<wbr/> Black level lock = ON</li>
+<li>Result 4: Exposure = 20ms,<wbr/> Black level lock = OFF</li>
+<li>Result 5: Exposure = 20ms,<wbr/> Black level lock = ON</li>
+<li>Result 6: Exposure = 20ms,<wbr/> Black level lock = ON</li>
+</ul>
+<p>This indicates to the application that on frame 4,<wbr/> black
+levels were reset due to exposure value changes,<wbr/> and pixel
+values may not be consistent across captures.<wbr/></p>
+<p>The camera device will maintain the lock to the extent
+possible,<wbr/> only overriding the lock to OFF when changes to
+other request parameters require a black level recalculation
+or reset.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>If for some reason black level locking is no longer possible
+(for example,<wbr/> the analog gain has changed,<wbr/> which forces
+black level offsets to be recalculated),<wbr/> then the HAL must
+override this request (and it must report 'OFF' when this
+does happen) until the next capture for which locking is
+possible again.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+        
+
+      <!-- end of kind -->
+      </tbody>
+      <tr><td colspan="6" class="kind">dynamic</td></tr>
+
+      <thead class="entries_header">
+        <tr>
+          <th class="th_name">Property Name</th>
+          <th class="th_type">Type</th>
+          <th class="th_description">Description</th>
+          <th class="th_units">Units</th>
+          <th class="th_range">Range</th>
+          <th class="th_tags">Tags</th>
+        </tr>
+      </thead>
+
+      <tbody>
+
+        
+
+        
+
+        
+
+        
+
+                
+          <tr class="entry" id="dynamic_android.blackLevel.lock">
+            <td class="entry_name
+             " rowspan="5">
+              android.<wbr/>black<wbr/>Level.<wbr/>lock
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">byte</span>
+
+              <span class="entry_type_visibility"> [public as boolean]</span>
+
+
+              <span class="entry_type_hwlevel">[full] </span>
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">OFF</span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">ON</span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Whether black-level compensation is locked
+to its current values,<wbr/> or is free to vary.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_HAL2">HAL2</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>Whether the black level offset was locked for this frame.<wbr/>  Should be
+ON if <a href="#controls_android.blackLevel.lock">android.<wbr/>black<wbr/>Level.<wbr/>lock</a> was ON in the capture request,<wbr/> unless
+a change in other capture settings forced the camera device to
+perform a black level reset.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>If for some reason black level locking is no longer possible
+(for example,<wbr/> the analog gain has changed,<wbr/> which forces
+black level offsets to be recalculated),<wbr/> then the HAL must
+override this request (and it must report 'OFF' when this
+does happen) until the next capture for which locking is
+possible again.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+        
+
+      <!-- end of kind -->
+      </tbody>
+
+  <!-- end of section -->
+  <tr><td colspan="6" id="section_sync" class="section">sync</td></tr>
+
+
+      <tr><td colspan="6" class="kind">dynamic</td></tr>
+
+      <thead class="entries_header">
+        <tr>
+          <th class="th_name">Property Name</th>
+          <th class="th_type">Type</th>
+          <th class="th_description">Description</th>
+          <th class="th_units">Units</th>
+          <th class="th_range">Range</th>
+          <th class="th_tags">Tags</th>
+        </tr>
+      </thead>
+
+      <tbody>
+
+        
+
+        
+
+        
+
+        
+
+                
+          <tr class="entry" id="dynamic_android.sync.frameNumber">
+            <td class="entry_name
+             " rowspan="5">
+              android.<wbr/>sync.<wbr/>frame<wbr/>Number
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">int64</span>
+
+              <span class="entry_type_visibility"> [hidden]</span>
+
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">CONVERGING</span>
+                    <span class="entry_type_enum_value">-1</span>
+                    <span class="entry_type_enum_notes"><p>The current result is not yet fully synchronized to any request.<wbr/></p>
+<p>Synchronization is in progress,<wbr/> and reading metadata from this
+result may include a mix of data that have taken effect since the
+last synchronization time.<wbr/></p>
+<p>In some future result,<wbr/> within <a href="#static_android.sync.maxLatency">android.<wbr/>sync.<wbr/>max<wbr/>Latency</a> frames,<wbr/>
+this value will update to the actual frame number
+the result is guaranteed to be synchronized to (as long as the
+request settings remain constant).<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">UNKNOWN</span>
+                    <span class="entry_type_enum_value">-2</span>
+                    <span class="entry_type_enum_notes"><p>The current result's synchronization status is unknown.<wbr/></p>
+<p>The result may have already converged,<wbr/> or it may be in
+progress.<wbr/>  Reading from this result may include some mix
+of settings from past requests.<wbr/></p>
+<p>After a settings change,<wbr/> the new settings will eventually all
+take effect for the output buffers and results.<wbr/> However,<wbr/> this
+value will not change when that happens.<wbr/> Altering settings
+rapidly may provide outcomes using mixes of settings from recent
+requests.<wbr/></p>
+<p>This value is intended primarily for backwards compatibility with
+the older camera implementations (for android.<wbr/>hardware.<wbr/>Camera).<wbr/></p></span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>The frame number corresponding to the last request
+with which the output result (metadata + buffers) has been fully
+synchronized.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p>Either a non-negative value corresponding to a
+<code>frame_<wbr/>number</code>,<wbr/> or one of the two enums (CONVERGING /<wbr/> UNKNOWN).<wbr/></p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_V1">V1</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>When a request is submitted to the camera device,<wbr/> there is usually a
+delay of several frames before the controls get applied.<wbr/> A camera
+device may either choose to account for this delay by implementing a
+pipeline and carefully submit well-timed atomic control updates,<wbr/> or
+it may start streaming control changes that span over several frame
+boundaries.<wbr/></p>
+<p>In the latter case,<wbr/> whenever a request's settings change relative to
+the previous submitted request,<wbr/> the full set of changes may take
+multiple frame durations to fully take effect.<wbr/> Some settings may
+take effect sooner (in less frame durations) than others.<wbr/></p>
+<p>While a set of control changes are being propagated,<wbr/> this value
+will be CONVERGING.<wbr/></p>
+<p>Once it is fully known that a set of control changes have been
+finished propagating,<wbr/> and the resulting updated control settings
+have been read back by the camera device,<wbr/> this value will be set
+to a non-negative frame number (corresponding to the request to
+which the results have synchronized to).<wbr/></p>
+<p>Older camera device implementations may not have a way to detect
+when all camera controls have been applied,<wbr/> and will always set this
+value to UNKNOWN.<wbr/></p>
+<p>FULL capability devices will always have this value set to the
+frame number of the request corresponding to this result.<wbr/></p>
+<p><em>Further details</em>:</p>
+<ul>
+<li>Whenever a request differs from the last request,<wbr/> any future
+results not yet returned may have this value set to CONVERGING (this
+could include any in-progress captures not yet returned by the camera
+device,<wbr/> for more details see pipeline considerations below).<wbr/></li>
+<li>Submitting a series of multiple requests that differ from the
+previous request (e.<wbr/>g.<wbr/> r1,<wbr/> r2,<wbr/> r3 s.<wbr/>t.<wbr/> r1 != r2 != r3)
+moves the new synchronization frame to the last non-repeating
+request (using the smallest frame number from the contiguous list of
+repeating requests).<wbr/></li>
+<li>Submitting the same request repeatedly will not change this value
+to CONVERGING,<wbr/> if it was already a non-negative value.<wbr/></li>
+<li>When this value changes to non-negative,<wbr/> that means that all of the
+metadata controls from the request have been applied,<wbr/> all of the
+metadata controls from the camera device have been read to the
+updated values (into the result),<wbr/> and all of the graphics buffers
+corresponding to this result are also synchronized to the request.<wbr/></li>
+</ul>
+<p><em>Pipeline considerations</em>:</p>
+<p>Submitting a request with updated controls relative to the previously
+submitted requests may also invalidate the synchronization state
+of all the results corresponding to currently in-flight requests.<wbr/></p>
+<p>In other words,<wbr/> results for this current request and up to
+<a href="#static_android.request.pipelineMaxDepth">android.<wbr/>request.<wbr/>pipeline<wbr/>Max<wbr/>Depth</a> prior requests may have their
+<a href="#dynamic_android.sync.frameNumber">android.<wbr/>sync.<wbr/>frame<wbr/>Number</a> change to CONVERGING.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>Using UNKNOWN here is illegal unless <a href="#static_android.sync.maxLatency">android.<wbr/>sync.<wbr/>max<wbr/>Latency</a>
+is also UNKNOWN.<wbr/></p>
+<p>FULL capability devices should simply set this value to the
+<code>frame_<wbr/>number</code> of the request this result corresponds to.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+        
+
+      <!-- end of kind -->
+      </tbody>
+      <tr><td colspan="6" class="kind">static</td></tr>
+
+      <thead class="entries_header">
+        <tr>
+          <th class="th_name">Property Name</th>
+          <th class="th_type">Type</th>
+          <th class="th_description">Description</th>
+          <th class="th_units">Units</th>
+          <th class="th_range">Range</th>
+          <th class="th_tags">Tags</th>
+        </tr>
+      </thead>
+
+      <tbody>
+
+        
+
+        
+
+        
+
+        
+
+                
+          <tr class="entry" id="static_android.sync.maxLatency">
+            <td class="entry_name
+             " rowspan="5">
+              android.<wbr/>sync.<wbr/>max<wbr/>Latency
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">int32</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+              <span class="entry_type_hwlevel">[legacy] </span>
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">PER_FRAME_CONTROL</span>
+                    <span class="entry_type_enum_value">0</span>
+                    <span class="entry_type_enum_notes"><p>Every frame has the requests immediately applied.<wbr/></p>
+<p>Changing controls over multiple requests one after another will
+produce results that have those controls applied atomically
+each frame.<wbr/></p>
+<p>All FULL capability devices will have this as their maxLatency.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">UNKNOWN</span>
+                    <span class="entry_type_enum_value">-1</span>
+                    <span class="entry_type_enum_notes"><p>Each new frame has some subset (potentially the entire set)
+of the past requests applied to the camera settings.<wbr/></p>
+<p>By submitting a series of identical requests,<wbr/> the camera device
+will eventually have the camera settings applied,<wbr/> but it is
+unknown when that exact point will be.<wbr/></p>
+<p>All LEGACY capability devices will have this as their maxLatency.<wbr/></p></span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>The maximum number of frames that can occur after a request
+(different than the previous) has been submitted,<wbr/> and before the
+result's state becomes synchronized.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+              Frame counts
+            </td>
+
+            <td class="entry_range">
+              <p>A positive value,<wbr/> PER_<wbr/>FRAME_<wbr/>CONTROL,<wbr/> or UNKNOWN.<wbr/></p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_V1">V1</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>This defines the maximum distance (in number of metadata results),<wbr/>
+between the frame number of the request that has new controls to apply
+and the frame number of the result that has all the controls applied.<wbr/></p>
+<p>In other words this acts as an upper boundary for how many frames
+must occur before the camera device knows for a fact that the new
+submitted camera settings have been applied in outgoing frames.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>For example if maxLatency was 2,<wbr/></p>
+<pre><code>initial request = X (repeating)
+request1 = X
+request2 = Y
+request3 = Y
+request4 = Y
+
+where requestN has frameNumber N,<wbr/> and the first of the repeating
+initial requests has frameNumber F (and F &lt; 1).<wbr/>
+
+initial result = X' + { <a href="#dynamic_android.sync.frameNumber">android.<wbr/>sync.<wbr/>frame<wbr/>Number</a> == F }
+result1 = X' + { <a href="#dynamic_android.sync.frameNumber">android.<wbr/>sync.<wbr/>frame<wbr/>Number</a> == F }
+result2 = X' + { <a href="#dynamic_android.sync.frameNumber">android.<wbr/>sync.<wbr/>frame<wbr/>Number</a> == CONVERGING }
+result3 = X' + { <a href="#dynamic_android.sync.frameNumber">android.<wbr/>sync.<wbr/>frame<wbr/>Number</a> == CONVERGING }
+result4 = X' + { <a href="#dynamic_android.sync.frameNumber">android.<wbr/>sync.<wbr/>frame<wbr/>Number</a> == 2 }
+
+where resultN has frameNumber N.<wbr/>
+</code></pre>
+<p>Since <code>result4</code> has a <code>frameNumber == 4</code> and
+<code><a href="#dynamic_android.sync.frameNumber">android.<wbr/>sync.<wbr/>frame<wbr/>Number</a> == 2</code>,<wbr/> the distance is clearly
+<code>4 - 2 = 2</code>.<wbr/></p>
+<p>Use <code>frame_<wbr/>count</code> from camera3_<wbr/>request_<wbr/>t instead of
+<a href="#controls_android.request.frameCount">android.<wbr/>request.<wbr/>frame<wbr/>Count</a> or
+<code>@link{android.<wbr/>hardware.<wbr/>camera2.<wbr/>Capture<wbr/>Result#get<wbr/>Frame<wbr/>Number}</code>.<wbr/></p>
+<p>LIMITED devices are strongly encouraged to use a non-negative
+value.<wbr/> If UNKNOWN is used here then app developers do not have a way
+to know when sensor settings have been applied.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+        
+
+      <!-- end of kind -->
+      </tbody>
+
+  <!-- end of section -->
+  <tr><td colspan="6" id="section_reprocess" class="section">reprocess</td></tr>
+
+
+      <tr><td colspan="6" class="kind">controls</td></tr>
+
+      <thead class="entries_header">
+        <tr>
+          <th class="th_name">Property Name</th>
+          <th class="th_type">Type</th>
+          <th class="th_description">Description</th>
+          <th class="th_units">Units</th>
+          <th class="th_range">Range</th>
+          <th class="th_tags">Tags</th>
+        </tr>
+      </thead>
+
+      <tbody>
+
+        
+
+        
+
+        
+
+        
+
+                
+          <tr class="entry" id="controls_android.reprocess.effectiveExposureFactor">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>reprocess.<wbr/>effective<wbr/>Exposure<wbr/>Factor
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">float</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+              <span class="entry_type_hwlevel">[limited] </span>
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>The amount of exposure time increase factor applied to the original output
+frame by the application processing before sending for reprocessing.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+              Relative exposure time increase factor.<wbr/>
+            </td>
+
+            <td class="entry_range">
+              <p>&gt;= 1.<wbr/>0</p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_REPROC">REPROC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>This is optional,<wbr/> and will be supported if the camera device supports YUV_<wbr/>REPROCESSING
+capability (<a href="#static_android.request.availableCapabilities">android.<wbr/>request.<wbr/>available<wbr/>Capabilities</a> contains YUV_<wbr/>REPROCESSING).<wbr/></p>
+<p>For some YUV reprocessing use cases,<wbr/> the application may choose to filter the original
+output frames to effectively reduce the noise to the same level as a frame that was
+captured with longer exposure time.<wbr/> To be more specific,<wbr/> assuming the original captured
+images were captured with a sensitivity of S and an exposure time of T,<wbr/> the model in
+the camera device is that the amount of noise in the image would be approximately what
+would be expected if the original capture parameters had been a sensitivity of
+S/<wbr/>effectiveExposureFactor and an exposure time of T*effectiveExposureFactor,<wbr/> rather
+than S and T respectively.<wbr/> If the captured images were processed by the application
+before being sent for reprocessing,<wbr/> then the application may have used image processing
+algorithms and/<wbr/>or multi-frame image fusion to reduce the noise in the
+application-processed images (input images).<wbr/> By using the effectiveExposureFactor
+control,<wbr/> the application can communicate to the camera device the actual noise level
+improvement in the application-processed image.<wbr/> With this information,<wbr/> the camera
+device can select appropriate noise reduction and edge enhancement parameters to avoid
+excessive noise reduction (<a href="#controls_android.noiseReduction.mode">android.<wbr/>noise<wbr/>Reduction.<wbr/>mode</a>) and insufficient edge
+enhancement (<a href="#controls_android.edge.mode">android.<wbr/>edge.<wbr/>mode</a>) being applied to the reprocessed frames.<wbr/></p>
+<p>For example,<wbr/> for multi-frame image fusion use case,<wbr/> the application may fuse
+multiple output frames together to a final frame for reprocessing.<wbr/> When N images are
+fused into 1 image for reprocessing,<wbr/> the exposure time increase factor could be up to
+square root of N (based on a simple photon shot noise model).<wbr/> The camera device will
+adjust the reprocessing noise reduction and edge enhancement parameters accordingly to
+produce the best quality images.<wbr/></p>
+<p>This is a relative factor,<wbr/> 1.<wbr/>0 indicates the application hasn't processed the input
+buffer in a way that affects its effective exposure time.<wbr/></p>
+<p>This control is only effective for YUV reprocessing capture requests.<wbr/> For noise
+reduction reprocessing,<wbr/> it is only effective when <code><a href="#controls_android.noiseReduction.mode">android.<wbr/>noise<wbr/>Reduction.<wbr/>mode</a> != OFF</code>.<wbr/>
+Similarly,<wbr/> for edge enhancement reprocessing,<wbr/> it is only effective when
+<code><a href="#controls_android.edge.mode">android.<wbr/>edge.<wbr/>mode</a> != OFF</code>.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+        
+
+      <!-- end of kind -->
+      </tbody>
+      <tr><td colspan="6" class="kind">dynamic</td></tr>
+
+      <thead class="entries_header">
+        <tr>
+          <th class="th_name">Property Name</th>
+          <th class="th_type">Type</th>
+          <th class="th_description">Description</th>
+          <th class="th_units">Units</th>
+          <th class="th_range">Range</th>
+          <th class="th_tags">Tags</th>
+        </tr>
+      </thead>
+
+      <tbody>
+
+        
+
+        
+
+        
+
+        
+
+                
+          <tr class="entry" id="dynamic_android.reprocess.effectiveExposureFactor">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>reprocess.<wbr/>effective<wbr/>Exposure<wbr/>Factor
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">float</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+              <span class="entry_type_hwlevel">[limited] </span>
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>The amount of exposure time increase factor applied to the original output
+frame by the application processing before sending for reprocessing.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+              Relative exposure time increase factor.<wbr/>
+            </td>
+
+            <td class="entry_range">
+              <p>&gt;= 1.<wbr/>0</p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_REPROC">REPROC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>This is optional,<wbr/> and will be supported if the camera device supports YUV_<wbr/>REPROCESSING
+capability (<a href="#static_android.request.availableCapabilities">android.<wbr/>request.<wbr/>available<wbr/>Capabilities</a> contains YUV_<wbr/>REPROCESSING).<wbr/></p>
+<p>For some YUV reprocessing use cases,<wbr/> the application may choose to filter the original
+output frames to effectively reduce the noise to the same level as a frame that was
+captured with longer exposure time.<wbr/> To be more specific,<wbr/> assuming the original captured
+images were captured with a sensitivity of S and an exposure time of T,<wbr/> the model in
+the camera device is that the amount of noise in the image would be approximately what
+would be expected if the original capture parameters had been a sensitivity of
+S/<wbr/>effectiveExposureFactor and an exposure time of T*effectiveExposureFactor,<wbr/> rather
+than S and T respectively.<wbr/> If the captured images were processed by the application
+before being sent for reprocessing,<wbr/> then the application may have used image processing
+algorithms and/<wbr/>or multi-frame image fusion to reduce the noise in the
+application-processed images (input images).<wbr/> By using the effectiveExposureFactor
+control,<wbr/> the application can communicate to the camera device the actual noise level
+improvement in the application-processed image.<wbr/> With this information,<wbr/> the camera
+device can select appropriate noise reduction and edge enhancement parameters to avoid
+excessive noise reduction (<a href="#controls_android.noiseReduction.mode">android.<wbr/>noise<wbr/>Reduction.<wbr/>mode</a>) and insufficient edge
+enhancement (<a href="#controls_android.edge.mode">android.<wbr/>edge.<wbr/>mode</a>) being applied to the reprocessed frames.<wbr/></p>
+<p>For example,<wbr/> for multi-frame image fusion use case,<wbr/> the application may fuse
+multiple output frames together to a final frame for reprocessing.<wbr/> When N images are
+fused into 1 image for reprocessing,<wbr/> the exposure time increase factor could be up to
+square root of N (based on a simple photon shot noise model).<wbr/> The camera device will
+adjust the reprocessing noise reduction and edge enhancement parameters accordingly to
+produce the best quality images.<wbr/></p>
+<p>This is a relative factor,<wbr/> 1.<wbr/>0 indicates the application hasn't processed the input
+buffer in a way that affects its effective exposure time.<wbr/></p>
+<p>This control is only effective for YUV reprocessing capture requests.<wbr/> For noise
+reduction reprocessing,<wbr/> it is only effective when <code><a href="#controls_android.noiseReduction.mode">android.<wbr/>noise<wbr/>Reduction.<wbr/>mode</a> != OFF</code>.<wbr/>
+Similarly,<wbr/> for edge enhancement reprocessing,<wbr/> it is only effective when
+<code><a href="#controls_android.edge.mode">android.<wbr/>edge.<wbr/>mode</a> != OFF</code>.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+        
+
+      <!-- end of kind -->
+      </tbody>
+      <tr><td colspan="6" class="kind">static</td></tr>
+
+      <thead class="entries_header">
+        <tr>
+          <th class="th_name">Property Name</th>
+          <th class="th_type">Type</th>
+          <th class="th_description">Description</th>
+          <th class="th_units">Units</th>
+          <th class="th_range">Range</th>
+          <th class="th_tags">Tags</th>
+        </tr>
+      </thead>
+
+      <tbody>
+
+        
+
+        
+
+        
+
+        
+
+                
+          <tr class="entry" id="static_android.reprocess.maxCaptureStall">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>reprocess.<wbr/>max<wbr/>Capture<wbr/>Stall
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int32</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+              <span class="entry_type_hwlevel">[limited] </span>
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>The maximal camera capture pipeline stall (in unit of frame count) introduced by a
+reprocess capture request.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+              Number of frames.<wbr/>
+            </td>
+
+            <td class="entry_range">
+              <p>&lt;= 4</p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_REPROC">REPROC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>The key describes the maximal interference that one reprocess (input) request
+can introduce to the camera simultaneous streaming of regular (output) capture
+requests,<wbr/> including repeating requests.<wbr/></p>
+<p>When a reprocessing capture request is submitted while a camera output repeating request
+(e.<wbr/>g.<wbr/> preview) is being served by the camera device,<wbr/> it may preempt the camera capture
+pipeline for at least one frame duration so that the camera device is unable to process
+the following capture request in time for the next sensor start of exposure boundary.<wbr/>
+When this happens,<wbr/> the application may observe a capture time gap (longer than one frame
+duration) between adjacent capture output frames,<wbr/> which usually exhibits as preview
+glitch if the repeating request output targets include a preview surface.<wbr/> This key gives
+the worst-case number of frame stall introduced by one reprocess request with any kind of
+formats/<wbr/>sizes combination.<wbr/></p>
+<p>If this key reports 0,<wbr/> it means a reprocess request doesn't introduce any glitch to the
+ongoing camera repeating request outputs,<wbr/> as if this reprocess request is never issued.<wbr/></p>
+<p>This key is supported if the camera device supports PRIVATE or YUV reprocessing (
+i.<wbr/>e.<wbr/> <a href="#static_android.request.availableCapabilities">android.<wbr/>request.<wbr/>available<wbr/>Capabilities</a> contains PRIVATE_<wbr/>REPROCESSING or
+YUV_<wbr/>REPROCESSING).<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+        
+
+      <!-- end of kind -->
+      </tbody>
+
+  <!-- end of section -->
+  <tr><td colspan="6" id="section_depth" class="section">depth</td></tr>
+
+
+      <tr><td colspan="6" class="kind">static</td></tr>
+
+      <thead class="entries_header">
+        <tr>
+          <th class="th_name">Property Name</th>
+          <th class="th_type">Type</th>
+          <th class="th_description">Description</th>
+          <th class="th_units">Units</th>
+          <th class="th_range">Range</th>
+          <th class="th_tags">Tags</th>
+        </tr>
+      </thead>
+
+      <tbody>
+
+        
+
+        
+
+        
+
+        
+
+                
+          <tr class="entry" id="static_android.depth.maxDepthSamples">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>depth.<wbr/>max<wbr/>Depth<wbr/>Samples
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int32</span>
+
+              <span class="entry_type_visibility"> [system]</span>
+
+
+              <span class="entry_type_hwlevel">[limited] </span>
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Maximum number of points that a depth point cloud may contain.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_DEPTH">DEPTH</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>If a camera device supports outputting depth range data in the form of a depth point
+cloud (<a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#DEPTH_POINT_CLOUD">Image<wbr/>Format#DEPTH_<wbr/>POINT_<wbr/>CLOUD</a>),<wbr/> this is the maximum
+number of points an output buffer may contain.<wbr/></p>
+<p>Any given buffer may contain between 0 and maxDepthSamples points,<wbr/> inclusive.<wbr/>
+If output in the depth point cloud format is not supported,<wbr/> this entry will
+not be defined.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.depth.availableDepthStreamConfigurations">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>depth.<wbr/>available<wbr/>Depth<wbr/>Stream<wbr/>Configurations
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">int32</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  n x 4
+                </span>
+              <span class="entry_type_visibility"> [hidden as streamConfiguration]</span>
+
+
+              <span class="entry_type_hwlevel">[limited] </span>
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">OUTPUT</span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">INPUT</span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>The available depth dataspace stream
+configurations that this camera device supports
+(i.<wbr/>e.<wbr/> format,<wbr/> width,<wbr/> height,<wbr/> output/<wbr/>input stream).<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_DEPTH">DEPTH</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>These are output stream configurations for use with
+dataSpace HAL_<wbr/>DATASPACE_<wbr/>DEPTH.<wbr/> The configurations are
+listed as <code>(format,<wbr/> width,<wbr/> height,<wbr/> input?)</code> tuples.<wbr/></p>
+<p>Only devices that support depth output for at least
+the HAL_<wbr/>PIXEL_<wbr/>FORMAT_<wbr/>Y16 dense depth map may include
+this entry.<wbr/></p>
+<p>A device that also supports the HAL_<wbr/>PIXEL_<wbr/>FORMAT_<wbr/>BLOB
+sparse depth point cloud must report a single entry for
+the format in this list as <code>(HAL_<wbr/>PIXEL_<wbr/>FORMAT_<wbr/>BLOB,<wbr/>
+<a href="#static_android.depth.maxDepthSamples">android.<wbr/>depth.<wbr/>max<wbr/>Depth<wbr/>Samples</a>,<wbr/> 1,<wbr/> OUTPUT)</code> in addition to
+the entries for HAL_<wbr/>PIXEL_<wbr/>FORMAT_<wbr/>Y16.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.depth.availableDepthMinFrameDurations">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>depth.<wbr/>available<wbr/>Depth<wbr/>Min<wbr/>Frame<wbr/>Durations
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int64</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  4 x n
+                </span>
+              <span class="entry_type_visibility"> [hidden as streamConfigurationDuration]</span>
+
+
+              <span class="entry_type_hwlevel">[limited] </span>
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>This lists the minimum frame duration for each
+format/<wbr/>size combination for depth output formats.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+              (format,<wbr/> width,<wbr/> height,<wbr/> ns) x n
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_DEPTH">DEPTH</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>This should correspond to the frame duration when only that
+stream is active,<wbr/> with all processing (typically in android.<wbr/>*.<wbr/>mode)
+set to either OFF or FAST.<wbr/></p>
+<p>When multiple streams are used in a request,<wbr/> the minimum frame
+duration will be max(individual stream min durations).<wbr/></p>
+<p>The minimum frame duration of a stream (of a particular format,<wbr/> size)
+is the same regardless of whether the stream is input or output.<wbr/></p>
+<p>See <a href="#controls_android.sensor.frameDuration">android.<wbr/>sensor.<wbr/>frame<wbr/>Duration</a> and
+<a href="#static_android.scaler.availableStallDurations">android.<wbr/>scaler.<wbr/>available<wbr/>Stall<wbr/>Durations</a> for more details about
+calculating the max frame rate.<wbr/></p>
+<p>(Keep in sync with <a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getOutputMinFrameDuration">StreamConfigurationMap#getOutputMinFrameDuration</a>)</p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.depth.availableDepthStallDurations">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>depth.<wbr/>available<wbr/>Depth<wbr/>Stall<wbr/>Durations
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int64</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  4 x n
+                </span>
+              <span class="entry_type_visibility"> [hidden as streamConfigurationDuration]</span>
+
+
+              <span class="entry_type_hwlevel">[limited] </span>
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>This lists the maximum stall duration for each
+output format/<wbr/>size combination for depth streams.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+              (format,<wbr/> width,<wbr/> height,<wbr/> ns) x n
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_DEPTH">DEPTH</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>A stall duration is how much extra time would get added
+to the normal minimum frame duration for a repeating request
+that has streams with non-zero stall.<wbr/></p>
+<p>This functions similarly to
+<a href="#static_android.scaler.availableStallDurations">android.<wbr/>scaler.<wbr/>available<wbr/>Stall<wbr/>Durations</a> for depth
+streams.<wbr/></p>
+<p>All depth output stream formats may have a nonzero stall
+duration.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.depth.depthIsExclusive">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>depth.<wbr/>depth<wbr/>Is<wbr/>Exclusive
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">byte</span>
+
+              <span class="entry_type_visibility"> [public as boolean]</span>
+
+
+              <span class="entry_type_hwlevel">[limited] </span>
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">FALSE</span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">TRUE</span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Indicates whether a capture request may target both a
+DEPTH16 /<wbr/> DEPTH_<wbr/>POINT_<wbr/>CLOUD output,<wbr/> and normal color outputs (such as
+YUV_<wbr/>420_<wbr/>888,<wbr/> JPEG,<wbr/> or RAW) simultaneously.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              <p>If TRUE,<wbr/> including both depth and color outputs in a single
+capture request is not supported.<wbr/> An application must interleave color
+and depth requests.<wbr/>  If FALSE,<wbr/> a single request can target both types
+of output.<wbr/></p>
+<p>Typically,<wbr/> this restriction exists on camera devices that
+need to emit a specific pattern or wavelength of light to
+measure depth values,<wbr/> which causes the color image to be
+corrupted during depth measurement.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        
+        
+
+      <!-- end of kind -->
+      </tbody>
+
+  <!-- end of section -->
+<!-- </namespace> -->
+  </table>
+
+  <div class="tags" id="tag_index">
+    <h2>Tags</h2>
+    <ul>
+      <li id="tag_BC">BC - 
+        Needed for backwards compatibility with old Java API
+    
+        <ul class="tags_entries">
+          <li><a href="#controls_android.control.aeAntibandingMode">android.control.aeAntibandingMode</a> (controls)</li>
+          <li><a href="#controls_android.control.aeExposureCompensation">android.control.aeExposureCompensation</a> (controls)</li>
+          <li><a href="#controls_android.control.aeLock">android.control.aeLock</a> (controls)</li>
+          <li><a href="#controls_android.control.aeMode">android.control.aeMode</a> (controls)</li>
+          <li><a href="#controls_android.control.aeRegions">android.control.aeRegions</a> (controls)</li>
+          <li><a href="#controls_android.control.aeTargetFpsRange">android.control.aeTargetFpsRange</a> (controls)</li>
+          <li><a href="#controls_android.control.aePrecaptureTrigger">android.control.aePrecaptureTrigger</a> (controls)</li>
+          <li><a href="#controls_android.control.afMode">android.control.afMode</a> (controls)</li>
+          <li><a href="#controls_android.control.afRegions">android.control.afRegions</a> (controls)</li>
+          <li><a href="#controls_android.control.afTrigger">android.control.afTrigger</a> (controls)</li>
+          <li><a href="#controls_android.control.awbLock">android.control.awbLock</a> (controls)</li>
+          <li><a href="#controls_android.control.awbMode">android.control.awbMode</a> (controls)</li>
+          <li><a href="#controls_android.control.awbRegions">android.control.awbRegions</a> (controls)</li>
+          <li><a href="#controls_android.control.captureIntent">android.control.captureIntent</a> (controls)</li>
+          <li><a href="#controls_android.control.effectMode">android.control.effectMode</a> (controls)</li>
+          <li><a href="#controls_android.control.mode">android.control.mode</a> (controls)</li>
+          <li><a href="#controls_android.control.sceneMode">android.control.sceneMode</a> (controls)</li>
+          <li><a href="#controls_android.control.videoStabilizationMode">android.control.videoStabilizationMode</a> (controls)</li>
+          <li><a href="#static_android.control.aeAvailableAntibandingModes">android.control.aeAvailableAntibandingModes</a> (static)</li>
+          <li><a href="#static_android.control.aeAvailableModes">android.control.aeAvailableModes</a> (static)</li>
+          <li><a href="#static_android.control.aeAvailableTargetFpsRanges">android.control.aeAvailableTargetFpsRanges</a> (static)</li>
+          <li><a href="#static_android.control.aeCompensationRange">android.control.aeCompensationRange</a> (static)</li>
+          <li><a href="#static_android.control.aeCompensationStep">android.control.aeCompensationStep</a> (static)</li>
+          <li><a href="#static_android.control.afAvailableModes">android.control.afAvailableModes</a> (static)</li>
+          <li><a href="#static_android.control.availableEffects">android.control.availableEffects</a> (static)</li>
+          <li><a href="#static_android.control.availableSceneModes">android.control.availableSceneModes</a> (static)</li>
+          <li><a href="#static_android.control.availableVideoStabilizationModes">android.control.availableVideoStabilizationModes</a> (static)</li>
+          <li><a href="#static_android.control.awbAvailableModes">android.control.awbAvailableModes</a> (static)</li>
+          <li><a href="#static_android.control.maxRegions">android.control.maxRegions</a> (static)</li>
+          <li><a href="#static_android.control.sceneModeOverrides">android.control.sceneModeOverrides</a> (static)</li>
+          <li><a href="#static_android.control.aeLockAvailable">android.control.aeLockAvailable</a> (static)</li>
+          <li><a href="#static_android.control.awbLockAvailable">android.control.awbLockAvailable</a> (static)</li>
+          <li><a href="#controls_android.flash.mode">android.flash.mode</a> (controls)</li>
+          <li><a href="#static_android.flash.info.available">android.flash.info.available</a> (static)</li>
+          <li><a href="#controls_android.jpeg.gpsCoordinates">android.jpeg.gpsCoordinates</a> (controls)</li>
+          <li><a href="#controls_android.jpeg.gpsProcessingMethod">android.jpeg.gpsProcessingMethod</a> (controls)</li>
+          <li><a href="#controls_android.jpeg.gpsTimestamp">android.jpeg.gpsTimestamp</a> (controls)</li>
+          <li><a href="#controls_android.jpeg.orientation">android.jpeg.orientation</a> (controls)</li>
+          <li><a href="#controls_android.jpeg.quality">android.jpeg.quality</a> (controls)</li>
+          <li><a href="#controls_android.jpeg.thumbnailQuality">android.jpeg.thumbnailQuality</a> (controls)</li>
+          <li><a href="#controls_android.jpeg.thumbnailSize">android.jpeg.thumbnailSize</a> (controls)</li>
+          <li><a href="#static_android.jpeg.availableThumbnailSizes">android.jpeg.availableThumbnailSizes</a> (static)</li>
+          <li><a href="#controls_android.lens.focusDistance">android.lens.focusDistance</a> (controls)</li>
+          <li><a href="#static_android.lens.info.availableFocalLengths">android.lens.info.availableFocalLengths</a> (static)</li>
+          <li><a href="#dynamic_android.lens.focusRange">android.lens.focusRange</a> (dynamic)</li>
+          <li><a href="#static_android.request.maxNumOutputStreams">android.request.maxNumOutputStreams</a> (static)</li>
+          <li><a href="#controls_android.scaler.cropRegion">android.scaler.cropRegion</a> (controls)</li>
+          <li><a href="#static_android.scaler.availableFormats">android.scaler.availableFormats</a> (static)</li>
+          <li><a href="#static_android.scaler.availableJpegMinDurations">android.scaler.availableJpegMinDurations</a> (static)</li>
+          <li><a href="#static_android.scaler.availableJpegSizes">android.scaler.availableJpegSizes</a> (static)</li>
+          <li><a href="#static_android.scaler.availableMaxDigitalZoom">android.scaler.availableMaxDigitalZoom</a> (static)</li>
+          <li><a href="#static_android.scaler.availableProcessedMinDurations">android.scaler.availableProcessedMinDurations</a> (static)</li>
+          <li><a href="#static_android.scaler.availableProcessedSizes">android.scaler.availableProcessedSizes</a> (static)</li>
+          <li><a href="#static_android.scaler.availableRawMinDurations">android.scaler.availableRawMinDurations</a> (static)</li>
+          <li><a href="#static_android.sensor.info.sensitivityRange">android.sensor.info.sensitivityRange</a> (static)</li>
+          <li><a href="#static_android.sensor.info.physicalSize">android.sensor.info.physicalSize</a> (static)</li>
+          <li><a href="#static_android.sensor.info.pixelArraySize">android.sensor.info.pixelArraySize</a> (static)</li>
+          <li><a href="#static_android.sensor.orientation">android.sensor.orientation</a> (static)</li>
+          <li><a href="#dynamic_android.sensor.timestamp">android.sensor.timestamp</a> (dynamic)</li>
+          <li><a href="#controls_android.statistics.faceDetectMode">android.statistics.faceDetectMode</a> (controls)</li>
+          <li><a href="#static_android.statistics.info.maxFaceCount">android.statistics.info.maxFaceCount</a> (static)</li>
+          <li><a href="#dynamic_android.statistics.faceIds">android.statistics.faceIds</a> (dynamic)</li>
+          <li><a href="#dynamic_android.statistics.faceLandmarks">android.statistics.faceLandmarks</a> (dynamic)</li>
+          <li><a href="#dynamic_android.statistics.faceRectangles">android.statistics.faceRectangles</a> (dynamic)</li>
+          <li><a href="#dynamic_android.statistics.faceScores">android.statistics.faceScores</a> (dynamic)</li>
+          <li><a href="#dynamic_android.lens.focalLength">android.lens.focalLength</a> (dynamic)</li>
+          <li><a href="#dynamic_android.lens.focusDistance">android.lens.focusDistance</a> (dynamic)</li>
+        </ul>
+      </li> <!-- tag_BC -->
+      <li id="tag_V1">V1 - 
+        New features for first camera 2 release (API1)
+    
+        <ul class="tags_entries">
+          <li><a href="#static_android.colorCorrection.availableAberrationModes">android.colorCorrection.availableAberrationModes</a> (static)</li>
+          <li><a href="#static_android.control.availableHighSpeedVideoConfigurations">android.control.availableHighSpeedVideoConfigurations</a> (static)</li>
+          <li><a href="#controls_android.edge.mode">android.edge.mode</a> (controls)</li>
+          <li><a href="#static_android.edge.availableEdgeModes">android.edge.availableEdgeModes</a> (static)</li>
+          <li><a href="#controls_android.hotPixel.mode">android.hotPixel.mode</a> (controls)</li>
+          <li><a href="#static_android.hotPixel.availableHotPixelModes">android.hotPixel.availableHotPixelModes</a> (static)</li>
+          <li><a href="#controls_android.lens.aperture">android.lens.aperture</a> (controls)</li>
+          <li><a href="#controls_android.lens.filterDensity">android.lens.filterDensity</a> (controls)</li>
+          <li><a href="#controls_android.lens.focalLength">android.lens.focalLength</a> (controls)</li>
+          <li><a href="#controls_android.lens.focusDistance">android.lens.focusDistance</a> (controls)</li>
+          <li><a href="#controls_android.lens.opticalStabilizationMode">android.lens.opticalStabilizationMode</a> (controls)</li>
+          <li><a href="#static_android.lens.info.availableApertures">android.lens.info.availableApertures</a> (static)</li>
+          <li><a href="#static_android.lens.info.availableFilterDensities">android.lens.info.availableFilterDensities</a> (static)</li>
+          <li><a href="#static_android.lens.info.availableFocalLengths">android.lens.info.availableFocalLengths</a> (static)</li>
+          <li><a href="#static_android.lens.info.availableOpticalStabilization">android.lens.info.availableOpticalStabilization</a> (static)</li>
+          <li><a href="#static_android.lens.info.minimumFocusDistance">android.lens.info.minimumFocusDistance</a> (static)</li>
+          <li><a href="#static_android.lens.info.shadingMapSize">android.lens.info.shadingMapSize</a> (static)</li>
+          <li><a href="#static_android.lens.info.focusDistanceCalibration">android.lens.info.focusDistanceCalibration</a> (static)</li>
+          <li><a href="#dynamic_android.lens.state">android.lens.state</a> (dynamic)</li>
+          <li><a href="#controls_android.noiseReduction.mode">android.noiseReduction.mode</a> (controls)</li>
+          <li><a href="#static_android.noiseReduction.availableNoiseReductionModes">android.noiseReduction.availableNoiseReductionModes</a> (static)</li>
+          <li><a href="#controls_android.request.id">android.request.id</a> (controls)</li>
+          <li><a href="#static_android.scaler.availableMinFrameDurations">android.scaler.availableMinFrameDurations</a> (static)</li>
+          <li><a href="#static_android.scaler.availableStallDurations">android.scaler.availableStallDurations</a> (static)</li>
+          <li><a href="#controls_android.sensor.exposureTime">android.sensor.exposureTime</a> (controls)</li>
+          <li><a href="#controls_android.sensor.frameDuration">android.sensor.frameDuration</a> (controls)</li>
+          <li><a href="#controls_android.sensor.sensitivity">android.sensor.sensitivity</a> (controls)</li>
+          <li><a href="#static_android.sensor.info.sensitivityRange">android.sensor.info.sensitivityRange</a> (static)</li>
+          <li><a href="#static_android.sensor.info.exposureTimeRange">android.sensor.info.exposureTimeRange</a> (static)</li>
+          <li><a href="#static_android.sensor.info.maxFrameDuration">android.sensor.info.maxFrameDuration</a> (static)</li>
+          <li><a href="#static_android.sensor.info.physicalSize">android.sensor.info.physicalSize</a> (static)</li>
+          <li><a href="#static_android.sensor.info.timestampSource">android.sensor.info.timestampSource</a> (static)</li>
+          <li><a href="#static_android.sensor.maxAnalogSensitivity">android.sensor.maxAnalogSensitivity</a> (static)</li>
+          <li><a href="#dynamic_android.sensor.rollingShutterSkew">android.sensor.rollingShutterSkew</a> (dynamic)</li>
+          <li><a href="#controls_android.statistics.hotPixelMapMode">android.statistics.hotPixelMapMode</a> (controls)</li>
+          <li><a href="#static_android.statistics.info.availableHotPixelMapModes">android.statistics.info.availableHotPixelMapModes</a> (static)</li>
+          <li><a href="#dynamic_android.statistics.hotPixelMap">android.statistics.hotPixelMap</a> (dynamic)</li>
+          <li><a href="#dynamic_android.sync.frameNumber">android.sync.frameNumber</a> (dynamic)</li>
+          <li><a href="#static_android.sync.maxLatency">android.sync.maxLatency</a> (static)</li>
+          <li><a href="#dynamic_android.edge.mode">android.edge.mode</a> (dynamic)</li>
+          <li><a href="#dynamic_android.hotPixel.mode">android.hotPixel.mode</a> (dynamic)</li>
+          <li><a href="#dynamic_android.lens.aperture">android.lens.aperture</a> (dynamic)</li>
+          <li><a href="#dynamic_android.lens.filterDensity">android.lens.filterDensity</a> (dynamic)</li>
+          <li><a href="#dynamic_android.lens.opticalStabilizationMode">android.lens.opticalStabilizationMode</a> (dynamic)</li>
+          <li><a href="#dynamic_android.noiseReduction.mode">android.noiseReduction.mode</a> (dynamic)</li>
+        </ul>
+      </li> <!-- tag_V1 -->
+      <li id="tag_RAW">RAW - 
+        Needed for useful RAW image processing and DNG file support
+    
+        <ul class="tags_entries">
+          <li><a href="#controls_android.hotPixel.mode">android.hotPixel.mode</a> (controls)</li>
+          <li><a href="#static_android.hotPixel.availableHotPixelModes">android.hotPixel.availableHotPixelModes</a> (static)</li>
+          <li><a href="#static_android.sensor.info.activeArraySize">android.sensor.info.activeArraySize</a> (static)</li>
+          <li><a href="#static_android.sensor.info.colorFilterArrangement">android.sensor.info.colorFilterArrangement</a> (static)</li>
+          <li><a href="#static_android.sensor.info.pixelArraySize">android.sensor.info.pixelArraySize</a> (static)</li>
+          <li><a href="#static_android.sensor.info.whiteLevel">android.sensor.info.whiteLevel</a> (static)</li>
+          <li><a href="#static_android.sensor.info.preCorrectionActiveArraySize">android.sensor.info.preCorrectionActiveArraySize</a> (static)</li>
+          <li><a href="#static_android.sensor.referenceIlluminant1">android.sensor.referenceIlluminant1</a> (static)</li>
+          <li><a href="#static_android.sensor.referenceIlluminant2">android.sensor.referenceIlluminant2</a> (static)</li>
+          <li><a href="#static_android.sensor.calibrationTransform1">android.sensor.calibrationTransform1</a> (static)</li>
+          <li><a href="#static_android.sensor.calibrationTransform2">android.sensor.calibrationTransform2</a> (static)</li>
+          <li><a href="#static_android.sensor.colorTransform1">android.sensor.colorTransform1</a> (static)</li>
+          <li><a href="#static_android.sensor.colorTransform2">android.sensor.colorTransform2</a> (static)</li>
+          <li><a href="#static_android.sensor.forwardMatrix1">android.sensor.forwardMatrix1</a> (static)</li>
+          <li><a href="#static_android.sensor.forwardMatrix2">android.sensor.forwardMatrix2</a> (static)</li>
+          <li><a href="#static_android.sensor.blackLevelPattern">android.sensor.blackLevelPattern</a> (static)</li>
+          <li><a href="#static_android.sensor.profileHueSatMapDimensions">android.sensor.profileHueSatMapDimensions</a> (static)</li>
+          <li><a href="#dynamic_android.sensor.neutralColorPoint">android.sensor.neutralColorPoint</a> (dynamic)</li>
+          <li><a href="#dynamic_android.sensor.noiseProfile">android.sensor.noiseProfile</a> (dynamic)</li>
+          <li><a href="#dynamic_android.sensor.profileHueSatMap">android.sensor.profileHueSatMap</a> (dynamic)</li>
+          <li><a href="#dynamic_android.sensor.profileToneCurve">android.sensor.profileToneCurve</a> (dynamic)</li>
+          <li><a href="#dynamic_android.sensor.greenSplit">android.sensor.greenSplit</a> (dynamic)</li>
+          <li><a href="#controls_android.statistics.hotPixelMapMode">android.statistics.hotPixelMapMode</a> (controls)</li>
+          <li><a href="#static_android.statistics.info.availableHotPixelMapModes">android.statistics.info.availableHotPixelMapModes</a> (static)</li>
+          <li><a href="#dynamic_android.statistics.hotPixelMap">android.statistics.hotPixelMap</a> (dynamic)</li>
+          <li><a href="#controls_android.statistics.lensShadingMapMode">android.statistics.lensShadingMapMode</a> (controls)</li>
+          <li><a href="#dynamic_android.hotPixel.mode">android.hotPixel.mode</a> (dynamic)</li>
+        </ul>
+      </li> <!-- tag_RAW -->
+      <li id="tag_HAL2">HAL2 - 
+        Entry is only used by camera device HAL 2.x
+    
+        <ul class="tags_entries">
+          <li><a href="#controls_android.request.inputStreams">android.request.inputStreams</a> (controls)</li>
+          <li><a href="#controls_android.request.outputStreams">android.request.outputStreams</a> (controls)</li>
+          <li><a href="#controls_android.request.type">android.request.type</a> (controls)</li>
+          <li><a href="#static_android.request.maxNumReprocessStreams">android.request.maxNumReprocessStreams</a> (static)</li>
+          <li><a href="#controls_android.blackLevel.lock">android.blackLevel.lock</a> (controls)</li>
+        </ul>
+      </li> <!-- tag_HAL2 -->
+      <li id="tag_FULL">FULL - 
+        Entry is required for full hardware level devices, and optional for other hardware levels
+    
+        <ul class="tags_entries">
+          <li><a href="#static_android.sensor.maxAnalogSensitivity">android.sensor.maxAnalogSensitivity</a> (static)</li>
+        </ul>
+      </li> <!-- tag_FULL -->
+      <li id="tag_DEPTH">DEPTH - 
+        Entry is required for the depth capability.
+    
+        <ul class="tags_entries">
+          <li><a href="#static_android.lens.poseRotation">android.lens.poseRotation</a> (static)</li>
+          <li><a href="#static_android.lens.poseTranslation">android.lens.poseTranslation</a> (static)</li>
+          <li><a href="#static_android.lens.intrinsicCalibration">android.lens.intrinsicCalibration</a> (static)</li>
+          <li><a href="#static_android.lens.radialDistortion">android.lens.radialDistortion</a> (static)</li>
+          <li><a href="#static_android.depth.maxDepthSamples">android.depth.maxDepthSamples</a> (static)</li>
+          <li><a href="#static_android.depth.availableDepthStreamConfigurations">android.depth.availableDepthStreamConfigurations</a> (static)</li>
+          <li><a href="#static_android.depth.availableDepthMinFrameDurations">android.depth.availableDepthMinFrameDurations</a> (static)</li>
+          <li><a href="#static_android.depth.availableDepthStallDurations">android.depth.availableDepthStallDurations</a> (static)</li>
+        </ul>
+      </li> <!-- tag_DEPTH -->
+      <li id="tag_REPROC">REPROC - 
+        Entry is required for the YUV or PRIVATE reprocessing capability.
+    
+        <ul class="tags_entries">
+          <li><a href="#controls_android.edge.mode">android.edge.mode</a> (controls)</li>
+          <li><a href="#static_android.edge.availableEdgeModes">android.edge.availableEdgeModes</a> (static)</li>
+          <li><a href="#controls_android.noiseReduction.mode">android.noiseReduction.mode</a> (controls)</li>
+          <li><a href="#static_android.noiseReduction.availableNoiseReductionModes">android.noiseReduction.availableNoiseReductionModes</a> (static)</li>
+          <li><a href="#static_android.request.maxNumInputStreams">android.request.maxNumInputStreams</a> (static)</li>
+          <li><a href="#static_android.scaler.availableInputOutputFormatsMap">android.scaler.availableInputOutputFormatsMap</a> (static)</li>
+          <li><a href="#controls_android.reprocess.effectiveExposureFactor">android.reprocess.effectiveExposureFactor</a> (controls)</li>
+          <li><a href="#static_android.reprocess.maxCaptureStall">android.reprocess.maxCaptureStall</a> (static)</li>
+          <li><a href="#dynamic_android.edge.mode">android.edge.mode</a> (dynamic)</li>
+          <li><a href="#dynamic_android.noiseReduction.mode">android.noiseReduction.mode</a> (dynamic)</li>
+        </ul>
+      </li> <!-- tag_REPROC -->
+      <li id="tag_FUTURE">FUTURE - 
+        Entry is under-specified and is not required for now. This is for book-keeping purposes;
+        do not implement or use it, as it may be revised in the future.
+    
+        <ul class="tags_entries">
+          <li><a href="#controls_android.demosaic.mode">android.demosaic.mode</a> (controls)</li>
+          <li><a href="#controls_android.edge.strength">android.edge.strength</a> (controls)</li>
+          <li><a href="#controls_android.flash.firingPower">android.flash.firingPower</a> (controls)</li>
+          <li><a href="#controls_android.flash.firingTime">android.flash.firingTime</a> (controls)</li>
+          <li><a href="#static_android.flash.info.chargeDuration">android.flash.info.chargeDuration</a> (static)</li>
+          <li><a href="#static_android.flash.colorTemperature">android.flash.colorTemperature</a> (static)</li>
+          <li><a href="#static_android.flash.maxEnergy">android.flash.maxEnergy</a> (static)</li>
+          <li><a href="#dynamic_android.jpeg.size">android.jpeg.size</a> (dynamic)</li>
+          <li><a href="#controls_android.noiseReduction.strength">android.noiseReduction.strength</a> (controls)</li>
+          <li><a href="#controls_android.request.metadataMode">android.request.metadataMode</a> (controls)</li>
+          <li><a href="#static_android.sensor.baseGainFactor">android.sensor.baseGainFactor</a> (static)</li>
+          <li><a href="#dynamic_android.sensor.temperature">android.sensor.temperature</a> (dynamic)</li>
+          <li><a href="#controls_android.shading.strength">android.shading.strength</a> (controls)</li>
+          <li><a href="#controls_android.statistics.histogramMode">android.statistics.histogramMode</a> (controls)</li>
+          <li><a href="#controls_android.statistics.sharpnessMapMode">android.statistics.sharpnessMapMode</a> (controls)</li>
+          <li><a href="#static_android.statistics.info.histogramBucketCount">android.statistics.info.histogramBucketCount</a> (static)</li>
+          <li><a href="#static_android.statistics.info.maxHistogramCount">android.statistics.info.maxHistogramCount</a> (static)</li>
+          <li><a href="#static_android.statistics.info.maxSharpnessMapValue">android.statistics.info.maxSharpnessMapValue</a> (static)</li>
+          <li><a href="#static_android.statistics.info.sharpnessMapSize">android.statistics.info.sharpnessMapSize</a> (static)</li>
+          <li><a href="#dynamic_android.statistics.histogram">android.statistics.histogram</a> (dynamic)</li>
+          <li><a href="#dynamic_android.statistics.sharpnessMap">android.statistics.sharpnessMap</a> (dynamic)</li>
+        </ul>
+      </li> <!-- tag_FUTURE -->
+    </ul>
+  </div>
+
+  [ <a href="#">top</a> ]
+
+</body>
+</html>
diff --git a/media/camera/docs/html.mako b/media/camera/docs/html.mako
new file mode 100644
index 0000000..b117a5a
--- /dev/null
+++ b/media/camera/docs/html.mako
@@ -0,0 +1,412 @@
+## -*- coding: utf-8 -*-
+<!DOCTYPE html>
+<html>
+<!-- Copyright (C) 2012 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<head>
+  <!-- automatically generated from html.mako. do NOT edit directly -->
+  <meta charset="utf-8" />
+  <title>Android Camera HAL3.2 Properties</title>
+  <style type="text/css">
+     body { background-color: #f7f7f7; font-family: Roboto, sans-serif;}
+     h1 { color: #333333; }
+     h2 { color: #333333; }
+     a:link { color: #258aaf; text-decoration: none}
+     a:hover { color: #459aaf; text-decoration: underline }
+     a:visited { color: #154a5f; text-decoration: none}
+    .section { color: #eeeeee; font-size: 1.5em; font-weight: bold; background-color: #888888; padding: 0.5em 0em 0.5em 0.5em; border-width: thick thin thin thin; border-color: #111111 #777777 #777777 #777777}
+    .kind { color: #eeeeee; font-size: 1.2em; font-weight: bold; padding-left: 1.5em; background-color: #aaaaaa }
+    .entry { background-color: #f0f0f0 }
+    .entry_cont { background-color: #f0f0f0 }
+    .entries_header { background-color: #dddddd; text-align: center}
+
+    /* toc style */
+    .toc_section_header { font-size:1.3em;  }
+    .toc_kind_header { font-size:1.2em;  }
+    .toc_deprecated { text-decoration:line-through; }
+
+    /* table column sizes */
+    table { border-collapse:collapse; table-layout: fixed; width: 100%; word-wrap: break-word }
+    td,th { border: 1px solid; border-color: #aaaaaa; padding-left: 0.5em; padding-right: 0.5em }
+    .th_name { width: 20% }
+    .th_units { width: 10% }
+    .th_tags { width: 5% }
+    .th_details { width: 25% }
+    .th_type { width: 20% }
+    .th_description { width: 20% }
+    .th_range { width: 10% }
+    td { font-size: 0.9em; }
+
+    /* hide the first thead, we need it there only to enforce column sizes */
+    .thead_dummy { visibility: hidden; }
+
+    /* Entry flair */
+    .entry_name { color: #333333; padding-left:1.0em; font-size:1.1em; font-family: monospace; vertical-align:top; }
+    .entry_name_deprecated { text-decoration:line-through; }
+
+    /* Entry type flair */
+    .entry_type_name { font-size:1.1em; color: #669900; font-weight: bold;}
+    .entry_type_name_enum:after { color: #669900; font-weight: bold; content:" (enum)" }
+    .entry_type_visibility { font-weight: bolder; padding-left:1em}
+    .entry_type_synthetic { font-weight: bolder; color: #996600; }
+    .entry_type_hwlevel { font-weight: bolder; color: #000066; }
+    .entry_type_deprecated { font-weight: bolder; color: #4D4D4D; }
+    .entry_type_enum_name { font-family: monospace; font-weight: bolder; }
+    .entry_type_enum_notes:before { content:" - " }
+    .entry_type_enum_notes>p:first-child { display:inline; }
+    .entry_type_enum_value:before { content:" = " }
+    .entry_type_enum_value { font-family: monospace; }
+    .entry ul { margin: 0 0 0 0; list-style-position: inside; padding-left: 0.5em; }
+    .entry ul li { padding: 0 0 0 0; margin: 0 0 0 0;}
+    .entry_range_deprecated { font-weight: bolder; }
+
+    /* Entry tags flair */
+    .entry_tags ul { list-style-type: none; }
+
+    /* Entry details (full docs) flair */
+    .entry_details_header { font-weight: bold; background-color: #dddddd;
+      text-align: center; font-size: 1.1em; margin-left: 0em; margin-right: 0em; }
+
+    /* Entry spacer flair */
+    .entry_spacer { background-color: transparent; border-style: none; height: 0.5em; }
+
+    /* TODO: generate abbr element for each tag link? */
+    /* TODO for each x.y.z try to link it to the entry */
+
+  </style>
+
+  <style>
+
+    @media print {
+      /* broken...
+         supposedly there is a bug in chrome that it lays out tables before
+         it knows its being printed, so the page-break-* styles are ignored
+         */
+        tr { page-break-after: always; page-break-inside: avoid; }
+    }
+
+  </style>
+</head>
+
+<%!
+  import re
+  from metadata_helpers import md
+  from metadata_helpers import IMAGE_SRC_METADATA
+  from metadata_helpers import filter_tags
+  from metadata_helpers import filter_links
+  from metadata_helpers import wbr
+
+  # Turn each blank line (two consecutive newlines) into newline+"<br>" pairs so paragraph breaks render in HTML
+  def br(text):
+    return re.sub(r"(\r?\n)(\r?\n)", r"\1<br>\2<br>", text)
+
+  # Render node (kind w, name "x.y.z") as an in-page link <a href="#w_x.y.z">x.y.z</a>,
+  # matching the id="${prop.kind}_${prop.name}" anchors emitted on each entry row below.
+  def html_anchor(node):
+    return '<a href="#%s_%s">%s</a>' % (node.kind, node.name, node.name)
+
+  # Link target "xxx.yyy#zzz" to https://developer.android.com/reference/xxx/yyy.html#zzz,
+  # using shortname as the link text (defaults to the last dotted component of target).
+  def html_link(target, shortname):
+    if shortname == '':
+      lastdot = target.rfind('.')
+      if lastdot == -1:
+        shortname = target
+      else:
+        shortname = target[lastdot + 1:]
+
+    target = target.replace('.','/')
+    if target.find('#') != -1:
+      target = target.replace('#','.html#')
+    else:
+      target = target + '.html'
+
+    return '<a href="https://developer.android.com/reference/%s">%s</a>' % (target, shortname)
+
+  # Render markdown to HTML via metadata_helpers.md; IMAGE_SRC_METADATA is presumably the image-path base for doc images -- see metadata_helpers
+  def md_html(text):
+    return md(text, IMAGE_SRC_METADATA)
+
+  # Return a text filter that rewrites metadata tag names ("android.x.y.z") into in-page anchors, then doc references into developer-site links
+  def linkify_tags(metadata):
+    def linkify_filter(text):
+      tagged_text = filter_tags(text, metadata, html_anchor)
+      return filter_links(tagged_text, html_link)
+    return linkify_filter
+
+  # Table rows the entry's name cell spans (used as rowspan, despite the "cols" name): 1 base row + 2 per details/hal_details section
+  def entry_cols(prop):
+    cols = 1
+    if prop.details: cols = cols + 2
+    if prop.hal_details: cols = cols + 2
+    return cols
+%>
+
+<body>
+  <h1>Android Camera HAL3.2 Properties</h1>
+\
+<%def name="insert_toc_body(node)">
+  % for nested in node.namespaces:
+${    insert_toc_body(nested)}
+  % endfor
+  % for entry in node.merged_entries:
+            <li
+    % if entry.deprecated:
+                class="toc_deprecated"
+    % endif
+            >${html_anchor(entry)}</li>
+  % endfor
+</%def>
+
+  <h2>Table of Contents</h2>
+  <ul class="toc">
+    <li><a href="#tag_index" class="toc_section_header">Tags</a></li>
+% for root in metadata.outer_namespaces:
+  % for section in root.sections:
+    <li>
+      <span class="toc_section_header"><a href="#section_${section.name}">${section.name}</a></span>
+      <ul class="toc_section">
+      % for kind in section.merged_kinds: # dynamic,static,controls
+        <li>
+          <span class="toc_kind_header">${kind.name}</span>
+          <ul class="toc_section">\
+${          insert_toc_body(kind)}\
+          </ul>
+        </li>
+      % endfor
+      </ul> <!-- toc_section -->
+    </li>
+  % endfor
+% endfor
+  </ul>
+
+
+  <h1>Properties</h1>
+  <table class="properties">
+
+    <thead class="thead_dummy">
+      <tr>
+        <th class="th_name">Property Name</th>
+        <th class="th_type">Type</th>
+        <th class="th_description">Description</th>
+        <th class="th_units">Units</th>
+        <th class="th_range">Range</th>
+        <th class="th_tags">Tags</th>
+      </tr>
+    </thead> <!-- so that the first occurrence of thead is not
+                         above the first occurrence of tr -->
+% for root in metadata.outer_namespaces:
+<!-- <namespace name="${root.name}"> -->
+  % for section in root.sections:
+  <tr><td colspan="6" id="section_${section.name}" class="section">${section.name}</td></tr>
+
+    % if section.description is not None:
+      <tr class="description"><td>${section.description}</td></tr>
+    % endif
+
+    % for kind in section.merged_kinds: # dynamic,static,controls
+      <tr><td colspan="6" class="kind">${kind.name}</td></tr>
+
+      <thead class="entries_header">
+        <tr>
+          <th class="th_name">Property Name</th>
+          <th class="th_type">Type</th>
+          <th class="th_description">Description</th>
+          <th class="th_units">Units</th>
+          <th class="th_range">Range</th>
+          <th class="th_tags">Tags</th>
+        </tr>
+      </thead>
+
+      <tbody>
+
+        <%def name="insert_body(node)">
+            % for nested in node.namespaces:
+                ${insert_namespace(nested)}
+            % endfor
+
+            % for entry in node.merged_entries:
+                ${insert_entry(entry)}
+            % endfor
+        </%def>
+
+        <%def name="insert_namespace(namespace)">
+            ${insert_body(namespace)}
+        </%def>
+
+        <%def name="insert_entry(prop)">
+          <tr class="entry" id="${prop.kind}_${prop.name}">
+            <td class="entry_name
+              % if prop.deprecated:
+                entry_name_deprecated
+              % endif
+             " rowspan="${entry_cols(prop)}">
+              ${prop.name | wbr}
+            </td>
+            <td class="entry_type">
+              % if prop.enum:
+                <span class="entry_type_name entry_type_name_enum">${prop.type}</span>
+              % else:
+                <span class="entry_type_name">${prop.type}</span>
+              % endif
+              % if prop.container is not None:
+                <span class="entry_type_container">x</span>
+              % endif
+
+              % if prop.container == 'array':
+                <span class="entry_type_array">
+                  ${" x ".join(prop.container_sizes)}
+                </span>
+              % elif prop.container == 'tuple':
+                <ul class="entry_type_tuple">
+                % for val in prop.tuple_values:
+                  <li>${val}</li>
+                % endfor
+                </ul>
+              % endif
+              <span class="entry_type_visibility"> [${prop.applied_visibility}${" as %s" %prop.typedef.name if prop.typedef else ""}]</span>
+
+              % if prop.synthetic:
+              <span class="entry_type_synthetic">[synthetic] </span>
+              % endif
+
+              % if prop.hwlevel:
+              <span class="entry_type_hwlevel">[${prop.hwlevel}] </span>
+              % endif
+
+              % if prop.deprecated:
+              <span class="entry_type_deprecated">[deprecated] </span>
+              % endif
+
+              % if prop.type_notes is not None:
+                <div class="entry_type_notes">${prop.type_notes | wbr}</div>
+              % endif
+
+              % if prop.enum:
+                <ul class="entry_type_enum">
+                  % for value in prop.enum.values:
+                  <li>
+                    <span class="entry_type_enum_name">${value.name}</span>
+                  % if value.deprecated:
+                    <span class="entry_type_enum_deprecated">[deprecated]</span>
+                  % endif:
+                  % if value.optional:
+                    <span class="entry_type_enum_optional">[optional]</span>
+                  % endif:
+                  % if value.hidden:
+                    <span class="entry_type_enum_hidden">[hidden]</span>
+                  % endif:
+                  % if value.id is not None:
+                    <span class="entry_type_enum_value">${value.id}</span>
+                  % endif
+                  % if value.notes is not None:
+                    <span class="entry_type_enum_notes">${value.notes | md_html, linkify_tags(metadata), wbr}</span>
+                  % endif
+                  </li>
+                  % endfor
+                </ul>
+              % endif
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+            % if prop.description is not None:
+              ${prop.description | md_html, linkify_tags(metadata), wbr}
+            % endif
+            </td>
+
+            <td class="entry_units">
+            % if prop.units is not None:
+              ${prop.units | wbr}
+            % endif
+            </td>
+
+            <td class="entry_range">
+            % if prop.deprecated:
+              <p><span class="entry_range_deprecated">Deprecated</span>. Do not use.</p>
+            % endif
+            % if prop.range is not None:
+              ${prop.range | md_html, linkify_tags(metadata), wbr}
+            % endif
+            </td>
+
+            <td class="entry_tags">
+            % if next(prop.tags, None):
+              <ul class="entry_tags">
+              % for tag in prop.tags:
+                  <li><a href="#tag_${tag.id}">${tag.id}</a></li>
+              % endfor
+              </ul>
+            % endif
+            </td>
+
+          </tr>
+          % if prop.details is not None:
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              ${prop.details | md_html, linkify_tags(metadata), wbr}
+            </td>
+          </tr>
+          % endif
+
+          % if prop.hal_details is not None:
+          <tr class="entries_header">
+            <th class="th_details" colspan="5">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="5">
+              ${prop.hal_details | md_html, linkify_tags(metadata), wbr}
+            </td>
+          </tr>
+          % endif
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+           <!-- end of entry -->
+        </%def>
+
+        ${insert_body(kind)}
+
+      <!-- end of kind -->
+      </tbody>
+    % endfor # for each kind
+
+  <!-- end of section -->
+  % endfor
+<!-- </namespace> -->
+% endfor
+  </table>
+
+  <div class="tags" id="tag_index">
+    <h2>Tags</h2>
+    <ul>
+    % for tag in metadata.tags:
+      <li id="tag_${tag.id}">${tag.id} - ${tag.description}
+        <ul class="tags_entries">
+        % for prop in tag.entries:
+          <li>${html_anchor(prop)} (${prop.kind})</li>
+        % endfor
+        </ul>
+      </li> <!-- tag_${tag.id} -->
+    % endfor
+    </ul>
+  </div>
+
+  [ <a href="#">top</a> ]
+
+</body>
+</html>
diff --git a/media/camera/docs/images/camera2/metadata/android.colorCorrection.mode/processing_pipeline.png b/media/camera/docs/images/camera2/metadata/android.colorCorrection.mode/processing_pipeline.png
new file mode 100644
index 0000000..7578b48
--- /dev/null
+++ b/media/camera/docs/images/camera2/metadata/android.colorCorrection.mode/processing_pipeline.png
Binary files differ
diff --git a/media/camera/docs/images/camera2/metadata/android.statistics.lensShadingMap/blue_shading.png b/media/camera/docs/images/camera2/metadata/android.statistics.lensShadingMap/blue_shading.png
new file mode 100644
index 0000000..7b10f6b
--- /dev/null
+++ b/media/camera/docs/images/camera2/metadata/android.statistics.lensShadingMap/blue_shading.png
Binary files differ
diff --git a/media/camera/docs/images/camera2/metadata/android.statistics.lensShadingMap/green_e_shading.png b/media/camera/docs/images/camera2/metadata/android.statistics.lensShadingMap/green_e_shading.png
new file mode 100644
index 0000000..41972cf
--- /dev/null
+++ b/media/camera/docs/images/camera2/metadata/android.statistics.lensShadingMap/green_e_shading.png
Binary files differ
diff --git a/media/camera/docs/images/camera2/metadata/android.statistics.lensShadingMap/green_o_shading.png b/media/camera/docs/images/camera2/metadata/android.statistics.lensShadingMap/green_o_shading.png
new file mode 100644
index 0000000..d26600b
--- /dev/null
+++ b/media/camera/docs/images/camera2/metadata/android.statistics.lensShadingMap/green_o_shading.png
Binary files differ
diff --git a/media/camera/docs/images/camera2/metadata/android.statistics.lensShadingMap/inv_shading.png b/media/camera/docs/images/camera2/metadata/android.statistics.lensShadingMap/inv_shading.png
new file mode 100644
index 0000000..1e7208e
--- /dev/null
+++ b/media/camera/docs/images/camera2/metadata/android.statistics.lensShadingMap/inv_shading.png
Binary files differ
diff --git a/media/camera/docs/images/camera2/metadata/android.statistics.lensShadingMap/red_shading.png b/media/camera/docs/images/camera2/metadata/android.statistics.lensShadingMap/red_shading.png
new file mode 100644
index 0000000..ecef3ae
--- /dev/null
+++ b/media/camera/docs/images/camera2/metadata/android.statistics.lensShadingMap/red_shading.png
Binary files differ
diff --git a/media/camera/docs/images/camera2/metadata/android.tonemap.curveRed/gamma_tonemap.png b/media/camera/docs/images/camera2/metadata/android.tonemap.curveRed/gamma_tonemap.png
new file mode 100644
index 0000000..a02fd89
--- /dev/null
+++ b/media/camera/docs/images/camera2/metadata/android.tonemap.curveRed/gamma_tonemap.png
Binary files differ
diff --git a/media/camera/docs/images/camera2/metadata/android.tonemap.curveRed/inverse_tonemap.png b/media/camera/docs/images/camera2/metadata/android.tonemap.curveRed/inverse_tonemap.png
new file mode 100644
index 0000000..c309ac5
--- /dev/null
+++ b/media/camera/docs/images/camera2/metadata/android.tonemap.curveRed/inverse_tonemap.png
Binary files differ
diff --git a/media/camera/docs/images/camera2/metadata/android.tonemap.curveRed/linear_tonemap.png b/media/camera/docs/images/camera2/metadata/android.tonemap.curveRed/linear_tonemap.png
new file mode 100644
index 0000000..414fad4
--- /dev/null
+++ b/media/camera/docs/images/camera2/metadata/android.tonemap.curveRed/linear_tonemap.png
Binary files differ
diff --git a/media/camera/docs/images/camera2/metadata/android.tonemap.curveRed/rec709_tonemap.png b/media/camera/docs/images/camera2/metadata/android.tonemap.curveRed/rec709_tonemap.png
new file mode 100644
index 0000000..c147a87
--- /dev/null
+++ b/media/camera/docs/images/camera2/metadata/android.tonemap.curveRed/rec709_tonemap.png
Binary files differ
diff --git a/media/camera/docs/images/camera2/metadata/android.tonemap.curveRed/srgb_tonemap.png b/media/camera/docs/images/camera2/metadata/android.tonemap.curveRed/srgb_tonemap.png
new file mode 100644
index 0000000..4ce2125
--- /dev/null
+++ b/media/camera/docs/images/camera2/metadata/android.tonemap.curveRed/srgb_tonemap.png
Binary files differ
diff --git a/media/camera/docs/metadata-check-dependencies b/media/camera/docs/metadata-check-dependencies
new file mode 100755
index 0000000..56f2e27
--- /dev/null
+++ b/media/camera/docs/metadata-check-dependencies
@@ -0,0 +1,114 @@
+#!/bin/bash
+
+#
+# Copyright (C) 2012 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+packager=""
+retcode=0
+if [[ "$OSTYPE" == "darwin"* ]]
+then
+    packager="macports"
+
+    if ! which port >& /dev/null
+    then
+        echo "Missing port binary, please install from http://www.macports.org/" >& 2
+    fi
+elif [[ "$OSTYPE" == "linux-gnu" ]] && which apt-get >& /dev/null
+then
+    packager="apt-get"
+fi
+
+function packager_install
+{
+    if [[ $packager == "macports" ]]
+    then
+        echo "sudo port install $1"
+    elif [[ $packager == "apt-get" ]]
+    then
+        echo "sudo apt-get install $1"
+    else
+        echo "<your package manager> install $1"
+    fi
+}
+
+# Check that a binary is available; if not, print an install hint.
+#
+# $1: binary name to look for on the PATH
+# $2: macports package that provides it
+# $3: apt-get package that provides it
+# Returns 0 if the binary is present; otherwise sets retcode=1 and returns 1.
+function binary_check()
+{
+    local bin=$1
+    local macports=$2
+    local aptget=$3
+
+    local pkg=""
+
+    if type -f "$bin" >& /dev/null
+    then
+        return 0
+    fi
+
+    # Map to the package name for whichever packager was detected.
+    if [[ $packager == "macports" ]]
+    then
+        pkg="$macports"
+    elif [[ $packager == "apt-get" ]]
+    then
+        pkg="$aptget"
+    fi
+
+    # With an unknown packager no hint is printed, but failure is still recorded.
+    if [[ -n $pkg ]]
+    then
+        echo "Missing $bin binary; please install with '$(packager_install $pkg)'"
+    fi
+
+    retcode=1
+    return 1
+}
+
+# Check that a python module is importable; if not, print an install hint.
+#
+# $1: python module name to import
+# $2: macports package that provides it
+# $3: apt-get package that provides it
+# Returns 0 if importable; otherwise sets retcode=1 and returns 1.
+function python_check()
+{
+    local mod=$1
+    local macports=$2
+    local aptget=$3
+
+    local pkg=""
+
+    if python -c "import $mod" >& /dev/null
+    then
+        return 0
+    fi
+
+    # Map to the package name for whichever packager was detected.
+    if [[ $packager == "macports" ]]
+    then
+        pkg="$macports"
+    elif [[ $packager == "apt-get" ]]
+    then
+        pkg="$aptget"
+    fi
+
+    # With an unknown packager no hint is printed, but failure is still recorded.
+    if [[ -n $pkg ]]
+    then
+        echo "Missing python module $mod, please install with '$(packager_install $pkg)'"
+    fi
+
+    retcode=1
+    return 1
+}
+
+binary_check xmllint libxml2 libxml2-utils
+binary_check tidy tidy tidy
+binary_check python python27 python2.7
+python_check bs4 py27-beautifulsoup4 python-bs4
+python_check mako py27-mako python-mako
+
+exit $retcode
+
diff --git a/media/camera/docs/metadata-generate b/media/camera/docs/metadata-generate
new file mode 100755
index 0000000..fd21fbd
--- /dev/null
+++ b/media/camera/docs/metadata-generate
@@ -0,0 +1,223 @@
+#!/bin/bash
+
+#
+# Copyright (C) 2012 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+#
+# Generate all files we have templates for:
+#   docs.html
+#   ../src/camera_metadata_tag_info.c
+#   ../src/camera_metadata_tags.h
+#   ../../../../cts/tests/tests/hardware/src/android/hardware/camera2/cts/CaptureResultTest.java
+#   ../../../../frameworks/base/core/java/android/hardware/camera2/CameraCharacteristics.java
+#   ../../../../frameworks/base/core/java/android/hardware/camera2/CaptureRequest.java
+#   ../../../../frameworks/base/core/java/android/hardware/camera2/CaptureResult.java
+
+# All generated paths are derived from the Android build environment.
+if [[ -z $ANDROID_BUILD_TOP ]]; then
+    echo "Please source build/envsetup.sh before running script" >& 2
+    exit 1
+fi
+
+thisdir=$(cd "$(dirname "$0")"; pwd)
+fwkdir="$ANDROID_BUILD_TOP/frameworks/base/core/java/android/hardware/camera2/"
+fwkdir_html="$ANDROID_BUILD_TOP/frameworks/base/docs/html"
+ctsdir="$ANDROID_BUILD_TOP/cts/tests/tests/hardware/src/android/hardware/camera2/cts"
+outdir="$ANDROID_PRODUCT_OUT/obj/ETC/system-media-camera-docs_intermediates"
+# Files generated or modified so far (newline-separated list entries).
+out_files=()
+
+# Print $1 as a path relative to the current working directory.
+# (Uses python 2 print-statement syntax.)
+function relpath() {
+    python -c "import os.path; print os.path.relpath('$1', '$PWD')"
+}
+
+# Generates a file. Appends to $out_files array as a side effect.
+#
+# $1: input template, relative to this script's directory
+# $2: output file, relative to this script's directory
+function gen_file() {
+    local in=$thisdir/$1
+    local out=$thisdir/$2
+
+    gen_file_abs "$in" "$out"
+    return $?
+}
+
+# Generate output $2 from template $1 (both absolute paths).
+# Unless $3 is "no", the output is also recorded in $out_files so the
+# final git summary can report it.
+function gen_file_abs() {
+    local in="$1"
+    local out="$2"
+    local intermediates="$3"
+
+    python $thisdir/metadata_parser_xml.py $thisdir/metadata_properties.xml $in $out
+
+    local succ=$?
+
+    if [[ $succ -eq 0 ]]
+    then
+        echo "OK: Generated $(relpath "$out")"
+        if [[ "$intermediates" != "no" ]]; then
+          out_files+=$'\n'" $out"
+        fi
+    else
+        echo "FAIL: Errors while generating $(relpath "$out")" >& 2
+    fi
+
+    return $succ
+}
+
+# Print a list of git repository paths which were affected after file generation
+#
+# $@: generated/modified file paths
+# stdout: unique git top-level directories containing uncommitted changes
+function affected_git_directories() {
+    local input_files=($@)
+    local git_directories=()
+
+    for file in "${input_files[@]}"; do
+        local dir_path="$(dirname "$file")"
+        echo "Trying to cd into $dir_path" >& /dev/null
+        # Absolute path to the git repository root of that file.
+        # Declare separately from the assignment: 'local var=$(cmd)' makes
+        # $? report the status of 'local' (always 0), not of the command,
+        # so the git failure check below would never trigger.
+        local git_path
+        git_path="$(cd "$dir_path";
+                    git rev-parse --show-toplevel 2> /dev/null)"
+        if [[ $? -eq 0 ]]; then
+            # Both staged and unstaged changes (ignore untracked '??' entries)
+            local diff_result
+            diff_result="$(cd "$dir_path";
+                           git status --porcelain | egrep -c -v '^[?][?]')"
+            echo "Diff result was $diff_result" >& /dev/null
+            if [[ $diff_result -eq 0 ]]; then
+                echo "No changes in ${git_path}" >& /dev/null
+            else
+                echo "There are changes in ${git_path}" >& /dev/null
+                git_directories+=("$git_path")
+            fi
+        fi
+    done
+
+    # print as result the unique list of git directories affected
+    printf %s\\n "${git_directories[@]}" | sort | uniq
+}
+
+# Insert a file into the middle of another, starting at the line containing the
+# start delim and ending on the end delim, both of which are replaced
+#
+# $1: file holding the replacement content
+# $2: destination file, rewritten in place
+function insert_file() {
+    local src_part="$1"
+    local dst_file="$2"
+    local start_delim="/*@O~"
+    local end_delim="~O@*/"
+
+    local start_line="$(grep -n -F "${start_delim}" "${dst_file}" | cut -d: -f1)"
+    local end_line="$(grep -n -F "${end_delim}" "${dst_file}" | cut -d: -f1)"
+
+    # Do the sanity checks BEFORE any arithmetic: (( start_line-- )) on an
+    # empty variable silently assigns it -1, which would defeat the -z
+    # checks and let a corrupt file be rewritten with garbage.
+
+    if [[ -z "$start_line" ]]; then
+       echo "No starting delimiter found in ${dst_file}" >& 2
+       echo "FAIL: Errors in inserting into $(relpath ${dst_file})" >& 2
+       return 1
+    fi
+
+    if [[ -z "$end_line" ]]; then
+       echo "No ending delimiter found in ${dst_file}" >& 2
+       echo "FAIL: Errors in inserting into $(relpath ${dst_file})" >& 2
+       return 1
+    fi
+
+    if [[ "$start_line" -ge "$end_line" ]]; then
+       echo "Starting delim later than ending delim: $start_line vs $end_line" >& 2
+       echo "FAIL: Errors in inserting into $(relpath ${dst_file})" >& 2
+       return 1
+    fi
+
+    # Adjust cutoff points so head/tail below exclude the delimiter lines
+    (( start_line-- ))
+    (( end_line++ ))
+
+    local tmp_name=$(mktemp -t XXXXXXXX)
+
+    # Compose the three parts of the final file together
+
+    head -n "$start_line" "${dst_file}" > "${tmp_name}"
+    cat "${src_part}" >> "${tmp_name}"
+    tail -n "+${end_line}" "${dst_file}" >> "${tmp_name}"
+
+    # And replace the destination file with the new version
+
+    mv "${tmp_name}" "${dst_file}"
+    echo "OK: Inserted $(relpath "$src_part") into $(relpath "$dst_file")"
+    out_files+=$'\n'" $dst_file"
+}
+
+# Recursively copy a directory tree from $1 to $2. Pretty-prints status.
+#
+# $1: source directory, relative to this script's directory
+# $2: destination directory, absolute path (must already exist)
+function copy_directory() {
+    local src="$thisdir/$1" # Relative to directory of this script
+    local dst="$2" # Absolute path
+
+    if ! [[ -d $src ]]; then
+        echo "FAIL: Source directory $src does not exist" >& 2
+        return 1
+    fi
+    if ! [[ -d $dst ]]; then
+        echo "FAIL: Destination directory $dst does not exist" >& 2
+        return 1
+    fi
+
+    cp -R "$src" "$dst"
+    local retval=$?
+
+    if [[ $retval -ne 0 ]]; then
+        echo "ERROR: Failed to copy $(relpath "$src") to $(relpath "$dst")" >& 2
+    else
+        echo "OK: Copied $(relpath "$src") to $(relpath "$dst")"
+    fi
+
+    return $retval
+}
+
+$thisdir/metadata-check-dependencies || exit 1
+$thisdir/metadata-validate $thisdir/metadata_properties.xml || exit 1
+$thisdir/metadata-parser-sanity-check || exit 1
+
+# Generate HTML properties documentation
+gen_file html.mako docs.html || exit 1
+
+# Generate C API headers and implementation
+gen_file camera_metadata_tag_info.mako ../src/camera_metadata_tag_info.c || exit 1
+gen_file camera_metadata_tags.mako ../include/system/camera_metadata_tags.h || exit 1
+
+# Generate Java API definitions
+mkdir -p "${outdir}"
+gen_file_abs CameraMetadataEnums.mako "$outdir/CameraMetadataEnums.java.part" no || exit 1
+gen_file_abs CameraCharacteristicsKeys.mako "$outdir/CameraCharacteristicsKeys.java.part" no || exit 1
+gen_file_abs CaptureRequestKeys.mako "$outdir/CaptureRequestKeys.java.part" no || exit 1
+gen_file_abs CaptureResultKeys.mako "$outdir/CaptureResultKeys.java.part" no || exit 1
+gen_file_abs CaptureResultTest.mako "$outdir/CaptureResultTest.java.part" no || exit 1
+
+insert_file "$outdir/CameraMetadataEnums.java.part" "$fwkdir/CameraMetadata.java" || exit 1
+insert_file "$outdir/CameraCharacteristicsKeys.java.part" "$fwkdir/CameraCharacteristics.java" || exit 1
+insert_file "$outdir/CaptureRequestKeys.java.part" "$fwkdir/CaptureRequest.java" || exit 1
+insert_file "$outdir/CaptureResultKeys.java.part" "$fwkdir/CaptureResult.java" || exit 1
+insert_file "$outdir/CaptureResultTest.java.part" "$ctsdir/CaptureResultTest.java" || exit 1
+
+# Copy ./images directory into javadoc directory
+copy_directory "images" "$fwkdir_html" || exit 1
+
+echo ""
+echo "===================================================="
+echo "Successfully generated all metadata source files"
+echo "===================================================="
+echo ""
+
+echo "****************************************************"
+echo "The following git repositories need to be committed:"
+echo "****************************************************"
+echo ""
+affected_git_directories "${out_files[@]}"
+echo ""
+
+exit 0
diff --git a/media/camera/docs/metadata-parser-sanity-check b/media/camera/docs/metadata-parser-sanity-check
new file mode 100755
index 0000000..386960a
--- /dev/null
+++ b/media/camera/docs/metadata-parser-sanity-check
@@ -0,0 +1,65 @@
+#!/bin/bash
+
+#
+# Copyright (C) 2012 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+#
+# Sanity check the XML parser by making sure it's generating the same data
+# as the original parsed data.
+#
+
+thisdir=$(cd "$(dirname "$0")"; pwd)
+
+$thisdir/metadata-check-dependencies || exit 1
+
+tmp_out=$(mktemp -t tmp.XXXXXXXXXX)
+tmp_tidy1=$(mktemp -t tmp.XXXXXXXXXX)
+tmp_tidy2=$(mktemp -t tmp.XXXXXXXXXX)
+
+function check_test
+{
+    local file="$1"
+    local results
+    results="$(python "$file" 2>&1)"
+    local retval=$?
+    if [[ $retval -ne 0 ]]
+    then
+        echo "$results" >& 2
+        echo "FAILED: Unit tests $file"
+    else
+        echo "SUCCESS: Unit tests $file"
+    fi
+    return $retval
+}
+
+check_test "$thisdir/metadata_model_test.py" || exit 1
+check_test "$thisdir/metadata_helpers_test.py" || exit 1
+python $thisdir/metadata_parser_xml.py $thisdir/metadata_properties.xml $thisdir/metadata_template.mako $tmp_out || exit 1
+tidy -indent -xml -quiet $thisdir/metadata_properties.xml > $tmp_tidy1
+tidy -indent -xml -quiet $tmp_out > $tmp_tidy2
+
+diff $tmp_tidy1 $tmp_tidy2
+exit_code=$?
+rm $tmp_out $tmp_tidy1 $tmp_tidy2
+
+if [[ $exit_code -ne 0 ]]
+then
+    echo "ERROR: Files differ, please check parser logic" 1>&2
+else
+    echo "SUCCESS: Files are the same!" 1>&2
+fi
+
+exit $exit_code
diff --git a/media/camera/docs/metadata-validate b/media/camera/docs/metadata-validate
new file mode 100755
index 0000000..a7755ad
--- /dev/null
+++ b/media/camera/docs/metadata-validate
@@ -0,0 +1,33 @@
+#!/bin/bash
+
+#
+# Copyright (C) 2012 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+thisdir=$(cd "$(dirname "$0")"; pwd)
+$thisdir/metadata-check-dependencies || exit 1
+
+if [[ $# -lt 1 ]]
+then
+    echo "Usage: ${BASH_SOURCE##*/} <properties-file-name.xml>" 1>&2
+    # Exit non-zero: a bare 'exit' here returned the echo's status (0),
+    # letting callers that use '|| exit 1' treat a usage error as success.
+    exit 1
+fi
+
+schema=$thisdir/metadata_properties.xsd
+doc=$1
+
+# Validate against the XML schema first, then run the python validator.
+xmllint --noout --schema $schema $doc || exit 1
+python $thisdir/metadata_validate.py $doc || exit 1
+
diff --git a/media/camera/docs/metadata_helpers.py b/media/camera/docs/metadata_helpers.py
new file mode 100644
index 0000000..9a6fe9b
--- /dev/null
+++ b/media/camera/docs/metadata_helpers.py
@@ -0,0 +1,1114 @@
+#
+# Copyright (C) 2012 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""
+A set of helpers for rendering Mako templates with a Metadata model.
+"""
+
+import metadata_model
+import re
+import markdown
+import textwrap
+import sys
+import bs4
+# Monkey-patch BS4. WBR element must not have an end tag.
+bs4.builder.HTMLTreeBuilder.empty_element_tags.add("wbr")
+
+from collections import OrderedDict
+
+# Relative path from HTML file to the base directory used by <img> tags
+IMAGE_SRC_METADATA="images/camera2/metadata/"
+
+# Prepend this path to each <img src="foo"> in javadocs
+JAVADOC_IMAGE_SRC_METADATA="../../../../" + IMAGE_SRC_METADATA
+
+_context_buf = None
+
+def _is_sec_or_ins(x):
+  # Predicate: true for the node types rendered as "sections" in the
+  # generated C code (Section and InnerNamespace).
+  return isinstance(x, metadata_model.Section) or    \
+         isinstance(x, metadata_model.InnerNamespace)
+
+##
+## Metadata Helpers
+##
+
+def find_all_sections(root):
+  """
+  Find all descendants that are Section or InnerNamespace instances.
+
+  Args:
+    root: a Metadata instance
+
+  Returns:
+    A list of Section/InnerNamespace instances
+
+  Remarks:
+    These are known as "sections" in the generated C code.
+  """
+  return root.find_all(_is_sec_or_ins)
+
+def find_parent_section(entry):
+  """
+  Find the closest ancestor that is either a Section or InnerNamespace.
+
+  Args:
+    entry: an Entry or Clone node
+
+  Returns:
+    An instance of Section or InnerNamespace
+  """
+  return entry.find_parent_first(_is_sec_or_ins)
+
+# find uniquely named entries (w/o recursing through inner namespaces)
+def find_unique_entries(node):
+  """
+  Find all uniquely named entries, without recursing through inner namespaces.
+
+  Args:
+    node: a Section or InnerNamespace instance
+
+  Yields:
+    A sequence of MergedEntry nodes representing an entry
+
+  Remarks:
+    This collapses multiple entries with the same fully qualified name into
+    one entry (e.g. if there are multiple entries in different kinds).
+  """
+  if not isinstance(node, metadata_model.Section) and    \
+     not isinstance(node, metadata_model.InnerNamespace):
+      raise TypeError("expected node to be a Section or InnerNamespace")
+
+  d = OrderedDict()
+  # remove the 'kinds' from the path between sec and the closest entries
+  # then search the immediate children of the search path
+  search_path = isinstance(node, metadata_model.Section) and node.kinds \
+                or [node]
+  for i in search_path:
+      for entry in i.entries:
+          d[entry.name] = entry
+
+  for k,v in d.iteritems():
+      yield v.merge()
+
+def path_name(node):
+  """
+  Calculate a period-separated string path from the root to this element,
+  by joining the names of each node and excluding the Metadata/Kind nodes
+  from the path.
+
+  Args:
+    node: a Node instance
+
+  Returns:
+    A string path
+  """
+
+  isa = lambda x,y: isinstance(x, y)
+  fltr = lambda x: not isa(x, metadata_model.Metadata) and \
+                   not isa(x, metadata_model.Kind)
+
+  path = node.find_parents(fltr)
+  path = list(path)
+  path.reverse()
+  path.append(node)
+
+  return ".".join((i.name for i in path))
+
+def has_descendants_with_enums(node):
+  """
+  Determine whether or not the current node is or has any descendants with an
+  Enum node.
+
+  Args:
+    node: a Node instance
+
+  Returns:
+    True if it finds an Enum node in the subtree, False otherwise
+  """
+  return bool(node.find_first(lambda x: isinstance(x, metadata_model.Enum)))
+
+def get_children_by_throwing_away_kind(node, member='entries'):
+  """
+  Get the children of this node by compressing the subtree together by removing
+  the kind and then combining any children nodes with the same name together.
+
+  Args:
+    node: An instance of Section, InnerNamespace, or Kind
+
+  Returns:
+    An iterable over the combined children of the subtree of node,
+    as if the Kinds never existed.
+
+  Remarks:
+    Not recursive. Call this function repeatedly on each child.
+  """
+
+  if isinstance(node, metadata_model.Section):
+    # Note that this makes jump from Section to Kind,
+    # skipping the Kind entirely in the tree.
+    node_to_combine = node.combine_kinds_into_single_node()
+  else:
+    node_to_combine = node
+
+  combined_kind = node_to_combine.combine_children_by_name()
+
+  return (i for i in getattr(combined_kind, member))
+
+def get_children_by_filtering_kind(section, kind_name, member='entries'):
+  """
+  Takes a section and yields the children of the merged kind under this section.
+
+  Args:
+    section: An instance of Section
+    kind_name: A name of the kind, i.e. 'dynamic' or 'static' or 'controls'
+
+  Returns:
+    An iterable over the children of the specified merged kind.
+  """
+
+  matched_kind = next((i for i in section.merged_kinds if i.name == kind_name), None)
+
+  if matched_kind:
+    return getattr(matched_kind, member)
+  else:
+    return ()
+
+##
+## Filters
+##
+
+# abcDef.xyz -> ABC_DEF_XYZ
+def csym(name):
+  """
+  Convert an entry name string into an uppercase C symbol.
+
+  Returns:
+    A string
+
+  Example:
+    csym('abcDef.xyz') == 'ABC_DEF_XYZ'
+  """
+  newstr = name
+  newstr = "".join([i.isupper() and ("_" + i) or i for i in newstr]).upper()
+  newstr = newstr.replace(".", "_")
+  return newstr
+
+# abcDef.xyz -> abc_def_xyz
+def csyml(name):
+  """
+  Convert an entry name string into a lowercase C symbol.
+
+  Returns:
+    A string
+
+  Example:
+    csyml('abcDef.xyz') == 'abc_def_xyz'
+  """
+  return csym(name).lower()
+
+# pad with spaces to make string len == size. add new line if too big
+def ljust(size, indent=4):
+  """
+  Creates a function that given a string will pad it with spaces to make
+  the string length == size. Adds a new line if the string was too big.
+
+  Args:
+    size: an integer representing how much spacing should be added
+    indent: an integer representing the initial indendation level
+
+  Returns:
+    A function that takes a string and returns a string.
+
+  Example:
+    ljust(8)("hello") == 'hello   '
+
+  Remarks:
+    Deprecated. Use pad instead since it works for non-first items in a
+    Mako template.
+  """
+  def inner(what):
+    newstr = what.ljust(size)
+    if len(newstr) > size:
+      # Too long to pad in place: emit the string followed by a new line
+      # indented to indent + size spaces.
+      return what + "\n" + "".ljust(indent + size)
+    else:
+      return newstr
+  return inner
+
+def _find_new_line():
+  """
+  Return the number of characters on the current (last) line of the
+  module-level _context_buf, i.e. the current output column.
+
+  Raises:
+    ValueError: if _context_buf has not been set.
+  """
+
+  if _context_buf is None:
+    raise ValueError("Context buffer was not set")
+
+  buf = _context_buf
+  x = -1 # since the first read is always ''
+  cur_pos = buf.tell()
+  # Scan backwards from the current position until a newline (or the start
+  # of the buffer) is hit, counting the characters stepped over.
+  while buf.tell() > 0 and buf.read(1) != '\n':
+    buf.seek(cur_pos - x)
+    x = x + 1
+
+  # Leave the buffer position exactly where it was.
+  buf.seek(cur_pos)
+
+  return int(x)
+
+# Pad the string until the buffer reaches the desired column.
+# If string is too long, insert a new line with 'col' spaces instead
+def pad(col):
+  """
+  Create a function that given a string will pad it to the specified column col.
+  If the string overflows the column, put the string on a new line and pad it.
+
+  Args:
+    col: an integer specifying the column number
+
+  Returns:
+    A function that given a string will produce a padded string.
+
+  Example:
+    pad(8)("hello") == 'hello   '
+
+  Remarks:
+    This keeps track of the line written by Mako so far, so it will always
+    align to the column number correctly.
+  """
+  def inner(what):
+    wut = int(col)
+    # Column the output buffer is currently at (chars since last newline).
+    current_col = _find_new_line()
+
+    if len(what) > wut - current_col:
+      # Overflow: emit the string, then start a new line indented to the
+      # target column ("\n".ljust(col) is a newline plus col-1 spaces).
+      return what + "\n".ljust(col)
+    else:
+      return what.ljust(wut - current_col)
+  return inner
+
+# int32 -> TYPE_INT32, byte -> TYPE_BYTE, etc. note that enum -> TYPE_INT32
+def ctype_enum(what):
+  """
+  Generate a camera_metadata_type_t symbol from a type string.
+
+  Args:
+    what: a type string
+
+  Returns:
+    A string representing the camera_metadata_type_t
+
+  Example:
+    ctype_enum('int32') == 'TYPE_INT32'
+    ctype_enum('int64') == 'TYPE_INT64'
+    ctype_enum('float') == 'TYPE_FLOAT'
+
+  Remarks:
+    An enum is coerced to a byte since the rest of the camera_metadata
+    code doesn't support enums directly yet.
+  """
+  return 'TYPE_%s' %(what.upper())
+
+
+# Calculate a java type name from an entry with a Typedef node
+def _jtypedef_type(entry):
+  typedef = entry.typedef
+  additional = ''
+
+  # Hacky way to deal with arrays. Assume that if we have
+  # size 'Constant x N' the Constant is part of the Typedef size.
+  # So something sized just 'Constant', 'Constant1 x Constant2', etc
+  # is not treated as a real java array.
+  if entry.container == 'array':
+    has_variable_size = False
+    for size in entry.container_sizes:
+      try:
+        size_int = int(size)
+      except ValueError:
+        has_variable_size = True
+
+    if has_variable_size:
+      additional = '[]'
+
+  try:
+    name = typedef.languages['java']
+
+    return "%s%s" %(name, additional)
+  except KeyError:
+    return None
+
+# Box if primitive. Otherwise leave unboxed.
+def _jtype_box(type_name):
+  mapping = {
+    'boolean': 'Boolean',
+    'byte': 'Byte',
+    'int': 'Integer',
+    'float': 'Float',
+    'double': 'Double',
+    'long': 'Long'
+  }
+
+  return mapping.get(type_name, type_name)
+
+def jtype_unboxed(entry):
+  """
+  Calculate the Java type from an entry type string, to be used whenever we
+  need the regular type in Java. It's not boxed, so it can't be used as a
+  generic type argument when the entry type happens to resolve to a primitive.
+
+  Args:
+    entry: an instance of metadata_model.Entry
+
+  Remarks:
+    Since Java generics cannot be instantiated with primitives, this version
+    is not applicable in that case. Use jtype_boxed instead for that.
+
+  Returns:
+    The string representing the Java type.
+  """
+  if not isinstance(entry, metadata_model.Entry):
+    raise ValueError("Expected entry to be an instance of Entry")
+
+  metadata_type = entry.type
+
+  java_type = None
+
+  # A typedef with a Java mapping wins over the built-in type mapping.
+  if entry.typedef:
+    typedef_name = _jtypedef_type(entry)
+    if typedef_name:
+      java_type = typedef_name # already takes into account arrays
+
+  if not java_type:
+    if not java_type and entry.enum and metadata_type == 'byte':
+      # Always map byte enums to Java ints, unless there's a typedef override
+      base_type = 'int'
+
+    else:
+      mapping = {
+        'int32': 'int',
+        'int64': 'long',
+        'float': 'float',
+        'double': 'double',
+        'byte': 'byte',
+        'rational': 'Rational'
+      }
+
+      base_type = mapping[metadata_type]
+
+    # Convert to array (enums, basic types)
+    if entry.container == 'array':
+      additional = '[]'
+    else:
+      additional = ''
+
+    java_type = '%s%s' %(base_type, additional)
+
+  # No boxing happens here; jtype_boxed() boxes this result when needed.
+  return java_type
+
+def jtype_boxed(entry):
+  """
+  Calculate the Java type from an entry type string, to be used as a generic
+  type argument in Java. The type is guaranteed to inherit from Object.
+
+  It will only box when absolutely necessary, i.e. int -> Integer[], but
+  int[] -> int[].
+
+  Remarks:
+    Since Java generics cannot be instantiated with primitives, this version
+    will use boxed types when absolutely required.
+
+  Returns:
+    The string representing the boxed Java type.
+  """
+  unboxed_type = jtype_unboxed(entry)
+  return _jtype_box(unboxed_type)
+
def _is_jtype_generic(entry):
  """
  Determine whether or not the Java type represented by the entry type
  string and/or typedef is a Java generic.

  For example, "Range<Integer>" would be considered a generic, whereas
  a "MeteringRectangle" or a plain "Integer" would not be considered a generic.

  Args:
    entry: An instance of an Entry node

  Returns:
    True if it's a java generic, False otherwise.
  """
  if not entry.typedef:
    return False

  local_typedef = _jtypedef_type(entry)
  if not local_typedef:
    return False

  # A '<...>' anywhere in the typedef name marks it as a generic type.
  return re.search(r'<.*>', local_typedef) is not None
+
+def _jtype_primitive(what):
+  """
+  Calculate the Java type from an entry type string.
+
+  Remarks:
+    Makes a special exception for Rational, since it's a primitive in terms of
+    the C-library camera_metadata type system.
+
+  Returns:
+    The string representing the primitive type
+  """
+  mapping = {
+    'int32': 'int',
+    'int64': 'long',
+    'float': 'float',
+    'double': 'double',
+    'byte': 'byte',
+    'rational': 'Rational'
+  }
+
+  try:
+    return mapping[what]
+  except KeyError as e:
+    raise ValueError("Can't map '%s' to a primitive, not supported" %what)
+
def jclass(entry):
  """
  Calculate the java Class reference string for an entry.

  Args:
    entry: an Entry node

  Example:
    <entry name="some_int" type="int32"/>
    <entry name="some_int_array" type="int32" container='array'/>

    jclass(some_int) == 'int.class'
    jclass(some_int_array) == 'int[].class'

  Returns:
    The ClassName.class string
  """
  # Use the unboxed type so primitives stay primitives (int.class, not
  # Integer.class); arrays are already part of the unboxed type string.
  java_type = jtype_unboxed(entry)
  return java_type + '.class'
+
def jkey_type_token(entry):
  """
  Calculate the java type token compatible with a Key constructor.
  This will be the Java Class<T> for non-generic classes, and a
  TypeReference<T> for generic classes.

  Args:
    entry: An entry node

  Returns:
    The ClassName.class string, or 'new TypeReference<ClassName>() {{ }}' string
  """
  # Generic types can't be expressed as Class<T> tokens due to type erasure,
  # so they are wrapped in an anonymous TypeReference subclass instead.
  if _is_jtype_generic(entry):
    return "new TypeReference<%s>() {{ }}" % jtype_boxed(entry)
  return jclass(entry)
+
def jidentifier(what):
  """
  Convert the input string into a valid Java identifier.

  Java identifiers may not begin with a digit, so such names are prefixed
  with an underscore.

  Args:
    what: any identifier string

  Returns:
    The input with a leading underscore added if it would otherwise start
    with a digit; the input unchanged otherwise.
  """
  # Raw string: "\d" as a plain literal is a deprecated escape sequence
  # (SyntaxWarning on recent Python); r"\d" is the intended regex digit class.
  if re.match(r"\d", what):
    return "_%s" %what
  else:
    return what
+
def enum_calculate_value_string(enum_value):
  """
  Calculate the value of the enum, even if it does not have one explicitly
  defined.

  This looks back for the first enum value that has a predefined value and then
  applies addition until we get the right value, using C-enum semantics.

  Args:
    enum_value: an EnumValue node with a valid Enum parent

  Example:
    <enum>
      <value>X</value>
      <value id="5">Y</value>
      <value>Z</value>
    </enum>

    enum_calculate_value_string(X) == '0'
    enum_calculate_value_string(Y) == '5'
    enum_calculate_value_string(Z) == '6'

  Returns:
    String that represents the enum value as an integer literal.
  """
  siblings = list(enum_value.parent.values)
  index = siblings.index(enum_value)

  def looks_like_hex(literal):
    return re.match('0x[a-f0-9]+$', literal, re.IGNORECASE) is not None

  # Walk backwards to the nearest sibling with an explicit id, counting
  # how far away it is (C-enum semantics: each step adds one).
  offset = 0
  anchor_id = siblings[index].id
  while index != 0 and not anchor_id:
    index -= 1
    offset += 1
    anchor_id = siblings[index].id

  if anchor_id:
    # base 0 lets int() guess decimal vs hex from the literal's prefix
    value = int(anchor_id, 0) + offset
    if looks_like_hex(anchor_id):
      # Preserve the hex spelling of the anchor value
      return "0x%X" % value
    return "%d" % value

  # No explicit id anywhere before us: values start counting from 0.
  return "%d" % offset
+
def enumerate_with_last(iterable):
  """
  Enumerate a sequence of iterable, while knowing if this element is the last
  in the sequence or not.

  Args:
    iterable: an Iterable of some sequence

  Yields:
    (element, bool) where the bool is True iff the element is last in the seq.

  Remarks:
    An empty iterable yields nothing. The original implementation let the
    first next() raise StopIteration out of this generator, which under
    PEP 479 (Python 3.7+) is converted to RuntimeError; we catch it
    explicitly so the empty case simply yields nothing on all versions.
  """
  it = iter(iterable)

  try:
    prev = next(it)
  except StopIteration:
    return  # empty input: nothing to yield

  for current in it:
    # Another element follows, so 'prev' is not last.
    yield (prev, False)
    prev = current

  # Input exhausted: 'prev' is the final element.
  yield (prev, True)
+
def pascal_case(what):
  """
  Convert the first letter of a string to uppercase, to make the identifier
  conform to PascalCase.

  If there are dots, remove the dots, and capitalize the letter following
  where the dot was. Letters that weren't following dots are left unchanged,
  except for the first letter of the string (which is made upper-case).

  Args:
    what: a string representing some identifier

  Returns:
    String with first letter capitalized

  Example:
    pascal_case("helloWorld") == "HelloWorld"
    pascal_case("foo") == "Foo"
    pascal_case("hello.world") = "HelloWorld"
    pascal_case("fooBar.fooBar") = "FooBarFooBar"
  """
  # Capitalize the first character of each dot-separated part, then glue the
  # parts back together with the dots removed.
  parts = what.split('.')
  return "".join(part[:1].upper() + part[1:] for part in parts)
+
def jkey_identifier(what):
  """
  Return a Java identifier from a property name.

  Args:
    what: a string representing a property name.

  Returns:
    Java identifier corresponding to the property name. May need to be
    prepended with the appropriate Java class name by the caller of this
    function. Note that the outer namespace is stripped from the property
    name.

  Example:
    jkey_identifier("android.lens.facing") == "LENS_FACING"
  """
  # Strip everything up to and including the first '.', then C-symbolize.
  # (With no '.', split returns the whole string, matching find() == -1.)
  return csym(what.split('.', 1)[-1])
+
def jenum_value(enum_entry, enum_value):
  """
  Calculate the Java name for an integer enum value

  Args:
    enum_entry: An enum-typed Entry node
    enum_value: An EnumValue node for the enum

  Returns:
    String representing the Java symbol
  """
  # Drop the leading component (up to the first '_') of the C symbol for the
  # entry, then append the value's own name.
  entry_csym = csym(enum_entry.name)
  prefix = entry_csym[entry_csym.find('_') + 1:]
  return '%s_%s' % (prefix, enum_value.name)
+
def generate_extra_javadoc_detail(entry):
  """
  Returns a function to add extra details for an entry into a string for inclusion into
  javadoc. Adds information about units, the list of enum values for this key, and the valid
  range.

  Args:
    entry: a metadata Entry node

  Returns:
    A (string)->string closure that appends the extra detail sections to the
    text it is given and returns the result.
  """
  def inner(text):
    # Units section; dedented so markdown doesn't render it as a code block
    if entry.units:
      text += '\n\n<b>Units</b>: %s\n' % (dedent(entry.units))
    # Enum value list -- skipped when a Java typedef overrides the enum
    # representation (the typedef's own docs cover the values then)
    if entry.enum and not (entry.typedef and entry.typedef.languages.get('java')):
      text += '\n\n<b>Possible values:</b>\n<ul>\n'
      for value in entry.enum.values:
        if not value.hidden:
          text += '  <li>{@link #%s %s}</li>\n' % ( jenum_value(entry, value ), value.name )
      text += '</ul>\n'
    # Range section; enums get "available values" wording, everything else a
    # numeric "range of valid values"
    if entry.range:
      if entry.enum and not (entry.typedef and entry.typedef.languages.get('java')):
        text += '\n\n<b>Available values for this device:</b><br>\n'
      else:
        text += '\n\n<b>Range of valid values:</b><br>\n'
      text += '%s\n' % (dedent(entry.range))
    # Hardware-level availability notes
    if entry.hwlevel != 'legacy': # covers any of (None, 'limited', 'full')
      text += '\n\n<b>Optional</b> - This value may be {@code null} on some devices.\n'
    if entry.hwlevel == 'full':
      text += \
        '\n<b>Full capability</b> - \n' + \
        'Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the\n' + \
        'android.info.supportedHardwareLevel key\n'
    if entry.hwlevel == 'limited':
      text += \
        '\n<b>Limited capability</b> - \n' + \
        'Present on all camera devices that report being at least {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED HARDWARE_LEVEL_LIMITED} devices in the\n' + \
        'android.info.supportedHardwareLevel key\n'
    if entry.hwlevel == 'legacy':
      text += "\nThis key is available on all devices."

    return text
  return inner
+
+
def javadoc(metadata, indent = 4):
  """
  Returns a function to format a markdown syntax text block as a
  javadoc comment section, given a set of metadata

  Args:
    metadata: A Metadata instance, representing the the top-level root
      of the metadata for cross-referencing
    indent: baseline level of indentation for javadoc block
  Returns:
    A function that transforms a String text block as follows:
    - Indent and * for insertion into a Javadoc comment block
    - Trailing whitespace removed
    - Entire body rendered via markdown to generate HTML
    - All tag names converted to appropriate Javadoc {@link} with @see
      for each tag

  Example:
    "This is a comment for Javadoc\n" +
    "     with multiple lines, that should be   \n" +
    "     formatted better\n" +
    "\n" +
    "    That covers multiple lines as well\n"
    "    And references android.control.mode\n"

    transforms to
    "    * <p>This is a comment for Javadoc\n" +
    "    * with multiple lines, that should be\n" +
    "    * formatted better</p>\n" +
    "    * <p>That covers multiple lines as well</p>\n" +
    "    * and references {@link CaptureRequest#CONTROL_MODE android.control.mode}\n" +
    "    *\n" +
    "    * @see CaptureRequest#CONTROL_MODE\n"
  """
  def javadoc_formatter(text):
    # e.g. indent=4 gives "     * " -- the body prefix of a javadoc comment
    comment_prefix = " " * indent + " * ";

    # render with markdown => HTML
    javatext = md(text, JAVADOC_IMAGE_SRC_METADATA)

    # Identity transform for javadoc links
    def javadoc_link_filter(target, shortname):
      return '{@link %s %s}' % (target, shortname)

    javatext = filter_links(javatext, javadoc_link_filter)

    # Crossref tag names: map a metadata kind to the Java class holding its keys
    kind_mapping = {
        'static': 'CameraCharacteristics',
        'dynamic': 'CaptureResult',
        'controls': 'CaptureRequest' }

    # Convert metadata entry "android.x.y.z" to form
    # "{@link CaptureRequest#X_Y_Z android.x.y.z}"
    def javadoc_crossref_filter(node):
      if node.applied_visibility == 'public':
        return '{@link %s#%s %s}' % (kind_mapping[node.kind],
                                     jkey_identifier(node.name),
                                     node.name)
      else:
        # Non-public keys have no Java symbol to link to; leave plain text
        return node.name

    # For each public tag "android.x.y.z" referenced, add a
    # "@see CaptureRequest#X_Y_Z"
    def javadoc_crossref_see_filter(node_set):
      node_set = (x for x in node_set if x.applied_visibility == 'public')

      text = '\n'
      for node in node_set:
        text = text + '\n@see %s#%s' % (kind_mapping[node.kind],
                                      jkey_identifier(node.name))

      # Return '' when no public nodes were referenced (no @see section)
      return text if text != '\n' else ''

    javatext = filter_tags(javatext, metadata, javadoc_crossref_filter, javadoc_crossref_see_filter)

    def line_filter(line):
      # Indent each line
      # Add ' * ' to it for stylistic reasons
      # Strip right side of trailing whitespace
      return (comment_prefix + line).rstrip()

    # Process each line with above filter
    javatext = "\n".join(line_filter(i) for i in javatext.split("\n")) + "\n"

    return javatext

  return javadoc_formatter
+
def dedent(text):
  """
  Remove all common indentation from every line but the 0th.
  This will avoid getting <code> blocks when rendering text via markdown.
  Ignoring the 0th line will also allow the 0th line not to be aligned.

  Args:
    text: A string of text to dedent.

  Returns:
    String dedented by above rules.

  For example:
    assertEquals("bar\nline1\nline2",   dedent("bar\n  line1\n  line2"))
    assertEquals("bar\nline1\nline2",   dedent(" bar\n  line1\n  line2"))
    assertEquals("bar\n  line1\nline2", dedent(" bar\n    line1\n  line2"))
  """
  # First pass: strip whatever indentation is common to ALL lines.
  lines = textwrap.dedent(text).split('\n')
  # Second pass: dedent the remainder without the 0th line, so a short or
  # unindented first line doesn't prevent the rest from being dedented.
  head, rest = lines[0], '\n'.join(lines[1:])
  return head + '\n' + textwrap.dedent(rest)
+
def md(text, img_src_prefix=""):
    """
    Run text through markdown to produce HTML.

    This also removes all common indentation from every line but the 0th.
    This will avoid getting <code> blocks in markdown.
    Ignoring the 0th line will also allow the 0th line not to be aligned.

    Args:
      text: A markdown-syntax using block of text to format.
      img_src_prefix: An optional string to prepend to each <img src="target"/>

    Returns:
      String rendered by markdown and other rules applied (see above).

    For example, this avoids the following situation:

      <!-- Input -->

      <!--- can't use dedent directly since 'foo' has no indent -->
      <notes>foo
          bar
          bar
      </notes>

      <!-- Bad Output -- >
      <!-- if no dedent is done generated code looks like -->
      <p>foo
        <code><pre>
          bar
          bar</pre></code>
      </p>

    Instead we get the more natural expected result:

      <!-- Good Output -->
      <p>foo
      bar
      bar</p>

    """
    text = dedent(text)

    # full list of extensions at http://pythonhosted.org/Markdown/extensions/
    md_extensions = ['tables'] # make <table> with ASCII |_| tables
    # render with markdown; pass extensions by keyword -- the positional
    # 'extensions' argument was removed from markdown.markdown() in
    # Python-Markdown 3.x (the keyword form works in 2.x as well)
    text = markdown.markdown(text, extensions=md_extensions)

    # prepend a prefix to each <img src="foo"> -> <img src="${prefix}foo">
    text = re.sub(r'src="([^"]*)"', 'src="' + img_src_prefix + r'\1"', text)
    return text
+
def filter_tags(text, metadata, filter_function, summary_function = None):
    """
    Find all references to tags in the form outer_namespace.xxx.yyy[.zzz] in
    the provided text, and pass them through filter_function and summary_function.

    Used to linkify entry names in HTML, javadoc output.

    Args:
      text: A string representing a block of text destined for output
      metadata: A Metadata instance, the root of the metadata properties tree
      filter_function: A Node->string function to apply to each node
        when found in text; the string returned replaces the tag name in text.
      summary_function: A Node list->string function that is provided the list of
        unique tag nodes found in text, and which must return a string that is
        then appended to the end of the text. The list is sorted alphabetically
        by node name.

    Returns:
      The processed text, with the summary (if any) appended.
    """

    tag_set = set()
    def name_match(name):
      return lambda node: node.name == name

    # Match outer_namespace.x.y or outer_namespace.x.y.z, making sure
    # to grab .z and not just outer_namespace.x.y.  (sloppy, but since we
    # check for validity, a few false positives don't hurt).
    # Try to ignore items of the form {@link <outer_namespace>...
    for outer_namespace in metadata.outer_namespaces:

      tag_match = r"(?<!\{@link\s)" + outer_namespace.name + \
        r"\.([a-zA-Z0-9\n]+)\.([a-zA-Z0-9\n]+)(\.[a-zA-Z0-9\n]+)?([/]?)"

      def filter_sub(match):
        whole_match = match.group(0)
        section1 = match.group(1)
        section2 = match.group(2)
        section3 = match.group(3)
        end_slash = match.group(4)

        # Don't linkify things ending in slash (urls, for example)
        if end_slash:
          return whole_match

        candidate = ""

        # First try a two-level match
        candidate2 = "%s.%s.%s" % (outer_namespace.name, section1, section2)
        got_two_level = False

        node = metadata.find_first(name_match(candidate2.replace('\n','')))
        if not node and '\n' in section2:
          # Linefeeds are ambiguous - was the intent to add a space,
          # or continue a lengthy name? Try the former now.
          candidate2b = "%s.%s.%s" % (outer_namespace.name, section1, section2[:section2.find('\n')])
          node = metadata.find_first(name_match(candidate2b))
          if node:
            candidate2 = candidate2b

        if node:
          # Have two-level match
          got_two_level = True
          candidate = candidate2
        elif section3:
          # Try three-level match
          candidate3 = "%s%s" % (candidate2, section3)
          node = metadata.find_first(name_match(candidate3.replace('\n','')))

          if not node and '\n' in section3:
            # Linefeeds are ambiguous - was the intent to add a space,
            # or continue a lengthy name? Try the former now.
            candidate3b = "%s%s" % (candidate2, section3[:section3.find('\n')])
            node = metadata.find_first(name_match(candidate3b))
            if node:
              candidate3 = candidate3b

          if node:
            # Have 3-level match
            candidate = candidate3

        # Replace match with crossref or complain if a likely match couldn't be matched

        if node:
          tag_set.add(node)
          return whole_match.replace(candidate,filter_function(node))
        else:
          # sys.stderr.write works under both Python 2 and 3; the original
          # "print >> sys.stderr" statement is a syntax error on Python 3.
          sys.stderr.write(
            "  WARNING: Could not crossref likely reference {%s}\n" % (match.group(0)))
          return whole_match

      text = re.sub(tag_match, filter_sub, text)

    if summary_function is not None:
      return text + summary_function(sorted(tag_set, key=lambda x: x.name))
    else:
      return text
+
def filter_links(text, filter_function, summary_function = None):
    """
    Find all references to tags in the form {@link xxx#yyy [zzz]} in the
    provided text, and pass them through filter_function and
    summary_function.

    Used to linkify documentation cross-references in HTML, javadoc output.

    Args:
      text: A string representing a block of text destined for output
      filter_function: A (string, string)->string function to apply to each 'xxx#yyy',
        zzz pair when found in text; the string returned replaces the tag name in text.
      summary_function: A string list->string function that is provided the list of
        unique targets found in text, and which must return a string that is
        then appended to the end of the text. The list is sorted alphabetically
        by target name.

    Returns:
      The processed text, with the summary (if any) appended.
    """

    target_set = set()

    # {@link <target> [<shortname>]} -- target is everything up to the first
    # whitespace, shortname (optional) is the rest up to the closing brace.
    tag_match = r"\{@link\s+([^\s\}]+)([^\}]*)\}"

    def filter_sub(match):
      target = match.group(1)
      shortname = match.group(2).strip()

      # Replace the match with its crossref, recording the target so the
      # summary can list every unique link.
      target_set.add(target)
      return filter_function(target, shortname)

    text = re.sub(tag_match, filter_sub, text)

    if summary_function is not None:
      return text + summary_function(sorted(target_set))
    else:
      return text
+
def any_visible(section, kind_name, visibilities):
  """
  Determine if entries in this section have an applied visibility that's in
  the list of given visibilities.

  Args:
    section: A section of metadata
    kind_name: A name of the kind, i.e. 'dynamic' or 'static' or 'controls'
    visibilities: An iterable of visibilities to match against

  Returns:
    True if the section has any entries with any of the given visibilities. False otherwise.
  """
  # Check entries nested in inner namespaces first.
  inner_namespaces = get_children_by_filtering_kind(section, kind_name,
                                                    'namespaces')
  for namespace in inner_namespaces:
    if any(filter_visibility(namespace.merged_entries, visibilities)):
      return True

  # Then check the section's direct entries.
  direct_entries = get_children_by_filtering_kind(section, kind_name,
                                                  'merged_entries')
  return any(filter_visibility(direct_entries, visibilities))
+
+
def filter_visibility(entries, visibilities):
  """
  Remove entries whose applied visibility is not in the supplied visibilities.

  Args:
    entries: An iterable of Entry nodes
    visibilities: An iterable of visibilities to filter against

  Yields:
    Entry nodes whose applied_visibility is in visibilities.
  """
  for entry in entries:
    if entry.applied_visibility in visibilities:
      yield entry
+
def remove_synthetic(entries):
  """
  Filter the given entries by removing those that are synthetic.

  Args:
    entries: An iterable of Entry nodes

  Yields:
    Entry nodes whose 'synthetic' attribute is falsy.
  """
  for entry in entries:
    if not entry.synthetic:
      yield entry
+
def wbr(text):
  """
  Insert word break hints for the browser in the form of <wbr> HTML tags.

  Word breaks are inserted inside an HTML node only, so the nodes themselves
  will not be changed. Attributes are also left unchanged.

  The following rules apply to insert word breaks:
  - For characters in [ '.', '/', '_' ]
  - For uppercase letters inside a multi-word X.Y.Z (at least 3 parts)

  Args:
    text: A string of text containing HTML content.

  Returns:
    A string with <wbr> inserted by the above rules.
  """
  SPLIT_CHARS_LIST = ['.', '_', '/']
  # NOTE(review): inside a character class '|' is a literal, so this class
  # also matches '|' and ',' -- presumably unintentional but harmless;
  # confirm before tightening the pattern.
  SPLIT_CHARS = r'([.|/|_/,]+)' # split by these characters
  CAP_LETTER_MIN = 3 # at least 3 components split by above chars, i.e. x.y.z
  def wbr_filter(text):
      new_txt = text

      # for johnyOrange.appleCider.redGuardian also insert wbr before the caps
      # => johny<wbr>Orange.apple<wbr>Cider.red<wbr>Guardian
      for words in text.split(" "):
        for char in SPLIT_CHARS_LIST:
          # match at least x.y.z, don't match x or x.y
          if len(words.split(char)) >= CAP_LETTER_MIN:
            new_word = re.sub(r"([a-z])([A-Z])", r"\1<wbr>\2", words)
            new_txt = new_txt.replace(words, new_word)

      # e.g. X/Y/Z -> X/<wbr>Y/<wbr>/Z. also for X.Y.Z, X_Y_Z.
      new_txt = re.sub(SPLIT_CHARS, r"\1<wbr>", new_txt)

      return new_txt

  # Do not mangle HTML when doing the replace by using BeatifulSoup
  # - Use the 'html.parser' to avoid inserting <html><body> when decoding
  soup = bs4.BeautifulSoup(text, features='html.parser')
  wbr_tag = lambda: soup.new_tag('wbr') # must generate new tag every time

  # Only text nodes are rewritten; tags and attributes are left intact.
  for navigable_string in soup.findAll(text=True):
      # NOTE(review): 'parent' is never used below -- appears vestigial.
      parent = navigable_string.parent

      # Insert each '$text<wbr>$foo' before the old '$text$foo'
      split_by_wbr_list = wbr_filter(navigable_string).split("<wbr>")
      for (split_string, last) in enumerate_with_last(split_by_wbr_list):
          navigable_string.insert_before(split_string)

          if not last:
            # Note that 'insert' will move existing tags to this spot
            # so make a new tag instead
            navigable_string.insert_before(wbr_tag())

      # Remove the old unmodified text
      navigable_string.extract()

  return soup.decode()
diff --git a/media/camera/docs/metadata_helpers_test.py b/media/camera/docs/metadata_helpers_test.py
new file mode 100644
index 0000000..4264c6e
--- /dev/null
+++ b/media/camera/docs/metadata_helpers_test.py
@@ -0,0 +1,217 @@
+import unittest
+import itertools
+from unittest import TestCase
+from metadata_model import *
+from metadata_helpers import *
+from metadata_parser_xml import *
+
# Simple test metadata block used by the tests below.
# Two outer namespaces, each with one section containing controls, dynamic
# (including a clone of a control) and static entries; the second section
# nests its static entries inside an inner namespace to exercise that path.
test_metadata_xml = \
'''
<?xml version="1.0" encoding="utf-8"?>
<metadata xmlns="http://schemas.android.com/service/camera/metadata/"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata_properties.xsd">

<namespace name="testOuter1">
  <section name="testSection1">
    <controls>
      <entry name="control1" type="byte" visibility="public">
      </entry>
      <entry name="control2" type="byte" visibility="public">
      </entry>
    </controls>
    <dynamic>
      <entry name="dynamic1" type="byte" visibility="public">
      </entry>
      <entry name="dynamic2" type="byte" visibility="public">
      </entry>
      <clone entry="testOuter1.testSection1.control1" kind="controls">
      </clone>
    </dynamic>
    <static>
      <entry name="static1" type="byte" visibility="public">
      </entry>
      <entry name="static2" type="byte" visibility="public">
      </entry>
    </static>
  </section>
</namespace>
<namespace name="testOuter2">
  <section name="testSection2">
    <controls>
      <entry name="control1" type="byte" visibility="public">
      </entry>
      <entry name="control2" type="byte" visibility="public">
      </entry>
    </controls>
    <dynamic>
      <entry name="dynamic1" type="byte" visibility="public">
      </entry>
      <entry name="dynamic2" type="byte" visibility="public">
      </entry>
      <clone entry="testOuter2.testSection2.control1" kind="controls">
      </clone>
    </dynamic>
    <static>
      <namespace name="testInner2">
        <entry name="static1" type="byte" visibility="public">
        </entry>
        <entry name="static2" type="byte" visibility="public">
        </entry>
      </namespace>
    </static>
  </section>
</namespace>
</metadata>
'''
+
class TestHelpers(TestCase):
  """Unit tests for the helper functions in metadata_helpers."""

  def test_enum_calculate_value_string(self):
    # assertEquals is a deprecated alias (removed in Python 3.12);
    # use assertEqual throughout.
    def compare_values_against_list(expected_list, enum):
      for (idx, val) in enumerate(expected_list):
        self.assertEqual(val,
                         enum_calculate_value_string(list(enum.values)[idx]))

    plain_enum = Enum(parent=None, values=['ON', 'OFF'])

    compare_values_against_list(['0', '1'],
                                plain_enum)

    ###
    labeled_enum = Enum(parent=None, values=['A', 'B', 'C'], ids={
      'A': '12345',
      'B': '0xC0FFEE',
      'C': '0xDEADF00D'
    })

    compare_values_against_list(['12345', '0xC0FFEE', '0xDEADF00D'],
                                labeled_enum)

    ###
    mixed_enum = Enum(parent=None,
                      values=['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H'],
                      ids={
                        'C': '0xC0FFEE',
                        'E': '123',
                        'G': '0xDEADF00D'
                      })

    expected_values = ['0', '1', '0xC0FFEE', '0xC0FFEF', '123', '124',
                       '0xDEADF00D',
                       '0xDEADF00E']

    compare_values_against_list(expected_values, mixed_enum)

  def test_enumerate_with_last(self):
    empty_list = []

    for (x, y) in enumerate_with_last(empty_list):
      self.fail("Should not return anything for empty list")

    single_value = [1]
    for (x, last) in enumerate_with_last(single_value):
      self.assertEqual(1, x)
      self.assertEqual(True, last)

    multiple_values = [4, 5, 6]
    lst = list(enumerate_with_last(multiple_values))
    self.assertListEqual([(4, False), (5, False), (6, True)], lst)

  def test_filter_tags(self):
    metadata = MetadataParserXml(test_metadata_xml, 'metadata_helpers_test.py').metadata

    test_text = \
'''
In the unlikely event of a
water landing, testOuter1.testSection1.control1 will deploy.
If testOuter2.testSection2.testInner2.static1,
then testOuter1.testSection1.
dynamic1 will ensue. That should be avoided if testOuter2.testSection2.
Barring issues, testOuter1.testSection1.dynamic1, and testOuter2.testSection2.control1.
In the third instance of testOuter1.testSection1.control1
we will take the other option.
If the path foo/android.testOuter1.testSection1.control1/bar.txt exists, then oh well.
'''
    def filter_test(node):
      return '*'

    def summary_test(node_set):
      text = "*" * len(node_set) + "\n"
      return text

    expected_text = \
'''
In the unlikely event of a
water landing, * will deploy.
If *,
then * will ensue. That should be avoided if testOuter2.testSection2.
Barring issues, *, and *.
In the third instance of *
we will take the other option.
If the path foo/android.testOuter1.testSection1.control1/bar.txt exists, then oh well.
****
'''
    result_text = filter_tags(test_text, metadata, filter_test, summary_test)

    self.assertEqual(result_text, expected_text)

  def test_wbr(self):
    wbr_string = "<wbr/>"
    wbr_gen = itertools.repeat(wbr_string)

    # No special characters, do nothing
    self.assertEqual("no-op", wbr("no-op"))
    # Insert WBR after characters in [ '.', '/', '_' ]
    self.assertEqual("word.{0}".format(wbr_string), wbr("word."))
    self.assertEqual("word/{0}".format(wbr_string), wbr("word/"))
    self.assertEqual("word_{0}".format(wbr_string), wbr("word_"))

    self.assertEqual("word.{0}break".format(wbr_string), wbr("word.break"))
    self.assertEqual("word/{0}break".format(wbr_string), wbr("word/break"))
    self.assertEqual("word_{0}break".format(wbr_string), wbr("word_break"))

    # Test words with more components
    self.assertEqual("word_{0}break_{0}again".format(wbr_string),
                     wbr("word_break_again"))
    self.assertEqual("word_{0}break_{0}again_{0}emphasis".format(wbr_string),
                     wbr("word_break_again_emphasis"))

    # Words with 2 or less subcomponents are ignored for the capital letters
    self.assertEqual("word_{0}breakIgnored".format(wbr_string),
                     wbr("word_breakIgnored"))
    self.assertEqual("wordIgnored".format(wbr_string),
                     wbr("wordIgnored"))

    # Words with at least 3 sub components get word breaks before caps
    self.assertEqual("word_{0}break_{0}again{0}Capitalized".format(wbr_string),
                     wbr("word_break_againCapitalized"))
    self.assertEqual("word.{0}break.{0}again{0}Capitalized".format(wbr_string),
                     wbr("word.break.againCapitalized"))
    self.assertEqual("a.{0}b{0}C.{0}d{0}E.{0}f{0}G".format(wbr_string),
                     wbr("a.bC.dE.fG"))

    # Don't be overly aggressive with all caps
    self.assertEqual("TRANSFORM_{0}MATRIX".format(wbr_string),
                     wbr("TRANSFORM_MATRIX"))

    self.assertEqual("SCENE_{0}MODE_{0}FACE_{0}PRIORITY".format(wbr_string),
                     wbr("SCENE_MODE_FACE_PRIORITY"))

    self.assertEqual("android.{0}color{0}Correction.{0}mode is TRANSFORM_{0}MATRIX.{0}".format(wbr_string),
                     wbr("android.colorCorrection.mode is TRANSFORM_MATRIX."))

    self.assertEqual("The overrides listed for SCENE_{0}MODE_{0}FACE_{0}PRIORITY are ignored".format(wbr_string),
                     wbr("The overrides listed for SCENE_MODE_FACE_PRIORITY are ignored"))

  def test_dedent(self):
    # Remove whitespace from 2nd and 3rd line (equal ws)
    self.assertEqual("bar\nline1\nline2", dedent("bar\n  line1\n  line2"))
    # Remove whitespace from all lines (1st line ws < 2/3 line ws)
    self.assertEqual("bar\nline1\nline2", dedent(" bar\n  line1\n  line2"))
    # Remove some whitespace from 2nd line, all whitespace from other lines
    self.assertEqual("bar\n  line1\nline2", dedent(" bar\n    line1\n  line2"))
+
# Run all TestHelpers cases when this file is executed directly.
if __name__ == '__main__':
    unittest.main()
diff --git a/media/camera/docs/metadata_model.py b/media/camera/docs/metadata_model.py
new file mode 100644
index 0000000..315c97c
--- /dev/null
+++ b/media/camera/docs/metadata_model.py
@@ -0,0 +1,1485 @@
+#!/usr/bin/python
+
+#
+# Copyright (C) 2012 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""
+A set of classes (models) each closely representing an XML node in the
+metadata_properties.xml file.
+
+  Node: Base class for most nodes.
+  Entry: A node corresponding to <entry> elements.
+  Clone: A node corresponding to <clone> elements.
+  MergedEntry: A node corresponding to either <entry> or <clone> elements.
+  Kind: A node corresponding to <dynamic>, <static>, <controls> elements.
+  InnerNamespace: A node corresponding to a <namespace> nested under a <kind>.
+  OuterNamespace: A node corresponding to a <namespace> with <kind> children.
+  Section: A node corresponding to a <section> element.
+  Enum: A class corresponding to an <enum> element within an <entry>
+  EnumValue: A class corresponding to a <value> element within an Enum
+  Metadata: Root node that also provides tree construction functionality.
+  Tag: A node corresponding to a top level <tag> element.
+  Typedef: A node corresponding to a <typedef> element under <types>.
+"""
+
+import sys
+import itertools
+from collections import OrderedDict
+
+class Node(object):
+  """
+  Base class for most nodes that are part of the Metadata graph.
+
+  Attributes (Read-Only):
+    parent: An edge to a parent Node.
+    name: A string describing the name, usually but not always the 'name'
+          attribute of the corresponding XML node.
+  """
+
+  def __init__(self):
+    self._parent = None
+    self._name = None
+
+  @property
+  def parent(self):
+    return self._parent
+
+  @property
+  def name(self):
+    return self._name
+
+  def find_all(self, pred):
+    """
+    Find all descendants that match the predicate.
+
+    Args:
+      pred: a predicate function that acts as a filter for a Node
+
+    Yields:
+      A sequence of all descendants for which pred(node) is true,
+      in a pre-order visit order.
+    """
+    if pred(self):
+      yield self
+
+    # Leaf node types return None from _get_children; stop recursing there.
+    if self._get_children() is None:
+      return
+
+    for i in self._get_children():
+      for j in i.find_all(pred):
+        yield j
+
+  def find_first(self, pred):
+    """
+    Find the first descendant that matches the predicate.
+
+    Args:
+      pred: a predicate function that acts as a filter for a Node
+
+    Returns:
+      The first Node from find_all(pred), or None if there were no results.
+    """
+    # Take the first yielded element of the pre-order traversal, if any.
+    for i in self.find_all(pred):
+      return i
+
+    return None
+
+  def find_parent_first(self, pred):
+    """
+    Find the first ancestor that matches the predicate.
+
+    Args:
+      pred: A predicate function that acts as a filter for a Node
+
+    Returns:
+      The first ancestor closest to the node for which pred(node) is true.
+    """
+    for i in self.find_parents(pred):
+      return i
+
+    return None
+
+  def find_parents(self, pred):
+    """
+    Find all ancestors that match the predicate.
+
+    Args:
+      pred: A predicate function that acts as a filter for a Node
+
+    Yields:
+      A sequence of all ancestors (closest to furthest) from the node,
+      where pred(node) is true.
+    """
+    parent = self.parent
+
+    # Walk the parent chain until the root (parent is None).
+    while parent is not None:
+      if pred(parent):
+        yield parent
+      parent = parent.parent
+
+  def sort_children(self):
+    """
+    Sorts the immediate children in-place.
+    """
+    self._sort_by_name(self._children)
+
+  def _sort_by_name(self, what):
+    what.sort(key=lambda x: x.name)
+
+  # Returns a key function (x -> x.name) for use with sort(key=...);
+  # note it does NOT return this node's own name.
+  def _get_name(self):
+    return lambda x: x.name
+
+  # Iterate over all children nodes. None when node doesn't support children.
+  def _get_children(self):
+    return (i for i in self._children)
+
+  # Build a {name: child} dict of the children accepted by the match filter.
+  # Later children overwrite earlier ones that share the same name.
+  def _children_name_map_matching(self, match=lambda x: True):
+    d = {}
+    for i in self._get_children():
+      if match(i):
+        d[i.name] = i
+    return d
+
+  @staticmethod
+  def _dictionary_by_name(values):
+    # OrderedDict preserves the original iteration order of 'values'.
+    d = OrderedDict()
+    for i in values:
+      d[i.name] = i
+
+    return d
+
+  def validate_tree(self):
+    """
+    Sanity check the tree recursively, ensuring for a node n, all children's
+    parents are also n.
+
+    Returns:
+      True if validation succeeds, False otherwise.
+    """
+    succ = True
+    children = self._get_children()
+    if children is None:
+      return True
+
+    for child in self._get_children():
+      if child.parent != self:
+        # NOTE(review): Python 2 print-to-stderr syntax; also the two
+        # concatenated message fragments lack a separating space
+        # ("parent(expected").
+        print >> sys.stderr, ("ERROR: Node '%s' doesn't match the parent" +    \
+                             "(expected: %s, actual %s)")                      \
+                             %(child, self, child.parent)
+        succ = False
+
+      succ = child.validate_tree() and succ
+
+    return succ
+
+  def __str__(self):
+    return "<%s name='%s'>" %(self.__class__, self.name)
+
+class Metadata(Node):
+  """
+  A node corresponding to a <metadata> entry.
+
+  Attributes (Read-Only):
+    parent: An edge to the parent Node. This is always None for Metadata.
+    outer_namespaces: A sequence of immediate OuterNamespace children.
+    tags: A sequence of all Tag instances available in the graph.
+    types: An iterable of all Typedef instances available in the graph.
+  """
+
+  def __init__(self):
+    """
+    Initialize with no children. Use insert_* functions and then
+    construct_graph() to build up the Metadata from some source.
+    """
+# Private
+    self._entries = []
+    # kind => { name => entry }
+    self._entry_map = { 'static': {}, 'dynamic': {}, 'controls': {} }
+    self._entries_ordered = [] # list of ordered Entry/Clone instances
+    self._clones = []
+
+# Public (Read Only)
+    self._name = None
+    self._parent = None
+    self._outer_namespaces = None
+    self._tags = []
+    self._types = []
+
+  @property
+  def outer_namespaces(self):
+    if self._outer_namespaces is None:
+      return None
+    else:
+      return (i for i in self._outer_namespaces)
+
+  @property
+  def tags(self):
+    return (i for i in self._tags)
+
+  @property
+  def types(self):
+    return (i for i in self._types)
+
+  def _get_properties(self):
+
+    for i in self._entries:
+      yield i
+
+    for i in self._clones:
+      yield i
+
+  def insert_tag(self, tag, description=""):
+    """
+    Insert a tag into the metadata.
+
+    Args:
+      tag: A string identifier for a tag.
+      description: A string description for a tag.
+
+    Example:
+      metadata.insert_tag("BC", "Backwards Compatibility for old API")
+
+    Remarks:
+      Subsequent calls to insert_tag with the same tag are safe (they will
+      be ignored).
+    """
+    tag_ids = [tg.name for tg in self.tags if tg.name == tag]
+    if not tag_ids:
+      self._tags.append(Tag(tag, self, description))
+
+  def insert_type(self, type_name, type_selector="typedef", **kwargs):
+    """
+    Insert a type into the metadata.
+
+    Args:
+      type_name: A type's name
+      type_selector: The selector for the type, e.g. 'typedef'
+
+    Args (if type_selector == 'typedef'):
+      languages: A map of 'language name' -> 'fully qualified class path'
+
+    Example:
+      metadata.insert_type('rectangle', 'typedef',
+                           { 'java': 'android.graphics.Rect' })
+
+    Remarks:
+      Subsequent calls to insert_type with the same type name are safe (they
+      will be ignored)
+    """
+
+    if type_selector != 'typedef':
+      raise ValueError("Unsupported type_selector given " + type_selector)
+
+    type_names = [tp.name for tp in self.types if tp.name == tp]
+    if not type_names:
+      self._types.append(Typedef(type_name, self, kwargs.get('languages')))
+
+  def insert_entry(self, entry):
+    """
+    Insert an entry into the metadata.
+
+    Args:
+      entry: A key-value dictionary describing an entry. Refer to
+             Entry#__init__ for the keys required/optional.
+
+    Remarks:
+      Subsequent calls to insert_entry with the same entry+kind name are safe
+      (they will be ignored).
+    """
+    e = Entry(**entry)
+    self._entries.append(e)
+    self._entry_map[e.kind][e.name] = e
+    self._entries_ordered.append(e)
+
+  def insert_clone(self, clone):
+    """
+    Insert a clone into the metadata.
+
+    Args:
+      clone: A key-value dictionary describing a clone. Refer to
+            Clone#__init__ for the keys required/optional.
+
+    Remarks:
+      Subsequent calls to insert_clone with the same clone+kind name are safe
+      (they will be ignored). Also the target entry need not be inserted
+      ahead of the clone entry.
+    """
+    # figure out corresponding entry later. allow clone insert, entry insert
+    entry = None
+    c = Clone(entry, **clone)
+    self._entry_map[c.kind][c.name] = c
+    self._clones.append(c)
+    self._entries_ordered.append(c)
+
+  def prune_clones(self):
+    """
+    Remove all clones that don't point to an existing entry.
+
+    Remarks:
+      This should be called after all insert_entry/insert_clone calls have
+      finished.
+    """
+    remove_list = []
+    for p in self._clones:
+      if p.entry is None:
+        remove_list.append(p)
+
+    for p in remove_list:
+
+      # remove from parent's entries list
+      if p.parent is not None:
+        p.parent._entries.remove(p)
+      # remove from parents' _leafs list
+      for ancestor in p.find_parents(lambda x: not isinstance(x, Metadata)):
+        ancestor._leafs.remove(p)
+
+      # remove from global list
+      self._clones.remove(p)
+      self._entry_map[p.kind].pop(p.name)
+      self._entries_ordered.remove(p)
+
+
+  # After all entries/clones are inserted,
+  # invoke this to generate the parent/child node graph all these objects
+  def construct_graph(self):
+    """
+    Generate the graph recursively, after which all Entry nodes will be
+    accessible recursively by crawling through the outer_namespaces sequence.
+
+    Remarks:
+      This is safe to be called multiple times at any time. It should be done at
+      least once or there will be no graph.
+    """
+    self.validate_tree()
+    self._construct_tags()
+    self.validate_tree()
+    self._construct_types()
+    self.validate_tree()
+    self._construct_clones()
+    self.validate_tree()
+    self._construct_outer_namespaces()
+    self.validate_tree()
+
+  def _construct_tags(self):
+    tag_dict = self._dictionary_by_name(self.tags)
+    for p in self._get_properties():
+      p._tags = []
+      for tag_id in p._tag_ids:
+        tag = tag_dict.get(tag_id)
+
+        if tag not in p._tags:
+          p._tags.append(tag)
+
+        if p not in tag.entries:
+          tag._entries.append(p)
+
+  def _construct_types(self):
+    type_dict = self._dictionary_by_name(self.types)
+    for p in self._get_properties():
+      if p._type_name:
+        type_node = type_dict.get(p._type_name)
+        p._typedef = type_node
+
+        if p not in type_node.entries:
+          type_node._entries.append(p)
+
+  def _construct_clones(self):
+    for p in self._clones:
+      target_kind = p.target_kind
+      target_entry = self._entry_map[target_kind].get(p.name)
+      p._entry = target_entry
+
+      # should not throw if we pass validation
+      # but can happen when importing obsolete CSV entries
+      if target_entry is None:
+        print >> sys.stderr, ("WARNING: Clone entry '%s' target kind '%s'" +   \
+                              " has no corresponding entry")                   \
+                             %(p.name, p.target_kind)
+
+  def _construct_outer_namespaces(self):
+
+    if self._outer_namespaces is None: #the first time this runs
+      self._outer_namespaces = []
+
+    root = self._dictionary_by_name(self._outer_namespaces)
+    for ons_name, ons in root.iteritems():
+      ons._leafs = []
+
+    for p in self._entries_ordered:
+      ons_name = p.get_outer_namespace()
+      ons = root.get(ons_name, OuterNamespace(ons_name, self))
+      root[ons_name] = ons
+
+      if p not in ons._leafs:
+        ons._leafs.append(p)
+
+    for ons_name, ons in root.iteritems():
+
+      ons.validate_tree()
+
+      self._construct_sections(ons)
+
+      if ons not in self._outer_namespaces:
+        self._outer_namespaces.append(ons)
+
+      ons.validate_tree()
+
+  def _construct_sections(self, outer_namespace):
+
+    sections_dict = self._dictionary_by_name(outer_namespace.sections)
+    for sec_name, sec in sections_dict.iteritems():
+      sec._leafs = []
+      sec.validate_tree()
+
+    for p in outer_namespace._leafs:
+      does_exist = sections_dict.get(p.get_section())
+
+      sec = sections_dict.get(p.get_section(), \
+          Section(p.get_section(), outer_namespace))
+      sections_dict[p.get_section()] = sec
+
+      sec.validate_tree()
+
+      if p not in sec._leafs:
+        sec._leafs.append(p)
+
+    for sec_name, sec in sections_dict.iteritems():
+
+      if not sec.validate_tree():
+        print >> sys.stderr, ("ERROR: Failed to validate tree in " +           \
+                             "construct_sections (start), with section = '%s'")\
+                             %(sec)
+
+      self._construct_kinds(sec)
+
+      if sec not in outer_namespace.sections:
+        outer_namespace._sections.append(sec)
+
+      if not sec.validate_tree():
+        print >> sys.stderr, ("ERROR: Failed to validate tree in " +           \
+                              "construct_sections (end), with section = '%s'") \
+                             %(sec)
+
+  # 'controls', 'static' 'dynamic'. etc
+  def _construct_kinds(self, section):
+    for kind in section.kinds:
+      kind._leafs = []
+      section.validate_tree()
+
+    group_entry_by_kind = itertools.groupby(section._leafs, lambda x: x.kind)
+    leaf_it = ((k, g) for k, g in group_entry_by_kind)
+
+    # allow multiple kinds with the same name. merge if adjacent
+    # e.g. dynamic,dynamic,static,static,dynamic -> dynamic,static,dynamic
+    # this helps maintain ABI compatibility when adding an entry in a new kind
+    for idx, (kind_name, entry_it) in enumerate(leaf_it):
+      if idx >= len(section._kinds):
+        kind = Kind(kind_name, section)
+        section._kinds.append(kind)
+        section.validate_tree()
+
+      kind = section._kinds[idx]
+
+      for p in entry_it:
+        if p not in kind._leafs:
+          kind._leafs.append(p)
+
+    for kind in section._kinds:
+      kind.validate_tree()
+      self._construct_inner_namespaces(kind)
+      kind.validate_tree()
+      self._construct_entries(kind)
+      kind.validate_tree()
+
+      if not section.validate_tree():
+        print >> sys.stderr, ("ERROR: Failed to validate tree in " +           \
+                             "construct_kinds, with kind = '%s'") %(kind)
+
+      if not kind.validate_tree():
+        print >> sys.stderr, ("ERROR: Failed to validate tree in " +           \
+                              "construct_kinds, with kind = '%s'") %(kind)
+
+  def _construct_inner_namespaces(self, parent, depth=0):
+    #parent is InnerNamespace or Kind
+    ins_dict = self._dictionary_by_name(parent.namespaces)
+    for name, ins in ins_dict.iteritems():
+      ins._leafs = []
+
+    for p in parent._leafs:
+      ins_list = p.get_inner_namespace_list()
+
+      if len(ins_list) > depth:
+        ins_str = ins_list[depth]
+        ins = ins_dict.get(ins_str, InnerNamespace(ins_str, parent))
+        ins_dict[ins_str] = ins
+
+        if p not in ins._leafs:
+          ins._leafs.append(p)
+
+    for name, ins in ins_dict.iteritems():
+      ins.validate_tree()
+      # construct children INS
+      self._construct_inner_namespaces(ins, depth + 1)
+      ins.validate_tree()
+      # construct children entries
+      self._construct_entries(ins, depth + 1)
+
+      if ins not in parent.namespaces:
+        parent._namespaces.append(ins)
+
+      if not ins.validate_tree():
+        print >> sys.stderr, ("ERROR: Failed to validate tree in " +           \
+                              "construct_inner_namespaces, with ins = '%s'")   \
+                             %(ins)
+
+  # doesnt construct the entries, so much as links them
+  def _construct_entries(self, parent, depth=0):
+    #parent is InnerNamespace or Kind
+    entry_dict = self._dictionary_by_name(parent.entries)
+    for p in parent._leafs:
+      ins_list = p.get_inner_namespace_list()
+
+      if len(ins_list) == depth:
+        entry = entry_dict.get(p.name, p)
+        entry_dict[p.name] = entry
+
+    for name, entry in entry_dict.iteritems():
+
+      old_parent = entry.parent
+      entry._parent = parent
+
+      if entry not in parent.entries:
+        parent._entries.append(entry)
+
+      if old_parent is not None and old_parent != parent:
+        print >> sys.stderr, ("ERROR: Parent changed from '%s' to '%s' for " + \
+                              "entry '%s'")                                    \
+                             %(old_parent.name, parent.name, entry.name)
+
+  def _get_children(self):
+    if self.outer_namespaces is not None:
+      for i in self.outer_namespaces:
+        yield i
+
+    if self.tags is not None:
+      for i in self.tags:
+        yield i
+
+class Tag(Node):
+  """
+  A tag Node corresponding to a top-level <tag> element.
+
+  Attributes (Read-Only):
+    name: alias for id
+    id: The name of the tag, e.g. for <tag id="BC"/> id = 'BC'
+    description: The description of the tag, the contents of the <tag> element.
+    parent: An edge to the parent, which is always the Metadata root node.
+    entries: A sequence of edges to entries/clones that are using this Tag.
+  """
+  def __init__(self, name, parent, description=""):
+    self._name        = name  # 'id' attribute in XML
+    self._id          = name
+    self._description = description
+    self._parent      = parent
+
+    # all entries that have this tag, including clones
+    self._entries     = []  # filled in by Metadata#construct_tags
+
+  @property
+  def id(self):
+    return self._id
+
+  @property
+  def description(self):
+    return self._description
+
+  @property
+  def entries(self):
+    # Fresh generator each call; callers cannot mutate _entries through it.
+    return (i for i in self._entries)
+
+  # Tags are leaf nodes: no children (see Node.find_all's None check).
+  def _get_children(self):
+    return None
+
+class Typedef(Node):
+  """
+  A typedef Node corresponding to a <typedef> element under a top-level <types>.
+
+  Attributes (Read-Only):
+    name: The name of this typedef as a string.
+    languages: A dictionary of 'language name' -> 'fully qualified class'.
+    parent: An edge to the parent, which is always the Metadata root node.
+    entries: An iterable over all entries which reference this typedef.
+  """
+  def __init__(self, name, parent, languages=None):
+    self._name        = name
+    self._parent      = parent
+
+    # all entries that have this typedef
+    self._entries     = []  # filled in by Metadata#construct_types
+
+    # Default to an empty dict when no language mapping is provided.
+    self._languages   = languages or {}
+
+  @property
+  def languages(self):
+    return self._languages
+
+  @property
+  def entries(self):
+    return (i for i in self._entries)
+
+  # Typedefs are leaf nodes: no children.
+  def _get_children(self):
+    return None
+
+class OuterNamespace(Node):
+  """
+  A node corresponding to a <namespace> element under <metadata>
+
+  Attributes (Read-Only):
+    name: The name attribute of the <namespace name="foo"> element.
+    parent: An edge to the parent, which is always the Metadata root node.
+    sections: A sequence of Section children.
+  """
+  def __init__(self, name, parent, sections=[]):
+    self._name = name
+    self._parent = parent # MetadataSet
+    # The mutable default is benign: it is copied, never mutated in place.
+    self._sections = sections[:]
+    self._leafs = []
+
+    # _children aliases _sections so Node.sort_children and the default
+    # Node._get_children operate directly on the section list.
+    self._children = self._sections
+
+  @property
+  def sections(self):
+    return (i for i in self._sections)
+
+class Section(Node):
+  """
+  A node corresponding to a <section> element under <namespace>
+
+  Attributes (Read-Only):
+    name: The name attribute of the <section name="foo"> element.
+    parent: An edge to the parent, which is always an OuterNamespace instance.
+    description: A string description of the section, or None.
+    kinds: A sequence of Kind children.
+    merged_kinds: A sequence of virtual Kind children,
+                  with each Kind's children merged by the kind.name
+  """
+  def __init__(self, name, parent, description=None, kinds=[]):
+    self._name = name
+    self._parent = parent
+    self._description = description
+    # The mutable default is benign: it is copied, never mutated in place.
+    self._kinds = kinds[:]
+
+    self._leafs = []
+
+
+  @property
+  def description(self):
+    return self._description
+
+  @property
+  def kinds(self):
+    return (i for i in self._kinds)
+
+  def sort_children(self):
+    """
+    Reorder the Kind children into the fixed controls, static, dynamic
+    order. Duplicate kinds with the same name stay in their relative order
+    within each group.
+    """
+    self.validate_tree()
+    # order is always controls,static,dynamic
+    find_child = lambda x: [i for i in self._get_children() if i.name == x]
+    new_lst = find_child('controls') \
+            + find_child('static')   \
+            + find_child('dynamic')
+    self._kinds = new_lst
+    self.validate_tree()
+
+  def _get_children(self):
+    return (i for i in self.kinds)
+
+  @property
+  def merged_kinds(self):
+
+    # Fold kinds with the same name into one virtual Kind, concatenating
+    # their namespaces and entries in encounter order.
+    def aggregate_by_name(acc, el):
+      existing = [i for i in acc if i.name == el.name]
+      if existing:
+        k = existing[0]
+      else:
+        k = Kind(el.name, el.parent)
+        acc.append(k)
+
+      k._namespaces.extend(el._namespaces)
+      k._entries.extend(el._entries)
+
+      return acc
+
+    # NOTE(review): 'reduce' is the Python 2 builtin; under Python 3 this
+    # needs functools.reduce.
+    new_kinds_lst = reduce(aggregate_by_name, self.kinds, [])
+
+    for k in new_kinds_lst:
+      yield k
+
+  def combine_kinds_into_single_node(self):
+    r"""
+    Combines the section's Kinds into a single node.
+
+    Combines all the children (kinds) of this section into a single
+    virtual Kind node.
+
+    Returns:
+      A new Kind node that collapses all Kind siblings into one, combining
+      all their children together.
+
+      For example, given self.kinds == [ x, y ]
+
+        x  y               z
+      / |  | \    -->   / | | \
+      a b  c d          a b c d
+
+      a new instance z is returned in this example.
+
+    Remarks:
+      The children of the kinds are the same references as before, that is
+      their parents will point to the old parents and not to the new parent.
+    """
+    combined = Kind(name="combined", parent=self)
+
+    for k in self._get_children():
+      combined._namespaces.extend(k.namespaces)
+      combined._entries.extend(k.entries)
+
+    return combined
+
+class Kind(Node):
+  """
+  A node corresponding to one of: <static>,<dynamic>,<controls> under a
+  <section> element.
+
+  Attributes (Read-Only):
+    name: A string which is one of 'static', 'dynamic, or 'controls'.
+    parent: An edge to the parent, which is always a Section  instance.
+    namespaces: A sequence of InnerNamespace children.
+    entries: A sequence of Entry/Clone children.
+    merged_entries: A sequence of MergedEntry virtual nodes from entries
+  """
+  def __init__(self, name, parent):
+    self._name = name
+    self._parent = parent
+    self._namespaces = []
+    self._entries = []
+
+    self._leafs = []
+
+  @property
+  def namespaces(self):
+    # Unlike Tag/Typedef, this exposes the underlying list directly.
+    return self._namespaces
+
+  @property
+  def entries(self):
+    return self._entries
+
+  @property
+  def merged_entries(self):
+    # Yield one merged view per entry (see MergedEntry in module docstring).
+    for i in self.entries:
+      yield i.merge()
+
+  def sort_children(self):
+    self._namespaces.sort(key=self._get_name())
+    self._entries.sort(key=self._get_name())
+
+  def _get_children(self):
+    # Namespaces first, then entries.
+    for i in self.namespaces:
+      yield i
+    for i in self.entries:
+      yield i
+
+  def combine_children_by_name(self):
+    r"""
+    Combine multiple children with the same name into a single node.
+
+    Returns:
+      A new Kind where all of the children with the same name were combined.
+
+      For example:
+
+      Given a Kind k:
+
+              k
+            / | \
+            a b c
+            | | |
+            d e f
+
+      a.name == "foo"
+      b.name == "foo"
+      c.name == "bar"
+
+      The returned Kind will look like this:
+
+             k'
+            /  \
+            a' c'
+          / |  |
+          d e  f
+
+    Remarks:
+      This operation is not recursive. To combine the grandchildren and other
+      ancestors, call this method on the ancestor nodes.
+    """
+    return Kind._combine_children_by_name(self, new_type=type(self))
+
+  # new_type is either Kind or InnerNamespace
+  @staticmethod
+  def _combine_children_by_name(self, new_type):
+    new_ins_dict = OrderedDict()
+    new_ent_dict = OrderedDict()
+
+    for ins in self.namespaces:
+      # setdefault both looks up and registers the combined namespace.
+      new_ins = new_ins_dict.setdefault(ins.name,
+                                        InnerNamespace(ins.name, parent=self))
+      new_ins._namespaces.extend(ins.namespaces)
+      new_ins._entries.extend(ins.entries)
+
+    for ent in self.entries:
+      # The assignment's value is unused; the setdefault side effect
+      # (first merged entry wins per name) is what matters here.
+      new_ent = new_ent_dict.setdefault(ent.name,
+                                        ent.merge())
+
+    kind = new_type(self.name, self.parent)
+    kind._namespaces = new_ins_dict.values()
+    kind._entries = new_ent_dict.values()
+
+    return kind
+
+class InnerNamespace(Node):
+  """
+  A node corresponding to a <namespace> which is an ancestor of a Kind.
+  These namespaces may have other namespaces recursively, or entries as leafs.
+
+  Attributes (Read-Only):
+    name: Name attribute from the element, e.g. <namespace name="foo"> -> 'foo'
+    parent: An edge to the parent, which is an InnerNamespace or a Kind.
+    namespaces: A sequence of InnerNamespace children.
+    entries: A sequence of Entry/Clone children.
+    merged_entries: A sequence of MergedEntry virtual nodes from entries
+  """
+  def __init__(self, name, parent):
+    self._name        = name
+    self._parent      = parent
+    self._namespaces  = []
+    self._entries     = []
+    self._leafs       = []
+
+  @property
+  def namespaces(self):
+    return self._namespaces
+
+  @property
+  def entries(self):
+    return self._entries
+
+  @property
+  def merged_entries(self):
+    # Yield one merged view per entry, mirroring Kind.merged_entries.
+    for i in self.entries:
+      yield i.merge()
+
+  def sort_children(self):
+    self._namespaces.sort(key=self._get_name())
+    self._entries.sort(key=self._get_name())
+
+  def _get_children(self):
+    # Namespaces first, then entries, matching Kind._get_children.
+    for i in self.namespaces:
+      yield i
+    for i in self.entries:
+      yield i
+
+  def combine_children_by_name(self):
+    r"""
+    Combine multiple children with the same name into a single node.
+
+    Returns:
+      A new InnerNamespace where all of the children with the same name were
+      combined.
+
+      For example:
+
+      Given an InnerNamespace i:
+
+              i
+            / | \
+            a b c
+            | | |
+            d e f
+
+      a.name == "foo"
+      b.name == "foo"
+      c.name == "bar"
+
+      The returned InnerNamespace will look like this:
+
+             i'
+            /  \
+            a' c'
+          / |  |
+          d e  f
+
+    Remarks:
+      This operation is not recursive. To combine the grandchildren and other
+      ancestors, call this method on the ancestor nodes.
+    """
+    # Delegates to the shared static helper; new_type makes it return an
+    # InnerNamespace here rather than a Kind.
+    return Kind._combine_children_by_name(self, new_type=type(self))
+
+class EnumValue(Node):
+  """
+  A class corresponding to a <value> element within an <enum> within an <entry>.
+
+  Attributes (Read-Only):
+    name: A string,                 e.g. 'ON' or 'OFF'
+    id: An optional numeric string, e.g. '0' or '0xFF'
+    deprecated: A boolean, True if the enum should be deprecated.
+    optional: A boolean
+    hidden: A boolean, True if the enum should be hidden.
+    notes: A string describing the notes, or None.
+    parent: An edge to the parent, always an Enum instance.
+  """
+  def __init__(self, name, parent, id=None, deprecated=False, optional=False, hidden=False, notes=None):
+    self._name = name                    # str, e.g. 'ON' or 'OFF'
+    self._id = id                        # None or numeric str, e.g. '0' or '0xFF'
+    self._deprecated = deprecated        # bool
+    self._optional = optional            # bool
+    self._hidden = hidden                # bool
+    self._notes = notes                  # None or str
+    self._parent = parent
+
+  @property
+  def id(self):
+    return self._id
+
+  @property
+  def deprecated(self):
+    return self._deprecated
+
+  @property
+  def optional(self):
+    return self._optional
+
+  @property
+  def hidden(self):
+    return self._hidden
+
+  @property
+  def notes(self):
+    return self._notes
+
+  # EnumValues are leaf nodes: no children.
+  def _get_children(self):
+    return None
+
+class Enum(Node):
+  """
+  A class corresponding to an <enum> element within an <entry>.
+
+  Attributes (Read-Only):
+    parent: An edge to the parent, always an Entry instance.
+    values: A sequence of EnumValue children.
+    has_values_with_id: A boolean representing if any of the children have a
+        non-empty id property.
+  """
+  # The mutable default arguments ({} and []) are only ever read here
+  # (get / 'in' membership), never mutated, so they are safe.
+  def __init__(self, parent, values, ids={}, deprecateds=[], optionals=[], hiddens=[], notes={}):
+    self._values =                                                             \
+      [ EnumValue(val, self, ids.get(val), val in deprecateds, val in optionals, val in hiddens,  \
+                  notes.get(val))                                              \
+        for val in values ]
+
+    self._parent = parent
+    self._name = None
+
+  @property
+  def values(self):
+    return (i for i in self._values)
+
+  @property
+  def has_values_with_id(self):
+    return bool(any(i for i in self.values if i.id))
+
+  def _get_children(self):
+    return (i for i in self._values)
+
+class Entry(Node):
+  """
+  A node corresponding to an <entry> element.
+
+  Attributes (Read-Only):
+    parent: An edge to the parent node, which is an InnerNamespace or Kind.
+    name: The fully qualified name string, e.g. 'android.shading.mode'
+    name_short: The name attribute from <entry name="mode">, e.g. mode
+    type: The type attribute from <entry type="bar">
+    kind: A string ('static', 'dynamic', 'controls') corresponding to the
+          ancestor Kind#name
+    container: The container attribute from <entry container="array">, or None.
+    container_sizes: A sequence of size strings or None if container is None.
+    enum: An Enum instance if the enum attribute is true, None otherwise.
+    visibility: The visibility of this entry ('system', 'hidden', 'public')
+                across the system. System entries are only visible in native code
+                headers. Hidden entries are marked @hide in managed code, while
+                public entries are visible in the Android SDK.
+    applied_visibility: As visibility, but always valid, defaulting to 'system'
+                        if no visibility is given for an entry.
+    synthetic: The C-level visibility of this entry ('false', 'true').
+               Synthetic entries will not be generated into the native metadata
+               list of entries (in C code). In general a synthetic entry is
+               glued together at the Java layer from multiple visibiltity=hidden
+               entries.
+    hwlevel: The lowest hardware level at which the entry is guaranteed
+             to be supported by the camera device. All devices with higher
+             hwlevels will also include this entry. None means that the
+             entry is optional on any hardware level.
+    deprecated: Marks an entry as @Deprecated in the Java layer; if within an
+               unreleased version this needs to be removed altogether. If applied
+               to an entry from an older release, then this means the entry
+               should be ignored by newer code.
+    optional: a bool representing the optional attribute, which denotes the entry
+              is required for hardware level full devices, but optional for other
+              hardware levels.  None if not present.
+    applied_optional: As optional but always valid, defaulting to False if no
+                      optional attribute is present.
+    tuple_values: A sequence of strings describing the tuple values,
+                  None if container is not 'tuple'.
+    description: A string description, or None.
+    range: A string range, or None.
+    units: A string units, or None.
+    tags: A sequence of Tag nodes associated with this Entry.
+    type_notes: A string describing notes for the type, or None.
+    typedef: A Typedef associated with this Entry, or None.
+
+  Remarks:
+    Subclass Clone can be used interchangeably with an Entry,
+    for when we don't care about the underlying type.
+
+    parent and tags edges are invalid until after Metadata#construct_graph
+    has been invoked.
+  """
+  def __init__(self, **kwargs):
+    """
+    Instantiate a new Entry node.
+
+    Args:
+      name: A string with the fully qualified name, e.g. 'android.shading.mode'
+      type: A string describing the type, e.g. 'int32'
+      kind: A string describing the kind, e.g. 'static'
+
+    Args (if container):
+      container: A string describing the container, e.g. 'array' or 'tuple'
+      container_sizes: A list of string sizes if a container, or None otherwise
+
+    Args (if container is 'tuple'):
+      tuple_values: A list of tuple values, e.g. ['width', 'height']
+
+    Args (if the 'enum' attribute is true):
+      enum: A boolean, True if this is an enum, False otherwise
+      enum_values: A list of value strings, e.g. ['ON', 'OFF']
+      enum_optionals: A list of optional enum values, e.g. ['OFF']
+      enum_notes: A dictionary of value->notes strings.
+      enum_ids: A dictionary of value->id strings.
+
+    Args (optional):
+      description: A string with a description of the entry.
+      range: A string with the range of the values of the entry, e.g. '>= 0'
+      units: A string with the units of the values, e.g. 'inches'
+      details: A string with the detailed documentation for the entry
+      hal_details: A string with the HAL implementation details for the entry
+      tag_ids: A list of tag ID strings, e.g. ['BC', 'V1']
+      type_notes: A string with the notes for the type
+      visibility: A string describing the visibility, eg 'system', 'hidden',
+                  'public'
+      synthetic: A bool to mark whether this entry is visible only at the Java
+                 layer (True), or at both layers (False = default).
+      hwlevel: A string of the HW level (one of 'legacy', 'limited', 'full')
+      deprecated: A bool to mark whether this is @Deprecated at the Java layer
+                 (default = False).
+      optional: A bool to mark whether optional for non-full hardware devices
+      typedef: A string corresponding to a typedef's name attribute.
+    """
+
+    # Warn (but do not raise) when 'type' is missing; note that the
+    # kwargs['type'] lookup below will still raise KeyError if the key
+    # is absent entirely, so this only softens the None case's diagnostics.
+    if kwargs.get('type') is None:
+      print >> sys.stderr, "ERROR: Missing type for entry '%s' kind  '%s'"     \
+      %(kwargs.get('name'), kwargs.get('kind'))
+
+    # Attributes are Read-Only, but edges may be mutated by
+    # Metadata, particularly during construct_graph
+
+    self._name = kwargs['name']
+    self._type = kwargs['type']
+    self._kind = kwargs['kind'] # static, dynamic, or controls
+
+    # Everything else (container, enum, docs, tags, ...) is shared with
+    # Clone and parsed in _init_common.
+    self._init_common(**kwargs)
+
+  @property
+  def type(self):
+    return self._type
+
+  @property
+  def kind(self):
+    return self._kind
+
+  @property
+  def visibility(self):
+    # May be None when no visibility attribute was given; see
+    # applied_visibility for the defaulted form.
+    return self._visibility
+
+  @property
+  def applied_visibility(self):
+    # Defaults to 'system' when no visibility was specified.
+    return self._visibility or 'system'
+
+  @property
+  def synthetic(self):
+    return self._synthetic
+
+  @property
+  def hwlevel(self):
+    return self._hwlevel
+
+  @property
+  def deprecated(self):
+    return self._deprecated
+
+  # TODO: optional should just return hwlevel is None
+  @property
+  def optional(self):
+    return self._optional
+
+  @property
+  def applied_optional(self):
+    # Defaults to False when no optional attribute was specified.
+    return self._optional or False
+
+  @property
+  def name_short(self):
+    # Alias for get_name_minimal(), e.g. 'shading' for
+    # 'android.lens.info.shading'.
+    return self.get_name_minimal()
+
+  @property
+  def container(self):
+    return self._container
+
+  @property
+  def container_sizes(self):
+    # Returns a fresh generator over the sizes on each access, so the
+    # underlying list cannot be mutated by callers; None if no container.
+    if self._container_sizes is None:
+      return None
+    else:
+      return (i for i in self._container_sizes)
+
+  @property
+  def tuple_values(self):
+    # Fresh generator per access; None unless the container is a tuple.
+    if self._tuple_values is None:
+      return None
+    else:
+      return (i for i in self._tuple_values)
+
+  @property
+  def description(self):
+    return self._description
+
+  @property
+  def range(self):
+    return self._range
+
+  @property
+  def units(self):
+    return self._units
+
+  @property
+  def details(self):
+    return self._details
+
+  @property
+  def hal_details(self):
+    return self._hal_details
+
+  @property
+  def tags(self):
+    # Fresh generator per access; None until Metadata::_construct_tags
+    # has filled in the tag edges.
+    if self._tags is None:
+      return None
+    else:
+      return (i for i in self._tags)
+
+  @property
+  def type_notes(self):
+    return self._type_notes
+
+  @property
+  def typedef(self):
+    # None until Metadata::_construct_types resolves the typedef edge.
+    return self._typedef
+
+  @property
+  def enum(self):
+    # An Enum instance, or None if this entry is not an enum.
+    return self._enum
+
+  def _get_children(self):
+    # The only child node an Entry can have is its Enum (if any).
+    if self.enum:
+      yield self.enum
+
+  def sort_children(self):
+    # Entries have no sortable children; present for Node interface parity.
+    return None
+
+  def is_clone(self):
+    """
+    Whether or not this is a Clone instance.
+
+    Returns:
+      False
+    """
+    return False
+
+  def _init_common(self, **kwargs):
+    # Shared initialization for Entry and Clone: parses all optional
+    # attributes (container, enum, docs, tags, visibility, ...) from kwargs.
+
+    self._parent = None # filled in by Metadata::_construct_entries
+
+    self._container = kwargs.get('container')
+    self._container_sizes = kwargs.get('container_sizes')
+
+    # access these via the 'enum' prop
+    enum_values = kwargs.get('enum_values')
+    enum_deprecateds = kwargs.get('enum_deprecateds')
+    enum_optionals = kwargs.get('enum_optionals')
+    enum_hiddens = kwargs.get('enum_hiddens')
+    enum_notes = kwargs.get('enum_notes')  # { value => notes }
+    enum_ids = kwargs.get('enum_ids')  # { value => ids }
+    self._tuple_values = kwargs.get('tuple_values')
+
+    self._description = kwargs.get('description')
+    self._range = kwargs.get('range')
+    self._units = kwargs.get('units')
+    self._details = kwargs.get('details')
+    self._hal_details = kwargs.get('hal_details')
+
+    self._tag_ids = kwargs.get('tag_ids', [])
+    self._tags = None  # Filled in by Metadata::_construct_tags
+
+    self._type_notes = kwargs.get('type_notes')
+    self._type_name = kwargs.get('type_name')
+    self._typedef = None # Filled in by Metadata::_construct_types
+
+    # Only build an Enum child when the 'enum' attribute was truthy.
+    if kwargs.get('enum', False):
+      self._enum = Enum(self, enum_values, enum_ids, enum_deprecateds, enum_optionals,
+                        enum_hiddens, enum_notes)
+    else:
+      self._enum = None
+
+    self._visibility = kwargs.get('visibility')
+    self._synthetic = kwargs.get('synthetic', False)
+    self._hwlevel = kwargs.get('hwlevel')
+    self._deprecated = kwargs.get('deprecated', False)
+    self._optional = kwargs.get('optional')
+
+    # Keep the raw kwargs around for later property-key lookups.
+    self._property_keys = kwargs
+
+  def merge(self):
+    """
+    Copy the attributes into a new entry, merging it with the target entry
+    if it's a clone.
+
+    Returns:
+      A MergedEntry wrapping this entry (and its target, when cloned).
+    """
+    return MergedEntry(self)
+
+  # Helpers for accessing less than the fully qualified name
+
+  def get_name_as_list(self):
+    """
+    Returns the name as a list split by a period.
+
+    For example:
+      entry.name is 'android.lens.info.shading'
+      entry.get_name_as_list() == ['android', 'lens', 'info', 'shading']
+    """
+    return self.name.split(".")
+
+  def get_inner_namespace_list(self):
+    """
+    Returns the inner namespace part of the name as a list
+
+    For example:
+      entry.name is 'android.lens.info.shading'
+      entry.get_inner_namespace_list() == ['info']
+    """
+    # Everything between the section (index 1) and the leaf name.
+    return self.get_name_as_list()[2:-1]
+
+  def get_outer_namespace(self):
+    """
+    Returns the outer namespace as a string.
+
+    For example:
+      entry.name is 'android.lens.info.shading'
+      entry.get_outer_namespace() == 'android'
+
+    Remarks:
+      Since outer namespaces are non-recursive,
+      and each entry has one, this does not need to be a list.
+    """
+    return self.get_name_as_list()[0]
+
+  def get_section(self):
+    """
+    Returns the section as a string.
+
+    For example:
+      entry.name is 'android.lens.info.shading'
+      entry.get_section() == 'lens'
+
+    Remarks:
+      Since sections are non-recursive,
+      and each entry has one, this does not need to be a list.
+    """
+    return self.get_name_as_list()[1]
+
+  def get_name_minimal(self):
+    """
+    Returns only the last component of the fully qualified name as a string.
+
+    For example:
+      entry.name is 'android.lens.info.shading'
+      entry.get_name_minimal() == 'shading'
+
+    Remarks:
+      entry.name_short is an alias for this
+    """
+    return self.get_name_as_list()[-1]
+
+  def get_path_without_name(self):
+    """
+    Returns a string path to the entry, with the name component excluded.
+
+    For example:
+      entry.name is 'android.lens.info.shading'
+      entry.get_path_without_name() == 'android.lens.info'
+    """
+    return ".".join(self.get_name_as_list()[0:-1])
+
+
+class Clone(Entry):
+  """
+  A Node corresponding to a <clone> element. It has all the attributes of an
+  <entry> element (Entry) plus the additions specified below.
+
+  Attributes (Read-Only):
+    entry: an edge to an Entry object that this targets
+    target_kind: A string describing the kind of the target entry.
+    name: a string of the name, same as entry.name
+    kind: a string of the Kind ancestor, one of 'static', 'controls', 'dynamic'
+          for the <clone> element.
+    type: always None, since a clone cannot override the type.
+  """
+  def __init__(self, entry=None, **kwargs):
+    """
+    Instantiate a new Clone node.
+
+    Args:
+      name: A string with the fully qualified name, e.g. 'android.shading.mode'
+      type: A string describing the type, e.g. 'int32'
+      kind: A string describing the kind, e.g. 'static'
+      target_kind: A string for the kind of the target entry, e.g. 'dynamic'
+
+    Args (if container):
+      container: A string describing the container, e.g. 'array' or 'tuple'
+      container_sizes: A list of string sizes if a container, or None otherwise
+
+    Args (if container is 'tuple'):
+      tuple_values: A list of tuple values, e.g. ['width', 'height']
+
+    Args (if the 'enum' attribute is true):
+      enum: A boolean, True if this is an enum, False otherwise
+      enum_values: A list of value strings, e.g. ['ON', 'OFF']
+      enum_optionals: A list of optional enum values, e.g. ['OFF']
+      enum_notes: A dictionary of value->notes strings.
+      enum_ids: A dictionary of value->id strings.
+
+    Args (optional):
+      entry: An edge to the corresponding target Entry.
+      description: A string with a description of the entry.
+      range: A string with the range of the values of the entry, e.g. '>= 0'
+      units: A string with the units of the values, e.g. 'inches'
+      details: A string with the detailed documentation for the entry
+      hal_details: A string with the HAL implementation details for the entry
+      tag_ids: A list of tag ID strings, e.g. ['BC', 'V1']
+      type_notes: A string with the notes for the type
+
+    Remarks:
+      Note that type is not specified since it has to be the same as the
+      entry.type.
+    """
+    self._entry = entry  # Entry object
+    self._target_kind = kwargs['target_kind']
+    self._name = kwargs['name']  # same as entry.name
+    self._kind = kwargs['kind']
+
+    # illegal to override the type, it should be the same as the entry
+    self._type = None
+    # the rest of the kwargs are optional
+    # can be used to override the regular entry data
+    self._init_common(**kwargs)
+
+  @property
+  def entry(self):
+    # The target Entry this clone refers to (may be None until resolved).
+    return self._entry
+
+  @property
+  def target_kind(self):
+    return self._target_kind
+
+  def is_clone(self):
+    """
+    Whether or not this is a Clone instance.
+
+    Returns:
+      True
+    """
+    return True
+
+class MergedEntry(Entry):
+  """
+  A MergedEntry has all the attributes of a Clone and its target Entry merged
+  together.
+
+  Remarks:
+    Useful when we want to 'unfold' a clone into a real entry by copying out
+    the target entry data. In this case we don't care about distinguishing
+    a clone vs an entry.
+  """
+  def __init__(self, entry):
+    """
+    Create a new instance of MergedEntry.
+
+    Args:
+      entry: An Entry or Clone instance
+    """
+    # Attributes a clone may override: take the clone's value unless it is
+    # falsy (None/empty), in which case fall back to the target entry's.
+    # NOTE(review): the 'or' fallback also discards legitimately-falsy
+    # overrides such as an empty string -- confirm that is intended.
+    props_distinct = ['description', 'units', 'range', 'details',
+                      'hal_details', 'tags', 'kind']
+
+    for p in props_distinct:
+      p = '_' + p
+      if entry.is_clone():
+        setattr(self, p, getattr(entry, p) or getattr(entry.entry, p))
+      else:
+        setattr(self, p, getattr(entry, p))
+
+    # Attributes that always come from the target entry when cloning.
+    props_common = ['parent', 'name', 'container',
+                    'container_sizes', 'enum',
+                    'tuple_values',
+                    'type',
+                    'type_notes',
+                    'visibility',
+                    'synthetic',
+                    'hwlevel',
+                    'deprecated',
+                    'optional',
+                    'typedef'
+                   ]
+
+    for p in props_common:
+      p = '_' + p
+      if entry.is_clone():
+        setattr(self, p, getattr(entry.entry, p))
+      else:
+        setattr(self, p, getattr(entry, p))
diff --git a/media/camera/docs/metadata_model_test.py b/media/camera/docs/metadata_model_test.py
new file mode 100644
index 0000000..eb79c9b
--- /dev/null
+++ b/media/camera/docs/metadata_model_test.py
@@ -0,0 +1,130 @@
+import unittest
+from unittest import TestCase
+from metadata_model import *
+
+class TestInnerNamespace(TestCase):
+  # Tests for InnerNamespace.combine_children_by_name: two sibling inner
+  # namespaces with the same name should be merged into one, while
+  # differently-parented entries are preserved.
+  # NOTE(review): assertEquals is a deprecated alias of assertEqual in
+  # Python 2.7's unittest -- consider assertEqual if this is ever modernized.
+  def test_combine_children_by_name(self):
+    #
+    # Set up
+    #
+    kind = Kind("some_root_kind", parent=None)
+    ins_outer = InnerNamespace("static", parent=kind)
+    kind._namespaces = [ins_outer]
+
+    ins1 = InnerNamespace("ins1", parent=ins_outer)
+    ins1a = InnerNamespace("ins1", parent=ins_outer)  # same name deliberately
+    entry1 = Entry(name="entry1", type="int32", kind="static",
+                   parent=ins1)
+    entry2 = Entry(name="entry2", type="int32", kind="static",
+                   parent=ins1a)
+    entry3 = Entry(name="entry3", type="int32", kind="static",
+                   parent=ins_outer)
+
+    ins_outer._namespaces = [ins1, ins1a]
+    ins_outer._entries = [entry3]
+
+    ins1._entries = [entry1]
+    ins1a._entries = [entry2]
+
+    #
+    # Test
+    #
+    combined_children_namespace = ins_outer.combine_children_by_name()
+
+    self.assertIsInstance(combined_children_namespace, InnerNamespace)
+    combined_ins = [i for i in combined_children_namespace.namespaces]
+    combined_ent = [i for i in combined_children_namespace.entries]
+
+    # The two 'ins1' namespaces collapse into one; entry3 stays at the top.
+    self.assertEquals(kind, combined_children_namespace.parent)
+    self.assertEquals(1, len(combined_ins))
+    self.assertEquals(1, len(combined_ent))
+
+    self.assertEquals("ins1", combined_ins[0].name)
+    self.assertEquals("entry3", combined_ent[0].name)
+
+    # Both entries from the merged namespaces survive under the merged node.
+    new_ins = combined_ins[0]
+    self.assertIn(entry1, new_ins.entries)
+    self.assertIn(entry2, new_ins.entries)
+
+
+class TestKind(TestCase):
+  # Tests for Kind-level merging helpers on the metadata model graph.
+  def test_combine_kinds_into_single_node(self):
+    #
+    # Set up
+    #
+    # Two kinds ('static' and 'dynamic') under one section, each with its
+    # own namespace and entry.
+    section = Section("some_section", parent=None)
+    kind_static = Kind("static", parent=section)
+    kind_dynamic = Kind("dynamic", parent=section)
+    section._kinds = [kind_static, kind_dynamic]
+
+    ins1 = InnerNamespace("ins1", parent=kind_static)
+    ins2 = InnerNamespace("ins2", parent=kind_dynamic)
+    entry1 = Entry(name="entry1", type="int32", kind="static",
+                   parent=kind_static)
+    entry2 = Entry(name="entry2", type="int32", kind="static",
+                   parent=kind_dynamic)
+
+    kind_static._namespaces = [ins1]
+    kind_static._entries = [entry1]
+
+    kind_dynamic._namespaces = [ins2]
+    kind_dynamic._entries = [entry2]
+
+    #
+    # Test
+    #
+    # Combining should pull namespaces and entries from both kinds into
+    # a single node parented to the section.
+    combined_kind = section.combine_kinds_into_single_node()
+
+    self.assertEquals(section, combined_kind.parent)
+
+    self.assertIn(ins1, combined_kind.namespaces)
+    self.assertIn(ins2, combined_kind.namespaces)
+
+    self.assertIn(entry1, combined_kind.entries)
+    self.assertIn(entry2, combined_kind.entries)
+
+  def test_combine_children_by_name(self):
+    #
+    # Set up
+    #
+    # Same-named sibling namespaces under a single kind should merge.
+    section = Section("some_section", parent=None)
+    kind_static = Kind("static", parent=section)
+    section._kinds = [kind_static]
+
+    ins1 = InnerNamespace("ins1", parent=kind_static)
+    ins1a = InnerNamespace("ins1", parent=kind_static)  # same name deliberately
+    entry1 = Entry(name="entry1", type="int32", kind="static",
+                   parent=ins1)
+    entry2 = Entry(name="entry2", type="int32", kind="static",
+                   parent=ins1a)
+    entry3 = Entry(name="entry3", type="int32", kind="static",
+                   parent=kind_static)
+
+    kind_static._namespaces = [ins1, ins1a]
+    kind_static._entries = [entry3]
+
+    ins1._entries = [entry1]
+    ins1a._entries = [entry2]
+
+    #
+    # Test
+    #
+    combined_children_kind = kind_static.combine_children_by_name()
+
+    self.assertIsInstance(combined_children_kind, Kind)
+    combined_ins = [i for i in combined_children_kind.namespaces]
+    combined_ent = [i for i in combined_children_kind.entries]
+
+    self.assertEquals(section, combined_children_kind.parent)
+    self.assertEquals(1, len(combined_ins))
+    self.assertEquals(1, len(combined_ent))
+
+    self.assertEquals("ins1", combined_ins[0].name)
+    self.assertEquals("entry3", combined_ent[0].name)
+
+    # Entries from both merged namespaces are retained.
+    new_ins = combined_ins[0]
+    self.assertIn(entry1, new_ins.entries)
+    self.assertIn(entry2, new_ins.entries)
+
+# Allow running this test module directly: python metadata_model_test.py
+if __name__ == '__main__':
+    unittest.main()
diff --git a/media/camera/docs/metadata_parser_xml.py b/media/camera/docs/metadata_parser_xml.py
new file mode 100755
index 0000000..57be227
--- /dev/null
+++ b/media/camera/docs/metadata_parser_xml.py
@@ -0,0 +1,336 @@
+#!/usr/bin/python
+
+#
+# Copyright (C) 2012 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""
+A parser for metadata_properties.xml can also render the resulting model
+over a Mako template.
+
+Usage:
+  metadata_parser_xml.py <filename.xml> <template.mako> [<output_file>]
+  - outputs the resulting template to output_file (stdout if none specified)
+
+Module:
+  The parser is also available as a module import (MetadataParserXml) to use
+  in other modules.
+
+Dependencies:
+  BeautifulSoup - an HTML/XML parser available to download from
+          http://www.crummy.com/software/BeautifulSoup/
+  Mako - a template engine for Python, available to download from
+     http://www.makotemplates.org/
+"""
+
+import sys
+import os
+import StringIO
+
+from bs4 import BeautifulSoup
+from bs4 import NavigableString
+
+from mako.template import Template
+from mako.lookup import TemplateLookup
+from mako.runtime import Context
+
+from metadata_model import *
+import metadata_model
+from metadata_validate import *
+import metadata_helpers
+
+class MetadataParserXml:
+  """
+  A class to parse any XML block that passes validation with metadata-validate.
+  It builds a metadata_model.Metadata graph and then renders it over a
+  Mako template.
+
+  Attributes (Read-Only):
+    soup: an instance of BeautifulSoup corresponding to the XML contents
+    metadata: a constructed instance of metadata_model.Metadata
+  """
+  def __init__(self, xml, file_name):
+    """
+    Construct a new MetadataParserXml, immediately try to parse it into a
+    metadata model.
+
+    Args:
+      xml: The XML block to use for the metadata
+      file_name: Source of the XML block, only for debugging/errors
+
+    Raises:
+      ValueError: if the XML block failed to pass metadata_validate.py
+    """
+    self._soup = validate_xml(xml)
+
+    if self._soup is None:
+      raise ValueError("%s has an invalid XML file" % (file_name))
+
+    self._metadata = Metadata()
+    self._parse()
+    # NOTE(review): _parse() already ends with construct_graph(); calling
+    # it again here looks redundant -- confirm construct_graph is idempotent.
+    self._metadata.construct_graph()
+
+  @staticmethod
+  def create_from_file(file_name):
+    """
+    Construct a new MetadataParserXml by loading and parsing an XML file.
+
+    Args:
+      file_name: Name of the XML file to load and parse.
+
+    Raises:
+      ValueError: if the XML file failed to pass metadata_validate.py
+
+    Returns:
+      MetadataParserXml instance representing the XML file.
+    """
+    # Python 2 file() builtin; the handle is never explicitly closed and is
+    # reclaimed by garbage collection.
+    return MetadataParserXml(file(file_name).read(), file_name)
+
+  @property
+  def soup(self):
+    return self._soup
+
+  @property
+  def metadata(self):
+    return self._metadata
+
+  @staticmethod
+  def _find_direct_strings(element):
+    # Return only the text nodes that are direct children of the element
+    # (excludes text nested inside child tags).
+    if element.string is not None:
+      return [element.string]
+
+    return [i for i in element.contents if isinstance(i, NavigableString)]
+
+  @staticmethod
+  def _strings_no_nl(element):
+    # Concatenate the direct strings, stripping surrounding whitespace and
+    # newlines from each piece.
+    return "".join([i.strip() for i in MetadataParserXml._find_direct_strings(element)])
+
+  def _parse(self):
+    # Walk the validated soup and populate self.metadata with tags, types,
+    # entries, and clones, then construct the model graph.
+
+    tags = self.soup.tags
+    if tags is not None:
+      for tag in tags.find_all('tag'):
+        self.metadata.insert_tag(tag['id'], tag.string)
+
+    types = self.soup.types
+    if types is not None:
+      for tp in types.find_all('typedef'):
+        languages = {}
+        for lang in tp.find_all('language'):
+          languages[lang['name']] = lang.string
+
+        self.metadata.insert_type(tp['name'], 'typedef', languages=languages)
+
+    # add all entries, preserving the ordering of the XML file
+    # this is important for future ABI compatibility when generating code
+    entry_filter = lambda x: x.name == 'entry' or x.name == 'clone'
+    for entry in self.soup.find_all(entry_filter):
+      if entry.name == 'entry':
+        d = {
+              'name': fully_qualified_name(entry),
+              'type': entry['type'],
+              'kind': find_kind(entry),
+              'type_notes': entry.attrs.get('type_notes')
+            }
+
+        d2 = self._parse_entry(entry)
+        insert = self.metadata.insert_entry
+      else:
+        d = {
+           'name': entry['entry'],
+           'kind': find_kind(entry),
+           'target_kind': entry['kind'],
+          # no type since its the same
+          # no type_notes since its the same
+        }
+        d2 = {}
+
+        insert = self.metadata.insert_clone
+
+      d3 = self._parse_entry_optional(entry)
+
+      # Merge the three attribute dicts; later dicts win on key collisions.
+      entry_dict = dict(d.items() + d2.items() + d3.items())
+      insert(entry_dict)
+
+    self.metadata.construct_graph()
+
+  def _parse_entry(self, entry):
+    # Parse the attributes that only apply to <entry> (not <clone>).
+    d = {}
+
+    #
+    # Visibility
+    #
+    d['visibility'] = entry.get('visibility')
+
+    #
+    # Synthetic ?
+    #
+    d['synthetic'] = entry.get('synthetic') == 'true'
+
+    #
+    # Hardware Level (one of limited, legacy, full)
+    #
+    d['hwlevel'] = entry.get('hwlevel')
+
+    #
+    # Deprecated ?
+    #
+    d['deprecated'] = entry.get('deprecated') == 'true'
+
+    #
+    # Optional for non-full hardware level devices
+    #
+    d['optional'] = entry.get('optional') == 'true'
+
+    #
+    # Typedef
+    #
+    d['type_name'] = entry.get('typedef')
+
+    #
+    # Enum
+    #
+    if entry.get('enum', 'false') == 'true':
+
+      enum_values = []
+      enum_deprecateds = []
+      enum_optionals = []
+      enum_hiddens = []
+      enum_notes = {}
+      enum_ids = {}
+      for value in entry.enum.find_all('value'):
+
+        value_body = self._strings_no_nl(value)
+        enum_values.append(value_body)
+
+        if value.attrs.get('deprecated', 'false') == 'true':
+          enum_deprecateds.append(value_body)
+
+        if value.attrs.get('optional', 'false') == 'true':
+          enum_optionals.append(value_body)
+
+        if value.attrs.get('hidden', 'false') == 'true':
+          enum_hiddens.append(value_body)
+
+        notes = value.find('notes')
+        if notes is not None:
+          enum_notes[value_body] = notes.string
+
+        if value.attrs.get('id') is not None:
+          enum_ids[value_body] = value['id']
+
+      d['enum_values'] = enum_values
+      d['enum_deprecateds'] = enum_deprecateds
+      d['enum_optionals'] = enum_optionals
+      d['enum_hiddens'] = enum_hiddens
+      d['enum_notes'] = enum_notes
+      d['enum_ids'] = enum_ids
+      d['enum'] = True
+
+    #
+    # Container (Array/Tuple)
+    #
+    if entry.attrs.get('container') is not None:
+      container_name = entry['container']
+
+      array = entry.find('array')
+      if array is not None:
+        array_sizes = []
+        for size in array.find_all('size'):
+          array_sizes.append(size.string)
+        d['container_sizes'] = array_sizes
+
+      tupl = entry.find('tuple')
+      if tupl is not None:
+        tupl_values = []
+        for val in tupl.find_all('value'):
+          # NOTE(review): val.name is the tag name ('value'), not the text
+          # content -- presumably self._strings_no_nl(val) was intended;
+          # confirm against the templates that consume tuple_values.
+          tupl_values.append(val.name)
+        d['tuple_values'] = tupl_values
+        # NOTE(review): an int here, whereas the array branch stores a list
+        # of strings -- confirm consumers accept both types.
+        d['container_sizes'] = len(tupl_values)
+
+      d['container'] = container_name
+
+    return d
+
+  def _parse_entry_optional(self, entry):
+    # Parse the optional child elements shared by <entry> and <clone>.
+    d = {}
+
+    optional_elements = ['description', 'range', 'units', 'details', 'hal_details']
+    for i in optional_elements:
+      prop = find_child_tag(entry, i)
+
+      if prop is not None:
+        d[i] = prop.string
+
+    tag_ids = []
+    for tag in entry.find_all('tag'):
+      tag_ids.append(tag['id'])
+
+    d['tag_ids'] = tag_ids
+
+    return d
+
+  def render(self, template, output_name=None):
+    """
+    Render the metadata model using a Mako template as the view.
+
+    The template gets the metadata as an argument, as well as all
+    public attributes from the metadata_helpers module.
+
+    The output file is encoded with UTF-8.
+
+    Args:
+      template: path to a Mako template file
+      output_name: path to the output file, or None to use stdout
+    """
+    buf = StringIO.StringIO()
+    # Expose the buffer to metadata_helpers so helper functions can write
+    # directly into the render stream; reset to None after rendering.
+    metadata_helpers._context_buf = buf
+
+    # Collect every public attribute of metadata_helpers as a template global.
+    helpers = [(i, getattr(metadata_helpers, i))
+                for i in dir(metadata_helpers) if not i.startswith('_')]
+    helpers = dict(helpers)
+
+    lookup = TemplateLookup(directories=[os.getcwd()])
+    tpl = Template(filename=template, lookup=lookup)
+
+    ctx = Context(buf, metadata=self.metadata, **helpers)
+    tpl.render_context(ctx)
+
+    tpl_data = buf.getvalue()
+    metadata_helpers._context_buf = None
+    buf.close()
+
+    if output_name is None:
+      print tpl_data
+    else:
+      # Python 2 file() builtin; relies on GC to close the handle.
+      file(output_name, "w").write(tpl_data.encode('utf-8'))
+
+#####################
+#####################
+
+# Command-line driver: parse an XML file and render it through a Mako
+# template (to stdout when no output file is given).
+if __name__ == "__main__":
+  if len(sys.argv) <= 2:
+    print >> sys.stderr,                                                       \
+           "Usage: %s <filename.xml> <template.mako> [<output_file>]"          \
+           % (sys.argv[0])
+    # NOTE(review): exits 0 even on a usage error -- confirm callers do not
+    # rely on a non-zero status here.
+    sys.exit(0)
+
+  file_name = sys.argv[1]
+  template_name = sys.argv[2]
+  output_name = sys.argv[3] if len(sys.argv) > 3 else None
+  parser = MetadataParserXml.create_from_file(file_name)
+  parser.render(template_name, output_name)
+
+  sys.exit(0)
diff --git a/media/camera/docs/metadata_properties.xml b/media/camera/docs/metadata_properties.xml
new file mode 100644
index 0000000..b10a793
--- /dev/null
+++ b/media/camera/docs/metadata_properties.xml
@@ -0,0 +1,8624 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2012 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<metadata xmlns="http://schemas.android.com/service/camera/metadata/"
+xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata_properties.xsd">
+
+  <tags>
+    <tag id="BC">
+        Needed for backwards compatibility with old Java API
+    </tag>
+    <tag id="V1">
+        New features for first camera 2 release (API1)
+    </tag>
+    <tag id="RAW">
+        Needed for useful RAW image processing and DNG file support
+    </tag>
+    <tag id="HAL2">
+        Entry is only used by camera device HAL 2.x
+    </tag>
+    <tag id="FULL">
+        Entry is required for full hardware level devices, and optional for other hardware levels
+    </tag>
+    <tag id="DEPTH">
+        Entry is required for the depth capability.
+    </tag>
+    <tag id="REPROC">
+        Entry is required for the YUV or PRIVATE reprocessing capability.
+    </tag>
+    <tag id="FUTURE">
+        Entry is under-specified and is not required for now. This is for book-keeping purposes;
+        do not implement or use it, it may be revised in the future.
+    </tag>
+  </tags>
+
+  <types>
+    <typedef name="pairFloatFloat">
+      <language name="java">android.util.Pair&lt;Float,Float&gt;</language>
+    </typedef>
+    <typedef name="pairDoubleDouble">
+      <language name="java">android.util.Pair&lt;Double,Double&gt;</language>
+    </typedef>
+    <typedef name="rectangle">
+      <language name="java">android.graphics.Rect</language>
+    </typedef>
+    <typedef name="size">
+      <language name="java">android.util.Size</language>
+    </typedef>
+    <typedef name="string">
+      <language name="java">String</language>
+    </typedef>
+    <typedef name="boolean">
+      <language name="java">boolean</language>
+    </typedef>
+    <typedef name="imageFormat">
+      <language name="java">int</language>
+    </typedef>
+    <typedef name="streamConfigurationMap">
+      <language name="java">android.hardware.camera2.params.StreamConfigurationMap</language>
+    </typedef>
+    <typedef name="streamConfiguration">
+      <language name="java">android.hardware.camera2.params.StreamConfiguration</language>
+    </typedef>
+    <typedef name="streamConfigurationDuration">
+      <language name="java">android.hardware.camera2.params.StreamConfigurationDuration</language>
+    </typedef>
+    <typedef name="face">
+      <language name="java">android.hardware.camera2.params.Face</language>
+    </typedef>
+    <typedef name="meteringRectangle">
+      <language name="java">android.hardware.camera2.params.MeteringRectangle</language>
+    </typedef>
+    <typedef name="rangeFloat">
+      <language name="java">android.util.Range&lt;Float&gt;</language>
+    </typedef>
+    <typedef name="rangeInt">
+      <language name="java">android.util.Range&lt;Integer&gt;</language>
+    </typedef>
+    <typedef name="rangeLong">
+      <language name="java">android.util.Range&lt;Long&gt;</language>
+    </typedef>
+    <typedef name="colorSpaceTransform">
+      <language name="java">android.hardware.camera2.params.ColorSpaceTransform</language>
+    </typedef>
+    <typedef name="rggbChannelVector">
+      <language name="java">android.hardware.camera2.params.RggbChannelVector</language>
+    </typedef>
+    <typedef name="blackLevelPattern">
+      <language name="java">android.hardware.camera2.params.BlackLevelPattern</language>
+    </typedef>
+    <typedef name="enumList">
+      <language name="java">int</language>
+    </typedef>
+    <typedef name="sizeF">
+      <language name="java">android.util.SizeF</language>
+    </typedef>
+    <typedef name="point">
+      <language name="java">android.graphics.Point</language>
+    </typedef>
+    <typedef name="tonemapCurve">
+      <language name="java">android.hardware.camera2.params.TonemapCurve</language>
+    </typedef>
+    <typedef name="lensShadingMap">
+      <language name="java">android.hardware.camera2.params.LensShadingMap</language>
+    </typedef>
+    <typedef name="location">
+      <language name="java">android.location.Location</language>
+    </typedef>
+    <typedef name="highSpeedVideoConfiguration">
+      <language name="java">android.hardware.camera2.params.HighSpeedVideoConfiguration</language>
+    </typedef>
+    <typedef name="reprocessFormatsMap">
+      <language name="java">android.hardware.camera2.params.ReprocessFormatsMap</language>
+    </typedef>
+  </types>
+
+  <namespace name="android">
+    <section name="colorCorrection">
+      <controls>
+        <entry name="mode" type="byte" visibility="public" enum="true" hwlevel="full">
+          <enum>
+            <value>TRANSFORM_MATRIX
+              <notes>Use the android.colorCorrection.transform matrix
+                and android.colorCorrection.gains to do color conversion.
+
+                All advanced white balance adjustments (not specified
+                by our white balance pipeline) must be disabled.
+
+                If AWB is enabled with `android.control.awbMode != OFF`, then
+                TRANSFORM_MATRIX is ignored. The camera device will override
+                this value to either FAST or HIGH_QUALITY.
+              </notes>
+            </value>
+            <value>FAST
+              <notes>Color correction processing must not slow down
+              capture rate relative to sensor raw output.
+
+              Advanced white balance adjustments above and beyond
+              the specified white balance pipeline may be applied.
+
+              If AWB is enabled with `android.control.awbMode != OFF`, then
+              the camera device uses the last frame's AWB values
+              (or defaults if AWB has never been run).
+            </notes>
+            </value>
+            <value>HIGH_QUALITY
+              <notes>Color correction processing operates at improved
+              quality but the capture rate might be reduced (relative to sensor
+              raw output rate)
+
+              Advanced white balance adjustments above and beyond
+              the specified white balance pipeline may be applied.
+
+              If AWB is enabled with `android.control.awbMode != OFF`, then
+              the camera device uses the last frame's AWB values
+              (or defaults if AWB has never been run).
+            </notes>
+            </value>
+          </enum>
+
+          <description>
+          The mode control selects how the image data is converted from the
+          sensor's native color into linear sRGB color.
+          </description>
+          <details>
+          When auto-white balance (AWB) is enabled with android.control.awbMode, this
+          control is overridden by the AWB routine. When AWB is disabled, the
+          application controls how the color mapping is performed.
+
+          We define the expected processing pipeline below. For consistency
+          across devices, this is always the case with TRANSFORM_MATRIX.
+
+          When either FAST or HIGH_QUALITY is used, the camera device may
+          do additional processing but android.colorCorrection.gains and
+          android.colorCorrection.transform will still be provided by the
+          camera device (in the results) and be roughly correct.
+
+          Switching to TRANSFORM_MATRIX and using the data provided from
+          FAST or HIGH_QUALITY will yield a picture with the same white point
+          as what was produced by the camera device in the earlier frame.
+
+          The expected processing pipeline is as follows:
+
+          ![White balance processing pipeline](android.colorCorrection.mode/processing_pipeline.png)
+
+          The white balance is encoded by two values, a 4-channel white-balance
+          gain vector (applied in the Bayer domain), and a 3x3 color transform
+          matrix (applied after demosaic).
+
+          The 4-channel white-balance gains are defined as:
+
+              android.colorCorrection.gains = [ R G_even G_odd B ]
+
+          where `G_even` is the gain for green pixels on even rows of the
+          output, and `G_odd` is the gain for green pixels on the odd rows.
+          These may be identical for a given camera device implementation; if
+          the camera device does not support a separate gain for even/odd green
+          channels, it will use the `G_even` value, and write `G_odd` equal to
+          `G_even` in the output result metadata.
+
+          The matrices for color transforms are defined as a 9-entry vector:
+
+              android.colorCorrection.transform = [ I0 I1 I2 I3 I4 I5 I6 I7 I8 ]
+
+          which define a transform from input sensor colors, `P_in = [ r g b ]`,
+          to output linear sRGB, `P_out = [ r' g' b' ]`,
+
+          with colors as follows:
+
+              r' = I0r + I1g + I2b
+              g' = I3r + I4g + I5b
+              b' = I6r + I7g + I8b
+
+          Both the input and output value ranges must match. Overflow/underflow
+          values are clipped to fit within the range.
+          </details>
+          <hal_details>
+          HAL must support both FAST and HIGH_QUALITY if color correction control is available
+          on the camera device, but the underlying implementation can be the same for both modes.
+          That is, if the highest quality implementation on the camera device does not slow down
+          capture rate, then FAST and HIGH_QUALITY should generate the same output.
+          </hal_details>
+        </entry>
+        <entry name="transform" type="rational" visibility="public"
+               type_notes="3x3 rational matrix in row-major order"
+               container="array" typedef="colorSpaceTransform" hwlevel="full">
+          <array>
+            <size>3</size>
+            <size>3</size>
+          </array>
+          <description>A color transform matrix to use to transform
+          from sensor RGB color space to output linear sRGB color space.
+          </description>
+          <units>Unitless scale factors</units>
+          <details>This matrix is either set by the camera device when the request
+          android.colorCorrection.mode is not TRANSFORM_MATRIX, or
+          directly by the application in the request when the
+          android.colorCorrection.mode is TRANSFORM_MATRIX.
+
+          In the latter case, the camera device may round the matrix to account
+          for precision issues; the final rounded matrix should be reported back
+          in this matrix result metadata. The transform should keep the magnitude
+          of the output color values within `[0, 1.0]` (assuming input color
+          values is within the normalized range `[0, 1.0]`), or clipping may occur.
+
+          The valid range of each matrix element varies on different devices, but
+          values within [-1.5, 3.0] are guaranteed not to be clipped.
+          </details>
+        </entry>
+        <entry name="gains" type="float" visibility="public"
+               type_notes="A 1D array of floats for 4 color channel gains"
+               container="array" typedef="rggbChannelVector" hwlevel="full">
+          <array>
+            <size>4</size>
+          </array>
+          <description>Gains applying to Bayer raw color channels for
+          white-balance.</description>
+          <units>Unitless gain factors</units>
+          <details>
+          These per-channel gains are either set by the camera device
+          when the request android.colorCorrection.mode is not
+          TRANSFORM_MATRIX, or directly by the application in the
+          request when the android.colorCorrection.mode is
+          TRANSFORM_MATRIX.
+
+          The gains in the result metadata are the gains actually
+          applied by the camera device to the current frame.
+
+          The valid range of gains varies on different devices, but gains
+          between [1.0, 3.0] are guaranteed not to be clipped. Even if a given
+          device allows gains below 1.0, this is usually not recommended because
+          this can create color artifacts.
+          </details>
+          <hal_details>
+          The 4-channel white-balance gains are defined in
+          the order of `[R G_even G_odd B]`, where `G_even` is the gain
+          for green pixels on even rows of the output, and `G_odd`
+          is the gain for green pixels on the odd rows.
+
+          If a HAL does not support a separate gain for even/odd green
+          channels, it must use the `G_even` value, and write
+          `G_odd` equal to `G_even` in the output result metadata.
+          </hal_details>
+        </entry>
+        <entry name="aberrationMode" type="byte" visibility="public" enum="true" hwlevel="legacy">
+          <enum>
+            <value>OFF
+              <notes>
+                No aberration correction is applied.
+              </notes>
+            </value>
+            <value>FAST
+              <notes>
+                Aberration correction will not slow down capture rate
+                relative to sensor raw output.
+            </notes>
+            </value>
+            <value>HIGH_QUALITY
+              <notes>
+                Aberration correction operates at improved quality but the capture rate might be
+                reduced (relative to sensor raw output rate)
+            </notes>
+            </value>
+          </enum>
+          <description>
+            Mode of operation for the chromatic aberration correction algorithm.
+          </description>
+          <range>android.colorCorrection.availableAberrationModes</range>
+          <details>
+            Chromatic (color) aberration is caused by the fact that different wavelengths of light
+            cannot focus on the same point after exiting from the lens. This metadata defines
+            the high level control of chromatic aberration correction algorithm, which aims to
+            minimize the chromatic artifacts that may occur along the object boundaries in an
+            image.
+
+            FAST/HIGH_QUALITY both mean that camera device determined aberration
+            correction will be applied. HIGH_QUALITY mode indicates that the camera device will
+            use the highest-quality aberration correction algorithms, even if it slows down
+            capture rate. FAST means the camera device will not slow down capture rate when
+            applying aberration correction.
+
+            LEGACY devices will always be in FAST mode.
+          </details>
+        </entry>
+      </controls>
+      <dynamic>
+        <clone entry="android.colorCorrection.mode" kind="controls">
+        </clone>
+        <clone entry="android.colorCorrection.transform" kind="controls">
+        </clone>
+        <clone entry="android.colorCorrection.gains" kind="controls">
+        </clone>
+        <clone entry="android.colorCorrection.aberrationMode" kind="controls">
+        </clone>
+      </dynamic>
+      <static>
+        <entry name="availableAberrationModes" type="byte" visibility="public"
+        type_notes="list of enums" container="array" typedef="enumList" hwlevel="legacy">
+          <array>
+            <size>n</size>
+          </array>
+          <description>
+            List of aberration correction modes for android.colorCorrection.aberrationMode that are
+            supported by this camera device.
+          </description>
+          <range>Any value listed in android.colorCorrection.aberrationMode</range>
+          <details>
+            This key lists the valid modes for android.colorCorrection.aberrationMode.  If no
+            aberration correction modes are available for a device, this list will solely include
+            OFF mode. All camera devices will support either OFF or FAST mode.
+
+            Camera devices that support the MANUAL_POST_PROCESSING capability will always list
+            OFF mode. This includes all FULL level devices.
+
+            LEGACY devices will always only support FAST mode.
+          </details>
+          <hal_details>
+            HAL must support both FAST and HIGH_QUALITY if chromatic aberration control is available
+            on the camera device, but the underlying implementation can be the same for both modes.
+            That is, if the highest quality implementation on the camera device does not slow down
+            capture rate, then FAST and HIGH_QUALITY will generate the same output.
+          </hal_details>
+          <tag id="V1" />
+        </entry>
+      </static>
+    </section>
+    <section name="control">
+      <controls>
+        <entry name="aeAntibandingMode" type="byte" visibility="public"
+               enum="true" hwlevel="legacy">
+          <enum>
+            <value>OFF
+              <notes>
+                The camera device will not adjust exposure duration to
+                avoid banding problems.
+              </notes>
+            </value>
+            <value>50HZ
+              <notes>
+                The camera device will adjust exposure duration to
+                avoid banding problems with 50Hz illumination sources.
+              </notes>
+            </value>
+            <value>60HZ
+              <notes>
+                The camera device will adjust exposure duration to
+                avoid banding problems with 60Hz illumination
+                sources.
+              </notes>
+            </value>
+            <value>AUTO
+              <notes>
+                The camera device will automatically adapt its
+                antibanding routine to the current illumination
+                condition. This is the default mode if AUTO is
+                available on given camera device.
+              </notes>
+            </value>
+          </enum>
+          <description>
+            The desired setting for the camera device's auto-exposure
+            algorithm's antibanding compensation.
+          </description>
+          <range>
+            android.control.aeAvailableAntibandingModes
+          </range>
+          <details>
+            Some kinds of lighting fixtures, such as some fluorescent
+            lights, flicker at the rate of the power supply frequency
+            (60Hz or 50Hz, depending on country). While this is
+            typically not noticeable to a person, it can be visible to
+            a camera device. If a camera sets its exposure time to the
+            wrong value, the flicker may become visible in the
+            viewfinder as flicker or in a final captured image, as a
+            set of variable-brightness bands across the image.
+
+            Therefore, the auto-exposure routines of camera devices
+            include antibanding routines that ensure that the chosen
+            exposure value will not cause such banding. The choice of
+            exposure time depends on the rate of flicker, which the
+            camera device can detect automatically, or the expected
+            rate can be selected by the application using this
+            control.
+
+            A given camera device may not support all of the possible
+            options for the antibanding mode. The
+            android.control.aeAvailableAntibandingModes key contains
+            the available modes for a given camera device.
+
+            AUTO mode is the default if it is available on given
+            camera device. When AUTO mode is not available, the
+            default will be either 50HZ or 60HZ, and both 50HZ
+            and 60HZ will be available.
+
+            If manual exposure control is enabled (by setting
+            android.control.aeMode or android.control.mode to OFF),
+            then this setting has no effect, and the application must
+            ensure it selects exposure times that do not cause banding
+            issues. The android.statistics.sceneFlicker key can assist
+            the application in this.
+          </details>
+          <hal_details>
+            For all capture request templates, this field must be set
+            to AUTO if AUTO mode is available. If AUTO is not available,
+            the default must be either 50HZ or 60HZ, and both 50HZ and
+            60HZ must be available.
+
+            If manual exposure control is enabled (by setting
+            android.control.aeMode or android.control.mode to OFF),
+            then the exposure values provided by the application must not be
+            adjusted for antibanding.
+          </hal_details>
+          <tag id="BC" />
+        </entry>
+        <entry name="aeExposureCompensation" type="int32" visibility="public" hwlevel="legacy">
+          <description>Adjustment to auto-exposure (AE) target image
+          brightness.</description>
+          <units>Compensation steps</units>
+          <range>android.control.aeCompensationRange</range>
+          <details>
+          The adjustment is measured as a count of steps, with the
+          step size defined by android.control.aeCompensationStep and the
+          allowed range by android.control.aeCompensationRange.
+
+          For example, if the exposure value (EV) step is 0.333, '6'
+          will mean an exposure compensation of +2 EV; -3 will mean an
+          exposure compensation of -1 EV. One EV represents a doubling
+          of image brightness. Note that this control will only be
+          effective if android.control.aeMode `!=` OFF. This control
+          will take effect even when android.control.aeLock `== true`.
+
+          In the event of exposure compensation value being changed, camera device
+          may take several frames to reach the newly requested exposure target.
+          During that time, android.control.aeState field will be in the SEARCHING
+          state. Once the new exposure target is reached, android.control.aeState will
+          change from SEARCHING to either CONVERGED, LOCKED (if AE lock is enabled), or
+          FLASH_REQUIRED (if the scene is too dark for still capture).
+          </details>
+          <tag id="BC" />
+        </entry>
+        <entry name="aeLock" type="byte" visibility="public" enum="true"
+               typedef="boolean" hwlevel="legacy">
+          <enum>
+            <value>OFF
+            <notes>Auto-exposure lock is disabled; the AE algorithm
+            is free to update its parameters.</notes></value>
+            <value>ON
+            <notes>Auto-exposure lock is enabled; the AE algorithm
+            must not update the exposure and sensitivity parameters
+            while the lock is active.
+
+            android.control.aeExposureCompensation setting changes
+            will still take effect while auto-exposure is locked.
+
+            Some rare LEGACY devices may not support
+            this, in which case the value will always be overridden to OFF.
+            </notes></value>
+          </enum>
+          <description>Whether auto-exposure (AE) is currently locked to its latest
+          calculated values.</description>
+          <details>
+          When set to `true` (ON), the AE algorithm is locked to its latest parameters,
+          and will not change exposure settings until the lock is set to `false` (OFF).
+
+          Note that even when AE is locked, the flash may be fired if
+          the android.control.aeMode is ON_AUTO_FLASH /
+          ON_ALWAYS_FLASH / ON_AUTO_FLASH_REDEYE.
+
+          When android.control.aeExposureCompensation is changed, even if the AE lock
+          is ON, the camera device will still adjust its exposure value.
+
+          If AE precapture is triggered (see android.control.aePrecaptureTrigger)
+          when AE is already locked, the camera device will not change the exposure time
+          (android.sensor.exposureTime) and sensitivity (android.sensor.sensitivity)
+          parameters. The flash may be fired if the android.control.aeMode
+          is ON_AUTO_FLASH/ON_AUTO_FLASH_REDEYE and the scene is too dark. If the
+          android.control.aeMode is ON_ALWAYS_FLASH, the scene may become overexposed.
+          Similarly, AE precapture trigger CANCEL has no effect when AE is already locked.
+
+          When an AE precapture sequence is triggered, AE unlock will not be able to unlock
+          the AE if AE is locked by the camera device internally during precapture metering
+          sequence. In other words, submitting requests with AE unlock has no effect for an
+          ongoing precapture metering sequence. Otherwise, the precapture metering sequence
+          will never succeed in a sequence of preview requests where AE lock is always set
+          to `false`.
+
+          Since the camera device has a pipeline of in-flight requests, the settings that
+          get locked do not necessarily correspond to the settings that were present in the
+          latest capture result received from the camera device, since additional captures
+          and AE updates may have occurred even before the result was sent out. If an
+          application is switching between automatic and manual control and wishes to eliminate
+          any flicker during the switch, the following procedure is recommended:
+
+            1. Starting in auto-AE mode:
+            2. Lock AE
+            3. Wait for the first result to be output that has the AE locked
+            4. Copy exposure settings from that result into a request, set the request to manual AE
+            5. Submit the capture request, proceed to run manual AE as desired.
+
+          See android.control.aeState for AE lock related state transition details.
+          </details>
+          <tag id="BC" />
+        </entry>
+        <entry name="aeMode" type="byte" visibility="public" enum="true" hwlevel="legacy">
+          <enum>
+            <value>OFF
+              <notes>
+                The camera device's autoexposure routine is disabled.
+
+                The application-selected android.sensor.exposureTime,
+                android.sensor.sensitivity and
+                android.sensor.frameDuration are used by the camera
+                device, along with android.flash.* fields, if there's
+                a flash unit for this camera device.
+
+                Note that auto-white balance (AWB) and auto-focus (AF)
+                behavior is device dependent when AE is in OFF mode.
+                To have consistent behavior across different devices,
+                it is recommended to either set AWB and AF to OFF mode
+                or lock AWB and AF before setting AE to OFF.
+                See android.control.awbMode, android.control.afMode,
+                android.control.awbLock, and android.control.afTrigger
+                for more details.
+
+                LEGACY devices do not support the OFF mode and will
+                override attempts to use this value to ON.
+              </notes>
+            </value>
+            <value>ON
+              <notes>
+                The camera device's autoexposure routine is active,
+                with no flash control.
+
+                The application's values for
+                android.sensor.exposureTime,
+                android.sensor.sensitivity, and
+                android.sensor.frameDuration are ignored. The
+                application has control over the various
+                android.flash.* fields.
+              </notes>
+            </value>
+            <value>ON_AUTO_FLASH
+              <notes>
+                Like ON, except that the camera device also controls
+                the camera's flash unit, firing it in low-light
+                conditions.
+
+                The flash may be fired during a precapture sequence
+                (triggered by android.control.aePrecaptureTrigger) and
+                may be fired for captures for which the
+                android.control.captureIntent field is set to
+                STILL_CAPTURE
+              </notes>
+            </value>
+            <value>ON_ALWAYS_FLASH
+              <notes>
+                Like ON, except that the camera device also controls
+                the camera's flash unit, always firing it for still
+                captures.
+
+                The flash may be fired during a precapture sequence
+                (triggered by android.control.aePrecaptureTrigger) and
+                will always be fired for captures for which the
+                android.control.captureIntent field is set to
+                STILL_CAPTURE
+              </notes>
+            </value>
+            <value>ON_AUTO_FLASH_REDEYE
+              <notes>
+                Like ON_AUTO_FLASH, but with automatic red eye
+                reduction.
+
+                If deemed necessary by the camera device, a red eye
+                reduction flash will fire during the precapture
+                sequence.
+              </notes>
+            </value>
+          </enum>
+          <description>The desired mode for the camera device's
+          auto-exposure routine.</description>
+          <range>android.control.aeAvailableModes</range>
+          <details>
+            This control is only effective if android.control.mode is
+            AUTO.
+
+            When set to any of the ON modes, the camera device's
+            auto-exposure routine is enabled, overriding the
+            application's selected exposure time, sensor sensitivity,
+            and frame duration (android.sensor.exposureTime,
+            android.sensor.sensitivity, and
+            android.sensor.frameDuration). If one of the FLASH modes
+            is selected, the camera device's flash unit controls are
+            also overridden.
+
+            The FLASH modes are only available if the camera device
+            has a flash unit (android.flash.info.available is `true`).
+
+            If flash TORCH mode is desired, this field must be set to
+            ON or OFF, and android.flash.mode set to TORCH.
+
+            When set to any of the ON modes, the values chosen by the
+            camera device auto-exposure routine for the overridden
+            fields for a given capture will be available in its
+            CaptureResult.
+          </details>
+          <tag id="BC" />
+        </entry>
+        <entry name="aeRegions" type="int32" visibility="public"
+            optional="true" container="array" typedef="meteringRectangle">
+          <array>
+            <size>5</size>
+            <size>area_count</size>
+          </array>
+          <description>List of metering areas to use for auto-exposure adjustment.</description>
+          <units>Pixel coordinates within android.sensor.info.activeArraySize</units>
+          <range>Coordinates must be between `[(0,0), (width, height))` of
+          android.sensor.info.activeArraySize</range>
+          <details>
+              Not available if android.control.maxRegionsAe is 0.
+              Otherwise will always be present.
+
+              The maximum number of regions supported by the device is determined by the value
+              of android.control.maxRegionsAe.
+
+              The coordinate system is based on the active pixel array,
+              with (0,0) being the top-left pixel in the active pixel array, and
+              (android.sensor.info.activeArraySize.width - 1,
+              android.sensor.info.activeArraySize.height - 1) being the
+              bottom-right pixel in the active pixel array.
+
+              The weight must be within `[0, 1000]`, and represents a weight
+              for every pixel in the area. This means that a large metering area
+              with the same weight as a smaller area will have more effect in
+              the metering result. Metering areas can partially overlap and the
+              camera device will add the weights in the overlap region.
+
+              The weights are relative to weights of other exposure metering regions, so if only one
+              region is used, all non-zero weights will have the same effect. A region with 0
+              weight is ignored.
+
+              If all regions have 0 weight, then no specific metering area needs to be used by the
+              camera device.
+
+              If the metering region is outside the used android.scaler.cropRegion returned in
+              capture result metadata, the camera device will ignore the sections outside the crop
+              region and output only the intersection rectangle as the metering region in the result
+              metadata.  If the region is entirely outside the crop region, it will be ignored and
+              not reported in the result metadata.
+          </details>
+          <hal_details>
+              The HAL level representation of MeteringRectangle[] is a
+              int[5 * area_count].
+              Every five elements represent a metering region of
+              (xmin, ymin, xmax, ymax, weight).
+              The rectangle is defined to be inclusive on xmin and ymin, but
+              exclusive on xmax and ymax.
+          </hal_details>
+          <tag id="BC" />
+        </entry>
+        <entry name="aeTargetFpsRange" type="int32" visibility="public"
+               container="array" typedef="rangeInt" hwlevel="legacy">
+          <array>
+            <size>2</size>
+          </array>
+          <description>Range over which the auto-exposure routine can
+          adjust the capture frame rate to maintain good
+          exposure.</description>
+          <units>Frames per second (FPS)</units>
+          <range>Any of the entries in android.control.aeAvailableTargetFpsRanges</range>
+          <details>Only constrains auto-exposure (AE) algorithm, not
+          manual control of android.sensor.exposureTime and
+          android.sensor.frameDuration.</details>
+          <tag id="BC" />
+        </entry>
+        <entry name="aePrecaptureTrigger" type="byte" visibility="public"
+               enum="true" hwlevel="limited">
+          <enum>
+            <value>IDLE
+              <notes>The trigger is idle.</notes>
+            </value>
+            <value>START
+              <notes>The precapture metering sequence will be started
+              by the camera device.
+
+              The exact effect of the precapture trigger depends on
+              the current AE mode and state.</notes>
+            </value>
+            <value>CANCEL
+              <notes>The camera device will cancel any currently active or completed
+              precapture metering sequence; the auto-exposure routine will return to its
+              initial state.</notes>
+            </value>
+          </enum>
+          <description>Whether the camera device will trigger a precapture
+          metering sequence when it processes this request.</description>
+          <details>This entry is normally set to IDLE, or is not
+          included at all in the request settings. When included and
+          set to START, the camera device will trigger the auto-exposure (AE)
+          precapture metering sequence.
+
+          When set to CANCEL, the camera device will cancel any active
+          precapture metering trigger, and return to its initial AE state.
+          If a precapture metering sequence is already completed, and the camera
+          device has implicitly locked the AE for subsequent still capture, the
+          CANCEL trigger will unlock the AE and return to its initial AE state.
+
+          The precapture sequence should be triggered before starting a
+          high-quality still capture for final metering decisions to
+          be made, and for firing pre-capture flash pulses to estimate
+          scene brightness and required final capture flash power, when
+          the flash is enabled.
+
+          Normally, this entry should be set to START for only a
+          single request, and the application should wait until the
+          sequence completes before starting a new one.
+
+          When a precapture metering sequence is finished, the camera device
+          may lock the auto-exposure routine internally to be able to accurately expose the
+          subsequent still capture image (`android.control.captureIntent == STILL_CAPTURE`).
+          For this case, the AE may not resume normal scan if no subsequent still capture is
+          submitted. To ensure that the AE routine restarts normal scan, the application should
+          submit a request with `android.control.aeLock == true`, followed by a request
+          with `android.control.aeLock == false`, if the application decides not to submit a
+          still capture request after the precapture sequence completes. Alternatively, for
+          API level 23 or newer devices, the CANCEL can be used to unlock the camera device
+          internally locked AE if the application doesn't submit a still capture request after
+          the AE precapture trigger. Note that the CANCEL was added in API level 23, and must not
+          be used in devices that have earlier API levels.
+
+          The exact effect of auto-exposure (AE) precapture trigger
+          depends on the current AE mode and state; see
+          android.control.aeState for AE precapture state transition
+          details.
+
+          On LEGACY-level devices, the precapture trigger is not supported;
+          capturing a high-resolution JPEG image will automatically trigger a
+          precapture sequence before the high-resolution capture, including
+          potentially firing a pre-capture flash.
+
+          Using the precapture trigger and the auto-focus trigger android.control.afTrigger
+          simultaneously is allowed. However, since these triggers often require cooperation between
+          the auto-focus and auto-exposure routines (for example, the flash may need to be enabled for a
+          focus sweep), the camera device may delay acting on a later trigger until the previous
+          trigger has been fully handled. This may lead to longer intervals between the trigger and
+          changes to android.control.aeState indicating the start of the precapture sequence, for
+          example.
+
+          If both the precapture and the auto-focus trigger are activated on the same request, then
+          the camera device will complete them in the optimal order for that device.
+          </details>
+          <hal_details>
+          The HAL must support triggering the AE precapture trigger while an AF trigger is active
+          (and vice versa), or at the same time as the AF trigger.  It is acceptable for the HAL to
+          treat these as two consecutive triggers, for example handling the AF trigger and then the
+          AE trigger.  Or the HAL may choose to optimize the case with both triggers fired at once,
+          to minimize the latency for converging both focus and exposure/flash usage.
+          </hal_details>
+          <tag id="BC" />
+        </entry>
+        <entry name="afMode" type="byte" visibility="public" enum="true"
+               hwlevel="legacy">
+          <enum>
+            <value>OFF
+            <notes>The auto-focus routine does not control the lens;
+            android.lens.focusDistance is controlled by the
+            application.</notes></value>
+            <value>AUTO
+            <notes>Basic automatic focus mode.
+
+            In this mode, the lens does not move unless
+            the autofocus trigger action is called. When that trigger
+            is activated, AF will transition to ACTIVE_SCAN, then to
+            the outcome of the scan (FOCUSED or NOT_FOCUSED).
+
+            Always supported if lens is not fixed focus.
+
+            Use android.lens.info.minimumFocusDistance to determine if lens
+            is fixed-focus.
+
+            Triggering AF_CANCEL resets the lens position to default,
+            and sets the AF state to INACTIVE.</notes></value>
+            <value>MACRO
+            <notes>Close-up focusing mode.
+
+            In this mode, the lens does not move unless the
+            autofocus trigger action is called. When that trigger is
+            activated, AF will transition to ACTIVE_SCAN, then to
+            the outcome of the scan (FOCUSED or NOT_FOCUSED). This
+            mode is optimized for focusing on objects very close to
+            the camera.
+
+            When that trigger is activated, AF will transition to
+            ACTIVE_SCAN, then to the outcome of the scan (FOCUSED or
+            NOT_FOCUSED). Triggering cancel AF resets the lens
+            position to default, and sets the AF state to
+            INACTIVE.</notes></value>
+            <value>CONTINUOUS_VIDEO
+            <notes>In this mode, the AF algorithm modifies the lens
+            position continually to attempt to provide a
+            constantly-in-focus image stream.
+
+            The focusing behavior should be suitable for good quality
+            video recording; typically this means slower focus
+            movement and no overshoots. When the AF trigger is not
+            involved, the AF algorithm should start in INACTIVE state,
+            and then transition into PASSIVE_SCAN and PASSIVE_FOCUSED
+            states as appropriate. When the AF trigger is activated,
+            the algorithm should immediately transition into
+            AF_FOCUSED or AF_NOT_FOCUSED as appropriate, and lock the
+            lens position until a cancel AF trigger is received.
+
+            Once cancel is received, the algorithm should transition
+            back to INACTIVE and resume passive scan. Note that this
+            behavior is not identical to CONTINUOUS_PICTURE, since an
+            ongoing PASSIVE_SCAN must immediately be
+            canceled.</notes></value>
+            <value>CONTINUOUS_PICTURE
+            <notes>In this mode, the AF algorithm modifies the lens
+            position continually to attempt to provide a
+            constantly-in-focus image stream.
+
+            The focusing behavior should be suitable for still image
+            capture; typically this means focusing as fast as
+            possible. When the AF trigger is not involved, the AF
+            algorithm should start in INACTIVE state, and then
+            transition into PASSIVE_SCAN and PASSIVE_FOCUSED states as
+            appropriate as it attempts to maintain focus. When the AF
+            trigger is activated, the algorithm should finish its
+            PASSIVE_SCAN if active, and then transition into
+            AF_FOCUSED or AF_NOT_FOCUSED as appropriate, and lock the
+            lens position until a cancel AF trigger is received.
+
+            When the AF cancel trigger is activated, the algorithm
+            should transition back to INACTIVE and then act as if it
+            has just been started.</notes></value>
+            <value>EDOF
+            <notes>Extended depth of field (digital focus) mode.
+
+            The camera device will produce images with an extended
+            depth of field automatically; no special focusing
+            operations need to be done before taking a picture.
+
+            AF triggers are ignored, and the AF state will always be
+            INACTIVE.</notes></value>
+          </enum>
+          <description>Whether auto-focus (AF) is currently enabled, and what
+          mode it is set to.</description>
+          <range>android.control.afAvailableModes</range>
+          <details>Only effective if android.control.mode = AUTO and the lens is not fixed focus
+          (i.e. `android.lens.info.minimumFocusDistance &gt; 0`). Also note that
+          when android.control.aeMode is OFF, the behavior of AF is device
+          dependent. It is recommended to lock AF by using android.control.afTrigger before
+          setting android.control.aeMode to OFF, or set AF mode to OFF when AE is OFF.
+
+          If the lens is controlled by the camera device auto-focus algorithm,
+          the camera device will report the current AF status in android.control.afState
+          in result metadata.</details>
+          <hal_details>
+          When afMode is AUTO or MACRO, the lens must not move until an AF trigger is sent in a
+          request (android.control.afTrigger `==` START). After an AF trigger, the afState will end
+          up with either FOCUSED_LOCKED or NOT_FOCUSED_LOCKED state (see
+          android.control.afState for detailed state transitions), which indicates that the lens is
+          locked and will not move. If camera movement (e.g. tilting camera) causes the lens to move
+          after the lens is locked, the HAL must compensate this movement appropriately such that
+          the same focal plane remains in focus.
+
+          When afMode is one of the continuous auto focus modes, the HAL is free to start a AF
+          scan whenever it's not locked. When the lens is locked after an AF trigger
+          (see android.control.afState for detailed state transitions), the HAL should maintain the
+          same lock behavior as above.
+
+          When afMode is OFF, the application controls focus manually. The accuracy of the
+          focus distance control depends on the android.lens.info.focusDistanceCalibration.
+          However, the lens must not move regardless of the camera movement for any focus distance
+          manual control.
+
+          To put this in concrete terms, if the camera has lens elements which may move based on
+          camera orientation or motion (e.g. due to gravity), then the HAL must drive the lens to
+          remain in a fixed position invariant to the camera's orientation or motion, for example,
+          by using accelerometer measurements in the lens control logic. This is a typical issue
+          that will arise on camera modules with open-loop VCMs.
+          </hal_details>
+          <tag id="BC" />
+        </entry>
+        <entry name="afRegions" type="int32" visibility="public"
+               optional="true" container="array" typedef="meteringRectangle">
+          <array>
+            <size>5</size>
+            <size>area_count</size>
+          </array>
+          <description>List of metering areas to use for auto-focus.</description>
+          <units>Pixel coordinates within android.sensor.info.activeArraySize</units>
+          <range>Coordinates must be between `[(0,0), (width, height))` of
+          android.sensor.info.activeArraySize</range>
+          <details>
+              Not available if android.control.maxRegionsAf is 0.
+              Otherwise will always be present.
+
+              The maximum number of focus areas supported by the device is determined by the value
+              of android.control.maxRegionsAf.
+
+              The coordinate system is based on the active pixel array,
+              with (0,0) being the top-left pixel in the active pixel array, and
+              (android.sensor.info.activeArraySize.width - 1,
+              android.sensor.info.activeArraySize.height - 1) being the
+              bottom-right pixel in the active pixel array.
+
+              The weight must be within `[0, 1000]`, and represents a weight
+              for every pixel in the area. This means that a large metering area
+              with the same weight as a smaller area will have more effect in
+              the metering result. Metering areas can partially overlap and the
+              camera device will add the weights in the overlap region.
+
+              The weights are relative to weights of other metering regions, so if only one region
+              is used, all non-zero weights will have the same effect. A region with 0 weight is
+              ignored.
+
+              If all regions have 0 weight, then no specific metering area needs to be used by the
+              camera device.
+
+              If the metering region is outside the used android.scaler.cropRegion returned in
+              capture result metadata, the camera device will ignore the sections outside the crop
+              region and output only the intersection rectangle as the metering region in the result
+              metadata. If the region is entirely outside the crop region, it will be ignored and
+              not reported in the result metadata.
+          </details>
+          <hal_details>
+              The HAL level representation of MeteringRectangle[] is a
+              int[5 * area_count].
+              Every five elements represent a metering region of
+              (xmin, ymin, xmax, ymax, weight).
+              The rectangle is defined to be inclusive on xmin and ymin, but
+              exclusive on xmax and ymax.
+          </hal_details>
+          <tag id="BC" />
+        </entry>
+        <entry name="afTrigger" type="byte" visibility="public" enum="true"
+               hwlevel="legacy">
+          <enum>
+            <value>IDLE
+              <notes>The trigger is idle.</notes>
+            </value>
+            <value>START
+              <notes>Autofocus will trigger now.</notes>
+            </value>
+            <value>CANCEL
+              <notes>Autofocus will return to its initial
+              state, and cancel any currently active trigger.</notes>
+            </value>
+          </enum>
+          <description>
+          Whether the camera device will trigger autofocus for this request.
+          </description>
+          <details>This entry is normally set to IDLE, or is not
+          included at all in the request settings.
+
+          When included and set to START, the camera device will trigger the
+          autofocus algorithm. If autofocus is disabled, this trigger has no effect.
+
+          When set to CANCEL, the camera device will cancel any active trigger,
+          and return to its initial AF state.
+
+          Generally, applications should set this entry to START or CANCEL for only a
+          single capture, and then return it to IDLE (or not set at all). Specifying
+          START for multiple captures in a row means restarting the AF operation over
+          and over again.
+
+          See android.control.afState for what the trigger means for each AF mode.
+
+          Using the autofocus trigger and the precapture trigger android.control.aePrecaptureTrigger
+          simultaneously is allowed. However, since these triggers often require cooperation between
+          the auto-focus and auto-exposure routines (for example, the flash may need to be enabled for a
+          focus sweep), the camera device may delay acting on a later trigger until the previous
+          trigger has been fully handled. This may lead to longer intervals between the trigger and
+          changes to android.control.afState, for example.
+          </details>
+          <hal_details>
+          The HAL must support triggering the AF trigger while an AE precapture trigger is active
+          (and vice versa), or at the same time as the AE trigger.  It is acceptable for the HAL to
+          treat these as two consecutive triggers, for example handling the AF trigger and then the
+          AE trigger.  Or the HAL may choose to optimize the case with both triggers fired at once,
+          to minimize the latency for converging both focus and exposure/flash usage.
+          </hal_details>
+          <tag id="BC" />
+        </entry>
+        <entry name="awbLock" type="byte" visibility="public" enum="true"
+               typedef="boolean" hwlevel="legacy">
+          <enum>
+            <value>OFF
+            <notes>Auto-white balance lock is disabled; the AWB
+            algorithm is free to update its parameters if in AUTO
+            mode.</notes></value>
+            <value>ON
+            <notes>Auto-white balance lock is enabled; the AWB
+            algorithm will not update its parameters while the lock
+            is active.</notes></value>
+          </enum>
+          <description>Whether auto-white balance (AWB) is currently locked to its
+          latest calculated values.</description>
+          <details>
+          When set to `true` (ON), the AWB algorithm is locked to its latest parameters,
+          and will not change color balance settings until the lock is set to `false` (OFF).
+
+          Since the camera device has a pipeline of in-flight requests, the settings that
+          get locked do not necessarily correspond to the settings that were present in the
+          latest capture result received from the camera device, since additional captures
+          and AWB updates may have occurred even before the result was sent out. If an
+          application is switching between automatic and manual control and wishes to eliminate
+          any flicker during the switch, the following procedure is recommended:
+
+            1. Starting in auto-AWB mode:
+            2. Lock AWB
+            3. Wait for the first result to be output that has the AWB locked
+            4. Copy AWB settings from that result into a request, set the request to manual AWB
+            5. Submit the capture request, proceed to run manual AWB as desired.
+
+          Note that AWB lock is only meaningful when
+          android.control.awbMode is in the AUTO mode; in other modes,
+          AWB is already fixed to a specific setting.
+
+          Some LEGACY devices may not support ON; the value is then overridden to OFF.
+          </details>
+          <tag id="BC" />
+        </entry>
+        <entry name="awbMode" type="byte" visibility="public" enum="true"
+               hwlevel="legacy">
+          <enum>
+            <value>OFF
+            <notes>
+            The camera device's auto-white balance routine is disabled.
+
+            The application-selected color transform matrix
+            (android.colorCorrection.transform) and gains
+            (android.colorCorrection.gains) are used by the camera
+            device for manual white balance control.
+            </notes>
+            </value>
+            <value>AUTO
+            <notes>
+            The camera device's auto-white balance routine is active.
+
+            The application's values for android.colorCorrection.transform
+            and android.colorCorrection.gains are ignored.
+            For devices that support the MANUAL_POST_PROCESSING capability, the
+            values used by the camera device for the transform and gains
+            will be available in the capture result for this request.
+            </notes>
+            </value>
+            <value>INCANDESCENT
+            <notes>
+            The camera device's auto-white balance routine is disabled;
+            the camera device uses incandescent light as the assumed scene
+            illumination for white balance.
+
+            While the exact white balance transforms are up to the
+            camera device, they will approximately match the CIE
+            standard illuminant A.
+
+            The application's values for android.colorCorrection.transform
+            and android.colorCorrection.gains are ignored.
+            For devices that support the MANUAL_POST_PROCESSING capability, the
+            values used by the camera device for the transform and gains
+            will be available in the capture result for this request.
+            </notes>
+            </value>
+            <value>FLUORESCENT
+            <notes>
+            The camera device's auto-white balance routine is disabled;
+            the camera device uses fluorescent light as the assumed scene
+            illumination for white balance.
+
+            While the exact white balance transforms are up to the
+            camera device, they will approximately match the CIE
+            standard illuminant F2.
+
+            The application's values for android.colorCorrection.transform
+            and android.colorCorrection.gains are ignored.
+            For devices that support the MANUAL_POST_PROCESSING capability, the
+            values used by the camera device for the transform and gains
+            will be available in the capture result for this request.
+            </notes>
+            </value>
+            <value>WARM_FLUORESCENT
+            <notes>
+            The camera device's auto-white balance routine is disabled;
+            the camera device uses warm fluorescent light as the assumed scene
+            illumination for white balance.
+
+            While the exact white balance transforms are up to the
+            camera device, they will approximately match the CIE
+            standard illuminant F4.
+
+            The application's values for android.colorCorrection.transform
+            and android.colorCorrection.gains are ignored.
+            For devices that support the MANUAL_POST_PROCESSING capability, the
+            values used by the camera device for the transform and gains
+            will be available in the capture result for this request.
+            </notes>
+            </value>
+            <value>DAYLIGHT
+            <notes>
+            The camera device's auto-white balance routine is disabled;
+            the camera device uses daylight as the assumed scene
+            illumination for white balance.
+
+            While the exact white balance transforms are up to the
+            camera device, they will approximately match the CIE
+            standard illuminant D65.
+
+            The application's values for android.colorCorrection.transform
+            and android.colorCorrection.gains are ignored.
+            For devices that support the MANUAL_POST_PROCESSING capability, the
+            values used by the camera device for the transform and gains
+            will be available in the capture result for this request.
+            </notes>
+            </value>
+            <value>CLOUDY_DAYLIGHT
+            <notes>
+            The camera device's auto-white balance routine is disabled;
+            the camera device uses cloudy daylight as the assumed scene
+            illumination for white balance.
+
+            The application's values for android.colorCorrection.transform
+            and android.colorCorrection.gains are ignored.
+            For devices that support the MANUAL_POST_PROCESSING capability, the
+            values used by the camera device for the transform and gains
+            will be available in the capture result for this request.
+            </notes>
+            </value>
+            <value>TWILIGHT
+            <notes>
+            The camera device's auto-white balance routine is disabled;
+            the camera device uses twilight as the assumed scene
+            illumination for white balance.
+
+            The application's values for android.colorCorrection.transform
+            and android.colorCorrection.gains are ignored.
+            For devices that support the MANUAL_POST_PROCESSING capability, the
+            values used by the camera device for the transform and gains
+            will be available in the capture result for this request.
+            </notes>
+            </value>
+            <value>SHADE
+            <notes>
+            The camera device's auto-white balance routine is disabled;
+            the camera device uses shade light as the assumed scene
+            illumination for white balance.
+
+            The application's values for android.colorCorrection.transform
+            and android.colorCorrection.gains are ignored.
+            For devices that support the MANUAL_POST_PROCESSING capability, the
+            values used by the camera device for the transform and gains
+            will be available in the capture result for this request.
+            </notes>
+            </value>
+          </enum>
+          <description>Whether auto-white balance (AWB) is currently setting the color
+          transform fields, and what its illumination target
+          is.</description>
+          <range>android.control.awbAvailableModes</range>
+          <details>
+          This control is only effective if android.control.mode is AUTO.
+
+          When set to the ON mode, the camera device's auto-white balance
+          routine is enabled, overriding the application's selected
+          android.colorCorrection.transform, android.colorCorrection.gains and
+          android.colorCorrection.mode. Note that when android.control.aeMode
+          is OFF, the behavior of AWB is device dependent. It is recommended to
+          also set AWB mode to OFF or lock AWB by using android.control.awbLock before
+          setting AE mode to OFF.
+
+          When set to the OFF mode, the camera device's auto-white balance
+          routine is disabled. The application manually controls the white
+          balance by android.colorCorrection.transform, android.colorCorrection.gains
+          and android.colorCorrection.mode.
+
+          When set to any other modes, the camera device's auto-white
+          balance routine is disabled. The camera device uses each
+          particular illumination target for white balance
+          adjustment. The application's values for
+          android.colorCorrection.transform,
+          android.colorCorrection.gains and
+          android.colorCorrection.mode are ignored.
+          </details>
+          <tag id="BC" />
+        </entry>
+        <entry name="awbRegions" type="int32" visibility="public"
+               optional="true" container="array" typedef="meteringRectangle">
+          <array>
+            <size>5</size>
+            <size>area_count</size>
+          </array>
+          <description>List of metering areas to use for auto-white-balance illuminant
+          estimation.</description>
+          <units>Pixel coordinates within android.sensor.info.activeArraySize</units>
+          <range>Coordinates must be between `[(0,0), (width, height))` of
+          android.sensor.info.activeArraySize</range>
+          <details>
+              Not available if android.control.maxRegionsAwb is 0.
+              Otherwise will always be present.
+
+              The maximum number of regions supported by the device is determined by the value
+              of android.control.maxRegionsAwb.
+
+              The coordinate system is based on the active pixel array,
+              with (0,0) being the top-left pixel in the active pixel array, and
+              (android.sensor.info.activeArraySize.width - 1,
+              android.sensor.info.activeArraySize.height - 1) being the
+              bottom-right pixel in the active pixel array.
+
+              The weight must be within `[0, 1000]`, and represents a weight
+              for every pixel in the area. This means that a large metering area
+              with the same weight as a smaller area will have more effect in
+              the metering result. Metering areas can partially overlap and the
+              camera device will add the weights in the overlap region.
+
+              The weights are relative to weights of other white balance metering regions, so if
+              only one region is used, all non-zero weights will have the same effect. A region with
+              0 weight is ignored.
+
+              If all regions have 0 weight, then no specific metering area needs to be used by the
+              camera device.
+
+              If the metering region is outside the used android.scaler.cropRegion returned in
+              capture result metadata, the camera device will ignore the sections outside the crop
+              region and output only the intersection rectangle as the metering region in the result
+              metadata.  If the region is entirely outside the crop region, it will be ignored and
+              not reported in the result metadata.
+          </details>
+          <hal_details>
+              The HAL level representation of MeteringRectangle[] is a
+              int[5 * area_count].
+              Every five elements represent a metering region of
+              (xmin, ymin, xmax, ymax, weight).
+              The rectangle is defined to be inclusive on xmin and ymin, but
+              exclusive on xmax and ymax.
+          </hal_details>
+          <tag id="BC" />
+        </entry>
+        <entry name="captureIntent" type="byte" visibility="public" enum="true"
+               hwlevel="legacy">
+          <enum>
+            <value>CUSTOM
+            <notes>The goal of this request doesn't fall into the other
+            categories. The camera device will default to preview-like
+            behavior.</notes></value>
+            <value>PREVIEW
+            <notes>This request is for a preview-like use case.
+
+            The precapture trigger may be used to start off a metering
+            w/flash sequence.
+            </notes></value>
+            <value>STILL_CAPTURE
+            <notes>This request is for a still capture-type
+            use case.
+
+            If the flash unit is under automatic control, it may fire as needed.
+            </notes></value>
+            <value>VIDEO_RECORD
+            <notes>This request is for a video recording
+            use case.</notes></value>
+            <value>VIDEO_SNAPSHOT
+            <notes>This request is for a video snapshot (still
+            image while recording video) use case.
+
+            The camera device should take the highest-quality image
+            possible (given the other settings) without disrupting the
+            frame rate of video recording.  </notes></value>
+            <value>ZERO_SHUTTER_LAG
+            <notes>This request is for a ZSL use case; the
+            application will stream full-resolution images and
+            reprocess one or several later for a final
+            capture.
+            </notes></value>
+            <value>MANUAL
+            <notes>This request is for manual capture use case where
+            the applications want to directly control the capture parameters.
+
+            For example, the application may wish to manually control
+            android.sensor.exposureTime, android.sensor.sensitivity, etc.
+            </notes></value>
+          </enum>
+          <description>Information to the camera device 3A (auto-exposure,
+          auto-focus, auto-white balance) routines about the purpose
+          of this capture, to help the camera device to decide optimal 3A
+          strategy.</description>
+          <details>This control (except for MANUAL) is only effective if
+          `android.control.mode != OFF` and any 3A routine is active.
+
+          ZERO_SHUTTER_LAG will be supported if android.request.availableCapabilities
+          contains PRIVATE_REPROCESSING or YUV_REPROCESSING. MANUAL will be supported if
+          android.request.availableCapabilities contains MANUAL_SENSOR. Other intent values are
+          always supported.
+          </details>
+          <tag id="BC" />
+        </entry>
+        <entry name="effectMode" type="byte" visibility="public" enum="true"
+               hwlevel="legacy">
+          <enum>
+            <value>OFF
+              <notes>
+              No color effect will be applied.
+              </notes>
+            </value>
+            <value optional="true">MONO
+              <notes>
+              A "monocolor" effect where the image is mapped into
+              a single color.
+
+              This will typically be grayscale.
+              </notes>
+            </value>
+            <value optional="true">NEGATIVE
+              <notes>
+              A "photo-negative" effect where the image's colors
+              are inverted.
+              </notes>
+            </value>
+            <value optional="true">SOLARIZE
+              <notes>
+              A "solarisation" effect (Sabattier effect) where the
+              image is wholly or partially reversed in
+              tone.
+              </notes>
+            </value>
+            <value optional="true">SEPIA
+              <notes>
+              A "sepia" effect where the image is mapped into warm
+              gray, red, and brown tones.
+              </notes>
+            </value>
+            <value optional="true">POSTERIZE
+              <notes>
+              A "posterization" effect where the image uses
+              discrete regions of tone rather than a continuous
+              gradient of tones.
+              </notes>
+            </value>
+            <value optional="true">WHITEBOARD
+              <notes>
+              A "whiteboard" effect where the image is typically displayed
+              as regions of white, with black or grey details.
+              </notes>
+            </value>
+            <value optional="true">BLACKBOARD
+              <notes>
+              A "blackboard" effect where the image is typically displayed
+              as regions of black, with white or grey details.
+              </notes>
+            </value>
+            <value optional="true">AQUA
+              <notes>
+              An "aqua" effect where a blue hue is added to the image.
+              </notes>
+            </value>
+          </enum>
+          <description>A special color effect to apply.</description>
+          <range>android.control.availableEffects</range>
+          <details>
+          When this mode is set, a color effect will be applied
+          to images produced by the camera device. The interpretation
+          and implementation of these color effects is left to the
+          implementor of the camera device, and should not be
+          depended on to be consistent (or present) across all
+          devices.
+          </details>
+          <tag id="BC" />
+        </entry>
+        <entry name="mode" type="byte" visibility="public" enum="true"
+               hwlevel="legacy">
+          <enum>
+            <value>OFF
+            <notes>Full application control of pipeline.
+
+            All control by the device's metering and focusing (3A)
+            routines is disabled, and no other settings in
+            android.control.* have any effect, except that
+            android.control.captureIntent may be used by the camera
+            device to select post-processing values for processing
+            blocks that do not allow for manual control, or are not
+            exposed by the camera API.
+
+            However, the camera device's 3A routines may continue to
+            collect statistics and update their internal state so that
+            when control is switched to AUTO mode, good control values
+            can be immediately applied.
+            </notes></value>
+            <value>AUTO
+            <notes>Use settings for each individual 3A routine.
+
+            Manual control of capture parameters is disabled. All
+            controls in android.control.* besides sceneMode take
+            effect.</notes></value>
+            <value optional="true">USE_SCENE_MODE
+            <notes>Use a specific scene mode.
+
+            Enabling this disables control.aeMode, control.awbMode and
+            control.afMode controls; the camera device will ignore
+            those settings while USE_SCENE_MODE is active (except for
+            FACE_PRIORITY scene mode). Other control entries are still active.
+            This setting can only be used if scene mode is supported (i.e.
+            android.control.availableSceneModes
+            contain some modes other than DISABLED).</notes></value>
+            <value optional="true">OFF_KEEP_STATE
+            <notes>Same as OFF mode, except that this capture will not be
+            used by camera device background auto-exposure, auto-white balance and
+            auto-focus algorithms (3A) to update their statistics.
+
+            Specifically, the 3A routines are locked to the last
+            values set from a request with AUTO, OFF, or
+            USE_SCENE_MODE, and any statistics or state updates
+            collected from manual captures with OFF_KEEP_STATE will be
+            discarded by the camera device.
+            </notes></value>
+          </enum>
+          <description>Overall mode of 3A (auto-exposure, auto-white-balance, auto-focus) control
+          routines.</description>
+          <range>android.control.availableModes</range>
+          <details>
+          This is a top-level 3A control switch. When set to OFF, all 3A control
+          by the camera device is disabled. The application must set the fields for
+          capture parameters itself.
+
+          When set to AUTO, the individual algorithm controls in
+          android.control.* are in effect, such as android.control.afMode.
+
+          When set to USE_SCENE_MODE, the individual controls in
+          android.control.* are mostly disabled, and the camera device implements
+          one of the scene mode settings (such as ACTION, SUNSET, or PARTY)
+          as it wishes. The camera device scene mode 3A settings are provided by
+          {@link android.hardware.camera2.CaptureResult capture results}.
+
+          When set to OFF_KEEP_STATE, it is similar to OFF mode, the only difference
+          is that this frame will not be used by camera device background 3A statistics
+          update, as if this frame is never captured. This mode can be used in the scenario
+          where the application doesn't want a 3A manual control capture to affect
+          the subsequent auto 3A capture results.
+          </details>
+          <tag id="BC" />
+        </entry>
+        <entry name="sceneMode" type="byte" visibility="public" enum="true"
+               hwlevel="legacy">
+          <enum>
+            <value id="0">DISABLED
+              <notes>
+              Indicates that no scene modes are set for a given capture request.
+              </notes>
+            </value>
+            <value>FACE_PRIORITY
+              <notes>If face detection support exists, use face
+              detection data for auto-focus, auto-white balance, and
+              auto-exposure routines.
+
+              If face detection statistics are disabled
+              (i.e. android.statistics.faceDetectMode is set to OFF),
+              this should still operate correctly (but will not return
+              face detection statistics to the framework).
+
+              Unlike the other scene modes, android.control.aeMode,
+              android.control.awbMode, and android.control.afMode
+              remain active when FACE_PRIORITY is set.
+              </notes>
+            </value>
+            <value optional="true">ACTION
+              <notes>
+              Optimized for photos of quickly moving objects.
+
+              Similar to SPORTS.
+              </notes>
+            </value>
+            <value optional="true">PORTRAIT
+              <notes>
+              Optimized for still photos of people.
+              </notes>
+            </value>
+            <value optional="true">LANDSCAPE
+              <notes>
+              Optimized for photos of distant macroscopic objects.
+              </notes>
+            </value>
+            <value optional="true">NIGHT
+              <notes>
+              Optimized for low-light settings.
+              </notes>
+            </value>
+            <value optional="true">NIGHT_PORTRAIT
+              <notes>
+              Optimized for still photos of people in low-light
+              settings.
+              </notes>
+            </value>
+            <value optional="true">THEATRE
+              <notes>
+              Optimized for dim, indoor settings where flash must
+              remain off.
+              </notes>
+            </value>
+            <value optional="true">BEACH
+              <notes>
+              Optimized for bright, outdoor beach settings.
+              </notes>
+            </value>
+            <value optional="true">SNOW
+              <notes>
+              Optimized for bright, outdoor settings containing snow.
+              </notes>
+            </value>
+            <value optional="true">SUNSET
+              <notes>
+              Optimized for scenes of the setting sun.
+              </notes>
+            </value>
+            <value optional="true">STEADYPHOTO
+              <notes>
+              Optimized to avoid blurry photos due to small amounts of
+              device motion (for example: due to hand shake).
+              </notes>
+            </value>
+            <value optional="true">FIREWORKS
+              <notes>
+              Optimized for nighttime photos of fireworks.
+              </notes>
+            </value>
+            <value optional="true">SPORTS
+              <notes>
+              Optimized for photos of quickly moving people.
+
+              Similar to ACTION.
+              </notes>
+            </value>
+            <value optional="true">PARTY
+              <notes>
+              Optimized for dim, indoor settings with multiple moving
+              people.
+              </notes>
+            </value>
+            <value optional="true">CANDLELIGHT
+              <notes>
+              Optimized for dim settings where the main light source
+              is a flame.
+              </notes>
+            </value>
+            <value optional="true">BARCODE
+              <notes>
+              Optimized for accurately capturing a photo of barcode
+              for use by camera applications that wish to read the
+              barcode value.
+              </notes>
+            </value>
+            <value deprecated="true" optional="true">HIGH_SPEED_VIDEO
+              <notes>
+              This is deprecated, please use {@link
+              android.hardware.camera2.CameraDevice#createConstrainedHighSpeedCaptureSession}
+              and {@link
+              android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList}
+              for high speed video recording.
+
+              Optimized for high speed video recording (frame rate >=60fps) use case.
+
+              The supported high speed video sizes and fps ranges are specified in
+              android.control.availableHighSpeedVideoConfigurations. To get desired
+              output frame rates, the application is only allowed to select video size
+              and fps range combinations listed in this static metadata. The fps range
+              can be control via android.control.aeTargetFpsRange.
+
+              In this mode, the camera device will override aeMode, awbMode, and afMode to
+              ON, ON, and CONTINUOUS_VIDEO, respectively. All post-processing block mode
+              controls will be overridden to be FAST. Therefore, no manual control of capture
+              and post-processing parameters is possible. All other controls operate the
+              same as when android.control.mode == AUTO. This means that all other
+              android.control.* fields continue to work, such as
+
+              * android.control.aeTargetFpsRange
+              * android.control.aeExposureCompensation
+              * android.control.aeLock
+              * android.control.awbLock
+              * android.control.effectMode
+              * android.control.aeRegions
+              * android.control.afRegions
+              * android.control.awbRegions
+              * android.control.afTrigger
+              * android.control.aePrecaptureTrigger
+
+              Outside of android.control.*, the following controls will work:
+
+              * android.flash.mode (automatic flash for still capture will not work since aeMode is ON)
+              * android.lens.opticalStabilizationMode (if it is supported)
+              * android.scaler.cropRegion
+              * android.statistics.faceDetectMode
+
+              For high speed recording use case, the actual maximum supported frame rate may
+              be lower than what camera can output, depending on the destination Surfaces for
+              the image data. For example, if the destination surface is from video encoder,
+              the application needs to check if the video encoder is capable of supporting the
+              high frame rate for a given video size, or it will end up with lower recording
+              frame rate. If the destination surface is from preview window, the preview frame
+              rate will be bounded by the screen refresh rate.
+
+              The camera device will only support up to 2 output high speed streams
+              (processed non-stalling format defined in android.request.maxNumOutputStreams)
+              in this mode. This control will be effective only if all of below conditions are true:
+
+              * The application created no more than maxNumHighSpeedStreams processed non-stalling
+              format output streams, where maxNumHighSpeedStreams is calculated as
+              min(2, android.request.maxNumOutputStreams[Processed (but not-stalling)]).
+              * The stream sizes are selected from the sizes reported by
+              android.control.availableHighSpeedVideoConfigurations.
+              * No processed non-stalling or raw streams are configured.
+
+              When the above conditions are NOT satisfied, the controls of this mode and
+              android.control.aeTargetFpsRange will be ignored by the camera device,
+              the camera device will fall back to android.control.mode `==` AUTO,
+              and the returned capture result metadata will give the fps range chosen
+              by the camera device.
+
+              Switching into or out of this mode may trigger some camera ISP/sensor
+              reconfigurations, which may introduce extra latency. It is recommended that
+              the application avoids unnecessary scene mode switch as much as possible.
+              </notes>
+            </value>
+            <value optional="true">HDR
+              <notes>
+              Turn on a device-specific high dynamic range (HDR) mode.
+
+              In this scene mode, the camera device captures images
+              that keep a larger range of scene illumination levels
+              visible in the final image. For example, when taking a
+              picture of an object in front of a bright window, both
+              the object and the scene through the window may be
+              visible when using HDR mode, while in normal AUTO mode,
+              one or the other may be poorly exposed. As a tradeoff,
+              HDR mode generally takes much longer to capture a single
+              image, has no user control, and may have other artifacts
+              depending on the HDR method used.
+
+              Therefore, HDR captures operate at a much slower rate
+              than regular captures.
+
+              In this mode, on LIMITED or FULL devices, when a request
+              is made with a android.control.captureIntent of
+              STILL_CAPTURE, the camera device will capture an image
+              using a high dynamic range capture technique.  On LEGACY
+              devices, captures that target a JPEG-format output will
+              be captured with HDR, and the capture intent is not
+              relevant.
+
+              The HDR capture may involve the device capturing a burst
+              of images internally and combining them into one, or it
+              may involve the device using specialized high dynamic
+              range capture hardware. In all cases, a single image is
+              produced in response to a capture request submitted
+              while in HDR mode.
+
+              Since substantial post-processing is generally needed to
+              produce an HDR image, only YUV and JPEG outputs are
+              supported for LIMITED/FULL device HDR captures, and only
+              JPEG outputs are supported for LEGACY HDR
+              captures. Using a RAW output for HDR capture is not
+              supported.
+              </notes>
+            </value>
+            <value optional="true" hidden="true">FACE_PRIORITY_LOW_LIGHT
+              <notes>Same as FACE_PRIORITY scene mode, except that the camera
+              device will choose higher sensitivity values (android.sensor.sensitivity)
+              under low light conditions.
+
+              The camera device may be tuned to expose the images in a reduced
+              sensitivity range to produce the best quality images. For example,
+              if the android.sensor.info.sensitivityRange gives range of [100, 1600],
+              the camera device auto-exposure routine tuning process may limit the actual
+              exposure sensitivity range to [100, 1200] to ensure that the noise level isn't
+              excessive in order to preserve the image quality. Under this situation, the image under
+              low light may be under-exposed when the sensor max exposure time (bounded by the
+              android.control.aeTargetFpsRange when android.control.aeMode is one of the
+              ON_* modes) and effective max sensitivity are reached. This scene mode allows the
+              camera device auto-exposure routine to increase the sensitivity up to the max
+              sensitivity specified by android.sensor.info.sensitivityRange when the scene is too
+              dark and the max exposure time is reached. The captured images may be noisier
+              compared with the images captured in normal FACE_PRIORITY mode; therefore, it is
+              recommended that the application only use this scene mode when it is capable of
+              reducing the noise level of the captured images.
+
+              Unlike the other scene modes, android.control.aeMode,
+              android.control.awbMode, and android.control.afMode
+              remain active when FACE_PRIORITY_LOW_LIGHT is set.
+              </notes>
+            </value>
+          </enum>
+          <description>
+          Control for which scene mode is currently active.
+          </description>
+          <range>android.control.availableSceneModes</range>
+          <details>
+          Scene modes are custom camera modes optimized for a certain set of conditions and
+          capture settings.
+
+          This is the mode that is active when
+          `android.control.mode == USE_SCENE_MODE`. Aside from FACE_PRIORITY, these modes will
+          disable android.control.aeMode, android.control.awbMode, and android.control.afMode
+          while in use.
+
+          The interpretation and implementation of these scene modes is left
+          to the implementor of the camera device. Their behavior will not be
+          consistent across all devices, and any given device may only implement
+          a subset of these modes.
+          </details>
+          <hal_details>
+          HAL implementations that include scene modes are expected to provide
+          the per-scene settings to use for android.control.aeMode,
+          android.control.awbMode, and android.control.afMode in
+          android.control.sceneModeOverrides.
+
+          For HIGH_SPEED_VIDEO mode, if it is included in android.control.availableSceneModes,
+          the HAL must list supported video size and fps range in
+          android.control.availableHighSpeedVideoConfigurations. For a given size, e.g.
+          1280x720, if the HAL has two different sensor configurations for normal streaming
+          mode and high speed streaming, when this scene mode is set/reset in a sequence of capture
+          requests, the HAL may have to switch between different sensor modes.
+          This mode is deprecated in HAL3.3, to support high speed video recording, please implement
+          android.control.availableHighSpeedVideoConfigurations and CONSTRAINED_HIGH_SPEED_VIDEO
+          capability defined in android.request.availableCapabilities.
+          </hal_details>
+          <tag id="BC" />
+        </entry>
+        <entry name="videoStabilizationMode" type="byte" visibility="public"
+               enum="true" hwlevel="legacy">
+          <enum>
+            <value>OFF
+            <notes>
+              Video stabilization is disabled.
+            </notes></value>
+            <value>ON
+            <notes>
+              Video stabilization is enabled.
+            </notes></value>
+          </enum>
+          <description>Whether video stabilization is
+          active.</description>
+          <details>
+          Video stabilization automatically warps images from
+          the camera in order to stabilize motion between consecutive frames.
+
+          If enabled, video stabilization can modify the
+          android.scaler.cropRegion to keep the video stream stabilized.
+
+          Switching between different video stabilization modes may take several
+          frames to initialize, the camera device will report the current mode
+          in capture result metadata. For example, When "ON" mode is requested,
+          the video stabilization modes in the first several capture results may
+          still be "OFF", and it will become "ON" when the initialization is
+          done.
+
+          In addition, not all recording sizes or frame rates may be supported for
+          stabilization by a device that reports stabilization support. It is guaranteed
+          that an output targeting a MediaRecorder or MediaCodec will be stabilized if
+          the recording resolution is less than or equal to 1920 x 1080 (width less than
+          or equal to 1920, height less than or equal to 1080), and the recording
+          frame rate is less than or equal to 30fps.  At other sizes, the CaptureResult
+          android.control.videoStabilizationMode field will return
+          OFF if the recording output is not stabilized, or if there are no output
+          Surface types that can be stabilized.
+
+          If a camera device supports both this mode and OIS
+          (android.lens.opticalStabilizationMode), turning both modes on may
+          produce undesirable interaction, so it is recommended not to enable
+          both at the same time.
+          </details>
+          <tag id="BC" />
+        </entry>
+      </controls>
+      <static>
+        <entry name="aeAvailableAntibandingModes" type="byte" visibility="public"
+               type_notes="list of enums" container="array" typedef="enumList"
+               hwlevel="legacy">
+          <array>
+            <size>n</size>
+          </array>
+          <description>
+            List of auto-exposure antibanding modes for android.control.aeAntibandingMode that are
+            supported by this camera device.
+          </description>
+          <range>Any value listed in android.control.aeAntibandingMode</range>
+          <details>
+            Not all of the auto-exposure anti-banding modes may be
+            supported by a given camera device. This field lists the
+            valid anti-banding modes that the application may request
+            for this camera device with the
+            android.control.aeAntibandingMode control.
+          </details>
+          <tag id="BC" />
+        </entry>
+        <entry name="aeAvailableModes" type="byte" visibility="public"
+               type_notes="list of enums" container="array" typedef="enumList"
+               hwlevel="legacy">
+          <array>
+            <size>n</size>
+          </array>
+          <description>
+            List of auto-exposure modes for android.control.aeMode that are supported by this camera
+            device.
+          </description>
+          <range>Any value listed in android.control.aeMode</range>
+          <details>
+            Not all the auto-exposure modes may be supported by a
+            given camera device, especially if no flash unit is
+            available. This entry lists the valid modes for
+            android.control.aeMode for this camera device.
+
+            All camera devices support ON, and all camera devices with flash
+            units support ON_AUTO_FLASH and ON_ALWAYS_FLASH.
+
+            FULL mode camera devices always support OFF mode,
+            which enables application control of camera exposure time,
+            sensitivity, and frame duration.
+
+            LEGACY mode camera devices never support OFF mode.
+            LIMITED mode devices support OFF if they support the MANUAL_SENSOR
+            capability.
+          </details>
+          <tag id="BC" />
+        </entry>
+        <entry name="aeAvailableTargetFpsRanges" type="int32" visibility="public"
+               type_notes="list of pairs of frame rates"
+               container="array" typedef="rangeInt"
+               hwlevel="legacy">
+          <array>
+            <size>2</size>
+            <size>n</size>
+          </array>
+          <description>List of frame rate ranges for android.control.aeTargetFpsRange supported by
+          this camera device.</description>
+          <units>Frames per second (FPS)</units>
+          <details>
+          For devices at the LEGACY level or above:
+
+          * For constant-framerate recording, for each normal
+          {@link android.media.CamcorderProfile CamcorderProfile}, that is, a
+          {@link android.media.CamcorderProfile CamcorderProfile} that has
+          {@link android.media.CamcorderProfile#quality quality} in
+          the range [{@link android.media.CamcorderProfile#QUALITY_LOW QUALITY_LOW},
+          {@link android.media.CamcorderProfile#QUALITY_2160P QUALITY_2160P}], if the profile is
+          supported by the device and has
+          {@link android.media.CamcorderProfile#videoFrameRate videoFrameRate} `x`, this list will
+          always include (`x`,`x`).
+
+          * Also, a camera device must either not support any
+          {@link android.media.CamcorderProfile CamcorderProfile},
+          or support at least one
+          normal {@link android.media.CamcorderProfile CamcorderProfile} that has
+          {@link android.media.CamcorderProfile#videoFrameRate videoFrameRate} `x` &gt;= 24.
+
+          For devices at the LIMITED level or above:
+
+          * For YUV_420_888 burst capture use case, this list will always include (`min`, `max`)
+          and (`max`, `max`) where `min` &lt;= 15 and `max` = the maximum output frame rate of the
+          maximum YUV_420_888 output size.
+          </details>
+          <tag id="BC" />
+        </entry>
+        <entry name="aeCompensationRange" type="int32" visibility="public"
+               container="array" typedef="rangeInt"
+               hwlevel="legacy">
+          <array>
+            <size>2</size>
+          </array>
+          <description>Maximum and minimum exposure compensation values for
+          android.control.aeExposureCompensation, in counts of android.control.aeCompensationStep,
+          that are supported by this camera device.</description>
+          <range>
+            Range [0,0] indicates that exposure compensation is not supported.
+
+            For LIMITED and FULL devices, range must follow below requirements if exposure
+            compensation is supported (`range != [0, 0]`):
+
+            `Min.exposure compensation * android.control.aeCompensationStep &lt;= -2 EV`
+
+            `Max.exposure compensation * android.control.aeCompensationStep &gt;= 2 EV`
+
+            LEGACY devices may support a smaller range than this.
+          </range>
+          <tag id="BC" />
+        </entry>
+        <entry name="aeCompensationStep" type="rational" visibility="public"
+               hwlevel="legacy">
+          <description>Smallest step by which the exposure compensation
+          can be changed.</description>
+          <units>Exposure Value (EV)</units>
+          <details>
+          This is the unit for android.control.aeExposureCompensation. For example, if this key has
+          a value of `1/2`, then a setting of `-2` for android.control.aeExposureCompensation means
+          that the target EV offset for the auto-exposure routine is -1 EV.
+
+          One unit of EV compensation changes the brightness of the captured image by a factor
+          of two. +1 EV doubles the image brightness, while -1 EV halves the image brightness.
+          </details>
+          <hal_details>
+            This must be less than or equal to 1/2.
+          </hal_details>
+          <tag id="BC" />
+        </entry>
+        <entry name="afAvailableModes" type="byte" visibility="public"
+               type_notes="List of enums" container="array" typedef="enumList"
+               hwlevel="legacy">
+          <array>
+            <size>n</size>
+          </array>
+          <description>
+          List of auto-focus (AF) modes for android.control.afMode that are
+          supported by this camera device.
+          </description>
+          <range>Any value listed in android.control.afMode</range>
+          <details>
+          Not all the auto-focus modes may be supported by a
+          given camera device. This entry lists the valid modes for
+          android.control.afMode for this camera device.
+
+          All LIMITED and FULL mode camera devices will support OFF mode, and all
+          camera devices with adjustable focuser units
+          (`android.lens.info.minimumFocusDistance &gt; 0`) will support AUTO mode.
+
+          LEGACY devices will support OFF mode only if they support
+          focusing to infinity (by also setting android.lens.focusDistance to
+          `0.0f`).
+          </details>
+          <tag id="BC" />
+        </entry>
+        <entry name="availableEffects" type="byte" visibility="public"
+               type_notes="List of enums (android.control.effectMode)." container="array"
+               typedef="enumList" hwlevel="legacy">
+          <array>
+            <size>n</size>
+          </array>
+          <description>
+          List of color effects for android.control.effectMode that are supported by this camera
+          device.
+          </description>
+          <range>Any value listed in android.control.effectMode</range>
+          <details>
+          This list contains the color effect modes that can be applied to
+          images produced by the camera device.
+          Implementations are not expected to be consistent across all devices.
+          If no color effect modes are available for a device, this will only list
+          OFF.
+
+          A color effect will only be applied if
+          android.control.mode != OFF.  OFF is always included in this list.
+
+          This control has no effect on the operation of other control routines such
+          as auto-exposure, white balance, or focus.
+          </details>
+          <tag id="BC" />
+        </entry>
+        <entry name="availableSceneModes" type="byte" visibility="public"
+               type_notes="List of enums (android.control.sceneMode)."
+               container="array" typedef="enumList" hwlevel="legacy">
+          <array>
+            <size>n</size>
+          </array>
+          <description>
+          List of scene modes for android.control.sceneMode that are supported by this camera
+          device.
+          </description>
+          <range>Any value listed in android.control.sceneMode</range>
+          <details>
+          This list contains scene modes that can be set for the camera device.
+          Only scene modes that have been fully implemented for the
+          camera device may be included here. Implementations are not expected
+          to be consistent across all devices.
+
+          If no scene modes are supported by the camera device, this
+          will be set to DISABLED. Otherwise DISABLED will not be listed.
+
+          FACE_PRIORITY is always listed if face detection is
+          supported (i.e. `android.statistics.info.maxFaceCount &gt;
+          0`).
+          </details>
+          <tag id="BC" />
+        </entry>
+        <entry name="availableVideoStabilizationModes" type="byte"
+               visibility="public" type_notes="List of enums." container="array"
+               typedef="enumList" hwlevel="legacy">
+          <array>
+            <size>n</size>
+          </array>
+          <description>
+          List of video stabilization modes for android.control.videoStabilizationMode
+          that are supported by this camera device.
+          </description>
+          <range>Any value listed in android.control.videoStabilizationMode</range>
+          <details>
+          OFF will always be listed.
+          </details>
+          <tag id="BC" />
+        </entry>
+        <entry name="awbAvailableModes" type="byte" visibility="public"
+               type_notes="List of enums"
+               container="array" typedef="enumList" hwlevel="legacy">
+          <array>
+            <size>n</size>
+          </array>
+          <description>
+          List of auto-white-balance modes for android.control.awbMode that are supported by this
+          camera device.
+          </description>
+          <range>Any value listed in android.control.awbMode</range>
+          <details>
+          Not all the auto-white-balance modes may be supported by a
+          given camera device. This entry lists the valid modes for
+          android.control.awbMode for this camera device.
+
+          All camera devices will support ON mode.
+
+          Camera devices that support the MANUAL_POST_PROCESSING capability will always support OFF
+          mode, which enables application control of white balance, by using
+          android.colorCorrection.transform and android.colorCorrection.gains
+          (android.colorCorrection.mode must be set to TRANSFORM_MATRIX). This includes all FULL
+          mode camera devices.
+          </details>
+          <tag id="BC" />
+        </entry>
+        <entry name="maxRegions" type="int32" visibility="hidden"
+               container="array" hwlevel="legacy">
+          <array>
+            <size>3</size>
+          </array>
+          <description>
+          List of the maximum number of regions that can be used for metering in
+          auto-exposure (AE), auto-white balance (AWB), and auto-focus (AF);
+          this corresponds to the maximum number of elements in
+          android.control.aeRegions, android.control.awbRegions,
+          and android.control.afRegions.
+          </description>
+          <range>
+          Value must be &gt;= 0 for each element. For full-capability devices
+          this value must be &gt;= 1 for AE and AF. The order of the elements is:
+          `(AE, AWB, AF)`.</range>
+          <tag id="BC" />
+        </entry>
+        <entry name="maxRegionsAe" type="int32" visibility="public"
+               synthetic="true" hwlevel="legacy">
+          <description>
+          The maximum number of metering regions that can be used by the auto-exposure (AE)
+          routine.
+          </description>
+          <range>Value will be &gt;= 0. For FULL-capability devices, this
+          value will be &gt;= 1.
+          </range>
+          <details>
+          This corresponds to the maximum allowed number of elements in
+          android.control.aeRegions.
+          </details>
+          <hal_details>This entry is private to the framework. Fill in
+          maxRegions to have this entry be automatically populated.
+          </hal_details>
+        </entry>
+        <entry name="maxRegionsAwb" type="int32" visibility="public"
+               synthetic="true" hwlevel="legacy">
+          <description>
+          The maximum number of metering regions that can be used by the auto-white balance (AWB)
+          routine.
+          </description>
+          <range>Value will be &gt;= 0.
+          </range>
+          <details>
+          This corresponds to the maximum allowed number of elements in
+          android.control.awbRegions.
+          </details>
+          <hal_details>This entry is private to the framework. Fill in
+          maxRegions to have this entry be automatically populated.
+          </hal_details>
+        </entry>
+        <entry name="maxRegionsAf" type="int32" visibility="public"
+               synthetic="true" hwlevel="legacy">
+          <description>
+          The maximum number of metering regions that can be used by the auto-focus (AF) routine.
+          </description>
+          <range>Value will be &gt;= 0. For FULL-capability devices, this
+          value will be &gt;= 1.
+          </range>
+          <details>
+          This corresponds to the maximum allowed number of elements in
+          android.control.afRegions.
+          </details>
+          <hal_details>This entry is private to the framework. Fill in
+          maxRegions to have this entry be automatically populated.
+          </hal_details>
+        </entry>
+        <entry name="sceneModeOverrides" type="byte" visibility="system"
+               container="array" hwlevel="limited">
+          <array>
+            <size>3</size>
+            <size>length(availableSceneModes)</size>
+          </array>
+          <description>
+          Ordered list of auto-exposure, auto-white balance, and auto-focus
+          settings to use with each available scene mode.
+          </description>
+          <range>
+          For each available scene mode, the list must contain three
+          entries containing the android.control.aeMode,
+          android.control.awbMode, and android.control.afMode values used
+          by the camera device. The entry order is `(aeMode, awbMode, afMode)`
+          where aeMode has the lowest index position.
+          </range>
+          <details>
+          When a scene mode is enabled, the camera device is expected
+          to override android.control.aeMode, android.control.awbMode,
+          and android.control.afMode with its preferred settings for
+          that scene mode.
+
+          The order of this list matches that of availableSceneModes,
+          with 3 entries for each mode.  The overrides listed
+          for FACE_PRIORITY and FACE_PRIORITY_LOW_LIGHT (if supported) are ignored,
+          since for that mode the application-set android.control.aeMode,
+          android.control.awbMode, and android.control.afMode values are
+          used instead, matching the behavior when android.control.mode
+          is set to AUTO. It is recommended that the FACE_PRIORITY and
+          FACE_PRIORITY_LOW_LIGHT (if supported) overrides should be set to 0.
+
+          For example, if availableSceneModes contains
+          `(FACE_PRIORITY, ACTION, NIGHT)`,  then the camera framework
+          expects sceneModeOverrides to have 9 entries formatted like:
+          `(0, 0, 0, ON_AUTO_FLASH, AUTO, CONTINUOUS_PICTURE,
+          ON_AUTO_FLASH, INCANDESCENT, AUTO)`.
+          </details>
+          <hal_details>
+          To maintain backward compatibility, this list will be made available
+          in the static metadata of the camera service.  The camera service will
+          use these values to set android.control.aeMode,
+          android.control.awbMode, and android.control.afMode when using a scene
+          mode other than FACE_PRIORITY and FACE_PRIORITY_LOW_LIGHT (if supported).
+          </hal_details>
+          <tag id="BC" />
+        </entry>
+      </static>
+      <dynamic>
+        <entry name="aePrecaptureId" type="int32" visibility="system" deprecated="true">
+          <description>The ID sent with the latest
+          CAMERA2_TRIGGER_PRECAPTURE_METERING call</description>
+          <details>Must be 0 if no
+          CAMERA2_TRIGGER_PRECAPTURE_METERING trigger received yet
+          by HAL. Always updated even if AE algorithm ignores the
+          trigger</details>
+        </entry>
+        <clone entry="android.control.aeAntibandingMode" kind="controls">
+        </clone>
+        <clone entry="android.control.aeExposureCompensation" kind="controls">
+        </clone>
+        <clone entry="android.control.aeLock" kind="controls">
+        </clone>
+        <clone entry="android.control.aeMode" kind="controls">
+        </clone>
+        <clone entry="android.control.aeRegions" kind="controls">
+        </clone>
+        <clone entry="android.control.aeTargetFpsRange" kind="controls">
+        </clone>
+        <clone entry="android.control.aePrecaptureTrigger" kind="controls">
+        </clone>
+        <entry name="aeState" type="byte" visibility="public" enum="true"
+               hwlevel="limited">
+          <enum>
+            <value>INACTIVE
+            <notes>AE is off or recently reset.
+
+            When a camera device is opened, it starts in
+            this state. This is a transient state, the camera device may skip reporting
+            this state in capture result.</notes></value>
+            <value>SEARCHING
+            <notes>AE doesn't yet have a good set of control values
+            for the current scene.
+
+            This is a transient state, the camera device may skip
+            reporting this state in capture result.</notes></value>
+            <value>CONVERGED
+            <notes>AE has a good set of control values for the
+            current scene.</notes></value>
+            <value>LOCKED
+            <notes>AE has been locked.</notes></value>
+            <value>FLASH_REQUIRED
+            <notes>AE has a good set of control values, but flash
+            needs to be fired for good quality still
+            capture.</notes></value>
+            <value>PRECAPTURE
+            <notes>AE has been asked to do a precapture sequence
+            and is currently executing it.
+
+            Precapture can be triggered through setting
+            android.control.aePrecaptureTrigger to START. Currently
+            active and completed (if it causes camera device internal AE lock) precapture
+            metering sequence can be canceled through setting
+            android.control.aePrecaptureTrigger to CANCEL.
+
+            Once PRECAPTURE completes, AE will transition to CONVERGED
+            or FLASH_REQUIRED as appropriate. This is a transient
+            state, the camera device may skip reporting this state in
+            capture result.</notes></value>
+          </enum>
+          <description>Current state of the auto-exposure (AE) algorithm.</description>
+          <details>Switching between or enabling AE modes (android.control.aeMode) always
+          resets the AE state to INACTIVE. Similarly, switching between android.control.mode,
+          or android.control.sceneMode if `android.control.mode == USE_SCENE_MODE` resets all
+          the algorithm states to INACTIVE.
+
+          The camera device can do several state transitions between two results, if it is
+          allowed by the state transition table. For example: INACTIVE may never actually be
+          seen in a result.
+
+          The state in the result is the state for this image (in sync with this image): if
+          AE state becomes CONVERGED, then the image data associated with this result should
+          be good to use.
+
+          Below are state transition tables for different AE modes.
+
+            State       | Transition Cause | New State | Notes
+          :------------:|:----------------:|:---------:|:-----------------------:
+          INACTIVE      |                  | INACTIVE  | Camera device auto exposure algorithm is disabled
+
+          When android.control.aeMode is AE_MODE_ON_*:
+
+            State        | Transition Cause                             | New State      | Notes
+          :-------------:|:--------------------------------------------:|:--------------:|:-----------------:
+          INACTIVE       | Camera device initiates AE scan              | SEARCHING      | Values changing
+          INACTIVE       | android.control.aeLock is ON                 | LOCKED         | Values locked
+          SEARCHING      | Camera device finishes AE scan               | CONVERGED      | Good values, not changing
+          SEARCHING      | Camera device finishes AE scan               | FLASH_REQUIRED | Converged but too dark w/o flash
+          SEARCHING      | android.control.aeLock is ON                 | LOCKED         | Values locked
+          CONVERGED      | Camera device initiates AE scan              | SEARCHING      | Values changing
+          CONVERGED      | android.control.aeLock is ON                 | LOCKED         | Values locked
+          FLASH_REQUIRED | Camera device initiates AE scan              | SEARCHING      | Values changing
+          FLASH_REQUIRED | android.control.aeLock is ON                 | LOCKED         | Values locked
+          LOCKED         | android.control.aeLock is OFF                | SEARCHING      | Values not good after unlock
+          LOCKED         | android.control.aeLock is OFF                | CONVERGED      | Values good after unlock
+          LOCKED         | android.control.aeLock is OFF                | FLASH_REQUIRED | Exposure good, but too dark
+          PRECAPTURE     | Sequence done. android.control.aeLock is OFF | CONVERGED      | Ready for high-quality capture
+          PRECAPTURE     | Sequence done. android.control.aeLock is ON  | LOCKED         | Ready for high-quality capture
+          LOCKED         | aeLock is ON and aePrecaptureTrigger is START | LOCKED        | Precapture trigger is ignored when AE is already locked
+          LOCKED         | aeLock is ON and aePrecaptureTrigger is CANCEL| LOCKED        | Precapture trigger is ignored when AE is already locked
+          Any state (excluding LOCKED) | android.control.aePrecaptureTrigger is START | PRECAPTURE     | Start AE precapture metering sequence
+          Any state (excluding LOCKED) | android.control.aePrecaptureTrigger is CANCEL| INACTIVE       | Currently active precapture metering sequence is canceled
+
+          For the above table, the camera device may skip reporting any state changes that happen
+          without application intervention (i.e. mode switch, trigger, locking). Any state that
+          can be skipped in that manner is called a transient state.
+
+          For example, for above AE modes (AE_MODE_ON_*), in addition to the state transitions
+          listed in above table, it is also legal for the camera device to skip one or more
+          transient states between two results. See below table for examples:
+
+            State        | Transition Cause                                            | New State      | Notes
+          :-------------:|:-----------------------------------------------------------:|:--------------:|:-----------------:
+          INACTIVE       | Camera device finished AE scan                              | CONVERGED      | Values are already good, transient states are skipped by camera device.
+          Any state (excluding LOCKED) | android.control.aePrecaptureTrigger is START, sequence done | FLASH_REQUIRED | Converged but too dark w/o flash after a precapture sequence, transient states are skipped by camera device.
+          Any state (excluding LOCKED) | android.control.aePrecaptureTrigger is START, sequence done | CONVERGED      | Converged after a precapture sequence, transient states are skipped by camera device.
+          Any state (excluding LOCKED) | android.control.aePrecaptureTrigger is CANCEL, converged    | FLASH_REQUIRED | Converged but too dark w/o flash after a precapture sequence is canceled, transient states are skipped by camera device.
+          Any state (excluding LOCKED) | android.control.aePrecaptureTrigger is CANCEL, converged    | CONVERGED      | Converged after a precapture sequence is canceled, transient states are skipped by camera device.
+          CONVERGED      | Camera device finished AE scan                              | FLASH_REQUIRED | Converged but too dark w/o flash after a new scan, transient states are skipped by camera device.
+          FLASH_REQUIRED | Camera device finished AE scan                              | CONVERGED      | Converged after a new scan, transient states are skipped by camera device.
+          </details>
+        </entry>
+        <clone entry="android.control.afMode" kind="controls">
+        </clone>
+        <clone entry="android.control.afRegions" kind="controls">
+        </clone>
+        <clone entry="android.control.afTrigger" kind="controls">
+        </clone>
+        <entry name="afState" type="byte" visibility="public" enum="true"
+               hwlevel="legacy">
+          <enum>
+            <value>INACTIVE
+            <notes>AF is off or has not yet tried to scan/been asked
+            to scan.
+
+            When a camera device is opened, it starts in this
+            state. This is a transient state, the camera device may
+            skip reporting this state in capture
+            result.</notes></value>
+            <value>PASSIVE_SCAN
+            <notes>AF is currently performing an AF scan initiated by the
+            camera device in a continuous autofocus mode.
+
+            Only used by CONTINUOUS_* AF modes. This is a transient
+            state, the camera device may skip reporting this state in
+            capture result.</notes></value>
+            <value>PASSIVE_FOCUSED
+            <notes>AF currently believes it is in focus, but may
+            restart scanning at any time.
+
+            Only used by CONTINUOUS_* AF modes. This is a transient
+            state, the camera device may skip reporting this state in
+            capture result.</notes></value>
+            <value>ACTIVE_SCAN
+            <notes>AF is performing an AF scan because it was
+            triggered by AF trigger.
+
+            Only used by AUTO or MACRO AF modes. This is a transient
+            state, the camera device may skip reporting this state in
+            capture result.</notes></value>
+            <value>FOCUSED_LOCKED
+            <notes>AF believes it is focused correctly and has locked
+            focus.
+
+            This state is reached only after an explicit START AF trigger has been
+            sent (android.control.afTrigger), when good focus has been obtained.
+
+            The lens will remain stationary until the AF mode (android.control.afMode) is changed or
+            a new AF trigger is sent to the camera device (android.control.afTrigger).
+            </notes></value>
+            <value>NOT_FOCUSED_LOCKED
+            <notes>AF has failed to focus successfully and has locked
+            focus.
+
+            This state is reached only after an explicit START AF trigger has been
+            sent (android.control.afTrigger), when good focus cannot be obtained.
+
+            The lens will remain stationary until the AF mode (android.control.afMode) is changed or
+            a new AF trigger is sent to the camera device (android.control.afTrigger).
+            </notes></value>
+            <value>PASSIVE_UNFOCUSED
+            <notes>AF finished a passive scan without finding focus,
+            and may restart scanning at any time.
+
+            Only used by CONTINUOUS_* AF modes. This is a transient state, the camera
+            device may skip reporting this state in capture result.
+
+            LEGACY camera devices do not support this state. When a passive
+            scan has finished, it will always go to PASSIVE_FOCUSED.
+            </notes></value>
+          </enum>
+          <description>Current state of auto-focus (AF) algorithm.</description>
+          <details>
+          Switching between or enabling AF modes (android.control.afMode) always
+          resets the AF state to INACTIVE. Similarly, switching between android.control.mode,
+          or android.control.sceneMode if `android.control.mode == USE_SCENE_MODE` resets all
+          the algorithm states to INACTIVE.
+
+          The camera device can do several state transitions between two results, if it is
+          allowed by the state transition table. For example: INACTIVE may never actually be
+          seen in a result.
+
+          The state in the result is the state for this image (in sync with this image): if
+          AF state becomes FOCUSED, then the image data associated with this result should
+          be sharp.
+
+          Below are state transition tables for different AF modes.
+
+          When android.control.afMode is AF_MODE_OFF or AF_MODE_EDOF:
+
+            State       | Transition Cause | New State | Notes
+          :------------:|:----------------:|:---------:|:-----------:
+          INACTIVE      |                  | INACTIVE  | Never changes
+
+          When android.control.afMode is AF_MODE_AUTO or AF_MODE_MACRO:
+
+            State            | Transition Cause | New State          | Notes
+          :-----------------:|:----------------:|:------------------:|:--------------:
+          INACTIVE           | AF_TRIGGER       | ACTIVE_SCAN        | Start AF sweep, Lens now moving
+          ACTIVE_SCAN        | AF sweep done    | FOCUSED_LOCKED     | Focused, Lens now locked
+          ACTIVE_SCAN        | AF sweep done    | NOT_FOCUSED_LOCKED | Not focused, Lens now locked
+          ACTIVE_SCAN        | AF_CANCEL        | INACTIVE           | Cancel/reset AF, Lens now locked
+          FOCUSED_LOCKED     | AF_CANCEL        | INACTIVE           | Cancel/reset AF
+          FOCUSED_LOCKED     | AF_TRIGGER       | ACTIVE_SCAN        | Start new sweep, Lens now moving
+          NOT_FOCUSED_LOCKED | AF_CANCEL        | INACTIVE           | Cancel/reset AF
+          NOT_FOCUSED_LOCKED | AF_TRIGGER       | ACTIVE_SCAN        | Start new sweep, Lens now moving
+          Any state          | Mode change      | INACTIVE           |
+
+          For the above table, the camera device may skip reporting any state changes that happen
+          without application intervention (i.e. mode switch, trigger, locking). Any state that
+          can be skipped in that manner is called a transient state.
+
+          For example, for these AF modes (AF_MODE_AUTO and AF_MODE_MACRO), in addition to the
+          state transitions listed in above table, it is also legal for the camera device to skip
+          one or more transient states between two results. See below table for examples:
+
+            State            | Transition Cause | New State          | Notes
+          :-----------------:|:----------------:|:------------------:|:--------------:
+          INACTIVE           | AF_TRIGGER       | FOCUSED_LOCKED     | Focus is already good or good after a scan, lens is now locked.
+          INACTIVE           | AF_TRIGGER       | NOT_FOCUSED_LOCKED | Focus failed after a scan, lens is now locked.
+          FOCUSED_LOCKED     | AF_TRIGGER       | FOCUSED_LOCKED     | Focus is already good or good after a scan, lens is now locked.
+          NOT_FOCUSED_LOCKED | AF_TRIGGER       | FOCUSED_LOCKED     | Focus is good after a scan, lens is now locked.
+
+
+          When android.control.afMode is AF_MODE_CONTINUOUS_VIDEO:
+
+            State            | Transition Cause                    | New State          | Notes
+          :-----------------:|:-----------------------------------:|:------------------:|:--------------:
+          INACTIVE           | Camera device initiates new scan    | PASSIVE_SCAN       | Start AF scan, Lens now moving
+          INACTIVE           | AF_TRIGGER                          | NOT_FOCUSED_LOCKED | AF state query, Lens now locked
+          PASSIVE_SCAN       | Camera device completes current scan| PASSIVE_FOCUSED    | End AF scan, Lens now locked
+          PASSIVE_SCAN       | Camera device fails current scan    | PASSIVE_UNFOCUSED  | End AF scan, Lens now locked
+          PASSIVE_SCAN       | AF_TRIGGER                          | FOCUSED_LOCKED     | Immediate transition, if focus is good. Lens now locked
+          PASSIVE_SCAN       | AF_TRIGGER                          | NOT_FOCUSED_LOCKED | Immediate transition, if focus is bad. Lens now locked
+          PASSIVE_SCAN       | AF_CANCEL                           | INACTIVE           | Reset lens position, Lens now locked
+          PASSIVE_FOCUSED    | Camera device initiates new scan    | PASSIVE_SCAN       | Start AF scan, Lens now moving
+          PASSIVE_UNFOCUSED  | Camera device initiates new scan    | PASSIVE_SCAN       | Start AF scan, Lens now moving
+          PASSIVE_FOCUSED    | AF_TRIGGER                          | FOCUSED_LOCKED     | Immediate transition, lens now locked
+          PASSIVE_UNFOCUSED  | AF_TRIGGER                          | NOT_FOCUSED_LOCKED | Immediate transition, lens now locked
+          FOCUSED_LOCKED     | AF_TRIGGER                          | FOCUSED_LOCKED     | No effect
+          FOCUSED_LOCKED     | AF_CANCEL                           | INACTIVE           | Restart AF scan
+          NOT_FOCUSED_LOCKED | AF_TRIGGER                          | NOT_FOCUSED_LOCKED | No effect
+          NOT_FOCUSED_LOCKED | AF_CANCEL                           | INACTIVE           | Restart AF scan
+
+          When android.control.afMode is AF_MODE_CONTINUOUS_PICTURE:
+
+            State            | Transition Cause                     | New State          | Notes
+          :-----------------:|:------------------------------------:|:------------------:|:--------------:
+          INACTIVE           | Camera device initiates new scan     | PASSIVE_SCAN       | Start AF scan, Lens now moving
+          INACTIVE           | AF_TRIGGER                           | NOT_FOCUSED_LOCKED | AF state query, Lens now locked
+          PASSIVE_SCAN       | Camera device completes current scan | PASSIVE_FOCUSED    | End AF scan, Lens now locked
+          PASSIVE_SCAN       | Camera device fails current scan     | PASSIVE_UNFOCUSED  | End AF scan, Lens now locked
+          PASSIVE_SCAN       | AF_TRIGGER                           | FOCUSED_LOCKED     | Eventual transition once the focus is good. Lens now locked
+          PASSIVE_SCAN       | AF_TRIGGER                           | NOT_FOCUSED_LOCKED | Eventual transition if cannot find focus. Lens now locked
+          PASSIVE_SCAN       | AF_CANCEL                            | INACTIVE           | Reset lens position, Lens now locked
+          PASSIVE_FOCUSED    | Camera device initiates new scan     | PASSIVE_SCAN       | Start AF scan, Lens now moving
+          PASSIVE_UNFOCUSED  | Camera device initiates new scan     | PASSIVE_SCAN       | Start AF scan, Lens now moving
+          PASSIVE_FOCUSED    | AF_TRIGGER                           | FOCUSED_LOCKED     | Immediate trans. Lens now locked
+          PASSIVE_UNFOCUSED  | AF_TRIGGER                           | NOT_FOCUSED_LOCKED | Immediate trans. Lens now locked
+          FOCUSED_LOCKED     | AF_TRIGGER                           | FOCUSED_LOCKED     | No effect
+          FOCUSED_LOCKED     | AF_CANCEL                            | INACTIVE           | Restart AF scan
+          NOT_FOCUSED_LOCKED | AF_TRIGGER                           | NOT_FOCUSED_LOCKED | No effect
+          NOT_FOCUSED_LOCKED | AF_CANCEL                            | INACTIVE           | Restart AF scan
+
+          When switching between AF_MODE_CONTINUOUS_* (CAF modes) and AF_MODE_AUTO/AF_MODE_MACRO
+          (AUTO modes), the initial INACTIVE or PASSIVE_SCAN states may be skipped by the
+          camera device. When a trigger is included in a mode switch request, the trigger
+          will be evaluated in the context of the new mode in the request.
+          See below table for examples:
+
+            State      | Transition Cause                       | New State                                | Notes
+          :-----------:|:--------------------------------------:|:----------------------------------------:|:--------------:
+          any state    | CAF-->AUTO mode switch                 | INACTIVE                                 | Mode switch without trigger, initial state must be INACTIVE
+          any state    | CAF-->AUTO mode switch with AF_TRIGGER | trigger-reachable states from INACTIVE   | Mode switch with trigger, INACTIVE is skipped
+          any state    | AUTO-->CAF mode switch                 | passively reachable states from INACTIVE | Mode switch without trigger, passive transient state is skipped
+          </details>
+        </entry>
+        <entry name="afTriggerId" type="int32" visibility="system" deprecated="true">
+          <description>The ID sent with the latest
+          CAMERA2_TRIGGER_AUTOFOCUS call</description>
+          <details>Must be 0 if no CAMERA2_TRIGGER_AUTOFOCUS trigger
+          received yet by HAL. Always updated even if AF algorithm
+          ignores the trigger</details>
+        </entry>
+        <clone entry="android.control.awbLock" kind="controls">
+        </clone>
+        <clone entry="android.control.awbMode" kind="controls">
+        </clone>
+        <clone entry="android.control.awbRegions" kind="controls">
+        </clone>
+        <clone entry="android.control.captureIntent" kind="controls">
+        </clone>
+        <entry name="awbState" type="byte" visibility="public" enum="true"
+               hwlevel="limited">
+          <enum>
+            <value>INACTIVE
+            <notes>AWB is not in auto mode, or has not yet started metering.
+
+            When a camera device is opened, it starts in this
+            state. This is a transient state, the camera device may
+            skip reporting this state in capture
+            result.</notes></value>
+            <value>SEARCHING
+            <notes>AWB doesn't yet have a good set of control
+            values for the current scene.
+
+            This is a transient state, the camera device
+            may skip reporting this state in capture result.</notes></value>
+            <value>CONVERGED
+            <notes>AWB has a good set of control values for the
+            current scene.</notes></value>
+            <value>LOCKED
+            <notes>AWB has been locked.
+            </notes></value>
+          </enum>
+          <description>Current state of auto-white balance (AWB) algorithm.</description>
+          <details>Switching between or enabling AWB modes (android.control.awbMode) always
+          resets the AWB state to INACTIVE. Similarly, switching between android.control.mode,
+          or android.control.sceneMode if `android.control.mode == USE_SCENE_MODE` resets all
+          the algorithm states to INACTIVE.
+
+          The camera device can do several state transitions between two results, if it is
+          allowed by the state transition table. So INACTIVE may never actually be seen in
+          a result.
+
+          The state in the result is the state for this image (in sync with this image): if
+          AWB state becomes CONVERGED, then the image data associated with this result should
+          be good to use.
+
+          Below are state transition tables for different AWB modes.
+
+          When `android.control.awbMode != AWB_MODE_AUTO`:
+
+            State       | Transition Cause | New State | Notes
+          :------------:|:----------------:|:---------:|:-----------------------:
+          INACTIVE      |                  |INACTIVE   |Camera device auto white balance algorithm is disabled
+
+          When android.control.awbMode is AWB_MODE_AUTO:
+
+            State        | Transition Cause                 | New State     | Notes
+          :-------------:|:--------------------------------:|:-------------:|:-----------------:
+          INACTIVE       | Camera device initiates AWB scan | SEARCHING     | Values changing
+          INACTIVE       | android.control.awbLock is ON    | LOCKED        | Values locked
+          SEARCHING      | Camera device finishes AWB scan  | CONVERGED     | Good values, not changing
+          SEARCHING      | android.control.awbLock is ON    | LOCKED        | Values locked
+          CONVERGED      | Camera device initiates AWB scan | SEARCHING     | Values changing
+          CONVERGED      | android.control.awbLock is ON    | LOCKED        | Values locked
+          LOCKED         | android.control.awbLock is OFF   | SEARCHING     | Values not good after unlock
+
+          For the above table, the camera device may skip reporting any state changes that happen
+          without application intervention (i.e. mode switch, trigger, locking). Any state that
+          can be skipped in that manner is called a transient state.
+
+          For example, for this AWB mode (AWB_MODE_AUTO), in addition to the state transitions
+          listed in above table, it is also legal for the camera device to skip one or more
+          transient states between two results. See below table for examples:
+
+            State        | Transition Cause                 | New State     | Notes
+          :-------------:|:--------------------------------:|:-------------:|:-----------------:
+          INACTIVE       | Camera device finished AWB scan  | CONVERGED     | Values are already good, transient states are skipped by camera device.
+          LOCKED         | android.control.awbLock is OFF   | CONVERGED     | Values good after unlock, transient states are skipped by camera device.
+          </details>
+        </entry>
+        <clone entry="android.control.effectMode" kind="controls">
+        </clone>
+        <clone entry="android.control.mode" kind="controls">
+        </clone>
+        <clone entry="android.control.sceneMode" kind="controls">
+        </clone>
+        <clone entry="android.control.videoStabilizationMode" kind="controls">
+        </clone>
+      </dynamic>
+      <static>
+        <entry name="availableHighSpeedVideoConfigurations" type="int32" visibility="hidden"
+               container="array" typedef="highSpeedVideoConfiguration" hwlevel="limited">
+          <array>
+            <size>5</size>
+            <size>n</size>
+          </array>
+          <description>
+          List of available high speed video size, fps range and max batch size configurations
+          supported by the camera device, in the format of (width, height, fps_min, fps_max, batch_size_max).
+          </description>
+          <range>
+          For each configuration, the fps_max &amp;gt;= 120fps.
+          </range>
+          <details>
+          When CONSTRAINED_HIGH_SPEED_VIDEO is supported in android.request.availableCapabilities,
+          this metadata will list the supported high speed video size, fps range and max batch size
+          configurations. All the sizes listed in this configuration will be a subset of the sizes
+          reported by {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputSizes}
+          for processed non-stalling formats.
+
+          For the high speed video use case, the application must
+          select the video size and fps range from this metadata to configure the recording and
+          preview streams and setup the recording requests. For example, if the application intends
+          to do high speed recording, it can select the maximum size reported by this metadata to
+          configure output streams. Once the size is selected, application can filter this metadata
+          by selected size and get the supported fps ranges, and use these fps ranges to setup the
+          recording requests. Note that for the use case of multiple output streams, application
+          must select one unique size from this metadata to use (e.g., preview and recording streams
+          must have the same size). Otherwise, the high speed capture session creation will fail.
+
+          The min and max fps will be multiples of 30fps.
+
+          High speed video streaming places significant performance pressure on camera hardware;
+          to achieve efficient high speed streaming, the camera device may have to aggregate
+          multiple frames together and send to camera device for processing where the request
+          controls are the same for all the frames in this batch. Max batch size indicates
+          the max possible number of frames the camera device will group together for this high
+          speed stream configuration. This max batch size will be used to generate a high speed
+          recording request list by
+          {@link android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList}.
+          The max batch size for each configuration will satisfy below conditions:
+
+          * Each max batch size will be a divisor of its corresponding fps_max / 30. For example,
+          if max_fps is 300, max batch size will only be 1, 2, 5, or 10.
+          * The camera device may choose smaller internal batch size for each configuration, but
+          the actual batch size will be a divisor of max batch size. For example, if the max batch
+          size is 8, the actual batch size used by camera device will only be 1, 2, 4, or 8.
+          * The max batch size in each configuration entry must be no larger than 32.
+
+          The camera device doesn't have to support batch mode to achieve high speed video recording,
+          in such case, batch_size_max will be reported as 1 in each configuration entry.
+
+          The fps ranges in this configuration list can only be used to create requests
+          that are submitted to a high speed camera capture session created by
+          {@link android.hardware.camera2.CameraDevice#createConstrainedHighSpeedCaptureSession}.
+          The fps ranges reported in this metadata must not be used to setup capture requests for
+          normal capture session, or it will cause request error.
+          </details>
+          <hal_details>
+          All the sizes listed in this configuration will be a subset of the sizes reported by
+          android.scaler.availableStreamConfigurations for processed non-stalling output formats.
+          Note that for all high speed video configurations, HAL must be able to support a minimum
+          of two streams, though the application might choose to configure just one stream.
+
+          The HAL may support multiple sensor modes for high speed outputs, for example, 120fps
+          sensor mode and 120fps recording, 240fps sensor mode for 240fps recording. The application
+          usually starts preview first, then starts recording. To avoid sensor mode switch caused
+          stutter when starting recording as much as possible, the application may want to ensure
+          the same sensor mode is used for preview and recording. Therefore, the HAL must advertise
+          the variable fps range [30, fps_max] for each fixed fps range in this configuration list.
+          For example, if the HAL advertises [120, 120] and [240, 240], the HAL must also advertise
+          [30, 120] and [30, 240] for each configuration. In doing so, if the application intends to
+          do 120fps recording, it can select [30, 120] to start preview, and [120, 120] to start
+          recording. For these variable fps ranges, it's up to the HAL to decide the actual fps
+          values that are suitable for smooth preview streaming. If the HAL sees different max_fps
+          values that fall into different sensor modes in a sequence of requests, the HAL must
+          switch the sensor mode as quickly as possible to minimize the mode switch caused stutter.
+          </hal_details>
+          <tag id="V1" />
+        </entry>
+        <entry name="aeLockAvailable" type="byte" visibility="public" enum="true"
+               typedef="boolean" hwlevel="legacy">
+          <enum>
+            <value>FALSE</value>
+            <value>TRUE</value>
+          </enum>
+          <description>Whether the camera device supports android.control.aeLock</description>
+          <details>
+              Devices with MANUAL_SENSOR capability or BURST_CAPTURE capability will always
+              list `true`. This includes FULL devices.
+          </details>
+          <tag id="BC"/>
+        </entry>
+        <entry name="awbLockAvailable" type="byte" visibility="public" enum="true"
+               typedef="boolean" hwlevel="legacy">
+          <enum>
+            <value>FALSE</value>
+            <value>TRUE</value>
+          </enum>
+          <description>Whether the camera device supports android.control.awbLock</description>
+          <details>
+              Devices with MANUAL_POST_PROCESSING capability or BURST_CAPTURE capability will
+              always list `true`. This includes FULL devices.
+          </details>
+          <tag id="BC"/>
+        </entry>
+        <entry name="availableModes" type="byte" visibility="public"
+            type_notes="List of enums (android.control.mode)." container="array"
+            typedef="enumList" hwlevel="legacy">
+          <array>
+            <size>n</size>
+          </array>
+          <description>
+          List of control modes for android.control.mode that are supported by this camera
+          device.
+          </description>
+          <range>Any value listed in android.control.mode</range>
+          <details>
+              This list contains control modes that can be set for the camera device.
+              LEGACY mode devices will always support AUTO mode. LIMITED and FULL
+              devices will always support OFF, AUTO modes.
+          </details>
+        </entry>
+      </static>
+    </section>
+    <section name="demosaic">
+      <controls>
+        <entry name="mode" type="byte" enum="true">
+          <enum>
+            <value>FAST
+            <notes>Minimal or no slowdown of frame rate compared to
+            Bayer RAW output.</notes></value>
+            <value>HIGH_QUALITY
+            <notes>Improved processing quality but the frame rate might be slowed down
+            relative to raw output.</notes></value>
+          </enum>
+          <description>Controls the quality of the demosaicing
+          processing.</description>
+          <tag id="FUTURE" />
+        </entry>
+      </controls>
+    </section>
+    <section name="edge">
+      <controls>
+        <entry name="mode" type="byte" visibility="public" enum="true" hwlevel="full">
+          <enum>
+            <value>OFF
+            <notes>No edge enhancement is applied.</notes></value>
+            <value>FAST
+            <notes>Apply edge enhancement at a quality level that does not slow down frame rate
+            relative to sensor output. It may be the same as OFF if edge enhancement will
+            slow down frame rate relative to sensor.</notes></value>
+            <value>HIGH_QUALITY
+            <notes>Apply high-quality edge enhancement, at a cost of possibly reduced output frame rate.
+            </notes></value>
+            <value optional="true">ZERO_SHUTTER_LAG
+            <notes>Edge enhancement is applied at different levels for different output streams,
+            based on resolution. Streams at maximum recording resolution (see {@link
+            android.hardware.camera2.CameraDevice#createCaptureSession}) or below have
+            edge enhancement applied, while higher-resolution streams have no edge enhancement
+            applied. The level of edge enhancement for low-resolution streams is tuned so that
+            frame rate is not impacted, and the quality is equal to or better than FAST (since it
+            is only applied to lower-resolution outputs, quality may improve from FAST).
+
+            This mode is intended to be used by applications operating in a zero-shutter-lag mode
+            with YUV or PRIVATE reprocessing, where the application continuously captures
+            high-resolution intermediate buffers into a circular buffer, from which a final image is
+            produced via reprocessing when a user takes a picture.  For such a use case, the
+            high-resolution buffers must not have edge enhancement applied to maximize efficiency of
+            preview and to avoid double-applying enhancement when reprocessed, while low-resolution
+            buffers (used for recording or preview, generally) need edge enhancement applied for
+            reasonable preview quality.
+
+            This mode is guaranteed to be supported by devices that support either the
+            YUV_REPROCESSING or PRIVATE_REPROCESSING capabilities
+            (android.request.availableCapabilities lists either of those capabilities) and it will
+            be the default mode for CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG template.
+            </notes></value>
+          </enum>
+          <description>Operation mode for edge
+          enhancement.</description>
+          <range>android.edge.availableEdgeModes</range>
+          <details>Edge enhancement improves sharpness and details in the captured image. OFF means
+          no enhancement will be applied by the camera device.
+
+          FAST/HIGH_QUALITY both mean camera device determined enhancement
+          will be applied. HIGH_QUALITY mode indicates that the
+          camera device will use the highest-quality enhancement algorithms,
+          even if it slows down capture rate. FAST means the camera device will
+          not slow down capture rate when applying edge enhancement. FAST may be the same as OFF if
+          edge enhancement will slow down capture rate. Every output stream will have a similar
+          amount of enhancement applied.
+
+          ZERO_SHUTTER_LAG is meant to be used by applications that maintain a continuous circular
+          buffer of high-resolution images during preview and reprocess image(s) from that buffer
+          into a final capture when triggered by the user. In this mode, the camera device applies
+          edge enhancement to low-resolution streams (below maximum recording resolution) to
+          maximize preview quality, but does not apply edge enhancement to high-resolution streams,
+          since those will be reprocessed later if necessary.
+
+          For YUV_REPROCESSING, these FAST/HIGH_QUALITY modes both mean that the camera
+          device will apply FAST/HIGH_QUALITY YUV-domain edge enhancement, respectively.
+          The camera device may adjust its internal edge enhancement parameters for best
+          image quality based on the android.reprocess.effectiveExposureFactor, if it is set.
+          </details>
+          <hal_details>
+          For YUV_REPROCESSING The HAL can use android.reprocess.effectiveExposureFactor to
+          adjust the internal edge enhancement reduction parameters appropriately to get the best
+          quality images.
+          </hal_details>
+          <tag id="V1" />
+          <tag id="REPROC" />
+        </entry>
+        <entry name="strength" type="byte">
+          <description>Control the amount of edge enhancement
+          applied to the images</description>
+          <units>1-10; 10 is maximum sharpening</units>
+          <tag id="FUTURE" />
+        </entry>
+      </controls>
+      <static>
+        <entry name="availableEdgeModes" type="byte" visibility="public"
+               type_notes="list of enums" container="array" typedef="enumList"
+               hwlevel="full">
+          <array>
+            <size>n</size>
+          </array>
+          <description>
+          List of edge enhancement modes for android.edge.mode that are supported by this camera
+          device.
+          </description>
+          <range>Any value listed in android.edge.mode</range>
+          <details>
+          Full-capability camera devices must always support OFF; camera devices that support
+          YUV_REPROCESSING or PRIVATE_REPROCESSING will list ZERO_SHUTTER_LAG; all devices will
+          list FAST.
+          </details>
+          <hal_details>
+          HAL must support both FAST and HIGH_QUALITY if edge enhancement control is available
+          on the camera device, but the underlying implementation can be the same for both modes.
+          That is, if the highest quality implementation on the camera device does not slow down
+          capture rate, then FAST and HIGH_QUALITY will generate the same output.
+          </hal_details>
+          <tag id="V1" />
+          <tag id="REPROC" />
+        </entry>
+      </static>
+      <dynamic>
+        <clone entry="android.edge.mode" kind="controls">
+          <tag id="V1" />
+          <tag id="REPROC" />
+        </clone>
+      </dynamic>
+    </section>
+    <section name="flash">
+      <controls>
+        <entry name="firingPower" type="byte">
+          <description>Power for flash firing/torch</description>
+          <units>10 is max power; 0 is no flash. Linear</units>
+          <range>0 - 10</range>
+          <details>Power for snapshot may use a different scale than
+          for torch mode. Only one entry for torch mode will be
+          used</details>
+          <tag id="FUTURE" />
+        </entry>
+        <entry name="firingTime" type="int64">
+          <description>Firing time of flash relative to start of
+          exposure</description>
+          <units>nanoseconds</units>
+          <range>0-(exposure time-flash duration)</range>
+          <details>Clamped to (0, exposure time - flash
+          duration).</details>
+          <tag id="FUTURE" />
+        </entry>
+        <entry name="mode" type="byte" visibility="public" enum="true" hwlevel="legacy">
+          <enum>
+            <value>OFF
+              <notes>
+              Do not fire the flash for this capture.
+              </notes>
+            </value>
+            <value>SINGLE
+              <notes>
+              If the flash is available and charged, fire flash
+              for this capture.
+              </notes>
+            </value>
+            <value>TORCH
+              <notes>
+              Transition flash to continuously on.
+              </notes>
+            </value>
+          </enum>
+          <description>The desired mode for the camera device's flash control.</description>
+          <details>
+          This control is only effective when flash unit is available
+          (`android.flash.info.available == true`).
+
+          When this control is used, the android.control.aeMode must be set to ON or OFF.
+          Otherwise, the camera device auto-exposure related flash control (ON_AUTO_FLASH,
+          ON_ALWAYS_FLASH, or ON_AUTO_FLASH_REDEYE) will override this control.
+
+          When set to OFF, the camera device will not fire flash for this capture.
+
+          When set to SINGLE, the camera device will fire flash regardless of the camera
+          device's auto-exposure routine's result. When used in still capture case, this
+          control should be used along with auto-exposure (AE) precapture metering sequence
+          (android.control.aePrecaptureTrigger), otherwise, the image may be incorrectly exposed.
+
+          When set to TORCH, the flash will be on continuously. This mode can be used
+          for use cases such as preview, auto-focus assist, still capture, or video recording.
+
+          The flash status will be reported by android.flash.state in the capture result metadata.
+          </details>
+          <tag id="BC" />
+        </entry>
+      </controls>
+      <static>
+        <namespace name="info">
+          <entry name="available" type="byte" visibility="public" enum="true"
+                 typedef="boolean" hwlevel="legacy">
+            <enum>
+              <value>FALSE</value>
+              <value>TRUE</value>
+            </enum>
+            <description>Whether this camera device has a
+            flash unit.</description>
+            <details>
+            Will be `false` if no flash is available.
+
+            If there is no flash unit, none of the flash controls do
+            anything.</details>
+            <tag id="BC" />
+          </entry>
+          <entry name="chargeDuration" type="int64">
+            <description>Time taken before flash can fire
+            again</description>
+            <units>nanoseconds</units>
+            <range>0-1e9</range>
+            <details>1 second too long/too short for recharge? Should
+            this be power-dependent?</details>
+            <tag id="FUTURE" />
+          </entry>
+        </namespace>
+        <entry name="colorTemperature" type="byte">
+          <description>The x,y whitepoint of the
+          flash</description>
+          <units>pair of floats</units>
+          <range>0-1 for both</range>
+          <tag id="FUTURE" />
+        </entry>
+        <entry name="maxEnergy" type="byte">
+          <description>Max energy output of the flash for a full
+          power single flash</description>
+          <units>lumen-seconds</units>
+          <range>&amp;gt;= 0</range>
+          <tag id="FUTURE" />
+        </entry>
+      </static>
+      <dynamic>
+        <clone entry="android.flash.firingPower" kind="controls">
+        </clone>
+        <clone entry="android.flash.firingTime" kind="controls">
+        </clone>
+        <clone entry="android.flash.mode" kind="controls"></clone>
+        <entry name="state" type="byte" visibility="public" enum="true"
+               hwlevel="limited">
+          <enum>
+            <value>UNAVAILABLE
+            <notes>No flash on camera.</notes></value>
+            <value>CHARGING
+            <notes>Flash is charging and cannot be fired.</notes></value>
+            <value>READY
+            <notes>Flash is ready to fire.</notes></value>
+            <value>FIRED
+            <notes>Flash fired for this capture.</notes></value>
+            <value>PARTIAL
+            <notes>Flash partially illuminated this frame.
+
+            This is usually due to the next or previous frame having
+            the flash fire, and the flash spilling into this capture
+            due to hardware limitations.</notes></value>
+          </enum>
+          <description>Current state of the flash
+          unit.</description>
+          <details>
+          When the camera device doesn't have flash unit
+          (i.e. `android.flash.info.available == false`), this state will always be UNAVAILABLE.
+          Other states indicate the current flash status.
+
+          In certain conditions, this will be available on LEGACY devices:
+
+           * Flash-less cameras always return UNAVAILABLE.
+           * Using android.control.aeMode `==` ON_ALWAYS_FLASH
+             will always return FIRED.
+           * Using android.flash.mode `==` TORCH
+             will always return FIRED.
+
+          In all other conditions the state will not be available on
+          LEGACY devices (i.e. it will be `null`).
+          </details>
+        </entry>
+      </dynamic>
+    </section>
+    <section name="hotPixel">
+      <controls>
+        <entry name="mode" type="byte" visibility="public" enum="true">
+          <enum>
+            <value>OFF
+              <notes>
+              No hot pixel correction is applied.
+
+              The frame rate must not be reduced relative to sensor raw output
+              for this option.
+
+              The hotpixel map may be returned in android.statistics.hotPixelMap.
+              </notes>
+            </value>
+            <value>FAST
+              <notes>
+              Hot pixel correction is applied, without reducing frame
+              rate relative to sensor raw output.
+
+              The hotpixel map may be returned in android.statistics.hotPixelMap.
+              </notes>
+            </value>
+            <value>HIGH_QUALITY
+              <notes>
+              High-quality hot pixel correction is applied, at a cost
+              of possibly reduced frame rate relative to sensor raw output.
+
+              The hotpixel map may be returned in android.statistics.hotPixelMap.
+              </notes>
+            </value>
+          </enum>
+          <description>
+          Operational mode for hot pixel correction.
+          </description>
+          <range>android.hotPixel.availableHotPixelModes</range>
+          <details>
+          Hotpixel correction interpolates out, or otherwise removes, pixels
+          that do not accurately measure the incoming light (i.e. pixels that
+          are stuck at an arbitrary value or are oversensitive).
+          </details>
+          <tag id="V1" />
+          <tag id="RAW" />
+        </entry>
+      </controls>
+      <static>
+        <entry name="availableHotPixelModes" type="byte" visibility="public"
+          type_notes="list of enums" container="array" typedef="enumList">
+          <array>
+            <size>n</size>
+          </array>
+          <description>
+          List of hot pixel correction modes for android.hotPixel.mode that are supported by this
+          camera device.
+          </description>
+          <range>Any value listed in android.hotPixel.mode</range>
+          <details>
+          FULL mode camera devices will always support FAST.
+          </details>
+          <hal_details>
+          To avoid performance issues, there will be significantly fewer hot
+          pixels than actual pixels on the camera sensor.
+          HAL must support both FAST and HIGH_QUALITY if hot pixel correction control is available
+          on the camera device, but the underlying implementation can be the same for both modes.
+          That is, if the highest quality implementation on the camera device does not slow down
+          capture rate, then FAST and HIGH_QUALITY will generate the same output.
+          </hal_details>
+          <tag id="V1" />
+          <tag id="RAW" />
+        </entry>
+      </static>
+      <dynamic>
+        <clone entry="android.hotPixel.mode" kind="controls">
+          <tag id="V1" />
+          <tag id="RAW" />
+        </clone>
+      </dynamic>
+    </section>
+    <section name="jpeg">
+      <controls>
+        <entry name="gpsLocation" type="byte" visibility="public" synthetic="true"
+        typedef="location" hwlevel="legacy">
+          <description>
+          A location object to use when generating image GPS metadata.
+          </description>
+          <details>
+          Setting a location object in a request will include the GPS coordinates of the location
+          into any JPEG images captured based on the request. These coordinates can then be
+          viewed by anyone who receives the JPEG image.
+          </details>
+        </entry>
+        <entry name="gpsCoordinates" type="double" visibility="hidden"
+        type_notes="latitude, longitude, altitude. First two in degrees, the third in meters"
+        container="array" hwlevel="legacy">
+          <array>
+            <size>3</size>
+          </array>
+          <description>GPS coordinates to include in output JPEG
+          EXIF.</description>
+          <range>(-180 - 180], [-90,90], [-inf, inf]</range>
+          <tag id="BC" />
+        </entry>
+        <entry name="gpsProcessingMethod" type="byte" visibility="hidden"
+               typedef="string" hwlevel="legacy">
+          <description>32 characters describing GPS algorithm to
+          include in EXIF.</description>
+          <units>UTF-8 null-terminated string</units>
+          <tag id="BC" />
+        </entry>
+        <entry name="gpsTimestamp" type="int64" visibility="hidden" hwlevel="legacy">
+          <description>Time GPS fix was made to include in
+          EXIF.</description>
+          <units>UTC in seconds since January 1, 1970</units>
+          <tag id="BC" />
+        </entry>
+        <entry name="orientation" type="int32" visibility="public" hwlevel="legacy">
+          <description>The orientation for a JPEG image.</description>
+          <units>Degrees in multiples of 90</units>
+          <range>0, 90, 180, 270</range>
+          <details>
+          The clockwise rotation angle in degrees, relative to the orientation
+          to the camera, that the JPEG picture needs to be rotated by, to be viewed
+          upright.
+
+          Camera devices may either encode this value into the JPEG EXIF header, or
+          rotate the image data to match this orientation. When the image data is rotated,
+          the thumbnail data will also be rotated.
+
+          Note that this orientation is relative to the orientation of the camera sensor, given
+          by android.sensor.orientation.
+
+          To translate from the device orientation given by the Android sensor APIs, the following
+          sample code may be used:
+
+              private int getJpegOrientation(CameraCharacteristics c, int deviceOrientation) {
+                  if (deviceOrientation == android.view.OrientationEventListener.ORIENTATION_UNKNOWN) return 0;
+                  int sensorOrientation = c.get(CameraCharacteristics.SENSOR_ORIENTATION);
+
+                  // Round device orientation to a multiple of 90
+                  deviceOrientation = (deviceOrientation + 45) / 90 * 90;
+
+                  // Reverse device orientation for front-facing cameras
+                  boolean facingFront = c.get(CameraCharacteristics.LENS_FACING) == CameraCharacteristics.LENS_FACING_FRONT;
+                  if (facingFront) deviceOrientation = -deviceOrientation;
+
+                  // Calculate desired JPEG orientation relative to camera orientation to make
+                  // the image upright relative to the device orientation
+                  int jpegOrientation = (sensorOrientation + deviceOrientation + 360) % 360;
+
+                  return jpegOrientation;
+              }
+          </details>
+          <tag id="BC" />
+        </entry>
+        <entry name="quality" type="byte" visibility="public" hwlevel="legacy">
+          <description>Compression quality of the final JPEG
+          image.</description>
+          <range>1-100; larger is higher quality</range>
+          <details>85-95 is typical usage range.</details>
+          <tag id="BC" />
+        </entry>
+        <entry name="thumbnailQuality" type="byte" visibility="public" hwlevel="legacy">
+          <description>Compression quality of JPEG
+          thumbnail.</description>
+          <range>1-100; larger is higher quality</range>
+          <tag id="BC" />
+        </entry>
+        <entry name="thumbnailSize" type="int32" visibility="public"
+        container="array" typedef="size" hwlevel="legacy">
+          <array>
+            <size>2</size>
+          </array>
+          <description>Resolution of embedded JPEG thumbnail.</description>
+          <range>android.jpeg.availableThumbnailSizes</range>
+          <details>When set to (0, 0) value, the JPEG EXIF will not contain thumbnail,
+          but the captured JPEG will still be a valid image.
+
+          For best results, when issuing a request for a JPEG image, the thumbnail size selected
+          should have the same aspect ratio as the main JPEG output.
+
+          If the thumbnail image aspect ratio differs from the JPEG primary image aspect
+          ratio, the camera device creates the thumbnail by cropping it from the primary image.
+          For example, if the primary image has 4:3 aspect ratio, the thumbnail image has
+          16:9 aspect ratio, the primary image will be cropped vertically (letterbox) to
+          generate the thumbnail image. The thumbnail image will always have a smaller Field
+          Of View (FOV) than the primary image when aspect ratios differ.
+
+          When an android.jpeg.orientation of non-zero degree is requested,
+          the camera device will handle thumbnail rotation in one of the following ways:
+
+          * Set the {@link android.media.ExifInterface#TAG_ORIENTATION EXIF orientation flag}
+            and keep jpeg and thumbnail image data unrotated.
+          * Rotate the jpeg and thumbnail image data and not set
+            {@link android.media.ExifInterface#TAG_ORIENTATION EXIF orientation flag}. In this
+            case, LIMITED or FULL hardware level devices will report rotated thumbnail size in
+            capture result, so the width and height will be interchanged if 90 or 270 degree
+            orientation is requested. LEGACY device will always report unrotated thumbnail
+            size.
+          </details>
+          <hal_details>
+          The HAL must not squeeze or stretch the downscaled primary image to generate the thumbnail.
+          The cropping must be done on the primary jpeg image rather than the sensor active array.
+          The stream cropping rule specified by "S5. Cropping" in camera3.h doesn't apply to the
+          thumbnail image cropping.
+          </hal_details>
+          <tag id="BC" />
+        </entry>
+      </controls>
+      <static>
+        <entry name="availableThumbnailSizes" type="int32" visibility="public"
+        container="array" typedef="size" hwlevel="legacy">
+          <array>
+            <size>2</size>
+            <size>n</size>
+          </array>
+          <description>List of JPEG thumbnail sizes for android.jpeg.thumbnailSize supported by this
+          camera device.</description>
+          <details>
+          This list will include at least one non-zero resolution, plus `(0,0)` for indicating no
+          thumbnail should be generated.
+
+          The below conditions will be satisfied for this size list:
+
+          * The sizes will be sorted by increasing pixel area (width x height).
+          If several resolutions have the same area, they will be sorted by increasing width.
+          * The aspect ratio of the largest thumbnail size will be same as the
+          aspect ratio of largest JPEG output size in android.scaler.availableStreamConfigurations.
+          The largest size is defined as the size that has the largest pixel area
+          in a given size list.
+          * Each output JPEG size in android.scaler.availableStreamConfigurations will have at least
+          one corresponding size that has the same aspect ratio in availableThumbnailSizes,
+          and vice versa.
+          * All non-`(0, 0)` sizes will have non-zero widths and heights.</details>
+          <tag id="BC" />
+        </entry>
+        <entry name="maxSize" type="int32" visibility="system">
+          <description>Maximum size in bytes for the compressed
+          JPEG buffer</description>
+          <range>Must be large enough to fit any JPEG produced by
+          the camera</range>
+          <details>This is used for sizing the gralloc buffers for
+          JPEG</details>
+        </entry>
+      </static>
+      <dynamic>
+        <clone entry="android.jpeg.gpsLocation" kind="controls">
+        </clone>
+        <clone entry="android.jpeg.gpsCoordinates" kind="controls">
+        </clone>
+        <clone entry="android.jpeg.gpsProcessingMethod"
+        kind="controls"></clone>
+        <clone entry="android.jpeg.gpsTimestamp" kind="controls">
+        </clone>
+        <clone entry="android.jpeg.orientation" kind="controls">
+        </clone>
+        <clone entry="android.jpeg.quality" kind="controls">
+        </clone>
+        <entry name="size" type="int32">
+          <description>The size of the compressed JPEG image, in
+          bytes</description>
+          <range>&amp;gt;= 0</range>
+          <details>If no JPEG output is produced for the request,
+          this must be 0.
+
+          Otherwise, this describes the real size of the compressed
+          JPEG image placed in the output stream.  More specifically,
+          if android.jpeg.maxSize = 1000000, and a specific capture
+          has android.jpeg.size = 500000, then the output buffer from
+          the JPEG stream will be 1000000 bytes, of which the first
+          500000 make up the real data.</details>
+          <tag id="FUTURE" />
+        </entry>
+        <clone entry="android.jpeg.thumbnailQuality"
+        kind="controls"></clone>
+        <clone entry="android.jpeg.thumbnailSize" kind="controls">
+        </clone>
+      </dynamic>
+    </section>
+    <section name="lens">
+      <controls>
+        <entry name="aperture" type="float" visibility="public" hwlevel="full">
+          <description>The desired lens aperture size, as a ratio of lens focal length to the
+          effective aperture diameter.</description>
+          <units>The f-number (f/N)</units>
+          <range>android.lens.info.availableApertures</range>
+          <details>Setting this value is only supported on the camera devices that have a variable
+          aperture lens.
+
+          When this is supported and android.control.aeMode is OFF,
+          this can be set along with android.sensor.exposureTime,
+          android.sensor.sensitivity, and android.sensor.frameDuration
+          to achieve manual exposure control.
+
+          The requested aperture value may take several frames to reach the
+          requested value; the camera device will report the current (intermediate)
+          aperture size in capture result metadata while the aperture is changing.
+          While the aperture is still changing, android.lens.state will be set to MOVING.
+
+          When this is supported and android.control.aeMode is one of
+          the ON modes, this will be overridden by the camera device
+          auto-exposure algorithm, the overridden values are then provided
+          back to the user in the corresponding result.</details>
+          <tag id="V1" />
+        </entry>
+        <entry name="filterDensity" type="float" visibility="public" hwlevel="full">
+          <description>
+          The desired setting for the lens neutral density filter(s).
+          </description>
+          <units>Exposure Value (EV)</units>
+          <range>android.lens.info.availableFilterDensities</range>
+          <details>
+          This control will not be supported on most camera devices.
+
+          Lens filters are typically used to lower the amount of light the
+          sensor is exposed to (measured in steps of EV). As used here, an EV
+          step is the standard logarithmic representation, which are
+          non-negative, and inversely proportional to the amount of light
+          hitting the sensor.  For example, setting this to 0 would result
+          in no reduction of the incoming light, and setting this to 2 would
+          mean that the filter is set to reduce incoming light by two stops
+          (allowing 1/4 of the prior amount of light to the sensor).
+
+          It may take several frames before the lens filter density changes
+          to the requested value. While the filter density is still changing,
+          android.lens.state will be set to MOVING.
+          </details>
+          <tag id="V1" />
+        </entry>
+        <entry name="focalLength" type="float" visibility="public" hwlevel="legacy">
+          <description>
+          The desired lens focal length; used for optical zoom.
+          </description>
+          <units>Millimeters</units>
+          <range>android.lens.info.availableFocalLengths</range>
+          <details>
+          This setting controls the physical focal length of the camera
+          device's lens. Changing the focal length changes the field of
+          view of the camera device, and is usually used for optical zoom.
+
+          Like android.lens.focusDistance and android.lens.aperture, this
+          setting won't be applied instantaneously, and it may take several
+          frames before the lens can change to the requested focal length.
+          While the focal length is still changing, android.lens.state will
+          be set to MOVING.
+
+          Optical zoom will not be supported on most devices.
+          </details>
+          <tag id="V1" />
+        </entry>
+        <entry name="focusDistance" type="float" visibility="public" hwlevel="full">
+          <description>Desired distance to plane of sharpest focus,
+          measured from frontmost surface of the lens.</description>
+          <units>See android.lens.info.focusDistanceCalibration for details</units>
+          <range>&amp;gt;= 0</range>
+          <details>
+          This control can be used for setting manual focus, on devices that support
+          the MANUAL_SENSOR capability and have a variable-focus lens (see
+          android.lens.info.minimumFocusDistance).
+
+          A value of `0.0f` means infinity focus. The value set will be clamped to
+          `[0.0f, android.lens.info.minimumFocusDistance]`.
+
+          Like android.lens.focalLength, this setting won't be applied
+          instantaneously, and it may take several frames before the lens
+          can move to the requested focus distance. While the lens is still moving,
+          android.lens.state will be set to MOVING.
+
+          LEGACY devices support at most setting this to `0.0f`
+          for infinity focus.
+          </details>
+          <tag id="BC" />
+          <tag id="V1" />
+        </entry>
+        <entry name="opticalStabilizationMode" type="byte" visibility="public"
+        enum="true" hwlevel="limited">
+          <enum>
+            <value>OFF
+              <notes>Optical stabilization is unavailable.</notes>
+            </value>
+            <value optional="true">ON
+              <notes>Optical stabilization is enabled.</notes>
+            </value>
+          </enum>
+          <description>
+          Sets whether the camera device uses optical image stabilization (OIS)
+          when capturing images.
+          </description>
+          <range>android.lens.info.availableOpticalStabilization</range>
+          <details>
+          OIS is used to compensate for motion blur due to small
+          movements of the camera during capture. Unlike digital image
+          stabilization (android.control.videoStabilizationMode), OIS
+          makes use of mechanical elements to stabilize the camera
+          sensor, and thus allows for longer exposure times before
+          camera shake becomes apparent.
+
+          Switching between different optical stabilization modes may take several
+          frames to initialize, the camera device will report the current mode in
+          capture result metadata. For example, when "ON" mode is requested, the
+          optical stabilization modes in the first several capture results may still
+          be "OFF", and it will become "ON" when the initialization is done.
+
+          If a camera device supports both OIS and digital image stabilization
+          (android.control.videoStabilizationMode), turning both modes on may produce undesirable
+          interaction, so it is recommended not to enable both at the same time.
+
+          Not all devices will support OIS; see
+          android.lens.info.availableOpticalStabilization for
+          available controls.
+          </details>
+          <tag id="V1" />
+        </entry>
+      </controls>
+      <static>
+        <namespace name="info">
+          <entry name="availableApertures" type="float" visibility="public"
+          container="array" hwlevel="full">
+            <array>
+              <size>n</size>
+            </array>
+            <description>List of aperture size values for android.lens.aperture that are
+            supported by this camera device.</description>
+            <units>The aperture f-number</units>
+            <details>If the camera device doesn't support a variable lens aperture,
+            this list will contain only one value, which is the fixed aperture size.
+
+            If the camera device supports a variable aperture, the aperture values
+            in this list will be sorted in ascending order.</details>
+            <tag id="V1" />
+          </entry>
+          <entry name="availableFilterDensities" type="float" visibility="public"
+          container="array" hwlevel="full">
+            <array>
+              <size>n</size>
+            </array>
+            <description>
+            List of neutral density filter values for
+            android.lens.filterDensity that are supported by this camera device.
+            </description>
+            <units>Exposure value (EV)</units>
+            <range>
+            Values are &amp;gt;= 0
+            </range>
+            <details>
+            If a neutral density filter is not supported by this camera device,
+            this list will contain only 0. Otherwise, this list will include every
+            filter density supported by the camera device, in ascending order.
+            </details>
+            <tag id="V1" />
+          </entry>
+          <entry name="availableFocalLengths" type="float" visibility="public"
+          type_notes="The list of available focal lengths"
+          container="array" hwlevel="legacy">
+            <array>
+              <size>n</size>
+            </array>
+            <description>
+            List of focal lengths for android.lens.focalLength that are supported by this camera
+            device.
+            </description>
+            <units>Millimeters</units>
+            <range>
+            Values are &amp;gt; 0
+            </range>
+            <details>
+            If optical zoom is not supported, this list will only contain
+            a single value corresponding to the fixed focal length of the
+            device. Otherwise, this list will include every focal length supported
+            by the camera device, in ascending order.
+            </details>
+            <tag id="BC" />
+            <tag id="V1" />
+          </entry>
+          <entry name="availableOpticalStabilization" type="byte"
+          visibility="public" type_notes="list of enums" container="array"
+          typedef="enumList" hwlevel="limited">
+            <array>
+              <size>n</size>
+            </array>
+            <description>
+            List of optical image stabilization (OIS) modes for
+            android.lens.opticalStabilizationMode that are supported by this camera device.
+            </description>
+            <range>Any value listed in android.lens.opticalStabilizationMode</range>
+            <details>
+            If OIS is not supported by a given camera device, this list will
+            contain only OFF.
+            </details>
+            <tag id="V1" />
+          </entry>
+          <entry name="hyperfocalDistance" type="float" visibility="public" optional="true"
+                 hwlevel="limited">
+            <description>Hyperfocal distance for this lens.</description>
+            <units>See android.lens.info.focusDistanceCalibration for details</units>
+            <range>If lens is fixed focus, &amp;gt;= 0. If lens has focuser unit, the value is
+            within `(0.0f, android.lens.info.minimumFocusDistance]`</range>
+            <details>
+            If the lens is not fixed focus, the camera device will report this
+            field when android.lens.info.focusDistanceCalibration is APPROXIMATE or CALIBRATED.
+            </details>
+          </entry>
+          <entry name="minimumFocusDistance" type="float" visibility="public" optional="true"
+                 hwlevel="limited">
+            <description>Shortest distance from frontmost surface
+            of the lens that can be brought into sharp focus.</description>
+            <units>See android.lens.info.focusDistanceCalibration for details</units>
+            <range>&amp;gt;= 0</range>
+            <details>If the lens is fixed-focus, this will be
+            0.</details>
+            <hal_details>Mandatory for FULL devices; LIMITED devices
+            must always set this value to 0 for fixed-focus; and may omit
+            the minimum focus distance otherwise.
+
+            This field is also mandatory for all devices advertising
+            the MANUAL_SENSOR capability.</hal_details>
+            <tag id="V1" />
+          </entry>
+          <entry name="shadingMapSize" type="int32" visibility="hidden"
+                 type_notes="width and height (N, M) of lens shading map provided by the camera device."
+                 container="array" typedef="size" hwlevel="full">
+            <array>
+              <size>2</size>
+            </array>
+            <description>Dimensions of lens shading map.</description>
+            <range>Both values &amp;gt;= 1</range>
+            <details>
+            The map should be on the order of 30-40 rows and columns, and
+            must be smaller than 64x64.
+            </details>
+            <tag id="V1" />
+          </entry>
+          <entry name="focusDistanceCalibration" type="byte" visibility="public"
+                 enum="true" hwlevel="limited">
+            <enum>
+              <value>UNCALIBRATED
+                <notes>
+                The lens focus distance is not accurate, and the units used for
+                android.lens.focusDistance do not correspond to any physical units.
+
+                Setting the lens to the same focus distance on separate occasions may
+                result in a different real focus distance, depending on factors such
+                as the orientation of the device, the age of the focusing mechanism,
+                and the device temperature. The focus distance value will still be
+                in the range of `[0, android.lens.info.minimumFocusDistance]`, where 0
+                represents the farthest focus.
+                </notes>
+              </value>
+              <value>APPROXIMATE
+                <notes>
+                The lens focus distance is measured in diopters.
+
+                However, setting the lens to the same focus distance
+                on separate occasions may result in a different real
+                focus distance, depending on factors such as the
+                orientation of the device, the age of the focusing
+                mechanism, and the device temperature.
+                </notes>
+              </value>
+              <value>CALIBRATED
+                <notes>
+                The lens focus distance is measured in diopters, and
+                is calibrated.
+
+                The lens mechanism is calibrated so that setting the
+                same focus distance is repeatable on multiple
+                occasions with good accuracy, and the focus distance
+                corresponds to the real physical distance to the plane
+                of best focus.
+                </notes>
+              </value>
+            </enum>
+            <description>The lens focus distance calibration quality.</description>
+            <details>
+            The lens focus distance calibration quality determines the reliability of
+            focus related metadata entries, i.e. android.lens.focusDistance,
+            android.lens.focusRange, android.lens.info.hyperfocalDistance, and
+            android.lens.info.minimumFocusDistance.
+
+            APPROXIMATE and CALIBRATED devices report the focus metadata in
+            units of diopters (1/meter), so `0.0f` represents focusing at infinity,
+            and increasing positive numbers represent focusing closer and closer
+            to the camera device. The focus distance control also uses diopters
+            on these devices.
+
+            UNCALIBRATED devices do not use units that are directly comparable
+            to any real physical measurement, but `0.0f` still represents farthest
+            focus, and android.lens.info.minimumFocusDistance represents the
+            nearest focus the device can achieve.
+            </details>
+            <hal_details>
+            For devices that advertise APPROXIMATE quality or higher, diopters 0 (infinity
+            focus) must work. When autofocus is disabled (android.control.afMode == OFF)
+            and the lens focus distance is set to 0 diopters
+            (android.lens.focusDistance == 0), the lens will move to focus at infinity
+            and is stably focused at infinity even if the device tilts. It may take the
+            lens some time to move; during the move the lens state should be MOVING and
+            the output diopter value should be changing toward 0.
+            </hal_details>
+          <tag id="V1" />
+        </entry>
+        </namespace>
+        <entry name="facing" type="byte" visibility="public" enum="true" hwlevel="legacy">
+          <enum>
+            <value>FRONT
+            <notes>
+              The camera device faces the same direction as the device's screen.
+            </notes></value>
+            <value>BACK
+            <notes>
+              The camera device faces the opposite direction as the device's screen.
+            </notes></value>
+            <value>EXTERNAL
+            <notes>
+              The camera device is an external camera, and has no fixed facing relative to the
+              device's screen.
+            </notes></value>
+          </enum>
+          <description>Direction the camera faces relative to
+          device screen.</description>
+        </entry>
+        <entry name="poseRotation" type="float" visibility="public"
+               container="array">
+          <array>
+            <size>4</size>
+          </array>
+          <description>
+            The orientation of the camera relative to the sensor
+            coordinate system.
+          </description>
+          <units>
+            Quaternion coefficients
+          </units>
+          <details>
+            The four coefficients that describe the quaternion
+            rotation from the Android sensor coordinate system to a
+            camera-aligned coordinate system where the X-axis is
+            aligned with the long side of the image sensor, the Y-axis
+            is aligned with the short side of the image sensor, and
+            the Z-axis is aligned with the optical axis of the sensor.
+
+            To convert from the quaternion coefficients `(x,y,z,w)`
+            to the axis of rotation `(a_x, a_y, a_z)` and rotation
+            amount `theta`, the following formulas can be used:
+
+                theta = 2 * acos(w)
+                a_x = x / sin(theta/2)
+                a_y = y / sin(theta/2)
+                a_z = z / sin(theta/2)
+
+            To create a 3x3 rotation matrix that applies the rotation
+            defined by this quaternion, the following matrix can be
+            used:
+
+                R = [ 1 - 2y^2 - 2z^2,       2xy - 2zw,       2xz + 2yw,
+                           2xy + 2zw, 1 - 2x^2 - 2z^2,       2yz - 2xw,
+                           2xz - 2yw,       2yz + 2xw, 1 - 2x^2 - 2y^2 ]
+
+             This matrix can then be used to apply the rotation to a
+             column vector point with
+
+               `p' = Rp`
+
+             where `p` is in the device sensor coordinate system, and
+             `p'` is in the camera-oriented coordinate system.
+          </details>
+          <tag id="DEPTH" />
+        </entry>
+        <entry name="poseTranslation" type="float" visibility="public"
+               container="array">
+          <array>
+            <size>3</size>
+          </array>
+          <description>Position of the camera optical center.</description>
+          <units>Meters</units>
+          <details>
+            The position of the camera device's lens optical center,
+            as a three-dimensional vector `(x,y,z)`, relative to the
+            optical center of the largest camera device facing in the
+            same direction as this camera, in the {@link
+            android.hardware.SensorEvent Android sensor coordinate
+            axes}. Note that only the axis definitions are shared with
+            the sensor coordinate system, but not the origin.
+
+            If this device is the largest or only camera device with a
+            given facing, then this position will be `(0, 0, 0)`; a
+            camera device with a lens optical center located 3 cm from
+            the main sensor along the +X axis (to the right from the
+            user's perspective) will report `(0.03, 0, 0)`.
+
+            To transform pixel coordinates between two cameras
+            facing the same direction, first the source camera
+            android.lens.radialDistortion must be corrected for.  Then
+            the source camera android.lens.intrinsicCalibration needs
+            to be applied, followed by the android.lens.poseRotation
+            of the source camera, the translation of the source camera
+            relative to the destination camera, the
+            android.lens.poseRotation of the destination camera, and
+            finally the inverse of android.lens.intrinsicCalibration
+            of the destination camera. This obtains a
+            radial-distortion-free coordinate in the destination
+            camera pixel coordinates.
+
+            To compare this against a real image from the destination
+            camera, the destination camera image then needs to be
+            corrected for radial distortion before comparison or
+            sampling.
+          </details>
+          <tag id="DEPTH" />
+        </entry>
+      </static>
+      <dynamic>
+        <clone entry="android.lens.aperture" kind="controls">
+          <tag id="V1" />
+        </clone>
+        <clone entry="android.lens.filterDensity" kind="controls">
+          <tag id="V1" />
+        </clone>
+        <clone entry="android.lens.focalLength" kind="controls">
+          <tag id="BC" />
+        </clone>
+        <clone entry="android.lens.focusDistance" kind="controls">
+          <details>Should be zero for fixed-focus cameras</details>
+          <tag id="BC" />
+        </clone>
+        <entry name="focusRange" type="float" visibility="public"
+        type_notes="Range of scene distances that are in focus"
+        container="array" typedef="pairFloatFloat" hwlevel="limited">
+          <array>
+            <size>2</size>
+          </array>
+          <description>The range of scene distances that are in
+          sharp focus (depth of field).</description>
+          <units>A pair of focus distances in diopters: (near,
+          far); see android.lens.info.focusDistanceCalibration for details.</units>
+          <range>&amp;gt;=0</range>
+          <details>If variable focus not supported, can still report
+          fixed depth of field range</details>
+          <tag id="BC" />
+        </entry>
+        <clone entry="android.lens.opticalStabilizationMode"
+        kind="controls">
+          <tag id="V1" />
+        </clone>
+        <entry name="state" type="byte" visibility="public" enum="true" hwlevel="limited">
+          <enum>
+            <value>STATIONARY
+              <notes>
+              The lens parameters (android.lens.focalLength, android.lens.focusDistance,
+              android.lens.filterDensity and android.lens.aperture) are not changing.
+              </notes>
+            </value>
+            <value>MOVING
+              <notes>
+              One or several of the lens parameters
+              (android.lens.focalLength, android.lens.focusDistance,
+              android.lens.filterDensity or android.lens.aperture) is
+              currently changing.
+              </notes>
+            </value>
+          </enum>
+          <description>Current lens status.</description>
+          <details>
+          For lens parameters android.lens.focalLength, android.lens.focusDistance,
+          android.lens.filterDensity and android.lens.aperture, when changes are requested,
+          they may take several frames to reach the requested values. This state indicates
+          the current status of the lens parameters.
+
+          When the state is STATIONARY, the lens parameters are not changing. This could be
+          either because the parameters are all fixed, or because the lens has had enough
+          time to reach the most recently-requested values.
+          If all these lens parameters are not changeable for a camera device, as listed below:
+
+          * Fixed focus (`android.lens.info.minimumFocusDistance == 0`), which means
+          android.lens.focusDistance parameter will always be 0.
+          * Fixed focal length (android.lens.info.availableFocalLengths contains single value),
+          which means the optical zoom is not supported.
+          * No ND filter (android.lens.info.availableFilterDensities contains only 0).
+          * Fixed aperture (android.lens.info.availableApertures contains single value).
+
+          Then this state will always be STATIONARY.
+
+          When the state is MOVING, it indicates that at least one of the lens parameters
+          is changing.
+          </details>
+          <tag id="V1" />
+        </entry>
+        <clone entry="android.lens.poseRotation" kind="static">
+        </clone>
+        <clone entry="android.lens.poseTranslation" kind="static">
+        </clone>
+      </dynamic>
+      <static>
+        <entry name="intrinsicCalibration" type="float" visibility="public"
+               container="array">
+          <array>
+            <size>5</size>
+          </array>
+          <description>
+            The parameters for this camera device's intrinsic
+            calibration.
+          </description>
+          <units>
+            Pixels in the
+            android.sensor.info.preCorrectionActiveArraySize
+            coordinate system.
+          </units>
+          <details>
+            The five calibration parameters that describe the
+            transform from camera-centric 3D coordinates to sensor
+            pixel coordinates:
+
+                [f_x, f_y, c_x, c_y, s]
+
+            Where `f_x` and `f_y` are the horizontal and vertical
+            focal lengths, `[c_x, c_y]` is the position of the optical
+            axis, and `s` is a skew parameter for the sensor plane not
+            being aligned with the lens plane.
+
+            These are typically used within a transformation matrix K:
+
+                K = [ f_x,   s, c_x,
+                       0, f_y, c_y,
+                       0,   0,   1 ]
+
+            which can then be combined with the camera pose rotation
+            `R` and translation `t` (android.lens.poseRotation and
+            android.lens.poseTranslation, respectively) to calculate the
+            complete transform from world coordinates to pixel
+            coordinates:
+
+                P = [ K 0   * [ R t
+                     0 1 ]     0 1 ]
+
+            and with `p_w` being a point in the world coordinate system
+            and `p_s` being a point in the camera active pixel array
+            coordinate system, and with the mapping including the
+            homogeneous division by z:
+
+                 p_h = (x_h, y_h, z_h) = P p_w
+                p_s = p_h / z_h
+
+            so `[x_s, y_s]` is the pixel coordinates of the world
+            point, `z_s = 1`, and `w_s` is a measurement of disparity
+            (depth) in pixel coordinates.
+
+            Note that the coordinate system for this transform is the
+            android.sensor.info.preCorrectionActiveArraySize system,
+            where `(0,0)` is the top-left of the
+            preCorrectionActiveArraySize rectangle. Once the pose and
+            intrinsic calibration transforms have been applied to a
+            world point, then the android.lens.radialDistortion
+            transform needs to be applied, and the result adjusted to
+            be in the android.sensor.info.activeArraySize coordinate
+            system (where `(0, 0)` is the top-left of the
+            activeArraySize rectangle), to determine the final pixel
+            coordinate of the world point for processed (non-RAW)
+            output buffers.
+          </details>
+          <tag id="DEPTH" />
+        </entry>
+        <entry name="radialDistortion" type="float" visibility="public"
+               container="array">
+          <array>
+            <size>6</size>
+          </array>
+          <description>
+            The correction coefficients to correct for this camera device's
+            radial and tangential lens distortion.
+          </description>
+          <units>
+            Unitless coefficients.
+          </units>
+          <details>
+            Four radial distortion coefficients `[kappa_0, kappa_1, kappa_2,
+            kappa_3]` and two tangential distortion coefficients
+            `[kappa_4, kappa_5]` that can be used to correct the
+            lens's geometric distortion with the mapping equations:
+
+                 x_c = x_i * ( kappa_0 + kappa_1 * r^2 + kappa_2 * r^4 + kappa_3 * r^6 ) +
+                       kappa_4 * (2 * x_i * y_i) + kappa_5 * ( r^2 + 2 * x_i^2 )
+                 y_c = y_i * ( kappa_0 + kappa_1 * r^2 + kappa_2 * r^4 + kappa_3 * r^6 ) +
+                       kappa_5 * (2 * x_i * y_i) + kappa_4 * ( r^2 + 2 * y_i^2 )
+
+            Here, `[x_c, y_c]` are the coordinates to sample in the
+            input image that correspond to the pixel values in the
+            corrected image at the coordinate `[x_i, y_i]`:
+
+                 correctedImage(x_i, y_i) = sample_at(x_c, y_c, inputImage)
+
+            The pixel coordinates are defined in a normalized
+            coordinate system related to the
+            android.lens.intrinsicCalibration calibration fields.
+            Both `[x_i, y_i]` and `[x_c, y_c]` have `(0,0)` at the
+            lens optical center `[c_x, c_y]`. The maximum magnitudes
+            of both x and y coordinates are normalized to be 1 at the
+            edge further from the optical center, so the range
+            for both dimensions is `-1 &lt;= x &lt;= 1`.
+
+            Finally, `r` represents the radial distance from the
+            optical center, `r^2 = x_i^2 + y_i^2`, and its magnitude
+            is therefore no larger than `|r| &lt;= sqrt(2)`.
+
+            The distortion model used is the Brown-Conrady model.
+          </details>
+          <tag id="DEPTH" />
+        </entry>
+      </static>
+      <dynamic>
+        <clone entry="android.lens.intrinsicCalibration" kind="static">
+        </clone>
+        <clone entry="android.lens.radialDistortion" kind="static">
+        </clone>
+      </dynamic>
+    </section>
+    <section name="noiseReduction">
+      <controls>
+        <entry name="mode" type="byte" visibility="public" enum="true" hwlevel="full">
+          <enum>
+            <value>OFF
+            <notes>No noise reduction is applied.</notes></value>
+            <value>FAST
+            <notes>Noise reduction is applied without reducing frame rate relative to sensor
+            output. It may be the same as OFF if noise reduction will reduce frame rate
+            relative to sensor.</notes></value>
+            <value>HIGH_QUALITY
+            <notes>High-quality noise reduction is applied, at the cost of possibly reduced frame
+            rate relative to sensor output.</notes></value>
+            <value optional="true">MINIMAL
+            <notes>MINIMAL noise reduction is applied without reducing frame rate relative to
+            sensor output. </notes></value>
+            <value optional="true">ZERO_SHUTTER_LAG
+
+            <notes>Noise reduction is applied at different levels for different output streams,
+            based on resolution. Streams at maximum recording resolution (see {@link
+            android.hardware.camera2.CameraDevice#createCaptureSession}) or below have noise
+            reduction applied, while higher-resolution streams have MINIMAL (if supported) or no
+            noise reduction applied (if MINIMAL is not supported.) The degree of noise reduction
+            for low-resolution streams is tuned so that frame rate is not impacted, and the quality
+            is equal to or better than FAST (since it is only applied to lower-resolution outputs,
+            quality may improve from FAST).
+
+            This mode is intended to be used by applications operating in a zero-shutter-lag mode
+            with YUV or PRIVATE reprocessing, where the application continuously captures
+            high-resolution intermediate buffers into a circular buffer, from which a final image is
+            produced via reprocessing when a user takes a picture.  For such a use case, the
+            high-resolution buffers must not have noise reduction applied to maximize efficiency of
+            preview and to avoid over-applying noise filtering when reprocessing, while
+            low-resolution buffers (used for recording or preview, generally) need noise reduction
+            applied for reasonable preview quality.
+
+            This mode is guaranteed to be supported by devices that support either the
+            YUV_REPROCESSING or PRIVATE_REPROCESSING capabilities
+            (android.request.availableCapabilities lists either of those capabilities) and it will
+            be the default mode for CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG template.
+            </notes></value>
+          </enum>
+          <description>Mode of operation for the noise reduction algorithm.</description>
+          <range>android.noiseReduction.availableNoiseReductionModes</range>
+          <details>The noise reduction algorithm attempts to improve image quality by removing
+          excessive noise added by the capture process, especially in dark conditions.
+
+          OFF means no noise reduction will be applied by the camera device, for both raw and
+          YUV domain.
+
+          MINIMAL means that only sensor raw domain basic noise reduction is enabled, to remove
+          demosaicing or other processing artifacts. For YUV_REPROCESSING, MINIMAL is the same as OFF.
+          This mode is optional, and may not be supported by all devices. The application should check
+          android.noiseReduction.availableNoiseReductionModes before using it.
+
+          FAST/HIGH_QUALITY both mean camera device determined noise filtering
+          will be applied. HIGH_QUALITY mode indicates that the camera device
+          will use the highest-quality noise filtering algorithms,
+          even if it slows down capture rate. FAST means the camera device will not
+          slow down capture rate when applying noise filtering. FAST may be the same as MINIMAL if
+          MINIMAL is listed, or the same as OFF if any noise filtering will slow down capture rate.
+          Every output stream will have a similar amount of enhancement applied.
+
+          ZERO_SHUTTER_LAG is meant to be used by applications that maintain a continuous circular
+          buffer of high-resolution images during preview and reprocess image(s) from that buffer
+          into a final capture when triggered by the user. In this mode, the camera device applies
+          noise reduction to low-resolution streams (below maximum recording resolution) to maximize
+          preview quality, but does not apply noise reduction to high-resolution streams, since
+          those will be reprocessed later if necessary.
+
+          For YUV_REPROCESSING, these FAST/HIGH_QUALITY modes both mean that the camera device
+          will apply FAST/HIGH_QUALITY YUV domain noise reduction, respectively. The camera device
+          may adjust the noise reduction parameters for best image quality based on the
+          android.reprocess.effectiveExposureFactor if it is set.
+          </details>
+          <hal_details>
+          For YUV_REPROCESSING, the HAL can use android.reprocess.effectiveExposureFactor to
+          adjust the internal noise reduction parameters appropriately to get the best quality
+          images.
+          </hal_details>
+          <tag id="V1" />
+          <tag id="REPROC" />
+        </entry>
+        <entry name="strength" type="byte">
+          <description>Control the amount of noise reduction
+          applied to the images</description>
+          <units>1-10; 10 is max noise reduction</units>
+          <range>1 - 10</range>
+          <tag id="FUTURE" />
+        </entry>
+      </controls>
+      <static>
+        <entry name="availableNoiseReductionModes" type="byte" visibility="public"
+        type_notes="list of enums" container="array" typedef="enumList" hwlevel="limited">
+          <array>
+            <size>n</size>
+          </array>
+          <description>
+          List of noise reduction modes for android.noiseReduction.mode that are supported
+          by this camera device.
+          </description>
+          <range>Any value listed in android.noiseReduction.mode</range>
+          <details>
+          Full-capability camera devices will always support OFF and FAST.
+
+          Camera devices that support YUV_REPROCESSING or PRIVATE_REPROCESSING will support
+          ZERO_SHUTTER_LAG.
+
+          Legacy-capability camera devices will only support FAST mode.
+          </details>
+          <hal_details>
+          HAL must support both FAST and HIGH_QUALITY if noise reduction control is available
+          on the camera device, but the underlying implementation can be the same for both modes.
+          That is, if the highest quality implementation on the camera device does not slow down
+          capture rate, then FAST and HIGH_QUALITY will generate the same output.
+          </hal_details>
+          <tag id="V1" />
+          <tag id="REPROC" />
+        </entry>
+      </static>
+      <dynamic>
+        <clone entry="android.noiseReduction.mode" kind="controls">
+          <tag id="V1" />
+          <tag id="REPROC" />
+        </clone>
+      </dynamic>
+    </section>
+    <section name="quirks">
+      <static>
+        <entry name="meteringCropRegion" type="byte" visibility="system" deprecated="true" optional="true">
+          <description>If set to 1, the camera service does not
+          scale 'normalized' coordinates with respect to the crop
+          region. This applies to metering input (a{e,f,wb}Region)
+          and output (face rectangles).</description>
+          <details>Normalized coordinates refer to those in the
+          (-1000,1000) range mentioned in the
+          android.hardware.Camera API.
+
+          HAL implementations should instead always use and emit
+          sensor array-relative coordinates for all region data. Does
+          not need to be listed in static metadata. Support will be
+          removed in future versions of camera service.</details>
+        </entry>
+        <entry name="triggerAfWithAuto" type="byte" visibility="system" deprecated="true" optional="true">
+          <description>If set to 1, then the camera service always
+          switches to FOCUS_MODE_AUTO before issuing an AF
+          trigger.</description>
+          <details>HAL implementations should implement AF trigger
+          modes for AUTO, MACRO, CONTINUOUS_FOCUS, and
+          CONTINUOUS_PICTURE modes instead of using this flag. Does
+          not need to be listed in static metadata. Support will be
+          removed in future versions of camera service.</details>
+        </entry>
+        <entry name="useZslFormat" type="byte" visibility="system" deprecated="true" optional="true">
+          <description>If set to 1, the camera service uses
+          CAMERA2_PIXEL_FORMAT_ZSL instead of
+          HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED for the zero
+          shutter lag stream</description>
+          <details>HAL implementations should use gralloc usage flags
+          to determine that a stream will be used for
+          zero-shutter-lag, instead of relying on an explicit
+          format setting. Does not need to be listed in static
+          metadata. Support will be removed in future versions of
+          camera service.</details>
+        </entry>
+        <entry name="usePartialResult" type="byte" visibility="hidden" deprecated="true" optional="true">
+          <description>
+          If set to 1, the HAL will always split result
+          metadata for a single capture into multiple buffers,
+          returned using multiple process_capture_result calls.
+          </description>
+          <details>
+          Does not need to be listed in static
+          metadata. Support for partial results will be reworked in
+          future versions of camera service. This quirk will stop
+          working at that point; DO NOT USE without careful
+          consideration of future support.
+          </details>
+          <hal_details>
+          Refer to `camera3_capture_result::partial_result`
+          for information on how to implement partial results.
+          </hal_details>
+        </entry>
+      </static>
+      <dynamic>
+        <entry name="partialResult" type="byte" visibility="hidden" deprecated="true" optional="true" enum="true" typedef="boolean">
+          <enum>
+            <value>FINAL
+            <notes>The last or only metadata result buffer
+            for this capture.</notes>
+            </value>
+            <value>PARTIAL
+            <notes>A partial buffer of result metadata for this
+            capture. More result buffers for this capture will be sent
+            by the camera device, the last of which will be marked
+            FINAL.</notes>
+            </value>
+          </enum>
+          <description>
+          Whether a result given to the framework is the
+          final one for the capture, or only a partial that contains a
+          subset of the full set of dynamic metadata
+          values.</description>
+          <range>Optional. Default value is FINAL.</range>
+          <details>
+          The entries in the result metadata buffers for a
+          single capture may not overlap, except for this entry. The
+          FINAL buffers must retain FIFO ordering relative to the
+          requests that generate them, so the FINAL buffer for frame 3 must
+          always be sent to the framework after the FINAL buffer for frame 2, and
+          before the FINAL buffer for frame 4. PARTIAL buffers may be returned
+          in any order relative to other frames, but all PARTIAL buffers for a given
+          capture must arrive before the FINAL buffer for that capture. This entry may
+          only be used by the camera device if quirks.usePartialResult is set to 1.
+          </details>
+          <hal_details>
+          Refer to `camera3_capture_result::partial_result`
+          for information on how to implement partial results.
+          </hal_details>
+        </entry>
+      </dynamic>
+    </section>
+    <section name="request">
+      <controls>
+        <entry name="frameCount" type="int32" visibility="system" deprecated="true">
+          <description>A frame counter set by the framework. Must
+          be maintained unchanged in output frame. This value monotonically
+          increases with every new result (that is, each new result has a unique
+          frameCount value).
+          </description>
+          <units>incrementing integer</units>
+          <range>Any int.</range>
+        </entry>
+        <entry name="id" type="int32" visibility="hidden">
+          <description>An application-specified ID for the current
+          request. Must be maintained unchanged in output
+          frame</description>
+          <units>arbitrary integer assigned by application</units>
+          <range>Any int</range>
+          <tag id="V1" />
+        </entry>
+        <entry name="inputStreams" type="int32" visibility="system" deprecated="true"
+               container="array">
+          <array>
+            <size>n</size>
+          </array>
+          <description>List which camera reprocess stream is used
+          for the source of reprocessing data.</description>
+          <units>List of camera reprocess stream IDs</units>
+          <range>
+          Typically, only one entry allowed, must be a valid reprocess stream ID.
+          </range>
+          <details>Only meaningful when android.request.type ==
+          REPROCESS. Ignored otherwise</details>
+          <tag id="HAL2" />
+        </entry>
+        <entry name="metadataMode" type="byte" visibility="system"
+               enum="true">
+          <enum>
+            <value>NONE
+            <notes>No metadata should be produced on output, except
+            for application-bound buffer data. If no
+            application-bound streams exist, no frame should be
+            placed in the output frame queue. If such streams
+            exist, a frame should be placed on the output queue
+            with null metadata but with the necessary output buffer
+            information. Timestamp information should still be
+            included with any output stream buffers</notes></value>
+            <value>FULL
+            <notes>All metadata should be produced. Statistics will
+            only be produced if they are separately
+            enabled</notes></value>
+          </enum>
+          <description>How much metadata to produce on
+          output</description>
+          <tag id="FUTURE" />
+        </entry>
+        <entry name="outputStreams" type="int32" visibility="system" deprecated="true"
+               container="array">
+          <array>
+            <size>n</size>
+          </array>
+          <description>Lists which camera output streams image data
+          from this capture must be sent to</description>
+          <units>List of camera stream IDs</units>
+          <range>List must only include streams that have been
+          created</range>
+          <details>If no output streams are listed, then the image
+          data should simply be discarded. The image data must
+          still be captured for metadata and statistics production,
+          and the lens and flash must operate as requested.</details>
+          <tag id="HAL2" />
+        </entry>
+        <entry name="type" type="byte" visibility="system" deprecated="true" enum="true">
+          <enum>
+            <value>CAPTURE
+            <notes>Capture a new image from the imaging hardware,
+            and process it according to the
+            settings</notes></value>
+            <value>REPROCESS
+            <notes>Process previously captured data; the
+            android.request.inputStreams parameter determines the
+            source reprocessing stream. TODO: Mark dynamic metadata
+            needed for reprocessing with [RP]</notes></value>
+          </enum>
+          <description>The type of the request; either CAPTURE or
+          REPROCESS. For HAL3, this tag is redundant.
+          </description>
+          <tag id="HAL2" />
+        </entry>
+      </controls>
+      <static>
+        <entry name="maxNumOutputStreams" type="int32" visibility="hidden"
+        container="array" hwlevel="legacy">
+          <array>
+            <size>3</size>
+          </array>
+          <description>The maximum numbers of different types of output streams
+          that can be configured and used simultaneously by a camera device.
+          </description>
+          <range>
+          For processed (and stalling) format streams, &amp;gt;= 1.
+
+          For Raw format (either stalling or non-stalling) streams, &amp;gt;= 0.
+
+          For processed (but not stalling) format streams, &amp;gt;= 3
+          for FULL mode devices (`android.info.supportedHardwareLevel == FULL`);
+          &amp;gt;= 2 for LIMITED mode devices (`android.info.supportedHardwareLevel == LIMITED`).
+          </range>
+          <details>
+          This is a 3 element tuple that contains the max number of output simultaneous
+          streams for raw sensor, processed (but not stalling), and processed (and stalling)
+          formats respectively. For example, assuming that JPEG is typically a processed and
+          stalling stream, if max raw sensor format output stream number is 1, max YUV streams
+          number is 3, and max JPEG stream number is 2, then this tuple should be `(1, 3, 2)`.
+
+          This lists the upper bound of the number of output streams supported by
+          the camera device. Using more streams simultaneously may require more hardware and
+          CPU resources that will consume more power. The image format for an output stream can
+          be any supported format provided by android.scaler.availableStreamConfigurations.
+          The formats defined in android.scaler.availableStreamConfigurations can be categorized
+          into the 3 stream types as below:
+
+          * Processed (and stalling): any non-RAW format with a stallDurations &amp;gt; 0.
+            Typically {@link android.graphics.ImageFormat#JPEG JPEG format}.
+          * Raw formats: {@link android.graphics.ImageFormat#RAW_SENSOR RAW_SENSOR}, {@link
+            android.graphics.ImageFormat#RAW10 RAW10}, or {@link android.graphics.ImageFormat#RAW12
+            RAW12}.
+          * Processed (but not-stalling): any non-RAW format without a stall duration.
+            Typically {@link android.graphics.ImageFormat#YUV_420_888 YUV_420_888},
+            {@link android.graphics.ImageFormat#NV21 NV21}, or
+            {@link android.graphics.ImageFormat#YV12 YV12}.
+          </details>
+          <tag id="BC" />
+        </entry>
+        <entry name="maxNumOutputRaw" type="int32" visibility="public" synthetic="true" hwlevel="legacy">
+          <description>The maximum numbers of different types of output streams
+          that can be configured and used simultaneously by a camera device
+          for any `RAW` formats.
+          </description>
+          <range>
+          &amp;gt;= 0
+          </range>
+          <details>
+          This value contains the max number of output simultaneous
+          streams from the raw sensor.
+
+          This lists the upper bound of the number of output streams supported by
+          the camera device. Using more streams simultaneously may require more hardware and
+          CPU resources that will consume more power. The image format for this kind of an output stream can
+          be any `RAW` and supported format provided by android.scaler.streamConfigurationMap.
+
+          In particular, a `RAW` format is typically one of:
+
+          * {@link android.graphics.ImageFormat#RAW_SENSOR RAW_SENSOR}
+          * {@link android.graphics.ImageFormat#RAW10 RAW10}
+          * {@link android.graphics.ImageFormat#RAW12 RAW12}
+
+          LEGACY mode devices (android.info.supportedHardwareLevel `==` LEGACY)
+          never support raw streams.
+          </details>
+        </entry>
+        <entry name="maxNumOutputProc" type="int32" visibility="public" synthetic="true" hwlevel="legacy">
+          <description>The maximum numbers of different types of output streams
+          that can be configured and used simultaneously by a camera device
+          for any processed (but not-stalling) formats.
+          </description>
+          <range>
+          &amp;gt;= 3
+          for FULL mode devices (`android.info.supportedHardwareLevel == FULL`);
+          &amp;gt;= 2 for LIMITED mode devices (`android.info.supportedHardwareLevel == LIMITED`).
+          </range>
+          <details>
+          This value contains the max number of output simultaneous
+          streams for any processed (but not-stalling) formats.
+
+          This lists the upper bound of the number of output streams supported by
+          the camera device. Using more streams simultaneously may require more hardware and
+          CPU resources that will consume more power. The image format for this kind of an output stream can
+          be any non-`RAW` and supported format provided by android.scaler.streamConfigurationMap.
+
+          Processed (but not-stalling) is defined as any non-RAW format without a stall duration.
+          Typically:
+
+          * {@link android.graphics.ImageFormat#YUV_420_888 YUV_420_888}
+          * {@link android.graphics.ImageFormat#NV21 NV21}
+          * {@link android.graphics.ImageFormat#YV12 YV12}
+          * Implementation-defined formats, i.e. {@link
+            android.hardware.camera2.params.StreamConfigurationMap#isOutputSupportedFor(Class)}
+
+          For full guarantees, query {@link
+          android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration} with a
+          processed format -- it will return 0 for a non-stalling stream.
+
+          LEGACY devices will support at least 2 processing/non-stalling streams.
+          </details>
+        </entry>
+        <entry name="maxNumOutputProcStalling" type="int32" visibility="public" synthetic="true" hwlevel="legacy">
+          <description>The maximum numbers of different types of output streams
+          that can be configured and used simultaneously by a camera device
+          for any processed (and stalling) formats.
+          </description>
+          <range>
+          &amp;gt;= 1
+          </range>
+          <details>
+          This value contains the max number of output simultaneous
+          streams for any processed (and stalling) formats.
+
+          This lists the upper bound of the number of output streams supported by
+          the camera device. Using more streams simultaneously may require more hardware and
+          CPU resources that will consume more power. The image format for this kind of an output stream can
+          be any non-`RAW` and supported format provided by android.scaler.streamConfigurationMap.
+
+          A processed and stalling format is defined as any non-RAW format with a stallDurations
+          &amp;gt; 0.  Typically only the {@link android.graphics.ImageFormat#JPEG JPEG format} is a
+          stalling format.
+
+          For full guarantees, query {@link
+          android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration} with a
+          processed format -- it will return a non-0 value for a stalling stream.
+
+          LEGACY devices will support up to 1 processing/stalling stream.
+          </details>
+        </entry>
+        <entry name="maxNumReprocessStreams" type="int32" visibility="system"
+        deprecated="true" container="array">
+          <array>
+            <size>1</size>
+          </array>
+          <description>How many reprocessing streams of any type
+          can be allocated at the same time.</description>
+          <range>&amp;gt;= 0</range>
+          <details>
+          Only used by HAL2.x.
+
+          When set to 0, it means no reprocess stream is supported.
+          </details>
+          <tag id="HAL2" />
+        </entry>
+        <entry name="maxNumInputStreams" type="int32" visibility="public" hwlevel="full">
+          <description>
+          The maximum numbers of any type of input streams
+          that can be configured and used simultaneously by a camera device.
+          </description>
+          <range>
+          0 or 1.
+          </range>
+          <details>When set to 0, it means no input stream is supported.
+
+          The image format for an input stream can be any supported format returned by {@link
+          android.hardware.camera2.params.StreamConfigurationMap#getInputFormats}. When using an
+          input stream, there must be at least one output stream configured to receive the
+          reprocessed images.
+
+          When an input stream and some output streams are used in a reprocessing request,
+          only the input buffer will be used to produce these output stream buffers, and a
+          new sensor image will not be captured.
+
+          For example, for Zero Shutter Lag (ZSL) still capture use case, the input
+          stream image format will be PRIVATE, the associated output stream image format
+          should be JPEG.
+          </details>
+          <hal_details>
+          For the reprocessing flow and controls, see
+          hardware/libhardware/include/hardware/camera3.h Section 10 for more details.
+          </hal_details>
+          <tag id="REPROC" />
+        </entry>
+      </static>
+      <dynamic>
+        <entry name="frameCount" type="int32" visibility="hidden" deprecated="true">
+          <description>A frame counter set by the framework. This value monotonically
+          increases with every new result (that is, each new result has a unique
+          frameCount value).</description>
+          <units>count of frames</units>
+          <range>&amp;gt; 0</range>
+          <details>Reset on release()</details>
+        </entry>
+        <clone entry="android.request.id" kind="controls"></clone>
+        <clone entry="android.request.metadataMode"
+        kind="controls"></clone>
+        <clone entry="android.request.outputStreams"
+        kind="controls"></clone>
+        <entry name="pipelineDepth" type="byte" visibility="public" hwlevel="legacy">
+          <description>Specifies the number of pipeline stages the frame went
+          through from when it was exposed to when the final completed result
+          was available to the framework.</description>
+          <range>&amp;lt;= android.request.pipelineMaxDepth</range>
+          <details>Depending on what settings are used in the request, and
+          what streams are configured, the data may undergo less processing,
+          and some pipeline stages skipped.
+
+          See android.request.pipelineMaxDepth for more details.
+          </details>
+          <hal_details>
+          This value must always represent the accurate count of how many
+          pipeline stages were actually used.
+          </hal_details>
+        </entry>
+      </dynamic>
+      <static>
+        <entry name="pipelineMaxDepth" type="byte" visibility="public" hwlevel="legacy">
+          <description>Specifies the number of maximum pipeline stages a frame
+          has to go through from when it's exposed to when it's available
+          to the framework.</description>
+          <details>A typical minimum value for this is 2 (one stage to expose,
+          one stage to readout) from the sensor. The ISP then usually adds
+          its own stages to do custom HW processing. Further stages may be
+          added by SW processing.
+
+          Depending on what settings are used (e.g. YUV, JPEG) and what
+          processing is enabled (e.g. face detection), the actual pipeline
+          depth (specified by android.request.pipelineDepth) may be less than
+          the max pipeline depth.
+
+          A pipeline depth of X stages is equivalent to a pipeline latency of
+          X frame intervals.
+
+          This value will normally be 8 or less, however, for high speed capture session,
+          the max pipeline depth will be up to 8 x size of high speed capture request list.
+          </details>
+          <hal_details>
+          This value should be 4 or less, except for the high speed recording session, where the
+          max batch sizes may be larger than 1.
+          </hal_details>
+        </entry>
+        <entry name="partialResultCount" type="int32" visibility="public" optional="true">
+          <description>Defines how many sub-components
+          a result will be composed of.
+          </description>
+          <range>&amp;gt;= 1</range>
+          <details>In order to combat the pipeline latency, partial results
+          may be delivered to the application layer from the camera device as
+          soon as they are available.
+
+          Optional; defaults to 1. A value of 1 means that partial
+          results are not supported, and only the final TotalCaptureResult will
+          be produced by the camera device.
+
+          A typical use case for this might be: after requesting an
+          auto-focus (AF) lock the new AF state might be available 50%
+          of the way through the pipeline.  The camera device could
+          then immediately dispatch this state via a partial result to
+          the application, and the rest of the metadata via later
+          partial results.
+          </details>
+        </entry>
+        <entry name="availableCapabilities" type="byte" visibility="public"
+          enum="true" container="array" hwlevel="legacy">
+          <array>
+            <size>n</size>
+          </array>
+          <enum>
+            <value>BACKWARD_COMPATIBLE
+              <notes>The minimal set of capabilities that every camera
+                device (regardless of android.info.supportedHardwareLevel)
+                supports.
+
+                This capability is listed by all normal devices, and
+                indicates that the camera device has a feature set
+                that's comparable to the baseline requirements for the
+                older android.hardware.Camera API.
+
+                Devices with the DEPTH_OUTPUT capability might not list this
+                capability, indicating that they support only depth measurement,
+                not standard color output.
+              </notes>
+            </value>
+            <value optional="true">MANUAL_SENSOR
+              <notes>
+              The camera device can be manually controlled (3A algorithms such
+              as auto-exposure, and auto-focus can be bypassed).
+              The camera device supports basic manual control of the sensor image
+              acquisition related stages. This means the following controls are
+              guaranteed to be supported:
+
+              * Manual frame duration control
+                  * android.sensor.frameDuration
+                  * android.sensor.info.maxFrameDuration
+              * Manual exposure control
+                  * android.sensor.exposureTime
+                  * android.sensor.info.exposureTimeRange
+              * Manual sensitivity control
+                  * android.sensor.sensitivity
+                  * android.sensor.info.sensitivityRange
+              * Manual lens control (if the lens is adjustable)
+                  * android.lens.*
+              * Manual flash control (if a flash unit is present)
+                  * android.flash.*
+              * Manual black level locking
+                  * android.blackLevel.lock
+              * Auto exposure lock
+                  * android.control.aeLock
+
+              If any of the above 3A algorithms are enabled, then the camera
+              device will accurately report the values applied by 3A in the
+              result.
+
+              A given camera device may also support additional manual sensor controls,
+              but this capability only covers the above list of controls.
+
+              If this is supported, android.scaler.streamConfigurationMap will
+              additionally return a min frame duration that is greater than
+              zero for each supported size-format combination.
+              </notes>
+            </value>
+            <value optional="true">MANUAL_POST_PROCESSING
+              <notes>
+              The camera device post-processing stages can be manually controlled.
+              The camera device supports basic manual control of the image post-processing
+              stages. This means the following controls are guaranteed to be supported:
+
+              * Manual tonemap control
+                  * android.tonemap.curve
+                  * android.tonemap.mode
+                  * android.tonemap.maxCurvePoints
+                  * android.tonemap.gamma
+                  * android.tonemap.presetCurve
+
+              * Manual white balance control
+                  * android.colorCorrection.transform
+                  * android.colorCorrection.gains
+              * Manual lens shading map control
+                    * android.shading.mode
+                    * android.statistics.lensShadingMapMode
+                    * android.statistics.lensShadingMap
+                    * android.lens.info.shadingMapSize
+              * Manual aberration correction control (if aberration correction is supported)
+                    * android.colorCorrection.aberrationMode
+                    * android.colorCorrection.availableAberrationModes
+              * Auto white balance lock
+                    * android.control.awbLock
+
+              If auto white balance is enabled, then the camera device
+              will accurately report the values applied by AWB in the result.
+
+              A given camera device may also support additional post-processing
+              controls, but this capability only covers the above list of controls.
+              </notes>
+            </value>
+            <value optional="true">RAW
+              <notes>
+              The camera device supports outputting RAW buffers and
+              metadata for interpreting them.
+
+              Devices supporting the RAW capability allow both for
+              saving DNG files, and for direct application processing of
+              raw sensor images.
+
+              * RAW_SENSOR is supported as an output format.
+              * The maximum available resolution for RAW_SENSOR streams
+                will match either the value in
+                android.sensor.info.pixelArraySize or
+                android.sensor.info.preCorrectionActiveArraySize.
+              * All DNG-related optional metadata entries are provided
+                by the camera device.
+              </notes>
+            </value>
+            <value optional="true">PRIVATE_REPROCESSING
+              <notes>
+              The camera device supports the Zero Shutter Lag reprocessing use case.
+
+              * One input stream is supported, that is, `android.request.maxNumInputStreams == 1`.
+              * {@link android.graphics.ImageFormat#PRIVATE} is supported as an output/input format,
+                that is, {@link android.graphics.ImageFormat#PRIVATE} is included in the lists of
+                formats returned by {@link
+                android.hardware.camera2.params.StreamConfigurationMap#getInputFormats} and {@link
+                android.hardware.camera2.params.StreamConfigurationMap#getOutputFormats}.
+              * {@link android.hardware.camera2.params.StreamConfigurationMap#getValidOutputFormatsForInput}
+                returns non-empty int[] for each supported input format returned by {@link
+                android.hardware.camera2.params.StreamConfigurationMap#getInputFormats}.
+              * Each size returned by {@link
+                android.hardware.camera2.params.StreamConfigurationMap#getInputSizes
+                getInputSizes(ImageFormat.PRIVATE)} is also included in {@link
+                android.hardware.camera2.params.StreamConfigurationMap#getOutputSizes
+                getOutputSizes(ImageFormat.PRIVATE)}
+              * Using {@link android.graphics.ImageFormat#PRIVATE} does not cause a frame rate drop
+                relative to the sensor's maximum capture rate (at that resolution).
+              * {@link android.graphics.ImageFormat#PRIVATE} will be reprocessable into both
+                {@link android.graphics.ImageFormat#YUV_420_888} and
+                {@link android.graphics.ImageFormat#JPEG} formats.
+              * The maximum available resolution for PRIVATE streams
+                (both input/output) will match the maximum available
+                resolution of JPEG streams.
+              * Static metadata android.reprocess.maxCaptureStall.
+              * Only below controls are effective for reprocessing requests and
+                will be present in capture results, other controls in reprocess
+                requests will be ignored by the camera device.
+                    * android.jpeg.*
+                    * android.noiseReduction.mode
+                    * android.edge.mode
+              * android.noiseReduction.availableNoiseReductionModes and
+                android.edge.availableEdgeModes will both list ZERO_SHUTTER_LAG as a supported mode.
+              </notes>
+            </value>
+            <value optional="true">READ_SENSOR_SETTINGS
+              <notes>
+              The camera device supports accurately reporting the sensor settings for many of
+              the sensor controls while the built-in 3A algorithm is running.  This allows
+              reporting of sensor settings even when these settings cannot be manually changed.
+
+              The values reported for the following controls are guaranteed to be available
+              in the CaptureResult, including when 3A is enabled:
+
+              * Exposure control
+                  * android.sensor.exposureTime
+              * Sensitivity control
+                  * android.sensor.sensitivity
+              * Lens controls (if the lens is adjustable)
+                  * android.lens.focusDistance
+                  * android.lens.aperture
+
+              This capability is a subset of the MANUAL_SENSOR control capability, and will
+              always be included if the MANUAL_SENSOR capability is available.
+              </notes>
+            </value>
+            <value optional="true">BURST_CAPTURE
+              <notes>
+              The camera device supports capturing high-resolution images at &gt;= 20 frames per
+              second, in at least the uncompressed YUV format, when post-processing settings are set
+              to FAST. Additionally, maximum-resolution images can be captured at &gt;= 10 frames
+              per second.  Here, 'high resolution' means at least 8 megapixels, or the maximum
+              resolution of the device, whichever is smaller.
+
+              More specifically, this means that a size matching the camera device's active array
+              size is listed as a supported size for the {@link
+              android.graphics.ImageFormat#YUV_420_888} format in either {@link
+              android.hardware.camera2.params.StreamConfigurationMap#getOutputSizes} or {@link
+              android.hardware.camera2.params.StreamConfigurationMap#getHighResolutionOutputSizes},
+              with a minimum frame duration for that format and size of either &lt;= 1/20 s, or
+              &lt;= 1/10 s, respectively; and the android.control.aeAvailableTargetFpsRanges entry
+              lists at least one FPS range where the minimum FPS is &gt;= 1 / minimumFrameDuration
+              for the maximum-size YUV_420_888 format.  If that maximum size is listed in {@link
+              android.hardware.camera2.params.StreamConfigurationMap#getHighResolutionOutputSizes},
+              then the list of resolutions for YUV_420_888 from {@link
+              android.hardware.camera2.params.StreamConfigurationMap#getOutputSizes} contains at
+              least one resolution &gt;= 8 megapixels, with a minimum frame duration of &lt;= 1/20
+              s.
+
+              If the device supports the {@link android.graphics.ImageFormat#RAW10}, {@link
+              android.graphics.ImageFormat#RAW12}, then those can also be captured at the same rate
+              as the maximum-size YUV_420_888 resolution is.
+
+              If the device supports the PRIVATE_REPROCESSING capability, then the same guarantees
+              as for the YUV_420_888 format also apply to the {@link
+              android.graphics.ImageFormat#PRIVATE} format.
+
+              In addition, the android.sync.maxLatency field is guaranteed to have a value between 0
+              and 4, inclusive. android.control.aeLockAvailable and android.control.awbLockAvailable
+              are also guaranteed to be `true` so burst capture with these two locks ON yields
+              consistent image output.
+              </notes>
+            </value>
+            <value optional="true">YUV_REPROCESSING
+              <notes>
+              The camera device supports the YUV_420_888 reprocessing use case, similar to
+              PRIVATE_REPROCESSING. This capability requires the camera device to support the
+              following:
+
+              * One input stream is supported, that is, `android.request.maxNumInputStreams == 1`.
+              * {@link android.graphics.ImageFormat#YUV_420_888} is supported as an output/input format, that is,
+                YUV_420_888 is included in the lists of formats returned by
+                {@link android.hardware.camera2.params.StreamConfigurationMap#getInputFormats} and
+                {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputFormats}.
+              * {@link
+                android.hardware.camera2.params.StreamConfigurationMap#getValidOutputFormatsForInput}
+                returns non-empty int[] for each supported input format returned by {@link
+                android.hardware.camera2.params.StreamConfigurationMap#getInputFormats}.
+              * Each size returned by {@link
+                android.hardware.camera2.params.StreamConfigurationMap#getInputSizes
+                getInputSizes(YUV_420_888)} is also included in {@link
+                android.hardware.camera2.params.StreamConfigurationMap#getOutputSizes
+                getOutputSizes(YUV_420_888)}
+              * Using {@link android.graphics.ImageFormat#YUV_420_888} does not cause a frame rate drop
+                relative to the sensor's maximum capture rate (at that resolution).
+              * {@link android.graphics.ImageFormat#YUV_420_888} will be reprocessable into both
+                {@link android.graphics.ImageFormat#YUV_420_888} and {@link
+                android.graphics.ImageFormat#JPEG} formats.
+              * The maximum available resolution for {@link
+                android.graphics.ImageFormat#YUV_420_888} streams (both input/output) will match the
+                maximum available resolution of {@link android.graphics.ImageFormat#JPEG} streams.
+              * Static metadata android.reprocess.maxCaptureStall.
+              * Only the below controls are effective for reprocessing requests and will be present
+                in capture results. The reprocess requests are from the original capture results that
+                are associated with the intermediate {@link android.graphics.ImageFormat#YUV_420_888}
+                output buffers.  All other controls in the reprocess requests will be ignored by the
+                camera device.
+                    * android.jpeg.*
+                    * android.noiseReduction.mode
+                    * android.edge.mode
+                    * android.reprocess.effectiveExposureFactor
+              * android.noiseReduction.availableNoiseReductionModes and
+                android.edge.availableEdgeModes will both list ZERO_SHUTTER_LAG as a supported mode.
+              </notes>
+            </value>
+            <value optional="true">DEPTH_OUTPUT
+              <notes>
+              The camera device can produce depth measurements from its field of view.
+
+              This capability requires the camera device to support the following:
+
+              * {@link android.graphics.ImageFormat#DEPTH16} is supported as an output format.
+              * {@link android.graphics.ImageFormat#DEPTH_POINT_CLOUD} is optionally supported as an
+                output format.
+              * This camera device, and all camera devices with the same android.lens.facing,
+                will list the following calibration entries in both
+                {@link android.hardware.camera2.CameraCharacteristics} and
+                {@link android.hardware.camera2.CaptureResult}:
+                  - android.lens.poseTranslation
+                  - android.lens.poseRotation
+                  - android.lens.intrinsicCalibration
+                  - android.lens.radialDistortion
+              * The android.depth.depthIsExclusive entry is listed by this device.
+              * A LIMITED camera with only the DEPTH_OUTPUT capability does not have to support
+                normal YUV_420_888, JPEG, and PRIV-format outputs. It only has to support the DEPTH16
+                format.
+
+              Generally, depth output operates at a slower frame rate than standard color capture,
+              so the DEPTH16 and DEPTH_POINT_CLOUD formats will commonly have a stall duration that
+              should be accounted for (see
+              {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration}).
+              On a device that supports both depth and color-based output, to enable smooth preview,
+              using a repeating burst is recommended, where a depth-output target is only included
+              once every N frames, where N is the ratio between preview output rate and depth output
+              rate, including depth stall time.
+              </notes>
+            </value>
+            <value optional="true">CONSTRAINED_HIGH_SPEED_VIDEO
+              <notes>
+              The device supports constrained high speed video recording (frame rate >=120fps)
+              use case. The camera device will support high speed capture session created by
+              {@link android.hardware.camera2.CameraDevice#createConstrainedHighSpeedCaptureSession}, which
+              only accepts high speed request lists created by
+              {@link android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList}.
+
+              A camera device can still support high speed video streaming by advertising the high speed
+              FPS ranges in android.control.aeAvailableTargetFpsRanges. For this case, all normal
+              capture request per frame control and synchronization requirements will apply to
+              the high speed fps ranges, the same as all other fps ranges. This capability describes
+              the capability of a specialized operating mode with many limitations (see below), which
+              is only targeted at high speed video recording.
+
+              The supported high speed video sizes and fps ranges are specified in
+              {@link android.hardware.camera2.params.StreamConfigurationMap#getHighSpeedVideoFpsRanges}.
+              To get desired output frame rates, the application is only allowed to select video size
+              and FPS range combinations provided by
+              {@link android.hardware.camera2.params.StreamConfigurationMap#getHighSpeedVideoSizes}.
+              The fps range can be controlled via android.control.aeTargetFpsRange.
+
+              In this capability, the camera device will override aeMode, awbMode, and afMode to
+              ON, AUTO, and CONTINUOUS_VIDEO, respectively. All post-processing block mode
+              controls will be overridden to be FAST. Therefore, no manual control of capture
+              and post-processing parameters is possible. All other controls operate the
+              same as when android.control.mode == AUTO. This means that all other
+              android.control.* fields continue to work, such as
+
+              * android.control.aeTargetFpsRange
+              * android.control.aeExposureCompensation
+              * android.control.aeLock
+              * android.control.awbLock
+              * android.control.effectMode
+              * android.control.aeRegions
+              * android.control.afRegions
+              * android.control.awbRegions
+              * android.control.afTrigger
+              * android.control.aePrecaptureTrigger
+
+              Outside of android.control.*, the following controls will work:
+
+              * android.flash.mode (TORCH mode only, automatic flash for still capture will not
+              work since aeMode is ON)
+              * android.lens.opticalStabilizationMode (if it is supported)
+              * android.scaler.cropRegion
+              * android.statistics.faceDetectMode (if it is supported)
+
+              For high speed recording use case, the actual maximum supported frame rate may
+              be lower than what camera can output, depending on the destination Surfaces for
+              the image data. For example, if the destination surface is from video encoder,
+              the application needs to check if the video encoder is capable of supporting the
+              high frame rate for a given video size, or it will end up with lower recording
+              frame rate. If the destination surface is from preview window, the actual preview frame
+              rate will be bounded by the screen refresh rate.
+
+              The camera device will only support up to 2 high speed simultaneous output surfaces
+              (preview and recording surfaces)
+              in this mode. Above controls will be effective only if all of below conditions are true:
+
+              * The application creates a camera capture session with no more than 2 surfaces via
+              {@link android.hardware.camera2.CameraDevice#createConstrainedHighSpeedCaptureSession}. The
+              targeted surfaces must be preview surface (either from
+              {@link android.view.SurfaceView} or {@link android.graphics.SurfaceTexture}) or
+              recording surface (either from {@link android.media.MediaRecorder#getSurface} or
+              {@link android.media.MediaCodec#createInputSurface}).
+              * The stream sizes are selected from the sizes reported by
+              {@link android.hardware.camera2.params.StreamConfigurationMap#getHighSpeedVideoSizes}.
+              * The FPS ranges are selected from
+              {@link android.hardware.camera2.params.StreamConfigurationMap#getHighSpeedVideoFpsRanges}.
+
+              When above conditions are NOT satisfied,
+              {@link android.hardware.camera2.CameraDevice#createConstrainedHighSpeedCaptureSession}
+              will fail.
+
+              Switching to a FPS range that has different maximum FPS may trigger some camera device
+              reconfigurations, which may introduce extra latency. It is recommended that
+              the application avoids unnecessary maximum target FPS changes as much as possible
+              during high speed streaming.
+              </notes>
+            </value>
+          </enum>
+          <description>List of capabilities that this camera device
+          advertises as fully supporting.</description>
+          <details>
+          A capability is a contract that the camera device makes in order
+          to be able to satisfy one or more use cases.
+
+          Listing a capability guarantees that the whole set of features
+          required to support a common use will all be available.
+
+          Using a subset of the functionality provided by an unsupported
+          capability may be possible on a specific camera device implementation;
+          to do this query each of android.request.availableRequestKeys,
+          android.request.availableResultKeys,
+          android.request.availableCharacteristicsKeys.
+
+          The following capabilities are guaranteed to be available on
+          android.info.supportedHardwareLevel `==` FULL devices:
+
+          * MANUAL_SENSOR
+          * MANUAL_POST_PROCESSING
+
+          Other capabilities may be available on either FULL or LIMITED
+          devices, but the application should query this key to be sure.
+          </details>
+          <hal_details>
+          Additional constraint details per-capability will be available
+          in the Compatibility Test Suite.
+
+          Minimum baseline requirements required for the
+          BACKWARD_COMPATIBLE capability are not explicitly listed.
+          Instead refer to "BC" tags and the camera CTS tests in the
+          android.hardware.camera2.cts package.
+
+          Listed controls that can be either request or result (e.g.
+          android.sensor.exposureTime) must be available both in the
+          request and the result in order to be considered to be
+          capability-compliant.
+
+          For example, if the HAL claims to support MANUAL control,
+          then exposure time must be configurable via the request _and_
+          the actual exposure applied must be available via
+          the result.
+
+          If MANUAL_SENSOR is omitted, the HAL may choose to omit the
+          android.scaler.availableMinFrameDurations static property entirely.
+
+          For PRIVATE_REPROCESSING and YUV_REPROCESSING capabilities, see
+          hardware/libhardware/include/hardware/camera3.h Section 10 for more information.
+
+          Devices that support the MANUAL_SENSOR capability must support the
+          CAMERA3_TEMPLATE_MANUAL template defined in camera3.h.
+
+          Devices that support the PRIVATE_REPROCESSING capability or the
+          YUV_REPROCESSING capability must support the
+          CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG template defined in camera3.h.
+
+          For DEPTH_OUTPUT, the depth-format keys
+          android.depth.availableDepthStreamConfigurations,
+          android.depth.availableDepthMinFrameDurations,
+          android.depth.availableDepthStallDurations must be available, in
+          addition to the other keys explicitly mentioned in the DEPTH_OUTPUT
+          enum notes. The entry android.depth.maxDepthSamples must be available
+          if the DEPTH_POINT_CLOUD format is supported (HAL pixel format BLOB, dataspace
+          DEPTH).
+          </hal_details>
+        </entry>
+        <entry name="availableRequestKeys" type="int32" visibility="hidden"
+          container="array" hwlevel="legacy">
+          <array>
+            <size>n</size>
+          </array>
+          <description>A list of all keys that the camera device has available
+          to use with {@link android.hardware.camera2.CaptureRequest}.</description>
+
+          <details>Attempting to set a key into a CaptureRequest that is not
+          listed here will result in an invalid request and will be rejected
+          by the camera device.
+
+          This field can be used to query the feature set of a camera device
+          at a more granular level than capabilities. This is especially
+          important for optional keys that are not listed under any capability
+          in android.request.availableCapabilities.
+          </details>
+          <hal_details>
+          Vendor tags must not be listed here. Use the vendor tag metadata
+          extensions C api instead (refer to camera3.h for more details).
+
+          Setting/getting vendor tags will be checked against the metadata
+          vendor extensions API and not against this field.
+
+          The HAL must not consume any request tags that are not listed either
+          here or in the vendor tag list.
+
+          The public camera2 API will always make the vendor tags visible
+          via
+          {@link android.hardware.camera2.CameraCharacteristics#getAvailableCaptureRequestKeys}.
+          </hal_details>
+        </entry>
+        <entry name="availableResultKeys" type="int32" visibility="hidden"
+          container="array" hwlevel="legacy">
+          <array>
+            <size>n</size>
+          </array>
+          <description>A list of all keys that the camera device has available
+          to use with {@link android.hardware.camera2.CaptureResult}.</description>
+
+          <details>Attempting to get a key from a CaptureResult that is not
+          listed here will always return a `null` value. Getting a key from
+          a CaptureResult that is listed here will generally never return a `null`
+          value.
+
+          The following keys may return `null` unless they are enabled:
+
+          * android.statistics.lensShadingMap (non-null iff android.statistics.lensShadingMapMode == ON)
+
+          (Those sometimes-null keys will nevertheless be listed here
+          if they are available.)
+
+          This field can be used to query the feature set of a camera device
+          at a more granular level than capabilities. This is especially
+          important for optional keys that are not listed under any capability
+          in android.request.availableCapabilities.
+          </details>
+          <hal_details>
+          Tags listed here must always have an entry in the result metadata,
+          even if that size is 0 elements. Only array-type tags (e.g. lists,
+          matrices, strings) are allowed to have 0 elements.
+
+          Vendor tags must not be listed here. Use the vendor tag metadata
+          extensions C api instead (refer to camera3.h for more details).
+
+          Setting/getting vendor tags will be checked against the metadata
+          vendor extensions API and not against this field.
+
+          The HAL must not produce any result tags that are not listed either
+          here or in the vendor tag list.
+
+          The public camera2 API will always make the vendor tags visible via {@link
+          android.hardware.camera2.CameraCharacteristics#getAvailableCaptureResultKeys}.
+          </hal_details>
+        </entry>
+        <entry name="availableCharacteristicsKeys" type="int32" visibility="hidden"
+          container="array" hwlevel="legacy">
+          <array>
+            <size>n</size>
+          </array>
+          <description>A list of all keys that the camera device has available
+          to use with {@link android.hardware.camera2.CameraCharacteristics}.</description>
+          <details>This entry follows the same rules as
+          android.request.availableResultKeys (except that it applies for
+          CameraCharacteristics instead of CaptureResult). See above for more
+          details.
+          </details>
+          <hal_details>
+          Keys listed here must always have an entry in the static info metadata,
+          even if that size is 0 elements. Only array-type tags (e.g. lists,
+          matrices, strings) are allowed to have 0 elements.
+
+          Vendor tags must not be listed here. Use the vendor tag metadata
+          extensions C api instead (refer to camera3.h for more details).
+
+          Setting/getting vendor tags will be checked against the metadata
+          vendor extensions API and not against this field.
+
+          The HAL must not have any tags in its static info that are not listed
+          either here or in the vendor tag list.
+
+          The public camera2 API will always make the vendor tags visible
+          via {@link android.hardware.camera2.CameraCharacteristics#getKeys}.
+          </hal_details>
+        </entry>
+      </static>
+    </section>
+    <section name="scaler">
+      <controls>
+        <entry name="cropRegion" type="int32" visibility="public"
+               container="array" typedef="rectangle" hwlevel="legacy">
+          <array>
+            <size>4</size>
+          </array>
+          <description>The desired region of the sensor to read out for this capture.</description>
+          <units>Pixel coordinates relative to
+          android.sensor.info.activeArraySize</units>
+          <details>
+            This control can be used to implement digital zoom.
+
+            The crop region coordinate system is based off
+            android.sensor.info.activeArraySize, with `(0, 0)` being the
+            top-left corner of the sensor active array.
+
+            Output streams use this rectangle to produce their output,
+            cropping to a smaller region if necessary to maintain the
+            stream's aspect ratio, then scaling the sensor input to
+            match the output's configured resolution.
+
+            The crop region is applied after the RAW to other color
+            space (e.g. YUV) conversion. Since raw streams
+            (e.g. RAW16) don't have the conversion stage, they are not
+            croppable. The crop region will be ignored by raw streams.
+
+            For non-raw streams, any additional per-stream cropping will
+            be done to maximize the final pixel area of the stream.
+
+            For example, if the crop region is set to a 4:3 aspect
+            ratio, then 4:3 streams will use the exact crop
+            region. 16:9 streams will further crop vertically
+            (letterbox).
+
+            Conversely, if the crop region is set to a 16:9, then 4:3
+            outputs will crop horizontally (pillarbox), and 16:9
+            streams will match exactly. These additional crops will
+            be centered within the crop region.
+
+            The width and height of the crop region cannot
+            be set to be smaller than
+            `floor( activeArraySize.width / android.scaler.availableMaxDigitalZoom )` and
+            `floor( activeArraySize.height / android.scaler.availableMaxDigitalZoom )`, respectively.
+
+            The camera device may adjust the crop region to account
+            for rounding and other hardware requirements; the final
+            crop region used will be included in the output capture
+            result.
+          </details>
+          <hal_details>
+            The output streams must maintain square pixels at all
+            times, no matter what the relative aspect ratios of the
+            crop region and the stream are.  Negative values for
+            corner are allowed for raw output if full pixel array is
+            larger than active pixel array. Width and height may be
+            rounded to nearest larger supportable width, especially
+            for raw output, where only a few fixed scales may be
+            possible.
+
+            For a set of output streams configured, if the sensor output is cropped to a smaller
+            size than active array size, the HAL needs to follow the cropping rules below:
+
+            * The HAL needs to handle the cropRegion as if the sensor crop size is the effective
+            active array size. More specifically, the HAL must transform the request cropRegion from
+            android.sensor.info.activeArraySize to the sensor cropped pixel area size in this way:
+                1. Translate the requested cropRegion w.r.t., the left top corner of the sensor
+                cropped pixel area by (tx, ty),
+                where `ty = sensorCrop.top * (sensorCrop.height / activeArraySize.height)`
+                and `tx = sensorCrop.left * (sensorCrop.width / activeArraySize.width)`. The
+                (sensorCrop.top, sensorCrop.left) is the coordinate based off the
+                android.sensor.info.activeArraySize.
+                2. Scale the width and height of requested cropRegion with scaling factor of
+                sensorCrop.width/activeArraySize.width and sensorCrop.height/activeArraySize.height
+                respectively.
+            Once this new cropRegion is calculated, the HAL must use this region to crop the image
+            with regard to the sensor crop size (effective active array size). The HAL still needs
+            to follow the general cropping rule for this new cropRegion and effective active
+            array size.
+
+            * The HAL must report the cropRegion with regard to android.sensor.info.activeArraySize.
+            The HAL needs to convert the new cropRegion generated above w.r.t. the full active array size.
+            The reported cropRegion may be slightly different from the requested cropRegion since
+            the HAL may adjust the crop region to account for rounding, conversion error, or other
+            hardware limitations.
+
+            HAL2.x uses only (x, y, width)
+          </hal_details>
+          <tag id="BC" />
+        </entry>
+      </controls>
+      <static>
+        <entry name="availableFormats" type="int32"
+        visibility="hidden" deprecated="true" enum="true"
+        container="array" typedef="imageFormat">
+          <array>
+            <size>n</size>
+          </array>
+          <enum>
+            <value optional="true" id="0x20">RAW16
+              <notes>
+              RAW16 is a standard, cross-platform format for raw image
+              buffers with 16-bit pixels.
+
+              Buffers of this format are typically expected to have a
+              Bayer Color Filter Array (CFA) layout, which is given in
+              android.sensor.info.colorFilterArrangement. Sensors with
+              CFAs that are not representable by a format in
+              android.sensor.info.colorFilterArrangement should not
+              use this format.
+
+              Buffers of this format will also follow the constraints given for
+              RAW_OPAQUE buffers, but with relaxed performance constraints.
+
+              This format is intended to give users access to the full contents
+              of the buffers coming directly from the image sensor prior to any
+              cropping or scaling operations, and all coordinate systems for
+              metadata used for this format are relative to the size of the
+              active region of the image sensor before any geometric distortion
+              correction has been applied (i.e.
+              android.sensor.info.preCorrectionActiveArraySize). Supported
+              dimensions for this format are limited to the full dimensions of
+              the sensor (e.g. either android.sensor.info.pixelArraySize or
+              android.sensor.info.preCorrectionActiveArraySize will be the
+              only supported output size).
+
+              See android.scaler.availableInputOutputFormatsMap for
+              the full set of performance guarantees.
+              </notes>
+            </value>
+            <value optional="true" id="0x24">RAW_OPAQUE
+              <notes>
+              RAW_OPAQUE is a format for raw image buffers coming from an
+              image sensor.
+
+              The actual structure of buffers of this format is
+              platform-specific, but must follow several constraints:
+
+              1. No image post-processing operations may have been applied to
+              buffers of this type. These buffers contain raw image data coming
+              directly from the image sensor.
+              1. If a buffer of this format is passed to the camera device for
+              reprocessing, the resulting images will be identical to the images
+              produced if the buffer had come directly from the sensor and was
+              processed with the same settings.
+
+              The intended use for this format is to allow access to the native
+              raw format buffers coming directly from the camera sensor without
+              any additional conversions or decrease in framerate.
+
+              See android.scaler.availableInputOutputFormatsMap for the full set of
+              performance guarantees.
+              </notes>
+            </value>
+            <value optional="true" id="0x32315659">YV12
+              <notes>YCrCb 4:2:0 Planar</notes>
+            </value>
+            <value optional="true" id="0x11">YCrCb_420_SP
+              <notes>NV21</notes>
+            </value>
+            <value id="0x22">IMPLEMENTATION_DEFINED
+              <notes>System internal format, not application-accessible</notes>
+            </value>
+            <value id="0x23">YCbCr_420_888
+              <notes>Flexible YUV420 Format</notes>
+            </value>
+            <value id="0x21">BLOB
+              <notes>JPEG format</notes>
+            </value>
+          </enum>
+          <description>The list of image formats that are supported by this
+          camera device for output streams.</description>
+          <details>
+          All camera devices will support JPEG and YUV_420_888 formats.
+
+          When set to YUV_420_888, application can access the YUV420 data directly.
+          </details>
+          <hal_details>
+          These format values are from HAL_PIXEL_FORMAT_* in
+          system/core/include/system/graphics.h.
+
+          When IMPLEMENTATION_DEFINED is used, the platform
+          gralloc module will select a format based on the usage flags provided
+          by the camera HAL device and the other endpoint of the stream. It is
+          usually used by preview and recording streams, where the application doesn't
+          need to access the image data.
+
+          YCbCr_420_888 format must be supported by the HAL. When an image stream
+          needs CPU/application direct access, this format will be used.
+
+          The BLOB format must be supported by the HAL. This is used for the JPEG stream.
+
+          A RAW_OPAQUE buffer should contain only pixel data. It is strongly
+          recommended that any information used by the camera device when
+          processing images is fully expressed by the result metadata
+          for that image buffer.
+          </hal_details>
+          <tag id="BC" />
+        </entry>
+        <entry name="availableJpegMinDurations" type="int64" visibility="hidden" deprecated="true"
+        container="array">
+          <array>
+            <size>n</size>
+          </array>
+          <description>The minimum frame duration that is supported
+          for each resolution in android.scaler.availableJpegSizes.
+          </description>
+          <units>Nanoseconds</units>
+          <range>TODO: Remove property.</range>
+          <details>
+          This corresponds to the minimum steady-state frame duration when only
+          that JPEG stream is active and captured in a burst, with all
+          processing (typically in android.*.mode) set to FAST.
+
+          When multiple streams are configured, the minimum
+          frame duration will be &amp;gt;= max(individual stream min
+          durations).</details>
+          <tag id="BC" />
+        </entry>
+        <entry name="availableJpegSizes" type="int32" visibility="hidden"
+        deprecated="true" container="array" typedef="size">
+          <array>
+            <size>n</size>
+            <size>2</size>
+          </array>
+          <description>The JPEG resolutions that are supported by this camera device.</description>
+          <range>TODO: Remove property.</range>
+          <details>
+          The resolutions are listed as `(width, height)` pairs. All camera devices will support
+          sensor maximum resolution (defined by android.sensor.info.activeArraySize).
+          </details>
+          <hal_details>
+          The HAL must include sensor maximum resolution
+          (defined by android.sensor.info.activeArraySize),
+          and should include half/quarter of sensor maximum resolution.
+          </hal_details>
+          <tag id="BC" />
+        </entry>
+        <entry name="availableMaxDigitalZoom" type="float" visibility="public"
+              hwlevel="legacy">
+          <description>The maximum ratio between both active area width
+          and crop region width, and active area height and
+          crop region height, for android.scaler.cropRegion.
+          </description>
+          <units>Zoom scale factor</units>
+          <range>&amp;gt;=1</range>
+          <details>
+          This represents the maximum amount of zooming possible by
+          the camera device, or equivalently, the minimum cropping
+          window size.
+
+          Crop regions that have a width or height that is smaller
+          than this ratio allows will be rounded up to the minimum
+          allowed size by the camera device.
+          </details>
+          <tag id="BC" />
+        </entry>
+        <entry name="availableProcessedMinDurations" type="int64" visibility="hidden" deprecated="true"
+        container="array">
+          <array>
+            <size>n</size>
+          </array>
+          <description>For each available processed output size (defined in
+          android.scaler.availableProcessedSizes), this property lists the
+          minimum supportable frame duration for that size.
+          </description>
+          <units>Nanoseconds</units>
+          <details>
+          This should correspond to the frame duration when only that processed
+          stream is active, with all processing (typically in android.*.mode)
+          set to FAST.
+
+          When multiple streams are configured, the minimum frame duration will
+          be &amp;gt;= max(individual stream min durations).
+          </details>
+          <tag id="BC" />
+        </entry>
+        <entry name="availableProcessedSizes" type="int32" visibility="hidden"
+        deprecated="true" container="array" typedef="size">
+          <array>
+            <size>n</size>
+            <size>2</size>
+          </array>
+          <description>The resolutions available for use with
+          processed output streams, such as YV12, NV12, and
+          platform opaque YUV/RGB streams to the GPU or video
+          encoders.</description>
+          <details>
+          The resolutions are listed as `(width, height)` pairs.
+
+          For a given use case, the actual maximum supported resolution
+          may be lower than what is listed here, depending on the destination
+          Surface for the image data. For example, for recording video,
+          the video encoder chosen may have a maximum size limit (e.g. 1080p)
+          smaller than what the camera (e.g. maximum resolution is 3264x2448)
+          can provide.
+
+          Please reference the documentation for the image data destination to
+          check if it limits the maximum size for image data.
+          </details>
+          <hal_details>
+          For FULL capability devices (`android.info.supportedHardwareLevel == FULL`),
+          the HAL must include all JPEG sizes listed in android.scaler.availableJpegSizes
+          and each below resolution if it is smaller than or equal to the sensor
+          maximum resolution (if they are not listed in JPEG sizes already):
+
+          * 240p (320 x 240)
+          * 480p (640 x 480)
+          * 720p (1280 x 720)
+          * 1080p (1920 x 1080)
+
+          For LIMITED capability devices (`android.info.supportedHardwareLevel == LIMITED`),
+          the HAL only has to list up to the maximum video size supported by the devices.
+          </hal_details>
+          <tag id="BC" />
+        </entry>
+        <entry name="availableRawMinDurations" type="int64" deprecated="true"
+        container="array">
+          <array>
+            <size>n</size>
+          </array>
+          <description>
+          For each available raw output size (defined in
+          android.scaler.availableRawSizes), this property lists the minimum
+          supportable frame duration for that size.
+          </description>
+          <units>Nanoseconds</units>
+          <details>
+          Should correspond to the frame duration when only the raw stream is
+          active.
+
+          When multiple streams are configured, the minimum
+          frame duration will be &amp;gt;= max(individual stream min
+          durations).</details>
+          <tag id="BC" />
+        </entry>
+        <entry name="availableRawSizes" type="int32" deprecated="true"
+        container="array" typedef="size">
+          <array>
+            <size>n</size>
+            <size>2</size>
+          </array>
+          <description>The resolutions available for use with raw
+          sensor output streams, listed as width,
+          height</description>
+        </entry>
+      </static>
+      <dynamic>
+        <clone entry="android.scaler.cropRegion" kind="controls">
+        </clone>
+      </dynamic>
+      <static>
+        <entry name="availableInputOutputFormatsMap" type="int32" visibility="hidden"
+          typedef="reprocessFormatsMap">
+          <description>The mapping of image formats that are supported by this
+          camera device for input streams, to their corresponding output formats.
+          </description>
+          <details>
+          All camera devices with at least 1
+          android.request.maxNumInputStreams will have at least one
+          available input format.
+
+          The camera device will support the following map of formats,
+          if its dependent capability (android.request.availableCapabilities) is supported:
+
+            Input Format                                    | Output Format                                     | Capability
+          :-------------------------------------------------|:--------------------------------------------------|:----------
+          {@link android.graphics.ImageFormat#PRIVATE}      | {@link android.graphics.ImageFormat#JPEG}         | PRIVATE_REPROCESSING
+          {@link android.graphics.ImageFormat#PRIVATE}      | {@link android.graphics.ImageFormat#YUV_420_888}  | PRIVATE_REPROCESSING
+          {@link android.graphics.ImageFormat#YUV_420_888}  | {@link android.graphics.ImageFormat#JPEG}         | YUV_REPROCESSING
+          {@link android.graphics.ImageFormat#YUV_420_888}  | {@link android.graphics.ImageFormat#YUV_420_888}  | YUV_REPROCESSING
+
+          PRIVATE refers to a device-internal format that is not directly application-visible.  A
+          PRIVATE input surface can be acquired by {@link android.media.ImageReader#newInstance}
+          with {@link android.graphics.ImageFormat#PRIVATE} as the format.
+
+          For a PRIVATE_REPROCESSING-capable camera device, using the PRIVATE format as either input
+          or output will never hurt maximum frame rate (i.e.  {@link
+          android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration
+          getOutputStallDuration(ImageFormat.PRIVATE, size)} is always 0).
+
+          Attempting to configure an input stream with output streams not
+          listed as available in this map is not valid.
+          </details>
+          <hal_details>
+          For the formats, see `system/core/include/system/graphics.h` for a definition
+          of the image format enumerations. The PRIVATE format refers to the
+          HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED format. The HAL could determine
+          the actual format by using the gralloc usage flags.
+          For ZSL use case in particular, the HAL could choose appropriate format (partially
+          processed YUV or RAW based format) by checking the format and GRALLOC_USAGE_HW_CAMERA_ZSL.
+          See camera3.h for more details.
+
+          This value is encoded as a variable-size array-of-arrays.
+          The inner array always contains `[format, length, ...]` where
+          `...` has `length` elements. An inner array is followed by another
+          inner array if the total metadata entry size hasn't yet been exceeded.
+
+          A code sample to read/write this encoding (with a device that
+          supports reprocessing IMPLEMENTATION_DEFINED to YUV_420_888, and JPEG,
+          and reprocessing YUV_420_888 to YUV_420_888 and JPEG):
+
+              // reading
+              int32_t* contents = &amp;entry.i32[0];
+              for (size_t i = 0; i &lt; entry.count; ) {
+                  int32_t format = contents[i++];
+                  int32_t length = contents[i++];
+                  int32_t output_formats[length];
+                  memcpy(&amp;output_formats[0], &amp;contents[i],
+                         length * sizeof(int32_t));
+                  i += length;
+              }
+
+              // writing (static example, PRIVATE_REPROCESSING + YUV_REPROCESSING)
+              int32_t[] contents = {
+                IMPLEMENTATION_DEFINED, 2, YUV_420_888, BLOB,
+                YUV_420_888, 2, YUV_420_888, BLOB,
+              };
+              update_camera_metadata_entry(metadata, index, &amp;contents[0],
+                    sizeof(contents)/sizeof(contents[0]), &amp;updated_entry);
+
+          If the HAL claims to support any of the capabilities listed in the
+          above details, then it must also support all the input-output
+          combinations listed for that capability. It can optionally support
+          additional formats if it so chooses.
+          </hal_details>
+          <tag id="REPROC" />
+        </entry>
+        <entry name="availableStreamConfigurations" type="int32" visibility="hidden"
+          enum="true" container="array"
+          typedef="streamConfiguration" hwlevel="legacy">
+          <array>
+            <size>n</size>
+            <size>4</size>
+          </array>
+          <enum>
+            <value>OUTPUT</value>
+            <value>INPUT</value>
+          </enum>
+          <description>The available stream configurations that this
+          camera device supports
+          (i.e. format, width, height, output/input stream).
+          </description>
+          <details>
+          The configurations are listed as `(format, width, height, input?)`
+          tuples.
+
+          For a given use case, the actual maximum supported resolution
+          may be lower than what is listed here, depending on the destination
+          Surface for the image data. For example, for recording video,
+          the video encoder chosen may have a maximum size limit (e.g. 1080p)
+          smaller than what the camera (e.g. maximum resolution is 3264x2448)
+          can provide.
+
+          Please reference the documentation for the image data destination to
+          check if it limits the maximum size for image data.
+
+          Not all output formats may be supported in a configuration with
+          an input stream of a particular format. For more details, see
+          android.scaler.availableInputOutputFormatsMap.
+
+          The following table describes the minimum required output stream
+          configurations based on the hardware level
+          (android.info.supportedHardwareLevel):
+
+          Format         | Size                                         | Hardware Level | Notes
+          :-------------:|:--------------------------------------------:|:--------------:|:--------------:
+          JPEG           | android.sensor.info.activeArraySize          | Any            |
+          JPEG           | 1920x1080 (1080p)                            | Any            | if 1080p &lt;= activeArraySize
+          JPEG           | 1280x720 (720p)                              | Any            | if 720p &lt;= activeArraySize
+          JPEG           | 640x480 (480p)                               | Any            | if 480p &lt;= activeArraySize
+          JPEG           | 320x240 (240p)                               | Any            | if 240p &lt;= activeArraySize
+          YUV_420_888    | all output sizes available for JPEG          | FULL           |
+          YUV_420_888    | all output sizes available for JPEG, up to the maximum video size | LIMITED        |
+          IMPLEMENTATION_DEFINED | same as YUV_420_888                  | Any            |
+
+          Refer to android.request.availableCapabilities for additional
+          mandatory stream configurations on a per-capability basis.
+          </details>
+          <hal_details>
+          It is recommended (but not mandatory) to also include half/quarter
+          of sensor maximum resolution for JPEG formats (regardless of hardware
+          level).
+
+          (The following is a rewording of the above required table):
+
+          For JPEG format, the sizes may be restricted by below conditions:
+
+          * The HAL may choose the aspect ratio of each Jpeg size to be one of well known ones
+          (e.g. 4:3, 16:9, 3:2 etc.). If the sensor maximum resolution
+          (defined by android.sensor.info.activeArraySize) has an aspect ratio other than these,
+          it does not have to be included in the supported JPEG sizes.
+          * Some hardware JPEG encoders may have pixel boundary alignment requirements, such as
+          the dimensions being a multiple of 16.
+
+          Therefore, the maximum JPEG size may be smaller than sensor maximum resolution.
+          However, the largest JPEG size must be as close as possible to the sensor maximum
+          resolution given above constraints. It is required that after aspect ratio adjustments,
+          additional size reduction due to other issues must be less than 3% in area. For example,
+          if the sensor maximum resolution is 3280x2464, if the maximum JPEG size has aspect
+          ratio 4:3, the JPEG encoder alignment requirement is 16, the maximum JPEG size will be
+          3264x2448.
+
+          For FULL capability devices (`android.info.supportedHardwareLevel == FULL`),
+          the HAL must include all YUV_420_888 sizes that have JPEG sizes listed
+          here as output streams.
+
+          It must also include each below resolution if it is smaller than or
+          equal to the sensor maximum resolution (for both YUV_420_888 and JPEG
+          formats), as output streams:
+
+          * 240p (320 x 240)
+          * 480p (640 x 480)
+          * 720p (1280 x 720)
+          * 1080p (1920 x 1080)
+
+          For LIMITED capability devices
+          (`android.info.supportedHardwareLevel == LIMITED`),
+          the HAL only has to list up to the maximum video size
+          supported by the device.
+
+          Regardless of hardware level, every output resolution available for
+          YUV_420_888 must also be available for IMPLEMENTATION_DEFINED.
+
+          This supersedes the following fields, which are now deprecated:
+
+          * availableFormats
+          * available[Processed,Raw,Jpeg]Sizes
+          </hal_details>
+        </entry>
+        <entry name="availableMinFrameDurations" type="int64" visibility="hidden"
+               container="array"
+               typedef="streamConfigurationDuration" hwlevel="legacy">
+          <array>
+            <size>4</size>
+            <size>n</size>
+          </array>
+          <description>This lists the minimum frame duration for each
+          format/size combination.
+          </description>
+          <units>(format, width, height, ns) x n</units>
+          <details>
+          This should correspond to the frame duration when only that
+          stream is active, with all processing (typically in android.*.mode)
+          set to either OFF or FAST.
+
+          When multiple streams are used in a request, the minimum frame
+          duration will be max(individual stream min durations).
+
+          The minimum frame duration of a stream (of a particular format, size)
+          is the same regardless of whether the stream is input or output.
+
+          See android.sensor.frameDuration and
+          android.scaler.availableStallDurations for more details about
+          calculating the max frame rate.
+
+          (Keep in sync with
+          {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputMinFrameDuration})
+          </details>
+          <tag id="V1" />
+        </entry>
+        <entry name="availableStallDurations" type="int64" visibility="hidden"
+               container="array" typedef="streamConfigurationDuration" hwlevel="legacy">
+          <array>
+            <size>4</size>
+            <size>n</size>
+          </array>
+          <description>This lists the maximum stall duration for each
+          output format/size combination.
+          </description>
+          <units>(format, width, height, ns) x n</units>
+          <details>
+          A stall duration is how much extra time would get added
+          to the normal minimum frame duration for a repeating request
+          that has streams with non-zero stall.
+
+          For example, consider JPEG captures which have the following
+          characteristics:
+
+          * JPEG streams act like processed YUV streams in requests for which
+          they are not included; in requests in which they are directly
+          referenced, they act as JPEG streams. This is because supporting a
+          JPEG stream requires the underlying YUV data to always be ready for
+          use by a JPEG encoder, but the encoder will only be used (and impact
+          frame duration) on requests that actually reference a JPEG stream.
+          * The JPEG processor can run concurrently to the rest of the camera
+          pipeline, but cannot process more than 1 capture at a time.
+
+          In other words, using a repeating YUV request would result
+          in a steady frame rate (let's say it's 30 FPS). If a single
+          JPEG request is submitted periodically, the frame rate will stay
+          at 30 FPS (as long as we wait for the previous JPEG to return each
+          time). If we try to submit a repeating YUV + JPEG request, then
+          the frame rate will drop from 30 FPS.
+
+          In general, submitting a new request with a non-0 stall time
+          stream will _not_ cause a frame rate drop unless there are still
+          outstanding buffers for that stream from previous requests.
+
+          Submitting a repeating request with a set of streams (call this `S`)
+          is the same as setting the minimum frame duration to the normal
+          minimum frame duration corresponding to `S`, plus the maximum stall
+          duration for `S`.
+
+          If interleaving requests with and without a stall duration,
+          a request will stall by the maximum of the remaining times
+          for each can-stall stream with outstanding buffers.
+
+          This means that a stalling request will not have an exposure start
+          until the stall has completed.
+
+          This should correspond to the stall duration when only that stream is
+          active, with all processing (typically in android.*.mode) set to FAST
+          or OFF. Setting any of the processing modes to HIGH_QUALITY
+          effectively results in an indeterminate stall duration for all
+          streams in a request (the regular stall calculation rules are
+          ignored).
+
+          The following formats may always have a stall duration:
+
+          * {@link android.graphics.ImageFormat#JPEG}
+          * {@link android.graphics.ImageFormat#RAW_SENSOR}
+
+          The following formats will never have a stall duration:
+
+          * {@link android.graphics.ImageFormat#YUV_420_888}
+          * {@link android.graphics.ImageFormat#RAW10}
+
+          All other formats may or may not have an allowed stall duration on
+          a per-capability basis; refer to android.request.availableCapabilities
+          for more details.
+
+          See android.sensor.frameDuration for more information about
+          calculating the max frame rate (absent stalls).
+
+          (Keep up to date with
+          {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration} )
+          </details>
+          <hal_details>
+          If possible, it is recommended that all non-JPEG formats
+          (such as RAW16) should not have a stall duration. RAW10, RAW12, RAW_OPAQUE
+          and IMPLEMENTATION_DEFINED must not have stall durations.
+          </hal_details>
+          <tag id="V1" />
+        </entry>
+        <entry name="streamConfigurationMap" type="int32" visibility="public"
+               synthetic="true" typedef="streamConfigurationMap"
+               hwlevel="legacy">
+          <description>The available stream configurations that this
+          camera device supports; also includes the minimum frame durations
+          and the stall durations for each format/size combination.
+          </description>
+          <details>
+          All camera devices will support sensor maximum resolution (defined by
+          android.sensor.info.activeArraySize) for the JPEG format.
+
+          For a given use case, the actual maximum supported resolution
+          may be lower than what is listed here, depending on the destination
+          Surface for the image data. For example, for recording video,
+          the video encoder chosen may have a maximum size limit (e.g. 1080p)
+          smaller than what the camera (e.g. maximum resolution is 3264x2448)
+          can provide.
+
+          Please reference the documentation for the image data destination to
+          check if it limits the maximum size for image data.
+
+          The following table describes the minimum required output stream
+          configurations based on the hardware level
+          (android.info.supportedHardwareLevel):
+
+          Format                                             | Size                                         | Hardware Level | Notes
+          :-------------------------------------------------:|:--------------------------------------------:|:--------------:|:--------------:
+          {@link android.graphics.ImageFormat#JPEG}          | android.sensor.info.activeArraySize (*1)     | Any            |
+          {@link android.graphics.ImageFormat#JPEG}          | 1920x1080 (1080p)                            | Any            | if 1080p &lt;= activeArraySize
+          {@link android.graphics.ImageFormat#JPEG}          | 1280x720 (720p)                               | Any            | if 720p &lt;= activeArraySize
+          {@link android.graphics.ImageFormat#JPEG}          | 640x480 (480p)                               | Any            | if 480p &lt;= activeArraySize
+          {@link android.graphics.ImageFormat#JPEG}          | 320x240 (240p)                               | Any            | if 240p &lt;= activeArraySize
+          {@link android.graphics.ImageFormat#YUV_420_888}   | all output sizes available for JPEG          | FULL           |
+          {@link android.graphics.ImageFormat#YUV_420_888}   | all output sizes available for JPEG, up to the maximum video size | LIMITED        |
+          {@link android.graphics.ImageFormat#PRIVATE}       | same as YUV_420_888                          | Any            |
+
+          Refer to android.request.availableCapabilities and {@link
+          android.hardware.camera2.CameraDevice#createCaptureSession} for additional mandatory
+          stream configurations on a per-capability basis.
+
+          *1: For JPEG format, the sizes may be restricted by below conditions:
+
+          * The HAL may choose the aspect ratio of each Jpeg size to be one of well known ones
+          (e.g. 4:3, 16:9, 3:2 etc.). If the sensor maximum resolution
+          (defined by android.sensor.info.activeArraySize) has an aspect ratio other than these,
+          it does not have to be included in the supported JPEG sizes.
+          * Some hardware JPEG encoders may have pixel boundary alignment requirements, such as
+          the dimensions being a multiple of 16.
+          Therefore, the maximum JPEG size may be smaller than sensor maximum resolution.
+          However, the largest JPEG size will be as close as possible to the sensor maximum
+          resolution given above constraints. It is required that after aspect ratio adjustments,
+          additional size reduction due to other issues must be less than 3% in area. For example,
+          if the sensor maximum resolution is 3280x2464, if the maximum JPEG size has aspect
+          ratio 4:3, and the JPEG encoder alignment requirement is 16, the maximum JPEG size will be
+          3264x2448.
+          </details>
+          <hal_details>
+          Do not set this property directly
+          (it is synthetic and will not be available at the HAL layer);
+          set the android.scaler.availableStreamConfigurations instead.
+
+          Not all output formats may be supported in a configuration with
+          an input stream of a particular format. For more details, see
+          android.scaler.availableInputOutputFormatsMap.
+
+          It is recommended (but not mandatory) to also include half/quarter
+          of sensor maximum resolution for JPEG formats (regardless of hardware
+          level).
+
+          (The following is a rewording of the above required table):
+
+          The HAL must include sensor maximum resolution (defined by
+          android.sensor.info.activeArraySize).
+
+          For FULL capability devices (`android.info.supportedHardwareLevel == FULL`),
+          the HAL must include all YUV_420_888 sizes that have JPEG sizes listed
+          here as output streams.
+
+          It must also include each below resolution if it is smaller than or
+          equal to the sensor maximum resolution (for both YUV_420_888 and JPEG
+          formats), as output streams:
+
+          * 240p (320 x 240)
+          * 480p (640 x 480)
+          * 720p (1280 x 720)
+          * 1080p (1920 x 1080)
+
+          For LIMITED capability devices
+          (`android.info.supportedHardwareLevel == LIMITED`),
+          the HAL only has to list up to the maximum video size
+          supported by the device.
+
+          Regardless of hardware level, every output resolution available for
+          YUV_420_888 must also be available for IMPLEMENTATION_DEFINED.
+
+          This supersedes the following fields, which are now deprecated:
+
+          * availableFormats
+          * available[Processed,Raw,Jpeg]Sizes
+          </hal_details>
+        </entry>
+        <entry name="croppingType" type="byte" visibility="public" enum="true"
+               hwlevel="legacy">
+          <enum>
+            <value>CENTER_ONLY
+              <notes>
+                The camera device only supports centered crop regions.
+              </notes>
+            </value>
+            <value>FREEFORM
+              <notes>
+                The camera device supports arbitrarily chosen crop regions.
+              </notes>
+            </value>
+          </enum>
+          <description>The crop type that this camera device supports.</description>
+          <details>
+          When passing a non-centered crop region (android.scaler.cropRegion) to a camera
+          device that only supports CENTER_ONLY cropping, the camera device will move the
+          crop region to the center of the sensor active array (android.sensor.info.activeArraySize)
+          and keep the crop region width and height unchanged. The camera device will return the
+          final used crop region in metadata result android.scaler.cropRegion.
+
+          Camera devices that support FREEFORM cropping will support any crop region that
+          is inside of the active array. The camera device will apply the same crop region and
+          return the final used crop region in capture result metadata android.scaler.cropRegion.
+
+          LEGACY capability devices will only support CENTER_ONLY cropping.
+          </details>
+        </entry>
+      </static>
+    </section>
+    <section name="sensor">
+      <controls>
+        <entry name="exposureTime" type="int64" visibility="public" hwlevel="full">
+          <description>Duration each pixel is exposed to
+          light.</description>
+          <units>Nanoseconds</units>
+          <range>android.sensor.info.exposureTimeRange</range>
+          <details>If the sensor can't expose this exact duration, it will shorten the
+          duration exposed to the nearest possible value (rather than expose longer).
+          The final exposure time used will be available in the output capture result.
+
+          This control is only effective if android.control.aeMode or android.control.mode is set to
+          OFF; otherwise the auto-exposure algorithm will override this value.
+          </details>
+          <tag id="V1" />
+        </entry>
+        <entry name="frameDuration" type="int64" visibility="public" hwlevel="full">
+          <description>Duration from start of frame exposure to
+          start of next frame exposure.</description>
+          <units>Nanoseconds</units>
+          <range>See android.sensor.info.maxFrameDuration,
+          android.scaler.streamConfigurationMap. The duration
+          is capped to `max(duration, exposureTime + overhead)`.</range>
+          <details>
+          The maximum frame rate that can be supported by a camera subsystem is
+          a function of many factors:
+
+          * Requested resolutions of output image streams
+          * Availability of binning / skipping modes on the imager
+          * The bandwidth of the imager interface
+          * The bandwidth of the various ISP processing blocks
+
+          Since these factors can vary greatly between different ISPs and
+          sensors, the camera abstraction tries to represent the bandwidth
+          restrictions with as simple a model as possible.
+
+          The model presented has the following characteristics:
+
+          * The image sensor is always configured to output the smallest
+          resolution possible given the application's requested output stream
+          sizes.  The smallest resolution is defined as being at least as large
+          as the largest requested output stream size; the camera pipeline must
+          never digitally upsample sensor data when the crop region covers the
+          whole sensor. In general, this means that if only small output stream
+          resolutions are configured, the sensor can provide a higher frame
+          rate.
+          * Since any request may use any or all the currently configured
+          output streams, the sensor and ISP must be configured to support
+          scaling a single capture to all the streams at the same time.  This
+          means the camera pipeline must be ready to produce the largest
+          requested output size without any delay.  Therefore, the overall
+          frame rate of a given configured stream set is governed only by the
+          largest requested stream resolution.
+          * Using more than one output stream in a request does not affect the
+          frame duration.
+          * Certain format-streams may need to do additional background processing
+          before data is consumed/produced by that stream. These processors
+          can run concurrently to the rest of the camera pipeline, but
+          cannot process more than 1 capture at a time.
+
+          The necessary information for the application, given the model above,
+          is provided via the android.scaler.streamConfigurationMap field using
+          {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputMinFrameDuration}.
+          These are used to determine the maximum frame rate / minimum frame
+          duration that is possible for a given stream configuration.
+
+          Specifically, the application can use the following rules to
+          determine the minimum frame duration it can request from the camera
+          device:
+
+          1. Let the set of currently configured input/output streams
+          be called `S`.
+          1. Find the minimum frame durations for each stream in `S`, by looking
+          it up in android.scaler.streamConfigurationMap using {@link
+          android.hardware.camera2.params.StreamConfigurationMap#getOutputMinFrameDuration}
+          (with its respective size/format). Let this set of frame durations be
+          called `F`.
+          1. For any given request `R`, the minimum frame duration allowed
+          for `R` is the maximum out of all values in `F`. Let the streams
+          used in `R` be called `S_r`.
+
+          If none of the streams in `S_r` have a stall time (listed in {@link
+          android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration}
+          using its respective size/format), then the frame duration in `F`
+          determines the steady state frame rate that the application will get
+          if it uses `R` as a repeating request. Let this special kind of
+          request be called `Rsimple`.
+
+          A repeating request `Rsimple` can be _occasionally_ interleaved
+          by a single capture of a new request `Rstall` (which has at least
+          one in-use stream with a non-0 stall time) and if `Rstall` has the
+          same minimum frame duration this will not cause a frame rate loss
+          if all buffers from the previous `Rstall` have already been
+          delivered.
+
+          For more details about stalling, see
+          {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration}.
+
+          This control is only effective if android.control.aeMode or android.control.mode is set to
+          OFF; otherwise the auto-exposure algorithm will override this value.
+          </details>
+          <hal_details>
+          For more details about stalling, see
+          android.scaler.availableStallDurations.
+          </hal_details>
+          <tag id="V1" />
+        </entry>
+        <entry name="sensitivity" type="int32" visibility="public" hwlevel="full">
+          <description>The amount of gain applied to sensor data
+          before processing.</description>
+          <units>ISO arithmetic units</units>
+          <range>android.sensor.info.sensitivityRange</range>
+          <details>
+          The sensitivity is the standard ISO sensitivity value,
+          as defined in ISO 12232:2006.
+
+          The sensitivity must be within android.sensor.info.sensitivityRange, and
+          if it is less than android.sensor.maxAnalogSensitivity, the camera device
+          is guaranteed to use only analog amplification for applying the gain.
+
+          If the camera device cannot apply the exact sensitivity
+          requested, it will reduce the gain to the nearest supported
+          value. The final sensitivity used will be available in the
+          output capture result.
+          </details>
+          <hal_details>ISO 12232:2006 REI method is acceptable.</hal_details>
+          <tag id="V1" />
+        </entry>
+      </controls>
+      <static>
+        <namespace name="info">
+          <entry name="activeArraySize" type="int32" visibility="public"
+          type_notes="Four ints defining the active pixel rectangle"
+          container="array" typedef="rectangle" hwlevel="legacy">
+            <array>
+              <size>4</size>
+            </array>
+            <description>
+            The area of the image sensor which corresponds to active pixels after any geometric
+            distortion correction has been applied.
+            </description>
+            <units>Pixel coordinates on the image sensor</units>
+            <details>
+            This is the rectangle representing the size of the active region of the sensor (i.e.
+            the region that actually receives light from the scene) after any geometric correction
+            has been applied, and should be treated as the maximum size in pixels of any of the
+            image output formats aside from the raw formats.
+
+            This rectangle is defined relative to the full pixel array; (0,0) is the top-left of
+            the full pixel array, and the size of the full pixel array is given by
+            android.sensor.info.pixelArraySize.
+
+            The coordinate system for most other keys that list pixel coordinates, including
+            android.scaler.cropRegion, is defined relative to the active array rectangle given in
+            this field, with `(0, 0)` being the top-left of this rectangle.
+
+            The active array may be smaller than the full pixel array, since the full array may
+            include black calibration pixels or other inactive regions, and geometric correction
+            resulting in scaling or cropping may have been applied.
+            </details>
+            <hal_details>
+            This array contains `(xmin, ymin, width, height)`. The `(xmin, ymin)` must be
+            &gt;= `(0,0)`.
+            The `(width, height)` must be &lt;= `android.sensor.info.pixelArraySize`.
+            </hal_details>
+            <tag id="RAW" />
+          </entry>
+          <entry name="sensitivityRange" type="int32" visibility="public"
+          type_notes="Range of supported sensitivities"
+          container="array" typedef="rangeInt"
+          hwlevel="full">
+            <array>
+              <size>2</size>
+            </array>
+            <description>Range of sensitivities for android.sensor.sensitivity supported by this
+            camera device.</description>
+            <range>Min &lt;= 100, Max &gt;= 800</range>
+            <details>
+              The values are the standard ISO sensitivity values,
+              as defined in ISO 12232:2006.
+            </details>
+
+            <tag id="BC" />
+            <tag id="V1" />
+          </entry>
+          <entry name="colorFilterArrangement" type="byte" visibility="public" enum="true"
+            hwlevel="full">
+            <enum>
+              <value>RGGB</value>
+              <value>GRBG</value>
+              <value>GBRG</value>
+              <value>BGGR</value>
+              <value>RGB
+              <notes>Sensor is not Bayer; output has 3 16-bit
+              values for each pixel, instead of just 1 16-bit value
+              per pixel.</notes></value>
+            </enum>
+            <description>The arrangement of color filters on sensor;
+            represents the colors in the top-left 2x2 section of
+            the sensor, in reading order.</description>
+            <tag id="RAW" />
+          </entry>
+          <entry name="exposureTimeRange" type="int64" visibility="public"
+                 type_notes="nanoseconds" container="array" typedef="rangeLong"
+                 hwlevel="full">
+            <array>
+              <size>2</size>
+            </array>
+            <description>The range of image exposure times for android.sensor.exposureTime supported
+            by this camera device.
+            </description>
+            <units>Nanoseconds</units>
+            <range>The minimum exposure time will be less than 100 us. For FULL
+            capability devices (android.info.supportedHardwareLevel == FULL),
+            the maximum exposure time will be greater than 100ms.</range>
+            <hal_details>For FULL capability devices (android.info.supportedHardwareLevel == FULL),
+            The maximum of the range SHOULD be at least 1 second (1e9), MUST be at least
+            100ms.
+            </hal_details>
+            <tag id="V1" />
+          </entry>
+          <entry name="maxFrameDuration" type="int64" visibility="public"
+                 hwlevel="full">
+            <description>The maximum possible frame duration (minimum frame rate) for
+            android.sensor.frameDuration that is supported by this camera device.</description>
+            <units>Nanoseconds</units>
+            <range>For FULL capability devices
+            (android.info.supportedHardwareLevel == FULL), at least 100ms.
+            </range>
+            <details>Attempting to use frame durations beyond the maximum will result in the frame
+            duration being clipped to the maximum. See that control for a full definition of frame
+            durations.
+
+            Refer to {@link
+            android.hardware.camera2.params.StreamConfigurationMap#getOutputMinFrameDuration}
+            for the minimum frame duration values.
+            </details>
+            <hal_details>
+            For FULL capability devices (android.info.supportedHardwareLevel == FULL),
+            The maximum of the range SHOULD be at least
+            1 second (1e9), MUST be at least 100ms (100e6).
+
+            android.sensor.info.maxFrameDuration must be greater or
+            equal to the android.sensor.info.exposureTimeRange max
+            value (since exposure time overrides frame duration).
+
+            Available minimum frame durations for JPEG must be no greater
+            than that of the YUV_420_888/IMPLEMENTATION_DEFINED
+            minimum frame durations (for that respective size).
+
+            Since JPEG processing is considered offline and can take longer than
+            a single uncompressed capture, refer to
+            android.scaler.availableStallDurations
+            for details about encoding this scenario.
+            </hal_details>
+            <tag id="V1" />
+          </entry>
+          <entry name="physicalSize" type="float" visibility="public"
+          type_notes="width x height"
+          container="array" typedef="sizeF" hwlevel="legacy">
+            <array>
+              <size>2</size>
+            </array>
+            <description>The physical dimensions of the full pixel
+            array.</description>
+            <units>Millimeters</units>
+            <details>This is the physical size of the sensor pixel
+            array defined by android.sensor.info.pixelArraySize.
+            </details>
+            <hal_details>Needed for FOV calculation for old API</hal_details>
+            <tag id="V1" />
+            <tag id="BC" />
+          </entry>
+          <entry name="pixelArraySize" type="int32" visibility="public"
+          container="array" typedef="size" hwlevel="legacy">
+            <array>
+              <size>2</size>
+            </array>
+            <description>Dimensions of the full pixel array, possibly
+            including black calibration pixels.</description>
+            <units>Pixels</units>
+            <details>The pixel count of the full pixel array of the image sensor, which covers
+            android.sensor.info.physicalSize area.  This represents the full pixel dimensions of
+            the raw buffers produced by this sensor.
+
+            If a camera device supports raw sensor formats, either this or
+            android.sensor.info.preCorrectionActiveArraySize is the maximum dimensions for the raw
+            output formats listed in android.scaler.streamConfigurationMap (this depends on
+            whether or not the image sensor returns buffers containing pixels that are not
+            part of the active array region for blacklevel calibration or other purposes).
+
+            Some parts of the full pixel array may not receive light from the scene,
+            or be otherwise inactive.  The android.sensor.info.preCorrectionActiveArraySize key
+            defines the rectangle of active pixels that will be included in processed image
+            formats.
+            </details>
+            <tag id="RAW" />
+            <tag id="BC" />
+          </entry>
+          <entry name="whiteLevel" type="int32" visibility="public">
+            <description>
+            Maximum raw value output by sensor.
+            </description>
+            <range>&gt; 255 (8-bit output)</range>
+            <details>
+            This specifies the fully-saturated encoding level for the raw
+            sample values from the sensor.  This is typically caused by the
+            sensor becoming highly non-linear or clipping. The minimum for
+            each channel is specified by the offset in the
+            android.sensor.blackLevelPattern key.
+
+            The white level is typically determined either by sensor bit depth
+            (8-14 bits is expected), or by the point where the sensor response
+            becomes too non-linear to be useful.  The default value for this is
+            maximum representable value for a 16-bit raw sample (2^16 - 1).
+            </details>
+            <hal_details>
+            The full bit depth of the sensor must be available in the raw data,
+            so the value for linear sensors should not be significantly lower
+            than maximum raw value supported, i.e. 2^(sensor bits per pixel).
+            </hal_details>
+            <tag id="RAW" />
+          </entry>
+          <entry name="timestampSource" type="byte" visibility="public"
+                 enum="true" hwlevel="legacy">
+            <enum>
+              <value>UNKNOWN
+                <notes>
+                Timestamps from android.sensor.timestamp are in nanoseconds and monotonic,
+                but can not be compared to timestamps from other subsystems
+                (e.g. accelerometer, gyro etc.), or other instances of the same or different
+                camera devices in the same system. Timestamps between streams and results for
+                a single camera instance are comparable, and the timestamps for all buffers
+                and the result metadata generated by a single capture are identical.
+                </notes>
+              </value>
+              <value>REALTIME
+                <notes>
+                Timestamps from android.sensor.timestamp are in the same timebase as
+                {@link android.os.SystemClock#elapsedRealtimeNanos},
+                and they can be compared to other timestamps using that base.
+                </notes>
+              </value>
+            </enum>
+            <description>The time base source for sensor capture start timestamps.</description>
+            <details>
+            The timestamps provided for captures are always in nanoseconds and monotonic, but
+            may not be based on a time source that can be compared to other system time sources.
+
+            This characteristic defines the source for the timestamps, and therefore whether they
+            can be compared against other system time sources/timestamps.
+            </details>
+          <tag id="V1" />
+        </entry>
+        <entry name="lensShadingApplied" type="byte" visibility="public" enum="true"
+               typedef="boolean">
+          <enum>
+            <value>FALSE</value>
+            <value>TRUE</value>
+          </enum>
+          <description>Whether the RAW images output from this camera device are subject to
+          lens shading correction.</description>
+          <details>
+          If TRUE, all images produced by the camera device in the RAW image formats will
+          have lens shading correction already applied to it. If FALSE, the images will
+          not be adjusted for lens shading correction.
+          See android.request.maxNumOutputRaw for a list of RAW image formats.
+
+          This key will be `null` for all devices that do not report this information.
+          Devices with RAW capability will always report this information in this key.
+          </details>
+        </entry>
+        <entry name="preCorrectionActiveArraySize" type="int32" visibility="public"
+          type_notes="Four ints defining the active pixel rectangle" container="array"
+          typedef="rectangle" hwlevel="legacy">
+            <array>
+              <size>4</size>
+            </array>
+            <description>
+            The area of the image sensor which corresponds to active pixels prior to the
+            application of any geometric distortion correction.
+            </description>
+            <units>Pixel coordinates on the image sensor</units>
+            <details>
+            This is the rectangle representing the size of the active region of the sensor (i.e.
+            the region that actually receives light from the scene) before any geometric correction
+            has been applied, and should be treated as the active region rectangle for any of the
+            raw formats.  All metadata associated with raw processing (e.g. the lens shading
+            correction map, and radial distortion fields) treats the top, left of this rectangle as
+            the origin, (0,0).
+
+            The size of this region determines the maximum field of view and the maximum number of
+            pixels that an image from this sensor can contain, prior to the application of
+            geometric distortion correction. The effective maximum pixel dimensions of a
+            post-distortion-corrected image is given by the android.sensor.info.activeArraySize
+            field, and the effective maximum field of view for a post-distortion-corrected image
+            can be calculated by applying the geometric distortion correction fields to this
+            rectangle, and cropping to the rectangle given in android.sensor.info.activeArraySize.
+
+            E.g. to calculate position of a pixel, (x,y), in a processed YUV output image with the
+            dimensions in android.sensor.info.activeArraySize given the position of a pixel,
+            (x', y'), in the raw pixel array with dimensions give in
+            android.sensor.info.pixelArraySize:
+
+            1. Choose a pixel (x', y') within the active array region of the raw buffer given in
+            android.sensor.info.preCorrectionActiveArraySize, otherwise this pixel is considered
+            to be outside of the FOV, and will not be shown in the processed output image.
+            1. Apply geometric distortion correction to get the post-distortion pixel coordinate,
+            (x_i, y_i). When applying geometric correction metadata, note that metadata for raw
+            buffers is defined relative to the top, left of the
+            android.sensor.info.preCorrectionActiveArraySize rectangle.
+            1. If the resulting corrected pixel coordinate is within the region given in
+            android.sensor.info.activeArraySize, then the position of this pixel in the
+            processed output image buffer is `(x_i - activeArray.left, y_i - activeArray.top)`,
+            when the top, left coordinate of that buffer is treated as (0, 0).
+
+            Thus, for pixel x',y' = (25, 25) on a sensor where android.sensor.info.pixelArraySize
+            is (100,100), android.sensor.info.preCorrectionActiveArraySize is (10, 10, 100, 100),
+            android.sensor.info.activeArraySize is (20, 20, 80, 80), and the geometric distortion
+            correction doesn't change the pixel coordinate, the resulting pixel selected in
+            pixel coordinates would be x,y = (25, 25) relative to the top,left of the raw buffer
+            with dimensions given in android.sensor.info.pixelArraySize, and would be (5, 5)
+            relative to the top,left of post-processed YUV output buffer with dimensions given in
+            android.sensor.info.activeArraySize.
+
+            The currently supported fields that correct for geometric distortion are:
+
+            1. android.lens.radialDistortion.
+
+            If all of the geometric distortion fields are no-ops, this rectangle will be the same
+            as the post-distortion-corrected rectangle given in
+            android.sensor.info.activeArraySize.
+
+            This rectangle is defined relative to the full pixel array; (0,0) is the top-left of
+            the full pixel array, and the size of the full pixel array is given by
+            android.sensor.info.pixelArraySize.
+
+            The pre-correction active array may be smaller than the full pixel array, since the
+            full array may include black calibration pixels or other inactive regions.
+            </details>
+            <hal_details>
+            This array contains `(xmin, ymin, width, height)`. The `(xmin, ymin)` must be
+            &amp;gt;= `(0,0)`.
+            The `(width, height)` must be &amp;lt;= `android.sensor.info.pixelArraySize`.
+
+            If omitted by the HAL implementation, the camera framework will assume that this is
+            the same as the post-correction active array region given in
+            android.sensor.info.activeArraySize.
+            </hal_details>
+            <tag id="RAW" />
+          </entry>
+        </namespace>
+        <entry name="referenceIlluminant1" type="byte" visibility="public"
+               enum="true">
+          <enum>
+            <value id="1">DAYLIGHT</value>
+            <value id="2">FLUORESCENT</value>
+            <value id="3">TUNGSTEN
+              <notes>Incandescent light</notes>
+            </value>
+            <value id="4">FLASH</value>
+            <value id="9">FINE_WEATHER</value>
+            <value id="10">CLOUDY_WEATHER</value>
+            <value id="11">SHADE</value>
+            <value id="12">DAYLIGHT_FLUORESCENT
+              <notes>D 5700 - 7100K</notes>
+            </value>
+            <value id="13">DAY_WHITE_FLUORESCENT
+              <notes>N 4600 - 5400K</notes>
+            </value>
+            <value id="14">COOL_WHITE_FLUORESCENT
+              <notes>W 3900 - 4500K</notes>
+            </value>
+            <value id="15">WHITE_FLUORESCENT
+              <notes>WW 3200 - 3700K</notes>
+            </value>
+            <value id="17">STANDARD_A</value>
+            <value id="18">STANDARD_B</value>
+            <value id="19">STANDARD_C</value>
+            <value id="20">D55</value>
+            <value id="21">D65</value>
+            <value id="22">D75</value>
+            <value id="23">D50</value>
+            <value id="24">ISO_STUDIO_TUNGSTEN</value>
+          </enum>
+          <description>
+          The standard reference illuminant used as the scene light source when
+          calculating the android.sensor.colorTransform1,
+          android.sensor.calibrationTransform1, and
+          android.sensor.forwardMatrix1 matrices.
+          </description>
+          <details>
+          The values in this key correspond to the values defined for the
+          EXIF LightSource tag. These illuminants are standard light sources
+          that are often used in calibrating camera devices.
+
+          If this key is present, then android.sensor.colorTransform1,
+          android.sensor.calibrationTransform1, and
+          android.sensor.forwardMatrix1 will also be present.
+
+          Some devices may choose to provide a second set of calibration
+          information for improved quality, including
+          android.sensor.referenceIlluminant2 and its corresponding matrices.
+          </details>
+          <hal_details>
+          The first reference illuminant (android.sensor.referenceIlluminant1)
+          and corresponding matrices must be present to support the RAW capability
+          and DNG output.
+
+          When producing raw images with a color profile that has only been
+          calibrated against a single light source, it is valid to omit
+          android.sensor.referenceIlluminant2 along with the
+          android.sensor.colorTransform2, android.sensor.calibrationTransform2,
+          and android.sensor.forwardMatrix2 matrices.
+
+          If only android.sensor.referenceIlluminant1 is included, it should be
+          chosen so that it is representative of typical scene lighting.  In
+          general, D50 or DAYLIGHT will be chosen for this case.
+
+          If both android.sensor.referenceIlluminant1 and
+          android.sensor.referenceIlluminant2 are included, they should be
+          chosen to represent the typical range of scene lighting conditions.
+          In general, low color temperature illuminant such as Standard-A will
+          be chosen for the first reference illuminant and a higher color
+          temperature illuminant such as D65 will be chosen for the second
+          reference illuminant.
+          </hal_details>
+          <tag id="RAW" />
+        </entry>
+        <entry name="referenceIlluminant2" type="byte" visibility="public">
+          <description>
+          The standard reference illuminant used as the scene light source when
+          calculating the android.sensor.colorTransform2,
+          android.sensor.calibrationTransform2, and
+          android.sensor.forwardMatrix2 matrices.
+          </description>
+          <range>Any value listed in android.sensor.referenceIlluminant1</range>
+          <details>
+          See android.sensor.referenceIlluminant1 for more details.
+
+          If this key is present, then android.sensor.colorTransform2,
+          android.sensor.calibrationTransform2, and
+          android.sensor.forwardMatrix2 will also be present.
+          </details>
+          <tag id="RAW" />
+        </entry>
+        <entry name="calibrationTransform1" type="rational"
+        visibility="public" optional="true"
+        type_notes="3x3 matrix in row-major-order" container="array"
+        typedef="colorSpaceTransform">
+          <array>
+            <size>3</size>
+            <size>3</size>
+          </array>
+          <description>
+          A per-device calibration transform matrix that maps from the
+          reference sensor colorspace to the actual device sensor colorspace.
+          </description>
+          <details>
+          This matrix is used to correct for per-device variations in the
+          sensor colorspace, and is used for processing raw buffer data.
+
+          The matrix is expressed as a 3x3 matrix in row-major-order, and
+          contains a per-device calibration transform that maps colors
+          from reference sensor color space (i.e. the "golden module"
+          colorspace) into this camera device's native sensor color
+          space under the first reference illuminant
+          (android.sensor.referenceIlluminant1).
+          </details>
+          <tag id="RAW" />
+        </entry>
+        <entry name="calibrationTransform2" type="rational"
+        visibility="public" optional="true"
+        type_notes="3x3 matrix in row-major-order" container="array"
+        typedef="colorSpaceTransform">
+          <array>
+            <size>3</size>
+            <size>3</size>
+          </array>
+          <description>
+          A per-device calibration transform matrix that maps from the
+          reference sensor colorspace to the actual device sensor colorspace
+          (this is the colorspace of the raw buffer data).
+          </description>
+          <details>
+          This matrix is used to correct for per-device variations in the
+          sensor colorspace, and is used for processing raw buffer data.
+
+          The matrix is expressed as a 3x3 matrix in row-major-order, and
+          contains a per-device calibration transform that maps colors
+          from reference sensor color space (i.e. the "golden module"
+          colorspace) into this camera device's native sensor color
+          space under the second reference illuminant
+          (android.sensor.referenceIlluminant2).
+
+          This matrix will only be present if the second reference
+          illuminant is present.
+          </details>
+          <tag id="RAW" />
+        </entry>
+        <entry name="colorTransform1" type="rational"
+        visibility="public" optional="true"
+        type_notes="3x3 matrix in row-major-order" container="array"
+        typedef="colorSpaceTransform">
+          <array>
+            <size>3</size>
+            <size>3</size>
+          </array>
+          <description>
+          A matrix that transforms color values from CIE XYZ color space to
+          reference sensor color space.
+          </description>
+          <details>
+          This matrix is used to convert from the standard CIE XYZ color
+          space to the reference sensor colorspace, and is used when processing
+          raw buffer data.
+
+          The matrix is expressed as a 3x3 matrix in row-major-order, and
+          contains a color transform matrix that maps colors from the CIE
+          XYZ color space to the reference sensor color space (i.e. the
+          "golden module" colorspace) under the first reference illuminant
+          (android.sensor.referenceIlluminant1).
+
+          The white points chosen in both the reference sensor color space
+          and the CIE XYZ colorspace when calculating this transform will
+          match the standard white point for the first reference illuminant
+          (i.e. no chromatic adaptation will be applied by this transform).
+          </details>
+          <tag id="RAW" />
+        </entry>
+        <entry name="colorTransform2" type="rational"
+        visibility="public" optional="true"
+        type_notes="3x3 matrix in row-major-order" container="array"
+        typedef="colorSpaceTransform">
+          <array>
+            <size>3</size>
+            <size>3</size>
+          </array>
+          <description>
+          A matrix that transforms color values from CIE XYZ color space to
+          reference sensor color space.
+          </description>
+          <details>
+          This matrix is used to convert from the standard CIE XYZ color
+          space to the reference sensor colorspace, and is used when processing
+          raw buffer data.
+
+          The matrix is expressed as a 3x3 matrix in row-major-order, and
+          contains a color transform matrix that maps colors from the CIE
+          XYZ color space to the reference sensor color space (i.e. the
+          "golden module" colorspace) under the second reference illuminant
+          (android.sensor.referenceIlluminant2).
+
+          The white points chosen in both the reference sensor color space
+          and the CIE XYZ colorspace when calculating this transform will
+          match the standard white point for the second reference illuminant
+          (i.e. no chromatic adaptation will be applied by this transform).
+
+          This matrix will only be present if the second reference
+          illuminant is present.
+          </details>
+          <tag id="RAW" />
+        </entry>
+        <entry name="forwardMatrix1" type="rational"
+        visibility="public" optional="true"
+        type_notes="3x3 matrix in row-major-order" container="array"
+        typedef="colorSpaceTransform">
+          <array>
+            <size>3</size>
+            <size>3</size>
+          </array>
+          <description>
+          A matrix that transforms white balanced camera colors from the reference
+          sensor colorspace to the CIE XYZ colorspace with a D50 whitepoint.
+          </description>
+          <details>
+          This matrix is used to convert to the standard CIE XYZ colorspace, and
+          is used when processing raw buffer data.
+
+          This matrix is expressed as a 3x3 matrix in row-major-order, and contains
+          a color transform matrix that maps white balanced colors from the
+          reference sensor color space to the CIE XYZ color space with a D50 white
+          point.
+
+          Under the first reference illuminant (android.sensor.referenceIlluminant1)
+          this matrix is chosen so that the standard white point for this reference
+          illuminant in the reference sensor colorspace is mapped to D50 in the
+          CIE XYZ colorspace.
+          </details>
+          <tag id="RAW" />
+        </entry>
+        <entry name="forwardMatrix2" type="rational"
+        visibility="public" optional="true"
+        type_notes="3x3 matrix in row-major-order" container="array"
+        typedef="colorSpaceTransform">
+          <array>
+            <size>3</size>
+            <size>3</size>
+          </array>
+          <description>
+          A matrix that transforms white balanced camera colors from the reference
+          sensor colorspace to the CIE XYZ colorspace with a D50 whitepoint.
+          </description>
+          <details>
+          This matrix is used to convert to the standard CIE XYZ colorspace, and
+          is used when processing raw buffer data.
+
+          This matrix is expressed as a 3x3 matrix in row-major-order, and contains
+          a color transform matrix that maps white balanced colors from the
+          reference sensor color space to the CIE XYZ color space with a D50 white
+          point.
+
+          Under the second reference illuminant (android.sensor.referenceIlluminant2)
+          this matrix is chosen so that the standard white point for this reference
+          illuminant in the reference sensor colorspace is mapped to D50 in the
+          CIE XYZ colorspace.
+
+          This matrix will only be present if the second reference
+          illuminant is present.
+          </details>
+          <tag id="RAW" />
+        </entry>
+        <entry name="baseGainFactor" type="rational"
+        optional="true">
+          <description>Gain factor from electrons to raw units when
+          ISO=100</description>
+          <tag id="FUTURE" />
+        </entry>
+        <entry name="blackLevelPattern" type="int32" visibility="public"
+        optional="true" type_notes="2x2 raw count block" container="array"
+        typedef="blackLevelPattern">
+          <array>
+            <size>4</size>
+          </array>
+          <description>
+          A fixed black level offset for each of the color filter arrangement
+          (CFA) mosaic channels.
+          </description>
+          <range>&amp;gt;= 0 for each.</range>
+          <details>
+          This key specifies the zero light value for each of the CFA mosaic
+          channels in the camera sensor.  The maximal value output by the
+          sensor is represented by the value in android.sensor.info.whiteLevel.
+
+          The values are given in the same order as channels listed for the CFA
+          layout key (see android.sensor.info.colorFilterArrangement), i.e. the
+          nth value given corresponds to the black level offset for the nth
+          color channel listed in the CFA.
+          </details>
+          <hal_details>
+          The values are given in row-column scan order, with the first value
+          corresponding to the element of the CFA in row=0, column=0.
+          </hal_details>
+          <tag id="RAW" />
+        </entry>
+        <entry name="maxAnalogSensitivity" type="int32" visibility="public"
+               optional="true" hwlevel="full">
+          <description>Maximum sensitivity that is implemented
+          purely through analog gain.</description>
+          <details>For android.sensor.sensitivity values less than or
+          equal to this, all applied gain must be analog. For
+          values above this, the gain applied can be a mix of analog and
+          digital.</details>
+          <tag id="V1" />
+          <tag id="FULL" />
+        </entry>
+        <entry name="orientation" type="int32" visibility="public"
+               hwlevel="legacy">
+          <description>Clockwise angle through which the output image needs to be rotated to be
+          upright on the device screen in its native orientation.
+          </description>
+          <units>Degrees of clockwise rotation; always a multiple of
+          90</units>
+          <range>0, 90, 180, 270</range>
+          <details>
+          Also defines the direction of rolling shutter readout, which is from top to bottom in
+          the sensor's coordinate system.
+          </details>
+          <tag id="BC" />
+        </entry>
+        <entry name="profileHueSatMapDimensions" type="int32"
+        visibility="system" optional="true"
+        type_notes="Number of samples for hue, saturation, and value"
+        container="array">
+          <array>
+            <size>3</size>
+          </array>
+          <description>
+          The number of input samples for each dimension of
+          android.sensor.profileHueSatMap.
+          </description>
+          <range>
+          Hue &amp;gt;= 1,
+          Saturation &amp;gt;= 2,
+          Value &amp;gt;= 1
+          </range>
+          <details>
+          The number of input samples for the hue, saturation, and value
+          dimension of android.sensor.profileHueSatMap. The order of the
+          dimensions given is hue, saturation, value; where hue is the 0th
+          element.
+          </details>
+          <tag id="RAW" />
+        </entry>
+      </static>
+      <dynamic>
+        <clone entry="android.sensor.exposureTime" kind="controls">
+        </clone>
+        <clone entry="android.sensor.frameDuration"
+        kind="controls"></clone>
+        <clone entry="android.sensor.sensitivity" kind="controls">
+        </clone>
+        <entry name="timestamp" type="int64" visibility="public"
+               hwlevel="legacy">
+          <description>Time at start of exposure of first
+          row of the image sensor active array, in nanoseconds.</description>
+          <units>Nanoseconds</units>
+          <range>&amp;gt; 0</range>
+          <details>The timestamps are also included in all image
+          buffers produced for the same capture, and will be identical
+          on all the outputs.
+
+          When android.sensor.info.timestampSource `==` UNKNOWN,
+          the timestamps measure time since an unspecified starting point,
+          and are monotonically increasing. They can be compared with the
+          timestamps for other captures from the same camera device, but are
+          not guaranteed to be comparable to any other time source.
+
+          When android.sensor.info.timestampSource `==` REALTIME, the
+          timestamps measure time in the same timebase as {@link
+          android.os.SystemClock#elapsedRealtimeNanos}, and they can
+          be compared to other timestamps from other subsystems that
+          are using that base.
+
+          For reprocessing, the timestamp will match the start of exposure of
+          the input image, i.e. {@link CaptureResult#SENSOR_TIMESTAMP the
+          timestamp} in the TotalCaptureResult that was used to create the
+          reprocess capture request.
+          </details>
+          <hal_details>
+          All timestamps must be in reference to the kernel's
+          CLOCK_BOOTTIME monotonic clock, which properly accounts for
+          time spent asleep. This allows for synchronization with
+          sensors that continue to operate while the system is
+          otherwise asleep.
+
+          If android.sensor.info.timestampSource `==` REALTIME,
+          The timestamp must be synchronized with the timestamps from other
+          sensor subsystems that are using the same timebase.
+
+          For reprocessing, the input image's start of exposure can be looked up
+          with android.sensor.timestamp from the metadata included in the
+          capture request.
+          </hal_details>
+          <tag id="BC" />
+        </entry>
+        <entry name="temperature" type="float"
+        optional="true">
+          <description>The temperature of the sensor, sampled at the time
+          exposure began for this frame.
+
+          The thermal diode being queried should be inside the sensor PCB, or
+          somewhere close to it.
+          </description>
+
+          <units>Celsius</units>
+          <range>Optional. This value is missing if no temperature is available.</range>
+          <tag id="FUTURE" />
+        </entry>
+        <entry name="neutralColorPoint" type="rational" visibility="public"
+        optional="true" container="array">
+          <array>
+            <size>3</size>
+          </array>
+          <description>
+          The estimated camera neutral color in the native sensor colorspace at
+          the time of capture.
+          </description>
+          <details>
+          This value gives the neutral color point encoded as an RGB value in the
+          native sensor color space.  The neutral color point indicates the
+          currently estimated white point of the scene illumination.  It can be
+          used to interpolate between the provided color transforms when
+          processing raw sensor data.
+
+          The order of the values is R, G, B; where R is in the lowest index.
+          </details>
+          <tag id="RAW" />
+        </entry>
+        <entry name="noiseProfile" type="double" visibility="public"
+        optional="true" type_notes="Pairs of noise model coefficients"
+        container="array" typedef="pairDoubleDouble">
+          <array>
+            <size>2</size>
+            <size>CFA Channels</size>
+          </array>
+          <description>
+          Noise model coefficients for each CFA mosaic channel.
+          </description>
+          <details>
+          This key contains two noise model coefficients for each CFA channel
+          corresponding to the sensor amplification (S) and sensor readout
+          noise (O).  These are given as pairs of coefficients for each channel
+          in the same order as channels listed for the CFA layout key
+          (see android.sensor.info.colorFilterArrangement).  This is
+          represented as an array of Pair&amp;lt;Double, Double&amp;gt;, where
+          the first member of the Pair at index n is the S coefficient and the
+          second member is the O coefficient for the nth color channel in the CFA.
+
+          These coefficients are used in a two parameter noise model to describe
+          the amount of noise present in the image for each CFA channel.  The
+          noise model used here is:
+
+          N(x) = sqrt(Sx + O)
+
+          Where x represents the recorded signal of a CFA channel normalized to
+          the range [0, 1], and S and O are the noise model coefficients for
+          that channel.
+
+          A more detailed description of the noise model can be found in the
+          Adobe DNG specification for the NoiseProfile tag.
+          </details>
+          <hal_details>
+          For a CFA layout of RGGB, the list of coefficients would be given as
+          an array of doubles S0,O0,S1,O1,..., where S0 and O0 are the coefficients
+          for the red channel, S1 and O1 are the coefficients for the first green
+          channel, etc.
+          </hal_details>
+          <tag id="RAW" />
+        </entry>
+        <entry name="profileHueSatMap" type="float"
+        visibility="system" optional="true"
+        type_notes="Mapping for hue, saturation, and value"
+        container="array">
+          <array>
+            <size>hue_samples</size>
+            <size>saturation_samples</size>
+            <size>value_samples</size>
+            <size>3</size>
+          </array>
+          <description>
+          A mapping containing a hue shift, saturation scale, and value scale
+          for each pixel.
+          </description>
+          <units>
+          The hue shift is given in degrees; saturation and value scale factors are
+          unitless and are between 0 and 1 inclusive
+          </units>
+          <details>
+          hue_samples, saturation_samples, and value_samples are given in
+          android.sensor.profileHueSatMapDimensions.
+
+          Each entry of this map contains three floats corresponding to the
+          hue shift, saturation scale, and value scale, respectively; where the
+          hue shift has the lowest index. The map entries are stored in the key
+          in nested loop order, with the value divisions in the outer loop, the
+          hue divisions in the middle loop, and the saturation divisions in the
+          inner loop. All zero input saturation entries are required to have a
+          value scale factor of 1.0.
+          </details>
+          <tag id="RAW" />
+        </entry>
+        <entry name="profileToneCurve" type="float"
+        visibility="system" optional="true"
+        type_notes="Samples defining a spline for a tone-mapping curve"
+        container="array">
+          <array>
+            <size>samples</size>
+            <size>2</size>
+          </array>
+          <description>
+          A list of x,y samples defining a tone-mapping curve for gamma adjustment.
+          </description>
+          <range>
+          Each sample has an input range of `[0, 1]` and an output range of
+          `[0, 1]`.  The first sample is required to be `(0, 0)`, and the last
+          sample is required to be `(1, 1)`.
+          </range>
+          <details>
+          This key contains a default tone curve that can be applied while
+          processing the image as a starting point for user adjustments.
+          The curve is specified as a list of value pairs in linear gamma.
+          The curve is interpolated using a cubic spline.
+          </details>
+          <tag id="RAW" />
+        </entry>
+        <entry name="greenSplit" type="float" visibility="public" optional="true">
+          <description>
+          The worst-case divergence between Bayer green channels.
+          </description>
+          <range>
+          &amp;gt;= 0
+          </range>
+          <details>
+          This value is an estimate of the worst case split between the
+          Bayer green channels in the red and blue rows in the sensor color
+          filter array.
+
+          The green split is calculated as follows:
+
+          1. A 5x5 pixel (or larger) window W within the active sensor array is
+          chosen. The term 'pixel' here is taken to mean a group of 4 Bayer
+          mosaic channels (R, Gr, Gb, B).  The location and size of the window
+          chosen is implementation defined, and should be chosen to provide a
+          green split estimate that is both representative of the entire image
+          for this camera sensor, and can be calculated quickly.
+          1. The arithmetic mean of the green channels from the red
+          rows (mean_Gr) within W is computed.
+          1. The arithmetic mean of the green channels from the blue
+          rows (mean_Gb) within W is computed.
+          1. The maximum ratio R of the two means is computed as follows:
+          `R = max((mean_Gr + 1)/(mean_Gb + 1), (mean_Gb + 1)/(mean_Gr + 1))`
+
+          The ratio R is the green split divergence reported for this property,
+          which represents how much the green channels differ in the mosaic
+          pattern.  This value is typically used to determine the treatment of
+          the green mosaic channels when demosaicing.
+
+          The green split value can be roughly interpreted as follows:
+
+          * R &amp;lt; 1.03 is a negligible split (&amp;lt;3% divergence).
+          * 1.03 &amp;lt;= R &amp;lt;= 1.20 will require some software
+          correction to avoid demosaic errors (3-20% divergence).
+          * R &amp;gt; 1.20 will require strong software correction to produce
+          a usable image (&amp;gt;20% divergence).
+          </details>
+          <hal_details>
+          The green split given may be a static value based on prior
+          characterization of the camera sensor using the green split
+          calculation method given here over a large, representative, sample
+          set of images.  Other methods of calculation that produce equivalent
+          results, and can be interpreted in the same manner, may be used.
+          </hal_details>
+          <tag id="RAW" />
+        </entry>
+      </dynamic>
+      <controls>
+        <entry name="testPatternData" type="int32" visibility="public" optional="true" container="array">
+          <array>
+            <size>4</size>
+          </array>
+          <description>
+            A pixel `[R, G_even, G_odd, B]` that supplies the test pattern
+            when android.sensor.testPatternMode is SOLID_COLOR.
+          </description>
+          <details>
+          Each color channel is treated as an unsigned 32-bit integer.
+          The camera device then uses the most significant X bits
+          that correspond to how many bits are in its Bayer raw sensor
+          output.
+
+          For example, a sensor with RAW10 Bayer output would use the
+          10 most significant bits from each color channel.
+          </details>
+          <hal_details>
+          </hal_details>
+        </entry>
+        <entry name="testPatternMode" type="int32" visibility="public" optional="true"
+          enum="true">
+          <enum>
+            <value>OFF
+              <notes>No test pattern mode is used, and the camera
+              device returns captures from the image sensor.
+
+              This is the default if the key is not set.</notes>
+            </value>
+            <value>SOLID_COLOR
+              <notes>
+              Each pixel in `[R, G_even, G_odd, B]` is replaced by its
+              respective color channel provided in
+              android.sensor.testPatternData.
+
+              For example:
+
+                  android.sensor.testPatternData = [0, 0xFFFFFFFF, 0xFFFFFFFF, 0]
+
+              All green pixels are 100% green. All red/blue pixels are black.
+
+                  android.sensor.testPatternData = [0xFFFFFFFF, 0, 0xFFFFFFFF, 0]
+
+              All red pixels are 100% red. Only the odd green pixels
+              are 100% green. All blue pixels are 100% black.
+              </notes>
+            </value>
+            <value>COLOR_BARS
+              <notes>
+              All pixel data is replaced with an 8-bar color pattern.
+
+              The vertical bars (left-to-right) are as follows:
+
+              * 100% white
+              * yellow
+              * cyan
+              * green
+              * magenta
+              * red
+              * blue
+              * black
+
+              In general the image would look like the following:
+
+                 W Y C G M R B K
+                 W Y C G M R B K
+                 W Y C G M R B K
+                 W Y C G M R B K
+                 W Y C G M R B K
+                 . . . . . . . .
+                 . . . . . . . .
+                 . . . . . . . .
+
+                 (B = Blue, K = Black)
+
+             Each bar should take up 1/8 of the sensor pixel array width.
+             When this is not possible, the bar size should be rounded
+             down to the nearest integer and the pattern can repeat
+             on the right side.
+
+             Each bar's height must always take up the full sensor
+             pixel array height.
+
+             Each pixel in this test pattern must be set to either
+             0% intensity or 100% intensity.
+             </notes>
+            </value>
+            <value>COLOR_BARS_FADE_TO_GRAY
+              <notes>
+              The test pattern is similar to COLOR_BARS, except that
+              each bar should start at its specified color at the top,
+              and fade to gray at the bottom.
+
+              Furthermore each bar is further subdivided into a left and
+              right half. The left half should have a smooth gradient,
+              and the right half should have a quantized gradient.
+
+              In particular, the right half should consist of blocks of the
+              same color for 1/16th of the active sensor pixel array width.
+
+              The least significant bits in the quantized gradient should
+              be copied from the most significant bits of the smooth gradient.
+
+              The height of each bar should always be a multiple of 128.
+              When this is not the case, the pattern should repeat at the bottom
+              of the image.
+              </notes>
+            </value>
+            <value>PN9
+              <notes>
+              All pixel data is replaced by a pseudo-random sequence
+              generated from a PN9 512-bit sequence (typically implemented
+              in hardware with a linear feedback shift register).
+
+              The generator should be reset at the beginning of each frame,
+              and thus each subsequent raw frame with this test pattern should
+              be exactly the same as the last.
+              </notes>
+            </value>
+            <value id="256">CUSTOM1
+              <notes>The first custom test pattern. All custom patterns that are
+              available only on this camera device are at least this numeric
+              value.
+
+              All of the custom test patterns will be static
+              (that is the raw image must not vary from frame to frame).
+              </notes>
+            </value>
+          </enum>
+          <description>When enabled, the sensor sends a test pattern instead of
+          doing a real exposure from the camera.
+          </description>
+          <range>android.sensor.availableTestPatternModes</range>
+          <details>
+          When a test pattern is enabled, all manual sensor controls specified
+          by android.sensor.* will be ignored. All other controls should
+          work as normal.
+
+          For example, if manual flash is enabled, flash firing should still
+          occur (and the test pattern will remain unmodified, since the flash
+          would not actually affect it).
+
+          Defaults to OFF.
+          </details>
+          <hal_details>
+          All test patterns are specified in the Bayer domain.
+
+          The HAL may choose to substitute test patterns from the sensor
+          with test patterns from on-device memory. In that case, it should be
+          indistinguishable to the ISP whether the data came from the
+          sensor interconnect bus (such as CSI2) or memory.
+          </hal_details>
+        </entry>
+      </controls>
+      <dynamic>
+        <clone entry="android.sensor.testPatternData" kind="controls">
+        </clone>
+        <clone entry="android.sensor.testPatternMode" kind="controls">
+        </clone>
+      </dynamic>
+      <static>
+        <entry name="availableTestPatternModes" type="int32" visibility="public" optional="true"
+          type_notes="list of enums" container="array">
+          <array>
+            <size>n</size>
+          </array>
+          <description>List of sensor test pattern modes for android.sensor.testPatternMode
+          supported by this camera device.
+          </description>
+          <range>Any value listed in android.sensor.testPatternMode</range>
+          <details>
+            Defaults to OFF, and always includes OFF if defined.
+          </details>
+          <hal_details>
+            All custom modes must be >= CUSTOM1.
+          </hal_details>
+        </entry>
+      </static>
+      <dynamic>
+        <entry name="rollingShutterSkew" type="int64" visibility="public" hwlevel="limited">
+          <description>Duration between the start of first row exposure
+          and the start of last row exposure.</description>
+          <units>Nanoseconds</units>
+          <range> &amp;gt;= 0 and &amp;lt;
+          {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputMinFrameDuration}.</range>
+          <details>
+          This is the exposure time skew between the first and last
+          row exposure start times. The first row and the last row are
+          the first and last rows inside of the
+          android.sensor.info.activeArraySize.
+
+          For typical camera sensors that use rolling shutters, this is also equivalent
+          to the frame readout time.
+          </details>
+          <hal_details>
+          The HAL must report `0` if the sensor is using global shutter, where all pixels begin
+          exposure at the same time.
+          </hal_details>
+          <tag id="V1" />
+        </entry>
+      </dynamic>
+    </section>
+    <section name="shading">
+      <controls>
+        <entry name="mode" type="byte" visibility="public" enum="true" hwlevel="full">
+          <enum>
+            <value>OFF
+            <notes>No lens shading correction is applied.</notes></value>
+            <value>FAST
+            <notes>Apply lens shading corrections, without slowing
+            frame rate relative to sensor raw output</notes></value>
+            <value>HIGH_QUALITY
+            <notes>Apply high-quality lens shading correction, at the
+            cost of possibly reduced frame rate.</notes></value>
+          </enum>
+          <description>Quality of lens shading correction applied
+          to the image data.</description>
+          <range>android.shading.availableModes</range>
+          <details>
+          When set to OFF mode, no lens shading correction will be applied by the
+          camera device, and an identity lens shading map data will be provided
+          if `android.statistics.lensShadingMapMode == ON`. For example, for lens
+          shading map with size of `[ 4, 3 ]`,
+          the output android.statistics.lensShadingCorrectionMap for this case will be an identity
+          map shown below:
+
+              [ 1.0, 1.0, 1.0, 1.0,  1.0, 1.0, 1.0, 1.0,
+               1.0, 1.0, 1.0, 1.0,  1.0, 1.0, 1.0, 1.0,
+               1.0, 1.0, 1.0, 1.0,  1.0, 1.0, 1.0, 1.0,
+               1.0, 1.0, 1.0, 1.0,  1.0, 1.0, 1.0, 1.0,
+               1.0, 1.0, 1.0, 1.0,  1.0, 1.0, 1.0, 1.0,
+               1.0, 1.0, 1.0, 1.0,  1.0, 1.0, 1.0, 1.0 ]
+
+          When set to other modes, lens shading correction will be applied by the camera
+          device. Applications can request lens shading map data by setting
+          android.statistics.lensShadingMapMode to ON, and then the camera device will provide lens
+          shading map data in android.statistics.lensShadingCorrectionMap; the returned shading map
+          data will be the one applied by the camera device for this capture request.
+
+          The shading map data may depend on the auto-exposure (AE) and AWB statistics, therefore
+          the reliability of the map data may be affected by the AE and AWB algorithms. When AE and
+          AWB are in AUTO modes (android.control.aeMode `!=` OFF and android.control.awbMode `!=`
+          OFF), to get best results, it is recommended that the applications wait for the AE and AWB
+          to be converged before using the returned shading map data.
+          </details>
+        </entry>
+        <entry name="strength" type="byte">
+          <description>Control the amount of shading correction
+          applied to the images</description>
+          <units>unitless: 1-10; 10 is full shading
+          compensation</units>
+          <tag id="FUTURE" />
+        </entry>
+      </controls>
+      <dynamic>
+        <clone entry="android.shading.mode" kind="controls">
+        </clone>
+      </dynamic>
+      <static>
+        <entry name="availableModes" type="byte" visibility="public"
+            type_notes="List of enums (android.shading.mode)." container="array"
+            typedef="enumList" hwlevel="legacy">
+          <array>
+            <size>n</size>
+          </array>
+          <description>
+          List of lens shading modes for android.shading.mode that are supported by this camera device.
+          </description>
+          <range>Any value listed in android.shading.mode</range>
+          <details>
+              This list contains lens shading modes that can be set for the camera device.
+              Camera devices that support the MANUAL_POST_PROCESSING capability will always
+              list OFF and FAST mode. This includes all FULL level devices.
+              LEGACY devices will always only support FAST mode.
+          </details>
+          <hal_details>
+            HAL must support both FAST and HIGH_QUALITY if lens shading correction control is
+            available on the camera device, but the underlying implementation can be the same for
+            both modes. That is, if the highest quality implementation on the camera device does not
+            slow down capture rate, then FAST and HIGH_QUALITY will generate the same output.
+          </hal_details>
+        </entry>
+      </static>
+    </section>
+    <section name="statistics">
+      <controls>
+        <entry name="faceDetectMode" type="byte" visibility="public" enum="true"
+               hwlevel="legacy">
+          <enum>
+            <value>OFF
+            <notes>Do not include face detection statistics in capture
+            results.</notes></value>
+            <value optional="true">SIMPLE
+            <notes>Return face rectangle and confidence values only.
+            </notes></value>
+            <value optional="true">FULL
+            <notes>Return all face
+            metadata.
+
+            In this mode, face rectangles, scores, landmarks, and face IDs are all valid.
+            </notes></value>
+          </enum>
+          <description>Operating mode for the face detector
+          unit.</description>
+          <range>android.statistics.info.availableFaceDetectModes</range>
+          <details>Whether face detection is enabled, and whether it
+          should output just the basic fields or the full set of
+          fields.</details>
+          <hal_details>
+            SIMPLE mode must fill in android.statistics.faceRectangles and
+            android.statistics.faceScores.
+            FULL mode must also fill in android.statistics.faceIds, and
+            android.statistics.faceLandmarks.
+          </hal_details>
+          <tag id="BC" />
+        </entry>
+        <entry name="histogramMode" type="byte" enum="true" typedef="boolean">
+          <enum>
+            <value>OFF</value>
+            <value>ON</value>
+          </enum>
+          <description>Operating mode for histogram
+          generation</description>
+          <tag id="FUTURE" />
+        </entry>
+        <entry name="sharpnessMapMode" type="byte" enum="true" typedef="boolean">
+          <enum>
+            <value>OFF</value>
+            <value>ON</value>
+          </enum>
+          <description>Operating mode for sharpness map
+          generation</description>
+          <tag id="FUTURE" />
+        </entry>
+        <entry name="hotPixelMapMode" type="byte" visibility="public" enum="true"
+        typedef="boolean">
+          <enum>
+            <value>OFF
+            <notes>Hot pixel map production is disabled.
+            </notes></value>
+            <value>ON
+            <notes>Hot pixel map production is enabled.
+            </notes></value>
+          </enum>
+          <description>
+          Operating mode for hot pixel map generation.
+          </description>
+          <range>android.statistics.info.availableHotPixelMapModes</range>
+          <details>
+          If set to `true`, a hot pixel map is returned in android.statistics.hotPixelMap.
+          If set to `false`, no hot pixel map will be returned.
+          </details>
+          <tag id="V1" />
+          <tag id="RAW" />
+        </entry>
+      </controls>
+      <static>
+        <namespace name="info">
+          <entry name="availableFaceDetectModes" type="byte"
+                 visibility="public"
+                 type_notes="List of enums from android.statistics.faceDetectMode"
+                 container="array"
+                 typedef="enumList"
+                 hwlevel="legacy">
+            <array>
+              <size>n</size>
+            </array>
+            <description>List of face detection modes for android.statistics.faceDetectMode that are
+            supported by this camera device.
+            </description>
+            <range>Any value listed in android.statistics.faceDetectMode</range>
+            <details>OFF is always supported.
+            </details>
+          </entry>
+          <entry name="histogramBucketCount" type="int32">
+            <description>Number of histogram buckets
+            supported</description>
+            <range>&amp;gt;= 64</range>
+            <tag id="FUTURE" />
+          </entry>
+          <entry name="maxFaceCount" type="int32" visibility="public" hwlevel="legacy">
+            <description>The maximum number of simultaneously detectable
+            faces.</description>
+            <range>0 for cameras without available face detection; otherwise:
+            `&gt;=4` for LIMITED or FULL hwlevel devices or
+            `&gt;0` for LEGACY devices.</range>
+            <tag id="BC" />
+          </entry>
+          <entry name="maxHistogramCount" type="int32">
+            <description>Maximum value possible for a histogram
+            bucket</description>
+            <tag id="FUTURE" />
+          </entry>
+          <entry name="maxSharpnessMapValue" type="int32">
+            <description>Maximum value possible for a sharpness map
+            region.</description>
+            <tag id="FUTURE" />
+          </entry>
+          <entry name="sharpnessMapSize" type="int32"
+          type_notes="width x height" container="array" typedef="size">
+            <array>
+              <size>2</size>
+            </array>
+            <description>Dimensions of the sharpness
+            map</description>
+            <range>Must be at least 32 x 32</range>
+            <tag id="FUTURE" />
+          </entry>
+          <entry name="availableHotPixelMapModes" type="byte" visibility="public"
+                 type_notes="list of enums" container="array" typedef="boolean">
+            <array>
+              <size>n</size>
+            </array>
+            <description>
+            List of hot pixel map output modes for android.statistics.hotPixelMapMode that are
+            supported by this camera device.
+            </description>
+            <range>Any value listed in android.statistics.hotPixelMapMode</range>
+            <details>
+            If no hotpixel map output is available for this camera device, this will contain only
+            `false`.
+
+            ON is always supported on devices with the RAW capability.
+            </details>
+            <tag id="V1" />
+            <tag id="RAW" />
+          </entry>
+          <entry name="availableLensShadingMapModes" type="byte" visibility="public"
+                 type_notes="list of enums" container="array" typedef="enumList">
+            <array>
+              <size>n</size>
+            </array>
+            <description>
+            List of lens shading map output modes for android.statistics.lensShadingMapMode that
+            are supported by this camera device.
+            </description>
+            <range>Any value listed in android.statistics.lensShadingMapMode</range>
+            <details>
+            If no lens shading map output is available for this camera device, this key will
+            contain only OFF.
+
+            ON is always supported on devices with the RAW capability.
+            LEGACY mode devices will always only support OFF.
+            </details>
+          </entry>
+        </namespace>
+      </static>
+      <dynamic>
+        <clone entry="android.statistics.faceDetectMode"
+               kind="controls"></clone>
+        <entry name="faceIds" type="int32" visibility="hidden" container="array"
+               hwlevel="legacy">
+          <array>
+            <size>n</size>
+          </array>
+          <description>List of unique IDs for detected faces.</description>
+          <details>
+          Each detected face is given a unique ID that is valid for as long as the face is visible
+          to the camera device.  A face that leaves the field of view and later returns may be
+          assigned a new ID.
+
+          Only available if android.statistics.faceDetectMode == FULL</details>
+          <tag id="BC" />
+        </entry>
+        <entry name="faceLandmarks" type="int32" visibility="hidden"
+        type_notes="(leftEyeX, leftEyeY, rightEyeX, rightEyeY, mouthX, mouthY)"
+        container="array" hwlevel="legacy">
+          <array>
+            <size>n</size>
+            <size>6</size>
+          </array>
+          <description>List of landmarks for detected
+          faces.</description>
+          <details>
+            The coordinate system is that of android.sensor.info.activeArraySize, with
+            `(0, 0)` being the top-left pixel of the active array.
+
+            Only available if android.statistics.faceDetectMode == FULL</details>
+          <tag id="BC" />
+        </entry>
+        <entry name="faceRectangles" type="int32" visibility="hidden"
+        type_notes="(xmin, ymin, xmax, ymax). (0,0) is top-left of active pixel area"
+        container="array" typedef="rectangle" hwlevel="legacy">
+          <array>
+            <size>n</size>
+            <size>4</size>
+          </array>
+          <description>List of the bounding rectangles for detected
+          faces.</description>
+          <details>
+            The coordinate system is that of android.sensor.info.activeArraySize, with
+            `(0, 0)` being the top-left pixel of the active array.
+
+            Only available if android.statistics.faceDetectMode != OFF</details>
+          <tag id="BC" />
+        </entry>
+        <entry name="faceScores" type="byte" visibility="hidden" container="array"
+               hwlevel="legacy">
+          <array>
+            <size>n</size>
+          </array>
+          <description>List of the face confidence scores for
+          detected faces</description>
+          <range>1-100</range>
+          <details>Only available if android.statistics.faceDetectMode != OFF.
+          </details>
+          <hal_details>
+          The value should be meaningful (for example, setting 100 at
+          all times is illegal).</hal_details>
+          <tag id="BC" />
+        </entry>
+        <entry name="faces" type="int32" visibility="public" synthetic="true"
+               container="array" typedef="face" hwlevel="legacy">
+          <array>
+            <size>n</size>
+          </array>
+          <description>List of the faces detected through camera face detection
+          in this capture.</description>
+          <details>
+          Only available if android.statistics.faceDetectMode `!=` OFF.
+          </details>
+        </entry>
+        <entry name="histogram" type="int32"
+        type_notes="count of pixels for each color channel that fall into each histogram bucket, scaled to be between 0 and maxHistogramCount"
+        container="array">
+          <array>
+            <size>n</size>
+            <size>3</size>
+          </array>
+          <description>A 3-channel histogram based on the raw
+          sensor data</description>
+          <details>The k'th bucket (0-based) covers the input range
+          (with w = android.sensor.info.whiteLevel) of [ k * w/N,
+          (k + 1) * w / N ). If only a monochrome histogram is
+          supported, all channels should have the same data</details>
+          <tag id="FUTURE" />
+        </entry>
+        <clone entry="android.statistics.histogramMode"
+        kind="controls"></clone>
+        <entry name="sharpnessMap" type="int32"
+        type_notes="estimated sharpness for each region of the input image. Normalized to be between 0 and maxSharpnessMapValue. Higher values mean sharper (better focused)"
+        container="array">
+          <array>
+            <size>n</size>
+            <size>m</size>
+            <size>3</size>
+          </array>
+          <description>A 3-channel sharpness map, based on the raw
+          sensor data</description>
+          <details>If only a monochrome sharpness map is supported,
+          all channels should have the same data</details>
+          <tag id="FUTURE" />
+        </entry>
+        <clone entry="android.statistics.sharpnessMapMode"
+               kind="controls"></clone>
+        <entry name="lensShadingCorrectionMap" type="byte" visibility="public"
+               typedef="lensShadingMap" hwlevel="full">
+          <description>The shading map is a low-resolution floating-point map
+          that lists the coefficients used to correct for vignetting, for each
+          Bayer color channel.</description>
+          <range>Each gain factor is &amp;gt;= 1</range>
+          <details>The least shaded section of the image should have a gain factor
+          of 1; all other sections should have gains above 1.
+
+          When android.colorCorrection.mode = TRANSFORM_MATRIX, the map
+          must take into account the colorCorrection settings.
+
+          The shading map is for the entire active pixel array, and is not
+          affected by the crop region specified in the request. Each shading map
+          entry is the value of the shading compensation map over a specific
+          pixel on the sensor.  Specifically, with a (N x M) resolution shading
+          map, and an active pixel array size (W x H), shading map entry
+          (x,y) ϵ (0 ... N-1, 0 ... M-1) is the value of the shading map at
+          pixel ( ((W-1)/(N-1)) * x, ((H-1)/(M-1)) * y) for the four color channels.
+          The map is assumed to be bilinearly interpolated between the sample points.
+
+          The channel order is [R, Geven, Godd, B], where Geven is the green
+          channel for the even rows of a Bayer pattern, and Godd is the odd rows.
+          The shading map is stored in a fully interleaved format.
+
+          The shading map should have on the order of 30-40 rows and columns,
+          and must be smaller than 64x64.
+
+          As an example, given a very small map defined as:
+
+              width,height = [ 4, 3 ]
+              values =
+              [ 1.3, 1.2, 1.15, 1.2,  1.2, 1.2, 1.15, 1.2,
+                  1.1, 1.2, 1.2, 1.2,  1.3, 1.2, 1.3, 1.3,
+                1.2, 1.2, 1.25, 1.1,  1.1, 1.1, 1.1, 1.0,
+                  1.0, 1.0, 1.0, 1.0,  1.2, 1.3, 1.25, 1.2,
+                1.3, 1.2, 1.2, 1.3,   1.2, 1.15, 1.1, 1.2,
+                  1.2, 1.1, 1.0, 1.2,  1.3, 1.15, 1.2, 1.3 ]
+
+          The low-resolution scaling map images for each channel are
+          (displayed using nearest-neighbor interpolation):
+
+          ![Red lens shading map](android.statistics.lensShadingMap/red_shading.png)
+          ![Green (even rows) lens shading map](android.statistics.lensShadingMap/green_e_shading.png)
+          ![Green (odd rows) lens shading map](android.statistics.lensShadingMap/green_o_shading.png)
+          ![Blue lens shading map](android.statistics.lensShadingMap/blue_shading.png)
+
+          As a visualization only, inverting the full-color map to recover an
+          image of a gray wall (using bicubic interpolation for visual quality) as captured by the sensor gives:
+
+          ![Image of a uniform white wall (inverse shading map)](android.statistics.lensShadingMap/inv_shading.png)
+          </details>
+        </entry>
+        <entry name="lensShadingMap" type="float" visibility="hidden"
+               type_notes="2D array of float gain factors per channel to correct lens shading"
+               container="array" hwlevel="full">
+          <array>
+            <size>4</size>
+            <size>n</size>
+            <size>m</size>
+          </array>
+          <description>The shading map is a low-resolution floating-point map
+          that lists the coefficients used to correct for vignetting, for each
+          Bayer color channel of RAW image data.</description>
+          <range>Each gain factor is &amp;gt;= 1</range>
+          <details>The least shaded section of the image should have a gain factor
+          of 1; all other sections should have gains above 1.
+
+          When android.colorCorrection.mode = TRANSFORM_MATRIX, the map
+          must take into account the colorCorrection settings.
+
+          The shading map is for the entire active pixel array, and is not
+          affected by the crop region specified in the request. Each shading map
+          entry is the value of the shading compensation map over a specific
+          pixel on the sensor.  Specifically, with a (N x M) resolution shading
+          map, and an active pixel array size (W x H), shading map entry
+          (x,y) ϵ (0 ... N-1, 0 ... M-1) is the value of the shading map at
+          pixel ( ((W-1)/(N-1)) * x, ((H-1)/(M-1)) * y) for the four color channels.
+          The map is assumed to be bilinearly interpolated between the sample points.
+
+          The channel order is [R, Geven, Godd, B], where Geven is the green
+          channel for the even rows of a Bayer pattern, and Godd is the odd rows.
+          The shading map is stored in a fully interleaved format, and its size
+          is provided in the camera static metadata by android.lens.info.shadingMapSize.
+
+          The shading map should have on the order of 30-40 rows and columns,
+          and must be smaller than 64x64.
+
+          As an example, given a very small map defined as:
+
+              android.lens.info.shadingMapSize = [ 4, 3 ]
+              android.statistics.lensShadingMap =
+              [ 1.3, 1.2, 1.15, 1.2,  1.2, 1.2, 1.15, 1.2,
+                  1.1, 1.2, 1.2, 1.2,  1.3, 1.2, 1.3, 1.3,
+                1.2, 1.2, 1.25, 1.1,  1.1, 1.1, 1.1, 1.0,
+                  1.0, 1.0, 1.0, 1.0,  1.2, 1.3, 1.25, 1.2,
+                1.3, 1.2, 1.2, 1.3,   1.2, 1.15, 1.1, 1.2,
+                  1.2, 1.1, 1.0, 1.2,  1.3, 1.15, 1.2, 1.3 ]
+
+          The low-resolution scaling map images for each channel are
+          (displayed using nearest-neighbor interpolation):
+
+          ![Red lens shading map](android.statistics.lensShadingMap/red_shading.png)
+          ![Green (even rows) lens shading map](android.statistics.lensShadingMap/green_e_shading.png)
+          ![Green (odd rows) lens shading map](android.statistics.lensShadingMap/green_o_shading.png)
+          ![Blue lens shading map](android.statistics.lensShadingMap/blue_shading.png)
+
+          As a visualization only, inverting the full-color map to recover an
+          image of a gray wall (using bicubic interpolation for visual quality)
+          as captured by the sensor gives:
+
+          ![Image of a uniform white wall (inverse shading map)](android.statistics.lensShadingMap/inv_shading.png)
+
+          Note that the RAW image data might be subject to lens shading
+          correction not reported on this map. Query
+          android.sensor.info.lensShadingApplied to see if RAW image data has been
+          subject to lens shading correction. If android.sensor.info.lensShadingApplied
+          is TRUE, the RAW image data is subject to partial or full lens shading
+          correction. In the case full lens shading correction is applied to RAW
+          images, the gain factor map reported in this key will contain all 1.0 gains.
+          In other words, the map reported in this key is the remaining lens shading
+          that needs to be applied on the RAW image to get images without lens shading
+          artifacts. See android.request.maxNumOutputRaw for a list of RAW image
+          formats.
+          </details>
+          <hal_details>
+          The lens shading map calculation may depend on exposure and white balance statistics.
+          When AE and AWB are in AUTO modes
+          (android.control.aeMode `!=` OFF and android.control.awbMode `!=` OFF), the HAL
+          may have all the information it needs to generate the most accurate lens shading map. When
+          AE or AWB are in manual mode
+          (android.control.aeMode `==` OFF or android.control.awbMode `==` OFF), the shading map
+          may be adversely impacted by manual exposure or white balance parameters. To avoid
+          generating unreliable shading map data, the HAL may choose to lock the shading map with
+          the latest known good map generated when the AE and AWB are in AUTO modes.
+          </hal_details>
+        </entry>
+        <entry name="predictedColorGains" type="float"
+               visibility="hidden"
+               deprecated="true"
+               optional="true"
+               type_notes="A 1D array of floats for 4 color channel gains"
+               container="array">
+          <array>
+            <size>4</size>
+          </array>
+          <description>The best-fit color channel gains calculated
+          by the camera device's statistics units for the current output frame.
+          </description>
+          <details>
+          This may be different than the gains used for this frame,
+          since statistics processing on data from a new frame
+          typically completes after the transform has already been
+          applied to that frame.
+
+          The 4 channel gains are defined in Bayer domain,
+          see android.colorCorrection.gains for details.
+
+          This value should always be calculated by the auto-white balance (AWB) block,
+          regardless of the android.control.* current values.
+          </details>
+        </entry>
+        <entry name="predictedColorTransform" type="rational"
+               visibility="hidden"
+               deprecated="true"
+               optional="true"
+               type_notes="3x3 rational matrix in row-major order"
+               container="array">
+          <array>
+            <size>3</size>
+            <size>3</size>
+          </array>
+          <description>The best-fit color transform matrix estimate
+          calculated by the camera device's statistics units for the current
+          output frame.</description>
+          <details>The camera device will provide the estimate from its
+          statistics unit on the white balance transforms to use
+          for the next frame. These are the values the camera device believes
+          are the best fit for the current output frame. This may
+          be different than the transform used for this frame, since
+          statistics processing on data from a new frame typically
+          completes after the transform has already been applied to
+          that frame.
+
+          These estimates must be provided for all frames, even if
+          capture settings and color transforms are set by the application.
+
+          This value should always be calculated by the auto-white balance (AWB) block,
+          regardless of the android.control.* current values.
+          </details>
+        </entry>
+        <entry name="sceneFlicker" type="byte" visibility="public" enum="true"
+               hwlevel="full">
+          <enum>
+            <value>NONE
+            <notes>The camera device does not detect any flickering illumination
+            in the current scene.</notes></value>
+            <value>50HZ
+            <notes>The camera device detects illumination flickering at 50Hz
+            in the current scene.</notes></value>
+            <value>60HZ
+            <notes>The camera device detects illumination flickering at 60Hz
+            in the current scene.</notes></value>
+          </enum>
+          <description>The camera device estimated scene illumination lighting
+          frequency.</description>
+          <details>
+          Many light sources, such as most fluorescent lights, flicker at a rate
+          that depends on the local utility power standards. This flicker must be
+          accounted for by auto-exposure routines to avoid artifacts in captured images.
+          The camera device uses this entry to tell the application what the scene
+          illuminant frequency is.
+
+          When manual exposure control is enabled
+          (`android.control.aeMode == OFF` or `android.control.mode ==
+          OFF`), the android.control.aeAntibandingMode doesn't perform
+          antibanding, and the application can ensure it selects
+          exposure times that do not cause banding issues by looking
+          into this metadata field. See
+          android.control.aeAntibandingMode for more details.
+
+          Reports NONE if there doesn't appear to be flickering illumination.
+          </details>
+        </entry>
+        <clone entry="android.statistics.hotPixelMapMode" kind="controls">
+        </clone>
+        <entry name="hotPixelMap" type="int32" visibility="public"
+        type_notes="list of coordinates based on android.sensor.pixelArraySize"
+        container="array" typedef="point">
+          <array>
+            <size>2</size>
+            <size>n</size>
+          </array>
+          <description>
+          List of `(x, y)` coordinates of hot/defective pixels on the sensor.
+          </description>
+          <range>
+          n &lt;= number of pixels on the sensor.
+          The `(x, y)` coordinates must be bounded by
+          android.sensor.info.pixelArraySize.
+          </range>
+          <details>
+          A coordinate `(x, y)` must lie between `(0, 0)`, and
+          `(width - 1, height - 1)` (inclusive), which are the top-left and
+          bottom-right of the pixel array, respectively. The width and
+          height dimensions are given in android.sensor.info.pixelArraySize.
+          This may include hot pixels that lie outside of the active array
+          bounds given by android.sensor.info.activeArraySize.
+          </details>
+          <hal_details>
+          A hotpixel map contains the coordinates of pixels on the camera
+          sensor that do not report valid values (usually due to defects in
+          the camera sensor). This includes pixels that are stuck at certain
+          values, or have a response that does not accurately encode the
+          incoming light from the scene.
+
+          To avoid performance issues, there should be significantly fewer hot
+          pixels than actual pixels on the camera sensor.
+          </hal_details>
+          <tag id="V1" />
+          <tag id="RAW" />
+        </entry>
+      </dynamic>
+      <controls>
+        <entry name="lensShadingMapMode" type="byte" visibility="public" enum="true" hwlevel="full">
+          <enum>
+            <value>OFF
+            <notes>Do not include a lens shading map in the capture result.</notes></value>
+            <value>ON
+            <notes>Include a lens shading map in the capture result.</notes></value>
+          </enum>
+          <description>Whether the camera device will output the lens
+          shading map in output result metadata.</description>
+          <range>android.statistics.info.availableLensShadingMapModes</range>
+          <details>When set to ON,
+          android.statistics.lensShadingMap will be provided in
+          the output result metadata.
+
+          ON is always supported on devices with the RAW capability.
+          </details>
+          <tag id="RAW" />
+        </entry>
+      </controls>
+      <dynamic>
+        <clone entry="android.statistics.lensShadingMapMode" kind="controls">
+        </clone>
+      </dynamic>
+    </section>
+    <section name="tonemap">
+      <controls>
+        <entry name="curveBlue" type="float" visibility="hidden"
+        type_notes="1D array of float pairs (P_IN, P_OUT). The maximum number of pairs is specified by android.tonemap.maxCurvePoints."
+        container="array" hwlevel="full">
+          <array>
+            <size>n</size>
+            <size>2</size>
+          </array>
+          <description>Tonemapping / contrast / gamma curve for the blue
+          channel, to use when android.tonemap.mode is
+          CONTRAST_CURVE.</description>
+          <details>See android.tonemap.curveRed for more details.</details>
+        </entry>
+        <entry name="curveGreen" type="float" visibility="hidden"
+        type_notes="1D array of float pairs (P_IN, P_OUT). The maximum number of pairs is specified by android.tonemap.maxCurvePoints."
+        container="array" hwlevel="full">
+          <array>
+            <size>n</size>
+            <size>2</size>
+          </array>
+          <description>Tonemapping / contrast / gamma curve for the green
+          channel, to use when android.tonemap.mode is
+          CONTRAST_CURVE.</description>
+          <details>See android.tonemap.curveRed for more details.</details>
+        </entry>
+        <entry name="curveRed" type="float" visibility="hidden"
+        type_notes="1D array of float pairs (P_IN, P_OUT). The maximum number of pairs is specified by android.tonemap.maxCurvePoints."
+        container="array" hwlevel="full">
+          <array>
+            <size>n</size>
+            <size>2</size>
+          </array>
+          <description>Tonemapping / contrast / gamma curve for the red
+          channel, to use when android.tonemap.mode is
+          CONTRAST_CURVE.</description>
+          <range>0-1 on both input and output coordinates, normalized
+          as a floating-point value such that 0 == black and 1 == white.
+          </range>
+          <details>
+          Each channel's curve is defined by an array of control points:
+
+              android.tonemap.curveRed =
+                [ P0in, P0out, P1in, P1out, P2in, P2out, P3in, P3out, ..., PNin, PNout ]
+              2 &lt;= N &lt;= android.tonemap.maxCurvePoints
+
+          These are sorted in order of increasing `Pin`; it is
+          required that input values 0.0 and 1.0 are included in the list to
+          define a complete mapping. For input values between control points,
+          the camera device must linearly interpolate between the control
+          points.
+
+          Each curve can have an independent number of points, and the number
+          of points can be less than max (that is, the request doesn't have to
+          always provide a curve with number of points equivalent to
+          android.tonemap.maxCurvePoints).
+
+          A few examples, and their corresponding graphical mappings; these
+          only specify the red channel and the precision is limited to 4
+          digits, for conciseness.
+
+          Linear mapping:
+
+              android.tonemap.curveRed = [ 0, 0, 1.0, 1.0 ]
+
+          ![Linear mapping curve](android.tonemap.curveRed/linear_tonemap.png)
+
+          Invert mapping:
+
+              android.tonemap.curveRed = [ 0, 1.0, 1.0, 0 ]
+
+          ![Inverting mapping curve](android.tonemap.curveRed/inverse_tonemap.png)
+
+          Gamma 1/2.2 mapping, with 16 control points:
+
+              android.tonemap.curveRed = [
+                0.0000, 0.0000, 0.0667, 0.2920, 0.1333, 0.4002, 0.2000, 0.4812,
+                0.2667, 0.5484, 0.3333, 0.6069, 0.4000, 0.6594, 0.4667, 0.7072,
+                0.5333, 0.7515, 0.6000, 0.7928, 0.6667, 0.8317, 0.7333, 0.8685,
+                0.8000, 0.9035, 0.8667, 0.9370, 0.9333, 0.9691, 1.0000, 1.0000 ]
+
+          ![Gamma = 1/2.2 tonemapping curve](android.tonemap.curveRed/gamma_tonemap.png)
+
+          Standard sRGB gamma mapping, per IEC 61966-2-1:1999, with 16 control points:
+
+              android.tonemap.curveRed = [
+                0.0000, 0.0000, 0.0667, 0.2864, 0.1333, 0.4007, 0.2000, 0.4845,
+                0.2667, 0.5532, 0.3333, 0.6125, 0.4000, 0.6652, 0.4667, 0.7130,
+                0.5333, 0.7569, 0.6000, 0.7977, 0.6667, 0.8360, 0.7333, 0.8721,
+                0.8000, 0.9063, 0.8667, 0.9389, 0.9333, 0.9701, 1.0000, 1.0000 ]
+
+          ![sRGB tonemapping curve](android.tonemap.curveRed/srgb_tonemap.png)
+        </details>
+        <hal_details>
+          For good quality of mapping, at least 128 control points are
+          preferred.
+
+          A typical use case of this would be a gamma-1/2.2 curve, with as many
+          control points used as are available.
+        </hal_details>
+        </entry>
+        <entry name="curve" type="float" visibility="public" synthetic="true"
+               typedef="tonemapCurve"
+               hwlevel="full">
+          <description>Tonemapping / contrast / gamma curve to use when android.tonemap.mode
+          is CONTRAST_CURVE.</description>
+          <details>
+          The tonemapCurve consists of three curves for each of the red, green, and blue
+          channels respectively. The following example uses the red channel as an
+          example. The same logic applies to the green and blue channels.
+          Each channel's curve is defined by an array of control points:
+
+              curveRed =
+                [ P0(in, out), P1(in, out), P2(in, out), P3(in, out), ..., PN(in, out) ]
+              2 &lt;= N &lt;= android.tonemap.maxCurvePoints
+
+          These are sorted in order of increasing `Pin`; it is always
+          guaranteed that input values 0.0 and 1.0 are included in the list to
+          define a complete mapping. For input values between control points,
+          the camera device must linearly interpolate between the control
+          points.
+
+          Each curve can have an independent number of points, and the number
+          of points can be less than max (that is, the request doesn't have to
+          always provide a curve with number of points equivalent to
+          android.tonemap.maxCurvePoints).
+
+          A few examples, and their corresponding graphical mappings; these
+          only specify the red channel and the precision is limited to 4
+          digits, for conciseness.
+
+          Linear mapping:
+
+              curveRed = [ (0, 0), (1.0, 1.0) ]
+
+          ![Linear mapping curve](android.tonemap.curveRed/linear_tonemap.png)
+
+          Invert mapping:
+
+              curveRed = [ (0, 1.0), (1.0, 0) ]
+
+          ![Inverting mapping curve](android.tonemap.curveRed/inverse_tonemap.png)
+
+          Gamma 1/2.2 mapping, with 16 control points:
+
+              curveRed = [
+                (0.0000, 0.0000), (0.0667, 0.2920), (0.1333, 0.4002), (0.2000, 0.4812),
+                (0.2667, 0.5484), (0.3333, 0.6069), (0.4000, 0.6594), (0.4667, 0.7072),
+                (0.5333, 0.7515), (0.6000, 0.7928), (0.6667, 0.8317), (0.7333, 0.8685),
+                (0.8000, 0.9035), (0.8667, 0.9370), (0.9333, 0.9691), (1.0000, 1.0000) ]
+
+          ![Gamma = 1/2.2 tonemapping curve](android.tonemap.curveRed/gamma_tonemap.png)
+
+          Standard sRGB gamma mapping, per IEC 61966-2-1:1999, with 16 control points:
+
+              curveRed = [
+                (0.0000, 0.0000), (0.0667, 0.2864), (0.1333, 0.4007), (0.2000, 0.4845),
+                (0.2667, 0.5532), (0.3333, 0.6125), (0.4000, 0.6652), (0.4667, 0.7130),
+                (0.5333, 0.7569), (0.6000, 0.7977), (0.6667, 0.8360), (0.7333, 0.8721),
+                (0.8000, 0.9063), (0.8667, 0.9389), (0.9333, 0.9701), (1.0000, 1.0000) ]
+
+          ![sRGB tonemapping curve](android.tonemap.curveRed/srgb_tonemap.png)
+        </details>
+        <hal_details>
+            This entry is created by the framework from the curveRed, curveGreen and
+            curveBlue entries.
+        </hal_details>
+        </entry>
+        <entry name="mode" type="byte" visibility="public" enum="true"
+               hwlevel="full">
+          <enum>
+            <value>CONTRAST_CURVE
+              <notes>Use the tone mapping curve specified in
+              the android.tonemap.curve* entries.
+
+              All color enhancement and tonemapping must be disabled, except
+              for applying the tonemapping curve specified by
+              android.tonemap.curve.
+
+              Must not slow down frame rate relative to raw
+              sensor output.
+              </notes>
+            </value>
+            <value>FAST
+              <notes>
+              Advanced gamma mapping and color enhancement may be applied, without
+              reducing frame rate compared to raw sensor output.
+              </notes>
+            </value>
+            <value>HIGH_QUALITY
+              <notes>
+              High-quality gamma mapping and color enhancement will be applied, at
+              the cost of possibly reduced frame rate compared to raw sensor output.
+              </notes>
+            </value>
+            <value>GAMMA_VALUE
+              <notes>
+              Use the gamma value specified in android.tonemap.gamma to perform
+              tonemapping.
+
+              All color enhancement and tonemapping must be disabled, except
+              for applying the tonemapping curve specified by android.tonemap.gamma.
+
+              Must not slow down frame rate relative to raw sensor output.
+              </notes>
+            </value>
+            <value>PRESET_CURVE
+              <notes>
+              Use the preset tonemapping curve specified in
+              android.tonemap.presetCurve to perform tonemapping.
+
+              All color enhancement and tonemapping must be disabled, except
+              for applying the tonemapping curve specified by
+              android.tonemap.presetCurve.
+
+              Must not slow down frame rate relative to raw sensor output.
+              </notes>
+            </value>
+          </enum>
+          <description>High-level global contrast/gamma/tonemapping control.
+          </description>
+          <range>android.tonemap.availableToneMapModes</range>
+          <details>
+          When switching to an application-defined contrast curve by setting
+          android.tonemap.mode to CONTRAST_CURVE, the curve is defined
+          per-channel with a set of `(in, out)` points that specify the
+          mapping from input high-bit-depth pixel value to the output
+          low-bit-depth value.  Since the actual pixel ranges of both input
+          and output may change depending on the camera pipeline, the values
+          are specified by normalized floating-point numbers.
+
+          More-complex color mapping operations such as 3D color look-up
+          tables, selective chroma enhancement, or other non-linear color
+          transforms will be disabled when android.tonemap.mode is
+          CONTRAST_CURVE.
+
+          When using either FAST or HIGH_QUALITY, the camera device will
+          emit its own tonemap curve in android.tonemap.curve.
+          These values are always available, and as close as possible to the
+          actually used nonlinear/nonglobal transforms.
+
+          If a request is sent with CONTRAST_CURVE with the camera device's
+          provided curve in FAST or HIGH_QUALITY, the image's tonemap will be
+          roughly the same.</details>
+        </entry>
+      </controls>
+      <static>
+        <entry name="maxCurvePoints" type="int32" visibility="public"
+               hwlevel="full">
+          <description>Maximum number of supported points in the
+            tonemap curve that can be used for android.tonemap.curve.
+          </description>
+          <details>
+          If the actual number of points provided by the application (in android.tonemap.curve*) is
+          less than this maximum, the camera device will resample the curve to its internal
+          representation, using linear interpolation.
+
+          The output curves in the result metadata may have a different number
+          of points than the input curves, and will represent the actual
+          hardware curves used as closely as possible when linearly interpolated.
+          </details>
+          <hal_details>
+          This value must be at least 64. This should be at least 128.
+          </hal_details>
+        </entry>
+        <entry name="availableToneMapModes" type="byte" visibility="public"
+        type_notes="list of enums" container="array" typedef="enumList" hwlevel="full">
+          <array>
+            <size>n</size>
+          </array>
+          <description>
+          List of tonemapping modes for android.tonemap.mode that are supported by this camera
+          device.
+          </description>
+          <range>Any value listed in android.tonemap.mode</range>
+          <details>
+          Camera devices that support the MANUAL_POST_PROCESSING capability will always contain
+          at least one of the below mode combinations:
+
+          * CONTRAST_CURVE, FAST and HIGH_QUALITY
+          * GAMMA_VALUE, PRESET_CURVE, FAST and HIGH_QUALITY
+
+          This includes all FULL level devices.
+          </details>
+          <hal_details>
+            HAL must support both FAST and HIGH_QUALITY if automatic tonemap control is available
+            on the camera device, but the underlying implementation can be the same for both modes.
+            That is, if the highest quality implementation on the camera device does not slow down
+            capture rate, then FAST and HIGH_QUALITY will generate the same output.
+          </hal_details>
+        </entry>
+      </static>
+      <dynamic>
+        <clone entry="android.tonemap.curveBlue" kind="controls">
+        </clone>
+        <clone entry="android.tonemap.curveGreen" kind="controls">
+        </clone>
+        <clone entry="android.tonemap.curveRed" kind="controls">
+        </clone>
+        <clone entry="android.tonemap.curve" kind="controls">
+        </clone>
+        <clone entry="android.tonemap.mode" kind="controls">
+        </clone>
+      </dynamic>
+      <controls>
+        <entry name="gamma" type="float" visibility="public">
+          <description> Tonemapping curve to use when android.tonemap.mode is
+          GAMMA_VALUE
+          </description>
+          <details>
+          The tonemap curve will be defined by the following formula:
+          * OUT = pow(IN, 1.0 / gamma)
+          where IN and OUT are the input and output pixel values scaled to the range [0.0, 1.0],
+          pow is the power function and gamma is the gamma value specified by this
+          key.
+
+          The same curve will be applied to all color channels. The camera device
+          may clip the input gamma value to its supported range. The actual applied
+          value will be returned in capture result.
+
+          The valid range of gamma value varies on different devices, but values
+          within [1.0, 5.0] are guaranteed not to be clipped.
+          </details>
+        </entry>
+        <entry name="presetCurve" type="byte" visibility="public" enum="true">
+          <enum>
+            <value>SRGB
+              <notes>Tonemapping curve is defined by sRGB</notes>
+            </value>
+            <value>REC709
+              <notes>Tonemapping curve is defined by ITU-R BT.709</notes>
+            </value>
+          </enum>
+          <description> Tonemapping curve to use when android.tonemap.mode is
+          PRESET_CURVE
+          </description>
+          <details>
+          The tonemap curve will be defined by the specified standard.
+
+          sRGB (approximated by 16 control points):
+
+          ![sRGB tonemapping curve](android.tonemap.curveRed/srgb_tonemap.png)
+
+          Rec. 709 (approximated by 16 control points):
+
+          ![Rec. 709 tonemapping curve](android.tonemap.curveRed/rec709_tonemap.png)
+
+          Note that above figures show a 16 control points approximation of preset
+          curves. Camera devices may apply a different approximation to the curve.
+          </details>
+        </entry>
+      </controls>
+      <dynamic>
+        <clone entry="android.tonemap.gamma" kind="controls">
+        </clone>
+        <clone entry="android.tonemap.presetCurve" kind="controls">
+        </clone>
+      </dynamic>
+    </section>
+    <section name="led">
+      <controls>
+        <entry name="transmit" type="byte" visibility="hidden" optional="true"
+               enum="true" typedef="boolean">
+          <enum>
+            <value>OFF</value>
+            <value>ON</value>
+          </enum>
+          <description>This LED is nominally used to indicate to the user
+          that the camera is powered on and may be streaming images back to the
+          Application Processor. In certain rare circumstances, the OS may
+          disable this when video is processed locally and not transmitted to
+          any untrusted applications.
+
+          In particular, the LED *must* always be on when the data could be
+          transmitted off the device. The LED *should* always be on whenever
+          data is stored locally on the device.
+
+          The LED *may* be off if a trusted application is using the data that
+          doesn't violate the above rules.
+          </description>
+        </entry>
+      </controls>
+      <dynamic>
+        <clone entry="android.led.transmit" kind="controls"></clone>
+      </dynamic>
+      <static>
+        <entry name="availableLeds" type="byte" visibility="hidden" optional="true"
+               enum="true"
+               container="array">
+          <array>
+            <size>n</size>
+          </array>
+          <enum>
+            <value>TRANSMIT
+              <notes>android.led.transmit control is used.</notes>
+            </value>
+          </enum>
+          <description>A list of camera LEDs that are available on this system.
+          </description>
+        </entry>
+      </static>
+    </section>
+    <section name="info">
+      <static>
+        <entry name="supportedHardwareLevel" type="byte" visibility="public"
+               enum="true" hwlevel="legacy">
+          <enum>
+            <value>
+              LIMITED
+              <notes>
+              This camera device has only limited capabilities.
+              </notes>
+            </value>
+            <value>
+              FULL
+              <notes>
+              This camera device is capable of supporting advanced imaging applications.
+              </notes>
+            </value>
+            <value>
+              LEGACY
+              <notes>
+              This camera device is running in backward compatibility mode.
+              </notes>
+            </value>
+          </enum>
+          <description>
+          Generally classifies the overall set of the camera device functionality.
+          </description>
+          <details>
+          Camera devices will come in three flavors: LEGACY, LIMITED and FULL.
+
+          A FULL device will support the below capabilities:
+
+          * BURST_CAPTURE capability (android.request.availableCapabilities contains BURST_CAPTURE)
+          * Per frame control (android.sync.maxLatency `==` PER_FRAME_CONTROL)
+          * Manual sensor control (android.request.availableCapabilities contains MANUAL_SENSOR)
+          * Manual post-processing control (android.request.availableCapabilities contains
+            MANUAL_POST_PROCESSING)
+          * At least 3 processed (but not stalling) format output streams
+            (android.request.maxNumOutputProc `&gt;=` 3)
+          * The required stream configurations defined in android.scaler.availableStreamConfigurations
+          * The required exposure time range defined in android.sensor.info.exposureTimeRange
+          * The required maxFrameDuration defined in android.sensor.info.maxFrameDuration
+
+          A LIMITED device may have some or none of the above characteristics.
+          To find out more refer to android.request.availableCapabilities.
+
+          Some features are not part of any particular hardware level or capability and must be
+          queried separately. These include:
+
+          * Calibrated timestamps (android.sensor.info.timestampSource `==` REALTIME)
+          * Precision lens control (android.lens.info.focusDistanceCalibration `==` CALIBRATED)
+          * Face detection (android.statistics.info.availableFaceDetectModes)
+          * Optical or electrical image stabilization
+            (android.lens.info.availableOpticalStabilization,
+             android.control.availableVideoStabilizationModes)
+
+          A LEGACY device does not support per-frame control, manual sensor control, manual
+          post-processing, arbitrary cropping regions, and has relaxed performance constraints.
+
+          Each higher level supports everything the lower level supports
+          in this order: FULL `&gt;` LIMITED `&gt;` LEGACY.
+
+          Note:
+          Pre-API level 23, FULL devices also supported arbitrary cropping region
+          (android.scaler.croppingType `==` FREEFORM); this requirement was relaxed in API level 23,
+          and FULL devices may only support CENTERED cropping.
+          </details>
+          <hal_details>
+          The camera 3 HAL device can implement one of two possible
+          operational modes; limited and full. Full support is
+          expected from new higher-end devices. Limited mode has
+          hardware requirements roughly in line with those for a
+          camera HAL device v1 implementation, and is expected from
+          older or inexpensive devices. Full is a strict superset of
+          limited, and they share the same essential operational flow.
+
+          For full details refer to "S3. Operational Modes" in camera3.h
+
+          Camera HAL3+ must not implement LEGACY mode. It is there
+          for backwards compatibility in the `android.hardware.camera2`
+          user-facing API only.
+          </hal_details>
+        </entry>
+      </static>
+    </section>
+    <section name="blackLevel">
+      <controls>
+        <entry name="lock" type="byte" visibility="public" enum="true"
+               typedef="boolean" hwlevel="full">
+          <enum>
+            <value>OFF</value>
+            <value>ON</value>
+          </enum>
+          <description> Whether black-level compensation is locked
+          to its current values, or is free to vary.</description>
+          <details>When set to `true` (ON), the values used for black-level
+          compensation will not change until the lock is set to
+          `false` (OFF).
+
+          Since changes to certain capture parameters (such as
+          exposure time) may require resetting of black level
+          compensation, the camera device must report whether setting
+          the black level lock was successful in the output result
+          metadata.
+
+          For example, if a sequence of requests is as follows:
+
+          * Request 1: Exposure = 10ms, Black level lock = OFF
+          * Request 2: Exposure = 10ms, Black level lock = ON
+          * Request 3: Exposure = 10ms, Black level lock = ON
+          * Request 4: Exposure = 20ms, Black level lock = ON
+          * Request 5: Exposure = 20ms, Black level lock = ON
+          * Request 6: Exposure = 20ms, Black level lock = ON
+
+          And the exposure change in Request 4 requires the camera
+          device to reset the black level offsets, then the output
+          result metadata is expected to be:
+
+          * Result 1: Exposure = 10ms, Black level lock = OFF
+          * Result 2: Exposure = 10ms, Black level lock = ON
+          * Result 3: Exposure = 10ms, Black level lock = ON
+          * Result 4: Exposure = 20ms, Black level lock = OFF
+          * Result 5: Exposure = 20ms, Black level lock = ON
+          * Result 6: Exposure = 20ms, Black level lock = ON
+
+          This indicates to the application that on frame 4, black
+          levels were reset due to exposure value changes, and pixel
+          values may not be consistent across captures.
+
+          The camera device will maintain the lock to the extent
+          possible, only overriding the lock to OFF when changes to
+          other request parameters require a black level recalculation
+          or reset.
+          </details>
+          <hal_details>
+          If for some reason black level locking is no longer possible
+          (for example, the analog gain has changed, which forces
+          black level offsets to be recalculated), then the HAL must
+          override this request (and it must report 'OFF' when this
+          does happen) until the next capture for which locking is
+          possible again.</hal_details>
+          <tag id="HAL2" />
+        </entry>
+      </controls>
+      <dynamic>
+        <clone entry="android.blackLevel.lock"
+          kind="controls">
+          <details>
+            Whether the black level offset was locked for this frame.  Should be
+            ON if android.blackLevel.lock was ON in the capture request, unless
+            a change in other capture settings forced the camera device to
+            perform a black level reset.
+          </details>
+        </clone>
+      </dynamic>
+    </section>
+    <section name="sync">
+      <dynamic>
+        <entry name="frameNumber" type="int64" visibility="hidden" enum="true"
+               hwlevel="legacy">
+          <enum>
+            <value id="-1">CONVERGING
+              <notes>
+              The current result is not yet fully synchronized to any request.
+
+              Synchronization is in progress, and reading metadata from this
+              result may include a mix of data that have taken effect since the
+              last synchronization time.
+
+              In some future result, within android.sync.maxLatency frames,
+              this value will update to the actual frame number of the request
+              the result is guaranteed to be synchronized to (as long as the
+              request settings remain constant).
+            </notes>
+            </value>
+            <value id="-2">UNKNOWN
+              <notes>
+              The current result's synchronization status is unknown.
+
+              The result may have already converged, or it may be in
+              progress.  Reading from this result may include some mix
+              of settings from past requests.
+
+              After a settings change, the new settings will eventually all
+              take effect for the output buffers and results. However, this
+              value will not change when that happens. Altering settings
+              rapidly may provide outcomes using mixes of settings from recent
+              requests.
+
+              This value is intended primarily for backwards compatibility with
+              the older camera implementations (for android.hardware.Camera).
+            </notes>
+            </value>
+          </enum>
+          <description>The frame number corresponding to the last request
+          with which the output result (metadata + buffers) has been fully
+          synchronized.</description>
+          <range>Either a non-negative value corresponding to a
+          `frame_number`, or one of the two enums (CONVERGING / UNKNOWN).
+          </range>
+          <details>
+          When a request is submitted to the camera device, there is usually a
+          delay of several frames before the controls get applied. A camera
+          device may either choose to account for this delay by implementing a
+          pipeline and carefully submit well-timed atomic control updates, or
+          it may start streaming control changes that span over several frame
+          boundaries.
+
+          In the latter case, whenever a request's settings change relative to
+          the previous submitted request, the full set of changes may take
+          multiple frame durations to fully take effect. Some settings may
+          take effect sooner (in less frame durations) than others.
+
+          While a set of control changes are being propagated, this value
+          will be CONVERGING.
+
+          Once it is fully known that a set of control changes have been
+          finished propagating, and the resulting updated control settings
+          have been read back by the camera device, this value will be set
+          to a non-negative frame number (corresponding to the request to
+          which the results have synchronized to).
+
+          Older camera device implementations may not have a way to detect
+          when all camera controls have been applied, and will always set this
+          value to UNKNOWN.
+
+          FULL capability devices will always have this value set to the
+          frame number of the request corresponding to this result.
+
+          _Further details_:
+
+          * Whenever a request differs from the last request, any future
+          results not yet returned may have this value set to CONVERGING (this
+          could include any in-progress captures not yet returned by the camera
+          device, for more details see pipeline considerations below).
+          * Submitting a series of multiple requests that differ from the
+          previous request (e.g. r1, r2, r3 s.t. r1 != r2 != r3)
+          moves the new synchronization frame to the last non-repeating
+          request (using the smallest frame number from the contiguous list of
+          repeating requests).
+          * Submitting the same request repeatedly will not change this value
+          to CONVERGING, if it was already a non-negative value.
+          * When this value changes to non-negative, that means that all of the
+          metadata controls from the request have been applied, all of the
+          metadata controls from the camera device have been read to the
+          updated values (into the result), and all of the graphics buffers
+          corresponding to this result are also synchronized to the request.
+
+          _Pipeline considerations_:
+
+          Submitting a request with updated controls relative to the previously
+          submitted requests may also invalidate the synchronization state
+          of all the results corresponding to currently in-flight requests.
+
+          In other words, results for this current request and up to
+          android.request.pipelineMaxDepth prior requests may have their
+          android.sync.frameNumber change to CONVERGING.
+          </details>
+          <hal_details>
+          Using UNKNOWN here is illegal unless android.sync.maxLatency
+          is also UNKNOWN.
+
+          FULL capability devices should simply set this value to the
+          `frame_number` of the request this result corresponds to.
+          </hal_details>
+          <tag id="V1" />
+        </entry>
+      </dynamic>
+      <static>
+        <entry name="maxLatency" type="int32" visibility="public" enum="true"
+               hwlevel="legacy">
+          <enum>
+            <value id="0">PER_FRAME_CONTROL
+              <notes>
+              Every frame has the requests immediately applied.
+
+              Changing controls over multiple requests one after another will
+              produce results that have those controls applied atomically
+              each frame.
+
+              All FULL capability devices will have this as their maxLatency.
+              </notes>
+            </value>
+            <value id="-1">UNKNOWN
+              <notes>
+              Each new frame has some subset (potentially the entire set)
+              of the past requests applied to the camera settings.
+
+              By submitting a series of identical requests, the camera device
+              will eventually have the camera settings applied, but it is
+              unknown when that exact point will be.
+
+              All LEGACY capability devices will have this as their maxLatency.
+              </notes>
+            </value>
+          </enum>
+          <description>
+          The maximum number of frames that can occur after a request
+          (different than the previous) has been submitted, and before the
+          result's state becomes synchronized.
+          </description>
+          <units>Frame counts</units>
+          <range>A positive value, PER_FRAME_CONTROL, or UNKNOWN.</range>
+          <details>
+          This defines the maximum distance (in number of metadata results),
+          between the frame number of the request that has new controls to apply
+          and the frame number of the result that has all the controls applied.
+
+          In other words this acts as an upper boundary for how many frames
+          must occur before the camera device knows for a fact that the new
+          submitted camera settings have been applied in outgoing frames.
+          </details>
+          <hal_details>
+          For example if maxLatency was 2,
+
+              initial request = X (repeating)
+              request1 = X
+              request2 = Y
+              request3 = Y
+              request4 = Y
+
+              where requestN has frameNumber N, and the first of the repeating
+              initial request's has frameNumber F (and F &lt; 1).
+
+              initial result = X' + { android.sync.frameNumber == F }
+              result1 = X' + { android.sync.frameNumber == F }
+              result2 = X' + { android.sync.frameNumber == CONVERGING }
+              result3 = X' + { android.sync.frameNumber == CONVERGING }
+              result4 = X' + { android.sync.frameNumber == 2 }
+
+              where resultN has frameNumber N.
+
+          Since `result4` has a `frameNumber == 4` and
+          `android.sync.frameNumber == 2`, the distance is clearly
+          `4 - 2 = 2`.
+
+          Use `frame_count` from camera3_request_t instead of
+          android.request.frameCount or
+          `@link{android.hardware.camera2.CaptureResult#getFrameNumber}`.
+
+          LIMITED devices are strongly encouraged to use a non-negative
+          value. If UNKNOWN is used here then app developers do not have a way
+          to know when sensor settings have been applied.
+          </hal_details>
+          <tag id="V1" />
+        </entry>
+      </static>
+    </section>
+    <section name="reprocess">
+      <controls>
+        <entry name="effectiveExposureFactor" type="float" visibility="public" hwlevel="limited">
+            <description>
+            The amount of exposure time increase factor applied to the original output
+            frame by the application processing before sending for reprocessing.
+            </description>
+            <units>Relative exposure time increase factor.</units>
+            <range> &amp;gt;= 1.0</range>
+            <details>
+            This is optional, and will be supported if the camera device supports YUV_REPROCESSING
+            capability (android.request.availableCapabilities contains YUV_REPROCESSING).
+
+            For some YUV reprocessing use cases, the application may choose to filter the original
+            output frames to effectively reduce the noise to the same level as a frame that was
+            captured with longer exposure time. To be more specific, assuming the original captured
+            images were captured with a sensitivity of S and an exposure time of T, the model in
+            the camera device is that the amount of noise in the image would be approximately what
+            would be expected if the original capture parameters had been a sensitivity of
+            S/effectiveExposureFactor and an exposure time of T*effectiveExposureFactor, rather
+            than S and T respectively. If the captured images were processed by the application
+            before being sent for reprocessing, then the application may have used image processing
+            algorithms and/or multi-frame image fusion to reduce the noise in the
+            application-processed images (input images). By using the effectiveExposureFactor
+            control, the application can communicate to the camera device the actual noise level
+            improvement in the application-processed image. With this information, the camera
+            device can select appropriate noise reduction and edge enhancement parameters to avoid
+            excessive noise reduction (android.noiseReduction.mode) and insufficient edge
+            enhancement (android.edge.mode) being applied to the reprocessed frames.
+
+            For example, for multi-frame image fusion use case, the application may fuse
+            multiple output frames together to a final frame for reprocessing. When N images are
+            fused into 1 image for reprocessing, the exposure time increase factor could be up to
+            square root of N (based on a simple photon shot noise model). The camera device will
+            adjust the reprocessing noise reduction and edge enhancement parameters accordingly to
+            produce the best quality images.
+
+            This is a relative factor; 1.0 indicates the application hasn't processed the input
+            buffer in a way that affects its effective exposure time.
+
+            This control is only effective for YUV reprocessing capture request. For noise
+            reduction reprocessing, it is only effective when `android.noiseReduction.mode != OFF`.
+            Similarly, for edge enhancement reprocessing, it is only effective when
+            `android.edge.mode != OFF`.
+            </details>
+          <tag id="REPROC" />
+        </entry>
+      </controls>
+      <dynamic>
+      <clone entry="android.reprocess.effectiveExposureFactor" kind="controls">
+      </clone>
+      </dynamic>
+      <static>
+        <entry name="maxCaptureStall" type="int32" visibility="public" hwlevel="limited">
+          <description>
+          The maximal camera capture pipeline stall (in unit of frame count) introduced by a
+          reprocess capture request.
+          </description>
+          <units>Number of frames.</units>
+          <range> &amp;lt;= 4</range>
+          <details>
+          The key describes the maximal interference that one reprocess (input) request
+          can introduce to the camera simultaneous streaming of regular (output) capture
+          requests, including repeating requests.
+
+          When a reprocessing capture request is submitted while a camera output repeating request
+          (e.g. preview) is being served by the camera device, it may preempt the camera capture
+          pipeline for at least one frame duration so that the camera device is unable to process
+          the following capture request in time for the next sensor start of exposure boundary.
+          When this happens, the application may observe a capture time gap (longer than one frame
+          duration) between adjacent capture output frames, which usually exhibits as preview
+          glitch if the repeating request output targets include a preview surface. This key gives
+            the worst-case number of frame stalls introduced by one reprocess request with any kind of
+          formats/sizes combination.
+
+          If this key reports 0, it means a reprocess request doesn't introduce any glitch to the
+          ongoing camera repeating request outputs, as if this reprocess request is never issued.
+
+          This key is supported if the camera device supports PRIVATE or YUV reprocessing (
+          i.e. android.request.availableCapabilities contains PRIVATE_REPROCESSING or
+          YUV_REPROCESSING).
+          </details>
+          <tag id="REPROC" />
+        </entry>
+      </static>
+    </section>
+    <section name="depth">
+      <static>
+        <entry name="maxDepthSamples" type="int32" visibility="system" hwlevel="limited">
+          <description>Maximum number of points that a depth point cloud may contain.
+          </description>
+          <details>
+            If a camera device supports outputting depth range data in the form of a depth point
+            cloud ({@link android.graphics.ImageFormat#DEPTH_POINT_CLOUD}), this is the maximum
+            number of points an output buffer may contain.
+
+            Any given buffer may contain between 0 and maxDepthSamples points, inclusive.
+            If output in the depth point cloud format is not supported, this entry will
+            not be defined.
+          </details>
+          <tag id="DEPTH" />
+        </entry>
+        <entry name="availableDepthStreamConfigurations" type="int32" visibility="hidden"
+          enum="true" container="array"
+          typedef="streamConfiguration" hwlevel="limited">
+          <array>
+            <size>n</size>
+            <size>4</size>
+          </array>
+          <enum>
+            <value>OUTPUT</value>
+            <value>INPUT</value>
+          </enum>
+          <description>The available depth dataspace stream
+          configurations that this camera device supports
+          (i.e. format, width, height, output/input stream).
+          </description>
+          <details>
+            These are output stream configurations for use with
+            dataSpace HAL_DATASPACE_DEPTH. The configurations are
+            listed as `(format, width, height, input?)` tuples.
+
+            Only devices that support depth output for at least
+            the HAL_PIXEL_FORMAT_Y16 dense depth map may include
+            this entry.
+
+            A device that also supports the HAL_PIXEL_FORMAT_BLOB
+            sparse depth point cloud must report a single entry for
+            the format in this list as `(HAL_PIXEL_FORMAT_BLOB,
+            android.depth.maxDepthSamples, 1, OUTPUT)` in addition to
+            the entries for HAL_PIXEL_FORMAT_Y16.
+          </details>
+          <tag id="DEPTH" />
+        </entry>
+        <entry name="availableDepthMinFrameDurations" type="int64" visibility="hidden"
+               container="array"
+               typedef="streamConfigurationDuration" hwlevel="limited">
+          <array>
+            <size>4</size>
+            <size>n</size>
+          </array>
+          <description>This lists the minimum frame duration for each
+          format/size combination for depth output formats.
+          </description>
+          <units>(format, width, height, ns) x n</units>
+          <details>
+          This should correspond to the frame duration when only that
+          stream is active, with all processing (typically in android.*.mode)
+          set to either OFF or FAST.
+
+          When multiple streams are used in a request, the minimum frame
+          duration will be max(individual stream min durations).
+
+          The minimum frame duration of a stream (of a particular format, size)
+          is the same regardless of whether the stream is input or output.
+
+          See android.sensor.frameDuration and
+          android.scaler.availableStallDurations for more details about
+          calculating the max frame rate.
+
+          (Keep in sync with {@link
+          android.hardware.camera2.params.StreamConfigurationMap#getOutputMinFrameDuration})
+          </details>
+          <tag id="DEPTH" />
+        </entry>
+        <entry name="availableDepthStallDurations" type="int64" visibility="hidden"
+               container="array" typedef="streamConfigurationDuration" hwlevel="limited">
+          <array>
+            <size>4</size>
+            <size>n</size>
+          </array>
+          <description>This lists the maximum stall duration for each
+          output format/size combination for depth streams.
+          </description>
+          <units>(format, width, height, ns) x n</units>
+          <details>
+          A stall duration is how much extra time would get added
+          to the normal minimum frame duration for a repeating request
+          that has streams with non-zero stall.
+
+          This functions similarly to
+          android.scaler.availableStallDurations for depth
+          streams.
+
+          All depth output stream formats may have a nonzero stall
+          duration.
+          </details>
+          <tag id="DEPTH" />
+        </entry>
+        <entry name="depthIsExclusive" type="byte" visibility="public"
+               enum="true" typedef="boolean" hwlevel="limited">
+          <enum>
+            <value>FALSE</value>
+            <value>TRUE</value>
+          </enum>
+          <description>Indicates whether a capture request may target both a
+          DEPTH16 / DEPTH_POINT_CLOUD output, and normal color outputs (such as
+          YUV_420_888, JPEG, or RAW) simultaneously.
+          </description>
+          <details>
+          If TRUE, including both depth and color outputs in a single
+          capture request is not supported. An application must interleave color
+          and depth requests.  If FALSE, a single request can target both types
+          of output.
+
+          Typically, this restriction exists on camera devices that
+          need to emit a specific pattern or wavelength of light to
+          measure depth values, which causes the color image to be
+          corrupted during depth measurement.
+          </details>
+        </entry>
+      </static>
+    </section>
+  </namespace>
+</metadata>
diff --git a/media/camera/docs/metadata_properties.xsd b/media/camera/docs/metadata_properties.xsd
new file mode 100644
index 0000000..a71a6c9
--- /dev/null
+++ b/media/camera/docs/metadata_properties.xsd
@@ -0,0 +1,313 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- Copyright (C) 2012 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<schema xmlns="http://www.w3.org/2001/XMLSchema"
+    xmlns:tns="http://schemas.android.com/service/camera/metadata/"
+    targetNamespace="http://schemas.android.com/service/camera/metadata/"
+    elementFormDefault="qualified">
+
+    <element name="metadata" type="tns:MetadataType">
+        <key name="TypeNameKey">
+            <selector xpath="tns:types/tns:typedef" />
+            <field xpath="@name" />
+        </key>
+
+        <!-- ensure that <entry typedef="..."> refers to a valid <typedef name='..."/> -->
+        <keyref name="TypeNameKeyRef" refer="tns:TypeNameKey">
+            <selector xpath=".//tns:entry" /> <!-- recursively find any descendant entry -->
+            <field xpath="@typedef" />
+        </keyref>
+    </element>
+
+    <complexType name="MetadataType">
+        <sequence>
+            <element name="tags" type="tns:TagsType" maxOccurs="1" minOccurs="0"></element>
+            <element name="types" type="tns:TypesType" maxOccurs="1" minOccurs="0"></element>
+            <element name="namespace" type="tns:NamespaceType"
+                maxOccurs="unbounded" minOccurs="1">
+            </element>
+        </sequence>
+    </complexType>
+
+    <complexType name="NamespaceType">
+        <sequence>
+            <element name="section" type="tns:SectionType" maxOccurs="unbounded" minOccurs="1"></element>
+        </sequence>
+        <attribute name="name" type="string" use="required"></attribute>
+    </complexType>
+
+    <complexType name="SectionType">
+        <sequence>
+            <choice maxOccurs="unbounded">
+                <element name="controls" type="tns:SectionKindType" maxOccurs="unbounded" minOccurs="0"></element>
+                <element name="static" type="tns:SectionKindType" maxOccurs="unbounded" minOccurs="0"></element>
+                <element name="dynamic" type="tns:SectionKindType" maxOccurs="unbounded" minOccurs="0"></element>
+            </choice>
+        </sequence>
+        <attribute name="name" type="string" use="required"></attribute>
+    </complexType>
+
+    <complexType name="SectionKindType">
+        <complexContent>
+            <extension base="tns:BaseNamespaceOrSectionKindType">
+            </extension>
+        </complexContent>
+    </complexType>
+
+    <complexType name="InnerNamespaceType">
+        <complexContent>
+            <extension base="tns:BaseNamespaceOrSectionKindType">
+                <attribute name="name" type="string" use="required"></attribute>
+            </extension>
+        </complexContent>
+    </complexType>
+
+    <complexType name="BaseNamespaceOrSectionKindType">
+        <sequence maxOccurs="unbounded">
+            <choice>
+                <element name="namespace" type="tns:InnerNamespaceType"></element>
+                <element name="entry" type="tns:EntryType"></element>
+                <element name="clone" type="tns:CloneType"></element>
+            </choice>
+        </sequence>
+    </complexType>
+
+    <complexType name="TagsType">
+        <sequence>
+            <element name="tag" type="tns:TagType" maxOccurs="unbounded" minOccurs="0"></element>
+        </sequence>
+    </complexType>
+
+    <complexType name="TagType">
+        <simpleContent>
+            <extension base="string">
+                <attribute name="id" type="string" use="required"></attribute>
+            </extension>
+        </simpleContent>
+    </complexType>
+
+    <complexType name="TypesType">
+        <sequence>
+            <element name="typedef" type="tns:TypedefType" maxOccurs="unbounded" minOccurs="0">
+            </element>
+        </sequence>
+    </complexType>
+
+    <complexType name="TypedefType">
+        <sequence>
+            <element name="language" type="tns:LanguageType" maxOccurs="unbounded" minOccurs="1"></element>
+        </sequence>
+        <attribute name="name" type="string" use="required" />
+    </complexType>
+
+    <complexType name="LanguageType">
+        <simpleContent>
+            <extension base="string">
+                <attribute name="name" use="required">
+                    <simpleType>
+                        <restriction base="string">
+                            <enumeration value="java" />
+                            <enumeration value="c" />
+                            <enumeration value="c++" />
+                        </restriction>
+                    </simpleType>
+                </attribute>
+            </extension>
+        </simpleContent>
+    </complexType>
+
+    <group name="BaseEntryGroup">
+        <sequence>
+            <element name="description" type="string" maxOccurs="1"
+                minOccurs="0">
+            </element>
+            <element name="units" type="string" maxOccurs="1"
+                minOccurs="0">
+            </element>
+            <element name="range" type="string" maxOccurs="1"
+                minOccurs="0">
+            </element>
+            <element name="details" type="string" maxOccurs="1"
+                minOccurs="0">
+            </element>
+            <element name="hal_details" type="string" maxOccurs="1"
+                minOccurs="0">
+            </element>
+
+            <element name="tag" type="tns:TagType" maxOccurs="unbounded"
+                minOccurs="0">
+            </element>
+        </sequence>
+    </group>
+
+    <complexType name="EntryType">
+        <sequence>
+            <element name="array" type="tns:ArrayType" maxOccurs="1" minOccurs="0"></element>
+            <element name="enum" type="tns:EnumType" maxOccurs="1" minOccurs="0"></element>
+            <element name="tuple" type="tns:TupleType" maxOccurs="1" minOccurs="0"></element>
+
+            <group ref="tns:BaseEntryGroup" />
+        </sequence>
+
+        <attribute name="name" type="string" use="required" />
+        <attribute name="type" use="required">
+            <simpleType>
+                <restriction base="string">
+                    <enumeration value="byte" />
+                    <enumeration value="int32" />
+                    <enumeration value="int64" />
+                    <enumeration value="float" />
+                    <enumeration value="double" />
+                    <enumeration value="rational" />
+                </restriction>
+            </simpleType>
+        </attribute>
+        <attribute name="type_notes" type="string" />
+        <attribute name="container">
+            <simpleType>
+                <restriction base="string">
+                    <enumeration value="array" />
+                    <enumeration value="tuple" />
+                </restriction>
+            </simpleType>
+        </attribute>
+        <attribute name="enum">
+            <simpleType>
+                <restriction base="string">
+                    <enumeration value="true"></enumeration>
+                    <enumeration value="false"></enumeration>
+                </restriction>
+            </simpleType>
+        </attribute>
+        <attribute name="visibility">
+            <simpleType>
+                <restriction base="string">
+                    <enumeration value="system" /> <!-- do not expose to java -->
+                    <enumeration value="hidden" /> <!-- java as @hide -->
+                    <enumeration value="public" /> <!-- java as public SDK -->
+                </restriction>
+            </simpleType>
+        </attribute>
+        <attribute name="synthetic" default="false">
+            <simpleType>
+                <restriction base="string">
+                    <enumeration value="false" /> <!-- expose to C -->
+                    <enumeration value="true" /> <!-- do not expose to C -->
+                </restriction>
+            </simpleType>
+        </attribute>
+        <attribute name="deprecated" default="false">
+            <simpleType>
+                <restriction base="string">
+                    <enumeration value="false" /> <!-- normal -->
+                    <enumeration value="true" /> <!-- mark @Deprecated -->
+                </restriction>
+            </simpleType>
+        </attribute>
+        <attribute name="optional" default="false">
+            <simpleType>
+                <restriction base="string">
+                    <enumeration value="false" />
+                    <enumeration value="true" />
+                </restriction>
+            </simpleType>
+        </attribute>
+        <attribute name="typedef" type="string" />
+        <attribute name="hwlevel" default="full">
+            <simpleType>
+                <restriction base="string">
+                    <enumeration value="full" />
+                    <enumeration value="limited" />
+                    <enumeration value="legacy" />
+                </restriction>
+            </simpleType>
+        </attribute>
+    </complexType>
+
+    <complexType name="EnumType">
+        <sequence>
+            <element name="value" type="tns:EnumValueType" maxOccurs="unbounded"></element>
+        </sequence>
+    </complexType>
+
+    <complexType name="TupleType">
+        <sequence>
+            <element name="value" type="string" minOccurs="1" maxOccurs="unbounded"></element>
+        </sequence>
+    </complexType>
+
+    <complexType name="ArrayType">
+        <sequence>
+            <element name="size" type="string" minOccurs="1" maxOccurs="unbounded"></element>
+        </sequence>
+    </complexType>
+
+    <complexType name="EnumValueType" mixed="true">
+
+        <sequence>
+            <element name="notes" type="string" minOccurs="0" maxOccurs="1" />
+        </sequence>
+
+        <attribute name="deprecated" default="false">
+            <simpleType>
+                <restriction base="string">
+                    <enumeration value="true"></enumeration>
+                    <enumeration value="false"></enumeration>
+                </restriction>
+            </simpleType>
+        </attribute>
+        <attribute name="optional">
+            <simpleType>
+                <restriction base="string">
+                    <enumeration value="true"></enumeration>
+                    <enumeration value="false"></enumeration>
+                </restriction>
+            </simpleType>
+        </attribute>
+        <attribute name="hidden">
+            <simpleType>
+                <restriction base="string">
+                    <enumeration value="true"></enumeration>
+                    <enumeration value="false"></enumeration>
+                </restriction>
+            </simpleType>
+        </attribute>
+        <attribute name="id" type="string" />
+    </complexType>
+
+    <complexType name="CloneType">
+        <sequence>
+                <group ref="tns:BaseEntryGroup" />
+        </sequence>
+
+        <!--
+             the semantic correctness of the next 2 attributes
+             are validated by metadata_validate.py
+
+             due to the inability of XSD to generate paths recursively
+        -->
+        <attribute name="entry">
+        </attribute>
+        <attribute name="kind">
+            <simpleType>
+                <restriction base="string">
+                    <enumeration value="controls"></enumeration>
+                    <enumeration value="static"></enumeration>
+                    <enumeration value="dynamic"></enumeration>
+                </restriction>
+            </simpleType>
+        </attribute>
+    </complexType>
+</schema>
diff --git a/media/camera/docs/metadata_template.mako b/media/camera/docs/metadata_template.mako
new file mode 100644
index 0000000..360e1e4
--- /dev/null
+++ b/media/camera/docs/metadata_template.mako
@@ -0,0 +1,196 @@
+## -*- coding: utf-8 -*-
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- Copyright (C) 2012 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<metadata
+    xmlns="http://schemas.android.com/service/camera/metadata/"
+    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+    xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata_properties.xsd">
+
+<tags>
+% for tag in metadata.tags:
+  % if tag.description and tag.description.strip():
+  <tag id="${tag.id}">${tag.description | x}</tag>
+  % else:
+  <tag id="${tag.id}"><!-- TODO: fill the tag description --></tag>
+  % endif
+% endfor
+</tags>
+
+<types>
+% for typedef in metadata.types:
+  <typedef name="${typedef.name}">
+    % for (language, klass) in typedef.languages.iteritems():
+      <language name="${language}">${klass | h}</language>
+    % endfor
+  </typedef>
+% endfor
+</types>
+
+% for root in metadata.outer_namespaces:
+<namespace name="${root.name}">
+  % for section in root.sections:
+  <section name="${section.name}">
+
+    % if section.description is not None:
+      <description>${section.description}</description>
+    % endif
+
+    % for kind in section.kinds: # dynamic,static,controls
+      <${kind.name}>
+
+        ## Emit the interior of a kind or namespace node: all nested
+        ## namespaces first, then all entries.
+        <%def name="insert_body(node)">
+            % for nested in node.namespaces:
+                ${insert_namespace(nested)}
+            % endfor
+
+            % for entry in node.entries:
+                ${insert_entry(entry)}
+            % endfor
+        </%def>
+
+        ## Emit a <namespace> element and recurse into its contents.
+        <%def name="insert_namespace(namespace)">
+        <namespace name="${namespace.name}">
+            ${insert_body(namespace)}
+        </namespace>
+        </%def>
+
+        ## Emit one property: either a <clone> reference (when the
+        ## property is a clone of an entry in another kind) or a full
+        ## <entry> element with all of its attributes and children.
+        <%def name="insert_entry(prop)">
+        % if prop.is_clone():
+            ## NOTE(review): clone details/hal_details are not passed
+            ## through the 'x' (escape) filter, unlike the entry branch
+            ## below -- confirm this asymmetry is intentional.
+            <clone entry="${prop.name}" kind="${prop.target_kind}">
+
+              % if prop.details is not None:
+                <details>${prop.details}</details>
+              % endif
+
+              % if prop.hal_details is not None:
+                <hal_details>${prop.hal_details}</hal_details>
+              % endif
+
+              % for tag in prop.tags:
+                <tag id="${tag.id}" />
+              % endfor
+
+            </clone>
+        % else:
+            ## Optional attributes are emitted only when set, so the
+            ## generated XML stays minimal.
+            <entry name="${prop.name_short}" type="${prop.type}"
+          % if prop.visibility:
+                visibility="${prop.visibility}"
+          % endif
+          % if prop.synthetic:
+                synthetic="true"
+          % endif
+          % if prop.deprecated:
+                deprecated="true"
+          % endif
+          % if prop.optional:
+                optional="${str(prop.optional).lower()}"
+          % endif
+          % if prop.enum:
+                enum="true"
+          % endif
+          % if prop.type_notes is not None:
+                type_notes="${prop.type_notes}"
+          % endif
+          % if prop.container is not None:
+                container="${prop.container}"
+          % endif
+
+          % if prop.typedef is not None:
+                typedef="${prop.typedef.name}"
+          % endif
+
+          % if prop.hwlevel:
+                hwlevel="${prop.hwlevel}"
+          % endif
+            >
+
+              % if prop.container == 'array':
+                <array>
+                  % for size in prop.container_sizes:
+                    <size>${size}</size>
+                  % endfor
+                </array>
+              % elif prop.container == 'tuple':
+                <tuple>
+                  % for size in prop.container_sizes:
+                    <value /> <!-- intentionally generated empty. manually fix -->
+                  % endfor
+                </tuple>
+              % endif
+              % if prop.enum:
+                <enum>
+                  % for value in prop.enum.values:
+                      <value
+                    ## NOTE(review): the '% endif:' lines below carry a
+                    ## trailing colon -- verify Mako tolerates this form.
+                    % if value.deprecated:
+                             deprecated="true"
+                    % endif:
+                    % if value.optional:
+                             optional="true"
+                    % endif:
+                    % if value.hidden:
+                             hidden="true"
+                    % endif:
+                    % if value.id is not None:
+                             id="${value.id}"
+                    % endif
+                      >${value.name}
+                    % if value.notes is not None:
+                             <notes>${value.notes}</notes>
+                    % endif
+                      </value>
+                  % endfor
+                </enum>
+              % endif
+
+              % if prop.description is not None:
+                <description>${prop.description | x}</description>
+              % endif
+
+              % if prop.units is not None:
+                <units>${prop.units | x}</units>
+              % endif
+
+              % if prop.range is not None:
+                <range>${prop.range | x}</range>
+              % endif
+
+              % if prop.details is not None:
+                <details>${prop.details | x}</details>
+              % endif
+
+              % if prop.hal_details is not None:
+                <hal_details>${prop.hal_details | x}</hal_details>
+              % endif
+
+              % for tag in prop.tags:
+                <tag id="${tag.id}" />
+              % endfor
+
+            </entry>
+        % endif
+        </%def>
+
+        ${insert_body(kind)}
+
+      </${kind.name}>
+    % endfor # for each kind
+
+  </section>
+  % endfor
+</namespace>
+% endfor
+
+</metadata>
diff --git a/media/camera/docs/metadata_validate.py b/media/camera/docs/metadata_validate.py
new file mode 100755
index 0000000..8260005
--- /dev/null
+++ b/media/camera/docs/metadata_validate.py
@@ -0,0 +1,324 @@
+#!/usr/bin/python
+
+#
+# Copyright (C) 2012 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""
+Usage:
+  metadata_validate.py <filename.xml>
+  - validates that the metadata properties defined in filename.xml are
+    semantically correct.
+  - does not do any XSD validation, use xmllint for that (in metadata-validate)
+
+Module:
+  A set of helpful functions for dealing with BeautifulSoup element trees.
+  Especially the find_* and fully_qualified_name functions.
+
+Dependencies:
+  BeautifulSoup - an HTML/XML parser available to download from
+                  http://www.crummy.com/software/BeautifulSoup/
+"""
+
+from bs4 import BeautifulSoup
+from bs4 import Tag
+import sys
+
+
+#####################
+#####################
+
+def fully_qualified_name(entry):
+  """
+  Calculates the fully qualified name for an entry by walking the path
+  to the root node.
+
+  Args:
+    entry: a BeautifulSoup Tag corresponding to an <entry ...> XML node,
+           or a <clone ...> XML node.
+
+  Raises:
+    ValueError: if entry does not correspond to one of the above XML nodes
+
+  Returns:
+    A string with the full name, e.g. "android.lens.info.availableApertureSizes"
+  """
+
+  # Only <namespace> and <section> ancestors contribute name components;
+  # kind wrappers (<static>/<dynamic>/<controls>) are intentionally skipped.
+  filter_tags = ['namespace', 'section']
+  parents = [i['name'] for i in entry.parents if i.name in filter_tags]
+
+  if entry.name == 'entry':
+    name = entry['name']
+  elif entry.name == 'clone':
+    name = entry['entry'].split(".")[-1] # "a.b.c" => "c"
+  else:
+    raise ValueError("Unsupported tag type '%s' for element '%s'" \
+                        %(entry.name, entry))
+
+  # .parents yields the innermost ancestor first; reverse for root-first order.
+  parents.reverse()
+  parents.append(name)
+
+  fqn = ".".join(parents)
+
+  return fqn
+
+def find_parent_by_name(element, names):
+  """
+  Find the ancestor for an element whose name matches one of those
+  in names.
+
+  Args:
+    element: A BeautifulSoup Tag corresponding to an XML node
+    names: A list of strings, the tag names to match against
+
+  Returns:
+    The tag *name* (a string) of the closest matching ancestor, or None.
+    Note the matched Tag itself is not returned -- the comprehension
+    below extracts i.name -- and callers such as find_kind rely on
+    getting a plain string back.
+
+    For example, assuming the following XML structure:
+      <static>
+        <anything>
+          <entry name="Hello" />   # this is in variable 'Hello'
+        </anything>
+      </static>
+
+      el = find_parent_by_name(Hello, ['static'])
+      # el is now the string 'static'
+  """
+  # element.parents iterates from the closest ancestor outwards, so the
+  # first match is the nearest enclosing tag.
+  matching_parents = [i.name for i in element.parents if i.name in names]
+
+  if matching_parents:
+    return matching_parents[0]
+  else:
+    return None
+
+def find_all_child_tags(element, tag):
+    """
+    Finds all the children that are a Tag (as opposed to a NavigableString),
+    with a name of tag. This is useful to filter the NavigableStrings out
+    of the children.
+
+    Args:
+      element: A BeautifulSoup Tag corresponding to an XML node
+      tag: A string representing the name of the tag
+
+    Returns:
+      A list of Tag instances. Only direct children are considered;
+      this does not recurse into grandchildren.
+
+      For example, given the following XML structure:
+        <enum>                    # This is the variable el
+          Hello world             # NavigableString
+          <value>Apple</value>    # this is the variable apple (Tag)
+          <value>Orange</value>   # this is the variable orange (Tag)
+          Hello world again       # NavigableString
+        </enum>
+
+        lst = find_all_child_tags(el, 'value')
+        # lst is [apple, orange]
+
+    """
+    matching_tags = [i for i in element.children if isinstance(i, Tag) and i.name == tag]
+    return matching_tags
+
+def find_child_tag(element, tag):
+    """
+    Finds the first direct child that is a Tag with the matching name.
+
+    Args:
+      element: a BeautifulSoup Tag
+      tag: A String representing the name of the tag
+
+    Returns:
+      An instance of a Tag, or None if there was no match.
+
+      For example, given the following XML structure:
+        <enum>                    # This is the variable el
+          Hello world             # NavigableString
+          <value>Apple</value>    # this is the variable apple (Tag)
+          <value>Orange</value>   # this is the variable orange (Tag)
+          Hello world again       # NavigableString
+        </enum>
+
+        res = find_child_tag(el, 'value')
+        # res is apple
+    """
+    matching_tags = find_all_child_tags(element, tag)
+    if matching_tags:
+        return matching_tags[0]
+    else:
+        return None
+
+def find_kind(element):
+  """
+  Finds the kind ancestor for an element.
+
+  Args:
+    element: a BeautifulSoup Tag
+
+  Returns:
+    The kind name as a string ('dynamic', 'static', or 'controls'), or
+    None if there was no match. (find_parent_by_name returns names,
+    not Tags, which is what validate_clones compares against.)
+
+  Remarks:
+    This function only makes sense to be called for an Entry, Clone, or
+    InnerNamespace XML types. It will always return 'None' for other nodes.
+  """
+  kinds = ['dynamic', 'static', 'controls']
+  parent_kind = find_parent_by_name(element, kinds)
+  return parent_kind
+
+def validate_error(msg):
+  """
+  Print a validation error to stderr, prefixed with "ERROR: ".
+
+  Args:
+    msg: a string you want to be printed
+  """
+  # Python 2 print-to-stderr syntax.
+  print >> sys.stderr, "ERROR: " + msg
+
+
+def validate_clones(soup):
+  """
+  Validate that all <clone> elements point to an existing <entry> element,
+  and that each clone's 'entry' attribute matches the clone's own fully
+  qualified name.
+
+  Args:
+    soup - an instance of BeautifulSoup
+
+  Returns:
+    True if the validation succeeds, False otherwise. All clones are
+    checked even after the first failure, so every error is reported.
+  """
+  success = True
+
+  for clone in soup.find_all("clone"):
+    clone_entry = clone['entry']
+    clone_kind = clone['kind']
+
+    # NOTE(review): parent_kind is computed but never used below.
+    parent_kind = find_kind(clone)
+
+    # An <entry> matches when both its kind ancestor and its fully
+    # qualified name agree with the clone's attributes.
+    find_entry = lambda x: x.name == 'entry'                           \
+                       and find_kind(x) == clone_kind                  \
+                       and fully_qualified_name(x) == clone_entry
+    matching_entry = soup.find(find_entry)
+
+    if matching_entry is None:
+      error_msg = ("Did not find corresponding clone entry '%s' " +    \
+               "with kind '%s'") %(clone_entry, clone_kind)
+      validate_error(error_msg)
+      success = False
+
+    # The clone must live at the same fully qualified path as its target.
+    clone_name = fully_qualified_name(clone)
+    if clone_name != clone_entry:
+      error_msg = ("Clone entry target '%s' did not match fully qualified "  + \
+                   "name '%s'.") %(clone_entry, clone_name)
+      validate_error(error_msg)
+      success = False
+
+  return success
+
+# All <entry> elements with container=$foo have a <$foo> child
+# If type="enum", <enum> tag is present
+# In <enum> for all <value id="$x">, $x is numeric
+def validate_entries(soup):
+  """
+  Validate all <entry> elements with the following rules:
+    * If there is a container="$foo" attribute, there is a <$foo> child
+    * If there is a type="enum" attribute, there is an <enum> child
+    * In the <enum> child, all <value id="$x"> have a numeric $x
+
+  Args:
+    soup - an instance of BeautifulSoup
+
+  Returns:
+    True if the validation succeeds, False otherwise. All entries are
+    checked even after the first failure, so every error is reported.
+  """
+  success = True
+  for entry in soup.find_all("entry"):
+    entry_container = entry.attrs.get('container')
+
+    if entry_container is not None:
+      container_tag = entry.find(entry_container)
+
+      if container_tag is None:
+        success = False
+        # NOTE(review): message says "has type" but interpolates the
+        # container attribute -- wording is slightly misleading.
+        validate_error(("Entry '%s' in kind '%s' has type '%s' but " +  \
+                 "missing child element <%s>")                          \
+                 %(fully_qualified_name(entry), find_kind(entry),       \
+                 entry_container, entry_container))
+
+    enum = entry.attrs.get('enum')
+    if enum and enum == 'true':
+      # enum="true" requires a matching <enum> child element.
+      if entry.enum is None:
+        validate_error(("Entry '%s' in kind '%s' is missing enum")     \
+                               % (fully_qualified_name(entry), find_kind(entry),
+                                  ))
+        success = False
+
+      else:
+        for value in entry.enum.find_all('value'):
+          value_id = value.attrs.get('id')
+
+          if value_id is not None:
+            try:
+              id_int = int(value_id, 0) #autoguess base; parse check only, value unused
+            except ValueError:
+              validate_error(("Entry '%s' has id '%s', which is not" + \
+                                        " numeric.")                   \
+                             %(fully_qualified_name(entry), value_id))
+              success = False
+    else:
+      # Conversely, an <enum> child without enum="true" is an error.
+      if entry.enum:
+        validate_error(("Entry '%s' kind '%s' has enum el, but no enum attr")  \
+                               % (fully_qualified_name(entry), find_kind(entry),
+                                  ))
+        success = False
+
+  return success
+
+def validate_xml(xml):
+  """
+  Validate all XML nodes according to the rules in validate_clones and
+  validate_entries.
+
+  Args:
+    xml - A string containing a block of XML to validate
+
+  Returns:
+    a BeautifulSoup instance if validation succeeds, None otherwise
+  """
+
+  # Parse the document in XML mode (rather than HTML).
+  soup = BeautifulSoup(xml, features='xml')
+
+  # Run both validators unconditionally so all errors get reported,
+  # then combine the results.
+  succ = validate_clones(soup)
+  succ = validate_entries(soup) and succ
+
+  if succ:
+    return soup
+  else:
+    return None
+
+#####################
+#####################
+
+if __name__ == "__main__":
+  if len(sys.argv) <= 1:
+    # NOTE(review): exits with status 0 even though no file was given;
+    # a non-zero exit code would be more conventional -- confirm intent.
+    print >> sys.stderr, "Usage: %s <filename.xml>" % (sys.argv[0])
+    sys.exit(0)
+
+  file_name = sys.argv[1]
+  # file() is the Python 2 builtin alias of open().
+  succ = validate_xml(file(file_name).read()) is not None
+
+  if succ:
+    print "%s: SUCCESS! Document validated" %(file_name)
+    sys.exit(0)
+  else:
+    print >> sys.stderr, "%s: ERRORS: Document failed to validate" %(file_name)
+    sys.exit(1)
diff --git a/media/camera/include/system/camera_metadata.h b/media/camera/include/system/camera_metadata.h
new file mode 100644
index 0000000..9de902b
--- /dev/null
+++ b/media/camera/include/system/camera_metadata.h
@@ -0,0 +1,547 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef SYSTEM_MEDIA_INCLUDE_ANDROID_CAMERA_METADATA_H
+#define SYSTEM_MEDIA_INCLUDE_ANDROID_CAMERA_METADATA_H
+
+#include <string.h>
+#include <stdint.h>
+#include <cutils/compiler.h>
+#include <system/camera_vendor_tags.h>
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/**
+ * Tag hierarchy and enum definitions for camera_metadata_entry
+ * =============================================================================
+ */
+
+/**
+ * Main enum definitions are in a separate file to make it easy to
+ * maintain
+ */
+#include "camera_metadata_tags.h"
+
+/**
+ * Enum range for each top-level category
+ */
+ANDROID_API
+extern unsigned int camera_metadata_section_bounds[ANDROID_SECTION_COUNT][2];
+ANDROID_API
+extern const char *camera_metadata_section_names[ANDROID_SECTION_COUNT];
+
+/**
+ * Type definitions for camera_metadata_entry
+ * =============================================================================
+ */
+enum {
+    // Unsigned 8-bit integer (uint8_t)
+    TYPE_BYTE = 0,
+    // Signed 32-bit integer (int32_t)
+    TYPE_INT32 = 1,
+    // 32-bit float (float)
+    TYPE_FLOAT = 2,
+    // Signed 64-bit integer (int64_t)
+    TYPE_INT64 = 3,
+    // 64-bit float (double)
+    TYPE_DOUBLE = 4,
+    // A 64-bit fraction (camera_metadata_rational_t)
+    TYPE_RATIONAL = 5,
+    // Number of type fields; also sizes camera_metadata_type_size[]
+    // and camera_metadata_type_names[] declared below
+    NUM_TYPES
+};
+
+/** Payload of a TYPE_RATIONAL entry: the value numerator/denominator. */
+typedef struct camera_metadata_rational {
+    int32_t numerator;
+    int32_t denominator;
+} camera_metadata_rational_t;
+
+/**
+ * A reference to a metadata entry in a buffer.
+ *
+ * The data union pointers point to the real data in the buffer, and can be
+ * modified in-place if the count does not need to change. The count is the
+ * number of entries in data of the entry's type, not a count of bytes.
+ */
+typedef struct camera_metadata_entry {
+    size_t   index;   // position of this entry within its buffer
+    uint32_t tag;     // metadata tag (see camera_metadata_tags.h)
+    uint8_t  type;    // one of the TYPE_* constants above
+    size_t   count;   // number of values of 'type' in data, not bytes
+    union {           // points into the buffer; member selected by 'type'
+        uint8_t *u8;
+        int32_t *i32;
+        float   *f;
+        int64_t *i64;
+        double  *d;
+        camera_metadata_rational_t *r;
+    } data;
+} camera_metadata_entry_t;
+
+/**
+ * A read-only reference to a metadata entry in a buffer. Identical to
+ * camera_metadata_entry in layout
+ */
+typedef struct camera_metadata_ro_entry {
+    size_t   index;
+    uint32_t tag;
+    uint8_t  type;
+    size_t   count;
+    // Same members as camera_metadata_entry, but pointing to const data.
+    union {
+        const uint8_t *u8;
+        const int32_t *i32;
+        const float   *f;
+        const int64_t *i64;
+        const double  *d;
+        const camera_metadata_rational_t *r;
+    } data;
+} camera_metadata_ro_entry_t;
+
+/**
+ * Size in bytes of each entry type
+ */
+ANDROID_API
+extern const size_t camera_metadata_type_size[NUM_TYPES];
+
+/**
+ * Human-readable name of each entry type
+ */
+ANDROID_API
+extern const char* camera_metadata_type_names[NUM_TYPES];
+
+/**
+ * Main definitions for the metadata entry and array structures
+ * =============================================================================
+ */
+
+/**
+ * A packet of metadata. This is a list of metadata entries, each of which has
+ * an integer tag to identify its meaning, 'type' and 'count' field, and the
+ * data, which contains a 'count' number of entries of type 'type'. The packet
+ * has a fixed capacity for entries and for extra data.  A new entry uses up one
+ * entry slot, and possibly some amount of data capacity; the function
+ * calculate_camera_metadata_entry_data_size() provides the amount of data
+ * capacity that would be used up by an entry.
+ *
+ * Entries are not sorted by default, and are not forced to be unique - multiple
+ * entries with the same tag are allowed. The packet will not dynamically resize
+ * when full.
+ *
+ * The packet is contiguous in memory, with size in bytes given by
+ * get_camera_metadata_size(). Therefore, it can be copied safely with memcpy()
+ * to a buffer of sufficient size. The copy_camera_metadata() function is
+ * intended for eliminating unused capacity in the destination packet.
+ */
+struct camera_metadata;
+typedef struct camera_metadata camera_metadata_t;
+
+/**
+ * Functions for manipulating camera metadata
+ * =============================================================================
+ *
+ * NOTE: Unless otherwise specified, functions that return type "int"
+ * return 0 on success, and non-0 value on error.
+ */
+
+/**
+ * Allocate a new camera_metadata structure, with some initial space for entries
+ * and extra data. The entry_capacity is measured in entry counts, and
+ * data_capacity in bytes. The resulting structure is all contiguous in memory,
+ * and can be freed with free_camera_metadata().
+ */
+ANDROID_API
+camera_metadata_t *allocate_camera_metadata(size_t entry_capacity,
+        size_t data_capacity);
+
+/**
+ * Get the required alignment of a packet of camera metadata, which is the
+ * maximal alignment of the embedded camera_metadata, camera_metadata_buffer_entry,
+ * and camera_metadata_data.
+ */
+ANDROID_API
+size_t get_camera_metadata_alignment();
+
+/**
+ * Allocate a new camera_metadata structure of size src_size. Copy the data,
+ * ignoring alignment, and then attempt validation. If validation
+ * fails, free the memory and return NULL. Otherwise return the pointer.
+ *
+ * The resulting pointer can be freed with free_camera_metadata().
+ */
+ANDROID_API
+camera_metadata_t *allocate_copy_camera_metadata_checked(
+        const camera_metadata_t *src,
+        size_t src_size);
+
+/**
+ * Place a camera metadata structure into an existing buffer. Returns NULL if
+ * the buffer is too small for the requested number of reserved entries and
+ * bytes of data. The entry_capacity is measured in entry counts, and
+ * data_capacity in bytes. If the buffer is larger than the required space,
+ * unused space will be left at the end. If successful, returns a pointer to the
+ * metadata header placed at the start of the buffer. It is the caller's
+ * responsibility to free the original buffer; do not call
+ * free_camera_metadata() with the returned pointer.
+ */
+ANDROID_API
+camera_metadata_t *place_camera_metadata(void *dst, size_t dst_size,
+        size_t entry_capacity,
+        size_t data_capacity);
+
+/**
+ * Free a camera_metadata structure. Should only be used with structures
+ * allocated with allocate_camera_metadata().
+ */
+ANDROID_API
+void free_camera_metadata(camera_metadata_t *metadata);
+
+/**
+ * Calculate the buffer size needed for a metadata structure of entry_count
+ * metadata entries, needing a total of data_count bytes of extra data storage.
+ */
+ANDROID_API
+size_t calculate_camera_metadata_size(size_t entry_count,
+        size_t data_count);
+
+/**
+ * Get current size of entire metadata structure in bytes, including reserved
+ * but unused space.
+ */
+ANDROID_API
+size_t get_camera_metadata_size(const camera_metadata_t *metadata);
+
+/**
+ * Get size of entire metadata buffer in bytes, not including reserved but
+ * unused space. This is the amount of space needed by copy_camera_metadata for
+ * its dst buffer.
+ */
+ANDROID_API
+size_t get_camera_metadata_compact_size(const camera_metadata_t *metadata);
+
+/**
+ * Get the current number of entries in the metadata packet.
+ *
+ * metadata packet must be valid, which can be checked before the call with
+ * validate_camera_metadata_structure().
+ */
+ANDROID_API
+size_t get_camera_metadata_entry_count(const camera_metadata_t *metadata);
+
+/**
+ * Get the maximum number of entries that could fit in the metadata packet.
+ */
+ANDROID_API
+size_t get_camera_metadata_entry_capacity(const camera_metadata_t *metadata);
+
+/**
+ * Get the current count of bytes used for value storage in the metadata packet.
+ */
+ANDROID_API
+size_t get_camera_metadata_data_count(const camera_metadata_t *metadata);
+
+/**
+ * Get the maximum count of bytes that could be used for value storage in the
+ * metadata packet.
+ */
+ANDROID_API
+size_t get_camera_metadata_data_capacity(const camera_metadata_t *metadata);
+
+/**
+ * Copy a metadata structure to a memory buffer, compacting it along the
+ * way. That is, in the copied structure, entry_count == entry_capacity, and
+ * data_count == data_capacity.
+ *
+ * If dst_size > get_camera_metadata_compact_size(), the unused bytes are at the
+ * end of the buffer. If dst_size < get_camera_metadata_compact_size(), returns
+ * NULL. Otherwise returns a pointer to the metadata structure header placed at
+ * the start of dst.
+ *
+ * Since the buffer was not allocated by allocate_camera_metadata, the caller is
+ * responsible for freeing the underlying buffer when needed; do not call
+ * free_camera_metadata.
+ */
+ANDROID_API
+camera_metadata_t *copy_camera_metadata(void *dst, size_t dst_size,
+        const camera_metadata_t *src);
+
+/**
+ * Validate that a metadata is structurally sane. That is, its internal
+ * state is such that we won't get buffer overflows or run into other
+ * 'impossible' issues when calling the other API functions.
+ *
+ * This is useful in particular after copying the binary metadata blob
+ * from an untrusted source, since passing this check means the data is at least
+ * consistent.
+ *
+ * The expected_size argument is optional.
+ *
+ * Returns 0 on success. A non-0 value is returned on error.
+ */
+ANDROID_API
+int validate_camera_metadata_structure(const camera_metadata_t *metadata,
+                                       const size_t *expected_size);
+
+/**
+ * Append camera metadata in src to an existing metadata structure in dst.  This
+ * does not resize the destination structure, so if it is too small, a non-zero
+ * value is returned. On success, 0 is returned. Appending onto a sorted
+ * structure results in a non-sorted combined structure.
+ */
+ANDROID_API
+int append_camera_metadata(camera_metadata_t *dst, const camera_metadata_t *src);
+
+/**
+ * Clone an existing metadata buffer, compacting along the way. This is
+ * equivalent to allocating a new buffer of the minimum needed size, then
+ * appending the buffer to be cloned into the new buffer. The resulting buffer
+ * can be freed with free_camera_metadata(). Returns NULL if cloning failed.
+ */
+ANDROID_API
+camera_metadata_t *clone_camera_metadata(const camera_metadata_t *src);
+
+/**
+ * Calculate the number of bytes of extra data a given metadata entry will take
+ * up. That is, if entry of 'type' with a payload of 'data_count' values is
+ * added, how much will the value returned by get_camera_metadata_data_count()
+ * be increased? This value may be zero, if no extra data storage is needed.
+ */
+ANDROID_API
+size_t calculate_camera_metadata_entry_data_size(uint8_t type,
+        size_t data_count);
+
+/**
+ * Add a metadata entry to a metadata structure. Returns 0 if the addition
+ * succeeded. Returns a non-zero value if there is insufficient reserved space
+ * left to add the entry, or if the tag is unknown.  data_count is the number of
+ * entries in the data array of the tag's type, not a count of
+ * bytes. Vendor-defined tags can not be added using this method, unless
+ * set_vendor_tag_query_ops() has been called first. Entries are always added to
+ * the end of the structure (highest index), so after addition, a
+ * previously-sorted array will be marked as unsorted.
+ *
+ * Returns 0 on success. A non-0 value is returned on error.
+ */
+ANDROID_API
+int add_camera_metadata_entry(camera_metadata_t *dst,
+        uint32_t tag,
+        const void *data,
+        size_t data_count);
+
+/**
+ * Sort the metadata buffer for fast searching. If already marked as sorted,
+ * does nothing. Adding or appending entries to the buffer will place the buffer
+ * back into an unsorted state.
+ *
+ * Returns 0 on success. A non-0 value is returned on error.
+ */
+ANDROID_API
+int sort_camera_metadata(camera_metadata_t *dst);
+
+/**
+ * Get metadata entry at position index in the metadata buffer.
+ * Index must be less than entry count, which is returned by
+ * get_camera_metadata_entry_count().
+ *
+ * src and index are inputs; the passed-in entry is updated with the details of
+ * the entry. The data pointer points to the real data in the buffer, and can be
+ * updated as long as the data count does not change.
+ *
+ * Returns 0 on success. A non-0 value is returned on error.
+ */
+ANDROID_API
+int get_camera_metadata_entry(camera_metadata_t *src,
+        size_t index,
+        camera_metadata_entry_t *entry);
+
+/**
+ * Get metadata entry at position index, but disallow editing the data.
+ */
+ANDROID_API
+int get_camera_metadata_ro_entry(const camera_metadata_t *src,
+        size_t index,
+        camera_metadata_ro_entry_t *entry);
+
+/**
+ * Find an entry with given tag value. If not found, returns -ENOENT. Otherwise,
+ * returns entry contents like get_camera_metadata_entry.
+ *
+ * If multiple entries with the same tag exist, does not have any guarantees on
+ * which is returned. To speed up searching for tags, sort the metadata
+ * structure first by calling sort_camera_metadata().
+ */
+ANDROID_API
+int find_camera_metadata_entry(camera_metadata_t *src,
+        uint32_t tag,
+        camera_metadata_entry_t *entry);
+
+/**
+ * Find an entry with given tag value, but disallow editing the data
+ */
+ANDROID_API
+int find_camera_metadata_ro_entry(const camera_metadata_t *src,
+        uint32_t tag,
+        camera_metadata_ro_entry_t *entry);
+
+/**
+ * Delete an entry at given index. This is an expensive operation, since it
+ * requires repacking entries and possibly entry data. This also invalidates any
+ * existing camera_metadata_entry.data pointers to this buffer. Sorting is
+ * maintained.
+ */
+ANDROID_API
+int delete_camera_metadata_entry(camera_metadata_t *dst,
+        size_t index);
+
+/**
+ * Updates a metadata entry with new data. If the data size is changing, may
+ * need to adjust the data array, making this an O(N) operation. If the data
+ * size is the same or still fits in the entry space, this is O(1). Maintains
+ * sorting, but invalidates camera_metadata_entry instances that point to the
+ * updated entry. If a non-NULL value is passed in to entry, the entry structure
+ * is updated to match the new buffer state.  Returns a non-zero value if there
+ * is no room for the new data in the buffer.
+ */
+ANDROID_API
+int update_camera_metadata_entry(camera_metadata_t *dst,
+        size_t index,
+        const void *data,
+        size_t data_count,
+        camera_metadata_entry_t *updated_entry);
+
+/**
+ * Retrieve human-readable name of section the tag is in. Returns NULL if
+ * no such tag is defined. Returns NULL for tags in the vendor section, unless
+ * set_vendor_tag_query_ops() has been used.
+ */
+ANDROID_API
+const char *get_camera_metadata_section_name(uint32_t tag);
+
+/**
+ * Retrieve human-readable name of tag (not including section). Returns NULL if
+ * no such tag is defined. Returns NULL for tags in the vendor section, unless
+ * set_vendor_tag_query_ops() has been used.
+ */
+ANDROID_API
+const char *get_camera_metadata_tag_name(uint32_t tag);
+
+/**
+ * Retrieve the type of a tag. Returns -1 if no such tag is defined. Returns -1
+ * for tags in the vendor section, unless set_vendor_tag_query_ops() has been
+ * used.
+ */
+ANDROID_API
+int get_camera_metadata_tag_type(uint32_t tag);
+
+/**
+ * Set up vendor-specific tag query methods. These are needed to properly add
+ * entries with vendor-specified tags and to use the
+ * get_camera_metadata_section_name, _tag_name, and _tag_type methods with
+ * vendor tags. Returns 0 on success.
+ *
+ * **DEPRECATED** - Please use vendor_tag_ops defined in camera_vendor_tags.h
+ *        instead.
+ */
+typedef struct vendor_tag_query_ops vendor_tag_query_ops_t;
+struct vendor_tag_query_ops {
+    /**
+     * Get vendor section name for a vendor-specified entry tag. Only called for
+     * tags >= 0x80000000. The section name must start with the name of the
+     * vendor in the Java package style. For example, CameraZoom inc must prefix
+     * their sections with "com.camerazoom." Must return NULL if the tag is
+     * outside the bounds of vendor-defined sections.
+     */
+    const char *(*get_camera_vendor_section_name)(
+        const vendor_tag_query_ops_t *v,
+        uint32_t tag);
+    /**
+     * Get tag name for a vendor-specified entry tag. Only called for tags >=
+     * 0x80000000. Must return NULL if the tag is outside the bounds of
+     * vendor-defined sections.
+     */
+    const char *(*get_camera_vendor_tag_name)(
+        const vendor_tag_query_ops_t *v,
+        uint32_t tag);
+    /**
+     * Get tag type for a vendor-specified entry tag. Only called for tags >=
+     * 0x80000000. Must return -1 if the tag is outside the bounds of
+     * vendor-defined sections.
+     */
+    int (*get_camera_vendor_tag_type)(
+        const vendor_tag_query_ops_t *v,
+        uint32_t tag);
+    /**
+     * Get the number of vendor tags supported on this platform. Used to
+     * calculate the size of buffer needed for holding the array of all tags
+     * returned by get_camera_vendor_tags().
+     */
+    int (*get_camera_vendor_tag_count)(
+        const vendor_tag_query_ops_t *v);
+    /**
+     * Fill an array with all the supported vendor tags on this platform.
+     * get_camera_vendor_tag_count() returns the number of tags supported, and
+     * tag_array should be allocated with enough space to hold all of the tags.
+     */
+    void (*get_camera_vendor_tags)(
+        const vendor_tag_query_ops_t *v,
+        uint32_t *tag_array);
+};
+
+/**
+ * **DEPRECATED** - This should only be used by the camera framework. Camera
+ *      metadata will transition to using vendor_tag_ops defined in
+ *      camera_vendor_tags.h instead.
+ */
+ANDROID_API
+int set_camera_metadata_vendor_tag_ops(const vendor_tag_query_ops_t *query_ops);
+
+/**
+ * Print fields in the metadata to the log.
+ * verbosity = 0: Only tag entry information
+ * verbosity = 1: Tag entry information plus at most 16 data values
+ * verbosity = 2: All information
+ */
+ANDROID_API
+void dump_camera_metadata(const camera_metadata_t *metadata,
+        int fd,
+        int verbosity);
+
+/**
+ * Print fields in the metadata to the log; adds indentation parameter, which
+ * specifies the number of spaces to insert before each line of the dump
+ */
+ANDROID_API
+void dump_indented_camera_metadata(const camera_metadata_t *metadata,
+        int fd,
+        int verbosity,
+        int indentation);
+
+/**
+ * Prints the specified tag value as a string. Only works for enum tags.
+ * Returns 0 on success, -1 on failure.
+ */
+ANDROID_API
+int camera_metadata_enum_snprint(uint32_t tag,
+                                 uint32_t value,
+                                 char *dst,
+                                 size_t size);
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
diff --git a/media/camera/include/system/camera_metadata_tags.h b/media/camera/include/system/camera_metadata_tags.h
new file mode 100644
index 0000000..334610b
--- /dev/null
+++ b/media/camera/include/system/camera_metadata_tags.h
@@ -0,0 +1,919 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * !! Do not include this file directly !!
+ *
+ * Include camera_metadata.h instead.
+ */
+
+/**
+ * ! Do not edit this file directly !
+ *
+ * Generated automatically from camera_metadata_tags.mako
+ */
+
+/** TODO: Nearly every enum in this file needs a description */
+
+/**
+ * Top level hierarchy definitions for camera metadata. *_INFO sections are for
+ * the static metadata that can be retrieved without opening the camera device.
+ * New sections must be added right before ANDROID_SECTION_COUNT to maintain
+ * existing enumerations.
+ */
+typedef enum camera_metadata_section {
+    ANDROID_COLOR_CORRECTION,
+    ANDROID_CONTROL,
+    ANDROID_DEMOSAIC,
+    ANDROID_EDGE,
+    ANDROID_FLASH,
+    ANDROID_FLASH_INFO,
+    ANDROID_HOT_PIXEL,
+    ANDROID_JPEG,
+    ANDROID_LENS,
+    ANDROID_LENS_INFO,
+    ANDROID_NOISE_REDUCTION,
+    ANDROID_QUIRKS,
+    ANDROID_REQUEST,
+    ANDROID_SCALER,
+    ANDROID_SENSOR,
+    ANDROID_SENSOR_INFO,
+    ANDROID_SHADING,
+    ANDROID_STATISTICS,
+    ANDROID_STATISTICS_INFO,
+    ANDROID_TONEMAP,
+    ANDROID_LED,
+    ANDROID_INFO,
+    ANDROID_BLACK_LEVEL,
+    ANDROID_SYNC,
+    ANDROID_REPROCESS,
+    ANDROID_DEPTH,
+    ANDROID_SECTION_COUNT,
+
+    VENDOR_SECTION = 0x8000
+} camera_metadata_section_t;
+
+/**
+ * Hierarchy positions in enum space. All vendor extension tags must be
+ * defined with tag >= VENDOR_SECTION_START
+ */
+typedef enum camera_metadata_section_start {
+    ANDROID_COLOR_CORRECTION_START = ANDROID_COLOR_CORRECTION  << 16,
+    ANDROID_CONTROL_START          = ANDROID_CONTROL           << 16,
+    ANDROID_DEMOSAIC_START         = ANDROID_DEMOSAIC          << 16,
+    ANDROID_EDGE_START             = ANDROID_EDGE              << 16,
+    ANDROID_FLASH_START            = ANDROID_FLASH             << 16,
+    ANDROID_FLASH_INFO_START       = ANDROID_FLASH_INFO        << 16,
+    ANDROID_HOT_PIXEL_START        = ANDROID_HOT_PIXEL         << 16,
+    ANDROID_JPEG_START             = ANDROID_JPEG              << 16,
+    ANDROID_LENS_START             = ANDROID_LENS              << 16,
+    ANDROID_LENS_INFO_START        = ANDROID_LENS_INFO         << 16,
+    ANDROID_NOISE_REDUCTION_START  = ANDROID_NOISE_REDUCTION   << 16,
+    ANDROID_QUIRKS_START           = ANDROID_QUIRKS            << 16,
+    ANDROID_REQUEST_START          = ANDROID_REQUEST           << 16,
+    ANDROID_SCALER_START           = ANDROID_SCALER            << 16,
+    ANDROID_SENSOR_START           = ANDROID_SENSOR            << 16,
+    ANDROID_SENSOR_INFO_START      = ANDROID_SENSOR_INFO       << 16,
+    ANDROID_SHADING_START          = ANDROID_SHADING           << 16,
+    ANDROID_STATISTICS_START       = ANDROID_STATISTICS        << 16,
+    ANDROID_STATISTICS_INFO_START  = ANDROID_STATISTICS_INFO   << 16,
+    ANDROID_TONEMAP_START          = ANDROID_TONEMAP           << 16,
+    ANDROID_LED_START              = ANDROID_LED               << 16,
+    ANDROID_INFO_START             = ANDROID_INFO              << 16,
+    ANDROID_BLACK_LEVEL_START      = ANDROID_BLACK_LEVEL       << 16,
+    ANDROID_SYNC_START             = ANDROID_SYNC              << 16,
+    ANDROID_REPROCESS_START        = ANDROID_REPROCESS         << 16,
+    ANDROID_DEPTH_START            = ANDROID_DEPTH             << 16,
+    VENDOR_SECTION_START           = VENDOR_SECTION            << 16
+} camera_metadata_section_start_t;
+
+/**
+ * Main enum for defining camera metadata tags.  New entries must always go
+ * before the section _END tag to preserve existing enumeration values.  In
+ * addition, the name and type of the tag needs to be added to
+ * system/media/camera/src/camera_metadata_tag_info.c
+ */
+typedef enum camera_metadata_tag {
+    ANDROID_COLOR_CORRECTION_MODE =                   // enum         | public
+            ANDROID_COLOR_CORRECTION_START,
+    ANDROID_COLOR_CORRECTION_TRANSFORM,               // rational[]   | public
+    ANDROID_COLOR_CORRECTION_GAINS,                   // float[]      | public
+    ANDROID_COLOR_CORRECTION_ABERRATION_MODE,         // enum         | public
+    ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
+                                                      // byte[]       | public
+    ANDROID_COLOR_CORRECTION_END,
+
+    ANDROID_CONTROL_AE_ANTIBANDING_MODE =             // enum         | public
+            ANDROID_CONTROL_START,
+    ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,         // int32        | public
+    ANDROID_CONTROL_AE_LOCK,                          // enum         | public
+    ANDROID_CONTROL_AE_MODE,                          // enum         | public
+    ANDROID_CONTROL_AE_REGIONS,                       // int32[]      | public
+    ANDROID_CONTROL_AE_TARGET_FPS_RANGE,              // int32[]      | public
+    ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,            // enum         | public
+    ANDROID_CONTROL_AF_MODE,                          // enum         | public
+    ANDROID_CONTROL_AF_REGIONS,                       // int32[]      | public
+    ANDROID_CONTROL_AF_TRIGGER,                       // enum         | public
+    ANDROID_CONTROL_AWB_LOCK,                         // enum         | public
+    ANDROID_CONTROL_AWB_MODE,                         // enum         | public
+    ANDROID_CONTROL_AWB_REGIONS,                      // int32[]      | public
+    ANDROID_CONTROL_CAPTURE_INTENT,                   // enum         | public
+    ANDROID_CONTROL_EFFECT_MODE,                      // enum         | public
+    ANDROID_CONTROL_MODE,                             // enum         | public
+    ANDROID_CONTROL_SCENE_MODE,                       // enum         | public
+    ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,         // enum         | public
+    ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,   // byte[]       | public
+    ANDROID_CONTROL_AE_AVAILABLE_MODES,               // byte[]       | public
+    ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,   // int32[]      | public
+    ANDROID_CONTROL_AE_COMPENSATION_RANGE,            // int32[]      | public
+    ANDROID_CONTROL_AE_COMPENSATION_STEP,             // rational     | public
+    ANDROID_CONTROL_AF_AVAILABLE_MODES,               // byte[]       | public
+    ANDROID_CONTROL_AVAILABLE_EFFECTS,                // byte[]       | public
+    ANDROID_CONTROL_AVAILABLE_SCENE_MODES,            // byte[]       | public
+    ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
+                                                      // byte[]       | public
+    ANDROID_CONTROL_AWB_AVAILABLE_MODES,              // byte[]       | public
+    ANDROID_CONTROL_MAX_REGIONS,                      // int32[]      | hidden
+    ANDROID_CONTROL_SCENE_MODE_OVERRIDES,             // byte[]       | system
+    ANDROID_CONTROL_AE_PRECAPTURE_ID,                 // int32        | system
+    ANDROID_CONTROL_AE_STATE,                         // enum         | public
+    ANDROID_CONTROL_AF_STATE,                         // enum         | public
+    ANDROID_CONTROL_AF_TRIGGER_ID,                    // int32        | system
+    ANDROID_CONTROL_AWB_STATE,                        // enum         | public
+    ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
+                                                      // int32[]      | hidden
+    ANDROID_CONTROL_AE_LOCK_AVAILABLE,                // enum         | public
+    ANDROID_CONTROL_AWB_LOCK_AVAILABLE,               // enum         | public
+    ANDROID_CONTROL_AVAILABLE_MODES,                  // byte[]       | public
+    ANDROID_CONTROL_END,
+
+    ANDROID_DEMOSAIC_MODE =                           // enum         | system
+            ANDROID_DEMOSAIC_START,
+    ANDROID_DEMOSAIC_END,
+
+    ANDROID_EDGE_MODE =                               // enum         | public
+            ANDROID_EDGE_START,
+    ANDROID_EDGE_STRENGTH,                            // byte         | system
+    ANDROID_EDGE_AVAILABLE_EDGE_MODES,                // byte[]       | public
+    ANDROID_EDGE_END,
+
+    ANDROID_FLASH_FIRING_POWER =                      // byte         | system
+            ANDROID_FLASH_START,
+    ANDROID_FLASH_FIRING_TIME,                        // int64        | system
+    ANDROID_FLASH_MODE,                               // enum         | public
+    ANDROID_FLASH_COLOR_TEMPERATURE,                  // byte         | system
+    ANDROID_FLASH_MAX_ENERGY,                         // byte         | system
+    ANDROID_FLASH_STATE,                              // enum         | public
+    ANDROID_FLASH_END,
+
+    ANDROID_FLASH_INFO_AVAILABLE =                    // enum         | public
+            ANDROID_FLASH_INFO_START,
+    ANDROID_FLASH_INFO_CHARGE_DURATION,               // int64        | system
+    ANDROID_FLASH_INFO_END,
+
+    ANDROID_HOT_PIXEL_MODE =                          // enum         | public
+            ANDROID_HOT_PIXEL_START,
+    ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,      // byte[]       | public
+    ANDROID_HOT_PIXEL_END,
+
+    ANDROID_JPEG_GPS_COORDINATES =                    // double[]     | hidden
+            ANDROID_JPEG_START,
+    ANDROID_JPEG_GPS_PROCESSING_METHOD,               // byte         | hidden
+    ANDROID_JPEG_GPS_TIMESTAMP,                       // int64        | hidden
+    ANDROID_JPEG_ORIENTATION,                         // int32        | public
+    ANDROID_JPEG_QUALITY,                             // byte         | public
+    ANDROID_JPEG_THUMBNAIL_QUALITY,                   // byte         | public
+    ANDROID_JPEG_THUMBNAIL_SIZE,                      // int32[]      | public
+    ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,           // int32[]      | public
+    ANDROID_JPEG_MAX_SIZE,                            // int32        | system
+    ANDROID_JPEG_SIZE,                                // int32        | system
+    ANDROID_JPEG_END,
+
+    ANDROID_LENS_APERTURE =                           // float        | public
+            ANDROID_LENS_START,
+    ANDROID_LENS_FILTER_DENSITY,                      // float        | public
+    ANDROID_LENS_FOCAL_LENGTH,                        // float        | public
+    ANDROID_LENS_FOCUS_DISTANCE,                      // float        | public
+    ANDROID_LENS_OPTICAL_STABILIZATION_MODE,          // enum         | public
+    ANDROID_LENS_FACING,                              // enum         | public
+    ANDROID_LENS_POSE_ROTATION,                       // float[]      | public
+    ANDROID_LENS_POSE_TRANSLATION,                    // float[]      | public
+    ANDROID_LENS_FOCUS_RANGE,                         // float[]      | public
+    ANDROID_LENS_STATE,                               // enum         | public
+    ANDROID_LENS_INTRINSIC_CALIBRATION,               // float[]      | public
+    ANDROID_LENS_RADIAL_DISTORTION,                   // float[]      | public
+    ANDROID_LENS_END,
+
+    ANDROID_LENS_INFO_AVAILABLE_APERTURES =           // float[]      | public
+            ANDROID_LENS_INFO_START,
+    ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,     // float[]      | public
+    ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,        // float[]      | public
+    ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,// byte[]       | public
+    ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,            // float        | public
+    ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,         // float        | public
+    ANDROID_LENS_INFO_SHADING_MAP_SIZE,               // int32[]      | hidden
+    ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,     // enum         | public
+    ANDROID_LENS_INFO_END,
+
+    ANDROID_NOISE_REDUCTION_MODE =                    // enum         | public
+            ANDROID_NOISE_REDUCTION_START,
+    ANDROID_NOISE_REDUCTION_STRENGTH,                 // byte         | system
+    ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
+                                                      // byte[]       | public
+    ANDROID_NOISE_REDUCTION_END,
+
+    ANDROID_QUIRKS_METERING_CROP_REGION =             // byte         | system
+            ANDROID_QUIRKS_START,
+    ANDROID_QUIRKS_TRIGGER_AF_WITH_AUTO,              // byte         | system
+    ANDROID_QUIRKS_USE_ZSL_FORMAT,                    // byte         | system
+    ANDROID_QUIRKS_USE_PARTIAL_RESULT,                // byte         | hidden
+    ANDROID_QUIRKS_PARTIAL_RESULT,                    // enum         | hidden
+    ANDROID_QUIRKS_END,
+
+    ANDROID_REQUEST_FRAME_COUNT =                     // int32        | hidden
+            ANDROID_REQUEST_START,
+    ANDROID_REQUEST_ID,                               // int32        | hidden
+    ANDROID_REQUEST_INPUT_STREAMS,                    // int32[]      | system
+    ANDROID_REQUEST_METADATA_MODE,                    // enum         | system
+    ANDROID_REQUEST_OUTPUT_STREAMS,                   // int32[]      | system
+    ANDROID_REQUEST_TYPE,                             // enum         | system
+    ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,           // int32[]      | hidden
+    ANDROID_REQUEST_MAX_NUM_REPROCESS_STREAMS,        // int32[]      | system
+    ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,            // int32        | public
+    ANDROID_REQUEST_PIPELINE_DEPTH,                   // byte         | public
+    ANDROID_REQUEST_PIPELINE_MAX_DEPTH,               // byte         | public
+    ANDROID_REQUEST_PARTIAL_RESULT_COUNT,             // int32        | public
+    ANDROID_REQUEST_AVAILABLE_CAPABILITIES,           // enum[]       | public
+    ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,           // int32[]      | hidden
+    ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,            // int32[]      | hidden
+    ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,   // int32[]      | hidden
+    ANDROID_REQUEST_END,
+
+    ANDROID_SCALER_CROP_REGION =                      // int32[]      | public
+            ANDROID_SCALER_START,
+    ANDROID_SCALER_AVAILABLE_FORMATS,                 // enum[]       | hidden
+    ANDROID_SCALER_AVAILABLE_JPEG_MIN_DURATIONS,      // int64[]      | hidden
+    ANDROID_SCALER_AVAILABLE_JPEG_SIZES,              // int32[]      | hidden
+    ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,        // float        | public
+    ANDROID_SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS, // int64[]      | hidden
+    ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,         // int32[]      | hidden
+    ANDROID_SCALER_AVAILABLE_RAW_MIN_DURATIONS,       // int64[]      | system
+    ANDROID_SCALER_AVAILABLE_RAW_SIZES,               // int32[]      | system
+    ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,// int32        | hidden
+    ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,   // enum[]       | hidden
+    ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,     // int64[]      | hidden
+    ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,         // int64[]      | hidden
+    ANDROID_SCALER_CROPPING_TYPE,                     // enum         | public
+    ANDROID_SCALER_END,
+
+    ANDROID_SENSOR_EXPOSURE_TIME =                    // int64        | public
+            ANDROID_SENSOR_START,
+    ANDROID_SENSOR_FRAME_DURATION,                    // int64        | public
+    ANDROID_SENSOR_SENSITIVITY,                       // int32        | public
+    ANDROID_SENSOR_REFERENCE_ILLUMINANT1,             // enum         | public
+    ANDROID_SENSOR_REFERENCE_ILLUMINANT2,             // byte         | public
+    ANDROID_SENSOR_CALIBRATION_TRANSFORM1,            // rational[]   | public
+    ANDROID_SENSOR_CALIBRATION_TRANSFORM2,            // rational[]   | public
+    ANDROID_SENSOR_COLOR_TRANSFORM1,                  // rational[]   | public
+    ANDROID_SENSOR_COLOR_TRANSFORM2,                  // rational[]   | public
+    ANDROID_SENSOR_FORWARD_MATRIX1,                   // rational[]   | public
+    ANDROID_SENSOR_FORWARD_MATRIX2,                   // rational[]   | public
+    ANDROID_SENSOR_BASE_GAIN_FACTOR,                  // rational     | system
+    ANDROID_SENSOR_BLACK_LEVEL_PATTERN,               // int32[]      | public
+    ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,            // int32        | public
+    ANDROID_SENSOR_ORIENTATION,                       // int32        | public
+    ANDROID_SENSOR_PROFILE_HUE_SAT_MAP_DIMENSIONS,    // int32[]      | system
+    ANDROID_SENSOR_TIMESTAMP,                         // int64        | public
+    ANDROID_SENSOR_TEMPERATURE,                       // float        | system
+    ANDROID_SENSOR_NEUTRAL_COLOR_POINT,               // rational[]   | public
+    ANDROID_SENSOR_NOISE_PROFILE,                     // double[]     | public
+    ANDROID_SENSOR_PROFILE_HUE_SAT_MAP,               // float[]      | system
+    ANDROID_SENSOR_PROFILE_TONE_CURVE,                // float[]      | system
+    ANDROID_SENSOR_GREEN_SPLIT,                       // float        | public
+    ANDROID_SENSOR_TEST_PATTERN_DATA,                 // int32[]      | public
+    ANDROID_SENSOR_TEST_PATTERN_MODE,                 // enum         | public
+    ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,      // int32[]      | public
+    ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,              // int64        | public
+    ANDROID_SENSOR_END,
+
+    ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE =           // int32[]      | public
+            ANDROID_SENSOR_INFO_START,
+    ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,            // int32[]      | public
+    ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,     // enum         | public
+    ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,          // int64[]      | public
+    ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,           // int64        | public
+    ANDROID_SENSOR_INFO_PHYSICAL_SIZE,                // float[]      | public
+    ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,             // int32[]      | public
+    ANDROID_SENSOR_INFO_WHITE_LEVEL,                  // int32        | public
+    ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,             // enum         | public
+    ANDROID_SENSOR_INFO_LENS_SHADING_APPLIED,         // enum         | public
+    ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE,
+                                                      // int32[]      | public
+    ANDROID_SENSOR_INFO_END,
+
+    ANDROID_SHADING_MODE =                            // enum         | public
+            ANDROID_SHADING_START,
+    ANDROID_SHADING_STRENGTH,                         // byte         | system
+    ANDROID_SHADING_AVAILABLE_MODES,                  // byte[]       | public
+    ANDROID_SHADING_END,
+
+    ANDROID_STATISTICS_FACE_DETECT_MODE =             // enum         | public
+            ANDROID_STATISTICS_START,
+    ANDROID_STATISTICS_HISTOGRAM_MODE,                // enum         | system
+    ANDROID_STATISTICS_SHARPNESS_MAP_MODE,            // enum         | system
+    ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,            // enum         | public
+    ANDROID_STATISTICS_FACE_IDS,                      // int32[]      | hidden
+    ANDROID_STATISTICS_FACE_LANDMARKS,                // int32[]      | hidden
+    ANDROID_STATISTICS_FACE_RECTANGLES,               // int32[]      | hidden
+    ANDROID_STATISTICS_FACE_SCORES,                   // byte[]       | hidden
+    ANDROID_STATISTICS_HISTOGRAM,                     // int32[]      | system
+    ANDROID_STATISTICS_SHARPNESS_MAP,                 // int32[]      | system
+    ANDROID_STATISTICS_LENS_SHADING_CORRECTION_MAP,   // byte         | public
+    ANDROID_STATISTICS_LENS_SHADING_MAP,              // float[]      | hidden
+    ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,         // float[]      | hidden
+    ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,     // rational[]   | hidden
+    ANDROID_STATISTICS_SCENE_FLICKER,                 // enum         | public
+    ANDROID_STATISTICS_HOT_PIXEL_MAP,                 // int32[]      | public
+    ANDROID_STATISTICS_LENS_SHADING_MAP_MODE,         // enum         | public
+    ANDROID_STATISTICS_END,
+
+    ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES =
+                                                      // byte[]       | public
+            ANDROID_STATISTICS_INFO_START,
+    ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,   // int32        | system
+    ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,           // int32        | public
+    ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,      // int32        | system
+    ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,  // int32        | system
+    ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,       // int32[]      | system
+    ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
+                                                      // byte[]       | public
+    ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
+                                                      // byte[]       | public
+    ANDROID_STATISTICS_INFO_END,
+
+    ANDROID_TONEMAP_CURVE_BLUE =                      // float[]      | hidden
+            ANDROID_TONEMAP_START,
+    ANDROID_TONEMAP_CURVE_GREEN,                      // float[]      | hidden
+    ANDROID_TONEMAP_CURVE_RED,                        // float[]      | hidden
+    ANDROID_TONEMAP_MODE,                             // enum         | public
+    ANDROID_TONEMAP_MAX_CURVE_POINTS,                 // int32        | public
+    ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,         // byte[]       | public
+    ANDROID_TONEMAP_GAMMA,                            // float        | public
+    ANDROID_TONEMAP_PRESET_CURVE,                     // enum         | public
+    ANDROID_TONEMAP_END,
+
+    ANDROID_LED_TRANSMIT =                            // enum         | hidden
+            ANDROID_LED_START,
+    ANDROID_LED_AVAILABLE_LEDS,                       // enum[]       | hidden
+    ANDROID_LED_END,
+
+    ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL =           // enum         | public
+            ANDROID_INFO_START,
+    ANDROID_INFO_END,
+
+    ANDROID_BLACK_LEVEL_LOCK =                        // enum         | public
+            ANDROID_BLACK_LEVEL_START,
+    ANDROID_BLACK_LEVEL_END,
+
+    ANDROID_SYNC_FRAME_NUMBER =                       // enum         | hidden
+            ANDROID_SYNC_START,
+    ANDROID_SYNC_MAX_LATENCY,                         // enum         | public
+    ANDROID_SYNC_END,
+
+    ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR =     // float        | public
+            ANDROID_REPROCESS_START,
+    ANDROID_REPROCESS_MAX_CAPTURE_STALL,              // int32        | public
+    ANDROID_REPROCESS_END,
+
+    ANDROID_DEPTH_MAX_DEPTH_SAMPLES =                 // int32        | system
+            ANDROID_DEPTH_START,
+    ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
+                                                      // enum[]       | hidden
+    ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,// int64[]      | hidden
+    ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,    // int64[]      | hidden
+    ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE,                 // enum         | public
+    ANDROID_DEPTH_END,
+
+} camera_metadata_tag_t;
+
+/**
+ * Enumeration definitions for the various entries that need them
+ */
+
+// ANDROID_COLOR_CORRECTION_MODE
+typedef enum camera_metadata_enum_android_color_correction_mode {
+    ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX,
+    ANDROID_COLOR_CORRECTION_MODE_FAST,
+    ANDROID_COLOR_CORRECTION_MODE_HIGH_QUALITY,
+} camera_metadata_enum_android_color_correction_mode_t;
+
+// ANDROID_COLOR_CORRECTION_ABERRATION_MODE
+typedef enum camera_metadata_enum_android_color_correction_aberration_mode {
+    ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
+    ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
+    ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
+} camera_metadata_enum_android_color_correction_aberration_mode_t;
+
+
+// ANDROID_CONTROL_AE_ANTIBANDING_MODE
+typedef enum camera_metadata_enum_android_control_ae_antibanding_mode {
+    ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,
+    ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ,
+    ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ,
+    ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO,
+} camera_metadata_enum_android_control_ae_antibanding_mode_t;
+
+// ANDROID_CONTROL_AE_LOCK
+typedef enum camera_metadata_enum_android_control_ae_lock {
+    ANDROID_CONTROL_AE_LOCK_OFF,
+    ANDROID_CONTROL_AE_LOCK_ON,
+} camera_metadata_enum_android_control_ae_lock_t;
+
+// ANDROID_CONTROL_AE_MODE
+typedef enum camera_metadata_enum_android_control_ae_mode {
+    ANDROID_CONTROL_AE_MODE_OFF,
+    ANDROID_CONTROL_AE_MODE_ON,
+    ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,
+    ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,
+    ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE,
+} camera_metadata_enum_android_control_ae_mode_t;
+
+// ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER
+typedef enum camera_metadata_enum_android_control_ae_precapture_trigger {
+    ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE,
+    ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_START,
+    ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL,
+} camera_metadata_enum_android_control_ae_precapture_trigger_t;
+
+// ANDROID_CONTROL_AF_MODE
+typedef enum camera_metadata_enum_android_control_af_mode {
+    ANDROID_CONTROL_AF_MODE_OFF,
+    ANDROID_CONTROL_AF_MODE_AUTO,
+    ANDROID_CONTROL_AF_MODE_MACRO,
+    ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,
+    ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE,
+    ANDROID_CONTROL_AF_MODE_EDOF,
+} camera_metadata_enum_android_control_af_mode_t;
+
+// ANDROID_CONTROL_AF_TRIGGER
+typedef enum camera_metadata_enum_android_control_af_trigger {
+    ANDROID_CONTROL_AF_TRIGGER_IDLE,
+    ANDROID_CONTROL_AF_TRIGGER_START,
+    ANDROID_CONTROL_AF_TRIGGER_CANCEL,
+} camera_metadata_enum_android_control_af_trigger_t;
+
+// ANDROID_CONTROL_AWB_LOCK
+typedef enum camera_metadata_enum_android_control_awb_lock {
+    ANDROID_CONTROL_AWB_LOCK_OFF,
+    ANDROID_CONTROL_AWB_LOCK_ON,
+} camera_metadata_enum_android_control_awb_lock_t;
+
+// ANDROID_CONTROL_AWB_MODE
+typedef enum camera_metadata_enum_android_control_awb_mode {
+    ANDROID_CONTROL_AWB_MODE_OFF,
+    ANDROID_CONTROL_AWB_MODE_AUTO,
+    ANDROID_CONTROL_AWB_MODE_INCANDESCENT,
+    ANDROID_CONTROL_AWB_MODE_FLUORESCENT,
+    ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,
+    ANDROID_CONTROL_AWB_MODE_DAYLIGHT,
+    ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT,
+    ANDROID_CONTROL_AWB_MODE_TWILIGHT,
+    ANDROID_CONTROL_AWB_MODE_SHADE,
+} camera_metadata_enum_android_control_awb_mode_t;
+
+// ANDROID_CONTROL_CAPTURE_INTENT
+typedef enum camera_metadata_enum_android_control_capture_intent {
+    ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM,
+    ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW,
+    ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE,
+    ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD,
+    ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT,
+    ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG,
+    ANDROID_CONTROL_CAPTURE_INTENT_MANUAL,
+} camera_metadata_enum_android_control_capture_intent_t;
+
+// ANDROID_CONTROL_EFFECT_MODE
+typedef enum camera_metadata_enum_android_control_effect_mode {
+    ANDROID_CONTROL_EFFECT_MODE_OFF,
+    ANDROID_CONTROL_EFFECT_MODE_MONO,
+    ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,
+    ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,
+    ANDROID_CONTROL_EFFECT_MODE_SEPIA,
+    ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,
+    ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD,
+    ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD,
+    ANDROID_CONTROL_EFFECT_MODE_AQUA,
+} camera_metadata_enum_android_control_effect_mode_t;
+
+// ANDROID_CONTROL_MODE
+typedef enum camera_metadata_enum_android_control_mode {
+    ANDROID_CONTROL_MODE_OFF,
+    ANDROID_CONTROL_MODE_AUTO,
+    ANDROID_CONTROL_MODE_USE_SCENE_MODE,
+    ANDROID_CONTROL_MODE_OFF_KEEP_STATE,
+} camera_metadata_enum_android_control_mode_t;
+
+// ANDROID_CONTROL_SCENE_MODE (values are part of the HAL ABI; append only)
+typedef enum camera_metadata_enum_android_control_scene_mode {
+    ANDROID_CONTROL_SCENE_MODE_DISABLED                         = 0, // explicit anchor; the rest follow sequentially
+    ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,
+    ANDROID_CONTROL_SCENE_MODE_ACTION,
+    ANDROID_CONTROL_SCENE_MODE_PORTRAIT,
+    ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,
+    ANDROID_CONTROL_SCENE_MODE_NIGHT,
+    ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT,
+    ANDROID_CONTROL_SCENE_MODE_THEATRE,
+    ANDROID_CONTROL_SCENE_MODE_BEACH,
+    ANDROID_CONTROL_SCENE_MODE_SNOW,
+    ANDROID_CONTROL_SCENE_MODE_SUNSET,
+    ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,
+    ANDROID_CONTROL_SCENE_MODE_FIREWORKS,
+    ANDROID_CONTROL_SCENE_MODE_SPORTS,
+    ANDROID_CONTROL_SCENE_MODE_PARTY,
+    ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,
+    ANDROID_CONTROL_SCENE_MODE_BARCODE,
+    ANDROID_CONTROL_SCENE_MODE_HIGH_SPEED_VIDEO,
+    ANDROID_CONTROL_SCENE_MODE_HDR,
+    ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY_LOW_LIGHT,
+} camera_metadata_enum_android_control_scene_mode_t;
+
+// ANDROID_CONTROL_VIDEO_STABILIZATION_MODE
+typedef enum camera_metadata_enum_android_control_video_stabilization_mode {
+    ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF,
+    ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON,
+} camera_metadata_enum_android_control_video_stabilization_mode_t;
+
+// ANDROID_CONTROL_AE_STATE
+typedef enum camera_metadata_enum_android_control_ae_state {
+    ANDROID_CONTROL_AE_STATE_INACTIVE,
+    ANDROID_CONTROL_AE_STATE_SEARCHING,
+    ANDROID_CONTROL_AE_STATE_CONVERGED,
+    ANDROID_CONTROL_AE_STATE_LOCKED,
+    ANDROID_CONTROL_AE_STATE_FLASH_REQUIRED,
+    ANDROID_CONTROL_AE_STATE_PRECAPTURE,
+} camera_metadata_enum_android_control_ae_state_t;
+
+// ANDROID_CONTROL_AF_STATE
+typedef enum camera_metadata_enum_android_control_af_state {
+    ANDROID_CONTROL_AF_STATE_INACTIVE,
+    ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN,
+    ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED,
+    ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN,
+    ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED,
+    ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED,
+    ANDROID_CONTROL_AF_STATE_PASSIVE_UNFOCUSED,
+} camera_metadata_enum_android_control_af_state_t;
+
+// ANDROID_CONTROL_AWB_STATE
+typedef enum camera_metadata_enum_android_control_awb_state {
+    ANDROID_CONTROL_AWB_STATE_INACTIVE,
+    ANDROID_CONTROL_AWB_STATE_SEARCHING,
+    ANDROID_CONTROL_AWB_STATE_CONVERGED,
+    ANDROID_CONTROL_AWB_STATE_LOCKED,
+} camera_metadata_enum_android_control_awb_state_t;
+
+// ANDROID_CONTROL_AE_LOCK_AVAILABLE
+typedef enum camera_metadata_enum_android_control_ae_lock_available {
+    ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE,
+    ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE,
+} camera_metadata_enum_android_control_ae_lock_available_t;
+
+// ANDROID_CONTROL_AWB_LOCK_AVAILABLE
+typedef enum camera_metadata_enum_android_control_awb_lock_available {
+    ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE,
+    ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE,
+} camera_metadata_enum_android_control_awb_lock_available_t;
+
+
+// ANDROID_DEMOSAIC_MODE
+typedef enum camera_metadata_enum_android_demosaic_mode {
+    ANDROID_DEMOSAIC_MODE_FAST,
+    ANDROID_DEMOSAIC_MODE_HIGH_QUALITY,
+} camera_metadata_enum_android_demosaic_mode_t;
+
+
+// ANDROID_EDGE_MODE
+typedef enum camera_metadata_enum_android_edge_mode {
+    ANDROID_EDGE_MODE_OFF,
+    ANDROID_EDGE_MODE_FAST,
+    ANDROID_EDGE_MODE_HIGH_QUALITY,
+    ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG,
+} camera_metadata_enum_android_edge_mode_t;
+
+
+// ANDROID_FLASH_MODE
+typedef enum camera_metadata_enum_android_flash_mode {
+    ANDROID_FLASH_MODE_OFF,
+    ANDROID_FLASH_MODE_SINGLE,
+    ANDROID_FLASH_MODE_TORCH,
+} camera_metadata_enum_android_flash_mode_t;
+
+// ANDROID_FLASH_STATE
+typedef enum camera_metadata_enum_android_flash_state {
+    ANDROID_FLASH_STATE_UNAVAILABLE,
+    ANDROID_FLASH_STATE_CHARGING,
+    ANDROID_FLASH_STATE_READY,
+    ANDROID_FLASH_STATE_FIRED,
+    ANDROID_FLASH_STATE_PARTIAL,
+} camera_metadata_enum_android_flash_state_t;
+
+
+// ANDROID_FLASH_INFO_AVAILABLE
+typedef enum camera_metadata_enum_android_flash_info_available {
+    ANDROID_FLASH_INFO_AVAILABLE_FALSE,
+    ANDROID_FLASH_INFO_AVAILABLE_TRUE,
+} camera_metadata_enum_android_flash_info_available_t;
+
+
+// ANDROID_HOT_PIXEL_MODE
+typedef enum camera_metadata_enum_android_hot_pixel_mode {
+    ANDROID_HOT_PIXEL_MODE_OFF,
+    ANDROID_HOT_PIXEL_MODE_FAST,
+    ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY,
+} camera_metadata_enum_android_hot_pixel_mode_t;
+
+
+
+// ANDROID_LENS_OPTICAL_STABILIZATION_MODE
+typedef enum camera_metadata_enum_android_lens_optical_stabilization_mode {
+    ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF,
+    ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON,
+} camera_metadata_enum_android_lens_optical_stabilization_mode_t;
+
+// ANDROID_LENS_FACING
+typedef enum camera_metadata_enum_android_lens_facing {
+    ANDROID_LENS_FACING_FRONT,
+    ANDROID_LENS_FACING_BACK,
+    ANDROID_LENS_FACING_EXTERNAL,
+} camera_metadata_enum_android_lens_facing_t;
+
+// ANDROID_LENS_STATE
+typedef enum camera_metadata_enum_android_lens_state {
+    ANDROID_LENS_STATE_STATIONARY,
+    ANDROID_LENS_STATE_MOVING,
+} camera_metadata_enum_android_lens_state_t;
+
+
+// ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION
+typedef enum camera_metadata_enum_android_lens_info_focus_distance_calibration {
+    ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
+    ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
+    ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
+} camera_metadata_enum_android_lens_info_focus_distance_calibration_t;
+
+
+// ANDROID_NOISE_REDUCTION_MODE
+typedef enum camera_metadata_enum_android_noise_reduction_mode {
+    ANDROID_NOISE_REDUCTION_MODE_OFF,
+    ANDROID_NOISE_REDUCTION_MODE_FAST,
+    ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
+    ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
+    ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG,
+} camera_metadata_enum_android_noise_reduction_mode_t;
+
+
+// ANDROID_QUIRKS_PARTIAL_RESULT
+typedef enum camera_metadata_enum_android_quirks_partial_result {
+    ANDROID_QUIRKS_PARTIAL_RESULT_FINAL,
+    ANDROID_QUIRKS_PARTIAL_RESULT_PARTIAL,
+} camera_metadata_enum_android_quirks_partial_result_t;
+
+
+// ANDROID_REQUEST_METADATA_MODE
+typedef enum camera_metadata_enum_android_request_metadata_mode {
+    ANDROID_REQUEST_METADATA_MODE_NONE,
+    ANDROID_REQUEST_METADATA_MODE_FULL,
+} camera_metadata_enum_android_request_metadata_mode_t;
+
+// ANDROID_REQUEST_TYPE
+typedef enum camera_metadata_enum_android_request_type {
+    ANDROID_REQUEST_TYPE_CAPTURE,
+    ANDROID_REQUEST_TYPE_REPROCESS,
+} camera_metadata_enum_android_request_type_t;
+
+// ANDROID_REQUEST_AVAILABLE_CAPABILITIES
+typedef enum camera_metadata_enum_android_request_available_capabilities {
+    ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE,
+    ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR,
+    ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING,
+    ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW,
+    ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING,
+    ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS,
+    ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE,
+    ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING,
+    ANDROID_REQUEST_AVAILABLE_CAPABILITIES_DEPTH_OUTPUT,
+    ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO,
+} camera_metadata_enum_android_request_available_capabilities_t;
+
+
+// ANDROID_SCALER_AVAILABLE_FORMATS (values appear to mirror HAL graphics pixel formats — confirm against graphics.h)
+typedef enum camera_metadata_enum_android_scaler_available_formats {
+    ANDROID_SCALER_AVAILABLE_FORMATS_RAW16                      = 0x20,
+    ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE                 = 0x24,
+    ANDROID_SCALER_AVAILABLE_FORMATS_YV12                       = 0x32315659, // FourCC 'YV12' in little-endian ASCII
+    ANDROID_SCALER_AVAILABLE_FORMATS_YCrCb_420_SP               = 0x11,
+    ANDROID_SCALER_AVAILABLE_FORMATS_IMPLEMENTATION_DEFINED     = 0x22,
+    ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888              = 0x23,
+    ANDROID_SCALER_AVAILABLE_FORMATS_BLOB                       = 0x21,
+} camera_metadata_enum_android_scaler_available_formats_t;
+
+// ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS
+typedef enum camera_metadata_enum_android_scaler_available_stream_configurations {
+    ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
+    ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT,
+} camera_metadata_enum_android_scaler_available_stream_configurations_t;
+
+// ANDROID_SCALER_CROPPING_TYPE
+typedef enum camera_metadata_enum_android_scaler_cropping_type {
+    ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY,
+    ANDROID_SCALER_CROPPING_TYPE_FREEFORM,
+} camera_metadata_enum_android_scaler_cropping_type_t;
+
+
+// ANDROID_SENSOR_REFERENCE_ILLUMINANT1 (numbering appears to follow the EXIF LightSource tag — TODO confirm)
+typedef enum camera_metadata_enum_android_sensor_reference_illuminant1 {
+    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT               = 1,
+    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT            = 2,
+    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN               = 3,
+    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLASH                  = 4,
+    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER           = 9,
+    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER         = 10,
+    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE                  = 11,
+    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT   = 12,
+    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT  = 13,
+    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT = 14,
+    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT      = 15,
+    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A             = 17,
+    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_B             = 18,
+    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_C             = 19,
+    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55                    = 20,
+    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65                    = 21,
+    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75                    = 22,
+    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50                    = 23,
+    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN    = 24,
+} camera_metadata_enum_android_sensor_reference_illuminant1_t;
+
+// ANDROID_SENSOR_TEST_PATTERN_MODE
+typedef enum camera_metadata_enum_android_sensor_test_pattern_mode {
+    ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,
+    ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR,
+    ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,
+    ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY,
+    ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,
+    ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1                    = 256, // values from 256 up are presumably reserved for custom patterns — confirm
+} camera_metadata_enum_android_sensor_test_pattern_mode_t;
+
+
+// ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT
+typedef enum camera_metadata_enum_android_sensor_info_color_filter_arrangement {
+    ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGGB,
+    ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GRBG,
+    ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GBRG,
+    ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_BGGR,
+    ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGB,
+} camera_metadata_enum_android_sensor_info_color_filter_arrangement_t;
+
+// ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE
+typedef enum camera_metadata_enum_android_sensor_info_timestamp_source {
+    ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN,
+    ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME,
+} camera_metadata_enum_android_sensor_info_timestamp_source_t;
+
+// ANDROID_SENSOR_INFO_LENS_SHADING_APPLIED
+typedef enum camera_metadata_enum_android_sensor_info_lens_shading_applied {
+    ANDROID_SENSOR_INFO_LENS_SHADING_APPLIED_FALSE,
+    ANDROID_SENSOR_INFO_LENS_SHADING_APPLIED_TRUE,
+} camera_metadata_enum_android_sensor_info_lens_shading_applied_t;
+
+
+// ANDROID_SHADING_MODE
+typedef enum camera_metadata_enum_android_shading_mode {
+    ANDROID_SHADING_MODE_OFF,
+    ANDROID_SHADING_MODE_FAST,
+    ANDROID_SHADING_MODE_HIGH_QUALITY,
+} camera_metadata_enum_android_shading_mode_t;
+
+
+// ANDROID_STATISTICS_FACE_DETECT_MODE
+typedef enum camera_metadata_enum_android_statistics_face_detect_mode {
+    ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,
+    ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE,
+    ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,
+} camera_metadata_enum_android_statistics_face_detect_mode_t;
+
+// ANDROID_STATISTICS_HISTOGRAM_MODE
+typedef enum camera_metadata_enum_android_statistics_histogram_mode {
+    ANDROID_STATISTICS_HISTOGRAM_MODE_OFF,
+    ANDROID_STATISTICS_HISTOGRAM_MODE_ON,
+} camera_metadata_enum_android_statistics_histogram_mode_t;
+
+// ANDROID_STATISTICS_SHARPNESS_MAP_MODE
+typedef enum camera_metadata_enum_android_statistics_sharpness_map_mode {
+    ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF,
+    ANDROID_STATISTICS_SHARPNESS_MAP_MODE_ON,
+} camera_metadata_enum_android_statistics_sharpness_map_mode_t;
+
+// ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE
+typedef enum camera_metadata_enum_android_statistics_hot_pixel_map_mode {
+    ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF,
+    ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_ON,
+} camera_metadata_enum_android_statistics_hot_pixel_map_mode_t;
+
+// ANDROID_STATISTICS_SCENE_FLICKER
+typedef enum camera_metadata_enum_android_statistics_scene_flicker {
+    ANDROID_STATISTICS_SCENE_FLICKER_NONE,
+    ANDROID_STATISTICS_SCENE_FLICKER_50HZ,
+    ANDROID_STATISTICS_SCENE_FLICKER_60HZ,
+} camera_metadata_enum_android_statistics_scene_flicker_t;
+
+// ANDROID_STATISTICS_LENS_SHADING_MAP_MODE
+typedef enum camera_metadata_enum_android_statistics_lens_shading_map_mode {
+    ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
+    ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON,
+} camera_metadata_enum_android_statistics_lens_shading_map_mode_t;
+
+
+
+// ANDROID_TONEMAP_MODE
+typedef enum camera_metadata_enum_android_tonemap_mode {
+    ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
+    ANDROID_TONEMAP_MODE_FAST,
+    ANDROID_TONEMAP_MODE_HIGH_QUALITY,
+    ANDROID_TONEMAP_MODE_GAMMA_VALUE,
+    ANDROID_TONEMAP_MODE_PRESET_CURVE,
+} camera_metadata_enum_android_tonemap_mode_t;
+
+// ANDROID_TONEMAP_PRESET_CURVE
+typedef enum camera_metadata_enum_android_tonemap_preset_curve {
+    ANDROID_TONEMAP_PRESET_CURVE_SRGB,
+    ANDROID_TONEMAP_PRESET_CURVE_REC709,
+} camera_metadata_enum_android_tonemap_preset_curve_t;
+
+
+// ANDROID_LED_TRANSMIT
+typedef enum camera_metadata_enum_android_led_transmit {
+    ANDROID_LED_TRANSMIT_OFF,
+    ANDROID_LED_TRANSMIT_ON,
+} camera_metadata_enum_android_led_transmit_t;
+
+// ANDROID_LED_AVAILABLE_LEDS
+typedef enum camera_metadata_enum_android_led_available_leds {
+    ANDROID_LED_AVAILABLE_LEDS_TRANSMIT,
+} camera_metadata_enum_android_led_available_leds_t;
+
+
+// ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL
+typedef enum camera_metadata_enum_android_info_supported_hardware_level {
+    ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED,
+    ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL,
+    ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY,
+} camera_metadata_enum_android_info_supported_hardware_level_t;
+
+
+// ANDROID_BLACK_LEVEL_LOCK
+typedef enum camera_metadata_enum_android_black_level_lock {
+    ANDROID_BLACK_LEVEL_LOCK_OFF,
+    ANDROID_BLACK_LEVEL_LOCK_ON,
+} camera_metadata_enum_android_black_level_lock_t;
+
+
+// ANDROID_SYNC_FRAME_NUMBER
+typedef enum camera_metadata_enum_android_sync_frame_number {
+    ANDROID_SYNC_FRAME_NUMBER_CONVERGING                        = -1, // negative sentinel — presumably cannot collide with real frame numbers
+    ANDROID_SYNC_FRAME_NUMBER_UNKNOWN                           = -2, // negative sentinel
+} camera_metadata_enum_android_sync_frame_number_t;
+
+// ANDROID_SYNC_MAX_LATENCY
+typedef enum camera_metadata_enum_android_sync_max_latency {
+    ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL                  = 0,
+    ANDROID_SYNC_MAX_LATENCY_UNKNOWN                            = -1, // negative sentinel
+} camera_metadata_enum_android_sync_max_latency_t;
+
+
+
+// ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS
+typedef enum camera_metadata_enum_android_depth_available_depth_stream_configurations {
+    ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_OUTPUT,
+    ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_INPUT,
+} camera_metadata_enum_android_depth_available_depth_stream_configurations_t;
+
+// ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE
+typedef enum camera_metadata_enum_android_depth_depth_is_exclusive {
+    ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_FALSE,
+    ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_TRUE,
+} camera_metadata_enum_android_depth_depth_is_exclusive_t;
+
+
diff --git a/media/camera/include/system/camera_vendor_tags.h b/media/camera/include/system/camera_vendor_tags.h
new file mode 100644
index 0000000..57cba49
--- /dev/null
+++ b/media/camera/include/system/camera_vendor_tags.h
@@ -0,0 +1,98 @@
+/*
+ * Copyright 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef SYSTEM_MEDIA_INCLUDE_ANDROID_CAMERA_VENDOR_TAGS_H
+#define SYSTEM_MEDIA_INCLUDE_ANDROID_CAMERA_VENDOR_TAGS_H
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#define CAMERA_METADATA_VENDOR_TAG_BOUNDARY 0x80000000u /* tags with the top bit set are vendor-defined (see get_tag_type below) */
+
+/**
+ * Vendor tags:
+ *
+ * This structure contains basic functions for enumerating an immutable set of
+ * vendor-defined camera metadata tags, and querying static information about
+ * their structure/type.  The intended use of this information is to validate
+ * the structure of metadata returned by the camera HAL, and to allow vendor-
+ * defined metadata tags to be visible in application facing camera API.
+ */
+typedef struct vendor_tag_ops vendor_tag_ops_t; // NOTE(review): struct members use uint32_t but this header never includes <stdint.h> — confirm includers provide it
+struct vendor_tag_ops {
+    /**
+     * Get the number of vendor tags supported on this platform. Used to
+     * calculate the size of buffer needed for holding the array of all tags
+     * returned by get_all_tags().  This must return -1 on error.
+     */
+    int (*get_tag_count)(const vendor_tag_ops_t *v);
+
+    /**
+     * Fill an array with all of the supported vendor tags on this platform.
+     * get_tag_count() must return the number of tags supported, and
+     * tag_array will be allocated with enough space to hold the number of tags
+     * returned by get_tag_count().
+     */
+    void (*get_all_tags)(const vendor_tag_ops_t *v, uint32_t *tag_array);
+
+    /**
+     * Get the vendor section name for a vendor-specified entry tag. This will
+     * only be called for vendor-defined tags.
+     *
+     * The naming convention for the vendor-specific section names should
+     * follow a style similar to the Java package style.  For example,
+     * CameraZoom Inc. must prefix their sections with "com.camerazoom."
+     * This must return NULL if the tag is outside the bounds of
+     * vendor-defined sections.
+     *
+     * There may be different vendor-defined tag sections, for example the
+     * phone maker, the chipset maker, and the camera module maker may each
+     * have their own "com.vendor."-prefixed section.
+     *
+     * The memory pointed to by the return value must remain valid for the
+     * lifetime of the module, and is owned by the module.
+     */
+    const char *(*get_section_name)(const vendor_tag_ops_t *v, uint32_t tag);
+
+    /**
+     * Get the tag name for a vendor-specified entry tag. This is only called
+     * for vendor-defined tags, and must return NULL if it is not a
+     * vendor-defined tag.
+     *
+     * The memory pointed to by the return value must remain valid for the
+     * lifetime of the module, and is owned by the module.
+     */
+    const char *(*get_tag_name)(const vendor_tag_ops_t *v, uint32_t tag);
+
+    /**
+     * Get tag type for a vendor-specified entry tag. The type returned must be
+     * a valid type defined in camera_metadata.h.  This method is only called
+     * for tags >= CAMERA_METADATA_VENDOR_TAG_BOUNDARY, and must return
+     * -1 if the tag is outside the bounds of the vendor-defined sections.
+     */
+    int (*get_tag_type)(const vendor_tag_ops_t *v, uint32_t tag);
+
+    /* Reserved for future use.  These must be initialized to NULL. */
+    void* reserved[8];
+};
+
+#ifdef __cplusplus
+} /* extern "C" */
+#endif
+
+#endif /* SYSTEM_MEDIA_INCLUDE_ANDROID_CAMERA_VENDOR_TAGS_H */
+
diff --git a/media/camera/src/Android.mk b/media/camera/src/Android.mk
new file mode 100644
index 0000000..77f9f3e
--- /dev/null
+++ b/media/camera/src/Android.mk
@@ -0,0 +1,32 @@
+LOCAL_PATH:= $(call my-dir)
+
+include $(CLEAR_VARS)
+
+LOCAL_SRC_FILES := \
+	camera_metadata.c
+
+LOCAL_C_INCLUDES:= \
+	system/media/camera/include \
+	system/media/private/camera/include
+
+LOCAL_SHARED_LIBRARIES := \
+	libcutils \
+	liblog
+
+LOCAL_MODULE := libcamera_metadata
+LOCAL_MODULE_TAGS := optional
+
+LOCAL_CFLAGS += \
+	-Wall \
+	-Wno-unused-parameter \
+	-fvisibility=hidden \
+	-std=c99
+
+ifneq ($(filter userdebug eng,$(TARGET_BUILD_VARIANT)),)
+    # -UNDEBUG re-enables assert(); LOG_NDEBUG=1 compiles out verbose logging (see cutils/log.h)
+    LOCAL_CFLAGS += -UNDEBUG -DLOG_NDEBUG=1
+endif
+
+LOCAL_EXPORT_C_INCLUDE_DIRS := $(LOCAL_PATH)/../include
+
+include $(BUILD_SHARED_LIBRARY)
diff --git a/media/camera/src/camera_metadata.c b/media/camera/src/camera_metadata.c
new file mode 100644
index 0000000..3efc605
--- /dev/null
+++ b/media/camera/src/camera_metadata.c
@@ -0,0 +1,999 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <inttypes.h>
+#include <system/camera_metadata.h>
+#include <camera_metadata_hidden.h>
+
+#define LOG_TAG "camera_metadata"
+#include <cutils/log.h>
+#include <assert.h>
+#include <errno.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+
+#define OK         0
+#define ERROR      1
+#define NOT_FOUND (-ENOENT)
+
+#define ALIGN_TO(val, alignment) \
+    (((uintptr_t)(val) + ((alignment) - 1)) & ~((alignment) - 1))
+
+/**
+ * A single metadata entry, storing an array of values of a given type. If the
+ * array is no larger than 4 bytes in size, it is stored in the data.value[]
+ * array; otherwise, it can be found in the parent's data array at index
+ * data.offset.
+ */
+#define ENTRY_ALIGNMENT ((size_t) 4)
+typedef struct camera_metadata_buffer_entry {
+    uint32_t tag;         // metadata tag identifier
+    uint32_t count;       // number of values stored for this entry
+    union {
+        uint32_t offset;  // byte offset into the packet's data section (payloads > 4 bytes)
+        uint8_t  value[4]; // inline storage for payloads of <= 4 bytes
+    } data;
+    uint8_t  type;        // one of the TYPE_* constants (indexes camera_metadata_type_size)
+    uint8_t  reserved[3]; // padding: keeps sizeof(entry) at 16 bytes
+} camera_metadata_buffer_entry_t;
+
+typedef uint32_t metadata_uptrdiff_t; // 32-bit in-packet offset, even on 64-bit builds
+typedef uint32_t metadata_size_t;     // 32-bit size/count stored in the packed header
+
+/**
+ * A packet of metadata. This is a list of entries, each of which may point to
+ * its values stored at an offset in data.
+ *
+ * It is assumed by the utility functions that the memory layout of the packet
+ * is as follows:
+ *
+ *   |-----------------------------------------------|
+ *   | camera_metadata_t                             |
+ *   |                                               |
+ *   |-----------------------------------------------|
+ *   | reserved for future expansion                 |
+ *   |-----------------------------------------------|
+ *   | camera_metadata_buffer_entry_t #0             |
+ *   |-----------------------------------------------|
+ *   | ....                                          |
+ *   |-----------------------------------------------|
+ *   | camera_metadata_buffer_entry_t #entry_count-1 |
+ *   |-----------------------------------------------|
+ *   | free space for                                |
+ *   | (entry_capacity-entry_count) entries          |
+ *   |-----------------------------------------------|
+ *   | start of camera_metadata.data                 |
+ *   |                                               |
+ *   |-----------------------------------------------|
+ *   | free space for                                |
+ *   | (data_capacity-data_count) bytes              |
+ *   |-----------------------------------------------|
+ *
+ * With the total length of the whole packet being camera_metadata.size bytes.
+ *
+ * In short, the entries and data are contiguous in memory after the metadata
+ * header.
+ */
+#define METADATA_ALIGNMENT ((size_t) 4)
+struct camera_metadata {
+    metadata_size_t          size;        // total packet size in bytes (header + entries + data)
+    uint32_t                 version;     // CURRENT_METADATA_VERSION when created here
+    uint32_t                 flags;       // bitmask; see FLAG_SORTED
+    metadata_size_t          entry_count;
+    metadata_size_t          entry_capacity;
+    metadata_uptrdiff_t      entries_start; // Offset from camera_metadata
+    metadata_size_t          data_count;
+    metadata_size_t          data_capacity;
+    metadata_uptrdiff_t      data_start; // Offset from camera_metadata
+    uint8_t                  reserved[]; // flexible array member: start of the in-packet storage
+};
+
+/**
+ * A datum of metadata. This corresponds to camera_metadata_entry_t::data
+ * with the difference that each element is not a pointer. We need to have a
+ * non-pointer type description in order to figure out the largest alignment
+ * requirement for data (DATA_ALIGNMENT).
+ */
+#define DATA_ALIGNMENT ((size_t) 8) // matches the strictest members below (int64_t/double)
+typedef union camera_metadata_data {
+    uint8_t u8;
+    int32_t i32;
+    float   f;
+    int64_t i64;
+    double  d;
+    camera_metadata_rational_t r;
+} camera_metadata_data_t;
+
+/**
+ * The preferred alignment of a packet of camera metadata. In general,
+ * this is the lowest common multiple of the constituents of a metadata
+ * package, i.e, of DATA_ALIGNMENT and ENTRY_ALIGNMENT.
+ */
+#define MAX_ALIGNMENT(A, B) (((A) > (B)) ? (A) : (B))
+#define METADATA_PACKET_ALIGNMENT \
+    MAX_ALIGNMENT(MAX_ALIGNMENT(DATA_ALIGNMENT, METADATA_ALIGNMENT), ENTRY_ALIGNMENT) /* no trailing ';' — the macro must be usable inside expressions */
+
+/** Versioning information */
+#define CURRENT_METADATA_VERSION 1
+
+/** Flag definitions */
+#define FLAG_SORTED 0x00000001
+
+/** Tag information */
+
+typedef struct tag_info {
+    const char *tag_name; // human-readable tag name
+    uint8_t     tag_type; // one of the TYPE_* constants
+} tag_info_t;
+
+#include "camera_metadata_tag_info.c"
+
+const size_t camera_metadata_type_size[NUM_TYPES] = { // element size in bytes, indexed by TYPE_*
+    [TYPE_BYTE]     = sizeof(uint8_t),
+    [TYPE_INT32]    = sizeof(int32_t),
+    [TYPE_FLOAT]    = sizeof(float),
+    [TYPE_INT64]    = sizeof(int64_t),
+    [TYPE_DOUBLE]   = sizeof(double),
+    [TYPE_RATIONAL] = sizeof(camera_metadata_rational_t)
+};
+
+const char *camera_metadata_type_names[NUM_TYPES] = { // printable name per TYPE_* (presumably for dump/debug output)
+    [TYPE_BYTE]     = "byte",
+    [TYPE_INT32]    = "int32",
+    [TYPE_FLOAT]    = "float",
+    [TYPE_INT64]    = "int64",
+    [TYPE_DOUBLE]   = "double",
+    [TYPE_RATIONAL] = "rational"
+};
+
+// Return a pointer to the packet's entry array, which lives at a fixed
+// byte offset (entries_start) from the start of the packet.
+static camera_metadata_buffer_entry_t *get_entries(
+        const camera_metadata_t *metadata) {
+    uint8_t *base = (uint8_t *)metadata;
+    return (camera_metadata_buffer_entry_t *)(base + metadata->entries_start);
+}
+
+// Return a pointer to the packet's data section (data_start is a byte
+// offset relative to the start of the packet).
+static uint8_t *get_data(const camera_metadata_t *metadata) {
+    return metadata->data_start + (uint8_t *)metadata;
+}
+
+size_t get_camera_metadata_alignment(void) { // '(void)': empty parens declare no prototype in pre-C23 C
+    return METADATA_PACKET_ALIGNMENT;
+}
+
+/**
+ * Duplicate a metadata packet of src_size bytes into newly malloc'd memory
+ * and validate the copy. Returns NULL if src is NULL, allocation fails, or
+ * the copied structure does not validate; on success the caller owns the
+ * returned packet and must release it with free_camera_metadata().
+ */
+camera_metadata_t *allocate_copy_camera_metadata_checked(
+        const camera_metadata_t *src,
+        size_t src_size) {
+
+    if (src == NULL) {
+        return NULL;
+    }
+
+    void *buffer = malloc(src_size);
+    if (buffer == NULL) {
+        // Bug fix: previously memcpy'd into a NULL buffer on OOM (UB).
+        ALOGE("%s: Failed to allocate %zu bytes", __FUNCTION__, src_size);
+        return NULL;
+    }
+    memcpy(buffer, src, src_size);
+
+    camera_metadata_t *metadata = (camera_metadata_t*) buffer;
+    if (validate_camera_metadata_structure(metadata, &src_size) != OK) {
+        free(buffer);
+        return NULL;
+    }
+
+    return metadata;
+}
+
+camera_metadata_t *allocate_camera_metadata(size_t entry_capacity,
+                                            size_t data_capacity) {
+
+    size_t memory_needed = calculate_camera_metadata_size(entry_capacity,
+                                                          data_capacity);
+    void *buffer = malloc(memory_needed); // may return NULL; place_camera_metadata rejects a NULL dst
+    camera_metadata_t *metadata = place_camera_metadata(
+        buffer, memory_needed, entry_capacity, data_capacity);
+    if (!metadata) {
+        /* Reached only when malloc returned NULL, since memory_needed here
+         * matches the size computed inside place_camera_metadata.
+         */
+        free(buffer); // free(NULL) is a no-op
+    }
+    return metadata;
+}
+
+camera_metadata_t *place_camera_metadata(void *dst,
+                                         size_t dst_size,
+                                         size_t entry_capacity,
+                                         size_t data_capacity) {
+    if (dst == NULL) return NULL;
+
+    size_t memory_needed = calculate_camera_metadata_size(entry_capacity,
+                                                          data_capacity);
+    if (memory_needed > dst_size) return NULL; // caller's buffer is too small
+
+    camera_metadata_t *metadata = (camera_metadata_t*)dst; // NOTE(review): dst alignment is not checked here — presumably caught by validation below; confirm
+    metadata->version = CURRENT_METADATA_VERSION;
+    metadata->flags = 0;
+    metadata->entry_count = 0;
+    metadata->entry_capacity = entry_capacity;
+    metadata->entries_start =
+            ALIGN_TO(sizeof(camera_metadata_t), ENTRY_ALIGNMENT); // entries directly follow the (4-byte aligned) header
+    metadata->data_count = 0;
+    metadata->data_capacity = data_capacity;
+    metadata->size = memory_needed;
+    size_t data_unaligned = (uint8_t*)(get_entries(metadata) +
+            metadata->entry_capacity) - (uint8_t*)metadata; // offset of the first byte past the entry array
+    metadata->data_start = ALIGN_TO(data_unaligned, DATA_ALIGNMENT);
+
+    assert(validate_camera_metadata_structure(metadata, NULL) == OK); // debug builds only (no-op under NDEBUG)
+    return metadata;
+}
+void free_camera_metadata(camera_metadata_t *metadata) {
+    free(metadata); // free(NULL) is a no-op, so a NULL packet is accepted
+}
+
+size_t calculate_camera_metadata_size(size_t entry_count,
+                                      size_t data_count) {
+    // Header, then the 4-byte-aligned entry array, then the 8-byte-aligned
+    // data section; mirrors the packet layout documented above.
+    size_t total = sizeof(camera_metadata_t);
+    total = ALIGN_TO(total, ENTRY_ALIGNMENT);
+    total += entry_count * sizeof(camera_metadata_buffer_entry_t);
+    total = ALIGN_TO(total, DATA_ALIGNMENT);
+    total += data_count * sizeof(uint8_t);
+    return total;
+}
+
+size_t get_camera_metadata_size(const camera_metadata_t *metadata) {
+    if (metadata == NULL) return ERROR; // NB: ERROR is 1 returned as a size_t — callers cannot distinguish it from a 1-byte size
+
+    return metadata->size;
+}
+
+size_t get_camera_metadata_compact_size(const camera_metadata_t *metadata) {
+    if (metadata == NULL) return ERROR; // same size_t ERROR (1) sentinel as get_camera_metadata_size
+
+    return calculate_camera_metadata_size(metadata->entry_count,
+                                          metadata->data_count);
+}
+
+size_t get_camera_metadata_entry_count(const camera_metadata_t *metadata) {
+    return metadata->entry_count;
+}
+
+/** Maximum number of entries the buffer can hold. No NULL check — caller
+ *  must pass a valid buffer. */
+size_t get_camera_metadata_entry_capacity(const camera_metadata_t *metadata) {
+    return metadata->entry_capacity;
+}
+
+/** Bytes of the data section currently in use. No NULL check — caller must
+ *  pass a valid buffer. */
+size_t get_camera_metadata_data_count(const camera_metadata_t *metadata) {
+    return metadata->data_count;
+}
+
+/** Total bytes available in the data section. No NULL check — caller must
+ *  pass a valid buffer. */
+size_t get_camera_metadata_data_capacity(const camera_metadata_t *metadata) {
+    return metadata->data_capacity;
+}
+
+/**
+ * Copy src into the caller-supplied buffer dst (dst_size bytes), packing the
+ * entry/data capacities down to src's current counts. Returns the new
+ * metadata pointer (== dst) or NULL when dst is NULL or too small.
+ *
+ * NOTE(review): a NULL src makes get_camera_metadata_compact_size() return
+ * ERROR ((size_t)-1), so the size check below rejects it before dereference.
+ */
+camera_metadata_t* copy_camera_metadata(void *dst, size_t dst_size,
+        const camera_metadata_t *src) {
+    size_t needed = get_camera_metadata_compact_size(src);
+
+    if (dst == NULL || dst_size < needed) {
+        return NULL;
+    }
+
+    camera_metadata_t *copy =
+        place_camera_metadata(dst, dst_size, src->entry_count, src->data_count);
+
+    copy->flags = src->flags;
+    copy->entry_count = src->entry_count;
+    copy->data_count = src->data_count;
+
+    memcpy(get_entries(copy), get_entries(src),
+            copy->entry_count * sizeof(camera_metadata_buffer_entry_t));
+    memcpy(get_data(copy), get_data(src),
+            copy->data_count * sizeof(uint8_t));
+
+    assert(validate_camera_metadata_structure(copy, NULL) == OK);
+    return copy;
+}
+
+/**
+ * Sanity-check a metadata buffer: pointer alignment for all internal record
+ * types, header invariants (size bound, entry_count <= entry_capacity,
+ * entries region ends before data region, data region ends within size), and
+ * per-entry checks (entry alignment, valid type, tag/type agreement for
+ * non-vendor tags, data offset + size within data_capacity, zero offset for
+ * zero-count entries). Logs the first violation via ALOGE and returns ERROR;
+ * returns OK when everything checks out.
+ *
+ * @param metadata       buffer to validate; NULL is rejected with ERROR
+ * @param expected_size  optional upper bound that metadata->size must not
+ *                       exceed (skipped when NULL)
+ */
+int validate_camera_metadata_structure(const camera_metadata_t *metadata,
+                                       const size_t *expected_size) {
+
+    if (metadata == NULL) {
+        ALOGE("%s: metadata is null!", __FUNCTION__);
+        return ERROR;
+    }
+
+    // Check that the metadata pointer is well-aligned first.
+    {
+        static const struct {
+            const char *name;
+            size_t alignment;
+        } alignments[] = {
+            {
+                .name = "camera_metadata",
+                .alignment = METADATA_ALIGNMENT
+            },
+            {
+                .name = "camera_metadata_buffer_entry",
+                .alignment = ENTRY_ALIGNMENT
+            },
+            {
+                .name = "camera_metadata_data",
+                .alignment = DATA_ALIGNMENT
+            },
+        };
+
+        for (size_t i = 0; i < sizeof(alignments)/sizeof(alignments[0]); ++i) {
+            uintptr_t aligned_ptr = ALIGN_TO(metadata, alignments[i].alignment);
+
+            if ((uintptr_t)metadata != aligned_ptr) {
+                ALOGE("%s: Metadata pointer is not aligned (actual %p, "
+                      "expected %p) to type %s",
+                      __FUNCTION__, metadata,
+                      (void*)aligned_ptr, alignments[i].name);
+                return ERROR;
+            }
+        }
+    }
+
+    /**
+     * Check that the metadata contents are correct
+     */
+
+    if (expected_size != NULL && metadata->size > *expected_size) {
+        ALOGE("%s: Metadata size (%" PRIu32 ") should be <= expected size (%zu)",
+              __FUNCTION__, metadata->size, *expected_size);
+        return ERROR;
+    }
+
+    if (metadata->entry_count > metadata->entry_capacity) {
+        ALOGE("%s: Entry count (%" PRIu32 ") should be <= entry capacity "
+              "(%" PRIu32 ")",
+              __FUNCTION__, metadata->entry_count, metadata->entry_capacity);
+        return ERROR;
+    }
+
+    // Entries region must fit between entries_start and data_start.
+    const metadata_uptrdiff_t entries_end =
+        metadata->entries_start + metadata->entry_capacity;
+    if (entries_end < metadata->entries_start || // overflow check
+        entries_end > metadata->data_start) {
+
+        ALOGE("%s: Entry start + capacity (%" PRIu32 ") should be <= data start "
+              "(%" PRIu32 ")",
+               __FUNCTION__,
+              (metadata->entries_start + metadata->entry_capacity),
+              metadata->data_start);
+        return ERROR;
+    }
+
+    // Data region must fit between data_start and the end of the buffer.
+    const metadata_uptrdiff_t data_end =
+        metadata->data_start + metadata->data_capacity;
+    if (data_end < metadata->data_start || // overflow check
+        data_end > metadata->size) {
+
+        ALOGE("%s: Data start + capacity (%" PRIu32 ") should be <= total size "
+              "(%" PRIu32 ")",
+               __FUNCTION__,
+              (metadata->data_start + metadata->data_capacity),
+              metadata->size);
+        return ERROR;
+    }
+
+    // Validate each entry
+    const metadata_size_t entry_count = metadata->entry_count;
+    camera_metadata_buffer_entry_t *entries = get_entries(metadata);
+
+    for (size_t i = 0; i < entry_count; ++i) {
+
+        if ((uintptr_t)&entries[i] != ALIGN_TO(&entries[i], ENTRY_ALIGNMENT)) {
+            ALOGE("%s: Entry index %zu had bad alignment (address %p),"
+                  " expected alignment %zu",
+                  __FUNCTION__, i, &entries[i], ENTRY_ALIGNMENT);
+            return ERROR;
+        }
+
+        camera_metadata_buffer_entry_t entry = entries[i];
+
+        if (entry.type >= NUM_TYPES) {
+            ALOGE("%s: Entry index %zu had a bad type %d",
+                  __FUNCTION__, i, entry.type);
+            return ERROR;
+        }
+
+        // TODO: fix vendor_tag_ops across processes so we don't need to special
+        //       case vendor-specific tags
+        uint32_t tag_section = entry.tag >> 16;
+        int tag_type = get_camera_metadata_tag_type(entry.tag);
+        if (tag_type != (int)entry.type && tag_section < VENDOR_SECTION) {
+            ALOGE("%s: Entry index %zu had tag type %d, but the type was %d",
+                  __FUNCTION__, i, tag_type, entry.type);
+            return ERROR;
+        }
+
+        // Zero means the payload is stored inline in entry.data.value.
+        size_t data_size =
+                calculate_camera_metadata_entry_data_size(entry.type,
+                                                          entry.count);
+
+        if (data_size != 0) {
+            camera_metadata_data_t *data =
+                    (camera_metadata_data_t*) (get_data(metadata) +
+                                               entry.data.offset);
+
+            if ((uintptr_t)data != ALIGN_TO(data, DATA_ALIGNMENT)) {
+                ALOGE("%s: Entry index %zu had bad data alignment (address %p),"
+                      " expected align %zu, (tag name %s, data size %zu)",
+                      __FUNCTION__, i, data, DATA_ALIGNMENT,
+                      get_camera_metadata_tag_name(entry.tag) ?: "unknown",
+                      data_size);
+                return ERROR;
+            }
+
+            size_t data_entry_end = entry.data.offset + data_size;
+            if (data_entry_end < entry.data.offset || // overflow check
+                data_entry_end > metadata->data_capacity) {
+
+                ALOGE("%s: Entry index %zu data ends (%zu) beyond the capacity "
+                      "%" PRIu32, __FUNCTION__, i, data_entry_end,
+                      metadata->data_capacity);
+                return ERROR;
+            }
+
+        } else if (entry.count == 0) {
+            if (entry.data.offset != 0) {
+                ALOGE("%s: Entry index %zu had 0 items, but offset was non-0 "
+                     "(%" PRIu32 "), tag name: %s", __FUNCTION__, i, entry.data.offset,
+                        get_camera_metadata_tag_name(entry.tag) ?: "unknown");
+                return ERROR;
+            }
+        } // else data stored inline, so we look at value which can be anything.
+    }
+
+    return OK;
+}
+
+/**
+ * Append all of src's entries and data onto dst in place. Fails with ERROR
+ * if either pointer is NULL or dst lacks spare entry/data capacity. Copied
+ * entries that reference the data section have their data.offset rebased by
+ * dst's prior data_count. Sortedness: preserved only when one side is empty;
+ * otherwise FLAG_SORTED is cleared since the concatenation may be unsorted.
+ */
+int append_camera_metadata(camera_metadata_t *dst,
+        const camera_metadata_t *src) {
+    if (dst == NULL || src == NULL ) return ERROR;
+
+    if (dst->entry_capacity < src->entry_count + dst->entry_count) return ERROR;
+    if (dst->data_capacity < src->data_count + dst->data_count) return ERROR;
+
+    memcpy(get_entries(dst) + dst->entry_count, get_entries(src),
+            sizeof(camera_metadata_buffer_entry_t[src->entry_count]));
+    memcpy(get_data(dst) + dst->data_count, get_data(src),
+            sizeof(uint8_t[src->data_count]));
+    if (dst->data_count != 0) {
+        // Rebase data offsets of the just-copied entries; inline-stored
+        // entries (data size 0) carry values, not offsets, so skip them.
+        camera_metadata_buffer_entry_t *entry = get_entries(dst) + dst->entry_count;
+        for (size_t i = 0; i < src->entry_count; i++, entry++) {
+            if ( calculate_camera_metadata_entry_data_size(entry->type,
+                            entry->count) > 0 ) {
+                entry->data.offset += dst->data_count;
+            }
+        }
+    }
+    if (dst->entry_count == 0) {
+        // Appending onto empty buffer, keep sorted state
+        dst->flags |= src->flags & FLAG_SORTED;
+    } else if (src->entry_count != 0) {
+        // Both src, dst are nonempty, cannot assume sort remains
+        dst->flags &= ~FLAG_SORTED;
+    } else {
+        // Src is empty, keep dst sorted state
+    }
+    dst->entry_count += src->entry_count;
+    dst->data_count += src->data_count;
+
+    assert(validate_camera_metadata_structure(dst, NULL) == OK);
+    return OK;
+}
+
+/**
+ * Allocate a fresh buffer with capacities equal to src's current counts and
+ * copy src into it via append_camera_metadata(). Returns NULL when src is
+ * NULL, allocation fails, or the append fails; caller owns the result and
+ * must release it with free_camera_metadata().
+ */
+camera_metadata_t *clone_camera_metadata(const camera_metadata_t *src) {
+    int res;
+    if (src == NULL) return NULL;
+    camera_metadata_t *clone = allocate_camera_metadata(
+        get_camera_metadata_entry_count(src),
+        get_camera_metadata_data_count(src));
+    if (clone != NULL) {
+        res = append_camera_metadata(clone, src);
+        if (res != OK) {
+            free_camera_metadata(clone);
+            clone = NULL;
+        }
+    }
+    // Fix: only validate a non-NULL clone. validate_camera_metadata_structure
+    // returns ERROR for NULL, so the old unconditional assert aborted debug
+    // builds on the legitimate allocation-failure path.
+    assert(clone == NULL ||
+            validate_camera_metadata_structure(clone, NULL) == OK);
+    return clone;
+}
+
+/**
+ * Bytes of data-section storage needed for an entry of the given type and
+ * element count. Returns 0 when the payload fits inline in the entry's
+ * 4-byte value field, or when the type is invalid.
+ */
+size_t calculate_camera_metadata_entry_data_size(uint8_t type,
+        size_t data_count) {
+    if (type >= NUM_TYPES) {
+        return 0;
+    }
+    size_t payload_bytes = data_count * camera_metadata_type_size[type];
+    if (payload_bytes <= 4) {
+        return 0;  // stored inline; no data-section space consumed
+    }
+    return ALIGN_TO(payload_bytes, DATA_ALIGNMENT);
+}
+
+/**
+ * Internal helper: append one entry of the given type without consulting the
+ * tag table. Payloads whose aligned size is nonzero (> 4 bytes) are copied
+ * into the data section at the current data_count; smaller payloads are
+ * stored inline in entry->data.value. Always clears FLAG_SORTED.
+ *
+ * Returns ERROR when dst is NULL, the entry array is full, data is NULL with
+ * a nonzero count, or the data section lacks capacity.
+ */
+static int add_camera_metadata_entry_raw(camera_metadata_t *dst,
+        uint32_t tag,
+        uint8_t  type,
+        const void *data,
+        size_t data_count) {
+
+    if (dst == NULL) return ERROR;
+    if (dst->entry_count == dst->entry_capacity) return ERROR;
+    if (data_count && data == NULL) return ERROR;
+
+    // Aligned data-section footprint; 0 means the payload is stored inline.
+    size_t data_bytes =
+            calculate_camera_metadata_entry_data_size(type, data_count);
+    if (data_bytes + dst->data_count > dst->data_capacity) return ERROR;
+
+    // Actual unaligned payload size — the number of bytes copied from `data`.
+    size_t data_payload_bytes =
+            data_count * camera_metadata_type_size[type];
+    camera_metadata_buffer_entry_t *entry = get_entries(dst) + dst->entry_count;
+    memset(entry, 0, sizeof(camera_metadata_buffer_entry_t));
+    entry->tag = tag;
+    entry->type = type;
+    entry->count = data_count;
+
+    if (data_bytes == 0) {
+        memcpy(entry->data.value, data,
+                data_payload_bytes);
+    } else {
+        entry->data.offset = dst->data_count;
+        memcpy(get_data(dst) + entry->data.offset, data,
+                data_payload_bytes);
+        dst->data_count += data_bytes;
+    }
+    dst->entry_count++;
+    dst->flags &= ~FLAG_SORTED;
+    assert(validate_camera_metadata_structure(dst, NULL) == OK);
+    return OK;
+}
+
+/**
+ * Add a new entry for `tag`, inferring its type from the tag table.
+ * Fails with ERROR for unknown tags or when the buffer lacks capacity.
+ */
+int add_camera_metadata_entry(camera_metadata_t *dst,
+        uint32_t tag,
+        const void *data,
+        size_t data_count) {
+
+    int type = get_camera_metadata_tag_type(tag);
+    if (type == -1) {
+        ALOGE("%s: Unknown tag %04x.", __FUNCTION__, tag);
+        return ERROR;
+    }
+
+    return add_camera_metadata_entry_raw(dst, tag, type, data, data_count);
+}
+
+/** qsort()/bsearch() comparator ordering entries by ascending tag value. */
+static int compare_entry_tags(const void *p1, const void *p2) {
+    const camera_metadata_buffer_entry_t *e1 = p1;
+    const camera_metadata_buffer_entry_t *e2 = p2;
+    if (e1->tag < e2->tag) return -1;
+    if (e1->tag > e2->tag) return 1;
+    return 0;
+}
+
+/**
+ * Sort the entry array by tag so find_camera_metadata_entry() can use binary
+ * search. No-op if FLAG_SORTED is already set; ERROR only for NULL dst.
+ */
+int sort_camera_metadata(camera_metadata_t *dst) {
+    if (dst == NULL) {
+        return ERROR;
+    }
+    if ((dst->flags & FLAG_SORTED) == 0) {
+        qsort(get_entries(dst), dst->entry_count,
+                sizeof(camera_metadata_buffer_entry_t),
+                compare_entry_tags);
+        dst->flags |= FLAG_SORTED;
+
+        assert(validate_camera_metadata_structure(dst, NULL) == OK);
+    }
+    return OK;
+}
+
+/**
+ * Fetch the entry at `index` into *entry. The returned data pointer aliases
+ * the metadata buffer (the inline value field for payloads <= 4 bytes, the
+ * data section otherwise) and is invalidated by any mutation of src.
+ * Returns ERROR on NULL arguments, out-of-range index, or a corrupt entry
+ * type, which would otherwise index past camera_metadata_type_size[].
+ */
+int get_camera_metadata_entry(camera_metadata_t *src,
+        size_t index,
+        camera_metadata_entry_t *entry) {
+    if (src == NULL || entry == NULL) return ERROR;
+    if (index >= src->entry_count) return ERROR;
+
+    camera_metadata_buffer_entry_t *buffer_entry = get_entries(src) + index;
+
+    // Fix: reject corrupt entries before using type as an array index below;
+    // this accessor can be called on buffers that were never validated.
+    if (buffer_entry->type >= NUM_TYPES) return ERROR;
+
+    entry->index = index;
+    entry->tag = buffer_entry->tag;
+    entry->type = buffer_entry->type;
+    entry->count = buffer_entry->count;
+    if (buffer_entry->count *
+            camera_metadata_type_size[buffer_entry->type] > 4) {
+        entry->data.u8 = get_data(src) + buffer_entry->data.offset;
+    } else {
+        entry->data.u8 = buffer_entry->data.value;
+    }
+    return OK;
+}
+
+/**
+ * Read-only variant of get_camera_metadata_entry(). The layouts of
+ * camera_metadata_entry_t and camera_metadata_ro_entry_t are assumed to be
+ * identical, so the call is forwarded through casts.
+ */
+int get_camera_metadata_ro_entry(const camera_metadata_t *src,
+        size_t index,
+        camera_metadata_ro_entry_t *entry) {
+    camera_metadata_t *mutable_src = (camera_metadata_t*)src;
+    camera_metadata_entry_t *rw_entry = (camera_metadata_entry_t*)entry;
+    return get_camera_metadata_entry(mutable_src, index, rw_entry);
+}
+
+/**
+ * Locate the first entry with the given tag and fill *entry. Uses binary
+ * search when the buffer is sorted (FLAG_SORTED), linear scan otherwise.
+ * Returns NOT_FOUND when no entry matches, ERROR for NULL src.
+ */
+int find_camera_metadata_entry(camera_metadata_t *src,
+        uint32_t tag,
+        camera_metadata_entry_t *entry) {
+    if (src == NULL) return ERROR;
+
+    uint32_t index;
+    if (src->flags & FLAG_SORTED) {
+        // Sorted: binary search keyed on the tag field only.
+        camera_metadata_buffer_entry_t key;
+        key.tag = tag;
+        camera_metadata_buffer_entry_t *found = bsearch(&key,
+                get_entries(src),
+                src->entry_count,
+                sizeof(camera_metadata_buffer_entry_t),
+                compare_entry_tags);
+        if (found == NULL) return NOT_FOUND;
+        index = found - get_entries(src);
+    } else {
+        // Unsorted: linear scan, first match wins.
+        camera_metadata_buffer_entry_t *cur = get_entries(src);
+        for (index = 0; index < src->entry_count; index++, cur++) {
+            if (cur->tag == tag) {
+                break;
+            }
+        }
+        if (index == src->entry_count) return NOT_FOUND;
+    }
+
+    return get_camera_metadata_entry(src, index, entry);
+}
+
+/**
+ * Read-only variant of find_camera_metadata_entry(); forwards through casts,
+ * relying on the identical layout of the rw and ro entry structs.
+ */
+int find_camera_metadata_ro_entry(const camera_metadata_t *src,
+        uint32_t tag,
+        camera_metadata_ro_entry_t *entry) {
+    camera_metadata_t *mutable_src = (camera_metadata_t*)src;
+    camera_metadata_entry_t *rw_entry = (camera_metadata_entry_t*)entry;
+    return find_camera_metadata_entry(mutable_src, tag, rw_entry);
+}
+
+
+/**
+ * Remove the entry at `index`. If the entry owned data-section storage, that
+ * region is compacted out with memmove and every later entry's data.offset
+ * is shifted down by the freed size; then the entry array itself is
+ * compacted. Returns ERROR on NULL dst or an out-of-range index.
+ */
+int delete_camera_metadata_entry(camera_metadata_t *dst,
+        size_t index) {
+    if (dst == NULL) return ERROR;
+    if (index >= dst->entry_count) return ERROR;
+
+    camera_metadata_buffer_entry_t *entry = get_entries(dst) + index;
+    // 0 means the payload was stored inline and owns no data-section bytes.
+    size_t data_bytes = calculate_camera_metadata_entry_data_size(entry->type,
+            entry->count);
+
+    if (data_bytes > 0) {
+        // Shift data buffer to overwrite deleted data
+        uint8_t *start = get_data(dst) + entry->data.offset;
+        uint8_t *end = start + data_bytes;
+        size_t length = dst->data_count - entry->data.offset - data_bytes;
+        memmove(start, end, length);
+
+        // Update all entry indices to account for shift
+        camera_metadata_buffer_entry_t *e = get_entries(dst);
+        size_t i;
+        for (i = 0; i < dst->entry_count; i++) {
+            if (calculate_camera_metadata_entry_data_size(
+                    e->type, e->count) > 0 &&
+                    e->data.offset > entry->data.offset) {
+                e->data.offset -= data_bytes;
+            }
+            ++e;
+        }
+        dst->data_count -= data_bytes;
+    }
+    // Shift entry array
+    memmove(entry, entry + 1,
+            sizeof(camera_metadata_buffer_entry_t) *
+            (dst->entry_count - index - 1) );
+    dst->entry_count -= 1;
+
+    assert(validate_camera_metadata_structure(dst, NULL) == OK);
+    return OK;
+}
+
+/**
+ * Replace the payload of the entry at `index` with data_count elements of
+ * the entry's existing type. If the external (aligned) data size changes,
+ * the old data (if any) is removed, later entries' offsets are shifted down,
+ * and the new data is appended at the end of the data section. If the size
+ * is unchanged the payload is overwritten in place; payloads <= 4 bytes are
+ * stored inline in the entry's value field. On success, optionally fills
+ * *updated_entry with the refreshed view. Returns ERROR on NULL dst, a bad
+ * index, or insufficient data capacity for a grown payload.
+ */
+int update_camera_metadata_entry(camera_metadata_t *dst,
+        size_t index,
+        const void *data,
+        size_t data_count,
+        camera_metadata_entry_t *updated_entry) {
+    if (dst == NULL) return ERROR;
+    if (index >= dst->entry_count) return ERROR;
+
+    camera_metadata_buffer_entry_t *entry = get_entries(dst) + index;
+
+    // Aligned data-section footprint of the new payload (0 => inline).
+    size_t data_bytes =
+            calculate_camera_metadata_entry_data_size(entry->type,
+                    data_count);
+    // Unaligned byte count actually copied from `data`.
+    size_t data_payload_bytes =
+            data_count * camera_metadata_type_size[entry->type];
+
+    // Aligned footprint of the entry's current payload.
+    size_t entry_bytes =
+            calculate_camera_metadata_entry_data_size(entry->type,
+                    entry->count);
+    if (data_bytes != entry_bytes) {
+        // May need to shift/add to data array
+        if (dst->data_capacity < dst->data_count + data_bytes - entry_bytes) {
+            // No room
+            return ERROR;
+        }
+        if (entry_bytes != 0) {
+            // Remove old data
+            uint8_t *start = get_data(dst) + entry->data.offset;
+            uint8_t *end = start + entry_bytes;
+            size_t length = dst->data_count - entry->data.offset - entry_bytes;
+            memmove(start, end, length);
+            dst->data_count -= entry_bytes;
+
+            // Update all entry indices to account for shift
+            camera_metadata_buffer_entry_t *e = get_entries(dst);
+            size_t i;
+            for (i = 0; i < dst->entry_count; i++) {
+                if (calculate_camera_metadata_entry_data_size(
+                        e->type, e->count) > 0 &&
+                        e->data.offset > entry->data.offset) {
+                    e->data.offset -= entry_bytes;
+                }
+                ++e;
+            }
+        }
+
+        if (data_bytes != 0) {
+            // Append new data
+            entry->data.offset = dst->data_count;
+
+            memcpy(get_data(dst) + entry->data.offset, data, data_payload_bytes);
+            dst->data_count += data_bytes;
+        }
+    } else if (data_bytes != 0) {
+        // data size unchanged, reuse same data location
+        memcpy(get_data(dst) + entry->data.offset, data, data_payload_bytes);
+    }
+
+    if (data_bytes == 0) {
+        // Data fits into entry
+        memcpy(entry->data.value, data,
+                data_payload_bytes);
+    }
+
+    entry->count = data_count;
+
+    if (updated_entry != NULL) {
+        get_camera_metadata_entry(dst,
+                index,
+                updated_entry);
+    }
+
+    assert(validate_camera_metadata_structure(dst, NULL) == OK);
+    return OK;
+}
+
+// Process-global vendor tag query ops, installed by
+// set_camera_metadata_vendor_ops(); NULL until a vendor provider registers.
+static const vendor_tag_ops_t *vendor_tag_ops = NULL;
+
+/**
+ * Human-readable section name (e.g. "android.lens") for a tag, or NULL for
+ * sections outside the known android range. Vendor-section tags delegate to
+ * the registered vendor_tag_ops when one is installed.
+ */
+const char *get_camera_metadata_section_name(uint32_t tag) {
+    uint32_t section = tag >> 16;
+    if (section >= VENDOR_SECTION && vendor_tag_ops != NULL) {
+        return vendor_tag_ops->get_section_name(vendor_tag_ops, tag);
+    }
+    if (section >= ANDROID_SECTION_COUNT) {
+        return NULL;
+    }
+    return camera_metadata_section_names[section];
+}
+
+/**
+ * Human-readable name for a tag, or NULL when the tag falls outside the
+ * known android sections or past its section's bound. Vendor-section tags
+ * delegate to the registered vendor_tag_ops when one is installed.
+ */
+const char *get_camera_metadata_tag_name(uint32_t tag) {
+    uint32_t section = tag >> 16;
+    if (section >= VENDOR_SECTION && vendor_tag_ops != NULL) {
+        return vendor_tag_ops->get_tag_name(vendor_tag_ops, tag);
+    }
+    if (section >= ANDROID_SECTION_COUNT ||
+            tag >= camera_metadata_section_bounds[section][1]) {
+        return NULL;
+    }
+    uint32_t index_in_section = tag & 0xFFFF;
+    return tag_info[section][index_in_section].tag_name;
+}
+
+/**
+ * TYPE_* constant for a tag, or -1 when the tag falls outside the known
+ * android sections or past its section's bound. Vendor-section tags delegate
+ * to the registered vendor_tag_ops when one is installed.
+ */
+int get_camera_metadata_tag_type(uint32_t tag) {
+    uint32_t section = tag >> 16;
+    if (section >= VENDOR_SECTION && vendor_tag_ops != NULL) {
+        return vendor_tag_ops->get_tag_type(vendor_tag_ops, tag);
+    }
+    if (section >= ANDROID_SECTION_COUNT ||
+            tag >= camera_metadata_section_bounds[section][1]) {
+        return -1;
+    }
+    uint32_t index_in_section = tag & 0xFFFF;
+    return tag_info[section][index_in_section].tag_type;
+}
+
+/**
+ * **DEPRECATED** — kept only for link compatibility; logs and always fails.
+ * Use set_camera_metadata_vendor_ops() instead.
+ */
+int set_camera_metadata_vendor_tag_ops(const vendor_tag_query_ops_t* ops) {
+    (void) ops;  // intentionally unused; silences -Wunused-parameter
+    ALOGE("%s: This function has been deprecated", __FUNCTION__);
+    return ERROR;
+}
+
+// Declared in system/media/private/camera/include/camera_metadata_hidden.h
+/**
+ * Install the process-global vendor tag ops used for tags in sections
+ * >= VENDOR_SECTION. Passing NULL clears the provider; always returns OK.
+ */
+int set_camera_metadata_vendor_ops(const vendor_tag_ops_t* ops) {
+    vendor_tag_ops = ops;
+    return OK;
+}
+
+// Forward declaration: pretty-printer for one entry's payload (defined at
+// the bottom of this file, used by dump_indented_camera_metadata()).
+static void print_data(int fd, const uint8_t *data_ptr, uint32_t tag, int type,
+        int count,
+        int indentation);
+
+/** Dump the metadata in text form to fd; equivalent to a zero-indentation
+ *  call to dump_indented_camera_metadata(). */
+void dump_camera_metadata(const camera_metadata_t *metadata,
+        int fd,
+        int verbosity) {
+    dump_indented_camera_metadata(metadata, fd, verbosity, 0);
+}
+
+/**
+ * Text-dump the metadata buffer to fd with the given left indentation.
+ * verbosity < 1: header and one tag line per entry only; verbosity == 1:
+ * additionally prints up to 16 values per entry; verbosity >= 2: prints all
+ * values. Entries with out-of-range data offsets are logged and skipped.
+ * A NULL metadata pointer prints a "Not allocated" line and returns.
+ */
+void dump_indented_camera_metadata(const camera_metadata_t *metadata,
+        int fd,
+        int verbosity,
+        int indentation) {
+    if (metadata == NULL) {
+        dprintf(fd, "%*sDumping camera metadata array: Not allocated\n",
+                indentation, "");
+        return;
+    }
+    unsigned int i;
+    dprintf(fd,
+            "%*sDumping camera metadata array: %" PRIu32 " / %" PRIu32 " entries, "
+            "%" PRIu32 " / %" PRIu32 " bytes of extra data.\n", indentation, "",
+            metadata->entry_count, metadata->entry_capacity,
+            metadata->data_count, metadata->data_capacity);
+    dprintf(fd, "%*sVersion: %d, Flags: %08x\n",
+            indentation + 2, "",
+            metadata->version, metadata->flags);
+    camera_metadata_buffer_entry_t *entry = get_entries(metadata);
+    for (i=0; i < metadata->entry_count; i++, entry++) {
+
+        // Fall back to placeholder names for unknown sections/tags/types.
+        const char *tag_name, *tag_section;
+        tag_section = get_camera_metadata_section_name(entry->tag);
+        if (tag_section == NULL) {
+            tag_section = "unknownSection";
+        }
+        tag_name = get_camera_metadata_tag_name(entry->tag);
+        if (tag_name == NULL) {
+            tag_name = "unknownTag";
+        }
+        const char *type_name;
+        if (entry->type >= NUM_TYPES) {
+            type_name = "unknown";
+        } else {
+            type_name = camera_metadata_type_names[entry->type];
+        }
+        dprintf(fd, "%*s%s.%s (%05x): %s[%" PRIu32 "]\n",
+             indentation + 2, "",
+             tag_section,
+             tag_name,
+             entry->tag,
+             type_name,
+             entry->count);
+
+        if (verbosity < 1) continue;
+
+        if (entry->type >= NUM_TYPES) continue;
+
+        // Payloads > 4 bytes live in the data section; smaller ones inline.
+        size_t type_size = camera_metadata_type_size[entry->type];
+        uint8_t *data_ptr;
+        if ( type_size * entry->count > 4 ) {
+            if (entry->data.offset >= metadata->data_count) {
+                ALOGE("%s: Malformed entry data offset: %" PRIu32 " (max %" PRIu32 ")",
+                        __FUNCTION__,
+                        entry->data.offset,
+                        metadata->data_count);
+                continue;
+            }
+            data_ptr = get_data(metadata) + entry->data.offset;
+        } else {
+            data_ptr = entry->data.value;
+        }
+        int count = entry->count;
+        if (verbosity < 2 && count > 16) count = 16;
+
+        print_data(fd, data_ptr, entry->tag, entry->type, count, indentation);
+    }
+}
+
+/**
+ * Print `count` values of `type` starting at data_ptr to fd, wrapping lines
+ * at a per-type number of values. BYTE and INT32 values are rendered via
+ * camera_metadata_enum_snprint() when the tag defines enum names, falling
+ * back to numeric output otherwise.
+ * NOTE(review): assumes type < NUM_TYPES — the caller filters invalid types
+ * before calling; an out-of-range type would index past values_per_line[].
+ */
+static void print_data(int fd, const uint8_t *data_ptr, uint32_t tag,
+        int type, int count, int indentation) {
+    static int values_per_line[NUM_TYPES] = {
+        [TYPE_BYTE]     = 16,
+        [TYPE_INT32]    = 4,
+        [TYPE_FLOAT]    = 8,
+        [TYPE_INT64]    = 2,
+        [TYPE_DOUBLE]   = 4,
+        [TYPE_RATIONAL] = 2,
+    };
+    size_t type_size = camera_metadata_type_size[type];
+    char value_string_tmp[CAMERA_METADATA_ENUM_STRING_MAX_SIZE];
+    uint32_t value;
+
+    // Round up so a partial final line is still printed.
+    int lines = count / values_per_line[type];
+    if (count % values_per_line[type] != 0) lines++;
+
+    int index = 0;
+    int j, k;
+    for (j = 0; j < lines; j++) {
+        dprintf(fd, "%*s[", indentation + 4, "");
+        for (k = 0;
+             k < values_per_line[type] && count > 0;
+             k++, count--, index += type_size) {
+
+            switch (type) {
+                case TYPE_BYTE:
+                    value = *(data_ptr + index);
+                    if (camera_metadata_enum_snprint(tag,
+                                                     value,
+                                                     value_string_tmp,
+                                                     sizeof(value_string_tmp))
+                        == OK) {
+                        dprintf(fd, "%s ", value_string_tmp);
+                    } else {
+                        dprintf(fd, "%hhu ",
+                                *(data_ptr + index));
+                    }
+                    break;
+                case TYPE_INT32:
+                    value =
+                            *(int32_t*)(data_ptr + index);
+                    if (camera_metadata_enum_snprint(tag,
+                                                     value,
+                                                     value_string_tmp,
+                                                     sizeof(value_string_tmp))
+                        == OK) {
+                        dprintf(fd, "%s ", value_string_tmp);
+                    } else {
+                        dprintf(fd, "%" PRId32 " ",
+                                *(int32_t*)(data_ptr + index));
+                    }
+                    break;
+                case TYPE_FLOAT:
+                    dprintf(fd, "%0.8f ",
+                            *(float*)(data_ptr + index));
+                    break;
+                case TYPE_INT64:
+                    dprintf(fd, "%" PRId64 " ",
+                            *(int64_t*)(data_ptr + index));
+                    break;
+                case TYPE_DOUBLE:
+                    dprintf(fd, "%0.8f ",
+                            *(double*)(data_ptr + index));
+                    break;
+                case TYPE_RATIONAL: {
+                    // Rational values are stored as two consecutive int32s.
+                    int32_t numerator = *(int32_t*)(data_ptr + index);
+                    int32_t denominator = *(int32_t*)(data_ptr + index + 4);
+                    dprintf(fd, "(%d / %d) ",
+                            numerator, denominator);
+                    break;
+                }
+                default:
+                    dprintf(fd, "??? ");
+            }
+        }
+        dprintf(fd, "]\n");
+    }
+}
diff --git a/media/camera/src/camera_metadata_tag_info.c b/media/camera/src/camera_metadata_tag_info.c
new file mode 100644
index 0000000..a267191
--- /dev/null
+++ b/media/camera/src/camera_metadata_tag_info.c
@@ -0,0 +1,2568 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * !! Do not reference this file directly !!
+ *
+ * It is logically a part of camera_metadata.c.  It is broken out for ease of
+ * maintaining the tag info.
+ *
+ * Array assignments are done using specified-index syntax to keep things in
+ * sync with camera_metadata_tags.h
+ */
+
+/**
+ * ! Do not edit this file directly !
+ *
+ * Generated automatically from camera_metadata_tag_info.mako
+ */
+
+const char *camera_metadata_section_names[ANDROID_SECTION_COUNT] = { /* section enum -> dotted "android.*" section name */
+    [ANDROID_COLOR_CORRECTION]     = "android.colorCorrection",
+    [ANDROID_CONTROL]              = "android.control",
+    [ANDROID_DEMOSAIC]             = "android.demosaic",
+    [ANDROID_EDGE]                 = "android.edge",
+    [ANDROID_FLASH]                = "android.flash",
+    [ANDROID_FLASH_INFO]           = "android.flash.info",
+    [ANDROID_HOT_PIXEL]            = "android.hotPixel",
+    [ANDROID_JPEG]                 = "android.jpeg",
+    [ANDROID_LENS]                 = "android.lens",
+    [ANDROID_LENS_INFO]            = "android.lens.info",
+    [ANDROID_NOISE_REDUCTION]      = "android.noiseReduction",
+    [ANDROID_QUIRKS]               = "android.quirks",
+    [ANDROID_REQUEST]              = "android.request",
+    [ANDROID_SCALER]               = "android.scaler",
+    [ANDROID_SENSOR]               = "android.sensor",
+    [ANDROID_SENSOR_INFO]          = "android.sensor.info",
+    [ANDROID_SHADING]              = "android.shading",
+    [ANDROID_STATISTICS]           = "android.statistics",
+    [ANDROID_STATISTICS_INFO]      = "android.statistics.info",
+    [ANDROID_TONEMAP]              = "android.tonemap",
+    [ANDROID_LED]                  = "android.led",
+    [ANDROID_INFO]                 = "android.info",
+    [ANDROID_BLACK_LEVEL]          = "android.blackLevel",
+    [ANDROID_SYNC]                 = "android.sync",
+    [ANDROID_REPROCESS]            = "android.reprocess",
+    [ANDROID_DEPTH]                = "android.depth",
+}; /* generated table -- keep in sync with camera_metadata_tags.h; do not hand-edit */
+
+unsigned int camera_metadata_section_bounds[ANDROID_SECTION_COUNT][2] = { /* section enum -> { first tag, one-past-last tag } */
+    [ANDROID_COLOR_CORRECTION]     = { ANDROID_COLOR_CORRECTION_START,
+                                       ANDROID_COLOR_CORRECTION_END },
+    [ANDROID_CONTROL]              = { ANDROID_CONTROL_START,
+                                       ANDROID_CONTROL_END },
+    [ANDROID_DEMOSAIC]             = { ANDROID_DEMOSAIC_START,
+                                       ANDROID_DEMOSAIC_END },
+    [ANDROID_EDGE]                 = { ANDROID_EDGE_START,
+                                       ANDROID_EDGE_END },
+    [ANDROID_FLASH]                = { ANDROID_FLASH_START,
+                                       ANDROID_FLASH_END },
+    [ANDROID_FLASH_INFO]           = { ANDROID_FLASH_INFO_START,
+                                       ANDROID_FLASH_INFO_END },
+    [ANDROID_HOT_PIXEL]            = { ANDROID_HOT_PIXEL_START,
+                                       ANDROID_HOT_PIXEL_END },
+    [ANDROID_JPEG]                 = { ANDROID_JPEG_START,
+                                       ANDROID_JPEG_END },
+    [ANDROID_LENS]                 = { ANDROID_LENS_START,
+                                       ANDROID_LENS_END },
+    [ANDROID_LENS_INFO]            = { ANDROID_LENS_INFO_START,
+                                       ANDROID_LENS_INFO_END },
+    [ANDROID_NOISE_REDUCTION]      = { ANDROID_NOISE_REDUCTION_START,
+                                       ANDROID_NOISE_REDUCTION_END },
+    [ANDROID_QUIRKS]               = { ANDROID_QUIRKS_START,
+                                       ANDROID_QUIRKS_END },
+    [ANDROID_REQUEST]              = { ANDROID_REQUEST_START,
+                                       ANDROID_REQUEST_END },
+    [ANDROID_SCALER]               = { ANDROID_SCALER_START,
+                                       ANDROID_SCALER_END },
+    [ANDROID_SENSOR]               = { ANDROID_SENSOR_START,
+                                       ANDROID_SENSOR_END },
+    [ANDROID_SENSOR_INFO]          = { ANDROID_SENSOR_INFO_START,
+                                       ANDROID_SENSOR_INFO_END },
+    [ANDROID_SHADING]              = { ANDROID_SHADING_START,
+                                       ANDROID_SHADING_END },
+    [ANDROID_STATISTICS]           = { ANDROID_STATISTICS_START,
+                                       ANDROID_STATISTICS_END },
+    [ANDROID_STATISTICS_INFO]      = { ANDROID_STATISTICS_INFO_START,
+                                       ANDROID_STATISTICS_INFO_END },
+    [ANDROID_TONEMAP]              = { ANDROID_TONEMAP_START,
+                                       ANDROID_TONEMAP_END },
+    [ANDROID_LED]                  = { ANDROID_LED_START,
+                                       ANDROID_LED_END },
+    [ANDROID_INFO]                 = { ANDROID_INFO_START,
+                                       ANDROID_INFO_END },
+    [ANDROID_BLACK_LEVEL]          = { ANDROID_BLACK_LEVEL_START,
+                                       ANDROID_BLACK_LEVEL_END },
+    [ANDROID_SYNC]                 = { ANDROID_SYNC_START,
+                                       ANDROID_SYNC_END },
+    [ANDROID_REPROCESS]            = { ANDROID_REPROCESS_START,
+                                       ANDROID_REPROCESS_END },
+    [ANDROID_DEPTH]                = { ANDROID_DEPTH_START,
+                                       ANDROID_DEPTH_END },
+}; /* END is exclusive: valid tags for a section are [START, END) */
+
+static tag_info_t android_color_correction[ANDROID_COLOR_CORRECTION_END -
+        ANDROID_COLOR_CORRECTION_START] = { /* android.colorCorrection: tag name + payload type, indexed by (tag - START) */
+    [ ANDROID_COLOR_CORRECTION_MODE - ANDROID_COLOR_CORRECTION_START ] =
+    { "mode",                          TYPE_BYTE   },
+    [ ANDROID_COLOR_CORRECTION_TRANSFORM - ANDROID_COLOR_CORRECTION_START ] =
+    { "transform",                     TYPE_RATIONAL
+                },
+    [ ANDROID_COLOR_CORRECTION_GAINS - ANDROID_COLOR_CORRECTION_START ] =
+    { "gains",                         TYPE_FLOAT  },
+    [ ANDROID_COLOR_CORRECTION_ABERRATION_MODE - ANDROID_COLOR_CORRECTION_START ] =
+    { "aberrationMode",                TYPE_BYTE   },
+    [ ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES - ANDROID_COLOR_CORRECTION_START ] =
+    { "availableAberrationModes",      TYPE_BYTE   },
+};
+
+static tag_info_t android_control[ANDROID_CONTROL_END -
+        ANDROID_CONTROL_START] = { /* android.control (3A): tag name + payload type, indexed by (tag - START) */
+    [ ANDROID_CONTROL_AE_ANTIBANDING_MODE - ANDROID_CONTROL_START ] =
+    { "aeAntibandingMode",             TYPE_BYTE   },
+    [ ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION - ANDROID_CONTROL_START ] =
+    { "aeExposureCompensation",        TYPE_INT32  },
+    [ ANDROID_CONTROL_AE_LOCK - ANDROID_CONTROL_START ] =
+    { "aeLock",                        TYPE_BYTE   },
+    [ ANDROID_CONTROL_AE_MODE - ANDROID_CONTROL_START ] =
+    { "aeMode",                        TYPE_BYTE   },
+    [ ANDROID_CONTROL_AE_REGIONS - ANDROID_CONTROL_START ] =
+    { "aeRegions",                     TYPE_INT32  },
+    [ ANDROID_CONTROL_AE_TARGET_FPS_RANGE - ANDROID_CONTROL_START ] =
+    { "aeTargetFpsRange",              TYPE_INT32  },
+    [ ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER - ANDROID_CONTROL_START ] =
+    { "aePrecaptureTrigger",           TYPE_BYTE   },
+    [ ANDROID_CONTROL_AF_MODE - ANDROID_CONTROL_START ] =
+    { "afMode",                        TYPE_BYTE   },
+    [ ANDROID_CONTROL_AF_REGIONS - ANDROID_CONTROL_START ] =
+    { "afRegions",                     TYPE_INT32  },
+    [ ANDROID_CONTROL_AF_TRIGGER - ANDROID_CONTROL_START ] =
+    { "afTrigger",                     TYPE_BYTE   },
+    [ ANDROID_CONTROL_AWB_LOCK - ANDROID_CONTROL_START ] =
+    { "awbLock",                       TYPE_BYTE   },
+    [ ANDROID_CONTROL_AWB_MODE - ANDROID_CONTROL_START ] =
+    { "awbMode",                       TYPE_BYTE   },
+    [ ANDROID_CONTROL_AWB_REGIONS - ANDROID_CONTROL_START ] =
+    { "awbRegions",                    TYPE_INT32  },
+    [ ANDROID_CONTROL_CAPTURE_INTENT - ANDROID_CONTROL_START ] =
+    { "captureIntent",                 TYPE_BYTE   },
+    [ ANDROID_CONTROL_EFFECT_MODE - ANDROID_CONTROL_START ] =
+    { "effectMode",                    TYPE_BYTE   },
+    [ ANDROID_CONTROL_MODE - ANDROID_CONTROL_START ] =
+    { "mode",                          TYPE_BYTE   },
+    [ ANDROID_CONTROL_SCENE_MODE - ANDROID_CONTROL_START ] =
+    { "sceneMode",                     TYPE_BYTE   },
+    [ ANDROID_CONTROL_VIDEO_STABILIZATION_MODE - ANDROID_CONTROL_START ] =
+    { "videoStabilizationMode",        TYPE_BYTE   },
+    [ ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES - ANDROID_CONTROL_START ] =
+    { "aeAvailableAntibandingModes",   TYPE_BYTE   },
+    [ ANDROID_CONTROL_AE_AVAILABLE_MODES - ANDROID_CONTROL_START ] =
+    { "aeAvailableModes",              TYPE_BYTE   },
+    [ ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES - ANDROID_CONTROL_START ] =
+    { "aeAvailableTargetFpsRanges",    TYPE_INT32  },
+    [ ANDROID_CONTROL_AE_COMPENSATION_RANGE - ANDROID_CONTROL_START ] =
+    { "aeCompensationRange",           TYPE_INT32  },
+    [ ANDROID_CONTROL_AE_COMPENSATION_STEP - ANDROID_CONTROL_START ] =
+    { "aeCompensationStep",            TYPE_RATIONAL
+                },
+    [ ANDROID_CONTROL_AF_AVAILABLE_MODES - ANDROID_CONTROL_START ] =
+    { "afAvailableModes",              TYPE_BYTE   },
+    [ ANDROID_CONTROL_AVAILABLE_EFFECTS - ANDROID_CONTROL_START ] =
+    { "availableEffects",              TYPE_BYTE   },
+    [ ANDROID_CONTROL_AVAILABLE_SCENE_MODES - ANDROID_CONTROL_START ] =
+    { "availableSceneModes",           TYPE_BYTE   },
+    [ ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES - ANDROID_CONTROL_START ] =
+    { "availableVideoStabilizationModes",
+                                        TYPE_BYTE   },
+    [ ANDROID_CONTROL_AWB_AVAILABLE_MODES - ANDROID_CONTROL_START ] =
+    { "awbAvailableModes",             TYPE_BYTE   },
+    [ ANDROID_CONTROL_MAX_REGIONS - ANDROID_CONTROL_START ] =
+    { "maxRegions",                    TYPE_INT32  },
+    [ ANDROID_CONTROL_SCENE_MODE_OVERRIDES - ANDROID_CONTROL_START ] =
+    { "sceneModeOverrides",            TYPE_BYTE   },
+    [ ANDROID_CONTROL_AE_PRECAPTURE_ID - ANDROID_CONTROL_START ] =
+    { "aePrecaptureId",                TYPE_INT32  },
+    [ ANDROID_CONTROL_AE_STATE - ANDROID_CONTROL_START ] =
+    { "aeState",                       TYPE_BYTE   },
+    [ ANDROID_CONTROL_AF_STATE - ANDROID_CONTROL_START ] =
+    { "afState",                       TYPE_BYTE   },
+    [ ANDROID_CONTROL_AF_TRIGGER_ID - ANDROID_CONTROL_START ] =
+    { "afTriggerId",                   TYPE_INT32  },
+    [ ANDROID_CONTROL_AWB_STATE - ANDROID_CONTROL_START ] =
+    { "awbState",                      TYPE_BYTE   },
+    [ ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS - ANDROID_CONTROL_START ] =
+    { "availableHighSpeedVideoConfigurations",
+                                        TYPE_INT32  },
+    [ ANDROID_CONTROL_AE_LOCK_AVAILABLE - ANDROID_CONTROL_START ] =
+    { "aeLockAvailable",               TYPE_BYTE   },
+    [ ANDROID_CONTROL_AWB_LOCK_AVAILABLE - ANDROID_CONTROL_START ] =
+    { "awbLockAvailable",              TYPE_BYTE   },
+    [ ANDROID_CONTROL_AVAILABLE_MODES - ANDROID_CONTROL_START ] =
+    { "availableModes",                TYPE_BYTE   },
+};
+
+static tag_info_t android_demosaic[ANDROID_DEMOSAIC_END -
+        ANDROID_DEMOSAIC_START] = { /* android.demosaic: tag name + payload type, indexed by (tag - START) */
+    [ ANDROID_DEMOSAIC_MODE - ANDROID_DEMOSAIC_START ] =
+    { "mode",                          TYPE_BYTE   },
+};
+
+static tag_info_t android_edge[ANDROID_EDGE_END -
+        ANDROID_EDGE_START] = { /* android.edge: tag name + payload type, indexed by (tag - START) */
+    [ ANDROID_EDGE_MODE - ANDROID_EDGE_START ] =
+    { "mode",                          TYPE_BYTE   },
+    [ ANDROID_EDGE_STRENGTH - ANDROID_EDGE_START ] =
+    { "strength",                      TYPE_BYTE   },
+    [ ANDROID_EDGE_AVAILABLE_EDGE_MODES - ANDROID_EDGE_START ] =
+    { "availableEdgeModes",            TYPE_BYTE   },
+};
+
+static tag_info_t android_flash[ANDROID_FLASH_END -
+        ANDROID_FLASH_START] = { /* android.flash: tag name + payload type, indexed by (tag - START) */
+    [ ANDROID_FLASH_FIRING_POWER - ANDROID_FLASH_START ] =
+    { "firingPower",                   TYPE_BYTE   },
+    [ ANDROID_FLASH_FIRING_TIME - ANDROID_FLASH_START ] =
+    { "firingTime",                    TYPE_INT64  },
+    [ ANDROID_FLASH_MODE - ANDROID_FLASH_START ] =
+    { "mode",                          TYPE_BYTE   },
+    [ ANDROID_FLASH_COLOR_TEMPERATURE - ANDROID_FLASH_START ] =
+    { "colorTemperature",              TYPE_BYTE   },
+    [ ANDROID_FLASH_MAX_ENERGY - ANDROID_FLASH_START ] =
+    { "maxEnergy",                     TYPE_BYTE   },
+    [ ANDROID_FLASH_STATE - ANDROID_FLASH_START ] =
+    { "state",                         TYPE_BYTE   },
+};
+
+static tag_info_t android_flash_info[ANDROID_FLASH_INFO_END -
+        ANDROID_FLASH_INFO_START] = { /* android.flash.info: tag name + payload type, indexed by (tag - START) */
+    [ ANDROID_FLASH_INFO_AVAILABLE - ANDROID_FLASH_INFO_START ] =
+    { "available",                     TYPE_BYTE   },
+    [ ANDROID_FLASH_INFO_CHARGE_DURATION - ANDROID_FLASH_INFO_START ] =
+    { "chargeDuration",                TYPE_INT64  },
+};
+
+static tag_info_t android_hot_pixel[ANDROID_HOT_PIXEL_END -
+        ANDROID_HOT_PIXEL_START] = { /* android.hotPixel: tag name + payload type, indexed by (tag - START) */
+    [ ANDROID_HOT_PIXEL_MODE - ANDROID_HOT_PIXEL_START ] =
+    { "mode",                          TYPE_BYTE   },
+    [ ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES - ANDROID_HOT_PIXEL_START ] =
+    { "availableHotPixelModes",        TYPE_BYTE   },
+};
+
+static tag_info_t android_jpeg[ANDROID_JPEG_END -
+        ANDROID_JPEG_START] = { /* android.jpeg: tag name + payload type, indexed by (tag - START) */
+    [ ANDROID_JPEG_GPS_COORDINATES - ANDROID_JPEG_START ] =
+    { "gpsCoordinates",                TYPE_DOUBLE },
+    [ ANDROID_JPEG_GPS_PROCESSING_METHOD - ANDROID_JPEG_START ] =
+    { "gpsProcessingMethod",           TYPE_BYTE   },
+    [ ANDROID_JPEG_GPS_TIMESTAMP - ANDROID_JPEG_START ] =
+    { "gpsTimestamp",                  TYPE_INT64  },
+    [ ANDROID_JPEG_ORIENTATION - ANDROID_JPEG_START ] =
+    { "orientation",                   TYPE_INT32  },
+    [ ANDROID_JPEG_QUALITY - ANDROID_JPEG_START ] =
+    { "quality",                       TYPE_BYTE   },
+    [ ANDROID_JPEG_THUMBNAIL_QUALITY - ANDROID_JPEG_START ] =
+    { "thumbnailQuality",              TYPE_BYTE   },
+    [ ANDROID_JPEG_THUMBNAIL_SIZE - ANDROID_JPEG_START ] =
+    { "thumbnailSize",                 TYPE_INT32  },
+    [ ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES - ANDROID_JPEG_START ] =
+    { "availableThumbnailSizes",       TYPE_INT32  },
+    [ ANDROID_JPEG_MAX_SIZE - ANDROID_JPEG_START ] =
+    { "maxSize",                       TYPE_INT32  },
+    [ ANDROID_JPEG_SIZE - ANDROID_JPEG_START ] =
+    { "size",                          TYPE_INT32  },
+};
+
+static tag_info_t android_lens[ANDROID_LENS_END -
+        ANDROID_LENS_START] = { /* android.lens: tag name + payload type, indexed by (tag - START) */
+    [ ANDROID_LENS_APERTURE - ANDROID_LENS_START ] =
+    { "aperture",                      TYPE_FLOAT  },
+    [ ANDROID_LENS_FILTER_DENSITY - ANDROID_LENS_START ] =
+    { "filterDensity",                 TYPE_FLOAT  },
+    [ ANDROID_LENS_FOCAL_LENGTH - ANDROID_LENS_START ] =
+    { "focalLength",                   TYPE_FLOAT  },
+    [ ANDROID_LENS_FOCUS_DISTANCE - ANDROID_LENS_START ] =
+    { "focusDistance",                 TYPE_FLOAT  },
+    [ ANDROID_LENS_OPTICAL_STABILIZATION_MODE - ANDROID_LENS_START ] =
+    { "opticalStabilizationMode",      TYPE_BYTE   },
+    [ ANDROID_LENS_FACING - ANDROID_LENS_START ] =
+    { "facing",                        TYPE_BYTE   },
+    [ ANDROID_LENS_POSE_ROTATION - ANDROID_LENS_START ] =
+    { "poseRotation",                  TYPE_FLOAT  },
+    [ ANDROID_LENS_POSE_TRANSLATION - ANDROID_LENS_START ] =
+    { "poseTranslation",               TYPE_FLOAT  },
+    [ ANDROID_LENS_FOCUS_RANGE - ANDROID_LENS_START ] =
+    { "focusRange",                    TYPE_FLOAT  },
+    [ ANDROID_LENS_STATE - ANDROID_LENS_START ] =
+    { "state",                         TYPE_BYTE   },
+    [ ANDROID_LENS_INTRINSIC_CALIBRATION - ANDROID_LENS_START ] =
+    { "intrinsicCalibration",          TYPE_FLOAT  },
+    [ ANDROID_LENS_RADIAL_DISTORTION - ANDROID_LENS_START ] =
+    { "radialDistortion",              TYPE_FLOAT  },
+};
+
+static tag_info_t android_lens_info[ANDROID_LENS_INFO_END -
+        ANDROID_LENS_INFO_START] = { /* android.lens.info: tag name + payload type, indexed by (tag - START) */
+    [ ANDROID_LENS_INFO_AVAILABLE_APERTURES - ANDROID_LENS_INFO_START ] =
+    { "availableApertures",            TYPE_FLOAT  },
+    [ ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES - ANDROID_LENS_INFO_START ] =
+    { "availableFilterDensities",      TYPE_FLOAT  },
+    [ ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS - ANDROID_LENS_INFO_START ] =
+    { "availableFocalLengths",         TYPE_FLOAT  },
+    [ ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION - ANDROID_LENS_INFO_START ] =
+    { "availableOpticalStabilization", TYPE_BYTE   },
+    [ ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE - ANDROID_LENS_INFO_START ] =
+    { "hyperfocalDistance",            TYPE_FLOAT  },
+    [ ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE - ANDROID_LENS_INFO_START ] =
+    { "minimumFocusDistance",          TYPE_FLOAT  },
+    [ ANDROID_LENS_INFO_SHADING_MAP_SIZE - ANDROID_LENS_INFO_START ] =
+    { "shadingMapSize",                TYPE_INT32  },
+    [ ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION - ANDROID_LENS_INFO_START ] =
+    { "focusDistanceCalibration",      TYPE_BYTE   },
+};
+
+static tag_info_t android_noise_reduction[ANDROID_NOISE_REDUCTION_END -
+        ANDROID_NOISE_REDUCTION_START] = { /* android.noiseReduction: tag name + payload type, indexed by (tag - START) */
+    [ ANDROID_NOISE_REDUCTION_MODE - ANDROID_NOISE_REDUCTION_START ] =
+    { "mode",                          TYPE_BYTE   },
+    [ ANDROID_NOISE_REDUCTION_STRENGTH - ANDROID_NOISE_REDUCTION_START ] =
+    { "strength",                      TYPE_BYTE   },
+    [ ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES - ANDROID_NOISE_REDUCTION_START ] =
+    { "availableNoiseReductionModes",  TYPE_BYTE   },
+};
+
+static tag_info_t android_quirks[ANDROID_QUIRKS_END -
+        ANDROID_QUIRKS_START] = { /* android.quirks: tag name + payload type, indexed by (tag - START) */
+    [ ANDROID_QUIRKS_METERING_CROP_REGION - ANDROID_QUIRKS_START ] =
+    { "meteringCropRegion",            TYPE_BYTE   },
+    [ ANDROID_QUIRKS_TRIGGER_AF_WITH_AUTO - ANDROID_QUIRKS_START ] =
+    { "triggerAfWithAuto",             TYPE_BYTE   },
+    [ ANDROID_QUIRKS_USE_ZSL_FORMAT - ANDROID_QUIRKS_START ] =
+    { "useZslFormat",                  TYPE_BYTE   },
+    [ ANDROID_QUIRKS_USE_PARTIAL_RESULT - ANDROID_QUIRKS_START ] =
+    { "usePartialResult",              TYPE_BYTE   },
+    [ ANDROID_QUIRKS_PARTIAL_RESULT - ANDROID_QUIRKS_START ] =
+    { "partialResult",                 TYPE_BYTE   },
+};
+
+static tag_info_t android_request[ANDROID_REQUEST_END -
+        ANDROID_REQUEST_START] = { /* android.request: tag name + payload type, indexed by (tag - START) */
+    [ ANDROID_REQUEST_FRAME_COUNT - ANDROID_REQUEST_START ] =
+    { "frameCount",                    TYPE_INT32  },
+    [ ANDROID_REQUEST_ID - ANDROID_REQUEST_START ] =
+    { "id",                            TYPE_INT32  },
+    [ ANDROID_REQUEST_INPUT_STREAMS - ANDROID_REQUEST_START ] =
+    { "inputStreams",                  TYPE_INT32  },
+    [ ANDROID_REQUEST_METADATA_MODE - ANDROID_REQUEST_START ] =
+    { "metadataMode",                  TYPE_BYTE   },
+    [ ANDROID_REQUEST_OUTPUT_STREAMS - ANDROID_REQUEST_START ] =
+    { "outputStreams",                 TYPE_INT32  },
+    [ ANDROID_REQUEST_TYPE - ANDROID_REQUEST_START ] =
+    { "type",                          TYPE_BYTE   },
+    [ ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS - ANDROID_REQUEST_START ] =
+    { "maxNumOutputStreams",           TYPE_INT32  },
+    [ ANDROID_REQUEST_MAX_NUM_REPROCESS_STREAMS - ANDROID_REQUEST_START ] =
+    { "maxNumReprocessStreams",        TYPE_INT32  },
+    [ ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS - ANDROID_REQUEST_START ] =
+    { "maxNumInputStreams",            TYPE_INT32  },
+    [ ANDROID_REQUEST_PIPELINE_DEPTH - ANDROID_REQUEST_START ] =
+    { "pipelineDepth",                 TYPE_BYTE   },
+    [ ANDROID_REQUEST_PIPELINE_MAX_DEPTH - ANDROID_REQUEST_START ] =
+    { "pipelineMaxDepth",              TYPE_BYTE   },
+    [ ANDROID_REQUEST_PARTIAL_RESULT_COUNT - ANDROID_REQUEST_START ] =
+    { "partialResultCount",            TYPE_INT32  },
+    [ ANDROID_REQUEST_AVAILABLE_CAPABILITIES - ANDROID_REQUEST_START ] =
+    { "availableCapabilities",         TYPE_BYTE   },
+    [ ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS - ANDROID_REQUEST_START ] =
+    { "availableRequestKeys",          TYPE_INT32  },
+    [ ANDROID_REQUEST_AVAILABLE_RESULT_KEYS - ANDROID_REQUEST_START ] =
+    { "availableResultKeys",           TYPE_INT32  },
+    [ ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS - ANDROID_REQUEST_START ] =
+    { "availableCharacteristicsKeys",  TYPE_INT32  },
+};
+
+static tag_info_t android_scaler[ANDROID_SCALER_END -
+        ANDROID_SCALER_START] = { /* android.scaler: tag name + payload type, indexed by (tag - START) */
+    [ ANDROID_SCALER_CROP_REGION - ANDROID_SCALER_START ] =
+    { "cropRegion",                    TYPE_INT32  },
+    [ ANDROID_SCALER_AVAILABLE_FORMATS - ANDROID_SCALER_START ] =
+    { "availableFormats",              TYPE_INT32  },
+    [ ANDROID_SCALER_AVAILABLE_JPEG_MIN_DURATIONS - ANDROID_SCALER_START ] =
+    { "availableJpegMinDurations",     TYPE_INT64  },
+    [ ANDROID_SCALER_AVAILABLE_JPEG_SIZES - ANDROID_SCALER_START ] =
+    { "availableJpegSizes",            TYPE_INT32  },
+    [ ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM - ANDROID_SCALER_START ] =
+    { "availableMaxDigitalZoom",       TYPE_FLOAT  },
+    [ ANDROID_SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS - ANDROID_SCALER_START ] =
+    { "availableProcessedMinDurations",
+                                        TYPE_INT64  },
+    [ ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES - ANDROID_SCALER_START ] =
+    { "availableProcessedSizes",       TYPE_INT32  },
+    [ ANDROID_SCALER_AVAILABLE_RAW_MIN_DURATIONS - ANDROID_SCALER_START ] =
+    { "availableRawMinDurations",      TYPE_INT64  },
+    [ ANDROID_SCALER_AVAILABLE_RAW_SIZES - ANDROID_SCALER_START ] =
+    { "availableRawSizes",             TYPE_INT32  },
+    [ ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP - ANDROID_SCALER_START ] =
+    { "availableInputOutputFormatsMap",
+                                        TYPE_INT32  },
+    [ ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS - ANDROID_SCALER_START ] =
+    { "availableStreamConfigurations", TYPE_INT32  },
+    [ ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS - ANDROID_SCALER_START ] =
+    { "availableMinFrameDurations",    TYPE_INT64  },
+    [ ANDROID_SCALER_AVAILABLE_STALL_DURATIONS - ANDROID_SCALER_START ] =
+    { "availableStallDurations",       TYPE_INT64  },
+    [ ANDROID_SCALER_CROPPING_TYPE - ANDROID_SCALER_START ] =
+    { "croppingType",                  TYPE_BYTE   },
+};
+
+static tag_info_t android_sensor[ANDROID_SENSOR_END -
+        ANDROID_SENSOR_START] = { /* android.sensor: tag name + payload type, indexed by (tag - START) */
+    [ ANDROID_SENSOR_EXPOSURE_TIME - ANDROID_SENSOR_START ] =
+    { "exposureTime",                  TYPE_INT64  },
+    [ ANDROID_SENSOR_FRAME_DURATION - ANDROID_SENSOR_START ] =
+    { "frameDuration",                 TYPE_INT64  },
+    [ ANDROID_SENSOR_SENSITIVITY - ANDROID_SENSOR_START ] =
+    { "sensitivity",                   TYPE_INT32  },
+    [ ANDROID_SENSOR_REFERENCE_ILLUMINANT1 - ANDROID_SENSOR_START ] =
+    { "referenceIlluminant1",          TYPE_BYTE   },
+    [ ANDROID_SENSOR_REFERENCE_ILLUMINANT2 - ANDROID_SENSOR_START ] =
+    { "referenceIlluminant2",          TYPE_BYTE   },
+    [ ANDROID_SENSOR_CALIBRATION_TRANSFORM1 - ANDROID_SENSOR_START ] =
+    { "calibrationTransform1",         TYPE_RATIONAL
+                },
+    [ ANDROID_SENSOR_CALIBRATION_TRANSFORM2 - ANDROID_SENSOR_START ] =
+    { "calibrationTransform2",         TYPE_RATIONAL
+                },
+    [ ANDROID_SENSOR_COLOR_TRANSFORM1 - ANDROID_SENSOR_START ] =
+    { "colorTransform1",               TYPE_RATIONAL
+                },
+    [ ANDROID_SENSOR_COLOR_TRANSFORM2 - ANDROID_SENSOR_START ] =
+    { "colorTransform2",               TYPE_RATIONAL
+                },
+    [ ANDROID_SENSOR_FORWARD_MATRIX1 - ANDROID_SENSOR_START ] =
+    { "forwardMatrix1",                TYPE_RATIONAL
+                },
+    [ ANDROID_SENSOR_FORWARD_MATRIX2 - ANDROID_SENSOR_START ] =
+    { "forwardMatrix2",                TYPE_RATIONAL
+                },
+    [ ANDROID_SENSOR_BASE_GAIN_FACTOR - ANDROID_SENSOR_START ] =
+    { "baseGainFactor",                TYPE_RATIONAL
+                },
+    [ ANDROID_SENSOR_BLACK_LEVEL_PATTERN - ANDROID_SENSOR_START ] =
+    { "blackLevelPattern",             TYPE_INT32  },
+    [ ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY - ANDROID_SENSOR_START ] =
+    { "maxAnalogSensitivity",          TYPE_INT32  },
+    [ ANDROID_SENSOR_ORIENTATION - ANDROID_SENSOR_START ] =
+    { "orientation",                   TYPE_INT32  },
+    [ ANDROID_SENSOR_PROFILE_HUE_SAT_MAP_DIMENSIONS - ANDROID_SENSOR_START ] =
+    { "profileHueSatMapDimensions",    TYPE_INT32  },
+    [ ANDROID_SENSOR_TIMESTAMP - ANDROID_SENSOR_START ] =
+    { "timestamp",                     TYPE_INT64  },
+    [ ANDROID_SENSOR_TEMPERATURE - ANDROID_SENSOR_START ] =
+    { "temperature",                   TYPE_FLOAT  },
+    [ ANDROID_SENSOR_NEUTRAL_COLOR_POINT - ANDROID_SENSOR_START ] =
+    { "neutralColorPoint",             TYPE_RATIONAL
+                },
+    [ ANDROID_SENSOR_NOISE_PROFILE - ANDROID_SENSOR_START ] =
+    { "noiseProfile",                  TYPE_DOUBLE },
+    [ ANDROID_SENSOR_PROFILE_HUE_SAT_MAP - ANDROID_SENSOR_START ] =
+    { "profileHueSatMap",              TYPE_FLOAT  },
+    [ ANDROID_SENSOR_PROFILE_TONE_CURVE - ANDROID_SENSOR_START ] =
+    { "profileToneCurve",              TYPE_FLOAT  },
+    [ ANDROID_SENSOR_GREEN_SPLIT - ANDROID_SENSOR_START ] =
+    { "greenSplit",                    TYPE_FLOAT  },
+    [ ANDROID_SENSOR_TEST_PATTERN_DATA - ANDROID_SENSOR_START ] =
+    { "testPatternData",               TYPE_INT32  },
+    [ ANDROID_SENSOR_TEST_PATTERN_MODE - ANDROID_SENSOR_START ] =
+    { "testPatternMode",               TYPE_INT32  },
+    [ ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES - ANDROID_SENSOR_START ] =
+    { "availableTestPatternModes",     TYPE_INT32  },
+    [ ANDROID_SENSOR_ROLLING_SHUTTER_SKEW - ANDROID_SENSOR_START ] =
+    { "rollingShutterSkew",            TYPE_INT64  },
+};
+
+static tag_info_t android_sensor_info[ANDROID_SENSOR_INFO_END -
+        ANDROID_SENSOR_INFO_START] = { /* android.sensor.info: tag name + payload type, indexed by (tag - START) */
+    [ ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE - ANDROID_SENSOR_INFO_START ] =
+    { "activeArraySize",               TYPE_INT32  },
+    [ ANDROID_SENSOR_INFO_SENSITIVITY_RANGE - ANDROID_SENSOR_INFO_START ] =
+    { "sensitivityRange",              TYPE_INT32  },
+    [ ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT - ANDROID_SENSOR_INFO_START ] =
+    { "colorFilterArrangement",        TYPE_BYTE   },
+    [ ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE - ANDROID_SENSOR_INFO_START ] =
+    { "exposureTimeRange",             TYPE_INT64  },
+    [ ANDROID_SENSOR_INFO_MAX_FRAME_DURATION - ANDROID_SENSOR_INFO_START ] =
+    { "maxFrameDuration",              TYPE_INT64  },
+    [ ANDROID_SENSOR_INFO_PHYSICAL_SIZE - ANDROID_SENSOR_INFO_START ] =
+    { "physicalSize",                  TYPE_FLOAT  },
+    [ ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE - ANDROID_SENSOR_INFO_START ] =
+    { "pixelArraySize",                TYPE_INT32  },
+    [ ANDROID_SENSOR_INFO_WHITE_LEVEL - ANDROID_SENSOR_INFO_START ] =
+    { "whiteLevel",                    TYPE_INT32  },
+    [ ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE - ANDROID_SENSOR_INFO_START ] =
+    { "timestampSource",               TYPE_BYTE   },
+    [ ANDROID_SENSOR_INFO_LENS_SHADING_APPLIED - ANDROID_SENSOR_INFO_START ] =
+    { "lensShadingApplied",            TYPE_BYTE   },
+    [ ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE - ANDROID_SENSOR_INFO_START ] =
+    { "preCorrectionActiveArraySize",  TYPE_INT32  },
+};
+
+static tag_info_t android_shading[ANDROID_SHADING_END -
+        ANDROID_SHADING_START] = { /* android.shading: tag name + payload type, indexed by (tag - START) */
+    [ ANDROID_SHADING_MODE - ANDROID_SHADING_START ] =
+    { "mode",                          TYPE_BYTE   },
+    [ ANDROID_SHADING_STRENGTH - ANDROID_SHADING_START ] =
+    { "strength",                      TYPE_BYTE   },
+    [ ANDROID_SHADING_AVAILABLE_MODES - ANDROID_SHADING_START ] =
+    { "availableModes",                TYPE_BYTE   },
+};
+
+static tag_info_t android_statistics[ANDROID_STATISTICS_END -
+        ANDROID_STATISTICS_START] = { /* android.statistics: tag name + payload type, indexed by (tag - START) */
+    [ ANDROID_STATISTICS_FACE_DETECT_MODE - ANDROID_STATISTICS_START ] =
+    { "faceDetectMode",                TYPE_BYTE   },
+    [ ANDROID_STATISTICS_HISTOGRAM_MODE - ANDROID_STATISTICS_START ] =
+    { "histogramMode",                 TYPE_BYTE   },
+    [ ANDROID_STATISTICS_SHARPNESS_MAP_MODE - ANDROID_STATISTICS_START ] =
+    { "sharpnessMapMode",              TYPE_BYTE   },
+    [ ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE - ANDROID_STATISTICS_START ] =
+    { "hotPixelMapMode",               TYPE_BYTE   },
+    [ ANDROID_STATISTICS_FACE_IDS - ANDROID_STATISTICS_START ] =
+    { "faceIds",                       TYPE_INT32  },
+    [ ANDROID_STATISTICS_FACE_LANDMARKS - ANDROID_STATISTICS_START ] =
+    { "faceLandmarks",                 TYPE_INT32  },
+    [ ANDROID_STATISTICS_FACE_RECTANGLES - ANDROID_STATISTICS_START ] =
+    { "faceRectangles",                TYPE_INT32  },
+    [ ANDROID_STATISTICS_FACE_SCORES - ANDROID_STATISTICS_START ] =
+    { "faceScores",                    TYPE_BYTE   },
+    [ ANDROID_STATISTICS_HISTOGRAM - ANDROID_STATISTICS_START ] =
+    { "histogram",                     TYPE_INT32  },
+    [ ANDROID_STATISTICS_SHARPNESS_MAP - ANDROID_STATISTICS_START ] =
+    { "sharpnessMap",                  TYPE_INT32  },
+    [ ANDROID_STATISTICS_LENS_SHADING_CORRECTION_MAP - ANDROID_STATISTICS_START ] =
+    { "lensShadingCorrectionMap",      TYPE_BYTE   },
+    [ ANDROID_STATISTICS_LENS_SHADING_MAP - ANDROID_STATISTICS_START ] =
+    { "lensShadingMap",                TYPE_FLOAT  },
+    [ ANDROID_STATISTICS_PREDICTED_COLOR_GAINS - ANDROID_STATISTICS_START ] =
+    { "predictedColorGains",           TYPE_FLOAT  },
+    [ ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM - ANDROID_STATISTICS_START ] =
+    { "predictedColorTransform",       TYPE_RATIONAL
+                },
+    [ ANDROID_STATISTICS_SCENE_FLICKER - ANDROID_STATISTICS_START ] =
+    { "sceneFlicker",                  TYPE_BYTE   },
+    [ ANDROID_STATISTICS_HOT_PIXEL_MAP - ANDROID_STATISTICS_START ] =
+    { "hotPixelMap",                   TYPE_INT32  },
+    [ ANDROID_STATISTICS_LENS_SHADING_MAP_MODE - ANDROID_STATISTICS_START ] =
+    { "lensShadingMapMode",            TYPE_BYTE   },
+};
+
+static tag_info_t android_statistics_info[ANDROID_STATISTICS_INFO_END -
+        ANDROID_STATISTICS_INFO_START] = { /* android.statistics.info: tag name + payload type, indexed by (tag - START) */
+    [ ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES - ANDROID_STATISTICS_INFO_START ] =
+    { "availableFaceDetectModes",      TYPE_BYTE   },
+    [ ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT - ANDROID_STATISTICS_INFO_START ] =
+    { "histogramBucketCount",          TYPE_INT32  },
+    [ ANDROID_STATISTICS_INFO_MAX_FACE_COUNT - ANDROID_STATISTICS_INFO_START ] =
+    { "maxFaceCount",                  TYPE_INT32  },
+    [ ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT - ANDROID_STATISTICS_INFO_START ] =
+    { "maxHistogramCount",             TYPE_INT32  },
+    [ ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE - ANDROID_STATISTICS_INFO_START ] =
+    { "maxSharpnessMapValue",          TYPE_INT32  },
+    [ ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE - ANDROID_STATISTICS_INFO_START ] =
+    { "sharpnessMapSize",              TYPE_INT32  },
+    [ ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES - ANDROID_STATISTICS_INFO_START ] =
+    { "availableHotPixelMapModes",     TYPE_BYTE   },
+    [ ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES - ANDROID_STATISTICS_INFO_START ] =
+    { "availableLensShadingMapModes",  TYPE_BYTE   },
+};
+
+/* Tag name and payload type for each entry in the ANDROID_TONEMAP section,
+ * indexed by the tag's offset from the section start. */
+static tag_info_t android_tonemap[ANDROID_TONEMAP_END -
+        ANDROID_TONEMAP_START] = {
+    [ ANDROID_TONEMAP_CURVE_BLUE - ANDROID_TONEMAP_START ] = { "curveBlue", TYPE_FLOAT },
+    [ ANDROID_TONEMAP_CURVE_GREEN - ANDROID_TONEMAP_START ] = { "curveGreen", TYPE_FLOAT },
+    [ ANDROID_TONEMAP_CURVE_RED - ANDROID_TONEMAP_START ] = { "curveRed", TYPE_FLOAT },
+    [ ANDROID_TONEMAP_MODE - ANDROID_TONEMAP_START ] = { "mode", TYPE_BYTE },
+    [ ANDROID_TONEMAP_MAX_CURVE_POINTS - ANDROID_TONEMAP_START ] = { "maxCurvePoints", TYPE_INT32 },
+    [ ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES - ANDROID_TONEMAP_START ] = { "availableToneMapModes", TYPE_BYTE },
+    [ ANDROID_TONEMAP_GAMMA - ANDROID_TONEMAP_START ] = { "gamma", TYPE_FLOAT },
+    [ ANDROID_TONEMAP_PRESET_CURVE - ANDROID_TONEMAP_START ] = { "presetCurve", TYPE_BYTE },
+};
+
+/* Tag name and payload type for each entry in the ANDROID_LED section,
+ * indexed by the tag's offset from the section start. */
+static tag_info_t android_led[ANDROID_LED_END -
+        ANDROID_LED_START] = {
+    [ ANDROID_LED_TRANSMIT - ANDROID_LED_START ] = { "transmit", TYPE_BYTE },
+    [ ANDROID_LED_AVAILABLE_LEDS - ANDROID_LED_START ] = { "availableLeds", TYPE_BYTE },
+};
+
+/* Tag name and payload type for the single entry in the ANDROID_INFO
+ * section, indexed by the tag's offset from the section start. */
+static tag_info_t android_info[ANDROID_INFO_END -
+        ANDROID_INFO_START] = {
+    [ ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL - ANDROID_INFO_START ] = { "supportedHardwareLevel", TYPE_BYTE },
+};
+
+/* Tag name and payload type for the single entry in the ANDROID_BLACK_LEVEL
+ * section, indexed by the tag's offset from the section start. */
+static tag_info_t android_black_level[ANDROID_BLACK_LEVEL_END -
+        ANDROID_BLACK_LEVEL_START] = {
+    [ ANDROID_BLACK_LEVEL_LOCK - ANDROID_BLACK_LEVEL_START ] = { "lock", TYPE_BYTE },
+};
+
+/* Tag name and payload type for each entry in the ANDROID_SYNC section,
+ * indexed by the tag's offset from the section start. */
+static tag_info_t android_sync[ANDROID_SYNC_END -
+        ANDROID_SYNC_START] = {
+    [ ANDROID_SYNC_FRAME_NUMBER - ANDROID_SYNC_START ] = { "frameNumber", TYPE_INT64 },
+    [ ANDROID_SYNC_MAX_LATENCY - ANDROID_SYNC_START ] = { "maxLatency", TYPE_INT32 },
+};
+
+/* Tag name and payload type for each entry in the ANDROID_REPROCESS section,
+ * indexed by the tag's offset from the section start. */
+static tag_info_t android_reprocess[ANDROID_REPROCESS_END -
+        ANDROID_REPROCESS_START] = {
+    [ ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR - ANDROID_REPROCESS_START ] = { "effectiveExposureFactor", TYPE_FLOAT },
+    [ ANDROID_REPROCESS_MAX_CAPTURE_STALL - ANDROID_REPROCESS_START ] = { "maxCaptureStall", TYPE_INT32 },
+};
+
+/* Tag name and payload type for each entry in the ANDROID_DEPTH section,
+ * indexed by the tag's offset from the section start. */
+static tag_info_t android_depth[ANDROID_DEPTH_END -
+        ANDROID_DEPTH_START] = {
+    [ ANDROID_DEPTH_MAX_DEPTH_SAMPLES - ANDROID_DEPTH_START ] = { "maxDepthSamples", TYPE_INT32 },
+    [ ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS - ANDROID_DEPTH_START ] = { "availableDepthStreamConfigurations", TYPE_INT32 },
+    [ ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS - ANDROID_DEPTH_START ] = { "availableDepthMinFrameDurations", TYPE_INT64 },
+    [ ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS - ANDROID_DEPTH_START ] = { "availableDepthStallDurations", TYPE_INT64 },
+    [ ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE - ANDROID_DEPTH_START ] = { "depthIsExclusive", TYPE_BYTE },
+};
+
+
+/*
+ * Per-section lookup table: one per-section tag table, in section order.
+ * The position of each entry is significant (positional initialization
+ * against ANDROID_SECTION_COUNT) — do not reorder.
+ */
+tag_info_t *tag_info[ANDROID_SECTION_COUNT] = {
+    android_color_correction,  /* COLOR_CORRECTION */
+    android_control,           /* CONTROL */
+    android_demosaic,          /* DEMOSAIC */
+    android_edge,              /* EDGE */
+    android_flash,             /* FLASH */
+    android_flash_info,        /* FLASH_INFO */
+    android_hot_pixel,         /* HOT_PIXEL */
+    android_jpeg,              /* JPEG */
+    android_lens,              /* LENS */
+    android_lens_info,         /* LENS_INFO */
+    android_noise_reduction,   /* NOISE_REDUCTION */
+    android_quirks,            /* QUIRKS */
+    android_request,           /* REQUEST */
+    android_scaler,            /* SCALER */
+    android_sensor,            /* SENSOR */
+    android_sensor_info,       /* SENSOR_INFO */
+    android_shading,           /* SHADING */
+    android_statistics,        /* STATISTICS */
+    android_statistics_info,   /* STATISTICS_INFO */
+    android_tonemap,           /* TONEMAP */
+    android_led,               /* LED */
+    android_info,              /* INFO */
+    android_black_level,       /* BLACK_LEVEL */
+    android_sync,              /* SYNC */
+    android_reprocess,         /* REPROCESS */
+    android_depth,             /* DEPTH */
+};
+
+int camera_metadata_enum_snprint(uint32_t tag,
+                                 uint32_t value,
+                                 char *dst,
+                                 size_t size) {
+    const char *msg = "error: not an enum";
+    int ret = -1;
+
+    switch(tag) {
+        case ANDROID_COLOR_CORRECTION_MODE: {
+            switch (value) {
+                case ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX:
+                    msg = "TRANSFORM_MATRIX";
+                    ret = 0;
+                    break;
+                case ANDROID_COLOR_CORRECTION_MODE_FAST:
+                    msg = "FAST";
+                    ret = 0;
+                    break;
+                case ANDROID_COLOR_CORRECTION_MODE_HIGH_QUALITY:
+                    msg = "HIGH_QUALITY";
+                    ret = 0;
+                    break;
+                default:
+                    msg = "error: enum value out of range";
+            }
+            break;
+        }
+        case ANDROID_COLOR_CORRECTION_TRANSFORM: {
+            break;
+        }
+        case ANDROID_COLOR_CORRECTION_GAINS: {
+            break;
+        }
+        case ANDROID_COLOR_CORRECTION_ABERRATION_MODE: {
+            switch (value) {
+                case ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF:
+                    msg = "OFF";
+                    ret = 0;
+                    break;
+                case ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST:
+                    msg = "FAST";
+                    ret = 0;
+                    break;
+                case ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY:
+                    msg = "HIGH_QUALITY";
+                    ret = 0;
+                    break;
+                default:
+                    msg = "error: enum value out of range";
+            }
+            break;
+        }
+        case ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES: {
+            break;
+        }
+
+        case ANDROID_CONTROL_AE_ANTIBANDING_MODE: {
+            switch (value) {
+                case ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF:
+                    msg = "OFF";
+                    ret = 0;
+                    break;
+                case ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ:
+                    msg = "50HZ";
+                    ret = 0;
+                    break;
+                case ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ:
+                    msg = "60HZ";
+                    ret = 0;
+                    break;
+                case ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO:
+                    msg = "AUTO";
+                    ret = 0;
+                    break;
+                default:
+                    msg = "error: enum value out of range";
+            }
+            break;
+        }
+        case ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION: {
+            break;
+        }
+        case ANDROID_CONTROL_AE_LOCK: {
+            switch (value) {
+                case ANDROID_CONTROL_AE_LOCK_OFF:
+                    msg = "OFF";
+                    ret = 0;
+                    break;
+                case ANDROID_CONTROL_AE_LOCK_ON:
+                    msg = "ON";
+                    ret = 0;
+                    break;
+                default:
+                    msg = "error: enum value out of range";
+            }
+            break;
+        }
+        case ANDROID_CONTROL_AE_MODE: {
+            switch (value) {
+                case ANDROID_CONTROL_AE_MODE_OFF:
+                    msg = "OFF";
+                    ret = 0;
+                    break;
+                case ANDROID_CONTROL_AE_MODE_ON:
+                    msg = "ON";
+                    ret = 0;
+                    break;
+                case ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH:
+                    msg = "ON_AUTO_FLASH";
+                    ret = 0;
+                    break;
+                case ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH:
+                    msg = "ON_ALWAYS_FLASH";
+                    ret = 0;
+                    break;
+                case ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE:
+                    msg = "ON_AUTO_FLASH_REDEYE";
+                    ret = 0;
+                    break;
+                default:
+                    msg = "error: enum value out of range";
+            }
+            break;
+        }
+        case ANDROID_CONTROL_AE_REGIONS: {
+            break;
+        }
+        case ANDROID_CONTROL_AE_TARGET_FPS_RANGE: {
+            break;
+        }
+        case ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER: {
+            switch (value) {
+                case ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE:
+                    msg = "IDLE";
+                    ret = 0;
+                    break;
+                case ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_START:
+                    msg = "START";
+                    ret = 0;
+                    break;
+                case ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL:
+                    msg = "CANCEL";
+                    ret = 0;
+                    break;
+                default:
+                    msg = "error: enum value out of range";
+            }
+            break;
+        }
+        case ANDROID_CONTROL_AF_MODE: {
+            switch (value) {
+                case ANDROID_CONTROL_AF_MODE_OFF:
+                    msg = "OFF";
+                    ret = 0;
+                    break;
+                case ANDROID_CONTROL_AF_MODE_AUTO:
+                    msg = "AUTO";
+                    ret = 0;
+                    break;
+                case ANDROID_CONTROL_AF_MODE_MACRO:
+                    msg = "MACRO";
+                    ret = 0;
+                    break;
+                case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
+                    msg = "CONTINUOUS_VIDEO";
+                    ret = 0;
+                    break;
+                case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
+                    msg = "CONTINUOUS_PICTURE";
+                    ret = 0;
+                    break;
+                case ANDROID_CONTROL_AF_MODE_EDOF:
+                    msg = "EDOF";
+                    ret = 0;
+                    break;
+                default:
+                    msg = "error: enum value out of range";
+            }
+            break;
+        }
+        case ANDROID_CONTROL_AF_REGIONS: {
+            break;
+        }
+        case ANDROID_CONTROL_AF_TRIGGER: {
+            switch (value) {
+                case ANDROID_CONTROL_AF_TRIGGER_IDLE:
+                    msg = "IDLE";
+                    ret = 0;
+                    break;
+                case ANDROID_CONTROL_AF_TRIGGER_START:
+                    msg = "START";
+                    ret = 0;
+                    break;
+                case ANDROID_CONTROL_AF_TRIGGER_CANCEL:
+                    msg = "CANCEL";
+                    ret = 0;
+                    break;
+                default:
+                    msg = "error: enum value out of range";
+            }
+            break;
+        }
+        case ANDROID_CONTROL_AWB_LOCK: {
+            switch (value) {
+                case ANDROID_CONTROL_AWB_LOCK_OFF:
+                    msg = "OFF";
+                    ret = 0;
+                    break;
+                case ANDROID_CONTROL_AWB_LOCK_ON:
+                    msg = "ON";
+                    ret = 0;
+                    break;
+                default:
+                    msg = "error: enum value out of range";
+            }
+            break;
+        }
+        case ANDROID_CONTROL_AWB_MODE: {
+            switch (value) {
+                case ANDROID_CONTROL_AWB_MODE_OFF:
+                    msg = "OFF";
+                    ret = 0;
+                    break;
+                case ANDROID_CONTROL_AWB_MODE_AUTO:
+                    msg = "AUTO";
+                    ret = 0;
+                    break;
+                case ANDROID_CONTROL_AWB_MODE_INCANDESCENT:
+                    msg = "INCANDESCENT";
+                    ret = 0;
+                    break;
+                case ANDROID_CONTROL_AWB_MODE_FLUORESCENT:
+                    msg = "FLUORESCENT";
+                    ret = 0;
+                    break;
+                case ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT:
+                    msg = "WARM_FLUORESCENT";
+                    ret = 0;
+                    break;
+                case ANDROID_CONTROL_AWB_MODE_DAYLIGHT:
+                    msg = "DAYLIGHT";
+                    ret = 0;
+                    break;
+                case ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT:
+                    msg = "CLOUDY_DAYLIGHT";
+                    ret = 0;
+                    break;
+                case ANDROID_CONTROL_AWB_MODE_TWILIGHT:
+                    msg = "TWILIGHT";
+                    ret = 0;
+                    break;
+                case ANDROID_CONTROL_AWB_MODE_SHADE:
+                    msg = "SHADE";
+                    ret = 0;
+                    break;
+                default:
+                    msg = "error: enum value out of range";
+            }
+            break;
+        }
+        case ANDROID_CONTROL_AWB_REGIONS: {
+            break;
+        }
+        case ANDROID_CONTROL_CAPTURE_INTENT: {
+            switch (value) {
+                case ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM:
+                    msg = "CUSTOM";
+                    ret = 0;
+                    break;
+                case ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW:
+                    msg = "PREVIEW";
+                    ret = 0;
+                    break;
+                case ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE:
+                    msg = "STILL_CAPTURE";
+                    ret = 0;
+                    break;
+                case ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD:
+                    msg = "VIDEO_RECORD";
+                    ret = 0;
+                    break;
+                case ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT:
+                    msg = "VIDEO_SNAPSHOT";
+                    ret = 0;
+                    break;
+                case ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG:
+                    msg = "ZERO_SHUTTER_LAG";
+                    ret = 0;
+                    break;
+                case ANDROID_CONTROL_CAPTURE_INTENT_MANUAL:
+                    msg = "MANUAL";
+                    ret = 0;
+                    break;
+                default:
+                    msg = "error: enum value out of range";
+            }
+            break;
+        }
+        case ANDROID_CONTROL_EFFECT_MODE: {
+            switch (value) {
+                case ANDROID_CONTROL_EFFECT_MODE_OFF:
+                    msg = "OFF";
+                    ret = 0;
+                    break;
+                case ANDROID_CONTROL_EFFECT_MODE_MONO:
+                    msg = "MONO";
+                    ret = 0;
+                    break;
+                case ANDROID_CONTROL_EFFECT_MODE_NEGATIVE:
+                    msg = "NEGATIVE";
+                    ret = 0;
+                    break;
+                case ANDROID_CONTROL_EFFECT_MODE_SOLARIZE:
+                    msg = "SOLARIZE";
+                    ret = 0;
+                    break;
+                case ANDROID_CONTROL_EFFECT_MODE_SEPIA:
+                    msg = "SEPIA";
+                    ret = 0;
+                    break;
+                case ANDROID_CONTROL_EFFECT_MODE_POSTERIZE:
+                    msg = "POSTERIZE";
+                    ret = 0;
+                    break;
+                case ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD:
+                    msg = "WHITEBOARD";
+                    ret = 0;
+                    break;
+                case ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD:
+                    msg = "BLACKBOARD";
+                    ret = 0;
+                    break;
+                case ANDROID_CONTROL_EFFECT_MODE_AQUA:
+                    msg = "AQUA";
+                    ret = 0;
+                    break;
+                default:
+                    msg = "error: enum value out of range";
+            }
+            break;
+        }
+        case ANDROID_CONTROL_MODE: {
+            switch (value) {
+                case ANDROID_CONTROL_MODE_OFF:
+                    msg = "OFF";
+                    ret = 0;
+                    break;
+                case ANDROID_CONTROL_MODE_AUTO:
+                    msg = "AUTO";
+                    ret = 0;
+                    break;
+                case ANDROID_CONTROL_MODE_USE_SCENE_MODE:
+                    msg = "USE_SCENE_MODE";
+                    ret = 0;
+                    break;
+                case ANDROID_CONTROL_MODE_OFF_KEEP_STATE:
+                    msg = "OFF_KEEP_STATE";
+                    ret = 0;
+                    break;
+                default:
+                    msg = "error: enum value out of range";
+            }
+            break;
+        }
+        case ANDROID_CONTROL_SCENE_MODE: {
+            switch (value) {
+                case ANDROID_CONTROL_SCENE_MODE_DISABLED:
+                    msg = "DISABLED";
+                    ret = 0;
+                    break;
+                case ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY:
+                    msg = "FACE_PRIORITY";
+                    ret = 0;
+                    break;
+                case ANDROID_CONTROL_SCENE_MODE_ACTION:
+                    msg = "ACTION";
+                    ret = 0;
+                    break;
+                case ANDROID_CONTROL_SCENE_MODE_PORTRAIT:
+                    msg = "PORTRAIT";
+                    ret = 0;
+                    break;
+                case ANDROID_CONTROL_SCENE_MODE_LANDSCAPE:
+                    msg = "LANDSCAPE";
+                    ret = 0;
+                    break;
+                case ANDROID_CONTROL_SCENE_MODE_NIGHT:
+                    msg = "NIGHT";
+                    ret = 0;
+                    break;
+                case ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT:
+                    msg = "NIGHT_PORTRAIT";
+                    ret = 0;
+                    break;
+                case ANDROID_CONTROL_SCENE_MODE_THEATRE:
+                    msg = "THEATRE";
+                    ret = 0;
+                    break;
+                case ANDROID_CONTROL_SCENE_MODE_BEACH:
+                    msg = "BEACH";
+                    ret = 0;
+                    break;
+                case ANDROID_CONTROL_SCENE_MODE_SNOW:
+                    msg = "SNOW";
+                    ret = 0;
+                    break;
+                case ANDROID_CONTROL_SCENE_MODE_SUNSET:
+                    msg = "SUNSET";
+                    ret = 0;
+                    break;
+                case ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO:
+                    msg = "STEADYPHOTO";
+                    ret = 0;
+                    break;
+                case ANDROID_CONTROL_SCENE_MODE_FIREWORKS:
+                    msg = "FIREWORKS";
+                    ret = 0;
+                    break;
+                case ANDROID_CONTROL_SCENE_MODE_SPORTS:
+                    msg = "SPORTS";
+                    ret = 0;
+                    break;
+                case ANDROID_CONTROL_SCENE_MODE_PARTY:
+                    msg = "PARTY";
+                    ret = 0;
+                    break;
+                case ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT:
+                    msg = "CANDLELIGHT";
+                    ret = 0;
+                    break;
+                case ANDROID_CONTROL_SCENE_MODE_BARCODE:
+                    msg = "BARCODE";
+                    ret = 0;
+                    break;
+                case ANDROID_CONTROL_SCENE_MODE_HIGH_SPEED_VIDEO:
+                    msg = "HIGH_SPEED_VIDEO";
+                    ret = 0;
+                    break;
+                case ANDROID_CONTROL_SCENE_MODE_HDR:
+                    msg = "HDR";
+                    ret = 0;
+                    break;
+                case ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY_LOW_LIGHT:
+                    msg = "FACE_PRIORITY_LOW_LIGHT";
+                    ret = 0;
+                    break;
+                default:
+                    msg = "error: enum value out of range";
+            }
+            break;
+        }
+        case ANDROID_CONTROL_VIDEO_STABILIZATION_MODE: {
+            switch (value) {
+                case ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF:
+                    msg = "OFF";
+                    ret = 0;
+                    break;
+                case ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON:
+                    msg = "ON";
+                    ret = 0;
+                    break;
+                default:
+                    msg = "error: enum value out of range";
+            }
+            break;
+        }
+        case ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES: {
+            break;
+        }
+        case ANDROID_CONTROL_AE_AVAILABLE_MODES: {
+            break;
+        }
+        case ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES: {
+            break;
+        }
+        case ANDROID_CONTROL_AE_COMPENSATION_RANGE: {
+            break;
+        }
+        case ANDROID_CONTROL_AE_COMPENSATION_STEP: {
+            break;
+        }
+        case ANDROID_CONTROL_AF_AVAILABLE_MODES: {
+            break;
+        }
+        case ANDROID_CONTROL_AVAILABLE_EFFECTS: {
+            break;
+        }
+        case ANDROID_CONTROL_AVAILABLE_SCENE_MODES: {
+            break;
+        }
+        case ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES: {
+            break;
+        }
+        case ANDROID_CONTROL_AWB_AVAILABLE_MODES: {
+            break;
+        }
+        case ANDROID_CONTROL_MAX_REGIONS: {
+            break;
+        }
+        case ANDROID_CONTROL_SCENE_MODE_OVERRIDES: {
+            break;
+        }
+        case ANDROID_CONTROL_AE_PRECAPTURE_ID: {
+            break;
+        }
+        case ANDROID_CONTROL_AE_STATE: {
+            switch (value) {
+                case ANDROID_CONTROL_AE_STATE_INACTIVE:
+                    msg = "INACTIVE";
+                    ret = 0;
+                    break;
+                case ANDROID_CONTROL_AE_STATE_SEARCHING:
+                    msg = "SEARCHING";
+                    ret = 0;
+                    break;
+                case ANDROID_CONTROL_AE_STATE_CONVERGED:
+                    msg = "CONVERGED";
+                    ret = 0;
+                    break;
+                case ANDROID_CONTROL_AE_STATE_LOCKED:
+                    msg = "LOCKED";
+                    ret = 0;
+                    break;
+                case ANDROID_CONTROL_AE_STATE_FLASH_REQUIRED:
+                    msg = "FLASH_REQUIRED";
+                    ret = 0;
+                    break;
+                case ANDROID_CONTROL_AE_STATE_PRECAPTURE:
+                    msg = "PRECAPTURE";
+                    ret = 0;
+                    break;
+                default:
+                    msg = "error: enum value out of range";
+            }
+            break;
+        }
+        case ANDROID_CONTROL_AF_STATE: {
+            switch (value) {
+                case ANDROID_CONTROL_AF_STATE_INACTIVE:
+                    msg = "INACTIVE";
+                    ret = 0;
+                    break;
+                case ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN:
+                    msg = "PASSIVE_SCAN";
+                    ret = 0;
+                    break;
+                case ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED:
+                    msg = "PASSIVE_FOCUSED";
+                    ret = 0;
+                    break;
+                case ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN:
+                    msg = "ACTIVE_SCAN";
+                    ret = 0;
+                    break;
+                case ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED:
+                    msg = "FOCUSED_LOCKED";
+                    ret = 0;
+                    break;
+                case ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED:
+                    msg = "NOT_FOCUSED_LOCKED";
+                    ret = 0;
+                    break;
+                case ANDROID_CONTROL_AF_STATE_PASSIVE_UNFOCUSED:
+                    msg = "PASSIVE_UNFOCUSED";
+                    ret = 0;
+                    break;
+                default:
+                    msg = "error: enum value out of range";
+            }
+            break;
+        }
+        case ANDROID_CONTROL_AF_TRIGGER_ID: {
+            break;
+        }
+        case ANDROID_CONTROL_AWB_STATE: {
+            switch (value) {
+                case ANDROID_CONTROL_AWB_STATE_INACTIVE:
+                    msg = "INACTIVE";
+                    ret = 0;
+                    break;
+                case ANDROID_CONTROL_AWB_STATE_SEARCHING:
+                    msg = "SEARCHING";
+                    ret = 0;
+                    break;
+                case ANDROID_CONTROL_AWB_STATE_CONVERGED:
+                    msg = "CONVERGED";
+                    ret = 0;
+                    break;
+                case ANDROID_CONTROL_AWB_STATE_LOCKED:
+                    msg = "LOCKED";
+                    ret = 0;
+                    break;
+                default:
+                    msg = "error: enum value out of range";
+            }
+            break;
+        }
+        case ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS: {
+            break;
+        }
+        case ANDROID_CONTROL_AE_LOCK_AVAILABLE: {
+            switch (value) {
+                case ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE:
+                    msg = "FALSE";
+                    ret = 0;
+                    break;
+                case ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE:
+                    msg = "TRUE";
+                    ret = 0;
+                    break;
+                default:
+                    msg = "error: enum value out of range";
+            }
+            break;
+        }
+        case ANDROID_CONTROL_AWB_LOCK_AVAILABLE: {
+            switch (value) {
+                case ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE:
+                    msg = "FALSE";
+                    ret = 0;
+                    break;
+                case ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE:
+                    msg = "TRUE";
+                    ret = 0;
+                    break;
+                default:
+                    msg = "error: enum value out of range";
+            }
+            break;
+        }
+        case ANDROID_CONTROL_AVAILABLE_MODES: {
+            break;
+        }
+
+        case ANDROID_DEMOSAIC_MODE: {
+            switch (value) {
+                case ANDROID_DEMOSAIC_MODE_FAST:
+                    msg = "FAST";
+                    ret = 0;
+                    break;
+                case ANDROID_DEMOSAIC_MODE_HIGH_QUALITY:
+                    msg = "HIGH_QUALITY";
+                    ret = 0;
+                    break;
+                default:
+                    msg = "error: enum value out of range";
+            }
+            break;
+        }
+
+        case ANDROID_EDGE_MODE: {
+            switch (value) {
+                case ANDROID_EDGE_MODE_OFF:
+                    msg = "OFF";
+                    ret = 0;
+                    break;
+                case ANDROID_EDGE_MODE_FAST:
+                    msg = "FAST";
+                    ret = 0;
+                    break;
+                case ANDROID_EDGE_MODE_HIGH_QUALITY:
+                    msg = "HIGH_QUALITY";
+                    ret = 0;
+                    break;
+                case ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG:
+                    msg = "ZERO_SHUTTER_LAG";
+                    ret = 0;
+                    break;
+                default:
+                    msg = "error: enum value out of range";
+            }
+            break;
+        }
+        case ANDROID_EDGE_STRENGTH: {
+            break;
+        }
+        case ANDROID_EDGE_AVAILABLE_EDGE_MODES: {
+            break;
+        }
+
+        case ANDROID_FLASH_FIRING_POWER: {
+            break;
+        }
+        case ANDROID_FLASH_FIRING_TIME: {
+            break;
+        }
+        case ANDROID_FLASH_MODE: {
+            switch (value) {
+                case ANDROID_FLASH_MODE_OFF:
+                    msg = "OFF";
+                    ret = 0;
+                    break;
+                case ANDROID_FLASH_MODE_SINGLE:
+                    msg = "SINGLE";
+                    ret = 0;
+                    break;
+                case ANDROID_FLASH_MODE_TORCH:
+                    msg = "TORCH";
+                    ret = 0;
+                    break;
+                default:
+                    msg = "error: enum value out of range";
+            }
+            break;
+        }
+        case ANDROID_FLASH_COLOR_TEMPERATURE: {
+            break;
+        }
+        case ANDROID_FLASH_MAX_ENERGY: {
+            break;
+        }
+        case ANDROID_FLASH_STATE: {
+            switch (value) {
+                case ANDROID_FLASH_STATE_UNAVAILABLE:
+                    msg = "UNAVAILABLE";
+                    ret = 0;
+                    break;
+                case ANDROID_FLASH_STATE_CHARGING:
+                    msg = "CHARGING";
+                    ret = 0;
+                    break;
+                case ANDROID_FLASH_STATE_READY:
+                    msg = "READY";
+                    ret = 0;
+                    break;
+                case ANDROID_FLASH_STATE_FIRED:
+                    msg = "FIRED";
+                    ret = 0;
+                    break;
+                case ANDROID_FLASH_STATE_PARTIAL:
+                    msg = "PARTIAL";
+                    ret = 0;
+                    break;
+                default:
+                    msg = "error: enum value out of range";
+            }
+            break;
+        }
+
+        case ANDROID_FLASH_INFO_AVAILABLE: {
+            switch (value) {
+                case ANDROID_FLASH_INFO_AVAILABLE_FALSE:
+                    msg = "FALSE";
+                    ret = 0;
+                    break;
+                case ANDROID_FLASH_INFO_AVAILABLE_TRUE:
+                    msg = "TRUE";
+                    ret = 0;
+                    break;
+                default:
+                    msg = "error: enum value out of range";
+            }
+            break;
+        }
+        case ANDROID_FLASH_INFO_CHARGE_DURATION: {
+            break;
+        }
+
+        case ANDROID_HOT_PIXEL_MODE: {
+            switch (value) {
+                case ANDROID_HOT_PIXEL_MODE_OFF:
+                    msg = "OFF";
+                    ret = 0;
+                    break;
+                case ANDROID_HOT_PIXEL_MODE_FAST:
+                    msg = "FAST";
+                    ret = 0;
+                    break;
+                case ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY:
+                    msg = "HIGH_QUALITY";
+                    ret = 0;
+                    break;
+                default:
+                    msg = "error: enum value out of range";
+            }
+            break;
+        }
+        case ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES: {
+            break;
+        }
+
+        case ANDROID_JPEG_GPS_COORDINATES: {
+            break;
+        }
+        case ANDROID_JPEG_GPS_PROCESSING_METHOD: {
+            break;
+        }
+        case ANDROID_JPEG_GPS_TIMESTAMP: {
+            break;
+        }
+        case ANDROID_JPEG_ORIENTATION: {
+            break;
+        }
+        case ANDROID_JPEG_QUALITY: {
+            break;
+        }
+        case ANDROID_JPEG_THUMBNAIL_QUALITY: {
+            break;
+        }
+        case ANDROID_JPEG_THUMBNAIL_SIZE: {
+            break;
+        }
+        case ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES: {
+            break;
+        }
+        case ANDROID_JPEG_MAX_SIZE: {
+            break;
+        }
+        case ANDROID_JPEG_SIZE: {
+            break;
+        }
+
+        case ANDROID_LENS_APERTURE: {
+            break;
+        }
+        case ANDROID_LENS_FILTER_DENSITY: {
+            break;
+        }
+        case ANDROID_LENS_FOCAL_LENGTH: {
+            break;
+        }
+        case ANDROID_LENS_FOCUS_DISTANCE: {
+            break;
+        }
+        case ANDROID_LENS_OPTICAL_STABILIZATION_MODE: {
+            switch (value) {
+                case ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF:
+                    msg = "OFF";
+                    ret = 0;
+                    break;
+                case ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON:
+                    msg = "ON";
+                    ret = 0;
+                    break;
+                default:
+                    msg = "error: enum value out of range";
+            }
+            break;
+        }
+        case ANDROID_LENS_FACING: {
+            switch (value) {
+                case ANDROID_LENS_FACING_FRONT:
+                    msg = "FRONT";
+                    ret = 0;
+                    break;
+                case ANDROID_LENS_FACING_BACK:
+                    msg = "BACK";
+                    ret = 0;
+                    break;
+                case ANDROID_LENS_FACING_EXTERNAL:
+                    msg = "EXTERNAL";
+                    ret = 0;
+                    break;
+                default:
+                    msg = "error: enum value out of range";
+            }
+            break;
+        }
+        case ANDROID_LENS_POSE_ROTATION: {
+            break;
+        }
+        case ANDROID_LENS_POSE_TRANSLATION: {
+            break;
+        }
+        case ANDROID_LENS_FOCUS_RANGE: {
+            break;
+        }
+        case ANDROID_LENS_STATE: {
+            switch (value) {
+                case ANDROID_LENS_STATE_STATIONARY:
+                    msg = "STATIONARY";
+                    ret = 0;
+                    break;
+                case ANDROID_LENS_STATE_MOVING:
+                    msg = "MOVING";
+                    ret = 0;
+                    break;
+                default:
+                    msg = "error: enum value out of range";
+            }
+            break;
+        }
+        case ANDROID_LENS_INTRINSIC_CALIBRATION: {
+            break;
+        }
+        case ANDROID_LENS_RADIAL_DISTORTION: {
+            break;
+        }
+
+        case ANDROID_LENS_INFO_AVAILABLE_APERTURES: {
+            break;
+        }
+        case ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES: {
+            break;
+        }
+        case ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS: {
+            break;
+        }
+        case ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION: {
+            break;
+        }
+        case ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE: {
+            break;
+        }
+        case ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE: {
+            break;
+        }
+        case ANDROID_LENS_INFO_SHADING_MAP_SIZE: {
+            break;
+        }
+        case ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION: {
+            switch (value) {
+                case ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED:
+                    msg = "UNCALIBRATED";
+                    ret = 0;
+                    break;
+                case ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE:
+                    msg = "APPROXIMATE";
+                    ret = 0;
+                    break;
+                case ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED:
+                    msg = "CALIBRATED";
+                    ret = 0;
+                    break;
+                default:
+                    msg = "error: enum value out of range";
+            }
+            break;
+        }
+
+        case ANDROID_NOISE_REDUCTION_MODE: {
+            switch (value) {
+                case ANDROID_NOISE_REDUCTION_MODE_OFF:
+                    msg = "OFF";
+                    ret = 0;
+                    break;
+                case ANDROID_NOISE_REDUCTION_MODE_FAST:
+                    msg = "FAST";
+                    ret = 0;
+                    break;
+                case ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY:
+                    msg = "HIGH_QUALITY";
+                    ret = 0;
+                    break;
+                case ANDROID_NOISE_REDUCTION_MODE_MINIMAL:
+                    msg = "MINIMAL";
+                    ret = 0;
+                    break;
+                case ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG:
+                    msg = "ZERO_SHUTTER_LAG";
+                    ret = 0;
+                    break;
+                default:
+                    msg = "error: enum value out of range";
+            }
+            break;
+        }
+        case ANDROID_NOISE_REDUCTION_STRENGTH: {
+            break;
+        }
+        case ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES: {
+            break;
+        }
+
+        case ANDROID_QUIRKS_METERING_CROP_REGION: {
+            break;
+        }
+        case ANDROID_QUIRKS_TRIGGER_AF_WITH_AUTO: {
+            break;
+        }
+        case ANDROID_QUIRKS_USE_ZSL_FORMAT: {
+            break;
+        }
+        case ANDROID_QUIRKS_USE_PARTIAL_RESULT: {
+            break;
+        }
+        case ANDROID_QUIRKS_PARTIAL_RESULT: {
+            switch (value) {
+                case ANDROID_QUIRKS_PARTIAL_RESULT_FINAL:
+                    msg = "FINAL";
+                    ret = 0;
+                    break;
+                case ANDROID_QUIRKS_PARTIAL_RESULT_PARTIAL:
+                    msg = "PARTIAL";
+                    ret = 0;
+                    break;
+                default:
+                    msg = "error: enum value out of range";
+            }
+            break;
+        }
+
+        case ANDROID_REQUEST_FRAME_COUNT: {
+            break;
+        }
+        case ANDROID_REQUEST_ID: {
+            break;
+        }
+        case ANDROID_REQUEST_INPUT_STREAMS: {
+            break;
+        }
+        case ANDROID_REQUEST_METADATA_MODE: {
+            switch (value) {
+                case ANDROID_REQUEST_METADATA_MODE_NONE:
+                    msg = "NONE";
+                    ret = 0;
+                    break;
+                case ANDROID_REQUEST_METADATA_MODE_FULL:
+                    msg = "FULL";
+                    ret = 0;
+                    break;
+                default:
+                    msg = "error: enum value out of range";
+            }
+            break;
+        }
+        case ANDROID_REQUEST_OUTPUT_STREAMS: {
+            break;
+        }
+        case ANDROID_REQUEST_TYPE: {
+            switch (value) {
+                case ANDROID_REQUEST_TYPE_CAPTURE:
+                    msg = "CAPTURE";
+                    ret = 0;
+                    break;
+                case ANDROID_REQUEST_TYPE_REPROCESS:
+                    msg = "REPROCESS";
+                    ret = 0;
+                    break;
+                default:
+                    msg = "error: enum value out of range";
+            }
+            break;
+        }
+        case ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS: {
+            break;
+        }
+        case ANDROID_REQUEST_MAX_NUM_REPROCESS_STREAMS: {
+            break;
+        }
+        case ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS: {
+            break;
+        }
+        case ANDROID_REQUEST_PIPELINE_DEPTH: {
+            break;
+        }
+        case ANDROID_REQUEST_PIPELINE_MAX_DEPTH: {
+            break;
+        }
+        case ANDROID_REQUEST_PARTIAL_RESULT_COUNT: {
+            break;
+        }
+        case ANDROID_REQUEST_AVAILABLE_CAPABILITIES: {
+            switch (value) {
+                case ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE:
+                    msg = "BACKWARD_COMPATIBLE";
+                    ret = 0;
+                    break;
+                case ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR:
+                    msg = "MANUAL_SENSOR";
+                    ret = 0;
+                    break;
+                case ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING:
+                    msg = "MANUAL_POST_PROCESSING";
+                    ret = 0;
+                    break;
+                case ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW:
+                    msg = "RAW";
+                    ret = 0;
+                    break;
+                case ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING:
+                    msg = "PRIVATE_REPROCESSING";
+                    ret = 0;
+                    break;
+                case ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS:
+                    msg = "READ_SENSOR_SETTINGS";
+                    ret = 0;
+                    break;
+                case ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE:
+                    msg = "BURST_CAPTURE";
+                    ret = 0;
+                    break;
+                case ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING:
+                    msg = "YUV_REPROCESSING";
+                    ret = 0;
+                    break;
+                case ANDROID_REQUEST_AVAILABLE_CAPABILITIES_DEPTH_OUTPUT:
+                    msg = "DEPTH_OUTPUT";
+                    ret = 0;
+                    break;
+                case ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO:
+                    msg = "CONSTRAINED_HIGH_SPEED_VIDEO";
+                    ret = 0;
+                    break;
+                default:
+                    msg = "error: enum value out of range";
+            }
+            break;
+        }
+        case ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS: {
+            break;
+        }
+        case ANDROID_REQUEST_AVAILABLE_RESULT_KEYS: {
+            break;
+        }
+        case ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS: {
+            break;
+        }
+
+        case ANDROID_SCALER_CROP_REGION: {
+            break;
+        }
+        case ANDROID_SCALER_AVAILABLE_FORMATS: {
+            switch (value) {
+                case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
+                    msg = "RAW16";
+                    ret = 0;
+                    break;
+                case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
+                    msg = "RAW_OPAQUE";
+                    ret = 0;
+                    break;
+                case ANDROID_SCALER_AVAILABLE_FORMATS_YV12:
+                    msg = "YV12";
+                    ret = 0;
+                    break;
+                case ANDROID_SCALER_AVAILABLE_FORMATS_YCrCb_420_SP:
+                    msg = "YCrCb_420_SP";
+                    ret = 0;
+                    break;
+                case ANDROID_SCALER_AVAILABLE_FORMATS_IMPLEMENTATION_DEFINED:
+                    msg = "IMPLEMENTATION_DEFINED";
+                    ret = 0;
+                    break;
+                case ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888:
+                    msg = "YCbCr_420_888";
+                    ret = 0;
+                    break;
+                case ANDROID_SCALER_AVAILABLE_FORMATS_BLOB:
+                    msg = "BLOB";
+                    ret = 0;
+                    break;
+                default:
+                    msg = "error: enum value out of range";
+            }
+            break;
+        }
+        case ANDROID_SCALER_AVAILABLE_JPEG_MIN_DURATIONS: {
+            break;
+        }
+        case ANDROID_SCALER_AVAILABLE_JPEG_SIZES: {
+            break;
+        }
+        case ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM: {
+            break;
+        }
+        case ANDROID_SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS: {
+            break;
+        }
+        case ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES: {
+            break;
+        }
+        case ANDROID_SCALER_AVAILABLE_RAW_MIN_DURATIONS: {
+            break;
+        }
+        case ANDROID_SCALER_AVAILABLE_RAW_SIZES: {
+            break;
+        }
+        case ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP: {
+            break;
+        }
+        case ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS: {
+            switch (value) {
+                case ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT:
+                    msg = "OUTPUT";
+                    ret = 0;
+                    break;
+                case ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT:
+                    msg = "INPUT";
+                    ret = 0;
+                    break;
+                default:
+                    msg = "error: enum value out of range";
+            }
+            break;
+        }
+        case ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS: {
+            break;
+        }
+        case ANDROID_SCALER_AVAILABLE_STALL_DURATIONS: {
+            break;
+        }
+        case ANDROID_SCALER_CROPPING_TYPE: {
+            switch (value) {
+                case ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY:
+                    msg = "CENTER_ONLY";
+                    ret = 0;
+                    break;
+                case ANDROID_SCALER_CROPPING_TYPE_FREEFORM:
+                    msg = "FREEFORM";
+                    ret = 0;
+                    break;
+                default:
+                    msg = "error: enum value out of range";
+            }
+            break;
+        }
+
+        case ANDROID_SENSOR_EXPOSURE_TIME: {
+            break;
+        }
+        case ANDROID_SENSOR_FRAME_DURATION: {
+            break;
+        }
+        case ANDROID_SENSOR_SENSITIVITY: {
+            break;
+        }
+        case ANDROID_SENSOR_REFERENCE_ILLUMINANT1: {
+            switch (value) {
+                case ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT:
+                    msg = "DAYLIGHT";
+                    ret = 0;
+                    break;
+                case ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT:
+                    msg = "FLUORESCENT";
+                    ret = 0;
+                    break;
+                case ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN:
+                    msg = "TUNGSTEN";
+                    ret = 0;
+                    break;
+                case ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLASH:
+                    msg = "FLASH";
+                    ret = 0;
+                    break;
+                case ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER:
+                    msg = "FINE_WEATHER";
+                    ret = 0;
+                    break;
+                case ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER:
+                    msg = "CLOUDY_WEATHER";
+                    ret = 0;
+                    break;
+                case ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE:
+                    msg = "SHADE";
+                    ret = 0;
+                    break;
+                case ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT:
+                    msg = "DAYLIGHT_FLUORESCENT";
+                    ret = 0;
+                    break;
+                case ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT:
+                    msg = "DAY_WHITE_FLUORESCENT";
+                    ret = 0;
+                    break;
+                case ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT:
+                    msg = "COOL_WHITE_FLUORESCENT";
+                    ret = 0;
+                    break;
+                case ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT:
+                    msg = "WHITE_FLUORESCENT";
+                    ret = 0;
+                    break;
+                case ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A:
+                    msg = "STANDARD_A";
+                    ret = 0;
+                    break;
+                case ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_B:
+                    msg = "STANDARD_B";
+                    ret = 0;
+                    break;
+                case ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_C:
+                    msg = "STANDARD_C";
+                    ret = 0;
+                    break;
+                case ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55:
+                    msg = "D55";
+                    ret = 0;
+                    break;
+                case ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65:
+                    msg = "D65";
+                    ret = 0;
+                    break;
+                case ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75:
+                    msg = "D75";
+                    ret = 0;
+                    break;
+                case ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50:
+                    msg = "D50";
+                    ret = 0;
+                    break;
+                case ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN:
+                    msg = "ISO_STUDIO_TUNGSTEN";
+                    ret = 0;
+                    break;
+                default:
+                    msg = "error: enum value out of range";
+            }
+            break;
+        }
+        case ANDROID_SENSOR_REFERENCE_ILLUMINANT2: {
+            break;
+        }
+        case ANDROID_SENSOR_CALIBRATION_TRANSFORM1: {
+            break;
+        }
+        case ANDROID_SENSOR_CALIBRATION_TRANSFORM2: {
+            break;
+        }
+        case ANDROID_SENSOR_COLOR_TRANSFORM1: {
+            break;
+        }
+        case ANDROID_SENSOR_COLOR_TRANSFORM2: {
+            break;
+        }
+        case ANDROID_SENSOR_FORWARD_MATRIX1: {
+            break;
+        }
+        case ANDROID_SENSOR_FORWARD_MATRIX2: {
+            break;
+        }
+        case ANDROID_SENSOR_BASE_GAIN_FACTOR: {
+            break;
+        }
+        case ANDROID_SENSOR_BLACK_LEVEL_PATTERN: {
+            break;
+        }
+        case ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY: {
+            break;
+        }
+        case ANDROID_SENSOR_ORIENTATION: {
+            break;
+        }
+        case ANDROID_SENSOR_PROFILE_HUE_SAT_MAP_DIMENSIONS: {
+            break;
+        }
+        case ANDROID_SENSOR_TIMESTAMP: {
+            break;
+        }
+        case ANDROID_SENSOR_TEMPERATURE: {
+            break;
+        }
+        case ANDROID_SENSOR_NEUTRAL_COLOR_POINT: {
+            break;
+        }
+        case ANDROID_SENSOR_NOISE_PROFILE: {
+            break;
+        }
+        case ANDROID_SENSOR_PROFILE_HUE_SAT_MAP: {
+            break;
+        }
+        case ANDROID_SENSOR_PROFILE_TONE_CURVE: {
+            break;
+        }
+        case ANDROID_SENSOR_GREEN_SPLIT: {
+            break;
+        }
+        case ANDROID_SENSOR_TEST_PATTERN_DATA: {
+            break;
+        }
+        case ANDROID_SENSOR_TEST_PATTERN_MODE: {
+            switch (value) {
+                case ANDROID_SENSOR_TEST_PATTERN_MODE_OFF:
+                    msg = "OFF";
+                    ret = 0;
+                    break;
+                case ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR:
+                    msg = "SOLID_COLOR";
+                    ret = 0;
+                    break;
+                case ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS:
+                    msg = "COLOR_BARS";
+                    ret = 0;
+                    break;
+                case ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY:
+                    msg = "COLOR_BARS_FADE_TO_GRAY";
+                    ret = 0;
+                    break;
+                case ANDROID_SENSOR_TEST_PATTERN_MODE_PN9:
+                    msg = "PN9";
+                    ret = 0;
+                    break;
+                case ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1:
+                    msg = "CUSTOM1";
+                    ret = 0;
+                    break;
+                default:
+                    msg = "error: enum value out of range";
+            }
+            break;
+        }
+        case ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES: {
+            break;
+        }
+        case ANDROID_SENSOR_ROLLING_SHUTTER_SKEW: {
+            break;
+        }
+
+        case ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE: {
+            break;
+        }
+        case ANDROID_SENSOR_INFO_SENSITIVITY_RANGE: {
+            break;
+        }
+        case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT: {
+            switch (value) {
+                case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGGB:
+                    msg = "RGGB";
+                    ret = 0;
+                    break;
+                case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GRBG:
+                    msg = "GRBG";
+                    ret = 0;
+                    break;
+                case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GBRG:
+                    msg = "GBRG";
+                    ret = 0;
+                    break;
+                case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_BGGR:
+                    msg = "BGGR";
+                    ret = 0;
+                    break;
+                case ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGB:
+                    msg = "RGB";
+                    ret = 0;
+                    break;
+                default:
+                    msg = "error: enum value out of range";
+            }
+            break;
+        }
+        case ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE: {
+            break;
+        }
+        case ANDROID_SENSOR_INFO_MAX_FRAME_DURATION: {
+            break;
+        }
+        case ANDROID_SENSOR_INFO_PHYSICAL_SIZE: {
+            break;
+        }
+        case ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE: {
+            break;
+        }
+        case ANDROID_SENSOR_INFO_WHITE_LEVEL: {
+            break;
+        }
+        case ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE: {
+            switch (value) {
+                case ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN:
+                    msg = "UNKNOWN";
+                    ret = 0;
+                    break;
+                case ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME:
+                    msg = "REALTIME";
+                    ret = 0;
+                    break;
+                default:
+                    msg = "error: enum value out of range";
+            }
+            break;
+        }
+        case ANDROID_SENSOR_INFO_LENS_SHADING_APPLIED: {
+            switch (value) {
+                case ANDROID_SENSOR_INFO_LENS_SHADING_APPLIED_FALSE:
+                    msg = "FALSE";
+                    ret = 0;
+                    break;
+                case ANDROID_SENSOR_INFO_LENS_SHADING_APPLIED_TRUE:
+                    msg = "TRUE";
+                    ret = 0;
+                    break;
+                default:
+                    msg = "error: enum value out of range";
+            }
+            break;
+        }
+        case ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE: {
+            break;
+        }
+
+        case ANDROID_SHADING_MODE: {
+            switch (value) {
+                case ANDROID_SHADING_MODE_OFF:
+                    msg = "OFF";
+                    ret = 0;
+                    break;
+                case ANDROID_SHADING_MODE_FAST:
+                    msg = "FAST";
+                    ret = 0;
+                    break;
+                case ANDROID_SHADING_MODE_HIGH_QUALITY:
+                    msg = "HIGH_QUALITY";
+                    ret = 0;
+                    break;
+                default:
+                    msg = "error: enum value out of range";
+            }
+            break;
+        }
+        case ANDROID_SHADING_STRENGTH: {
+            break;
+        }
+        case ANDROID_SHADING_AVAILABLE_MODES: {
+            break;
+        }
+
+        case ANDROID_STATISTICS_FACE_DETECT_MODE: {
+            switch (value) {
+                case ANDROID_STATISTICS_FACE_DETECT_MODE_OFF:
+                    msg = "OFF";
+                    ret = 0;
+                    break;
+                case ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE:
+                    msg = "SIMPLE";
+                    ret = 0;
+                    break;
+                case ANDROID_STATISTICS_FACE_DETECT_MODE_FULL:
+                    msg = "FULL";
+                    ret = 0;
+                    break;
+                default:
+                    msg = "error: enum value out of range";
+            }
+            break;
+        }
+        case ANDROID_STATISTICS_HISTOGRAM_MODE: {
+            switch (value) {
+                case ANDROID_STATISTICS_HISTOGRAM_MODE_OFF:
+                    msg = "OFF";
+                    ret = 0;
+                    break;
+                case ANDROID_STATISTICS_HISTOGRAM_MODE_ON:
+                    msg = "ON";
+                    ret = 0;
+                    break;
+                default:
+                    msg = "error: enum value out of range";
+            }
+            break;
+        }
+        case ANDROID_STATISTICS_SHARPNESS_MAP_MODE: {
+            switch (value) {
+                case ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF:
+                    msg = "OFF";
+                    ret = 0;
+                    break;
+                case ANDROID_STATISTICS_SHARPNESS_MAP_MODE_ON:
+                    msg = "ON";
+                    ret = 0;
+                    break;
+                default:
+                    msg = "error: enum value out of range";
+            }
+            break;
+        }
+        case ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE: {
+            switch (value) {
+                case ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF:
+                    msg = "OFF";
+                    ret = 0;
+                    break;
+                case ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_ON:
+                    msg = "ON";
+                    ret = 0;
+                    break;
+                default:
+                    msg = "error: enum value out of range";
+            }
+            break;
+        }
+        case ANDROID_STATISTICS_FACE_IDS: {
+            break;
+        }
+        case ANDROID_STATISTICS_FACE_LANDMARKS: {
+            break;
+        }
+        case ANDROID_STATISTICS_FACE_RECTANGLES: {
+            break;
+        }
+        case ANDROID_STATISTICS_FACE_SCORES: {
+            break;
+        }
+        case ANDROID_STATISTICS_HISTOGRAM: {
+            break;
+        }
+        case ANDROID_STATISTICS_SHARPNESS_MAP: {
+            break;
+        }
+        case ANDROID_STATISTICS_LENS_SHADING_CORRECTION_MAP: {
+            break;
+        }
+        case ANDROID_STATISTICS_LENS_SHADING_MAP: {
+            break;
+        }
+        case ANDROID_STATISTICS_PREDICTED_COLOR_GAINS: {
+            break;
+        }
+        case ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM: {
+            break;
+        }
+        case ANDROID_STATISTICS_SCENE_FLICKER: {
+            switch (value) {
+                case ANDROID_STATISTICS_SCENE_FLICKER_NONE:
+                    msg = "NONE";
+                    ret = 0;
+                    break;
+                case ANDROID_STATISTICS_SCENE_FLICKER_50HZ:
+                    msg = "50HZ";
+                    ret = 0;
+                    break;
+                case ANDROID_STATISTICS_SCENE_FLICKER_60HZ:
+                    msg = "60HZ";
+                    ret = 0;
+                    break;
+                default:
+                    msg = "error: enum value out of range";
+            }
+            break;
+        }
+        case ANDROID_STATISTICS_HOT_PIXEL_MAP: {
+            break;
+        }
+        case ANDROID_STATISTICS_LENS_SHADING_MAP_MODE: {
+            switch (value) {
+                case ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF:
+                    msg = "OFF";
+                    ret = 0;
+                    break;
+                case ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON:
+                    msg = "ON";
+                    ret = 0;
+                    break;
+                default:
+                    msg = "error: enum value out of range";
+            }
+            break;
+        }
+
+        case ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES: {
+            break;
+        }
+        case ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT: {
+            break;
+        }
+        case ANDROID_STATISTICS_INFO_MAX_FACE_COUNT: {
+            break;
+        }
+        case ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT: {
+            break;
+        }
+        case ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE: {
+            break;
+        }
+        case ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE: {
+            break;
+        }
+        case ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES: {
+            break;
+        }
+        case ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES: {
+            break;
+        }
+
+        case ANDROID_TONEMAP_CURVE_BLUE: {
+            break;
+        }
+        case ANDROID_TONEMAP_CURVE_GREEN: {
+            break;
+        }
+        case ANDROID_TONEMAP_CURVE_RED: {
+            break;
+        }
+        case ANDROID_TONEMAP_MODE: {
+            switch (value) {
+                case ANDROID_TONEMAP_MODE_CONTRAST_CURVE:
+                    msg = "CONTRAST_CURVE";
+                    ret = 0;
+                    break;
+                case ANDROID_TONEMAP_MODE_FAST:
+                    msg = "FAST";
+                    ret = 0;
+                    break;
+                case ANDROID_TONEMAP_MODE_HIGH_QUALITY:
+                    msg = "HIGH_QUALITY";
+                    ret = 0;
+                    break;
+                case ANDROID_TONEMAP_MODE_GAMMA_VALUE:
+                    msg = "GAMMA_VALUE";
+                    ret = 0;
+                    break;
+                case ANDROID_TONEMAP_MODE_PRESET_CURVE:
+                    msg = "PRESET_CURVE";
+                    ret = 0;
+                    break;
+                default:
+                    msg = "error: enum value out of range";
+            }
+            break;
+        }
+        case ANDROID_TONEMAP_MAX_CURVE_POINTS: {
+            break;
+        }
+        case ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES: {
+            break;
+        }
+        case ANDROID_TONEMAP_GAMMA: {
+            break;
+        }
+        case ANDROID_TONEMAP_PRESET_CURVE: {
+            switch (value) {
+                case ANDROID_TONEMAP_PRESET_CURVE_SRGB:
+                    msg = "SRGB";
+                    ret = 0;
+                    break;
+                case ANDROID_TONEMAP_PRESET_CURVE_REC709:
+                    msg = "REC709";
+                    ret = 0;
+                    break;
+                default:
+                    msg = "error: enum value out of range";
+            }
+            break;
+        }
+
+        case ANDROID_LED_TRANSMIT: {
+            switch (value) {
+                case ANDROID_LED_TRANSMIT_OFF:
+                    msg = "OFF";
+                    ret = 0;
+                    break;
+                case ANDROID_LED_TRANSMIT_ON:
+                    msg = "ON";
+                    ret = 0;
+                    break;
+                default:
+                    msg = "error: enum value out of range";
+            }
+            break;
+        }
+        case ANDROID_LED_AVAILABLE_LEDS: {
+            switch (value) {
+                case ANDROID_LED_AVAILABLE_LEDS_TRANSMIT:
+                    msg = "TRANSMIT";
+                    ret = 0;
+                    break;
+                default:
+                    msg = "error: enum value out of range";
+            }
+            break;
+        }
+
+        case ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL: {
+            switch (value) {
+                case ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED:
+                    msg = "LIMITED";
+                    ret = 0;
+                    break;
+                case ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL:
+                    msg = "FULL";
+                    ret = 0;
+                    break;
+                case ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY:
+                    msg = "LEGACY";
+                    ret = 0;
+                    break;
+                default:
+                    msg = "error: enum value out of range";
+            }
+            break;
+        }
+
+        case ANDROID_BLACK_LEVEL_LOCK: {
+            switch (value) {
+                case ANDROID_BLACK_LEVEL_LOCK_OFF:
+                    msg = "OFF";
+                    ret = 0;
+                    break;
+                case ANDROID_BLACK_LEVEL_LOCK_ON:
+                    msg = "ON";
+                    ret = 0;
+                    break;
+                default:
+                    msg = "error: enum value out of range";
+            }
+            break;
+        }
+
+        case ANDROID_SYNC_FRAME_NUMBER: {
+            switch (value) {
+                case ANDROID_SYNC_FRAME_NUMBER_CONVERGING:
+                    msg = "CONVERGING";
+                    ret = 0;
+                    break;
+                case ANDROID_SYNC_FRAME_NUMBER_UNKNOWN:
+                    msg = "UNKNOWN";
+                    ret = 0;
+                    break;
+                default:
+                    msg = "error: enum value out of range";
+            }
+            break;
+        }
+        case ANDROID_SYNC_MAX_LATENCY: {
+            switch (value) {
+                case ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL:
+                    msg = "PER_FRAME_CONTROL";
+                    ret = 0;
+                    break;
+                case ANDROID_SYNC_MAX_LATENCY_UNKNOWN:
+                    msg = "UNKNOWN";
+                    ret = 0;
+                    break;
+                default:
+                    msg = "error: enum value out of range";
+            }
+            break;
+        }
+
+        case ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR: {
+            break;
+        }
+        case ANDROID_REPROCESS_MAX_CAPTURE_STALL: {
+            break;
+        }
+
+        case ANDROID_DEPTH_MAX_DEPTH_SAMPLES: {
+            break;
+        }
+        case ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS: {
+            switch (value) {
+                case ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_OUTPUT:
+                    msg = "OUTPUT";
+                    ret = 0;
+                    break;
+                case ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_INPUT:
+                    msg = "INPUT";
+                    ret = 0;
+                    break;
+                default:
+                    msg = "error: enum value out of range";
+            }
+            break;
+        }
+        case ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS: {
+            break;
+        }
+        case ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS: {
+            break;
+        }
+        case ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE: {
+            switch (value) {
+                case ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_FALSE:
+                    msg = "FALSE";
+                    ret = 0;
+                    break;
+                case ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_TRUE:
+                    msg = "TRUE";
+                    ret = 0;
+                    break;
+                default:
+                    msg = "error: enum value out of range";
+            }
+            break;
+        }
+
+    }
+
+    strncpy(dst, msg, size - 1);
+    dst[size - 1] = '\0';
+
+    return ret;
+}
+
+
+#define CAMERA_METADATA_ENUM_STRING_MAX_SIZE 29
diff --git a/media/camera/tests/Android.mk b/media/camera/tests/Android.mk
new file mode 100644
index 0000000..b39b3b5
--- /dev/null
+++ b/media/camera/tests/Android.mk
@@ -0,0 +1,23 @@
+# Build the unit tests.
+LOCAL_PATH:= $(call my-dir)
+include $(CLEAR_VARS)
+LOCAL_ADDITIONAL_DEPENDENCIES := $(LOCAL_PATH)/Android.mk
+
+LOCAL_SHARED_LIBRARIES := \
+	libutils \
+	libcamera_metadata
+
+LOCAL_C_INCLUDES := \
+	system/media/camera/include \
+	system/media/private/camera/include
+
+LOCAL_SRC_FILES := \
+	camera_metadata_tests.cpp
+
+LOCAL_MODULE := camera_metadata_tests
+LOCAL_MODULE_TAGS := tests
+LOCAL_MODULE_STEM_32 := camera_metadata_tests
+LOCAL_MODULE_STEM_64 := camera_metadata_tests64
+LOCAL_MULTILIB := both
+
+include $(BUILD_NATIVE_TEST)
diff --git a/media/camera/tests/camera_metadata_tests.cpp b/media/camera/tests/camera_metadata_tests.cpp
new file mode 100644
index 0000000..c3acb52
--- /dev/null
+++ b/media/camera/tests/camera_metadata_tests.cpp
@@ -0,0 +1,1876 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_NDEBUG 1
+#define LOG_TAG "camera_metadata_tests"
+#include "cutils/log.h"
+
+#include <errno.h>
+
+#include <vector>
+#include <algorithm>
+#include "gtest/gtest.h"
+#include "system/camera_metadata.h"
+#include "camera_metadata_hidden.h"
+
+#include "camera_metadata_tests_fake_vendor.h"
+
+#define EXPECT_NULL(x)     EXPECT_EQ((void*)0, x)  // assert a pointer expression is NULL
+#define EXPECT_NOT_NULL(x) EXPECT_NE((void*)0, x)  // assert a pointer expression is non-NULL
+#define ARRAY_SIZE(a)      (sizeof(a) / sizeof((a)[0]))  // element count; valid only on true arrays, not pointers
+
+#define OK    0  // success value returned by the camera_metadata C API in these tests
+#define ERROR 1  // generic failure value the API returns on capacity/lookup errors
+#define NOT_FOUND (-ENOENT)  // presumably returned by find_* when a tag is absent -- TODO confirm against camera_metadata.h
+
+#define _Alignas(T) \
+    ({struct _AlignasStruct { char c; T field; };       \
+        offsetof(struct _AlignasStruct, field); })
+
+#define FINISH_USING_CAMERA_METADATA(m)                         \
+    EXPECT_EQ(OK, validate_camera_metadata_structure(m, NULL)); \
+    free_camera_metadata(m);                                    \
+
+TEST(camera_metadata, allocate_normal) {  // basic allocation: requested capacities are honored, counts start at zero
+    camera_metadata_t *m = NULL;
+    const size_t entry_capacity = 5;
+    const size_t data_capacity = 32;
+
+    m = allocate_camera_metadata(entry_capacity, data_capacity);
+
+    EXPECT_NOT_NULL(m);
+    EXPECT_EQ((size_t)0, get_camera_metadata_entry_count(m));  // fresh buffer: no entries yet
+    EXPECT_EQ(entry_capacity, get_camera_metadata_entry_capacity(m));
+    EXPECT_EQ((size_t)0, get_camera_metadata_data_count(m));  // fresh buffer: no payload bytes yet
+    EXPECT_EQ(data_capacity, get_camera_metadata_data_capacity(m));
+
+    FINISH_USING_CAMERA_METADATA(m);  // validates structural integrity, then frees
+}
+
+TEST(camera_metadata, allocate_nodata) {  // allocation with zero data capacity is legal (entries with inline payload need no data area)
+    camera_metadata_t *m = NULL;
+
+    m = allocate_camera_metadata(1, 0);
+
+    EXPECT_NOT_NULL(m);
+    EXPECT_EQ((size_t)0, get_camera_metadata_entry_count(m));
+    EXPECT_EQ((size_t)1, get_camera_metadata_entry_capacity(m));
+    EXPECT_EQ((size_t)0, get_camera_metadata_data_count(m));
+    EXPECT_EQ((size_t)0, get_camera_metadata_data_capacity(m));  // zero data capacity preserved, not rounded up
+
+    FINISH_USING_CAMERA_METADATA(m);
+}
+
+TEST(camera_metadata, clone_nodata) {  // cloning an empty buffer yields a compacted copy (capacity == count == 0)
+    camera_metadata_t *src = NULL;
+    camera_metadata_t *copy = NULL;
+
+    src = allocate_camera_metadata(10, 0);
+
+    ASSERT_NE((void*)NULL, (void*)src);
+    copy = clone_camera_metadata(src);
+    ASSERT_NE((void*)NULL, (void*)copy);
+    EXPECT_EQ((size_t)0, get_camera_metadata_entry_count(copy));
+    EXPECT_EQ((size_t)0, get_camera_metadata_entry_capacity(copy));  // clone compacts: source's capacity of 10 is not carried over
+    EXPECT_EQ((size_t)0, get_camera_metadata_data_count(copy));
+    EXPECT_EQ((size_t)0, get_camera_metadata_data_capacity(copy));
+
+    FINISH_USING_CAMERA_METADATA(src);
+    FINISH_USING_CAMERA_METADATA(copy);
+}
+
+TEST(camera_metadata, allocate_nothing) {  // degenerate 0/0 allocation still returns a valid, empty buffer
+    camera_metadata_t *m = NULL;
+
+    m = allocate_camera_metadata(0, 0);
+
+    ASSERT_NE((void*)NULL, (void*)m);
+    EXPECT_EQ((size_t)0, get_camera_metadata_entry_count(m));
+    EXPECT_EQ((size_t)0, get_camera_metadata_entry_capacity(m));
+    EXPECT_EQ((size_t)0, get_camera_metadata_data_count(m));
+    EXPECT_EQ((size_t)0, get_camera_metadata_data_capacity(m));
+}  // NOTE(review): unlike the other tests, m is never validated or freed here -- leaks; consider FINISH_USING_CAMERA_METADATA(m)
+
+TEST(camera_metadata, place_normal) {  // place_camera_metadata into a caller-owned buffer of exactly the calculated size
+    camera_metadata_t *m = NULL;
+    void *buf = NULL;
+
+    const size_t entry_capacity = 5;
+    const size_t data_capacity = 32;
+
+    size_t buf_size = calculate_camera_metadata_size(entry_capacity,
+            data_capacity);
+
+    EXPECT_TRUE(buf_size > 0);
+
+    buf = malloc(buf_size);
+
+    EXPECT_NOT_NULL(buf);
+
+    m = place_camera_metadata(buf, buf_size, entry_capacity, data_capacity);
+
+    EXPECT_EQ(buf, (uint8_t*)m);  // metadata header must begin at the start of the supplied buffer
+    EXPECT_EQ((size_t)0, get_camera_metadata_entry_count(m));
+    EXPECT_EQ(entry_capacity, get_camera_metadata_entry_capacity(m));
+    EXPECT_EQ((size_t)0, get_camera_metadata_data_count(m));
+    EXPECT_EQ(data_capacity, get_camera_metadata_data_capacity(m));
+
+    EXPECT_EQ(OK, validate_camera_metadata_structure(m, &buf_size));  // validate against the exact buffer size
+
+    free(buf);  // caller owns the buffer when using place_*, so plain free (no free_camera_metadata)
+}
+
+TEST(camera_metadata, place_nospace) {  // a buffer one byte too small must make place_camera_metadata fail cleanly
+    camera_metadata_t *m = NULL;
+    void *buf = NULL;
+
+    const size_t entry_capacity = 5;
+    const size_t data_capacity = 32;
+
+    size_t buf_size = calculate_camera_metadata_size(entry_capacity,
+            data_capacity);
+
+    EXPECT_GT(buf_size, (size_t)0);
+
+    buf_size--;  // deliberately undersize by one byte
+
+    buf = malloc(buf_size);
+
+    EXPECT_NOT_NULL(buf);
+
+    m = place_camera_metadata(buf, buf_size, entry_capacity, data_capacity);
+
+    EXPECT_NULL(m);  // placement must be rejected, not truncated
+
+    free(buf);
+}
+
+TEST(camera_metadata, place_extraspace) {  // extra trailing space in the buffer is tolerated but not absorbed into the metadata
+    camera_metadata_t *m = NULL;
+    uint8_t *buf = NULL;
+
+    const size_t entry_capacity = 5;
+    const size_t data_capacity = 32;
+    const size_t extra_space = 10;
+
+    size_t buf_size = calculate_camera_metadata_size(entry_capacity,
+            data_capacity);
+
+    EXPECT_GT(buf_size, (size_t)0);
+
+    buf_size += extra_space;  // oversize the buffer by 10 bytes
+
+    buf = (uint8_t*)malloc(buf_size);
+
+    EXPECT_NOT_NULL(buf);
+
+    m = place_camera_metadata(buf, buf_size, entry_capacity, data_capacity);
+
+    EXPECT_EQ((uint8_t*)m, buf);
+    EXPECT_EQ((size_t)0, get_camera_metadata_entry_count(m));
+    EXPECT_EQ(entry_capacity, get_camera_metadata_entry_capacity(m));
+    EXPECT_EQ((size_t)0, get_camera_metadata_data_count(m));
+    EXPECT_EQ(data_capacity, get_camera_metadata_data_capacity(m));
+    EXPECT_EQ(buf + buf_size - extra_space, (uint8_t*)m + get_camera_metadata_size(m));  // metadata ends exactly at calculated size; extra 10 bytes stay unused
+
+    EXPECT_EQ(OK, validate_camera_metadata_structure(m, &buf_size));
+
+    free(buf);
+}
+
+TEST(camera_metadata, get_size) {  // size accessors agree with calculate_camera_metadata_size
+    camera_metadata_t *m = NULL;
+    const size_t entry_capacity = 5;
+    const size_t data_capacity = 32;
+
+    m = allocate_camera_metadata(entry_capacity, data_capacity);
+
+    EXPECT_EQ(calculate_camera_metadata_size(entry_capacity, data_capacity),
+            get_camera_metadata_size(m) );  // full size reflects allocated capacities
+
+    EXPECT_EQ(calculate_camera_metadata_size(0,0),
+            get_camera_metadata_compact_size(m) );  // compact size of an empty buffer is the 0/0 baseline
+
+    FINISH_USING_CAMERA_METADATA(m);
+}
+
+TEST(camera_metadata, add_get_normal) {  // round-trip one entry of each payload flavor (INT64, INT32, FLOAT, float[], rational[]) and read them back in insertion order
+    camera_metadata_t *m = NULL;
+    const size_t entry_capacity = 5;
+    const size_t data_capacity = 128;
+
+    m = allocate_camera_metadata(entry_capacity, data_capacity);
+
+    EXPECT_EQ(OK, validate_camera_metadata_structure(m, NULL));
+
+    int result;
+    size_t data_used = 0;  // running tally of payload bytes, checked against compact size at the end
+    size_t entries_used = 0;
+
+    // INT64
+
+    int64_t exposure_time = 1000000000;
+    result = add_camera_metadata_entry(m,
+            ANDROID_SENSOR_EXPOSURE_TIME,
+            &exposure_time, 1);
+    EXPECT_EQ(OK, result);
+    data_used += calculate_camera_metadata_entry_data_size(
+            get_camera_metadata_tag_type(ANDROID_SENSOR_EXPOSURE_TIME), 1);
+    entries_used++;
+
+    EXPECT_EQ(OK, validate_camera_metadata_structure(m, NULL));  // structure must stay valid after every mutation
+
+    // INT32
+
+    int32_t sensitivity = 800;
+    result = add_camera_metadata_entry(m,
+            ANDROID_SENSOR_SENSITIVITY,
+            &sensitivity, 1);
+    EXPECT_EQ(OK, result);
+    data_used += calculate_camera_metadata_entry_data_size(
+            get_camera_metadata_tag_type(ANDROID_SENSOR_SENSITIVITY), 1);
+    entries_used++;
+
+    EXPECT_EQ(OK, validate_camera_metadata_structure(m, NULL));
+
+    // FLOAT
+
+    float focusDistance = 0.5f;
+    result = add_camera_metadata_entry(m,
+            ANDROID_LENS_FOCUS_DISTANCE,
+            &focusDistance, 1);
+    EXPECT_EQ(OK, result);
+    data_used += calculate_camera_metadata_entry_data_size(
+            get_camera_metadata_tag_type(ANDROID_LENS_FOCUS_DISTANCE), 1);
+    entries_used++;
+
+    EXPECT_EQ(OK, validate_camera_metadata_structure(m, NULL));
+
+    // Array of FLOAT
+
+    float colorCorrectionGains[] = {1.69f,  1.00f,  1.00f,  2.41f};
+    result = add_camera_metadata_entry(m,
+            ANDROID_COLOR_CORRECTION_GAINS,
+            colorCorrectionGains, ARRAY_SIZE(colorCorrectionGains));
+    EXPECT_EQ(OK, result);
+    data_used += calculate_camera_metadata_entry_data_size(
+           get_camera_metadata_tag_type(ANDROID_COLOR_CORRECTION_GAINS),
+           ARRAY_SIZE(colorCorrectionGains));
+    entries_used++;
+
+    EXPECT_EQ(OK, validate_camera_metadata_structure(m, NULL));
+
+
+    // Array of RATIONAL
+
+    camera_metadata_rational_t colorTransform[] = {
+        {9, 10}, {0, 1}, {0, 1},
+        {1, 5}, {1, 2}, {0, 1},
+        {0, 1}, {1, 10}, {7, 10}
+    };
+    result = add_camera_metadata_entry(m,
+            ANDROID_COLOR_CORRECTION_TRANSFORM,
+            colorTransform, ARRAY_SIZE(colorTransform));
+    EXPECT_EQ(OK, result);
+    data_used += calculate_camera_metadata_entry_data_size(
+           get_camera_metadata_tag_type(ANDROID_COLOR_CORRECTION_TRANSFORM),
+           ARRAY_SIZE(colorTransform));
+    entries_used++;
+
+    EXPECT_EQ(OK, validate_camera_metadata_structure(m, NULL));
+
+    // Check added entries
+
+    size_t index = 0;  // entries are read back in the same order they were added
+    camera_metadata_entry entry;
+
+    result = get_camera_metadata_entry(m,
+            index, &entry);
+    EXPECT_EQ(OK, result);
+    EXPECT_EQ(index, entry.index);
+    EXPECT_EQ(ANDROID_SENSOR_EXPOSURE_TIME, entry.tag);
+    EXPECT_EQ(TYPE_INT64, entry.type);
+    EXPECT_EQ((size_t)1, entry.count);
+    EXPECT_EQ(exposure_time, *entry.data.i64);
+    index++;
+
+    result = get_camera_metadata_entry(m,
+            index, &entry);
+    EXPECT_EQ(OK, result);
+    EXPECT_EQ(index, entry.index);
+    EXPECT_EQ(ANDROID_SENSOR_SENSITIVITY, entry.tag);
+    EXPECT_EQ(TYPE_INT32, entry.type);
+    EXPECT_EQ((size_t)1, entry.count);
+    EXPECT_EQ(sensitivity, *entry.data.i32);
+    index++;
+
+    result = get_camera_metadata_entry(m,
+            index, &entry);
+    EXPECT_EQ(OK, result);
+    EXPECT_EQ(index, entry.index);
+    EXPECT_EQ(ANDROID_LENS_FOCUS_DISTANCE, entry.tag);
+    EXPECT_EQ(TYPE_FLOAT, entry.type);
+    EXPECT_EQ((size_t)1, entry.count);
+    EXPECT_EQ(focusDistance, *entry.data.f);  // exact float compare is fine: value round-trips through storage unmodified
+    index++;
+
+    result = get_camera_metadata_entry(m,
+            index, &entry);
+    EXPECT_EQ(OK, result);
+    EXPECT_EQ(index, entry.index);
+    EXPECT_EQ(ANDROID_COLOR_CORRECTION_GAINS, entry.tag);
+    EXPECT_EQ(TYPE_FLOAT, entry.type);
+    EXPECT_EQ(ARRAY_SIZE(colorCorrectionGains), entry.count);
+    for (unsigned int i=0; i < entry.count; i++) {
+        EXPECT_EQ(colorCorrectionGains[i], entry.data.f[i]);
+    }
+    index++;
+
+    result = get_camera_metadata_entry(m,
+            index, &entry);
+    EXPECT_EQ(OK, result);
+    EXPECT_EQ(index, entry.index);
+    EXPECT_EQ(ANDROID_COLOR_CORRECTION_TRANSFORM, entry.tag);
+    EXPECT_EQ(TYPE_RATIONAL, entry.type);
+    EXPECT_EQ(ARRAY_SIZE(colorTransform), entry.count);
+    for (unsigned int i=0; i < entry.count; i++) {
+        EXPECT_EQ(colorTransform[i].numerator, entry.data.r[i].numerator);
+        EXPECT_EQ(colorTransform[i].denominator, entry.data.r[i].denominator);
+    }
+    index++;
+
+    EXPECT_EQ(calculate_camera_metadata_size(entry_capacity, data_capacity),
+            get_camera_metadata_size(m) );
+
+    EXPECT_EQ(calculate_camera_metadata_size(entries_used, data_used),
+            get_camera_metadata_compact_size(m) );  // compact size matches the tally accumulated above
+
+    IF_ALOGV() {
+        dump_camera_metadata(m, 0, 2);  // verbose-log builds only: human-readable dump
+    }
+
+    FINISH_USING_CAMERA_METADATA(m);
+}
+
+void add_test_metadata(camera_metadata_t *m, int entry_count) {  // helper: append entry_count INT64 exposure-time entries with values 100, 200, ... so later reads can verify order
+
+    EXPECT_NOT_NULL(m);
+
+    int result;
+    size_t data_used = 0;
+    size_t entries_used = 0;
+    int64_t exposure_time;
+    for (int i=0; i < entry_count; i++ ) {
+        exposure_time = 100 + i * 100;  // distinct, predictable per-entry value
+        result = add_camera_metadata_entry(m,
+                ANDROID_SENSOR_EXPOSURE_TIME,
+                &exposure_time, 1);
+        EXPECT_EQ(OK, result);
+        data_used += calculate_camera_metadata_entry_data_size(
+                get_camera_metadata_tag_type(ANDROID_SENSOR_EXPOSURE_TIME), 1);
+        entries_used++;
+    }
+    EXPECT_EQ(data_used, get_camera_metadata_data_count(m));  // every payload byte accounted for
+    EXPECT_EQ(entries_used, get_camera_metadata_entry_count(m));
+    EXPECT_GE(get_camera_metadata_data_capacity(m),
+            get_camera_metadata_data_count(m));
+}
+
+TEST(camera_metadata, add_get_toomany) {  // filling entry capacity makes further adds fail; out-of-range get must not touch the caller's entry struct
+    camera_metadata_t *m = NULL;
+    const size_t entry_capacity = 5;
+    const size_t data_capacity = 50;
+    int result;
+
+    m = allocate_camera_metadata(entry_capacity, data_capacity);
+
+    add_test_metadata(m, entry_capacity);  // fill to exactly entry_capacity
+
+    int32_t sensitivity = 100;
+    result = add_camera_metadata_entry(m,
+            ANDROID_SENSOR_SENSITIVITY,
+            &sensitivity, 1);
+
+    EXPECT_EQ(ERROR, result);  // one past capacity must be rejected
+
+    camera_metadata_entry entry;
+    for (unsigned int i=0; i < entry_capacity; i++) {
+        int64_t exposure_time = 100 + i * 100;  // mirrors the values add_test_metadata wrote
+        result = get_camera_metadata_entry(m,
+                i, &entry);
+        EXPECT_EQ(OK, result);
+        EXPECT_EQ(i, entry.index);
+        EXPECT_EQ(ANDROID_SENSOR_EXPOSURE_TIME, entry.tag);
+        EXPECT_EQ(TYPE_INT64, entry.type);
+        EXPECT_EQ((size_t)1, entry.count);
+        EXPECT_EQ(exposure_time, *entry.data.i64);
+    }
+    entry.tag = 1234;  // poison the struct with sentinels...
+    entry.type = 56;
+    entry.data.u8 = NULL;
+    entry.count = 7890;
+    result = get_camera_metadata_entry(m,
+            entry_capacity, &entry);  // index == capacity is out of range
+    EXPECT_EQ(ERROR, result);
+    EXPECT_EQ((uint32_t)1234, entry.tag);  // ...and verify the failed get left every sentinel intact
+    EXPECT_EQ((uint8_t)56, entry.type);
+    EXPECT_EQ(NULL, entry.data.u8);
+    EXPECT_EQ((size_t)7890, entry.count);
+
+    IF_ALOGV() {
+        dump_camera_metadata(m, 0, 2);
+    }
+
+    FINISH_USING_CAMERA_METADATA(m);
+}
+
+TEST(camera_metadata, add_too_much_data) {  // entry slots remain, but exhausted data capacity must still reject an add
+    camera_metadata_t *m = NULL;
+    const size_t entry_capacity = 5;
+    int result;
+    size_t data_used = entry_capacity * calculate_camera_metadata_entry_data_size(
+        get_camera_metadata_tag_type(ANDROID_SENSOR_EXPOSURE_TIME), 1);
+    m = allocate_camera_metadata(entry_capacity + 1, data_used);  // one spare entry slot, but data sized for only entry_capacity payloads
+
+
+    add_test_metadata(m, entry_capacity);  // consumes all data capacity
+
+    int64_t exposure_time = 12345;
+    result = add_camera_metadata_entry(m,
+            ANDROID_SENSOR_EXPOSURE_TIME,
+            &exposure_time, 1);
+    EXPECT_EQ(ERROR, result);  // fails on data space even though an entry slot is free
+
+    FINISH_USING_CAMERA_METADATA(m);
+}
+
+TEST(camera_metadata, copy_metadata) {  // copy into a compact-sized buffer: result is byte-equal per entry and fully compacted
+    camera_metadata_t *m = NULL;
+    const size_t entry_capacity = 50;
+    const size_t data_capacity = 450;
+
+    int result;  // NOTE(review): unused -- shadowed by the loop-local result below
+
+    m = allocate_camera_metadata(entry_capacity, data_capacity);
+
+    add_test_metadata(m, entry_capacity);
+
+    size_t buf_size = get_camera_metadata_compact_size(m);
+    EXPECT_LT((size_t)0, buf_size);
+
+    uint8_t *buf = (uint8_t*)malloc(buf_size);
+    EXPECT_NOT_NULL(buf);
+
+    camera_metadata_t *m2 = copy_camera_metadata(buf, buf_size, m);
+    EXPECT_NOT_NULL(m2);
+    EXPECT_EQ(buf, (uint8_t*)m2);
+    EXPECT_EQ(get_camera_metadata_entry_count(m),
+            get_camera_metadata_entry_count(m2));
+    EXPECT_EQ(get_camera_metadata_data_count(m),
+            get_camera_metadata_data_count(m2));
+    EXPECT_EQ(get_camera_metadata_entry_capacity(m2),
+            get_camera_metadata_entry_count(m2));  // copy is compacted: capacity shrinks to count
+    EXPECT_EQ(get_camera_metadata_data_capacity(m2),
+            get_camera_metadata_data_count(m2));
+
+    for (unsigned int i=0; i < get_camera_metadata_entry_count(m); i++) {
+        camera_metadata_entry e1, e2;
+        int result;
+        result = get_camera_metadata_entry(m, i, &e1);
+        EXPECT_EQ(OK, result);
+        result = get_camera_metadata_entry(m2, i, &e2);
+        EXPECT_EQ(OK, result);
+        EXPECT_EQ(e1.index, e2.index);
+        EXPECT_EQ(e1.tag, e2.tag);
+        EXPECT_EQ(e1.type, e2.type);
+        EXPECT_EQ(e1.count, e2.count);
+        for (unsigned int j=0;
+             j < e1.count * camera_metadata_type_size[e1.type];
+             j++) {
+            EXPECT_EQ(e1.data.u8[j], e2.data.u8[j]);  // byte-for-byte payload comparison
+        }
+    }
+
+    EXPECT_EQ(OK, validate_camera_metadata_structure(m2, &buf_size));
+    free(buf);  // copy lives in caller-owned buf; plain free
+
+    FINISH_USING_CAMERA_METADATA(m);
+}
+
+TEST(camera_metadata, copy_metadata_extraspace) {  // copy into an oversized buffer: copy still compacts, extra bytes stay unused
+    camera_metadata_t *m = NULL;
+    const size_t entry_capacity = 12;
+    const size_t data_capacity = 100;
+
+    const size_t extra_space = 10;
+
+    int result;  // NOTE(review): unused -- shadowed by the loop-local result below
+
+    m = allocate_camera_metadata(entry_capacity, data_capacity);
+
+    add_test_metadata(m, entry_capacity);
+
+    size_t buf_size = get_camera_metadata_compact_size(m);
+    EXPECT_LT((size_t)0, buf_size);
+    buf_size += extra_space;  // oversize the destination by 10 bytes
+
+    uint8_t *buf = (uint8_t*)malloc(buf_size);
+    EXPECT_NOT_NULL(buf);
+
+    camera_metadata_t *m2 = copy_camera_metadata(buf, buf_size, m);
+    EXPECT_NOT_NULL(m2);
+    EXPECT_EQ(buf, (uint8_t*)m2);
+    EXPECT_EQ(get_camera_metadata_entry_count(m),
+            get_camera_metadata_entry_count(m2));
+    EXPECT_EQ(get_camera_metadata_data_count(m),
+            get_camera_metadata_data_count(m2));
+    EXPECT_EQ(get_camera_metadata_entry_capacity(m2),
+            get_camera_metadata_entry_count(m2));  // still compacted despite the spare room
+    EXPECT_EQ(get_camera_metadata_data_capacity(m2),
+            get_camera_metadata_data_count(m2));
+    EXPECT_EQ(buf + buf_size - extra_space,
+            (uint8_t*)m2 + get_camera_metadata_size(m2) );  // copy ends at compact size; extra bytes untouched
+
+    for (unsigned int i=0; i < get_camera_metadata_entry_count(m); i++) {
+        camera_metadata_entry e1, e2;
+
+        int result;
+        result = get_camera_metadata_entry(m, i, &e1);
+        EXPECT_EQ(OK, result);
+        EXPECT_EQ(i, e1.index);
+        result = get_camera_metadata_entry(m2, i, &e2);
+        EXPECT_EQ(OK, result);
+        EXPECT_EQ(e1.index, e2.index);
+        EXPECT_EQ(e1.tag, e2.tag);
+        EXPECT_EQ(e1.type, e2.type);
+        EXPECT_EQ(e1.count, e2.count);
+        for (unsigned int j=0;
+             j < e1.count * camera_metadata_type_size[e1.type];
+             j++) {
+            EXPECT_EQ(e1.data.u8[j], e2.data.u8[j]);
+        }
+    }
+
+    EXPECT_EQ(OK, validate_camera_metadata_structure(m2, &buf_size));
+    free(buf);
+
+    FINISH_USING_CAMERA_METADATA(m);
+}
+
+TEST(camera_metadata, copy_metadata_nospace) {  // destination one byte smaller than compact size: copy must fail with NULL
+    camera_metadata_t *m = NULL;
+    const size_t entry_capacity = 5;
+    const size_t data_capacity = 50;
+
+    int result;  // NOTE(review): unused in this test
+
+    m = allocate_camera_metadata(entry_capacity, data_capacity);
+
+    add_test_metadata(m, entry_capacity);
+
+    size_t buf_size = get_camera_metadata_compact_size(m);
+    EXPECT_LT((size_t)0, buf_size);
+
+    buf_size--;  // deliberately undersize by one byte
+
+    uint8_t *buf = (uint8_t*)malloc(buf_size);
+    EXPECT_NOT_NULL(buf);
+
+    camera_metadata_t *m2 = copy_camera_metadata(buf, buf_size, m);
+    EXPECT_NULL(m2);  // rejected, not truncated
+
+    free(buf);
+
+    FINISH_USING_CAMERA_METADATA(m);
+}
+
+TEST(camera_metadata, append_metadata) {  // append m into a double-capacity m2 twice; all entries preserved in order, capacities unchanged
+    camera_metadata_t *m = NULL;
+    const size_t entry_capacity = 5;
+    const size_t data_capacity = 50;
+
+    int result;
+
+    m = allocate_camera_metadata(entry_capacity, data_capacity);
+
+    add_test_metadata(m, entry_capacity);
+
+    camera_metadata_t *m2 = NULL;
+
+    m2 = allocate_camera_metadata(entry_capacity*2, data_capacity*2);  // room for exactly two copies of m
+    EXPECT_NOT_NULL(m2);
+
+    result = append_camera_metadata(m2, m);
+
+    EXPECT_EQ(OK, result);
+
+    EXPECT_EQ(get_camera_metadata_entry_count(m),
+            get_camera_metadata_entry_count(m2));
+    EXPECT_EQ(get_camera_metadata_data_count(m),
+            get_camera_metadata_data_count(m2));
+    EXPECT_EQ(entry_capacity*2, get_camera_metadata_entry_capacity(m2));  // append must not alter destination capacities
+    EXPECT_EQ(data_capacity*2,  get_camera_metadata_data_capacity(m2));
+
+    for (unsigned int i=0; i < get_camera_metadata_entry_count(m); i++) {
+        camera_metadata_entry e1, e2;
+        int result;
+        result = get_camera_metadata_entry(m, i, &e1);
+        EXPECT_EQ(OK, result);
+        EXPECT_EQ(i, e1.index);
+        result = get_camera_metadata_entry(m2, i, &e2);
+        EXPECT_EQ(OK, result);
+        EXPECT_EQ(e1.index, e2.index);
+        EXPECT_EQ(e1.tag, e2.tag);
+        EXPECT_EQ(e1.type, e2.type);
+        EXPECT_EQ(e1.count, e2.count);
+        for (unsigned int j=0;
+             j < e1.count * camera_metadata_type_size[e1.type];
+             j++) {
+            EXPECT_EQ(e1.data.u8[j], e2.data.u8[j]);
+        }
+    }
+
+    result = append_camera_metadata(m2, m);  // second append: m2 now holds two back-to-back copies of m
+
+    EXPECT_EQ(OK, result);
+
+    EXPECT_EQ(get_camera_metadata_entry_count(m)*2,
+            get_camera_metadata_entry_count(m2));
+    EXPECT_EQ(get_camera_metadata_data_count(m)*2,
+            get_camera_metadata_data_count(m2));
+    EXPECT_EQ(entry_capacity*2, get_camera_metadata_entry_capacity(m2));
+    EXPECT_EQ(data_capacity*2,  get_camera_metadata_data_capacity(m2));
+
+    for (unsigned int i=0; i < get_camera_metadata_entry_count(m2); i++) {
+        camera_metadata_entry e1, e2;
+
+        int result;
+        result = get_camera_metadata_entry(m,
+                i % entry_capacity, &e1);  // wrap to compare the second copy against the source again
+        EXPECT_EQ(OK, result);
+        EXPECT_EQ(i % entry_capacity, e1.index);
+        result = get_camera_metadata_entry(m2,
+                i, &e2);
+        EXPECT_EQ(OK, result);
+        EXPECT_EQ(i, e2.index);
+        EXPECT_EQ(e1.tag, e2.tag);
+        EXPECT_EQ(e1.type, e2.type);
+        EXPECT_EQ(e1.count, e2.count);
+        for (unsigned int j=0;
+             j < e1.count * camera_metadata_type_size[e1.type];
+             j++) {
+            EXPECT_EQ(e1.data.u8[j], e2.data.u8[j]);
+        }
+    }
+
+    FINISH_USING_CAMERA_METADATA(m);
+    FINISH_USING_CAMERA_METADATA(m2);
+}
+
+TEST(camera_metadata, append_metadata_nospace) {  // append into a destination with too few entry slots: fails and leaves destination empty
+    camera_metadata_t *m = NULL;
+    const size_t entry_capacity = 5;
+    const size_t data_capacity = 50;
+
+    int result;
+
+    m = allocate_camera_metadata(entry_capacity, data_capacity);
+
+    add_test_metadata(m, entry_capacity);
+
+    camera_metadata_t *m2 = NULL;
+
+    m2 = allocate_camera_metadata(entry_capacity-1, data_capacity);  // one entry slot short of what m needs
+    EXPECT_NOT_NULL(m2);
+
+    result = append_camera_metadata(m2, m);
+
+    EXPECT_EQ(ERROR, result);
+    EXPECT_EQ((size_t)0, get_camera_metadata_entry_count(m2));  // failed append must not partially modify the destination
+    EXPECT_EQ((size_t)0, get_camera_metadata_data_count(m2));
+
+    FINISH_USING_CAMERA_METADATA(m);
+    FINISH_USING_CAMERA_METADATA(m2);
+}
+
+TEST(camera_metadata, append_metadata_onespace) {  // destination fits one copy of m but not two: first append succeeds, second fails without corrupting m2
+    camera_metadata_t *m = NULL;
+    const size_t entry_capacity = 5;
+    const size_t data_capacity = 50;
+    const size_t entry_capacity2 = entry_capacity * 2 - 2;  // 8 slots: enough for one append (5), not two (10)
+    const size_t data_capacity2 = data_capacity * 2;
+    int result;
+
+    m = allocate_camera_metadata(entry_capacity, data_capacity);
+
+    add_test_metadata(m, entry_capacity);
+
+    camera_metadata_t *m2 = NULL;
+
+    m2 = allocate_camera_metadata(entry_capacity2, data_capacity2);
+    EXPECT_NOT_NULL(m2);
+
+    result = append_camera_metadata(m2, m);
+
+    EXPECT_EQ(OK, result);  // first append fits
+
+    EXPECT_EQ(get_camera_metadata_entry_count(m),
+            get_camera_metadata_entry_count(m2));
+    EXPECT_EQ(get_camera_metadata_data_count(m),
+            get_camera_metadata_data_count(m2));
+    EXPECT_EQ(entry_capacity2, get_camera_metadata_entry_capacity(m2));
+    EXPECT_EQ(data_capacity2,  get_camera_metadata_data_capacity(m2));
+
+    for (unsigned int i=0; i < get_camera_metadata_entry_count(m); i++) {
+        camera_metadata_entry e1, e2;
+
+        int result;
+        result = get_camera_metadata_entry(m, i, &e1);
+        EXPECT_EQ(OK, result);
+        EXPECT_EQ(i, e1.index);
+        result = get_camera_metadata_entry(m2, i, &e2);
+        EXPECT_EQ(OK, result);
+        EXPECT_EQ(e1.index, e2.index);
+        EXPECT_EQ(e1.tag, e2.tag);
+        EXPECT_EQ(e1.type, e2.type);
+        EXPECT_EQ(e1.count, e2.count);
+        for (unsigned int j=0;
+             j < e1.count * camera_metadata_type_size[e1.type];
+             j++) {
+            EXPECT_EQ(e1.data.u8[j], e2.data.u8[j]);
+        }
+    }
+
+    result = append_camera_metadata(m2, m);
+
+    EXPECT_EQ(ERROR, result);  // second append needs 5 slots, only 3 remain
+    EXPECT_EQ(entry_capacity, get_camera_metadata_entry_count(m2));  // failed append leaves counts exactly as before
+    EXPECT_EQ(get_camera_metadata_data_count(m),
+            get_camera_metadata_data_count(m2));
+    EXPECT_EQ(entry_capacity2, get_camera_metadata_entry_capacity(m2));
+    EXPECT_EQ(data_capacity2,  get_camera_metadata_data_capacity(m2));
+
+    for (unsigned int i=0; i < get_camera_metadata_entry_count(m2); i++) {
+        camera_metadata_entry e1, e2;
+
+        int result;
+        result = get_camera_metadata_entry(m,
+                i % entry_capacity, &e1);
+        EXPECT_EQ(OK, result);
+        EXPECT_EQ(i % entry_capacity, e1.index);
+        result = get_camera_metadata_entry(m2, i, &e2);
+        EXPECT_EQ(OK, result);
+        EXPECT_EQ(i, e2.index);
+        EXPECT_EQ(e1.tag, e2.tag);
+        EXPECT_EQ(e1.type, e2.type);
+        EXPECT_EQ(e1.count, e2.count);
+        for (unsigned int j=0;
+             j < e1.count * camera_metadata_type_size[e1.type];
+             j++) {
+            EXPECT_EQ(e1.data.u8[j], e2.data.u8[j]);
+        }
+    }
+
+    FINISH_USING_CAMERA_METADATA(m);
+    FINISH_USING_CAMERA_METADATA(m2);
+}
+
+// Verifies vendor-tag behavior across the three vendor-ops states:
+// unregistered -> registered (fakevendor_ops) -> unregistered again.
+// Vendor tags must only be addable/resolvable while ops are registered;
+// regular ANDROID_* tags must work regardless.
+TEST(camera_metadata, vendor_tags) {
+    camera_metadata_t *m = NULL;
+    const size_t entry_capacity = 5;
+    const size_t data_capacity = 50;
+    int result;
+
+    m = allocate_camera_metadata(entry_capacity, data_capacity);
+
+    // Phase 1: no vendor ops registered. Adding a vendor-section tag
+    // must fail, while a standard Android tag still succeeds.
+    uint8_t superMode = 5;
+    result = add_camera_metadata_entry(m,
+            FAKEVENDOR_SENSOR_SUPERMODE,
+            &superMode, 1);
+    EXPECT_EQ(ERROR, result);
+    EXPECT_EQ(OK, validate_camera_metadata_structure(m, NULL));
+
+    result = add_camera_metadata_entry(m,
+            ANDROID_REQUEST_METADATA_MODE,
+            &superMode, 1);
+    EXPECT_EQ(OK, result);
+    EXPECT_EQ(OK, validate_camera_metadata_structure(m, NULL));
+
+    // Vendor tag name/section/type lookups are unavailable without ops.
+    EXPECT_NULL(get_camera_metadata_section_name(FAKEVENDOR_SENSOR_SUPERMODE));
+    EXPECT_NULL(get_camera_metadata_tag_name(FAKEVENDOR_SENSOR_SUPERMODE));
+    EXPECT_EQ(-1, get_camera_metadata_tag_type(FAKEVENDOR_SENSOR_SUPERMODE));
+
+    // Phase 2: register the fake vendor ops; vendor tags now resolve
+    // and can be added.
+    set_camera_metadata_vendor_ops(&fakevendor_ops);
+
+    result = add_camera_metadata_entry(m,
+            FAKEVENDOR_SENSOR_SUPERMODE,
+            &superMode, 1);
+    EXPECT_EQ(OK, result);
+    EXPECT_EQ(OK, validate_camera_metadata_structure(m, NULL));
+
+    result = add_camera_metadata_entry(m,
+            ANDROID_REQUEST_METADATA_MODE,
+            &superMode, 1);
+    EXPECT_EQ(OK, result);
+    EXPECT_EQ(OK, validate_camera_metadata_structure(m, NULL));
+
+    // A section-end sentinel is not a real tag even with ops registered.
+    result = add_camera_metadata_entry(m,
+            FAKEVENDOR_SCALER_END,
+            &superMode, 1);
+    EXPECT_EQ(ERROR, result);
+    EXPECT_EQ(OK, validate_camera_metadata_structure(m, NULL));
+
+    EXPECT_STREQ("com.fakevendor.sensor",
+            get_camera_metadata_section_name(FAKEVENDOR_SENSOR_SUPERMODE));
+    EXPECT_STREQ("superMode",
+            get_camera_metadata_tag_name(FAKEVENDOR_SENSOR_SUPERMODE));
+    EXPECT_EQ(TYPE_BYTE,
+            get_camera_metadata_tag_type(FAKEVENDOR_SENSOR_SUPERMODE));
+
+    // The sentinel's section still resolves, but its name/type do not.
+    EXPECT_STREQ("com.fakevendor.scaler",
+            get_camera_metadata_section_name(FAKEVENDOR_SCALER_END));
+    EXPECT_NULL(get_camera_metadata_tag_name(FAKEVENDOR_SCALER_END));
+    EXPECT_EQ(-1, get_camera_metadata_tag_type(FAKEVENDOR_SCALER_END));
+
+    // Phase 3: unregister the ops; vendor tags must fail again.
+    set_camera_metadata_vendor_ops(NULL);
+    // TODO: fix vendor ops. Then the below 3 validations should fail.
+    EXPECT_EQ(OK, validate_camera_metadata_structure(m, NULL));
+
+    result = add_camera_metadata_entry(m,
+            FAKEVENDOR_SENSOR_SUPERMODE,
+            &superMode, 1);
+    EXPECT_EQ(ERROR, result);
+    EXPECT_EQ(OK, validate_camera_metadata_structure(m, NULL));
+
+    result = add_camera_metadata_entry(m,
+            ANDROID_REQUEST_METADATA_MODE,
+            &superMode, 1);
+    EXPECT_EQ(OK, result);
+    EXPECT_EQ(OK, validate_camera_metadata_structure(m, NULL));
+
+    EXPECT_NULL(get_camera_metadata_section_name(FAKEVENDOR_SENSOR_SUPERMODE));
+    EXPECT_NULL(get_camera_metadata_tag_name(FAKEVENDOR_SENSOR_SUPERMODE));
+    EXPECT_EQ(-1, get_camera_metadata_tag_type(FAKEVENDOR_SENSOR_SUPERMODE));
+
+    // Remove all vendor entries so validation passes
+    {
+        camera_metadata_ro_entry_t entry;
+        EXPECT_EQ(OK, find_camera_metadata_ro_entry(m,
+                                                    FAKEVENDOR_SENSOR_SUPERMODE,
+                                                    &entry));
+        EXPECT_EQ(OK, delete_camera_metadata_entry(m, entry.index));
+    }
+
+    FINISH_USING_CAMERA_METADATA(m);
+}
+
+// Adds one entry for every tag defined in every Android section, covering
+// all six entry types, then validates the whole buffer can be dumped.
+TEST(camera_metadata, add_all_tags) {
+    int total_tag_count = 0;
+    for (int i = 0; i < ANDROID_SECTION_COUNT; i++) {
+        total_tag_count += camera_metadata_section_bounds[i][1] -
+                camera_metadata_section_bounds[i][0];
+    }
+    // Number of elements written per entry; const so the scratch buffer
+    // below is a fixed-size array (standard C++), not a VLA extension.
+    const int entry_data_count = 3;
+    // 8 == size of the largest element type (int64/double/rational).
+    int conservative_data_space = total_tag_count * entry_data_count * 8;
+    // Scratch buffer reused for every entry. It is written through the
+    // typed pointers below, so force alignment suitable for the widest
+    // payload types to avoid misaligned-access UB on strict platforms.
+    alignas(int64_t) alignas(double) uint8_t data[entry_data_count * 8];
+    int32_t *data_int32 = (int32_t *)data;
+    float *data_float   = (float *)data;
+    int64_t *data_int64 = (int64_t *)data;
+    double *data_double = (double *)data;
+    camera_metadata_rational_t *data_rational =
+            (camera_metadata_rational_t *)data;
+
+    camera_metadata_t *m = allocate_camera_metadata(total_tag_count,
+            conservative_data_space);
+
+    ASSERT_NE((void*)NULL, (void*)m);
+
+    int result;
+
+    int counter = 0;
+    for (int i = 0; i < ANDROID_SECTION_COUNT; i++) {
+        for (uint32_t tag = camera_metadata_section_bounds[i][0];
+                tag < camera_metadata_section_bounds[i][1];
+             tag++, counter++) {
+            int type = get_camera_metadata_tag_type(tag);
+            // Every defined tag must have a known type.
+            ASSERT_NE(-1, type);
+
+            // Fill the scratch buffer with values derived from the tag so
+            // each entry's payload is distinguishable.
+            switch (type) {
+                case TYPE_BYTE:
+                    data[0] = tag & 0xFF;
+                    data[1] = (tag >> 8) & 0xFF;
+                    data[2] = (tag >> 16) & 0xFF;
+                    break;
+                case TYPE_INT32:
+                    data_int32[0] = tag;
+                    data_int32[1] = i;
+                    data_int32[2] = counter;
+                    break;
+                case TYPE_FLOAT:
+                    data_float[0] = tag;
+                    data_float[1] = i;
+                    data_float[2] = counter / (float)total_tag_count;
+                    break;
+                case TYPE_INT64:
+                    data_int64[0] = (int64_t)tag | ( (int64_t)tag << 32);
+                    data_int64[1] = i;
+                    data_int64[2] = counter;
+                    break;
+                case TYPE_DOUBLE:
+                    data_double[0] = tag;
+                    data_double[1] = i;
+                    data_double[2] = counter / (double)total_tag_count;
+                    break;
+                case TYPE_RATIONAL:
+                    data_rational[0].numerator = tag;
+                    data_rational[0].denominator = 1;
+                    data_rational[1].numerator = i;
+                    data_rational[1].denominator = 1;
+                    data_rational[2].numerator = counter;
+                    data_rational[2].denominator = total_tag_count;
+                    break;
+                default:
+                    FAIL() << "Unknown type field encountered:" << type;
+                    break;
+            }
+            result = add_camera_metadata_entry(m,
+                    tag,
+                    data,
+                    entry_data_count);
+            ASSERT_EQ(OK, result);
+
+        }
+    }
+
+    IF_ALOGV() {
+        dump_camera_metadata(m, 0, 2);
+    }
+
+    FINISH_USING_CAMERA_METADATA(m);
+}
+
+// Verifies find_camera_metadata_entry() both before and after
+// sort_camera_metadata(): unsorted lookup returns insertion-order
+// indices, sorted lookup returns tag-order indices, and a failed
+// lookup leaves the caller's entry struct untouched.
+TEST(camera_metadata, sort_metadata) {
+    camera_metadata_t *m = NULL;
+    const size_t entry_capacity = 5;
+    const size_t data_capacity = 100;
+
+    int result;
+
+    m = allocate_camera_metadata(entry_capacity, data_capacity);
+
+    // Add several unique entries in non-sorted order
+
+    camera_metadata_rational_t colorTransform[] = {
+        {9, 10}, {0, 1}, {0, 1},
+        {1, 5}, {1, 2}, {0, 1},
+        {0, 1}, {1, 10}, {7, 10}
+    };
+    result = add_camera_metadata_entry(m,
+            ANDROID_COLOR_CORRECTION_TRANSFORM,
+            colorTransform, ARRAY_SIZE(colorTransform));
+    EXPECT_EQ(OK, result);
+
+    float focus_distance = 0.5f;
+    result = add_camera_metadata_entry(m,
+            ANDROID_LENS_FOCUS_DISTANCE,
+            &focus_distance, 1);
+    EXPECT_EQ(OK, result);
+
+    int64_t exposure_time = 1000000000;
+    result = add_camera_metadata_entry(m,
+            ANDROID_SENSOR_EXPOSURE_TIME,
+            &exposure_time, 1);
+    EXPECT_EQ(OK, result);
+
+    int32_t sensitivity = 800;
+    result = add_camera_metadata_entry(m,
+            ANDROID_SENSOR_SENSITIVITY,
+            &sensitivity, 1);
+    EXPECT_EQ(OK, result);
+
+    // Test unsorted find
+    // Before sorting, index reflects insertion order (focus distance was
+    // the second entry added, so index 1).
+    camera_metadata_entry_t entry;
+    result = find_camera_metadata_entry(m,
+            ANDROID_LENS_FOCUS_DISTANCE,
+            &entry);
+    EXPECT_EQ(OK, result);
+    EXPECT_EQ(ANDROID_LENS_FOCUS_DISTANCE, entry.tag);
+    EXPECT_EQ((size_t)1, entry.index);
+    EXPECT_EQ(TYPE_FLOAT, entry.type);
+    EXPECT_EQ((size_t)1, entry.count);
+    EXPECT_EQ(focus_distance, *entry.data.f);
+
+    // A failed find must not modify 'entry' — it still holds the
+    // previous (focus distance) lookup result.
+    result = find_camera_metadata_entry(m,
+            ANDROID_NOISE_REDUCTION_STRENGTH,
+            &entry);
+    EXPECT_EQ(NOT_FOUND, result);
+    EXPECT_EQ((size_t)1, entry.index);
+    EXPECT_EQ(ANDROID_LENS_FOCUS_DISTANCE, entry.tag);
+    EXPECT_EQ(TYPE_FLOAT, entry.type);
+    EXPECT_EQ((size_t)1, entry.count);
+    EXPECT_EQ(focus_distance, *entry.data.f);
+
+    // Sort
+    IF_ALOGV() {
+        std::cout << "Pre-sorted metadata" << std::endl;
+        dump_camera_metadata(m, 0, 2);
+    }
+
+    result = sort_camera_metadata(m);
+    EXPECT_EQ(OK, result);
+
+    IF_ALOGV() {
+        std::cout << "Sorted metadata" << std::endl;
+        dump_camera_metadata(m, 0, 2);
+    }
+
+    // Test sorted find
+    // Independently compute the post-sort index of the focus-distance
+    // entry by sorting the same tag values, rather than hard-coding it.
+    size_t lensFocusIndex = -1;
+    {
+        std::vector<uint32_t> tags;
+        tags.push_back(ANDROID_COLOR_CORRECTION_TRANSFORM);
+        tags.push_back(ANDROID_LENS_FOCUS_DISTANCE);
+        tags.push_back(ANDROID_SENSOR_EXPOSURE_TIME);
+        tags.push_back(ANDROID_SENSOR_SENSITIVITY);
+        std::sort(tags.begin(), tags.end());
+
+        lensFocusIndex =
+            std::find(tags.begin(), tags.end(), ANDROID_LENS_FOCUS_DISTANCE)
+            - tags.begin();
+    }
+
+    result = find_camera_metadata_entry(m,
+            ANDROID_LENS_FOCUS_DISTANCE,
+            &entry);
+    EXPECT_EQ(OK, result);
+    EXPECT_EQ(lensFocusIndex, entry.index);
+    EXPECT_EQ(ANDROID_LENS_FOCUS_DISTANCE, entry.tag);
+    EXPECT_EQ(TYPE_FLOAT, entry.type);
+    EXPECT_EQ((size_t)1, (size_t)entry.count);
+    EXPECT_EQ(focus_distance, *entry.data.f);
+
+    // Failed find after sorting: again must leave 'entry' untouched.
+    result = find_camera_metadata_entry(m,
+            ANDROID_NOISE_REDUCTION_STRENGTH,
+            &entry);
+    EXPECT_EQ(NOT_FOUND, result);
+    EXPECT_EQ(lensFocusIndex, entry.index);
+    EXPECT_EQ(ANDROID_LENS_FOCUS_DISTANCE, entry.tag);
+    EXPECT_EQ(TYPE_FLOAT, entry.type);
+    EXPECT_EQ((size_t)1, entry.count);
+    EXPECT_EQ(focus_distance, *entry.data.f);
+
+
+    FINISH_USING_CAMERA_METADATA(m);
+}
+
+// Verifies delete_camera_metadata_entry(): deleting entries with and
+// without payload data, at the end and in the middle of the array,
+// rejecting out-of-range indices, and preserving the remaining entries'
+// contents and the buffer's capacities throughout.
+TEST(camera_metadata, delete_metadata) {
+    camera_metadata_t *m = NULL;
+    const size_t entry_capacity = 50;
+    const size_t data_capacity = 450;
+
+    int result;
+
+    m = allocate_camera_metadata(entry_capacity, data_capacity);
+
+    size_t num_entries = 5;
+    size_t data_per_entry =
+            calculate_camera_metadata_entry_data_size(TYPE_INT64, 1);
+    size_t num_data = num_entries * data_per_entry;
+
+    // Delete an entry with data
+
+    add_test_metadata(m, num_entries);
+    EXPECT_EQ(num_entries, get_camera_metadata_entry_count(m));
+    EXPECT_EQ(num_data, get_camera_metadata_data_count(m));
+
+    result = delete_camera_metadata_entry(m, 1);
+    EXPECT_EQ(OK, result);
+    num_entries--;
+    num_data -= data_per_entry;
+
+    EXPECT_EQ(num_entries, get_camera_metadata_entry_count(m));
+    EXPECT_EQ(entry_capacity, get_camera_metadata_entry_capacity(m));
+    EXPECT_EQ(num_data, get_camera_metadata_data_count(m));
+    EXPECT_EQ(data_capacity, get_camera_metadata_data_capacity(m));
+
+    // Index 4 is now one past the end (4 entries remain) — must fail.
+    result = delete_camera_metadata_entry(m, 4);
+    EXPECT_EQ(ERROR, result);
+
+    EXPECT_EQ(num_entries, get_camera_metadata_entry_count(m));
+    EXPECT_EQ(entry_capacity, get_camera_metadata_entry_capacity(m));
+    EXPECT_EQ(num_data, get_camera_metadata_data_count(m));
+    EXPECT_EQ(data_capacity, get_camera_metadata_data_capacity(m));
+
+    // Surviving entries shifted down; entry 0 kept its value (100),
+    // entries >= 1 hold the values originally at index i+1.
+    for (size_t i = 0; i < num_entries; i++) {
+        camera_metadata_entry e;
+        result = get_camera_metadata_entry(m, i, &e);
+        EXPECT_EQ(OK, result);
+        EXPECT_EQ(i, e.index);
+        EXPECT_EQ(ANDROID_SENSOR_EXPOSURE_TIME, e.tag);
+        EXPECT_EQ(TYPE_INT64, e.type);
+        int64_t exposureTime = i < 1 ? 100 : 200 + 100 * i;
+        EXPECT_EQ(exposureTime, *e.data.i64);
+    }
+
+    // Delete an entry with no data, at end of array
+
+    int32_t frameCount = 12;
+    result = add_camera_metadata_entry(m,
+            ANDROID_REQUEST_FRAME_COUNT,
+            &frameCount, 1);
+    EXPECT_EQ(OK, result);
+    num_entries++;
+
+    EXPECT_EQ(num_entries, get_camera_metadata_entry_count(m));
+    EXPECT_EQ(entry_capacity, get_camera_metadata_entry_capacity(m));
+    EXPECT_EQ(num_data, get_camera_metadata_data_count(m));
+    EXPECT_EQ(data_capacity, get_camera_metadata_data_capacity(m));
+
+    camera_metadata_entry e;
+    result = get_camera_metadata_entry(m, 4, &e);
+    EXPECT_EQ(OK, result);
+
+    EXPECT_EQ((size_t)4, e.index);
+    EXPECT_EQ(ANDROID_REQUEST_FRAME_COUNT, e.tag);
+    EXPECT_EQ(TYPE_INT32, e.type);
+    EXPECT_EQ((size_t)1, e.count);
+    EXPECT_EQ(frameCount, *e.data.i32);
+
+    result = delete_camera_metadata_entry(m, 4);
+    EXPECT_EQ(OK, result);
+
+    num_entries--;
+    EXPECT_EQ(num_entries, get_camera_metadata_entry_count(m));
+    EXPECT_EQ(entry_capacity, get_camera_metadata_entry_capacity(m));
+    EXPECT_EQ(num_data, get_camera_metadata_data_count(m));
+    EXPECT_EQ(data_capacity, get_camera_metadata_data_capacity(m));
+
+    // The deleted index is invalid for both delete and get.
+    result = delete_camera_metadata_entry(m, 4);
+    EXPECT_EQ(ERROR, result);
+
+    result = get_camera_metadata_entry(m, 4, &e);
+    EXPECT_EQ(ERROR, result);
+
+    EXPECT_EQ(num_entries, get_camera_metadata_entry_count(m));
+    EXPECT_EQ(entry_capacity, get_camera_metadata_entry_capacity(m));
+    EXPECT_EQ(num_data, get_camera_metadata_data_count(m));
+    EXPECT_EQ(data_capacity, get_camera_metadata_data_capacity(m));
+
+    // Delete with extra data on end of array
+    result = delete_camera_metadata_entry(m, 3);
+    EXPECT_EQ(OK, result);
+    num_entries--;
+    num_data -= data_per_entry;
+
+    for (size_t i = 0; i < num_entries; i++) {
+        camera_metadata_entry e2;
+        result = get_camera_metadata_entry(m, i, &e2);
+        EXPECT_EQ(OK, result);
+        EXPECT_EQ(i, e2.index);
+        EXPECT_EQ(ANDROID_SENSOR_EXPOSURE_TIME, e2.tag);
+        EXPECT_EQ(TYPE_INT64, e2.type);
+        int64_t exposureTime = i < 1 ? 100 : 200 + 100 * i;
+        EXPECT_EQ(exposureTime, *e2.data.i64);
+    }
+
+    // Delete without extra data in front of array
+
+    frameCount = 1001;
+    result = add_camera_metadata_entry(m,
+            ANDROID_REQUEST_FRAME_COUNT,
+            &frameCount, 1);
+    EXPECT_EQ(OK, result);
+    num_entries++;
+
+    EXPECT_EQ(num_entries, get_camera_metadata_entry_count(m));
+    EXPECT_EQ(entry_capacity, get_camera_metadata_entry_capacity(m));
+    EXPECT_EQ(num_data, get_camera_metadata_data_count(m));
+    EXPECT_EQ(data_capacity, get_camera_metadata_data_capacity(m));
+
+    // Sorting moves the frame-count entry (lowest tag) to index 0.
+    result = sort_camera_metadata(m);
+    EXPECT_EQ(OK, result);
+
+    result = find_camera_metadata_entry(m,
+            ANDROID_REQUEST_FRAME_COUNT, &e);
+    EXPECT_EQ(OK, result);
+    EXPECT_EQ((size_t)0, e.index);
+    EXPECT_EQ(ANDROID_REQUEST_FRAME_COUNT, e.tag);
+    EXPECT_EQ(TYPE_INT32, e.type);
+    EXPECT_EQ((size_t)1, e.count);
+    EXPECT_EQ(frameCount, *e.data.i32);
+
+    result = delete_camera_metadata_entry(m, e.index);
+    EXPECT_EQ(OK, result);
+    num_entries--;
+
+    EXPECT_EQ(num_entries, get_camera_metadata_entry_count(m));
+    EXPECT_EQ(entry_capacity, get_camera_metadata_entry_capacity(m));
+    EXPECT_EQ(num_data, get_camera_metadata_data_count(m));
+    EXPECT_EQ(data_capacity, get_camera_metadata_data_capacity(m));
+
+    for (size_t i = 0; i < num_entries; i++) {
+        camera_metadata_entry e2;
+        result = get_camera_metadata_entry(m, i, &e2);
+        EXPECT_EQ(OK, result);
+        EXPECT_EQ(i, e2.index);
+        EXPECT_EQ(ANDROID_SENSOR_EXPOSURE_TIME, e2.tag);
+        EXPECT_EQ(TYPE_INT64, e2.type);
+        int64_t exposureTime = i < 1 ? 100 : 200 + 100 * i;
+        EXPECT_EQ(exposureTime, *e2.data.i64);
+    }
+
+    // Fix: release the buffer like every other test in this file does;
+    // previously this test leaked 'm'.
+    FINISH_USING_CAMERA_METADATA(m);
+}
+
+// Verifies update_camera_metadata_entry() across all resize cases:
+// same-size, grow, and shrink payloads (both in the data section and
+// inline in the entry), plus behavior in a second buffer (m2) that has
+// zero spare data capacity. Neighboring entries must stay intact after
+// every update.
+TEST(camera_metadata, update_metadata) {
+    camera_metadata_t *m = NULL;
+    const size_t entry_capacity = 50;
+    const size_t data_capacity = 450;
+
+    int result;
+
+    m = allocate_camera_metadata(entry_capacity, data_capacity);
+
+    size_t num_entries = 5;
+    size_t data_per_entry =
+            calculate_camera_metadata_entry_data_size(TYPE_INT64, 1);
+    size_t num_data = num_entries * data_per_entry;
+
+    add_test_metadata(m, num_entries);
+    EXPECT_EQ(num_entries, get_camera_metadata_entry_count(m));
+    EXPECT_EQ(num_data, get_camera_metadata_data_count(m));
+
+    // Update with same-size data, doesn't fit in entry
+
+    int64_t newExposureTime = 1000;
+    camera_metadata_entry_t e;
+    result = update_camera_metadata_entry(m,
+            0, &newExposureTime, 1, &e);
+    EXPECT_EQ(OK, result);
+
+    EXPECT_EQ((size_t)0, e.index);
+    EXPECT_EQ(ANDROID_SENSOR_EXPOSURE_TIME, e.tag);
+    EXPECT_EQ(TYPE_INT64, e.type);
+    EXPECT_EQ((size_t)1, e.count);
+    EXPECT_EQ(newExposureTime, *e.data.i64);
+
+    // Re-read the entry to confirm the update is visible via get as well.
+    e.count = 0;
+    result = get_camera_metadata_entry(m,
+            0, &e);
+
+    EXPECT_EQ((size_t)0, e.index);
+    EXPECT_EQ(ANDROID_SENSOR_EXPOSURE_TIME, e.tag);
+    EXPECT_EQ(TYPE_INT64, e.type);
+    EXPECT_EQ((size_t)1, e.count);
+    EXPECT_EQ(newExposureTime, *e.data.i64);
+
+    for (size_t i = 1; i < num_entries; i++) {
+        camera_metadata_entry e2;
+        result = get_camera_metadata_entry(m, i, &e2);
+        EXPECT_EQ(OK, result);
+        EXPECT_EQ(i, e2.index);
+        EXPECT_EQ(ANDROID_SENSOR_EXPOSURE_TIME, e2.tag);
+        EXPECT_EQ(TYPE_INT64, e2.type);
+        int64_t exposureTime = 100 + 100 * i;
+        EXPECT_EQ(exposureTime, *e2.data.i64);
+    }
+
+    // Update with larger data
+    int64_t newExposures[2] = { 5000, 6000 };
+    result = update_camera_metadata_entry(m,
+            0, newExposures, 2, &e);
+    EXPECT_EQ(OK, result);
+    num_data += data_per_entry;
+
+    EXPECT_EQ(num_entries, get_camera_metadata_entry_count(m));
+    EXPECT_EQ(num_data, get_camera_metadata_data_count(m));
+
+    EXPECT_EQ((size_t)0, e.index);
+    EXPECT_EQ(ANDROID_SENSOR_EXPOSURE_TIME, e.tag);
+    EXPECT_EQ(TYPE_INT64, e.type);
+    EXPECT_EQ((size_t)2, e.count);
+    EXPECT_EQ(newExposures[0], e.data.i64[0]);
+    EXPECT_EQ(newExposures[1], e.data.i64[1]);
+
+    e.count = 0;
+    result = get_camera_metadata_entry(m,
+            0, &e);
+
+    EXPECT_EQ((size_t)0, e.index);
+    EXPECT_EQ(ANDROID_SENSOR_EXPOSURE_TIME, e.tag);
+    EXPECT_EQ(TYPE_INT64, e.type);
+    EXPECT_EQ((size_t)2, e.count);
+    EXPECT_EQ(newExposures[0], e.data.i64[0]);
+    EXPECT_EQ(newExposures[1], e.data.i64[1]);
+
+    for (size_t i = 1; i < num_entries; i++) {
+        camera_metadata_entry e2;
+        result = get_camera_metadata_entry(m, i, &e2);
+        EXPECT_EQ(OK, result);
+        EXPECT_EQ(i, e2.index);
+        EXPECT_EQ(ANDROID_SENSOR_EXPOSURE_TIME, e2.tag);
+        EXPECT_EQ(TYPE_INT64, e2.type);
+        int64_t exposureTime = 100 + 100 * i;
+        EXPECT_EQ(exposureTime, *e2.data.i64);
+    }
+
+    // Update with smaller data
+    newExposureTime = 100;
+    result = update_camera_metadata_entry(m,
+            0, &newExposureTime, 1, &e);
+    EXPECT_EQ(OK, result);
+
+    num_data -= data_per_entry;
+
+    EXPECT_EQ(num_entries, get_camera_metadata_entry_count(m));
+    EXPECT_EQ(num_data, get_camera_metadata_data_count(m));
+
+    EXPECT_EQ((size_t)0, e.index);
+    EXPECT_EQ(ANDROID_SENSOR_EXPOSURE_TIME, e.tag);
+    EXPECT_EQ(TYPE_INT64, e.type);
+    EXPECT_EQ((size_t)1, e.count);
+    EXPECT_EQ(newExposureTime, *e.data.i64);
+
+    e.count = 0;
+    result = get_camera_metadata_entry(m,
+            0, &e);
+
+    EXPECT_EQ((size_t)0, e.index);
+    EXPECT_EQ(ANDROID_SENSOR_EXPOSURE_TIME, e.tag);
+    EXPECT_EQ(TYPE_INT64, e.type);
+    EXPECT_EQ((size_t)1, e.count);
+    EXPECT_EQ(newExposureTime, *e.data.i64);
+
+    for (size_t i = 1; i < num_entries; i++) {
+        camera_metadata_entry e2;
+        result = get_camera_metadata_entry(m, i, &e2);
+        EXPECT_EQ(OK, result);
+        EXPECT_EQ(i, e2.index);
+        EXPECT_EQ(ANDROID_SENSOR_EXPOSURE_TIME, e2.tag);
+        EXPECT_EQ(TYPE_INT64, e2.type);
+        int64_t exposureTime = 100 + 100 * i;
+        EXPECT_EQ(exposureTime, *e2.data.i64);
+    }
+
+    // Update with size fitting in entry
+
+    int32_t frameCount = 1001;
+    result = add_camera_metadata_entry(m,
+            ANDROID_REQUEST_FRAME_COUNT,
+            &frameCount, 1);
+    EXPECT_EQ(OK, result);
+    num_entries++;
+
+    EXPECT_EQ(num_entries, get_camera_metadata_entry_count(m));
+    EXPECT_EQ(entry_capacity, get_camera_metadata_entry_capacity(m));
+    EXPECT_EQ(num_data, get_camera_metadata_data_count(m));
+    EXPECT_EQ(data_capacity, get_camera_metadata_data_capacity(m));
+
+    // Sorting moves the frame-count entry (lowest tag) to index 0.
+    result = sort_camera_metadata(m);
+    EXPECT_EQ(OK, result);
+
+    result = find_camera_metadata_entry(m,
+            ANDROID_REQUEST_FRAME_COUNT, &e);
+    EXPECT_EQ(OK, result);
+    EXPECT_EQ((size_t)0, e.index);
+    EXPECT_EQ(ANDROID_REQUEST_FRAME_COUNT, e.tag);
+    EXPECT_EQ(TYPE_INT32, e.type);
+    EXPECT_EQ((size_t)1, e.count);
+    EXPECT_EQ(frameCount, *e.data.i32);
+
+    int32_t newFrameCount = 0x12349876;
+    result = update_camera_metadata_entry(m,
+            0, &newFrameCount, 1, &e);
+
+    EXPECT_EQ(OK, result);
+    EXPECT_EQ((size_t)0, e.index);
+    EXPECT_EQ(ANDROID_REQUEST_FRAME_COUNT, e.tag);
+    EXPECT_EQ(TYPE_INT32, e.type);
+    EXPECT_EQ((size_t)1, e.count);
+    EXPECT_EQ(newFrameCount, *e.data.i32);
+
+    result = find_camera_metadata_entry(m,
+            ANDROID_REQUEST_FRAME_COUNT, &e);
+
+    EXPECT_EQ(OK, result);
+    EXPECT_EQ((size_t)0, e.index);
+    EXPECT_EQ(ANDROID_REQUEST_FRAME_COUNT, e.tag);
+    EXPECT_EQ(TYPE_INT32, e.type);
+    EXPECT_EQ((size_t)1, e.count);
+    EXPECT_EQ(newFrameCount, *e.data.i32);
+
+    for (size_t i = 1; i < num_entries; i++) {
+        camera_metadata_entry e2;
+        result = get_camera_metadata_entry(m, i, &e2);
+        EXPECT_EQ(OK, result);
+        EXPECT_EQ(i, e2.index);
+        EXPECT_EQ(ANDROID_SENSOR_EXPOSURE_TIME, e2.tag);
+        EXPECT_EQ(TYPE_INT64, e2.type);
+        int64_t exposureTime = 100 * i;
+        EXPECT_EQ(exposureTime, *e2.data.i64);
+    }
+
+    // Update to bigger than entry
+
+    int32_t newFrameCounts[4] = { 0x0, 0x1, 0x10, 0x100 };
+
+    result = update_camera_metadata_entry(m,
+            0, &newFrameCounts, 4, &e);
+
+    EXPECT_EQ(OK, result);
+
+    num_data += calculate_camera_metadata_entry_data_size(TYPE_INT32,
+            4);
+
+    EXPECT_EQ(num_entries, get_camera_metadata_entry_count(m));
+    EXPECT_EQ(num_data, get_camera_metadata_data_count(m));
+
+    EXPECT_EQ((size_t)0, e.index);
+    EXPECT_EQ(ANDROID_REQUEST_FRAME_COUNT, e.tag);
+    EXPECT_EQ(TYPE_INT32, e.type);
+    EXPECT_EQ((size_t)4, e.count);
+    EXPECT_EQ(newFrameCounts[0], e.data.i32[0]);
+    EXPECT_EQ(newFrameCounts[1], e.data.i32[1]);
+    EXPECT_EQ(newFrameCounts[2], e.data.i32[2]);
+    EXPECT_EQ(newFrameCounts[3], e.data.i32[3]);
+
+    e.count = 0;
+
+    result = find_camera_metadata_entry(m,
+            ANDROID_REQUEST_FRAME_COUNT, &e);
+
+    EXPECT_EQ(OK, result);
+    EXPECT_EQ((size_t)0, e.index);
+    EXPECT_EQ(ANDROID_REQUEST_FRAME_COUNT, e.tag);
+    EXPECT_EQ(TYPE_INT32, e.type);
+    EXPECT_EQ((size_t)4, e.count);
+    EXPECT_EQ(newFrameCounts[0], e.data.i32[0]);
+    EXPECT_EQ(newFrameCounts[1], e.data.i32[1]);
+    EXPECT_EQ(newFrameCounts[2], e.data.i32[2]);
+    EXPECT_EQ(newFrameCounts[3], e.data.i32[3]);
+
+    for (size_t i = 1; i < num_entries; i++) {
+        camera_metadata_entry e2;
+        result = get_camera_metadata_entry(m, i, &e2);
+        EXPECT_EQ(OK, result);
+        EXPECT_EQ(i, e2.index);
+        EXPECT_EQ(ANDROID_SENSOR_EXPOSURE_TIME, e2.tag);
+        EXPECT_EQ(TYPE_INT64, e2.type);
+        int64_t exposureTime = 100 * i;
+        EXPECT_EQ(exposureTime, *e2.data.i64);
+    }
+
+    // Update to smaller than entry
+    result = update_camera_metadata_entry(m,
+            0, &newFrameCount, 1, &e);
+
+    EXPECT_EQ(OK, result);
+
+    num_data -= camera_metadata_type_size[TYPE_INT32] * 4;
+
+    EXPECT_EQ(num_entries, get_camera_metadata_entry_count(m));
+    EXPECT_EQ(num_data, get_camera_metadata_data_count(m));
+
+    EXPECT_EQ((size_t)0, e.index);
+    EXPECT_EQ(ANDROID_REQUEST_FRAME_COUNT, e.tag);
+    EXPECT_EQ(TYPE_INT32, e.type);
+    EXPECT_EQ((size_t)1, e.count);
+    EXPECT_EQ(newFrameCount, *e.data.i32);
+
+    result = find_camera_metadata_entry(m,
+            ANDROID_REQUEST_FRAME_COUNT, &e);
+
+    EXPECT_EQ(OK, result);
+    EXPECT_EQ((size_t)0, e.index);
+    EXPECT_EQ(ANDROID_REQUEST_FRAME_COUNT, e.tag);
+    EXPECT_EQ(TYPE_INT32, e.type);
+    EXPECT_EQ((size_t)1, e.count);
+    EXPECT_EQ(newFrameCount, *e.data.i32);
+
+    for (size_t i = 1; i < num_entries; i++) {
+        camera_metadata_entry_t e2;
+        result = get_camera_metadata_entry(m, i, &e2);
+        EXPECT_EQ(OK, result);
+        EXPECT_EQ(i, e2.index);
+        EXPECT_EQ(ANDROID_SENSOR_EXPOSURE_TIME, e2.tag);
+        EXPECT_EQ(TYPE_INT64, e2.type);
+        int64_t exposureTime = 100 * i;
+        EXPECT_EQ(exposureTime, *e2.data.i64);
+    }
+
+    // Setup new buffer with no spare data space
+
+    result = update_camera_metadata_entry(m,
+            1, newExposures, 2, &e);
+    EXPECT_EQ(OK, result);
+
+    num_data += data_per_entry;
+
+    EXPECT_EQ(num_entries, get_camera_metadata_entry_count(m));
+    EXPECT_EQ(num_data, get_camera_metadata_data_count(m));
+
+    EXPECT_EQ((size_t)1, e.index);
+    EXPECT_EQ(ANDROID_SENSOR_EXPOSURE_TIME, e.tag);
+    EXPECT_EQ(TYPE_INT64, e.type);
+    EXPECT_EQ((size_t)2, e.count);
+    EXPECT_EQ(newExposures[0], e.data.i64[0]);
+    EXPECT_EQ(newExposures[1], e.data.i64[1]);
+
+    // m2 is sized exactly to m's current counts, so it has no spare
+    // entry or data capacity after the append below.
+    camera_metadata_t *m2;
+    m2 = allocate_camera_metadata(get_camera_metadata_entry_count(m),
+            get_camera_metadata_data_count(m));
+    EXPECT_NOT_NULL(m2);
+
+    result = append_camera_metadata(m2, m);
+    EXPECT_EQ(OK, result);
+
+    result = find_camera_metadata_entry(m2,
+            ANDROID_REQUEST_FRAME_COUNT, &e);
+
+    EXPECT_EQ(OK, result);
+    EXPECT_EQ((size_t)0, e.index);
+    EXPECT_EQ(ANDROID_REQUEST_FRAME_COUNT, e.tag);
+    EXPECT_EQ(TYPE_INT32, e.type);
+    EXPECT_EQ((size_t)1, e.count);
+    EXPECT_EQ(newFrameCount, *e.data.i32);
+
+    // Update when there's no more room
+
+    result = update_camera_metadata_entry(m2,
+            0, &newFrameCounts, 4, &e);
+    EXPECT_EQ(ERROR, result);
+
+    EXPECT_EQ(num_entries, get_camera_metadata_entry_count(m2));
+    EXPECT_EQ(num_data, get_camera_metadata_data_count(m2));
+
+    EXPECT_EQ((size_t)0, e.index);
+    EXPECT_EQ(ANDROID_REQUEST_FRAME_COUNT, e.tag);
+    EXPECT_EQ(TYPE_INT32, e.type);
+    EXPECT_EQ((size_t)1, e.count);
+    EXPECT_EQ(newFrameCount, *e.data.i32);
+
+    // Update when there's no data room, but change fits into entry
+
+    newFrameCount = 5;
+    result = update_camera_metadata_entry(m2,
+            0, &newFrameCount, 1, &e);
+    EXPECT_EQ(OK, result);
+
+    EXPECT_EQ(num_entries, get_camera_metadata_entry_count(m2));
+    EXPECT_EQ(num_data, get_camera_metadata_data_count(m2));
+
+    EXPECT_EQ((size_t)0, e.index);
+    EXPECT_EQ(ANDROID_REQUEST_FRAME_COUNT, e.tag);
+    EXPECT_EQ(TYPE_INT32, e.type);
+    EXPECT_EQ((size_t)1, e.count);
+    EXPECT_EQ(newFrameCount, *e.data.i32);
+
+    result = find_camera_metadata_entry(m2,
+            ANDROID_REQUEST_FRAME_COUNT, &e);
+
+    EXPECT_EQ(OK, result);
+    EXPECT_EQ((size_t)0, e.index);
+    EXPECT_EQ(ANDROID_REQUEST_FRAME_COUNT, e.tag);
+    EXPECT_EQ(TYPE_INT32, e.type);
+    EXPECT_EQ((size_t)1, e.count);
+    EXPECT_EQ(newFrameCount, *e.data.i32);
+
+    result = get_camera_metadata_entry(m2, 1, &e);
+    EXPECT_EQ((size_t)1, e.index);
+    EXPECT_EQ(ANDROID_SENSOR_EXPOSURE_TIME, e.tag);
+    EXPECT_EQ(TYPE_INT64, e.type);
+    EXPECT_EQ((size_t)2, e.count);
+    EXPECT_EQ(newExposures[0], e.data.i64[0]);
+    EXPECT_EQ(newExposures[1], e.data.i64[1]);
+
+    for (size_t i = 2; i < num_entries; i++) {
+        camera_metadata_entry_t e2;
+        result = get_camera_metadata_entry(m2, i, &e2);
+        EXPECT_EQ(OK, result);
+        EXPECT_EQ(i, e2.index);
+        EXPECT_EQ(ANDROID_SENSOR_EXPOSURE_TIME, e2.tag);
+        EXPECT_EQ(TYPE_INT64, e2.type);
+        int64_t exposureTime = 100 * i;
+        EXPECT_EQ(exposureTime, *e2.data.i64);
+    }
+
+    // Update when there's no data room, but data size doesn't change
+
+    newExposures[0] = 1000;
+
+    result = update_camera_metadata_entry(m2,
+            1, newExposures, 2, &e);
+    EXPECT_EQ(OK, result);
+
+    EXPECT_EQ(num_entries, get_camera_metadata_entry_count(m2));
+    EXPECT_EQ(num_data, get_camera_metadata_data_count(m2));
+
+    EXPECT_EQ((size_t)1, e.index);
+    EXPECT_EQ(ANDROID_SENSOR_EXPOSURE_TIME, e.tag);
+    EXPECT_EQ(TYPE_INT64, e.type);
+    EXPECT_EQ((size_t)2, e.count);
+    EXPECT_EQ(newExposures[0], e.data.i64[0]);
+    EXPECT_EQ(newExposures[1], e.data.i64[1]);
+
+    result = find_camera_metadata_entry(m2,
+            ANDROID_REQUEST_FRAME_COUNT, &e);
+
+    EXPECT_EQ(OK, result);
+    EXPECT_EQ((size_t)0, e.index);
+    EXPECT_EQ(ANDROID_REQUEST_FRAME_COUNT, e.tag);
+    EXPECT_EQ(TYPE_INT32, e.type);
+    EXPECT_EQ((size_t)1, e.count);
+    EXPECT_EQ(newFrameCount, *e.data.i32);
+
+    for (size_t i = 2; i < num_entries; i++) {
+        camera_metadata_entry_t e2;
+        result = get_camera_metadata_entry(m2, i, &e2);
+        EXPECT_EQ(OK, result);
+        EXPECT_EQ(i, e2.index);
+        EXPECT_EQ(ANDROID_SENSOR_EXPOSURE_TIME, e2.tag);
+        EXPECT_EQ(TYPE_INT64, e2.type);
+        int64_t exposureTime = 100 * i;
+        EXPECT_EQ(exposureTime, *e2.data.i64);
+    }
+
+    // Update when there's no data room, but data size shrinks
+
+    result = update_camera_metadata_entry(m2,
+            1, &newExposureTime, 1, &e);
+    EXPECT_EQ(OK, result);
+
+    num_data -= calculate_camera_metadata_entry_data_size(TYPE_INT64, 2);
+    num_data += calculate_camera_metadata_entry_data_size(TYPE_INT64, 1);
+
+    EXPECT_EQ(num_entries, get_camera_metadata_entry_count(m2));
+    EXPECT_EQ(num_data, get_camera_metadata_data_count(m2));
+
+    EXPECT_EQ((size_t)1, e.index);
+    EXPECT_EQ(ANDROID_SENSOR_EXPOSURE_TIME, e.tag);
+    EXPECT_EQ(TYPE_INT64, e.type);
+    EXPECT_EQ((size_t)1, e.count);
+    EXPECT_EQ(newExposureTime, e.data.i64[0]);
+
+    result = find_camera_metadata_entry(m2,
+            ANDROID_REQUEST_FRAME_COUNT, &e);
+
+    EXPECT_EQ(OK, result);
+    EXPECT_EQ((size_t)0, e.index);
+    EXPECT_EQ(ANDROID_REQUEST_FRAME_COUNT, e.tag);
+    EXPECT_EQ(TYPE_INT32, e.type);
+    EXPECT_EQ((size_t)1, e.count);
+    EXPECT_EQ(newFrameCount, *e.data.i32);
+
+    for (size_t i = 2; i < num_entries; i++) {
+        camera_metadata_entry_t e2;
+        result = get_camera_metadata_entry(m2, i, &e2);
+        EXPECT_EQ(OK, result);
+        EXPECT_EQ(i, e2.index);
+        EXPECT_EQ(ANDROID_SENSOR_EXPOSURE_TIME, e2.tag);
+        EXPECT_EQ(TYPE_INT64, e2.type);
+        int64_t exposureTime = 100 * i;
+        EXPECT_EQ(exposureTime, *e2.data.i64);
+    }
+
+    // Fix: release both buffers like the other tests in this file;
+    // previously this test leaked 'm' and 'm2'.
+    FINISH_USING_CAMERA_METADATA(m);
+    FINISH_USING_CAMERA_METADATA(m2);
+}
+
+// Verifies that a camera_metadata buffer is relocatable: a raw memcpy of
+// the whole buffer yields an independent, structurally valid clone, and
+// updates to either copy do not affect the other.
+TEST(camera_metadata, memcpy) {
+    camera_metadata_t *m = NULL;
+    const size_t entry_capacity = 50;
+    const size_t data_capacity = 450;
+
+    int result;
+
+    m = allocate_camera_metadata(entry_capacity, data_capacity);
+
+    add_test_metadata(m, 5);
+
+    // Clone the buffer byte-for-byte into freshly allocated storage.
+    size_t m_size = get_camera_metadata_size(m);
+    uint8_t *dst = new uint8_t[m_size];
+
+    memcpy(dst, m, m_size);
+
+    camera_metadata_t *m2 = reinterpret_cast<camera_metadata_t*>(dst);
+
+    // The clone must report identical sizes, counts, and capacities.
+    ASSERT_EQ(get_camera_metadata_size(m),
+            get_camera_metadata_size(m2));
+    EXPECT_EQ(get_camera_metadata_compact_size(m),
+            get_camera_metadata_compact_size(m2));
+    ASSERT_EQ(get_camera_metadata_entry_count(m),
+            get_camera_metadata_entry_count(m2));
+    EXPECT_EQ(get_camera_metadata_entry_capacity(m),
+            get_camera_metadata_entry_capacity(m2));
+    EXPECT_EQ(get_camera_metadata_data_count(m),
+            get_camera_metadata_data_count(m2));
+    EXPECT_EQ(get_camera_metadata_data_capacity(m),
+            get_camera_metadata_data_capacity(m2));
+
+    // Every entry (metadata and payload bytes) must match.
+    camera_metadata_entry_t e1, e2;
+    for (size_t i = 0; i < get_camera_metadata_entry_count(m); i++) {
+        result = get_camera_metadata_entry(m, i, &e1);
+        ASSERT_EQ(OK, result);
+        result = get_camera_metadata_entry(m2, i, &e2);
+        ASSERT_EQ(OK, result);
+
+        EXPECT_EQ(e1.index, e2.index);
+        EXPECT_EQ(e1.tag, e2.tag);
+        ASSERT_EQ(e1.type, e2.type);
+        ASSERT_EQ(e1.count, e2.count);
+
+        ASSERT_TRUE(!memcmp(e1.data.u8, e2.data.u8,
+                        camera_metadata_type_size[e1.type] * e1.count));
+    }
+
+    // Make sure updating one metadata buffer doesn't change the other
+
+    int64_t double_exposure_time[] = { 100, 200 };
+
+    result = update_camera_metadata_entry(m, 0,
+            double_exposure_time,
+            sizeof(double_exposure_time)/sizeof(int64_t), NULL);
+    EXPECT_EQ(OK, result);
+
+    result = get_camera_metadata_entry(m, 0, &e1);
+    ASSERT_EQ(OK, result);
+    result = get_camera_metadata_entry(m2, 0, &e2);
+    ASSERT_EQ(OK, result);
+
+    // m's entry 0 grew to two values; m2's entry 0 is untouched.
+    EXPECT_EQ(e1.index, e2.index);
+    EXPECT_EQ(e1.tag, e2.tag);
+    ASSERT_EQ(e1.type, e2.type);
+    ASSERT_EQ((size_t)2, e1.count);
+    ASSERT_EQ((size_t)1, e2.count);
+    EXPECT_EQ(100, e1.data.i64[0]);
+    EXPECT_EQ(200, e1.data.i64[1]);
+    EXPECT_EQ(100, e2.data.i64[0]);
+
+    // And in the reverse direction as well
+
+    double_exposure_time[0] = 300;
+    result = update_camera_metadata_entry(m2, 0,
+            double_exposure_time,
+            sizeof(double_exposure_time)/sizeof(int64_t), NULL);
+    EXPECT_EQ(OK, result);
+
+    result = get_camera_metadata_entry(m, 0, &e1);
+    ASSERT_EQ(OK, result);
+    result = get_camera_metadata_entry(m2, 0, &e2);
+    ASSERT_EQ(OK, result);
+
+    // Now both hold two values, but with independent first elements.
+    EXPECT_EQ(e1.index, e2.index);
+    EXPECT_EQ(e1.tag, e2.tag);
+    ASSERT_EQ(e1.type, e2.type);
+    ASSERT_EQ((size_t)2, e1.count);
+    ASSERT_EQ((size_t)2, e2.count);
+    EXPECT_EQ(100, e1.data.i64[0]);
+    EXPECT_EQ(200, e1.data.i64[1]);
+    EXPECT_EQ(300, e2.data.i64[0]);
+    EXPECT_EQ(200, e2.data.i64[1]);
+
+    EXPECT_EQ(OK, validate_camera_metadata_structure(m2, &m_size));
+
+    // m2 lives inside dst, so delete[] releases it; m needs FINISH.
+    delete[] dst;
+    FINISH_USING_CAMERA_METADATA(m);
+}
+
+TEST(camera_metadata, data_alignment) {
+    // Verify that entry payloads are stored at addresses naturally aligned
+    // for their element type, for every metadata type, for both inline and
+    // separately-stored data, and with varying amounts of preceding padding.
+    camera_metadata_t *m = NULL;
+    const size_t entry_capacity = 50;
+    const size_t data_capacity = 450;
+    char dummy_data[data_capacity] = {0,};
+
+    int m_types[] = {
+        TYPE_BYTE,
+        TYPE_INT32,
+        TYPE_FLOAT,
+        TYPE_INT64,
+        TYPE_DOUBLE,
+        TYPE_RATIONAL
+    };
+    const size_t (&m_type_sizes)[NUM_TYPES] = camera_metadata_type_size;
+    // Use alignof: _Alignas is an alignment *specifier*, not an expression,
+    // so it cannot produce the value needed in this initializer. alignof
+    // yields the required alignment of each element type.
+    size_t m_type_align[] = {
+        alignof(uint8_t),                    // BYTE
+        alignof(int32_t),                    // INT32
+        alignof(float),                      // FLOAT
+        alignof(int64_t),                    // INT64
+        alignof(double),                     // DOUBLE
+        alignof(camera_metadata_rational_t), // RATIONAL
+    };
+    /* arbitrary tags. the important thing is that their type
+       corresponds to m_type_sizes[i]
+       */
+    int m_type_tags[] = {
+        ANDROID_REQUEST_TYPE,
+        ANDROID_REQUEST_ID,
+        ANDROID_LENS_FOCUS_DISTANCE,
+        ANDROID_SENSOR_EXPOSURE_TIME,
+        ANDROID_JPEG_GPS_COORDINATES,
+        ANDROID_CONTROL_AE_COMPENSATION_STEP
+    };
+
+    /*
+    if the asserts fail, its because we added more types.
+        this means the test should be updated to include more types.
+    */
+    ASSERT_EQ((size_t)NUM_TYPES, sizeof(m_types)/sizeof(m_types[0]));
+    ASSERT_EQ((size_t)NUM_TYPES, sizeof(m_type_align)/sizeof(m_type_align[0]));
+    ASSERT_EQ((size_t)NUM_TYPES, sizeof(m_type_tags)/sizeof(m_type_tags[0]));
+
+    for (int m_type = 0; m_type < (int)NUM_TYPES; ++m_type) {
+
+        ASSERT_EQ(m_types[m_type],
+            get_camera_metadata_tag_type(m_type_tags[m_type]));
+
+        // misalignment possibilities are [0,type_size) for any type pointer
+        for (size_t i = 0; i < m_type_sizes[m_type]; ++i) {
+
+            /* data_count = 1, we may store data in the index.
+               data_count = 10, we will store data separately
+             */
+            for (int data_count = 1; data_count <= 10; data_count += 9) {
+
+                m = allocate_camera_metadata(entry_capacity, data_capacity);
+
+                // add dummy data to test various different padding requirements
+                ASSERT_EQ(OK,
+                    add_camera_metadata_entry(m,
+                                              m_type_tags[TYPE_BYTE],
+                                              &dummy_data[0],
+                                              data_count + i));
+                // insert the type we care to test
+                ASSERT_EQ(OK,
+                    add_camera_metadata_entry(m, m_type_tags[m_type],
+                                             &dummy_data[0], data_count));
+
+                // now check the alignment for our desired type. it should be ok
+                camera_metadata_ro_entry_t entry = camera_metadata_ro_entry_t();
+                ASSERT_EQ(OK,
+                    find_camera_metadata_ro_entry(m, m_type_tags[m_type],
+                                                 &entry));
+
+                void* data_ptr = (void*)entry.data.u8;
+                // Alignments are powers of two, so masking off the low bits
+                // rounds down to the nearest aligned address; it must equal
+                // the data pointer itself.
+                void* aligned_ptr = (void*)((uintptr_t)data_ptr & ~(m_type_align[m_type] - 1));
+                EXPECT_EQ(aligned_ptr, data_ptr) <<
+                    "Wrong alignment for type " <<
+                    camera_metadata_type_names[m_type] <<
+                    " with " << (data_count + i) << " dummy bytes and " <<
+                    " data_count " << data_count <<
+                    " expected alignment was: " << m_type_align[m_type];
+
+                FINISH_USING_CAMERA_METADATA(m);
+            }
+        }
+    }
+}
diff --git a/media/camera/tests/camera_metadata_tests_fake_vendor.h b/media/camera/tests/camera_metadata_tests_fake_vendor.h
new file mode 100644
index 0000000..cdd219e
--- /dev/null
+++ b/media/camera/tests/camera_metadata_tests_fake_vendor.h
@@ -0,0 +1,189 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Fake vendor extensions for testing
+ */
+
+#ifndef TESTING_CAMERA_METADATA_FAKEVENDOR_H
+#define TESTING_CAMERA_METADATA_FAKEVENDOR_H
+
+#include <stdint.h>
+
+#include <system/camera_metadata.h>
+#include <system/camera_vendor_tags.h>
+
+enum vendor_extension_section {
+    FAKEVENDOR_SENSOR = VENDOR_SECTION,
+    FAKEVENDOR_SENSOR_INFO,
+    FAKEVENDOR_COLORCORRECTION,
+    FAKEVENDOR_SCALER,
+    FAKEVENDOR_SECTION_END
+};
+
+const int FAKEVENDOR_SECTION_COUNT = FAKEVENDOR_SECTION_END - VENDOR_SECTION;
+
+enum vendor_extension_section_ranges {
+    FAKEVENDOR_SENSOR_START          = FAKEVENDOR_SENSOR << 16,
+    FAKEVENDOR_SENSOR_I_START        = FAKEVENDOR_SENSOR_INFO << 16,
+    FAKEVENDOR_COLORCORRECTION_START = FAKEVENDOR_COLORCORRECTION << 16,
+    FAKEVENDOR_SCALER_START          = FAKEVENDOR_SCALER << 16
+};
+
+enum vendor_extension_tags {
+    FAKEVENDOR_SENSOR_SUPERMODE = FAKEVENDOR_SENSOR_START,
+    FAKEVENDOR_SENSOR_DOUBLE_EXPOSURE,
+    FAKEVENDOR_SENSOR_END,
+
+    FAKEVENDOR_SENSOR_AVAILABLE_SUPERMODES = FAKEVENDOR_SENSOR_I_START,
+    FAKEVENDOR_SENSOR_I_END,
+
+    FAKEVENDOR_COLORCORRECTION_3DLUT_MODE = FAKEVENDOR_COLORCORRECTION_START,
+    FAKEVENDOR_COLORCORRECTION_3DLUT_TABLES,
+    FAKEVENDOR_COLORCORRECTION_END,
+
+    FAKEVENDOR_SCALER_DOWNSCALE_MODE = FAKEVENDOR_SCALER_START,
+    FAKEVENDOR_SCALER_DOWNSCALE_COEFF,
+    FAKEVENDOR_SCALER_END
+};
+
+typedef struct vendor_tag_info {
+    const char *tag_name;
+    uint8_t     tag_type;
+} vendor_tag_info_t;
+
+const char *fakevendor_section_names[FAKEVENDOR_SECTION_COUNT] = {
+    "com.fakevendor.sensor",
+    "com.fakevendor.sensor.info",
+    "com.fakevendor.colorCorrection",
+    "com.fakevendor.scaler"
+};
+
+uint32_t fakevendor_section_bounds[FAKEVENDOR_SECTION_COUNT][2] = {
+    { (uint32_t) FAKEVENDOR_SENSOR_START,          (uint32_t) FAKEVENDOR_SENSOR_END },
+    { (uint32_t) FAKEVENDOR_SENSOR_I_START,        (uint32_t) FAKEVENDOR_SENSOR_I_END },
+    { (uint32_t) FAKEVENDOR_COLORCORRECTION_START, (uint32_t) FAKEVENDOR_COLORCORRECTION_END },
+    { (uint32_t) FAKEVENDOR_SCALER_START,          (uint32_t) FAKEVENDOR_SCALER_END}
+};
+
+vendor_tag_info_t fakevendor_sensor[FAKEVENDOR_SENSOR_END -
+        FAKEVENDOR_SENSOR_START] = {
+    { "superMode",       TYPE_BYTE },
+    { "doubleExposure",  TYPE_INT64 }
+};
+
+vendor_tag_info_t fakevendor_sensor_info[FAKEVENDOR_SENSOR_I_END -
+        FAKEVENDOR_SENSOR_I_START] = {
+    { "availableSuperModes",   TYPE_BYTE }
+};
+
+vendor_tag_info_t fakevendor_color_correction[FAKEVENDOR_COLORCORRECTION_END -
+        FAKEVENDOR_COLORCORRECTION_START] = {
+    { "3dLutMode",   TYPE_BYTE },
+    { "3dLutTables", TYPE_FLOAT }
+};
+
+vendor_tag_info_t fakevendor_scaler[FAKEVENDOR_SCALER_END -
+        FAKEVENDOR_SCALER_START] = {
+    { "downscaleMode",  TYPE_BYTE },
+    { "downscaleCoefficients", TYPE_FLOAT }
+};
+
+vendor_tag_info_t *fakevendor_tag_info[FAKEVENDOR_SECTION_COUNT] = {
+    fakevendor_sensor,
+    fakevendor_sensor_info,
+    fakevendor_color_correction,
+    fakevendor_scaler
+};
+
+const char *get_fakevendor_section_name(const vendor_tag_ops_t *v,
+        uint32_t tag);
+const char *get_fakevendor_tag_name(const vendor_tag_ops_t *v,
+        uint32_t tag);
+int get_fakevendor_tag_type(const vendor_tag_ops_t *v,
+        uint32_t tag);
+int get_fakevendor_tag_count(const vendor_tag_ops_t *v);
+void get_fakevendor_tags(const vendor_tag_ops_t *v, uint32_t *tag_array);
+
+/* Vendor tag query ops for the fake vendor.
+ * NOTE(review): initializers are positional, so this order must match the
+ * member order of vendor_tag_ops_t in system/camera_vendor_tags.h — confirm
+ * against that header when it changes. (Designated initializers are avoided
+ * because this header is also included from C++ test code.) */
+static const vendor_tag_ops_t fakevendor_ops = {
+    get_fakevendor_tag_count,
+    get_fakevendor_tags,
+    get_fakevendor_section_name,
+    get_fakevendor_tag_name,
+    get_fakevendor_tag_type
+};
+
+/* Map a vendor tag to its dotted section name (e.g. "com.fakevendor.sensor").
+ * Returns NULL if v is not our ops table or the tag's section index falls
+ * outside the fake vendor sections. */
+const char *get_fakevendor_section_name(const vendor_tag_ops_t *v,
+        uint32_t tag) {
+    if (v != &fakevendor_ops) {
+        return NULL;
+    }
+    int section = (tag >> 16) - VENDOR_SECTION;
+    if (section >= 0 && section < FAKEVENDOR_SECTION_COUNT) {
+        return fakevendor_section_names[section];
+    }
+    return NULL;
+}
+
+/* Resolve a vendor tag to its short name within its section.
+ * Returns NULL if v is not our ops table, the section index is out of
+ * range, or the tag is at/past its section's registered end bound. */
+const char *get_fakevendor_tag_name(const vendor_tag_ops_t *v,
+        uint32_t tag) {
+    if (v != &fakevendor_ops) {
+        return NULL;
+    }
+    int section = (tag >> 16) - VENDOR_SECTION;
+    if (section < 0 || section >= FAKEVENDOR_SECTION_COUNT) {
+        return NULL;
+    }
+    if (tag >= fakevendor_section_bounds[section][1]) {
+        return NULL;
+    }
+    /* Low 16 bits index into the section's tag-info array. */
+    return fakevendor_tag_info[section][tag & 0xFFFF].tag_name;
+}
+
+/* Resolve a vendor tag to its value type (TYPE_BYTE, TYPE_INT64, ...).
+ * Returns -1 if v is not our ops table, the section index is out of
+ * range, or the tag is at/past its section's registered end bound. */
+int get_fakevendor_tag_type(const vendor_tag_ops_t *v,
+        uint32_t tag) {
+    if (v != &fakevendor_ops) {
+        return -1;
+    }
+    int section = (tag >> 16) - VENDOR_SECTION;
+    if (section < 0 || section >= FAKEVENDOR_SECTION_COUNT) {
+        return -1;
+    }
+    if (tag >= fakevendor_section_bounds[section][1]) {
+        return -1;
+    }
+    /* Low 16 bits index into the section's tag-info array. */
+    return fakevendor_tag_info[section][tag & 0xFFFF].tag_type;
+}
+
+/* Total number of fake vendor tags across all sections, computed from the
+ * per-section [start, end) bounds. Returns -1 if v is not our ops table. */
+int get_fakevendor_tag_count(const vendor_tag_ops_t *v) {
+    if (v != &fakevendor_ops) {
+        return -1;
+    }
+    int total = 0;
+    for (int s = 0; s < FAKEVENDOR_SECTION_COUNT; ++s) {
+        total += fakevendor_section_bounds[s][1] - fakevendor_section_bounds[s][0];
+    }
+    return total;
+}
+
+/* Write every fake vendor tag id into tag_array, section by section, in
+ * ascending order. The caller sizes the array via get_fakevendor_tag_count().
+ * Silently does nothing if v is not our ops table or tag_array is NULL. */
+void get_fakevendor_tags(const vendor_tag_ops_t *v, uint32_t *tag_array) {
+    if (v != &fakevendor_ops || tag_array == NULL) {
+        return;
+    }
+    uint32_t *out = tag_array;
+    for (int s = 0; s < FAKEVENDOR_SECTION_COUNT; ++s) {
+        for (uint32_t t = fakevendor_section_bounds[s][0];
+                t < fakevendor_section_bounds[s][1]; ++t) {
+            *out++ = t;
+        }
+    }
+}
+
+#endif
diff --git a/media/private/camera/include/camera_metadata_hidden.h b/media/private/camera/include/camera_metadata_hidden.h
new file mode 100644
index 0000000..c5e0a39
--- /dev/null
+++ b/media/private/camera/include/camera_metadata_hidden.h
@@ -0,0 +1,49 @@
+/*
+ * Copyright 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef SYSTEM_MEDIA_PRIVATE_INCLUDE_CAMERA_METADATA_HIDDEN_H
+#define SYSTEM_MEDIA_PRIVATE_INCLUDE_CAMERA_METADATA_HIDDEN_H
+
+#include <system/camera_vendor_tags.h>
+
+/**
+ * Error codes returned by vendor tags ops operations. These are intended
+ * to be used by all framework code that uses the return values from the
+ * vendor operations object.
+ */
+#define VENDOR_SECTION_NAME_ERR   NULL
+#define VENDOR_TAG_NAME_ERR       NULL
+#define VENDOR_TAG_COUNT_ERR      (-1)
+#define VENDOR_TAG_TYPE_ERR       (-1)
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+/** **These are private functions for use only by the camera framework.** **/
+
+/**
+ * Set the global vendor tag operations object used to define vendor tag
+ * structure when parsing camera metadata with functions defined in
+ * system/media/camera/include/camera_metadata.h.
+ */
+ANDROID_API
+int set_camera_metadata_vendor_ops(const vendor_tag_ops_t *query_ops);
+
+#ifdef __cplusplus
+} /* extern "C" */
+#endif
+
+#endif /* SYSTEM_MEDIA_PRIVATE_INCLUDE_CAMERA_METADATA_HIDDEN_H */
diff --git a/media/private/radio/include/radio_metadata_hidden.h b/media/private/radio/include/radio_metadata_hidden.h
new file mode 100644
index 0000000..1b76aa0
--- /dev/null
+++ b/media/private/radio/include/radio_metadata_hidden.h
@@ -0,0 +1,86 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_RADIO_METADATA_HIDDEN_H
+#define ANDROID_RADIO_METADATA_HIDDEN_H
+
+#include <stdbool.h>
+#include <system/radio.h>
+#include <system/radio_metadata.h>
+
+/* default size allocated for a metadata buffer in 32 bits units */
+#define RADIO_METADATA_DEFAULT_SIZE 64
+/* maximum size allocated for a metadata buffer in 32 bits units */
+#define RADIO_METADATA_MAX_SIZE (RADIO_METADATA_DEFAULT_SIZE << 12)
+
+/* meta data entry in a meta data buffer */
+typedef struct radio_metadata_entry {
+    radio_metadata_key_t    key;
+    radio_metadata_type_t   type;
+    unsigned int            size;
+    unsigned char           data[];
+} radio_metadata_entry_t;
+
+
+/**
+* meta data buffer layout:
+*
+*   |    <---  32 bit   --->    |
+*   |---------------------------|
+*   | channel                   |
+*   |---------------------------|
+*   | sub_channel               |
+*   |---------------------------|
+*   | size_int                  | total size in 32 bit units including header and index
+*   |---------------------------|
+*   | count                     | number of entries
+*   |---------------------------|<--+
+*   | first entry               |   |
+*   |                           |   |
+*   |---------------------------|<+ |
+*   | second entry              | | |
+*   |                           | | |
+*   |                           | | |
+*   |---------------------------| | |
+*   |     :                     | | |
+*   |---------------------------| | |       \
+*   | offset of next free space | | |       |
+*   |---------------------------| | |       |
+*   |     :                     | | |       |
+*   |---------------------------| | |       >  index
+*   | offset of second entry    |-+ |       |
+*   |---------------------------|   |       |
+*   | offset of first entry     |---+       |
+*   |---------------------------|           /
+*
+*   A radio meta data buffer is allocated with radio_metadata_allocate() and released with
+*   radio_metadata_deallocate().
+*   Meta data entries are added with radio_metadata_add_xxx() where xxx is int, text or raw.
+*   The buffer is allocated with a default size (RADIO_METADATA_DEFAULT_SIZE entries)
+*   by radio_metadata_allocate() and reallocated if needed by radio_metadata_add_xxx()
+*/
+
+/* Radio meta data buffer header */
+typedef struct radio_metadata_buffer {
+    unsigned int channel;       /* channel (frequency) this meta data is associated with */
+    unsigned int sub_channel;   /* sub channel this meta data is associated with */
+    unsigned int size_int;      /* Total size in 32 bit word units */
+    unsigned int count;         /* number of meta data entries */
+} radio_metadata_buffer_t;
+
+
+
+#endif  // ANDROID_RADIO_METADATA_HIDDEN_H
diff --git a/media/radio/include/system/radio_metadata.h b/media/radio/include/system/radio_metadata.h
new file mode 100644
index 0000000..01c0403
--- /dev/null
+++ b/media/radio/include/system/radio_metadata.h
@@ -0,0 +1,268 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_RADIO_METADATA_H
+#define ANDROID_RADIO_METADATA_H
+
+#include <stdbool.h>
+#include <cutils/compiler.h>
+#include <system/radio.h>
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/* maximum length for text metadata including NUL terminator */
+#define RADIO_METADATA_TEXT_LEN_MAX 1024
+
+/* radio meta data key values */
+enum {
+    RADIO_METADATA_KEY_INVALID      = -1,
+    RADIO_METADATA_KEY_RDS_PI       = 0,      /* RDS PI                 - text  */
+    RADIO_METADATA_KEY_RDS_PS       = 1,      /* RDS PS                 - text */
+    RADIO_METADATA_KEY_RDS_PTY      = 2,      /* RDS PTY                - int  */
+    RADIO_METADATA_KEY_RBDS_PTY     = 3,      /* RBDS PTY               - int  */
+    RADIO_METADATA_KEY_RDS_RT       = 4,      /* RDS RT                 - text  */
+    RADIO_METADATA_KEY_TITLE        = 5,      /* Song title             - text  */
+    RADIO_METADATA_KEY_ARTIST       = 6,      /* Artist name            - text  */
+    RADIO_METADATA_KEY_ALBUM        = 7,      /* Album name             - text  */
+    RADIO_METADATA_KEY_GENRE        = 8,      /* Musical genre          - text  */
+    RADIO_METADATA_KEY_ICON         = 9,      /* Station icon           - raw  */
+    RADIO_METADATA_KEY_ART          = 10,     /* Album art              - raw  */
+    RADIO_METADATA_KEY_MIN          = RADIO_METADATA_KEY_RDS_PI,
+    RADIO_METADATA_KEY_MAX          = RADIO_METADATA_KEY_ART,
+};
+typedef int radio_metadata_key_t;
+
+
+enum {
+    RADIO_METADATA_TYPE_INVALID    = -1,
+    RADIO_METADATA_TYPE_INT        = 0,      /* signed 32 bit integer  */
+    RADIO_METADATA_TYPE_TEXT       = 1,      /* text in UTF-8 format, NUL terminated.
+                                                RADIO_METADATA_TEXT_LEN_MAX length including NUL. */
+    RADIO_METADATA_TYPE_RAW        = 2,      /* raw binary data (icon or art) */
+};
+typedef int radio_metadata_type_t;
+
+/*
+ * Return the type of the meta data corresponding to the key specified
+ *
+ * arguments:
+ * - key: the meta data key.
+ *
+ * returns:
+ *  the meta data type corresponding to the key or RADIO_METADATA_TYPE_INVALID
+ */
+ANDROID_API
+radio_metadata_type_t radio_metadata_type_of_key(const radio_metadata_key_t key);
+
+/*
+ * Allocate a meta data buffer for use by radio HAL callback for RADIO_EVENT_TUNED and
+ * RADIO_EVENT_METADATA events.
+ *
+ * arguments:
+ * - metadata: the address where the allocated meta data buffer should be returned.
+ * - channel: channel (frequency) this meta data is associated with.
+ * - sub_channel: sub channel this meta data is associated with.
+ *
+ * returns:
+ *  0 if successfully allocated
+ *  -ENOMEM if meta data buffer cannot be allocated
+ */
+ANDROID_API
+int radio_metadata_allocate(radio_metadata_t **metadata,
+                            const unsigned int channel,
+                            const unsigned int sub_channel);
+
+/*
+ * De-allocate a meta data buffer.
+ *
+ * arguments:
+ * - metadata: the meta data buffer to be de-allocated.
+ */
+ANDROID_API
+void radio_metadata_deallocate(radio_metadata_t *metadata);
+
+/*
+ * Add an integer meta data to the buffer.
+ *
+ * arguments:
+ * - metadata: the address of the meta data buffer. I/O. the meta data can be modified if the
+ * buffer is re-allocated
+ * - key: the meta data key.
+ * - value: the meta data value.
+ *
+ * returns:
+ *  0 if successfully added
+ *  -EINVAL if the buffer passed is invalid or the key does not match an integer type
+ *  -ENOMEM if meta data buffer cannot be re-allocated
+ */
+ANDROID_API
+int radio_metadata_add_int(radio_metadata_t **metadata,
+                           const radio_metadata_key_t key,
+                           const int value);
+
+/*
+ * Add a text meta data entry to the buffer.
+ *
+ * arguments:
+ * - metadata: the address of the meta data buffer. I/O. the meta data can be modified if the
+ * buffer is re-allocated
+ * - key: the meta data key.
+ * - value: the meta data value.
+ *
+ * returns:
+ *  0 if successfully added
+ *  -EINVAL if the buffer passed is invalid or the key does not match a text type or text
+ *  is too long
+ *  -ENOMEM if meta data buffer cannot be re-allocated
+ */
+ANDROID_API
+int radio_metadata_add_text(radio_metadata_t **metadata,
+                            const radio_metadata_key_t key,
+                            const char *value);
+
+/*
+ * Add a raw meta data entry to the buffer.
+ *
+ * arguments:
+ * - metadata: the address of the meta data buffer. I/O. the meta data can be modified if the
+ * buffer is re-allocated
+ * - key: the meta data key.
+ * - value: the meta data value.
+ *
+ * returns:
+ *  0 if successfully added
+ *  -EINVAL if the buffer passed is invalid or the key does not match a raw type
+ *  -ENOMEM if meta data buffer cannot be re-allocated
+ */
+ANDROID_API
+int radio_metadata_add_raw(radio_metadata_t **metadata,
+                           const radio_metadata_key_t key,
+                           const unsigned char *value,
+                           const unsigned int size);
+
+/*
+ * Add all meta data in the source buffer to the destination buffer.
+ *
+ * arguments:
+ * - dst_metadata: the address of the destination meta data buffer. if *dst_metadata is NULL,
+ * a new buffer is created.
+ * - src_metadata: the source meta data buffer.
+ *
+ * returns:
+ *  0 if successfully added
+ *  -ENOMEM if meta data buffer cannot be re-allocated
+ */
+ANDROID_API
+int radio_metadata_add_metadata(radio_metadata_t **dst_metadata,
+                           radio_metadata_t *src_metadata);
+
+/*
+ * Perform sanity check on a meta data buffer.
+ *
+ * arguments:
+ * - metadata: the meta data buffer.
+ *
+ * returns:
+ *  0 if no error found
+ *  -EINVAL if a consistency problem is found in the meta data buffer
+ */
+ANDROID_API
+int radio_metadata_check(const radio_metadata_t *metadata);
+
+/*
+ * Return the total size used by the meta data buffer.
+ * No sanity check is performed on the meta data buffer.
+ *
+ * arguments:
+ * - metadata: the meta data buffer.
+ *
+ * returns:
+ *  0 if an invalid meta data buffer is passed
+ *  the size in bytes otherwise
+ */
+ANDROID_API
+size_t radio_metadata_get_size(const radio_metadata_t *metadata);
+
+/*
+ * Return the number of meta data entries in the buffer.
+ * No sanity check is performed on the meta data buffer.
+ *
+ * arguments:
+ * - metadata: the meta data buffer.
+ *
+ * returns:
+ *  -EINVAL if an invalid meta data buffer is passed
+ *  the number of entries otherwise
+ */
+ANDROID_API
+int radio_metadata_get_count(const radio_metadata_t *metadata);
+
+/*
+ * Get a meta data at a specified index. Used to parse a meta data buffer.
+ * No sanity check is performed on the meta data buffer.
+ *
+ * arguments:
+ * - metadata: the meta data buffer.
+ * - index: the index to read from
+ * - key: where the meta data key should be returned
+ * - type: where the meta data type should be returned
+ * - value: where the address of the meta data value should be returned
+ * - size: where the size of the meta data value should be returned
+ *
+ * returns:
+ *  -EINVAL if an invalid argument is passed
+ *  0 otherwise
+ */
+ANDROID_API
+int radio_metadata_get_at_index(const radio_metadata_t *metadata,
+                                const unsigned int index,
+                                radio_metadata_key_t *key,
+                                radio_metadata_type_t *type,
+                                void **value,
+                                unsigned int *size);
+
+/*
+ * Get a meta data with the specified key.
+ * No sanity check is performed on the meta data buffer.
+ * This will return the first meta data found with the matching key.
+ *
+ * arguments:
+ * - metadata: the meta data buffer.
+ * - index: the index to read from
+ * - key: the meta data key to look for
+ * - type: where the meta data type should be returned
+ * - value: where the address of the meta data value should be returned
+ * - size: where the size of the meta data value should be returned
+ *
+ * returns:
+ *  -EINVAL if an invalid argument is passed
+ *  -ENOENT if no entry with the specified key is found
+ *  0 otherwise
+ */
+ANDROID_API
+int radio_metadata_get_from_key(const radio_metadata_t *metadata,
+                                const radio_metadata_key_t key,
+                                radio_metadata_type_t *type,
+                                void **value,
+                                unsigned int *size);
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif  // ANDROID_RADIO_METADATA_H
diff --git a/media/radio/src/Android.mk b/media/radio/src/Android.mk
new file mode 100644
index 0000000..b96a40a
--- /dev/null
+++ b/media/radio/src/Android.mk
@@ -0,0 +1,24 @@
+LOCAL_PATH:= $(call my-dir)
+
+include $(CLEAR_VARS)
+
+LOCAL_SRC_FILES := \
+	radio_metadata.c
+
+LOCAL_C_INCLUDES:= \
+	system/media/radio/include \
+	system/media/private/radio/include
+
+LOCAL_SHARED_LIBRARIES := \
+	libcutils \
+	liblog
+
+LOCAL_MODULE := libradio_metadata
+LOCAL_MODULE_TAGS := optional
+
+LOCAL_CFLAGS += \
+	-fvisibility=hidden
+
+LOCAL_EXPORT_C_INCLUDE_DIRS := $(LOCAL_PATH)/../include
+
+include $(BUILD_SHARED_LIBRARY)
diff --git a/media/radio/src/radio_metadata.c b/media/radio/src/radio_metadata.c
new file mode 100644
index 0000000..41c67d8
--- /dev/null
+++ b/media/radio/src/radio_metadata.c
@@ -0,0 +1,405 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "radio_metadata"
+/*#define LOG_NDEBUG 0*/
+
+#include <errno.h>
+#include <stdlib.h>
+#include <string.h>
+#include <limits.h>
+#include <system/radio.h>
+#include <system/radio_metadata.h>
+#include <radio_metadata_hidden.h>
+#include <cutils/log.h>
+
+/*
+ * Value type associated with each radio_metadata_key_t, indexed by
+ * (key - RADIO_METADATA_KEY_MIN); used by radio_metadata_type_of_key().
+ * NOTE(review): the entry order must match the key enum declared in
+ * system/radio_metadata.h (not visible in this file) — confirm the
+ * correspondence before adding or reordering entries.
+ */
+const radio_metadata_type_t metadata_key_type_table[] =
+{
+    RADIO_METADATA_TYPE_TEXT,
+    RADIO_METADATA_TYPE_TEXT,
+    RADIO_METADATA_TYPE_INT,
+    RADIO_METADATA_TYPE_INT,
+    RADIO_METADATA_TYPE_TEXT,
+    RADIO_METADATA_TYPE_TEXT,
+    RADIO_METADATA_TYPE_TEXT,
+    RADIO_METADATA_TYPE_TEXT,
+    RADIO_METADATA_TYPE_TEXT,
+    RADIO_METADATA_TYPE_RAW,
+    RADIO_METADATA_TYPE_RAW,
+};
+
+/**
+ * private functions
+ */
+
+/* A key is valid iff it lies in the inclusive [MIN, MAX] key range. */
+bool is_valid_metadata_key(const radio_metadata_key_t key)
+{
+    return (key >= RADIO_METADATA_KEY_MIN) && (key <= RADIO_METADATA_KEY_MAX);
+}
+
+/*
+ * Ensure the buffer can hold one more entry of size_int 32 bit words plus
+ * one more index-table slot, growing it with realloc() if needed.
+ * On success *metadata_ptr may point to a new (moved) buffer.
+ * Returns 0 on success, -ENOMEM if the buffer would exceed
+ * RADIO_METADATA_MAX_SIZE or if reallocation fails (in which case the
+ * original buffer is left untouched and still owned by the caller).
+ */
+int check_size(radio_metadata_buffer_t **metadata_ptr, const unsigned int size_int)
+{
+    radio_metadata_buffer_t *metadata = *metadata_ptr;
+    radio_metadata_buffer_t *new_metadata;
+    unsigned int index_offset = metadata->size_int - metadata->count - 1;
+    unsigned int data_offset = *((unsigned int *)metadata + index_offset);
+    unsigned int req_size_int;
+    unsigned int new_size_int;
+
+    if (size_int == 0) {
+        return 0;
+    }
+
+    /* words needed: existing data + index table (count + free slot) +
+     * one additional index entry + the new entry itself */
+    req_size_int = data_offset + metadata->count + 1 + 1 + size_int;
+    /* do not grow buffer if it can accommodate the new entry plus an additional index entry */
+
+    if (req_size_int <= metadata->size_int) {
+        return 0;
+    }
+
+    if (req_size_int > RADIO_METADATA_MAX_SIZE || metadata->size_int >= RADIO_METADATA_MAX_SIZE) {
+        return -ENOMEM;
+    }
+    /* grow meta data buffer by a factor of 2 until new data fits, clamped so
+     * the buffer never exceeds RADIO_METADATA_MAX_SIZE (radio_metadata_check()
+     * rejects buffers larger than that) */
+    new_size_int = metadata->size_int;
+    while (new_size_int < req_size_int)
+        new_size_int *= 2;
+    if (new_size_int > RADIO_METADATA_MAX_SIZE)
+        new_size_int = RADIO_METADATA_MAX_SIZE;
+
+    ALOGV("%s growing from %u to %u", __func__, metadata->size_int, new_size_int);
+    new_metadata = realloc(metadata, new_size_int * sizeof(unsigned int));
+    if (new_metadata == NULL) {
+        /* realloc failure leaves the original block valid: report OOM
+         * instead of dereferencing NULL below */
+        return -ENOMEM;
+    }
+    metadata = new_metadata;
+    /* move index table to the end of the enlarged buffer
+     * (metadata->size_int still holds the old size here) */
+    memmove((unsigned int *)metadata + new_size_int - (metadata->count + 1),
+            (unsigned int *)metadata + metadata->size_int - (metadata->count + 1),
+            (metadata->count + 1) * sizeof(unsigned int));
+    metadata->size_int = new_size_int;
+
+    *metadata_ptr = metadata;
+    return 0;
+}
+
+/*
+ * Append one entry to the meta data buffer.
+ * Checks on size and key validity are done before calling this function.
+ * Layout (in 32 bit words): header | entry data growing upward | ... |
+ * index table growing downward from the end of the buffer, with one offset
+ * per entry plus a trailing "free slot" offset at size_int - count - 1.
+ * Returns 0 on success or a negative errno if the buffer cannot be grown.
+ */
+int add_metadata(radio_metadata_buffer_t **metadata_ptr,
+                 const radio_metadata_key_t key,
+                 const radio_metadata_type_t type,
+                 const void *value,
+                 const unsigned int size)
+{
+    unsigned int entry_size_int;
+    int ret;
+    radio_metadata_entry_t *entry;
+    unsigned int index_offset;
+    unsigned int data_offset;
+    radio_metadata_buffer_t *metadata = *metadata_ptr;
+
+    /* entry footprint in words: header + payload, rounded up */
+    entry_size_int = size + sizeof(radio_metadata_entry_t);
+    entry_size_int = (entry_size_int + sizeof(unsigned int) - 1) / sizeof(unsigned int);
+
+    /* may reallocate (and therefore move) the buffer */
+    ret = check_size(metadata_ptr, entry_size_int);
+    if (ret < 0) {
+        return ret;
+    }
+    metadata = *metadata_ptr;
+    /* free-slot index entry holds the word offset where the new entry goes */
+    index_offset = metadata->size_int - metadata->count - 1;
+    data_offset = *((unsigned int *)metadata + index_offset);
+
+    entry = (radio_metadata_entry_t *)((unsigned int *)metadata + data_offset);
+    entry->key = key;
+    entry->type = type;
+    entry->size = size;
+    memcpy(entry->data, value, size);
+
+    /* publish the next free-slot offset one word below, then count the entry */
+    data_offset += entry_size_int;
+    *((unsigned int *)metadata + index_offset -1) = data_offset;
+    metadata->count++;
+    return 0;
+}
+
+/*
+ * Return a pointer to the meta data entry at the given index.
+ * If check is true, the index and the stored data offset are validated
+ * against the buffer bounds and NULL is returned on any inconsistency.
+ * If check is false, the caller guarantees the index is in range (it may
+ * legitimately be count, i.e. the free slot, when probing where the last
+ * entry ends).
+ */
+radio_metadata_entry_t *get_entry_at_index(
+                                    const radio_metadata_buffer_t *metadata,
+                                    const unsigned index,
+                                    bool check)
+{
+    unsigned int index_offset;
+    unsigned int data_offset;
+
+    /* validate the index BEFORE using it to address the index table:
+     * computing size_int - index - 1 for an out of range index would
+     * read memory outside the buffer */
+    if (check && index >= metadata->count) {
+        return NULL;
+    }
+
+    index_offset = metadata->size_int - index - 1;
+    data_offset = *((unsigned int *)metadata + index_offset);
+
+    if (check) {
+        unsigned int min_offset;
+        unsigned int max_offset;
+        unsigned int min_entry_size_int;
+        /* entry data can only start past the buffer header */
+        min_offset = (sizeof(radio_metadata_buffer_t) + sizeof(unsigned int) - 1) /
+                        sizeof(unsigned int);
+        if (data_offset < min_offset) {
+            return NULL;
+        }
+        /* and must leave room for at least a minimal entry below the index table */
+        min_entry_size_int = 1 + sizeof(radio_metadata_entry_t);
+        min_entry_size_int = (min_entry_size_int + sizeof(unsigned int) - 1) / sizeof(unsigned int);
+        max_offset = metadata->size_int - metadata->count - 1 - min_entry_size_int;
+        if (data_offset > max_offset) {
+            return NULL;
+        }
+    }
+    return (radio_metadata_entry_t *)((unsigned int *)metadata + data_offset);
+}
+
+/**
+ * metadata API functions
+ */
+
+/* Map a key to its value type; invalid keys yield RADIO_METADATA_TYPE_INVALID. */
+radio_metadata_type_t radio_metadata_type_of_key(const radio_metadata_key_t key)
+{
+    return is_valid_metadata_key(key) ?
+            metadata_key_type_table[key - RADIO_METADATA_KEY_MIN] :
+            RADIO_METADATA_TYPE_INVALID;
+}
+
+/*
+ * Allocate and initialize an empty meta data buffer for the given channel
+ * and sub channel.  The last word of the buffer is the index table's
+ * free-slot entry: it holds the word offset just past the header, where
+ * the first entry's data will be written.
+ * Returns 0 on success, -ENOMEM if allocation fails.
+ */
+int radio_metadata_allocate(radio_metadata_t **metadata,
+                            const unsigned int channel,
+                            const unsigned int sub_channel)
+{
+    radio_metadata_buffer_t *buf;
+    unsigned int header_size_int;
+
+    buf = (radio_metadata_buffer_t *)calloc(RADIO_METADATA_DEFAULT_SIZE, sizeof(unsigned int));
+    if (buf == NULL) {
+        return -ENOMEM;
+    }
+
+    header_size_int = (sizeof(radio_metadata_buffer_t) + sizeof(unsigned int) - 1) /
+            sizeof(unsigned int);
+
+    buf->channel = channel;
+    buf->sub_channel = sub_channel;
+    buf->size_int = RADIO_METADATA_DEFAULT_SIZE;
+    *((unsigned int *)buf + RADIO_METADATA_DEFAULT_SIZE - 1) = header_size_int;
+    *metadata = (radio_metadata_t *)buf;
+    return 0;
+}
+
+/* Release a buffer obtained from radio_metadata_allocate(). */
+void radio_metadata_deallocate(radio_metadata_t *metadata)
+{
+    free(metadata); /* free(NULL) is a no-op, so no guard is needed */
+}
+
+/*
+ * Append an integer entry; the key must map to RADIO_METADATA_TYPE_INT.
+ * Returns 0 on success, -EINVAL on bad arguments, -ENOMEM if the buffer
+ * cannot grow.
+ */
+int radio_metadata_add_int(radio_metadata_t **metadata,
+                           const radio_metadata_key_t key,
+                           const int value)
+{
+    if (metadata == NULL || *metadata == NULL) {
+        return -EINVAL;
+    }
+    if (radio_metadata_type_of_key(key) != RADIO_METADATA_TYPE_INT) {
+        return -EINVAL;
+    }
+    return add_metadata((radio_metadata_buffer_t **)metadata,
+                        key, RADIO_METADATA_TYPE_INT, &value, sizeof(int));
+}
+
+/*
+ * Append a text entry; the key must map to RADIO_METADATA_TYPE_TEXT and
+ * the string (excluding the terminating NUL, which is stored with the
+ * value) must be shorter than RADIO_METADATA_TEXT_LEN_MAX.
+ * Returns 0 on success, -EINVAL on bad arguments, -ENOMEM if the buffer
+ * cannot grow.
+ */
+int radio_metadata_add_text(radio_metadata_t **metadata,
+                            const radio_metadata_key_t key,
+                            const char *value)
+{
+    radio_metadata_type_t type = radio_metadata_type_of_key(key);
+    size_t len;
+
+    if (metadata == NULL || *metadata == NULL || type != RADIO_METADATA_TYPE_TEXT ||
+            value == NULL) {
+        return -EINVAL;
+    }
+    /* compute the length once (it was previously computed twice: once for
+     * validation and once for the copy size) */
+    len = strlen(value);
+    if (len >= RADIO_METADATA_TEXT_LEN_MAX) {
+        return -EINVAL;
+    }
+    return add_metadata((radio_metadata_buffer_t **)metadata, key, type, value,
+                        (unsigned int)len + 1);
+}
+
+/*
+ * Append a raw (binary) entry; the key must map to RADIO_METADATA_TYPE_RAW.
+ * Returns 0 on success, -EINVAL on bad arguments, -ENOMEM if the buffer
+ * cannot grow.
+ */
+int radio_metadata_add_raw(radio_metadata_t **metadata,
+                           const radio_metadata_key_t key,
+                           const unsigned char *value,
+                           const unsigned int size)
+{
+    radio_metadata_type_t type = radio_metadata_type_of_key(key);
+    if (metadata == NULL || *metadata == NULL || type != RADIO_METADATA_TYPE_RAW || value == NULL) {
+        return -EINVAL;
+    }
+    /* reject sizes that would wrap the unsigned computation
+     * size + sizeof(radio_metadata_entry_t) (rounded up) in add_metadata():
+     * a wrapped entry size would under-allocate and memcpy() past the end
+     * of the buffer */
+    if (size > UINT_MAX - sizeof(radio_metadata_entry_t) - sizeof(unsigned int)) {
+        return -EINVAL;
+    }
+    return add_metadata((radio_metadata_buffer_t **)metadata, key, type, value, size);
+}
+
+/*
+ * Copy every entry of src_metadata into *dst_metadata, allocating the
+ * destination if *dst_metadata is NULL.  The destination channel and sub
+ * channel are overwritten with the source's.  Entries that cannot be read
+ * from the source are skipped; copying stops on the first write error.
+ * Returns 0 on success (including an empty source) or a negative errno.
+ */
+int radio_metadata_add_metadata(radio_metadata_t **dst_metadata,
+                           radio_metadata_t *src_metadata)
+{
+    radio_metadata_buffer_t *src_metadata_buf = (radio_metadata_buffer_t *)src_metadata;
+    radio_metadata_buffer_t *dst_metadata_buf;
+    /* initialized: with an empty source the loop never runs and the
+     * previous code returned an uninitialized status (undefined behavior) */
+    int status = 0;
+    unsigned int index;
+
+    if (dst_metadata == NULL || src_metadata == NULL) {
+        return -EINVAL;
+    }
+    if (*dst_metadata == NULL) {
+        status = radio_metadata_allocate(dst_metadata, src_metadata_buf->channel,
+                                src_metadata_buf->sub_channel);
+        if (status != 0) {
+            return status;
+        }
+    }
+
+    dst_metadata_buf = (radio_metadata_buffer_t *)*dst_metadata;
+    dst_metadata_buf->channel = src_metadata_buf->channel;
+    dst_metadata_buf->sub_channel = src_metadata_buf->sub_channel;
+
+    for (index = 0; index < src_metadata_buf->count; index++) {
+        radio_metadata_key_t key;
+        radio_metadata_type_t type;
+        void *value;
+        unsigned int size;
+        status = radio_metadata_get_at_index(src_metadata, index, &key, &type, &value, &size);
+        if (status != 0)
+            continue;   /* skip unreadable source entries */
+        status = add_metadata((radio_metadata_buffer_t **)dst_metadata, key, type, value, size);
+        if (status != 0)
+            break;      /* destination write failed: stop and report */
+    }
+    return status;
+}
+
+/*
+ * Validate a meta data buffer received from an untrusted source.
+ * Checks the overall size, that the declared entry count can fit in the
+ * buffer, and that every entry has a valid key/type and stays within the
+ * space before the next entry (or the free slot).
+ * Returns 0 if the buffer is consistent, -EINVAL otherwise.
+ */
+int radio_metadata_check(const radio_metadata_t *metadata)
+{
+    radio_metadata_buffer_t *metadata_buf =
+            (radio_metadata_buffer_t *)metadata;
+    unsigned int count;
+    unsigned int min_entry_size_int;
+    unsigned int header_size_int;
+
+    if (metadata_buf == NULL) {
+        return -EINVAL;
+    }
+
+    if (metadata_buf->size_int > RADIO_METADATA_MAX_SIZE) {
+        return -EINVAL;
+    }
+
+    /* sanity check on entry count versus buffer size.
+     * Equivalent to: count * min_entry_size_int + count + 1 + header > size_int,
+     * rewritten as a division so a hostile count cannot wrap the
+     * multiplication and slip past the check (each entry needs one index
+     * word plus at least min_entry_size_int data words). */
+    min_entry_size_int = 1 + sizeof(radio_metadata_entry_t);
+    min_entry_size_int = (min_entry_size_int + sizeof(unsigned int) - 1) /
+                                sizeof(unsigned int);
+    header_size_int = (sizeof(radio_metadata_buffer_t) + sizeof(unsigned int) - 1) /
+                                sizeof(unsigned int);
+    if (metadata_buf->size_int < header_size_int + 1 ||
+            metadata_buf->count >
+                (metadata_buf->size_int - header_size_int - 1) /
+                    (min_entry_size_int + 1)) {
+        return -EINVAL;
+    }
+
+    /* sanity check on each entry */
+    for (count = 0; count < metadata_buf->count; count++) {
+        radio_metadata_entry_t *entry = get_entry_at_index(metadata_buf, count, true);
+        radio_metadata_entry_t *next_entry;
+        if (entry == NULL) {
+            return -EINVAL;
+        }
+        if (!is_valid_metadata_key(entry->key)) {
+            return -EINVAL;
+        }
+        if (entry->type != radio_metadata_type_of_key(entry->key)) {
+            return -EINVAL;
+        }
+
+        /* do not request check because next entry can be the free slot */
+        next_entry = get_entry_at_index(metadata_buf, count + 1, false);
+        if ((char *)entry->data + entry->size > (char *)next_entry) {
+            return -EINVAL;
+        }
+    }
+
+    return 0;
+}
+
+/* Total buffer size in bytes, or 0 for a NULL buffer. */
+size_t radio_metadata_get_size(const radio_metadata_t *metadata)
+{
+    const radio_metadata_buffer_t *buf = (const radio_metadata_buffer_t *)metadata;
+
+    return (buf == NULL) ? 0 : (size_t)buf->size_int * sizeof(unsigned int);
+}
+
+/* Number of entries in the buffer, or -EINVAL for a NULL buffer. */
+int radio_metadata_get_count(const radio_metadata_t *metadata)
+{
+    const radio_metadata_buffer_t *buf = (const radio_metadata_buffer_t *)metadata;
+
+    if (buf == NULL) {
+        return -EINVAL;
+    }
+    return (int)buf->count;
+}
+
+/*
+ * Read the entry at the given index.
+ * type/value/size receive the entry's type, the address of its data inside
+ * the buffer (no copy is made; valid only as long as the buffer is), and
+ * the data size in bytes.
+ * Returns 0 on success, -EINVAL on NULL arguments or out of range index.
+ */
+int radio_metadata_get_at_index(const radio_metadata_t *metadata,
+                                const unsigned int index,
+                                radio_metadata_key_t *key,
+                                radio_metadata_type_t *type,
+                                void **value,
+                                unsigned int *size)
+{
+    radio_metadata_entry_t *entry;
+    radio_metadata_buffer_t *metadata_buf =
+            (radio_metadata_buffer_t *)metadata;
+
+    if (metadata_buf == NULL || key == NULL || type == NULL ||
+            value == NULL || size == NULL) {
+        return -EINVAL;
+    }
+    if (index >= metadata_buf->count) {
+        return -EINVAL;
+    }
+
+    /* index already validated above, so skip the internal checks */
+    entry = get_entry_at_index(metadata_buf, index, false);
+    *key = entry->key;
+    *type = entry->type;
+    *value = (void *)entry->data;
+    *size = entry->size;
+
+    return 0;
+}
+
+/*
+ * Find the first entry whose key matches and return its type, the address
+ * of its data inside the buffer, and its size in bytes.
+ * Returns 0 on success, -EINVAL on bad arguments, -ENOENT if no entry
+ * with the key exists.
+ */
+int radio_metadata_get_from_key(const radio_metadata_t *metadata,
+                                const radio_metadata_key_t key,
+                                radio_metadata_type_t *type,
+                                void **value,
+                                unsigned int *size)
+{
+    unsigned int i;
+    radio_metadata_buffer_t *buf = (radio_metadata_buffer_t *)metadata;
+
+    if (buf == NULL || type == NULL || value == NULL || size == NULL) {
+        return -EINVAL;
+    }
+    if (!is_valid_metadata_key(key)) {
+        return -EINVAL;
+    }
+
+    /* linear scan: return the first matching entry */
+    for (i = 0; i < buf->count; i++) {
+        radio_metadata_entry_t *entry = get_entry_at_index(buf, i, false);
+        if (entry->key == key) {
+            *type = entry->type;
+            *value = (void *)entry->data;
+            *size = entry->size;
+            return 0;
+        }
+    }
+    return -ENOENT;
+}