diff --git a/CHANGELOG.md b/CHANGELOG.md
index 3ea0951..2ef9c44 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,36 @@
# Changelog
+## [2.1.0]
+
+### Features
+- Updated OpenXR Plugin dependency to `1.10.0`.
+- Added `Magic Leap 2 Facial Expressions` OpenXR Feature.
+- Added `Magic Leap 2 Environmental Meshing` OpenXR Feature.
+- Added `Magic Leap 2 Spatial Anchors` OpenXR Feature.
+- Added `Magic Leap 2 Spatial Anchors Storage` OpenXR Feature.
+- Added `MagicLeap.Android.Permissions`, a new and improved Permissions API.
+- Added an `AprilTagSettings` struct to the `Magic Leap 2 Marker Understanding` OpenXR Feature.
+
+### Experimental
+- Added `AndroidCamera` APIs for performing basic YUV and JPEG camera capture.
+
+### Bugfixes
+- Fixed issue where JPEG screen capture with `MLCamera` was not displaying an image.
+- Fixed legacy `MLPlanes` subsystem not being initialized when using Magic Leap XR Provider.
+- Added project validation rules to check for Player Settings required by Magic Leap 2.
+- Fixed `MLCVCamera` being unable to query head pose from the MLSDK when the OpenXR Provider is active.
+- Fixed `MLMarkerTracker` issue where detected `AprilTag` markers were flipped.
+
+### Deprecations & Removals
+- Removed the Preferences > External Tools > Magic Leap settings pane.
+- Marked `MLPermissions` obsolete. Use `MagicLeap.Android.Permissions` instead.
+
+### Known Issues
+- `MLAnchors` API does not work when the `Magic Leap 2 Spatial Anchors` or `Magic Leap 2 Spatial Anchors Storage` OpenXR Features are enabled.
+
+### Misc.
+- Renamed the ML Rig & Inputs sample's input actions asset, as well as the legacy `MagicLeapInputs` asset, to make the differences between them clear.
+
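For orientation, here is a minimal sketch of requesting a runtime permission with the new API. The exact method and callback signatures of `MagicLeap.Android.Permissions` are assumptions modeled on the deprecated `MLPermissions` pattern, so treat this as illustrative rather than authoritative:

```csharp
using MagicLeap.Android;
using UnityEngine;

public class EyeTrackingPermissionExample : MonoBehaviour
{
    void Start()
    {
        // Assumed call shape: permission string plus granted/denied callbacks.
        Permissions.RequestPermission(
            "com.magicleap.permission.EYE_TRACKING",
            permission => Debug.Log($"{permission} granted"),
            permission => Debug.LogWarning($"{permission} denied"));
    }
}
```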
## [2.0.0]
### Features
diff --git a/Editor/APKBuilder.cs b/Editor/APKBuilder.cs
index cb993c6..bf50ca1 100644
--- a/Editor/APKBuilder.cs
+++ b/Editor/APKBuilder.cs
@@ -15,13 +15,6 @@ public class APKBuilder
private static readonly string Arg_ForceSDKPathFromEnvVar = "--force_sdk_path_from_env_var";
private static readonly string Arg_AppVersionCodeValue = "--version-code";
-#if !UNITY_2022_2_OR_NEWER
- private static readonly Dictionary<BuildTarget, BuildTargetGroup> BuildTargetToGroup = new Dictionary<BuildTarget, BuildTargetGroup>()
- {
- { BuildTarget.Relish, BuildTargetGroup.Relish }
- };
-#endif
-
private bool didSetSDKPathFromEnvVar = false;
private class BuildSettingsCache
@@ -71,15 +64,8 @@ private void Build()
{
throw new System.Exception("You must be on the Android Build Target to use APKBuilder.");
}
-#else
- if (!BuildTargetToGroup.ContainsKey(EditorUserBuildSettings.activeBuildTarget))
- {
- throw new System.Exception($"Unsupported build target {EditorUserBuildSettings.activeBuildTarget} for APKBuilder. Only Relish build target is supported.");
- }
#endif
- SetupSDKPaths(System.Array.IndexOf(System.Environment.GetCommandLineArgs(), Arg_ForceSDKPathFromEnvVar) != -1);
-
BuildSettingsCache buildSettingsCache = new BuildSettingsCache();
try
@@ -252,43 +238,6 @@ private bool TryGetArgValue(string arg, ref string value)
return false;
}
- /// <summary>
- /// Magic Leap SDK is required during the build process to include the
- /// correct permissions in AndroidManifest.xml.
- /// </summary>
- /// <param name="bForceSetFromEnvVar">Force setting the SDK path from the environment variable.</param>
- private void SetupSDKPaths(bool bForceSetFromEnvVar)
- {
- bool didSetFromEnvVar = false;
- if (bForceSetFromEnvVar || !MagicLeapSDKUtil.SdkAvailable)
- {
- string relishSDKPath = System.Environment.GetEnvironmentVariable("RELISHSDK_UNITY");
- if (string.IsNullOrEmpty(relishSDKPath))
- {
- Debug.LogWarning("No SDK path found for Relish in editor preferences or RELISHSDK_UNITY environment variable. Build will probably fail OR Magic Leap permissions will not be included in AndroidManifest.xml.");
- }
- else
- {
- if (bForceSetFromEnvVar)
- {
- Debug.LogFormat("{0} was passed as cmd line arg. Force setting Relish SDK Path to {1} from env vars.", Arg_ForceSDKPathFromEnvVar, relishSDKPath);
- }
- else
- {
- Debug.LogFormat("No SDK path set for Relish in editor preferences. Using {0} from env vars.", relishSDKPath);
- }
- MagicLeapSDKUtil.SdkPath = relishSDKPath;
- didSetFromEnvVar = true;
- }
- }
- else
- {
- Debug.LogFormat("Relish SDK found in editor preferences at {0}", MagicLeapSDKUtil.SdkPath);
- }
-
- this.didSetSDKPathFromEnvVar = didSetFromEnvVar;
- }
-
[Serializable]
private class SimplePackageManifest
{
diff --git a/Editor/MLAppSim/AppSimShimLibSupport.cs b/Editor/MLAppSim/AppSimShimLibSupport.cs
deleted file mode 100644
index dd2cec2..0000000
--- a/Editor/MLAppSim/AppSimShimLibSupport.cs
+++ /dev/null
@@ -1,114 +0,0 @@
-// %BANNER_BEGIN%
-// ---------------------------------------------------------------------
-// %COPYRIGHT_BEGIN%
-// Copyright (c) (2021-2022) Magic Leap, Inc. All Rights Reserved.
-// Use of this file is governed by the Software License Agreement, located here: https://www.magicleap.com/software-license-agreement-ml2
-// Terms and conditions applicable to third-party materials accompanying this distribution may also be found in the top-level NOTICE file appearing herein.
-// %COPYRIGHT_END%
-// ---------------------------------------------------------------------
-// %BANNER_END%
-
-using System;
-using System.Collections.Generic;
-using System.IO;
-using UnityEngine;
-using System.Linq;
-using UnityEngine.XR.MagicLeap;
-using UnityEngine.XR.Management;
-using UnityEditor.XR.Management;
-#if UNITY_OPENXR_1_9_0_OR_NEWER
-using UnityEngine.XR.OpenXR;
-#endif
-
-namespace UnityEditor.XR.MagicLeap
-{
- public static class AppSimShimLibSupport
- {
- private static string LaunchProcess => Path.Combine(MagicLeapSDKUtil.AppSimRuntimePath, "bin/ZIDiscovery");
-
- public static readonly string SessionStateKey_ZISearchPaths = "ZI_SEARCH_PATHS";
- private static List<string> libSearchPaths = new List<string>();
-
- /// <summary>
- /// Gets called before the XR Loader starts initializing all the subsystems.
- /// Runs ZIDiscovery.exe to get the list of folders where the ZI shim
- /// libs reside and sends them to the SDK loader lib.
- /// </summary>
- ///
- [RuntimeInitializeOnLoadMethod(RuntimeInitializeLoadType.SubsystemRegistration)]
- private static void CheckForLibrarySearchPaths()
- {
- var settings = XRGeneralSettingsPerBuildTarget.XRGeneralSettingsForBuildTarget(BuildTargetGroup.Standalone);
- if (settings == null || settings.Manager == null )
- {
- return;
- }
-
- bool foundSupportedLoader = false;
-#if UNITY_XR_MAGICLEAP_PROVIDER
- foundSupportedLoader = settings.Manager.activeLoaders.Any(l => l is MagicLeapLoader);
-#elif UNITY_OPENXR_1_9_0_OR_NEWER
- foundSupportedLoader = settings.Manager.activeLoaders.Any(l => l is OpenXRLoader);
-#endif
-
- if (!foundSupportedLoader)
- {
- Debug.LogError("No supported XR loader found for AppSim");
- return;
- }
-
- string cachedSearchPaths = SessionState.GetString(SessionStateKey_ZISearchPaths, string.Empty);
- if (string.IsNullOrEmpty(cachedSearchPaths))
- {
- var ziRuntime = MagicLeapSDKUtil.AppSimRuntimePath;
- if (string.IsNullOrEmpty(ziRuntime))
- {
- Debug.LogError("Zero Iteration Runtime path is not set.");
- SettingsService.OpenUserPreferences("Preferences/External Tools/Magic Leap");
- return;
- }
-
-#if UNITY_EDITOR_WIN
- ziRuntime = ziRuntime.Replace("/", "\\");
-#endif
- var startInfo = new System.Diagnostics.ProcessStartInfo
- {
- UseShellExecute = false,
- FileName = LaunchProcess,
- Arguments = $"-m\"{MagicLeapSDKUtil.SdkPath}\" -p",
- CreateNoWindow = true,
- RedirectStandardOutput = true,
- RedirectStandardError = true
- };
-
- var discoveryProc = new System.Diagnostics.Process();
- discoveryProc.EnableRaisingEvents = true;
- discoveryProc.StartInfo = startInfo;
-
- discoveryProc.Start();
-
- StreamReader outputStream = discoveryProc.StandardOutput;
-
- string output = outputStream.ReadToEnd();
-
- discoveryProc.WaitForExit();
-
- if (discoveryProc.ExitCode != 0)
- {
- StreamReader errorStream = discoveryProc.StandardError;
- var error = errorStream.ReadToEnd();
- Debug.LogError($"ZIDiscovery returned with code {discoveryProc.ExitCode}: {error}");
- return;
- }
-
- libSearchPaths = new List<string>(output.Split(new string[] { "\n", "\r\n" }, StringSplitOptions.RemoveEmptyEntries));
- SessionState.SetString(SessionStateKey_ZISearchPaths, string.Join(Path.PathSeparator, libSearchPaths));
- }
- else
- {
- libSearchPaths = new List<string>(cachedSearchPaths.Split(Path.PathSeparator));
- }
-
- MagicLeapXrProvider.AddLibrarySearchPaths(libSearchPaths, settings.Manager.activeLoaders);
- }
- }
-}
diff --git a/Editor/MLAppSim/LabDriverControl.cs b/Editor/MLAppSim/LabDriverControl.cs
deleted file mode 100644
index 0f816a9..0000000
--- a/Editor/MLAppSim/LabDriverControl.cs
+++ /dev/null
@@ -1,431 +0,0 @@
-using System;
-using System.Collections.Generic;
-using System.Diagnostics;
-using System.IO;
-using System.Linq;
-using System.Text;
-using UnityEditor;
-using UnityEditor.UIElements;
-using UnityEngine;
-using UnityEngine.Rendering;
-using UnityEngine.UIElements;
-
-namespace UnityEditor.XR.MagicLeap
-{
- public class LabDriverControl : EditorWindow
- {
- private IMGUIContainer _remoteChecksUi;
- private VisualElement _mainVisualContainer;
-
- private string[] _availablePackages = new string[] { };
-
- internal static StringBuilder s_LabdriverLog = new StringBuilder();
- internal static StringBuilder s_LabdriverErrorLog = new StringBuilder();
-
- internal static bool s_LabdriverIsRunning = false;
- internal static bool s_LabdriverExit = false;
- internal static bool s_LabdriverOutputDone = false;
- internal static bool s_LabdriverIsImport = false;
- internal static Action s_LabdriverOnComplete = null;
-
- private static int progressId;
-
- private static Process s_Process;
-
- internal class JSONObject
- {
- public List<JSONResult> results = null;
- public bool success;
- }
-
- [Serializable]
- internal class JSONResult
- {
- public List<string> output = null;
- public List<string> error = null;
- }
-
- private void OnDisable()
- {
- }
-
- private void OnEnable()
- {
- _remoteChecksUi = new IMGUIContainer(OnRemoteChecksUI);
- _mainVisualContainer = new VisualElement()
- {
- name = "MainVisualContainer"
- };
- _mainVisualContainer.Add(_remoteChecksUi);
- var root = this.rootVisualElement;
- root.Add(_mainVisualContainer);
-
- _availablePackages = MagicLeapPackageLocator.GetUnityPackages().ToArray();
- }
-
- private void OnGUI()
- {
-
- }
-
- private void OnRemoteChecksUI()
- {
- GUILayout.Label("Magic Leap App Simulator Requirements", EditorStyles.boldLabel);
-
- using (new GUILayout.HorizontalScope())
- {
- if (GUILayout.Button("Import MagicLeap unitypackage"))
- {
- var rect = GUILayoutUtility.GetLastRect();
- var versions = new GenericMenu();
- foreach (var pkg in _availablePackages)
- {
- versions.AddItem(new GUIContent(pkg), false, InstallPackage, pkg);
- }
- // show options as a drop down.
- versions.DropDown(rect);
- }
- }
- }
-
- //[MenuItem("Window/XR/MagicLeap Dashboard", false, 1)]
- private static void Display()
- {
- // Get existing open window or if none, make a new one:
- EditorWindow.GetWindow<LabDriverControl>(false, "ML Dashboard").Show();
- }
-
- private void InstallPackage(object p)
- {
- var path = p as string;
- UnityEngine.Debug.LogFormat("Importing: {0}", path);
- AssetDatabase.ImportPackage(path, true);
- }
-
- // EditorUtility.DisplayProgressbar must be called from the main thread
- // Typically you would call it from OnGUI, but this class may not have its own GUI calls
- // so instead we place it in the Editor's global update ticks.
- private static void ShowProgressDialog()
- {
- if (s_LabdriverIsRunning)
- {
- EditorUtility.DisplayProgressBar("Magic Leap Hub", "Issuing command to Magic Leap Hub...", 1f);
- }
- else
- {
- EditorUtility.ClearProgressBar();
- EditorApplication.update -= ShowProgressDialog;
- }
- }
-
- internal static void LaunchLabDriver(List<string> args, Action<bool, string> onComplete, bool importCommand = false, bool useVirtualDevice = false)
- {
- EditorApplication.update += ShowProgressDialog;
-
- s_LabdriverIsRunning = true;
- s_LabdriverExit = false;
- s_LabdriverOutputDone = false;
- s_LabdriverLog.Clear();
-
- var startInfo = new ProcessStartInfo
- {
- UseShellExecute = false,
-#if UNITY_EDITOR_WIN
- FileName = "cmd.exe",
-#else // OSX or Linux
- FileName = "/bin/sh",
-#endif
- CreateNoWindow = true,
- RedirectStandardOutput = true,
- RedirectStandardError = true
- };
-
- // copy arguments into new-style ArgumentList, which handles spaces properly
-#if UNITY_EDITOR_WIN
- // i.e. "cmd /C labdriver ..." runs this command rather than an interactive shell
- startInfo.ArgumentList.Add("/C");
-#endif
- foreach (string arg in args)
- {
- startInfo.ArgumentList.Add(arg);
- }
-
- if (useVirtualDevice)
- {
- if (!startInfo.EnvironmentVariables.ContainsKey("ML_ZI_ROOT"))
- {
- startInfo.EnvironmentVariables.Add("ML_ZI_ROOT", MagicLeapSDKUtil.SdkPath + "/VirtualDevice");
- }
- }
-
- s_Process = new System.Diagnostics.Process();
- s_Process.EnableRaisingEvents = true;
- s_Process.OutputDataReceived += OnOutputReceived;
- s_Process.Exited += OnProcessExit;
- s_Process.ErrorDataReceived += OnErrorReceived;
- s_Process.StartInfo = startInfo;
-
- s_LabdriverIsImport = importCommand;
- s_LabdriverOnComplete = onComplete;
- s_LabdriverLog.Clear();
- s_LabdriverErrorLog.Clear();
-
- progressId = Progress.Start("Running labdriver process");
-
- s_Process.Start();
- s_Process.BeginOutputReadLine();
- s_Process.BeginErrorReadLine();
- }
-
- private static void LaunchLabDriverCommand(List<string> commands, Action<bool, string> onComplete)
- {
- var sdkPath = MagicLeapSDKUtil.SdkPath;
- if (string.IsNullOrEmpty(sdkPath))
- {
- UnityEngine.Debug.LogError("Magic Leap SDK path not configured!");
- return;
- }
- else if (!File.Exists(Path.Combine(sdkPath, "labdriver")))
- {
- UnityEngine.Debug.LogErrorFormat("labdriver executable not found in configured SDK path \"{0}\"! Make sure the path is valid.", sdkPath);
- return;
- }
- if (!s_LabdriverIsRunning)
- {
- UnityEngine.Debug.Log("Launching labdriver with: " + string.Join(" ", commands));
-#if UNITY_EDITOR_WIN
- commands.InsertRange(0, new List{$"{sdkPath}/labdriver.cmd", "-pretty"});
-#else // OSX or Linux
- commands.InsertRange(0, new List{$"{sdkPath}/labdriver", "-pretty"});
-#endif
- LaunchLabDriver(commands, onComplete);
- }
- else
- {
- UnityEngine.Debug.Log("Previous Magic Leap Hub command is still running. Please wait until it completes.");
- }
- }
-
- private static void HandleLabDriverResult(bool success, string json)
- {
- if (!success)
- {
- UnityEngine.Debug.LogError("Magic Leap Hub command failed:\n" + json);
- }
- }
-
- [MenuItem("Magic Leap/Launch Magic Leap Hub")]
- private static void LaunchHub()
- {
- LaunchLabDriverCommand(new List{"start-gui"}, HandleLabDriverResult);
- }
-
- [MenuItem("Magic Leap/Save Diagnostic Logs...")]
- private static void SaveLogs()
- {
- bool result = EditorUtility.DisplayDialog("Privacy Notice",
- String.Concat(
- "Create an error report file (.zip) to help us diagnose problems.\n\n",
- "(Note: if you are using a Magic Leap device, please connect it now.)\n\n",
- "Ask a question in the Magic Leap Developer Portal (https://developer.magicleap.cloud/support) " +
- "and attach the error report .zip file.\n\n",
- "Error reports are public, and the .zip file may contain identifying information, so you should inspect the .zip before sending.\n\n",
- "See https://developer-docs.magicleap.cloud/docs/guides/developer-tools/ml-hub/error-reporting"
- ), "Ok", "Cancel");
- if (!result)
- {
- return;
- }
-
- string dateTime = DateTime.Now.ToString("yyyyMMddHHmmss");
- string tempFile = $"MLHubLogs-{dateTime}.zip";
- string tempFileDir = Path.GetTempPath();
- string tempFilePath = Path.Combine(tempFileDir, tempFile);
-
- void OpenLogFile(bool success, string json)
- {
- if (!success)
- {
- HandleLabDriverResult(success, json);
- return;
- }
-
- // reveal file in explorer/finder
- ProcessStartInfo startInfo = null;
-#if UNITY_EDITOR_OSX
- startInfo = new ProcessStartInfo
- {
- UseShellExecute = false,
- FileName = "/usr/bin/open",
- ArgumentList = { "-R", tempFilePath },
- CreateNoWindow = true
- };
-#elif UNITY_EDITOR_WIN
- startInfo = new ProcessStartInfo
- {
- UseShellExecute = false,
- FileName = "explorer.exe",
- ArgumentList = { "/select,", tempFilePath }, // the embedded comma is needed
- CreateNoWindow = true
- };
-#endif
- if (startInfo != null)
- {
- var process = new System.Diagnostics.Process();
- process.StartInfo = startInfo;
- process.Start();
- }
- }
-
- LaunchLabDriverCommand(new List{"save-logs", tempFilePath}, OpenLogFile);
- }
-
- private static void WaitForComplete()
- {
- // Confirm the process has exited and the output has completed before processing logs.
- if (s_LabdriverExit && s_LabdriverOutputDone)
- {
- s_Process.CancelErrorRead();
- s_Process.CancelOutputRead();
- String finalResult = s_LabdriverLog.ToString();
-
- s_LabdriverIsRunning = false;
-
- Progress.Report(progressId, 1.0f);
-
- if (!String.IsNullOrEmpty(finalResult))
- {
- JSONObject fullLog = JsonUtility.FromJson<JSONObject>(finalResult);
-
- if (!fullLog.success)
- {
- ProcessLogs(fullLog);
- }
- else if (s_LabdriverIsImport)
- {
- IEnumerable<string> ZILibraries = fullLog.results[0].output;
-
- // TODO : Lib discovery is no longer run via LabDriverControl.
- // Refactor this class to remove unnecessary code.
- //ZeroIterationImportSupport.DiscoveryReturned(ZILibraries);
- }
-
- Progress.Remove(progressId);
-
- if (s_LabdriverOnComplete != null)
- {
- // success
- s_LabdriverOnComplete(true, finalResult);
- }
- }
- else
- {
- String finalError = s_LabdriverErrorLog.ToString();
-
- Progress.Remove(progressId);
-
- if (s_LabdriverOnComplete != null)
- {
- // failure
- s_LabdriverOnComplete(false, finalError);
- }
- }
-
- s_LabdriverIsImport = false;
- s_LabdriverOnComplete = null;
- }
- }
-
- private static void OnOutputReceived(object sender, DataReceivedEventArgs e)
- {
- if (!String.IsNullOrEmpty(e.Data))
- {
- s_LabdriverLog.Append(e.Data);
-
- string statusCheck = s_LabdriverLog.ToString();
-
- if ((statusCheck.Substring(statusCheck.Length - 1, 1) == "}") && statusCheck.Contains("\"success\":"))
- {
- s_LabdriverOutputDone = true;
- WaitForComplete();
- }
- s_LabdriverLog.Append(' ');
- }
- else
- {
- if (!s_LabdriverOutputDone)
- {
- s_LabdriverOutputDone = true;
- WaitForComplete();
- }
- }
- }
-
- private static void OnErrorReceived(object sender, DataReceivedEventArgs e)
- {
- if (!String.IsNullOrEmpty(e.Data))
- {
- s_LabdriverErrorLog.Append(e.Data);
- s_LabdriverErrorLog.Append(' ');
- }
- }
-
- private static void OnProcessExit(object sender, EventArgs e)
- {
- s_LabdriverExit = true;
-
- WaitForComplete();
- }
-
- private static void ProcessLogs(JSONObject currentLogs)
- {
- string logToPrint = "";
-
- foreach (JSONResult result in currentLogs.results)
- {
- foreach (string item in result.error)
- {
- if (!String.IsNullOrEmpty(item))
- {
- logToPrint += "\n" + item;
- }
- }
- }
-
- if (String.IsNullOrEmpty(logToPrint) || s_LabdriverIsImport)
- {
- String currentAction = s_LabdriverIsImport ? "import support libraries" : "launch Magic Leap App Simulator";
- logToPrint = String.Format("Magic Leap Hub encountered an unknown error while attempting to {0}. " +
- "Please confirm Magic Leap Hub in installed and up to date with the Magic Leap App Simulator Module and " +
- "Magic Leap App Simulator Runtime package installed.\n", currentAction) + logToPrint;
- }
-
- String finalLog = "labdriver completed with errors. \nErrors:" + logToPrint;
-
- UnityEngine.Debug.LogError(finalLog);
- }
- }
-
-
- internal static class MagicLeapPackageLocator
- {
- public static IEnumerable<string> GetUnityPackages()
- {
- var tools = Path.Combine(MagicLeapRoot, "tools");
- return new DirectoryInfo(tools).GetFiles("*.unitypackage", SearchOption.AllDirectories).Select(fi => fi.FullName);
- }
-
- private static string HomeFolder
- {
- get
- {
- var home = Environment.GetEnvironmentVariable("USERPROFILE");
- return (string.IsNullOrEmpty(home))
- ? Environment.GetEnvironmentVariable("HOME")
- : home;
- }
- }
-
- public static string MagicLeapRoot { get { return Path.Combine(HomeFolder, "MagicLeap"); } }
- }
-}
diff --git a/Editor/MagicLeapSDKUtil.cs b/Editor/MagicLeapSDKUtil.cs
index e84b6d9..4568752 100644
--- a/Editor/MagicLeapSDKUtil.cs
+++ b/Editor/MagicLeapSDKUtil.cs
@@ -19,11 +19,7 @@ public sealed class MagicLeapSDKUtil
{
private const string kManifestPath = ".metadata/sdk.manifest";
private const string kMagicLeapSDKRoot = "MagicLeapSDKRoot";
-#if UNITY_2022_2_OR_NEWER
private const UnityEditor.BuildTarget kBuildTarget = BuildTarget.Android;
-#else
- private const UnityEditor.BuildTarget kBuildTarget = BuildTarget.Relish;
-#endif
private static uint minApiLevel = 0;
[Serializable]
@@ -42,9 +38,10 @@ public static bool SdkAvailable
{
get
{
- if (string.IsNullOrEmpty(SdkPath))
+ var path = SdkPath;
+ if (string.IsNullOrEmpty(path))
return false;
- return File.Exists(Path.Combine(SdkPath, kManifestPath));
+ return Directory.Exists(path.Replace(Path.AltDirectorySeparatorChar, Path.DirectorySeparatorChar));
}
}
@@ -69,7 +66,7 @@ public static uint MinimumApiLevel
}
/// <summary>
- /// MLSDK path for the relish target.
+ /// MLSDK path for the Android target.
/// </summary>
public static string SdkPath
{
@@ -77,14 +74,6 @@ public static string SdkPath
set { SetSDKPath(kBuildTarget, value); }
}
- public static string AppSimRuntimePath => MagicLeapEditorPreferences.ZeroIterationRuntimePath;
- public static bool SearchingForZI => MagicLeapEditorPreferences.RunningLabdriver;
- public static event Action<string> OnZeroIterationPathChanged
- {
- add { MagicLeapEditorPreferences.ZIRuntimePathChangeEvt += value; }
- remove { MagicLeapEditorPreferences.ZIRuntimePathChangeEvt -= value; }
- }
-
/// <summary>
/// MLSDK version
/// </summary>
@@ -105,7 +94,7 @@ public static void DeleteSDKPathFromEditorPrefs(BuildTarget target)
/// <summary>
/// Get the MLSDK path for the given build target platform.
/// </summary>
- /// <param name="target">Relish is the only valid target for now.</param>
+ /// <param name="target">Android is the only valid target for now.</param>
/// <returns></returns>
private static string GetSDKPath(BuildTarget target)
{
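Note that `SdkAvailable` now only verifies that the configured directory exists instead of probing for `.metadata/sdk.manifest`, so a build script can gate on it cheaply. A small sketch using only the public members shown above (the class and script names are hypothetical):

```csharp
using UnityEditor.XR.MagicLeap;
using UnityEngine;

public static class MLSdkPreflight
{
    // Logs the configured MLSDK path, or warns when none is set.
    public static bool EnsureSdkConfigured()
    {
        if (!MagicLeapSDKUtil.SdkAvailable)
        {
            Debug.LogWarning("MLSDK path is not set or does not exist; " +
                "AndroidManifest.xml may be missing Magic Leap permissions.");
            return false;
        }
        Debug.Log($"Using MLSDK at {MagicLeapSDKUtil.SdkPath}");
        return true;
    }
}
```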
diff --git a/Editor/OpenXR/MagicLeapFeatureGroup.cs b/Editor/OpenXR/MagicLeapFeatureGroup.cs
index c62c78a..e40e2a5 100644
--- a/Editor/OpenXR/MagicLeapFeatureGroup.cs
+++ b/Editor/OpenXR/MagicLeapFeatureGroup.cs
@@ -9,18 +9,18 @@
// %BANNER_END%
#if UNITY_OPENXR_1_9_0_OR_NEWER
-using UnityEditor;
-using UnityEditor.XR.OpenXR.Features;
+using UnityEngine.XR.OpenXR.Features.MagicLeapSupport;
-namespace UnityEngine.XR.OpenXR.Features.MagicLeapSupport
+namespace UnityEditor.XR.OpenXR.Features.MagicLeapSupport
{
[OpenXRFeatureSet(
UiName = "Magic Leap",
- Description = "All Magic Leap OpenXR Features",
+ Description = "Features supported by the Magic Leap 2 platform.",
FeatureSetId = "com.magicleap.openxr.featuregroup",
- SupportedBuildTargets = new [] { BuildTargetGroup.Android, BuildTargetGroup.Standalone },
- FeatureIds = new [] {
- MagicLeapFeature.FeatureId ,
+ SupportedBuildTargets = new [] { BuildTargetGroup.Android },
+ FeatureIds = new[] {
+ MagicLeapFeature.FeatureId,
+ MagicLeapControllerProfile.FeatureId,
MagicLeapRenderingExtensionsFeature.FeatureId,
MagicLeapReferenceSpacesFeature.FeatureId,
MagicLeapPlanesFeature.FeatureId,
@@ -28,22 +28,60 @@ namespace UnityEngine.XR.OpenXR.Features.MagicLeapSupport
MagicLeapSystemNotificationsFeature.FeatureId,
MagicLeapMarkerUnderstandingFeature.FeatureId,
MagicLeapLocalizationMapFeature.FeatureId,
- MagicLeapSpatialAnchorsFeature.FeatureId
- },
- RequiredFeatureIds = new[] {
- MagicLeapFeature.FeatureId
+ MagicLeapSpatialAnchorsFeature.FeatureId,
+ MagicLeapSpatialAnchorsStorageFeature.FeatureId,
+ MagicLeapFacialExpressionFeature.FeatureId,
+ MagicLeapMeshingFeature.FeatureId
},
DefaultFeatureIds = new[] {
+ MagicLeapFeature.FeatureId,
+ MagicLeapControllerProfile.FeatureId,
+ MagicLeapRenderingExtensionsFeature.FeatureId,
+ MagicLeapReferenceSpacesFeature.FeatureId,
+ MagicLeapPlanesFeature.FeatureId,
+ MagicLeapUserCalibrationFeature.FeatureId,
+ MagicLeapSystemNotificationsFeature.FeatureId,
+ MagicLeapMarkerUnderstandingFeature.FeatureId,
+ MagicLeapLocalizationMapFeature.FeatureId,
+ MagicLeapSpatialAnchorsFeature.FeatureId,
+ MagicLeapSpatialAnchorsStorageFeature.FeatureId,
+ MagicLeapFacialExpressionFeature.FeatureId,
+ MagicLeapMeshingFeature.FeatureId
+ }
+ )]
+ public class MagicLeapFeatureGroup { }
+
+#if UNITY_EDITOR_WIN
+ [OpenXRFeatureSet(
+ UiName = "Magic Leap AppSim",
+ Description = "All features supported by the Magic Leap 2 platform.",
+ FeatureSetId = "com.magicleap.openxr.featuregroup.appsim",
+ SupportedBuildTargets = new[] { BuildTargetGroup.Standalone },
+ FeatureIds = new[] {
+ MagicLeapFeature.FeatureId,
+ MagicLeapControllerProfile.FeatureId,
MagicLeapRenderingExtensionsFeature.FeatureId,
MagicLeapReferenceSpacesFeature.FeatureId,
MagicLeapPlanesFeature.FeatureId,
MagicLeapUserCalibrationFeature.FeatureId,
MagicLeapSystemNotificationsFeature.FeatureId,
MagicLeapMarkerUnderstandingFeature.FeatureId,
- MagicLeapLocalizationMapFeature.FeatureId
+ MagicLeapLocalizationMapFeature.FeatureId,
+ MagicLeapSpatialAnchorsFeature.FeatureId,
+ MagicLeapSpatialAnchorsStorageFeature.FeatureId,
+ MagicLeapFacialExpressionFeature.FeatureId,
+ MagicLeapMeshingFeature.FeatureId
+ },
+ DefaultFeatureIds = new[] {
+ MagicLeapFeature.FeatureId,
+ MagicLeapControllerProfile.FeatureId,
+ MagicLeapPlanesFeature.FeatureId,
+ MagicLeapMarkerUnderstandingFeature.FeatureId,
+ MagicLeapSpatialAnchorsFeature.FeatureId,
+ MagicLeapMeshingFeature.FeatureId
}
)]
- public class MagicLeapFeatureGroup
- { }
+#endif
+ public class MagicLeapFeatureAppSimGroup { }
}
#endif
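The feature sets above only group features in the XR Plug-in Management UI; whether a given feature is enabled is still queried through the standard OpenXR plugin API. A brief sketch (the script name is hypothetical; `GetFeature<T>()` and `enabled` are standard `OpenXRSettings`/`OpenXRFeature` members):

```csharp
using UnityEngine;
using UnityEngine.XR.OpenXR;
using UnityEngine.XR.OpenXR.Features.MagicLeapSupport;

public class FeatureCheck : MonoBehaviour
{
    void Start()
    {
        // GetFeature<T>() returns the feature instance; `enabled` reflects project settings.
        var planes = OpenXRSettings.Instance.GetFeature<MagicLeapPlanesFeature>();
        if (planes == null || !planes.enabled)
            Debug.LogWarning("Magic Leap Planes feature is not enabled in OpenXR settings.");
    }
}
```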
diff --git a/Editor/OpenXR/MagicLeapRenderingExtensionsFeatureEditor.cs b/Editor/OpenXR/MagicLeapRenderingExtensionsFeatureEditor.cs
new file mode 100644
index 0000000..876ad40
--- /dev/null
+++ b/Editor/OpenXR/MagicLeapRenderingExtensionsFeatureEditor.cs
@@ -0,0 +1,55 @@
+// %BANNER_BEGIN%
+// ---------------------------------------------------------------------
+// %COPYRIGHT_BEGIN%
+// Copyright (c) (2019-2023) Magic Leap, Inc. All Rights Reserved.
+// Use of this file is governed by the Software License Agreement, located here: https://www.magicleap.com/software-license-agreement-ml2
+// Terms and conditions applicable to third-party materials accompanying this distribution may also be found in the top-level NOTICE file appearing herein.
+// %COPYRIGHT_END%
+// ---------------------------------------------------------------------
+// %BANNER_END%
+
+using UnityEditor;
+using UnityEngine.XR.OpenXR.Features.MagicLeapSupport;
+using UnityEngine.XR.OpenXR.NativeTypes;
+
+#if UNITY_OPENXR_1_9_0_OR_NEWER
+[CustomEditor(typeof(MagicLeapRenderingExtensionsFeature))]
+public class MagicLeapRenderingExtensionsFeatureEditor : Editor
+{
+ private SerializedProperty protectedSurfaceProperty;
+ private SerializedProperty vignetteProperty;
+ private SerializedProperty blendModeProperty;
+ private SerializedProperty globalDimmerProperty;
+ private SerializedProperty globalDimmerValueProperty;
+
+ private enum SupportedBlendModes
+ {
+ Additive = XrEnvironmentBlendMode.Additive,
+ AlphaBlend = XrEnvironmentBlendMode.AlphaBlend
+ }
+
+ private void OnEnable()
+ {
+ blendModeProperty = serializedObject.FindProperty("BlendMode");
+ protectedSurfaceProperty = serializedObject.FindProperty("UseProtectedSurface");
+ vignetteProperty = serializedObject.FindProperty("UseVignetteMode");
+ globalDimmerProperty = serializedObject.FindProperty("GlobalDimmerEnabled");
+ globalDimmerValueProperty = serializedObject.FindProperty("GlobalDimmerValue");
+ }
+
+ public override void OnInspectorGUI()
+ {
+ serializedObject.Update();
+
+ blendModeProperty.intValue = (int)(SupportedBlendModes) EditorGUILayout.EnumPopup("Blend Mode", (SupportedBlendModes)blendModeProperty.intValue);
+ protectedSurfaceProperty.boolValue = EditorGUILayout.Toggle("Protected Surface", protectedSurfaceProperty.boolValue);
+ vignetteProperty.boolValue = EditorGUILayout.Toggle("Vignette", vignetteProperty.boolValue);
+ globalDimmerProperty.boolValue = EditorGUILayout.Toggle("Enable Global Dimmer", globalDimmerProperty.boolValue);
+
+ var dimmerEnabled = globalDimmerProperty.boolValue;
+ globalDimmerValueProperty.floatValue = EditorGUILayout.Slider("Global Dimmer Value", dimmerEnabled ? globalDimmerValueProperty.floatValue : 0f, 0f, 1f);
+
+ serializedObject.ApplyModifiedProperties();
+ }
+}
+#endif
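The serialized names bound above (`BlendMode`, `UseProtectedSurface`, `UseVignetteMode`, `GlobalDimmerEnabled`, `GlobalDimmerValue`) imply matching public fields on `MagicLeapRenderingExtensionsFeature`, so a script can plausibly drive the global dimmer at runtime. A sketch under that assumption; the field types are inferred from how the editor binds them:

```csharp
using UnityEngine;
using UnityEngine.XR.OpenXR;
using UnityEngine.XR.OpenXR.Features.MagicLeapSupport;

public class DimmerFade : MonoBehaviour
{
    [Range(0f, 1f)] public float targetDim = 0.5f;

    void Start()
    {
        var rendering = OpenXRSettings.Instance.GetFeature<MagicLeapRenderingExtensionsFeature>();
        if (rendering == null) return;

        // Field names mirror the SerializedProperty bindings in the editor above.
        rendering.GlobalDimmerEnabled = true;
        rendering.GlobalDimmerValue = targetDim;
    }
}
```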
diff --git a/Editor/SettingsProviders/Preferences/MagicLeapEditorPreferences.cs.meta b/Editor/OpenXR/MagicLeapRenderingExtensionsFeatureEditor.cs.meta
similarity index 83%
rename from Editor/SettingsProviders/Preferences/MagicLeapEditorPreferences.cs.meta
rename to Editor/OpenXR/MagicLeapRenderingExtensionsFeatureEditor.cs.meta
index 4ee88bf..eceb268 100644
--- a/Editor/SettingsProviders/Preferences/MagicLeapEditorPreferences.cs.meta
+++ b/Editor/OpenXR/MagicLeapRenderingExtensionsFeatureEditor.cs.meta
@@ -1,5 +1,5 @@
fileFormatVersion: 2
-guid: 99ce30e5b1edc124b8e4e0b12a5bac03
+guid: bc0418a2644a443cd8ebc8031c2c0cd8
MonoImporter:
externalObjects: {}
serializedVersion: 2
diff --git a/Editor/SettingsProviders/Permissions/PermissionSettingsLoader.cs b/Editor/SettingsProviders/Permissions/PermissionSettingsLoader.cs
index 5daba57..9c8456c 100644
--- a/Editor/SettingsProviders/Permissions/PermissionSettingsLoader.cs
+++ b/Editor/SettingsProviders/Permissions/PermissionSettingsLoader.cs
@@ -14,9 +14,10 @@
namespace UnityEditor.XR.MagicLeap
{
- public class PermissionSettingsLoader
+ internal class PermissionSettingsLoader
{
- private string FilePath => Path.Combine(MagicLeapSDKUtil.SdkPath, "data", "ml_permissions.json");
+ private const string manifestFileName = "ml_permissions.json";
+ public string ManifestFilePath => Path.Combine("Packages/com.magicleap.unitysdk/Editor/SettingsProviders/Permissions/data~", manifestFileName);
internal PermissionsListJson settingJson { get; private set; }
public PermissionSettingsLoader() => Initialize();
@@ -25,7 +26,7 @@ private void Initialize()
{
try
{
- var json = File.ReadAllText(FilePath, Encoding.UTF8);
+ var json = File.ReadAllText(ManifestFilePath, Encoding.UTF8);
settingJson = JsonUtility.FromJson($"{{\"Settings\":{json}}}");
}
diff --git a/Editor/SettingsProviders/Permissions/data~/ml_permissions.json b/Editor/SettingsProviders/Permissions/data~/ml_permissions.json
new file mode 100644
index 0000000..0018979
--- /dev/null
+++ b/Editor/SettingsProviders/Permissions/data~/ml_permissions.json
@@ -0,0 +1,93 @@
+[
+ {
+ "name": "android.permission.CAMERA",
+ "description": "Permission to use device camera(s).",
+ "level": "dangerous",
+ "min_api_level": 20
+ },
+ {
+ "name": "android.permission.RECORD_AUDIO",
+ "description": "Permission to use device microphone to record audio.",
+ "level": "dangerous",
+ "min_api_level": 20
+ },
+ {
+ "name": "com.magicleap.permission.EYE_TRACKING",
+ "description": "Permission to obtain eye tracking data.",
+ "level": "dangerous",
+ "min_api_level": 9
+ },
+ {
+ "name": "com.magicleap.permission.PUPIL_SIZE",
+ "description": "Permission to obtain eye pupil size data.",
+ "level": "dangerous",
+ "min_api_level": 20
+ },
+ {
+ "name": "com.magicleap.permission.SPATIAL_ANCHOR",
+ "description": "Permission to obtain spatial anchor(s) data.",
+ "level": "normal",
+ "min_api_level": 20
+ },
+ {
+ "name": "com.magicleap.permission.HAND_TRACKING",
+ "description": "Permission to obtain hand tracking data.",
+ "level": "normal",
+ "min_api_level": 20
+ },
+ {
+ "name": "com.magicleap.permission.WEBVIEW",
+ "description": "Permission to enable WebView access.",
+ "level": "normal",
+ "min_api_level": 20
+ },
+ {
+ "name": "com.magicleap.permission.MARKER_TRACKING",
+ "description": "Permission to obtain marker tracking data.",
+ "level": "normal",
+ "min_api_level": 20
+ },
+ {
+ "name": "com.magicleap.permission.VOICE_INPUT",
+ "description": "Permission to obtain voice input data.",
+ "level": "dangerous",
+ "min_api_level": 20
+ },
+ {
+ "name": "com.magicleap.permission.SPATIAL_MAPPING",
+ "description": "Permission to obtain following spatial data: planes data, meshing data.",
+ "level": "dangerous",
+ "min_api_level": 20
+ },
+ {
+ "name": "com.magicleap.permission.DEPTH_CAMERA",
+ "description": "Permission to use depth camera.",
+ "level": "dangerous",
+ "min_api_level": 23
+ },
+ {
+ "name": "com.magicleap.permission.EYE_CAMERA",
+ "description": "Permission to use eye cameras.",
+ "level": "dangerous",
+ "min_api_level": 26
+ },
+ {
+ "name": "com.magicleap.permission.SPACE_MANAGER",
+ "description": "Permission to use Magic Leap Space manager.",
+ "level": "normal",
+ "min_api_level": 26
+ },
+ {
+ "name": "com.magicleap.permission.SPACE_IMPORT_EXPORT",
+ "description": "Permission to export and import Magic Leap Spaces.",
+ "level": "dangerous",
+ "min_api_level": 26
+ },
+ {
+ "name": "com.magicleap.permission.FACIAL_EXPRESSION",
+ "description": "Permission to obtain facial expression data.",
+ "level": "dangerous",
+ "min_api_level": 29
+ }
+
+]
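The loader wraps this top-level array as `{"Settings": ...}` before calling `JsonUtility.FromJson`, because `JsonUtility` cannot deserialize a bare JSON array. The wrapper and entry types are therefore plausibly shaped like the following sketch (the entry class name is an assumption; the field names are fixed by the JSON keys):

```csharp
using System;

[Serializable]
internal class PermissionsListJson
{
    // Matches the injected wrapper key: {"Settings": [ ... ]}
    public PermissionJson[] Settings;
}

[Serializable]
internal class PermissionJson
{
    // Field names must match the JSON keys exactly for JsonUtility.
    public string name;
    public string description;
    public string level;          // "normal" or "dangerous"
    public int min_api_level;
}
```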
diff --git a/Editor/SettingsProviders/Preferences/MagicLeapEditorPreferences.cs b/Editor/SettingsProviders/Preferences/MagicLeapEditorPreferences.cs
deleted file mode 100644
index d3a960b..0000000
--- a/Editor/SettingsProviders/Preferences/MagicLeapEditorPreferences.cs
+++ /dev/null
@@ -1,471 +0,0 @@
-using System.Collections.Generic;
-using UnityEngine.UIElements;
-using System.IO;
-using UnityEngine.XR.MagicLeap;
-using System.Diagnostics;
-using System.Threading.Tasks;
-using System;
-
-namespace UnityEditor.XR.MagicLeap
-{
- internal static class MagicLeapEditorPreferences
- {
- private static readonly string SdkPathEditorPrefsKey = "MagicLeapSDKRoot";
- private static readonly string UserCustomZIBackendPath = "ZI_User_Selected_ZI_Runtime";
- private static readonly string LabdriverFoundBackendPath = "ZI_Labdriver_Found_ZI_Runtime";
- private static readonly string ToggleZIPathEnableOverride = "ZI_Path_Enable_Override";
-
- private static readonly string PathToUIAsset = "Packages/com.magicleap.unitysdk/Editor/SettingsProviders/Preferences/MagicLeapEditorPreferences.uxml";
-
- private static TextField sdkInputField;
- private static TextField ziInputField;
- private static Button sdkPathBrowseBtn;
- private static Button ziPathBrowseBtn;
- private static VisualElement sdkPathHelpContainer;
- private static VisualElement ziPathHelpContainer;
- private static HelpBox sdkPathHelp;
- private static HelpBox ziPathHelp;
- private static Toggle ziPathOverrideToggle;
-
- private static bool labdriverRunning = false;
- private static int progressId = -1;
-
- private static string mlsdkPath = "";
- private static string ziRuntimePath;
- private static string labdriverResultPath = "";
- private static bool enableOverrideZiPath;
- private static bool usingLabdriverFoundPath = false;
-
- public static string ZeroIterationRuntimePath => ziRuntimePath;
-
- public static bool RunningLabdriver => labdriverRunning;
-
- public static event Action<string> ZIRuntimePathChangeEvt;
-
- [InitializeOnLoad]
- class PreferencesLoader
- {
- static PreferencesLoader()
- {
- var path = GetSavedSDKPath();
- if (Directory.Exists(path))
- {
- mlsdkPath = path;
- }
- enableOverrideZiPath = EditorPrefs.GetBool(ToggleZIPathEnableOverride);
- if (enableOverrideZiPath)
- {
- path = EditorPrefs.GetString(UserCustomZIBackendPath);
- if (Directory.Exists(path))
- {
- ziRuntimePath = path;
- }
- }
- else
- {
- LocateZIRuntimeFromMLSDK();
- }
-
- EditorApplication.update += () =>
- {
- if (!string.IsNullOrEmpty(labdriverResultPath))
- {
- EditorPrefs.SetString(LabdriverFoundBackendPath, labdriverResultPath);
- if (ziRuntimePath != labdriverResultPath)
- {
- ziRuntimePath = labdriverResultPath;
- ZIRuntimePathChangeEvt?.Invoke(ziRuntimePath);
- }
- labdriverResultPath = "";
- }
- if (labdriverRunning)
- {
- Progress.Report(progressId, 1f, "Searching for latest ML App Sim installation...");
- }
- else
- {
- progressId = Progress.Remove(progressId);
- }
- };
- }
- }
-
- [SettingsProvider]
- public static SettingsProvider CreateMLPreferencesTabProvider()
- {
- var provider = new SettingsProvider("Preferences/External Tools/Magic Leap", SettingsScope.User)
- {
- label = "Magic Leap",
-
- // activateHandler is called when the user clicks on the Settings item in the Settings window.
- activateHandler = (searchContext, rootElement) =>
- {
- var visualTree = AssetDatabase.LoadAssetAtPath<VisualTreeAsset>(PathToUIAsset);
-
- visualTree.CloneTree(rootElement);
-
- sdkInputField = rootElement.Q("MLSDK-Input");
- ziInputField = rootElement.Q("ZI-Input");
- sdkPathBrowseBtn = rootElement.Q
+ *
+ *
When auto-white balance (AWB) is enabled with ACAMERA_CONTROL_AWB_MODE, this
+ * control is overridden by the AWB routine. When AWB is disabled, the
+ * application controls how the color mapping is performed.
+ *
We define the expected processing pipeline below. For consistency
+ * across devices, this is always the case with TRANSFORM_MATRIX.
+ *
When either FAST or HIGH_QUALITY is used, the camera device may
+ * do additional processing but ACAMERA_COLOR_CORRECTION_GAINS and
+ * ACAMERA_COLOR_CORRECTION_TRANSFORM will still be provided by the
+ * camera device (in the results) and be roughly correct.
+ *
Switching to TRANSFORM_MATRIX and using the data provided from
+ * FAST or HIGH_QUALITY will yield a picture with the same white point
+ * as what was produced by the camera device in the earlier frame.
+ *
The expected processing pipeline is as follows:
+ *
+ *
The white balance is encoded by two values, a 4-channel white-balance
+ * gain vector (applied in the Bayer domain), and a 3x3 color transform
+ * matrix (applied after demosaic).
+ *
The 4-channel white-balance gains are defined as:
+ *
ACAMERA_COLOR_CORRECTION_GAINS = [ R G_even G_odd B ]
+ *
+ *
where G_even is the gain for green pixels on even rows of the
+ * output, and G_odd is the gain for green pixels on the odd rows.
+ * These may be identical for a given camera device implementation; if
+ * the camera device does not support a separate gain for even/odd green
+ * channels, it will use the G_even value, and write G_odd equal to
+ * G_even in the output result metadata.
+ *
The matrices for color transforms are defined as a 9-entry vector:
A color transform matrix to use to transform
+ * from sensor RGB color space to output linear sRGB color space.
+ *
+ *
Type: rational[3*3]
+ *
+ *
This tag may appear in:
+ *
+ *
ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks
+ *
ACaptureRequest
+ *
+ *
+ *
This matrix is either set by the camera device when the request
+ * ACAMERA_COLOR_CORRECTION_MODE is not TRANSFORM_MATRIX, or
+ * directly by the application in the request when the
+ * ACAMERA_COLOR_CORRECTION_MODE is TRANSFORM_MATRIX.
+ *
In the latter case, the camera device may round the matrix to account
+ * for precision issues; the final rounded matrix should be reported back
+ * in this matrix result metadata. The transform should keep the magnitude
+ * of the output color values within [0, 1.0] (assuming input color
+ * values is within the normalized range [0, 1.0]), or clipping may occur.
+ *
The valid range of each matrix element varies on different devices, but
+ * values within [-1.5, 3.0] are guaranteed not to be clipped.
Gains applying to Bayer raw color channels for
+ * white-balance.
+ *
+ *
Type: float[4]
+ *
+ *
This tag may appear in:
+ *
+ *
ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks
+ *
ACaptureRequest
+ *
+ *
+ *
These per-channel gains are either set by the camera device
+ * when the request ACAMERA_COLOR_CORRECTION_MODE is not
+ * TRANSFORM_MATRIX, or directly by the application in the
+ * request when the ACAMERA_COLOR_CORRECTION_MODE is
+ * TRANSFORM_MATRIX.
+ *
The gains in the result metadata are the gains actually
+ * applied by the camera device to the current frame.
+ *
The valid range of gains varies on different devices, but gains
+ * between [1.0, 3.0] are guaranteed not to be clipped. Even if a given
+ * device allows gains below 1.0, this is usually not recommended because
+ * this can create color artifacts.
ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks
+ *
ACaptureRequest
+ *
+ *
+ *
Chromatic (color) aberration is caused by the fact that different wavelengths of light
+ * can not focus on the same point after exiting from the lens. This metadata defines
+ * the high level control of chromatic aberration correction algorithm, which aims to
+ * minimize the chromatic artifacts that may occur along the object boundaries in an
+ * image.
+ *
FAST/HIGH_QUALITY both mean that camera device determined aberration
+ * correction will be applied. HIGH_QUALITY mode indicates that the camera device will
+ * use the highest-quality aberration correction algorithms, even if it slows down
+ * capture rate. FAST means the camera device will not slow down capture rate when
+ * applying aberration correction.
ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
+ *
+ *
This key lists the valid modes for ACAMERA_COLOR_CORRECTION_ABERRATION_MODE. If no
+ * aberration correction modes are available for a device, this list will solely include
+ * OFF mode. All camera devices will support either OFF or FAST mode.
+ *
Camera devices that support the MANUAL_POST_PROCESSING capability will always list
+ * OFF mode. This includes all FULL level devices.
+ *
LEGACY devices will always only support FAST mode.
ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks
+ *
ACaptureRequest
+ *
+ *
+ *
Some kinds of lighting fixtures, such as some fluorescent
+ * lights, flicker at the rate of the power supply frequency
+ * (60Hz or 50Hz, depending on country). While this is
+ * typically not noticeable to a person, it can be visible to
+ * a camera device. If a camera sets its exposure time to the
+ * wrong value, the flicker may become visible in the
+ * viewfinder as flicker or in a final captured image, as a
+ * set of variable-brightness bands across the image.
+ *
Therefore, the auto-exposure routines of camera devices
+ * include antibanding routines that ensure that the chosen
+ * exposure value will not cause such banding. The choice of
+ * exposure time depends on the rate of flicker, which the
+ * camera device can detect automatically, or the expected
+ * rate can be selected by the application using this
+ * control.
+ *
A given camera device may not support all of the possible
+ * options for the antibanding mode. The
+ * ACAMERA_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES key contains
+ * the available modes for a given camera device.
+ *
AUTO mode is the default if it is available on given
+ * camera device. When AUTO mode is not available, the
+ * default will be either 50HZ or 60HZ, and both 50HZ
+ * and 60HZ will be available.
+ *
If manual exposure control is enabled (by setting
+ * ACAMERA_CONTROL_AE_MODE or ACAMERA_CONTROL_MODE to OFF),
+ * then this setting has no effect, and the application must
+ * ensure it selects exposure times that do not cause banding
+ * issues. The ACAMERA_STATISTICS_SCENE_FLICKER key can assist
+ * the application in this.
Adjustment to auto-exposure (AE) target image
+ * brightness.
+ *
+ *
Type: int32
+ *
+ *
This tag may appear in:
+ *
+ *
ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks
+ *
ACaptureRequest
+ *
+ *
+ *
The adjustment is measured as a count of steps, with the
+ * step size defined by ACAMERA_CONTROL_AE_COMPENSATION_STEP and the
+ * allowed range by ACAMERA_CONTROL_AE_COMPENSATION_RANGE.
+ *
For example, if the exposure value (EV) step is 0.333, '6'
+ * will mean an exposure compensation of +2 EV; -3 will mean an
+ * exposure compensation of -1 EV. One EV represents a doubling
+ * of image brightness. Note that this control will only be
+ * effective if ACAMERA_CONTROL_AE_MODE != OFF. This control
+ * will take effect even when ACAMERA_CONTROL_AE_LOCK == true.
+ *
In the event of exposure compensation value being changed, camera device
+ * may take several frames to reach the newly requested exposure target.
+ * During that time, ACAMERA_CONTROL_AE_STATE field will be in the SEARCHING
+ * state. Once the new exposure target is reached, ACAMERA_CONTROL_AE_STATE will
+ * change from SEARCHING to either CONVERGED, LOCKED (if AE lock is enabled), or
+ * FLASH_REQUIRED (if the scene is too dark for still capture).
ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks
+ *
ACaptureRequest
+ *
+ *
+ *
When set to true (ON), the AE algorithm is locked to its latest parameters,
+ * and will not change exposure settings until the lock is set to false (OFF).
+ *
Note that even when AE is locked, the flash may be fired if
+ * the ACAMERA_CONTROL_AE_MODE is ON_AUTO_FLASH /
+ * ON_ALWAYS_FLASH / ON_AUTO_FLASH_REDEYE.
+ *
When ACAMERA_CONTROL_AE_EXPOSURE_COMPENSATION is changed, even if the AE lock
+ * is ON, the camera device will still adjust its exposure value.
+ *
If AE precapture is triggered (see ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER)
+ * when AE is already locked, the camera device will not change the exposure time
+ * (ACAMERA_SENSOR_EXPOSURE_TIME) and sensitivity (ACAMERA_SENSOR_SENSITIVITY)
+ * parameters. The flash may be fired if the ACAMERA_CONTROL_AE_MODE
+ * is ON_AUTO_FLASH/ON_AUTO_FLASH_REDEYE and the scene is too dark. If the
+ * ACAMERA_CONTROL_AE_MODE is ON_ALWAYS_FLASH, the scene may become overexposed.
+ * Similarly, AE precapture trigger CANCEL has no effect when AE is already locked.
+ *
When an AE precapture sequence is triggered, AE unlock will not be able to unlock
+ * the AE if AE is locked by the camera device internally during precapture metering
+ * sequence In other words, submitting requests with AE unlock has no effect for an
+ * ongoing precapture metering sequence. Otherwise, the precapture metering sequence
+ * will never succeed in a sequence of preview requests where AE lock is always set
+ * to false.
+ *
Since the camera device has a pipeline of in-flight requests, the settings that
+ * get locked do not necessarily correspond to the settings that were present in the
+ * latest capture result received from the camera device, since additional captures
+ * and AE updates may have occurred even before the result was sent out. If an
+ * application is switching between automatic and manual control and wishes to eliminate
+ * any flicker during the switch, the following procedure is recommended:
+ *
+ *
Starting in auto-AE mode:
+ *
Lock AE
+ *
Wait for the first result to be output that has the AE locked
+ *
Copy exposure settings from that result into a request, set the request to manual AE
+ *
Submit the capture request, proceed to run manual AE as desired.
+ *
+ *
See ACAMERA_CONTROL_AE_STATE for AE lock related state transition details.
ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks
+ *
ACaptureRequest
+ *
+ *
+ *
This control is only effective if ACAMERA_CONTROL_MODE is
+ * AUTO.
+ *
When set to any of the ON modes, the camera device's
+ * auto-exposure routine is enabled, overriding the
+ * application's selected exposure time, sensor sensitivity,
+ * and frame duration (ACAMERA_SENSOR_EXPOSURE_TIME,
+ * ACAMERA_SENSOR_SENSITIVITY, and
+ * ACAMERA_SENSOR_FRAME_DURATION). If one of the FLASH modes
+ * is selected, the camera device's flash unit controls are
+ * also overridden.
+ *
The FLASH modes are only available if the camera device
+ * has a flash unit (ACAMERA_FLASH_INFO_AVAILABLE is true).
+ *
If flash TORCH mode is desired, this field must be set to
+ * ON or OFF, and ACAMERA_FLASH_MODE set to TORCH.
+ *
When set to any of the ON modes, the values chosen by the
+ * camera device auto-exposure routine for the overridden
+ * fields for a given capture will be available in its
+ * CaptureResult.
List of metering areas to use for auto-exposure adjustment.
+ *
+ *
Type: int32[5*area_count]
+ *
+ *
This tag may appear in:
+ *
+ *
ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks
+ *
ACaptureRequest
+ *
+ *
+ *
Not available if android.control.maxRegionsAe is 0.
+ * Otherwise will always be present.
+ *
The maximum number of regions supported by the device is determined by the value
+ * of android.control.maxRegionsAe.
+ *
For devices not supporting ACAMERA_DISTORTION_CORRECTION_MODE control, the coordinate
+ * system always follows that of ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE, with (0,0) being
+ * the top-left pixel in the active pixel array, and
+ * (ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE.width - 1,
+ * ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE.height - 1) being the bottom-right pixel in the
+ * active pixel array.
+ *
For devices supporting ACAMERA_DISTORTION_CORRECTION_MODE control, the coordinate
+ * system depends on the mode being set.
+ * When the distortion correction mode is OFF, the coordinate system follows
+ * ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE, with
+ * (0, 0) being the top-left pixel of the pre-correction active array, and
+ * (ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE.width - 1,
+ * ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE.height - 1) being the bottom-right
+ * pixel in the pre-correction active pixel array.
+ * When the distortion correction mode is not OFF, the coordinate system follows
+ * ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE, with
+ * (0, 0) being the top-left pixel of the active array, and
+ * (ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE.width - 1,
+ * ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE.height - 1) being the bottom-right pixel in the
+ * active pixel array.
+ *
The weight must be within [0, 1000], and represents a weight
+ * for every pixel in the area. This means that a large metering area
+ * with the same weight as a smaller area will have more effect in
+ * the metering result. Metering areas can partially overlap and the
+ * camera device will add the weights in the overlap region.
+ *
The weights are relative to weights of other exposure metering regions, so if only one
+ * region is used, all non-zero weights will have the same effect. A region with 0
+ * weight is ignored.
+ *
If all regions have 0 weight, then no specific metering area needs to be used by the
+ * camera device.
+ *
If the metering region is outside the used ACAMERA_SCALER_CROP_REGION returned in
+ * capture result metadata, the camera device will ignore the sections outside the crop
+ * region and output only the intersection rectangle as the metering region in the result
+ * metadata. If the region is entirely outside the crop region, it will be ignored and
+ * not reported in the result metadata.
+ *
When setting the AE metering regions, the application must consider the additional
+ * crop resulted from the aspect ratio differences between the preview stream and
+ * ACAMERA_SCALER_CROP_REGION. For example, if the ACAMERA_SCALER_CROP_REGION is the full
+ * active array size with 4:3 aspect ratio, and the preview stream is 16:9,
+ * the boundary of AE regions will be [0, y_crop] and
+ * [active_width, active_height - 2 * y_crop] rather than [0, 0] and
+ * [active_width, active_height], where y_crop is the additional crop due to aspect ratio
+ * mismatch.
+ *
Starting from API level 30, the coordinate system of activeArraySize or
+ * preCorrectionActiveArraySize is used to represent post-zoomRatio field of view, not
+ * pre-zoom field of view. This means that the same aeRegions values at different
+ * ACAMERA_CONTROL_ZOOM_RATIO represent different parts of the scene. The aeRegions
+ * coordinates are relative to the activeArray/preCorrectionActiveArray representing the
+ * zoomed field of view. If ACAMERA_CONTROL_ZOOM_RATIO is set to 1.0 (default), the same
+ * aeRegions at different ACAMERA_SCALER_CROP_REGION still represent the same parts of the
+ * scene as they do before. See ACAMERA_CONTROL_ZOOM_RATIO for details. Whether to use
+ * activeArraySize or preCorrectionActiveArraySize still depends on distortion correction
+ * mode.
The data representation is int[5 * area_count].
+ * Every five elements represent a metering region of (xmin, ymin, xmax, ymax, weight).
+ * The rectangle is defined to be inclusive on xmin and ymin, but exclusive on xmax and
+ * ymax.
ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks
+ *
ACaptureRequest
+ *
+ *
+ *
This entry is normally set to IDLE, or is not
+ * included at all in the request settings. When included and
+ * set to START, the camera device will trigger the auto-exposure (AE)
+ * precapture metering sequence.
+ *
When set to CANCEL, the camera device will cancel any active
+ * precapture metering trigger, and return to its initial AE state.
+ * If a precapture metering sequence is already completed, and the camera
+ * device has implicitly locked the AE for subsequent still capture, the
+ * CANCEL trigger will unlock the AE and return to its initial AE state.
+ *
The precapture sequence should be triggered before starting a
+ * high-quality still capture for final metering decisions to
+ * be made, and for firing pre-capture flash pulses to estimate
+ * scene brightness and required final capture flash power, when
+ * the flash is enabled.
+ *
Normally, this entry should be set to START for only a
+ * single request, and the application should wait until the
+ * sequence completes before starting a new one.
+ *
When a precapture metering sequence is finished, the camera device
+ * may lock the auto-exposure routine internally to be able to accurately expose the
+ * subsequent still capture image (ACAMERA_CONTROL_CAPTURE_INTENT == STILL_CAPTURE).
+ * For this case, the AE may not resume normal scan if no subsequent still capture is
+ * submitted. To ensure that the AE routine restarts normal scan, the application should
+ * submit a request with ACAMERA_CONTROL_AE_LOCK == true, followed by a request
+ * with ACAMERA_CONTROL_AE_LOCK == false, if the application decides not to submit a
+ * still capture request after the precapture sequence completes. Alternatively, for
+ * API level 23 or newer devices, the CANCEL can be used to unlock the camera device
+ * internally locked AE if the application doesn't submit a still capture request after
+ * the AE precapture trigger. Note that, the CANCEL was added in API level 23, and must not
+ * be used in devices that have earlier API levels.
+ *
The exact effect of auto-exposure (AE) precapture trigger
+ * depends on the current AE mode and state; see
+ * ACAMERA_CONTROL_AE_STATE for AE precapture state transition
+ * details.
+ *
On LEGACY-level devices, the precapture trigger is not supported;
+ * capturing a high-resolution JPEG image will automatically trigger a
+ * precapture sequence before the high-resolution capture, including
+ * potentially firing a pre-capture flash.
+ *
Using the precapture trigger and the auto-focus trigger ACAMERA_CONTROL_AF_TRIGGER
+ * simultaneously is allowed. However, since these triggers often require cooperation between
+ * the auto-focus and auto-exposure routines (for example, the may need to be enabled for a
+ * focus sweep), the camera device may delay acting on a later trigger until the previous
+ * trigger has been fully handled. This may lead to longer intervals between the trigger and
+ * changes to ACAMERA_CONTROL_AE_STATE indicating the start of the precapture sequence, for
+ * example.
+ *
If both the precapture and the auto-focus trigger are activated on the same request, then
+ * the camera device will complete them in the optimal order for that device.
ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks
+ *
ACaptureRequest
+ *
+ *
+ *
Only effective if ACAMERA_CONTROL_MODE = AUTO and the lens is not fixed focus
+ * (i.e. ACAMERA_LENS_INFO_MINIMUM_FOCUS_DISTANCE > 0). Also note that
+ * when ACAMERA_CONTROL_AE_MODE is OFF, the behavior of AF is device
+ * dependent. It is recommended to lock AF by using ACAMERA_CONTROL_AF_TRIGGER before
+ * setting ACAMERA_CONTROL_AE_MODE to OFF, or set AF mode to OFF when AE is OFF.
+ *
If the lens is controlled by the camera device auto-focus algorithm,
+ * the camera device will report the current AF status in ACAMERA_CONTROL_AF_STATE
+ * in result metadata.
ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks
+ *
ACaptureRequest
+ *
+ *
+ *
Not available if android.control.maxRegionsAf is 0.
+ * Otherwise will always be present.
+ *
The maximum number of focus areas supported by the device is determined by the value
+ * of android.control.maxRegionsAf.
+ *
For devices not supporting ACAMERA_DISTORTION_CORRECTION_MODE control, the coordinate
+ * system always follows that of ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE, with (0,0) being
+ * the top-left pixel in the active pixel array, and
+ * (ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE.width - 1,
+ * ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE.height - 1) being the bottom-right pixel in the
+ * active pixel array.
+ *
For devices supporting ACAMERA_DISTORTION_CORRECTION_MODE control, the coordinate
+ * system depends on the mode being set.
+ * When the distortion correction mode is OFF, the coordinate system follows
+ * ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE, with
+ * (0, 0) being the top-left pixel of the pre-correction active array, and
+ * (ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE.width - 1,
+ * ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE.height - 1) being the bottom-right
+ * pixel in the pre-correction active pixel array.
+ * When the distortion correction mode is not OFF, the coordinate system follows
+ * ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE, with
+ * (0, 0) being the top-left pixel of the active array, and
+ * (ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE.width - 1,
+ * ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE.height - 1) being the bottom-right pixel in the
+ * active pixel array.
+ *
The weight must be within [0, 1000], and represents a weight
+ * for every pixel in the area. This means that a large metering area
+ * with the same weight as a smaller area will have more effect in
+ * the metering result. Metering areas can partially overlap and the
+ * camera device will add the weights in the overlap region.
+ *
The weights are relative to weights of other metering regions, so if only one region
+ * is used, all non-zero weights will have the same effect. A region with 0 weight is
+ * ignored.
+ *
If all regions have 0 weight, then no specific metering area needs to be used by the
+ * camera device. The capture result will either be a zero weight region as well, or
+ * the region selected by the camera device as the focus area of interest.
+ *
If the metering region is outside the used ACAMERA_SCALER_CROP_REGION returned in
+ * capture result metadata, the camera device will ignore the sections outside the crop
+ * region and output only the intersection rectangle as the metering region in the result
+ * metadata. If the region is entirely outside the crop region, it will be ignored and
+ * not reported in the result metadata.
+ *
When setting the AF metering regions, the application must consider the additional
+ * crop resulting from the aspect ratio differences between the preview stream and
+ * ACAMERA_SCALER_CROP_REGION. For example, if the ACAMERA_SCALER_CROP_REGION is the full
+ * active array size with 4:3 aspect ratio, and the preview stream is 16:9,
+ * the boundary of AF regions will be [0, y_crop] and
+ * [active_width, active_height - 2 * y_crop] rather than [0, 0] and
+ * [active_width, active_height], where y_crop is the additional crop due to aspect ratio
+ * mismatch.
+ *
Starting from API level 30, the coordinate system of activeArraySize or
+ * preCorrectionActiveArraySize is used to represent post-zoomRatio field of view, not
+ * pre-zoom field of view. This means that the same afRegions values at different
+ * ACAMERA_CONTROL_ZOOM_RATIO represent different parts of the scene. The afRegions
+ * coordinates are relative to the activeArray/preCorrectionActiveArray representing the
+ * zoomed field of view. If ACAMERA_CONTROL_ZOOM_RATIO is set to 1.0 (default), the same
+ * afRegions at different ACAMERA_SCALER_CROP_REGION still represent the same parts of the
+ * scene as they do before. See ACAMERA_CONTROL_ZOOM_RATIO for details. Whether to use
+ * activeArraySize or preCorrectionActiveArraySize still depends on distortion correction
+ * mode.
The data representation is int[5 * area_count].
+ * Every five elements represent a metering region of (xmin, ymin, xmax, ymax, weight).
+ * The rectangle is defined to be inclusive on xmin and ymin, but exclusive on xmax and
+ * ymax.
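+ *
+ * As an illustrative sketch (not part of this header), a single centered
+ * AF metering rectangle on a hypothetical 2000x1500 active array could be
+ * set on a hypothetical request as follows (coordinates are illustrative):
+ *
+ *   int32_t afRegion[5] = {
+ *       500, 375,    // xmin, ymin (inclusive)
+ *       1500, 1125,  // xmax, ymax (exclusive)
+ *       1000         // weight in [0, 1000]
+ *   };
+ *   ACaptureRequest_setEntry_i32(request,
+ *       ACAMERA_CONTROL_AF_REGIONS, 5, afRegion);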
ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks
+ *
ACaptureRequest
+ *
+ *
+ *
This entry is normally set to IDLE, or is not
+ * included at all in the request settings.
+ *
When included and set to START, the camera device will trigger the
+ * autofocus algorithm. If autofocus is disabled, this trigger has no effect.
+ *
When set to CANCEL, the camera device will cancel any active trigger,
+ * and return to its initial AF state.
+ *
Generally, applications should set this entry to START or CANCEL for only a
+ * single capture, and then return it to IDLE (or not set at all). Specifying
+ * START for multiple captures in a row means restarting the AF operation over
+ * and over again.
+ *
See ACAMERA_CONTROL_AF_STATE for what the trigger means for each AF mode.
+ *
Using the autofocus trigger and the precapture trigger ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER
+ * simultaneously is allowed. However, since these triggers often require cooperation between
+ * the auto-focus and auto-exposure routines (for example, the auto-exposure
+ * routine may need to be enabled for a focus sweep), the camera device may
+ * delay acting on a later trigger until the previous
+ * trigger has been fully handled. This may lead to longer intervals between the trigger and
+ * changes to ACAMERA_CONTROL_AF_STATE, for example.
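+ *
+ * A minimal sketch (not part of this header) of a one-shot AF scan,
+ * assuming an open session and a hypothetical repeating previewRequest in
+ * an AUTO-style AF mode:
+ *
+ *   uint8_t afTrigger = ACAMERA_CONTROL_AF_TRIGGER_START;
+ *   ACaptureRequest_setEntry_u8(previewRequest,
+ *       ACAMERA_CONTROL_AF_TRIGGER, 1, &afTrigger);
+ *   ACameraCaptureSession_capture(session, NULL, 1, &previewRequest, NULL);
+ *   // Return the trigger to IDLE so subsequent captures do not restart
+ *   // the AF operation over and over.
+ *   afTrigger = ACAMERA_CONTROL_AF_TRIGGER_IDLE;
+ *   ACaptureRequest_setEntry_u8(previewRequest,
+ *       ACAMERA_CONTROL_AF_TRIGGER, 1, &afTrigger);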
ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks
+ *
ACaptureRequest
+ *
+ *
+ *
When set to true (ON), the AWB algorithm is locked to its latest parameters,
+ * and will not change color balance settings until the lock is set to false (OFF).
+ *
Since the camera device has a pipeline of in-flight requests, the settings that
+ * get locked do not necessarily correspond to the settings that were present in the
+ * latest capture result received from the camera device, since additional captures
+ * and AWB updates may have occurred even before the result was sent out. If an
+ * application is switching between automatic and manual control and wishes to eliminate
+ * any flicker during the switch, the following procedure is recommended:
+ *
+ * 1. Starting in auto-AWB mode:
+ * 2. Lock AWB
+ * 3. Wait for the first result to be output that has the AWB locked
+ * 4. Copy AWB settings from that result into a request, set the request to manual AWB
+ * 5. Submit the capture request, proceed to run manual AWB as desired.
+ *
+ *
Note that AWB lock is only meaningful when
+ * ACAMERA_CONTROL_AWB_MODE is in the AUTO mode; in other modes,
+ * AWB is already fixed to a specific setting.
+ *
Some LEGACY devices may not support ON; the value is then overridden to OFF.
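+ *
+ * A sketch (not part of this header) of the recommended procedure above,
+ * assuming a hypothetical repeating request object:
+ *
+ *   // Steps 1-2: while in auto-AWB mode, lock AWB.
+ *   uint8_t awbLock = ACAMERA_CONTROL_AWB_LOCK_ON;
+ *   ACaptureRequest_setEntry_u8(request, ACAMERA_CONTROL_AWB_LOCK, 1, &awbLock);
+ *   // Steps 3-4: wait for a capture result that reports AWB as locked,
+ *   // copy its color correction values into the request, then switch to
+ *   // manual AWB before resubmitting.
+ *   uint8_t awbMode = ACAMERA_CONTROL_AWB_MODE_OFF;
+ *   ACaptureRequest_setEntry_u8(request, ACAMERA_CONTROL_AWB_MODE, 1, &awbMode);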
ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks
+ *
ACaptureRequest
+ *
+ *
+ *
This control is only effective if ACAMERA_CONTROL_MODE is AUTO.
+ *
When set to the AUTO mode, the camera device's auto-white balance
+ * routine is enabled, overriding the application's selected
+ * ACAMERA_COLOR_CORRECTION_TRANSFORM, ACAMERA_COLOR_CORRECTION_GAINS and
+ * ACAMERA_COLOR_CORRECTION_MODE. Note that when ACAMERA_CONTROL_AE_MODE
+ * is OFF, the behavior of AWB is device dependent. It is recommended to
+ * also set AWB mode to OFF or lock AWB by using ACAMERA_CONTROL_AWB_LOCK before
+ * setting AE mode to OFF.
+ *
When set to the OFF mode, the camera device's auto-white balance
+ * routine is disabled. The application manually controls the white
+ * balance by ACAMERA_COLOR_CORRECTION_TRANSFORM, ACAMERA_COLOR_CORRECTION_GAINS
+ * and ACAMERA_COLOR_CORRECTION_MODE.
+ *
When set to any other modes, the camera device's auto-white
+ * balance routine is disabled. The camera device uses each
+ * particular illumination target for white balance
+ * adjustment. The application's values for
+ * ACAMERA_COLOR_CORRECTION_TRANSFORM,
+ * ACAMERA_COLOR_CORRECTION_GAINS and
+ * ACAMERA_COLOR_CORRECTION_MODE are ignored.
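+ *
+ * As an illustrative sketch (not part of this header), manual white
+ * balance under the OFF mode might look like the following; the gain
+ * values are placeholders, `request` is hypothetical, and a complete
+ * manual pipeline would also set ACAMERA_COLOR_CORRECTION_TRANSFORM:
+ *
+ *   uint8_t awbOff = ACAMERA_CONTROL_AWB_MODE_OFF;
+ *   uint8_t ccMode = ACAMERA_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
+ *   float gains[4] = {2.0f, 1.0f, 1.0f, 1.8f};  // R, G_even, G_odd, B
+ *   ACaptureRequest_setEntry_u8(request, ACAMERA_CONTROL_AWB_MODE, 1, &awbOff);
+ *   ACaptureRequest_setEntry_u8(request, ACAMERA_COLOR_CORRECTION_MODE, 1, &ccMode);
+ *   ACaptureRequest_setEntry_float(request, ACAMERA_COLOR_CORRECTION_GAINS, 4, gains);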
List of metering areas to use for auto-white-balance illuminant
+ * estimation.
+ *
+ *
Type: int32[5*area_count]
+ *
+ *
This tag may appear in:
+ *
+ *
ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks
+ *
ACaptureRequest
+ *
+ *
+ *
Not available if android.control.maxRegionsAwb is 0.
+ * Otherwise will always be present.
+ *
The maximum number of regions supported by the device is determined by the value
+ * of android.control.maxRegionsAwb.
+ *
For devices not supporting ACAMERA_DISTORTION_CORRECTION_MODE control, the coordinate
+ * system always follows that of ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE, with (0,0) being
+ * the top-left pixel in the active pixel array, and
+ * (ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE.width - 1,
+ * ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE.height - 1) being the bottom-right pixel in the
+ * active pixel array.
+ *
For devices supporting ACAMERA_DISTORTION_CORRECTION_MODE control, the coordinate
+ * system depends on the mode being set.
+ * When the distortion correction mode is OFF, the coordinate system follows
+ * ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE, with
+ * (0, 0) being the top-left pixel of the pre-correction active array, and
+ * (ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE.width - 1,
+ * ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE.height - 1) being the bottom-right
+ * pixel in the pre-correction active pixel array.
+ * When the distortion correction mode is not OFF, the coordinate system follows
+ * ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE, with
+ * (0, 0) being the top-left pixel of the active array, and
+ * (ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE.width - 1,
+ * ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE.height - 1) being the bottom-right pixel in the
+ * active pixel array.
+ *
The weight must range from 0 to 1000, and represents a weight
+ * for every pixel in the area. This means that a large metering area
+ * with the same weight as a smaller area will have more effect in
+ * the metering result. Metering areas can partially overlap and the
+ * camera device will add the weights in the overlap region.
+ *
The weights are relative to weights of other white balance metering regions, so if
+ * only one region is used, all non-zero weights will have the same effect. A region with
+ * 0 weight is ignored.
+ *
If all regions have 0 weight, then no specific metering area needs to be used by the
+ * camera device.
+ *
If the metering region is outside the used ACAMERA_SCALER_CROP_REGION returned in
+ * capture result metadata, the camera device will ignore the sections outside the crop
+ * region and output only the intersection rectangle as the metering region in the result
+ * metadata. If the region is entirely outside the crop region, it will be ignored and
+ * not reported in the result metadata.
+ *
When setting the AWB metering regions, the application must consider the additional
+ * crop resulting from the aspect ratio differences between the preview stream and
+ * ACAMERA_SCALER_CROP_REGION. For example, if the ACAMERA_SCALER_CROP_REGION is the full
+ * active array size with 4:3 aspect ratio, and the preview stream is 16:9,
+ * the boundary of AWB regions will be [0, y_crop] and
+ * [active_width, active_height - 2 * y_crop] rather than [0, 0] and
+ * [active_width, active_height], where y_crop is the additional crop due to aspect ratio
+ * mismatch.
+ *
Starting from API level 30, the coordinate system of activeArraySize or
+ * preCorrectionActiveArraySize is used to represent post-zoomRatio field of view, not
+ * pre-zoom field of view. This means that the same awbRegions values at different
+ * ACAMERA_CONTROL_ZOOM_RATIO represent different parts of the scene. The awbRegions
+ * coordinates are relative to the activeArray/preCorrectionActiveArray representing the
+ * zoomed field of view. If ACAMERA_CONTROL_ZOOM_RATIO is set to 1.0 (default), the same
+ * awbRegions at different ACAMERA_SCALER_CROP_REGION still represent the same parts of
+ * the scene as they do before. See ACAMERA_CONTROL_ZOOM_RATIO for details. Whether to use
+ * activeArraySize or preCorrectionActiveArraySize still depends on distortion correction
+ * mode.
The data representation is int[5 * area_count].
+ * Every five elements represent a metering region of (xmin, ymin, xmax, ymax, weight).
+ * The rectangle is defined to be inclusive on xmin and ymin, but exclusive on xmax and
+ * ymax.
Information to the camera device 3A (auto-exposure,
+ * auto-focus, auto-white balance) routines about the purpose
+ * of this capture, to help the camera device to decide optimal 3A
+ * strategy.
ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks
+ *
ACaptureRequest
+ *
+ *
+ *
When this mode is set, a color effect will be applied
+ * to images produced by the camera device. The interpretation
+ * and implementation of these color effects is left to the
+ * implementor of the camera device, and should not be
+ * depended on to be consistent (or present) across all
+ * devices.
ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks
+ *
ACaptureRequest
+ *
+ *
+ *
This is a top-level 3A control switch. When set to OFF, all 3A control
+ * by the camera device is disabled. The application must set the fields for
+ * capture parameters itself.
+ *
When set to AUTO, the individual algorithm controls in
+ * ACAMERA_CONTROL_* are in effect, such as ACAMERA_CONTROL_AF_MODE.
+ *
When set to USE_SCENE_MODE or USE_EXTENDED_SCENE_MODE, the individual controls in
+ * ACAMERA_CONTROL_* are mostly disabled, and the camera device
+ * implements one of the scene mode or extended scene mode settings (such as ACTION,
+ * SUNSET, PARTY, or BOKEH) as it wishes. The camera device scene mode
+ * 3A settings are provided by {@link ACameraCaptureSession_captureCallback_result capture results}.
+ *
When set to OFF_KEEP_STATE, it is similar to OFF mode; the only difference
+ * is that this frame will not be used by the camera device's background 3A statistics
+ * update, as if this frame were never captured. This mode can be used in the scenario
+ * where the application doesn't want a 3A manual control capture to affect
+ * the subsequent auto 3A capture results.
ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks
+ *
ACaptureRequest
+ *
+ *
+ *
Scene modes are custom camera modes optimized for a certain set of conditions and
+ * capture settings.
+ *
This is the mode that is active when
+ * ACAMERA_CONTROL_MODE == USE_SCENE_MODE. Aside from FACE_PRIORITY, these modes will
+ * disable ACAMERA_CONTROL_AE_MODE, ACAMERA_CONTROL_AWB_MODE, and ACAMERA_CONTROL_AF_MODE
+ * while in use.
+ *
The interpretation and implementation of these scene modes is left
+ * to the implementor of the camera device. Their behavior will not be
+ * consistent across all devices, and any given device may only implement
+ * a subset of these modes.
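+ *
+ * A minimal sketch (not part of this header) of requesting a scene mode,
+ * assuming the mode was first found in ACAMERA_CONTROL_AVAILABLE_SCENE_MODES
+ * and `request` is hypothetical:
+ *
+ *   uint8_t mode = ACAMERA_CONTROL_MODE_USE_SCENE_MODE;
+ *   uint8_t scene = ACAMERA_CONTROL_SCENE_MODE_ACTION;
+ *   ACaptureRequest_setEntry_u8(request, ACAMERA_CONTROL_MODE, 1, &mode);
+ *   ACaptureRequest_setEntry_u8(request, ACAMERA_CONTROL_SCENE_MODE, 1, &scene);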
ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks
+ *
ACaptureRequest
+ *
+ *
+ *
Video stabilization automatically warps images from
+ * the camera in order to stabilize motion between consecutive frames.
+ *
If enabled, video stabilization can modify the
+ * ACAMERA_SCALER_CROP_REGION to keep the video stream stabilized.
+ *
Switching between different video stabilization modes may take several
+ * frames to initialize; the camera device will report the current mode
+ * in capture result metadata. For example, when "ON" mode is requested,
+ * the video stabilization modes in the first several capture results may
+ * still be "OFF", and they will become "ON" when initialization is
+ * done.
+ *
In addition, not all recording sizes or frame rates may be supported for
+ * stabilization by a device that reports stabilization support. It is guaranteed
+ * that an output targeting a MediaRecorder or MediaCodec will be stabilized if
+ * the recording resolution is less than or equal to 1920 x 1080 (width less than
+ * or equal to 1920, height less than or equal to 1080), and the recording
+ * frame rate is less than or equal to 30fps. At other sizes, the CaptureResult
+ * ACAMERA_CONTROL_VIDEO_STABILIZATION_MODE field will return
+ * OFF if the recording output is not stabilized, or if there are no output
+ * Surface types that can be stabilized.
+ *
If a camera device supports both this mode and OIS
+ * (ACAMERA_LENS_OPTICAL_STABILIZATION_MODE), turning both modes on may
+ * produce undesirable interaction, so it is recommended not to enable
+ * both at the same time.
+ *
If video stabilization is set to "PREVIEW_STABILIZATION",
+ * ACAMERA_LENS_OPTICAL_STABILIZATION_MODE is overridden. The camera sub-system may choose
+ * to turn on hardware based image stabilization in addition to software based stabilization
+ * if it deems that appropriate.
+ * This key may be a part of the available session keys, which camera clients may
+ * query via
+ * {@link ACameraManager_getCameraCharacteristics }.
+ * If this is the case, changing this key over the lifetime of a capture session may
+ * cause delays / glitches.
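+ *
+ * A sketch (not part of this header) of enabling video stabilization on a
+ * hypothetical recording request:
+ *
+ *   uint8_t stab = ACAMERA_CONTROL_VIDEO_STABILIZATION_MODE_ON;
+ *   ACaptureRequest_setEntry_u8(recordRequest,
+ *       ACAMERA_CONTROL_VIDEO_STABILIZATION_MODE, 1, &stab);
+ *   // Read the mode back from each capture result; the first few frames
+ *   // may still report OFF while stabilization initializes.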
ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
+ *
+ *
Not all of the auto-exposure anti-banding modes may be
+ * supported by a given camera device. This field lists the
+ * valid anti-banding modes that the application may request
+ * for this camera device with the
+ * ACAMERA_CONTROL_AE_ANTIBANDING_MODE control.
List of auto-exposure modes for ACAMERA_CONTROL_AE_MODE that are supported by this camera
+ * device.
+ *
+ * @see ACAMERA_CONTROL_AE_MODE
+ *
+ *
Type: byte[n]
+ *
+ *
This tag may appear in:
+ *
+ *
ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
+ *
+ *
Not all the auto-exposure modes may be supported by a
+ * given camera device, especially if no flash unit is
+ * available. This entry lists the valid modes for
+ * ACAMERA_CONTROL_AE_MODE for this camera device.
+ *
All camera devices support ON, and all camera devices with flash
+ * units support ON_AUTO_FLASH and ON_ALWAYS_FLASH.
+ *
FULL mode camera devices always support OFF mode,
+ * which enables application control of camera exposure time,
+ * sensitivity, and frame duration.
+ *
LEGACY mode camera devices never support OFF mode.
+ * LIMITED mode devices support OFF if they support the MANUAL_SENSOR
+ * capability.
For devices that advertise NIR color filter arrangement in
+ * ACAMERA_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT, this list will always include
+ * (max, max) where max = the maximum output frame rate of the maximum YUV_420_888
+ * output size.
+ *
For devices advertising any color filter arrangement other than NIR, or devices not
+ * advertising color filter arrangement, this list will always include (min, max) and
+ * (max, max) where min <= 15 and max = the maximum output frame rate of the
+ * maximum YUV_420_888 output size.
Maximum and minimum exposure compensation values for
+ * ACAMERA_CONTROL_AE_EXPOSURE_COMPENSATION, in counts of ACAMERA_CONTROL_AE_COMPENSATION_STEP,
+ * that are supported by this camera device.
Smallest step by which the exposure compensation
+ * can be changed.
+ *
+ *
Type: rational
+ *
+ *
This tag may appear in:
+ *
+ *
ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
+ *
+ *
This is the unit for ACAMERA_CONTROL_AE_EXPOSURE_COMPENSATION. For example, if this key has
+ * a value of 1/2, then a setting of -2 for ACAMERA_CONTROL_AE_EXPOSURE_COMPENSATION means
+ * that the target EV offset for the auto-exposure routine is -1 EV.
+ *
One unit of EV compensation changes the brightness of the captured image by a factor
+ * of two. +1 EV doubles the image brightness, while -1 EV halves the image brightness.
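+ *
+ * As an illustrative sketch (not part of this header), roughly +1 EV of
+ * compensation can be derived from the advertised step; `chars` and
+ * `request` are hypothetical, and error handling plus clamping against
+ * ACAMERA_CONTROL_AE_COMPENSATION_RANGE is omitted:
+ *
+ *   ACameraMetadata_const_entry step;
+ *   ACameraMetadata_getConstEntry(chars,
+ *       ACAMERA_CONTROL_AE_COMPENSATION_STEP, &step);
+ *   // e.g. a step of 1/2 means two compensation counts per EV.
+ *   int32_t stepsPerEv = step.data.r[0].denominator / step.data.r[0].numerator;
+ *   int32_t comp = 1 * stepsPerEv;  // target: +1 EV
+ *   ACaptureRequest_setEntry_i32(request,
+ *       ACAMERA_CONTROL_AE_EXPOSURE_COMPENSATION, 1, &comp);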
List of auto-focus (AF) modes for ACAMERA_CONTROL_AF_MODE that are
+ * supported by this camera device.
+ *
+ * @see ACAMERA_CONTROL_AF_MODE
+ *
+ *
Type: byte[n]
+ *
+ *
This tag may appear in:
+ *
+ *
ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
+ *
+ *
Not all the auto-focus modes may be supported by a
+ * given camera device. This entry lists the valid modes for
+ * ACAMERA_CONTROL_AF_MODE for this camera device.
+ *
All LIMITED and FULL mode camera devices will support OFF mode, and all
+ * camera devices with adjustable focuser units
+ * (ACAMERA_LENS_INFO_MINIMUM_FOCUS_DISTANCE > 0) will support AUTO mode.
+ *
LEGACY devices will support OFF mode only if they support
+ * focusing to infinity (by also setting ACAMERA_LENS_FOCUS_DISTANCE to
+ * 0.0f).
List of color effects for ACAMERA_CONTROL_EFFECT_MODE that are supported by this camera
+ * device.
+ *
+ * @see ACAMERA_CONTROL_EFFECT_MODE
+ *
+ *
Type: byte[n]
+ *
+ *
This tag may appear in:
+ *
+ *
ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
+ *
+ *
This list contains the color effect modes that can be applied to
+ * images produced by the camera device.
+ * Implementations are not expected to be consistent across all devices.
+ * If no color effect modes are available for a device, this will only list
+ * OFF.
+ *
A color effect will only be applied if
+ * ACAMERA_CONTROL_MODE != OFF. OFF is always included in this list.
+ *
This control has no effect on the operation of other control routines such
+ * as auto-exposure, white balance, or focus.
List of scene modes for ACAMERA_CONTROL_SCENE_MODE that are supported by this camera
+ * device.
+ *
+ * @see ACAMERA_CONTROL_SCENE_MODE
+ *
+ *
Type: byte[n]
+ *
+ *
This tag may appear in:
+ *
+ *
ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
+ *
+ *
This list contains scene modes that can be set for the camera device.
+ * Only scene modes that have been fully implemented for the
+ * camera device may be included here. Implementations are not expected
+ * to be consistent across all devices.
+ *
If no scene modes are supported by the camera device, this
+ * will be set to DISABLED. Otherwise DISABLED will not be listed.
+ *
FACE_PRIORITY is always listed if face detection is
+ * supported (i.e. ACAMERA_STATISTICS_INFO_MAX_FACE_COUNT >
+ * 0).
List of auto-white-balance modes for ACAMERA_CONTROL_AWB_MODE that are supported by this
+ * camera device.
+ *
+ * @see ACAMERA_CONTROL_AWB_MODE
+ *
+ *
Type: byte[n]
+ *
+ *
This tag may appear in:
+ *
+ *
ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
+ *
+ *
Not all the auto-white-balance modes may be supported by a
+ * given camera device. This entry lists the valid modes for
+ * ACAMERA_CONTROL_AWB_MODE for this camera device.
+ *
All camera devices will support ON mode.
+ *
Camera devices that support the MANUAL_POST_PROCESSING capability will always support OFF
+ * mode, which enables application control of white balance, by using
+ * ACAMERA_COLOR_CORRECTION_TRANSFORM and ACAMERA_COLOR_CORRECTION_GAINS
+ * (ACAMERA_COLOR_CORRECTION_MODE must be set to TRANSFORM_MATRIX). This includes all FULL
+ * mode camera devices.
List of the maximum number of regions that can be used for metering in
+ * auto-exposure (AE), auto-white balance (AWB), and auto-focus (AF);
+ * this corresponds to the maximum number of elements in
+ * ACAMERA_CONTROL_AE_REGIONS, ACAMERA_CONTROL_AWB_REGIONS,
+ * and ACAMERA_CONTROL_AF_REGIONS.
ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks
+ *
+ *
+ *
Switching between or enabling AE modes (ACAMERA_CONTROL_AE_MODE) always
+ * resets the AE state to INACTIVE. Similarly, switching between ACAMERA_CONTROL_MODE,
+ * or ACAMERA_CONTROL_SCENE_MODE if ACAMERA_CONTROL_MODE == USE_SCENE_MODE resets all
+ * the algorithm states to INACTIVE.
+ *
The camera device can do several state transitions between two results, if it is
+ * allowed by the state transition table. For example: INACTIVE may never actually be
+ * seen in a result.
+ *
The state in the result is the state for this image (in sync with this image): if
+ * AE state becomes CONVERGED, then the image data associated with this result should
+ * be good to use.
+ *
Below are state transition tables for different AE modes.
+ *
+ * When ACAMERA_CONTROL_AE_MODE is AE_MODE_OFF:
+ *
State | Transition Cause | New State | Notes
+ * :------------:|:----------------:|:---------:|:-----------------------:
+ * INACTIVE | | INACTIVE | Camera device auto exposure algorithm is disabled
+ *
When ACAMERA_CONTROL_AE_MODE is AE_MODE_ON*:
+ *
State | Transition Cause | New State | Notes
+ * :-------------:|:--------------------------------------------:|:--------------:|:-----------------:
+ * INACTIVE | Camera device initiates AE scan | SEARCHING | Values changing
+ * INACTIVE | ACAMERA_CONTROL_AE_LOCK is ON | LOCKED | Values locked
+ * SEARCHING | Camera device finishes AE scan | CONVERGED | Good values, not changing
+ * SEARCHING | Camera device finishes AE scan | FLASH_REQUIRED | Converged but too dark w/o flash
+ * SEARCHING | ACAMERA_CONTROL_AE_LOCK is ON | LOCKED | Values locked
+ * CONVERGED | Camera device initiates AE scan | SEARCHING | Values changing
+ * CONVERGED | ACAMERA_CONTROL_AE_LOCK is ON | LOCKED | Values locked
+ * FLASH_REQUIRED | Camera device initiates AE scan | SEARCHING | Values changing
+ * FLASH_REQUIRED | ACAMERA_CONTROL_AE_LOCK is ON | LOCKED | Values locked
+ * LOCKED | ACAMERA_CONTROL_AE_LOCK is OFF | SEARCHING | Values not good after unlock
+ * LOCKED | ACAMERA_CONTROL_AE_LOCK is OFF | CONVERGED | Values good after unlock
+ * LOCKED | ACAMERA_CONTROL_AE_LOCK is OFF | FLASH_REQUIRED | Exposure good, but too dark
+ * PRECAPTURE | Sequence done. ACAMERA_CONTROL_AE_LOCK is OFF | CONVERGED | Ready for high-quality capture
+ * PRECAPTURE | Sequence done. ACAMERA_CONTROL_AE_LOCK is ON | LOCKED | Ready for high-quality capture
+ * LOCKED | aeLock is ON and aePrecaptureTrigger is START | LOCKED | Precapture trigger is ignored when AE is already locked
+ * LOCKED | aeLock is ON and aePrecaptureTrigger is CANCEL| LOCKED | Precapture trigger is ignored when AE is already locked
+ * Any state (excluding LOCKED) | ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER is START | PRECAPTURE | Start AE precapture metering sequence
+ * Any state (excluding LOCKED) | ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER is CANCEL| INACTIVE | Currently active precapture metering sequence is canceled
+ *
If the camera device supports AE external flash mode (ON_EXTERNAL_FLASH is included in
+ * ACAMERA_CONTROL_AE_AVAILABLE_MODES), ACAMERA_CONTROL_AE_STATE must be FLASH_REQUIRED after
+ * the camera device finishes AE scan and it's too dark without flash.
+ *
For the above table, the camera device may skip reporting any state changes that happen
+ * without application intervention (i.e. mode switch, trigger, locking). Any state that
+ * can be skipped in that manner is called a transient state.
+ *
For example, for above AE modes (AE_MODE_ON*), in addition to the state transitions
+ * listed in above table, it is also legal for the camera device to skip one or more
+ * transient states between two results. See below table for examples:
+ *
State | Transition Cause | New State | Notes
+ * :-------------:|:-----------------------------------------------------------:|:--------------:|:-----------------:
+ * INACTIVE | Camera device finished AE scan | CONVERGED | Values are already good, transient states are skipped by camera device.
+ * Any state (excluding LOCKED) | ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER is START, sequence done | FLASH_REQUIRED | Converged but too dark w/o flash after a precapture sequence, transient states are skipped by camera device.
+ * Any state (excluding LOCKED) | ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER is START, sequence done | CONVERGED | Converged after a precapture sequence, transient states are skipped by camera device.
+ * Any state (excluding LOCKED) | ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER is CANCEL, converged | FLASH_REQUIRED | Converged but too dark w/o flash after a precapture sequence is canceled, transient states are skipped by camera device.
+ * Any state (excluding LOCKED) | ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER is CANCEL, converged | CONVERGED | Converged after a precapture sequence is canceled, transient states are skipped by camera device.
+ * CONVERGED | Camera device finished AE scan | FLASH_REQUIRED | Converged but too dark w/o flash after a new scan, transient states are skipped by camera device.
+ * FLASH_REQUIRED | Camera device finished AE scan | CONVERGED | Converged after a new scan, transient states are skipped by camera device.
ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks
+ *
+ *
+ *
Switching between or enabling AF modes (ACAMERA_CONTROL_AF_MODE) always
+ * resets the AF state to INACTIVE. Similarly, switching between ACAMERA_CONTROL_MODE,
+ * or ACAMERA_CONTROL_SCENE_MODE if ACAMERA_CONTROL_MODE == USE_SCENE_MODE resets all
+ * the algorithm states to INACTIVE.
+ *
The camera device can do several state transitions between two results, if it is
+ * allowed by the state transition table. For example: INACTIVE may never actually be
+ * seen in a result.
+ *
The state in the result is the state for this image (in sync with this image): if
+ * AF state becomes FOCUSED, then the image data associated with this result should
+ * be sharp.
+ *
Below are state transition tables for different AF modes.
+ *
When ACAMERA_CONTROL_AF_MODE is AF_MODE_OFF or AF_MODE_EDOF:
+ *
State | Transition Cause | New State | Notes
+ * :------------:|:----------------:|:---------:|:-----------:
+ * INACTIVE | | INACTIVE | Never changes
+ *
When ACAMERA_CONTROL_AF_MODE is AF_MODE_AUTO or AF_MODE_MACRO:
+ *
State | Transition Cause | New State | Notes
+ * :-----------------:|:----------------:|:------------------:|:--------------:
+ * INACTIVE | AF_TRIGGER | ACTIVE_SCAN | Start AF sweep, Lens now moving
+ * ACTIVE_SCAN | AF sweep done | FOCUSED_LOCKED | Focused, Lens now locked
+ * ACTIVE_SCAN | AF sweep done | NOT_FOCUSED_LOCKED | Not focused, Lens now locked
+ * ACTIVE_SCAN | AF_CANCEL | INACTIVE | Cancel/reset AF, Lens now locked
+ * FOCUSED_LOCKED | AF_CANCEL | INACTIVE | Cancel/reset AF
+ * FOCUSED_LOCKED | AF_TRIGGER | ACTIVE_SCAN | Start new sweep, Lens now moving
+ * NOT_FOCUSED_LOCKED | AF_CANCEL | INACTIVE | Cancel/reset AF
+ * NOT_FOCUSED_LOCKED | AF_TRIGGER | ACTIVE_SCAN | Start new sweep, Lens now moving
+ * Any state | Mode change | INACTIVE |
+ *
For the above table, the camera device may skip reporting any state changes that happen
+ * without application intervention (i.e. mode switch, trigger, locking). Any state that
+ * can be skipped in that manner is called a transient state.
+ *
For example, for these AF modes (AF_MODE_AUTO and AF_MODE_MACRO), in addition to the
+ * state transitions listed in above table, it is also legal for the camera device to skip
+ * one or more transient states between two results. See below table for examples:
+ *
State | Transition Cause | New State | Notes
+ * :-----------------:|:----------------:|:------------------:|:--------------:
+ * INACTIVE | AF_TRIGGER | FOCUSED_LOCKED | Focus is already good or good after a scan, lens is now locked.
+ * INACTIVE | AF_TRIGGER | NOT_FOCUSED_LOCKED | Focus failed after a scan, lens is now locked.
+ * FOCUSED_LOCKED | AF_TRIGGER | FOCUSED_LOCKED | Focus is already good or good after a scan, lens is now locked.
+ * NOT_FOCUSED_LOCKED | AF_TRIGGER | FOCUSED_LOCKED | Focus is good after a scan, lens is now locked.
+ *
When ACAMERA_CONTROL_AF_MODE is AF_MODE_CONTINUOUS_VIDEO:
+ *
State | Transition Cause | New State | Notes
+ * :-----------------:|:-----------------------------------:|:------------------:|:--------------:
+ * INACTIVE | Camera device initiates new scan | PASSIVE_SCAN | Start AF scan, Lens now moving
+ * INACTIVE | AF_TRIGGER | NOT_FOCUSED_LOCKED | AF state query, Lens now locked
+ * PASSIVE_SCAN | Camera device completes current scan| PASSIVE_FOCUSED | End AF scan, Lens now locked
+ * PASSIVE_SCAN | Camera device fails current scan | PASSIVE_UNFOCUSED | End AF scan, Lens now locked
+ * PASSIVE_SCAN | AF_TRIGGER | FOCUSED_LOCKED | Immediate transition, if focus is good. Lens now locked
+ * PASSIVE_SCAN | AF_TRIGGER | NOT_FOCUSED_LOCKED | Immediate transition, if focus is bad. Lens now locked
+ * PASSIVE_SCAN | AF_CANCEL | INACTIVE | Reset lens position, Lens now locked
+ * PASSIVE_FOCUSED | Camera device initiates new scan | PASSIVE_SCAN | Start AF scan, Lens now moving
+ * PASSIVE_UNFOCUSED | Camera device initiates new scan | PASSIVE_SCAN | Start AF scan, Lens now moving
+ * PASSIVE_FOCUSED | AF_TRIGGER | FOCUSED_LOCKED | Immediate transition, lens now locked
+ * PASSIVE_UNFOCUSED | AF_TRIGGER | NOT_FOCUSED_LOCKED | Immediate transition, lens now locked
+ * FOCUSED_LOCKED | AF_TRIGGER | FOCUSED_LOCKED | No effect
+ * FOCUSED_LOCKED | AF_CANCEL | INACTIVE | Restart AF scan
+ * NOT_FOCUSED_LOCKED | AF_TRIGGER | NOT_FOCUSED_LOCKED | No effect
+ * NOT_FOCUSED_LOCKED | AF_CANCEL | INACTIVE | Restart AF scan
+ *
When ACAMERA_CONTROL_AF_MODE is AF_MODE_CONTINUOUS_PICTURE:
+ *
State | Transition Cause | New State | Notes
+ * :-----------------:|:------------------------------------:|:------------------:|:--------------:
+ * INACTIVE | Camera device initiates new scan | PASSIVE_SCAN | Start AF scan, Lens now moving
+ * INACTIVE | AF_TRIGGER | NOT_FOCUSED_LOCKED | AF state query, Lens now locked
+ * PASSIVE_SCAN | Camera device completes current scan | PASSIVE_FOCUSED | End AF scan, Lens now locked
+ * PASSIVE_SCAN | Camera device fails current scan | PASSIVE_UNFOCUSED | End AF scan, Lens now locked
+ * PASSIVE_SCAN | AF_TRIGGER | FOCUSED_LOCKED | Eventual transition once the focus is good. Lens now locked
+ * PASSIVE_SCAN | AF_TRIGGER | NOT_FOCUSED_LOCKED | Eventual transition if cannot find focus. Lens now locked
+ * PASSIVE_SCAN | AF_CANCEL | INACTIVE | Reset lens position, Lens now locked
+ * PASSIVE_FOCUSED | Camera device initiates new scan | PASSIVE_SCAN | Start AF scan, Lens now moving
+ * PASSIVE_UNFOCUSED | Camera device initiates new scan | PASSIVE_SCAN | Start AF scan, Lens now moving
+ * PASSIVE_FOCUSED | AF_TRIGGER | FOCUSED_LOCKED | Immediate trans. Lens now locked
+ * PASSIVE_UNFOCUSED | AF_TRIGGER | NOT_FOCUSED_LOCKED | Immediate trans. Lens now locked
+ * FOCUSED_LOCKED | AF_TRIGGER | FOCUSED_LOCKED | No effect
+ * FOCUSED_LOCKED | AF_CANCEL | INACTIVE | Restart AF scan
+ * NOT_FOCUSED_LOCKED | AF_TRIGGER | NOT_FOCUSED_LOCKED | No effect
+ * NOT_FOCUSED_LOCKED | AF_CANCEL | INACTIVE | Restart AF scan
+ *
When switching between AF_MODE_CONTINUOUS_* (CAF modes) and AF_MODE_AUTO/AF_MODE_MACRO
+ * (AUTO modes), the initial INACTIVE or PASSIVE_SCAN states may be skipped by the
+ * camera device. When a trigger is included in a mode switch request, the trigger
+ * will be evaluated in the context of the new mode in the request.
+ * See below table for examples:
+ *
State | Transition Cause | New State | Notes
+ * :-----------:|:--------------------------------------:|:----------------------------------------:|:--------------:
+ * any state | CAF-->AUTO mode switch | INACTIVE | Mode switch without trigger, initial state must be INACTIVE
+ * any state | CAF-->AUTO mode switch with AF_TRIGGER | trigger-reachable states from INACTIVE | Mode switch with trigger, INACTIVE is skipped
+ * any state | AUTO-->CAF mode switch | passively reachable states from INACTIVE | Mode switch without trigger, passive transient state is skipped
ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks
+ *
+ *
+ *
Switching between or enabling AWB modes (ACAMERA_CONTROL_AWB_MODE) always
+ * resets the AWB state to INACTIVE. Similarly, switching between ACAMERA_CONTROL_MODE,
+ * or ACAMERA_CONTROL_SCENE_MODE if ACAMERA_CONTROL_MODE == USE_SCENE_MODE resets all
+ * the algorithm states to INACTIVE.
+ *
The camera device can do several state transitions between two results, if it is
+ * allowed by the state transition table. So INACTIVE may never actually be seen in
+ * a result.
+ *
The state in the result is the state for this image (in sync with this image): if
+ * AWB state becomes CONVERGED, then the image data associated with this result should
+ * be good to use.
+ *
Below are state transition tables for different AWB modes.
+ *
When ACAMERA_CONTROL_AWB_MODE != AWB_MODE_AUTO:
+ *
State | Transition Cause | New State | Notes
+ * :------------:|:----------------:|:---------:|:-----------------------:
+ * INACTIVE | |INACTIVE |Camera device auto white balance algorithm is disabled
+ *
When ACAMERA_CONTROL_AWB_MODE is AWB_MODE_AUTO:
+ *
State | Transition Cause | New State | Notes
+ * :-------------:|:--------------------------------:|:-------------:|:-----------------:
+ * INACTIVE | Camera device initiates AWB scan | SEARCHING | Values changing
+ * INACTIVE | ACAMERA_CONTROL_AWB_LOCK is ON | LOCKED | Values locked
+ * SEARCHING | Camera device finishes AWB scan | CONVERGED | Good values, not changing
+ * SEARCHING | ACAMERA_CONTROL_AWB_LOCK is ON | LOCKED | Values locked
+ * CONVERGED | Camera device initiates AWB scan | SEARCHING | Values changing
+ * CONVERGED | ACAMERA_CONTROL_AWB_LOCK is ON | LOCKED | Values locked
+ * LOCKED | ACAMERA_CONTROL_AWB_LOCK is OFF | SEARCHING | Values not good after unlock
+ *
For the above table, the camera device may skip reporting any state changes that happen
+ * without application intervention (i.e. mode switch, trigger, locking). Any state that
+ * can be skipped in that manner is called a transient state.
+ *
For example, for this AWB mode (AWB_MODE_AUTO), in addition to the state transitions
+ * listed in above table, it is also legal for the camera device to skip one or more
+ * transient states between two results. See below table for examples:
+ *
State | Transition Cause | New State | Notes
+ * :-------------:|:--------------------------------:|:-------------:|:-----------------:
+ * INACTIVE | Camera device finished AWB scan | CONVERGED | Values are already good, transient states are skipped by camera device.
+ * LOCKED | ACAMERA_CONTROL_AWB_LOCK is OFF | CONVERGED | Values good after unlock, transient states are skipped by camera device.
List of control modes for ACAMERA_CONTROL_MODE that are supported by this camera
+ * device.
+ *
+ * @see ACAMERA_CONTROL_MODE
+ *
+ *
Type: byte[n]
+ *
+ *
This tag may appear in:
+ *
+ *
ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
+ *
+ *
This list contains control modes that can be set for the camera device.
+ * LEGACY mode devices will always support AUTO mode. LIMITED and FULL
+ * devices will always support OFF, AUTO modes.
ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
+ *
+ *
Devices that support post RAW sensitivity boost will advertise
+ * ACAMERA_CONTROL_POST_RAW_SENSITIVITY_BOOST key for controlling
+ * post RAW sensitivity boost.
+ *
This key will be null for devices that do not support any RAW format
+ * outputs. For devices that do support RAW format outputs, this key will always
+ * be present, and if a device does not support post RAW sensitivity boost, it will
+ * list (100, 100) in this key.
The amount of additional sensitivity boost applied to output images
+ * after RAW sensor data is captured.
+ *
+ *
Type: int32
+ *
+ *
This tag may appear in:
+ *
+ *
ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks
+ *
ACaptureRequest
+ *
+ *
+ *
Some camera devices support additional digital sensitivity boosting in the
+ * camera processing pipeline after sensor RAW image is captured.
+ * Such a boost will be applied to YUV/JPEG format output images but will not
+ * have effect on RAW output formats like RAW_SENSOR, RAW10, RAW12 or RAW_OPAQUE.
+ *
This key will be null for devices that do not support any RAW format
+ * outputs. For devices that do support RAW format outputs, this key will always
+ * be present, and if a device does not support post RAW sensitivity boost, it will
+ * list 100 in this key.
+ *
If the camera device cannot apply the exact boost requested, it will reduce the
+ * boost to the nearest supported value.
+ * The final boost value used will be available in the output capture result.
+ *
For devices that support post RAW sensitivity boost, the YUV/JPEG output images
+ * of such a device will have a total sensitivity of
+ * ACAMERA_SENSOR_SENSITIVITY * ACAMERA_CONTROL_POST_RAW_SENSITIVITY_BOOST / 100.
+ * The sensitivity of RAW format images will always be ACAMERA_SENSOR_SENSITIVITY.
+ *
This control is only effective if ACAMERA_CONTROL_AE_MODE or ACAMERA_CONTROL_MODE is set to
+ * OFF; otherwise the auto-exposure algorithm will override this value.
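+ *
+ * A sketch (not part of this header): requesting ISO 100 RAW with a 2x
+ * boost on processed outputs, assuming manual exposure control and a
+ * hypothetical `request`:
+ *
+ *   int32_t sensitivity = 100;  // applies to RAW outputs
+ *   int32_t boost = 200;        // YUV/JPEG total: 100 * 200 / 100 = ISO 200
+ *   ACaptureRequest_setEntry_i32(request,
+ *       ACAMERA_SENSOR_SENSITIVITY, 1, &sensitivity);
+ *   ACaptureRequest_setEntry_i32(request,
+ *       ACAMERA_CONTROL_POST_RAW_SENSITIVITY_BOOST, 1, &boost);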
ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks
+ *
ACaptureRequest
+ *
+ *
+ *
If enableZsl is true, the camera device may enable zero-shutter-lag mode for requests with
+ * STILL_CAPTURE capture intent. The camera device may use images captured in the past to
+ * produce output images for a zero-shutter-lag request. The result metadata including the
+ * ACAMERA_SENSOR_TIMESTAMP reflects the source frames used to produce output images.
+ * Therefore, the contents of the output images and the result metadata may be out of order
+ * compared to previous regular requests. enableZsl does not affect requests with other
+ * capture intents.
+ *
For example, when requests are submitted in the following order:
+ * Request A: enableZsl is ON, ACAMERA_CONTROL_CAPTURE_INTENT is PREVIEW
+ * Request B: enableZsl is ON, ACAMERA_CONTROL_CAPTURE_INTENT is STILL_CAPTURE
+ *
The output images for request B may have contents captured before the output images for
+ * request A, and the result metadata for request B may be older than the result metadata for
+ * request A.
+ *
Note that when enableZsl is true, it is not guaranteed to get output images captured in
+ * the past for requests with STILL_CAPTURE capture intent.
+ *
For applications targeting SDK versions O and newer, the value of enableZsl in
+ * TEMPLATE_STILL_CAPTURE template may be true. The value in other templates is always
+ * false if present.
+ *
For applications targeting SDK versions older than O, the value of enableZsl in all
+ * capture templates is always false if present.
+ *
For application-operated ZSL, use CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG template.
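+ *
+ * A minimal sketch (not part of this header) of opting a hypothetical
+ * still-capture request (created from the TEMPLATE_STILL_CAPTURE
+ * template) into ZSL:
+ *
+ *   uint8_t zsl = ACAMERA_CONTROL_ENABLE_ZSL_TRUE;
+ *   ACaptureRequest_setEntry_u8(stillRequest,
+ *       ACAMERA_CONTROL_ENABLE_ZSL, 1, &zsl);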
ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks
+ *
+ *
+ *
When the camera focus routine detects a change in the scene it is looking at,
+ * such as a large shift in camera viewpoint, significant motion in the scene, or a
+ * significant illumination change, this value will be set to DETECTED for a single capture
+ * result. Otherwise the value will be NOT_DETECTED. The threshold for detection is similar
+ * to what would trigger a new passive focus scan to begin in CONTINUOUS autofocus modes.
+ *
This key will be available if the camera device advertises this key via {@link ACAMERA_REQUEST_AVAILABLE_RESULT_KEYS }.
The list of extended scene modes for ACAMERA_CONTROL_EXTENDED_SCENE_MODE that are supported
+ * by this camera device, and each extended scene mode's maximum streaming (non-stall) size
+ * with effect.
ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
+ *
+ *
For DISABLED mode, the camera behaves normally with no extended scene mode enabled.
+ *
For BOKEH_STILL_CAPTURE mode, the maximum streaming dimension specifies the limit
+ * under which bokeh is effective when capture intent is PREVIEW. Note that when capture
+ * intent is PREVIEW, the bokeh effect may not be as high in quality compared to
+ * STILL_CAPTURE intent in order to maintain reasonable frame rate. The maximum streaming
+ * dimension must be one of the YUV_420_888 or PRIVATE resolutions in
+ * availableStreamConfigurations, or (0, 0) if preview bokeh is not supported. If the
+ * application configures a stream larger than the maximum streaming dimension, bokeh
+ * effect may not be applied for this stream for PREVIEW intent.
+ *
For BOKEH_CONTINUOUS mode, the maximum streaming dimension specifies the limit under
+ * which bokeh is effective. This dimension must be one of the YUV_420_888 or PRIVATE
+ * resolutions in availableStreamConfigurations, and if the sensor maximum resolution is
+ * larger than or equal to 1080p, the maximum streaming dimension must be at least 1080p.
+ * If the application configures a stream with larger dimension, the stream may not have
+ * bokeh effect applied.
ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
+ *
+ *
When extended scene mode is set, the camera device may have limited range of zoom ratios
+ * compared to when extended scene mode is DISABLED. This tag lists the zoom ratio ranges
+ * for all supported non-DISABLED extended scene modes, in the same order as in
+ * android.control.availableExtended.
+ *
Range [1.0, 1.0] means that no zoom (optical or digital) is supported.
ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks
+ *
ACaptureRequest
+ *
+ *
+ *
With bokeh mode, the camera device may blur out the parts of scene that are not in
+ * focus, creating a bokeh (or shallow depth of field) effect for people or objects.
+ *
When set to BOKEH_STILL_CAPTURE mode with STILL_CAPTURE capture intent, due to the extra
+ * processing needed for high quality bokeh effect, the stall may be longer than when
+ * capture intent is not STILL_CAPTURE.
+ *
When set to BOKEH_STILL_CAPTURE mode with PREVIEW capture intent,
+ *
+ *
If the camera device has BURST_CAPTURE capability, the frame rate requirement of
+ * BURST_CAPTURE must still be met.
+ *
All streams not larger than the maximum streaming dimension for BOKEH_STILL_CAPTURE mode
+ * (queried via {@link ACAMERA_CONTROL_AVAILABLE_EXTENDED_SCENE_MODE_MAX_SIZES })
+ * will have preview bokeh effect applied.
+ *
+ *
When set to BOKEH_CONTINUOUS mode, the configured streams' dimensions should not exceed this mode's
+ * maximum streaming dimension in order to have the bokeh effect applied. The bokeh effect may not
+ * be available for streams larger than the maximum streaming dimension.
+ *
Switching between different extended scene modes may involve reconfiguration of the camera
+ * pipeline, resulting in long latency. The application should check this key against the
+ * available session keys queried via
+ * {@link ACameraManager_getCameraCharacteristics }.
+ *
For a logical multi-camera, bokeh may be implemented by stereo vision from sub-cameras
+ * with different field of view. As a result, when bokeh mode is enabled, the camera device
+ * may override ACAMERA_SCALER_CROP_REGION or ACAMERA_CONTROL_ZOOM_RATIO, and the field of
+ * view may be smaller than when bokeh mode is off.
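+ *
+ * A sketch (not part of this header) of enabling continuous bokeh,
+ * assuming BOKEH_CONTINUOUS was found among the available extended scene
+ * modes and `request` is hypothetical:
+ *
+ *   uint8_t ext = ACAMERA_CONTROL_EXTENDED_SCENE_MODE_BOKEH_CONTINUOUS;
+ *   ACaptureRequest_setEntry_u8(request,
+ *       ACAMERA_CONTROL_EXTENDED_SCENE_MODE, 1, &ext);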
Minimum and maximum zoom ratios supported by this camera device.
+ *
+ *
Type: float[2]
+ *
+ *
This tag may appear in:
+ *
+ *
ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
+ *
+ *
If the camera device supports zoom-out from 1x zoom, minZoom will be less than 1.0, and
+ * setting ACAMERA_CONTROL_ZOOM_RATIO to values less than 1.0 increases the camera's field
+ * of view.
ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks
+ *
ACaptureRequest
+ *
+ *
+ *
Instead of using ACAMERA_SCALER_CROP_REGION for zoom, the application can now choose to
+ * use this tag to specify the desired zoom level.
+ *
By using this control, the application gains a simpler way to control zoom, which can
+ * be a combination of optical and digital zoom. For example, a multi-camera system may
+ * contain more than one lens with different focal lengths, and the user can use optical
+ * zoom by switching between lenses. Using zoomRatio has benefits in the scenarios below:
+ *
+ *
Zooming in from a wide-angle lens to a telephoto lens: A floating-point ratio provides
+ * better precision compared to an integer value of ACAMERA_SCALER_CROP_REGION.
+ *
Zooming out from a wide lens to an ultrawide lens: zoomRatio supports zoom-out whereas
+ * ACAMERA_SCALER_CROP_REGION doesn't.
+ *
+ *
To illustrate, here are several scenarios of different zoom ratios, crop regions,
+ * and output streams, for a hypothetical camera device with an active array of size
+ * (2000,1500).
+ *
+ *
Camera Configuration:
+ *
Active array size: 2000x1500 (3 MP, 4:3 aspect ratio)
As seen from the graphs above, the coordinate system of cropRegion now changes to the
+ * effective after-zoom field-of-view, and is represented by the rectangle of (0, 0,
+ * activeArrayWidth, activeArrayHeight). The same applies to AE/AWB/AF regions, and faces.
+ * This coordinate system change isn't applicable to RAW capture and its related
+ * metadata such as intrinsicCalibration and lensShadingMap.
+ *
Using the same hypothetical example above, and assuming output stream #1 (640x480) is
+ * the viewfinder stream, the application can achieve 2.0x zoom in one of two ways:
If the application intends to set aeRegions to be top-left quarter of the viewfinder
+ * field-of-view, the ACAMERA_CONTROL_AE_REGIONS should be set to (0, 0, 1000, 750) with
+ * zoomRatio set to 2.0. Alternatively, the application can set aeRegions to the equivalent
+ * region of (500, 375, 1000, 750) for zoomRatio of 1.0. If the application doesn't
+ * explicitly set ACAMERA_CONTROL_ZOOM_RATIO, its value defaults to 1.0.
+ *
One limitation of controlling zoom using zoomRatio is that the ACAMERA_SCALER_CROP_REGION
+ * must only be used for letterboxing or pillarboxing of the sensor active array, and no
+ * FREEFORM cropping can be used with ACAMERA_CONTROL_ZOOM_RATIO other than 1.0. If
+ * ACAMERA_CONTROL_ZOOM_RATIO is not 1.0, and ACAMERA_SCALER_CROP_REGION is set to be
+ * windowboxing, the camera framework will override the ACAMERA_SCALER_CROP_REGION to be
+ * the active array.
+ *
In the capture request, if the application sets ACAMERA_CONTROL_ZOOM_RATIO to a
+ * value != 1.0, the ACAMERA_CONTROL_ZOOM_RATIO tag in the capture result reflects the
+ * effective zoom ratio achieved by the camera device, and the ACAMERA_SCALER_CROP_REGION
+ * adjusts for additional crops that are not zoom related. Otherwise, if the application
+ * sets ACAMERA_CONTROL_ZOOM_RATIO to 1.0, or does not set it at all, the
+ * ACAMERA_CONTROL_ZOOM_RATIO tag in the result metadata will also be 1.0.
+ *
When the application requests a physical stream for a logical multi-camera, the
+ * ACAMERA_CONTROL_ZOOM_RATIO in the physical camera result metadata will be 1.0, and
+ * the ACAMERA_SCALER_CROP_REGION tag reflects the amount of zoom and crop done by the
+ * physical camera device.
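+ *
+ * As an illustrative sketch (not part of this header), the 2.0x example
+ * above maps to the following settings on a hypothetical `request`, for
+ * the hypothetical 2000x1500 active array:
+ *
+ *   float zoom = 2.0f;
+ *   // Top-left quarter of the zoomed (post-zoom) field of view.
+ *   int32_t aeRegion[5] = {0, 0, 1000, 750, 1000};
+ *   ACaptureRequest_setEntry_float(request, ACAMERA_CONTROL_ZOOM_RATIO, 1, &zoom);
+ *   ACaptureRequest_setEntry_i32(request, ACAMERA_CONTROL_AE_REGIONS, 5, aeRegion);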
ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks
+ *
ACaptureRequest
+ *
+ *
+ *
Edge enhancement improves sharpness and details in the captured image. OFF means
+ * no enhancement will be applied by the camera device.
+ *
FAST/HIGH_QUALITY both mean camera device determined enhancement
+ * will be applied. HIGH_QUALITY mode indicates that the
+ * camera device will use the highest-quality enhancement algorithms,
+ * even if it slows down capture rate. FAST means the camera device will
+ * not slow down capture rate when applying edge enhancement. FAST may be the same as OFF if
+ * edge enhancement will slow down capture rate. Every output stream will have a similar
+ * amount of enhancement applied.
+ *
ZERO_SHUTTER_LAG is meant to be used by applications that maintain a continuous circular
+ * buffer of high-resolution images during preview and reprocess image(s) from that buffer
+ * into a final capture when triggered by the user. In this mode, the camera device applies
+ * edge enhancement to low-resolution streams (below maximum recording resolution) to
+ * maximize preview quality, but does not apply edge enhancement to high-resolution streams,
+ * since those will be reprocessed later if necessary.
+ *
For YUV_REPROCESSING, these FAST/HIGH_QUALITY modes both mean that the camera
+ * device will apply FAST/HIGH_QUALITY YUV-domain edge enhancement, respectively.
+ * The camera device may adjust its internal edge enhancement parameters for best
+ * image quality based on the android.reprocess.effectiveExposureFactor, if it is set.
List of edge enhancement modes for ACAMERA_EDGE_MODE that are supported by this camera
+ * device.
+ *
+ * @see ACAMERA_EDGE_MODE
+ *
+ *
Type: byte[n]
+ *
+ *
This tag may appear in:
+ *
+ *
ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
+ *
+ *
Full-capability camera devices must always support OFF; camera devices that support
+ * YUV_REPROCESSING or PRIVATE_REPROCESSING will list ZERO_SHUTTER_LAG; all devices will
+ * list FAST.
ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks
+ *
ACaptureRequest
+ *
+ *
+ *
This control is only effective when flash unit is available
+ * (ACAMERA_FLASH_INFO_AVAILABLE == true).
+ *
When this control is used, the ACAMERA_CONTROL_AE_MODE must be set to ON or OFF.
+ * Otherwise, the camera device auto-exposure related flash control (ON_AUTO_FLASH,
+ * ON_ALWAYS_FLASH, or ON_AUTO_FLASH_REDEYE) will override this control.
+ *
When set to OFF, the camera device will not fire flash for this capture.
+ *
When set to SINGLE, the camera device will fire flash regardless of the camera
+ * device's auto-exposure routine's result. When used in a still capture case, this
+ * control should be used along with the auto-exposure (AE) precapture metering sequence
+ * (ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER); otherwise, the image may be incorrectly exposed.
+ *
When set to TORCH, the flash will be on continuously. This mode can be used
+ * for use cases such as preview, auto-focus assist, still capture, or video recording.
+ *
The flash status will be reported by ACAMERA_FLASH_STATE in the capture result metadata.
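+ *
+ * A sketch (not part of this header) of turning the torch on during
+ * preview; `session` and `previewRequest` are hypothetical:
+ *
+ *   uint8_t aeMode = ACAMERA_CONTROL_AE_MODE_ON;  // must be ON or OFF
+ *   uint8_t flash = ACAMERA_FLASH_MODE_TORCH;
+ *   ACaptureRequest_setEntry_u8(previewRequest, ACAMERA_CONTROL_AE_MODE, 1, &aeMode);
+ *   ACaptureRequest_setEntry_u8(previewRequest, ACAMERA_FLASH_MODE, 1, &flash);
+ *   ACameraCaptureSession_setRepeatingRequest(session, NULL, 1, &previewRequest, NULL);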
ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks
+ *
+ *
+ *
When the camera device doesn't have flash unit
+ * (i.e. ACAMERA_FLASH_INFO_AVAILABLE == false), this state will always be UNAVAILABLE.
+ * Other states indicate the current flash status.
+ *
In certain conditions, this will be available on LEGACY devices:
+ *
+ *
Flash-less cameras always return UNAVAILABLE.
+ *
Using ACAMERA_CONTROL_AE_MODE == ON_ALWAYS_FLASH
+ * will always return FIRED.
+ *
Using ACAMERA_FLASH_MODE == TORCH
+ * will always return FIRED.
+ *
+ *
In all other conditions the state will not be available on
+ * LEGACY devices (i.e. it will be null).
ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
+ *
+ *
If this value is greater than 1, then the device supports controlling the
+ * flashlight brightness level via
+ * CameraManager#turnOnTorchWithStrengthLevel.
+ * If this value is equal to 1, flashlight brightness control is not supported.
+ * The value for this key will be null for devices with no flash unit.
ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
+ *
+ *
If a flash unit is available, this will be greater than or equal to 1 and less
+ * than or equal to ACAMERA_FLASH_INFO_STRENGTH_MAXIMUM_LEVEL.
+ *
Setting flashlight brightness above the default level
+ * (i.e. ACAMERA_FLASH_INFO_STRENGTH_DEFAULT_LEVEL) may make the device more
+ * likely to reach thermal throttling conditions and slow down, or drain the
+ * battery quicker than normal. To minimize such issues, it is recommended to
+ * start the flashlight at this default brightness until a user explicitly requests
+ * a brighter level.
+ * Note that the value for this key will be null for devices with no flash unit.
+ * The default level should always be > 0.
ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks
+ *
ACaptureRequest
+ *
+ *
+ *
Hotpixel correction interpolates out, or otherwise removes, pixels
+ * that do not accurately measure the incoming light (i.e. pixels that
+ * are stuck at an arbitrary value or are oversensitive).
ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks
+ *
ACaptureRequest
+ *
+ *
+ *
The clockwise rotation angle in degrees, relative to the orientation
+ * of the camera, that the JPEG picture needs to be rotated by to be viewed
+ * upright.
+ *
Camera devices may either encode this value into the JPEG EXIF header, or
+ * rotate the image data to match this orientation. When the image data is rotated,
+ * the thumbnail data will also be rotated.
+ *
Note that this orientation is relative to the orientation of the camera sensor, given
+ * by ACAMERA_SENSOR_ORIENTATION.
+ *
To translate from the device orientation given by the Android sensor APIs for camera
+ * sensors which are not EXTERNAL, the following sample code may be used:
+ *
+ * private int getJpegOrientation(CameraCharacteristics c, int deviceOrientation) {
+ * if (deviceOrientation == android.view.OrientationEventListener.ORIENTATION_UNKNOWN) return 0;
+ * int sensorOrientation = c.get(CameraCharacteristics.SENSOR_ORIENTATION);
+ *
+ * // Round device orientation to a multiple of 90
+ * deviceOrientation = (deviceOrientation + 45) / 90 * 90;
+ *
+ * // Reverse device orientation for front-facing cameras
+ * boolean facingFront = c.get(CameraCharacteristics.LENS_FACING) == CameraCharacteristics.LENS_FACING_FRONT;
+ * if (facingFront) deviceOrientation = -deviceOrientation;
+ *
+ * // Calculate desired JPEG orientation relative to camera orientation to make
+ * // the image upright relative to the device orientation
+ * int jpegOrientation = (sensorOrientation + deviceOrientation + 360) % 360;
+ *
+ * return jpegOrientation;
+ * }
+ *
+ *
For EXTERNAL cameras the sensor orientation will always be set to 0 and the facing will
+ * also be set to EXTERNAL. The above code is not relevant in such case.
+ *
This tag is also used to describe the orientation of the HEIC image capture, in which
+ * case the rotation is reflected by
+ * EXIF orientation flag, and not by
+ * rotating the image data itself.
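As a hedged NDK-side sketch (not from the upstream documentation), a computed
+ * orientation would be applied to a capture request as follows, assuming
+ * `request` is a valid ACaptureRequest*:
+ *
+ *     int32_t jpegOrientation = 90;  // e.g. the result of getJpegOrientation() above
+ *     ACaptureRequest_setEntry_i32(request, ACAMERA_JPEG_ORIENTATION, 1, &jpegOrientation);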
ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks
+ *
ACaptureRequest
+ *
+ *
+ *
When set to (0, 0) value, the JPEG EXIF will not contain thumbnail,
+ * but the captured JPEG will still be a valid image.
+ *
For best results, when issuing a request for a JPEG image, the thumbnail size selected
+ * should have the same aspect ratio as the main JPEG output.
+ *
If the thumbnail image aspect ratio differs from the JPEG primary image aspect
+ * ratio, the camera device creates the thumbnail by cropping it from the primary image.
+ * For example, if the primary image has a 4:3 aspect ratio and the thumbnail image has
+ * a 16:9 aspect ratio, the primary image will be cropped vertically (letterbox) to
+ * generate the thumbnail image. The thumbnail image will always have a smaller Field
+ * Of View (FOV) than the primary image when aspect ratios differ.
+ *
When an ACAMERA_JPEG_ORIENTATION of non-zero degree is requested,
+ * the camera device will handle thumbnail rotation in one of the following ways:
Set the EXIF orientation flag and keep the JPEG and thumbnail image data unrotated.
+ *
Rotate the JPEG and thumbnail image data and not set the
+ * EXIF orientation flag. In this
+ * case, LIMITED or FULL hardware level devices will report rotated thumbnail size in
+ * capture result, so the width and height will be interchanged if 90 or 270 degree
+ * orientation is requested. LEGACY device will always report unrotated thumbnail
+ * size.
+ *
+ *
The tag is also used as thumbnail size for HEIC image format capture, in which case the
+ * thumbnail rotation is reflected by
+ * EXIF orientation flag, and not by
+ * rotating the thumbnail data itself.
List of JPEG thumbnail sizes for ACAMERA_JPEG_THUMBNAIL_SIZE supported by this
+ * camera device.
+ *
+ * @see ACAMERA_JPEG_THUMBNAIL_SIZE
+ *
+ *
Type: int32[2*n]
+ *
+ *
This tag may appear in:
+ *
+ *
ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
+ *
+ *
This list will include at least one non-zero resolution, plus (0,0) for indicating no
+ * thumbnail should be generated.
+ *
The following conditions will be satisfied for this size list:
+ *
+ *
The sizes will be sorted by increasing pixel area (width x height).
+ * If several resolutions have the same area, they will be sorted by increasing width.
+ *
The aspect ratio of the largest thumbnail size will be the same as the
+ * aspect ratio of the largest JPEG output size in ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS.
+ * The largest size is defined as the size that has the largest pixel area
+ * in a given size list.
+ *
Each output JPEG size in ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS will have at least
+ * one corresponding size that has the same aspect ratio in availableThumbnailSizes,
+ * and vice versa.
+ *
All non-(0, 0) sizes will have non-zero widths and heights.
+ *
+ *
This list is also used as supported thumbnail sizes for HEIC image format capture.
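As an illustrative sketch (an assumption, not upstream code), the aspect-ratio
+ * matching rule above can be used to pick the largest matching thumbnail size;
+ * `chars` is assumed to be the device's ACameraMetadata*:
+ *
+ *     void pickThumbnail(const ACameraMetadata* chars, int32_t jpegW, int32_t jpegH,
+ *                        int32_t* outW, int32_t* outH) {
+ *         ACameraMetadata_const_entry e;
+ *         *outW = 0; *outH = 0;
+ *         if (ACameraMetadata_getConstEntry(chars, ACAMERA_JPEG_AVAILABLE_THUMBNAIL_SIZES, &e) != ACAMERA_OK) return;
+ *         for (uint32_t i = 0; i + 1 < e.count; i += 2) {
+ *             int32_t w = e.data.i32[i], h = e.data.i32[i + 1];
+ *             if (w == 0 || h == 0) continue;  // (0,0) means "no thumbnail"
+ *             // Sizes are sorted by increasing area, so the last match is the largest.
+ *             if ((int64_t)w * jpegH == (int64_t)h * jpegW) { *outW = w; *outH = h; }
+ *         }
+ *     }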
The desired lens aperture size, as a ratio of lens focal length to the
+ * effective aperture diameter.
+ *
+ *
Type: float
+ *
+ *
This tag may appear in:
+ *
+ *
ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks
+ *
ACaptureRequest
+ *
+ *
+ *
Setting this value is only supported on the camera devices that have a variable
+ * aperture lens.
+ *
When this is supported and ACAMERA_CONTROL_AE_MODE is OFF,
+ * this can be set along with ACAMERA_SENSOR_EXPOSURE_TIME,
+ * ACAMERA_SENSOR_SENSITIVITY, and ACAMERA_SENSOR_FRAME_DURATION
+ * to achieve manual exposure control.
+ *
The requested aperture value may take several frames to reach the
+ * requested value; the camera device will report the current (intermediate)
+ * aperture size in capture result metadata while the aperture is changing.
+ * While the aperture is still changing, ACAMERA_LENS_STATE will be set to MOVING.
+ *
When this is supported and ACAMERA_CONTROL_AE_MODE is one of
+ * the ON modes, this will be overridden by the camera device
+ * auto-exposure algorithm; the overridden values are then provided
+ * back to the user in the corresponding result.
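A minimal manual-exposure sketch (illustrative values, not upstream code),
+ * assuming MANUAL_SENSOR support, a variable-aperture lens, and a valid
+ * ACaptureRequest* named `request`:
+ *
+ *     uint8_t aeMode = ACAMERA_CONTROL_AE_MODE_OFF;
+ *     int64_t exposureNs = 10000000;  // 10 ms
+ *     int32_t sensitivity = 400;      // ISO 400
+ *     float aperture = 2.0f;          // must be listed in ACAMERA_LENS_INFO_AVAILABLE_APERTURES
+ *     ACaptureRequest_setEntry_u8(request, ACAMERA_CONTROL_AE_MODE, 1, &aeMode);
+ *     ACaptureRequest_setEntry_i64(request, ACAMERA_SENSOR_EXPOSURE_TIME, 1, &exposureNs);
+ *     ACaptureRequest_setEntry_i32(request, ACAMERA_SENSOR_SENSITIVITY, 1, &sensitivity);
+ *     ACaptureRequest_setEntry_float(request, ACAMERA_LENS_APERTURE, 1, &aperture);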
The desired setting for the lens neutral density filter(s).
+ *
+ *
Type: float
+ *
+ *
This tag may appear in:
+ *
+ *
ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks
+ *
ACaptureRequest
+ *
+ *
+ *
This control will not be supported on most camera devices.
+ *
Lens filters are typically used to lower the amount of light the
+ * sensor is exposed to (measured in steps of EV). As used here, an EV
+ * step is the standard logarithmic representation, which is
+ * non-negative and inversely proportional to the amount of light
+ * hitting the sensor. For example, setting this to 0 would result
+ * in no reduction of the incoming light, and setting this to 2 would
+ * mean that the filter is set to reduce incoming light by two stops
+ * (allowing 1/4 of the prior amount of light to the sensor).
+ *
It may take several frames before the lens filter density changes
+ * to the requested value. While the filter density is still changing,
+ * ACAMERA_LENS_STATE will be set to MOVING.
The desired lens focal length; used for optical zoom.
+ *
+ *
Type: float
+ *
+ *
This tag may appear in:
+ *
+ *
ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks
+ *
ACaptureRequest
+ *
+ *
+ *
This setting controls the physical focal length of the camera
+ * device's lens. Changing the focal length changes the field of
+ * view of the camera device, and is usually used for optical zoom.
+ *
Like ACAMERA_LENS_FOCUS_DISTANCE and ACAMERA_LENS_APERTURE, this
+ * setting won't be applied instantaneously, and it may take several
+ * frames before the lens can change to the requested focal length.
+ * While the focal length is still changing, ACAMERA_LENS_STATE will
+ * be set to MOVING.
+ *
Optical zoom via this control will not be supported on most devices. Starting from API
+ * level 30, the camera device may combine optical and digital zoom through the
+ * ACAMERA_CONTROL_ZOOM_RATIO control.
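For devices that do expose multiple focal lengths, a hedged sketch of selecting
+ * the longest one (assuming `chars` and `request` are valid, as above):
+ *
+ *     ACameraMetadata_const_entry focals;
+ *     if (ACameraMetadata_getConstEntry(chars, ACAMERA_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
+ *                                       &focals) == ACAMERA_OK && focals.count > 1) {
+ *         // The list is in ascending order, so the last entry is the longest.
+ *         float tele = focals.data.f[focals.count - 1];
+ *         ACaptureRequest_setEntry_float(request, ACAMERA_LENS_FOCAL_LENGTH, 1, &tele);
+ *         // ACAMERA_LENS_STATE stays MOVING in results until the lens settles.
+ *     }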
ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks
+ *
ACaptureRequest
+ *
+ *
+ *
OIS is used to compensate for motion blur due to small
+ * movements of the camera during capture. Unlike digital image
+ * stabilization (ACAMERA_CONTROL_VIDEO_STABILIZATION_MODE), OIS
+ * makes use of mechanical elements to stabilize the camera
+ * sensor, and thus allows for longer exposure times before
+ * camera shake becomes apparent.
+ *
Switching between different optical stabilization modes may take several
+ * frames to initialize; the camera device will report the current mode in
+ * capture result metadata. For example, when "ON" mode is requested, the
+ * optical stabilization modes in the first several capture results may still
+ * be "OFF", and it will become "ON" when the initialization is done.
+ *
If a camera device supports both OIS and digital image stabilization
+ * (ACAMERA_CONTROL_VIDEO_STABILIZATION_MODE), turning both modes on may produce undesirable
+ * interaction, so it is recommended not to enable both at the same time.
+ *
If ACAMERA_CONTROL_VIDEO_STABILIZATION_MODE is set to "PREVIEW_STABILIZATION",
+ * ACAMERA_LENS_OPTICAL_STABILIZATION_MODE is overridden. The camera sub-system may choose
+ * to turn on hardware based image stabilization in addition to software based stabilization
+ * if it deems that appropriate. This key's value in the capture result will reflect which
+ * OIS mode was chosen.
+ *
Not all devices will support OIS; see
+ * ACAMERA_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION for
+ * available controls.
The orientation of the camera relative to the sensor
+ * coordinate system.
+ *
+ *
Type: float[4]
+ *
+ *
This tag may appear in:
+ *
+ *
ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks
+ *
+ *
+ *
The four coefficients that describe the quaternion
+ * rotation from the Android sensor coordinate system to a
+ * camera-aligned coordinate system where the X-axis is
+ * aligned with the long side of the image sensor, the Y-axis
+ * is aligned with the short side of the image sensor, and
+ * the Z-axis is aligned with the optical axis of the sensor.
+ *
To convert from the quaternion coefficients (x,y,z,w)
+ * to the axis of rotation (a_x, a_y, a_z) and rotation
+ * amount theta, the following formulas can be used:
+ *
theta = 2 * acos(w)
+ * a_x = x / sin(theta/2)
+ * a_y = y / sin(theta/2)
+ * a_z = z / sin(theta/2)
+ *
+ *
To create a 3x3 rotation matrix that applies the rotation
+ * defined by this quaternion, the following matrix can be
+ * used:
+ *
+ *     R = [ 1 - 2y^2 - 2z^2,       2xy - 2zw,       2xz + 2yw,
+ *                2xy + 2zw, 1 - 2x^2 - 2z^2,       2yz - 2xw,
+ *                2xz - 2yw,       2yz + 2xw, 1 - 2x^2 - 2y^2 ]
+ *
This matrix can then be used to apply the rotation to a
+ * column vector point with
+ *
p' = Rp
+ *
where p is in the device sensor coordinate system, and
+ * p' is in the camera-oriented coordinate system.
+ *
If ACAMERA_LENS_POSE_REFERENCE is UNDEFINED, the quaternion rotation cannot
+ * be accurately represented by the camera device, and will be represented by
+ * default values matching its default facing.
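A small sketch (not upstream code) that expands the quaternion into R and
+ * applies p' = Rp, matching the formulas above:
+ *
+ *     void rotatePoint(const float q[4], const float p[3], float out[3]) {
+ *         float x = q[0], y = q[1], z = q[2], w = q[3];
+ *         float R[9] = {
+ *             1 - 2*y*y - 2*z*z, 2*x*y - 2*z*w,     2*x*z + 2*y*w,
+ *             2*x*y + 2*z*w,     1 - 2*x*x - 2*z*z, 2*y*z - 2*x*w,
+ *             2*x*z - 2*y*w,     2*y*z + 2*x*w,     1 - 2*x*x - 2*y*y };
+ *         for (int i = 0; i < 3; i++)
+ *             out[i] = R[3*i] * p[0] + R[3*i + 1] * p[1] + R[3*i + 2] * p[2];
+ *     }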
ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks
+ *
+ *
+ *
The position of the camera device's lens optical center,
+ * as a three-dimensional vector (x,y,z).
+ *
Prior to Android P, or when ACAMERA_LENS_POSE_REFERENCE is PRIMARY_CAMERA, this position
+ * is relative to the optical center of the largest camera device facing in the same
+ * direction as this camera, in the Android sensor
+ * coordinate axes. Note that only the axis definitions are shared with the sensor
+ * coordinate system, but not the origin.
+ *
If this device is the largest or only camera device with a given facing, then this
+ * position will be (0, 0, 0); a camera device with a lens optical center located 3 cm
+ * from the main sensor along the +X axis (to the right from the user's perspective) will
+ * report (0.03, 0, 0). Note that this means that, for many computer vision
+ * applications, the position needs to be negated to convert it to a translation from the
+ * camera to the origin.
+ *
To transform a pixel coordinates between two cameras facing the same direction, first
+ * the source camera ACAMERA_LENS_DISTORTION must be corrected for. Then the source
+ * camera ACAMERA_LENS_INTRINSIC_CALIBRATION needs to be applied, followed by the
+ * ACAMERA_LENS_POSE_ROTATION of the source camera, the translation of the source camera
+ * relative to the destination camera, the ACAMERA_LENS_POSE_ROTATION of the destination
+ * camera, and finally the inverse of ACAMERA_LENS_INTRINSIC_CALIBRATION of the destination
+ * camera. This obtains a radial-distortion-free coordinate in the destination camera pixel
+ * coordinates.
+ *
To compare this against a real image from the destination camera, the destination camera
+ * image then needs to be corrected for radial distortion before comparison or sampling.
+ *
When ACAMERA_LENS_POSE_REFERENCE is GYROSCOPE, then this position is relative to
+ * the center of the primary gyroscope on the device. The axis definitions are the same as
+ * with PRIMARY_CAMERA.
+ *
When ACAMERA_LENS_POSE_REFERENCE is UNDEFINED, this position cannot be accurately
+ * represented by the camera device, and will be represented as (0, 0, 0).
+ *
When ACAMERA_LENS_POSE_REFERENCE is AUTOMOTIVE, then this position is relative to the
+ * origin of the automotive sensor coordinate system, which is at the center of the rear
+ * axle.
ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks
+ *
+ *
+ *
For lens parameters ACAMERA_LENS_FOCAL_LENGTH, ACAMERA_LENS_FOCUS_DISTANCE,
+ * ACAMERA_LENS_FILTER_DENSITY and ACAMERA_LENS_APERTURE, when changes are requested,
+ * they may take several frames to reach the requested values. This state indicates
+ * the current status of the lens parameters.
+ *
When the state is STATIONARY, the lens parameters are not changing. This could be
+ * either because the parameters are all fixed, or because the lens has had enough
+ * time to reach the most recently-requested values.
+ * If all of these lens parameters are fixed for a camera device, as listed below:
+ *
+ *
Fixed focus (ACAMERA_LENS_INFO_MINIMUM_FOCUS_DISTANCE == 0), which means
+ * ACAMERA_LENS_FOCUS_DISTANCE parameter will always be 0.
+ *
Fixed focal length (ACAMERA_LENS_INFO_AVAILABLE_FOCAL_LENGTHS contains single value),
+ * which means the optical zoom is not supported.
+ *
No ND filter (ACAMERA_LENS_INFO_AVAILABLE_FILTER_DENSITIES contains only 0).
+ *
Fixed aperture (ACAMERA_LENS_INFO_AVAILABLE_APERTURES contains single value).
+ *
+ *
Then this state will always be STATIONARY.
+ *
When the state is MOVING, it indicates that at least one of the lens parameters
+ * is changing.
The parameters for this camera device's intrinsic
+ * calibration.
+ *
+ *
Type: float[5]
+ *
+ *
This tag may appear in:
+ *
+ *
ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks
+ *
+ *
+ *
The five calibration parameters that describe the
+ * transform from camera-centric 3D coordinates to sensor
+ * pixel coordinates:
+ *
[f_x, f_y, c_x, c_y, s]
+ *
+ *
Where f_x and f_y are the horizontal and vertical
+ * focal lengths, [c_x, c_y] is the position of the optical
+ * axis, and s is a skew parameter for the sensor plane not
+ * being aligned with the lens plane.
+ *
These are typically used within a transformation matrix K:
+ *
+ *     K = [ f_x,   s, c_x,
+ *             0, f_y, c_y,
+ *             0,   0,   1 ]
+ *
which can then be combined with the camera pose rotation
+ * R and translation t (ACAMERA_LENS_POSE_ROTATION and
+ * ACAMERA_LENS_POSE_TRANSLATION, respectively) to calculate the
+ * complete transform from world coordinates to pixel
+ * coordinates:
+ *
+ *     P = [ K 0 ] * [ R -Rt ]
+ *         [ 0 1 ]   [ 0  1  ]
+ *
+ *
(Note the negation of poseTranslation when mapping from camera
+ * to world coordinates, and multiplication by the rotation).
+ *
With p_w being a point in the world coordinate system
+ * and p_s being a point in the camera active pixel array
+ * coordinate system, and with the mapping including the
+ * homogeneous division by z:
+ *
+ *     [x_h, y_h, z_h, w_h] = P p_w
+ *     [x_s, y_s, z_s, w_s] = [x_h, y_h, z_h, w_h] / z_h
+ *
so [x_s, y_s] are the pixel coordinates of the world
+ * point, z_s = 1, and w_s is a measurement of disparity
+ * (depth) in pixel coordinates.
+ *
Note that the coordinate system for this transform is the
+ * ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE system,
+ * where (0,0) is the top-left of the
+ * preCorrectionActiveArraySize rectangle. Once the pose and
+ * intrinsic calibration transforms have been applied to a
+ * world point, then the ACAMERA_LENS_DISTORTION
+ * transform needs to be applied, and the result adjusted to
+ * be in the ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE coordinate
+ * system (where (0, 0) is the top-left of the
+ * activeArraySize rectangle), to determine the final pixel
+ * coordinate of the world point for processed (non-RAW)
+ * output buffers.
+ *
For camera devices, the center of pixel (x,y) is located at
+ * coordinate (x + 0.5, y + 0.5). So on a device with a
+ * precorrection active array of size (10,10), the valid pixel
+ * indices go from (0,0)-(9,9), and a perfectly-built camera would
+ * have an optical center at the exact center of the pixel grid, at
+ * coordinates (5.0, 5.0), which is the top-left corner of pixel
+ * (5,5).
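As a hedged sketch of the intrinsic part of this mapping (not upstream code),
+ * projecting a camera-space point to pre-correction pixel coordinates with the
+ * five parameters [f_x, f_y, c_x, c_y, s]:
+ *
+ *     void projectPoint(const float calib[5], const float p[3], float out[2]) {
+ *         float fx = calib[0], fy = calib[1], cx = calib[2], cy = calib[3], s = calib[4];
+ *         // Apply K and the homogeneous division by z.
+ *         out[0] = (fx * p[0] + s * p[1]) / p[2] + cx;
+ *         out[1] = (fy * p[1]) / p[2] + cy;
+ *     }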
ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks
+ *
+ *
+ *
Three radial distortion coefficients [kappa_1, kappa_2,
+ * kappa_3] and two tangential distortion coefficients
+ * [kappa_4, kappa_5] that can be used to correct the
+ * lens's geometric distortion with the mapping equations:
+ *
+ *     x_c = x_i * ( 1 + kappa_1 * r^2 + kappa_2 * r^4 + kappa_3 * r^6 ) +
+ *           kappa_4 * (2 * x_i * y_i) + kappa_5 * ( r^2 + 2 * x_i^2 )
+ *     y_c = y_i * ( 1 + kappa_1 * r^2 + kappa_2 * r^4 + kappa_3 * r^6 ) +
+ *           kappa_5 * (2 * x_i * y_i) + kappa_4 * ( r^2 + 2 * y_i^2 )
+ *
Here, [x_c, y_c] are the coordinates to sample in the
+ * input image that correspond to the pixel values in the
+ * corrected image at the coordinate [x_i, y_i].
The pixel coordinates are defined in a coordinate system
+ * related to the ACAMERA_LENS_INTRINSIC_CALIBRATION
+ * calibration fields; see that entry for details of the mapping stages.
+ * Both [x_i, y_i] and [x_c, y_c]
+ * have (0,0) at the lens optical center [c_x, c_y], and
+ * the range of the coordinates depends on the focal length
+ * terms of the intrinsic calibration.
+ *
Finally, r represents the radial distance from the
+ * optical center, r^2 = x_i^2 + y_i^2.
+ *
The distortion model used is the Brown-Conrady model.
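A direct transcription of the mapping equations above into C (a sketch, with
+ * k[0..4] = [kappa_1..kappa_5]):
+ *
+ *     void distort(const float k[5], float xi, float yi, float* xc, float* yc) {
+ *         float r2 = xi * xi + yi * yi;
+ *         float radial = 1 + k[0]*r2 + k[1]*r2*r2 + k[2]*r2*r2*r2;
+ *         *xc = xi * radial + k[3] * (2*xi*yi) + k[4] * (r2 + 2*xi*xi);
+ *         *yc = yi * radial + k[4] * (2*xi*yi) + k[3] * (r2 + 2*yi*yi);
+ *     }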
The correction coefficients to correct for this camera device's
+ * radial and tangential lens distortion for a
+ * CaptureRequest with ACAMERA_SENSOR_PIXEL_MODE set to
+ * CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION.
+ *
+ * @see ACAMERA_SENSOR_PIXEL_MODE
+ *
+ *
Type: float[5]
+ *
+ *
This tag may appear in:
+ *
+ *
ACameraMetadata from ACameraManager_getCameraCharacteristics
List of neutral density filter values for
+ * ACAMERA_LENS_FILTER_DENSITY that are supported by this camera device.
+ *
+ * @see ACAMERA_LENS_FILTER_DENSITY
+ *
+ *
Type: float[n]
+ *
+ *
This tag may appear in:
+ *
+ *
ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
+ *
+ *
If a neutral density filter is not supported by this camera device,
+ * this list will contain only 0. Otherwise, this list will include every
+ * filter density supported by the camera device, in ascending order.
List of focal lengths for ACAMERA_LENS_FOCAL_LENGTH that are supported by this camera
+ * device.
+ *
+ * @see ACAMERA_LENS_FOCAL_LENGTH
+ *
+ *
Type: float[n]
+ *
+ *
This tag may appear in:
+ *
+ *
ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
+ *
+ *
If optical zoom is not supported, this list will only contain
+ * a single value corresponding to the fixed focal length of the
+ * device. Otherwise, this list will include every focal length supported
+ * by the camera device, in ascending order.
ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
+ *
+ *
If the lens is not fixed focus, the camera device will report this
+ * field when ACAMERA_LENS_INFO_FOCUS_DISTANCE_CALIBRATION is APPROXIMATE or CALIBRATED.
ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
+ *
+ *
The lens focus distance calibration quality determines the reliability of
+ * focus related metadata entries, i.e. ACAMERA_LENS_FOCUS_DISTANCE,
+ * ACAMERA_LENS_FOCUS_RANGE, ACAMERA_LENS_INFO_HYPERFOCAL_DISTANCE, and
+ * ACAMERA_LENS_INFO_MINIMUM_FOCUS_DISTANCE.
+ *
APPROXIMATE and CALIBRATED devices report the focus metadata in
+ * units of diopters (1/meter), so 0.0f represents focusing at infinity,
+ * and increasing positive numbers represent focusing closer and closer
+ * to the camera device. The focus distance control also uses diopters
+ * on these devices.
+ *
UNCALIBRATED devices do not use units that are directly comparable
+ * to any real physical measurement, but 0.0f still represents farthest
+ * focus, and ACAMERA_LENS_INFO_MINIMUM_FOCUS_DISTANCE represents the
+ * nearest focus the device can achieve.
ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks
+ *
ACaptureRequest
+ *
+ *
+ *
The noise reduction algorithm attempts to improve image quality by removing
+ * excessive noise added by the capture process, especially in dark conditions.
+ *
OFF means no noise reduction will be applied by the camera device, for both raw and
+ * YUV domain.
+ *
MINIMAL means that only sensor raw domain basic noise reduction is enabled, to remove
+ * demosaicing or other processing artifacts. For YUV_REPROCESSING, MINIMAL is the same as OFF.
+ * This mode is optional and may not be supported by all devices. The application should check
+ * ACAMERA_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES before using it.
+ *
FAST/HIGH_QUALITY both mean camera device determined noise filtering
+ * will be applied. HIGH_QUALITY mode indicates that the camera device
+ * will use the highest-quality noise filtering algorithms,
+ * even if it slows down capture rate. FAST means the camera device will not
+ * slow down capture rate when applying noise filtering. FAST may be the same as MINIMAL if
+ * MINIMAL is listed, or the same as OFF if any noise filtering will slow down capture rate.
+ * Every output stream will have a similar amount of enhancement applied.
+ *
ZERO_SHUTTER_LAG is meant to be used by applications that maintain a continuous circular
+ * buffer of high-resolution images during preview and reprocess image(s) from that buffer
+ * into a final capture when triggered by the user. In this mode, the camera device applies
+ * noise reduction to low-resolution streams (below maximum recording resolution) to maximize
+ * preview quality, but does not apply noise reduction to high-resolution streams, since
+ * those will be reprocessed later if necessary.
+ *
For YUV_REPROCESSING, these FAST/HIGH_QUALITY modes both mean that the camera device
+ * will apply FAST/HIGH_QUALITY YUV domain noise reduction, respectively. The camera device
+ * may adjust the noise reduction parameters for best image quality based on the
+ * android.reprocess.effectiveExposureFactor if it is set.
The maximum numbers of different types of output streams
+ * that can be configured and used simultaneously by a camera device.
+ *
+ *
Type: int32[3]
+ *
+ *
This tag may appear in:
+ *
+ *
ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
+ *
+ *
This is a 3-element tuple that contains the max number of simultaneous output
+ * streams for raw sensor, processed (but not stalling), and processed (and stalling)
+ * formats respectively. For example, assuming that JPEG is typically a processed and
+ * stalling stream, if max raw sensor format output stream number is 1, max YUV streams
+ * number is 3, and max JPEG stream number is 2, then this tuple should be (1, 3, 2).
+ *
This lists the upper bound of the number of output streams supported by
+ * the camera device. Using more streams simultaneously may require more hardware and
+ * CPU resources that will consume more power. The image format for an output stream can
+ * be any supported format provided by ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS.
+ * The formats defined in ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS can be categorized
+ * into the 3 stream types as below:
+ *
+ *
Processed (but stalling): any non-RAW format with a stallDurations > 0.
+ * Typically {@link AIMAGE_FORMAT_JPEG JPEG format}.
+ *
Raw formats: {@link AIMAGE_FORMAT_RAW16 RAW_SENSOR}, {@link AIMAGE_FORMAT_RAW10 RAW10}, or
+ * {@link AIMAGE_FORMAT_RAW12 RAW12}.
+ *
Processed (but not stalling): any non-RAW format without a stall duration. Typically
+ * {@link AIMAGE_FORMAT_YUV_420_888 YUV_420_888},
+ * NV21, YV12, or {@link AIMAGE_FORMAT_Y8 Y8}.
Specifies the number of pipeline stages the frame went
+ * through from when it was exposed to when the final completed result
+ * was available to the framework.
+ *
+ *
Type: byte
+ *
+ *
This tag may appear in:
+ *
+ *
ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks
+ *
+ *
+ *
Depending on what settings are used in the request, and
+ * what streams are configured, the data may undergo less processing,
+ * and some pipeline stages skipped.
+ *
See ACAMERA_REQUEST_PIPELINE_MAX_DEPTH for more details.
Specifies the number of maximum pipeline stages a frame
+ * has to go through from when it's exposed to when it's available
+ * to the framework.
+ *
+ *
Type: byte
+ *
+ *
This tag may appear in:
+ *
+ *
ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
+ *
+ *
A typical minimum value for this is 2 (one stage to expose,
+ * one stage to readout) from the sensor. The ISP then usually adds
+ * its own stages to do custom HW processing. Further stages may be
+ * added by SW processing.
+ *
Depending on what settings are used (e.g. YUV, JPEG) and what
+ * processing is enabled (e.g. face detection), the actual pipeline
+ * depth (specified by ACAMERA_REQUEST_PIPELINE_DEPTH) may be less than
+ * the max pipeline depth.
+ *
A pipeline depth of X stages is equivalent to a pipeline latency of
+ * X frame intervals.
+ *
This value will normally be 8 or less; however, for a high speed capture session,
+ * the max pipeline depth will be up to 8 x the size of the high speed capture request list.
Defines how many sub-components
+ * a result will be composed of.
+ *
+ *
Type: int32
+ *
+ *
This tag may appear in:
+ *
+ *
ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
+ *
+ *
In order to combat the pipeline latency, partial results
+ * may be delivered to the application layer from the camera device as
+ * soon as they are available.
+ *
Optional; defaults to 1. A value of 1 means that partial
+ * results are not supported, and only the final TotalCaptureResult will
+ * be produced by the camera device.
+ *
A typical use case for this might be: after requesting an
+ * auto-focus (AF) lock the new AF state might be available 50%
+ * of the way through the pipeline. The camera device could
+ * then immediately dispatch this state via a partial result to
+ * the application, and the rest of the metadata via later
+ * partial results.
ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
+ *
+ *
A capability is a contract that the camera device makes in order
+ * to be able to satisfy one or more use cases.
+ *
Listing a capability guarantees that the whole set of features
+ * required to support a common use case will all be available.
+ *
Using a subset of the functionality provided by an unsupported
+ * capability may be possible on a specific camera device implementation;
+ * to do this, query each of ACAMERA_REQUEST_AVAILABLE_REQUEST_KEYS,
+ * ACAMERA_REQUEST_AVAILABLE_RESULT_KEYS, and
+ * ACAMERA_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS.
+ *
The following capabilities are guaranteed to be available on
+ * ACAMERA_INFO_SUPPORTED_HARDWARE_LEVEL == FULL devices:
+ *
+ *
MANUAL_SENSOR
+ *
MANUAL_POST_PROCESSING
+ *
+ *
Other capabilities may be available on either FULL or LIMITED
+ * devices, but the application should query this key to be sure.
A list of all keys that the camera device has available
+ * to use with {@link ACaptureRequest }.
+ *
+ *
Type: int32[n]
+ *
+ *
This tag may appear in:
+ *
+ *
ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
+ *
+ *
Attempting to set a key into a CaptureRequest that is not
+ * listed here will result in an invalid request and will be rejected
+ * by the camera device.
+ *
This field can be used to query the feature set of a camera device
+ * at a more granular level than capabilities. This is especially
+ * important for optional keys that are not listed under any capability
+ * in ACAMERA_REQUEST_AVAILABLE_CAPABILITIES.
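For example, a hedged helper (not upstream code) that checks membership before
+ * attempting to set an optional key:
+ *
+ *     int isRequestKeyAvailable(const ACameraMetadata* chars, uint32_t tag) {
+ *         ACameraMetadata_const_entry keys;
+ *         if (ACameraMetadata_getConstEntry(chars, ACAMERA_REQUEST_AVAILABLE_REQUEST_KEYS,
+ *                                           &keys) != ACAMERA_OK) return 0;
+ *         for (uint32_t i = 0; i < keys.count; i++)
+ *             if ((uint32_t)keys.data.i32[i] == tag) return 1;
+ *         return 0;
+ *     }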
A list of all keys that the camera device has available to use with {@link ACameraCaptureSession_captureCallback_result }.
+ *
+ *
Type: int32[n]
+ *
+ *
This tag may appear in:
+ *
+ *
ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
+ *
+ *
Attempting to get a key from a CaptureResult that is not
+ * listed here will always return a null value. Getting a key from
+ * a CaptureResult that is listed here will generally never return a null
+ * value.
+ *
The following keys may return null unless they are enabled:
+ *
+ *
android.statistics.lensShadingMap (non-null iff android.statistics.lensShadingMapMode == ON)
+ *
+ *
(Those sometimes-null keys will nevertheless be listed here
+ * if they are available.)
+ *
This field can be used to query the feature set of a camera device
+ * at a more granular level than capabilities. This is especially
+ * important for optional keys that are not listed under any capability
+ * in ACAMERA_REQUEST_AVAILABLE_CAPABILITIES.
A list of all keys that the camera device has available to use with {@link ACameraManager_getCameraCharacteristics }.
+ *
+ *
Type: int32[n]
+ *
+ *
This tag may appear in:
+ *
+ *
ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
+ *
+ *
This entry follows the same rules as
+ * ACAMERA_REQUEST_AVAILABLE_RESULT_KEYS (except that it applies for
+ * CameraCharacteristics instead of CaptureResult). See above for more
+ * details.
A subset of the available request keys that the camera device
+ * can pass as part of the capture session initialization.
+ *
+ *
Type: int32[n]
+ *
+ *
This tag may appear in:
+ *
+ *
ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
+ *
+ *
This is a subset of ACAMERA_REQUEST_AVAILABLE_REQUEST_KEYS which
+ * contains a list of keys that are difficult to apply per-frame and
+ * can result in unexpected delays when modified during the capture session
+ * lifetime. Typical examples include parameters that require a
+ * time-consuming hardware re-configuration or internal camera pipeline
+ * change. For performance reasons we advise clients to pass their initial
+ * values as part of
+ * {@link ACameraDevice_createCaptureSessionWithSessionParameters }.
+ * Once the camera capture session is enabled it is also recommended to avoid
+ * changing them from their initial values set in
+ * {@link ACameraDevice_createCaptureSessionWithSessionParameters }.
+ * Control over session parameters can still be exerted in capture requests
+ * but clients should be aware and expect delays during their application.
+ * An example usage scenario could look like this:
+ *
+ *
The camera client starts by querying the session parameter key list via
+ * {@link ACameraManager_getCameraCharacteristics }.
+ *
Before triggering the capture session create sequence, a capture request
+ * must be built via
+ * {@link ACameraDevice_createCaptureRequest }
+ * using an appropriate template matching the particular use case.
+ *
The client should go over the list of session parameters and check
+ * whether some of the keys listed match the parameters that
+ * they intend to modify as part of the first capture request.
+ *
If there is no such match, the capture request can be passed
+ * unmodified to
+ * {@link ACameraDevice_createCaptureSessionWithSessionParameters }.
+ *
If matches do exist, the client should update the respective values
+ * and pass the request to
+ * {@link ACameraDevice_createCaptureSessionWithSessionParameters }.
+ *
After the capture session initialization completes, the session parameter
+ * key list can continue to serve as a reference when posting or updating
+ * further requests. As mentioned above, further changes to session
+ * parameters should ideally be avoided; if updates are necessary,
+ * however, clients could expect a delay/glitch during the
+ * parameter switch. A minimal sketch of this sequence appears below.
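A minimal sketch of that sequence (assumptions: `device` is an opened
+ * ACameraDevice*, and `outputs`/`stateCallbacks` have already been set up;
+ * error handling elided):
+ *
+ *     ACaptureRequest* req = NULL;
+ *     ACameraDevice_createCaptureRequest(device, TEMPLATE_RECORD, &req);
+ *     // ...check ACAMERA_REQUEST_AVAILABLE_SESSION_KEYS and set any matching
+ *     // keys on `req` before creating the session...
+ *     ACameraCaptureSession* session = NULL;
+ *     ACameraDevice_createCaptureSessionWithSessionParameters(
+ *             device, outputs, req, &stateCallbacks, &session);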
A subset of the available request keys that can be overridden for
+ * physical devices backing a logical multi-camera.
+ *
+ *
Type: int32[n]
+ *
+ *
This tag may appear in:
+ *
+ *
ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
+ *
+ *
This is a subset of ACAMERA_REQUEST_AVAILABLE_REQUEST_KEYS which contains a list
+ * of keys that can be overridden using
+ * Builder#setPhysicalCameraKey.
+ * The respective value of such request key can be obtained by calling
+ * Builder#getPhysicalCameraKey.
+ * Capture requests that contain individual physical device requests must be built via
+ * CameraDevice#createCaptureRequest(int, Set).
ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
+ *
+ *
Devices supporting the 10-bit output capability
+ * CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES_DYNAMIC_RANGE_TEN_BIT
+ * must list their supported dynamic range profiles. In case the camera is not able to
+ * support every possible profile combination within a single capture request, then the
+ * constraints must be listed here as well.
The desired region of the sensor to read out for this capture.
+ *
+ *
Type: int32[4]
+ *
+ *
This tag may appear in:
+ *
+ *
ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks
+ *
ACaptureRequest
+ *
+ *
+ *
This control can be used to implement digital zoom.
+ *
For devices not supporting ACAMERA_DISTORTION_CORRECTION_MODE control, the coordinate
+ * system always follows that of ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE, with (0, 0) being
+ * the top-left pixel of the active array.
+ *
For devices supporting ACAMERA_DISTORTION_CORRECTION_MODE control, the coordinate system
+ * depends on the mode being set. When the distortion correction mode is OFF, the
+ * coordinate system follows ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE, with (0,
+ * 0) being the top-left pixel of the pre-correction active array. When the distortion
+ * correction mode is not OFF, the coordinate system follows
+ * ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE, with (0, 0) being the top-left pixel of the
+ * active array.
+ *
Output streams use this rectangle to produce their output, cropping to a smaller region
+ * if necessary to maintain the stream's aspect ratio, then scaling the sensor input to
+ * match the output's configured resolution.
+ *
The crop region is applied after the RAW to other color space (e.g. YUV)
+ * conversion. Since raw streams (e.g. RAW16) don't have the conversion stage, they are not
+ * croppable. The crop region will be ignored by raw streams.
+ *
For non-raw streams, any additional per-stream cropping will be done to maximize the
+ * final pixel area of the stream.
+ *
For example, if the crop region is set to a 4:3 aspect ratio, then 4:3 streams will use
+ * the exact crop region. 16:9 streams will further crop vertically (letterbox).
+ *
Conversely, if the crop region is set to a 16:9, then 4:3 outputs will crop horizontally
+ * (pillarbox), and 16:9 streams will match exactly. These additional crops will be
+ * centered within the crop region.
+ *
To illustrate, here are several scenarios of different crop regions and output streams,
+ * for a hypothetical camera device with an active array of size (2000,1500). Note that
+ * several of these examples use non-centered crop regions for ease of illustration; such
+ * regions are only supported on devices with FREEFORM capability
+ * (ACAMERA_SCALER_CROPPING_TYPE == FREEFORM), but this does not affect the way the crop
+ * rules work otherwise.
+ *
+ *
Camera Configuration:
+ *
Active array size: 2000x1500 (3 MP, 4:3 aspect ratio)
Note that in this case, neither of the two outputs is a subset of the other, with
+ * each containing image data the other doesn't have.
+ *
+ *
+ *
+ *
If the coordinate system is ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE, the width and height
+ * of the crop region cannot be set to be smaller than
+ * floor( activeArraySize.width / ACAMERA_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM ) and
+ * floor( activeArraySize.height / ACAMERA_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM ), respectively.
+ *
If the coordinate system is ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE, the width
+ * and height of the crop region cannot be set to be smaller than
+ * floor( preCorrectionActiveArraySize.width / ACAMERA_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM )
+ * and
+ * floor( preCorrectionActiveArraySize.height / ACAMERA_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM ),
+ * respectively.
+ *
The camera device may adjust the crop region to account for rounding and other hardware
+ * requirements; the final crop region used will be included in the output capture result.
+ *
The camera sensor output aspect ratio depends on factors such as output stream
+ * combination and ACAMERA_CONTROL_AE_TARGET_FPS_RANGE, and shouldn't be adjusted by using
+ * this control. The camera device will treat different camera sensor output sizes
+ * (potentially with in-sensor crop) as the same crop of
+ * ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE. As a result, the application shouldn't assume the
+ * maximum crop region always maps to the same aspect ratio or field of view for the
+ * sensor output.
+ *
Starting from API level 30, it's strongly recommended to use ACAMERA_CONTROL_ZOOM_RATIO
+ * to take advantage of better support for zoom with logical multi-camera. The benefits
+ * include better precision with optical-digital zoom combination, and ability to do
+ * zoom-out from 1.0x. When using ACAMERA_CONTROL_ZOOM_RATIO for zoom, the crop region in
+ * the capture request should be left as the default activeArray size. The
+ * coordinate system is post-zoom, meaning that the activeArraySize or
+ * preCorrectionActiveArraySize covers the camera device's field of view "after" zoom. See
+ * ACAMERA_CONTROL_ZOOM_RATIO for details.
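As a hedged pre-API-30 style sketch (not upstream code), a centered 2x digital
+ * zoom, where `aw`/`ah` are the active array width and height read from
+ * ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE:
+ *
+ *     int32_t crop[4] = { aw / 4, ah / 4, aw / 2, ah / 2 };  // (left, top, width, height)
+ *     ACaptureRequest_setEntry_i32(request, ACAMERA_SCALER_CROP_REGION, 4, crop);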
The maximum ratio between both active area width
+ * and crop region width, and active area height and
+ * crop region height, for ACAMERA_SCALER_CROP_REGION.
+ *
+ * @see ACAMERA_SCALER_CROP_REGION
+ *
+ *
Type: float
+ *
+ *
This tag may appear in:
+ *
+ *
ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
+ *
+ *
This represents the maximum amount of zooming possible by
+ * the camera device, or equivalently, the minimum cropping
+ * window size.
+ *
Crop regions that have a width or height that is smaller
+ * than this ratio allows will be rounded up to the minimum
+ * allowed size by the camera device.
+ *
Starting from API level 30, when using ACAMERA_CONTROL_ZOOM_RATIO to zoom in or out,
+ * the application must use ACAMERA_CONTROL_ZOOM_RATIO_RANGE to query both the minimum and
+ * maximum zoom ratio.
ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
+ *
+ *
The configurations are listed as (format, width, height, input?)
+ * tuples.
+ *
For a given use case, the actual maximum supported resolution
+ * may be lower than what is listed here, depending on the destination
+ * Surface for the image data. For example, for recording video,
+ * the video encoder chosen may have a maximum size limit (e.g. 1080p)
+ * smaller than what the camera (e.g. maximum resolution is 3264x2448)
+ * can provide.
+ *
Please reference the documentation for the image data destination to
+ * check if it limits the maximum size for image data.
+ *
Not all output formats may be supported in a configuration with
+ * an input stream of a particular format. For more details, see
+ * android.scaler.availableInputOutputFormatsMap.
+ *
For applications targeting SDK version older than 31, the following table
+ * describes the minimum required output stream configurations based on the hardware level
+ * (ACAMERA_INFO_SUPPORTED_HARDWARE_LEVEL):
+ *
Format | Size | Hardware Level | Notes
+ * :-------------:|:--------------------------------------------:|:--------------:|:--------------:
+ * JPEG | ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE | Any |
+ * JPEG | 1920x1080 (1080p) | Any | if 1080p <= activeArraySize
+ * JPEG | 1280x720 (720p) | Any | if 720p <= activeArraySize
+ * JPEG | 640x480 (480p) | Any | if 480p <= activeArraySize
+ * JPEG | 320x240 (240p) | Any | if 240p <= activeArraySize
+ * YUV_420_888 | all output sizes available for JPEG | FULL |
+ * YUV_420_888 | all output sizes available for JPEG, up to the maximum video size | LIMITED |
+ * IMPLEMENTATION_DEFINED | same as YUV_420_888 | Any |
+ *
For applications targeting SDK version 31 or newer, if the mobile device declares to be
+ * media performance class 12 or higher by setting
+ * VERSION#MEDIA_PERFORMANCE_CLASS to be 31 or larger,
+ * the primary camera devices (first rear/front camera in the camera ID list) will not
+ * support JPEG sizes smaller than 1080p. If the application configures a JPEG stream
+ * smaller than 1080p, the camera device will round up the JPEG image size to at least
+ * 1080p. The requirements for IMPLEMENTATION_DEFINED and YUV_420_888 stay the same.
+ * These new minimum required output stream configurations are illustrated in the table below:
+ *
Format | Size | Hardware Level | Notes
+ * :-------------:|:--------------------------------------------:|:--------------:|:--------------:
+ * JPEG | ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE | Any |
+ * JPEG | 1920x1080 (1080p) | Any | if 1080p <= activeArraySize
+ * YUV_420_888 | ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE | FULL |
+ * YUV_420_888 | 1920x1080 (1080p) | FULL | if 1080p <= activeArraySize
+ * YUV_420_888 | 1280x720 (720p) | FULL | if 720p <= activeArraySize
+ * YUV_420_888 | 640x480 (480p) | FULL | if 480p <= activeArraySize
+ * YUV_420_888 | 320x240 (240p) | FULL | if 240p <= activeArraySize
+ * YUV_420_888 | all output sizes available for FULL hardware level, up to the maximum video size | LIMITED |
+ * IMPLEMENTATION_DEFINED | same as YUV_420_888 | Any |
+ *
For applications targeting SDK version 31 or newer, if the mobile device doesn't declare
+ * to be media performance class 12 or better by setting
+ * VERSION#MEDIA_PERFORMANCE_CLASS to be 31 or larger,
+ * or if the camera device isn't a primary rear/front camera, the minimum required output
+ * stream configurations are the same as for applications targeting SDK version older than
+ * 31.
+ *
Refer to ACAMERA_REQUEST_AVAILABLE_CAPABILITIES for additional
+ * mandatory stream configurations on a per-capability basis.
+ *
Exception on 176x144 (QCIF) resolution: camera devices usually have a fixed capability for
+ * downscaling from larger resolution to smaller, and the QCIF resolution sometimes is not
+ * fully supported due to this limitation on devices with high-resolution image sensors.
+ * Therefore, trying to configure a QCIF resolution stream together with any other
+ * stream larger than 1920x1080 resolution (either width or height) might not be supported,
+ * and capture session creation will fail if it is not.
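A hedged sketch (not upstream code) of scanning the (format, width, height,
+ * input) tuples for the largest JPEG output size, with `chars` as above:
+ *
+ *     ACameraMetadata_const_entry cfgs;
+ *     ACameraMetadata_getConstEntry(chars, ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, &cfgs);
+ *     int64_t bestArea = 0; int32_t bestW = 0, bestH = 0;
+ *     for (uint32_t i = 0; i + 3 < cfgs.count; i += 4) {
+ *         int32_t fmt = cfgs.data.i32[i], w = cfgs.data.i32[i + 1], h = cfgs.data.i32[i + 2];
+ *         int32_t io = cfgs.data.i32[i + 3];
+ *         if (io == ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT &&
+ *             fmt == AIMAGE_FORMAT_JPEG && (int64_t)w * h > bestArea) {
+ *             bestArea = (int64_t)w * h; bestW = w; bestH = h;
+ *         }
+ *     }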
This lists the minimum frame duration for each
+ * format/size combination.
+ *
+ *
Type: int64[4*n]
+ *
+ *
This tag may appear in:
+ *
+ *
ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
+ *
+ *
This should correspond to the frame duration when only that
+ * stream is active, with all processing (typically in android.*.mode)
+ * set to either OFF or FAST.
+ *
When multiple streams are used in a request, the minimum frame
+ * duration will be max(individual stream min durations).
+ *
See ACAMERA_SENSOR_FRAME_DURATION and
+ * ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS for more details about
+ * calculating the max frame rate.
This lists the maximum stall duration for each
+ * output format/size combination.
+ *
+ *
Type: int64[4*n]
+ *
+ *
This tag may appear in:
+ *
+ *
ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
+ *
+ *
A stall duration is how much extra time would get added
+ * to the normal minimum frame duration for a repeating request
+ * that has streams with non-zero stall.
+ *
For example, consider JPEG captures which have the following
+ * characteristics:
+ *
+ *
JPEG streams act like processed YUV streams in requests for which
+ * they are not included; in requests in which they are directly
+ * referenced, they act as JPEG streams. This is because supporting a
+ * JPEG stream requires the underlying YUV data to always be ready for
+ * use by a JPEG encoder, but the encoder will only be used (and impact
+ * frame duration) on requests that actually reference a JPEG stream.
+ *
The JPEG processor can run concurrently to the rest of the camera
+ * pipeline, but cannot process more than 1 capture at a time.
+ *
+ *
In other words, using a repeating YUV request would result
+ * in a steady frame rate (let's say it's 30 FPS). If a single
+ * JPEG request is submitted periodically, the frame rate will stay
+ * at 30 FPS (as long as we wait for the previous JPEG to return each
+ * time). If we try to submit a repeating YUV + JPEG request, then
+ * the frame rate will drop from 30 FPS.
+ *
In general, submitting a new request with a non-0 stall time
+ * stream will not cause a frame rate drop unless there are still
+ * outstanding buffers for that stream from previous requests.
+ *
Submitting a repeating request with streams (call this S)
+ * is the same as setting the minimum frame duration to
+ * the normal minimum frame duration corresponding to S plus
+ * the maximum stall duration for S.
+ *
If interleaving requests with and without a stall duration,
+ * a request will stall by the maximum of the remaining times
+ * for each can-stall stream with outstanding buffers.
+ *
This means that a stalling request will not have an exposure start
+ * until the stall has completed.
+ *
This should correspond to the stall duration when only that stream is
+ * active, with all processing (typically in android.*.mode) set to FAST
+ * or OFF. Setting any of the processing modes to HIGH_QUALITY
+ * effectively results in an indeterminate stall duration for all
+ * streams in a request (the regular stall calculation rules are
+ * ignored).
+ *
The following formats may always have a stall duration:
+ *
+ *
{@link AIMAGE_FORMAT_JPEG }
+ *
{@link AIMAGE_FORMAT_RAW16 }
+ *
+ *
The following formats will never have a stall duration:
+ *
+ *
{@link AIMAGE_FORMAT_YUV_420_888 }
+ *
{@link AIMAGE_FORMAT_RAW10 }
+ *
{@link AIMAGE_FORMAT_RAW12 }
+ *
{@link AIMAGE_FORMAT_Y8 }
+ *
+ *
All other formats may or may not have an allowed stall duration on
+ * a per-capability basis; refer to ACAMERA_REQUEST_AVAILABLE_CAPABILITIES
+ * for more details.
+ *
See ACAMERA_SENSOR_FRAME_DURATION for more information about
+ * calculating the max frame rate (absent stalls).
ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
+ *
+ *
When passing a non-centered crop region (ACAMERA_SCALER_CROP_REGION) to a camera
+ * device that only supports CENTER_ONLY cropping, the camera device will move the
+ * crop region to the center of the sensor active array (ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE)
+ * and keep the crop region width and height unchanged. The camera device will return the
+ * final used crop region in metadata result ACAMERA_SCALER_CROP_REGION.
+ *
Camera devices that support FREEFORM cropping will support any crop region that
+ * is inside of the active array. The camera device will apply the same crop region and
+ * return the final used crop region in capture result metadata ACAMERA_SCALER_CROP_REGION.
+ *
Starting from API level 30,
+ *
+ *
If the camera device supports FREEFORM cropping, in order to do FREEFORM cropping, the
+ * application must set ACAMERA_CONTROL_ZOOM_RATIO to 1.0, and use ACAMERA_SCALER_CROP_REGION
+ * for zoom.
+ *
To do CENTER_ONLY zoom, the application has the following two options:
+ *
Set ACAMERA_CONTROL_ZOOM_RATIO to 1.0; adjust zoom by ACAMERA_SCALER_CROP_REGION.
+ *
Adjust zoom by ACAMERA_CONTROL_ZOOM_RATIO; use ACAMERA_SCALER_CROP_REGION to crop
+ * the field of view vertically (letterboxing) or horizontally (pillarboxing), but not
+ * windowboxing.
+ *
+ *
+ *
Setting ACAMERA_CONTROL_ZOOM_RATIO to values different than 1.0 and
+ * ACAMERA_SCALER_CROP_REGION to be windowboxing at the same time are not supported. In this
+ * case, the camera framework will override the ACAMERA_SCALER_CROP_REGION to be the active
+ * array.
+ *
+ *
LEGACY capability devices will only support CENTER_ONLY cropping.
ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
+ *
+ *
Optional subset of the ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS that contains
+ * similar tuples listed as
+ * (width, height, format, output/input stream, usecase bit field).
+ * Camera devices will be able to suggest particular stream configurations which are
+ * power and performance efficient for specific use cases. For more information about
+ * retrieving the suggestions see
+ * CameraCharacteristics#getRecommendedStreamConfigurationMap.
+ *
The data representation is int[5], which maps to
+ * (width, height, format, output/input stream, usecase bit field). The array can be
+ * parsed using the following pseudo code:
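The upstream pseudo code is not reproduced here; as a hedged substitute, the
+ * int[5] layout can be walked as follows, with `entry` assumed to be the
+ * ACameraMetadata_const_entry for this tag:
+ *
+ *     for (uint32_t i = 0; i + 4 < entry.count; i += 5) {
+ *         int32_t width   = entry.data.i32[i];
+ *         int32_t height  = entry.data.i32[i + 1];
+ *         int32_t format  = entry.data.i32[i + 2];
+ *         int32_t isInput = entry.data.i32[i + 3];
+ *         int32_t useCaseBitField = entry.data.i32[i + 4];
+ *         // ...keep the entries whose use case bits match the application...
+ *     }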
Recommended mappings of image formats that are supported by this
+ * camera device for input streams, to their corresponding output formats.
+ *
+ *
Type: int32
+ *
+ *
This tag may appear in:
+ *
+ *
ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
+ *
+ *
This is a recommended subset of the complete list of mappings found in
+ * android.scaler.availableInputOutputFormatsMap. The same requirements apply here as well.
+ * The list, however, doesn't need to contain all available and supported mappings. Instead,
+ * developers must list only recommended and efficient entries.
+ * If set, the information will be available in the ZERO_SHUTTER_LAG recommended stream
+ * configuration; see
+ * CameraCharacteristics#getRecommendedStreamConfigurationMap.
ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
+ *
+ *
This entry lists the valid modes for ACAMERA_SCALER_ROTATE_AND_CROP for this camera device.
+ *
Starting with API level 30, all devices will list at least ROTATE_AND_CROP_NONE.
+ * Devices with support for rotate-and-crop will additionally list at least
+ * ROTATE_AND_CROP_AUTO and ROTATE_AND_CROP_90.
ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks
+ *
ACaptureRequest
+ *
+ *
+ *
This control is primarily intended to help camera applications with no support for
+ * multi-window modes to work correctly on devices where multi-window scenarios are
+ * unavoidable, such as foldables or other devices with variable display geometry or more
+ * free-form window placement (such as laptops, which often place portrait-orientation apps
+ * in landscape with pillarboxing).
+ *
If supported, the default value is ROTATE_AND_CROP_AUTO, which allows the camera API
+ * to enable backwards-compatibility support for applications that do not support resizing
+ * / multi-window modes, when the device is in fact in a multi-window mode (such as inset
+ * portrait on laptops, or on a foldable device in some fold states). In addition,
+ * ROTATE_AND_CROP_NONE and ROTATE_AND_CROP_90 will always be available if this control
+ * is supported by the device. If not supported, devices API level 30 or higher will always
+ * list only ROTATE_AND_CROP_NONE.
+ *
When CROP_AUTO is in use, and the camera API activates backward-compatibility mode,
+ * several metadata fields will also be parsed differently to ensure that coordinates are
+ * correctly handled for features like drawing face detection boxes or passing in
+ * tap-to-focus coordinates. The camera API will convert positions in the active array
+ * coordinate system to/from the cropped-and-rotated coordinate system to make the
+ * operation transparent for applications. The following controls are affected:
+ *
+ *
ACAMERA_CONTROL_AE_REGIONS
+ *
ACAMERA_CONTROL_AF_REGIONS
+ *
ACAMERA_CONTROL_AWB_REGIONS
+ *
android.statistics.faces
+ *
+ *
Capture results will contain the actual value selected by the API;
+ * ROTATE_AND_CROP_AUTO will never be seen in a capture result.
+ *
Applications can also select their preferred cropping mode, either to opt out of the
+ * backwards-compatibility treatment, or to use the cropping feature themselves as needed.
+ * In this case, no coordinate translation will be done automatically, and all controls
+ * will continue to use the normal active array coordinates.
+ *
Cropping and rotating is done after the application of digital zoom (via either
+ * ACAMERA_SCALER_CROP_REGION or ACAMERA_CONTROL_ZOOM_RATIO), but before each individual
+ * output is further cropped and scaled. It only affects processed outputs such as
+ * YUV, PRIVATE, and JPEG. It has no effect on RAW outputs.
+ *
When CROP_90 or CROP_270 are selected, there is a significant loss to the field of
+ * view. For example, with a 4:3 aspect ratio output of 1600x1200, CROP_90 will still
+ * produce 1600x1200 output, but these buffers are cropped from a vertical 3:4 slice at the
+ * center of the 4:3 area, then rotated to be 4:3, and then upscaled to 1600x1200. Only
+ * 56.25% of the original FOV is still visible. In general, for an aspect ratio of w:h,
+ * the crop and rotate operation leaves (h/w)^2 of the field of view visible. For 16:9,
+ * this is ~31.6%.
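An application that prefers the full field of view can opt out explicitly; a
+ * hedged sketch (API level 31+ control, `request` as above):
+ *
+ *     uint8_t rc = ACAMERA_SCALER_ROTATE_AND_CROP_NONE;
+ *     ACaptureRequest_setEntry_u8(request, ACAMERA_SCALER_ROTATE_AND_CROP, 1, &rc);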
+ *
[Figure omitted: the effect of ROTATE_AND_CROP_90 on the outputs for a sample
+ * set of camera parameters.]
Default YUV/PRIVATE size to use for requesting secure image buffers.
+ *
+ *
Type: int32[2]
+ *
+ *
This tag may appear in:
+ *
+ *
ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
+ *
+ *
This entry lists the default size supported in the secure camera mode. This entry is
+ * optional on devices that support the SECURE_IMAGE_DATA capability. This entry will be null
+ * if the camera device does not list SECURE_IMAGE_DATA capability.
+ *
When the key is present, only a PRIVATE/YUV output of the specified size is guaranteed
+ * to be supported by the camera HAL in the secure camera mode. Any other format or
+ * resolutions might not be supported. Use
+ * {@link ACameraDevice_isSessionConfigurationSupported }
+ * API to query if a secure session configuration is supported if the device supports this
+ * API.
+ *
If this key returns null on a device with SECURE_IMAGE_DATA capability, the application
+ * can assume all output sizes listed in the
+ * {@link ACAMERA_SCALER_AVAILABLE_MIN_FRAME_DURATIONS }
+ * are supported.
The available multi-resolution stream configurations that this
+ * physical camera device supports
+ * (i.e. format, width, height, output/input stream).
ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
+ *
+ *
This list contains a subset of the parent logical camera's multi-resolution stream
+ * configurations which belong to this physical camera, and it will only
+ * advertise the maximum supported resolutions for a particular format.
+ *
If this camera device isn't a physical camera device constituting a logical camera,
+ * but a standalone CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR
+ * camera, this field represents the multi-resolution input/output stream configurations of
+ * default mode and max resolution modes. The sizes will be the maximum resolution of a
+ * particular format for default mode and max resolution mode.
+ *
This field will only be advertised if the device is a physical camera of a
+ * logical multi-camera device or an ultra high resolution sensor camera. For a logical
+ * multi-camera, the camera API will derive the logical camera’s multi-resolution stream
+ * configurations from all physical cameras. For an ultra high resolution sensor camera, this
+ * is used directly as the camera’s multi-resolution stream configurations.
The available stream configurations that this
+ * camera device supports (i.e. format, width, height, output/input stream) for a
+ * CaptureRequest with ACAMERA_SENSOR_PIXEL_MODE set to
+ * CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION.
Not all output formats may be supported in a configuration with
+ * an input stream of a particular format. For more details, see
+ * android.scaler.availableInputOutputFormatsMapMaximumResolution.
This lists the minimum frame duration for each
+ * format/size combination when the camera device is sent a CaptureRequest with
+ * ACAMERA_SENSOR_PIXEL_MODE set to
+ * CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION.
+ *
+ * @see ACAMERA_SENSOR_PIXEL_MODE
+ *
+ *
Type: int64[4*n]
+ *
+ *
This tag may appear in:
+ *
+ *
ACameraMetadata from ACameraManager_getCameraCharacteristics
When multiple streams are used in a request (if supported, when ACAMERA_SENSOR_PIXEL_MODE
+ * is set to
+ * CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION), the
+ * minimum frame duration will be max(individual stream min durations).
+ *
See ACAMERA_SENSOR_FRAME_DURATION and
+ * ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS_MAXIMUM_RESOLUTION for more details about
+ * calculating the max frame rate.
This lists the maximum stall duration for each
+ * output format/size combination when CaptureRequests are submitted with
+ * ACAMERA_SENSOR_PIXEL_MODE set to
+ * CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION
+ *
+ * @see ACAMERA_SENSOR_PIXEL_MODE
+ *
+ *
Type: int64[4*n]
+ *
+ *
This tag may appear in:
+ *
+ *
ACameraMetadata from ACameraManager_getCameraCharacteristics
ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
+ *
+ *
A logical multi-camera or an ultra high resolution camera may support multi-resolution
+ * input or output streams. With multi-resolution output streams, the camera device is able
+ * to output different resolution images depending on the current active physical camera or
+ * pixel mode. With multi-resolution input streams, the camera device can reprocess images
+ * of different resolutions from different physical cameras or sensor pixel modes.
+ *
When set to TRUE:
+ *
+ *
For a logical multi-camera, the camera framework derives
+ * android.scaler.multiResolutionStreamConfigurationMap by combining the
+ * ACAMERA_SCALER_PHYSICAL_CAMERA_MULTI_RESOLUTION_STREAM_CONFIGURATIONS from its physical
+ * cameras.
+ *
For an ultra-high resolution sensor camera, the camera framework directly copies
+ * the value of ACAMERA_SCALER_PHYSICAL_CAMERA_MULTI_RESOLUTION_STREAM_CONFIGURATIONS to
+ * android.scaler.multiResolutionStreamConfigurationMap.
ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
+ *
+ *
The stream use case indicates the purpose of a particular camera stream from
+ * the end-user perspective. Some examples of camera use cases are: preview stream for
+ * live viewfinder shown to the user, still capture for generating high quality photo
+ * capture, video record for encoding the camera output for the purpose of future playback,
+ * and video call for live realtime video conferencing.
+ *
With this flag, the camera device can optimize the image processing pipeline
+ * parameters, such as tuning, sensor mode, and ISP settings, independent of
+ * the properties of the immediate camera output surface. For example, if the output
+ * surface is a SurfaceTexture, the stream use case flag can be used to indicate whether
+ * the camera frames eventually go to display, video encoder,
+ * still image capture, or all of them combined.
The guaranteed stream combinations related to stream use case for a camera device with
+ * CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES_STREAM_USE_CASE
+ * capability are documented in the camera device
+ * guideline. The
+ * application is strongly recommended to use one of the guaranteed stream combinations.
+ * If the application creates a session with a stream combination not in the guaranteed
+ * list, or with mixed DEFAULT and non-DEFAULT use cases within the same session,
+ * the camera device may ignore some stream use cases due to hardware constraints
+ * and implementation details.
+ *
For stream combinations not covered by the stream use case mandatory lists, such as
+ * reprocessable session, constrained high speed session, or RAW stream combinations, the
+ * application should leave stream use cases within the session as DEFAULT.
ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks
+ *
ACaptureRequest
+ *
+ *
+ *
If the sensor can't expose this exact duration, it will shorten the
+ * duration exposed to the nearest possible value (rather than expose longer).
+ * The final exposure time used will be available in the output capture result.
+ *
This control is only effective if ACAMERA_CONTROL_AE_MODE or ACAMERA_CONTROL_MODE is set to
+ * OFF; otherwise the auto-exposure algorithm will override this value.
Duration from start of frame exposure to
+ * start of next frame exposure.
+ *
+ *
Type: int64
+ *
+ *
This tag may appear in:
+ *
+ *
ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks
+ *
ACaptureRequest
+ *
+ *
+ *
The maximum frame rate that can be supported by a camera subsystem is
+ * a function of many factors:
+ *
+ *
Requested resolutions of output image streams
+ *
Availability of binning / skipping modes on the imager
+ *
The bandwidth of the imager interface
+ *
The bandwidth of the various ISP processing blocks
+ *
+ *
Since these factors can vary greatly between different ISPs and
+ * sensors, the camera abstraction tries to represent the bandwidth
+ * restrictions with as simple a model as possible.
+ *
The model presented has the following characteristics:
+ *
+ *
The image sensor is always configured to output the smallest
+ * resolution possible given the application's requested output stream
+ * sizes. The smallest resolution is defined as being at least as large
+ * as the largest requested output stream size; the camera pipeline must
+ * never digitally upsample sensor data when the crop region covers the
+ * whole sensor. In general, this means that if only small output stream
+ * resolutions are configured, the sensor can provide a higher frame
+ * rate.
+ *
Since any request may use any or all the currently configured
+ * output streams, the sensor and ISP must be configured to support
+ * scaling a single capture to all the streams at the same time. This
+ * means the camera pipeline must be ready to produce the largest
+ * requested output size without any delay. Therefore, the overall
+ * frame rate of a given configured stream set is governed only by the
+ * largest requested stream resolution.
+ *
Using more than one output stream in a request does not affect the
+ * frame duration.
+ *
Certain format-streams may need to do additional background processing
+ * before data is consumed/produced by that stream. These processors
+ * can run concurrently to the rest of the camera pipeline, but
+ * cannot process more than 1 capture at a time.
+ *
+ *
The necessary information for the application, given the model above, is provided via
+ * {@link ACAMERA_SCALER_AVAILABLE_MIN_FRAME_DURATIONS }.
+ * These are used to determine the maximum frame rate / minimum frame duration that is
+ * possible for a given stream configuration.
+ *
Specifically, the application can use the following rules to
+ * determine the minimum frame duration it can request from the camera
+ * device:
+ *
+ *
Let the set of currently configured input/output streams be called S.
+ *
Find the minimum frame durations for each stream in S, by looking it up in {@link ACAMERA_SCALER_AVAILABLE_MIN_FRAME_DURATIONS }
+ * (with its respective size/format). Let this set of frame durations be called F.
+ *
For any given request R, the minimum frame duration allowed for R is the maximum
+ * out of all values in F. Let the streams used in R be called S_r.
+ *
+ *
If none of the streams in S_r have a stall time (listed in {@link ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS }
+ * using its respective size/format), then the frame duration in F determines the steady
+ * state frame rate that the application will get if it uses R as a repeating request. Let
+ * this special kind of request be called Rsimple.
+ *
A repeating request Rsimple can be occasionally interleaved by a single capture of a
+ * new request Rstall (which has at least one in-use stream with a non-0 stall time) and if
+ * Rstall has the same minimum frame duration this will not cause a frame rate loss if all
+ * buffers from the previous Rstall have already been delivered.
+ *
For more details about stalling, see {@link ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS }.
+ *
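As a minimal sketch of the rule above (assuming the per-stream minimum frame
+ * durations for S_r have already been looked up from
+ * {@link ACAMERA_SCALER_AVAILABLE_MIN_FRAME_DURATIONS } into an array), the
+ * minimum frame duration for a request is the maximum over that array:
+ *
+ * // Hypothetical helper: minimum frame duration (ns) for a request whose
+ * // streams have per-stream minimum durations f[0..count-1] (the set F).
+ * static int64_t minFrameDurationForRequest(const int64_t* f, size_t count) {
+ *     int64_t minDuration = 0;
+ *     for (size_t i = 0; i < count; i++) {
+ *         if (f[i] > minDuration) minDuration = f[i]; // max over all values in F
+ *     }
+ *     return minDuration;
+ * }
+ *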
This control is only effective if ACAMERA_CONTROL_AE_MODE or ACAMERA_CONTROL_MODE is set to
+ * OFF; otherwise the auto-exposure algorithm will override this value.
The amount of gain applied to sensor data
+ * before processing.
+ *
+ *
Type: int32
+ *
+ *
This tag may appear in:
+ *
+ *
ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks
+ *
ACaptureRequest
+ *
+ *
+ *
The sensitivity is the standard ISO sensitivity value,
+ * as defined in ISO 12232:2006.
+ *
The sensitivity must be within ACAMERA_SENSOR_INFO_SENSITIVITY_RANGE, and
+ * if it is less than ACAMERA_SENSOR_MAX_ANALOG_SENSITIVITY, the camera device
+ * is guaranteed to use only analog amplification for applying the gain.
+ *
If the camera device cannot apply the exact sensitivity
+ * requested, it will reduce the gain to the nearest supported
+ * value. The final sensitivity used will be available in the
+ * output capture result.
+ *
This control is only effective if ACAMERA_CONTROL_AE_MODE or ACAMERA_CONTROL_MODE is set to
+ * OFF; otherwise the auto-exposure algorithm will override this value.
+ *
Note that for devices supporting postRawSensitivityBoost, the total sensitivity applied
+ * to the final processed image is the combination of ACAMERA_SENSOR_SENSITIVITY and
+ * ACAMERA_CONTROL_POST_RAW_SENSITIVITY_BOOST. In case the application uses the sensor
+ * sensitivity from last capture result of an auto request for a manual request, in order
+ * to achieve the same brightness in the output image, the application should also
+ * set postRawSensitivityBoost.
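+ *
As a minimal sketch (assuming request is a valid ACaptureRequest from
+ * <camera/NdkCaptureRequest.h>), a manual sensitivity is applied by disabling
+ * auto-exposure and then setting this control:
+ *
+ * uint8_t aeMode = ACAMERA_CONTROL_AE_MODE_OFF; // required, or AE overrides it
+ * int32_t iso = 400; // reduced by the device to the nearest supported value
+ * ACaptureRequest_setEntry_u8(request, ACAMERA_CONTROL_AE_MODE, 1, &aeMode);
+ * ACaptureRequest_setEntry_i32(request, ACAMERA_SENSOR_SENSITIVITY, 1, &iso);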
The standard reference illuminant used as the scene light source when
+ * calculating the ACAMERA_SENSOR_COLOR_TRANSFORM1,
+ * ACAMERA_SENSOR_CALIBRATION_TRANSFORM1, and
+ * ACAMERA_SENSOR_FORWARD_MATRIX1 matrices.
ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
+ *
+ *
The values in this key correspond to the values defined for the
+ * EXIF LightSource tag. These illuminants are standard light sources
+ * that are often used when calibrating camera devices.
+ *
If this key is present, then ACAMERA_SENSOR_COLOR_TRANSFORM1,
+ * ACAMERA_SENSOR_CALIBRATION_TRANSFORM1, and
+ * ACAMERA_SENSOR_FORWARD_MATRIX1 will also be present.
+ *
Some devices may choose to provide a second set of calibration
+ * information for improved quality, including
+ * ACAMERA_SENSOR_REFERENCE_ILLUMINANT2 and its corresponding matrices.
+ *
Starting from Android Q, this key will not be present for a MONOCHROME camera, even if
+ * the camera device has RAW capability.
The standard reference illuminant used as the scene light source when
+ * calculating the ACAMERA_SENSOR_COLOR_TRANSFORM2,
+ * ACAMERA_SENSOR_CALIBRATION_TRANSFORM2, and
+ * ACAMERA_SENSOR_FORWARD_MATRIX2 matrices.
ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
+ *
+ *
See ACAMERA_SENSOR_REFERENCE_ILLUMINANT1 for more details.
+ *
If this key is present, then ACAMERA_SENSOR_COLOR_TRANSFORM2,
+ * ACAMERA_SENSOR_CALIBRATION_TRANSFORM2, and
+ * ACAMERA_SENSOR_FORWARD_MATRIX2 will also be present.
+ *
Starting from Android Q, this key will not be present for a MONOCHROME camera, even if
+ * the camera device has RAW capability.
A per-device calibration transform matrix that maps from the
+ * reference sensor colorspace to the actual device sensor colorspace.
+ *
+ *
Type: rational[3*3]
+ *
+ *
This tag may appear in:
+ *
+ *
ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
+ *
+ *
This matrix is used to correct for per-device variations in the
+ * sensor colorspace, and is used for processing raw buffer data.
+ *
The matrix is expressed as a 3x3 matrix in row-major-order, and
+ * contains a per-device calibration transform that maps colors
+ * from reference sensor color space (i.e. the "golden module"
+ * colorspace) into this camera device's native sensor color
+ * space under the first reference illuminant
+ * (ACAMERA_SENSOR_REFERENCE_ILLUMINANT1).
+ *
Starting from Android Q, this key will not be present for a MONOCHROME camera, even if
+ * the camera device has RAW capability.
A per-device calibration transform matrix that maps from the
+ * reference sensor colorspace to the actual device sensor colorspace
+ * (this is the colorspace of the raw buffer data).
+ *
+ *
Type: rational[3*3]
+ *
+ *
This tag may appear in:
+ *
+ *
ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
+ *
+ *
This matrix is used to correct for per-device variations in the
+ * sensor colorspace, and is used for processing raw buffer data.
+ *
The matrix is expressed as a 3x3 matrix in row-major-order, and
+ * contains a per-device calibration transform that maps colors
+ * from reference sensor color space (i.e. the "golden module"
+ * colorspace) into this camera device's native sensor color
+ * space under the second reference illuminant
+ * (ACAMERA_SENSOR_REFERENCE_ILLUMINANT2).
+ *
This matrix will only be present if the second reference
+ * illuminant is present.
+ *
Starting from Android Q, this key will not be present for a MONOCHROME camera, even if
+ * the camera device has RAW capability.
A matrix that transforms color values from CIE XYZ color space to
+ * reference sensor color space.
+ *
+ *
Type: rational[3*3]
+ *
+ *
This tag may appear in:
+ *
+ *
ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
+ *
+ *
This matrix is used to convert from the standard CIE XYZ color
+ * space to the reference sensor colorspace, and is used when processing
+ * raw buffer data.
+ *
The matrix is expressed as a 3x3 matrix in row-major-order, and
+ * contains a color transform matrix that maps colors from the CIE
+ * XYZ color space to the reference sensor color space (i.e. the
+ * "golden module" colorspace) under the first reference illuminant
+ * (ACAMERA_SENSOR_REFERENCE_ILLUMINANT1).
+ *
The white points chosen in both the reference sensor color space
+ * and the CIE XYZ colorspace when calculating this transform will
+ * match the standard white point for the first reference illuminant
+ * (i.e. no chromatic adaptation will be applied by this transform).
+ *
Starting from Android Q, this key will not be present for a MONOCHROME camera, even if
+ * the camera device has RAW capability.
A matrix that transforms color values from CIE XYZ color space to
+ * reference sensor color space.
+ *
+ *
Type: rational[3*3]
+ *
+ *
This tag may appear in:
+ *
+ *
ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
+ *
+ *
This matrix is used to convert from the standard CIE XYZ color
+ * space to the reference sensor colorspace, and is used when processing
+ * raw buffer data.
+ *
The matrix is expressed as a 3x3 matrix in row-major-order, and
+ * contains a color transform matrix that maps colors from the CIE
+ * XYZ color space to the reference sensor color space (i.e. the
+ * "golden module" colorspace) under the second reference illuminant
+ * (ACAMERA_SENSOR_REFERENCE_ILLUMINANT2).
+ *
The white points chosen in both the reference sensor color space
+ * and the CIE XYZ colorspace when calculating this transform will
+ * match the standard white point for the second reference illuminant
+ * (i.e. no chromatic adaptation will be applied by this transform).
+ *
This matrix will only be present if the second reference
+ * illuminant is present.
+ *
Starting from Android Q, this key will not be present for a MONOCHROME camera, even if
+ * the camera device has RAW capability.
A matrix that transforms white balanced camera colors from the reference
+ * sensor colorspace to the CIE XYZ colorspace with a D50 whitepoint.
+ *
+ *
Type: rational[3*3]
+ *
+ *
This tag may appear in:
+ *
+ *
ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
+ *
+ *
This matrix is used to convert to the standard CIE XYZ colorspace, and
+ * is used when processing raw buffer data.
+ *
This matrix is expressed as a 3x3 matrix in row-major-order, and contains
+ * a color transform matrix that maps white balanced colors from the
+ * reference sensor color space to the CIE XYZ color space with a D50 white
+ * point.
+ *
Under the first reference illuminant (ACAMERA_SENSOR_REFERENCE_ILLUMINANT1)
+ * this matrix is chosen so that the standard white point for this reference
+ * illuminant in the reference sensor colorspace is mapped to D50 in the
+ * CIE XYZ colorspace.
+ *
Starting from Android Q, this key will not be present for a MONOCHROME camera, even if
+ * the camera device has RAW capability.
A matrix that transforms white balanced camera colors from the reference
+ * sensor colorspace to the CIE XYZ colorspace with a D50 whitepoint.
+ *
+ *
Type: rational[3*3]
+ *
+ *
This tag may appear in:
+ *
+ *
ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
+ *
+ *
This matrix is used to convert to the standard CIE XYZ colorspace, and
+ * is used when processing raw buffer data.
+ *
This matrix is expressed as a 3x3 matrix in row-major-order, and contains
+ * a color transform matrix that maps white balanced colors from the
+ * reference sensor color space to the CIE XYZ color space with a D50 white
+ * point.
+ *
Under the second reference illuminant (ACAMERA_SENSOR_REFERENCE_ILLUMINANT2)
+ * this matrix is chosen so that the standard white point for this reference
+ * illuminant in the reference sensor colorspace is mapped to D50 in the
+ * CIE XYZ colorspace.
+ *
This matrix will only be present if the second reference
+ * illuminant is present.
+ *
Starting from Android Q, this key will not be present for a MONOCHROME camera, even if
+ * the camera device has RAW capability.
A fixed black level offset for each of the color filter arrangement
+ * (CFA) mosaic channels.
+ *
+ *
Type: int32[4]
+ *
+ *
This tag may appear in:
+ *
+ *
ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
+ *
+ *
This key specifies the zero light value for each of the CFA mosaic
+ * channels in the camera sensor. The maximal value output by the
+ * sensor is represented by the value in ACAMERA_SENSOR_INFO_WHITE_LEVEL.
+ *
The values are given in the same order as channels listed for the CFA
+ * layout key (see ACAMERA_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT), i.e. the
+ * nth value given corresponds to the black level offset for the nth
+ * color channel listed in the CFA.
+ *
The black level values of captured images may vary for different
+ * capture settings (e.g., ACAMERA_SENSOR_SENSITIVITY). This key
+ * represents a coarse approximation for such cases. It is recommended to
+ * use ACAMERA_SENSOR_DYNAMIC_BLACK_LEVEL or use pixels from
+ * ACAMERA_SENSOR_OPTICAL_BLACK_REGIONS directly for captures when
+ * supported by the camera device, which provides more accurate black
+ * level values. For raw capture in particular, it is recommended to use
+ * pixels from ACAMERA_SENSOR_OPTICAL_BLACK_REGIONS to calculate black
+ * level values for each frame.
+ *
For a MONOCHROME camera device, all of the 2x2 channels must have the same values.
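+ *
As a minimal sketch (assuming chars holds characteristics obtained from
+ * ACameraManager_getCameraCharacteristics and checked for errors), the four
+ * offsets can be read in CFA channel order:
+ *
+ * ACameraMetadata_const_entry entry;
+ * if (ACameraMetadata_getConstEntry(chars,
+ *         ACAMERA_SENSOR_BLACK_LEVEL_PATTERN, &entry) == ACAMERA_OK) {
+ *     for (uint32_t n = 0; n < entry.count; n++) {
+ *         int32_t offset = entry.data.i32[n]; // zero-light level of channel n
+ *         (void)offset; // e.g. subtract from that channel's raw samples
+ *     }
+ * }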
Maximum sensitivity that is implemented
+ * purely through analog gain.
+ *
+ *
Type: int32
+ *
+ *
This tag may appear in:
+ *
+ *
ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
+ *
+ *
For ACAMERA_SENSOR_SENSITIVITY values less than or
+ * equal to this, all applied gain must be analog. For
+ * values above this, the gain applied can be a mix of analog and
+ * digital.
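+ *
A minimal sketch of checking whether a candidate sensitivity stays within the
+ * analog-only range (chars is assumed to hold valid characteristics metadata,
+ * and candidateIso is an assumed int32_t value):
+ *
+ * ACameraMetadata_const_entry entry;
+ * if (ACameraMetadata_getConstEntry(chars,
+ *         ACAMERA_SENSOR_MAX_ANALOG_SENSITIVITY, &entry) == ACAMERA_OK) {
+ *     int analogOnly = (candidateIso <= entry.data.i32[0]);
+ *     (void)analogOnly; // nonzero: the gain is guaranteed to be purely analog
+ * }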
Clockwise angle through which the output image needs to be rotated to be
+ * upright on the device screen in its native orientation.
+ *
+ *
Type: int32
+ *
+ *
This tag may appear in:
+ *
+ *
ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
+ *
+ *
Also defines the direction of rolling shutter readout, which is from top to bottom in
+ * the sensor's coordinate system.
+ *
Starting with Android API level 32, camera clients that query the orientation via
+ * CameraCharacteristics#get on foldable devices which
+ * include logical cameras can receive a value that can dynamically change depending on the
+ * device/fold state.
+ * Clients are advised not to cache or store the orientation value of such logical sensors.
+ * If repeated queries to CameraCharacteristics are not preferred, clients can
+ * also access the entire mapping from device state to sensor orientation in
+ * DeviceStateSensorOrientationMap.
+ * Do note that a dynamically changing sensor orientation value in camera characteristics
+ * will not be the best way to establish the orientation per frame. Clients that want to
+ * know the sensor orientation of a particular captured frame should query the
+ * ACAMERA_LOGICAL_MULTI_CAMERA_ACTIVE_PHYSICAL_ID from the corresponding capture result and
+ * check the respective physical camera orientation.
+ *
Native camera clients must query ACAMERA_INFO_DEVICE_STATE_ORIENTATIONS for the mapping
+ * between device state and camera sensor orientation. Dynamic updates to the sensor
+ * orientation are not supported in this code path.
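+ *
As a minimal sketch (for a back-facing camera; displayRotationDegrees is an
+ * assumed input from the windowing system), the clockwise rotation to apply to
+ * the output image can be derived as:
+ *
+ * static int32_t outputRotation(const ACameraMetadata* chars,
+ *                               int32_t displayRotationDegrees) {
+ *     ACameraMetadata_const_entry entry;
+ *     int32_t sensorOrientation = 0;
+ *     if (ACameraMetadata_getConstEntry(chars,
+ *             ACAMERA_SENSOR_ORIENTATION, &entry) == ACAMERA_OK) {
+ *         sensorOrientation = entry.data.i32[0];
+ *     }
+ *     return (sensorOrientation - displayRotationDegrees + 360) % 360;
+ * }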
Time at start of exposure of first
+ * row of the image sensor active array, in nanoseconds.
+ *
+ *
Type: int64
+ *
+ *
This tag may appear in:
+ *
+ *
ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks
+ *
+ *
+ *
The timestamps are also included in all image
+ * buffers produced for the same capture, and will be identical
+ * on all the outputs.
+ *
When ACAMERA_SENSOR_INFO_TIMESTAMP_SOURCE == UNKNOWN,
+ * the timestamps measure time since an unspecified starting point,
+ * and are monotonically increasing. They can be compared with the
+ * timestamps for other captures from the same camera device, but are
+ * not guaranteed to be comparable to any other time source.
+ *
When ACAMERA_SENSOR_INFO_TIMESTAMP_SOURCE == REALTIME, the
+ * timestamps measure time in the same timebase as SystemClock#elapsedRealtimeNanos, and they can
+ * be compared to other timestamps from other subsystems that
+ * are using that base.
+ *
For reprocessing, the timestamp will match the start of exposure of
+ * the input image, i.e. the
+ * timestamp in the TotalCaptureResult that was used to create the
+ * reprocess capture request.
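+ *
A minimal sketch of reading the timestamp and deciding how it may be compared
+ * (result and chars are assumed to hold a capture result and the static
+ * characteristics, respectively):
+ *
+ * ACameraMetadata_const_entry src, ts;
+ * if (ACameraMetadata_getConstEntry(chars,
+ *         ACAMERA_SENSOR_INFO_TIMESTAMP_SOURCE, &src) == ACAMERA_OK &&
+ *     ACameraMetadata_getConstEntry(result,
+ *         ACAMERA_SENSOR_TIMESTAMP, &ts) == ACAMERA_OK &&
+ *     src.data.u8[0] == ACAMERA_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME) {
+ *     int64_t startOfExposureNs = ts.data.i64[0];
+ *     (void)startOfExposureNs; // comparable to the elapsedRealtimeNanos timebase
+ * }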
The estimated camera neutral color in the native sensor colorspace at
+ * the time of capture.
+ *
+ *
Type: rational[3]
+ *
+ *
This tag may appear in:
+ *
+ *
ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks
+ *
+ *
+ *
This value gives the neutral color point encoded as an RGB value in the
+ * native sensor color space. The neutral color point indicates the
+ * currently estimated white point of the scene illumination. It can be
+ * used to interpolate between the provided color transforms when
+ * processing raw sensor data.
+ *
The order of the values is R, G, B; where R is in the lowest index.
+ *
Starting from Android Q, this key will not be present for a MONOCHROME camera, even if
+ * the camera device has RAW capability.
Noise model coefficients for each CFA mosaic channel.
+ *
+ *
Type: double[2*CFA Channels]
+ *
+ *
This tag may appear in:
+ *
+ *
ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks
+ *
+ *
+ *
This key contains two noise model coefficients for each CFA channel
+ * corresponding to the sensor amplification (S) and sensor readout
+ * noise (O). These are given as pairs of coefficients for each channel
+ * in the same order as channels listed for the CFA layout key
+ * (see ACAMERA_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT). This is
+ * represented as an array of Pair<Double, Double>, where
+ * the first member of the Pair at index n is the S coefficient and the
+ * second member is the O coefficient for the nth color channel in the CFA.
+ *
These coefficients are used in a two parameter noise model to describe
+ * the amount of noise present in the image for each CFA channel. The
+ * noise model used here is:
+ *
N(x) = sqrt(Sx + O)
+ *
Where x represents the recorded signal of a CFA channel normalized to
+ * the range [0, 1], and S and O are the noise model coefficients for
+ * that channel.
+ *
A more detailed description of the noise model can be found in the
+ * Adobe DNG specification for the NoiseProfile tag.
+ *
For a MONOCHROME camera, there is only one color channel. So the noise model coefficients
+ * will only contain one S and one O.
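+ *
A minimal sketch of evaluating this model for one channel:
+ *
+ * #include <math.h>
+ *
+ * // Expected noise for a signal x normalized to [0, 1], given the channel's
+ * // S (amplification) and O (readout) coefficients from this key.
+ * static double expectedNoise(double x, double S, double O) {
+ *     return sqrt(S * x + O); // N(x) = sqrt(Sx + O)
+ * }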
The worst-case divergence between Bayer green channels.
+ *
+ *
Type: float
+ *
+ *
This tag may appear in:
+ *
+ *
ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks
+ *
+ *
+ *
This value is an estimate of the worst case split between the
+ * Bayer green channels in the red and blue rows in the sensor color
+ * filter array.
+ *
The green split is calculated as follows:
+ *
+ *
A 5x5 pixel (or larger) window W within the active sensor array is
+ * chosen. The term 'pixel' here is taken to mean a group of 4 Bayer
+ * mosaic channels (R, Gr, Gb, B). The location and size of the window
+ * chosen are implementation defined, and should be chosen to provide a
+ * green split estimate that is both representative of the entire image
+ * for this camera sensor, and can be calculated quickly.
+ *
The arithmetic mean of the green channels from the red
+ * rows (mean_Gr) within W is computed.
+ *
The arithmetic mean of the green channels from the blue
+ * rows (mean_Gb) within W is computed.
+ *
The maximum ratio R of the two means is computed as follows:
+ * R = max((mean_Gr + 1)/(mean_Gb + 1), (mean_Gb + 1)/(mean_Gr + 1))
+ *
+ *
The ratio R is the green split divergence reported for this property,
+ * which represents how much the green channels differ in the mosaic
+ * pattern. This value is typically used to determine the treatment of
+ * the green mosaic channels when demosaicing.
+ *
The green split value can be roughly interpreted as follows:
+ *
+ *
R < 1.03 is a negligible split (<3% divergence).
+ *
1.03 <= R <= 1.20 will require some software
+ * correction to avoid demosaic errors (3-20% divergence).
+ *
R > 1.20 will require strong software correction to produce
+ * a usable image (>20% divergence).
+ *
+ *
Starting from Android Q, this key will not be present for a MONOCHROME camera, even if
+ * the camera device has RAW capability.
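+ *
A minimal sketch of the ratio computation described above, given the window
+ * means:
+ *
+ * static float greenSplit(float meanGr, float meanGb) {
+ *     float a = (meanGr + 1.0f) / (meanGb + 1.0f);
+ *     float b = (meanGb + 1.0f) / (meanGr + 1.0f);
+ *     return (a > b) ? a : b; // R = max(a, b); R < 1.03 is negligible
+ * }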
ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks
+ *
ACaptureRequest
+ *
+ *
+ *
Each color channel is treated as an unsigned 32-bit integer.
+ * The camera device then uses the most significant X bits
+ * that correspond to how many bits are in its Bayer raw sensor
+ * output.
+ *
For example, a sensor with RAW10 Bayer output would use the
+ * 10 most significant bits from each color channel.
ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks
+ *
ACaptureRequest
+ *
+ *
+ *
When a test pattern is enabled, all manual sensor controls specified
+ * by ACAMERA_SENSOR_* will be ignored. All other controls should
+ * work as normal.
+ *
For example, if manual flash is enabled, flash firing should still
+ * occur (and that the test pattern remain unmodified, since the flash
+ * would not actually affect it).
Duration between the start of exposure for the first row of the image sensor,
+ * and the start of exposure for one past the last row of the image sensor.
+ *
+ *
Type: int64
+ *
+ *
This tag may appear in:
+ *
+ *
ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks
+ *
+ *
+ *
This is the exposure time skew between the first and (last+1) row exposure start times. The
+ * first row and the last row are the first and last rows inside of the
+ * ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE.
+ *
For typical camera sensors that use rolling shutters, this is also equivalent to the frame
+ * readout time.
+ *
If the image sensor is operating in a binned or cropped mode due to the current output
+ * target resolutions, it's possible this skew is reported to be larger than the exposure
+ * time, for example, since it is based on the full array even if a partial array is read
+ * out. Be sure to scale the number to cover the section of the sensor actually being used
+ * for the outputs you care about. So if your output covers N rows of the active array of
+ * height H, scale this value by N/H to get the total skew for that viewport.
+ *
Note: Prior to Android 11, this field was described as measuring duration from
+ * first to last row of the image sensor, which is not equal to the frame readout time for a
+ * rolling shutter sensor. Implementations generally reported the latter value, so to resolve
+ * the inconsistency, the description has been updated to range from (first, last+1) row
+ * exposure start, instead.
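+ *
A minimal sketch of the N/H viewport scaling described above:
+ *
+ * // Skew (ns) for an output that reads 'outputRows' rows of an active array
+ * // that is 'activeArrayRows' rows tall.
+ * static int64_t skewForViewport(int64_t rollingShutterSkewNs,
+ *                                int64_t outputRows, int64_t activeArrayRows) {
+ *     return rollingShutterSkewNs * outputRows / activeArrayRows;
+ * }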
List of disjoint rectangles indicating the sensor
+ * optically shielded black pixel regions.
+ *
+ *
Type: int32[4*num_regions]
+ *
+ *
This tag may appear in:
+ *
+ *
ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
+ *
+ *
In most camera sensors, the active array is surrounded by some
+ * optically shielded pixel areas. By blocking light, these pixels
+ * provide a reliable black reference for black level compensation
+ * in the active array region.
+ *
This key provides a list of disjoint rectangles specifying the
+ * regions of optically shielded (with metal shield) black pixel
+ * regions if the camera device is capable of reading out these black
+ * pixels in the output raw images. In comparison to the fixed black
+ * level values reported by ACAMERA_SENSOR_BLACK_LEVEL_PATTERN, this key
+ * may provide a more accurate way for the application to calculate
+ * the black level of each captured raw image.
+ *
When this key is reported, the ACAMERA_SENSOR_DYNAMIC_BLACK_LEVEL and
+ * ACAMERA_SENSOR_DYNAMIC_WHITE_LEVEL will also be reported.
+ *
The data representation is int[4], which maps to (left, top, width, height).
A per-frame dynamic black level offset for each of the color filter
+ * arrangement (CFA) mosaic channels.
+ *
+ *
Type: float[4]
+ *
+ *
This tag may appear in:
+ *
+ *
ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks
+ *
+ *
+ *
Camera sensor black levels may vary dramatically for different
+ * capture settings (e.g. ACAMERA_SENSOR_SENSITIVITY). The fixed black
+ * level reported by ACAMERA_SENSOR_BLACK_LEVEL_PATTERN may be too
+ * inaccurate to represent the actual value on a per-frame basis. The
+ * camera device internal pipeline relies on reliable black level values
+ * to process the raw images appropriately. To get the best image
+ * quality, the camera device may choose to estimate the per frame black
+ * level values either based on optically shielded black regions
+ * (ACAMERA_SENSOR_OPTICAL_BLACK_REGIONS) or its internal model.
+ *
This key reports the camera device estimated per-frame zero light
+ * value for each of the CFA mosaic channels in the camera sensor. The
+ * ACAMERA_SENSOR_BLACK_LEVEL_PATTERN may only represent a coarse
+ * approximation of the actual black level values. This value is the
+ * black level used in the camera device's internal image processing pipeline
+ * and is generally more accurate than the fixed black level values.
+ * However, since they are estimated values by the camera device, they
+ * may not be as accurate as the black level values calculated from the
+ * optical black pixels reported by ACAMERA_SENSOR_OPTICAL_BLACK_REGIONS.
+ *
The values are given in the same order as channels listed for the CFA
+ * layout key (see ACAMERA_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT), i.e. the
+ * nth value given corresponds to the black level offset for the nth
+ * color channel listed in the CFA.
+ *
For a MONOCHROME camera, all of the 2x2 channels must have the same values.
+ *
This key will be available if ACAMERA_SENSOR_OPTICAL_BLACK_REGIONS is available or the
+ * camera device advertises this key via {@link ACAMERA_REQUEST_AVAILABLE_RESULT_KEYS }.
Maximum raw value output by sensor for this frame.
+ *
+ *
Type: int32
+ *
+ *
This tag may appear in:
+ *
+ *
ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks
+ *
+ *
+ *
Since the ACAMERA_SENSOR_BLACK_LEVEL_PATTERN may change for different
+ * capture settings (e.g., ACAMERA_SENSOR_SENSITIVITY), the white
+ * level will change accordingly. This key is similar to
+ * ACAMERA_SENSOR_INFO_WHITE_LEVEL, but specifies the camera device
+ * estimated white level for each frame.
+ *
This key will be available if ACAMERA_SENSOR_OPTICAL_BLACK_REGIONS is
+ * available or the camera device advertises this key via
+ * {@link ACAMERA_REQUEST_AVAILABLE_RESULT_KEYS }.
The area of the image sensor which corresponds to active pixels after any geometric
+ * distortion correction has been applied.
+ *
+ *
Type: int32[4]
+ *
+ *
This tag may appear in:
+ *
+ *
ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
+ *
+ *
This is the rectangle representing the size of the active region of the sensor (i.e.
+ * the region that actually receives light from the scene) after any geometric correction
+ * has been applied, and should be treated as the maximum size in pixels of any of the
+ * image output formats aside from the raw formats.
+ *
This rectangle is defined relative to the full pixel array; (0,0) is the top-left of
+ * the full pixel array, and the size of the full pixel array is given by
+ * ACAMERA_SENSOR_INFO_PIXEL_ARRAY_SIZE.
+ *
The coordinate system for most other keys that list pixel coordinates, including
+ * ACAMERA_SCALER_CROP_REGION, is defined relative to the active array rectangle given in
+ * this field, with (0, 0) being the top-left of this rectangle.
+ *
The active array may be smaller than the full pixel array, since the full array may
+ * include black calibration pixels or other inactive regions.
+ *
For devices that do not support ACAMERA_DISTORTION_CORRECTION_MODE control, the active
+ * array must be the same as ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE.
+ *
For devices that support ACAMERA_DISTORTION_CORRECTION_MODE control, the active array must
+ * be enclosed by ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE. The difference between
+ * pre-correction active array and active array accounts for scaling or cropping caused
+ * by lens geometric distortion correction.
+ *
In general, the application should always refer to the active array size for controls like
+ * metering regions or crop region. Two exceptions are when the application is dealing with
+ * RAW image buffers (RAW_SENSOR, RAW10, RAW12, etc.), or when the application explicitly sets
+ * ACAMERA_DISTORTION_CORRECTION_MODE to OFF. In these cases, the application should refer
+ * to ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE.
+ *
The data representation is int[4], which maps to (left, top, width, height).
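+ *
A minimal sketch of reading the rectangle (chars is assumed to hold valid
+ * characteristics metadata):
+ *
+ * ACameraMetadata_const_entry entry;
+ * if (ACameraMetadata_getConstEntry(chars,
+ *         ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE, &entry) == ACAMERA_OK) {
+ *     int32_t left = entry.data.i32[0], top = entry.data.i32[1];
+ *     int32_t width = entry.data.i32[2], height = entry.data.i32[3];
+ *     // (0, 0) for metering regions and crop region maps to (left, top) here.
+ * }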
The arrangement of color filters on sensor;
+ * represents the colors in the top-left 2x2 section of
+ * the sensor, in reading order, for a Bayer camera, or the
+ * light spectrum it captures for MONOCHROME camera.
ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
+ *
+ *
Attempting to use frame durations beyond the maximum will result in the frame
+ * duration being clipped to the maximum. See that control for a full definition of frame
+ * durations.
+ *
Refer to {@link ACAMERA_SCALER_AVAILABLE_MIN_FRAME_DURATIONS }
+ * for the minimum frame duration values.
Dimensions of the full pixel array, possibly
+ * including black calibration pixels.
+ *
+ *
Type: int32[2]
+ *
+ *
This tag may appear in:
+ *
+ *
ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
+ *
+ *
The pixel count of the full pixel array of the image sensor, which covers
+ * ACAMERA_SENSOR_INFO_PHYSICAL_SIZE area. This represents the full pixel dimensions of
+ * the raw buffers produced by this sensor.
+ *
If a camera device supports raw sensor formats, either this or
+ * ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE is the maximum dimensions for the raw
+ * output formats listed in {@link ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS }
+ * (this depends on whether or not the image sensor returns buffers containing pixels that
+ * are not part of the active array region for black level calibration or other purposes).
+ *
Some parts of the full pixel array may not receive light from the scene,
+ * or be otherwise inactive. The ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE key
+ * defines the rectangle of active pixels that will be included in processed image
+ * formats.
ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
+ *
+ *
This specifies the fully-saturated encoding level for the raw
+ * sample values from the sensor. This is typically caused by the
+ * sensor becoming highly non-linear or clipping. The minimum for
+ * each channel is specified by the offset in the
+ * ACAMERA_SENSOR_BLACK_LEVEL_PATTERN key.
+ *
The white level is typically determined either by sensor bit depth
+ * (8-14 bits is expected), or by the point where the sensor response
+ * becomes too non-linear to be useful. The default value for this is the
+ * maximum representable value for a 16-bit raw sample (2^16 - 1).
+ *
The white level values of captured images may vary for different
+ * capture settings (e.g., ACAMERA_SENSOR_SENSITIVITY). This key
+ * represents a coarse approximation for such cases. It is recommended
+ * to use ACAMERA_SENSOR_DYNAMIC_WHITE_LEVEL for captures when supported
+ * by the camera device, which provides more accurate white level values.
ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
+ *
+ *
The timestamps provided for captures are always in nanoseconds and monotonic, but
+ * may not be based on a time source that can be compared to other system time sources.
+ *
This characteristic defines the source for the timestamps, and therefore whether they
+ * can be compared against other system time sources/timestamps.
ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
+ *
+ *
If TRUE, all images produced by the camera device in the RAW image formats will
+ * have lens shading correction already applied to it. If FALSE, the images will
+ * not be adjusted for lens shading correction.
+ * See android.request.maxNumOutputRaw for a list of RAW image formats.
+ *
This key will be null for all devices that do not report this information.
+ * Devices with RAW capability will always report this information in this key.
The area of the image sensor which corresponds to active pixels prior to the
+ * application of any geometric distortion correction.
+ *
+ *
Type: int32[4]
+ *
+ *
This tag may appear in:
+ *
+ *
ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
+ *
+ *
This is the rectangle representing the size of the active region of the sensor (i.e.
+ * the region that actually receives light from the scene) before any geometric correction
+ * has been applied, and should be treated as the active region rectangle for any of the
+ * raw formats. All metadata associated with raw processing (e.g. the lens shading
+ * correction map, and radial distortion fields) treats the top, left of this rectangle as
+ * the origin, (0,0).
+ *
The size of this region determines the maximum field of view and the maximum number of
+ * pixels that an image from this sensor can contain, prior to the application of
+ * geometric distortion correction. The effective maximum pixel dimensions of a
+ * post-distortion-corrected image is given by the ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE
+ * field, and the effective maximum field of view for a post-distortion-corrected image
+ * can be calculated by applying the geometric distortion correction fields to this
+ * rectangle, and cropping to the rectangle given in ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE.
+ *
E.g. to calculate position of a pixel, (x,y), in a processed YUV output image with the
+ * dimensions in ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE given the position of a pixel,
+ * (x', y'), in the raw pixel array with dimensions given in
+ * ACAMERA_SENSOR_INFO_PIXEL_ARRAY_SIZE:
+ *
+ *
Choose a pixel (x', y') within the active array region of the raw buffer given in
+ * ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE, otherwise this pixel is considered
+ * to be outside of the FOV, and will not be shown in the processed output image.
+ *
Apply geometric distortion correction to get the post-distortion pixel coordinate,
+ * (x_i, y_i). When applying geometric correction metadata, note that metadata for raw
+ * buffers is defined relative to the top, left of the
+ * ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE rectangle.
+ *
If the resulting corrected pixel coordinate is within the region given in
+ * ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE, then the position of this pixel in the
+ * processed output image buffer is (x_i - activeArray.left, y_i - activeArray.top),
+ * when the top, left coordinate of that buffer is treated as (0, 0).
+ *
+ *
Thus, for pixel x',y' = (25, 25) on a sensor where ACAMERA_SENSOR_INFO_PIXEL_ARRAY_SIZE
+ * is (100,100), ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE is (10, 10, 100, 100),
+ * ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE is (20, 20, 80, 80), and the geometric distortion
+ * correction doesn't change the pixel coordinate, the resulting pixel selected in
+ * pixel coordinates would be x,y = (25, 25) relative to the top,left of the raw buffer
+ * with dimensions given in ACAMERA_SENSOR_INFO_PIXEL_ARRAY_SIZE, and would be (5, 5)
+ * relative to the top,left of post-processed YUV output buffer with dimensions given in
+ * ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE.
+ *
The currently supported fields that correct for geometric distortion are:
+ *
+ *
ACAMERA_LENS_DISTORTION.
+ *
+ *
If the camera device doesn't support geometric distortion correction, or all of the
+ * geometric distortion fields are no-ops, this rectangle will be the same as the
+ * post-distortion-corrected rectangle given in ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE.
+ *
This rectangle is defined relative to the full pixel array; (0,0) is the top-left of
+ * the full pixel array, and the size of the full pixel array is given by
+ * ACAMERA_SENSOR_INFO_PIXEL_ARRAY_SIZE.
+ *
The pre-correction active array may be smaller than the full pixel array, since the
+ * full array may include black calibration pixels or other inactive regions.
+ *
The data representation is int[4], which maps to (left, top, width, height).
The area of the image sensor which corresponds to active pixels after any geometric
+ * distortion correction has been applied, when the sensor runs in maximum resolution mode.
+ *
+ *
Type: int32[4]
+ *
+ *
This tag may appear in:
+ *
+ *
ACameraMetadata from ACameraManager_getCameraCharacteristics
Dimensions of the full pixel array, possibly
+ * including black calibration pixels, when the sensor runs in maximum resolution mode.
+ * Analogous to ACAMERA_SENSOR_INFO_PIXEL_ARRAY_SIZE, when ACAMERA_SENSOR_PIXEL_MODE is
+ * set to
+ * CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION.
ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
+ *
+ *
The pixel count of the full pixel array of the image sensor, which covers
+ * ACAMERA_SENSOR_INFO_PHYSICAL_SIZE area. This represents the full pixel dimensions of
+ * the raw buffers produced by this sensor, when it runs in maximum resolution mode. That
+ * is, when ACAMERA_SENSOR_PIXEL_MODE is set to
+ * CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION.
+ * This key will only be present for devices which advertise the
+ * CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR
+ * capability.
The area of the image sensor which corresponds to active pixels prior to the
+ * application of any geometric distortion correction, when the sensor runs in maximum
+ * resolution mode. This key must be used for crop / metering regions, only when
+ * ACAMERA_SENSOR_PIXEL_MODE is set to
+ * CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION.
+ *
+ * @see ACAMERA_SENSOR_PIXEL_MODE
+ *
+ *
Type: int32[4]
+ *
+ *
This tag may appear in:
+ *
+ *
ACameraMetadata from ACameraManager_getCameraCharacteristics
Dimensions of the group of pixels which are under the same color filter.
+ * This specifies the width and height (pair of integers) of the group of pixels which fall
+ * under the same color filter for ULTRA_HIGH_RESOLUTION sensors.
+ *
+ *
Type: int32[2]
+ *
+ *
This tag may appear in:
+ *
+ *
ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
+ *
+ *
Sensors can have pixels grouped together under the same color filter in order
+ * to improve various aspects of imaging such as noise reduction, low light
+ * performance etc. These groups can be of various sizes such as 2X2 (quad bayer),
+ * 3X3 (nona-bayer). This key specifies the length and width of the pixels grouped under
+ * the same color filter.
+ *
This key will not be present if REMOSAIC_REPROCESSING is not supported, since RAW images
+ * will have a regular bayer pattern.
ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks
+ *
ACaptureRequest
+ *
+ *
+ *
When set to OFF mode, no lens shading correction will be applied by the
+ * camera device, and identity lens shading map data will be provided
+ * if ACAMERA_STATISTICS_LENS_SHADING_MAP_MODE == ON. For example, for a lens
+ * shading map of size [ 4, 3 ], the output
+ * android.statistics.lensShadingCorrectionMap for this case will be an
+ * identity map in which every gain factor is 1.0.
When set to other modes, lens shading correction will be applied by the camera
+ * device. Applications can request lens shading map data by setting
+ * ACAMERA_STATISTICS_LENS_SHADING_MAP_MODE to ON, and then the camera device will provide lens
+ * shading map data in android.statistics.lensShadingCorrectionMap; the returned shading map
+ * data will be the one applied by the camera device for this capture request.
+ *
The shading map data may depend on the auto-exposure (AE) and AWB statistics; therefore,
+ * the reliability of the map data may be affected by the AE and AWB algorithms. When AE and
+ * AWB are in AUTO modes (ACAMERA_CONTROL_AE_MODE != OFF and ACAMERA_CONTROL_AWB_MODE !=
+ * OFF), to get the best results, it is recommended that the application wait for the AE and AWB
+ * to converge before using the returned shading map data.
List of lens shading modes for ACAMERA_SHADING_MODE that are supported by this camera device.
+ *
+ * @see ACAMERA_SHADING_MODE
+ *
+ *
Type: byte[n]
+ *
+ *
This tag may appear in:
+ *
+ *
ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
+ *
+ *
This list contains lens shading modes that can be set for the camera device.
+ * Camera devices that support the MANUAL_POST_PROCESSING capability will always
+ * list OFF and FAST mode. This includes all FULL level devices.
+ * LEGACY devices will always only support FAST mode.
ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks
+ *
+ *
+ *
Each detected face is given a unique ID that is valid for as long as the face is visible
+ * to the camera device. A face that leaves the field of view and later returns may be
+ * assigned a new ID.
+ *
Only available if ACAMERA_STATISTICS_FACE_DETECT_MODE == FULL
ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks
+ *
+ *
+ *
For devices not supporting ACAMERA_DISTORTION_CORRECTION_MODE control, the coordinate
+ * system always follows that of ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE, with (0, 0) being
+ * the top-left pixel of the active array.
+ *
For devices supporting ACAMERA_DISTORTION_CORRECTION_MODE control, the coordinate
+ * system depends on the mode being set.
+ * When the distortion correction mode is OFF, the coordinate system follows
+ * ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE, with
+ * (0, 0) being the top-left pixel of the pre-correction active array.
+ * When the distortion correction mode is not OFF, the coordinate system follows
+ * ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE, with
+ * (0, 0) being the top-left pixel of the active array.
+ *
Only available if ACAMERA_STATISTICS_FACE_DETECT_MODE == FULL.
+ *
Starting from API level 30, the coordinate system of activeArraySize or
+ * preCorrectionActiveArraySize is used to represent post-zoomRatio field of view, not
+ * pre-zoomRatio field of view. This means that if the relative position of faces and
+ * the camera device doesn't change, when zooming in by increasing
+ * ACAMERA_CONTROL_ZOOM_RATIO, the face landmarks move farther away from the center of the
+ * activeArray or preCorrectionActiveArray. If ACAMERA_CONTROL_ZOOM_RATIO is set to 1.0
+ * (default), the face landmarks coordinates won't change as ACAMERA_SCALER_CROP_REGION
+ * changes. See ACAMERA_CONTROL_ZOOM_RATIO for details. Whether to use activeArraySize or
+ * preCorrectionActiveArraySize still depends on distortion correction mode.
List of the bounding rectangles for detected
+ * faces.
+ *
+ *
Type: int32[n*4]
+ *
+ *
This tag may appear in:
+ *
+ *
ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks
+ *
+ *
+ *
For devices not supporting ACAMERA_DISTORTION_CORRECTION_MODE control, the coordinate
+ * system always follows that of ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE, with (0, 0) being
+ * the top-left pixel of the active array.
+ *
For devices supporting ACAMERA_DISTORTION_CORRECTION_MODE control, the coordinate
+ * system depends on the mode being set.
+ * When the distortion correction mode is OFF, the coordinate system follows
+ * ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE, with
+ * (0, 0) being the top-left pixel of the pre-correction active array.
+ * When the distortion correction mode is not OFF, the coordinate system follows
+ * ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE, with
+ * (0, 0) being the top-left pixel of the active array.
+ *
Only available if ACAMERA_STATISTICS_FACE_DETECT_MODE != OFF.
+ *
Starting from API level 30, the coordinate system of activeArraySize or
+ * preCorrectionActiveArraySize is used to represent post-zoomRatio field of view, not
+ * pre-zoomRatio field of view. This means that if the relative position of faces and
+ * the camera device doesn't change, when zooming in by increasing
+ * ACAMERA_CONTROL_ZOOM_RATIO, the face rectangles grow larger and move farther away from
+ * the center of the activeArray or preCorrectionActiveArray. If ACAMERA_CONTROL_ZOOM_RATIO
+ * is set to 1.0 (default), the face rectangles won't change as ACAMERA_SCALER_CROP_REGION
+ * changes. See ACAMERA_CONTROL_ZOOM_RATIO for details. Whether to use activeArraySize or
+ * preCorrectionActiveArraySize still depends on distortion correction mode.
+ *
The data representation is int[4], which maps to (left, top, right, bottom).
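+ *
A minimal sketch of iterating the rectangles (result is assumed to hold the
+ * ACameraMetadata delivered to the capture result callback):
+ *
+ * ACameraMetadata_const_entry faces;
+ * if (ACameraMetadata_getConstEntry(result,
+ *         ACAMERA_STATISTICS_FACE_RECTANGLES, &faces) == ACAMERA_OK) {
+ *     for (uint32_t i = 0; i + 3 < faces.count; i += 4) {
+ *         int32_t left = faces.data.i32[i],      top = faces.data.i32[i + 1];
+ *         int32_t right = faces.data.i32[i + 2], bottom = faces.data.i32[i + 3];
+ *         // Coordinates follow the coordinate system rules described above.
+ *     }
+ * }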
The shading map is a low-resolution floating-point map
+ * that lists the coefficients used to correct for vignetting and color shading,
+ * for each Bayer color channel of RAW image data.
+ *
+ *
Type: float[4*n*m]
+ *
+ *
This tag may appear in:
+ *
+ *
ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks
+ *
+ *
+ *
The map provided here is the same map that is used by the camera device to
+ * correct both color shading and vignetting for output non-RAW images.
+ *
When there is no lens shading correction applied to RAW
+ * output images (ACAMERA_SENSOR_INFO_LENS_SHADING_APPLIED ==
+ * false), this map is the complete lens shading correction
+ * map; when there is some lens shading correction applied to
+ * the RAW output image (ACAMERA_SENSOR_INFO_LENS_SHADING_APPLIED == true), this map reports the remaining lens shading
+ * correction map that needs to be applied to get shading
+ * corrected images that match the camera device's output for
+ * non-RAW formats.
+ *
For a complete shading correction map, the least shaded
+ * section of the image will have a gain factor of 1; all
+ * other sections will have gains above 1.
+ *
When ACAMERA_COLOR_CORRECTION_MODE = TRANSFORM_MATRIX, the map
+ * will take into account the colorCorrection settings.
+ *
The shading map is for the entire active pixel array, and is not
+ * affected by the crop region specified in the request. Each shading map
+ * entry is the value of the shading compensation map over a specific
+ * pixel on the sensor. Specifically, with a (N x M) resolution shading
+ * map, and an active pixel array size (W x H), shading map entry
+ * (x,y) ϵ (0 ... N-1, 0 ... M-1) is the value of the shading map at
+ * pixel ( ((W-1)/(N-1)) * x, ((H-1)/(M-1)) * y) for the four color channels.
+ * The map is assumed to be bilinearly interpolated between the sample points.
+ *
For a Bayer camera, the channel order is [R, Geven, Godd, B], where Geven is
+ * the green channel for the even rows of a Bayer pattern, and Godd is the odd rows.
+ * The shading map is stored in a fully interleaved format, and its size
+ * is provided in the camera static metadata by ACAMERA_LENS_INFO_SHADING_MAP_SIZE.
+ *
The shading map will generally have on the order of 30-40 rows and columns,
+ * and will be smaller than 64x64.
+ *
As an example, a very small map for a Bayer camera lists the four interleaved
+ * gain factors for each of its sample points. The low-resolution scaling map
+ * for each channel can be displayed as an image (using nearest-neighbor
+ * interpolation), and, as a visualization only, inverting the full-color map
+ * (using bicubic interpolation for visual quality) recovers an image of a gray
+ * wall as captured by the sensor.
+ *
For a MONOCHROME camera, all of the 2x2 channels must have the same values, so
+ * an example shading map for such a camera repeats identical gains across all
+ * four channels of each sample point.
Note that the RAW image data might be subject to lens shading
+ * correction not reported on this map. Query
+ * ACAMERA_SENSOR_INFO_LENS_SHADING_APPLIED to see if RAW image data has been subject
+ * to lens shading correction. If ACAMERA_SENSOR_INFO_LENS_SHADING_APPLIED
+ * is TRUE, the RAW image data is subject to partial or full lens shading
+ * correction. If full lens shading correction is applied to RAW
+ * images, the gain factor map reported in this key will contain all 1.0 gains.
+ * In other words, the map reported in this key is the remaining lens shading
+ * that needs to be applied on the RAW image to get images without lens shading
+ * artifacts. See android.request.maxNumOutputRaw for a list of RAW image
+ * formats.
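+ *
A minimal sketch of indexing the fully interleaved map (map, n, and the
+ * channel indices follow the layout described above):
+ *
+ * // Gain of one channel (0=R, 1=Geven, 2=Godd, 3=B) at grid cell (x, y) of an
+ * // n-column shading map stored as float[4*n*m].
+ * static float shadingGain(const float* map, int n, int x, int y, int channel) {
+ *     return map[4 * (y * n + x) + channel];
+ * }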
ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks
+ *
+ *
+ *
Many light sources, such as most fluorescent lights, flicker at a rate
+ * that depends on the local utility power standards. This flicker must be
+ * accounted for by auto-exposure routines to avoid artifacts in captured images.
+ * The camera device uses this entry to tell the application what the scene
+ * illuminant frequency is.
+ *
When manual exposure control is enabled
+ * (ACAMERA_CONTROL_AE_MODE == OFF or ACAMERA_CONTROL_MODE ==
+ * OFF), the ACAMERA_CONTROL_AE_ANTIBANDING_MODE doesn't perform
+ * antibanding, and the application can ensure it selects
+ * exposure times that do not cause banding issues by looking
+ * into this metadata field. See
+ * ACAMERA_CONTROL_AE_ANTIBANDING_MODE for more details.
+ *
Reports NONE if there doesn't appear to be flickering illumination.
List of (x, y) coordinates of hot/defective pixels on the sensor.
+ *
+ *
Type: int32[2*n]
+ *
+ *
This tag may appear in:
+ *
+ *
ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks
+ *
+ *
+ *
A coordinate (x, y) must lie between (0, 0), and
+ * (width - 1, height - 1) (inclusive), which are the top-left and
+ * bottom-right of the pixel array, respectively. The width and
+ * height dimensions are given in ACAMERA_SENSOR_INFO_PIXEL_ARRAY_SIZE.
+ * This may include hot pixels that lie outside of the active array
+ * bounds given by ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE.
ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks
+ *
ACaptureRequest
+ *
+ *
+ *
Since optical image stabilization generally involves motion much faster than the duration
+ * of individual image exposure, multiple OIS samples can be included for a single capture
+ * result. For example, if the OIS reporting operates at 200 Hz, a typical camera operating
+ * at 30fps may have 6-7 OIS samples per capture result. This information can be combined
+ * with the rolling shutter skew to account for lens motion during image exposure in
+ * post-processing algorithms.
An array of shifts of OIS samples, in x direction.
+ *
+ *
Type: float[n]
+ *
+ *
This tag may appear in:
+ *
+ *
ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks
+ *
+ *
+ *
The array contains the amount of shifts in x direction, in pixels, based on OIS samples.
+ * A positive value is a shift from left to right in the pre-correction active array
+ * coordinate system. For example, if the optical center is (1000, 500) in pre-correction
+ * active array coordinates, a shift of (3, 0) puts the new optical center at (1003, 500).
+ *
The number of shifts must match the number of timestamps in
+ * ACAMERA_STATISTICS_OIS_TIMESTAMPS.
+ *
The OIS samples are not affected by whether lens distortion correction is enabled (on
+ * supporting devices). They are always reported in pre-correction active array coordinates,
+ * since the scaling of OIS shifts would depend on the specific spot on the sensor the shift
+ * is needed.
An array of shifts of OIS samples, in y direction.
+ *
+ *
Type: float[n]
+ *
+ *
This tag may appear in:
+ *
+ *
ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks
+ *
+ *
+ *
The array contains the amount of shifts in y direction, in pixels, based on OIS samples.
+ * A positive value is a shift from top to bottom in pre-correction active array coordinate
+ * system. For example, if the optical center is (1000, 500) in active array coordinates, a
+ * shift of (0, 5) puts the new optical center at (1000, 505).
+ *
The number of shifts must match the number of timestamps in
+ * ACAMERA_STATISTICS_OIS_TIMESTAMPS.
+ *
The OIS samples are not affected by whether lens distortion correction is enabled (on
+ * supporting devices). They are always reported in pre-correction active array coordinates,
+ * since the scaling of OIS shifts would depend on the specific spot on the sensor the shift
+ * is needed.
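+ *
A minimal sketch of pairing the samples from a capture result (result is an
+ * assumed ACameraMetadata from the capture result callback):
+ *
+ * ACameraMetadata_const_entry ts, xs, ys;
+ * if (ACameraMetadata_getConstEntry(result,
+ *         ACAMERA_STATISTICS_OIS_TIMESTAMPS, &ts) == ACAMERA_OK &&
+ *     ACameraMetadata_getConstEntry(result,
+ *         ACAMERA_STATISTICS_OIS_X_SHIFTS, &xs) == ACAMERA_OK &&
+ *     ACameraMetadata_getConstEntry(result,
+ *         ACAMERA_STATISTICS_OIS_Y_SHIFTS, &ys) == ACAMERA_OK) {
+ *     for (uint32_t i = 0; i < ts.count; i++) {
+ *         // Sample i: at time ts.data.i64[i], the optical center shifted by
+ *         // (xs.data.f[i], ys.data.f[i]) pixels in pre-correction coordinates.
+ *     }
+ * }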
These are sorted in order of increasing Pin; it is
+ * required that input values 0.0 and 1.0 are included in the list to
+ * define a complete mapping. For input values between control points,
+ * the camera device must linearly interpolate between the control
+ * points.
+ *
Each curve can have an independent number of points, and the number
+ * of points can be less than max (that is, the request doesn't have to
+ * always provide a curve with number of points equivalent to
+ * ACAMERA_TONEMAP_MAX_CURVE_POINTS).
+ *
For devices with MONOCHROME capability, all three channels must have the same set of
+ * control points.
+ *
A few examples, and their corresponding graphical mappings; these
+ * only specify the red channel and the precision is limited to 4
+ * digits, for conciseness.
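+ *
+ * As a rough client-side sketch of the interpolation behavior described
+ * above (helper name hypothetical; this models, rather than replaces, the
+ * device's own implementation):
+ *
+ *     // points: flattened (in, out) pairs for one channel, sorted by
+ *     // increasing input value, with inputs 0.0 and 1.0 included.
+ *     static float EvaluateCurve(float[] points, float input)
+ *     {
+ *         for (int i = 0; i + 3 < points.Length; i += 2)
+ *         {
+ *             float in0 = points[i], out0 = points[i + 1];
+ *             float in1 = points[i + 2], out1 = points[i + 3];
+ *             if (input <= in1)
+ *             {
+ *                 // Linear interpolation between adjacent control points.
+ *                 float t = in1 > in0 ? (input - in0) / (in1 - in0) : 0f;
+ *                 return out0 + t * (out1 - out0);
+ *             }
+ *         }
+ *         return points[points.Length - 1]; // input beyond the last point
+ *     }
+ *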
+ * This tag may appear in:
+ *   - ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks
+ *   - ACaptureRequest
+ *
+ * When switching to an application-defined contrast curve by setting
+ * ACAMERA_TONEMAP_MODE to CONTRAST_CURVE, the curve is defined
+ * per-channel with a set of (in, out) points that specify the
+ * mapping from input high-bit-depth pixel value to the output
+ * low-bit-depth value. Since the actual pixel ranges of both input
+ * and output may change depending on the camera pipeline, the values
+ * are specified by normalized floating-point numbers.
+ *
+ * More-complex color mapping operations such as 3D color look-up
+ * tables, selective chroma enhancement, or other non-linear color
+ * transforms will be disabled when ACAMERA_TONEMAP_MODE is
+ * CONTRAST_CURVE.
+ *
+ * When using either FAST or HIGH_QUALITY, the camera device will
+ * emit its own tonemap curve in android.tonemap.curve.
+ * These values are always available, and as close as possible to the
+ * actually used nonlinear/nonglobal transforms.
+ *
+ * If a request is sent with CONTRAST_CURVE with the camera device's
+ * provided curve in FAST or HIGH_QUALITY, the image's tonemap will be
+ * roughly the same.
+ * Maximum number of supported points in the
+ * tonemap curve that can be used for android.tonemap.curve.
+ *
+ * Type: int32
+ *
+ * This tag may appear in:
+ *   - ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
+ * If the actual number of points provided by the application (in ACAMERA_TONEMAPCURVE_*) is
+ * less than this maximum, the camera device will resample the curve to its internal
+ * representation, using linear interpolation.
+ *
+ * The output curves in the result metadata may have a different number
+ * of points than the input curves, and will represent the actual
+ * hardware curves used as closely as possible when linearly interpolated.
+ * Tonemapping curve to use when ACAMERA_TONEMAP_MODE is
+ * GAMMA_VALUE
+ *
+ * @see ACAMERA_TONEMAP_MODE
+ *
+ * Type: float
+ *
+ * This tag may appear in:
+ *   - ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks
+ *   - ACaptureRequest
+ *
+ * The tonemap curve will be defined by the following formula:
+ *
+ *     OUT = pow(IN, 1.0 / gamma)
+ *
+ * where IN and OUT are the input and output pixel values scaled to the
+ * range [0.0, 1.0], pow is the power function, and gamma is the gamma value
+ * specified by this key.
+ *
+ * The same curve will be applied to all color channels. The camera device
+ * may clip the input gamma value to its supported range. The actual applied
+ * value will be returned in the capture result.
+ *
+ * The valid range of gamma value varies on different devices, but values
+ * within [1.0, 5.0] are guaranteed not to be clipped.
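+ *
+ * For illustration, a minimal sketch of this mapping (helper name
+ * hypothetical):
+ *
+ *     // input is a pixel value normalized to [0.0, 1.0]; gamma is the value
+ *     // of this key, e.g. 2.2. Implements OUT = pow(IN, 1.0 / gamma).
+ *     static float ApplyGamma(float input, float gamma)
+ *     {
+ *         return (float)System.Math.Pow(input, 1.0 / gamma);
+ *     }
+ *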
+ * This tag may appear in:
+ *   - ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
+ * The supported hardware level is a high-level description of the camera device's
+ * capabilities, summarizing several capabilities into one field. Each level adds additional
+ * features to the previous one, and is always a strict superset of the previous level.
+ * The ordering is LEGACY < LIMITED < FULL < LEVEL_3.
+ *
+ * Starting from LEVEL_3, the level enumerations are guaranteed to be in increasing
+ * numerical value as well. To check if a given device is at least at a given hardware level,
+ * the following code snippet can be used:
+ *
+ * // Returns true if the device supports the required hardware level, or better.
+ * boolean isHardwareLevelSupported(CameraCharacteristics c, int requiredLevel) {
+ * final int[] sortedHwLevels = {
+ * CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY,
+ * CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_EXTERNAL,
+ * CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED,
+ * CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_FULL,
+ * CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_3
+ * };
+ * int deviceLevel = c.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL);
+ * if (requiredLevel == deviceLevel) {
+ * return true;
+ * }
+ *
+ * for (int sortedlevel : sortedHwLevels) {
+ * if (sortedlevel == requiredLevel) {
+ * return true;
+ * } else if (sortedlevel == deviceLevel) {
+ * return false;
+ * }
+ * }
+ * return false; // Should never reach here
+ * }
+ *
+ *
+ * At a high level, the levels are:
+ *
+ *   - LEGACY devices operate in a backwards-compatibility mode for older
+ *     Android devices, and have very limited capabilities.
+ *   - LIMITED devices represent the
+ *     baseline feature set, and may also include additional capabilities that are
+ *     subsets of FULL.
+ *   - FULL devices additionally support per-frame manual control of sensor, flash, lens and
+ *     post-processing settings, and image capture at a high rate.
+ *   - LEVEL_3 devices additionally support YUV reprocessing and RAW image capture, along
+ *     with additional output stream configurations.
+ *   - EXTERNAL devices are similar to LIMITED devices with exceptions like some sensor or
+ *     lens information not reported or less stable framerates.
+ *
+ * See the individual level enums for full descriptions of the supported capabilities. The
+ * ACAMERA_REQUEST_AVAILABLE_CAPABILITIES entry describes the device's capabilities at a
+ * finer-grain level, if needed. In addition, many controls have their available settings or
+ * ranges defined in individual entries from {@link ACameraManager_getCameraCharacteristics }.
+ *
+ * Some features are not part of any particular hardware level or capability and must be
+ * queried separately. These include:
ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
+ *
+ *
HAL must populate the array with
+ * (hardware::camera::provider::V2_5::DeviceState, sensorOrientation) pairs for each
+ * supported device state bitwise combination.
ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks
+ *
ACaptureRequest
+ *
+ *
+ *
Whether the black level offset was locked for this frame. Should be
+ * ON if ACAMERA_BLACK_LEVEL_LOCK was ON in the capture request, unless
+ * a change in other capture settings forced the camera device to
+ * perform a black level reset.
ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks
+ *
+ *
+ *
+ * When a request is submitted to the camera device, there is usually a
+ * delay of several frames before the controls get applied. A camera
+ * device may either choose to account for this delay by implementing a
+ * pipeline and carefully submit well-timed atomic control updates, or
+ * it may start streaming control changes that span over several frame
+ * boundaries.
+ *
+ * In the latter case, whenever a request's settings change relative to
+ * the previous submitted request, the full set of changes may take
+ * multiple frame durations to fully take effect. Some settings may
+ * take effect sooner (in fewer frame durations) than others.
+ *
+ * While a set of control changes are being propagated, this value
+ * will be CONVERGING.
+ *
+ * Once it is fully known that a set of control changes have finished
+ * propagating, and the resulting updated control settings
+ * have been read back by the camera device, this value will be set
+ * to a non-negative frame number (corresponding to the request to
+ * which the results have synchronized).
+ *
+ * Older camera device implementations may not have a way to detect
+ * when all camera controls have been applied, and will always set this
+ * value to UNKNOWN.
+ *
+ * FULL capability devices will always have this value set to the
+ * frame number of the request corresponding to this result.
+ *
+ * Further details:
+ *
+ *   - Whenever a request differs from the last request, any future
+ *     results not yet returned may have this value set to CONVERGING (this
+ *     could include any in-progress captures not yet returned by the camera
+ *     device; for more details, see pipeline considerations below).
+ *   - Submitting a series of multiple requests that differ from the
+ *     previous request (e.g. r1, r2, r3 s.t. r1 != r2 != r3)
+ *     moves the new synchronization frame to the last non-repeating
+ *     request (using the smallest frame number from the contiguous list of
+ *     repeating requests).
+ *   - Submitting the same request repeatedly will not change this value
+ *     to CONVERGING, if it was already a non-negative value.
+ *   - When this value changes to non-negative, that means that all of the
+ *     metadata controls from the request have been applied, all of the
+ *     metadata controls from the camera device have been read to the
+ *     updated values (into the result), and all of the graphics buffers
+ *     corresponding to this result are also synchronized to the request.
+ *
+ * Pipeline considerations:
+ *
+ * Submitting a request with updated controls relative to the previously
+ * submitted requests may also invalidate the synchronization state
+ * of all the results corresponding to currently in-flight requests.
+ *
+ * In other words, results for this current request and up to
+ * ACAMERA_REQUEST_PIPELINE_MAX_DEPTH prior requests may have their
+ * ACAMERA_SYNC_FRAME_NUMBER change to CONVERGING.
+ * The maximum number of frames that can occur after a request
+ * (different than the previous) has been submitted, and before the
+ * result's state becomes synchronized.
+ *
+ * This tag may appear in:
+ *   - ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
+ * This defines the maximum distance (in number of metadata results)
+ * between the frame number of the request that has new controls to apply
+ * and the frame number of the result that has all the controls applied.
+ *
+ * In other words, this acts as an upper boundary for how many frames
+ * must occur before the camera device knows for a fact that the newly
+ * submitted camera settings have been applied in outgoing frames.
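+ *
+ * As a sketch of how these two values can be combined by a client (helper
+ * name hypothetical; a conservative check, not a documented API contract):
+ *
+ *     // syncFrameNumber: ACAMERA_SYNC_FRAME_NUMBER from a result (>= 0 once
+ *     // converged); requestFrameNumber: frame number of the request whose
+ *     // settings the caller cares about.
+ *     static bool ReflectsRequest(long syncFrameNumber, long requestFrameNumber)
+ *     {
+ *         // A non-negative sync frame number at or past the request means the
+ *         // request's controls have been applied to this result.
+ *         return syncFrameNumber >= 0 && syncFrameNumber >= requestFrameNumber;
+ *     }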
ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
+ *
+ *
These are output stream configurations for use with
+ * dataSpace HAL_DATASPACE_DEPTH. The configurations are
+ * listed as (format, width, height, input?) tuples.
+ *
Only devices that support depth output for at least
+ * the HAL_PIXEL_FORMAT_Y16 dense depth map may include
+ * this entry.
+ *
A device that also supports the HAL_PIXEL_FORMAT_BLOB
+ * sparse depth point cloud must report a single entry for
+ * the format in this list as (HAL_PIXEL_FORMAT_BLOB,
+ * android.depth.maxDepthSamples, 1, OUTPUT) in addition to
+ * the entries for HAL_PIXEL_FORMAT_Y16.
+ * This lists the minimum frame duration for each
+ * format/size combination for depth output formats.
+ *
+ * Type: int64[4*n]
+ *
+ * This tag may appear in:
+ *   - ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
+ * This should correspond to the frame duration when only that
+ * stream is active, with all processing (typically in android.*.mode)
+ * set to either OFF or FAST.
+ *
+ * When multiple streams are used in a request, the minimum frame
+ * duration will be max(individual stream min durations).
+ *
+ * The minimum frame duration of a stream (of a particular format, size)
+ * is the same regardless of whether the stream is input or output.
+ *
+ * See ACAMERA_SENSOR_FRAME_DURATION and
+ * ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS for more details about
+ * calculating the max frame rate.
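+ *
+ * The max() rule above can be sketched directly (helper name hypothetical):
+ *
+ *     // streamMinDurations: the per-stream minimum frame durations, in
+ *     // nanoseconds, looked up for each configured stream's (format, size).
+ *     static long RequestMinFrameDuration(long[] streamMinDurations)
+ *     {
+ *         long max = 0;
+ *         foreach (long d in streamMinDurations)
+ *         {
+ *             if (d > max) max = d;
+ *         }
+ *         return max;
+ *     }
+ *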
+ * This lists the maximum stall duration for each
+ * output format/size combination for depth streams.
+ *
+ * Type: int64[4*n]
+ *
+ * This tag may appear in:
+ *   - ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
+ * A stall duration is how much extra time would get added
+ * to the normal minimum frame duration for a repeating request
+ * that has streams with non-zero stall.
+ *
+ * This functions similarly to
+ * ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS for depth
+ * streams.
+ *
+ * All depth output stream formats may have a nonzero stall
+ * duration.
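+ *
+ * Put arithmetically (an illustrative sketch of the rule above, with a
+ * hypothetical helper name), the effective frame duration of a repeating
+ * request that includes a stalling stream is:
+ *
+ *     // minFrameDuration: max of the per-stream minimum durations (above);
+ *     // stallDuration: the stall for the stalling output's (format, size).
+ *     static long EffectiveFrameDuration(long minFrameDuration, long stallDuration)
+ *     {
+ *         return minFrameDuration + stallDuration;
+ *     }
+ *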
+ * Indicates whether a capture request may target both a
+ * DEPTH16 / DEPTH_POINT_CLOUD output, and normal color outputs (such as
+ * YUV_420_888, JPEG, or RAW) simultaneously.
+ *
+ * This tag may appear in:
+ *   - ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
+ * If TRUE, including both depth and color outputs in a single
+ * capture request is not supported. An application must interleave color
+ * and depth requests. If FALSE, a single request can target both types
+ * of output.
+ *
+ * Typically, this restriction exists on camera devices that
+ * need to emit a specific pattern or wavelength of light to
+ * measure depth values, which causes the color image to be
+ * corrupted during depth measurement.
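+ *
+ * When this tag is TRUE, a client has to alternate the two output types
+ * across requests. A rough sketch (the submit helpers are hypothetical
+ * stand-ins for the app's own capture-session submission code):
+ *
+ *     static void SubmitDepthRequest() { }  // app-specific: request targeting DEPTH16
+ *     static void SubmitColorRequest() { }  // app-specific: request targeting YUV/JPEG
+ *
+ *     // Interleaves depth-only and color-only requests when depth is exclusive.
+ *     static void CaptureInterleaved(int frameCount)
+ *     {
+ *         for (int i = 0; i < frameCount; i++)
+ *         {
+ *             if (i % 2 == 0) SubmitDepthRequest();
+ *             else SubmitColorRequest();
+ *         }
+ *     }
+ *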
Recommended depth stream configurations for common client use cases.
+ *
+ *
Type: int32[n*5]
+ *
+ *
This tag may appear in:
+ *
+ *
ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
+ *
+ *
+ * Optional subset of the ACAMERA_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS that
+ * contains similar tuples, listed as
+ * (width, height, format, output/input stream, usecase bit field).
+ * Camera devices will be able to suggest particular depth stream configurations which are
+ * power and performance efficient for specific use cases. For more information about
+ * retrieving the suggestions see
+ * CameraCharacteristics#getRecommendedStreamConfigurationMap.
+ *
+ * For data representation, please refer to
+ * ACAMERA_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS
The available dynamic depth dataspace stream
+ * configurations that this camera device supports
+ * (i.e. format, width, height, output/input stream).
ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
+ *
+ *
These are output stream configurations for use with
+ * dataSpace DYNAMIC_DEPTH. The configurations are
+ * listed as (format, width, height, input?) tuples.
+ *
Only devices that support depth output for at least
+ * the HAL_PIXEL_FORMAT_Y16 dense depth map along with
+ * HAL_PIXEL_FORMAT_BLOB with the same size or size with
+ * the same aspect ratio can have dynamic depth dataspace
+ * stream configuration. ACAMERA_DEPTH_DEPTH_IS_EXCLUSIVE also
+ * needs to be set to FALSE.
This lists the minimum frame duration for each
+ * format/size combination for dynamic depth output streams.
+ *
+ *
Type: int64[4*n]
+ *
+ *
This tag may appear in:
+ *
+ *
ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
+ *
+ *
This should correspond to the frame duration when only that
+ * stream is active, with all processing (typically in android.*.mode)
+ * set to either OFF or FAST.
+ *
When multiple streams are used in a request, the minimum frame
+ * duration will be max(individual stream min durations).
+ *
The minimum frame duration of a stream (of a particular format, size)
+ * is the same regardless of whether the stream is input or output.
This lists the maximum stall duration for each
+ * output format/size combination for dynamic depth streams.
+ *
+ *
Type: int64[4*n]
+ *
+ *
This tag may appear in:
+ *
+ *
ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
+ *
+ *
A stall duration is how much extra time would get added
+ * to the normal minimum frame duration for a repeating request
+ * that has streams with non-zero stall.
+ *
All dynamic depth output streams may have a nonzero stall
+ * duration.
The available depth dataspace stream
+ * configurations that this camera device supports
+ * (i.e. format, width, height, output/input stream) when a CaptureRequest is submitted with
+ * ACAMERA_SENSOR_PIXEL_MODE set to
+ * CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION.
ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
+ *
+ *
Analogous to ACAMERA_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS, for configurations which
+ * are applicable when ACAMERA_SENSOR_PIXEL_MODE is set to
+ * CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION.
This lists the minimum frame duration for each
+ * format/size combination for depth output formats when a CaptureRequest is submitted with
+ * ACAMERA_SENSOR_PIXEL_MODE set to
+ * CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION.
+ *
+ * @see ACAMERA_SENSOR_PIXEL_MODE
+ *
+ *
Type: int64[4*n]
+ *
+ *
This tag may appear in:
+ *
+ *
ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
+ *
+ *
Analogous to ACAMERA_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS, for configurations which
+ * are applicable when ACAMERA_SENSOR_PIXEL_MODE is set to
+ * CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION.
+ *
See ACAMERA_SENSOR_FRAME_DURATION and
+ * ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS_MAXIMUM_RESOLUTION for more details about
+ * calculating the max frame rate.
This lists the maximum stall duration for each
+ * output format/size combination for depth streams for CaptureRequests where
+ * ACAMERA_SENSOR_PIXEL_MODE is set to
+ * CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION.
+ *
+ * @see ACAMERA_SENSOR_PIXEL_MODE
+ *
+ *
Type: int64[4*n]
+ *
+ *
This tag may appear in:
+ *
+ *
ACameraMetadata from ACameraManager_getCameraCharacteristics
The available dynamic depth dataspace stream
+ * configurations that this camera device supports (i.e. format, width, height,
+ * output/input stream) for CaptureRequests where ACAMERA_SENSOR_PIXEL_MODE is set to
+ * CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION.
ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
+ *
+ *
Analogous to ACAMERA_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS, for configurations
+ * which are applicable when ACAMERA_SENSOR_PIXEL_MODE is set to
+ * CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION.
This lists the minimum frame duration for each
+ * format/size combination for dynamic depth output streams for CaptureRequests where
+ * ACAMERA_SENSOR_PIXEL_MODE is set to
+ * CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION.
+ *
+ * @see ACAMERA_SENSOR_PIXEL_MODE
+ *
+ *
Type: int64[4*n]
+ *
+ *
This tag may appear in:
+ *
+ *
ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
+ *
+ *
Analogous to ACAMERA_DEPTH_AVAILABLE_DYNAMIC_DEPTH_MIN_FRAME_DURATIONS, for configurations
+ * which are applicable when ACAMERA_SENSOR_PIXEL_MODE is set to
+ * CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION.
This lists the maximum stall duration for each
+ * output format/size combination for dynamic depth streams for CaptureRequests where
+ * ACAMERA_SENSOR_PIXEL_MODE is set to
+ * CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION.
+ *
+ * @see ACAMERA_SENSOR_PIXEL_MODE
+ *
+ *
Type: int64[4*n]
+ *
+ *
This tag may appear in:
+ *
+ *
ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
+ *
+ *
Analogous to ACAMERA_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STALL_DURATIONS, for configurations
+ * which are applicable when ACAMERA_SENSOR_PIXEL_MODE is set to
+ * CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION.
+ * String containing the IDs of the underlying physical cameras.
+ *
+ * Type: byte[n]
+ *
+ * This tag may appear in:
+ *   - ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
+ * For a logical camera, this is the concatenation of all underlying physical camera IDs.
+ * The null terminator for each physical camera ID must be preserved so that the whole string
+ * can be tokenized using '\0' to generate the list of physical camera IDs.
+ *
+ * For example, if the physical camera IDs of the logical camera are "2" and "3", the
+ * value of this tag will be ['2', '\0', '3', '\0'].
+ *
+ * The number of physical camera IDs must be no less than 2.
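+ *
+ * A minimal sketch of tokenizing this tag's value (helper name
+ * hypothetical), assuming the raw bytes were read into a byte[]:
+ *
+ *     // data: raw value of this tag, e.g. the bytes '2', '\0', '3', '\0'.
+ *     static string[] ParsePhysicalCameraIds(byte[] data)
+ *     {
+ *         string whole = System.Text.Encoding.ASCII.GetString(data);
+ *         // Split on the preserved null terminators; drop the empty tail.
+ *         return whole.Split(new[] { '\0' }, System.StringSplitOptions.RemoveEmptyEntries);
+ *     }
+ *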
+ * This tag may appear in:
+ *   - ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
+ *
+ *
The accuracy of the frame timestamp synchronization determines the physical cameras'
+ * ability to start exposure at the same time. If the sensorSyncType is CALIBRATED, the
+ * physical camera sensors usually run in leader/follower mode where one sensor generates a
+ * timing signal for the other, so that their shutter time is synchronized. For APPROXIMATE
+ * sensorSyncType, the camera sensors usually run in leader/leader mode, where both sensors
+ * use their own timing generator, and there could be offset between their start of exposure.
+ *
In both cases, all images generated for a particular capture request still carry the same
+ * timestamps, so that they can be used to look up the matching frame number and
+ * onCaptureStarted callback.
+ *
This tag is only applicable if the logical camera device supports concurrent physical
+ * streams from different physical cameras.
String containing the ID of the underlying active physical camera.
+ *
+ *
Type: byte
+ *
+ *
This tag may appear in:
+ *
+ *
ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks
+ *
+ *
+ *
The ID of the active physical camera that's backing the logical camera. All camera
+ * streams and metadata that are not physical camera specific will be originating from this
+ * physical camera.
+ *
For a logical camera made up of physical cameras where each camera's lenses have
+ * different characteristics, the camera device may choose to switch between the physical
+ * cameras when application changes FOCAL_LENGTH or SCALER_CROP_REGION.
+ * At the time of lens switch, this result metadata reflects the new active physical camera
+ * ID.
+ *
This key will be available if the camera device advertises this key via {@link ACAMERA_REQUEST_AVAILABLE_RESULT_KEYS }.
+ * When available, this must be one of valid physical IDs backing this logical multi-camera.
+ * If this key is not available for a logical multi-camera, the camera device implementation
+ * may still switch between different active physical cameras based on use case, but the
+ * current active physical camera information won't be available to the application.
ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks
+ *
ACaptureRequest
+ *
+ *
+ *
+ * The lens distortion correction block attempts to improve image quality by fixing
+ * radial, tangential, or other geometric aberrations in the camera device's optics. If
+ * available, the ACAMERA_LENS_DISTORTION field documents the lens's distortion parameters.
+ *
+ * OFF means no distortion correction is done.
+ *
+ * FAST/HIGH_QUALITY both mean camera device determined distortion correction will be
+ * applied. HIGH_QUALITY mode indicates that the camera device will use the highest-quality
+ * correction algorithms, even if it slows down capture rate. FAST means the camera device
+ * will not slow down capture rate when applying correction. FAST may be the same as OFF if
+ * any correction at all would slow down capture rate. Every output stream will have a
+ * similar amount of enhancement applied.
+ *
+ * The correction only applies to processed outputs such as YUV, Y8, JPEG, or DEPTH16; it is
+ * not applied to any RAW output.
+ *
+ * This control will be on by default on devices that support this control. Applications
+ * disabling distortion correction need to pay extra attention to the coordinate system of
+ * metering regions, crop region, and face rectangles. When distortion correction is OFF,
+ * metadata coordinates follow the coordinate system of
+ * ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE. When distortion is not OFF, metadata
+ * coordinates follow the coordinate system of ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE. The
+ * camera device will map these metadata fields to match the corrected image produced by the
+ * camera device, for both capture requests and results. However, this mapping is not very
+ * precise, since rectangles do not generally map to rectangles when corrected. Only linear
+ * scaling between the active array and precorrection active array coordinates is
+ * performed. Applications that require precise correction of metadata need to undo that
+ * linear scaling, and apply a more complete correction that takes into account the app's
+ * own requirements.
+ *
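+ * As a sketch of the linear scaling described above (names hypothetical;
+ * the same scaling applies to y with the respective heights):
+ *
+ *     // preWidth: width from ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE;
+ *     // activeWidth: width from ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE.
+ *     static float ScaleToActiveArrayX(float preX, float preWidth, float activeWidth)
+ *     {
+ *         return preX * (activeWidth / preWidth);
+ *     }
+ *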
+ * The full list of metadata that is affected in this way by distortion correction is:
ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
+ *
+ *
No device is required to support this API; such devices will always list only 'OFF'.
+ * All devices that support this API will list both FAST and HIGH_QUALITY.
The available HEIC (ISO/IEC 23008-12) stream
+ * configurations that this camera device supports
+ * (i.e. format, width, height, output/input stream).
ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
+ *
+ *
The configurations are listed as (format, width, height, input?) tuples.
+ *
If the camera device supports HEIC image format, it will support identical set of stream
+ * combinations involving HEIC image format, compared to the combinations involving JPEG
+ * image format as required by the device's hardware level and capabilities.
+ *
All the static, control, and dynamic metadata tags related to JPEG apply to HEIC formats.
+ * Configuring JPEG and HEIC streams at the same time is not supported.
+ *
All the configuration tuples (format, width, height, input?) will contain
+ * AIMAGE_FORMAT_HEIC format as OUTPUT only.
This lists the minimum frame duration for each
+ * format/size combination for HEIC output formats.
+ *
+ *
Type: int64[4*n]
+ *
+ *
This tag may appear in:
+ *
+ *
ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
+ *
+ *
This should correspond to the frame duration when only that
+ * stream is active, with all processing (typically in android.*.mode)
+ * set to either OFF or FAST.
+ *
When multiple streams are used in a request, the minimum frame
+ * duration will be max(individual stream min durations).
+ *
See ACAMERA_SENSOR_FRAME_DURATION and
+ * ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS for more details about
+ * calculating the max frame rate.
This lists the maximum stall duration for each
+ * output format/size combination for HEIC streams.
+ *
+ *
Type: int64[4*n]
+ *
+ *
This tag may appear in:
+ *
+ *
ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
+ *
+ *
A stall duration is how much extra time would get added
+ * to the normal minimum frame duration for a repeating request
+ * that has streams with non-zero stall.
+ *
This functions similarly to
+ * ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS for HEIC
+ * streams.
+ *
All HEIC output stream formats may have a nonzero stall
+ * duration.
The available HEIC (ISO/IEC 23008-12) stream
+ * configurations that this camera device supports
+ * (i.e. format, width, height, output/input stream).
This lists the minimum frame duration for each
+ * format/size combination for HEIC output formats for CaptureRequests where
+ * ACAMERA_SENSOR_PIXEL_MODE is set to
+ * CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION.
+ *
+ * @see ACAMERA_SENSOR_PIXEL_MODE
+ *
+ *
Type: int64[4*n]
+ *
+ *
This tag may appear in:
+ *
+ *
ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
+ *
+ *
Refer to ACAMERA_HEIC_AVAILABLE_HEIC_MIN_FRAME_DURATIONS for details.
This lists the maximum stall duration for each
+ * output format/size combination for HEIC streams for CaptureRequests where
+ * ACAMERA_SENSOR_PIXEL_MODE is set to
+ * CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION.
+ *
+ * @see ACAMERA_SENSOR_PIXEL_MODE
+ *
+ *
Type: int64[4*n]
+ *
+ *
This tag may appear in:
+ *
+ *
ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
+ *
+ *
Refer to ACAMERA_HEIC_AVAILABLE_HEIC_STALL_DURATIONS for details.
ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
+ *
+ *
This enum defines the locations of the cameras relative to the vehicle body frame on
+ * the automotive sensor coordinate system.
+ * If the system has FEATURE_AUTOMOTIVE, the camera will have this entry in its static
+ * metadata.
+ *
+ *
+ *   - INTERIOR is the inside of the vehicle body frame (or the passenger cabin).
+ *   - EXTERIOR is the outside of the vehicle body frame.
+ *   - EXTRA is the extra vehicle, such as a trailer.
+ *
+ * Each side of the vehicle body frame on this coordinate system is defined as below:
+ *
+ *   - FRONT is where the Y-axis increases toward.
+ *   - REAR is where the Y-axis decreases toward.
+ *   - LEFT is where the X-axis decreases toward.
+ *   - RIGHT is where the X-axis increases toward.
+ *
+ * If the camera has either EXTERIOR_OTHER or EXTRA_OTHER, its static metadata will list
+ * the following entries, so that applications can determine the camera's exact location:
ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
+ *
+ *
This enum defines the lens facing characteristic of the cameras on the automotive
+ * devices with locations ACAMERA_AUTOMOTIVE_LOCATION defines. If the system has
+ * FEATURE_AUTOMOTIVE, the camera will have this entry in its static metadata.
+ *
When ACAMERA_AUTOMOTIVE_LOCATION is INTERIOR, this has one or more INTERIOR_*
+ * values or a single EXTERIOR_* value. When this has more than one INTERIOR_*,
+ * the first value must be the one for the seat closest to the optical axis. If this
+ * contains INTERIOR_OTHER, all other values will be ineffective.
+ *
When ACAMERA_AUTOMOTIVE_LOCATION is EXTERIOR_* or EXTRA, this has a single
+ * EXTERIOR_* value.
+ *
If a camera has INTERIOR_OTHER or EXTERIOR_OTHER, or more than one camera is at the
+ * same location and facing the same direction, their static metadata will list the
+ * following entries, so that applications can determine their lenses' exact facing
+ * directions:
+ *
+ *
ACAMERA_LENS_POSE_REFERENCE
+ *
ACAMERA_LENS_POSE_ROTATION
+ *
ACAMERA_LENS_POSE_TRANSLATION
+ *
+ *
+ * @see ACAMERA_AUTOMOTIVE_LOCATION
+ * @see ACAMERA_LENS_POSE_REFERENCE
+ * @see ACAMERA_LENS_POSE_ROTATION
+ * @see ACAMERA_LENS_POSE_TRANSLATION
+ */
+ ACAMERA_AUTOMOTIVE_LENS_FACING = // byte[n] (acamera_metadata_enum_android_automotive_lens_facing_t)
+ acamera_metadata_section_start.ACAMERA_AUTOMOTIVE_LENS_START,
+ ACAMERA_AUTOMOTIVE_LENS_END,
+
+ }
+
+ /**
+ * Enumeration definitions for the various entries that need them
+ */
+
+// ACAMERA_COLOR_CORRECTION_MODE
+ public enum acamera_metadata_enum_acamera_color_correction_mode
+ {
+ /**
+ *
Use the ACAMERA_COLOR_CORRECTION_TRANSFORM matrix
+ * and ACAMERA_COLOR_CORRECTION_GAINS to do color conversion.
+ *
All advanced white balance adjustments (not specified
+ * by our white balance pipeline) must be disabled.
+ *
If AWB is enabled with ACAMERA_CONTROL_AWB_MODE != OFF, then
+ * TRANSFORM_MATRIX is ignored. The camera device will override
+ * this value to either FAST or HIGH_QUALITY.
Color correction processing must not slow down
+ * capture rate relative to sensor raw output.
+ *
Advanced white balance adjustments above and beyond
+ * the specified white balance pipeline may be applied.
+ *
If AWB is enabled with ACAMERA_CONTROL_AWB_MODE != OFF, then
+ * the camera device uses the last frame's AWB values
+ * (or defaults if AWB has never been run).
Color correction processing operates at improved
+ * quality but the capture rate might be reduced (relative to sensor
+ * raw output rate)
+ *
Advanced white balance adjustments above and beyond
+ * the specified white balance pipeline may be applied.
+ *
If AWB is enabled with ACAMERA_CONTROL_AWB_MODE != OFF, then
+ * the camera device uses the last frame's AWB values
+ * (or defaults if AWB has never been run).
+ * The camera device will automatically adapt its
+ * antibanding routine to the current illumination
+ * condition. This is the default mode if AUTO is
+ * available on a given camera device.
The camera device's autoexposure routine is disabled.
+ *
The application-selected ACAMERA_SENSOR_EXPOSURE_TIME,
+ * ACAMERA_SENSOR_SENSITIVITY and
+ * ACAMERA_SENSOR_FRAME_DURATION are used by the camera
+ * device, along with ACAMERA_FLASH_* fields, if there's
+ * a flash unit for this camera device.
+ *
Note that auto-white balance (AWB) and auto-focus (AF)
+ * behavior is device dependent when AE is in OFF mode.
+ * To have consistent behavior across different devices,
+ * it is recommended to either set AWB and AF to OFF mode
+ * or lock AWB and AF before setting AE to OFF.
+ * See ACAMERA_CONTROL_AWB_MODE, ACAMERA_CONTROL_AF_MODE,
+ * ACAMERA_CONTROL_AWB_LOCK, and ACAMERA_CONTROL_AF_TRIGGER
+ * for more details.
+ *
LEGACY devices do not support the OFF mode and will
+ * override attempts to use this value to ON.
The camera device's autoexposure routine is active,
+ * with no flash control.
+ *
The application's values for
+ * ACAMERA_SENSOR_EXPOSURE_TIME,
+ * ACAMERA_SENSOR_SENSITIVITY, and
+ * ACAMERA_SENSOR_FRAME_DURATION are ignored. The
+ * application has control over the various
+ * ACAMERA_FLASH_* fields.
Like ON, except that the camera device also controls
+ * the camera's flash unit, firing it in low-light
+ * conditions.
+ *
The flash may be fired during a precapture sequence
+ * (triggered by ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER) and
+ * may be fired for captures for which the
+ * ACAMERA_CONTROL_CAPTURE_INTENT field is set to
+ * STILL_CAPTURE
Like ON, except that the camera device also controls
+ * the camera's flash unit, always firing it for still
+ * captures.
+ *
The flash may be fired during a precapture sequence
+ * (triggered by ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER) and
+ * will always be fired for captures for which the
+ * ACAMERA_CONTROL_CAPTURE_INTENT field is set to
+ * STILL_CAPTURE
+ * It informs the camera device that an external flash has been turned on, and that
+ * metering (and continuous focus if active) should be quickly recalculated to account
+ * for the external flash. Otherwise, this mode acts like ON.
+ *
+ * When the external flash is turned off, AE mode should be changed to one of the
+ * other available AE modes.
+ *
+ * If the camera device supports AE external flash mode, ACAMERA_CONTROL_AE_STATE must
+ * be FLASH_REQUIRED after the camera device finishes the AE scan and it's too dark without
+ * flash.
The camera device will cancel any currently active or completed
+ * precapture metering sequence, the auto-exposure routine will return to its
+ * initial state.
In this mode, the lens does not move unless
+ * the autofocus trigger action is called. When that trigger
+ * is activated, AF will transition to ACTIVE_SCAN, then to
+ * the outcome of the scan (FOCUSED or NOT_FOCUSED).
+ *
Always supported if lens is not fixed focus.
+ *
Use ACAMERA_LENS_INFO_MINIMUM_FOCUS_DISTANCE to determine if lens
+ * is fixed-focus.
+ *
Triggering AF_CANCEL resets the lens position to default,
+ * and sets the AF state to INACTIVE.
In this mode, the lens does not move unless the
+ * autofocus trigger action is called. When that trigger is
+ * activated, AF will transition to ACTIVE_SCAN, then to
+ * the outcome of the scan (FOCUSED or NOT_FOCUSED). This
+ * mode is optimized for focusing on objects very close to
+ * the camera.
+ *
When that trigger is activated, AF will transition to
+ * ACTIVE_SCAN, then to the outcome of the scan (FOCUSED or
+ * NOT_FOCUSED). Triggering cancel AF resets the lens
+ * position to default, and sets the AF state to
+ * INACTIVE.
In this mode, the AF algorithm modifies the lens
+ * position continually to attempt to provide a
+ * constantly-in-focus image stream.
+ *
The focusing behavior should be suitable for good quality
+ * video recording; typically this means slower focus
+ * movement and no overshoots. When the AF trigger is not
+ * involved, the AF algorithm should start in INACTIVE state,
+ * and then transition into PASSIVE_SCAN and PASSIVE_FOCUSED
+ * states as appropriate. When the AF trigger is activated,
+ * the algorithm should immediately transition into
+ * AF_FOCUSED or AF_NOT_FOCUSED as appropriate, and lock the
+ * lens position until a cancel AF trigger is received.
+ *
Once cancel is received, the algorithm should transition
+ * back to INACTIVE and resume passive scan. Note that this
+ * behavior is not identical to CONTINUOUS_PICTURE, since an
+ * ongoing PASSIVE_SCAN must immediately be
+ * canceled.
In this mode, the AF algorithm modifies the lens
+ * position continually to attempt to provide a
+ * constantly-in-focus image stream.
+ *
The focusing behavior should be suitable for still image
+ * capture; typically this means focusing as fast as
+ * possible. When the AF trigger is not involved, the AF
+ * algorithm should start in INACTIVE state, and then
+ * transition into PASSIVE_SCAN and PASSIVE_FOCUSED states as
+ * appropriate as it attempts to maintain focus. When the AF
+ * trigger is activated, the algorithm should finish its
+ * PASSIVE_SCAN if active, and then transition into
+ * AF_FOCUSED or AF_NOT_FOCUSED as appropriate, and lock the
+ * lens position until a cancel AF trigger is received.
+ *
When the AF cancel trigger is activated, the algorithm
+ * should transition back to INACTIVE and then act as if it
+ * has just been started.
The camera device will produce images with an extended
+ * depth of field automatically; no special focusing
+ * operations need to be done before taking a picture.
+ *
AF triggers are ignored, and the AF state will always be
+ * INACTIVE.
The camera device's auto-white balance routine is disabled.
+ *
The application-selected color transform matrix
+ * (ACAMERA_COLOR_CORRECTION_TRANSFORM) and gains
+ * (ACAMERA_COLOR_CORRECTION_GAINS) are used by the camera
+ * device for manual white balance control.
The camera device's auto-white balance routine is active.
+ *
The application's values for ACAMERA_COLOR_CORRECTION_TRANSFORM
+ * and ACAMERA_COLOR_CORRECTION_GAINS are ignored.
+ * For devices that support the MANUAL_POST_PROCESSING capability, the
+ * values used by the camera device for the transform and gains
+ * will be available in the capture result for this request.
The camera device's auto-white balance routine is disabled;
+ * the camera device uses incandescent light as the assumed scene
+ * illumination for white balance.
+ *
While the exact white balance transforms are up to the
+ * camera device, they will approximately match the CIE
+ * standard illuminant A.
+ *
The application's values for ACAMERA_COLOR_CORRECTION_TRANSFORM
+ * and ACAMERA_COLOR_CORRECTION_GAINS are ignored.
+ * For devices that support the MANUAL_POST_PROCESSING capability, the
+ * values used by the camera device for the transform and gains
+ * will be available in the capture result for this request.
The camera device's auto-white balance routine is disabled;
+ * the camera device uses fluorescent light as the assumed scene
+ * illumination for white balance.
+ *
While the exact white balance transforms are up to the
+ * camera device, they will approximately match the CIE
+ * standard illuminant F2.
+ *
The application's values for ACAMERA_COLOR_CORRECTION_TRANSFORM
+ * and ACAMERA_COLOR_CORRECTION_GAINS are ignored.
+ * For devices that support the MANUAL_POST_PROCESSING capability, the
+ * values used by the camera device for the transform and gains
+ * will be available in the capture result for this request.
The camera device's auto-white balance routine is disabled;
+ * the camera device uses warm fluorescent light as the assumed scene
+ * illumination for white balance.
+ *
While the exact white balance transforms are up to the
+ * camera device, they will approximately match the CIE
+ * standard illuminant F4.
+ *
The application's values for ACAMERA_COLOR_CORRECTION_TRANSFORM
+ * and ACAMERA_COLOR_CORRECTION_GAINS are ignored.
+ * For devices that support the MANUAL_POST_PROCESSING capability, the
+ * values used by the camera device for the transform and gains
+ * will be available in the capture result for this request.
The camera device's auto-white balance routine is disabled;
+ * the camera device uses daylight light as the assumed scene
+ * illumination for white balance.
+ *
While the exact white balance transforms are up to the
+ * camera device, they will approximately match the CIE
+ * standard illuminant D65.
+ *
The application's values for ACAMERA_COLOR_CORRECTION_TRANSFORM
+ * and ACAMERA_COLOR_CORRECTION_GAINS are ignored.
+ * For devices that support the MANUAL_POST_PROCESSING capability, the
+ * values used by the camera device for the transform and gains
+ * will be available in the capture result for this request.
The camera device's auto-white balance routine is disabled;
+ * the camera device uses cloudy daylight light as the assumed scene
+ * illumination for white balance.
+ *
The application's values for ACAMERA_COLOR_CORRECTION_TRANSFORM
+ * and ACAMERA_COLOR_CORRECTION_GAINS are ignored.
+ * For devices that support the MANUAL_POST_PROCESSING capability, the
+ * values used by the camera device for the transform and gains
+ * will be available in the capture result for this request.
The camera device's auto-white balance routine is disabled;
+ * the camera device uses twilight light as the assumed scene
+ * illumination for white balance.
+ *
The application's values for ACAMERA_COLOR_CORRECTION_TRANSFORM
+ * and ACAMERA_COLOR_CORRECTION_GAINS are ignored.
+ * For devices that support the MANUAL_POST_PROCESSING capability, the
+ * values used by the camera device for the transform and gains
+ * will be available in the capture result for this request.
The camera device's auto-white balance routine is disabled;
+ * the camera device uses shade light as the assumed scene
+ * illumination for white balance.
+ *
The application's values for ACAMERA_COLOR_CORRECTION_TRANSFORM
+ * and ACAMERA_COLOR_CORRECTION_GAINS are ignored.
+ * For devices that support the MANUAL_POST_PROCESSING capability, the
+ * values used by the camera device for the transform and gains
+ * will be available in the capture result for this request.
This request is for a video snapshot (still
+ * image while recording video) use case.
+ *
The camera device should take the highest-quality image
+ * possible (given the other settings) without disrupting the
+ * frame rate of video recording.
This request is for a ZSL usecase; the
+ * application will stream full-resolution images and
+ * reprocess one or several later for a final
+ * capture.
This request is for a motion tracking use case, where
+ * the application will use camera and inertial sensor data to
+ * locate and track objects in the world.
+ *
The camera device auto-exposure routine will limit the exposure time
+ * of the camera to no more than 20 milliseconds, to minimize motion blur.
All control by the device's metering and focusing (3A)
+ * routines is disabled, and no other settings in
+ * ACAMERA_CONTROL_* have any effect, except that
+ * ACAMERA_CONTROL_CAPTURE_INTENT may be used by the camera
+ * device to select post-processing values for processing
+ * blocks that do not allow for manual control, or are not
+ * exposed by the camera API.
+ *
However, the camera device's 3A routines may continue to
+ * collect statistics and update their internal state so that
+ * when control is switched to AUTO mode, good control values
+ * can be immediately applied.
Manual control of capture parameters is disabled. All
+ * controls in ACAMERA_CONTROL_* besides sceneMode take
+ * effect.
+ */
+ ACAMERA_CONTROL_MODE_AUTO = 1,
+
+ /**
+ *
+ * Use a specific scene mode.
+ *
+ * Enabling this disables control.aeMode, control.awbMode and
+ * control.afMode controls; the camera device will ignore
+ * those settings while USE_SCENE_MODE is active (except for
+ * FACE_PRIORITY scene mode). Other control entries are still active.
+ * This setting can only be used if scene mode is supported (i.e.
+ * ACAMERA_CONTROL_AVAILABLE_SCENE_MODES
+ * contains some modes other than DISABLED).
+ *
+ * For extended scene modes such as BOKEH, please use USE_EXTENDED_SCENE_MODE instead.
Same as OFF mode, except that this capture will not be
+ * used by camera device background auto-exposure, auto-white balance and
+ * auto-focus algorithms (3A) to update their statistics.
+ *
Specifically, the 3A routines are locked to the last
+ * values set from a request with AUTO, OFF, or
+ * USE_SCENE_MODE, and any statistics or state updates
+ * collected from manual captures with OFF_KEEP_STATE will be
+ * discarded by the camera device.
When extended scene mode is on, the camera device may override certain control
+ * parameters, such as targetFpsRange, AE, AWB, and AF modes, to achieve best power and
+ * quality tradeoffs. Only the mandatory stream combinations of LIMITED hardware level
+ * are guaranteed.
+ *
This setting can only be used if extended scene mode is supported (i.e.
+ * android.control.availableExtendedSceneModes
+ * contains some modes other than DISABLED).
If face detection support exists, use face
+ * detection data for auto-focus, auto-white balance, and
+ * auto-exposure routines.
+ *
If face detection statistics are disabled
+ * (i.e. ACAMERA_STATISTICS_FACE_DETECT_MODE is set to OFF),
+ * this should still operate correctly (but will not return
+ * face detection statistics to the framework).
+ *
Unlike the other scene modes, ACAMERA_CONTROL_AE_MODE,
+ * ACAMERA_CONTROL_AWB_MODE, and ACAMERA_CONTROL_AF_MODE
+ * remain active when FACE_PRIORITY is set.
Turn on a device-specific high dynamic range (HDR) mode.
+ *
+ * In this scene mode, the camera device captures images
+ * that keep a larger range of scene illumination levels
+ * visible in the final image. For example, when taking a
+ * picture of an object in front of a bright window, both
+ * the object and the scene through the window may be
+ * visible when using HDR mode, while in normal AUTO mode,
+ * one or the other may be poorly exposed. As a tradeoff,
+ * HDR mode generally takes much longer to capture a single
+ * image, has no user control, and may have other artifacts
+ * depending on the HDR method used.
+ *
Therefore, HDR captures operate at a much slower rate
+ * than regular captures.
+ *
In this mode, on LIMITED or FULL devices, when a request
+ * is made with a ACAMERA_CONTROL_CAPTURE_INTENT of
+ * STILL_CAPTURE, the camera device will capture an image
+ * using a high dynamic range capture technique. On LEGACY
+ * devices, captures that target a JPEG-format output will
+ * be captured with HDR, and the capture intent is not
+ * relevant.
+ *
The HDR capture may involve the device capturing a burst
+ * of images internally and combining them into one, or it
+ * may involve the device using specialized high dynamic
+ * range capture hardware. In all cases, a single image is
+ * produced in response to a capture request submitted
+ * while in HDR mode.
+ *
Since substantial post-processing is generally needed to
+ * produce an HDR image, only YUV, PRIVATE, and JPEG
+ * outputs are supported for LIMITED/FULL device HDR
+ * captures, and only JPEG outputs are supported for LEGACY
+ * HDR captures. Using a RAW output for HDR capture is not
+ * supported.
+ *
Some devices may also support always-on HDR, which
+ * applies HDR processing at full frame rate. For these
+ * devices, intents other than STILL_CAPTURE will also
+ * produce an HDR output with no frame rate impact compared
+ * to normal operation, though the quality may be lower
+ * than for STILL_CAPTURE intents.
+ *
If SCENE_MODE_HDR is used with unsupported output types
+ * or capture intents, the images captured will be as if
+ * the SCENE_MODE was not enabled at all.
+ * Preview stabilization, where the preview, in addition to all other non-RAW streams, is
+ * stabilized with the same quality of stabilization, is enabled. This mode aims to give
+ * clients a 'what you see is what you get' effect. In this mode, the FoV reduction will
+ * be a maximum of 20% both horizontally and vertically
+ * (10% from left, right, top, bottom) for the given zoom ratio / crop region.
+ * The resultant FoV will also be the same across all processed streams
+ * (that have the same aspect ratio).
When a camera device is opened, it starts in
+ * this state. This is a transient state, the camera device may skip reporting
+ * this state in capture result.
AE has been asked to do a precapture sequence
+ * and is currently executing it.
+ *
Precapture can be triggered through setting
+ * ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER to START. Currently
+ * active and completed (if it causes camera device internal AE lock) precapture
+ * metering sequence can be canceled through setting
+ * ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER to CANCEL.
+ *
Once PRECAPTURE completes, AE will transition to CONVERGED
+ * or FLASH_REQUIRED as appropriate. This is a transient
+ * state, the camera device may skip reporting this state in
+ * capture result.
AF is off or has not yet tried to scan/been asked
+ * to scan.
+ *
When a camera device is opened, it starts in this
+ * state. This is a transient state, the camera device may
+ * skip reporting this state in capture
+ * result.
AF believes it is focused correctly and has locked
+ * focus.
+ *
This state is reached only after an explicit START AF trigger has been
+ * sent (ACAMERA_CONTROL_AF_TRIGGER), when good focus has been obtained.
+ *
The lens will remain stationary until the AF mode (ACAMERA_CONTROL_AF_MODE) is changed or
+ * a new AF trigger is sent to the camera device (ACAMERA_CONTROL_AF_TRIGGER).
AF has failed to focus successfully and has locked
+ * focus.
+ *
This state is reached only after an explicit START AF trigger has been
+ * sent (ACAMERA_CONTROL_AF_TRIGGER), when good focus cannot be obtained.
+ *
The lens will remain stationary until the AF mode (ACAMERA_CONTROL_AF_MODE) is changed or
+ * a new AF trigger is sent to the camera device (ACAMERA_CONTROL_AF_TRIGGER).
AWB is not in auto mode, or has not yet started metering.
+ *
When a camera device is opened, it starts in this
+ * state. This is a transient state, the camera device may
+ * skip reporting this state in capture
+ * result.
High quality bokeh mode is enabled for all non-raw streams (including YUV,
+ * JPEG, and IMPLEMENTATION_DEFINED) when capture intent is STILL_CAPTURE. Due to the
+ * extra image processing, this mode may introduce additional stall to non-raw streams.
+ * This mode should be used in high quality still capture use case.
Bokeh effect must not slow down capture rate relative to sensor raw output,
+ * and the effect is applied to all processed streams no larger than the maximum
+ * streaming dimension. This mode should be used if performance and power are a
+ * priority, such as video recording.
Apply edge enhancement at a quality level that does not slow down frame rate
+ * relative to sensor output. It may be the same as OFF if edge enhancement will
+ * slow down frame rate relative to sensor.
+ */
+ ACAMERA_EDGE_MODE_FAST = 1,
+
+ /**
+ *
Apply high-quality edge enhancement, at a cost of possibly reduced output frame rate.
Edge enhancement is applied at different
+ * levels for different output streams, based on resolution. Streams at maximum recording
+ * resolution (see {@link ACameraDevice_createCaptureSession })
+ * or below have edge enhancement applied, while higher-resolution streams have no edge
+ * enhancement applied. The level of edge enhancement for low-resolution streams is tuned
+ * so that frame rate is not impacted, and the quality is equal to or better than FAST
+ * (since it is only applied to lower-resolution outputs, quality may improve from FAST).
+ *
This mode is intended to be used by applications operating in a zero-shutter-lag mode
+ * with YUV or PRIVATE reprocessing, where the application continuously captures
+ * high-resolution intermediate buffers into a circular buffer, from which a final image is
+ * produced via reprocessing when a user takes a picture. For such a use case, the
+ * high-resolution buffers must not have edge enhancement applied to maximize efficiency of
+ * preview and to avoid double-applying enhancement when reprocessed, while low-resolution
+ * buffers (used for recording or preview, generally) need edge enhancement applied for
+ * reasonable preview quality.
+ *
This mode is guaranteed to be supported by devices that support either the
+ * YUV_REPROCESSING or PRIVATE_REPROCESSING capabilities
+ * (ACAMERA_REQUEST_AVAILABLE_CAPABILITIES lists either of those capabilities) and it will
+ * be the default mode for CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG template.
The lens parameters (ACAMERA_LENS_FOCAL_LENGTH, ACAMERA_LENS_FOCUS_DISTANCE,
+ * ACAMERA_LENS_FILTER_DENSITY and ACAMERA_LENS_APERTURE) are not changing.
One or several of the lens parameters
+ * (ACAMERA_LENS_FOCAL_LENGTH, ACAMERA_LENS_FOCUS_DISTANCE,
+ * ACAMERA_LENS_FILTER_DENSITY or ACAMERA_LENS_APERTURE) is
+ * currently changing.
The value of ACAMERA_LENS_POSE_TRANSLATION is relative to the optical center of
+ * the largest camera device facing the same direction as this camera.
+ *
This is the default value for API levels before Android P.
The camera device cannot represent the values of ACAMERA_LENS_POSE_TRANSLATION
+ * and ACAMERA_LENS_POSE_ROTATION accurately enough. One such example is a camera device
+ * on the cover of a foldable phone: in order to measure the pose translation and rotation,
+ * some kind of hinge position sensor would be needed.
+ *
The value of ACAMERA_LENS_POSE_TRANSLATION must be all zeros, and
+ * ACAMERA_LENS_POSE_ROTATION must be values matching its default facing.
The value of ACAMERA_LENS_POSE_TRANSLATION is relative to the origin of the
+ * automotive sensor coordinate system, which is at the center of the rear axle.
The lens focus distance is not accurate, and the units used for
+ * ACAMERA_LENS_FOCUS_DISTANCE do not correspond to any physical units.
+ *
Setting the lens to the same focus distance on separate occasions may
+ * result in a different real focus distance, depending on factors such
+ * as the orientation of the device, the age of the focusing mechanism,
+ * and the device temperature. The focus distance value will still be
+ * in the range of [0, ACAMERA_LENS_INFO_MINIMUM_FOCUS_DISTANCE], where 0
+ * represents the farthest focus.
However, setting the lens to the same focus distance
+ * on separate occasions may result in a different real
+ * focus distance, depending on factors such as the
+ * orientation of the device, the age of the focusing
+ * mechanism, and the device temperature.
The lens focus distance is measured in diopters, and
+ * is calibrated.
+ *
The lens mechanism is calibrated so that setting the
+ * same focus distance is repeatable on multiple
+ * occasions with good accuracy, and the focus distance
+ * corresponds to the real physical distance to the plane
+ * of best focus.
Noise reduction is applied without reducing frame rate relative to sensor
+ * output. It may be the same as OFF if noise reduction will reduce frame rate
+ * relative to sensor.
+ * Noise reduction is applied at different levels for different output streams,
+ * based on resolution. Streams at maximum recording resolution (see {@link ACameraDevice_createCaptureSession })
+ * or below have noise reduction applied, while higher-resolution streams have MINIMAL (if
+ * supported) or no noise reduction applied (if MINIMAL is not supported). The degree of
+ * noise reduction for low-resolution streams is tuned so that frame rate is not impacted,
+ * and the quality is equal to or better than FAST (since it is only applied to
+ * lower-resolution outputs, quality may improve from FAST).
+ *
This mode is intended to be used by applications operating in a zero-shutter-lag mode
+ * with YUV or PRIVATE reprocessing, where the application continuously captures
+ * high-resolution intermediate buffers into a circular buffer, from which a final image is
+ * produced via reprocessing when a user takes a picture. For such a use case, the
+ * high-resolution buffers must not have noise reduction applied to maximize efficiency of
+ * preview and to avoid over-applying noise filtering when reprocessing, while
+ * low-resolution buffers (used for recording or preview, generally) need noise reduction
+ * applied for reasonable preview quality.
+ *
This mode is guaranteed to be supported by devices that support either the
+ * YUV_REPROCESSING or PRIVATE_REPROCESSING capabilities
+ * (ACAMERA_REQUEST_AVAILABLE_CAPABILITIES lists either of those capabilities) and it will
+ * be the default mode for CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG template.
+ *
+ * The minimal set of capabilities that every camera
+ * device (regardless of ACAMERA_INFO_SUPPORTED_HARDWARE_LEVEL)
+ * supports.
+ *
+ * This capability is listed by all normal devices, and
+ * indicates that the camera device has a feature set
+ * that's comparable to the baseline requirements for the
+ * older android.hardware.Camera API.
+ *
+ * Devices with the DEPTH_OUTPUT capability might not list this
+ * capability, indicating that they support only depth measurement,
+ * not standard color output.
+ *
+ * The camera device can be manually controlled (3A algorithms such
+ * as auto-exposure, and auto-focus can be bypassed).
+ * The camera device supports basic manual control of the sensor image
+ * acquisition related stages. This means the following controls are
+ * guaranteed to be supported:
+ *
+ * - Manual frame duration control
+ *     - ACAMERA_SENSOR_FRAME_DURATION
+ *     - ACAMERA_SENSOR_INFO_MAX_FRAME_DURATION
+ * - Manual exposure control
+ *     - ACAMERA_SENSOR_EXPOSURE_TIME
+ *     - ACAMERA_SENSOR_INFO_EXPOSURE_TIME_RANGE
+ * - Manual sensitivity control
+ *     - ACAMERA_SENSOR_SENSITIVITY
+ *     - ACAMERA_SENSOR_INFO_SENSITIVITY_RANGE
+ * - Manual lens control (if the lens is adjustable)
+ *     - ACAMERA_LENS_*
+ * - Manual flash control (if a flash unit is present)
+ *     - ACAMERA_FLASH_*
+ * - Manual black level locking
+ *     - ACAMERA_BLACK_LEVEL_LOCK
+ * - Auto exposure lock
+ *     - ACAMERA_CONTROL_AE_LOCK
+ *
+ * If any of the above 3A algorithms are enabled, then the camera
+ * device will accurately report the values applied by 3A in the
+ * result.
+ *
+ * A given camera device may also support additional manual sensor controls,
+ * but this capability only covers the above list of controls.
+ *
+ * If this is supported, android.scaler.streamConfigurationMap will
+ * additionally return a min frame duration that is greater than
+ * zero for each supported size-format combination.
+ *
+ * For camera devices with LOGICAL_MULTI_CAMERA capability, when the underlying active
+ * physical camera switches, exposureTime, sensitivity, and lens properties may change
+ * even if AE/AF is locked. However, the overall auto exposure and auto focus experience
+ * for users will be consistent. Refer to LOGICAL_MULTI_CAMERA capability for details.
+ *
+ * The camera device post-processing stages can be manually controlled.
+ * The camera device supports basic manual control of the image post-processing
+ * stages. This means the following controls are guaranteed to be supported:
+ *
+ * - Manual tonemap control
+ *     - android.tonemap.curve
+ *     - ACAMERA_TONEMAP_MODE
+ *     - ACAMERA_TONEMAP_MAX_CURVE_POINTS
+ *     - ACAMERA_TONEMAP_GAMMA
+ *     - ACAMERA_TONEMAP_PRESET_CURVE
+ * - Manual white balance control
+ *     - ACAMERA_COLOR_CORRECTION_TRANSFORM
+ *     - ACAMERA_COLOR_CORRECTION_GAINS
+ * - Manual lens shading map control
+ *     - ACAMERA_SHADING_MODE
+ *     - ACAMERA_STATISTICS_LENS_SHADING_MAP_MODE
+ *     - ACAMERA_STATISTICS_LENS_SHADING_MAP
+ *     - ACAMERA_LENS_INFO_SHADING_MAP_SIZE
+ * - Manual aberration correction control (if aberration correction is supported)
+ *
+ * If auto white balance is enabled, then the camera device
+ * will accurately report the values applied by AWB in the result.
+ *
+ * A given camera device may also support additional post-processing
+ * controls, but this capability only covers the above list of controls.
+ *
+ * For camera devices with LOGICAL_MULTI_CAMERA capability, when underlying active
+ * physical camera switches, tonemap, white balance, and shading map may change even if
+ * awb is locked. However, the overall post-processing experience for users will be
+ * consistent. Refer to LOGICAL_MULTI_CAMERA capability for details.
+ *
+ * The camera device supports outputting RAW buffers and
+ * metadata for interpreting them.
+ *
+ * Devices supporting the RAW capability allow both for
+ * saving DNG files, and for direct application processing of
+ * raw sensor images.
+ *
+ * - RAW_SENSOR is supported as an output format.
+ * - The maximum available resolution for RAW_SENSOR streams
+ *   will match either the value in
+ *   ACAMERA_SENSOR_INFO_PIXEL_ARRAY_SIZE or
+ *   ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE.
+ * - All DNG-related optional metadata entries are provided
+ *   by the camera device.
+ *
+ * The camera device supports accurately reporting the sensor settings for many of
+ * the sensor controls while the built-in 3A algorithm is running. This allows
+ * reporting of sensor settings even when these settings cannot be manually changed.
+ *
+ * The values reported for the following controls are guaranteed to be available
+ * in the CaptureResult, including when 3A is enabled:
+ *
+ * - Exposure control
+ *     - ACAMERA_SENSOR_EXPOSURE_TIME
+ * - Sensitivity control
+ *     - ACAMERA_SENSOR_SENSITIVITY
+ * - Lens controls (if the lens is adjustable)
+ *     - ACAMERA_LENS_FOCUS_DISTANCE
+ *     - ACAMERA_LENS_APERTURE
+ *
+ * This capability is a subset of the MANUAL_SENSOR control capability, and will
+ * always be included if the MANUAL_SENSOR capability is available.
+ *
+ * The camera device supports capturing high-resolution images at >= 20 frames per
+ * second, in at least the uncompressed YUV format, when post-processing settings are
+ * set to FAST. Additionally, all image resolutions less than 24 megapixels can be
+ * captured at >= 10 frames per second. Here, 'high resolution' means at least 8
+ * megapixels, or the maximum resolution of the device, whichever is smaller.
+ *
+ * More specifically, this means that at least one output {@link AIMAGE_FORMAT_YUV_420_888 } size listed in
+ * {@link ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS }
+ * is larger or equal to the 'high resolution' defined above, and can be captured at at
+ * least 20 fps. For the largest {@link AIMAGE_FORMAT_YUV_420_888 } size listed in
+ * {@link ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS },
+ * the camera device can capture this size at at least 10 frames per second if the size is
+ * less than 24 megapixels. Also, the ACAMERA_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES entry
+ * lists at least one FPS range where the minimum FPS is >= 1 / minimumFrameDuration
+ * for the largest YUV_420_888 size.
+ *
+ * If the device supports {@link AIMAGE_FORMAT_RAW10 }, {@link AIMAGE_FORMAT_RAW12 }, or {@link AIMAGE_FORMAT_Y8 }, then those can also be
+ * captured at the same rate as the maximum-size YUV_420_888 resolution.
+ *
+ * In addition, the ACAMERA_SYNC_MAX_LATENCY field is guaranteed to have a value between 0
+ * and 4, inclusive. ACAMERA_CONTROL_AE_LOCK_AVAILABLE and ACAMERA_CONTROL_AWB_LOCK_AVAILABLE
+ * are also guaranteed to be true so burst capture with these two locks ON yields
+ * consistent image output.
+ *
+ * The camera device can produce depth measurements from its field of view.
+ *
+ * This capability requires the camera device to support the following:
+ *
+ * - {@link AIMAGE_FORMAT_DEPTH16 } is supported as
+ *   an output format.
+ * - {@link AIMAGE_FORMAT_DEPTH_POINT_CLOUD } is
+ *   optionally supported as an output format.
+ * - This camera device, and all camera devices with the same ACAMERA_LENS_FACING, will
+ *   list the following calibration metadata entries in both {@link ACameraManager_getCameraCharacteristics }
+ *   and {@link ACameraCaptureSession_captureCallback_result }:
+ *     - ACAMERA_LENS_POSE_TRANSLATION
+ *     - ACAMERA_LENS_POSE_ROTATION
+ *     - ACAMERA_LENS_INTRINSIC_CALIBRATION
+ *     - ACAMERA_LENS_DISTORTION
+ * - The ACAMERA_DEPTH_DEPTH_IS_EXCLUSIVE entry is listed by this device.
+ * - As of Android P, the ACAMERA_LENS_POSE_REFERENCE entry is listed by this device.
+ * - A LIMITED camera with only the DEPTH_OUTPUT capability does not have to support
+ *   normal YUV_420_888, Y8, JPEG, and PRIV-format outputs. It only has to support the
+ *   DEPTH16 format.
+ *
+ * Generally, depth output operates at a slower frame rate than standard color capture,
+ * so the DEPTH16 and DEPTH_POINT_CLOUD formats will commonly have a stall duration that
+ * should be accounted for (see {@link ACAMERA_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS }).
+ * On a device that supports both depth and color-based output, to enable smooth preview,
+ * using a repeating burst is recommended, where a depth-output target is only included
+ * once every N frames, where N is the ratio between preview output rate and depth output
+ * rate, including depth stall time.
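+ *
+ * For example (an illustrative sketch, not from the original docs): with preview running
+ * at 30 fps and depth output at 5 fps, a repeating burst of N = 6 requests could target
+ * the preview surface in all six requests and add the depth target to only the first,
+ * keeping preview smooth while still delivering depth at its native rate.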
+ *
+ * The camera device supports the MOTION_TRACKING value for
+ * ACAMERA_CONTROL_CAPTURE_INTENT, which limits maximum exposure time to 20 ms.
+ *
+ * This limits the motion blur of capture images, resulting in better image tracking
+ * results for use cases such as image stabilization or augmented reality.
+ *
+ * The camera device is a logical camera backed by two or more physical cameras.
+ *
+ * In API level 28, the physical cameras must also be exposed to the application via
+ * CameraManager#getCameraIdList.
+ *
+ * Starting from API level 29:
+ *
+ * - Some or all physical cameras may not be independently exposed to the application,
+ *   in which case the physical camera IDs will not be available in
+ *   CameraManager#getCameraIdList. But the
+ *   application can still query the physical cameras' characteristics by calling
+ *   CameraManager#getCameraCharacteristics.
+ * - If a physical camera is hidden from the camera ID list, the mandatory stream
+ *   combinations for that physical camera must be supported through the logical camera
+ *   using physical streams. One exception is that in API level 30, a physical camera
+ *   may become unavailable via the
+ *   {@link ACameraManager_PhysicalCameraAvailabilityCallback }
+ *   callback.
+ *
+ * Combinations of logical and physical streams, or physical streams from different
+ * physical cameras are not guaranteed. However, if the camera device supports
+ * {@link ACameraDevice_isSessionConfigurationSupported },
+ * the application must be able to query whether a stream combination involving physical
+ * streams is supported by calling
+ * {@link ACameraDevice_isSessionConfigurationSupported }.
+ *
+ * A camera application shouldn't assume that there is at most one rear camera and one
+ * front camera in the system. For an application that switches between front and back cameras,
+ * the recommendation is to switch between the first rear camera and the first front
+ * camera in the list of supported camera devices.
+ *
+ * This capability requires the camera device to support the following:
+ *
+ * - This camera device must list static metadata
+ *   ACAMERA_LOGICAL_MULTI_CAMERA_SENSOR_SYNC_TYPE in
+ *   CameraCharacteristics.
+ * - The underlying physical cameras' static metadata must list the following entries,
+ *   so that the application can correlate pixels from the physical streams:
+ *     - ACAMERA_LENS_POSE_REFERENCE
+ *     - ACAMERA_LENS_POSE_ROTATION
+ *     - ACAMERA_LENS_POSE_TRANSLATION
+ *     - ACAMERA_LENS_INTRINSIC_CALIBRATION
+ *     - ACAMERA_LENS_DISTORTION
+ * - The SENSOR_INFO_TIMESTAMP_SOURCE of the logical device and physical devices must be
+ *   the same.
+ * - The logical camera must be LIMITED or higher device.
+ *
+ * A logical camera device's dynamic metadata may contain
+ * ACAMERA_LOGICAL_MULTI_CAMERA_ACTIVE_PHYSICAL_ID to notify the application of the current
+ * active physical camera Id. An active physical camera is the physical camera from which
+ * the logical camera's main image data outputs (YUV or RAW) and metadata come from.
+ * In addition, this serves as an indication of which physical camera is used to output to
+ * a RAW stream, or in case only physical cameras support RAW, which physical RAW stream
+ * the application should request.
+ *
+ * The logical camera's static metadata tags below describe the default active physical
+ * camera. An active physical camera is default if it's used when the application directly
+ * uses requests built from a template. All templates will default to the same active
+ * physical camera.
+ *
+ * - ACAMERA_SENSOR_INFO_SENSITIVITY_RANGE
+ * - ACAMERA_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT
+ * - ACAMERA_SENSOR_INFO_EXPOSURE_TIME_RANGE
+ * - ACAMERA_SENSOR_INFO_MAX_FRAME_DURATION
+ * - ACAMERA_SENSOR_INFO_PHYSICAL_SIZE
+ * - ACAMERA_SENSOR_INFO_WHITE_LEVEL
+ * - ACAMERA_SENSOR_INFO_LENS_SHADING_APPLIED
+ * - ACAMERA_SENSOR_REFERENCE_ILLUMINANT1
+ * - ACAMERA_SENSOR_REFERENCE_ILLUMINANT2
+ * - ACAMERA_SENSOR_CALIBRATION_TRANSFORM1
+ * - ACAMERA_SENSOR_CALIBRATION_TRANSFORM2
+ * - ACAMERA_SENSOR_COLOR_TRANSFORM1
+ * - ACAMERA_SENSOR_COLOR_TRANSFORM2
+ * - ACAMERA_SENSOR_FORWARD_MATRIX1
+ * - ACAMERA_SENSOR_FORWARD_MATRIX2
+ * - ACAMERA_SENSOR_BLACK_LEVEL_PATTERN
+ * - ACAMERA_SENSOR_MAX_ANALOG_SENSITIVITY
+ * - ACAMERA_SENSOR_OPTICAL_BLACK_REGIONS
+ * - ACAMERA_SENSOR_AVAILABLE_TEST_PATTERN_MODES
+ * - ACAMERA_LENS_INFO_HYPERFOCAL_DISTANCE
+ * - ACAMERA_LENS_INFO_MINIMUM_FOCUS_DISTANCE
+ * - ACAMERA_LENS_INFO_FOCUS_DISTANCE_CALIBRATION
+ * - ACAMERA_LENS_POSE_ROTATION
+ * - ACAMERA_LENS_POSE_TRANSLATION
+ * - ACAMERA_LENS_INTRINSIC_CALIBRATION
+ * - ACAMERA_LENS_POSE_REFERENCE
+ * - ACAMERA_LENS_DISTORTION
+ *
+ * The field of view of non-RAW physical streams must not be smaller than that of the
+ * non-RAW logical streams, or the maximum field-of-view of the physical camera,
+ * whichever is smaller. The application should check the physical capture result
+ * metadata for how the physical streams are cropped or zoomed. More specifically, given
+ * the physical camera result metadata, the effective horizontal field-of-view of the
+ * physical camera is:
+ *
+ *     fov = 2 * atan2(cropW * sensorW / (2 * zoomRatio * activeArrayW), focalLength)
+ *
+ * where the equation parameters are the physical camera's crop region width, physical
+ * sensor width, zoom ratio, active array width, and focal length respectively. Typically,
+ * the physical stream of the active physical camera has the same field-of-view as the
+ * logical streams. However, the same may not be true for physical streams from
+ * non-active physical cameras. For example, if the logical camera has a wide-ultrawide
+ * configuration where the wide lens is the default, when the crop region is set to the
+ * logical camera's active array size (and the zoom ratio set to 1.0 starting from
+ * Android 11), a physical stream for the ultrawide camera may prefer outputting images
+ * with larger field-of-view than that of the wide camera for better stereo matching
+ * margin or more robust motion tracking. At the same time, the physical non-RAW streams'
+ * field of view must not be smaller than the requested crop region and zoom ratio, as
+ * long as it's within the physical lens' capability. For example, for a logical camera
+ * with wide-tele lens configuration where the wide lens is the default, if the logical
+ * camera's crop region is set to maximum size, and zoom ratio set to 1.0, the physical
+ * stream for the tele lens will be configured to its maximum size crop region (no zoom).
+ *
+ * Deprecated: Prior to Android 11, the field of view of all non-RAW physical streams
+ * cannot be larger than that of non-RAW logical streams. If the logical camera has a
+ * wide-ultrawide lens configuration where the wide lens is the default, when the logical
+ * camera's crop region is set to maximum size, the FOV of the physical streams for the
+ * ultrawide lens will be the same as the logical stream, by making the crop region
+ * smaller than its active array size to compensate for the smaller focal length.
+ *
+ * There are two ways for the application to capture RAW images from a logical camera
+ * with RAW capability:
+ *
+ * - Because the underlying physical cameras may have different RAW capabilities (such
+ *   as resolution or CFA pattern), to maintain backward compatibility, when a RAW stream
+ *   is configured, the camera device makes sure the default active physical camera remains
+ *   active and does not switch to other physical cameras. (One exception is that, if the
+ *   logical camera consists of identical image sensors and advertises multiple focalLength
+ *   values due to different lenses, the camera device may generate RAW images from different
+ *   physical cameras based on the focalLength being set by the application.) This
+ *   backward-compatible approach usually results in loss of optical zoom, to telephoto
+ *   lens or to ultrawide lens.
+ * - Alternatively, to take advantage of the full zoomRatio range of the logical camera,
+ *   the application should use MultiResolutionImageReader
+ *   to capture RAW images from the currently active physical camera. Because different
+ *   physical cameras may have different RAW characteristics, the application needs to use
+ *   the characteristics and result metadata of the active physical camera for the
+ *   relevant RAW metadata.
+ *
+ * The capture request and result metadata tags required for backward compatible camera
+ * functionalities will be solely based on the logical camera capability. On the other
+ * hand, the use of manual capture controls (sensor or post-processing) with a
+ * logical camera may result in unexpected behavior when the HAL decides to switch
+ * between physical cameras with different characteristics under the hood. For example,
+ * when the application manually sets exposure time and sensitivity while zooming in,
+ * the brightness of the camera images may suddenly change because the HAL switches from one
+ * physical camera to the other.
+ *
+ * The camera device is a monochrome camera that doesn't contain a color filter array,
+ * and for YUV_420_888 streams, the pixel values on the U and V planes are all 128.
+ *
+ * A MONOCHROME camera must support the guaranteed stream combinations required for
+ * its device level and capabilities. Additionally, if the monochrome camera device
+ * supports Y8 format, all mandatory stream combination requirements related to {@link AIMAGE_FORMAT_YUV_420_888 YUV_420_888} apply
+ * to {@link AIMAGE_FORMAT_Y8 Y8} as well. There are no
+ * mandatory stream combination requirements with regard to
+ * {@link AIMAGE_FORMAT_Y8 Y8} for Bayer camera devices.
+ *
+ * Starting from Android Q, the SENSOR_INFO_COLOR_FILTER_ARRANGEMENT of a MONOCHROME
+ * camera will be either MONO or NIR.
+ *
+ * The camera device is capable of writing image data into a region of memory
+ * inaccessible to Android userspace or the Android kernel, and only accessible to
+ * trusted execution environments (TEE).
+ *
+ * The camera device is only accessible by Android's system components and privileged
+ * applications. Processes need to have the android.permission.SYSTEM_CAMERA permission in
+ * addition to android.permission.CAMERA in order to connect to this camera device.
+ *
+ * This camera device is capable of producing ultra high resolution images in
+ * addition to the image sizes described in
+ * android.scaler.streamConfigurationMap.
+ * It can operate in 'default' mode and 'max resolution' mode. It generally does this
+ * by binning pixels in 'default' mode and not binning them in 'max resolution' mode.
+ * android.scaler.streamConfigurationMap describes the streams supported in 'default'
+ * mode.
+ * The stream configurations supported in 'max resolution' mode are described by
+ * android.scaler.streamConfigurationMapMaximumResolution.
+ * The maximum resolution mode pixel array size of a camera device
+ * (ACAMERA_SENSOR_INFO_PIXEL_ARRAY_SIZE) with this capability
+ * will be at least 24 megapixels.
+ *
+ * The camera device supports selecting a per-stream use case via
+ * OutputConfiguration#setStreamUseCase
+ * so that the device can optimize camera pipeline parameters such as tuning, sensor
+ * mode, or ISP settings for a specific user scenario.
+ * Some sample usages of this capability are:
+ *
+ * - Distinguish high quality YUV captures from a regular YUV stream where
+ *   the image quality may not be as good as the JPEG stream, or
+ * - Use one stream to serve multiple purposes: viewfinder, video recording and
+ *   still capture. This is common with applications that wish to apply edits equally
+ *   to preview, saved images, and saved videos.
+ *
+ * This capability requires the camera device to support the following
+ * stream use cases:
+ *
+ * - DEFAULT for backward compatibility where the application doesn't set
+ *   a stream use case
+ * - PREVIEW for live viewfinder and in-app image analysis
+ * - STILL_CAPTURE for still photo capture
+ * - VIDEO_RECORD for recording video clips
+ * - PREVIEW_VIDEO_STILL for one single stream used for viewfinder, video
+ *   recording, and still capture.
+ *
+ * 10-bit pixel samples encoded using the SMPTE ST 2084 transfer function.
+ * This profile utilizes internal static metadata to increase the quality
+ * of the capture.
+ *
+ * 10-bit pixel samples encoded using the SMPTE ST 2084 transfer function.
+ * In contrast to HDR10, this profile uses internal per-frame metadata
+ * to further enhance the quality of the capture.
+ *
+ * This is a camera mode for Dolby Vision capture optimized for a more scene
+ * accurate capture. This would typically differ from what a specific device
+ * might want to tune for a consumer optimized Dolby Vision general capture.
+ *
+ * This is the camera mode for the default Dolby Vision capture mode for the
+ * specific device. This would be tuned by each specific device for consumer
+ * pleasing results that resonate with their particular audience. We expect
+ * that each specific device would have a different look for their default
+ * Dolby Vision capture.
+ *
+ * Preview must only include non-stalling processed stream configurations with
+ * output formats like
+ * {@link AIMAGE_FORMAT_YUV_420_888 },
+ * {@link AIMAGE_FORMAT_PRIVATE }, etc.
+ *
+ * Video record must include stream configurations that match the advertised
+ * supported media profiles CamcorderProfile with
+ * IMPLEMENTATION_DEFINED format.
+ *
+ * Video snapshot must include stream configurations at least as big as
+ * the maximum RECORD resolutions and only with the
+ * {@link AIMAGE_FORMAT_JPEG JPEG output format}.
+ * Additionally, the configurations shouldn't cause preview glitches and should be able to
+ * run at 30 fps.
+ *
+ * Recommended snapshot stream configurations must include at least one with
+ * size close to ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE and the
+ * {@link AIMAGE_FORMAT_JPEG JPEG output format}.
+ * Taking into account restrictions on aspect ratio, alignment etc., the area of the
+ * maximum suggested size shouldn't be less than 97% of the sensor array size area.
+ *
+ * If supported, the recommended low latency stream configurations must have
+ * end-to-end latency that does not exceed 200 ms under standard operating conditions
+ * (reasonable light levels, not loaded system) and using template
+ * TEMPLATE_STILL_CAPTURE. This is primarily for listing configurations for the
+ * {@link AIMAGE_FORMAT_JPEG JPEG output format};
+ * however, other supported output formats can be added as well.
+ *
+ * If supported, the recommended 10-bit output stream configurations must include
+ * a subset of the advertised ImageFormat#YCBCR_P010 and
+ * ImageFormat#PRIVATE outputs that are optimized for power
+ * and performance when registered along with a supported 10-bit dynamic range profile.
+ * See android.hardware.camera2.params.OutputConfiguration#setDynamicRangeProfile for
+ * details.
+ *
+ * The camera API automatically selects the best concrete value for
+ * rotate-and-crop based on the application's support for resizability and the current
+ * multi-window mode.
+ *
+ * If the application does not support resizing but the display mode for its main
+ * Activity is not in a typical orientation, the camera API will set ROTATE_AND_CROP_90
+ * or some other supported rotation value, depending on device configuration,
+ * to ensure preview and captured images are correctly shown to the user. Otherwise,
+ * ROTATE_AND_CROP_NONE will be selected.
+ *
+ * When a value other than NONE is selected, several metadata fields will also be parsed
+ * differently to ensure that coordinates are correctly handled for features like drawing
+ * face detection boxes or passing in tap-to-focus coordinates. The camera API will
+ * convert positions in the active array coordinate system to/from the cropped-and-rotated
+ * coordinate system to make the operation transparent for applications.
+ *
+ * No coordinate mapping will be done when the application selects a non-AUTO mode.
+ *
+ * This use case is the same as when the application doesn't set any use case for
+ * the stream. The camera device uses the properties of the output target, such as
+ * format, dataSpace, or surface class type, to optimize the image processing pipeline.
+ *
+ * Optimized for performance and usability as a viewfinder, but not necessarily for
+ * image quality. The output is not meant to be persisted as saved images or video.
+ *
+ * No stall if ACAMERA_CONTROL_* are set to FAST. There may be stall if
+ * they are set to HIGH_QUALITY. This use case has the same behavior as the
+ * default SurfaceView and SurfaceTexture targets. Additionally, this use case can be
+ * used for in-app image analysis.
+ *
+ * Optimized for high-quality high-resolution capture, and not expected to maintain
+ * preview-like frame rates.
+ *
+ * The stream may have stalls regardless of whether ACAMERA_CONTROL_* is HIGH_QUALITY.
+ * This use case has the same behavior as the default JPEG and RAW related formats.
+ *
+ * Optimized for high-quality video capture, including high-quality image stabilization
+ * if supported by the device and enabled by the application. As a result, may produce
+ * output frames with a substantial lag from real time, to allow for highest-quality
+ * stabilization or other processing. As such, such an output is not suitable for drawing
+ * to screen directly, and is expected to be persisted to disk or similar for later
+ * playback or processing. Only streams that set the VIDEO_RECORD use case are guaranteed
+ * to have video stabilization applied when the video stabilization control is set
+ * to ON, as opposed to PREVIEW_STABILIZATION.
+ *
+ * This use case has the same behavior as the default MediaRecorder and MediaCodec
+ * targets.
+ *
+ * One single stream used for combined purposes of preview, video, and still capture.
+ *
+ * For such multi-purpose streams, the camera device aims to make the best tradeoff
+ * between the individual use cases. For example, the STILL_CAPTURE use case by itself
+ * may have stalls for achieving best image quality. But if combined with PREVIEW and
+ * VIDEO_RECORD, the camera device needs to trade off the additional image processing
+ * for speed so that preview and video recording aren't slowed down.
+ *
+ * Similarly, VIDEO_RECORD may produce frames with a substantial lag, but
+ * PREVIEW_VIDEO_STILL must have minimal output delay. This means that to enable video
+ * stabilization with this use case, the device must support and the app must select the
+ * PREVIEW_STABILIZATION mode for video stabilization.
+ *
+ * Long-running video call optimized for both power efficiency and video quality.
+ *
+ * The camera sensor may run in a lower-resolution mode to reduce power consumption
+ * at the cost of some image and digital zoom quality. Unlike VIDEO_RECORD, VIDEO_CALL
+ * outputs are expected to work in dark conditions, so are usually accompanied with
+ * variable frame rate settings to allow sufficient exposure time in low light.
+ *
+ * All pixel data is replaced with an 8-bar color pattern.
+ *
+ * The vertical bars (left-to-right) are as follows:
+ *
+ * - 100% white
+ * - yellow
+ * - cyan
+ * - green
+ * - magenta
+ * - red
+ * - blue
+ * - black
+ *
+ * In general the image would look like the following:
+ *
+ *     W Y C G M R B K
+ *     W Y C G M R B K
+ *     W Y C G M R B K
+ *     W Y C G M R B K
+ *     W Y C G M R B K
+ *     . . . . . . . .
+ *     . . . . . . . .
+ *     . . . . . . . .
+ *
+ *     (B = Blue, K = Black)
+ *
+ * Each bar should take up 1/8 of the sensor pixel array width.
+ * When this is not possible, the bar size should be rounded
+ * down to the nearest integer and the pattern can repeat
+ * on the right side.
+ *
+ * Each bar's height must always take up the full sensor
+ * pixel array height.
+ *
+ * Each pixel in this test pattern must be set to either
+ * 0% intensity or 100% intensity.
+ *
+ * The test pattern is similar to COLOR_BARS, except that
+ * each bar should start at its specified color at the top,
+ * and fade to gray at the bottom.
+ *
+ * Furthermore, each bar is further subdivided into a left and
+ * right half. The left half should have a smooth gradient,
+ * and the right half should have a quantized gradient.
+ *
+ * In particular, the right half should consist of blocks of the
+ * same color for 1/16th of the active sensor pixel array width.
+ *
+ * The least significant bits in the quantized gradient should
+ * be copied from the most significant bits of the smooth gradient.
+ *
+ * The height of each bar should always be a multiple of 128.
+ * When this is not the case, the pattern should repeat at the bottom
+ * of the image.
+ *
+ * All pixel data is replaced by a pseudo-random sequence
+ * generated from a PN9 512-bit sequence (typically implemented
+ * in hardware with a linear feedback shift register).
+ *
+ * The generator should be reset at the beginning of each frame,
+ * and thus each subsequent raw frame with this test pattern should
+ * be exactly the same as the last.
+ *
+ * Sensor doesn't have any Bayer color filter.
+ * Such a sensor captures visible light in monochrome. The exact weighting and
+ * wavelengths captured are not specified, but generally only include the visible
+ * frequencies. This value implies a MONOCHROME camera.
+ *
+ * Sensor has a near infrared filter capturing light with wavelength between
+ * roughly 750nm and 1400nm, and the same filter covers the whole sensor array. This
+ * value implies a MONOCHROME camera.
+ *
+ * Timestamps from ACAMERA_SENSOR_TIMESTAMP are in nanoseconds and monotonic, but cannot
+ * be compared to timestamps from other subsystems (e.g. accelerometer, gyro, etc.),
+ * or other instances of the same or different camera devices in the same system with
+ * accuracy. However, the timestamps are roughly in the same timebase as
+ * SystemClock#uptimeMillis. The accuracy is sufficient for tasks
+ * like A/V synchronization for video recording, at least, and the timestamps can be
+ * directly used together with timestamps from the audio subsystem for that task.
+ *
+ * Timestamps between streams and results for a single camera instance are comparable,
+ * and the timestamps for all buffers and the result metadata generated by a single
+ * capture are identical.
+ *
+ * Timestamps from ACAMERA_SENSOR_TIMESTAMP are in the same timebase as
+ * SystemClock#elapsedRealtimeNanos,
+ * and they can be compared to other timestamps using that base.
+ *
+ * When buffers from a REALTIME device are passed directly to a video encoder from the
+ * camera, automatic compensation is done to account for differing timebases of the
+ * audio and camera subsystems. If the application is receiving buffers and then later
+ * sending them to a video encoder or other application where they are compared with
+ * audio subsystem timestamps or similar, this compensation is not present. In those
+ * cases, applications need to adjust the timestamps themselves. Since
+ * SystemClock#elapsedRealtimeNanos and SystemClock#uptimeMillis only diverge while the
+ * device is asleep, an offset between the two sources can be measured once per active
+ * session and applied to timestamps for sufficient accuracy for A/V sync.
+ *
+ * ACAMERA_STATISTICS_OIS_TIMESTAMPS, ACAMERA_STATISTICS_OIS_X_SHIFTS,
+ * and ACAMERA_STATISTICS_OIS_Y_SHIFTS provide OIS data in the output result metadata.
+ *
+ * This camera device does not have enough capabilities to qualify as a FULL device or
+ * better.
+ *
+ * Only the stream configurations listed in the LEGACY and LIMITED tables in the
+ * {@link ACameraDevice_createCaptureSession createCaptureSession} documentation are guaranteed to be supported.
+ *
+ * All LIMITED devices support the BACKWARDS_COMPATIBLE capability, indicating basic
+ * support for color image capture. The only exception is that the device may
+ * alternatively support only the DEPTH_OUTPUT capability, if it can only output depth
+ * measurements and not color images.
+ *
+ * LIMITED devices and above require the use of ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER
+ * to lock exposure metering (and calculate flash power, for cameras with flash) before
+ * capturing a high-quality still image.
+ *
+ * A LIMITED device that only lists the BACKWARDS_COMPATIBLE capability is only
+ * required to support full-automatic operation and post-processing (OFF is not
+ * supported for ACAMERA_CONTROL_AE_MODE, ACAMERA_CONTROL_AF_MODE, or
+ * ACAMERA_CONTROL_AWB_MODE).
+ *
+ * Additional capabilities may optionally be supported by a LIMITED-level device, and
+ * can be checked for in ACAMERA_REQUEST_AVAILABLE_CAPABILITIES.
+ *
+ * This camera device is capable of supporting advanced imaging applications.
+ *
+ * The stream configurations listed in the FULL, LEGACY and LIMITED tables in the
+ * {@link ACameraDevice_createCaptureSession createCaptureSession} documentation are guaranteed to be supported.
+ *
+ * In addition, a FULL device supports:
+ *
+ * - Per frame control (ACAMERA_SYNC_MAX_LATENCY == PER_FRAME_CONTROL)
+ * - Manual sensor control (ACAMERA_REQUEST_AVAILABLE_CAPABILITIES contains MANUAL_SENSOR)
+ * - Manual post-processing control (ACAMERA_REQUEST_AVAILABLE_CAPABILITIES contains
+ *   MANUAL_POST_PROCESSING)
+ * - The required exposure time range defined in ACAMERA_SENSOR_INFO_EXPOSURE_TIME_RANGE
+ * - The required maxFrameDuration defined in ACAMERA_SENSOR_INFO_MAX_FRAME_DURATION
+ *
+ * Note:
+ * Pre-API level 23, FULL devices also supported arbitrary cropping regions
+ * (ACAMERA_SCALER_CROPPING_TYPE == FREEFORM); this requirement was relaxed in API level
+ * 23, and FULL devices may only support CENTERED cropping.
+ *
+ * This camera device is running in backward compatibility mode.
+ *
+ * Only the stream configurations listed in the LEGACY table in the {@link ACameraDevice_createCaptureSession createCaptureSession} documentation are supported.
+ *
+ * A LEGACY device does not support per-frame control, manual sensor control, manual
+ * post-processing, or arbitrary cropping regions, and has relaxed performance constraints.
+ * No additional capabilities beyond BACKWARD_COMPATIBLE will ever be listed by a
+ * LEGACY device in ACAMERA_REQUEST_AVAILABLE_CAPABILITIES.
+ *
+ * In addition, the ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER is not functional on LEGACY
+ * devices. Instead, every request that includes a JPEG-format output target is treated
+ * as triggering a still capture, internally executing a precapture trigger. This may
+ * fire the flash for flash power metering during precapture, and then fire the flash
+ * for the final capture, if a flash is available on the device and the AE mode is set to
+ * enable the flash.
+ *
+ * Devices that initially shipped with Android version Q or newer will not include any LEGACY-level devices.
+ *
+ * This camera device is capable of YUV reprocessing and RAW data capture, in addition to
+ * FULL-level capabilities.
+ *
+ * The stream configurations listed in the LEVEL_3, RAW, FULL, LEGACY and
+ * LIMITED tables in the {@link ACameraDevice_createCaptureSession createCaptureSession} documentation are guaranteed to be supported.
+ *
+ * The following additional capabilities are guaranteed to be supported:
+ *
+ * - YUV_REPROCESSING capability
+ * - RAW capability
+ *
+ * The current result is not yet fully synchronized to any request.
+ *
+ * Synchronization is in progress, and reading metadata from this
+ * result may include a mix of data that have taken effect since the
+ * last synchronization time.
+ *
+ * In some future result, within ACAMERA_SYNC_MAX_LATENCY frames,
+ * this value will update to the actual frame number
+ * the result is guaranteed to be synchronized to (as long as the
+ * request settings remain constant).
+ *
+ * The current result's synchronization status is unknown.
+ *
+ * The result may have already converged, or it may be in
+ * progress. Reading from this result may include some mix
+ * of settings from past requests.
+ *
+ * After a settings change, the new settings will eventually all
+ * take effect for the output buffers and results. However, this
+ * value will not change when that happens. Altering settings
+ * rapidly may provide outcomes using mixes of settings from recent
+ * requests.
+ *
+ * This value is intended primarily for backwards compatibility with
+ * the older camera implementations (for android.hardware.Camera).
+ *
+ * Each new frame has some subset (potentially the entire set)
+ * of the past requests applied to the camera settings.
+ *
+ * By submitting a series of identical requests, the camera device
+ * will eventually have the camera settings applied, but it is
+ * unknown when that exact point will be.
+ *
+ * All LEGACY capability devices will have this as their maxLatency.
+ *
+ * A software mechanism is used to synchronize between the physical cameras. As a result,
+ * the timestamp of an image from a physical stream is only an approximation of the
+ * image sensor start-of-exposure time.
+ *
+ * The camera device supports frame timestamp synchronization at the hardware level,
+ * and the timestamp of a physical stream image accurately reflects its
+ * start-of-exposure time.
+ *
+ * Lens distortion correction is applied without reducing frame rate
+ * relative to sensor output. It may be the same as OFF if distortion correction would
+ * reduce frame rate relative to sensor.
+ *
+ * The camera exists outside of the vehicle body frame but not exactly on one of the
+ * exterior locations this enum defines. The application should determine the exact
+ * location from ACAMERA_LENS_POSE_TRANSLATION.
+ *
+ * The camera device exists on an extra vehicle, such as a trailer, but not exactly
+ * on one of the front, rear, left, or right sides. Applications should determine the
+ * exact location from ACAMERA_LENS_POSE_TRANSLATION.
+ *
+ * The camera device faces the outside of the vehicle body frame but not exactly
+ * one of the exterior sides defined by this enum. Applications should determine
+ * the exact facing direction from ACAMERA_LENS_POSE_ROTATION and
+ * ACAMERA_LENS_POSE_TRANSLATION.
+ *
+ * The camera device faces the inside of the vehicle body frame but not exactly
+ * one of the seats described by this enum. Applications should determine the exact
+ * facing direction from ACAMERA_LENS_POSE_ROTATION and ACAMERA_LENS_POSE_TRANSLATION.
+ *
+ * The camera device faces the right seat of the third row.
+ */
+ ACAMERA_AUTOMOTIVE_LENS_FACING_INTERIOR_SEAT_ROW_3_RIGHT = 14,
+
+ }
+
+
+ /** @} */
+}
diff --git a/Runtime/APIs/AndroidCamera/AndroidCameraMetadataTags.cs.meta b/Runtime/APIs/AndroidCamera/AndroidCameraMetadataTags.cs.meta
new file mode 100644
index 0000000..85d29f1
--- /dev/null
+++ b/Runtime/APIs/AndroidCamera/AndroidCameraMetadataTags.cs.meta
@@ -0,0 +1,3 @@
+fileFormatVersion: 2
+guid: 85d03c160dc5421f9596b8a1af9d7802
+timeCreated: 1698103267
\ No newline at end of file
diff --git a/Runtime/APIs/AndroidCamera/AndroidCameraPlayerLoopUtility.cs b/Runtime/APIs/AndroidCamera/AndroidCameraPlayerLoopUtility.cs
new file mode 100644
index 0000000..a520975
--- /dev/null
+++ b/Runtime/APIs/AndroidCamera/AndroidCameraPlayerLoopUtility.cs
@@ -0,0 +1,92 @@
+
+namespace MagicLeap.Android
+{
+ using System;
+ using System.Collections.Generic;
+ using UnityEngine.LowLevel;
+ using UnityEngine.PlayerLoop;
+
+ internal static class AndroidCameraPlayerLoopUtility
+ {
+ class UpdateSubscription<T> : IDisposable where T : class
+ {
+ private List<T> _List;
+ private T _Obj;
+
+ public UpdateSubscription(List<T> list, T obj)
+ => (_List = list).Add(_Obj = obj);
+
+ ~UpdateSubscription()
+ => Dispose(false);
+
+ public void Dispose()
+ => Dispose(true);
+
+ private void Dispose(bool disposing)
+ {
+ if (disposing && (_List == null || _Obj == null))
+ throw new ObjectDisposedException("object is already disposed");
+
+ _List?.Remove(_Obj);
+ _List = null;
+ _Obj = null;
+ }
+ }
+
+ private static Type[] _InstallPath = {
+ typeof(Initialization),
+ typeof(Initialization.XREarlyUpdate)
+ };
+
+
+
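+ /// <summary>
+ /// Lazily installs <paramref name="updateFunction"/> into the player loop at
+ /// <paramref name="installPath"/> (defaulting to Initialization.XREarlyUpdate) on first
+ /// use, and returns a subscription that unregisters <paramref name="obj"/> when disposed.
+ /// A minimal usage sketch (hypothetical caller, not part of this API):
+ /// <code>
+ /// private static List&lt;MyCameraUpdater&gt; updaters;
+ /// ...
+ /// var subscription = AndroidCameraPlayerLoopUtility.LazyRegisterPlayerLoopUpdateInternal(
+ ///     ref updaters, this, typeof(MyCameraUpdater), OnPlayerLoopUpdate);
+ /// // later, to stop receiving updates:
+ /// subscription.Dispose();
+ /// </code>
+ /// </summary>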
+ internal static IDisposable LazyRegisterPlayerLoopUpdateInternal<T>(ref List<T> list, T obj, Type updateType,
+ PlayerLoopSystem.UpdateFunction updateFunction, params Type[] installPath) where T : class
+ {
+ if (installPath == null || installPath.Length == 0)
+ installPath = _InstallPath;
+ if (list == null)
+ {
+ list = new List<T>();
+ var updateSystem = new PlayerLoopSystem
+ {
+ subSystemList = Array.Empty<PlayerLoopSystem>(),
+ type = updateType,
+ updateDelegate = updateFunction,
+ };
+ var playerLoop = PlayerLoop.GetCurrentPlayerLoop();
+ if (!InstallIntoPlayerLoop(ref playerLoop, updateSystem, installPath))
+ throw new Exception("unable to install update system into player loop!");
+ PlayerLoop.SetPlayerLoop(playerLoop);
+ }
+
+ return new UpdateSubscription(list, obj);
+ }
+
+ private static bool InstallIntoPlayerLoop(ref PlayerLoopSystem topLevelPlayerLoop, PlayerLoopSystem systemToInstall, params Type[] installPath)
+ {
+ installPath ??= Array.Empty<Type>();
+
+ ref var current = ref topLevelPlayerLoop;
+ foreach (var path in installPath)
+ {
+ var idx = Array.FindIndex(current.subSystemList, s => s.type == path);
+ if (idx == -1)
+ return false;
+ current = ref current.subSystemList[idx];
+ }
+
+ InstallSystem(ref current, systemToInstall);
+ return true;
+ }
+
+ private static void InstallSystem(ref PlayerLoopSystem parentSystem, PlayerLoopSystem targetSystem)
+ {
+ var subsystems = parentSystem.subSystemList ?? Array.Empty<PlayerLoopSystem>();
+ var length = subsystems.Length;
+ Array.Resize(ref subsystems, length + 1);
+ subsystems[length] = targetSystem;
+ parentSystem.subSystemList = subsystems;
+ }
+ }
+}
diff --git a/Runtime/APIs/AndroidCamera/AndroidCameraPlayerLoopUtility.cs.meta b/Runtime/APIs/AndroidCamera/AndroidCameraPlayerLoopUtility.cs.meta
new file mode 100644
index 0000000..692ec00
--- /dev/null
+++ b/Runtime/APIs/AndroidCamera/AndroidCameraPlayerLoopUtility.cs.meta
@@ -0,0 +1,3 @@
+fileFormatVersion: 2
+guid: 9335605ac68248329f02ad1a95c40bf2
+timeCreated: 1703215271
\ No newline at end of file
diff --git a/Runtime/APIs/AndroidCamera/CaptureRequest.cs b/Runtime/APIs/AndroidCamera/CaptureRequest.cs
new file mode 100644
index 0000000..967695d
--- /dev/null
+++ b/Runtime/APIs/AndroidCamera/CaptureRequest.cs
@@ -0,0 +1,91 @@
+namespace MagicLeap.Android
+{
+ using System;
+ using System.Runtime.InteropServices;
+ using Unity.Jobs;
+ using NDK.Camera;
+
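+ /// <summary>
+ /// Managed wrapper around a native ACaptureRequest. Frames delivered for this request are
+ /// handed to the <see cref="OnFrameAvailable"/> delegate, which may schedule work against
+ /// the image and return the resulting JobHandle (or default if processing completed inline).
+ /// A minimal handler sketch (hypothetical names, for illustration only):
+ /// <code>
+ /// static Unity.Jobs.JobHandle HandleFrame(AndroidCamera camera, CaptureRequest request,
+ ///     NativeImage image, System.IntPtr context)
+ /// {
+ ///     // inspect the image here, or schedule a job that does and return its handle.
+ ///     return default;
+ /// }
+ /// </code>
+ /// </summary>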
+ public sealed class CaptureRequest : IDisposable
+ {
+ public delegate JobHandle OnFrameAvailable(AndroidCamera camera, CaptureRequest request,
+ NativeImage image, IntPtr context);
+
+ // parent camera object.
+ private AndroidCamera camera;
+ // native request pointer.
+ private ACaptureRequest nativeRequest;
+ // optional user context pointer.
+ private IntPtr context;
+ // processing delegate.
+ private OnFrameAvailable onFrameAvailable;
+
+ internal ACaptureRequest NativeRequest => nativeRequest;
+
+
+ internal CaptureRequest(AndroidCamera camera, ACaptureRequest nativeRequest,
+ OnFrameAvailable onFrameAvailable, IntPtr context = default)
+ {
+ this.camera = camera ?? throw new ArgumentNullException(nameof(camera));
+ if (nativeRequest.IsNull)
+ throw new ArgumentException($"'{nameof(nativeRequest)}' must be a valid native pointer");
+ this.nativeRequest = nativeRequest;
+ this.context = context;
+ this.onFrameAvailable = onFrameAvailable ?? throw new ArgumentNullException(nameof(onFrameAvailable));
+
+ var weakHandle = GCHandle.Alloc(this, GCHandleType.Weak);
+ this.nativeRequest.UserContext = GCHandle.ToIntPtr(weakHandle);
+ }
+
+ ~CaptureRequest()
+ {
+ Dispose(false);
+ }
+
+ public void Dispose()
+ {
+ Dispose(true);
+ }
+
+ private void Dispose(bool disposing)
+ {
+ if (disposing)
+ {
+ if (nativeRequest.IsNull)
+ throw new ObjectDisposedException("object is already disposed");
+
+ var handle = GCHandle.FromIntPtr(nativeRequest.UserContext);
+ if (handle.IsAllocated)
+ handle.Free();
+ }
+
+ camera = null;
+ context = IntPtr.Zero;
+ onFrameAvailable = null;
+
+ if (!nativeRequest.IsNull)
+ nativeRequest.Dispose();
+ nativeRequest = default;
+ }
+
+ internal JobHandle ProcessNewFrame(NativeImage image)
+ => onFrameAvailable(camera, this, image, context);
+
+ internal static bool TryGetFromIntPtr(IntPtr ptr, out CaptureRequest outCaptureRequest)
+ {
+ outCaptureRequest = null;
+
+ if (ptr == IntPtr.Zero)
+ return false;
+
+ var handle = GCHandle.FromIntPtr(ptr);
+ if (!handle.IsAllocated)
+ return false;
+
+ outCaptureRequest = (CaptureRequest)handle.Target;
+ return true;
+ }
+ }
+}
diff --git a/Runtime/APIs/AndroidCamera/CaptureRequest.cs.meta b/Runtime/APIs/AndroidCamera/CaptureRequest.cs.meta
new file mode 100644
index 0000000..107b9cf
--- /dev/null
+++ b/Runtime/APIs/AndroidCamera/CaptureRequest.cs.meta
@@ -0,0 +1,3 @@
+fileFormatVersion: 2
+guid: 4ee36f934de04fabac4d42c53b89bc75
+timeCreated: 1704402052
\ No newline at end of file
diff --git a/Runtime/APIs/AndroidCamera/JPEGUtility.cs b/Runtime/APIs/AndroidCamera/JPEGUtility.cs
new file mode 100644
index 0000000..3e7ce7c
--- /dev/null
+++ b/Runtime/APIs/AndroidCamera/JPEGUtility.cs
@@ -0,0 +1,14 @@
+using Unity.Collections;
+
+namespace MagicLeap.Android
+{
+ public static class JPEGUtility
+ {
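+ /// <summary>
+ /// Copies the JPEG bitstream out of a single image plane into a managed byte array.
+ /// A minimal usage sketch (assumes a JPEG-format capture whose first plane holds the
+ /// encoded data; 'image' and 'savePath' are illustrative names):
+ /// <code>
+ /// var jpegBytes = JPEGUtility.ExtractJPEGData(image.Planes[0]);
+ /// System.IO.File.WriteAllBytes(savePath, jpegBytes);
+ /// </code>
+ /// </summary>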
+ public static byte[] ExtractJPEGData(NativePlane plane)
+ {
+ using (var data = plane.CopyToNativeArray(Allocator.Temp))
+ return data.ToArray();
+ }
+
+ }
+}
diff --git a/Runtime/APIs/AndroidCamera/JPEGUtility.cs.meta b/Runtime/APIs/AndroidCamera/JPEGUtility.cs.meta
new file mode 100644
index 0000000..5000d12
--- /dev/null
+++ b/Runtime/APIs/AndroidCamera/JPEGUtility.cs.meta
@@ -0,0 +1,3 @@
+fileFormatVersion: 2
+guid: d494943a03404f98baeb72d7d12dc324
+timeCreated: 1707874174
\ No newline at end of file
diff --git a/Runtime/APIs/AndroidCamera/NDK.meta b/Runtime/APIs/AndroidCamera/NDK.meta
new file mode 100644
index 0000000..3ddaf5f
--- /dev/null
+++ b/Runtime/APIs/AndroidCamera/NDK.meta
@@ -0,0 +1,3 @@
+fileFormatVersion: 2
+guid: 91ba441e97c64b79bdf0e9a491288a63
+timeCreated: 1696019885
\ No newline at end of file
diff --git a/Runtime/APIs/AndroidCamera/NDK/ACameraCaptureFailure.cs b/Runtime/APIs/AndroidCamera/NDK/ACameraCaptureFailure.cs
new file mode 100644
index 0000000..3656169
--- /dev/null
+++ b/Runtime/APIs/AndroidCamera/NDK/ACameraCaptureFailure.cs
@@ -0,0 +1,60 @@
+namespace MagicLeap.Android.NDK.Camera
+{
+ using System;
+ using Unity.Collections.LowLevel.Unsafe;
+ public unsafe struct ACameraCaptureFailure : INullablePointer
+ {
+ private struct Data
+ {
+ public long frameNumber;
+ public int reason;
+ public int sequenceId;
+ public byte wasCaptured;
+ }
+
+ [NativeDisableUnsafePtrRestriction]
+ private Data* data;
+
+ public long FrameNumber
+ {
+ get
+ {
+ this.CheckNullAndThrow();
+ return data->frameNumber;
+ }
+ }
+
+ public int Reason
+ {
+ get
+ {
+ this.CheckNullAndThrow();
+ return data->reason;
+ }
+ }
+
+ public int SequenceId
+ {
+ get
+ {
+ this.CheckNullAndThrow();
+ return data->sequenceId;
+ }
+ }
+
+ public bool WasCaptured
+ {
+ get
+ {
+ this.CheckNullAndThrow();
+ return data->wasCaptured != 0;
+ }
+ }
+
+ public bool IsNull => data == null;
+
+ void IDisposable.Dispose()
+ => throw new InvalidOperationException(
+ "This object doesn't need to be disposed, and boxing it into an IDisposable simply to dispose it wastes GC memory");
+ }
+}
diff --git a/Runtime/APIs/AndroidCamera/NDK/ACameraCaptureFailure.cs.meta b/Runtime/APIs/AndroidCamera/NDK/ACameraCaptureFailure.cs.meta
new file mode 100644
index 0000000..347f969
--- /dev/null
+++ b/Runtime/APIs/AndroidCamera/NDK/ACameraCaptureFailure.cs.meta
@@ -0,0 +1,3 @@
+fileFormatVersion: 2
+guid: a37f595a3ac843f4a493bb38d871b5b8
+timeCreated: 1703202505
\ No newline at end of file
diff --git a/Runtime/APIs/AndroidCamera/NDK/ACameraCaptureSession.cs b/Runtime/APIs/AndroidCamera/NDK/ACameraCaptureSession.cs
new file mode 100644
index 0000000..7500815
--- /dev/null
+++ b/Runtime/APIs/AndroidCamera/NDK/ACameraCaptureSession.cs
@@ -0,0 +1,211 @@
+// %BANNER_BEGIN%
+// ---------------------------------------------------------------------
+// %COPYRIGHT_BEGIN%
+// Copyright (c) (2018-2022) Magic Leap, Inc. All Rights Reserved.
+// Use of this file is governed by the Software License Agreement, located here: https://www.magicleap.com/software-license-agreement-ml2
+// Terms and conditions applicable to third-party materials accompanying this distribution may also be found in the top-level NOTICE file appearing herein.
+// %COPYRIGHT_END%
+// ---------------------------------------------------------------------
+// %BANNER_END%
+
+namespace MagicLeap.Android.NDK.Camera
+{
+ using System;
+ using System.Diagnostics;
+ using System.Runtime.InteropServices;
+ using Unity.Collections;
+ using Unity.Collections.LowLevel.Unsafe;
+
+ using static CameraNativeBindings;
+
+ // Delegate shapes are assumed to mirror the corresponding NDK callback typedefs;
+ // pointer parameters whose managed wrappers are not defined here are left as IntPtr.
+ using StateCallback = System.Action<System.IntPtr, ACameraCaptureSession>;
+
+ using CaptureStartedCallback = System.Action<System.IntPtr, ACameraCaptureSession, ACaptureRequest, long>;
+ using CaptureResultCallback = System.Action<System.IntPtr, ACameraCaptureSession, ACaptureRequest, System.IntPtr>;
+ using CaptureFailedCallback = System.Action<System.IntPtr, ACameraCaptureSession, ACaptureRequest, ACameraCaptureFailure>;
+ using CaptureSequenceEndedCallback = System.Action<System.IntPtr, ACameraCaptureSession, int, long>;
+ using CaptureSequenceAbortedCallback = System.Action<System.IntPtr, ACameraCaptureSession, int>;
+ using BufferLostCallback = System.Action<System.IntPtr, ACameraCaptureSession, ACaptureRequest, System.IntPtr, long>;
+
+ public struct ACameraCaptureSession : INullablePointer
+ {
+ public struct CaptureCallbacks
+ {
+ private IntPtr context;
+ private IntPtr onCaptureStarted;
+ private IntPtr onCaptureProgressed;
+ private IntPtr onCaptureCompleted;
+ private IntPtr onCaptureFailed;
+ private IntPtr onCaptureSequenceCompleted;
+ private IntPtr onCaptureSequenceAborted;
+ private IntPtr onCaptureBufferLost;
+
+ public static CaptureCallbacks Create(CaptureStartedCallback captureStarted = null,
+ CaptureResultCallback captureProgressed = null, CaptureResultCallback captureCompleted = null,
+ CaptureFailedCallback captureFailed = null, CaptureSequenceEndedCallback sequenceEnded = null,
+ CaptureSequenceAbortedCallback sequenceAborted = null, BufferLostCallback bufferLost = null,
+ IntPtr context = default)
+ {
+ if (captureStarted == null
+ && captureProgressed == null
+ && captureCompleted == null
+ && captureFailed == null
+ && sequenceEnded == null
+ && sequenceAborted == null
+ && bufferLost == null)
+ throw new InvalidOperationException("at least one callback must not be null");
+
+ return new CaptureCallbacks
+ {
+ context = context,
+ onCaptureStarted = captureStarted == null
+ ? IntPtr.Zero
+ : Marshal.GetFunctionPointerForDelegate(captureStarted),
+ onCaptureProgressed = captureProgressed == null
+ ? IntPtr.Zero
+ : Marshal.GetFunctionPointerForDelegate(captureProgressed),
+ onCaptureCompleted = captureCompleted == null
+ ? IntPtr.Zero
+ : Marshal.GetFunctionPointerForDelegate(captureCompleted),
+ onCaptureFailed = captureFailed == null
+ ? IntPtr.Zero
+ : Marshal.GetFunctionPointerForDelegate(captureFailed),
+ onCaptureSequenceCompleted = sequenceEnded == null
+ ? IntPtr.Zero
+ : Marshal.GetFunctionPointerForDelegate(sequenceEnded),
+ onCaptureSequenceAborted = sequenceAborted == null
+ ? IntPtr.Zero
+ : Marshal.GetFunctionPointerForDelegate(sequenceAborted),
+ onCaptureBufferLost = bufferLost == null
+ ? IntPtr.Zero
+ : Marshal.GetFunctionPointerForDelegate(bufferLost),
+ };
+ }
+ }
+
+ public struct StateCallbacks
+ {
+ private IntPtr context;
+ private IntPtr onClosed;
+ private IntPtr onReady;
+ private IntPtr onActive;
+
+ public static StateCallbacks Create(StateCallback active = null, StateCallback closed = null,
+ StateCallback ready = null, IntPtr context = default)
+ {
+ if (active == null && closed == null && ready == null)
+ throw new InvalidOperationException("at least one callback must not be null");
+ return new StateCallbacks
+ {
+ context = context,
+ onActive = active == null
+ ? IntPtr.Zero
+ : Marshal.GetFunctionPointerForDelegate(active),
+ onClosed = closed == null
+ ? IntPtr.Zero
+ : Marshal.GetFunctionPointerForDelegate(closed),
+ onReady = ready == null
+ ? IntPtr.Zero
+ : Marshal.GetFunctionPointerForDelegate(ready),
+ };
+ }
+ }
+
+ private IntPtr value;
+
+ public ACameraDevice Device
+ {
+ get
+ {
+ var result = ACameraCaptureSession_getDevice(this, out var devicePtr);
+ result.CheckReturnValueAndThrow();
+ return devicePtr;
+ }
+ }
+
+ public bool IsNull => value == IntPtr.Zero;
+
+ public void Dispose()
+ {
+ if (!IsNull)
+ ACameraCaptureSession_close(this);
+
+ value = IntPtr.Zero;
+ }
+
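+ /// <summary>
+ /// Submits a one-shot capture for each request in <paramref name="requests"/>.
+ /// A minimal usage sketch (assumes a session and request were created via the
+ /// surrounding Try* APIs; error handling elided):
+ /// <code>
+ /// using (var requests = new NativeArray&lt;ACaptureRequest&gt;(1, Allocator.Temp))
+ /// {
+ ///     requests[0] = myRequest; // previously created via ACameraDevice.TryCreateCaptureRequest
+ ///     if (session.TryCapture(requests, out var sequenceId))
+ ///         UnityEngine.Debug.Log($"submitted capture sequence {sequenceId}");
+ /// }
+ /// </code>
+ /// </summary>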
+ public bool TryCapture(NativeArray<ACaptureRequest> requests, out int sequenceId)
+ {
+ this.CheckNullAndThrow();
+ CheckValidRequestArrayAndThrow(ref requests);
+ unsafe
+ {
+ var result = ACameraCaptureSession_capture(this, null, requests.Length,
+ (ACaptureRequest*)requests.GetUnsafeReadOnlyPtr(), out sequenceId);
+ result.CheckReturnValueAndThrow();
+ return result == CameraStatus.Ok;
+ }
+ }
+
+ public bool TryCaptureWithCallbacks(NativeArray<ACaptureRequest> requests, CaptureCallbacks callbacks, out int sequenceId)
+ {
+ this.CheckNullAndThrow();
+ CheckValidRequestArrayAndThrow(ref requests);
+ unsafe
+ {
+ var result = ACameraCaptureSession_capture(this, &callbacks, requests.Length,
+ (ACaptureRequest*)requests.GetUnsafeReadOnlyPtr(), out sequenceId);
+ result.CheckReturnValueAndThrow();
+ return result == CameraStatus.Ok;
+ }
+ }
+
+ public bool TrySetRepeatingRequest(NativeArray<ACaptureRequest> requests, out int sequenceId)
+ {
+ this.CheckNullAndThrow();
+ CheckValidRequestArrayAndThrow(ref requests);
+ unsafe
+ {
+ var result = ACameraCaptureSession_setRepeatingRequest(this, null, requests.Length,
+ (ACaptureRequest*)requests.GetUnsafeReadOnlyPtr(), out sequenceId);
+ result.CheckReturnValueAndThrow();
+ return result == CameraStatus.Ok;
+ }
+ }
+
+ public bool TrySetRepeatingRequestWithCallbacks(NativeArray<ACaptureRequest> requests, CaptureCallbacks callbacks, out int sequenceId)
+ {
+ this.CheckNullAndThrow();
+ CheckValidRequestArrayAndThrow(ref requests);
+ unsafe
+ {
+ var result = ACameraCaptureSession_setRepeatingRequest(this, &callbacks, requests.Length,
+ (ACaptureRequest*)requests.GetUnsafeReadOnlyPtr(), out sequenceId);
+ result.CheckReturnValueAndThrow();
+ return result == CameraStatus.Ok;
+ }
+ }
+
+ public bool TryStopRepeating()
+ {
+ this.CheckNullAndThrow();
+ var ret = ACameraCaptureSession_stopRepeating(this);
+ ret.CheckReturnValueAndThrow();
+ return ret == CameraStatus.Ok;
+ }
+
+ [Conditional("DEVELOPMENT_BUILD")]
+ private static void CheckValidRequestArrayAndThrow(ref NativeArray<ACaptureRequest> requests)
+ {
+ if (!requests.IsCreated)
+ throw new ArgumentNullException(nameof(requests));
+
+ if (requests.Length == 0)
+ throw new InvalidOperationException("request array is empty");
+
+ for (var i = 0; i < requests.Length; ++i)
+ if (requests[i].IsNull)
+ throw new NullReferenceException($"request at index '{i}' is null");
+
+ }
+ }
+}
diff --git a/Runtime/APIs/AndroidCamera/NDK/ACameraCaptureSession.cs.meta b/Runtime/APIs/AndroidCamera/NDK/ACameraCaptureSession.cs.meta
new file mode 100644
index 0000000..5acce3d
--- /dev/null
+++ b/Runtime/APIs/AndroidCamera/NDK/ACameraCaptureSession.cs.meta
@@ -0,0 +1,3 @@
+fileFormatVersion: 2
+guid: 5afaacb3819a40c0a3d797d612219370
+timeCreated: 1703031122
\ No newline at end of file
diff --git a/Runtime/APIs/AndroidCamera/NDK/ACameraDevice.cs b/Runtime/APIs/AndroidCamera/NDK/ACameraDevice.cs
new file mode 100644
index 0000000..858f623
--- /dev/null
+++ b/Runtime/APIs/AndroidCamera/NDK/ACameraDevice.cs
@@ -0,0 +1,145 @@
+// %BANNER_BEGIN%
+// ---------------------------------------------------------------------
+// %COPYRIGHT_BEGIN%
+// Copyright (c) (2018-2022) Magic Leap, Inc. All Rights Reserved.
+// Use of this file is governed by the Software License Agreement, located here: https://www.magicleap.com/software-license-agreement-ml2
+// Terms and conditions applicable to third-party materials accompanying this distribution may also be found in the top-level NOTICE file appearing herein.
+// %COPYRIGHT_END%
+// ---------------------------------------------------------------------
+// %BANNER_END%
+
+namespace MagicLeap.Android.NDK.Camera
+{
+ using System;
+ using System.Diagnostics;
+ using System.Runtime.InteropServices;
+
+ using static CameraNativeBindings;
+
+ // Delegate shapes are assumed to mirror the NDK ACameraDevice callback typedefs.
+ using ErrorStateCallback = System.Action<System.IntPtr, ACameraDevice, int>;
+ using StateCallback = System.Action<System.IntPtr, ACameraDevice>;
+
+ public struct ACameraDevice : INullablePointer
+ {
+ public enum ErrorCode
+ {
+ /// <summary>
+ /// The camera device is in use already.
+ /// </summary>
+ CameraInUse = 1,
+
+ /// <summary>
+ /// The system-wide limit for number of open cameras or camera resources has
+ /// been reached, and more camera devices cannot be opened until previous
+ /// instances are closed.
+ /// </summary>
+ MaxCamerasInUse = 2,
+
+ /// <summary>
+ /// The camera is disabled due to a device policy, and cannot be opened.
+ /// </summary>
+ CameraDisabled = 3,
+
+ /// <summary>
+ /// The camera device has encountered a fatal error.
+ /// The camera device needs to be re-opened to be used again.
+ /// </summary>
+ CameraDeviceFatalError = 4,
+
+ /// <summary>
+ /// The camera service has encountered a fatal error.
+ /// The Android device may need to be shut down and restarted to restore
+ /// camera function, or there may be a persistent hardware problem.
+ /// An attempt at recovery may be possible by closing the
+ /// CameraDevice and the CameraManager, and trying to acquire all resources
+ /// again from scratch.
+ /// </summary>
+ CameraServiceFatalError = 5
+ }
+
+ public struct StateCallbacks
+ {
+ private IntPtr context;
+ private IntPtr onDisconnected;
+ private IntPtr onError;
+
+ public static StateCallbacks Create(StateCallback onDisconnected = null, ErrorStateCallback onError = null,
+ IntPtr context = default)
+ {
+ return new StateCallbacks
+ {
+ context = context,
+ onDisconnected = onDisconnected != null
+ ? Marshal.GetFunctionPointerForDelegate(onDisconnected)
+ : IntPtr.Zero,
+ onError = onError != null
+ ? Marshal.GetFunctionPointerForDelegate(onError)
+ : IntPtr.Zero,
+ };
+ }
+ }
+
+ private IntPtr value;
+
+ /// <summary>
+ /// Note: Be careful when repeatedly accessing this property, because it allocates a new C# string each time
+ /// and can generate additional GC pressure.
+ /// </summary>
+ public string Id
+ {
+ get
+ {
+ unsafe
+ {
+ return Marshal.PtrToStringAuto(new IntPtr(ACameraDevice_getId(this)));
+ }
+ }
+ }
+
+ public bool IsNull => value == IntPtr.Zero;
+
+ public void Dispose()
+ {
+ if (!IsNull)
+ ACameraDevice_close(this);
+
+ value = IntPtr.Zero;
+ }
+
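+ /// <summary>
+ /// Creates a capture request from one of the NDK request templates.
+ /// A minimal end-to-end sketch (assumes 'device' was opened elsewhere and 'container'
+ /// already holds the session outputs; the template member name is illustrative and
+ /// error handling is elided):
+ /// <code>
+ /// if (device.TryCreateCaptureRequest(ACaptureRequest.Template.Preview, out var request)
+ ///     &amp;&amp; device.TryCreateCaptureSession(container, out var session))
+ /// {
+ ///     // attach output targets to 'request', then submit it via the session.
+ /// }
+ /// </code>
+ /// </summary>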
+ public bool TryCreateCaptureRequest(ACaptureRequest.Template template, out ACaptureRequest request)
+ {
+ this.CheckNullAndThrow();
+ var result = ACameraDevice_createCaptureRequest(this, template, out request);
+ result.CheckReturnValueAndThrow();
+ return result == CameraStatus.Ok;
+ }
+
+ public bool TryCreateCaptureSession(ACaptureSessionOutputContainer container,
+ out ACameraCaptureSession session)
+ {
+ this.CheckNullAndThrow();
+ container.CheckNullAndThrow();
+ unsafe
+ {
+ var result = ACameraDevice_createCaptureSession(this, container, null, out session);
+ result.CheckReturnValueAndThrow();
+ return result == CameraStatus.Ok;
+ }
+ }
+
+ public bool TryCreateCaptureSessionWithCallbacks(ACaptureSessionOutputContainer container,
+ ACameraCaptureSession.StateCallbacks callbacks, out ACameraCaptureSession session)
+ {
+ this.CheckNullAndThrow();
+ container.CheckNullAndThrow();
+ unsafe
+ {
+ var result = ACameraDevice_createCaptureSession(this, container, &callbacks, out session);
+ result.CheckReturnValueAndThrow();
+ return result == CameraStatus.Ok;
+ }
+ }
+ }
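+
+ // Illustrative usage sketch, not part of the shipped API: shows the intended
+ // Try* call pattern for opening a camera and creating a still-capture request.
+ // All names here are local assumptions; error handling is elided for brevity.
+ internal static class ACameraDeviceUsageSketch
+ {
+ internal static void CreateStillCaptureRequest()
+ {
+ using (var manager = ACameraManager.Create())
+ {
+ if (!manager.TryGetCameraIds(out var ids))
+ return;
+
+ using (ids)
+ {
+ if (ids.NumCameras == 0 || !manager.TryOpenCamera(ids.CameraAt(0), out var camera))
+ return;
+
+ // Dispose() closes the camera device.
+ using (camera)
+ {
+ if (camera.TryCreateCaptureRequest(ACaptureRequest.Template.StillCapture, out var request))
+ request.Dispose();
+ }
+ }
+ }
+ }
+ }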
+
+
+}
diff --git a/Runtime/APIs/AndroidCamera/NDK/ACameraDevice.cs.meta b/Runtime/APIs/AndroidCamera/NDK/ACameraDevice.cs.meta
new file mode 100644
index 0000000..19098ce
--- /dev/null
+++ b/Runtime/APIs/AndroidCamera/NDK/ACameraDevice.cs.meta
@@ -0,0 +1,3 @@
+fileFormatVersion: 2
+guid: 6831902707c3411589ea5f550a5b7d80
+timeCreated: 1702934548
\ No newline at end of file
diff --git a/Runtime/APIs/AndroidCamera/NDK/ACameraIdList.cs b/Runtime/APIs/AndroidCamera/NDK/ACameraIdList.cs
new file mode 100644
index 0000000..0ad8c1a
--- /dev/null
+++ b/Runtime/APIs/AndroidCamera/NDK/ACameraIdList.cs
@@ -0,0 +1,67 @@
+// %BANNER_BEGIN%
+// ---------------------------------------------------------------------
+// %COPYRIGHT_BEGIN%
+// Copyright (c) (2018-2022) Magic Leap, Inc. All Rights Reserved.
+// Use of this file is governed by the Software License Agreement, located here: https://www.magicleap.com/software-license-agreement-ml2
+// Terms and conditions applicable to third-party materials accompanying this distribution may also be found in the top-level NOTICE file appearing herein.
+// %COPYRIGHT_END%
+// ---------------------------------------------------------------------
+// %BANNER_END%
+
+namespace MagicLeap.Android.NDK.Camera
+{
+ using System;
+ using System.Diagnostics;
+ using System.Runtime.InteropServices;
+
+ public unsafe struct ACameraIdList : INullablePointer
+ {
+ private struct Data
+ {
+ public int NumCameras;
+ public byte** CameraIds;
+ }
+
+ private Data* data;
+
+ public bool IsNull => data == null;
+
+ public int NumCameras
+ {
+ get
+ {
+ this.CheckNullAndThrow();
+ return data->NumCameras;
+ }
+ }
+
+ public string CameraAt(int index)
+ {
+ this.CheckNullAndThrow();
+ CheckIndexAndThrow(index);
+ return Marshal.PtrToStringAnsi(new IntPtr(data->CameraIds[index]));
+ }
+
+ public byte* CameraAtNonAlloc(int index)
+ {
+ this.CheckNullAndThrow();
+ CheckIndexAndThrow(index);
+ return data->CameraIds[index];
+ }
+
+ public void Dispose()
+ {
+ if (!IsNull)
+ CameraNativeBindings.ACameraManager_deleteCameraIdList(this);
+
+ data = null;
+ }
+
+ [Conditional("DEVELOPMENT_BUILD")]
+ private void CheckIndexAndThrow(int index)
+ {
+ if (index < 0 || index >= NumCameras)
+ throw new IndexOutOfRangeException($"camera index must be between 0 and {NumCameras - 1}, inclusive");
+ }
+ }
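+
+ // Illustrative sketch (assumes a valid ACameraManager): enumerating the
+ // available camera ids. CameraAt allocates a managed string per call; prefer
+ // CameraAtNonAlloc on hot paths.
+ internal static class ACameraIdListUsageSketch
+ {
+ internal static void LogAllCameraIds(ACameraManager manager)
+ {
+ if (!manager.TryGetCameraIds(out var list))
+ return;
+
+ using (list)
+ {
+ for (var i = 0; i < list.NumCameras; ++i)
+ UnityEngine.Debug.Log($"camera[{i}] = {list.CameraAt(i)}");
+ }
+ }
+ }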
+}
diff --git a/Runtime/APIs/AndroidCamera/NDK/ACameraIdList.cs.meta b/Runtime/APIs/AndroidCamera/NDK/ACameraIdList.cs.meta
new file mode 100644
index 0000000..e1fd57e
--- /dev/null
+++ b/Runtime/APIs/AndroidCamera/NDK/ACameraIdList.cs.meta
@@ -0,0 +1,3 @@
+fileFormatVersion: 2
+guid: e8060379d4f64561bb91ad6e221e0072
+timeCreated: 1703032275
\ No newline at end of file
diff --git a/Runtime/APIs/AndroidCamera/NDK/ACameraManager.cs b/Runtime/APIs/AndroidCamera/NDK/ACameraManager.cs
new file mode 100644
index 0000000..25ba236
--- /dev/null
+++ b/Runtime/APIs/AndroidCamera/NDK/ACameraManager.cs
@@ -0,0 +1,185 @@
+// %BANNER_BEGIN%
+// ---------------------------------------------------------------------
+// %COPYRIGHT_BEGIN%
+// Copyright (c) (2018-2022) Magic Leap, Inc. All Rights Reserved.
+// Use of this file is governed by the Software License Agreement, located here: https://www.magicleap.com/software-license-agreement-ml2
+// Terms and conditions applicable to third-party materials accompanying this distribution may also be found in the top-level NOTICE file appearing herein.
+// %COPYRIGHT_END%
+// ---------------------------------------------------------------------
+// %BANNER_END%
+
+using AOT;
+using UnityEngine;
+using UnityEngine.XR.MagicLeap;
+
+namespace MagicLeap.Android.NDK.Camera
+{
+ using System;
+ using System.Diagnostics;
+ using System.Runtime.InteropServices;
+
+ using static CameraNativeBindings;
+
+
+ // Signature mirrors the NDK availability callback: (context, camera id as a native string pointer).
+ using AvailabilityCallback = System.Action<System.IntPtr, System.IntPtr>;
+
+ public struct ACameraManager : INullablePointer
+ {
+ public struct AvailabilityCallbacks
+ {
+ private IntPtr context;
+ private IntPtr onCameraAvailable;
+ private IntPtr onCameraUnavailable;
+
+ public static AvailabilityCallbacks Create(AvailabilityCallback onCameraAvailable = null, AvailabilityCallback onCameraUnavailable = null,
+ IntPtr context = default)
+ {
+ IntPtr oca = onCameraAvailable != null
+ ? Marshal.GetFunctionPointerForDelegate(onCameraAvailable)
+ : IntPtr.Zero;
+ IntPtr ocu = onCameraUnavailable != null
+ ? Marshal.GetFunctionPointerForDelegate(onCameraUnavailable)
+ : IntPtr.Zero;
+ return new AvailabilityCallbacks
+ {
+ context = context,
+ onCameraAvailable = oca,
+ onCameraUnavailable = ocu,
+ };
+ }
+ }
+
+ public struct AvailabilityCallbackSubscription : IDisposable
+ {
+ private AvailabilityCallbacks callbacks;
+ private ACameraManager cameraManager;
+
+ internal static AvailabilityCallbackSubscription Create(ACameraManager manager, AvailabilityCallbacks callbacks)
+ => new AvailabilityCallbackSubscription()
+ {
+ callbacks = callbacks,
+ cameraManager = manager
+ };
+
+ public void Dispose()
+ {
+ if (!cameraManager.IsNull)
+ cameraManager.TryUnregisterAvailabilityCallbacks(ref callbacks);
+ }
+ }
+
+ private IntPtr value;
+
+ public bool IsNull => value == IntPtr.Zero;
+
+ public static ACameraManager Create()
+ => ACameraManager_create();
+
+ public void Dispose()
+ {
+ if (!IsNull)
+ ACameraManager_delete(this);
+ value = IntPtr.Zero;
+ }
+
+ public bool TryGetCameraIds(out ACameraIdList list)
+ {
+ this.CheckNullAndThrow();
+ var result = ACameraManager_getCameraIdList(this, out list);
+ result.CheckReturnValueAndThrow();
+ return result == CameraStatus.Ok;
+ }
+
+ public bool TryGetCameraMetadata(string cameraId, out ACameraMetadata metadata)
+ {
+ this.CheckNullAndThrow();
+ var result = ACameraManager_getCameraCharacteristics(this, cameraId, out metadata);
+ result.CheckReturnValueAndThrow();
+ return result == CameraStatus.Ok;
+ }
+
+ public unsafe bool TryGetCameraMetadataNonAlloc(byte* cameraId, out ACameraMetadata metadata)
+ {
+ this.CheckNullAndThrow();
+ var result = ACameraManager_getCameraCharacteristicsNonAlloc(this, cameraId, out metadata);
+ return result == CameraStatus.Ok;
+ }
+
+ public bool TryOpenCamera(string cameraId, out ACameraDevice camera)
+ {
+ this.CheckNullAndThrow();
+ unsafe
+ {
+ ACameraDevice.StateCallbacks callbacks = ACameraDevice.StateCallbacks.Create(OnCameraDisconnected, OnCameraError);
+ var result = ACameraManager_openCamera(this, cameraId, &callbacks, out camera);
+ result.CheckReturnValueAndThrow();
+ return result == CameraStatus.Ok;
+ }
+ }
+
+ public bool TryOpenCameraWithCallbacks(string cameraId, ACameraDevice.StateCallbacks callbacks, out ACameraDevice camera)
+ {
+ this.CheckNullAndThrow();
+ unsafe
+ {
+ var result = ACameraManager_openCamera(this, cameraId, &callbacks, out camera);
+ result.CheckReturnValueAndThrow();
+ return result == CameraStatus.Ok;
+ }
+ }
+
+ public unsafe bool TryOpenCameraNonAlloc(byte* cameraId, out ACameraDevice camera)
+ {
+ this.CheckNullAndThrow();
+ var result = ACameraManager_openCameraNonAlloc(this, cameraId, null, out camera);
+ result.CheckReturnValueAndThrow();
+ return result == CameraStatus.Ok;
+ }
+
+ public unsafe bool TryOpenCameraWithCallbacksNonAlloc(byte* cameraId, ACameraDevice.StateCallbacks callbacks, out ACameraDevice camera)
+ {
+ this.CheckNullAndThrow();
+ var result = ACameraManager_openCameraNonAlloc(this, cameraId, &callbacks, out camera);
+ result.CheckReturnValueAndThrow();
+ return result == CameraStatus.Ok;
+ }
+
+ public bool TryRegisterAvailabilityCallbacks(ref AvailabilityCallbacks callbacks)
+ {
+ this.CheckNullAndThrow();
+ var result = ACameraManager_registerAvailabilityCallback(this, ref callbacks);
+ return result == CameraStatus.Ok;
+ }
+
+ public bool TrySubscribeToAvailabilityCallbacks(AvailabilityCallbacks callbacks,
+ out AvailabilityCallbackSubscription sub)
+ {
+ sub = default;
+ if (!TryRegisterAvailabilityCallbacks(ref callbacks))
+ return false;
+
+ sub = AvailabilityCallbackSubscription.Create(this, callbacks);
+ return true;
+ }
+
+ public bool TryUnregisterAvailabilityCallbacks(ref AvailabilityCallbacks callbacks)
+ {
+ this.CheckNullAndThrow();
+ var result = ACameraManager_unregisterAvailabilityCallback(this, ref callbacks);
+ result.CheckReturnValueAndThrow();
+ return result == CameraStatus.Ok;
+ }
+
+ [MonoPInvokeCallback(typeof(Action<IntPtr, ACameraDevice>))]
+ static void OnCameraDisconnected(IntPtr context, ACameraDevice camera)
+ {
+ UnityEngine.Debug.LogFormat(LogType.Warning, LogOption.NoStacktrace, null, $"Warning: Camera '{camera.Id}' has been disconnected");
+ }
+
+ [MonoPInvokeCallback(typeof(Action<IntPtr, ACameraDevice, ACameraDevice.ErrorCode>))]
+ static void OnCameraError(IntPtr context, ACameraDevice camera, ACameraDevice.ErrorCode errorCode)
+ {
+ UnityEngine.Debug.LogFormat(LogType.Error, LogOption.NoStacktrace, null, $"Error: Camera '{camera.Id}' reported error: {errorCode}");
+ }
+ }
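+
+ // Illustrative sketch: subscribing to availability changes with the RAII-style
+ // subscription wrapper. The callback must be a static method marked with
+ // [MonoPInvokeCallback] so it survives IL2CPP/AOT; its body is an assumption.
+ internal static class AvailabilitySubscriptionSketch
+ {
+ [MonoPInvokeCallback(typeof(AvailabilityCallback))]
+ private static void OnCameraAvailable(IntPtr context, IntPtr cameraId)
+ {
+ // React to the camera becoming available, e.g. kick off TryOpenCamera.
+ }
+
+ internal static bool TrySubscribe(ACameraManager manager, out ACameraManager.AvailabilityCallbackSubscription sub)
+ {
+ var callbacks = ACameraManager.AvailabilityCallbacks.Create(onCameraAvailable: OnCameraAvailable);
+ // Disposing the returned subscription unregisters the callbacks.
+ return manager.TrySubscribeToAvailabilityCallbacks(callbacks, out sub);
+ }
+ }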
+}
diff --git a/Runtime/APIs/AndroidCamera/NDK/ACameraManager.cs.meta b/Runtime/APIs/AndroidCamera/NDK/ACameraManager.cs.meta
new file mode 100644
index 0000000..f1d1d2c
--- /dev/null
+++ b/Runtime/APIs/AndroidCamera/NDK/ACameraManager.cs.meta
@@ -0,0 +1,3 @@
+fileFormatVersion: 2
+guid: 14550c0c1d9240d6be393191dd3c8734
+timeCreated: 1696019334
\ No newline at end of file
diff --git a/Runtime/APIs/AndroidCamera/NDK/ACameraMetadata.cs b/Runtime/APIs/AndroidCamera/NDK/ACameraMetadata.cs
new file mode 100644
index 0000000..94609dd
--- /dev/null
+++ b/Runtime/APIs/AndroidCamera/NDK/ACameraMetadata.cs
@@ -0,0 +1,135 @@
+// %BANNER_BEGIN%
+// ---------------------------------------------------------------------
+// %COPYRIGHT_BEGIN%
+// Copyright (c) (2018-2022) Magic Leap, Inc. All Rights Reserved.
+// Use of this file is governed by the Software License Agreement, located here: https://www.magicleap.com/software-license-agreement-ml2
+// Terms and conditions applicable to third-party materials accompanying this distribution may also be found in the top-level NOTICE file appearing herein.
+// %COPYRIGHT_END%
+// ---------------------------------------------------------------------
+// %BANNER_END%
+
+using Unity.Collections;
+using UnityEngine.XR.MagicLeap.Unsafe;
+
+namespace MagicLeap.Android.NDK.Camera
+{
+ using System;
+ using System.Runtime.InteropServices;
+ using Unity.Collections.LowLevel.Unsafe;
+
+ using static CameraNativeBindings;
+
+ public struct ACameraMetadata : INullablePointer
+ {
+ public struct Entry
+ {
+ // [StructLayout(LayoutKind.Explicit, Size = 8)]
+ // public unsafe struct ConstUnionPointer
+ // {
+ // [FieldOffset(0)]
+ // public readonly byte* u8;
+ // [FieldOffset(0)]
+ // public readonly int* i32;
+ // [FieldOffset(0)]
+ // public readonly float* f32;
+ // [FieldOffset(0)]
+ // public readonly long* i64;
+ // [FieldOffset(0)]
+ // public readonly double* f64;
+ // [FieldOffset(0)]
+ // public readonly Rational* r;
+ // }
+
+ [StructLayout(LayoutKind.Explicit, Size = 8)]
+ public unsafe struct UnionPointer
+ {
+ [FieldOffset(0)]
+ [NativeDisableUnsafePtrRestriction]
+ public byte* U8;
+ [FieldOffset(0)]
+ [NativeDisableUnsafePtrRestriction]
+ public int* I32;
+ [FieldOffset(0)]
+ [NativeDisableUnsafePtrRestriction]
+ public float* F32;
+ [FieldOffset(0)]
+ [NativeDisableUnsafePtrRestriction]
+ public long* I64;
+ [FieldOffset(0)]
+ [NativeDisableUnsafePtrRestriction]
+ public double* F64;
+ [FieldOffset(0)]
+ [NativeDisableUnsafePtrRestriction]
+ public Rational* R;
+ }
+
+ public struct ReadOnly
+ {
+ public readonly uint Tag;
+ public readonly Type Type;
+ public readonly uint Count;
+ public readonly UnionPointer Data;
+ }
+
+ public enum Type : byte
+ {
+ Byte = 0,
+ Int32 = 1,
+ Float = 2,
+ Int64 = 3,
+ Double = 4,
+ Rational = 5,
+ NumTypes,
+ }
+
+ public uint Tag;
+ public Type type;
+ public uint Count;
+ public UnionPointer Data;
+ }
+
+ public struct Rational
+ {
+ public int Numerator;
+ public int Denominator;
+ }
+
+ private IntPtr value;
+
+ public bool IsNull => value == IntPtr.Zero;
+
+ public void Dispose()
+ {
+ if (!IsNull)
+ ACameraMetadata_free(this);
+ }
+
+ public NativeArray<Metadata.Tags> GetAllMetadataTags()
+ => GetAllMetadataTagsUntyped().Reinterpret<Metadata.Tags>();
+
+ public unsafe NativeArray<uint> GetAllMetadataTagsUntyped()
+ {
+ uint* data = null;
+ int length = 0;
+ var result = ACameraMetadata_getAllTags(this, out length, out data);
+ result.CheckReturnValueAndThrow();
+ if (result != CameraStatus.Ok)
+ return default;
+
+ var array = NativeArrayUnsafeUtility.ConvertExistingDataToNativeArray<uint>(data, length, Allocator.None);
+#if ENABLE_UNITY_COLLECTIONS_CHECKS
+ NativeArrayUnsafeUtility.SetAtomicSafetyHandle(ref array, UnsafeUtilityEx.CreateAtomicSafetyHandleForAllocator(Allocator.None));
+#endif
+ return array;
+ }
+
+ public bool TryGetConstEntry(Metadata.Tags tag, out Entry.ReadOnly outEntry)
+ {
+ outEntry = default;
+ var result = ACameraMetadata_getConstEntry(this, (uint)tag, out outEntry);
+ result.CheckReturnValueAndThrow();
+ return result == CameraStatus.Ok;
+ }
+ }
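+
+ // Illustrative sketch: reading a single int32 characteristic out of a metadata
+ // entry. Which Metadata.Tags value to query is left to the caller (assumption:
+ // the Metadata.Tags enum is defined elsewhere in this package).
+ internal static class MetadataEntrySketch
+ {
+ internal static unsafe bool TryReadFirstInt32(ACameraMetadata metadata, Metadata.Tags tag, out int outValue)
+ {
+ outValue = 0;
+ if (!metadata.TryGetConstEntry(tag, out var entry))
+ return false;
+
+ if (entry.Type != ACameraMetadata.Entry.Type.Int32 || entry.Count == 0)
+ return false;
+
+ outValue = entry.Data.I32[0];
+ return true;
+ }
+ }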
+}
diff --git a/Runtime/APIs/AndroidCamera/NDK/ACameraMetadata.cs.meta b/Runtime/APIs/AndroidCamera/NDK/ACameraMetadata.cs.meta
new file mode 100644
index 0000000..1811edf
--- /dev/null
+++ b/Runtime/APIs/AndroidCamera/NDK/ACameraMetadata.cs.meta
@@ -0,0 +1,3 @@
+fileFormatVersion: 2
+guid: cadab48c3c6a46389dd77416e197d087
+timeCreated: 1702930743
\ No newline at end of file
diff --git a/Runtime/APIs/AndroidCamera/NDK/ACameraOutputTarget.cs b/Runtime/APIs/AndroidCamera/NDK/ACameraOutputTarget.cs
new file mode 100644
index 0000000..f8821d3
--- /dev/null
+++ b/Runtime/APIs/AndroidCamera/NDK/ACameraOutputTarget.cs
@@ -0,0 +1,41 @@
+// %BANNER_BEGIN%
+// ---------------------------------------------------------------------
+// %COPYRIGHT_BEGIN%
+// Copyright (c) (2018-2022) Magic Leap, Inc. All Rights Reserved.
+// Use of this file is governed by the Software License Agreement, located here: https://www.magicleap.com/software-license-agreement-ml2
+// Terms and conditions applicable to third-party materials accompanying this distribution may also be found in the top-level NOTICE file appearing herein.
+// %COPYRIGHT_END%
+// ---------------------------------------------------------------------
+// %BANNER_END%
+
+namespace MagicLeap.Android.NDK.Camera
+{
+ using System;
+ using NativeWindow;
+
+ using static CameraNativeBindings;
+
+ public struct ACameraOutputTarget : INullablePointer
+ {
+ private IntPtr value;
+
+ public bool IsNull => value == IntPtr.Zero;
+
+ public void Dispose()
+ {
+ if (!IsNull)
+ ACameraOutputTarget_free(this);
+
+ value = IntPtr.Zero;
+ }
+
+ public static bool TryCreate(ANativeWindow window, out ACameraOutputTarget outTarget)
+ {
+ window.CheckNullAndThrow();
+
+ var result = ACameraOutputTarget_create(window, out outTarget);
+ result.CheckReturnValueAndThrow();
+ return result == CameraStatus.Ok;
+ }
+ }
+}
diff --git a/Runtime/APIs/AndroidCamera/NDK/ACameraOutputTarget.cs.meta b/Runtime/APIs/AndroidCamera/NDK/ACameraOutputTarget.cs.meta
new file mode 100644
index 0000000..045891c
--- /dev/null
+++ b/Runtime/APIs/AndroidCamera/NDK/ACameraOutputTarget.cs.meta
@@ -0,0 +1,3 @@
+fileFormatVersion: 2
+guid: 209e398876ab4a80b420959c513975f5
+timeCreated: 1703032320
\ No newline at end of file
diff --git a/Runtime/APIs/AndroidCamera/NDK/ACaptureRequest.cs b/Runtime/APIs/AndroidCamera/NDK/ACaptureRequest.cs
new file mode 100644
index 0000000..7632b41
--- /dev/null
+++ b/Runtime/APIs/AndroidCamera/NDK/ACaptureRequest.cs
@@ -0,0 +1,248 @@
+// %BANNER_BEGIN%
+// ---------------------------------------------------------------------
+// %COPYRIGHT_BEGIN%
+// Copyright (c) (2018-2022) Magic Leap, Inc. All Rights Reserved.
+// Use of this file is governed by the Software License Agreement, located here: https://www.magicleap.com/software-license-agreement-ml2
+// Terms and conditions applicable to third-party materials accompanying this distribution may also be found in the top-level NOTICE file appearing herein.
+// %COPYRIGHT_END%
+// ---------------------------------------------------------------------
+// %BANNER_END%
+
+
+using System.Diagnostics;
+
+namespace MagicLeap.Android.NDK.Camera
+{
+ using System;
+ using Unity.Collections;
+ using Unity.Collections.LowLevel.Unsafe;
+ using UnityEngine.XR.MagicLeap.Unsafe;
+
+ using static CameraNativeBindings;
+
+ public unsafe struct ACaptureRequest : INullablePointer
+ {
+ public enum Template
+ {
+ ///
+ /// Create a request suitable for a camera preview window. Specifically, this
+ /// means that high frame rate is given priority over the highest-quality
+ /// post-processing. These requests would normally be used with the
+ /// {@link ACameraCaptureSession_setRepeatingRequest} method.
+ /// This template is guaranteed to be supported on all camera devices.
+ ///
+ /// @see ACameraDevice_createCaptureRequest
+ ///
+ Preview = 1,
+
+ ///
+ /// Create a request suitable for still image capture. Specifically, this
+ /// means prioritizing image quality over frame rate. These requests would
+ /// commonly be used with the {@link ACameraCaptureSession_capture} method.
+ /// This template is guaranteed to be supported on all camera devices.
+ ///
+ /// @see ACameraDevice_createCaptureRequest
+ ///
+ StillCapture = 2,
+
+ ///
+ /// Create a request suitable for video recording. Specifically, this means
+ /// that a stable frame rate is used, and post-processing is set for
+ /// recording quality. These requests would commonly be used with the
+ /// {@link ACameraCaptureSession_setRepeatingRequest} method.
+ /// This template is guaranteed to be supported on all camera devices.
+ ///
+ /// @see ACameraDevice_createCaptureRequest
+ ///
+ Record = 3,
+
+ ///
+ /// Create a request suitable for still image capture while recording
+ /// video. Specifically, this means maximizing image quality without
+ /// disrupting the ongoing recording. These requests would commonly be used
+ /// with the {@link ACameraCaptureSession_capture} method while a request based on
+ /// {@link TEMPLATE_RECORD} is in use with {@link ACameraCaptureSession_setRepeatingRequest}.
+ /// This template is guaranteed to be supported on all camera devices.
+ ///
+ /// @see ACameraDevice_createCaptureRequest
+ ///
+ VideoSnapshot = 4,
+
+ ///
+ /// Create a request suitable for zero shutter lag still capture. This
+ /// means maximizing image quality without compromising preview frame rate.
+ /// AE/AWB/AF should be on auto mode.
+ ///
+ /// @see ACameraDevice_createCaptureRequest
+ ///
+ ZeroShutterLag = 5,
+
+ ///
+ /// A basic template for direct application control of capture
+ /// parameters. All automatic control is disabled (auto-exposure, auto-white
+ /// balance, auto-focus), and post-processing parameters are set to preview
+ /// quality. The manual capture parameters (exposure, sensitivity, and so on)
+ /// are set to reasonable defaults, but should be overridden by the
+ /// application depending on the intended use case.
+ /// This template is guaranteed to be supported on camera devices that support the
+ /// {@link ACAMERA_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR} capability.
+ ///
+ /// @see ACameraDevice_createCaptureRequest
+ ///
+ Manual = 6,
+ }
+
+ private IntPtr value;
+
+ public bool IsNull => value == IntPtr.Zero;
+
+ internal IntPtr UserContext
+ {
+ get => ACaptureRequest_getUserContext(this, out var context) == CameraStatus.Ok ? context : IntPtr.Zero;
+ set
+ {
+ if (ACaptureRequest_setUserContext(this, value) != CameraStatus.Ok)
+ throw new Exception("failed to set user context value");
+ }
+ }
+
+ public void Dispose()
+ {
+ if (!IsNull)
+ ACaptureRequest_free(this);
+
+ value = IntPtr.Zero;
+ }
+
+ public bool TryAddOutputTarget(ACameraOutputTarget ptr)
+ {
+ this.CheckNullAndThrow();
+ ptr.CheckNullAndThrow();
+ var result = ACaptureRequest_addTarget(this, ptr);
+ result.CheckReturnValueAndThrow();
+ return result == CameraStatus.Ok;
+ }
+
+ public bool TryGetAllTags(Allocator allocator, out NativeArray<Metadata.Tags> tags)
+ {
+ this.CheckNullAndThrow();
+ if (allocator <= Allocator.None)
+ throw new ArgumentException($"{nameof(allocator)} is not a valid allocator");
+
+ tags = default;
+
+ int numTags = 0;
+
+ var result = ACaptureRequest_getAllTags(this, ref numTags, null);
+ result.CheckReturnValueAndThrow();
+ if (result != CameraStatus.Ok)
+ return false;
+
+ if (numTags == 0)
+ return false;
+
+ uint* tagBuffer =
+ (uint*)UnsafeUtility.MallocTracked(sizeof(uint) * numTags, UnsafeUtility.AlignOf<uint>(), allocator, 0);
+
+ result = ACaptureRequest_getAllTags(this, ref numTags, &tagBuffer);
+ result.CheckReturnValueAndThrow();
+ if (result != CameraStatus.Ok)
+ {
+ UnsafeUtility.FreeTracked(tagBuffer, allocator);
+ return false;
+ }
+
+ tags = NativeArrayUnsafeUtility.ConvertExistingDataToNativeArray<uint>(tagBuffer, numTags, allocator)
+ .Reinterpret<Metadata.Tags>();
+
+#if ENABLE_UNITY_COLLECTIONS_CHECKS
+ NativeArrayUnsafeUtility.SetAtomicSafetyHandle(ref tags, UnsafeUtilityEx.CreateAtomicSafetyHandleForAllocator(allocator));
+#endif
+ return true;
+ }
+
+ public bool TryGetMetadata(Metadata.Tags tag, out ACameraMetadata.Entry.ReadOnly outEntry)
+ {
+ this.CheckNullAndThrow();
+ var result = ACaptureRequest_getConstEntry(this, (uint)tag, out outEntry);
+ result.CheckReturnValueAndThrow();
+ return result == CameraStatus.Ok;
+ }
+
+ public bool TrySetMetadataEntry(Metadata.Tags tag, NativeArray<byte> data)
+ {
+ this.CheckNullAndThrow();
+ CheckValidArrayAndThrow(data);
+
+ var result = ACaptureRequest_setEntry_u8(this, (uint)tag, (uint)data.Length,
+ (byte*)data.GetUnsafeReadOnlyPtr());
+ result.CheckReturnValueAndThrow();
+ return result == CameraStatus.Ok;
+ }
+
+ public bool TrySetMetadataEntry(Metadata.Tags tag, NativeArray<int> data)
+ {
+ this.CheckNullAndThrow();
+ CheckValidArrayAndThrow(data);
+
+ var result = ACaptureRequest_setEntry_i32(this, (uint)tag, (uint)data.Length,
+ (int*)data.GetUnsafeReadOnlyPtr());
+ result.CheckReturnValueAndThrow();
+ return result == CameraStatus.Ok;
+ }
+
+ public bool TrySetMetadataEntry(Metadata.Tags tag, NativeArray<long> data)
+ {
+ this.CheckNullAndThrow();
+ CheckValidArrayAndThrow(data);
+
+ var result = ACaptureRequest_setEntry_i64(this, (uint)tag, (uint)data.Length,
+ (long*)data.GetUnsafeReadOnlyPtr());
+ result.CheckReturnValueAndThrow();
+ return result == CameraStatus.Ok;
+ }
+
+ public bool TrySetMetadataEntry(Metadata.Tags tag, NativeArray<float> data)
+ {
+ this.CheckNullAndThrow();
+ CheckValidArrayAndThrow(data);
+
+ var result = ACaptureRequest_setEntry_float(this, (uint)tag, (uint)data.Length,
+ (float*)data.GetUnsafeReadOnlyPtr());
+ result.CheckReturnValueAndThrow();
+ return result == CameraStatus.Ok;
+ }
+
+ public bool TrySetMetadataEntry(Metadata.Tags tag, NativeArray<double> data)
+ {
+ this.CheckNullAndThrow();
+ CheckValidArrayAndThrow(data);
+
+ var result = ACaptureRequest_setEntry_double(this, (uint)tag, (uint)data.Length,
+ (double*)data.GetUnsafeReadOnlyPtr());
+ result.CheckReturnValueAndThrow();
+ return result == CameraStatus.Ok;
+ }
+
+ public bool TrySetMetadataEntry(Metadata.Tags tag, NativeArray<ACameraMetadata.Rational> data)
+ {
+ this.CheckNullAndThrow();
+ CheckValidArrayAndThrow(data);
+
+ var result = ACaptureRequest_setEntry_rational(this, (uint)tag, (uint)data.Length,
+ (ACameraMetadata.Rational*)data.GetUnsafeReadOnlyPtr());
+ result.CheckReturnValueAndThrow();
+ return result == CameraStatus.Ok;
+ }
+
+ [Conditional("DEVELOPMENT_BUILD")]
+ private static void CheckValidArrayAndThrow<T>(NativeArray<T> array) where T : unmanaged
+ {
+ if (!array.IsCreated)
+ throw new ArgumentNullException(nameof(array));
+
+ if (array.Length == 0)
+ throw new ArgumentException("Length of native array must be greater than 0");
+ }
+ }
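+
+ // Illustrative sketch: writing a single int32 metadata value onto a capture
+ // request through a temporary NativeArray. The tag to set is an assumption
+ // supplied by the caller.
+ internal static class CaptureRequestMetadataSketch
+ {
+ internal static bool TrySetSingleInt32(ACaptureRequest request, Metadata.Tags tag, int entryValue)
+ {
+ var data = new NativeArray<int>(1, Allocator.Temp);
+ try
+ {
+ data[0] = entryValue;
+ return request.TrySetMetadataEntry(tag, data);
+ }
+ finally
+ {
+ data.Dispose();
+ }
+ }
+ }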
+}
diff --git a/Runtime/APIs/AndroidCamera/NDK/ACaptureRequest.cs.meta b/Runtime/APIs/AndroidCamera/NDK/ACaptureRequest.cs.meta
new file mode 100644
index 0000000..79f9c8a
--- /dev/null
+++ b/Runtime/APIs/AndroidCamera/NDK/ACaptureRequest.cs.meta
@@ -0,0 +1,3 @@
+fileFormatVersion: 2
+guid: 9c9674d564f340cf896f639e87d944e1
+timeCreated: 1703024704
\ No newline at end of file
diff --git a/Runtime/APIs/AndroidCamera/NDK/ACaptureSessionOutput.cs b/Runtime/APIs/AndroidCamera/NDK/ACaptureSessionOutput.cs
new file mode 100644
index 0000000..60f1e35
--- /dev/null
+++ b/Runtime/APIs/AndroidCamera/NDK/ACaptureSessionOutput.cs
@@ -0,0 +1,29 @@
+namespace MagicLeap.Android.NDK.Camera
+{
+ using System;
+ using NativeWindow;
+
+ using static CameraNativeBindings;
+
+ public struct ACaptureSessionOutput : INullablePointer
+ {
+ private IntPtr value;
+
+ public bool IsNull => value == IntPtr.Zero;
+
+ public void Dispose()
+ {
+ if (!IsNull)
+ ACaptureSessionOutput_free(this);
+
+ value = IntPtr.Zero;
+ }
+
+ public static bool TryCreate(ANativeWindow window, out ACaptureSessionOutput output)
+ {
+ window.CheckNullAndThrow();
+
+ var result = ACaptureSessionOutput_create(window, out output);
+ result.CheckReturnValueAndThrow();
+ return result == CameraStatus.Ok;
+ }
+ }
+}
diff --git a/Runtime/APIs/AndroidCamera/NDK/ACaptureSessionOutput.cs.meta b/Runtime/APIs/AndroidCamera/NDK/ACaptureSessionOutput.cs.meta
new file mode 100644
index 0000000..ca758f1
--- /dev/null
+++ b/Runtime/APIs/AndroidCamera/NDK/ACaptureSessionOutput.cs.meta
@@ -0,0 +1,3 @@
+fileFormatVersion: 2
+guid: 8e5631e93ebb4d2c8e2f9d0ab74626f4
+timeCreated: 1703185557
\ No newline at end of file
diff --git a/Runtime/APIs/AndroidCamera/NDK/ACaptureSessionOutputContainer.cs b/Runtime/APIs/AndroidCamera/NDK/ACaptureSessionOutputContainer.cs
new file mode 100644
index 0000000..2bee78a
--- /dev/null
+++ b/Runtime/APIs/AndroidCamera/NDK/ACaptureSessionOutputContainer.cs
@@ -0,0 +1,56 @@
+// %BANNER_BEGIN%
+// ---------------------------------------------------------------------
+// %COPYRIGHT_BEGIN%
+// Copyright (c) (2018-2022) Magic Leap, Inc. All Rights Reserved.
+// Use of this file is governed by the Software License Agreement, located here: https://www.magicleap.com/software-license-agreement-ml2
+// Terms and conditions applicable to third-party materials accompanying this distribution may also be found in the top-level NOTICE file appearing herein.
+// %COPYRIGHT_END%
+// ---------------------------------------------------------------------
+// %BANNER_END%
+
+namespace MagicLeap.Android.NDK.Camera
+{
+ using System;
+
+ using static CameraNativeBindings;
+
+ public struct ACaptureSessionOutputContainer : INullablePointer
+ {
+ private IntPtr value;
+
+ public bool IsNull => value == IntPtr.Zero;
+
+ public void Dispose()
+ {
+ if (!IsNull)
+ ACaptureSessionOutputContainer_free(this);
+
+ value = IntPtr.Zero;
+ }
+
+ public bool TryAddOutput(ACaptureSessionOutput output)
+ {
+ this.CheckNullAndThrow();
+ output.CheckNullAndThrow();
+ var result = ACaptureSessionOutputContainer_add(this, output);
+ result.CheckReturnValueAndThrow();
+ return result == CameraStatus.Ok;
+ }
+
+ public static bool TryCreate(out ACaptureSessionOutputContainer container)
+ {
+ var result = ACaptureSessionOutputContainer_create(out container);
+ result.CheckReturnValueAndThrow();
+ return result == CameraStatus.Ok;
+ }
+
+ public bool TryRemoveOutput(ACaptureSessionOutput output)
+ {
+ this.CheckNullAndThrow();
+ output.CheckNullAndThrow();
+ var result = ACaptureSessionOutputContainer_remove(this, output);
+ result.CheckReturnValueAndThrow();
+ return result == CameraStatus.Ok;
+ }
+ }
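+
+ // Illustrative sketch: assembling the output container for a capture session
+ // from an ANativeWindow (e.g. one obtained from an AImageReader). Ownership
+ // and lifetime management are simplified for the example.
+ internal static class SessionOutputContainerSketch
+ {
+ internal static bool TryBuildContainer(NativeWindow.ANativeWindow window, out ACaptureSessionOutputContainer container)
+ {
+ if (!ACaptureSessionOutputContainer.TryCreate(out container))
+ return false;
+
+ if (ACaptureSessionOutput.TryCreate(window, out var output) && container.TryAddOutput(output))
+ return true;
+
+ container.Dispose();
+ container = default;
+ return false;
+ }
+ }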
+}
diff --git a/Runtime/APIs/AndroidCamera/NDK/ACaptureSessionOutputContainer.cs.meta b/Runtime/APIs/AndroidCamera/NDK/ACaptureSessionOutputContainer.cs.meta
new file mode 100644
index 0000000..c1b7640
--- /dev/null
+++ b/Runtime/APIs/AndroidCamera/NDK/ACaptureSessionOutputContainer.cs.meta
@@ -0,0 +1,3 @@
+fileFormatVersion: 2
+guid: e638133eccb64f9f94862cc4a15aeb67
+timeCreated: 1703034153
\ No newline at end of file
diff --git a/Runtime/APIs/AndroidCamera/NDK/AHardwareBuffer.cs b/Runtime/APIs/AndroidCamera/NDK/AHardwareBuffer.cs
new file mode 100644
index 0000000..95a541b
--- /dev/null
+++ b/Runtime/APIs/AndroidCamera/NDK/AHardwareBuffer.cs
@@ -0,0 +1,660 @@
+// %BANNER_BEGIN%
+// ---------------------------------------------------------------------
+// %COPYRIGHT_BEGIN%
+// Copyright (c) (2018-2022) Magic Leap, Inc. All Rights Reserved.
+// Use of this file is governed by the Software License Agreement, located here: https://www.magicleap.com/software-license-agreement-ml2
+// Terms and conditions applicable to third-party materials accompanying this distribution may also be found in the top-level NOTICE file appearing herein.
+// %COPYRIGHT_END%
+// ---------------------------------------------------------------------
+// %BANNER_END%
+
+
+using UnityEngine.Serialization;
+
+namespace MagicLeap.Android
+{
+ using System;
+ using System.Collections;
+ using Unity.Collections;
+
+ public struct ARect
+ {
+ public int Left;
+ public int Top;
+ public int Right;
+ public int Bottom;
+
+ public override string ToString()
+ => $"ARect[left = {Left}, top = {Top}, right = {Right}, bottom = {Bottom}]";
+ }
+
+ public readonly struct ImageDimensions : IEnumerable
+ {
+ public readonly int Height;
+ public readonly int Width;
+
+ public int Size => Width * Height;
+
+ public ImageDimensions(int width, int height)
+ {
+ this.Width = width;
+ this.Height = height;
+ }
+
+ public NativeArray<T> CreateNativeArray<T>(Allocator allocator,
+ NativeArrayOptions options = NativeArrayOptions.ClearMemory) where T : unmanaged
+ => new NativeArray<T>(Size, allocator, options);
+
+ IEnumerator IEnumerable.GetEnumerator() => throw new NotSupportedException();
+
+ public ImageCoordinates GetCoordinatesFromIndex(int index)
+ => new ImageCoordinates(index % Width, index / Width, index);
+
+ public Enumerator GetEnumerator() => new Enumerator(this);
+
+ public struct Enumerator : IEnumerator
+ {
+ private int current;
+ private int stride;
+ private int size;
+ private int x;
+ private int y;
+
+ internal Enumerator(in ImageDimensions imgDim)
+ {
+ current = -1;
+ size = imgDim.Size;
+ stride = imgDim.Width;
+ x = -1;
+ y = -1;
+ }
+
+ public void Dispose() {}
+
+ object IEnumerator.Current => Current;
+ public ImageCoordinates Current => new ImageCoordinates(x, y, current);
+
+ public bool MoveNext()
+ {
+ // Advance first so that x, y, and current all describe the same element.
+ if (current + 1 >= size)
+ return false;
+
+ current++;
+ x = current % stride;
+ y = current / stride;
+
+ return true;
+ }
+
+ public void Reset()
+ {
+ current = -1;
+ x = -1;
+ y = -1;
+ }
+ }
+ }
+
+ public readonly struct ImageCoordinates
+ {
+ public readonly int X;
+ public readonly int Y;
+ public readonly int Index;
+
+ public ImageCoordinates(int x, int y, int index)
+ {
+ this.X = x;
+ this.Y = y;
+ this.Index = index;
+ }
+
+ public void Deconstruct(out int outX, out int outY)
+ {
+ outX = X;
+ outY = Y;
+ }
+
+ public void Deconstruct(out int outX, out int outY, out int outIndex)
+ {
+ outX = X;
+ outY = Y;
+ outIndex = Index;
+ }
+ }
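+
+ // Illustrative sketch: ImageDimensions supports foreach through its custom
+ // value-type enumerator, yielding row-major (x, y, index) coordinates without
+ // allocating.
+ internal static class ImageDimensionsSketch
+ {
+ internal static long SumOfIndices(ImageDimensions dims)
+ {
+ long sum = 0;
+ foreach (var coord in dims)
+ sum += coord.Index;
+ return sum;
+ }
+ }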
+}
+
+namespace MagicLeap.Android.NDK.NativeWindow
+{
+ using System;
+ using System.Diagnostics;
+ using Unity.Collections.LowLevel.Unsafe;
+
+ using Media;
+ using static NativeWindowNativeBindings;
+
+ public struct AHardwareBuffer : INullablePointer
+ {
+ public struct Description
+ {
+ public uint Width;
+ public uint Height;
+ public uint Layers;
+ public Format Format;
+ public Usage Usage;
+ public uint Stride;
+ private uint reserved0;
+ private uint reserved1;
+
+ public bool HasMultiplePlanes
+ => Format == Format.Y8Cb8Cr8_420 || Format == Format.YCbCrP010;
+ }
+
+ public enum Format
+ {
+ ///
+ /// Corresponding formats:
+ /// Vulkan: VK_FORMAT_R8G8B8A8_UNORM
+ /// OpenGL ES: GL_RGBA8
+ ///
+ R8G8B8A8UNorm = 1,
+
+ ///
+ /// 32 bits per pixel, 8 bits per channel format where alpha values are
+ /// ignored (always opaque).
+ /// Corresponding formats:
+ /// Vulkan: VK_FORMAT_R8G8B8A8_UNORM
+ /// OpenGL ES: GL_RGB8
+ ///
+ R8G8B8X8UNorm = 2,
+
+ ///
+ /// Corresponding formats:
+ /// Vulkan: VK_FORMAT_R8G8B8_UNORM
+ /// OpenGL ES: GL_RGB8
+ ///
+ R8G8B8UNorm = 3,
+
+ ///
+ /// Corresponding formats:
+ /// Vulkan: VK_FORMAT_R5G6B5_UNORM_PACK16
+ /// OpenGL ES: GL_RGB565
+ ///
+ R5G6B5UNorm = 4,
+
+ ///
+ /// Corresponding formats:
+ /// Vulkan: VK_FORMAT_R16G16B16A16_SFLOAT
+ /// OpenGL ES: GL_RGBA16F
+ ///
+ R16G16B16A16Float = 0x16,
+
+ ///
+ /// Corresponding formats:
+ /// Vulkan: VK_FORMAT_A2B10G10R10_UNORM_PACK32
+ /// OpenGL ES: GL_RGB10_A2
+ ///
+ R10G10B10A2UNorm = 0x2b,
+
+ ///
+ /// Opaque binary blob format.
+ /// Must have height 1 and one layer, with width equal to the buffer
+ /// size in bytes. Corresponds to Vulkan buffers and OpenGL buffer
+ /// objects. Can be bound to the latter using GL_EXT_external_buffer.
+ ///
+ Blob = 0x21,
+
+ ///
+ /// Corresponding formats:
+ /// Vulkan: VK_FORMAT_D16_UNORM
+ /// OpenGL ES: GL_DEPTH_COMPONENT16
+ ///
+ D16UNorm = 0x30,
+
+ ///
+ /// Corresponding formats:
+ /// Vulkan: VK_FORMAT_X8_D24_UNORM_PACK32
+ /// OpenGL ES: GL_DEPTH_COMPONENT24
+ ///
+ D24UNorm = 0x31,
+
+ ///
+ /// Corresponding formats:
+ /// Vulkan: VK_FORMAT_D24_UNORM_S8_UINT
+ /// OpenGL ES: GL_DEPTH24_STENCIL8
+ ///
+ D24UNormS8UInt = 0x32,
+
+ ///
+ /// Corresponding formats:
+ /// Vulkan: VK_FORMAT_D32_SFLOAT
+ /// OpenGL ES: GL_DEPTH_COMPONENT32F
+ ///
+ D32Float = 0x33,
+
+ ///
+ /// Corresponding formats:
+ /// Vulkan: VK_FORMAT_D32_SFLOAT_S8_UINT
+ /// OpenGL ES: GL_DEPTH32F_STENCIL8
+ ///
+ D32FloatS8UInt = 0x34,
+
+ ///
+ /// Corresponding formats:
+ /// Vulkan: VK_FORMAT_S8_UINT
+ /// OpenGL ES: GL_STENCIL_INDEX8
+ ///
+ S8UInt = 0x35,
+
+ ///
+ /// YUV 420 888 format.
+ /// Must have an even width and height. Can be accessed in OpenGL
+ /// shaders through an external sampler. Does not support mip-maps,
+ /// cube-maps, or multi-layered textures.
+ ///
+ Y8Cb8Cr8_420 = 0x23,
+
+ ///
+ /// YUV P010 format.
+ /// Must have an even width and height. Can be accessed in OpenGL
+ /// shaders through an external sampler. Does not support mip-maps,
+ /// cube-maps, or multi-layered textures.
+ ///
+ YCbCrP010 = 0x36,
+
+ ///
+ /// Corresponding formats:
+ /// Vulkan: VK_FORMAT_R8_UNORM
+ /// OpenGL ES: GR_GL_R8
+ ///
+ R8UNorm = 0x38,
+ }
+
+ public unsafe struct Plane
+ {
+ [NativeDisableUnsafePtrRestriction]
+ public byte* Data;
+ public uint PixelStride;
+ public uint RowStride;
+
+ public bool IsValid => Data != null;
+ }
+
+ public unsafe struct Planes
+ {
+ private const uint kSizeOfPlaneInBytes = 16;
+ private const uint kMaxPlanes = 4;
+
+ private int planeCount;
+ private int padding0;
+ private fixed byte planesRaw[(int)(kSizeOfPlaneInBytes * kMaxPlanes)];
+
+ public int PlaneCount => planeCount;
+
+ public Plane PlaneFor(int planeIdx)
+ {
+ CheckPlaneIndexAndThrow(planeIdx);
+ fixed (byte* @base = planesRaw)
+ {
+ return UnsafeUtility.ReadArrayElement<Plane>(@base, planeIdx);
+ }
+ }
+
+ [Conditional("DEVELOPMENT_BUILD"), Conditional("ENABLE_UNITY_COLLECTIONS_CHECKS")]
+ private void CheckPlaneIndexAndThrow(int planeIdx)
+ {
+ if (planeIdx < 0 || planeIdx >= PlaneCount)
+ throw new IndexOutOfRangeException(
+ $"plane index must be between 0 and {PlaneCount - 1}, inclusive");
+ }
+ }
+
+ [Flags]
+ public enum Usage : ulong
+ {
+ ///
+ /// The buffer will never be locked for direct CPU reads using the
+ /// AHardwareBuffer_lock() function. Note that reading the buffer
+ /// using OpenGL or Vulkan functions or memory mappings is still
+ /// allowed.
+ ///
+ CpuReadNever = 0UL,
+
+ ///
+ /// The buffer will sometimes be locked for direct CPU reads using
+ /// the AHardwareBuffer_lock() function. Note that reading the
+ /// buffer using OpenGL or Vulkan functions or memory mappings
+ /// does not require the presence of this flag.
+ ///
+ CpuReadRarely = 2UL,
+
+ ///
+ /// The buffer will often be locked for direct CPU reads using
+ /// the AHardwareBuffer_lock() function. Note that reading the
+ /// buffer using OpenGL or Vulkan functions or memory mappings
+ /// does not require the presence of this flag.
+ ///
+ CpuReadOften = 3UL,
+
+ ///
+ /// CPU read value mask.
+ ///
+ CpuReadMask = 0xFUL,
+
+ ///
+ /// The buffer will never be locked for direct CPU writes using the
+ /// AHardwareBuffer_lock() function. Note that writing the buffer
+ /// using OpenGL or Vulkan functions or memory mappings is still
+ /// allowed.
+ ///
+ CpuWriteNever = 0UL << 4,
+
+ ///
+ /// The buffer will sometimes be locked for direct CPU writes using
+ /// the AHardwareBuffer_lock() function. Note that writing the
+ /// buffer using OpenGL or Vulkan functions or memory mappings
+ /// does not require the presence of this flag.
+ ///
+ CpuWriteRarely = 2UL << 4,
+
+ ///
+ /// The buffer will often be locked for direct CPU writes using
+ /// the AHardwareBuffer_lock() function. Note that writing the
+ /// buffer using OpenGL or Vulkan functions or memory mappings
+ /// does not require the presence of this flag.
+ ///
+ CpuWriteOften = 3UL << 4,
+
+ ///
+ /// CPU write value mask.
+ ///
+ CpuWriteMask = 0xFUL << 4,
+
+ ///
+ /// The buffer will be read from by the GPU as a texture.
+ ///
+ GpuSampledImage = 1UL << 8,
+
+ ///
+ /// The buffer will be written to by the GPU as a framebuffer attachment.
+ ///
+ GpuFramebuffer = 1UL << 9,
+
+ ///
+ /// The buffer will be written to by the GPU as a framebuffer
+ /// attachment.
+ ///
+ /// Note that the name of this flag is somewhat misleading: it does
+ /// not imply that the buffer contains a color format. A buffer with
+ /// depth or stencil format that will be used as a framebuffer
+ /// attachment should also have this flag. Use the equivalent flag
+ /// AHARDWAREBUFFER_USAGE_GPU_FRAMEBUFFER to avoid this confusion.
+ ///
+ GpuColorOutput = GpuFramebuffer,
+
+ ///
+ /// The buffer will be used as a composer HAL overlay layer.
+ ///
+ /// This flag is currently only needed when using ASurfaceTransaction_setBuffer
+ /// to set a buffer. In all other cases, the framework adds this flag
+ /// internally to buffers that could be presented in a composer overlay.
+ /// ASurfaceTransaction_setBuffer is special because it uses buffers allocated
+ /// directly through AHardwareBuffer_allocate instead of buffers allocated
+ /// by the framework.
+ ///
+ ComposerOverlay = 1UL << 11,
+
+ ///
+ /// The buffer is protected from direct CPU access or being read by
+ /// non-secure hardware, such as video encoders.
+ ///
+ /// This flag is incompatible with CPU read and write flags. It is
+ /// mainly used when handling DRM video. Refer to the EGL extension
+ /// EGL_EXT_protected_content and GL extension
+ /// GL_EXT_protected_textures for more information on how these
+ /// buffers are expected to behave.
+ ///
+ ProtectedContent = 1UL << 14,
+
+ ///
+ /// The buffer will be read by a hardware video encoder.
+ ///
+ VideoEncode = 1UL << 16,
+
+ ///
+ /// The buffer will be used for direct writes from sensors.
+ /// When this flag is present, the format must be AHARDWAREBUFFER_FORMAT_BLOB.
+ ///
+ SensorDirectData = 1UL << 23,
+
+ ///
+ /// The buffer will be used as a shader storage or uniform buffer object.
+ /// When this flag is present, the format must be AHARDWAREBUFFER_FORMAT_BLOB.
+ ///
+ GpuDataBuffer = 1UL << 24,
+
+ ///
+ /// The buffer will be used as a cube map texture.
+ /// When this flag is present, the buffer must have a layer count
+ /// that is a multiple of 6. Note that buffers with this flag must be
+ /// bound to OpenGL textures using the extension
+ /// GL_EXT_EGL_image_storage instead of GL_KHR_EGL_image.
+ ///
+ GpuCubeMap = 1UL << 25,
+
+ ///
+ /// The buffer contains a complete mipmap hierarchy.
+ /// Note that buffers with this flag must be bound to OpenGL textures using
+ /// the extension GL_EXT_EGL_image_storage instead of GL_KHR_EGL_image.
+ ///
+ GpuMipmapComplete = 1UL << 26,
+
+ Vendor0 = 1UL << 28,
+ Vendor1 = 1UL << 29,
+ Vendor2 = 1UL << 30,
+ Vendor3 = 1UL << 31,
+ Vendor4 = 1UL << 48,
+ Vendor5 = 1UL << 49,
+ Vendor6 = 1UL << 50,
+ Vendor7 = 1UL << 51,
+ Vendor8 = 1UL << 52,
+ Vendor9 = 1UL << 53,
+ Vendor10 = 1UL << 54,
+ Vendor11 = 1UL << 55,
+ Vendor12 = 1UL << 56,
+ Vendor13 = 1UL << 57,
+ Vendor14 = 1UL << 58,
+ Vendor15 = 1UL << 59,
+ Vendor16 = 1UL << 60,
+ Vendor17 = 1UL << 61,
+ Vendor18 = 1UL << 62,
+ Vendor19 = 1UL << 63,
+ }
+
+ private IntPtr value;
+
+ public Description description
+ {
+ get
+ {
+ AHardwareBuffer_describe(this, out var desc);
+ return desc;
+ }
+ }
+
+ public bool IsNull => value == IntPtr.Zero;
+
+ public void Acquire()
+ {
+ this.CheckNullAndThrow();
+ AHardwareBuffer_acquire(this);
+ }
+
+ public void Dispose()
+ {
+ if (!IsNull)
+ ReleaseUnchecked();
+
+ value = IntPtr.Zero;
+ }
+
+ public static bool IsSupported(Description description)
+ => AHardwareBuffer_isSupported(ref description) == 1;
+
+ public static bool TryAllocate(Description description, out AHardwareBuffer buffer)
+ => AHardwareBuffer_allocate(ref description, out buffer) == 0;
+
+ public unsafe bool TryLockData(Usage usageFlags, out void* outAddress)
+ {
+ this.CheckNullAndThrow();
+ CheckMultiPlanarAndThrow(false);
+
+ var ret = AHardwareBuffer_lock(this, usageFlags, -1, null, out outAddress);
+
+ return CheckResultAndThrow(ret);
+ }
+
+ public bool TryLockPlaneData(Usage usageFlags, out Planes planes)
+ {
+ this.CheckNullAndThrow();
+ CheckMultiPlanarAndThrow();
+
+ int ret;
+
+ unsafe
+ {
+ ret = AHardwareBuffer_lockPlanes(this, usageFlags, -1, null, out planes);
+ }
+
+ return CheckResultAndThrow(ret);
+ }
+
+ public bool TryUnlockData()
+ {
+ this.CheckNullAndThrow();
+
+ int ret;
+
+ unsafe
+ {
+ ret = AHardwareBuffer_unlock(this, null);
+ }
+
+ return CheckResultAndThrow(ret);
+ }
+
+ internal void ReleaseUnchecked()
+ => AHardwareBuffer_release(this);
+
+
+ [Conditional("DEVELOPMENT_BUILD")]
+ internal void CheckMultiPlanarAndThrow(bool expectedMultiPlanar = true)
+ {
+ var valid = expectedMultiPlanar == description.HasMultiplePlanes;
+
+ if (valid)
+ return;
+
+ if (expectedMultiPlanar)
+ throw new InvalidOperationException(
+ "Cannot perform a multi-planar operation on a buffer with a single plane");
+ else
+ throw new InvalidOperationException(
+ "Cannot perform a single-plane operation on a buffer with multiple planes");
+
+ }
+
+ private bool CheckResultAndThrow(int actual, int expected = 0)
+ {
+ bool success = expected == actual;
+#if DEVELOPMENT_BUILD
+ if (!success)
+ throw new Exception($"expected result: {expected}, actual result was: {actual}");
+#endif
+ return success;
+ }
+
+ }
+
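+ // Illustrative sketch: allocating a small CPU-writable RGBA buffer, locking it
+ // for writes, clearing it, then unlocking. The 64x64 dimensions are arbitrary
+ // assumptions, and Stride is assumed to be in pixels (per the NDK docs).
+ internal static class HardwareBufferLockSketch
+ {
+ internal static unsafe bool AllocateAndClear()
+ {
+ var desc = new AHardwareBuffer.Description
+ {
+ Width = 64,
+ Height = 64,
+ Layers = 1,
+ Format = AHardwareBuffer.Format.R8G8B8A8UNorm,
+ Usage = AHardwareBuffer.Usage.CpuWriteOften,
+ };
+
+ if (!AHardwareBuffer.IsSupported(desc) || !AHardwareBuffer.TryAllocate(desc, out var buffer))
+ return false;
+
+ using (buffer)
+ {
+ if (!buffer.TryLockData(AHardwareBuffer.Usage.CpuWriteOften, out var address))
+ return false;
+
+ // RGBA8888 is 4 bytes per pixel.
+ var sizeInBytes = buffer.description.Stride * desc.Height * 4;
+ UnsafeUtility.MemClear(address, sizeInBytes);
+ return buffer.TryUnlockData();
+ }
+ }
+ }
+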
+ public static class AHardwareBufferExtensions
+ {
+ public static void Release(ref this AHardwareBuffer buffer)
+ {
+ buffer.CheckNullAndThrow();
+ buffer.ReleaseUnchecked();
+ }
+
+ public static int BytesPerPixel(this AHardwareBuffer.Format format)
+ {
+ switch (format)
+ {
+ case AHardwareBuffer.Format.R8G8B8A8UNorm:
+ return 4;
+ case AHardwareBuffer.Format.R8G8B8X8UNorm:
+ return 4;
+ case AHardwareBuffer.Format.R8G8B8UNorm:
+ return 3;
+ case AHardwareBuffer.Format.R5G6B5UNorm:
+ return 2;
+ case AHardwareBuffer.Format.R16G16B16A16Float:
+ return 8;
+ case AHardwareBuffer.Format.R10G10B10A2UNorm:
+ return 4;
+ case AHardwareBuffer.Format.Blob:
+ return 1;
+ case AHardwareBuffer.Format.D16UNorm:
+ return 2;
+ case AHardwareBuffer.Format.D24UNorm:
+ return 3;
+ case AHardwareBuffer.Format.D24UNormS8UInt:
+ return 4;
+ case AHardwareBuffer.Format.D32Float:
+ return 4;
+ case AHardwareBuffer.Format.D32FloatS8UInt:
+ return 5;
+ case AHardwareBuffer.Format.S8UInt:
+ return 1;
+ case AHardwareBuffer.Format.Y8Cb8Cr8_420:
+ return 1;
+ case AHardwareBuffer.Format.YCbCrP010:
+ return 2;
+ case AHardwareBuffer.Format.R8UNorm:
+ return 1;
+ default:
+ throw new ArgumentOutOfRangeException(nameof(format), format, null);
+ }
+ }
+
+ internal static MediaFormat ToMediaFormat(this AHardwareBuffer.Format format)
+ {
+ switch (format)
+ {
+ case AHardwareBuffer.Format.R8G8B8A8UNorm:
+ return MediaFormat.Rgba8888;
+ case AHardwareBuffer.Format.R8G8B8X8UNorm:
+ return MediaFormat.Rgbx8888;
+ case AHardwareBuffer.Format.R8G8B8UNorm:
+ return MediaFormat.Rgb888;
+ case AHardwareBuffer.Format.R5G6B5UNorm:
+ return MediaFormat.Rgb565;
+ case AHardwareBuffer.Format.R16G16B16A16Float:
+ return MediaFormat.Rgba_Fp16;
+ case AHardwareBuffer.Format.D16UNorm:
+ return MediaFormat.Depth16;
+ case AHardwareBuffer.Format.Y8Cb8Cr8_420:
+ return MediaFormat.Yuv_420_888;
+ // AHardwareBuffer formats with no MediaFormat equivalent.
+ case AHardwareBuffer.Format.YCbCrP010:
+ case AHardwareBuffer.Format.R8UNorm:
+ case AHardwareBuffer.Format.S8UInt:
+ case AHardwareBuffer.Format.D32FloatS8UInt:
+ case AHardwareBuffer.Format.D32Float:
+ case AHardwareBuffer.Format.D24UNormS8UInt:
+ case AHardwareBuffer.Format.D24UNorm:
+ case AHardwareBuffer.Format.Blob:
+ case AHardwareBuffer.Format.R10G10B10A2UNorm:
+ default:
+ throw new ArgumentOutOfRangeException(nameof(format), format, null);
+ }
+ }
+ }
+}
diff --git a/Runtime/APIs/AndroidCamera/NDK/AHardwareBuffer.cs.meta b/Runtime/APIs/AndroidCamera/NDK/AHardwareBuffer.cs.meta
new file mode 100644
index 0000000..285467a
--- /dev/null
+++ b/Runtime/APIs/AndroidCamera/NDK/AHardwareBuffer.cs.meta
@@ -0,0 +1,3 @@
+fileFormatVersion: 2
+guid: 2f84e58a8e854ed1b15ec3d136b3ac99
+timeCreated: 1702942261
\ No newline at end of file
diff --git a/Runtime/APIs/AndroidCamera/NDK/AImage.cs b/Runtime/APIs/AndroidCamera/NDK/AImage.cs
new file mode 100644
index 0000000..db98251
--- /dev/null
+++ b/Runtime/APIs/AndroidCamera/NDK/AImage.cs
@@ -0,0 +1,140 @@
+// %BANNER_BEGIN%
+// ---------------------------------------------------------------------
+// %COPYRIGHT_BEGIN%
+// Copyright (c) (2018-2022) Magic Leap, Inc. All Rights Reserved.
+// Use of this file is governed by the Software License Agreement, located here: https://www.magicleap.com/software-license-agreement-ml2
+// Terms and conditions applicable to third-party materials accompanying this distribution may also be found in the top-level NOTICE file appearing herein.
+// %COPYRIGHT_END%
+// ---------------------------------------------------------------------
+// %BANNER_END%
+
+namespace MagicLeap.Android.NDK.Media
+{
+ using System;
+ using System.Diagnostics;
+ using System.Runtime.CompilerServices;
+ using Unity.Collections;
+ using Unity.Collections.LowLevel.Unsafe;
+ using NativeWindow;
+
+ using static MediaNativeBindings;
+
+ public struct AImage : INullablePointer
+ {
+
+ private IntPtr value;
+
+ public ARect CropRect
+ {
+ get
+ {
+ var result = AImage_getCropRect(this, out var cropRect);
+ result.CheckReturnValueAndThrow();
+ return cropRect;
+ }
+ }
+
+ public MediaFormat Format
+ {
+ get
+ {
+ var result = AImage_getFormat(this, out var format);
+ result.CheckReturnValueAndThrow();
+ return format;
+ }
+ }
+
+ public AHardwareBuffer HardwareBuffer
+ {
+ get
+ {
+ var result = AImage_getHardwareBuffer(this, out var buffer);
+ result.CheckReturnValueAndThrow();
+ return buffer;
+ }
+ }
+
+ public int Height
+ {
+ get
+ {
+ var result = AImage_getHeight(this, out var height);
+ result.CheckReturnValueAndThrow();
+ return height;
+ }
+ }
+
+ public bool IsNull => value == IntPtr.Zero;
+
+ public int NumberOfPlanes
+ {
+ get
+ {
+ var result = AImage_getNumberOfPlanes(this, out var numPlanes);
+ result.CheckReturnValueAndThrow();
+ return numPlanes;
+ }
+ }
+
+ public long TimestampInNanoseconds
+ {
+ get
+ {
+ var result = AImage_getTimestamp(this, out var timestampNs);
+ result.CheckReturnValueAndThrow();
+ return timestampNs;
+ }
+ }
+
+ public int Width
+ {
+ get
+ {
+ var result = AImage_getWidth(this, out var width);
+ result.CheckReturnValueAndThrow();
+ return width;
+ }
+ }
+
+ public void Dispose()
+ {
+ if (!IsNull)
+ AImage_delete(this);
+
+ value = IntPtr.Zero;
+ }
+
+ public unsafe bool TryGetPlaneData(int planeIdx, out int dataLength, out int pixelStride, out int rowStride, out byte* data)
+ {
+ this.CheckNullAndThrow();
+ CheckPlaneIndexAndThrow(planeIdx);
+
+ dataLength = 0;
+ pixelStride = 0;
+ rowStride = 0;
+ data = null;
+
+ var result = AImage_getPlanePixelStride(this, planeIdx, out pixelStride);
+ result.CheckReturnValueAndThrow();
+ if (result != MediaStatus.Ok)
+ return false;
+
+ result = AImage_getPlaneRowStride(this, planeIdx, out rowStride);
+ result.CheckReturnValueAndThrow();
+ if (result != MediaStatus.Ok)
+ return false;
+
+ result = AImage_getPlaneData(this, planeIdx, out data, out dataLength);
+ result.CheckReturnValueAndThrow();
+ return result == MediaStatus.Ok;
+ }
+
+ [Conditional("DEVELOPMENT_BUILD")]
+ private void CheckPlaneIndexAndThrow(int planeIdx)
+ {
+ var numPlanes = NumberOfPlanes;
+ if (planeIdx < 0 || planeIdx >= numPlanes)
+ throw new IndexOutOfRangeException($"plane index must be between 0 and {numPlanes - 1}, inclusive");
+ }
+ }
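+
+ // Illustrative sketch: copying the first plane of an acquired image into a
+ // managed byte array. The caller is assumed to own and later dispose the image.
+ internal static class ImagePlaneCopySketch
+ {
+ internal static unsafe byte[] CopyPlaneZero(AImage image)
+ {
+ if (!image.TryGetPlaneData(0, out var dataLength, out _, out _, out var data))
+ return null;
+
+ var managed = new byte[dataLength];
+ fixed (byte* dst = managed)
+ UnsafeUtility.MemCpy(dst, data, dataLength);
+
+ return managed;
+ }
+ }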
+}
diff --git a/Runtime/APIs/AndroidCamera/NDK/AImage.cs.meta b/Runtime/APIs/AndroidCamera/NDK/AImage.cs.meta
new file mode 100644
index 0000000..e7444c0
--- /dev/null
+++ b/Runtime/APIs/AndroidCamera/NDK/AImage.cs.meta
@@ -0,0 +1,3 @@
+fileFormatVersion: 2
+guid: f47e36a8c4e34f3db7837000789d5b65
+timeCreated: 1703012148
\ No newline at end of file
diff --git a/Runtime/APIs/AndroidCamera/NDK/AImageReader.cs b/Runtime/APIs/AndroidCamera/NDK/AImageReader.cs
new file mode 100644
index 0000000..62f01b7
--- /dev/null
+++ b/Runtime/APIs/AndroidCamera/NDK/AImageReader.cs
@@ -0,0 +1,135 @@
+// %BANNER_BEGIN%
+// ---------------------------------------------------------------------
+// %COPYRIGHT_BEGIN%
+// Copyright (c) (2018-2022) Magic Leap, Inc. All Rights Reserved.
+// Use of this file is governed by the Software License Agreement, located here: https://www.magicleap.com/software-license-agreement-ml2
+// Terms and conditions applicable to third-party materials accompanying this distribution may also be found in the top-level NOTICE file appearing herein.
+// %COPYRIGHT_END%
+// ---------------------------------------------------------------------
+// %BANNER_END%
+
+namespace MagicLeap.Android.NDK.Media
+{
+ using System;
+ using System.Diagnostics;
+ using System.Runtime.InteropServices;
+ using NativeWindow;
+
+ using static MediaNativeBindings;
+
+ public struct AImageReader : INullablePointer
+ {
+ public struct ImageListener
+ {
+ private IntPtr context;
+ private IntPtr onImageAvailable;
+
+ public bool IsCreated => onImageAvailable != IntPtr.Zero;
+
+ public static ImageListener Create(Action<IntPtr, AImageReader> onImageAvailable, IntPtr context = default)
+ {
+ if (onImageAvailable == null)
+ throw new ArgumentNullException(nameof(onImageAvailable));
+
+ return new ImageListener
+ {
+ context = context,
+ onImageAvailable = Marshal.GetFunctionPointerForDelegate(onImageAvailable),
+ };
+ }
+ }
+
+ private IntPtr value;
+
+ public bool IsNull => value == IntPtr.Zero;
+
+ public MediaFormat Format
+ {
+ get
+ {
+ var result = AImageReader_getFormat(this, out var format);
+ result.CheckReturnValueAndThrow();
+ return format;
+ }
+ }
+
+ public int Height
+ {
+ get
+ {
+ var result = AImageReader_getHeight(this, out var height);
+ result.CheckReturnValueAndThrow();
+ return height;
+ }
+ }
+
+ public int MaxImages
+ {
+ get
+ {
+ var result = AImageReader_getMaxImages(this, out var maxImages);
+ result.CheckReturnValueAndThrow();
+ return maxImages;
+ }
+ }
+
+ public int Width
+ {
+ get
+ {
+ var result = AImageReader_getWidth(this, out var width);
+ result.CheckReturnValueAndThrow();
+ return width;
+ }
+ }
+
+ public ANativeWindow Window
+ {
+ get
+ {
+ var result = AImageReader_getWindow(this, out var window);
+ result.CheckReturnValueAndThrow();
+ return window;
+ }
+ }
+
+ public void Dispose()
+ {
+ if (!IsNull)
+ AImageReader_delete(this);
+
+ value = IntPtr.Zero;
+ }
+
+ public bool TryAcquireLatestImage(out AImage outImage)
+ {
+ this.CheckNullAndThrow();
+ return AImageReader_acquireLatestImage(this, out outImage) == MediaStatus.Ok;
+ }
+
+ public bool TryAcquireNextImage(out AImage outImage)
+ {
+ this.CheckNullAndThrow();
+ return AImageReader_acquireNextImage(this, out outImage) == MediaStatus.Ok;
+ }
+
+ public static bool TryCreate(int width, int height, MediaFormat format, int maxImages,
+ out AImageReader reader)
+ {
+ var result = AImageReader_new(width, height, format, maxImages, out reader);
+ return result == MediaStatus.Ok;
+ }
+
+ public static bool TryCreate(int width, int height, MediaFormat format, AHardwareBuffer.Usage usage, int maxImages, out AImageReader reader)
+ => AImageReader_newWithUsage(width, height, format, usage, maxImages, out reader) == MediaStatus.Ok;
+
+ public bool TrySetImageListener(ImageListener listener)
+ {
+ this.CheckNullAndThrow();
+ if (!listener.IsCreated)
+ return false;
+
+ return AImageReader_setImageListener(this, ref listener) == MediaStatus.Ok;
+ }
+ }
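+
+ // Illustrative sketch: creating a small YUV reader and draining its newest
+ // frame. The 640x480 size and maxImages count are arbitrary assumptions; in
+ // real use the reader must outlive any images acquired from it.
+ internal static class ImageReaderSketch
+ {
+ internal static void AcquireAndReleaseNewestFrame()
+ {
+ if (!AImageReader.TryCreate(640, 480, MediaFormat.Yuv_420_888, 4, out var reader))
+ return;
+
+ using (reader)
+ {
+ if (reader.TryAcquireLatestImage(out var image))
+ image.Dispose();
+ }
+ }
+ }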
+}
diff --git a/Runtime/APIs/AndroidCamera/NDK/AImageReader.cs.meta b/Runtime/APIs/AndroidCamera/NDK/AImageReader.cs.meta
new file mode 100644
index 0000000..c939888
--- /dev/null
+++ b/Runtime/APIs/AndroidCamera/NDK/AImageReader.cs.meta
@@ -0,0 +1,3 @@
+fileFormatVersion: 2
+guid: 2678439076b7433abab300d4ba1c9656
+timeCreated: 1702937290
\ No newline at end of file
diff --git a/Runtime/APIs/AndroidCamera/NDK/ANativeWindow.cs b/Runtime/APIs/AndroidCamera/NDK/ANativeWindow.cs
new file mode 100644
index 0000000..9044d8d
--- /dev/null
+++ b/Runtime/APIs/AndroidCamera/NDK/ANativeWindow.cs
@@ -0,0 +1,23 @@
+// %BANNER_BEGIN%
+// ---------------------------------------------------------------------
+// %COPYRIGHT_BEGIN%
+// Copyright (c) (2018-2022) Magic Leap, Inc. All Rights Reserved.
+// Use of this file is governed by the Software License Agreement, located here: https://www.magicleap.com/software-license-agreement-ml2
+// Terms and conditions applicable to third-party materials accompanying this distribution may also be found in the top-level NOTICE file appearing herein.
+// %COPYRIGHT_END%
+// ---------------------------------------------------------------------
+// %BANNER_END%
+
+namespace MagicLeap.Android.NDK.NativeWindow
+{
+ using System;
+
+ public struct ANativeWindow : INullablePointer
+ {
+ private IntPtr value;
+
+ public bool IsNull => value == IntPtr.Zero;
+
+ void IDisposable.Dispose() {}
+ }
+}
diff --git a/Runtime/APIs/AndroidCamera/NDK/ANativeWindow.cs.meta b/Runtime/APIs/AndroidCamera/NDK/ANativeWindow.cs.meta
new file mode 100644
index 0000000..4ed00f4
--- /dev/null
+++ b/Runtime/APIs/AndroidCamera/NDK/ANativeWindow.cs.meta
@@ -0,0 +1,3 @@
+fileFormatVersion: 2
+guid: e117b549bccf41e89ed08a3a038714bb
+timeCreated: 1702941335
\ No newline at end of file
diff --git a/Editor/SettingsProviders/Preferences/MagicLeapEditorPreferencesProvider.cs b/Runtime/APIs/AndroidCamera/NDK/CameraConstants.cs
similarity index 62%
rename from Editor/SettingsProviders/Preferences/MagicLeapEditorPreferencesProvider.cs
rename to Runtime/APIs/AndroidCamera/NDK/CameraConstants.cs
index 327d1ed..66701b1 100644
--- a/Editor/SettingsProviders/Preferences/MagicLeapEditorPreferencesProvider.cs
+++ b/Runtime/APIs/AndroidCamera/NDK/CameraConstants.cs
@@ -1,20 +1,19 @@
-// %BANNER_BEGIN%
+// %BANNER_BEGIN%
// ---------------------------------------------------------------------
// %COPYRIGHT_BEGIN%
-// Copyright (c) 2022 Magic Leap, Inc. All Rights Reserved.
+// Copyright (c) (2018-2022) Magic Leap, Inc. All Rights Reserved.
// Use of this file is governed by the Software License Agreement, located here: https://www.magicleap.com/software-license-agreement-ml2
// Terms and conditions applicable to third-party materials accompanying this distribution may also be found in the top-level NOTICE file appearing herein.
// %COPYRIGHT_END%
// ---------------------------------------------------------------------
// %BANNER_END%
-using System;
-
-namespace UnityEditor.XR.MagicLeap
+namespace MagicLeap.Android.NDK.Camera
{
- public static class MagicLeapEditorPreferencesProvider
+ internal static class CameraConstants
{
- public static Action OnSDKPathChanged = null;
- public static Action OnZIPathChanged = null;
+ public const string kCameraLibrary = "libcamera2ndk";
+ public const int kErrorBase = -10000;
+
}
}
diff --git a/Runtime/APIs/AndroidCamera/NDK/CameraConstants.cs.meta b/Runtime/APIs/AndroidCamera/NDK/CameraConstants.cs.meta
new file mode 100644
index 0000000..135d9eb
--- /dev/null
+++ b/Runtime/APIs/AndroidCamera/NDK/CameraConstants.cs.meta
@@ -0,0 +1,3 @@
+fileFormatVersion: 2
+guid: 73a57973ceca4640b6c56016872460fc
+timeCreated: 1702929311
\ No newline at end of file
diff --git a/Runtime/APIs/AndroidCamera/NDK/CameraNativeBindings.cs b/Runtime/APIs/AndroidCamera/NDK/CameraNativeBindings.cs
new file mode 100644
index 0000000..b9089e0
--- /dev/null
+++ b/Runtime/APIs/AndroidCamera/NDK/CameraNativeBindings.cs
@@ -0,0 +1,190 @@
+// %BANNER_BEGIN%
+// ---------------------------------------------------------------------
+// %COPYRIGHT_BEGIN%
+// Copyright (c) (2018-2022) Magic Leap, Inc. All Rights Reserved.
+// Use of this file is governed by the Software License Agreement, located here: https://www.magicleap.com/software-license-agreement-ml2
+// Terms and conditions applicable to third-party materials accompanying this distribution may also be found in the top-level NOTICE file appearing herein.
+// %COPYRIGHT_END%
+// ---------------------------------------------------------------------
+// %BANNER_END%
+
+namespace MagicLeap.Android.NDK.Camera
+{
+ using System.Runtime.InteropServices;
+ using NativeWindow;
+
+ using static CameraConstants;
+
+ internal static class CameraNativeBindings
+ {
+ [DllImport(kCameraLibrary, CallingConvention = CallingConvention.Cdecl)]
+ public static extern unsafe CameraStatus ACameraCaptureSession_capture(ACameraCaptureSession session,
+ ACameraCaptureSession.CaptureCallbacks* callbacks, int numRequests, ACaptureRequest* requests,
+ out int outCaptureSequenceId);
+
+ [DllImport(kCameraLibrary, CallingConvention = CallingConvention.Cdecl)]
+ public static extern void ACameraCaptureSession_close(ACameraCaptureSession session);
+
+ [DllImport(kCameraLibrary, CallingConvention = CallingConvention.Cdecl)]
+ public static extern CameraStatus ACameraCaptureSession_getDevice(ACameraCaptureSession session,
+ out ACameraDevice outDevice);
+
+ [DllImport(kCameraLibrary, CallingConvention = CallingConvention.Cdecl)]
+ public static extern unsafe CameraStatus ACameraCaptureSession_setRepeatingRequest(ACameraCaptureSession session,
+ ACameraCaptureSession.CaptureCallbacks* callbacks, int numRequests, ACaptureRequest* requests,
+ out int outSequenceId);
+
+ [DllImport(kCameraLibrary, CallingConvention = CallingConvention.Cdecl)]
+ public static extern CameraStatus ACameraCaptureSession_stopRepeating(ACameraCaptureSession session);
+
+ [DllImport(kCameraLibrary, CallingConvention = CallingConvention.Cdecl)]
+ public static extern CameraStatus ACameraDevice_close(ACameraDevice camera);
+
+ [DllImport(kCameraLibrary, CallingConvention = CallingConvention.Cdecl)]
+ public static extern CameraStatus ACameraDevice_createCaptureRequest(ACameraDevice device,
+ ACaptureRequest.Template template, out ACaptureRequest outRequest);
+
+ [DllImport(kCameraLibrary, CallingConvention = CallingConvention.Cdecl)]
+ public static extern unsafe CameraStatus ACameraDevice_createCaptureSession(ACameraDevice device,
+ ACaptureSessionOutputContainer container, ACameraCaptureSession.StateCallbacks* callbacks,
+ out ACameraCaptureSession session);
+
+ [DllImport(kCameraLibrary, CallingConvention = CallingConvention.Cdecl)]
+ public static extern unsafe byte* ACameraDevice_getId(ACameraDevice camera);
+
+ [DllImport(kCameraLibrary, CallingConvention = CallingConvention.Cdecl)]
+ public static extern ACameraManager ACameraManager_create();
+
+ [DllImport(kCameraLibrary, CallingConvention = CallingConvention.Cdecl)]
+ public static extern void ACameraManager_delete(ACameraManager manager);
+
+ [DllImport(kCameraLibrary, CallingConvention = CallingConvention.Cdecl)]
+ public static extern void ACameraManager_deleteCameraIdList(ACameraIdList list);
+
+ [DllImport(kCameraLibrary, CallingConvention = CallingConvention.Cdecl, CharSet = CharSet.Ansi)]
+ public static extern CameraStatus ACameraManager_getCameraCharacteristics(ACameraManager manager,
+ string cameraId, out ACameraMetadata metadata);
+
+ [DllImport(kCameraLibrary, CallingConvention = CallingConvention.Cdecl,
+ EntryPoint = "ACameraManager_getCameraCharacteristics")]
+ public static extern unsafe CameraStatus ACameraManager_getCameraCharacteristicsNonAlloc(ACameraManager manager,
+ byte* cameraId, out ACameraMetadata metadata);
+
+ [DllImport(kCameraLibrary, CallingConvention = CallingConvention.Cdecl)]
+ public static extern CameraStatus ACameraManager_getCameraIdList(ACameraManager manager, out ACameraIdList list);
+
+ [DllImport(kCameraLibrary, CallingConvention = CallingConvention.Cdecl, CharSet = CharSet.Ansi)]
+ public static extern unsafe CameraStatus ACameraManager_openCamera(ACameraManager manager, string cameraId,
+ ACameraDevice.StateCallbacks* callbacks, out ACameraDevice camera);
+
+ [DllImport(kCameraLibrary, CallingConvention = CallingConvention.Cdecl, EntryPoint = "ACameraManager_openCamera")]
+ public static extern unsafe CameraStatus ACameraManager_openCameraNonAlloc(ACameraManager manager, byte* cameraId,
+ ACameraDevice.StateCallbacks* callbacks, out ACameraDevice camera);
+
+        // NB:
+        // The following two signatures use 'ref' for the callback parameter instead of a pointer.
+        // In other signatures the callback parameter is optional, so a bare pointer is required; here,
+        // the call is meaningless without valid callbacks, so we can enforce that at the C# level by
+        // using a 'ref' param (which translates to a pointer at the ABI level anyway).
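+        //
+        // A minimal call-site sketch (hypothetical 'manager' handle and delegate
+        // wiring assumed; not a complete implementation):
+        //
+        //   var callbacks = new ACameraManager.AvailabilityCallbacks { /* delegates */ };
+        //   ACameraManager_registerAvailabilityCallback(manager, ref callbacks)
+        //       .CheckReturnValueAndThrow();
+        //   // ... later ...
+        //   ACameraManager_unregisterAvailabilityCallback(manager, ref callbacks)
+        //       .CheckReturnValueAndThrow();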
+ [DllImport(kCameraLibrary, CallingConvention = CallingConvention.Cdecl)]
+ public static extern CameraStatus ACameraManager_registerAvailabilityCallback(ACameraManager manager,
+ ref ACameraManager.AvailabilityCallbacks callbacks);
+
+ [DllImport(kCameraLibrary, CallingConvention = CallingConvention.Cdecl)]
+ public static extern CameraStatus ACameraManager_unregisterAvailabilityCallback(ACameraManager manager,
+ ref ACameraManager.AvailabilityCallbacks callbacks);
+
+ [DllImport(kCameraLibrary, CallingConvention = CallingConvention.Cdecl)]
+ public static extern CameraStatus ACameraMetadata_getConstEntry(ACameraMetadata metadata, uint tag,
+ out ACameraMetadata.Entry.ReadOnly outEntry);
+
+ [DllImport(kCameraLibrary, CallingConvention = CallingConvention.Cdecl)]
+ public static extern unsafe CameraStatus ACameraMetadata_getAllTags(ACameraMetadata metadata, out int outNumEntries,
+ out uint* outTags);
+
+ [DllImport(kCameraLibrary, CallingConvention = CallingConvention.Cdecl)]
+ public static extern ACameraMetadata ACameraMetadata_copy(ACameraMetadata metadata);
+
+ [DllImport(kCameraLibrary, CallingConvention = CallingConvention.Cdecl)]
+ public static extern void ACameraMetadata_free(ACameraMetadata metadata);
+
+ [DllImport(kCameraLibrary, CallingConvention = CallingConvention.Cdecl)]
+ public static extern CameraStatus ACameraOutputTarget_create(ANativeWindow window,
+ out ACameraOutputTarget outTarget);
+
+ [DllImport(kCameraLibrary, CallingConvention = CallingConvention.Cdecl)]
+ public static extern void ACameraOutputTarget_free(ACameraOutputTarget target);
+
+ [DllImport(kCameraLibrary, CallingConvention = CallingConvention.Cdecl)]
+ public static extern CameraStatus ACaptureRequest_addTarget(ACaptureRequest request, ACameraOutputTarget target);
+
+ [DllImport(kCameraLibrary, CallingConvention = CallingConvention.Cdecl)]
+ public static extern void ACaptureRequest_free(ACaptureRequest request);
+
+ [DllImport(kCameraLibrary, CallingConvention = CallingConvention.Cdecl)]
+ public static extern unsafe CameraStatus ACaptureRequest_getAllTags(ACaptureRequest request, ref int numTags,
+ uint** tags);
+
+ [DllImport(kCameraLibrary, CallingConvention = CallingConvention.Cdecl)]
+ public static extern CameraStatus ACaptureRequest_getConstEntry(ACaptureRequest request, uint tag,
+ out ACameraMetadata.Entry.ReadOnly outEntry);
+
+ [DllImport(kCameraLibrary, CallingConvention = CallingConvention.Cdecl)]
+ public static extern unsafe CameraStatus ACaptureRequest_getUserContext(ACaptureRequest request,
+ out System.IntPtr context);
+
+ [DllImport(kCameraLibrary, CallingConvention = CallingConvention.Cdecl)]
+ public static extern CameraStatus ACaptureRequest_removeTarget(ACaptureRequest request,
+ ACameraOutputTarget target);
+
+ [DllImport(kCameraLibrary, CallingConvention = CallingConvention.Cdecl)]
+ public static extern unsafe CameraStatus ACaptureRequest_setEntry_u8(ACaptureRequest request, uint tag,
+ uint count, byte* data);
+
+ [DllImport(kCameraLibrary, CallingConvention = CallingConvention.Cdecl)]
+ public static extern unsafe CameraStatus ACaptureRequest_setEntry_i32(ACaptureRequest request, uint tag,
+ uint count, int* data);
+
+ [DllImport(kCameraLibrary, CallingConvention = CallingConvention.Cdecl)]
+ public static extern unsafe CameraStatus ACaptureRequest_setEntry_float(ACaptureRequest request, uint tag,
+ uint count, float* data);
+
+ [DllImport(kCameraLibrary, CallingConvention = CallingConvention.Cdecl)]
+ public static extern unsafe CameraStatus ACaptureRequest_setEntry_i64(ACaptureRequest request, uint tag,
+ uint count, long* data);
+
+ [DllImport(kCameraLibrary, CallingConvention = CallingConvention.Cdecl)]
+ public static extern unsafe CameraStatus ACaptureRequest_setEntry_double(ACaptureRequest request, uint tag,
+ uint count, double* data);
+
+ [DllImport(kCameraLibrary, CallingConvention = CallingConvention.Cdecl)]
+ public static extern unsafe CameraStatus ACaptureRequest_setEntry_rational(ACaptureRequest request, uint tag,
+ uint count, ACameraMetadata.Rational* data);
+
+ [DllImport(kCameraLibrary, CallingConvention = CallingConvention.Cdecl)]
+ public static extern unsafe CameraStatus ACaptureRequest_setUserContext(ACaptureRequest request,
+ System.IntPtr context);
+
+ [DllImport(kCameraLibrary, CallingConvention = CallingConvention.Cdecl)]
+ public static extern CameraStatus ACaptureSessionOutput_create(ANativeWindow window,
+ out ACaptureSessionOutput output);
+
+ [DllImport(kCameraLibrary, CallingConvention = CallingConvention.Cdecl)]
+ public static extern void ACaptureSessionOutput_free(ACaptureSessionOutput output);
+
+ [DllImport(kCameraLibrary, CallingConvention = CallingConvention.Cdecl)]
+ public static extern CameraStatus ACaptureSessionOutputContainer_add(
+ ACaptureSessionOutputContainer container, ACaptureSessionOutput output);
+
+ [DllImport(kCameraLibrary, CallingConvention = CallingConvention.Cdecl)]
+ public static extern CameraStatus ACaptureSessionOutputContainer_create(
+ out ACaptureSessionOutputContainer outContainer);
+
+ [DllImport(kCameraLibrary, CallingConvention = CallingConvention.Cdecl)]
+ public static extern void ACaptureSessionOutputContainer_free(ACaptureSessionOutputContainer container);
+
+ [DllImport(kCameraLibrary, CallingConvention = CallingConvention.Cdecl)]
+ public static extern CameraStatus ACaptureSessionOutputContainer_remove(
+ ACaptureSessionOutputContainer container, ACaptureSessionOutput output);
+ }
+}
diff --git a/Runtime/APIs/AndroidCamera/NDK/CameraNativeBindings.cs.meta b/Runtime/APIs/AndroidCamera/NDK/CameraNativeBindings.cs.meta
new file mode 100644
index 0000000..bf7ae96
--- /dev/null
+++ b/Runtime/APIs/AndroidCamera/NDK/CameraNativeBindings.cs.meta
@@ -0,0 +1,3 @@
+fileFormatVersion: 2
+guid: 91049dd67eb7415ba358a953d1dd1bf6
+timeCreated: 1703033237
\ No newline at end of file
diff --git a/Runtime/APIs/AndroidCamera/NDK/CameraStatus.cs b/Runtime/APIs/AndroidCamera/NDK/CameraStatus.cs
new file mode 100644
index 0000000..a3aeb77
--- /dev/null
+++ b/Runtime/APIs/AndroidCamera/NDK/CameraStatus.cs
@@ -0,0 +1,91 @@
+namespace MagicLeap.Android.NDK.Camera
+{
+ using System;
+ using System.Diagnostics;
+
+ using static CameraConstants;
+
+ internal enum CameraStatus
+ {
+ ///
+ /// Camera operation has succeeded.
+ ///
+ Ok = 0,
+ ///
+ /// Camera operation has failed due to an unspecified cause.
+ ///
+ UnknownError = kErrorBase,
+ ///
+ /// Camera operation has failed due to an invalid parameter being passed to the method.
+ ///
+ InvalidParameter = kErrorBase - 1,
+ ///
+ /// Camera operation has failed because the camera device has been closed,
+ /// possibly because a higher-priority client has taken ownership of the camera device.
+ ///
+ CameraDisconnected = kErrorBase - 2,
+ ///
+ /// Camera operation has failed due to insufficient memory.
+ ///
+ NotEnoughMemory = kErrorBase - 3,
+ ///
+ /// Camera operation has failed due to the requested metadata tag cannot be found
+ /// in input {@link ACameraMetadata} or {@link ACaptureRequest}.
+ ///
+ MetadataNotFound = kErrorBase - 4,
+ ///
+ /// Camera operation has failed and the camera device has encountered a fatal error
+ /// and needs to be re-opened before it can be used again.
+ ///
+ FatalDeviceError = kErrorBase - 5,
+        ///
+        /// Camera operation has failed and the camera service has encountered a fatal error.
+        /// The Android device may need to be shut down and restarted to restore camera
+        /// function, or there may be a persistent hardware problem.
+        /// An attempt at recovery may be possible by closing the ACameraDevice and the
+        /// ACameraManager, and trying to acquire all resources again from scratch.
+        ///
+        FatalServiceError = kErrorBase - 6,
+ ///
+        /// The {@link ACameraCaptureSession} has been closed and cannot perform any operation other than {@link ACameraCaptureSession_close}.
+ ///
+ SessionClosed = kErrorBase - 7,
+ ///
+        /// Camera operation has failed due to an invalid internal operation. Usually this is due to a low-level problem that may resolve itself on retry.
+ ///
+ InvalidOperation = kErrorBase - 8,
+ ///
+ /// Camera device does not support the stream configuration provided by application in {@link ACameraDevice_createCaptureSession} or {@link ACameraDevice_isSessionConfigurationSupported}.
+ ///
+ StreamConfigurationFailure = kErrorBase - 9,
+ ///
+ /// Camera device is being used by another higher priority camera API client.
+ ///
+ CameraInUse = kErrorBase - 10,
+ ///
+ /// The system-wide limit for number of open cameras or camera resources has been reached, and more camera devices cannot be opened until previous instances are closed.
+ ///
+ MaximumCamerasInUse = kErrorBase - 11,
+ ///
+ /// The camera is disabled due to a device policy, and cannot be opened.
+ ///
+ CameraDisabled = kErrorBase - 12,
+ ///
+        /// The application does not have permission to open the camera.
+ ///
+ PermissionDenied = kErrorBase - 13,
+ ///
+ /// The operation is not supported by the camera device.
+ ///
+ UnsupportedOperation = kErrorBase - 14,
+ }
+
+ internal static class CameraStatusExtensions
+ {
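+        /// <summary>
+        /// Development-build guard for native return values; compiled out of release
+        /// builds via the [Conditional] attribute below. A minimal usage sketch,
+        /// assuming a valid 'manager' handle:
+        /// <code>
+        /// CameraNativeBindings.ACameraManager_getCameraIdList(manager, out var list)
+        ///     .CheckReturnValueAndThrow();
+        /// </code>
+        /// </summary>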
+ [Conditional("DEVELOPMENT_BUILD")]
+ public static void CheckReturnValueAndThrow(this CameraStatus actual, CameraStatus expected = CameraStatus.Ok)
+ {
+ if (expected != actual)
+ throw new Exception($"Unexpected result from native call. Expected: '{expected}', Actual: '{actual}'");
+ }
+ }
+}
diff --git a/Runtime/APIs/AndroidCamera/NDK/CameraStatus.cs.meta b/Runtime/APIs/AndroidCamera/NDK/CameraStatus.cs.meta
new file mode 100644
index 0000000..96799a1
--- /dev/null
+++ b/Runtime/APIs/AndroidCamera/NDK/CameraStatus.cs.meta
@@ -0,0 +1,3 @@
+fileFormatVersion: 2
+guid: be5efa23af8847358111c150f3c61932
+timeCreated: 1702929576
\ No newline at end of file
diff --git a/Runtime/APIs/AndroidCamera/NDK/INullablePointer.cs b/Runtime/APIs/AndroidCamera/NDK/INullablePointer.cs
new file mode 100644
index 0000000..8fdac2c
--- /dev/null
+++ b/Runtime/APIs/AndroidCamera/NDK/INullablePointer.cs
@@ -0,0 +1,35 @@
+namespace MagicLeap
+{
+ using System;
+ using System.Diagnostics;
+
+ public interface INullablePointer : IDisposable
+ {
+ bool IsNull { get; }
+ }
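+
+    // Implementations are typically thin wrappers over a native handle; ANativeWindow
+    // elsewhere in this package is a minimal example of the pattern:
+    //
+    //   public struct ANativeWindow : INullablePointer
+    //   {
+    //       private IntPtr value;
+    //       public bool IsNull => value == IntPtr.Zero;
+    //       void IDisposable.Dispose() {}
+    //   }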
+
+ internal interface IReferenceCountedPointer
+ {
+ void Acquire();
+ void Release();
+ }
+
+ public static class NullablePointerExtensions
+ {
+ [Conditional("DEVELOPMENT_BUILD"), Conditional("ENABLE_UNITY_COLLECTIONS_CHECKS")]
+        public static void CheckNullAndThrow<T>(this T self) where T : unmanaged, INullablePointer
+ {
+ if (self.IsNull)
+ throw new NullReferenceException();
+ }
+ }
+
+ internal static class ReferenceCounterPtrExtensions
+ {
+        public static T Clone<T>(this T self) where T : unmanaged, IReferenceCountedPointer
+ {
+ self.Acquire();
+ return self;
+ }
+ }
+}
diff --git a/Runtime/APIs/AndroidCamera/NDK/INullablePointer.cs.meta b/Runtime/APIs/AndroidCamera/NDK/INullablePointer.cs.meta
new file mode 100644
index 0000000..ea1186c
--- /dev/null
+++ b/Runtime/APIs/AndroidCamera/NDK/INullablePointer.cs.meta
@@ -0,0 +1,3 @@
+fileFormatVersion: 2
+guid: 1a41064e7e114146a450781f3eef0e0e
+timeCreated: 1703190315
\ No newline at end of file
diff --git a/Runtime/APIs/AndroidCamera/NDK/MediaConstants.cs b/Runtime/APIs/AndroidCamera/NDK/MediaConstants.cs
new file mode 100644
index 0000000..e748391
--- /dev/null
+++ b/Runtime/APIs/AndroidCamera/NDK/MediaConstants.cs
@@ -0,0 +1,10 @@
+namespace MagicLeap.Android.NDK.Media
+{
+ internal static class MediaConstants
+ {
+ public const int kMediaErrorBase = -10000;
+ public const int kMediaDRMErrorBase = -20000;
+ public const int kMediaImageReaderErrorBase = -30000;
+ public const string kMediaLibrary = "libmediandk";
+ }
+}
diff --git a/Runtime/APIs/AndroidCamera/NDK/MediaConstants.cs.meta b/Runtime/APIs/AndroidCamera/NDK/MediaConstants.cs.meta
new file mode 100644
index 0000000..25c80e3
--- /dev/null
+++ b/Runtime/APIs/AndroidCamera/NDK/MediaConstants.cs.meta
@@ -0,0 +1,3 @@
+fileFormatVersion: 2
+guid: 0da7c5e4b5f44fe58b9712baf97fed7b
+timeCreated: 1702937421
\ No newline at end of file
diff --git a/Runtime/APIs/AndroidCamera/NDK/MediaFormat.cs b/Runtime/APIs/AndroidCamera/NDK/MediaFormat.cs
new file mode 100644
index 0000000..0c42f15
--- /dev/null
+++ b/Runtime/APIs/AndroidCamera/NDK/MediaFormat.cs
@@ -0,0 +1,565 @@
+using System;
+
+namespace MagicLeap.Android.NDK.Media
+{
+    public enum MediaFormat
+    {
+        /// <summary>
+        /// 32 bits RGBA format, 8 bits for each of the four channels.
+        /// </summary>
+        /// <remarks>
+        /// @see AImage
+        /// @see AImageReader
+        /// @see AHardwareBuffer
+        /// </remarks>
+        Rgba8888 = 0x1,
+
+        /// <summary>
+        /// 32 bits RGBX format, 8 bits for each of the four channels. The values
+        /// of the alpha channel bits are ignored (image is assumed to be opaque).
+        /// </summary>
+        Rgbx8888 = 0x2,
+
+        /// <summary>
+        /// 24 bits RGB format, 8 bits for each of the three channels.
+        /// </summary>
+        Rgb888 = 0x3,
+
+        /// <summary>
+        /// 16 bits packed RGB format, 5 bits red, 6 bits green, 5 bits blue.
+        /// </summary>
+        Rgb565 = 0x4,
+
+        /// <summary>
+        /// 64 bits RGBA format, 16-bit half-precision float for each channel.
+        /// </summary>
+        Rgba_Fp16 = 0x16,
+
+        /// <summary>
+        /// Android YUV 420 format.
+        /// </summary>
+        /// <remarks>
+        /// <para>This format is a generic YCbCr format, capable of describing any 4:2:0
+        /// chroma-subsampled planar or semiplanar buffer (but not fully interleaved),
+        /// with 8 bits per color sample.</para>
+        /// <para>Images in this format are always represented by three separate buffers
+        /// of data, one for each color plane. Additional information always
+        /// accompanies the buffers, describing the row stride and the pixel stride
+        /// for each plane.</para>
+        /// <para>The order of planes is guaranteed such that plane #0 is always Y, plane #1 is
+        /// always U (Cb), and plane #2 is always V (Cr).</para>
+        /// <para>The Y-plane is guaranteed not to be interleaved with the U/V planes
+        /// (in particular, pixel stride is always 1 in {@link AImage_getPlanePixelStride}).</para>
+        /// <para>The U/V planes are guaranteed to have the same row stride and pixel stride;
+        /// that is, the values returned by {@link AImage_getPlaneRowStride} and by
+        /// {@link AImage_getPlanePixelStride} are each the same for both planes.</para>
+        /// <para>For example, the {@link AImage} object can provide data in this format from a
+        /// {@link ACameraDevice} through an {@link AImageReader} object.</para>
+        /// <para>This format is always supported as an output format for the android Camera2 NDK API.</para>
+        /// </remarks>
+        Yuv_420_888 = 0x23,
+
+        /// <summary>
+        /// Compressed JPEG format.
+        /// </summary>
+        /// <remarks>
+        /// This format is always supported as an output format for the android Camera2 NDK API.
+        /// </remarks>
+        Jpeg = 0x100,
+
+        /// <summary>
+        /// 16 bits per pixel raw camera sensor image format, usually representing a single-channel
+        /// Bayer-mosaic image.
+        /// </summary>
+        /// <remarks>
+        /// The layout of the color mosaic, the maximum and minimum encoding
+        /// values of the raw pixel data, the color space of the image, and all other
+        /// needed information to interpret a raw sensor image must be queried from
+        /// the {@link ACameraDevice} which produced the image.
+        /// </remarks>
+        Raw16 = 0x20,
+
+        /// <summary>
+        /// Private raw camera sensor image format, a single channel image with
+        /// implementation-dependent pixel layout.
+        /// </summary>
+        /// <remarks>
+        /// AIMAGE_FORMAT_RAW_PRIVATE is a format for unprocessed raw image buffers coming from an
+        /// image sensor. The actual structure of buffers of this format is implementation-dependent.
+        /// </remarks>
+        RawPrivate = 0x24,
+
+        /// <summary>
+        /// Android 10-bit raw format.
+        /// </summary>
+        /// <remarks>
+        /// <para>This is a single-plane, 10-bit per pixel, densely packed (in each row),
+        /// unprocessed format, usually representing raw Bayer-pattern images coming
+        /// from an image sensor.</para>
+        /// <para>Starting from the first pixel of each row, each 4 consecutive pixels are packed
+        /// into 5 bytes (40 bits): each of the first 4 bytes contains the top 8 bits of one
+        /// pixel, and the fifth byte contains the 2 least significant bits of all 4 pixels.</para>
+        /// <para>This format assumes a width multiple of 4 pixels and an even height, with
+        /// size = row stride * height, where the row stride is in bytes, not pixels.</para>
+        /// <para>Since this is a densely packed format, the pixel stride is always 0. The
+        /// application must use the pixel data layout described above to access each row.
+        /// When the row stride is equal to (width * (10 / 8)), there will be no padding
+        /// bytes at the end of each row; when it is larger, padding bytes will be present.</para>
+        /// <para>For example, the {@link AImage} object can provide data in this format from a
+        /// {@link ACameraDevice} (if supported) through an {@link AImageReader} object.
+        /// The number of planes returned by {@link AImage_getNumberOfPlanes} will always be 1.
+        /// The pixel stride is undefined ({@link AImage_getPlanePixelStride} will return
+        /// {@link AMEDIA_ERROR_UNSUPPORTED}), and {@link AImage_getPlaneRowStride} describes the
+        /// vertical neighboring pixel distance (in bytes) between adjacent rows.</para>
+        /// </remarks>
+        Raw10 = 0x25,
+
+        /// <summary>
+        /// Android 12-bit raw format.
+        /// </summary>
+        /// <remarks>
+        /// <para>This is a single-plane, 12-bit per pixel, densely packed (in each row),
+        /// unprocessed format, usually representing raw Bayer-pattern images coming
+        /// from an image sensor.</para>
+        /// <para>Starting from the first pixel of each row, each two consecutive pixels are packed
+        /// into 3 bytes (24 bits): the first and second bytes contain the top 8 bits of the first
+        /// and second pixel, and the third byte contains the 4 least significant bits of both
+        /// pixels.</para>
+        /// <para>This format assumes a width multiple of 4 pixels and an even height, with
+        /// size = row stride * height, where the row stride is in bytes, not pixels.</para>
+        /// <para>Since this is a densely packed format, the pixel stride is always 0; padding
+        /// behavior and plane semantics match the 10-bit raw format described above.</para>
+        /// </remarks>
+        Raw12 = 0x26,
+
+        /// <summary>
+        /// Android dense depth image format.
+        /// </summary>
+        /// <remarks>
+        /// <para>Each pixel is 16 bits, representing a depth ranging measurement from a depth camera
+        /// or similar sensor. The 16-bit sample consists of a confidence value and the actual
+        /// ranging measurement.</para>
+        /// <para>The confidence value is an estimate of correctness for this sample. It is encoded in the
+        /// 3 most significant bits of the sample, with a value of 0 representing 100% confidence, a
+        /// value of 1 representing 0% confidence, a value of 2 representing 1/7, a value of 3
+        /// representing 2/7, and so on.</para>
+        /// <para>As an example, the following sample extracts the range and confidence from the first pixel
+        /// of a DEPTH16-format {@link AImage}, and converts the confidence to a floating-point value
+        /// between 0 and 1.f inclusive, with 1.f representing maximum confidence:
+        ///
+        ///     uint16_t* data;
+        ///     int dataLength;
+        ///     AImage_getPlaneData(image, 0, (uint8_t**)&data, &dataLength);
+        ///     uint16_t depthSample = data[0];
+        ///     uint16_t depthRange = (depthSample & 0x1FFF);
+        ///     uint16_t depthConfidence = ((depthSample >> 13) & 0x7);
+        ///     float depthPercentage = depthConfidence == 0 ? 1.f : (depthConfidence - 1) / 7.f;
+        /// </para>
+        /// <para>When produced by a camera, the units for the range are millimeters.</para>
+        /// </remarks>
+        Depth16 = 0x44363159,
+
+        /// <summary>
+        /// Android sparse depth point cloud format.
+        /// </summary>
+        /// <remarks>
+        /// <para>A variable-length list of 3D points plus a confidence value, with each point represented
+        /// by four floats; first the X, Y, Z position coordinates, and then the confidence value.</para>
+        /// <para>The number of points is ((size of the buffer in bytes) / 16).</para>
+        /// <para>The coordinate system and units of the position values depend on the source of the point
+        /// cloud data. The confidence value is between 0.f and 1.f, inclusive, with 0 representing 0%
+        /// confidence and 1.f representing 100% confidence in the measured position values.</para>
+        /// <para>As an example, the following code extracts the first depth point in a DEPTH_POINT_CLOUD
+        /// format {@link AImage}:
+        ///
+        ///     float* data;
+        ///     int dataLength;
+        ///     AImage_getPlaneData(image, 0, (uint8_t**)&data, &dataLength);
+        ///     float x = data[0];
+        ///     float y = data[1];
+        ///     float z = data[2];
+        ///     float confidence = data[3];
+        /// </para>
+        /// </remarks>
+        DepthPointCloud = 0x101,
+
+        /// <summary>
+        /// Android private opaque image format.
+        /// </summary>
+        /// <remarks>
+        /// <para>The choices of the actual format and pixel data layout are entirely up to the
+        /// device-specific and framework internal implementations, and may vary depending on use cases
+        /// even for the same device. Also note that the contents of these buffers are not directly
+        /// accessible to the application.</para>
+        /// <para>When an {@link AImage} of this format is obtained from an {@link AImageReader}, the
+        /// {@link AImage_getNumberOfPlanes} method will return zero.</para>
+        /// </remarks>
+        Private = 0x22,
+
+        /// <summary>
+        /// Android Y8 format.
+        /// </summary>
+        /// <remarks>
+        /// <para>Y8 is a planar format comprised of a WxH Y plane only, with each pixel
+        /// being represented by 8 bits.</para>
+        /// <para>This format assumes an even width, an even height, and a horizontal stride
+        /// multiple of 16 pixels, with size = stride * height.</para>
+        /// <para>For example, the {@link AImage} object can provide data in this format from a
+        /// {@link ACameraDevice} (if supported) through an {@link AImageReader} object.
+        /// The number of planes returned by {@link AImage_getNumberOfPlanes} will always be 1.
+        /// The pixel stride returned by {@link AImage_getPlanePixelStride} will always be 1, and
+        /// {@link AImage_getPlaneRowStride} describes the vertical neighboring pixel distance
+        /// (in bytes) between adjacent rows.</para>
+        /// </remarks>
+        Y8 = 0x20203859,
+
+        /// <summary>
+        /// Compressed HEIC format.
+        /// </summary>
+        Heic = 0x48454946,
+
+        /// <summary>
+        /// Depth augmented compressed JPEG format.
+        /// </summary>
+        /// <remarks>
+        /// JPEG compressed main image along with XMP embedded depth metadata
+        /// following ISO 16684-1:2011(E).
+        /// </remarks>
+        DepthJpeg = 0x69656963,
+    }
+
+ public static class MediaFormatExtensions
+ {
+ public static int BytesPerPixel(this MediaFormat format)
+ {
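+            // NOTE: for compressed and multi-planar formats (e.g. Jpeg, Yuv_420_888)
+            // this is the size of one sample in the underlying buffer rather than a
+            // true per-pixel size.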
+ switch (format)
+ {
+ case MediaFormat.Rgba8888:
+ return 4;
+ case MediaFormat.Rgbx8888:
+ return 4;
+ case MediaFormat.Rgb888:
+ return 3;
+ case MediaFormat.Rgb565:
+ return 2;
+                case MediaFormat.Rgba_Fp16:
+                    return 8;
+ case MediaFormat.Yuv_420_888:
+ return 1;
+ case MediaFormat.Jpeg:
+ return 1;
+ case MediaFormat.Raw16:
+ return 2;
+ case MediaFormat.RawPrivate:
+ return 1;
+ case MediaFormat.Raw10:
+ return 2;
+ case MediaFormat.Raw12:
+ return 2;
+ case MediaFormat.Depth16:
+ return 2;
+ case MediaFormat.DepthPointCloud:
+ return 16;
+ case MediaFormat.Private:
+ return 1;
+ case MediaFormat.Y8:
+ return 1;
+ case MediaFormat.Heic:
+ return 1;
+ case MediaFormat.DepthJpeg:
+ return 1;
+ default:
+ throw new ArgumentOutOfRangeException(nameof(format), format, null);
+ }
+ }
+
+ public static bool IsMultiPlanar(this MediaFormat format)
+ {
+ switch (format)
+ {
+ case MediaFormat.Y8:
+ case MediaFormat.Yuv_420_888:
+ return true;
+ default:
+ return false;
+ }
+ }
+
+ public static string ToNameOrHexValue(this MediaFormat format)
+ {
+ var name = Enum.GetName(typeof(MediaFormat), format);
+ return string.IsNullOrEmpty(name)
+ ? $"0x{(int)format:X}"
+ : name;
+ }
+ }
+}
diff --git a/Runtime/APIs/AndroidCamera/NDK/MediaFormat.cs.meta b/Runtime/APIs/AndroidCamera/NDK/MediaFormat.cs.meta
new file mode 100644
index 0000000..2b44afd
--- /dev/null
+++ b/Runtime/APIs/AndroidCamera/NDK/MediaFormat.cs.meta
@@ -0,0 +1,3 @@
+fileFormatVersion: 2
+guid: 9961d49fb493493690c555b13aed1abb
+timeCreated: 1702938521
\ No newline at end of file
diff --git a/Runtime/APIs/AndroidCamera/NDK/MediaNativeBindings.cs b/Runtime/APIs/AndroidCamera/NDK/MediaNativeBindings.cs
new file mode 100644
index 0000000..12a0d9d
--- /dev/null
+++ b/Runtime/APIs/AndroidCamera/NDK/MediaNativeBindings.cs
@@ -0,0 +1,78 @@
+using System.Runtime.InteropServices;
+using MagicLeap.Android.NDK.NativeWindow;
+
+namespace MagicLeap.Android.NDK.Media
+{
+ internal static class MediaNativeBindings
+ {
+ [DllImport(MediaConstants.kMediaLibrary, CallingConvention = CallingConvention.Cdecl)]
+ public static extern void AImage_delete(AImage image);
+
+ [DllImport(MediaConstants.kMediaLibrary, CallingConvention = CallingConvention.Cdecl)]
+ public static extern MediaStatus AImage_getCropRect(AImage image, out ARect cropRect);
+
+ [DllImport(MediaConstants.kMediaLibrary, CallingConvention = CallingConvention.Cdecl)]
+ public static extern MediaStatus AImage_getFormat(AImage image, out MediaFormat format);
+
+ [DllImport(MediaConstants.kMediaLibrary, CallingConvention = CallingConvention.Cdecl)]
+ public static extern MediaStatus AImage_getHardwareBuffer(AImage image, out AHardwareBuffer buffer);
+
+ [DllImport(MediaConstants.kMediaLibrary, CallingConvention = CallingConvention.Cdecl)]
+ public static extern MediaStatus AImage_getHeight(AImage image, out int height);
+
+ [DllImport(MediaConstants.kMediaLibrary, CallingConvention = CallingConvention.Cdecl)]
+ public static extern MediaStatus AImage_getNumberOfPlanes(AImage image, out int numPlanes);
+
+ [DllImport(MediaConstants.kMediaLibrary, CallingConvention = CallingConvention.Cdecl)]
+ public static extern unsafe MediaStatus AImage_getPlaneData(AImage image, int planeIdx, out byte* data,
+ out int dataLength);
+
+ [DllImport(MediaConstants.kMediaLibrary, CallingConvention = CallingConvention.Cdecl)]
+ public static extern MediaStatus AImage_getPlanePixelStride(AImage image, int planeIdx, out int pixelStride);
+
+ [DllImport(MediaConstants.kMediaLibrary, CallingConvention = CallingConvention.Cdecl)]
+ public static extern MediaStatus AImage_getPlaneRowStride(AImage image, int planeIdx, out int rowStride);
+
+ [DllImport(MediaConstants.kMediaLibrary, CallingConvention = CallingConvention.Cdecl)]
+ public static extern MediaStatus AImage_getTimestamp(AImage image, out long timestampNs);
+
+ [DllImport(MediaConstants.kMediaLibrary, CallingConvention = CallingConvention.Cdecl)]
+ public static extern MediaStatus AImage_getWidth(AImage image, out int width);
+
+ [DllImport(MediaConstants.kMediaLibrary, CallingConvention = CallingConvention.Cdecl)]
+ public static extern MediaStatus AImageReader_acquireLatestImage(AImageReader reader, out AImage image);
+
+ [DllImport(MediaConstants.kMediaLibrary, CallingConvention = CallingConvention.Cdecl)]
+ public static extern MediaStatus AImageReader_acquireNextImage(AImageReader reader, out AImage image);
+
+ [DllImport(MediaConstants.kMediaLibrary, CallingConvention = CallingConvention.Cdecl)]
+ public static extern void AImageReader_delete(AImageReader reader);
+
+ [DllImport(MediaConstants.kMediaLibrary, CallingConvention = CallingConvention.Cdecl)]
+ public static extern MediaStatus AImageReader_getFormat(AImageReader reader, out MediaFormat format);
+
+ [DllImport(MediaConstants.kMediaLibrary, CallingConvention = CallingConvention.Cdecl)]
+ public static extern MediaStatus AImageReader_getHeight(AImageReader reader, out int height);
+
+ [DllImport(MediaConstants.kMediaLibrary, CallingConvention = CallingConvention.Cdecl)]
+ public static extern MediaStatus AImageReader_getMaxImages(AImageReader reader, out int maxImages);
+
+ [DllImport(MediaConstants.kMediaLibrary, CallingConvention = CallingConvention.Cdecl)]
+ public static extern MediaStatus AImageReader_getWidth(AImageReader reader, out int width);
+
+ [DllImport(MediaConstants.kMediaLibrary, CallingConvention = CallingConvention.Cdecl)]
+ public static extern MediaStatus AImageReader_getWindow(AImageReader reader, out ANativeWindow window);
+
+ [DllImport(MediaConstants.kMediaLibrary, CallingConvention = CallingConvention.Cdecl)]
+ public static extern MediaStatus AImageReader_new(int width, int height, MediaFormat format, int maxImages,
+ out AImageReader reader);
+
+ [DllImport(MediaConstants.kMediaLibrary, CallingConvention = CallingConvention.Cdecl)]
+ public static extern MediaStatus AImageReader_newWithUsage(int width, int height, MediaFormat format,
+ AHardwareBuffer.Usage usage, int maxImages, out AImageReader reader);
+
+ [DllImport(MediaConstants.kMediaLibrary, CallingConvention = CallingConvention.Cdecl)]
+ public static extern MediaStatus AImageReader_setImageListener(AImageReader reader,
+ ref AImageReader.ImageListener callback);
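+
+        // A rough sketch of the reader lifecycle these bindings support (error
+        // handling elided; 'width', 'height' and 'maxImages' are assumed inputs):
+        //
+        //   AImageReader_new(width, height, MediaFormat.Yuv_420_888, maxImages, out var reader)
+        //       .CheckReturnValueAndThrow();
+        //   if (AImageReader_acquireLatestImage(reader, out var image) == MediaStatus.Ok)
+        //   {
+        //       // ... read plane data via AImage_getPlaneData ...
+        //       AImage_delete(image);
+        //   }
+        //   AImageReader_delete(reader);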
+ }
+}
diff --git a/Runtime/APIs/AndroidCamera/NDK/MediaNativeBindings.cs.meta b/Runtime/APIs/AndroidCamera/NDK/MediaNativeBindings.cs.meta
new file mode 100644
index 0000000..c86491f
--- /dev/null
+++ b/Runtime/APIs/AndroidCamera/NDK/MediaNativeBindings.cs.meta
@@ -0,0 +1,3 @@
+fileFormatVersion: 2
+guid: db4394fb571f45de9a96e9572914d358
+timeCreated: 1703193271
\ No newline at end of file
diff --git a/Runtime/APIs/AndroidCamera/NDK/MediaStatus.cs b/Runtime/APIs/AndroidCamera/NDK/MediaStatus.cs
new file mode 100644
index 0000000..e236ead
--- /dev/null
+++ b/Runtime/APIs/AndroidCamera/NDK/MediaStatus.cs
@@ -0,0 +1,116 @@
+
+
+namespace MagicLeap.Android.NDK.Media
+{
+ using System;
+ using System.Diagnostics;
+ using static MediaConstants;
+
+ internal enum MediaStatus
+ {
+ ///
+ /// The requested media operation completed successfully.
+ ///
+ Ok = 0,
+
+ ///
+        /// Indicates that a required resource could not be allocated.
+ ///
+ InsufficientResource = 1100,
+
+ ///
+        /// Indicates that the resource manager reclaimed the media resource used by the codec. When this error occurs, the codec must be released, as it has moved to a terminal state.
+ ///
+ Reclaimed = 1101,
+
+ ///
+ /// The called media function failed with an unknown error.
+ ///
+ Unknown = kMediaErrorBase,
+
+ ///
+ /// The input media data is corrupt or incomplete.
+ ///
+ Malformed = kMediaErrorBase - 1,
+
+ ///
+ /// The required operation or media formats are not supported.
+ ///
+ Unsupported = kMediaErrorBase - 2,
+
+ ///
+ /// An invalid (or already closed) object is used in the function call.
+ ///
+ InvalidObject = kMediaErrorBase - 3,
+
+ ///
+        /// At least one invalid parameter was passed to the function.
+ ///
+ InvalidParameter = kMediaErrorBase - 4,
+
+ ///
+ /// The media object is not in the right state for the required operation.
+ ///
+ InvalidOperation = kMediaErrorBase - 5,
+
+ ///
+        /// The media stream ended while the requested operation was being processed.
+ ///
+ EndOfStream = kMediaErrorBase - 6,
+
+ ///
+        /// An error occurred while the media object was performing an IO operation.
+ ///
+ IOError = kMediaErrorBase - 7,
+
+ ///
+        /// The requested operation would need to block (on I/O or otherwise), but blocking is not enabled.
+ ///
+ WouldBlock = kMediaErrorBase - 8,
+
+ DRMNotProvisioned = kMediaDRMErrorBase - 1,
+ DRMResourceBusy = kMediaDRMErrorBase - 2,
+ DRMDeviceRemoved = kMediaDRMErrorBase - 3,
+ DRMShortBuffer = kMediaDRMErrorBase - 4,
+ DRMSessionNotOpened = kMediaDRMErrorBase - 5,
+ DRMTamperDetected = kMediaDRMErrorBase - 6,
+ DRMVerifyFailed = kMediaDRMErrorBase - 7,
+ DRMNeedKey = kMediaDRMErrorBase - 8,
+ DRMLicenseExpired = kMediaDRMErrorBase - 9,
+
+ ///
+ /// There are no more image buffers to read/write image data.
+ ///
+ ImageReaderNoBufferAvailable = kMediaImageReaderErrorBase - 1,
+
+ ///
+ /// The AImage object has used up the allowed maximum image buffers.
+ ///
+ ImageReaderMaxImagesAcquired = kMediaImageReaderErrorBase - 2,
+
+ ///
+ /// The required image buffer could not be locked to read.
+ ///
+ ImageReaderCannotLockImage = kMediaImageReaderErrorBase - 3,
+
+ ///
+ /// The media data or buffer could not be unlocked.
+ ///
+ ImageReaderCannotUnlockImage = kMediaImageReaderErrorBase - 4,
+
+ ///
+ /// The media/buffer needs to be locked to perform the required operation.
+ ///
+ ImageReaderImageNotLocked = kMediaImageReaderErrorBase - 5,
+ }
+
+ internal static class MediaStatusExtensions
+ {
+ [Conditional("DEVELOPMENT_BUILD")]
+ public static void CheckReturnValueAndThrow(this MediaStatus actual, MediaStatus expected = MediaStatus.Ok)
+ {
+ if (expected != actual)
+ throw new Exception($"Unexpected result from native call. Expected: '{expected}', Actual: '{actual}'");
+ }
+ }
+}
diff --git a/Runtime/APIs/AndroidCamera/NDK/MediaStatus.cs.meta b/Runtime/APIs/AndroidCamera/NDK/MediaStatus.cs.meta
new file mode 100644
index 0000000..93aa525
--- /dev/null
+++ b/Runtime/APIs/AndroidCamera/NDK/MediaStatus.cs.meta
@@ -0,0 +1,3 @@
+fileFormatVersion: 2
+guid: ec0c357e6e544d5bb5f24667d2a1348d
+timeCreated: 1702937666
\ No newline at end of file
diff --git a/Runtime/APIs/AndroidCamera/NDK/NativeWindowNativeBindings.cs b/Runtime/APIs/AndroidCamera/NDK/NativeWindowNativeBindings.cs
new file mode 100644
index 0000000..4f4d60b
--- /dev/null
+++ b/Runtime/APIs/AndroidCamera/NDK/NativeWindowNativeBindings.cs
@@ -0,0 +1,54 @@
+namespace MagicLeap.Android.NDK.NativeWindow
+{
+ using System;
+ using System.Runtime.InteropServices;
+
+ internal static class NativeWindowNativeBindings
+ {
+ public const string kLibraryName = "libnativewindow";
+
+ [DllImport(kLibraryName, CallingConvention = CallingConvention.Cdecl)]
+ public static extern void AHardwareBuffer_acquire(AHardwareBuffer buffer);
+
+ [DllImport(kLibraryName, CallingConvention = CallingConvention.Cdecl)]
+ public static extern int AHardwareBuffer_allocate(ref AHardwareBuffer.Description description,
+ out AHardwareBuffer buffer);
+
+ [DllImport(kLibraryName, CallingConvention = CallingConvention.Cdecl)]
+ public static extern void AHardwareBuffer_describe(AHardwareBuffer buffer,
+ out AHardwareBuffer.Description description);
+
+#if ANDROID_API_LEVEL_31_OR_NEWER
+ [DllImport(kLibraryName, CallingConvention = CallingConvention.Cdecl)]
+ public static extern int AHardwareBuffer_getId(AHardwareBuffer buffer, out ulong outId);
+#endif
+
+ [DllImport(kLibraryName, CallingConvention = CallingConvention.Cdecl)]
+ public static extern int AHardwareBuffer_isSupported(ref AHardwareBuffer.Description description);
+
+ [DllImport(kLibraryName, CallingConvention = CallingConvention.Cdecl)]
+ public static extern unsafe int AHardwareBuffer_lock(AHardwareBuffer buffer, AHardwareBuffer.Usage usage,
+ int fence, ARect* rect, out void* virtualAddress);
+
+ [DllImport(kLibraryName, CallingConvention = CallingConvention.Cdecl)]
+ public static extern unsafe int AHardwareBuffer_lockAndGetInfo(AHardwareBuffer buffer,
+ AHardwareBuffer.Usage usage, int fence, ARect* rect, out void* outVirtualAddress, out int outBytesPerPixel,
+ out int outBytesPerStride);
+
+ [DllImport(kLibraryName, CallingConvention = CallingConvention.Cdecl)]
+ public static extern unsafe int AHardwareBuffer_lockPlanes(AHardwareBuffer buffer, AHardwareBuffer.Usage usage,
+ int fenceFd, ARect* rect, out AHardwareBuffer.Planes outPlanes);
+
+ [DllImport(kLibraryName, CallingConvention = CallingConvention.Cdecl)]
+ public static extern int AHardwareBuffer_recvHandleFromUnixSocket(int socketFd, out AHardwareBuffer buffer);
+
+ [DllImport(kLibraryName, CallingConvention = CallingConvention.Cdecl)]
+ public static extern void AHardwareBuffer_release(AHardwareBuffer buffer);
+
+ [DllImport(kLibraryName, CallingConvention = CallingConvention.Cdecl)]
+ public static extern int AHardwareBuffer_sendHandleToUnixSocket(AHardwareBuffer buffer, int socketFd);
+
+ [DllImport(kLibraryName, CallingConvention = CallingConvention.Cdecl)]
+ public static extern unsafe int AHardwareBuffer_unlock(AHardwareBuffer buffer, int* fence);
+ }
+}
diff --git a/Runtime/APIs/AndroidCamera/NDK/NativeWindowNativeBindings.cs.meta b/Runtime/APIs/AndroidCamera/NDK/NativeWindowNativeBindings.cs.meta
new file mode 100644
index 0000000..491cb37
--- /dev/null
+++ b/Runtime/APIs/AndroidCamera/NDK/NativeWindowNativeBindings.cs.meta
@@ -0,0 +1,3 @@
+fileFormatVersion: 2
+guid: 769c6da4f6d44b10a3bb57e69f7ae315
+timeCreated: 1703195762
\ No newline at end of file
diff --git a/Runtime/APIs/AndroidCamera/NativeImage.cs b/Runtime/APIs/AndroidCamera/NativeImage.cs
new file mode 100644
index 0000000..7e5397e
--- /dev/null
+++ b/Runtime/APIs/AndroidCamera/NativeImage.cs
@@ -0,0 +1,551 @@
+// %BANNER_BEGIN%
+// ---------------------------------------------------------------------
+// %COPYRIGHT_BEGIN%
+// Copyright (c) (2018-2022) Magic Leap, Inc. All Rights Reserved.
+// Use of this file is governed by the Software License Agreement, located here: https://www.magicleap.com/software-license-agreement-ml2
+// Terms and conditions applicable to third-party materials accompanying this distribution may also be found in the top-level NOTICE file appearing herein.
+// %COPYRIGHT_END%
+// ---------------------------------------------------------------------
+// %BANNER_END%
+
+namespace MagicLeap.Android
+{
+ using System;
+ using System.Diagnostics;
+ using System.Runtime.CompilerServices;
+ using Unity.Collections;
+ using Unity.Collections.LowLevel.Unsafe;
+ using Unity.Jobs;
+ using UnityEngine.XR.MagicLeap.Unsafe;
+ using NDK.Media;
+ using NDK.NativeWindow;
+
+ [NativeContainer]
+ [NativeContainerIsReadOnly]
+ public struct NativeImage : IDisposable
+ {
+ private struct DisposeData
+ {
+ public UnsafeImage image;
+#if ENABLE_UNITY_COLLECTIONS_CHECKS
+ public AtomicSafetyHandle m_Safety;
+#endif
+ }
+
+ private struct DisposeJob : IJob
+ {
+ public DisposeData Data;
+
+ public void Execute()
+ {
+ if (Data.image.IsCreated)
+ Data.image.Dispose();
+ }
+ }
+
+ private UnsafeImage image;
+ private IntPtr requestHandle;
+
+#if ENABLE_UNITY_COLLECTIONS_CHECKS
+ private AtomicSafetyHandle m_Safety;
+
+        internal static readonly int s_staticSafetyId = AtomicSafetyHandle.NewStaticSafetyId<NativeImage>();
+#endif
+
+ public ImageDimensions Dimensions => new ImageDimensions(Width, Height);
+
+ public bool IsCreated => image.IsCreated;
+
+ public bool IsMultiplanar => image.IsMultiPlanar;
+
+ public int Height
+ {
+ get
+ {
+ CheckedCreatedAndThrow();
+ return image.Height;
+ }
+ }
+
+ public int NumberOfPlanes
+ {
+ get
+ {
+ CheckedCreatedAndThrow();
+ return image.NumberOfPlanes;
+ }
+ }
+
+
+ internal IntPtr RequestHandle
+ {
+ get
+ {
+ CheckedCreatedAndThrow();
+ return requestHandle;
+ }
+ }
+
+ public long Timestamp
+ {
+ get
+ {
+ CheckedCreatedAndThrow();
+ return image.Timestamp;
+ }
+ }
+
+ public int Width
+ {
+ get
+ {
+ CheckedCreatedAndThrow();
+ return image.Width;
+ }
+ }
+
+ internal NativeImage(UnsafeImage image, IntPtr requestHandle)
+ {
+#if ENABLE_UNITY_COLLECTIONS_CHECKS
+ m_Safety = AtomicSafetyHandle.Create();
+ AtomicSafetyHandle.SetAllowReadOrWriteAccess(m_Safety, false);
+ AtomicSafetyHandle.SetAllowSecondaryVersionWriting(m_Safety, false);
+
+ AtomicSafetyHandle.SetStaticSafetyId(ref m_Safety, s_staticSafetyId);
+#endif
+ this.image = image;
+ this.requestHandle = requestHandle;
+ }
+
+ public void Dispose()
+ {
+#if ENABLE_UNITY_COLLECTIONS_CHECKS
+ AtomicSafetyHandle.CheckDeallocateAndThrow(m_Safety);
+ AtomicSafetyHandle.Release(m_Safety);
+#endif
+ if (!image.IsCreated)
+ return;
+
+ image.Dispose();
+ requestHandle = IntPtr.Zero;
+ }
+
+ public JobHandle Dispose(JobHandle depends)
+ {
+#if ENABLE_UNITY_COLLECTIONS_CHECKS
+ AtomicSafetyHandle.CheckDeallocateAndThrow(m_Safety);
+#endif
+ if (!image.IsCreated)
+ return default;
+
+ var handle = new DisposeJob
+ {
+ Data = new DisposeData
+ {
+ image = image,
+#if ENABLE_UNITY_COLLECTIONS_CHECKS
+ m_Safety = m_Safety,
+#endif
+ },
+ }.Schedule(depends);
+
+#if ENABLE_UNITY_COLLECTIONS_CHECKS
+ AtomicSafetyHandle.Release(m_Safety);
+#endif
+ image = default;
+
+ return handle;
+ }
+
+ private struct UnlockBufferJob : IJob
+ {
+ public UnsafeImage Source;
+
+ public void Execute()
+ {
+ Source.TryUnlockData();
+ }
+ }
+
+ public bool TryGetData(out NativePlane outData, out ImageDimensions outDim)
+ {
+ CheckedCreatedAndThrow();
+ outData = default;
+ outDim = new ImageDimensions(Width, Height);
+
+ if (!TryLockDataInternal())
+ return false;
+
+ if (!TryGetDataInternal(out outData, out outDim))
+ return false;
+
+ if (!TryUnlockDataInternal())
+ return false;
+
+ return true;
+ }
+
+ internal bool TryGetDataInternal(out NativePlane outData, out ImageDimensions outDim)
+ {
+ outDim = new ImageDimensions(Width, Height);
+ outData = default;
+
+ if (!image.TryGetPlane(0, out var plane))
+ {
+ UnityEngine.Debug.LogError("Failed to acquire data");
+ return false;
+ }
+
+ outData = new NativePlane(plane);
+
+ return true;
+ }
+
+ public bool TryGetYUVData(out NativeYUVPlanes outYuvPlanes)
+ {
+ CheckedCreatedAndThrow();
+ outYuvPlanes = default;
+ if (!TryLockDataInternal())
+ return false;
+
+ if (!TryGetYUVDataUnchecked(out outYuvPlanes))
+ return false;
+
+ if (!TryUnlockDataInternal())
+ {
+ // TODO :: log the failure, but should the whole call fail, too?
+ return false;
+ }
+
+ return true;
+ }
+
+ internal bool TryGetYUVDataUnchecked(out NativeYUVPlanes outYuvPlanes)
+ {
+ outYuvPlanes = default;
+
+ NativePlane y = default, u = default, v = default;
+ for (var i = 0; i < image.NumberOfPlanes; ++i)
+ {
+ if (!image.TryGetPlane(i, out var plane))
+ return false;
+ switch (i)
+ {
+ case 0:
+ y = new NativePlane(plane);
+ break;
+ case 1:
+ u = new NativePlane(plane);
+ break;
+ case 2:
+ v = new NativePlane(plane);
+ break;
+ }
+ }
+
+ outYuvPlanes = new NativeYUVPlanes(Dimensions, NumberOfPlanes, y, u, v);
+
+ return true;
+ }
+
+ internal bool TryLockDataInternal() => image.TryLockData();
+
+ internal bool TryUnlockDataInternal() => image.TryUnlockData();
+
+ internal JobHandle TryUnlockDataAsyncInternal(JobHandle dependency)
+ => new UnlockBufferJob { Source = image }.Schedule(dependency);
+
+ [Conditional("DEVELOPMENT_BUILD")]
+ internal void CheckedCreatedAndThrow()
+ {
+ if (!IsCreated)
+ throw new NullReferenceException();
+ }
+ }
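+
+    // A rough usage sketch (assumes 'img' is a NativeImage acquired elsewhere):
+    //
+    //   if (img.TryGetYUVData(out var yuv))
+    //   {
+    //       // e.g. copy the luma plane out for CPU-side processing
+    //       using var y = yuv.YPlane.CopyToNativeArray(Allocator.Temp);
+    //   }
+    //   img.Dispose();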
+
+ public static class NativeImageSourceUnsafeUtility
+ {
+ public static bool TryGetYUVDataAssumeLocked(this NativeImage self, out NativeYUVPlanes outYuvPlanes)
+ {
+ self.CheckedCreatedAndThrow();
+ return self.TryGetYUVDataUnchecked(out outYuvPlanes);
+ }
+
+ public static bool TryLockData(this NativeImage self)
+ {
+ self.CheckedCreatedAndThrow();
+ return self.TryLockDataInternal();
+ }
+
+ public static bool TryUnlockData(this NativeImage self)
+ {
+ self.CheckedCreatedAndThrow();
+ return self.TryUnlockDataInternal();
+ }
+
+ public static JobHandle TryUnlockDataAsync(this NativeImage self, JobHandle dependency)
+ {
+ self.CheckedCreatedAndThrow();
+ return self.TryUnlockDataAsyncInternal(dependency);
+ }
+ }
+
+ [NativeContainer]
+ [NativeContainerIsReadOnly]
+ public struct NativePlane
+ {
+ private UnsafePlane _Plane;
+
+ public bool IsValid => _Plane.IsValid;
+
+ internal UnsafePlane UnsafePlane => _Plane;
+
+ public int PixelStride
+ {
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ get => _Plane.PixelStride;
+ }
+
+ public int RowStride
+ {
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ get => _Plane.RowStride;
+ }
+
+ internal NativePlane(UnsafePlane plane)
+ {
+ _Plane = plane;
+ }
+
+        public NativeArray<byte> CopyToNativeArray(Allocator allocator)
+            => _Plane.CopyToNativeArray(allocator);
+
+ internal string DebugRepr(string name = null)
+ {
+ name = string.IsNullOrEmpty(name) ? nameof(NativePlane) : name;
+ return _Plane.DebugRepr(name);
+ }
+
+        [MethodImpl(MethodImplOptions.AggressiveInlining)]
+        public T GetDataAtOffset<T>(int offset) where T : unmanaged
+            => _Plane.GetDataAtOffset<T>(offset);
+ }
+
+
+ public readonly struct NativeYUVPlanes
+ {
+ public readonly NativePlane YPlane;
+ public readonly NativePlane UPlane;
+ public readonly NativePlane VPlane;
+
+ public readonly ImageDimensions Dimensions;
+ public readonly int PlaneCount;
+
+ internal NativeYUVPlanes(ImageDimensions dim, int planeCount, NativePlane y, NativePlane u, NativePlane v)
+ {
+ Dimensions = dim;
+ PlaneCount = planeCount;
+ YPlane = y;
+ UPlane = u;
+ VPlane = v;
+ }
+ }
+
+ internal static unsafe class AHardwareBufferImageBackend
+ {
+ private struct Backend
+ {
+ public AHardwareBuffer buffer;
+ public ARect cropRect;
+ public AHardwareBuffer.Description desc;
+ public AHardwareBuffer.Planes planes;
+ public void* data;
+ public long timestamp;
+ public int numPlanes;
+ public byte isLocked;
+ }
+
+ public static UnsafeImage Create(AImage image, Allocator allocator)
+ {
+ var backend = UnsafeUtilityEx.CallocTracked(allocator, new Backend
+ {
+ buffer = image.HardwareBuffer,
+ cropRect = image.CropRect,
+ desc = image.HardwareBuffer.description,
+ timestamp = image.TimestampInNanoseconds,
+ numPlanes = image.NumberOfPlanes,
+ isLocked = 0,
+ });
+
+ backend->buffer.Acquire();
+
+ var vtable = new UnsafeImage.VTable
+ {
+ data = backend,
+ destroy = &Destroy,
+ getCropRect = &GetCropRect,
+ getFormat = &GetFormat,
+ getHeight = &GetHeight,
+ getNumPlanes = &GetNumPlanes,
+ getTimestamp = &GetTimestamp,
+ getWidth = &GetWidth,
+ tryGetPlane = &TryGetPlane,
+ tryLockData = &TryLockData,
+ tryUnlockData = &TryUnlockData,
+ };
+
+ return new UnsafeImage(vtable, allocator);
+ }
+
+ private static void Destroy(void* obj) => CheckPointerAndThrow(obj)->buffer.ReleaseUnchecked();
+
+ private static ARect GetCropRect(void* obj) => CheckPointerAndThrow(obj)->cropRect;
+
+ private static MediaFormat GetFormat(void* obj) => CheckPointerAndThrow(obj)->desc.Format.ToMediaFormat();
+
+ private static int GetHeight(void* obj) => (int)CheckPointerAndThrow(obj)->desc.Height;
+
+ private static int GetNumPlanes(void* obj) => CheckPointerAndThrow(obj)->numPlanes;
+
+ private static long GetTimestamp(void* obj) => CheckPointerAndThrow(obj)->timestamp;
+
+ private static int GetWidth(void* obj) => (int)CheckPointerAndThrow(obj)->desc.Width;
+
+ private static bool TryGetPlane(void* obj, int planeIdx, UnsafePlane* outPlane)
+ {
+ if (outPlane == null)
+ return false;
+
+ var backend = CheckPointerAndThrow(obj);
+ bool didLock = false;
+ if (backend->isLocked == 0)
+ {
+ if (!TryLockData(obj))
+ return false;
+ didLock = true;
+ }
+
+ if (backend->desc.HasMultiplePlanes)
+ {
+ var plane = backend->planes.PlaneFor(planeIdx);
+ *outPlane = new UnsafePlane(plane, backend->desc);
+ }
+ else
+ {
+ *outPlane = new UnsafePlane((byte*)backend->data, new DataLayout(backend->desc));
+ }
+
+ if (didLock)
+ {
+ if (!TryUnlockData(obj))
+ return false;
+ }
+
+ return true;
+ }
+
+ private static bool TryLockData(void* obj)
+ {
+ var backend = CheckPointerAndThrow(obj);
+ var success = (backend->desc.HasMultiplePlanes)
+ ? backend->buffer.TryLockPlaneData(AHardwareBuffer.Usage.CpuReadRarely, out backend->planes)
+ : backend->buffer.TryLockData(AHardwareBuffer.Usage.CpuReadRarely, out backend->data);
+ if (success)
+ backend->isLocked = 1;
+ return success;
+ }
+
+ private static bool TryUnlockData(void* obj)
+ {
+ var backend = CheckPointerAndThrow(obj);
+ var success = backend->buffer.TryUnlockData();
+ backend->isLocked = 0;
+ return success;
+ }
+
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ private static Backend* CheckPointerAndThrow(void* obj)
+ {
+#if DEVELOPMENT_BUILD || ENABLE_UNITY_COLLECTIONS_CHECKS
+ if (obj == null)
+ throw new NullReferenceException();
+#endif
+            return UnsafeUtilityEx.PunTypeUnchecked<Backend>(obj);
+ }
+ }
+
+ internal static unsafe class AImageBackend
+ {
+ private struct Backend
+ {
+ public AImage image;
+ }
+
+ public static UnsafeImage Create(AImage image, Allocator allocator)
+ {
+ var backend = UnsafeUtilityEx.CallocTracked(allocator, new Backend
+ {
+ image = image,
+ });
+
+ var vtable = new UnsafeImage.VTable
+ {
+ data = backend,
+ destroy = &Destroy,
+ getCropRect = &GetCropRect,
+ getFormat = &GetFormat,
+ getHeight = &GetHeight,
+ getNumPlanes = &GetNumPlanes,
+ getTimestamp = &GetTimestamp,
+ getWidth = &GetWidth,
+ tryGetPlane = &TryGetPlane,
+ tryLockData = null,
+ tryUnlockData = null,
+ };
+
+ return new UnsafeImage(vtable, allocator);
+ }
+
+ private static void Destroy(void* obj) => CheckPointerAndThrow(obj)->image.Dispose();
+
+ private static ARect GetCropRect(void* obj)
+ => CheckPointerAndThrow(obj)->image.CropRect;
+
+ private static MediaFormat GetFormat(void* obj) => CheckPointerAndThrow(obj)->image.Format;
+
+ private static int GetHeight(void* obj) => CheckPointerAndThrow(obj)->image.Height;
+
+ private static int GetNumPlanes(void* obj) => CheckPointerAndThrow(obj)->image.NumberOfPlanes;
+
+ private static long GetTimestamp(void* obj) => CheckPointerAndThrow(obj)->image.TimestampInNanoseconds;
+
+ private static int GetWidth(void* obj) => (int)CheckPointerAndThrow(obj)->image.Width;
+
+ private static bool TryGetPlane(void* obj, int planeIdx, UnsafePlane* outPlane)
+ {
+ if (outPlane == null)
+ return false;
+
+ var image = CheckPointerAndThrow(obj)->image;
+
+ var success = image.TryGetPlaneData(planeIdx, out var dataLength, out var pixelStride, out var rowStride, out var buffer);
+ if (!success)
+ return false;
+
+ var layout = new DataLayout(image.Width, image.Height, pixelStride, rowStride,
+ image.Format.BytesPerPixel());
+ *outPlane = new UnsafePlane(buffer, layout);
+ return true;
+ }
+
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ private static Backend* CheckPointerAndThrow(void* obj)
+ {
+#if DEVELOPMENT_BUILD || ENABLE_UNITY_COLLECTIONS_CHECKS
+ if (obj == null)
+ throw new NullReferenceException();
+#endif
+            return UnsafeUtilityEx.PunTypeUnchecked<Backend>(obj);
+ }
+ }
+}
diff --git a/Runtime/APIs/AndroidCamera/NativeImage.cs.meta b/Runtime/APIs/AndroidCamera/NativeImage.cs.meta
new file mode 100644
index 0000000..2aaa756
--- /dev/null
+++ b/Runtime/APIs/AndroidCamera/NativeImage.cs.meta
@@ -0,0 +1,3 @@
+fileFormatVersion: 2
+guid: cce46511ea2f4d7c9b5f123804fa5f69
+timeCreated: 1703215847
\ No newline at end of file
diff --git a/Runtime/APIs/AndroidCamera/RequestTemplate.cs b/Runtime/APIs/AndroidCamera/RequestTemplate.cs
new file mode 100644
index 0000000..c3ef198
--- /dev/null
+++ b/Runtime/APIs/AndroidCamera/RequestTemplate.cs
@@ -0,0 +1,28 @@
+
+
+namespace MagicLeap.Android
+{
+ using System;
+ using NDK.Camera;
+
+ public enum RequestTemplate
+ {
+ Invalid = 0,
+ Preview = 1,
+ StillCapture = 2,
+ Record = 3,
+ VideoSnapshot = 4,
+ ZeroShutterLag = 5,
+ Manual = 6,
+ }
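+
+    // NOTE: these values intentionally mirror the NDK's camera request template
+    // constants, which is what allows the direct cast in ToNDKTemplate below.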
+
+ internal static class RequestTemplateExtensions
+ {
+ public static ACaptureRequest.Template ToNDKTemplate(this RequestTemplate self)
+ {
+ if (self == RequestTemplate.Invalid)
+ throw new InvalidOperationException();
+ return (ACaptureRequest.Template)self;
+ }
+ }
+}
diff --git a/Runtime/APIs/AndroidCamera/RequestTemplate.cs.meta b/Runtime/APIs/AndroidCamera/RequestTemplate.cs.meta
new file mode 100644
index 0000000..7e354de
--- /dev/null
+++ b/Runtime/APIs/AndroidCamera/RequestTemplate.cs.meta
@@ -0,0 +1,3 @@
+fileFormatVersion: 2
+guid: fa0a7ba9c0a14d47a38b52d6ac60c57a
+timeCreated: 1704241026
\ No newline at end of file
diff --git a/Runtime/APIs/AndroidCamera/SequenceId.cs b/Runtime/APIs/AndroidCamera/SequenceId.cs
new file mode 100644
index 0000000..5daa486
--- /dev/null
+++ b/Runtime/APIs/AndroidCamera/SequenceId.cs
@@ -0,0 +1,41 @@
+namespace MagicLeap.Android
+{
+ using System;
+
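+    // Wraps a capture-sequence id with an explicit "unset" state; the implicit conversions
+    // below let it stand in for the raw int ids used by the NDK camera APIs.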
+    public struct SequenceId : IEquatable<SequenceId>
+ {
+ private int id;
+ private bool isSet;
+
+ public bool IsSet => isSet;
+ public int Value => IsSet ? id : throw new InvalidOperationException("SequenceId is empty");
+
+ public void Clear()
+ => isSet = false;
+
+ public override bool Equals(object obj)
+ {
+ if (obj is SequenceId seq)
+ return Equals(seq);
+ return false;
+ }
+
+ public bool Equals(SequenceId other)
+ {
+            // Two ids compare equal when both are unset, or both are set to the same value.
+            if (isSet != other.isSet)
+                return false;
+            return !isSet || id == other.id;
+ }
+
+ public override int GetHashCode()
+ => isSet ? id.GetHashCode() : -1;
+
+ public static implicit operator SequenceId(int value)
+ => new SequenceId
+ {
+ id = value,
+ isSet = true
+ };
+
+ public static implicit operator int(SequenceId value)
+ => value.Value;
+ }
+}
diff --git a/Runtime/APIs/AndroidCamera/SequenceId.cs.meta b/Runtime/APIs/AndroidCamera/SequenceId.cs.meta
new file mode 100644
index 0000000..a8193c3
--- /dev/null
+++ b/Runtime/APIs/AndroidCamera/SequenceId.cs.meta
@@ -0,0 +1,3 @@
+fileFormatVersion: 2
+guid: 9ed2b5ee6a5b4f85b22ef960896a275b
+timeCreated: 1704410766
\ No newline at end of file
diff --git a/Runtime/APIs/AndroidCamera/StreamConfiguration.cs b/Runtime/APIs/AndroidCamera/StreamConfiguration.cs
new file mode 100644
index 0000000..0916acd
--- /dev/null
+++ b/Runtime/APIs/AndroidCamera/StreamConfiguration.cs
@@ -0,0 +1,139 @@
+
+namespace MagicLeap.Android
+{
+ using System;
+ using NDK.Media;
+ using Unity.Collections;
+
+    public readonly struct StreamConfiguration : IComparable<StreamConfiguration>, IEquatable<StreamConfiguration>
+ {
+ public readonly MediaFormat Format;
+ public readonly int Height;
+ public readonly int Width;
+
+ public float AspectRatio => Width / (float)Height;
+
+ public static readonly StreamConfiguration Invalid = default;
+
+ public bool IsValid => this != Invalid;
+
+ internal int Size => Width * Height;
+
+ public StreamConfiguration(MediaFormat format, int width, int height)
+ {
+ Format = format;
+ Height = height;
+ Width = width;
+ }
+
+ public int CompareTo(StreamConfiguration other)
+ => Size.CompareTo(other.Size);
+
+ public bool Equals(StreamConfiguration other)
+ => Format == other.Format && Height == other.Height && Width == other.Width;
+
+ public override bool Equals(object obj)
+ {
+ if (obj == null)
+ return false;
+
+ if (obj is StreamConfiguration other)
+ return Equals(other);
+ else
+ return false;
+ }
+
+ public override int GetHashCode()
+ => HashCode.Combine(Format, Height, Width);
+
+ public override string ToString()
+ => $"{nameof(StreamConfiguration)}[Format = {Format.ToNameOrHexValue()}, Width = {Width}, Height = {Height}]";
+
+ public static bool operator >(StreamConfiguration lhs, StreamConfiguration rhs)
+ => lhs.CompareTo(rhs) > 0;
+
+ public static bool operator <(StreamConfiguration lhs, StreamConfiguration rhs)
+ => lhs.CompareTo(rhs) < 0;
+
+ public static bool operator ==(StreamConfiguration lhs, StreamConfiguration rhs)
+ => lhs.Equals(rhs);
+
+ public static bool operator !=(StreamConfiguration lhs, StreamConfiguration rhs)
+ => !lhs.Equals(rhs);
+
+ }
+
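+    // Usage sketch (hypothetical format name; the actual MediaFormat member may differ):
+    //
+    //   if (configs.TryFindLargestConfigurationMatchingFormat(MediaFormat.Yuv420888, out var best))
+    //       Debug.Log($"Capturing at {best.Width}x{best.Height} ({best.Format})");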
+ public static class StreamConfigurationUtility
+ {
+        public static bool Contains(this NativeArray<StreamConfiguration> configs, in StreamConfiguration config)
+ {
+ CheckValidListAndThrow(configs);
+
+ foreach (var cfg in configs)
+ {
+ if (cfg == config)
+ return true;
+ }
+
+ return false;
+ }
+
+        public static bool TryFindLargestConfigurationMatchingFormat(this NativeArray<StreamConfiguration> configs, MediaFormat format, out StreamConfiguration outConfig)
+ {
+ CheckValidListAndThrow(configs);
+ outConfig = default;
+
+ foreach (var cfg in configs)
+ outConfig.UpdateIfLarger(cfg, format);
+
+ return outConfig.IsValid;
+ }
+
+        public static bool TryFindSmallestConfigurationMatchingFormat(this NativeArray<StreamConfiguration> configs, MediaFormat format, out StreamConfiguration outConfig)
+ {
+ CheckValidListAndThrow(configs);
+ outConfig = default;
+
+ foreach (var cfg in configs)
+ outConfig.UpdateIfSmaller(cfg, format);
+
+ return outConfig.IsValid;
+ }
+
+ internal static void UpdateIfLarger(ref this StreamConfiguration config, in StreamConfiguration other, MediaFormat format, bool requireFormatMatch = true)
+ {
+ if (!config.IsValid)
+ {
+ if (other.Format == format)
+ config = other;
+ return;
+ }
+
+ if (requireFormatMatch && config.Format != other.Format)
+ return;
+ if (other > config)
+ config = other;
+ }
+
+ internal static void UpdateIfSmaller(ref this StreamConfiguration config, in StreamConfiguration other, MediaFormat format, bool requireFormatMatch = true)
+ {
+ if (!config.IsValid)
+ {
+ if (other.Format == format)
+ config = other;
+ return;
+ }
+
+ if (requireFormatMatch && config.Format != other.Format)
+ return;
+ if (other < config)
+ config = other;
+ }
+
+        private static void CheckValidListAndThrow(NativeArray<StreamConfiguration> configs)
+ {
+ if (!configs.IsCreated || configs.Length == 0)
+ throw new ArgumentNullException(nameof(configs));
+ }
+ }
+}
diff --git a/Runtime/APIs/AndroidCamera/StreamConfiguration.cs.meta b/Runtime/APIs/AndroidCamera/StreamConfiguration.cs.meta
new file mode 100644
index 0000000..f949658
--- /dev/null
+++ b/Runtime/APIs/AndroidCamera/StreamConfiguration.cs.meta
@@ -0,0 +1,3 @@
+fileFormatVersion: 2
+guid: 002b473762c543b3ae57434d41280cf8
+timeCreated: 1707521782
\ No newline at end of file
diff --git a/Runtime/APIs/AndroidCamera/UnsafeImage.cs b/Runtime/APIs/AndroidCamera/UnsafeImage.cs
new file mode 100644
index 0000000..f9e30e2
--- /dev/null
+++ b/Runtime/APIs/AndroidCamera/UnsafeImage.cs
@@ -0,0 +1,292 @@
+namespace MagicLeap.Android
+{
+ using System;
+ using System.Diagnostics;
+ using System.Runtime.CompilerServices;
+ using Unity.Collections;
+ using Unity.Collections.LowLevel.Unsafe;
+
+ using UnityEngine.XR.MagicLeap.Unsafe;
+ using NDK.Media;
+ using NDK.NativeWindow;
+
+
+ internal unsafe struct UnsafeImage : IDisposable
+ {
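+        // Hand-rolled vtable of C# function pointers: lets this unmanaged struct dispatch to
+        // different native backends (e.g. AImage or AHardwareBuffer) without managed interfaces.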
+ public struct VTable
+ {
+ [NativeDisableUnsafePtrRestriction]
+ public void* data;
+ [NativeDisableUnsafePtrRestriction]
+            public delegate*<void*, void> destroy;
+            [NativeDisableUnsafePtrRestriction]
+            public delegate*<void*, ARect> getCropRect;
+            [NativeDisableUnsafePtrRestriction]
+            public delegate*<void*, MediaFormat> getFormat;
+            [NativeDisableUnsafePtrRestriction]
+            public delegate*<void*, int> getHeight;
+            [NativeDisableUnsafePtrRestriction]
+            public delegate*<void*, int> getNumPlanes;
+            [NativeDisableUnsafePtrRestriction]
+            public delegate*<void*, long> getTimestamp;
+            [NativeDisableUnsafePtrRestriction]
+            public delegate*<void*, int> getWidth;
+            [NativeDisableUnsafePtrRestriction]
+            public delegate*<void*, int, UnsafePlane*, bool> tryGetPlane;
+            [NativeDisableUnsafePtrRestriction]
+            public delegate*<void*, bool> tryLockData;
+            [NativeDisableUnsafePtrRestriction]
+            public delegate*<void*, bool> tryUnlockData;
+ }
+
+ private VTable* vTable;
+ private Allocator allocator;
+
+ internal Allocator Allocator => allocator;
+
+ public ARect CropRect
+ {
+ get
+ {
+ CheckVTableAndThrow();
+ return vTable->getCropRect(vTable->data);
+ }
+ }
+
+ public MediaFormat Format
+ {
+ get
+ {
+ CheckVTableAndThrow();
+ return vTable->getFormat(vTable->data);
+ }
+ }
+
+ public int Height
+ {
+ get
+ {
+ CheckVTableAndThrow();
+ return vTable->getHeight(vTable->data);
+ }
+ }
+
+ public bool IsCreated => vTable != null;
+
+        public bool IsMultiPlanar => NumberOfPlanes > 1;
+
+ public int NumberOfPlanes
+ {
+ get
+ {
+ CheckVTableAndThrow();
+ return vTable->getNumPlanes(vTable->data);
+ }
+ }
+
+ public long Timestamp
+ {
+ get
+ {
+ CheckVTableAndThrow();
+ return vTable->getTimestamp(vTable->data);
+ }
+ }
+
+ public int Width
+ {
+ get
+ {
+ CheckVTableAndThrow();
+ return vTable->getWidth(vTable->data);
+ }
+ }
+
+ public UnsafeImage(VTable vtable, Allocator allocator)
+ {
+ this.allocator = allocator;
+ vTable = UnsafeUtilityEx.CallocTracked(this.allocator, vtable);
+ }
+
+ public void Dispose()
+ {
+ CheckVTableAndThrow();
+
+ vTable->destroy(vTable->data);
+ UnsafeUtility.FreeTracked(vTable, allocator);
+ vTable = null;
+ allocator = Allocator.Invalid;
+ }
+
+ public bool TryGetPlane(int planeIdx, out UnsafePlane outPlane)
+ {
+ CheckVTableAndThrow();
+ CheckPlaneIndexAndThrow(planeIdx);
+
+ UnsafePlane p = default;
+ var success = vTable->tryGetPlane(vTable->data, planeIdx, &p);
+ outPlane = success ? p : default;
+ return success;
+ }
+
+ public bool TryLockData()
+ {
+ CheckVTableAndThrow();
+
+ if (vTable->tryLockData == null)
+ return true;
+
+ return vTable->tryLockData(vTable->data);
+ }
+
+ public bool TryUnlockData()
+ {
+ CheckVTableAndThrow();
+
+ if (vTable->tryUnlockData == null)
+ return true;
+
+ return vTable->tryUnlockData(vTable->data);
+ }
+
+ [Conditional("DEVELOPMENT_BUILD")]
+ private void CheckPlaneIndexAndThrow(int planeIdx)
+ {
+ int planeCount = NumberOfPlanes;
+ if (planeIdx < 0 || planeIdx >= planeCount)
+ throw new IndexOutOfRangeException(
+ $"plane index must be between 0 and {planeCount - 1}, inclusive");
+ }
+
+ [Conditional("DEVELOPMENT_BUILD")]
+ private void CheckVTableAndThrow()
+ {
+ if (vTable == null)
+ throw new NullReferenceException();
+ }
+ }
+
+ internal readonly struct DataLayout
+ {
+ public readonly int BytesPerPixel;
+ public readonly int Height;
+ public readonly int PixelStride;
+ public readonly int RowStride;
+ public readonly int Width;
+
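+        // Rows may carry padding, so the plane's byte length is governed by whichever is
+        // larger: the reported row stride or the tightly packed row width.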
+ public int DataLength
+ {
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ get => UnityEngine.Mathf.Max(RowStride, Width * BytesPerPixel) * Height;
+ }
+
+ public int PixelCount
+ {
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ get => Width * Height;
+ }
+
+ public DataLayout(int width, int height, int pixelStride, int rowStride, int bytesPerPixel)
+ {
+ BytesPerPixel = bytesPerPixel;
+ Height = height;
+ PixelStride = pixelStride;
+ RowStride = rowStride;
+ Width = width;
+ }
+
+ public DataLayout(AHardwareBuffer.Description description)
+ {
+ BytesPerPixel = description.Format.BytesPerPixel();
+ Height = (int)description.Height;
+ PixelStride = description.Format.BytesPerPixel();
+ RowStride = (int)description.Stride;
+ Width = (int)description.Width;
+ }
+
+ public string Debug()
+ {
+ var sb = new System.Text.StringBuilder();
+ sb.AppendLine("DataLayout:");
+ sb.AppendLine($"\tWidth = {Width}, Height = {Height}");
+            sb.AppendLine($"\tPixelStride = {PixelStride}, RowStride = {RowStride}");
+ sb.AppendLine($"\tBytesPerPixel = {BytesPerPixel}");
+ return sb.ToString();
+ }
+ }
+
+ internal unsafe struct UnsafePlane
+ {
+ [NativeDisableUnsafePtrRestriction]
+ private byte* data;
+ private DataLayout layout;
+
+ public byte* Data => data;
+
+ public int DataLength => layout.DataLength;
+
+ public bool IsValid => data != null;
+
+ public int PixelStride
+ {
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ get => layout.PixelStride;
+ }
+
+ public int RowStride
+ {
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ get => layout.RowStride;
+ }
+
+ internal UnsafePlane(byte* data, DataLayout layout)
+ {
+ this.data = data;
+ this.layout = layout;
+ }
+
+ internal UnsafePlane(AHardwareBuffer.Plane plane, AHardwareBuffer.Description desc)
+ {
+ data = plane.Data;
+ layout = new DataLayout((int)desc.Width, (int)desc.Height, (int)plane.PixelStride, (int)plane.RowStride,
+ desc.Format.BytesPerPixel());
+ }
+
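+        // Copies the raw plane bytes, including any row-stride padding, into a fresh array.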
+        public NativeArray<byte> CopyToNativeArray(Allocator allocator)
+        {
+            var array = new NativeArray<byte>(layout.DataLength, allocator, NativeArrayOptions.ClearMemory);
+ var ptr = array.GetUnsafePtr();
+ UnsafeUtility.MemCpy(ptr, data, layout.DataLength);
+ return array;
+ }
+
+ internal string DebugRepr(string name = null)
+ {
+            name = string.IsNullOrEmpty(name) ? nameof(UnsafePlane) : name;
+ return $"{name}[ _Data = {UnsafeUtilityEx.FormatAddress(data)}, _Layout = {layout.Debug()} ]";
+ }
+
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+        public T GetDataAtOffset<T>(int offset) where T : unmanaged
+        {
+            CheckOffsetBoundsAndThrow<T>(offset);
+            UnsafeUtilityEx.CheckTypeSizeAndThrow<T>(layout.BytesPerPixel);
+            return GetDataAtOffsetUnchecked<T>(offset);
+        }
+ }
+
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+        public T GetDataAtOffsetUnchecked<T>(int offset) where T : unmanaged
+            => UnsafeUtility.ReadArrayElement<T>(data, offset);
+
+ [Conditional("DEVELOPMENT_BUILD")]
+        private void CheckOffsetBoundsAndThrow<T>(int offset) where T : unmanaged
+        {
+            var length = layout.DataLength;
+            var sz = offset + sizeof(T);
+
+            // A read is valid as long as it ends at or before the end of the plane data.
+            if (sz > length)
+ throw new IndexOutOfRangeException(
+ $"Trying to access data at offset 0x{offset:X} with size 0x{sizeof(T):X}, but length is only 0x{length:X}");
+ }
+ }
+}
diff --git a/Runtime/APIs/AndroidCamera/UnsafeImage.cs.meta b/Runtime/APIs/AndroidCamera/UnsafeImage.cs.meta
new file mode 100644
index 0000000..59cf38e
--- /dev/null
+++ b/Runtime/APIs/AndroidCamera/UnsafeImage.cs.meta
@@ -0,0 +1,3 @@
+fileFormatVersion: 2
+guid: 01434500299f45549e61e343aa41db59
+timeCreated: 1707182392
\ No newline at end of file
diff --git a/Runtime/APIs/AndroidCamera/YUVUtility.cs b/Runtime/APIs/AndroidCamera/YUVUtility.cs
new file mode 100644
index 0000000..7e5c1cb
--- /dev/null
+++ b/Runtime/APIs/AndroidCamera/YUVUtility.cs
@@ -0,0 +1,93 @@
+namespace MagicLeap.Android
+{
+ using System.Runtime.CompilerServices;
+ using Unity.Collections;
+ using Unity.Jobs;
+ using UnityEngine;
+ using UnityEngine.XR.MagicLeap.Unsafe;
+
+ public static class YUVUtility
+ {
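+        // Classic YUV -> RGB conversion coefficients, the same values commonly used for
+        // Android YUV_420_888/NV21 conversion (approximately BT.601 full-range).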
+ private const float kRedCoefficient = 1.370705f;
+ private const float kGreenCoefficientV = 0.698001f;
+ private const float kGreenCoefficientU = 0.337633f;
+ private const float kBlueCoefficient = 1.732446f;
+
+ private struct YuvToRgbJob : IJobFor
+ {
+ public NativePlane Y;
+ public NativePlane U;
+ public NativePlane V;
+
+ public ImageDimensions Dimensions;
+
+            public NativeArray<Color32> RgbData;
+
+ public void Execute(int index)
+ {
+ (int x, int y) = Dimensions.GetCoordinatesFromIndex(index);
+
+ int yIndex = CalculateYIndex(x, y, Y.PixelStride, Y.RowStride);
+ int uvIndex = CalculateUVIndex(x, y, U.PixelStride, U.RowStride);
+
+                byte rawY = Y.GetDataAtOffset<byte>(yIndex);
+                byte rawU = U.GetDataAtOffset<byte>(uvIndex);
+                byte rawV = V.GetDataAtOffset<byte>(uvIndex);
+
+ RgbData[index] = ProcessPixel(rawY, rawU, rawV);
+ }
+ }
+
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ public static int CalculateYIndex(int x, int y, int pixelStride, int rowStride)
+ => (y * rowStride) + (x * pixelStride);
+
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ public static int CalculateUVIndex(int x, int y, int pixelStride, int rowStride)
+ {
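+            // 4:2:0 chroma subsampling: one U/V sample covers a 2x2 block of luma pixels,
+            // so both coordinates are halved before applying the chroma plane's strides.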
+ int uvx = x / 2;
+ int uvy = y / 2;
+
+ return (uvy * rowStride) + (uvx * pixelStride);
+ }
+
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ private static byte ClampToByte(int value) => (byte)Mathf.Clamp(value, 0, 255);
+
+        public static JobHandle ConvertPlanesToRGBAsync(Allocator allocator, NativeYUVPlanes yuvPlanes, out NativeArray<Color32> outRgb)
+        {
+            outRgb = yuvPlanes.Dimensions.CreateNativeArray<Color32>(allocator);
+
+ var job = new YuvToRgbJob
+ {
+ Y = yuvPlanes.YPlane,
+ U = yuvPlanes.UPlane,
+ V = yuvPlanes.VPlane,
+ Dimensions = yuvPlanes.Dimensions,
+ RgbData = outRgb,
+ };
+
+ return job.ScheduleParallel(yuvPlanes.Dimensions.Size, yuvPlanes.Dimensions.Width, default);
+ }
+
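+        // Recenters U/V around zero (they are stored offset by 128) before applying the
+        // coefficients above, then clamps each channel back into byte range.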
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ public static Color32 ProcessPixel(byte rawY, byte rawU, byte rawV)
+ {
+ int y = rawY & 0xFF;
+ int u = (rawU & 0xFF) - 128;
+ int v = (rawV & 0xFF) - 128;
+
+ int r = (int)(y + kRedCoefficient * v);
+ int g = (int)(y - (kGreenCoefficientV * v) - (kGreenCoefficientU * u));
+ int b = (int)(y + kBlueCoefficient * u);
+
+ return new Color32
+ {
+ a = 255,
+ r = ClampToByte(r),
+ g = ClampToByte(g),
+ b = ClampToByte(b)
+ };
+ }
+ }
+}
diff --git a/Runtime/APIs/AndroidCamera/YUVUtility.cs.meta b/Runtime/APIs/AndroidCamera/YUVUtility.cs.meta
new file mode 100644
index 0000000..4872097
--- /dev/null
+++ b/Runtime/APIs/AndroidCamera/YUVUtility.cs.meta
@@ -0,0 +1,3 @@
+fileFormatVersion: 2
+guid: 9a02e397e6cd4b899473554159aef42e
+timeCreated: 1707688830
\ No newline at end of file
diff --git a/Runtime/APIs/CVCamera/MLCVCamera.cs b/Runtime/APIs/CVCamera/MLCVCamera.cs
index 8a837ff..20ee5d5 100644
--- a/Runtime/APIs/CVCamera/MLCVCamera.cs
+++ b/Runtime/APIs/CVCamera/MLCVCamera.cs
@@ -89,13 +89,14 @@ protected override MLResult.Code StopAPI()
///
private MLResult InternalGetFramePose(NativeBindings.CameraID cameraId, MLTime vcamTimestamp, out Matrix4x4 outTransform)
{
- if (!MagicLeapXrProviderNativeBindings.IsHeadTrackingAvailable())
+ if ((MLDevice.IsMagicLeapLoaderActive() && !MagicLeapXrProviderNativeBindings.IsHeadTrackingAvailable()) ||
+ (MLDevice.IsOpenXRLoaderActive() && !OpenXR.Features.MagicLeapSupport.MLHeadTracking.IsAvailable()))
{
outTransform = default;
return MLResult.Create(MLResult.Code.PoseNotFound, "HeadTracking is not available");
}
MagicLeapNativeBindings.MLTransform outInternalTransform = new MagicLeapNativeBindings.MLTransform();
- MLResult.Code resultCode = NativeBindings.MLCVCameraGetFramePose(Handle, MagicLeapXrProviderNativeBindings.GetHeadTrackerHandle(), cameraId, vcamTimestamp.Value, ref outInternalTransform);
+ MLResult.Code resultCode = NativeBindings.MLCVCameraGetFramePose(Handle, GetHeadTrackerHandle(), cameraId, vcamTimestamp.Value, ref outInternalTransform);
MLResult.DidNativeCallSucceed(resultCode, nameof(NativeBindings.MLCVCameraGetFramePose));
MLResult poseResult = MLResult.Create(resultCode);
if (!poseResult.IsOk)
@@ -111,6 +112,14 @@ private MLResult InternalGetFramePose(NativeBindings.CameraID cameraId, MLTime v
return poseResult;
}
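+ // Picks the head-tracker handle from whichever XR loader (Magic Leap or OpenXR) is active.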
+ private ulong GetHeadTrackerHandle()
+ {
+ if (MLDevice.IsMagicLeapLoaderActive())
+ return MagicLeapXrProviderNativeBindings.GetHeadTrackerHandle();
+ else
+ return OpenXR.Features.MagicLeapSupport.MLHeadTracking.Handle;
+ }
+
protected override void OnApplicationPause(bool pauseStatus)
{
if (pauseStatus)
diff --git a/Runtime/APIs/Permissions.meta b/Runtime/APIs/MLInput.meta
similarity index 77%
rename from Runtime/APIs/Permissions.meta
rename to Runtime/APIs/MLInput.meta
index 2c5391c..556e01a 100644
--- a/Runtime/APIs/Permissions.meta
+++ b/Runtime/APIs/MLInput.meta
@@ -1,5 +1,5 @@
fileFormatVersion: 2
-guid: e68d84446d827d34cb347903de2f8478
+guid: e63ba6a8f9f1e5f4bb3945b90c5d212b
folderAsset: yes
DefaultImporter:
externalObjects: {}
diff --git a/Runtime/Deprecated/MagicLeapInputs.inputactions b/Runtime/APIs/MLInput/MLInput.inputactions
similarity index 99%
rename from Runtime/Deprecated/MagicLeapInputs.inputactions
rename to Runtime/APIs/MLInput/MLInput.inputactions
index 7b58d7a..73fc546 100644
--- a/Runtime/Deprecated/MagicLeapInputs.inputactions
+++ b/Runtime/APIs/MLInput/MLInput.inputactions
@@ -531,7 +531,7 @@
{
"name": "first",
"id": "5635980f-ce3d-410d-a1c2-7bdff3040a39",
- "path": "/pointerPosition",
+ "path": "/pointer/position",
"interactions": "",
"processors": "",
"groups": "",
@@ -564,7 +564,7 @@
{
"name": "first",
"id": "286ff085-2f65-4aec-9acc-85eeaca37f79",
- "path": "/pointerRotation",
+ "path": "/pointer/rotation",
"interactions": "",
"processors": "",
"groups": "",
diff --git a/Runtime/Deprecated/MagicLeapInputs.inputactions.meta b/Runtime/APIs/MLInput/MLInput.inputactions.meta
similarity index 100%
rename from Runtime/Deprecated/MagicLeapInputs.inputactions.meta
rename to Runtime/APIs/MLInput/MLInput.inputactions.meta
diff --git a/Runtime/Deprecated/MagicLeapInputs.cs b/Runtime/APIs/MLInput/MagicLeapInputs.cs
similarity index 72%
rename from Runtime/Deprecated/MagicLeapInputs.cs
rename to Runtime/APIs/MLInput/MagicLeapInputs.cs
index b02c10b..2ac9adc 100644
--- a/Runtime/Deprecated/MagicLeapInputs.cs
+++ b/Runtime/APIs/MLInput/MagicLeapInputs.cs
@@ -1,8 +1,8 @@
//------------------------------------------------------------------------------
//
// This code was auto-generated by com.unity.inputsystem:InputActionCodeGenerator
-// version 1.4.4
-// from Packages/com.magicleap.unitysdk/Runtime/Subsystems/Input/MagicLeapInputs.inputactions
+// version 1.7.0
+// from Packages/com.magicleap.unitysdk/Runtime/APIs/MLInput/MLInput.inputactions
//
// Changes to this file may cause incorrect behavior and will be lost if
// the code is regenerated.
@@ -15,13 +15,13 @@
using UnityEngine.InputSystem;
using UnityEngine.InputSystem.Utilities;
-public partial class @MagicLeapInputs : IInputActionCollection2, IDisposable
+public partial class @MagicLeapInputs: IInputActionCollection2, IDisposable
{
public InputActionAsset asset { get; }
public @MagicLeapInputs()
{
asset = InputActionAsset.FromJson(@"{
- ""name"": ""MagicLeapInputs"",
+ ""name"": ""MLInput"",
""maps"": [
{
""name"": ""Controller"",
@@ -553,7 +553,7 @@ public @MagicLeapInputs()
{
""name"": ""first"",
""id"": ""5635980f-ce3d-410d-a1c2-7bdff3040a39"",
- ""path"": ""/pointerPosition"",
+ ""path"": ""/pointer/position"",
""interactions"": """",
""processors"": """",
""groups"": """",
@@ -586,7 +586,7 @@ public @MagicLeapInputs()
{
""name"": ""first"",
""id"": ""286ff085-2f65-4aec-9acc-85eeaca37f79"",
- ""path"": ""/pointerRotation"",
+ ""path"": ""/pointer/rotation"",
""interactions"": """",
""processors"": """",
""groups"": """",
@@ -700,24 +700,6 @@ public @MagicLeapInputs()
""processors"": """",
""interactions"": """",
""initialStateCheck"": true
- },
- {
- ""name"": ""Position"",
- ""type"": ""Value"",
- ""id"": ""c7e35463-0720-42cc-bd53-6833954f3bdb"",
- ""expectedControlType"": ""Vector3"",
- ""processors"": """",
- ""interactions"": """",
- ""initialStateCheck"": true
- },
- {
- ""name"": ""Rotation"",
- ""type"": ""Value"",
- ""id"": ""abacd085-1f61-495a-87a4-cb9824ff0ccb"",
- ""expectedControlType"": ""Quaternion"",
- ""processors"": """",
- ""interactions"": """",
- ""initialStateCheck"": true
}
],
""bindings"": [
@@ -731,28 +713,6 @@ public @MagicLeapInputs()
""action"": ""Data"",
""isComposite"": false,
""isPartOfComposite"": false
- },
- {
- ""name"": """",
- ""id"": ""acc1ab74-2315-4f52-ad02-61d40239c614"",
- ""path"": ""/pose/position"",
- ""interactions"": """",
- ""processors"": """",
- ""groups"": """",
- ""action"": ""Position"",
- ""isComposite"": false,
- ""isPartOfComposite"": false
- },
- {
- ""name"": """",
- ""id"": ""986d5b69-5403-41ae-940d-286ea438f41f"",
- ""path"": ""/pose/rotation"",
- ""interactions"": """",
- ""processors"": """",
- ""groups"": """",
- ""action"": ""Rotation"",
- ""isComposite"": false,
- ""isPartOfComposite"": false
}
]
},
@@ -956,8 +916,6 @@ public @MagicLeapInputs()
// Eyes
m_Eyes = asset.FindActionMap("Eyes", throwIfNotFound: true);
m_Eyes_Data = m_Eyes.FindAction("Data", throwIfNotFound: true);
- m_Eyes_Position = m_Eyes.FindAction("Position", throwIfNotFound: true);
- m_Eyes_Rotation = m_Eyes.FindAction("Rotation", throwIfNotFound: true);
// LeftHand
m_LeftHand = asset.FindActionMap("LeftHand", throwIfNotFound: true);
m_LeftHand_Position = m_LeftHand.FindAction("Position", throwIfNotFound: true);
@@ -1013,12 +971,14 @@ public void Disable()
{
asset.Disable();
}
+
public IEnumerable<InputBinding> bindings => asset.bindings;
public InputAction FindAction(string actionNameOrId, bool throwIfNotFound = false)
{
return asset.FindAction(actionNameOrId, throwIfNotFound);
}
+
public int FindBinding(InputBinding bindingMask, out InputAction action)
{
return asset.FindBinding(bindingMask, out action);
@@ -1026,7 +986,7 @@ public int FindBinding(InputBinding bindingMask, out InputAction action)
// Controller
private readonly InputActionMap m_Controller;
- private IControllerActions m_ControllerActionsCallbackInterface;
+    private List<IControllerActions> m_ControllerActionsCallbackInterfaces = new List<IControllerActions>();
private readonly InputAction m_Controller_Position;
private readonly InputAction m_Controller_Rotation;
private readonly InputAction m_Controller_Velocity;
@@ -1076,142 +1036,155 @@ public struct ControllerActions
public void Disable() { Get().Disable(); }
public bool enabled => Get().enabled;
public static implicit operator InputActionMap(ControllerActions set) { return set.Get(); }
+ public void AddCallbacks(IControllerActions instance)
+ {
+ if (instance == null || m_Wrapper.m_ControllerActionsCallbackInterfaces.Contains(instance)) return;
+ m_Wrapper.m_ControllerActionsCallbackInterfaces.Add(instance);
+ @Position.started += instance.OnPosition;
+ @Position.performed += instance.OnPosition;
+ @Position.canceled += instance.OnPosition;
+ @Rotation.started += instance.OnRotation;
+ @Rotation.performed += instance.OnRotation;
+ @Rotation.canceled += instance.OnRotation;
+ @Velocity.started += instance.OnVelocity;
+ @Velocity.performed += instance.OnVelocity;
+ @Velocity.canceled += instance.OnVelocity;
+ @AngularVelocity.started += instance.OnAngularVelocity;
+ @AngularVelocity.performed += instance.OnAngularVelocity;
+ @AngularVelocity.canceled += instance.OnAngularVelocity;
+ @Acceleration.started += instance.OnAcceleration;
+ @Acceleration.performed += instance.OnAcceleration;
+ @Acceleration.canceled += instance.OnAcceleration;
+ @AngularAcceleration.started += instance.OnAngularAcceleration;
+ @AngularAcceleration.performed += instance.OnAngularAcceleration;
+ @AngularAcceleration.canceled += instance.OnAngularAcceleration;
+ @Menu.started += instance.OnMenu;
+ @Menu.performed += instance.OnMenu;
+ @Menu.canceled += instance.OnMenu;
+ @Bumper.started += instance.OnBumper;
+ @Bumper.performed += instance.OnBumper;
+ @Bumper.canceled += instance.OnBumper;
+ @TriggerButton.started += instance.OnTriggerButton;
+ @TriggerButton.performed += instance.OnTriggerButton;
+ @TriggerButton.canceled += instance.OnTriggerButton;
+ @Trigger.started += instance.OnTrigger;
+ @Trigger.performed += instance.OnTrigger;
+ @Trigger.canceled += instance.OnTrigger;
+ @TriggerHold.started += instance.OnTriggerHold;
+ @TriggerHold.performed += instance.OnTriggerHold;
+ @TriggerHold.canceled += instance.OnTriggerHold;
+ @TouchpadPosition.started += instance.OnTouchpadPosition;
+ @TouchpadPosition.performed += instance.OnTouchpadPosition;
+ @TouchpadPosition.canceled += instance.OnTouchpadPosition;
+ @TouchpadClick.started += instance.OnTouchpadClick;
+ @TouchpadClick.performed += instance.OnTouchpadClick;
+ @TouchpadClick.canceled += instance.OnTouchpadClick;
+ @TouchpadTouch.started += instance.OnTouchpadTouch;
+ @TouchpadTouch.performed += instance.OnTouchpadTouch;
+ @TouchpadTouch.canceled += instance.OnTouchpadTouch;
+ @TouchpadForce.started += instance.OnTouchpadForce;
+ @TouchpadForce.performed += instance.OnTouchpadForce;
+ @TouchpadForce.canceled += instance.OnTouchpadForce;
+ @IsTracked.started += instance.OnIsTracked;
+ @IsTracked.performed += instance.OnIsTracked;
+ @IsTracked.canceled += instance.OnIsTracked;
+ @Haptic.started += instance.OnHaptic;
+ @Haptic.performed += instance.OnHaptic;
+ @Haptic.canceled += instance.OnHaptic;
+ @PointerPosition.started += instance.OnPointerPosition;
+ @PointerPosition.performed += instance.OnPointerPosition;
+ @PointerPosition.canceled += instance.OnPointerPosition;
+ @PointerRotation.started += instance.OnPointerRotation;
+ @PointerRotation.performed += instance.OnPointerRotation;
+ @PointerRotation.canceled += instance.OnPointerRotation;
+ @TrackingState.started += instance.OnTrackingState;
+ @TrackingState.performed += instance.OnTrackingState;
+ @TrackingState.canceled += instance.OnTrackingState;
+ }
+
+ private void UnregisterCallbacks(IControllerActions instance)
+ {
+ @Position.started -= instance.OnPosition;
+ @Position.performed -= instance.OnPosition;
+ @Position.canceled -= instance.OnPosition;
+ @Rotation.started -= instance.OnRotation;
+ @Rotation.performed -= instance.OnRotation;
+ @Rotation.canceled -= instance.OnRotation;
+ @Velocity.started -= instance.OnVelocity;
+ @Velocity.performed -= instance.OnVelocity;
+ @Velocity.canceled -= instance.OnVelocity;
+ @AngularVelocity.started -= instance.OnAngularVelocity;
+ @AngularVelocity.performed -= instance.OnAngularVelocity;
+ @AngularVelocity.canceled -= instance.OnAngularVelocity;
+ @Acceleration.started -= instance.OnAcceleration;
+ @Acceleration.performed -= instance.OnAcceleration;
+ @Acceleration.canceled -= instance.OnAcceleration;
+ @AngularAcceleration.started -= instance.OnAngularAcceleration;
+ @AngularAcceleration.performed -= instance.OnAngularAcceleration;
+ @AngularAcceleration.canceled -= instance.OnAngularAcceleration;
+ @Menu.started -= instance.OnMenu;
+ @Menu.performed -= instance.OnMenu;
+ @Menu.canceled -= instance.OnMenu;
+ @Bumper.started -= instance.OnBumper;
+ @Bumper.performed -= instance.OnBumper;
+ @Bumper.canceled -= instance.OnBumper;
+ @TriggerButton.started -= instance.OnTriggerButton;
+ @TriggerButton.performed -= instance.OnTriggerButton;
+ @TriggerButton.canceled -= instance.OnTriggerButton;
+ @Trigger.started -= instance.OnTrigger;
+ @Trigger.performed -= instance.OnTrigger;
+ @Trigger.canceled -= instance.OnTrigger;
+ @TriggerHold.started -= instance.OnTriggerHold;
+ @TriggerHold.performed -= instance.OnTriggerHold;
+ @TriggerHold.canceled -= instance.OnTriggerHold;
+ @TouchpadPosition.started -= instance.OnTouchpadPosition;
+ @TouchpadPosition.performed -= instance.OnTouchpadPosition;
+ @TouchpadPosition.canceled -= instance.OnTouchpadPosition;
+ @TouchpadClick.started -= instance.OnTouchpadClick;
+ @TouchpadClick.performed -= instance.OnTouchpadClick;
+ @TouchpadClick.canceled -= instance.OnTouchpadClick;
+ @TouchpadTouch.started -= instance.OnTouchpadTouch;
+ @TouchpadTouch.performed -= instance.OnTouchpadTouch;
+ @TouchpadTouch.canceled -= instance.OnTouchpadTouch;
+ @TouchpadForce.started -= instance.OnTouchpadForce;
+ @TouchpadForce.performed -= instance.OnTouchpadForce;
+ @TouchpadForce.canceled -= instance.OnTouchpadForce;
+ @IsTracked.started -= instance.OnIsTracked;
+ @IsTracked.performed -= instance.OnIsTracked;
+ @IsTracked.canceled -= instance.OnIsTracked;
+ @Haptic.started -= instance.OnHaptic;
+ @Haptic.performed -= instance.OnHaptic;
+ @Haptic.canceled -= instance.OnHaptic;
+ @PointerPosition.started -= instance.OnPointerPosition;
+ @PointerPosition.performed -= instance.OnPointerPosition;
+ @PointerPosition.canceled -= instance.OnPointerPosition;
+ @PointerRotation.started -= instance.OnPointerRotation;
+ @PointerRotation.performed -= instance.OnPointerRotation;
+ @PointerRotation.canceled -= instance.OnPointerRotation;
+ @TrackingState.started -= instance.OnTrackingState;
+ @TrackingState.performed -= instance.OnTrackingState;
+ @TrackingState.canceled -= instance.OnTrackingState;
+ }
+
+ public void RemoveCallbacks(IControllerActions instance)
+ {
+ if (m_Wrapper.m_ControllerActionsCallbackInterfaces.Remove(instance))
+ UnregisterCallbacks(instance);
+ }
+
public void SetCallbacks(IControllerActions instance)
{
- if (m_Wrapper.m_ControllerActionsCallbackInterface != null)
- {
- @Position.started -= m_Wrapper.m_ControllerActionsCallbackInterface.OnPosition;
- @Position.performed -= m_Wrapper.m_ControllerActionsCallbackInterface.OnPosition;
- @Position.canceled -= m_Wrapper.m_ControllerActionsCallbackInterface.OnPosition;
- @Rotation.started -= m_Wrapper.m_ControllerActionsCallbackInterface.OnRotation;
- @Rotation.performed -= m_Wrapper.m_ControllerActionsCallbackInterface.OnRotation;
- @Rotation.canceled -= m_Wrapper.m_ControllerActionsCallbackInterface.OnRotation;
- @Velocity.started -= m_Wrapper.m_ControllerActionsCallbackInterface.OnVelocity;
- @Velocity.performed -= m_Wrapper.m_ControllerActionsCallbackInterface.OnVelocity;
- @Velocity.canceled -= m_Wrapper.m_ControllerActionsCallbackInterface.OnVelocity;
- @AngularVelocity.started -= m_Wrapper.m_ControllerActionsCallbackInterface.OnAngularVelocity;
- @AngularVelocity.performed -= m_Wrapper.m_ControllerActionsCallbackInterface.OnAngularVelocity;
- @AngularVelocity.canceled -= m_Wrapper.m_ControllerActionsCallbackInterface.OnAngularVelocity;
- @Acceleration.started -= m_Wrapper.m_ControllerActionsCallbackInterface.OnAcceleration;
- @Acceleration.performed -= m_Wrapper.m_ControllerActionsCallbackInterface.OnAcceleration;
- @Acceleration.canceled -= m_Wrapper.m_ControllerActionsCallbackInterface.OnAcceleration;
- @AngularAcceleration.started -= m_Wrapper.m_ControllerActionsCallbackInterface.OnAngularAcceleration;
- @AngularAcceleration.performed -= m_Wrapper.m_ControllerActionsCallbackInterface.OnAngularAcceleration;
- @AngularAcceleration.canceled -= m_Wrapper.m_ControllerActionsCallbackInterface.OnAngularAcceleration;
- @Menu.started -= m_Wrapper.m_ControllerActionsCallbackInterface.OnMenu;
- @Menu.performed -= m_Wrapper.m_ControllerActionsCallbackInterface.OnMenu;
- @Menu.canceled -= m_Wrapper.m_ControllerActionsCallbackInterface.OnMenu;
- @Bumper.started -= m_Wrapper.m_ControllerActionsCallbackInterface.OnBumper;
- @Bumper.performed -= m_Wrapper.m_ControllerActionsCallbackInterface.OnBumper;
- @Bumper.canceled -= m_Wrapper.m_ControllerActionsCallbackInterface.OnBumper;
- @TriggerButton.started -= m_Wrapper.m_ControllerActionsCallbackInterface.OnTriggerButton;
- @TriggerButton.performed -= m_Wrapper.m_ControllerActionsCallbackInterface.OnTriggerButton;
- @TriggerButton.canceled -= m_Wrapper.m_ControllerActionsCallbackInterface.OnTriggerButton;
- @Trigger.started -= m_Wrapper.m_ControllerActionsCallbackInterface.OnTrigger;
- @Trigger.performed -= m_Wrapper.m_ControllerActionsCallbackInterface.OnTrigger;
- @Trigger.canceled -= m_Wrapper.m_ControllerActionsCallbackInterface.OnTrigger;
- @TriggerHold.started -= m_Wrapper.m_ControllerActionsCallbackInterface.OnTriggerHold;
- @TriggerHold.performed -= m_Wrapper.m_ControllerActionsCallbackInterface.OnTriggerHold;
- @TriggerHold.canceled -= m_Wrapper.m_ControllerActionsCallbackInterface.OnTriggerHold;
- @TouchpadPosition.started -= m_Wrapper.m_ControllerActionsCallbackInterface.OnTouchpadPosition;
- @TouchpadPosition.performed -= m_Wrapper.m_ControllerActionsCallbackInterface.OnTouchpadPosition;
- @TouchpadPosition.canceled -= m_Wrapper.m_ControllerActionsCallbackInterface.OnTouchpadPosition;
- @TouchpadClick.started -= m_Wrapper.m_ControllerActionsCallbackInterface.OnTouchpadClick;
- @TouchpadClick.performed -= m_Wrapper.m_ControllerActionsCallbackInterface.OnTouchpadClick;
- @TouchpadClick.canceled -= m_Wrapper.m_ControllerActionsCallbackInterface.OnTouchpadClick;
- @TouchpadTouch.started -= m_Wrapper.m_ControllerActionsCallbackInterface.OnTouchpadTouch;
- @TouchpadTouch.performed -= m_Wrapper.m_ControllerActionsCallbackInterface.OnTouchpadTouch;
- @TouchpadTouch.canceled -= m_Wrapper.m_ControllerActionsCallbackInterface.OnTouchpadTouch;
- @TouchpadForce.started -= m_Wrapper.m_ControllerActionsCallbackInterface.OnTouchpadForce;
- @TouchpadForce.performed -= m_Wrapper.m_ControllerActionsCallbackInterface.OnTouchpadForce;
- @TouchpadForce.canceled -= m_Wrapper.m_ControllerActionsCallbackInterface.OnTouchpadForce;
- @IsTracked.started -= m_Wrapper.m_ControllerActionsCallbackInterface.OnIsTracked;
- @IsTracked.performed -= m_Wrapper.m_ControllerActionsCallbackInterface.OnIsTracked;
- @IsTracked.canceled -= m_Wrapper.m_ControllerActionsCallbackInterface.OnIsTracked;
- @Haptic.started -= m_Wrapper.m_ControllerActionsCallbackInterface.OnHaptic;
- @Haptic.performed -= m_Wrapper.m_ControllerActionsCallbackInterface.OnHaptic;
- @Haptic.canceled -= m_Wrapper.m_ControllerActionsCallbackInterface.OnHaptic;
- @PointerPosition.started -= m_Wrapper.m_ControllerActionsCallbackInterface.OnPointerPosition;
- @PointerPosition.performed -= m_Wrapper.m_ControllerActionsCallbackInterface.OnPointerPosition;
- @PointerPosition.canceled -= m_Wrapper.m_ControllerActionsCallbackInterface.OnPointerPosition;
- @PointerRotation.started -= m_Wrapper.m_ControllerActionsCallbackInterface.OnPointerRotation;
- @PointerRotation.performed -= m_Wrapper.m_ControllerActionsCallbackInterface.OnPointerRotation;
- @PointerRotation.canceled -= m_Wrapper.m_ControllerActionsCallbackInterface.OnPointerRotation;
- @TrackingState.started -= m_Wrapper.m_ControllerActionsCallbackInterface.OnTrackingState;
- @TrackingState.performed -= m_Wrapper.m_ControllerActionsCallbackInterface.OnTrackingState;
- @TrackingState.canceled -= m_Wrapper.m_ControllerActionsCallbackInterface.OnTrackingState;
- }
- m_Wrapper.m_ControllerActionsCallbackInterface = instance;
- if (instance != null)
- {
- @Position.started += instance.OnPosition;
- @Position.performed += instance.OnPosition;
- @Position.canceled += instance.OnPosition;
- @Rotation.started += instance.OnRotation;
- @Rotation.performed += instance.OnRotation;
- @Rotation.canceled += instance.OnRotation;
- @Velocity.started += instance.OnVelocity;
- @Velocity.performed += instance.OnVelocity;
- @Velocity.canceled += instance.OnVelocity;
- @AngularVelocity.started += instance.OnAngularVelocity;
- @AngularVelocity.performed += instance.OnAngularVelocity;
- @AngularVelocity.canceled += instance.OnAngularVelocity;
- @Acceleration.started += instance.OnAcceleration;
- @Acceleration.performed += instance.OnAcceleration;
- @Acceleration.canceled += instance.OnAcceleration;
- @AngularAcceleration.started += instance.OnAngularAcceleration;
- @AngularAcceleration.performed += instance.OnAngularAcceleration;
- @AngularAcceleration.canceled += instance.OnAngularAcceleration;
- @Menu.started += instance.OnMenu;
- @Menu.performed += instance.OnMenu;
- @Menu.canceled += instance.OnMenu;
- @Bumper.started += instance.OnBumper;
- @Bumper.performed += instance.OnBumper;
- @Bumper.canceled += instance.OnBumper;
- @TriggerButton.started += instance.OnTriggerButton;
- @TriggerButton.performed += instance.OnTriggerButton;
- @TriggerButton.canceled += instance.OnTriggerButton;
- @Trigger.started += instance.OnTrigger;
- @Trigger.performed += instance.OnTrigger;
- @Trigger.canceled += instance.OnTrigger;
- @TriggerHold.started += instance.OnTriggerHold;
- @TriggerHold.performed += instance.OnTriggerHold;
- @TriggerHold.canceled += instance.OnTriggerHold;
- @TouchpadPosition.started += instance.OnTouchpadPosition;
- @TouchpadPosition.performed += instance.OnTouchpadPosition;
- @TouchpadPosition.canceled += instance.OnTouchpadPosition;
- @TouchpadClick.started += instance.OnTouchpadClick;
- @TouchpadClick.performed += instance.OnTouchpadClick;
- @TouchpadClick.canceled += instance.OnTouchpadClick;
- @TouchpadTouch.started += instance.OnTouchpadTouch;
- @TouchpadTouch.performed += instance.OnTouchpadTouch;
- @TouchpadTouch.canceled += instance.OnTouchpadTouch;
- @TouchpadForce.started += instance.OnTouchpadForce;
- @TouchpadForce.performed += instance.OnTouchpadForce;
- @TouchpadForce.canceled += instance.OnTouchpadForce;
- @IsTracked.started += instance.OnIsTracked;
- @IsTracked.performed += instance.OnIsTracked;
- @IsTracked.canceled += instance.OnIsTracked;
- @Haptic.started += instance.OnHaptic;
- @Haptic.performed += instance.OnHaptic;
- @Haptic.canceled += instance.OnHaptic;
- @PointerPosition.started += instance.OnPointerPosition;
- @PointerPosition.performed += instance.OnPointerPosition;
- @PointerPosition.canceled += instance.OnPointerPosition;
- @PointerRotation.started += instance.OnPointerRotation;
- @PointerRotation.performed += instance.OnPointerRotation;
- @PointerRotation.canceled += instance.OnPointerRotation;
- @TrackingState.started += instance.OnTrackingState;
- @TrackingState.performed += instance.OnTrackingState;
- @TrackingState.canceled += instance.OnTrackingState;
- }
+ foreach (var item in m_Wrapper.m_ControllerActionsCallbackInterfaces)
+ UnregisterCallbacks(item);
+ m_Wrapper.m_ControllerActionsCallbackInterfaces.Clear();
+ AddCallbacks(instance);
}
}
public ControllerActions @Controller => new ControllerActions(this);
// HMD
private readonly InputActionMap m_HMD;
- private IHMDActions m_HMDActionsCallbackInterface;
+    private List<IHMDActions> m_HMDActionsCallbackInterfaces = new List<IHMDActions>();
private readonly InputAction m_HMD_Position;
private readonly InputAction m_HMD_Rotation;
public struct HMDActions
@@ -1225,83 +1198,93 @@ public struct HMDActions
public void Disable() { Get().Disable(); }
public bool enabled => Get().enabled;
public static implicit operator InputActionMap(HMDActions set) { return set.Get(); }
+ public void AddCallbacks(IHMDActions instance)
+ {
+ if (instance == null || m_Wrapper.m_HMDActionsCallbackInterfaces.Contains(instance)) return;
+ m_Wrapper.m_HMDActionsCallbackInterfaces.Add(instance);
+ @Position.started += instance.OnPosition;
+ @Position.performed += instance.OnPosition;
+ @Position.canceled += instance.OnPosition;
+ @Rotation.started += instance.OnRotation;
+ @Rotation.performed += instance.OnRotation;
+ @Rotation.canceled += instance.OnRotation;
+ }
+
+ private void UnregisterCallbacks(IHMDActions instance)
+ {
+ @Position.started -= instance.OnPosition;
+ @Position.performed -= instance.OnPosition;
+ @Position.canceled -= instance.OnPosition;
+ @Rotation.started -= instance.OnRotation;
+ @Rotation.performed -= instance.OnRotation;
+ @Rotation.canceled -= instance.OnRotation;
+ }
+
+ public void RemoveCallbacks(IHMDActions instance)
+ {
+ if (m_Wrapper.m_HMDActionsCallbackInterfaces.Remove(instance))
+ UnregisterCallbacks(instance);
+ }
+
public void SetCallbacks(IHMDActions instance)
{
- if (m_Wrapper.m_HMDActionsCallbackInterface != null)
- {
- @Position.started -= m_Wrapper.m_HMDActionsCallbackInterface.OnPosition;
- @Position.performed -= m_Wrapper.m_HMDActionsCallbackInterface.OnPosition;
- @Position.canceled -= m_Wrapper.m_HMDActionsCallbackInterface.OnPosition;
- @Rotation.started -= m_Wrapper.m_HMDActionsCallbackInterface.OnRotation;
- @Rotation.performed -= m_Wrapper.m_HMDActionsCallbackInterface.OnRotation;
- @Rotation.canceled -= m_Wrapper.m_HMDActionsCallbackInterface.OnRotation;
- }
- m_Wrapper.m_HMDActionsCallbackInterface = instance;
- if (instance != null)
- {
- @Position.started += instance.OnPosition;
- @Position.performed += instance.OnPosition;
- @Position.canceled += instance.OnPosition;
- @Rotation.started += instance.OnRotation;
- @Rotation.performed += instance.OnRotation;
- @Rotation.canceled += instance.OnRotation;
- }
+ foreach (var item in m_Wrapper.m_HMDActionsCallbackInterfaces)
+ UnregisterCallbacks(item);
+ m_Wrapper.m_HMDActionsCallbackInterfaces.Clear();
+ AddCallbacks(instance);
}
}
public HMDActions @HMD => new HMDActions(this);
// Eyes
private readonly InputActionMap m_Eyes;
- private IEyesActions m_EyesActionsCallbackInterface;
+    private List<IEyesActions> m_EyesActionsCallbackInterfaces = new List<IEyesActions>();
private readonly InputAction m_Eyes_Data;
- private readonly InputAction m_Eyes_Position;
- private readonly InputAction m_Eyes_Rotation;
public struct EyesActions
{
private @MagicLeapInputs m_Wrapper;
public EyesActions(@MagicLeapInputs wrapper) { m_Wrapper = wrapper; }
public InputAction @Data => m_Wrapper.m_Eyes_Data;
- public InputAction @Position => m_Wrapper.m_Eyes_Position;
- public InputAction @Rotation => m_Wrapper.m_Eyes_Rotation;
public InputActionMap Get() { return m_Wrapper.m_Eyes; }
public void Enable() { Get().Enable(); }
public void Disable() { Get().Disable(); }
public bool enabled => Get().enabled;
public static implicit operator InputActionMap(EyesActions set) { return set.Get(); }
+ public void AddCallbacks(IEyesActions instance)
+ {
+ if (instance == null || m_Wrapper.m_EyesActionsCallbackInterfaces.Contains(instance)) return;
+ m_Wrapper.m_EyesActionsCallbackInterfaces.Add(instance);
+ @Data.started += instance.OnData;
+ @Data.performed += instance.OnData;
+ @Data.canceled += instance.OnData;
+ }
+
+ private void UnregisterCallbacks(IEyesActions instance)
+ {
+ @Data.started -= instance.OnData;
+ @Data.performed -= instance.OnData;
+ @Data.canceled -= instance.OnData;
+ }
+
+ public void RemoveCallbacks(IEyesActions instance)
+ {
+ if (m_Wrapper.m_EyesActionsCallbackInterfaces.Remove(instance))
+ UnregisterCallbacks(instance);
+ }
+
public void SetCallbacks(IEyesActions instance)
{
- if (m_Wrapper.m_EyesActionsCallbackInterface != null)
- {
- @Data.started -= m_Wrapper.m_EyesActionsCallbackInterface.OnData;
- @Data.performed -= m_Wrapper.m_EyesActionsCallbackInterface.OnData;
- @Data.canceled -= m_Wrapper.m_EyesActionsCallbackInterface.OnData;
- @Position.started -= m_Wrapper.m_EyesActionsCallbackInterface.OnPosition;
- @Position.performed -= m_Wrapper.m_EyesActionsCallbackInterface.OnPosition;
- @Position.canceled -= m_Wrapper.m_EyesActionsCallbackInterface.OnPosition;
- @Rotation.started -= m_Wrapper.m_EyesActionsCallbackInterface.OnRotation;
- @Rotation.performed -= m_Wrapper.m_EyesActionsCallbackInterface.OnRotation;
- @Rotation.canceled -= m_Wrapper.m_EyesActionsCallbackInterface.OnRotation;
- }
- m_Wrapper.m_EyesActionsCallbackInterface = instance;
- if (instance != null)
- {
- @Data.started += instance.OnData;
- @Data.performed += instance.OnData;
- @Data.canceled += instance.OnData;
- @Position.started += instance.OnPosition;
- @Position.performed += instance.OnPosition;
- @Position.canceled += instance.OnPosition;
- @Rotation.started += instance.OnRotation;
- @Rotation.performed += instance.OnRotation;
- @Rotation.canceled += instance.OnRotation;
- }
+ foreach (var item in m_Wrapper.m_EyesActionsCallbackInterfaces)
+ UnregisterCallbacks(item);
+ m_Wrapper.m_EyesActionsCallbackInterfaces.Clear();
+ AddCallbacks(instance);
}
}
public EyesActions @Eyes => new EyesActions(this);
// LeftHand
private readonly InputActionMap m_LeftHand;
- private ILeftHandActions m_LeftHandActionsCallbackInterface;
+    private List<ILeftHandActions> m_LeftHandActionsCallbackInterfaces = new List<ILeftHandActions>();
private readonly InputAction m_LeftHand_Position;
private readonly InputAction m_LeftHand_Rotation;
private readonly InputAction m_LeftHand_Keypose;
@@ -1317,40 +1300,53 @@ public struct LeftHandActions
public void Disable() { Get().Disable(); }
public bool enabled => Get().enabled;
public static implicit operator InputActionMap(LeftHandActions set) { return set.Get(); }
+ public void AddCallbacks(ILeftHandActions instance)
+ {
+ if (instance == null || m_Wrapper.m_LeftHandActionsCallbackInterfaces.Contains(instance)) return;
+ m_Wrapper.m_LeftHandActionsCallbackInterfaces.Add(instance);
+ @Position.started += instance.OnPosition;
+ @Position.performed += instance.OnPosition;
+ @Position.canceled += instance.OnPosition;
+ @Rotation.started += instance.OnRotation;
+ @Rotation.performed += instance.OnRotation;
+ @Rotation.canceled += instance.OnRotation;
+ @Keypose.started += instance.OnKeypose;
+ @Keypose.performed += instance.OnKeypose;
+ @Keypose.canceled += instance.OnKeypose;
+ }
+
+ private void UnregisterCallbacks(ILeftHandActions instance)
+ {
+ @Position.started -= instance.OnPosition;
+ @Position.performed -= instance.OnPosition;
+ @Position.canceled -= instance.OnPosition;
+ @Rotation.started -= instance.OnRotation;
+ @Rotation.performed -= instance.OnRotation;
+ @Rotation.canceled -= instance.OnRotation;
+ @Keypose.started -= instance.OnKeypose;
+ @Keypose.performed -= instance.OnKeypose;
+ @Keypose.canceled -= instance.OnKeypose;
+ }
+
+ public void RemoveCallbacks(ILeftHandActions instance)
+ {
+ if (m_Wrapper.m_LeftHandActionsCallbackInterfaces.Remove(instance))
+ UnregisterCallbacks(instance);
+ }
+
public void SetCallbacks(ILeftHandActions instance)
{
- if (m_Wrapper.m_LeftHandActionsCallbackInterface != null)
- {
- @Position.started -= m_Wrapper.m_LeftHandActionsCallbackInterface.OnPosition;
- @Position.performed -= m_Wrapper.m_LeftHandActionsCallbackInterface.OnPosition;
- @Position.canceled -= m_Wrapper.m_LeftHandActionsCallbackInterface.OnPosition;
- @Rotation.started -= m_Wrapper.m_LeftHandActionsCallbackInterface.OnRotation;
- @Rotation.performed -= m_Wrapper.m_LeftHandActionsCallbackInterface.OnRotation;
- @Rotation.canceled -= m_Wrapper.m_LeftHandActionsCallbackInterface.OnRotation;
- @Keypose.started -= m_Wrapper.m_LeftHandActionsCallbackInterface.OnKeypose;
- @Keypose.performed -= m_Wrapper.m_LeftHandActionsCallbackInterface.OnKeypose;
- @Keypose.canceled -= m_Wrapper.m_LeftHandActionsCallbackInterface.OnKeypose;
- }
- m_Wrapper.m_LeftHandActionsCallbackInterface = instance;
- if (instance != null)
- {
- @Position.started += instance.OnPosition;
- @Position.performed += instance.OnPosition;
- @Position.canceled += instance.OnPosition;
- @Rotation.started += instance.OnRotation;
- @Rotation.performed += instance.OnRotation;
- @Rotation.canceled += instance.OnRotation;
- @Keypose.started += instance.OnKeypose;
- @Keypose.performed += instance.OnKeypose;
- @Keypose.canceled += instance.OnKeypose;
- }
+ foreach (var item in m_Wrapper.m_LeftHandActionsCallbackInterfaces)
+ UnregisterCallbacks(item);
+ m_Wrapper.m_LeftHandActionsCallbackInterfaces.Clear();
+ AddCallbacks(instance);
}
}
public LeftHandActions @LeftHand => new LeftHandActions(this);
// RightHand
private readonly InputActionMap m_RightHand;
- private IRightHandActions m_RightHandActionsCallbackInterface;
+    private List<IRightHandActions> m_RightHandActionsCallbackInterfaces = new List<IRightHandActions>();
private readonly InputAction m_RightHand_Position;
private readonly InputAction m_RightHand_Rotation;
private readonly InputAction m_RightHand_Keypose;
@@ -1366,33 +1362,46 @@ public struct RightHandActions
public void Disable() { Get().Disable(); }
public bool enabled => Get().enabled;
public static implicit operator InputActionMap(RightHandActions set) { return set.Get(); }
+ public void AddCallbacks(IRightHandActions instance)
+ {
+ if (instance == null || m_Wrapper.m_RightHandActionsCallbackInterfaces.Contains(instance)) return;
+ m_Wrapper.m_RightHandActionsCallbackInterfaces.Add(instance);
+ @Position.started += instance.OnPosition;
+ @Position.performed += instance.OnPosition;
+ @Position.canceled += instance.OnPosition;
+ @Rotation.started += instance.OnRotation;
+ @Rotation.performed += instance.OnRotation;
+ @Rotation.canceled += instance.OnRotation;
+ @Keypose.started += instance.OnKeypose;
+ @Keypose.performed += instance.OnKeypose;
+ @Keypose.canceled += instance.OnKeypose;
+ }
+
+ private void UnregisterCallbacks(IRightHandActions instance)
+ {
+ @Position.started -= instance.OnPosition;
+ @Position.performed -= instance.OnPosition;
+ @Position.canceled -= instance.OnPosition;
+ @Rotation.started -= instance.OnRotation;
+ @Rotation.performed -= instance.OnRotation;
+ @Rotation.canceled -= instance.OnRotation;
+ @Keypose.started -= instance.OnKeypose;
+ @Keypose.performed -= instance.OnKeypose;
+ @Keypose.canceled -= instance.OnKeypose;
+ }
+
+ public void RemoveCallbacks(IRightHandActions instance)
+ {
+ if (m_Wrapper.m_RightHandActionsCallbackInterfaces.Remove(instance))
+ UnregisterCallbacks(instance);
+ }
+
public void SetCallbacks(IRightHandActions instance)
{
- if (m_Wrapper.m_RightHandActionsCallbackInterface != null)
- {
- @Position.started -= m_Wrapper.m_RightHandActionsCallbackInterface.OnPosition;
- @Position.performed -= m_Wrapper.m_RightHandActionsCallbackInterface.OnPosition;
- @Position.canceled -= m_Wrapper.m_RightHandActionsCallbackInterface.OnPosition;
- @Rotation.started -= m_Wrapper.m_RightHandActionsCallbackInterface.OnRotation;
- @Rotation.performed -= m_Wrapper.m_RightHandActionsCallbackInterface.OnRotation;
- @Rotation.canceled -= m_Wrapper.m_RightHandActionsCallbackInterface.OnRotation;
- @Keypose.started -= m_Wrapper.m_RightHandActionsCallbackInterface.OnKeypose;
- @Keypose.performed -= m_Wrapper.m_RightHandActionsCallbackInterface.OnKeypose;
- @Keypose.canceled -= m_Wrapper.m_RightHandActionsCallbackInterface.OnKeypose;
- }
- m_Wrapper.m_RightHandActionsCallbackInterface = instance;
- if (instance != null)
- {
- @Position.started += instance.OnPosition;
- @Position.performed += instance.OnPosition;
- @Position.canceled += instance.OnPosition;
- @Rotation.started += instance.OnRotation;
- @Rotation.performed += instance.OnRotation;
- @Rotation.canceled += instance.OnRotation;
- @Keypose.started += instance.OnKeypose;
- @Keypose.performed += instance.OnKeypose;
- @Keypose.canceled += instance.OnKeypose;
- }
+ foreach (var item in m_Wrapper.m_RightHandActionsCallbackInterfaces)
+ UnregisterCallbacks(item);
+ m_Wrapper.m_RightHandActionsCallbackInterfaces.Clear();
+ AddCallbacks(instance);
}
}
public RightHandActions @RightHand => new RightHandActions(this);
@@ -1436,8 +1445,6 @@ public interface IHMDActions
public interface IEyesActions
{
void OnData(InputAction.CallbackContext context);
- void OnPosition(InputAction.CallbackContext context);
- void OnRotation(InputAction.CallbackContext context);
}
public interface ILeftHandActions
{
diff --git a/Runtime/Deprecated/MagicLeapInputs.cs.meta b/Runtime/APIs/MLInput/MagicLeapInputs.cs.meta
similarity index 100%
rename from Runtime/Deprecated/MagicLeapInputs.cs.meta
rename to Runtime/APIs/MLInput/MagicLeapInputs.cs.meta
diff --git a/Runtime/APIs/MarkerTracker/MLMarkerTrackerNativeBindings.cs b/Runtime/APIs/MarkerTracker/MLMarkerTrackerNativeBindings.cs
index 1624b58..086f385 100644
--- a/Runtime/APIs/MarkerTracker/MLMarkerTrackerNativeBindings.cs
+++ b/Runtime/APIs/MarkerTracker/MLMarkerTrackerNativeBindings.cs
@@ -495,7 +495,7 @@ public readonly partial struct MLMarkerTrackerSettings
///
public MLMarkerTrackerSettings(TrackerSettings settings)
{
- this.Version = 5;
+ this.Version = 6;
this.EnableMarkerScanning = settings.EnableMarkerScanning;
this.EnabledDetectorTypes = (uint)settings.MarkerTypes;
this.ArucoDicitonary = settings.ArucoDicitonary;
diff --git a/Runtime/APIs/Permissions/MLPermissionsNativeBindings.cs b/Runtime/APIs/Permissions/MLPermissionsNativeBindings.cs
deleted file mode 100644
index d0c9254..0000000
--- a/Runtime/APIs/Permissions/MLPermissionsNativeBindings.cs
+++ /dev/null
@@ -1,79 +0,0 @@
-// %BANNER_BEGIN%
-// ---------------------------------------------------------------------
-// %COPYRIGHT_BEGIN%
-// Copyright (c) (2018-2022) Magic Leap, Inc. All Rights Reserved.
-// Use of this file is governed by the Software License Agreement, located here: https://www.magicleap.com/software-license-agreement-ml2
-// Terms and conditions applicable to third-party materials accompanying this distribution may also be found in the top-level NOTICE file appearing herein.
-// %COPYRIGHT_END%
-// ---------------------------------------------------------------------
-// %BANNER_END%
-
-namespace UnityEngine.XR.MagicLeap
-{
- using System.Runtime.InteropServices;
-
- public sealed partial class MLPermissions
- {
- ///
- /// Permissions NativeBindings are only used when running in the Editor with Magic Leap App Simulator
- ///
- private class NativeBindings : Native.MagicLeapNativeBindings
- {
- ///
- /// Prevents a default instance of the class from being created.
- ///
- private NativeBindings() { }
-
- ///
- /// Start the ZI permissions-checking subsystem. This function should be called
- /// before any permissions-checking functions are called while running in the Editor.
- ///
- ///
- /// MLResult.Code.Ok if the permission system startup succeeded.
- /// MLResult.Code.UnspecifiedFailure if the permission system failed to startup.
- ///
- [DllImport(MLZIPermissionsDll, CallingConvention = CallingConvention.Cdecl)]
- public static extern MLResult.Code MLZIPermissionsStart();
-
- ///
- /// Stop the ZI permissions system. Call to clean up the ZI permission system at application termination.
- ///
- ///
- /// MLResult.Code.OK if the stop was successful.
- /// MLResult.Code.UnspecifiedFailure if the permissions system failed to stop.
- ///
- [DllImport(MLZIPermissionsDll, CallingConvention = CallingConvention.Cdecl)]
- public static extern MLResult.Code MLZIPermissionsStop();
-
- ///
- /// Queries whther or not the application has been granted the given permission.
- /// see https://developer.android.com/training/permissions/requesting
- ///
- /// The name of the permission to query.
- ///
- /// MLResult.Code.InvalidParam The permissionName parameter is not valid (or null)
- /// MLResult.Code.Ok The permission is granted.
- /// MLResult.Code.PermissionDenied The permission is denied.
- /// MLResult.Code.Pending The permission request has not been resolved.
- /// MLResult.Code.UnspecifiedFailure There was an unknown error.
- ///
- [DllImport(MLZIPermissionsDll, CallingConvention = CallingConvention.Cdecl)]
- public static extern MLResult.Code MLZIPermissionsIsGranted(string permissionName);
-
- /// <summary>
- /// Request the given permission asynchronously (non-blocking).
- /// Client code can poll the permission state to see if the permission was granted, denied or still pending.
- /// see https://developer.android.com/training/permissions/requesting
- /// </summary>
- /// <param name="permissionName">The name of the permission to request.</param>
- /// <returns>
- /// MLResult.Code.InvalidParam The permission_name parameter is not valid (or null).
- /// MLResult.Code.Ok The query was successful.
- /// MLResult.Code.UnspecifiedFailure There was an unknown error.
- /// </returns>
- [DllImport(MLZIPermissionsDll, CallingConvention = CallingConvention.Cdecl)]
- public static extern MLResult.Code MLZIPermissionsRequest(string permissionName);
- }
- }
-}
-
diff --git a/Runtime/APIs/Permissions/MLPermissionsNativeBindings.cs.meta b/Runtime/APIs/Permissions/MLPermissionsNativeBindings.cs.meta
deleted file mode 100644
index ecec492..0000000
--- a/Runtime/APIs/Permissions/MLPermissionsNativeBindings.cs.meta
+++ /dev/null
@@ -1,11 +0,0 @@
-fileFormatVersion: 2
-guid: 49f7db0535f69694ea6d370147b8c17f
-MonoImporter:
- externalObjects: {}
- serializedVersion: 2
- defaultReferences: []
- executionOrder: 0
- icon: {instanceID: 0}
- userData:
- assetBundleName:
- assetBundleVariant:
diff --git a/Runtime/Common/NativeRingBuffer.cs b/Runtime/Common/NativeRingBuffer.cs
new file mode 100644
index 0000000..abd19f2
--- /dev/null
+++ b/Runtime/Common/NativeRingBuffer.cs
@@ -0,0 +1,1084 @@
+// %BANNER_BEGIN%
+// ---------------------------------------------------------------------
+// %COPYRIGHT_BEGIN%
+// Copyright (c) (2018-2022) Magic Leap, Inc. All Rights Reserved.
+// Use of this file is governed by the Software License Agreement, located here: https://www.magicleap.com/software-license-agreement-ml2
+// Terms and conditions applicable to third-party materials accompanying this distribution may also be found in the top-level NOTICE file appearing herein.
+// %COPYRIGHT_END%
+// ---------------------------------------------------------------------
+// %BANNER_END%
+
+// Ring Buffer implementation inspired by https://www.daugaard.org/blog/writing-a-fast-and-versatile-spsc-ring-buffer/
+namespace UnityEngine.XR.MagicLeap
+{
+ using System;
+ using System.Diagnostics;
+ using System.Runtime.CompilerServices;
+ using System.Runtime.InteropServices;
+ using System.Threading;
+ using Unity.Collections;
+ using Unity.Collections.LowLevel.Unsafe;
+ using Unity.Jobs;
+ using UnityEngine.XR.MagicLeap.LowLevel.Unsafe;
+ using Unsafe;
+
+ namespace LowLevel.Unsafe
+ {
+ internal unsafe struct UnsafeRingBuffer : IDisposable
+ {
+ public struct BlockingReader
+ {
+ [NativeDisableUnsafePtrRestriction]
+ private Data* _Data;
+
+ public bool IsCreated => _Data != null;
+
+ internal BlockingReader(void* data)
+ {
+ _Data = (Data*)data;
+ }
+
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ public void FinishRead()
+ {
+ CheckNullAndThrow(_Data);
+ FinishReadUnchecked();
+ }
+
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ public void FinishReadUnchecked()
+ {
+ _Data->_ReaderShared.StorePosition(ref _Data->_Reader);
+ }
+
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ public void* PrepareRead(ulong size, ulong alignment)
+ {
+ CheckNullAndThrow(_Data);
+ return PrepareReadUnchecked(size, alignment);
+ }
+
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ public void* PrepareReadUnchecked(ulong size, ulong alignment)
+ {
+ var pos = Align(_Data->_Reader.position, alignment);
+ var end = pos + size;
+ if (end > _Data->_Reader.end)
+ GetBufferSpaceToReadFrom(ref pos, ref end);
+ _Data->_Reader.position = end;
+ return _Data->_Reader.buffer + pos;
+ }
+
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ public void* Read(ulong size, ulong alignment)
+ {
+ CheckNullAndThrow(_Data);
+ return ReadUnchecked(size, alignment);
+ }
+
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ public T Read<T>() where T : unmanaged
+ {
+ CheckNullAndThrow(_Data);
+ return *(T*)ReadUnchecked((ulong)sizeof(T), (ulong)UnsafeUtility.AlignOf<T>());
+ }
+
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ public void* ReadUnchecked(ulong size, ulong alignment)
+ => PrepareReadUnchecked(size, alignment);
+
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ public T ReadUnchecked<T>() where T : unmanaged
+ {
+ CheckNullAndThrow(_Data);
+ return *(T*)ReadUnchecked((ulong)sizeof(T), (ulong)UnsafeUtility.AlignOf<T>());
+ }
+
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ public void* ReadArray(ulong elementSize, ulong elementAlignment, ulong count)
+ {
+ CheckNullAndThrow(_Data);
+ return ReadArrayUnchecked(elementSize, elementAlignment, count);
+ }
+
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ public void* ReadArrayUnchecked(ulong elementSize, ulong elementAlignment, ulong count)
+ => PrepareReadUnchecked(elementSize * count, elementAlignment);
+
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ private void GetBufferSpaceToReadFrom(ref ulong position, ref ulong end)
+ {
+ if (end > _Data->_Reader.size)
+ {
+ end -= position;
+ position = 0;
+ _Data->_Reader.@base += _Data->_Reader.size;
+ }
+ for (;;)
+ {
+ var writerPos = _Data->_WriterShared.LoadPosition();
+ var available = writerPos - _Data->_Reader.@base;
+ // Signed comparison (available can be negative)
+ if ((long)available >= (long)end)
+ {
+ _Data->_Reader.end = Math.Min(available, _Data->_Reader.size);
+ break;
+ }
+ }
+ }
+ }
+
+ public struct BlockingWriter
+ {
+ [NativeDisableUnsafePtrRestriction]
+ private Data* _Data;
+
+ public bool IsCreated => _Data != null;
+
+ internal BlockingWriter(void* data)
+ {
+ _Data = (Data*)data;
+ }
+
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ public void FinishWrite()
+ {
+ CheckNullAndThrow(_Data);
+ FinishWriteUnchecked();
+ }
+
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ public void FinishWriteUnchecked()
+ {
+ _Data->_WriterShared.StorePosition(ref _Data->_Writer);
+ }
+
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ public void* PrepareWrite(ulong size, ulong alignment)
+ {
+ CheckNullAndThrow(_Data);
+ return PrepareWriteUnchecked(size, alignment);
+ }
+
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ public void* PrepareWriteUnchecked(ulong size, ulong alignment)
+ {
+ var pos = Align(_Data->_Writer.position, alignment);
+ var end = pos + size;
+ if (end > _Data->_Writer.end)
+ GetBufferSpaceToWriteTo(ref pos, ref end);
+ _Data->_Writer.position = end;
+ return _Data->_Writer.buffer + pos;
+ }
+
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ public void Write(void* value, ulong size, ulong alignment)
+ {
+ CheckNullAndThrow(_Data);
+ WriteUnchecked(value, size, alignment);
+ }
+
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ public void Write<T>(T* value) where T : unmanaged
+ => Write(value, (ulong)sizeof(T), (ulong)UnsafeUtility.AlignOf<T>());
+
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ public void Write<T>(ref T value) where T : unmanaged
+ => Write(UnsafeUtility.AddressOf(ref value), (ulong)sizeof(T), (ulong)UnsafeUtility.AlignOf<T>());
+
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ public void WriteUnchecked(void* value, ulong size, ulong alignment)
+ {
+ var dest = PrepareWriteUnchecked(size, alignment);
+ UnsafeUtility.MemCpy(dest, value, (long)size);
+ }
+
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ public void WriteArray(void* values, ulong elementSize, ulong elementAlignment, ulong count)
+ {
+ CheckNullAndThrow(_Data);
+ WriteArrayUnchecked(values, elementSize, elementAlignment, count);
+ }
+
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ public void WriteArrayUnchecked(void* values, ulong elementSize, ulong elementAlignment, ulong count)
+ {
+ var szInBytes = elementSize * count;
+ var dest = PrepareWriteUnchecked(szInBytes, elementAlignment);
+ UnsafeUtility.MemCpy(dest, values, (long)szInBytes);
+ }
+
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ public void WriteArray<T>(T* values, ulong count) where T : unmanaged
+ => WriteArray(values, (ulong)sizeof(T), (ulong)UnsafeUtility.AlignOf<T>(), count);
+
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ private void GetBufferSpaceToWriteTo(ref ulong position, ref ulong end)
+ {
+ if (end > _Data->_Writer.size)
+ {
+ end -= position;
+ position = 0;
+ _Data->_Writer.@base += _Data->_Writer.size;
+ }
+ for (;;)
+ {
+ var readerPos = _Data->_ReaderShared.LoadPosition();
+ var available = readerPos - _Data->_Writer.@base + _Data->_Writer.size;
+ // Signed comparison (available can be negative)
+ if ((long)available >= (long)end)
+ {
+ _Data->_Writer.end = Math.Min(available, _Data->_Writer.size);
+ break;
+ }
+ }
+ }
+ }
+
+ private struct Data
+ {
+ public LocalState _Reader;
+ public SharedState _ReaderShared;
+ public LocalState _Writer;
+ public SharedState _WriterShared;
+ public Allocator Allocator;
+ }
+
+ // Using the StructLayout attribute to force the struct to be cache-aligned, to avoid false-sharing.
+ [StructLayout(LayoutKind.Sequential, Size = 64)]
+ private struct LocalState
+ {
+ [NativeDisableUnsafePtrRestriction]
+ public byte* buffer;
+ public ulong position;
+ public ulong end;
+ public ulong @base;
+ public ulong size;
+ }
+
+ public struct NonblockingReader
+ {
+ [NativeDisableUnsafePtrRestriction]
+ private Data* _Data;
+
+ public bool IsCreated => _Data != null;
+
+ internal NonblockingReader(void* data)
+ {
+ _Data = (Data*)data;
+ }
+
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ public void FinishRead()
+ {
+ CheckNullAndThrow(_Data);
+ FinishReadUnchecked();
+ }
+
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ public void FinishReadUnchecked()
+ {
+ _Data->_ReaderShared.StorePosition(ref _Data->_Reader);
+ }
+
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ public bool TryPrepareRead(ulong size, ulong alignment, out void* outData)
+ {
+ CheckNullAndThrow(_Data);
+ return TryPrepareReadUnchecked(size, alignment, out outData);
+ }
+
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ public bool TryPrepareReadUnchecked(ulong size, ulong alignment, out void* outData)
+ {
+ outData = null;
+ var pos = Align(_Data->_Reader.position, alignment);
+ var end = pos + size;
+ var success = end <= _Data->_Reader.end || TryGetBufferSpaceToReadFromNonBlocking(ref pos, ref end);
+ if (!success)
+ return false;
+
+ _Data->_Reader.position = end;
+ outData = _Data->_Reader.buffer + pos;
+ return true;
+ }
+
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ public bool TryRead(ulong size, ulong alignment, out void* outData)
+ {
+ CheckNullAndThrow(_Data);
+ return TryPrepareReadUnchecked(size, alignment, out outData);
+ }
+
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ public bool TryRead<T>(out T outData) where T : unmanaged
+ {
+ CheckNullAndThrow(_Data);
+ outData = default;
+ var ret = TryPrepareReadUnchecked((ulong)sizeof(T), (ulong)UnsafeUtility.AlignOf<T>(), out var ptr);
+ if (!ret)
+ return false;
+ outData = *(T*)ptr;
+ return true;
+ }
+
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ public bool TryReadArray(ulong elementSize, ulong elementAlignment, ulong elementCount, out void* outData)
+ {
+ CheckNullAndThrow(_Data);
+ return TryPrepareReadUnchecked(elementSize * elementCount, elementAlignment, out outData);
+ }
+
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ public bool TryReadArray<T>(ulong elementCount, Allocator allocator, out NativeArray<T> outData) where T : unmanaged
+ {
+ CheckNullAndThrow(_Data);
+ outData = default;
+ var sizeInBytes = (ulong)sizeof(T) * elementCount;
+ var ret = TryPrepareReadUnchecked(sizeInBytes, (ulong)UnsafeUtility.AlignOf<T>(), out var ptr);
+ if (!ret)
+ return false;
+ outData = new NativeArray<T>((int)elementCount, allocator, NativeArrayOptions.UninitializedMemory);
+ UnsafeUtility.MemCpy(outData.GetUnsafePtr(), ptr, (long)sizeInBytes);
+ return true;
+ }
+
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ private bool TryGetBufferSpaceToReadFromNonBlocking(ref ulong position, ref ulong end)
+ {
+ ulong @base = _Data->_Reader.@base;
+ if (end > _Data->_Reader.size)
+ {
+ end -= position;
+ position = 0;
+ @base += _Data->_Reader.size;
+ }
+ var writerPos = _Data->_WriterShared.LoadPosition();
+ var available = writerPos - @base;
+
+ // Signed comparison (available can be negative)
+ var success = (long)available >= (long)end;
+ if (!success)
+ return false;
+
+ _Data->_Reader.@base = @base;
+ _Data->_Reader.end = Math.Min(available, _Data->_Reader.size);
+ return true;
+ }
+ }
+
+ public struct NonblockingWriter
+ {
+ [NativeDisableUnsafePtrRestriction]
+ private Data* _Data;
+
+ public bool IsCreated => _Data != null;
+
+ internal NonblockingWriter(void* data)
+ {
+ _Data = (Data*)data;
+ }
+
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ public void FinishWrite()
+ {
+ CheckNullAndThrow(_Data);
+ FinishWriteUnchecked();
+ }
+
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ public void FinishWriteUnchecked()
+ {
+ _Data->_WriterShared.StorePosition(ref _Data->_Writer);
+ }
+
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ public bool TryPrepareWrite(ulong size, ulong alignment, out void* outData)
+ {
+ CheckNullAndThrow(_Data);
+ return TryPrepareWriteUnchecked(size, alignment, out outData);
+ }
+
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ public bool TryPrepareWriteUnchecked(ulong size, ulong alignment, out void* outData)
+ {
+ outData = null;
+ var pos = Align(_Data->_Writer.position, alignment);
+ var end = pos + size;
+ var success = end <= _Data->_Writer.end || TryGetBufferSpaceToWriteToNonBlocking(ref pos, ref end);
+ if (!success)
+ return false;
+
+ _Data->_Writer.position = end;
+ outData = _Data->_Writer.buffer + pos;
+ return true;
+ }
+
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ public bool TryWrite(void* value, ulong size, ulong alignment)
+ {
+ CheckNullAndThrow(_Data);
+ return TryWriteUnchecked(value, size, alignment);
+ }
+
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ public bool TryWrite<T>(T value) where T : unmanaged
+ {
+ CheckNullAndThrow(_Data);
+ return TryWriteUnchecked(&value, (ulong)sizeof(T), (ulong)UnsafeUtility.AlignOf<T>());
+ }
+
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ public bool TryWriteArray(void* values, ulong elementSize, ulong elementAlignment, ulong elementCount)
+ {
+ CheckNullAndThrow(_Data);
+ return TryWriteUnchecked(values, elementSize * elementCount, elementAlignment);
+ }
+
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ public bool TryWriteArray<T>(NativeArray<T> array) where T : unmanaged
+ {
+ CheckNullAndThrow(_Data);
+ var sizeInBytes = sizeof(T) * array.Length;
+ if (!TryPrepareWriteUnchecked((ulong)sizeInBytes, (ulong)UnsafeUtility.AlignOf<T>(), out var ptr))
+ return false;
+ UnsafeUtility.MemCpy(ptr, array.GetUnsafeReadOnlyPtr(), sizeInBytes);
+ return true;
+ }
+
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ public bool TryWriteUnchecked(void* value, ulong size, ulong alignment)
+ {
+ if (!TryPrepareWriteUnchecked(size, alignment, out var ptr))
+ return false;
+ UnsafeUtility.MemCpy(ptr, value, (long)size);
+ return true;
+ }
+
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ private bool TryGetBufferSpaceToWriteToNonBlocking(ref ulong position, ref ulong end)
+ {
+ ulong @base = _Data->_Writer.@base;
+ if (end > _Data->_Writer.size)
+ {
+ end -= position;
+ position = 0;
+ @base += _Data->_Writer.size;
+ }
+ var readerPos = _Data->_ReaderShared.LoadPosition();
+ var available = readerPos - @base + _Data->_Writer.size;
+
+ // Signed comparison (available can be negative)
+ var success = (long)available >= (long)end;
+ if (!success)
+ return false;
+
+ _Data->_Writer.@base = @base;
+ _Data->_Writer.end = Math.Min(available, _Data->_Writer.size);
+ return true;
+
+ }
+ }
+
+ [StructLayout(LayoutKind.Sequential, Size = 64)]
+ private struct SharedState
+ {
+ private ulong position;
+
+ // in .NET, 64-bit reads and writes are atomic on 64-bit systems.
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ public ulong LoadPosition()
+ => position;
+
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ public void StorePosition(ref LocalState localState)
+ => position = localState.@base + localState.position;
+ }
+
+ [NativeDisableUnsafePtrRestriction]
+ private Data* _Data;
+
+ public Allocator Allocator => IsCreated ? _Data->Allocator : Allocator.Invalid;
+
+ public bool IsCreated => _Data != null;
+
+ public UnsafeRingBuffer(long size, Allocator allocator)
+ {
+ CheckBufferParametersAndThrow(size, allocator);
+ _Data = UnsafeUtilityEx.CallocTracked<Data>(allocator);
+ _Data->Allocator = allocator;
+ InitializeUnchecked(size, allocator);
+ }
+
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ public BlockingReader AsBlockingReader()
+ {
+ CheckNullAndThrow(_Data);
+ return AsBlockingReaderUnchecked();
+ }
+
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ public BlockingReader AsBlockingReaderUnchecked()
+ => new BlockingReader(_Data);
+
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ public BlockingWriter AsBlockingWriter()
+ {
+ CheckNullAndThrow(_Data);
+ return AsBlockingWriterUnchecked();
+ }
+
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ public BlockingWriter AsBlockingWriterUnchecked()
+ => new BlockingWriter(_Data);
+
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ public NonblockingReader AsNonblockingReader()
+ {
+ CheckNullAndThrow(_Data);
+ return AsNonblockingReaderUnchecked();
+ }
+
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ public NonblockingReader AsNonblockingReaderUnchecked()
+ => new NonblockingReader(_Data);
+
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ public NonblockingWriter AsNonblockingWriter()
+ {
+ CheckNullAndThrow(_Data);
+ return AsNonblockingWriterUnchecked();
+ }
+
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ public NonblockingWriter AsNonblockingWriterUnchecked()
+ => new NonblockingWriter(_Data);
+
+ public void Dispose()
+ {
+ CheckDisposedAndThrow(_Data);
+
+ Reset();
+ var allocator = _Data->Allocator;
+ UnsafeUtility.FreeTracked(_Data, allocator);
+ _Data = null;
+ }
+
+ public void Initialize(long size, Allocator allocator)
+ {
+ CheckNullAndThrow(_Data);
+ CheckBufferParametersAndThrow(size, allocator);
+ InitializeUnchecked(size, allocator);
+ }
+
+ private void InitializeUnchecked(long size, Allocator allocator)
+ {
+ ResetUnchecked();
+ var buffer = UnsafeUtility.MallocTracked(size, UnsafeUtility.AlignOf<byte>(), allocator, 0);
+ _Data->_Reader.buffer = _Data->_Writer.buffer = (byte*)buffer;
+ _Data->_Reader.size = _Data->_Writer.size = (ulong)size;
+ }
+
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ public void Reset()
+ {
+ CheckNullAndThrow(_Data);
+ ResetUnchecked();
+ }
+
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ public void ResetUnchecked()
+ {
+ if (_Data->_Reader.buffer != null)
+ UnsafeUtility.FreeTracked(_Data->_Reader.buffer, _Data->Allocator);
+ _Data->_Reader = _Data->_Writer = new LocalState();
+ _Data->_ReaderShared = _Data->_WriterShared = new SharedState();
+ }
+
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ private static ulong Align(ulong pos, ulong alignment)
+ => (pos + alignment - 1) & ~(alignment - 1);
+
+ [Conditional("DEVELOPMENT_BUILD"), Conditional("ENABLE_UNITY_COLLECTIONS_CHECKS")]
+ private static void CheckBufferParametersAndThrow(long size, Allocator allocator)
+ {
+ if (!IsPowerOfTwo(size))
+ throw new ArgumentException($"{nameof(size)} is not a power of two");
+ if (allocator < Allocator.Temp)
+ throw new ArgumentException($"{nameof(allocator)} is not a valid allocator");
+ }
+
+ [Conditional("DEVELOPMENT_BUILD"), Conditional("ENABLE_UNITY_COLLECTIONS_CHECKS")]
+ private static void CheckDisposedAndThrow(Data* data)
+ {
+ if (data == null)
+ throw new ObjectDisposedException("RingBuffer has already been disposed");
+ }
+
+ [Conditional("DEVELOPMENT_BUILD"), Conditional("ENABLE_UNITY_COLLECTIONS_CHECKS")]
+ private static void CheckNullAndThrow(Data* data)
+ {
+ if (data == null)
+ throw new NullReferenceException("RingBuffer is not properly initialized");
+ }
+
+ // Check whether n is a power of two.
+ [MethodImpl(MethodImplOptions.AggressiveInlining)]
+ private static bool IsPowerOfTwo(long n)
+ {
+ if (n == 0)
+ return false;
+ if ((n & (~(n - 1))) == n)
+ return true;
+ return false;
+ }
+ }
+ }
+
+ internal unsafe struct NativeRingBuffer : IDisposable
+ {
+ [NativeContainer]
+ public struct BlockingReader : IDisposable
+ {
+ private UnsafeRingBuffer.BlockingReader m_Reader;
+ [NativeDisableUnsafePtrRestriction]
+ private int* m_Count;
+#if ENABLE_UNITY_COLLECTIONS_CHECKS
+ private AtomicSafetyHandle m_Safety;
+#endif
+
+#if ENABLE_UNITY_COLLECTIONS_CHECKS
+ internal BlockingReader(UnsafeRingBuffer.BlockingReader reader, int* readerCount, AtomicSafetyHandle safety)
+#else
+ internal BlockingReader(UnsafeRingBuffer.BlockingReader reader, int* readerCount)
+#endif
+ {
+ m_Reader = reader;
+ m_Count = readerCount;
+#if ENABLE_UNITY_COLLECTIONS_CHECKS
+ m_Safety = safety;
+#endif
+ }
+
+ public void Dispose()
+ {
+ CheckSafetyHandle();
+
+ if (m_Count == null)
+ return;
+ Interlocked.Decrement(ref UnsafeUtility.AsRef<int>(m_Count));
+ }
+
+ public JobHandle Dispose(JobHandle depends)
+ {
+ CheckSafetyHandle();
+
+ return new DecrementCountJob { Count = m_Count }.Schedule(depends);
+ }
+
+ public void FinishRead()
+ {
+ CheckSafetyHandle();
+
+ m_Reader.FinishRead();
+ }
+
+ public T Read<T>() where T : unmanaged
+ {
+ CheckSafetyHandle();
+
+ return m_Reader.Read<T>();
+ }
+
+ [Conditional("ENABLE_UNITY_COLLECTIONS_CHECKS")]
+ private void CheckSafetyHandle()
+ {
+#if ENABLE_UNITY_COLLECTIONS_CHECKS
+ if (!AtomicSafetyHandle.IsHandleValid(m_Safety))
+ throw new ObjectDisposedException("This container has already been disposed");
+#endif
+ }
+ }
+
+ [NativeContainer]
+ public struct BlockingWriter : IDisposable
+ {
+ private UnsafeRingBuffer.BlockingWriter m_Writer;
+ [NativeDisableUnsafePtrRestriction]
+ private int* m_Count;
+#if ENABLE_UNITY_COLLECTIONS_CHECKS
+ private AtomicSafetyHandle m_Safety;
+#endif
+
+#if ENABLE_UNITY_COLLECTIONS_CHECKS
+ internal BlockingWriter(UnsafeRingBuffer.BlockingWriter writer, int* writerCount, AtomicSafetyHandle safety)
+#else
+ internal BlockingWriter(UnsafeRingBuffer.BlockingWriter writer, int* writerCount)
+#endif
+ {
+ m_Writer = writer;
+ m_Count = writerCount;
+#if ENABLE_UNITY_COLLECTIONS_CHECKS
+ m_Safety = safety;
+#endif
+ }
+
+ public void Dispose()
+ {
+ CheckSafetyHandle();
+
+ if (m_Count == null)
+ return;
+ Interlocked.Decrement(ref UnsafeUtility.AsRef<int>(m_Count));
+ }
+
+ public JobHandle Dispose(JobHandle depends)
+ {
+ CheckSafetyHandle();
+
+ return new DecrementCountJob { Count = m_Count }.Schedule(depends);
+ }
+
+ public void FinishWrite()
+ {
+ CheckSafetyHandle();
+
+ m_Writer.FinishWrite();
+ }
+
+ public void Write<T>(T value) where T : unmanaged
+ {
+ CheckSafetyHandle();
+
+ m_Writer.Write(&value);
+ }
+
+ public void WriteRef<T>(ref T value) where T : unmanaged
+ {
+ CheckSafetyHandle();
+
+ m_Writer.Write(ref value);
+ }
+
+ [Conditional("ENABLE_UNITY_COLLECTIONS_CHECKS")]
+ private void CheckSafetyHandle()
+ {
+#if ENABLE_UNITY_COLLECTIONS_CHECKS
+ if (!AtomicSafetyHandle.IsHandleValid(m_Safety))
+ throw new ObjectDisposedException("This container has already been disposed");
+#endif
+ }
+ }
+
+ [NativeContainer]
+ public struct NonblockingReader : IDisposable
+ {
+ private UnsafeRingBuffer.NonblockingReader m_Reader;
+ [NativeDisableUnsafePtrRestriction]
+ private int* m_Count;
+#if ENABLE_UNITY_COLLECTIONS_CHECKS
+ private AtomicSafetyHandle m_Safety;
+#endif
+
+#if ENABLE_UNITY_COLLECTIONS_CHECKS
+ internal NonblockingReader(UnsafeRingBuffer.NonblockingReader reader, int* readerCount,
+ AtomicSafetyHandle safety)
+#else
+ internal NonblockingReader(UnsafeRingBuffer.NonblockingReader reader, int* readerCount)
+#endif
+ {
+ m_Reader = reader;
+ m_Count = readerCount;
+#if ENABLE_UNITY_COLLECTIONS_CHECKS
+ m_Safety = safety;
+#endif
+ }
+
+ public void Dispose()
+ {
+ CheckSafetyHandle();
+
+ if (m_Count == null)
+ return;
+ Interlocked.Decrement(ref UnsafeUtility.AsRef<int>(m_Count));
+ }
+
+ public JobHandle Dispose(JobHandle depends)
+ {
+ CheckSafetyHandle();
+
+ return new DecrementCountJob { Count = m_Count }.Schedule(depends);
+ }
+
+ public void FinishRead()
+ {
+ CheckSafetyHandle();
+
+ m_Reader.FinishRead();
+ }
+
+ public bool TryRead<T>(out T value) where T : unmanaged
+ {
+ CheckSafetyHandle();
+
+ return m_Reader.TryRead(out value);
+ }
+
+ [Conditional("ENABLE_UNITY_COLLECTIONS_CHECKS")]
+ private void CheckSafetyHandle()
+ {
+#if ENABLE_UNITY_COLLECTIONS_CHECKS
+ if (!AtomicSafetyHandle.IsHandleValid(m_Safety))
+ throw new ObjectDisposedException("This container has already been disposed");
+#endif
+ }
+ }
+
+ [NativeContainer]
+ public struct NonblockingWriter : IDisposable
+ {
+ private UnsafeRingBuffer.NonblockingWriter m_Writer;
+ [NativeDisableUnsafePtrRestriction]
+ private int* m_Count;
+#if ENABLE_UNITY_COLLECTIONS_CHECKS
+ private AtomicSafetyHandle m_Safety;
+#endif
+
+#if ENABLE_UNITY_COLLECTIONS_CHECKS
+ internal NonblockingWriter(UnsafeRingBuffer.NonblockingWriter writer, int* writerCount,
+ AtomicSafetyHandle safety)
+#else
+ internal NonblockingWriter(UnsafeRingBuffer.NonblockingWriter writer, int* writerCount)
+#endif
+ {
+ m_Writer = writer;
+ m_Count = writerCount;
+#if ENABLE_UNITY_COLLECTIONS_CHECKS
+ m_Safety = safety;
+#endif
+ }
+
+ public void Dispose()
+ {
+ CheckSafetyHandle();
+
+ if (m_Count == null)
+ return;
+ Interlocked.Decrement(ref UnsafeUtility.AsRef<int>(m_Count));
+ }
+
+ public JobHandle Dispose(JobHandle depends)
+ {
+ CheckSafetyHandle();
+
+ return new DecrementCountJob { Count = m_Count }.Schedule(depends);
+ }
+
+ public void FinishWrite()
+ {
+ CheckSafetyHandle();
+
+ m_Writer.FinishWrite();
+ }
+
+ public bool TryWrite<T>(T value) where T : unmanaged
+ {
+ CheckSafetyHandle();
+
+ return m_Writer.TryWrite(value);
+ }
+
+ [Conditional("ENABLE_UNITY_COLLECTIONS_CHECKS")]
+ private void CheckSafetyHandle()
+ {
+#if ENABLE_UNITY_COLLECTIONS_CHECKS
+ if (!AtomicSafetyHandle.IsHandleValid(m_Safety))
+ throw new ObjectDisposedException("This container has already been disposed");
+#endif
+ }
+ }
+
+ private UnsafeRingBuffer m_RingBuffer;
+ private int* m_ReaderCount;
+ private int* m_WriterCount;
+
+#if ENABLE_UNITY_COLLECTIONS_CHECKS
+ private AtomicSafetyHandle m_Safety;
+
+ private static int s_StaticSafetyId = AtomicSafetyHandle.NewStaticSafetyId<NativeRingBuffer>();
+#endif
+
+ public bool IsCreated => m_RingBuffer.IsCreated;
+
+ public NativeRingBuffer(long size, Allocator allocator)
+ {
+ m_RingBuffer = new UnsafeRingBuffer(size, allocator);
+
+#if ENABLE_UNITY_COLLECTIONS_CHECKS
+ m_Safety = UnsafeUtilityEx.CreateAtomicSafetyHandleForAllocator(allocator);
+ AtomicSafetyHandle.SetStaticSafetyId(ref m_Safety, s_StaticSafetyId);
+#endif
+ m_ReaderCount = UnsafeUtilityEx.CallocTracked<int>(allocator);
+ m_WriterCount = UnsafeUtilityEx.CallocTracked<int>(allocator);
+ }
+
+ private struct DisposeData
+ {
+ public UnsafeRingBuffer m_RingBuffer;
+ public int* m_ReaderCount;
+ public int* m_WriterCount;
+#if ENABLE_UNITY_COLLECTIONS_CHECKS
+ public AtomicSafetyHandle m_Safety;
+#endif
+
+ public void Dispose()
+ {
+ if (m_ReaderCount == null || *m_ReaderCount != 0)
+ throw new InvalidOperationException("Cannot dispose buffer with active readers");
+
+ UnsafeUtility.FreeTracked(m_ReaderCount, m_RingBuffer.Allocator);
+
+ if (m_WriterCount == null || *m_WriterCount != 0)
+ throw new InvalidOperationException("Cannot dispose buffer with active writers");
+
+ UnsafeUtility.FreeTracked(m_WriterCount, m_RingBuffer.Allocator);
+
+ m_RingBuffer.Dispose();
+ }
+ }
+
+ public void Dispose()
+ {
+#if ENABLE_UNITY_COLLECTIONS_CHECKS
+ AtomicSafetyHandle.CheckDeallocateAndThrow(m_Safety);
+ AtomicSafetyHandle.Release(m_Safety);
+
+ new DisposeData{ m_RingBuffer = m_RingBuffer, m_Safety = m_Safety, m_ReaderCount = m_ReaderCount, m_WriterCount = m_WriterCount }.Dispose();
+#else
+ new DisposeData{ m_RingBuffer = m_RingBuffer, m_ReaderCount = m_ReaderCount, m_WriterCount = m_WriterCount }.Dispose();
+#endif
+ m_RingBuffer = default;
+ m_ReaderCount = null;
+ m_WriterCount = null;
+ }
+
+ private struct DecrementCountJob : IJob
+ {
+ [NativeDisableUnsafePtrRestriction] public int* Count;
+
+ public void Execute()
+ {
+ if (Count == null)
+ return;
+ Interlocked.Decrement(ref UnsafeUtility.AsRef<int>(Count));
+ }
+ }
+
+ private struct DisposeJob : IJob
+ {
+ public DisposeData data;
+
+ public void Execute()
+ {
+ data.Dispose();
+ }
+ }
+
+ public JobHandle Dispose(JobHandle depends)
+ {
+#if ENABLE_UNITY_COLLECTIONS_CHECKS
+ AtomicSafetyHandle.CheckDeallocateAndThrow(m_Safety);
+#endif
+
+ var handle = new DisposeJob
+ {
+ data = new DisposeData
+ {
+ m_RingBuffer = m_RingBuffer,
+#if ENABLE_UNITY_COLLECTIONS_CHECKS
+ m_Safety = m_Safety,
+#endif
+ m_ReaderCount = m_ReaderCount,
+ m_WriterCount = m_WriterCount
+ }
+ }.Schedule(depends);
+
+#if ENABLE_UNITY_COLLECTIONS_CHECKS
+ AtomicSafetyHandle.Release(m_Safety);
+#endif
+ m_RingBuffer = default;
+ m_ReaderCount = null;
+ m_WriterCount = null;
+ return handle;
+ }
+
+ public BlockingReader AsBlockingReader()
+ {
+ if (!IsCreated)
+ throw new NullReferenceException();
+
+ CheckIfOnlyReaderAndThrow();
+
+#if ENABLE_UNITY_COLLECTIONS_CHECKS
+ return new BlockingReader(m_RingBuffer.AsBlockingReader(), m_ReaderCount, m_Safety);
+#else
+ return new BlockingReader(m_RingBuffer.AsBlockingReader(), m_ReaderCount);
+#endif
+
+ }
+
+ public BlockingWriter AsBlockingWriter()
+ {
+ if (!IsCreated)
+ throw new NullReferenceException();
+
+ CheckIfOnlyWriterAndThrow();
+
+#if ENABLE_UNITY_COLLECTIONS_CHECKS
+ return new BlockingWriter(m_RingBuffer.AsBlockingWriter(), m_WriterCount, m_Safety);
+#else
+ return new BlockingWriter(m_RingBuffer.AsBlockingWriter(), m_WriterCount);
+#endif
+ }
+
+ public NonblockingReader AsNonblockingReader()
+ {
+ if (!IsCreated)
+ throw new NullReferenceException();
+
+ CheckIfOnlyReaderAndThrow();
+
+#if ENABLE_UNITY_COLLECTIONS_CHECKS
+ return new NonblockingReader(m_RingBuffer.AsNonblockingReader(), m_ReaderCount, m_Safety);
+#else
+ return new NonblockingReader(m_RingBuffer.AsNonblockingReader(), m_ReaderCount);
+#endif
+ }
+
+ public NonblockingWriter AsNonblockingWriter()
+ {
+ if (!IsCreated)
+ throw new NullReferenceException();
+
+ CheckIfOnlyWriterAndThrow();
+
+#if ENABLE_UNITY_COLLECTIONS_CHECKS
+ return new NonblockingWriter(m_RingBuffer.AsNonblockingWriter(), m_WriterCount, m_Safety);
+#else
+ return new NonblockingWriter(m_RingBuffer.AsNonblockingWriter(), m_WriterCount);
+#endif
+ }
+
+ private void CheckIfOnlyReaderAndThrow()
+ {
+ if (Interlocked.CompareExchange(ref UnsafeUtility.AsRef<int>(m_ReaderCount), 1, 0) != 0)
+ throw new InvalidOperationException("only a single reader instance may exist at any time");
+ }
+
+ private void CheckIfOnlyWriterAndThrow()
+ {
+ if (Interlocked.CompareExchange(ref UnsafeUtility.AsRef<int>(m_WriterCount), 1, 0) != 0)
+ throw new InvalidOperationException("only a single writer instance may exist at any time");
+ }
+ }
+}
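
The file above lands as a single 1084-line addition, so for orientation, here is a minimal single-producer/single-consumer sketch of the nonblocking API it introduces. This is illustrative only: `NativeRingBuffer` is `internal`, so code like this would have to live inside the SDK assembly, and `RingBufferExample` is not part of the package.

```csharp
using Unity.Collections;
using UnityEngine.XR.MagicLeap;

internal static class RingBufferExample
{
    public static void Run()
    {
        // Capacity must be a power of two; CheckBufferParametersAndThrow enforces this.
        var buffer = new NativeRingBuffer(1024, Allocator.Persistent);

        // Only one reader and one writer instance may exist at any time.
        var writer = buffer.AsNonblockingWriter();
        var reader = buffer.AsNonblockingReader();

        // TryWrite returns false instead of spinning when the buffer is full.
        if (writer.TryWrite(42))
            writer.FinishWrite(); // publish the write position to the reader side

        // TryRead returns false when no complete value has been published yet.
        if (reader.TryRead(out int value))
        {
            reader.FinishRead(); // release the consumed space back to the writer
            UnityEngine.Debug.Log($"read {value}");
        }

        // The buffer refuses to dispose while reader/writer instances are live,
        // so release those first.
        writer.Dispose();
        reader.Dispose();
        buffer.Dispose();
    }
}
```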
diff --git a/Runtime/Common/NativeRingBuffer.cs.meta b/Runtime/Common/NativeRingBuffer.cs.meta
new file mode 100644
index 0000000..6cd0675
--- /dev/null
+++ b/Runtime/Common/NativeRingBuffer.cs.meta
@@ -0,0 +1,3 @@
+fileFormatVersion: 2
+guid: 77169d65ab024e7083b3366c04449633
+timeCreated: 1702505075
\ No newline at end of file
diff --git a/Runtime/APIs/Permissions/MLPermissionNames.cs b/Runtime/Common/Utils/MLPermissionNames.cs
similarity index 100%
rename from Runtime/APIs/Permissions/MLPermissionNames.cs
rename to Runtime/Common/Utils/MLPermissionNames.cs
diff --git a/Runtime/APIs/Permissions/MLPermissionNames.cs.meta b/Runtime/Common/Utils/MLPermissionNames.cs.meta
similarity index 100%
rename from Runtime/APIs/Permissions/MLPermissionNames.cs.meta
rename to Runtime/Common/Utils/MLPermissionNames.cs.meta
diff --git a/Runtime/Common/Utils/MLPermissions.cs b/Runtime/Common/Utils/MLPermissions.cs
new file mode 100644
index 0000000..9ed971d
--- /dev/null
+++ b/Runtime/Common/Utils/MLPermissions.cs
@@ -0,0 +1,40 @@
+// %BANNER_BEGIN%
+// ---------------------------------------------------------------------
+// %COPYRIGHT_BEGIN%
+// Copyright (c) (2018-2024) Magic Leap, Inc. All Rights Reserved.
+// Use of this file is governed by the Software License Agreement, located here: https://www.magicleap.com/software-license-agreement-ml2
+// Terms and conditions applicable to third-party materials accompanying this distribution may also be found in the top-level NOTICE file appearing herein.
+// %COPYRIGHT_END%
+// ---------------------------------------------------------------------
+// %BANNER_END%
+
+using System;
+using UnityEngine.Android;
+
+namespace MagicLeap.Android
+{
+ public static class Permissions
+ {
+ public static void RequestPermission(string permission,
+ Action<string> onPermissionGranted = null,
+ Action<string> onPermissionDenied = null,
+ Action<string> onPermissionDeniedDontAskAgain = null)
+ {
+ RequestPermissions(new string[] { permission }, onPermissionGranted, onPermissionDenied, onPermissionDeniedDontAskAgain);
+ }
+
+ public static void RequestPermissions(string[] permissions,
+ Action<string> onPermissionGranted = null,
+ Action<string> onPermissionDenied = null,
+ Action<string> onPermissionDeniedDontAskAgain = null)
+ {
+ var callbacks = new PermissionCallbacks();
+ callbacks.PermissionGranted += onPermissionGranted;
+ callbacks.PermissionDenied += onPermissionDenied;
+ callbacks.PermissionDeniedAndDontAskAgain += onPermissionDeniedDontAskAgain;
+ Permission.RequestUserPermissions(permissions, callbacks);
+ }
+
+ public static bool CheckPermission(string permission) => Permission.HasUserAuthorizedPermission(permission);
+ }
+}
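
The new API is a thin wrapper over Unity's `PermissionCallbacks`, exposed as optional per-outcome delegates. A minimal usage sketch, assuming the `MLPermission` name constants from the renamed `MLPermissionNames.cs` above (e.g. `MLPermission.Camera`):

```csharp
using MagicLeap.Android;
using UnityEngine;
using UnityEngine.XR.MagicLeap; // assumed home of the MLPermission constants

public class PermissionsExample : MonoBehaviour
{
    private void Start()
    {
        // Dangerous permissions need a runtime request; CheckPermission is a
        // direct pass-through to Permission.HasUserAuthorizedPermission.
        if (Permissions.CheckPermission(MLPermission.Camera))
            return;

        Permissions.RequestPermission(MLPermission.Camera,
            onPermissionGranted: p => Debug.Log($"{p} granted"),
            onPermissionDenied: p => Debug.LogWarning($"{p} denied"),
            onPermissionDeniedDontAskAgain: p => Debug.LogError($"{p} denied, don't ask again"));
    }
}
```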
diff --git a/Editor/MLAppSim/LabDriverControl.cs.meta b/Runtime/Common/Utils/MLPermissions.cs.meta
similarity index 83%
rename from Editor/MLAppSim/LabDriverControl.cs.meta
rename to Runtime/Common/Utils/MLPermissions.cs.meta
index c7d5258..61219fe 100644
--- a/Editor/MLAppSim/LabDriverControl.cs.meta
+++ b/Runtime/Common/Utils/MLPermissions.cs.meta
@@ -1,5 +1,5 @@
fileFormatVersion: 2
-guid: 6b6e703409d65044e82158dd2b22abbf
+guid: 707e3d8a5a9769240a9fce0b1f923a9f
MonoImporter:
externalObjects: {}
serializedVersion: 2
diff --git a/Runtime/Common/Utils/MLPluginLog.cs b/Runtime/Common/Utils/MLPluginLog.cs
index 9d41470..a7f08ca 100644
--- a/Runtime/Common/Utils/MLPluginLog.cs
+++ b/Runtime/Common/Utils/MLPluginLog.cs
@@ -8,6 +8,9 @@
// ---------------------------------------------------------------------
// %BANNER_END%
+using System;
+using System.Diagnostics;
+
namespace UnityEngine.XR.MagicLeap
{
/// <summary>
@@ -42,6 +45,36 @@ public enum VerbosityLevel : uint
Verbose,
}
+ internal abstract class ScopedLog : IDisposable
+ {
+ private string m_ScopeName;
+ private bool m_ShowStackTrace;
+
+ protected string scopeName => m_ScopeName;
+
+ protected ScopedLog(string scopeName, bool showStackTrace = false)
+ {
+ m_ScopeName = scopeName;
+ m_ShowStackTrace = showStackTrace;
+ Log("Enter");
+ }
+
+ public void Dispose()
+ {
+ Log("Exit");
+ }
+
+ protected virtual string FormatLogMessage(string message) => $"[{scopeName}]: {message}";
+
+ protected virtual void LogInternal(string message, LogType logType = LogType.Log)
+ {
+ UnityEngine.Debug.LogFormat(logType, m_ShowStackTrace ? LogOption.None : LogOption.NoStacktrace, null, FormatLogMessage(message));
+ }
+
+ [Conditional("DEVELOPMENT_BUILD")]
+ public void Log(string message, LogType logType = LogType.Log)
+ => LogInternal(message, logType);
+ }
+
/// <summary>
/// Gets or sets current level of logs to print.
/// </summary>
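
Since `ScopedLog` is abstract, the intended pattern is a concrete subclass wrapped in a `using` block: Enter/Exit markers bracket the scope, and `Log` calls compile away outside development builds. A sketch, where `InitScope` is a hypothetical subclass (not part of the SDK) that must live in the same assembly because `MLPluginLog.ScopedLog` is internal:

```csharp
using UnityEngine.XR.MagicLeap;

// Hypothetical concrete scope type used only for this example.
internal sealed class InitScope : MLPluginLog.ScopedLog
{
    public InitScope() : base(nameof(InitScope)) { }
}

internal static class ScopedLogExample
{
    public static void Run()
    {
        // Construction logs "[InitScope]: Enter"; Dispose logs "[InitScope]: Exit".
        using (var scope = new InitScope())
        {
            scope.Log("doing work"); // removed at compile time unless DEVELOPMENT_BUILD is defined
        }
    }
}
```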
diff --git a/Runtime/Common/Utils/PlayerLoopUtil.cs b/Runtime/Common/Utils/PlayerLoopUtil.cs
new file mode 100644
index 0000000..2c996d1
--- /dev/null
+++ b/Runtime/Common/Utils/PlayerLoopUtil.cs
@@ -0,0 +1,41 @@
+using System;
+using UnityEngine;
+using UnityEngine.LowLevel;
+using UnityEngine.PlayerLoop;
+
+namespace MagicLeap
+{
+ internal class PlayerLoopUtil : MonoBehaviour
+ {
+ internal static Type[] InstallPath = {
+ typeof(Initialization),
+ typeof(Initialization.XREarlyUpdate)
+ };
+
+ internal static bool InstallIntoPlayerLoop(ref PlayerLoopSystem topLevelPlayerLoop, PlayerLoopSystem systemToInstall, params Type[] installPath)
+ {
+ installPath ??= Array.Empty<Type>();
+
+ ref var current = ref topLevelPlayerLoop;
+ foreach (var path in installPath)
+ {
+ var idx = Array.FindIndex(current.subSystemList, s => s.type == path);
+ if (idx == -1)
+ return false;
+ current = ref current.subSystemList[idx];
+ }
+
+ InstallSystem(ref current, systemToInstall);
+ return true;
+ }
+
+ private static void InstallSystem(ref PlayerLoopSystem parentSystem, PlayerLoopSystem targetSystem)
+ {
+ var subsystems = parentSystem.subSystemList ?? Array.Empty<PlayerLoopSystem>();
+ var length = subsystems.Length;
+ Array.Resize(ref subsystems, length + 1);
+ subsystems[length] = targetSystem;
+ parentSystem.subSystemList = subsystems;
+ }
+ }
+}
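
`InstallIntoPlayerLoop` walks `installPath` down the subsystem tree and appends the new system to the node it lands on. A hedged sketch of installing a per-frame callback under `Initialization.XREarlyUpdate` via the default `InstallPath`; `MyEarlyUpdate` is a placeholder marker type, and the code assumes assembly-internal access since `PlayerLoopUtil` is internal:

```csharp
using MagicLeap;
using UnityEngine.LowLevel;

internal static class PlayerLoopExample
{
    // Marker type used only to identify the installed PlayerLoopSystem.
    private struct MyEarlyUpdate { }

    public static void Install()
    {
        var system = new PlayerLoopSystem
        {
            type = typeof(MyEarlyUpdate),
            updateDelegate = () => { /* per-frame work */ }
        };

        // GetCurrentPlayerLoop returns a copy; SetPlayerLoop applies the mutation.
        var loop = PlayerLoop.GetCurrentPlayerLoop();
        if (PlayerLoopUtil.InstallIntoPlayerLoop(ref loop, system, PlayerLoopUtil.InstallPath))
            PlayerLoop.SetPlayerLoop(loop);
    }
}
```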
diff --git a/Editor/MLAppSim/AppSimShimLibSupport.cs.meta b/Runtime/Common/Utils/PlayerLoopUtil.cs.meta
similarity index 83%
rename from Editor/MLAppSim/AppSimShimLibSupport.cs.meta
rename to Runtime/Common/Utils/PlayerLoopUtil.cs.meta
index 2105af0..c6544c4 100644
--- a/Editor/MLAppSim/AppSimShimLibSupport.cs.meta
+++ b/Runtime/Common/Utils/PlayerLoopUtil.cs.meta
@@ -1,5 +1,5 @@
fileFormatVersion: 2
-guid: 2121e160397cff7438c63bff73d9653c
+guid: 76d6abfa9bef22c468c2885788c95217
MonoImporter:
externalObjects: {}
serializedVersion: 2
diff --git a/Runtime/Common/Utils/UnsafeUtilityEx.cs b/Runtime/Common/Utils/UnsafeUtilityEx.cs
index 4a917ee..ded4018 100644
--- a/Runtime/Common/Utils/UnsafeUtilityEx.cs
+++ b/Runtime/Common/Utils/UnsafeUtilityEx.cs
@@ -1,14 +1,94 @@
-using Unity.Collections;
-using Unity.Collections.LowLevel.Unsafe;
+using System.Diagnostics;
namespace UnityEngine.XR.MagicLeap.Unsafe
{
+ using System;
+ using Unity.Collections;
+ using Unity.Collections.LowLevel.Unsafe;
internal static unsafe class UnsafeUtilityEx
{
+ public static T* Calloc<T>(Allocator allocator, T initialValue = default) where T : unmanaged
+ {
+ var ptr = Malloc<T>(allocator);
+ *ptr = initialValue;
+ return ptr;
+ }
+
+ public static T* CallocTracked<T>(Allocator allocator, T initialValue = default, int callstacksToSkip = 1)
+ where T : unmanaged
+ {
+ var ptr = MallocTracked<T>(allocator, callstacksToSkip);
+ *ptr = initialValue;
+ return ptr;
+ }
+
public static T* Malloc<T>(Allocator allocator) where T : unmanaged
=> (T*)UnsafeUtility.Malloc(sizeof(T), UnsafeUtility.AlignOf<T>(), allocator);
- public static T* MallocTracked<T>(Allocator allocator, int callstacksToSkip) where T : unmanaged
+ public static T* MallocTracked<T>(Allocator allocator, int callstacksToSkip = 1) where T : unmanaged
=> (T*)UnsafeUtility.MallocTracked(sizeof(T), UnsafeUtility.AlignOf<T>(), allocator, callstacksToSkip);
+
+#if ENABLE_UNITY_COLLECTIONS_CHECKS
+ public static AtomicSafetyHandle CreateAtomicSafetyHandleForAllocator(Allocator allocator)
+ {
+ switch (allocator)
+ {
+ case Allocator.Invalid:
+ throw new InvalidOperationException("Cannot create safety handle for invalid allocator");
+ case Allocator.Temp:
+ return AtomicSafetyHandle.GetTempMemoryHandle();
+ default:
+ return AtomicSafetyHandle.Create();
+ }
+ }
+#endif
+
+ public static U* PunRefTypeUnchecked<T, U>(ref T obj) where T : unmanaged where U : unmanaged
+ => (U*)UnsafeUtility.AddressOf(ref UnsafeUtility.As<T, U>(ref obj));
+
+ public static U* PunType<T, U>(T* obj) where T : unmanaged where U : unmanaged
+ {
+ CheckTypeSizeAndThrow<T, U>();
+
+ return PunTypeUnchecked<T, U>(obj);
+ }
+ public static T* PunType<T>(void* obj, int expectedSize) where T : unmanaged
+ {
+ CheckTypeSizeAndThrow<T>(expectedSize);
+
+ return PunTypeUnchecked<T>(obj);
+ }
+
+ public static U* PunTypeUnchecked<U>(void* obj) where U : unmanaged
+ => (U*)UnsafeUtility.AddressOf(ref UnsafeUtility.AsRef<U>(obj));
+
+ public static U* PunTypeUnchecked<T, U>(T* obj) where T : unmanaged where U : unmanaged
+ => (U*)UnsafeUtility.AddressOf(ref UnsafeUtility.AsRef<U>(obj));
+
+ public static string FormatAddress(void* ptr)
+ {
+ var i64 = new IntPtr(ptr).ToInt64();
+ return $"0x{i64:X}";
+ }
+
+ [Conditional("DEVELOPMENT_BUILD")]
+ public static void CheckTypeSizeAndThrow<T, U>() where T : unmanaged where U : unmanaged
+ {
+ var szT = sizeof(T);
+ var szU = sizeof(U);
+ if (szT != szU)
+ throw new InvalidOperationException(
+ $"Type size mismatch! sizeof({typeof(T).Name}) = {szT}, sizeof({typeof(U).FullName}) = {szU}");
+ }
+
+ [Conditional("DEVELOPMENT_BUILD")]
+ public static void CheckTypeSizeAndThrow<T>(int expectedTypeSize) where T : unmanaged
+ {
+ var szT = sizeof(T);
+ if (szT != expectedTypeSize)
+ throw new InvalidOperationException(
+ $"Type size mismatch! Expected sizeof({typeof(T).FullName}) to be {expectedTypeSize}, was instead {szT}");
+ }
+
}
}
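
A short sketch of the expanded helpers, assuming assembly-internal access (the class is internal): `CallocTracked` allocates and assigns an initial value in one call, and the `PunType` overloads reinterpret pointers with a size check that only runs in development builds.

```csharp
using Unity.Collections;
using Unity.Collections.LowLevel.Unsafe;
using UnityEngine.XR.MagicLeap.Unsafe;

internal static unsafe class UnsafeUtilityExExample
{
    public static void Run()
    {
        // Tracked native allocation initialized to 41 (Calloc* = allocate + assign).
        int* counter = UnsafeUtilityEx.CallocTracked(Allocator.Persistent, 41);
        *counter += 1;

        // Reinterpret the same memory as uint; CheckTypeSizeAndThrow verifies
        // sizeof(int) == sizeof(uint), but only under DEVELOPMENT_BUILD.
        uint* reinterpreted = UnsafeUtilityEx.PunType<int, uint>(counter);
        UnityEngine.Debug.Log($"{*reinterpreted} at {UnsafeUtilityEx.FormatAddress(counter)}");

        UnsafeUtility.FreeTracked(counter, Allocator.Persistent);
    }
}
```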
diff --git a/Runtime/Deprecated/Camera/Bindings/MLCameraBaseNativeStructs.cs b/Runtime/Deprecated/Camera/Bindings/MLCameraBaseNativeStructs.cs
index 5329baf..61f035d 100644
--- a/Runtime/Deprecated/Camera/Bindings/MLCameraBaseNativeStructs.cs
+++ b/Runtime/Deprecated/Camera/Bindings/MLCameraBaseNativeStructs.cs
@@ -338,13 +338,22 @@ public MLCamera.PlaneInfo CreatePlaneInfo(bool copyToManagedMemory, byte[] byteA
if (planeInfo.Data != null)
{
- planeInfo.Data = new byte[planeInfo.Stride * planeInfo.Height];
- if (planeInfo.PixelStride == 2) {
- Marshal.Copy(Data, planeInfo.Data, 0, (int)((planeInfo.Stride * (planeInfo.Height - 1)) + (planeInfo.Width * planeInfo.PixelStride) -1));
- } else {
- Marshal.Copy(Data, planeInfo.Data, 0, (int)((planeInfo.Stride * (planeInfo.Height - 1)) + (planeInfo.Width * planeInfo.PixelStride)));
+ uint width = planeInfo.Stride == 0 ? planeInfo.Width : planeInfo.Stride;
+
+ planeInfo.Data = new byte[width * planeInfo.Height];
+ if (planeInfo.PixelStride == 2)
+ {
+ Marshal.Copy(Data, planeInfo.Data, 0, (int)((width * (planeInfo.Height - 1)) + (planeInfo.Width * planeInfo.PixelStride) - 1));
+ }
+ else
+ {
+ Marshal.Copy(Data, planeInfo.Data, 0, (int)((width * (planeInfo.Height - 1)) + (planeInfo.Width * planeInfo.PixelStride)));
+ }
+
+ if (planeInfo.Stride != 0)
+ {
+ planeInfo.Size = planeInfo.Stride * planeInfo.Height;
}
- planeInfo.Size = planeInfo.Stride * planeInfo.Height;
}
return planeInfo;
diff --git a/Runtime/APIs/Permissions/MLPermissions.cs b/Runtime/Deprecated/MLPermissions.cs
similarity index 82%
rename from Runtime/APIs/Permissions/MLPermissions.cs
rename to Runtime/Deprecated/MLPermissions.cs
index 36ab714..8f9ea1a 100644
--- a/Runtime/APIs/Permissions/MLPermissions.cs
+++ b/Runtime/Deprecated/MLPermissions.cs
@@ -10,11 +10,11 @@
using System;
using System.Collections.Generic;
-using System.Threading.Tasks;
using UnityEngine.XR.MagicLeap.Native;
namespace UnityEngine.XR.MagicLeap
{
+ [Obsolete("MLPermissions is deprecated. Use MagicLeap.Android.Permissions instead.")]
public sealed partial class MLPermissions : MLAutoAPISingleton
{
public delegate void OnPermissionGrantedDelegate(string permission);
@@ -48,9 +48,7 @@ internal void TriggerDeniedAndDontAskAgain(string permission)
private readonly HashSet<string> deniedPermissions = new HashSet<string>();
private readonly HashSet<string> dontAskAgainPermissions = new HashSet<string>();
-#pragma warning disable CS0414
private bool currentlyRequestingPermission = false;
-#pragma warning restore CS0414
public static MLResult CheckPermission(string permission) => MLResult.Create(Instance.CheckPermissionInternal(permission));
@@ -83,30 +81,9 @@ private void ProcessRequestQueue()
{
if (permissionRequests.Count > 0)
{
-#if UNITY_EDITOR
- var permissionName = permissionRequests.Peek();
- MLResult.Code resultCode = CheckPermissionInternal(permissionName);
- if (MLResult.IsOK(resultCode))
- {
- permissionRequests.Dequeue();
- foreach(Callbacks callbacks in requestData[permissionName])
- {
- callbacks.TriggerGranted(permissionName);
- }
-
- requestData[permissionName].Clear();
- }
- else if (resultCode == MLResult.Code.PermissionDenied)
- {
- permissionRequests.Dequeue();
- foreach (Callbacks callbacks in requestData[permissionName])
- {
- callbacks.TriggerDenied(permissionName);
- }
+ if (Application.isEditor)
+ return;
- requestData[permissionName].Clear();
- }
-#else
// a request has been issued and is waiting for a callback. during this time,
// the application doesn't have focus due to the popup. we need to wait until a callback
// clears this flag after the user makes their choice and focus returns.
@@ -130,7 +107,6 @@ private void ProcessRequestQueue()
callbacks.PermissionDenied += OnPermissionDenied;
callbacks.PermissionDeniedAndDontAskAgain += OnPermissionDeniedDontAskAgain;
Android.Permission.RequestUserPermission(permissionName, callbacks);
-#endif
}
}
@@ -140,12 +116,12 @@ private MLResult.Code CheckPermissionInternal(string permission)
{
return MLResult.Code.InvalidParam;
}
- nativeMLPermissionsCheckPermissionPerfMarker.Begin();
MLResult.Code result = MLResult.Code.Ok;
-#if UNITY_EDITOR
- result = NativeBindings.MLZIPermissionsIsGranted(permission);
- MLResult.DidNativeCallSucceed(result, nameof(NativeBindings.MLZIPermissionsIsGranted), NativeCallSuccess);
-#else
+
+ if (Application.isEditor)
+ return result;
+
+ nativeMLPermissionsCheckPermissionPerfMarker.Begin();
if (Android.Permission.HasUserAuthorizedPermission(permission))
{
result = MLResult.Code.Ok;
@@ -167,14 +143,15 @@ private MLResult.Code CheckPermissionInternal(string permission)
}
MLResult.DidNativeCallSucceed(result, nameof(Android.Permission.HasUserAuthorizedPermission), NativeCallSuccess);
-
-#endif
nativeMLPermissionsCheckPermissionPerfMarker.End();
return result;
}
private MLResult.Code RequestPermissionInternal(string permission, Callbacks callbacks)
{
+ if (Application.isEditor)
+ return MLResult.Code.Ok;
+
if (string.IsNullOrEmpty(permission))
{
Debug.LogError($"MLPermissions: requested permission name is blank");
@@ -204,22 +181,9 @@ private MLResult.Code RequestPermissionInternal(string permission, Callbacks cal
}
else
{
- // In ML App Sim we request here itself and use ProcessRequestQueue()
- // to poll for the result and trigger callbacks accordingly.
- // For android, we use ProcessRequestQueue() to just trigger
- // the request.
-
result = MLResult.Code.Pending;
if (!permissionRequests.Contains(permission))
{
-#if UNITY_EDITOR
- Task.Run(() =>
- {
- result = NativeBindings.MLZIPermissionsRequest(permission);
- MLResult.DidNativeCallSucceed(result, nameof(NativeBindings.MLZIPermissionsRequest), NativeCallSuccess);
- }
- );
-#endif
permissionRequests.Enqueue(permission);
}
}
diff --git a/Runtime/APIs/Permissions/MLPermissions.cs.meta b/Runtime/Deprecated/MLPermissions.cs.meta
similarity index 100%
rename from Runtime/APIs/Permissions/MLPermissions.cs.meta
rename to Runtime/Deprecated/MLPermissions.cs.meta
diff --git a/Runtime/Deprecated/MediaPlayer/API/MLMediaPlayerEditor.cs b/Runtime/Deprecated/MediaPlayer/API/MLMediaPlayerEditor.cs
deleted file mode 100644
index 21b66b3..0000000
--- a/Runtime/Deprecated/MediaPlayer/API/MLMediaPlayerEditor.cs
+++ /dev/null
@@ -1,403 +0,0 @@
-// %BANNER_BEGIN%
-// ---------------------------------------------------------------------
-// %COPYRIGHT_BEGIN%
-// Copyright (c) (2018-2022) Magic Leap, Inc. All Rights Reserved.
-// Use of this file is governed by the Software License Agreement, located here: https://www.magicleap.com/software-license-agreement-ml2
-// Terms and conditions applicable to third-party materials accompanying this distribution may also be found in the top-level NOTICE file appearing herein.
-// %COPYRIGHT_END%
-// ---------------------------------------------------------------------
-// %BANNER_END%
-
-namespace UnityEngine.XR.MagicLeap
-{
- using System;
- using System.Collections.Generic;
- using System.IO;
- using UnityEngine;
-
- /// <summary>
- /// MLMedia APIs.
- /// </summary>
- public partial class MLMedia
- {
- public partial class Player
- {
- // TODO : Video module is blacklisted for Relish. Re-enable once we add vulkan media player
-
- ///
- /// This class is the video player used in Editor. It implements the IMLMediaPlayer interface.
- /// This version of the media player does not support DRM videos.
- ///
- // public class Editor : MonoBehaviour, IMLMediaPlayer
- // {
- // ///
- // /// Starting volume of the media player.
- // ///
- // private const float STARTING_VOLUME = 30;
- //
- // ///
- // /// Stores a reference to the audio source.
- // ///
- // private AudioSource audioSource;
- //
- // ///
- // /// Texture used to render the video frame.
- // ///
- // private RenderTexture texture;
- //
- // public bool IsPlaying => false;
- //
- // void Awake()
- // {
- // this.audioSource = gameObject.AddComponent<AudioSource>();
- // }
- //
- //
- // /// Initiate asynchronous reset of media player. Use event to know when reset completes,
- // /// the player will be in a pre-prepared state. This method can be called anytime except while asynchronously preparing.
- // ///
- // ///
- // /// MLResult.Result will be MLResult.Code.NotImplemented
- // ///
- // public MLResult ResetAsync()
- // {
- // MLPluginLog.Error("MLMedia.Player.ResetAsync is only required on device");
- // return MLResult.Create(MLResult.Code.NotImplemented);
- // }
- //
- // public MLResult SetSourceURI(string source)
- // {
- // // Create a Url with provided string and test if its a local file
- // Uri uri;
- // bool result = Uri.TryCreate(source, UriKind.Absolute, out uri);
- // this.audioSource.playOnAwake = false;
- // this.SetVolume(STARTING_VOLUME);
- // return MLResult.Create(MLResult.Code.Ok);
- // }
- //
- // public MLResult SetSourcePath(string source)
- // {
- // Path.Combine(Application.streamingAssetsPath, source);
- // this.audioSource.playOnAwake = false;
- // this.SetVolume(STARTING_VOLUME);
- // return MLResult.Create(MLResult.Code.Ok);
- // }
- //
- // ///
- // /// Plays the video in the editor.
- // ///
- // ///
- // /// MLResult.Result will be MLResult.Code.Ok
- // ///
- // public MLResult Play()
- // {
- // this.audioSource.Play();
- // return MLResult.Create(MLResult.Code.Ok);
- // }
- //
- // ///
- // /// Pauses the video in the editor.
- // ///
- // ///
- // /// MLResult.Result will be MLResult.Code.Ok.
- // ///
- // public MLResult Pause()
- // {
- // this.audioSource.Pause();
- // return MLResult.Create(MLResult.Code.Ok);
- // }
- //
- // ///
- // /// Seeks the specified time in the video in the editor
- // ///
- // /// Absolute time to seek to
- // ///
- // /// MLResult.Result will be MLResult.Code.Ok
- // ///
- // public MLResult Seek(int positionMilliseconds)
- // {
- // const float MSToSeconds = 0.001f;
- // int seconds = (int)(positionMilliseconds * MSToSeconds);
- // return MLResult.Create(MLResult.Code.Ok);
- // }
- //
- // ///
- // /// Sets the volume of the video in the editor
- // ///
- // /// Volume to be set.
- // ///
- // /// MLResult.Result will be MLResult.Code.Ok
- // ///
- // public MLResult SetVolume(float vol)
- // {
- // this.audioSource.volume = vol;
- // return MLResult.Create(MLResult.Code.Ok);
- // }
- //
- // ///
- // /// Stops the video in the editor
- // ///
- // ///
- // /// MLResult.Result will be MLResult.Code.Ok
- // ///
- // public MLResult Stop()
- // {
- // // this.videoPlayer.Stop();
- // return MLResult.Create(MLResult.Code.Ok);
- // }
- //
- // ///
- // /// Resumes the video in the editor
- // ///
- // ///
- // /// MLResult.Result will be MLResult.Code.Ok
- // ///
- // public MLResult Resume()
- // {
- // this.audioSource.Play();
- // return MLResult.Create(MLResult.Code.Ok);
- // }
- //
- // ///
- // /// Sets the loop flag for the video in the editor
- // ///
- // /// Flag to loop
- // ///
- // /// MLResult.Result will be MLResult.Code.Ok
- // ///
- // public MLResult SetLooping(bool loop)
- // {
- // this.audioSource.loop = loop;
- // return MLResult.Create(MLResult.Code.Ok);
- // }
- //
- // ///
- // /// Releases any resource used by this media player ID.
- // ///
- // ///
- // /// MLResult.Result will be MLResult.Code.Ok
- // ///
- // public MLResult Cleanup()
- // {
- // UnityEngine.Object.Destroy(this.audioSource);
- // return MLResult.Create(MLResult.Code.Ok);
- // }
- //
- // ///
- // /// Selects the subtitle track, not available for editor.
- // ///
- // /// (unused) track id to be selected
- // ///
- // /// MLResult.Result will be MLResult.Code.NotImplemented
- // ///
- // public MLResult SelectSubtitleTrack(uint trackID)
- // {
- // MLPluginLog.Warning("MLMedia.Player.SelectSubtitleTrack is only available on device");
- // return MLResult.Create(MLResult.Code.NotImplemented);
- // }
- //
- // ///
- // /// Unselects the subtitle track, not available for editor.
- // ///
- // /// (unused) track id to be selected
- // ///
- // /// MLResult.Result will be MLResult.Code.NotImplemented
- // ///
- // public MLResult UnselectSubtitleTrack(uint trackID)
- // {
- // MLPluginLog.Warning("MLMedia.Player.UnselectSubtitleTrack is only available on device");
- // return MLResult.Create(MLResult.Code.NotImplemented);
- // }
- //
- // ///
- // /// Gets active audio channel count.
- // ///
- // /// (unused) Return channel count.
- // ///
- // /// MLResult.Result will be MLResult.Code.NotImplemented
- // ///
- // public MLResult GetAudioChannelCount(out int outAudioChannelCount)
- // {
- // outAudioChannelCount = this.audioSource.clip?.channels ?? 1;
- // return MLResult.Create(MLResult.Code.Ok);
- // }
- //
- // ///
- // /// Sets spatial audio state.
- // ///
- // /// (unused) Desired state of spatial audio.
- // ///
- // /// MLResult.Result will be MLResult.Code.NotImplemented
- // ///
- // public MLResult SetSpatialAudio(bool isEnabled)
- // {
- // this.audioSource.spatialize = isEnabled;
- // return MLResult.Create(MLResult.Code.Ok);
- // }
- //
- // ///
- // /// Gets spatial audio state.
- // ///
- //
- // /// (unused) Return state of spatial audio.
- // ///
- // /// MLResult.Result will be MLResult.Code.NotImplemented
- // /// </returns>
- // public MLResult GetSpatialAudio(out bool outIsEnabled)
- // {
- // outIsEnabled = this.audioSource.spatialize;
- // return MLResult.Create(MLResult.Code.Ok);
- // }
- //
- // /// <summary>
- // /// Sets world position of requested audio channel.
- // /// </summary>
- // /// <param name="channel">(unused) Selects the channel whose position is being set.</param>
- // /// <param name="position">(unused) Set selected channel's world position</param>
- // /// <returns>
- // /// MLResult.Result will be MLResult.Code.NotImplemented
- // /// </returns>
- // public MLResult SetAudioChannelPosition(MLMedia.Player.AudioChannel channel, Vector3 position)
- // {
- // MLPluginLog.Warning("MLMedia.Player.SetAudioChannelPosition is only available on device");
- // return MLResult.Create(MLResult.Code.NotImplemented);
- // }
- //
- // /// <summary>
- // /// Gets world position of requested audio channel.
- // /// </summary>
- // /// <param name="channel">(unused) Selects the channel whose position is being read.</param>
- // /// <param name="position">(unused) Return selected channel's world position</param>
- // /// <returns>
- // /// MLResult.Result will be MLResult.Code.NotImplemented
- // /// </returns>
- // public MLResult GetAudioChannelPosition(MLMedia.Player.AudioChannel channel, out Vector3 position)
- // {
- // position = new Vector3(0f, 0f, 0f);
- // MLPluginLog.Warning("MLMedia.Player.GetAudioChannelPosition is only available on device");
- // return MLResult.Create(MLResult.Code.NotImplemented);
- // }
- //
- // /// <summary>
- // /// Register a request to get the bytes used for a DRM key request.
- // /// </summary>
- // /// <param name="drmUUIDBytes">Bytes identifying the desired DRM type.</param>
- // /// <param name="callback">Callback to be called when successfully retrieved request data.</param>
- // /// <returns>
- // /// True if request was successfully registered.
- // /// </returns>
- // public bool RequestActivationKeyRequest(byte[] drmUUIDBytes, Action callback)
- // {
- // MLPluginLog.Warning("MLMedia.PlayerEditor.RequestActivationKeyRequest failed, editor version of MLMedia.Player does not support DRM.");
- // return false;
- // }
- //
- // /// <summary>
- // /// Get the video track bitrate
- // /// </summary>
- // /// <returns>The bitrate of the video track</returns>
- // public int GetVideoBitrate()
- // {
- // MLPluginLog.Warning("MLMedia.PlayerEditor.GetVideoBitrate failed, editor version of MLMedia.Player does not support bitrate.");
- // return 0;
- // }
- //
- // /// <summary>
- // /// Gets the duration of the video in milliseconds.
- // /// </summary>
- // /// <returns>Duration of the video</returns>
- // public TimeSpan GetDurationMilliseconds()
- // {
- // return TimeSpan.Zero;
- // }
- //
- // /// <summary>
- // /// Gets the current position of the video in milliseconds
- // /// </summary>
- // /// <returns>Position of the playback of the video</returns>
- // public TimeSpan GetPositionMilliseconds()
- // {
- // // return (int)this.videoPlayer.time * SecondsToMS;
- // return TimeSpan.Zero;
- // }
- //
- // /// <summary>
- // /// Get the width of the video in pixels
- // /// </summary>
- // /// <returns>The width of the video</returns>
- // public int GetWidth()
- // {
- // // return this.videoPlayer.targetTexture.width;
- // return this.texture.width;
- // }
- //
- // /// <summary>
- // /// Get the height of the video in pixels
- // /// </summary>
- // /// <returns>The height of the video</returns>
- // public int GetHeight()
- // {
- // // return this.videoPlayer.targetTexture.height;
- // return this.texture.height;
- // }
- //
- // /// <summary>
- // /// Sets the license server for DRM videos (should not be called)
- // /// </summary>
- // /// <param name="licenseServer">(unused) URL of the License Server</param>
- // public void SetLicenseServer(string licenseServer)
- // {
- // if (!string.IsNullOrEmpty(licenseServer))
- // {
- // MLPluginLog.Warning("MLMedia.PlayerEditor.SetLicenseServer failed, editor version of MLMedia.Player does not support DRM.");
- // }
- // }
- //
- // /// <summary>
- // /// Set custom header key-value pairs to use in addition to default of "User-Agent : Widevine CDM v1.0"
- // /// when performing key request to the DRM license server.
- // /// </summary>
- // /// <param name="headerData">(unused) Dictionary of custom header key-value pairs</param>
- // public void SetCustomLicenseHeaderData(Dictionary<string, string> headerData)
- // {
- // if (headerData != null)
- // {
- // MLPluginLog.Warning("MLMedia.PlayerEditor.SetCustomLicenseHeaderData failed, editor version of MLMedia.Player does not support DRM.");
- // }
- // }
- //
- // /// <summary>
- // /// Set custom key request key-value pair parameters used when generating default key request.
- // /// </summary>
- // /// <param name="messageData">(unused) Dictionary of optional key-value pair parameters</param>
- // public void SetCustomLicenseMessageData(Dictionary<string, string> messageData)
- // {
- // if (messageData != null)
- // {
- // MLPluginLog.Warning("MLMedia.PlayerEditor.SetCustomLicenseMessageData failed, editor version of MLMedia.Player does not support DRM.");
- // }
- // }
- //
- // /// <summary>
- // /// Gets the frame drop threshold.
- // /// </summary>
- // /// <returns>The currently set millisecond threshold.</returns>
- // public ulong GetFrameDropThresholdMs()
- // {
- // MLPluginLog.Warning("MLMedia.PlayerEditor.GetFrameDropThresholdMs is only available on device.");
- // return long.MaxValue;
- // }
- //
- // /// <summary>
- // /// Sets a threshold to drop video frames if they are older than specified value.
- // /// Setting this to 0 will not drop any frames, this is the default behavior.
- // /// </summary>
- // /// <param name="threshold">(unused) New threshold in milliseconds.</param>
- // public void SetFrameDropThresholdMs(ulong threshold)
- // {
- // MLPluginLog.Warning("MLMedia.PlayerEditor.SetFrameDropThresholdMs is only available on device.");
- // }
- // }
- }
- }
-}
diff --git a/Runtime/Deprecated/MediaPlayer/API/MLMediaPlayerEditor.cs.meta b/Runtime/Deprecated/MediaPlayer/API/MLMediaPlayerEditor.cs.meta
deleted file mode 100644
index 336a039..0000000
--- a/Runtime/Deprecated/MediaPlayer/API/MLMediaPlayerEditor.cs.meta
+++ /dev/null
@@ -1,11 +0,0 @@
-fileFormatVersion: 2
-guid: 12868a03407f33d46ac7ace6fa8c6ec6
-MonoImporter:
- externalObjects: {}
- serializedVersion: 2
- defaultReferences: []
- executionOrder: 0
- icon: {instanceID: 0}
- userData:
- assetBundleName:
- assetBundleVariant:
diff --git a/Runtime/Deprecated/WebRTC/Shaders/Native.mat b/Runtime/Deprecated/WebRTC/Shaders/Native.mat
index 11b8fb4..159162a 100644
--- a/Runtime/Deprecated/WebRTC/Shaders/Native.mat
+++ b/Runtime/Deprecated/WebRTC/Shaders/Native.mat
@@ -21,17 +21,17 @@ Material:
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_Name: Native
- m_Shader: {fileID: 46, guid: 0000000000000000f000000000000000, type: 0}
+ m_Shader: {fileID: 4800000, guid: 933532a4fcc9baf4fa0491de14d08ed7, type: 3}
m_Parent: {fileID: 0}
m_ModifiedSerializedProperties: 0
- m_ValidKeywords:
- - _GLOSSYREFLECTIONS_OFF
+ m_ValidKeywords: []
m_InvalidKeywords: []
m_LightmapFlags: 4
m_EnableInstancingVariants: 0
m_DoubleSidedGI: 0
m_CustomRenderQueue: -1
- stringTagMap: {}
+ stringTagMap:
+ RenderType: Opaque
disabledShaderPasses: []
m_LockedProperties:
m_SavedProperties:
@@ -98,6 +98,7 @@ Material:
- _AlphaClip: 0
- _AlphaToMask: 0
- _Blend: 0
+ - _BlendModePreserveSpecular: 1
- _BlendOp: 0
- _BumpScale: 1
- _ClearCoatMask: 0
diff --git a/Runtime/Deprecated/WebRTC/Shaders/RGB.mat b/Runtime/Deprecated/WebRTC/Shaders/RGB.mat
index 42eca2c..2ff25d0 100644
--- a/Runtime/Deprecated/WebRTC/Shaders/RGB.mat
+++ b/Runtime/Deprecated/WebRTC/Shaders/RGB.mat
@@ -21,7 +21,7 @@ Material:
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_Name: RGB
- m_Shader: {fileID: 46, guid: 0000000000000000f000000000000000, type: 0}
+ m_Shader: {fileID: 4800000, guid: 8d2bb70cbf9db8d4da26e15b26e74248, type: 3}
m_Parent: {fileID: 0}
m_ModifiedSerializedProperties: 0
m_ValidKeywords: []
@@ -30,7 +30,8 @@ Material:
m_EnableInstancingVariants: 0
m_DoubleSidedGI: 0
m_CustomRenderQueue: -1
- stringTagMap: {}
+ stringTagMap:
+ RenderType: Opaque
disabledShaderPasses: []
m_LockedProperties:
m_SavedProperties:
@@ -80,11 +81,24 @@ Material:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
+ - unity_Lightmaps:
+ m_Texture: {fileID: 0}
+ m_Scale: {x: 1, y: 1}
+ m_Offset: {x: 0, y: 0}
+ - unity_LightmapsInd:
+ m_Texture: {fileID: 0}
+ m_Scale: {x: 1, y: 1}
+ m_Offset: {x: 0, y: 0}
+ - unity_ShadowMasks:
+ m_Texture: {fileID: 0}
+ m_Scale: {x: 1, y: 1}
+ m_Offset: {x: 0, y: 0}
m_Ints: []
m_Floats:
- _AlphaClip: 0
- _AlphaToMask: 0
- _Blend: 0
+ - _BlendModePreserveSpecular: 1
- _BlendOp: 0
- _BumpScale: 1
- _Cull: 2
@@ -94,14 +108,20 @@ Material:
- _DstBlendAlpha: 0
- _GlossMapScale: 1
- _Glossiness: 0.5
+ - _GlossinessSource: 0
- _GlossyReflections: 1
- _Metallic: 0
- _Mode: 0
- _OcclusionStrength: 1
- _Parallax: 0.02
- _QueueOffset: 0
+ - _ReceiveShadows: 1
- _SampleGI: 0
+ - _Shininess: 0
+ - _Smoothness: 0.5
+ - _SmoothnessSource: 0
- _SmoothnessTextureChannel: 0
+ - _SpecSource: 0
- _SpecularHighlights: 1
- _SrcBlend: 1
- _SrcBlendAlpha: 1
diff --git a/Runtime/MagicLeap.SDK.asmdef b/Runtime/MagicLeap.SDK.asmdef
index dda9884..504e291 100644
--- a/Runtime/MagicLeap.SDK.asmdef
+++ b/Runtime/MagicLeap.SDK.asmdef
@@ -10,7 +10,9 @@
"GUID:75469ad4d38634e559750d17036d5f7c",
"GUID:dc960734dc080426fa6612f1c5fe95f3",
"GUID:4847341ff46394e83bb78fbd0652937e",
- "GUID:15fc0a57446b3144c949da3e2b9737a9"
+ "GUID:15fc0a57446b3144c949da3e2b9737a9",
+ "GUID:a9420e37d7990b54abdef6688edbe313",
+ "GUID:e0cd26848372d4e5c891c569017e11f1"
],
"includePlatforms": [],
"excludePlatforms": [],
diff --git a/Editor/SettingsProviders/Preferences.meta b/Runtime/OpenXR/FacialExpression.meta
similarity index 77%
rename from Editor/SettingsProviders/Preferences.meta
rename to Runtime/OpenXR/FacialExpression.meta
index 498c1c0..c26d749 100644
--- a/Editor/SettingsProviders/Preferences.meta
+++ b/Runtime/OpenXR/FacialExpression.meta
@@ -1,5 +1,5 @@
fileFormatVersion: 2
-guid: b5849b455f68e9e468ea51d1a983afa9
+guid: de317a1fdfe204e499549e318fedc59d
folderAsset: yes
DefaultImporter:
externalObjects: {}
diff --git a/Runtime/OpenXR/FacialExpression/MagicLeapFacialExpression.cs b/Runtime/OpenXR/FacialExpression/MagicLeapFacialExpression.cs
new file mode 100644
index 0000000..a3efb11
--- /dev/null
+++ b/Runtime/OpenXR/FacialExpression/MagicLeapFacialExpression.cs
@@ -0,0 +1,36 @@
+#if UNITY_OPENXR_1_9_0_OR_NEWER
+using System;
+using System.Collections.Generic;
+using System.Runtime.InteropServices;
+using UnityEngine.XR.MagicLeap;
+
+using NativeBindings = UnityEngine.XR.OpenXR.Features.MagicLeapSupport.MagicLeapFacialExpressionFeature.NativeBindings;
+
+namespace UnityEngine.XR.OpenXR.Features.MagicLeapSupport
+{
+ public partial class MagicLeapFacialExpressionFeature
+ {
+ private ulong facialExpressionClient;
+
+ public void CreateClient(FacialBlendShape[] requestedFacialBlendShapes)
+ {
+ var resultCode = NativeBindings.MLOpenXRCreateFacialExpressionClient((uint)requestedFacialBlendShapes.Length, requestedFacialBlendShapes, out ulong facialExpressionClient);
+ Utils.DidXrCallSucceed(resultCode, nameof(NativeBindings.MLOpenXRCreateFacialExpressionClient));
+
+ this.facialExpressionClient = facialExpressionClient;
+ }
+
+ public void DestroyClient()
+ {
+ var resultCode = NativeBindings.MLOpenXRDestroyFacialExpressionClient(facialExpressionClient);
+ Utils.DidXrCallSucceed(resultCode, nameof(NativeBindings.MLOpenXRDestroyFacialExpressionClient));
+ }
+
+ public void GetBlendShapesInfo(ref BlendShapeProperties[] blendShapes)
+ {
+ var resultCode = NativeBindings.MLOpenXRGetFacialExpressionBlendShapesInfo(facialExpressionClient, (uint)blendShapes.Length, blendShapes);
+ Utils.DidXrCallSucceed(resultCode, nameof(NativeBindings.MLOpenXRGetFacialExpressionBlendShapesInfo));
+ }
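+
+ // Illustrative usage sketch (not part of this file's original API surface;
+ // assumes the feature has been enabled in the project's OpenXR settings):
+ //
+ // var feature = OpenXRSettings.Instance.GetFeature<MagicLeapFacialExpressionFeature>();
+ // var requested = new[] { FacialBlendShape.JawDrop, FacialBlendShape.EyesClosedL };
+ // feature.CreateClient(requested);
+ // var properties = new BlendShapeProperties[requested.Length];
+ // feature.GetBlendShapesInfo(ref properties); // poll once per frame
+ // feature.DestroyClient(); // on teardown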
+ }
+}
+#endif
\ No newline at end of file
diff --git a/Editor/SettingsProviders/Preferences/MagicLeapEditorPreferencesProvider.cs.meta b/Runtime/OpenXR/FacialExpression/MagicLeapFacialExpression.cs.meta
similarity index 83%
rename from Editor/SettingsProviders/Preferences/MagicLeapEditorPreferencesProvider.cs.meta
rename to Runtime/OpenXR/FacialExpression/MagicLeapFacialExpression.cs.meta
index 0c68b21..c078408 100644
--- a/Editor/SettingsProviders/Preferences/MagicLeapEditorPreferencesProvider.cs.meta
+++ b/Runtime/OpenXR/FacialExpression/MagicLeapFacialExpression.cs.meta
@@ -1,5 +1,5 @@
fileFormatVersion: 2
-guid: ee314fe2723442740b6e85f8a54a8ad5
+guid: 470299748d1181a4bb4859371f09254c
MonoImporter:
externalObjects: {}
serializedVersion: 2
diff --git a/Runtime/OpenXR/FacialExpression/MagicLeapFacialExpressionData.cs b/Runtime/OpenXR/FacialExpression/MagicLeapFacialExpressionData.cs
new file mode 100644
index 0000000..587b1de
--- /dev/null
+++ b/Runtime/OpenXR/FacialExpression/MagicLeapFacialExpressionData.cs
@@ -0,0 +1,95 @@
+#if UNITY_OPENXR_1_9_0_OR_NEWER
+
+using System;
+
+namespace UnityEngine.XR.OpenXR.Features.MagicLeapSupport
+{
+ public partial class MagicLeapFacialExpressionFeature
+ {
+ /// <summary>
+ /// The type of facial expression blend shape.
+ /// </summary>
+ public enum FacialBlendShape
+ {
+ BrowLowererL = 0,
+ BrowLowererR,
+ CheekRaiserL,
+ CheekRaiserR,
+ ChinRaiser,
+ DimplerL,
+ DimplerR,
+ EyesClosedL,
+ EyesClosedR,
+ InnerBrowRaiserL,
+ InnerBrowRaiserR,
+ JawDrop,
+ LidTightenerL,
+ LidTightenerR,
+ LipCornerDepressorL,
+ LipCornerDepressorR,
+ LipCornerPullerL,
+ LipCornerPullerR,
+ LipFunnelerLB,
+ LipFunnelerLT,
+ LipFunnelerRB,
+ LipFunnelerRT,
+ LipPressorL,
+ LipPressorR,
+ LipPuckerL,
+ LipPuckerR,
+ LipStretcherL,
+ LipStretcherR,
+ LipSuckLB,
+ LipSuckLT,
+ LipSuckRB,
+ LipSuckRT,
+ LipTightenerL,
+ LipTightenerR,
+ LipsToward,
+ LowerLipDepressorL,
+ LowerLipDepressorR,
+ NoseWrinklerL,
+ NoseWrinklerR,
+ OuterBrowRaiserL,
+ OuterBrowRaiserR,
+ UpperLidRaiserL,
+ UpperLidRaiserR,
+ UpperLipRaiserL,
+ UpperLipRaiserR,
+ TongueOut
+ }
+
+ /// <summary>
+ /// Flags that determine if a blend shape is considered valid and/or tracked.
+ /// </summary>
+ [Flags]
+ public enum BlendShapePropertiesFlags
+ {
+ None = 0 << 0,
+ ValidBit = 1 << 0,
+ TrackedBit = 1 << 1
+ }
+
+ /// <summary>
+ /// The data properties associated with a given blend shape obtained by the Facial Expressions API.
+ /// </summary>
+ public struct BlendShapeProperties
+ {
+ /// <summary>
+ /// The type of facial expression blend shape.
+ /// </summary>
+ public FacialBlendShape FacialBlendShape;
+
+ /// <summary>
+ /// A value between 0 and 1 that states the current weight of the blend shape property.
+ /// </summary>
+ public float Weight;
+
+ /// <summary>
+ /// Flags which indicate if the blend shape property is valid and/or tracked.
+ /// </summary>
+ public BlendShapePropertiesFlags Flags;
+ }
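+
+ // Illustrative check (hypothetical snippet, not in the original file): a
+ // Weight is only meaningful when the property is both valid and tracked, e.g.
+ // bool usable = props.Flags.HasFlag(BlendShapePropertiesFlags.ValidBit | BlendShapePropertiesFlags.TrackedBit);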
+ }
+}
+#endif
\ No newline at end of file
diff --git a/Runtime/OpenXR/FacialExpression/MagicLeapFacialExpressionData.cs.meta b/Runtime/OpenXR/FacialExpression/MagicLeapFacialExpressionData.cs.meta
new file mode 100644
index 0000000..676f4f7
--- /dev/null
+++ b/Runtime/OpenXR/FacialExpression/MagicLeapFacialExpressionData.cs.meta
@@ -0,0 +1,11 @@
+fileFormatVersion: 2
+guid: 9e525bf14826a004db59eacfcd71ff52
+MonoImporter:
+ externalObjects: {}
+ serializedVersion: 2
+ defaultReferences: []
+ executionOrder: 0
+ icon: {instanceID: 0}
+ userData:
+ assetBundleName:
+ assetBundleVariant:
diff --git a/Runtime/OpenXR/FacialExpression/MagicLeapFacialExpressionFeature.cs b/Runtime/OpenXR/FacialExpression/MagicLeapFacialExpressionFeature.cs
new file mode 100644
index 0000000..c703f64
--- /dev/null
+++ b/Runtime/OpenXR/FacialExpression/MagicLeapFacialExpressionFeature.cs
@@ -0,0 +1,51 @@
+// %BANNER_BEGIN%
+// ---------------------------------------------------------------------
+// %COPYRIGHT_BEGIN%
+// Copyright (c) (2019-2023) Magic Leap, Inc. All Rights Reserved.
+// Use of this file is governed by the Software License Agreement, located here: https://www.magicleap.com/software-license-agreement-ml2
+// Terms and conditions applicable to third-party materials accompanying this distribution may also be found in the top-level NOTICE file appearing herein.
+// %COPYRIGHT_END%
+// ---------------------------------------------------------------------
+// %BANNER_END%
+#if UNITY_OPENXR_1_9_0_OR_NEWER
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using UnityEngine.XR.MagicLeap;
+#if UNITY_EDITOR
+using UnityEditor;
+using UnityEditor.XR.OpenXR.Features;
+#endif
+
+namespace UnityEngine.XR.OpenXR.Features.MagicLeapSupport
+{
+#if UNITY_EDITOR
+ [OpenXRFeature(UiName = "Magic Leap 2 Facial Expression",
+ Desc = "Necessary to deploy a Magic Leap 2 compatible application with Facial Expression events.",
+ Company = "Magic Leap",
+ Version = "1.0.0",
+ Priority = -1,
+ BuildTargetGroups = new[] { BuildTargetGroup.Android, BuildTargetGroup.Standalone },
+ FeatureId = FeatureId,
+ OpenxrExtensionStrings = "XR_ML_facial_expression"
+ )]
+#endif
+ public partial class MagicLeapFacialExpressionFeature : MagicLeapOpenXRFeatureBase
+ {
+ public const string FeatureId = "com.magicleap.openxr.feature.ml2_facialexpression";
+
+ protected override string GetFeatureId() => FeatureId;
+
+ protected override bool OnInstanceCreate(ulong xrInstance)
+ {
+ if (!OpenXRRuntime.IsExtensionEnabled("XR_ML_facial_expression"))
+ {
+ Debug.LogWarning($"XR_ML_facial_expression is not enabled, disabling {nameof(MagicLeapFacialExpressionFeature)}.");
+ return false;
+ }
+
+ return base.OnInstanceCreate(xrInstance);
+ }
+ }
+}
+#endif
diff --git a/Runtime/OpenXR/FacialExpression/MagicLeapFacialExpressionFeature.cs.meta b/Runtime/OpenXR/FacialExpression/MagicLeapFacialExpressionFeature.cs.meta
new file mode 100644
index 0000000..6d1e27a
--- /dev/null
+++ b/Runtime/OpenXR/FacialExpression/MagicLeapFacialExpressionFeature.cs.meta
@@ -0,0 +1,11 @@
+fileFormatVersion: 2
+guid: 74ecfbe7bc16a2e41a36f3c8e26f111c
+MonoImporter:
+ externalObjects: {}
+ serializedVersion: 2
+ defaultReferences: []
+ executionOrder: 0
+ icon: {instanceID: 0}
+ userData:
+ assetBundleName:
+ assetBundleVariant:
diff --git a/Runtime/OpenXR/FacialExpression/MagicLeapFacialExpressionNativeBindings.cs b/Runtime/OpenXR/FacialExpression/MagicLeapFacialExpressionNativeBindings.cs
new file mode 100644
index 0000000..9bb81ff
--- /dev/null
+++ b/Runtime/OpenXR/FacialExpression/MagicLeapFacialExpressionNativeBindings.cs
@@ -0,0 +1,38 @@
+// %BANNER_BEGIN%
+// ---------------------------------------------------------------------
+// %COPYRIGHT_BEGIN%
+// Copyright (c) (2019-2023) Magic Leap, Inc. All Rights Reserved.
+// Use of this file is governed by the Software License Agreement, located here: https://www.magicleap.com/software-license-agreement-ml2
+// Terms and conditions applicable to third-party materials accompanying this distribution may also be found in the top-level NOTICE file appearing herein.
+// %COPYRIGHT_END%
+// ---------------------------------------------------------------------
+// %BANNER_END%
+
+#if UNITY_OPENXR_1_9_0_OR_NEWER
+using System;
+using System.Collections;
+using System.Collections.Generic;
+using System.Runtime.InteropServices;
+using UnityEngine;
+using UnityEngine.XR.MagicLeap;
+using UnityEngine.XR.MagicLeap.Native;
+using UnityEngine.XR.OpenXR.NativeTypes;
+
+namespace UnityEngine.XR.OpenXR.Features.MagicLeapSupport
+{
+ public partial class MagicLeapFacialExpressionFeature
+ {
+ internal partial class NativeBindings : MagicLeapNativeBindings
+ {
+ [DllImport(MagicLeapXrProviderNativeBindings.MagicLeapXrProviderDll, CallingConvention = CallingConvention.Cdecl)]
+ public static extern XrResult MLOpenXRCreateFacialExpressionClient(uint requestedCount, FacialBlendShape[] requestedFacialBlendShapes, out ulong facialExpressionClient);
+
+ [DllImport(MagicLeapXrProviderNativeBindings.MagicLeapXrProviderDll, CallingConvention = CallingConvention.Cdecl)]
+ public static extern XrResult MLOpenXRDestroyFacialExpressionClient(ulong facialExpressionClient);
+
+ [DllImport(MagicLeapXrProviderNativeBindings.MagicLeapXrProviderDll, CallingConvention = CallingConvention.Cdecl)]
+ public static extern XrResult MLOpenXRGetFacialExpressionBlendShapesInfo(ulong facialExpressionClient, uint blendShapeCount, [In, Out] BlendShapeProperties[] blendShapes);
+ }
+ }
+}
+#endif
diff --git a/Runtime/OpenXR/FacialExpression/MagicLeapFacialExpressionNativeBindings.cs.meta b/Runtime/OpenXR/FacialExpression/MagicLeapFacialExpressionNativeBindings.cs.meta
new file mode 100644
index 0000000..1a28daa
--- /dev/null
+++ b/Runtime/OpenXR/FacialExpression/MagicLeapFacialExpressionNativeBindings.cs.meta
@@ -0,0 +1,11 @@
+fileFormatVersion: 2
+guid: 2e85ec6299021a7498879b1c95bbe4a6
+MonoImporter:
+ externalObjects: {}
+ serializedVersion: 2
+ defaultReferences: []
+ executionOrder: 0
+ icon: {instanceID: 0}
+ userData:
+ assetBundleName:
+ assetBundleVariant:
diff --git a/Runtime/OpenXR/LocalizationMap.meta b/Runtime/OpenXR/LocalizationMap.meta
new file mode 100644
index 0000000..9a264d6
--- /dev/null
+++ b/Runtime/OpenXR/LocalizationMap.meta
@@ -0,0 +1,8 @@
+fileFormatVersion: 2
+guid: 3e89d959fb3ac41a3b0839b15447a8b6
+folderAsset: yes
+DefaultImporter:
+ externalObjects: {}
+ userData:
+ assetBundleName:
+ assetBundleVariant:
diff --git a/Runtime/OpenXR/LocalizationMap/MagicLeapLocalizationMapFeature.cs b/Runtime/OpenXR/LocalizationMap/MagicLeapLocalizationMapFeature.cs
new file mode 100644
index 0000000..2aa12dd
--- /dev/null
+++ b/Runtime/OpenXR/LocalizationMap/MagicLeapLocalizationMapFeature.cs
@@ -0,0 +1,227 @@
+// %BANNER_BEGIN%
+// ---------------------------------------------------------------------
+// %COPYRIGHT_BEGIN%
+// Copyright (c) (2019-2022) Magic Leap, Inc. All Rights Reserved.
+// Use of this file is governed by the Software License Agreement, located here: https://www.magicleap.com/software-license-agreement-ml2
+// Terms and conditions applicable to third-party materials accompanying this distribution may also be found in the top-level NOTICE file appearing herein.
+// %COPYRIGHT_END%
+// ---------------------------------------------------------------------
+// %BANNER_END%
+#if UNITY_OPENXR_1_9_0_OR_NEWER
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Runtime.InteropServices;
+using Unity.Collections;
+using Unity.Collections.LowLevel.Unsafe;
+using UnityEngine.XR.OpenXR.NativeTypes;
+using UnityEngine.XR.OpenXR.Features.MagicLeapSupport.NativeInterop;
+
+#if UNITY_EDITOR
+using UnityEditor;
+using UnityEditor.XR.OpenXR.Features;
+#endif // UNITY_EDITOR
+
+namespace UnityEngine.XR.OpenXR.Features.MagicLeapSupport
+{
+ using static MagicLeapLocalizationMapFeature.NativeBindings;
+
+#if UNITY_EDITOR
+ [OpenXRFeature(UiName = "Magic Leap 2 Localization Maps",
+ Desc = "Import/Export and manage localization maps.",
+ Company = "Magic Leap",
+ Version = "1.0.0",
+ BuildTargetGroups = new[] { BuildTargetGroup.Android, BuildTargetGroup.Standalone },
+ FeatureId = FeatureId,
+ OpenxrExtensionStrings = ExtensionName
+ )]
+#endif // UNITY_EDITOR
+ public partial class MagicLeapLocalizationMapFeature : MagicLeapOpenXRFeatureBase
+ {
+ public const string FeatureId = "com.magicleap.openxr.feature.ml2_localizationmap";
+ public const string ExtensionName = "XR_ML_localization_map";
+ const uint LocalizationMapNameSize = 64;
+
+ #region Callbacks
+ [UnmanagedFunctionPointer(CallingConvention.Cdecl)]
+ internal delegate void OnEventDataLocalizationChangedCallback(ref XrEventDataLocalizationChangedML eventData);
+
+ public delegate void OnLocalizationMapChangedDelegate(LocalizationEventData data);
+ private static event OnLocalizationMapChangedDelegate OnLocalizationChanged = delegate { };
+ public static event OnLocalizationMapChangedDelegate OnLocalizationChangedEvent
+ {
+ add => OnLocalizationChanged += value;
+ remove => OnLocalizationChanged -= value;
+ }
+ #endregion
+
+ #region Enums
+ public enum LocalizationMapState
+ {
+ NotLocalized,
+ Localized,
+ LocalizationPending,
+ SleepingBeforeRetry,
+ }
+
+ public enum LocalizationMapType
+ {
+ OnDevice = 0,
+ Cloud = 1,
+ }
+
+ public enum LocalizationMapConfidence
+ {
+ Poor = 0,
+ Fair = 1,
+ Good = 2,
+ Excellent = 3
+ }
+
+ public enum LocalizationMapErrorFlags
+ {
+ UnknownBit = 1,
+ OutOfMappedAreaBit = 2,
+ LowFeatureCountBit = 4,
+ ExcessiveMotionBit = 8,
+ LowLightBit = 16,
+ HeadposeBit = 32
+ }
+ #endregion
+
+ #region Structs
+ public struct LocalizationMap
+ {
+ public string Name;
+
+ public string MapUUID;
+
+ public LocalizationMapType MapType;
+
+ internal LocalizationMap(XrLocalizationMapML map)
+ {
+ Name = XrLocalizationMapML.GetName(map);
+ MapUUID = XrLocalizationMapML.GetMapUuid(map);
+ MapType = map.MapType;
+ }
+ }
+
+ public struct LocalizationEventData
+ {
+ public LocalizationMapState State;
+
+ public LocalizationMap Map;
+
+ public LocalizationMapConfidence Confidence;
+
+ public LocalizationMapErrorFlags[] Errors;
+
+ internal LocalizationEventData(XrEventDataLocalizationChangedML data)
+ {
+ State = (LocalizationMapState)data.State;
+ Confidence = (LocalizationMapConfidence)data.Confidence;
+ Map = new LocalizationMap(data.Map);
+ var errors = new List<LocalizationMapErrorFlags>();
+ foreach (LocalizationMapErrorFlags flag in Enum.GetValues(typeof(LocalizationMapErrorFlags)))
+ {
+ if ((data.ErrorFlags & (ulong)flag) != 0)
+ errors.Add(flag);
+ }
+ Errors = errors.ToArray();
+ }
+ }
+ #endregion
+
+ #region Methods
+ protected override bool OnInstanceCreate(ulong xrInstance)
+ {
+ if (OpenXRRuntime.IsExtensionEnabled(ExtensionName))
+ {
+ return base.OnInstanceCreate(xrInstance);
+ }
+ Debug.LogError($"{ExtensionName} is not enabled. Disabling {nameof(MagicLeapLocalizationMapFeature)}");
+ return false;
+ }
+
+ protected override string GetFeatureId() => FeatureId;
+
+ public XrResult EnableLocalizationEvents(bool enableEvents)
+ {
+ return MLOpenXREnableLocalizationEvents(enableEvents);
+ }
+
+ public XrResult GetLocalizationMapsList(out LocalizationMap[] maps)
+ {
+ NativeArray<XrLocalizationMapML> nativeArray = new();
+ maps = null;
+ unsafe
+ {
+ var resultCode = MLOpenXRQueryLocalizationMaps(0, out uint mapCount, (XrLocalizationMapML*)nativeArray.GetUnsafePtr());
+ if (resultCode != XrResult.Success)
+ return resultCode;
+
+ nativeArray = new NativeArray<XrLocalizationMapML>((int)mapCount, Allocator.Temp);
+ for (int i = 0; i < nativeArray.Length; i++)
+ {
+ nativeArray[i] = XrLocalizationMapML.Create();
+ }
+ resultCode = MLOpenXRQueryLocalizationMaps(mapCount, out mapCount, (XrLocalizationMapML*)nativeArray.GetUnsafePtr());
+ if (resultCode != XrResult.Success)
+ return resultCode;
+ }
+ maps = nativeArray.Select(element => new LocalizationMap(element)).ToArray();
+ return XrResult.Success;
+ }
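+
+ // Illustrative sketch: enumerate the available maps, then localize into the
+ // first one by its UUID (assumes EnableLocalizationEvents(true) was called).
+ //
+ // if (feature.GetLocalizationMapsList(out var maps) == XrResult.Success && maps.Length > 0)
+ // feature.RequestMapLocalization(maps[0].MapUUID);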
+
+ public XrResult RequestMapLocalization(string mapId)
+ {
+ XrUUID xrUuid = new XrUUID(mapId);
+ return MLOpenXRRequestMapLocalization(xrUuid);
+ }
+
+ public XrResult ExportLocalizationMap(string mapId, out byte[] mapData)
+ {
+ // Create a handle for the exported map.
+ mapData = Array.Empty<byte>();
+ XrUUID mapUuid = new XrUUID(mapId);
+ var resultCode = MLOpenXRCreateExportedLocalizationMap(in mapUuid, out ulong mapHandle);
+ if (resultCode != XrResult.Success)
+ return resultCode;
+
+ // The first call reports the required buffer size; the second call fills it.
+ uint mapDataSize = 0;
+ resultCode = MLOpenXRGetExportedLocalizationMapData(mapHandle, mapDataSize, out mapDataSize, mapData);
+
+ if (mapDataSize > 0)
+ {
+ mapData = new byte[mapDataSize];
+ MLOpenXRGetExportedLocalizationMapData(mapHandle, mapDataSize, out mapDataSize, mapData);
+
+ // Release the exported map handle.
+ resultCode = MLOpenXRDestroyExportedLocalizationMap(mapHandle);
+ }
+ return resultCode;
+ }
+
+ public XrResult ImportLocalizationMap(byte[] mapData, out string mapId)
+ {
+ var requestInfo = new XrLocalizationMapImportInfoML(mapData);
+ var resultCode = MLOpenXRImportLocalizationMap(ref requestInfo, out XrUUID xrUUID);
+ mapId = xrUUID.ToString();
+ return resultCode;
+ }
+
+ public bool GetLatestLocalizationMapData(out LocalizationEventData data)
+ {
+ data = new();
+ XrEventDataLocalizationChangedML localizationData;
+ localizationData.Map = XrLocalizationMapML.Create();
+
+ var resultCode = MLOpenXRGetLocalizationMapData(out localizationData);
+ if (resultCode)
+ data = new LocalizationEventData(localizationData);
+ return resultCode;
+ }
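+
+ // Illustrative round trip: the exported byte[] can be persisted and later
+ // re-imported, yielding the same map UUID.
+ //
+ // feature.ExportLocalizationMap(mapId, out byte[] blob);
+ // feature.ImportLocalizationMap(blob, out string restoredUuid);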
+ #endregion
+ }
+}
+#endif // UNITY_OPENXR_1_9_0_OR_NEWER
diff --git a/Runtime/OpenXR/LocalizationMap/MagicLeapLocalizationMapFeature.cs.meta b/Runtime/OpenXR/LocalizationMap/MagicLeapLocalizationMapFeature.cs.meta
new file mode 100644
index 0000000..e475021
--- /dev/null
+++ b/Runtime/OpenXR/LocalizationMap/MagicLeapLocalizationMapFeature.cs.meta
@@ -0,0 +1,11 @@
+fileFormatVersion: 2
+guid: 69b8341c5e7c6406f8d60fd00f034d8b
+MonoImporter:
+ externalObjects: {}
+ serializedVersion: 2
+ defaultReferences: []
+ executionOrder: 0
+ icon: {instanceID: 0}
+ userData:
+ assetBundleName:
+ assetBundleVariant:
diff --git a/Runtime/OpenXR/LocalizationMap/MagicLeapLocalizationMapFeatureNativeBindings.cs b/Runtime/OpenXR/LocalizationMap/MagicLeapLocalizationMapFeatureNativeBindings.cs
new file mode 100644
index 0000000..fc3723d
--- /dev/null
+++ b/Runtime/OpenXR/LocalizationMap/MagicLeapLocalizationMapFeatureNativeBindings.cs
@@ -0,0 +1,148 @@
+// %BANNER_BEGIN%
+// ---------------------------------------------------------------------
+// %COPYRIGHT_BEGIN%
+// Copyright (c) (2019-2022) Magic Leap, Inc. All Rights Reserved.
+// Use of this file is governed by the Software License Agreement, located here: https://www.magicleap.com/software-license-agreement-ml2
+// Terms and conditions applicable to third-party materials accompanying this distribution may also be found in the top-level NOTICE file appearing herein.
+// %COPYRIGHT_END%
+// ---------------------------------------------------------------------
+// %BANNER_END%
+
+#if UNITY_OPENXR_1_9_0_OR_NEWER
+using System;
+using System.Runtime.InteropServices;
+using System.Text;
+using Unity.Collections;
+using Unity.Collections.LowLevel.Unsafe;
+using UnityEngine.XR.MagicLeap;
+using UnityEngine.XR.MagicLeap.Native;
+using UnityEngine.XR.OpenXR.Features.MagicLeapSupport.NativeInterop;
+using UnityEngine.XR.OpenXR.NativeTypes;
+
+namespace UnityEngine.XR.OpenXR.Features.MagicLeapSupport
+{
+ public partial class MagicLeapLocalizationMapFeature
+ {
+ internal class NativeBindings : MagicLeapNativeBindings
+ {
+ [AOT.MonoPInvokeCallback(typeof(OnEventDataLocalizationChangedCallback))]
+ internal static void HandleOnEventDataLocalizationChanged(ref XrEventDataLocalizationChangedML eventData)
+ {
+ LocalizationEventData data = new(eventData);
+ MLThreadDispatch.ScheduleMain(() =>
+ {
+ OnLocalizationChanged?.Invoke(data);
+ });
+ }
+
+ #region NativeStructs
+ internal struct XrEventDataLocalizationChangedML
+ {
+ internal int State;
+
+ internal XrLocalizationMapML Map;
+
+ internal int Confidence;
+
+ internal ulong ErrorFlags;
+ }
+
+ internal unsafe struct XrLocalizationMapML
+ {
+ internal byte* Name;
+
+ internal byte* MapUUid;
+
+ internal LocalizationMapType MapType;
+
+ internal static XrLocalizationMapML Create()
+ {
+ var result = new XrLocalizationMapML();
+ result.Name = (byte*)new NativeArray<byte>((int)LocalizationMapNameSize, Allocator.Temp).GetUnsafePtr();
+ result.MapUUid = (byte*)new NativeArray<byte>(16, Allocator.Temp).GetUnsafePtr();
+ return result;
+ }
+
+ internal static string GetName(XrLocalizationMapML map)
+ {
+ var name = NativeArrayUnsafeUtility.ConvertExistingDataToNativeArray<byte>(map.Name, (int)LocalizationMapNameSize, Allocator.Temp).ToArray();
+ return Encoding.UTF8.GetString(name).TrimEnd('\0');
+ }
+
+ internal static string GetMapUuid(XrLocalizationMapML map)
+ {
+ var uuid = new XrUUID();
+
+ unsafe
+ {
+ UnsafeUtility.MemCpy(uuid.Data, map.MapUUid, sizeof(byte) * 16);
+ }
+ }
+ return uuid.ToString();
+ }
+ }
+
+ internal unsafe struct XrLocalizationMapImportInfoML
+ {
+ internal int Type;
+
+ internal IntPtr Next;
+
+ internal uint Size;
+
+ internal byte* Data;
+
+ internal XrLocalizationMapImportInfoML(byte[] data)
+ {
+ Type = 0;
+ Next = IntPtr.Zero;
+ Data = (byte*)new NativeArray<byte>(data, Allocator.Temp).GetUnsafePtr();
+ Size = (uint)data.Length;
+ }
+ }
+
+ internal struct XrLocalizationEnableEventsInfoML
+ {
+ internal int Type;
+
+ internal IntPtr Next;
+
+ internal uint Size;
+
+ internal bool Enabled;
+ }
+ #endregion
+
+ [DllImport(MagicLeapXrProviderNativeBindings.MagicLeapXrProviderDll, CallingConvention = CallingConvention.Cdecl)]
+ public static extern XrResult MLOpenXREnableLocalizationEvents(bool enableEvents);
+
+ [DllImport(MagicLeapXrProviderNativeBindings.MagicLeapXrProviderDll, CallingConvention = CallingConvention.Cdecl)]
+ public unsafe static extern XrResult MLOpenXRQueryLocalizationMaps(uint maxMapSize, out uint mapOutputSize, XrLocalizationMapML* mapsPtr);
+
+ [DllImport(MagicLeapXrProviderNativeBindings.MagicLeapXrProviderDll, CallingConvention = CallingConvention.Cdecl)]
+ public static extern XrResult MLOpenXRRequestMapLocalization(NativeInterop.XrUUID xrUUID);
+
+ [DllImport(MagicLeapXrProviderNativeBindings.MagicLeapXrProviderDll, CallingConvention = CallingConvention.Cdecl)]
+ public static extern XrResult MLOpenXRCreateExportedLocalizationMap(in NativeInterop.XrUUID xrUUID, out ulong mapHandle);
+
+ [DllImport(MagicLeapXrProviderNativeBindings.MagicLeapXrProviderDll, CallingConvention = CallingConvention.Cdecl)]
+ public static extern XrResult MLOpenXRGetExportedLocalizationMapData(ulong mapHandle, uint maxMapSize, out uint mapOutputDataSize, byte[] mapData);
+
+ [DllImport(MagicLeapXrProviderNativeBindings.MagicLeapXrProviderDll, CallingConvention = CallingConvention.Cdecl)]
+ public static extern XrResult MLOpenXRDestroyExportedLocalizationMap(ulong mapHandle);
+
+ [DllImport(MagicLeapXrProviderNativeBindings.MagicLeapXrProviderDll, CallingConvention = CallingConvention.Cdecl)]
+ public static extern XrResult MLOpenXRImportLocalizationMap(ref XrLocalizationMapImportInfoML requestInfo, out NativeInterop.XrUUID xrUUID);
+
+ [DllImport(MagicLeapXrProviderNativeBindings.MagicLeapXrProviderDll, CallingConvention = CallingConvention.Cdecl)]
+ public static extern XrResult MLOpenXRLocalizationMapRegisterCallback(OnEventDataLocalizationChangedCallback callback);
+
+ [DllImport(MagicLeapXrProviderNativeBindings.MagicLeapXrProviderDll, CallingConvention = CallingConvention.Cdecl)]
+ public static extern XrResult MLOpenXRLocalizationMapClearCallback();
+
+ [DllImport(MagicLeapXrProviderNativeBindings.MagicLeapXrProviderDll, CallingConvention = CallingConvention.Cdecl)]
+ public unsafe static extern bool MLOpenXRGetLocalizationMapData(out XrEventDataLocalizationChangedML data);
+ }
+ }
+}
+#endif // UNITY_OPENXR_1_9_0_OR_NEWER
diff --git a/Runtime/OpenXR/LocalizationMap/MagicLeapLocalizationMapFeatureNativeBindings.cs.meta b/Runtime/OpenXR/LocalizationMap/MagicLeapLocalizationMapFeatureNativeBindings.cs.meta
new file mode 100644
index 0000000..aa1eb44
--- /dev/null
+++ b/Runtime/OpenXR/LocalizationMap/MagicLeapLocalizationMapFeatureNativeBindings.cs.meta
@@ -0,0 +1,11 @@
+fileFormatVersion: 2
+guid: 588019665754644139588766b39d5e6b
+MonoImporter:
+ externalObjects: {}
+ serializedVersion: 2
+ defaultReferences: []
+ executionOrder: 0
+ icon: {instanceID: 0}
+ userData:
+ assetBundleName:
+ assetBundleVariant:
diff --git a/Runtime/OpenXR/MLCompat.meta b/Runtime/OpenXR/MLCompat.meta
new file mode 100644
index 0000000..c49935e
--- /dev/null
+++ b/Runtime/OpenXR/MLCompat.meta
@@ -0,0 +1,8 @@
+fileFormatVersion: 2
+guid: 3b0d07bbf76b1f94bbb2492448833c9a
+folderAsset: yes
+DefaultImporter:
+ externalObjects: {}
+ userData:
+ assetBundleName:
+ assetBundleVariant:
diff --git a/Runtime/OpenXR/MLCompat/MLEyeTracking.cs b/Runtime/OpenXR/MLCompat/MLEyeTracking.cs
new file mode 100644
index 0000000..d3ec3da
--- /dev/null
+++ b/Runtime/OpenXR/MLCompat/MLEyeTracking.cs
@@ -0,0 +1,62 @@
+using System.Runtime.InteropServices;
+using UnityEngine.XR.MagicLeap;
+using UnityEngine.XR.MagicLeap.Native;
+using static UnityEngine.XR.MagicLeap.InputSubsystem.Extensions.MLEyes.NativeBindings;
+
+namespace UnityEngine.XR.OpenXR.Features.MagicLeapSupport
+{
+ internal partial class MLEyeTracking
+ {
+ private static ulong handle;
+ private static MLEyeTrackingStaticData staticData;
+ private static MLEyeTrackingStateEx state;
+
+ public static void Start()
+ {
+ MLResult.DidNativeCallSucceed(NativeBindings.MLEyeTrackingCreate(out handle));
+ state = MLEyeTrackingStateEx.Init();
+ }
+
+ public static void Stop()
+ {
+ MLResult.DidNativeCallSucceed(NativeBindings.MLEyeTrackingDestroy(handle));
+ }
+
+ public static bool TryGetState(out InputSubsystem.Extensions.MLEyes.State eyeTrackingState)
+ {
+ eyeTrackingState = default;
+ if (!MLResult.DidNativeCallSucceed(NativeBindings.MLEyeTrackingGetStateEx(handle, out state)))
+ {
+ return false;
+ }
+ eyeTrackingState = new InputSubsystem.Extensions.MLEyes.State(state);
+ return true;
+ }
+
+ public static void GetStaticData(out InputSubsystem.Extensions.MLEyes.StaticData eyeTrackingStaticData)
+ {
+ MLResult.DidNativeCallSucceed(NativeBindings.MLEyeTrackingGetStaticData(handle, out staticData));
+ eyeTrackingStaticData = new()
+ {
+ Vergence = staticData.vergence,
+ LeftCenter = staticData.left_center,
+ RightCenter = staticData.right_center
+ };
+ }
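+
+ // Illustrative call order: Start() once, TryGetState() per frame, Stop() on
+ // shutdown.
+ //
+ // MLEyeTracking.Start();
+ // if (MLEyeTracking.TryGetState(out var eyes)) { /* consume eye state */ }
+ // MLEyeTracking.Stop();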
+
+ internal class NativeBindings : MagicLeapNativeBindings
+ {
+ [DllImport(MLSdkLoaderDll, CallingConvention = CallingConvention.Cdecl)]
+ public static extern MLResult.Code MLEyeTrackingCreate(out ulong handle);
+
+ [DllImport(MLSdkLoaderDll, CallingConvention = CallingConvention.Cdecl)]
+ public static extern MLResult.Code MLEyeTrackingDestroy(ulong handle);
+
+ [DllImport(MLSdkLoaderDll, CallingConvention = CallingConvention.Cdecl)]
+ public static extern MLResult.Code MLEyeTrackingGetStaticData(ulong handle, out MLEyeTrackingStaticData data);
+
+ [DllImport(MLSdkLoaderDll, CallingConvention = CallingConvention.Cdecl)]
+ public static extern MLResult.Code MLEyeTrackingGetStateEx(ulong handle, out MLEyeTrackingStateEx state);
+ }
+ }
+}
diff --git a/Runtime/OpenXR/MLCompat/MLEyeTracking.cs.meta b/Runtime/OpenXR/MLCompat/MLEyeTracking.cs.meta
new file mode 100644
index 0000000..a563970
--- /dev/null
+++ b/Runtime/OpenXR/MLCompat/MLEyeTracking.cs.meta
@@ -0,0 +1,11 @@
+fileFormatVersion: 2
+guid: 198205e134a9d3e4295fca3ad5128b11
+MonoImporter:
+ externalObjects: {}
+ serializedVersion: 2
+ defaultReferences: []
+ executionOrder: 0
+ icon: {instanceID: 0}
+ userData:
+ assetBundleName:
+ assetBundleVariant:
diff --git a/Runtime/OpenXR/MLCompat/MLHeadTracking.cs b/Runtime/OpenXR/MLCompat/MLHeadTracking.cs
new file mode 100644
index 0000000..a52cdb4
--- /dev/null
+++ b/Runtime/OpenXR/MLCompat/MLHeadTracking.cs
@@ -0,0 +1,95 @@
+using System;
+using System.Runtime.InteropServices;
+using UnityEngine.XR.MagicLeap;
+using UnityEngine.XR.MagicLeap.Native;
+using static UnityEngine.XR.MagicLeap.InputSubsystem.Extensions.MLHeadTracking.NativeBindings;
+
+namespace UnityEngine.XR.OpenXR.Features.MagicLeapSupport
+{
+ internal partial class MLHeadTracking
+ {
+ private static ulong handle = MagicLeapNativeBindings.InvalidHandle;
+ private static MLHeadTrackingStaticData staticData;
+ private static MLHeadTrackingStateEx state;
+ private static ulong mapEvents;
+
+ private static void CreateHandle()
+ {
+ if(handle == MagicLeapNativeBindings.InvalidHandle)
+ {
+ MLResult.DidNativeCallSucceed(NativeBindings.MLHeadTrackingCreate(out handle));
+ state = default;
+ staticData = default;
+ }
+ }
+
+ public static ulong Handle
+ {
+ get
+ {
+ CreateHandle();
+ return handle;
+ }
+ }
+
+ public static bool IsAvailable()
+ {
+ CreateHandle();
+ return handle != MagicLeapNativeBindings.InvalidHandle;
+ }
+
+ public static bool TryGetStateEx(out InputSubsystem.Extensions.MLHeadTracking.StateEx headTrackingState)
+ {
+ headTrackingState = default;
+ CreateHandle();
+
+ if (!MLResult.DidNativeCallSucceed(NativeBindings.MLHeadTrackingGetStateEx(handle, out state)))
+ return false;
+
+ headTrackingState = new InputSubsystem.Extensions.MLHeadTracking.StateEx(state);
+ return true;
+ }
+
+ public static bool GetStaticData(out MagicLeapNativeBindings.MLCoordinateFrameUID outUID)
+ {
+ outUID = MagicLeapNativeBindings.MLCoordinateFrameUID.EmptyFrame;
+ CreateHandle();
+
+ if (!MLResult.DidNativeCallSucceed(NativeBindings.MLHeadTrackingGetStaticData(handle, out staticData)))
+ return false;
+
+ outUID = staticData.coord_frame_head;
+ return true;
+ }
+
+ public static bool TryGetMapEvents(out InputSubsystem.Extensions.MLHeadTracking.MapEvents outMapEvents)
+ {
+ outMapEvents = default;
+ CreateHandle();
+
+ if (!MLResult.DidNativeCallSucceed(NativeBindings.MLHeadTrackingGetMapEvents(handle, out mapEvents)))
+ return false;
+
+ outMapEvents = (InputSubsystem.Extensions.MLHeadTracking.MapEvents)mapEvents;
+ return (outMapEvents != 0);
+ }
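+
+ // Illustrative poll: a false return simply means no map events occurred
+ // since the last query.
+ //
+ // if (MLHeadTracking.TryGetMapEvents(out var events))
+ // Debug.Log($"Headpose map events: {events}");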
+
+ internal class NativeBindings : MagicLeapNativeBindings
+ {
+ [DllImport(MLSdkLoaderDll, CallingConvention = CallingConvention.Cdecl)]
+ public static extern MLResult.Code MLHeadTrackingCreate(out ulong handle);
+
+ [DllImport(MLSdkLoaderDll, CallingConvention = CallingConvention.Cdecl)]
+ public static extern MLResult.Code MLHeadTrackingDestroy(ulong handle);
+
+ [DllImport(MLSdkLoaderDll, CallingConvention = CallingConvention.Cdecl)]
+ public static extern MLResult.Code MLHeadTrackingGetStateEx(ulong handle, out MLHeadTrackingStateEx state);
+
+ [DllImport(MLSdkLoaderDll, CallingConvention = CallingConvention.Cdecl)]
+ public static extern MLResult.Code MLHeadTrackingGetStaticData(ulong handle, out MLHeadTrackingStaticData data);
+
+ [DllImport(MLSdkLoaderDll, CallingConvention = CallingConvention.Cdecl)]
+ public static extern MLResult.Code MLHeadTrackingGetMapEvents(ulong handle, out ulong mapEvents);
+ }
+ }
+}
diff --git a/Runtime/OpenXR/MLCompat/MLHeadTracking.cs.meta b/Runtime/OpenXR/MLCompat/MLHeadTracking.cs.meta
new file mode 100644
index 0000000..6f66819
--- /dev/null
+++ b/Runtime/OpenXR/MLCompat/MLHeadTracking.cs.meta
@@ -0,0 +1,11 @@
+fileFormatVersion: 2
+guid: 38c6f27b3a5934a46be280a8e2fc2f79
+MonoImporter:
+ externalObjects: {}
+ serializedVersion: 2
+ defaultReferences: []
+ executionOrder: 0
+ icon: {instanceID: 0}
+ userData:
+ assetBundleName:
+ assetBundleVariant:
diff --git a/Runtime/OpenXR/MLCompat/MLInput.cs b/Runtime/OpenXR/MLCompat/MLInput.cs
new file mode 100644
index 0000000..3e9ff8e
--- /dev/null
+++ b/Runtime/OpenXR/MLCompat/MLInput.cs
@@ -0,0 +1,204 @@
+using System;
+using System.Collections;
+using System.Collections.Generic;
+using System.Runtime.InteropServices;
+using UnityEngine;
+using UnityEngine.XR.MagicLeap;
+using UnityEngine.XR.MagicLeap.Native;
+
+namespace UnityEngine.XR.OpenXR.Features.MagicLeapSupport
+{
+ internal class MLInput
+ {
+ public enum PreDefinedPatternType { A, B, C }
+
+ public enum CustomHapticsType { None, Buzz, Predefined }
+
+ private static Dictionary<NativeBindings.MLInputCustomHapticsInfo, uint> createdPatternIds = new Dictionary<NativeBindings.MLInputCustomHapticsInfo, uint>();
+
+ private static ulong handle = MagicLeapNativeBindings.InvalidHandle;
+
+ private static void CreateHandle()
+ {
+ if(handle == MagicLeapNativeBindings.InvalidHandle)
+ MLResult.DidNativeCallSucceed(NativeBindings.MLInputCreate(out handle));
+ }
+
+ public static MLResult StartBuzz(ushort startHz, ushort endHz, uint durationMs, byte amplitude)
+ {
+ CreateHandle();
+ var result = NativeBindings.MLInputStopControllerFeedback(handle, 0);
+ if(MLResult.DidNativeCallSucceed(result, nameof(NativeBindings.MLInputStopControllerFeedback)))
+ {
+ var command = NativeBindings.MLInputBuzzCommand.Init();
+ command.StartHz = startHz;
+ command.EndHz = endHz;
+ command.DurationMs = durationMs;
+ command.Amplitude = amplitude;
+ result = NativeBindings.MLInputStartControllerFeedbackBuzzCommand(handle, 0, in command);
+ MLResult.DidNativeCallSucceed(result, nameof(NativeBindings.MLInputStartControllerFeedbackBuzzCommand));
+ }
+ return MLResult.Create(result);
+ }
+
+ public static MLResult StartPredefined(PreDefinedPatternType patternType)
+ {
+ CreateHandle();
+ var result = NativeBindings.MLInputStopControllerFeedback(handle, 0);
+ if (MLResult.DidNativeCallSucceed(result, nameof(NativeBindings.MLInputStopControllerFeedback)))
+ {
+ var pattern = NativeBindings.MLInputPreDefinedPattern.Init();
+ pattern.Type = patternType;
+ result = NativeBindings.MLInputStartControllerFeedbackPreDefinedPattern(handle, 0, ref pattern);
+ MLResult.DidNativeCallSucceed(result, nameof(NativeBindings.MLInputStartControllerFeedbackPreDefinedPattern));
+ }
+ return MLResult.Create(result);
+ }
+
+ public static MLResult StartCustomPattern(List hapticsList)
+ {
+ CreateHandle();
+ var customHaptics = new List<NativeBindings.MLInputCustomHaptics>();
+ foreach(var h in hapticsList)
+ {
+ NativeBindings.MLInputCustomHaptics hap = new()
+ {
+ Type = (CustomHapticsType)h.Type
+ };
+ if (h.Type == InputSubsystem.Extensions.Haptics.Type.Buzz)
+ {
+ hap.Buzz = NativeBindings.MLInputBuzzCommand.Init();
+ hap.Buzz.StartHz = h.Buzz.StartHz;
+ hap.Buzz.EndHz = h.Buzz.EndHz;
+ hap.Buzz.DurationMs = h.Buzz.DurationMs;
+ hap.Buzz.Amplitude = h.Buzz.Amplitude;
+ hap.DurationMs = h.Buzz.DurationMs;
+ }
+ else
+ {
+ hap.PreDefined = NativeBindings.MLInputPreDefinedPattern.Init();
+ hap.PreDefined.Type = (PreDefinedPatternType)h.PreDefined.Pattern;
+ hap.DurationMs = h.DurationMs;
+ }
+ customHaptics.Add(hap);
+ }
+
+ var info = new NativeBindings.MLInputCustomHapticsInfo((uint)customHaptics.Count);
+ IntPtr itr = info.CustomHaptics;
+ Debug.Log($"\n\n [bas] filling haptics info array with {info.Size} commands\n");
+ for(int i = 0; i < customHaptics.Count; ++i)
+ {
+ var haptic = customHaptics[i];
+ Marshal.StructureToPtr(haptic, itr, true);
+ itr = new IntPtr(itr.ToInt64()) + Marshal.SizeOf();
+ }
+
+ uint id = 0;
+ if(!createdPatternIds.ContainsKey(info))
+ {
+ if(NativeBindings.MLInputCreateCustomHapticsPattern(handle, ref info, ref id) == MLResult.Code.Ok)
+ createdPatternIds.Add(info, id);
+ }
+ else
+ {
+ id = createdPatternIds[info];
+ }
+
+ var result = NativeBindings.MLInputStartControllerFeedbackCustomHapticsPattern(handle, 0, id);
+ MLResult.DidNativeCallSucceed(result, nameof(NativeBindings.MLInputStartControllerFeedbackCustomHapticsPattern));
+ return MLResult.Create(result);
+ }
+
+ public static MLResult Stop()
+ {
+ var result = NativeBindings.MLInputStopControllerFeedback(handle, 0);
+ MLResult.DidNativeCallSucceed(result, nameof(NativeBindings.MLInputStopControllerFeedback));
+ return MLResult.Create(result);
+ }
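+
+ // Illustrative controller haptics usage; each Start* call first stops any
+ // feedback already playing on controller 0.
+ //
+ // MLInput.StartBuzz(startHz: 200, endHz: 800, durationMs: 1000, amplitude: 20);
+ // MLInput.StartPredefined(MLInput.PreDefinedPatternType.A);
+ // MLInput.Stop();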
+
+ internal class NativeBindings : MagicLeapNativeBindings
+ {
+ [DllImport(MLSdkLoaderDll, CallingConvention = CallingConvention.Cdecl)]
+ public static extern MLResult.Code MLInputCreate(out ulong handle);
+
+ #region Haptics
+ public struct MLInputPreDefinedPattern // size 8
+ {
+ public uint Version;
+ public PreDefinedPatternType Type;
+ public static MLInputPreDefinedPattern Init() => new()
+ {
+ Version = 1,
+ Type = PreDefinedPatternType.C
+ };
+ }
+
+ [StructLayout(LayoutKind.Sequential, Size = 13)]
+ public struct MLInputBuzzCommand // size 13
+ {
+ public uint Version; // 4
+ public ushort StartHz; //2
+ public ushort EndHz; // 2
+ public uint DurationMs; //4
+ public byte Amplitude; // 1
+ public static MLInputBuzzCommand Init() => new()
+ {
+ Version = 1,
+ StartHz = 200,
+ EndHz = 800,
+ DurationMs = 1000,
+ Amplitude = 20
+ };
+ }
+
+ [StructLayout(LayoutKind.Explicit, Size = 21)]
+ public struct MLInputCustomHaptics
+ {
+ [FieldOffset(0)]
+ public CustomHapticsType Type; // 4
+ [FieldOffset(4)]
+ public MLInputBuzzCommand Buzz; // 13
+ [FieldOffset(4)]
+ public MLInputPreDefinedPattern PreDefined; // 8
+ [FieldOffset(17)]
+ public uint DurationMs; // 4
+ }
+
+ public struct MLInputCustomHapticsInfo : IDisposable
+ {
+ public uint Version;
+ public IntPtr CustomHaptics;
+ public uint Size;
+
+ public MLInputCustomHapticsInfo(uint size)
+ {
+ Version = 1;
+ CustomHaptics = Marshal.AllocHGlobal(Marshal.SizeOf<MLInputCustomHaptics>() * (int)size);
+ Size = size;
+ }
+
+ public void Dispose() => Marshal.FreeHGlobal(CustomHaptics);
+ }
+
+ [DllImport(MLSdkLoaderDll, CallingConvention = CallingConvention.Cdecl)]
+ public static extern MLResult.Code MLInputStartControllerFeedbackPreDefinedPattern(ulong handle, byte controllerId, ref MLInputPreDefinedPattern pattern);
+
+ [DllImport(MLSdkLoaderDll, CallingConvention = CallingConvention.Cdecl)]
+ public static extern MLResult.Code MLInputStartControllerFeedbackBuzzCommand(ulong handle, byte controllerId, in MLInputBuzzCommand command);
+
+ [DllImport(MLSdkLoaderDll, CallingConvention = CallingConvention.Cdecl)]
+ public static extern MLResult.Code MLInputCreateCustomHapticsPattern(ulong handle, ref MLInputCustomHapticsInfo info, ref uint patternId);
+
+ [DllImport(MLSdkLoaderDll, CallingConvention = CallingConvention.Cdecl)]
+ public static extern MLResult.Code MLInputStartControllerFeedbackCustomHapticsPattern(ulong handle, byte controllerId, uint patternId);
+
+ [DllImport(MLSdkLoaderDll, CallingConvention = CallingConvention.Cdecl)]
+ public static extern MLResult.Code MLInputDeleteCustomHapticsPattern(ulong handle, uint patternId);
+
+ [DllImport(MLSdkLoaderDll, CallingConvention = CallingConvention.Cdecl)]
+ public static extern MLResult.Code MLInputStopControllerFeedback(ulong handle, byte controllerId);
+ #endregion
+ }
+ }
+}
diff --git a/Runtime/OpenXR/MLCompat/MLInput.cs.meta b/Runtime/OpenXR/MLCompat/MLInput.cs.meta
new file mode 100644
index 0000000..fd7dd18
--- /dev/null
+++ b/Runtime/OpenXR/MLCompat/MLInput.cs.meta
@@ -0,0 +1,11 @@
+fileFormatVersion: 2
+guid: dd182d7f8a12f574cbbbba26771ea4de
+MonoImporter:
+ externalObjects: {}
+ serializedVersion: 2
+ defaultReferences: []
+ executionOrder: 0
+ icon: {instanceID: 0}
+ userData:
+ assetBundleName:
+ assetBundleVariant:
diff --git a/Runtime/OpenXR/MagicLeapFeature.cs b/Runtime/OpenXR/MagicLeapFeature.cs
index a756d30..17020c1 100644
--- a/Runtime/OpenXR/MagicLeapFeature.cs
+++ b/Runtime/OpenXR/MagicLeapFeature.cs
@@ -12,10 +12,8 @@
using UnityEngine.XR.ARSubsystems;
using UnityEngine.XR.MagicLeap;
using System;
-using UnityEngine.InputSystem.Layouts;
-using UnityEngine.InputSystem.XR;
using UnityEngine.LowLevel;
-using UnityEngine.PlayerLoop;
+using MagicLeap;
#if UNITY_EDITOR
using UnityEditor;
using UnityEditor.XR.OpenXR.Features;
@@ -33,7 +31,7 @@ namespace UnityEngine.XR.OpenXR.Features.MagicLeapSupport
Version = "1.0.0",
BuildTargetGroups = new []{ BuildTargetGroup.Android, BuildTargetGroup.Standalone },
FeatureId = FeatureId,
- OpenxrExtensionStrings = "XR_ML_compat XR_KHR_convert_timespec_time XR_EXT_view_configuration_depth_range"
+ OpenxrExtensionStrings = "XR_ML_compat XR_KHR_convert_timespec_time XR_EXT_view_configuration_depth_range XR_ML_view_configuration_depth_range_change"
)]
#endif
public partial class MagicLeapFeature : OpenXRFeature
@@ -77,7 +75,7 @@ public enum NearClipMode : byte
Recommended,
#if DISABLE_MAGICLEAP_CLIP_ENFORCEMENT
/// <summary>
- /// Unsupported
+ /// Do not restrict the Camera's near clip plane distance.
/// </summary>
None,
#endif
@@ -110,6 +108,10 @@ public enum NearClipMode : byte
public float MaxFarZ => NativeBindings.MLOpenXRGetMaxFarClippingPlane();
public float RecommendedFarZ => NativeBindings.MLOpenXRGetRecommendedFarClippingPlane();
+ // Used when the DISABLE_MAGICLEAP_CLIP_ENFORCEMENT flag is set, together with NearClipMode.None.
+ // Unity's camera nearClip must not drop below 0.01; the engine can lock up if it does.
+ private const float minimumNearClip = 0.01f;
+
private static List<XRMeshSubsystemDescriptor> meshSubsysDesc = new List<XRMeshSubsystemDescriptor>();
private static List<XRSessionSubsystemDescriptor> sessionSubsysDesc = new List<XRSessionSubsystemDescriptor>();
@@ -137,19 +139,22 @@ protected override void OnSessionCreate(ulong xrSession)
{
NativeBindings.MLOpenXROnSessionCreate(xrSession);
- if (perceptionSnapshots)
+ if (!Application.isEditor)
{
- var update = new PlayerLoopSystem()
+ if (perceptionSnapshots)
{
- subSystemList = Array.Empty<PlayerLoopSystem>(),
- updateDelegate = PerformMLPerceptionSnapshot,
- type = typeof(MLPerceptionSnapshotUpdate)
- };
- var playerLoop = LowLevel.PlayerLoop.GetCurrentPlayerLoop();
- if (!Utils.InstallIntoPlayerLoop(ref playerLoop, update, Utils.InstallPath))
- Debug.LogError("Unable to install snapshotting Update delegate into player loop!");
- else
- LowLevel.PlayerLoop.SetPlayerLoop(playerLoop);
+ var update = new PlayerLoopSystem()
+ {
+ subSystemList = Array.Empty<PlayerLoopSystem>(),
+ updateDelegate = PerformMLPerceptionSnapshot,
+ type = typeof(MLPerceptionSnapshotUpdate)
+ };
+ var playerLoop = LowLevel.PlayerLoop.GetCurrentPlayerLoop();
+ if (!PlayerLoopUtil.InstallIntoPlayerLoop(ref playerLoop, update, PlayerLoopUtil.InstallPath))
+ Debug.LogError("Unable to install snapshotting Update delegate into player loop!");
+ else
+ LowLevel.PlayerLoop.SetPlayerLoop(playerLoop);
+ }
}
}
@@ -157,14 +162,20 @@ protected override void OnSessionBegin(ulong xrSession)
{
base.OnSessionBegin(xrSession);
- Application.onBeforeRender += EnforceClippingPlanes;
+ if (!Application.isEditor)
+ {
+ Application.onBeforeRender += EnforceClippingPlanes;
+ }
}
protected override void OnSessionEnd(ulong xrSession)
{
base.OnSessionEnd(xrSession);
- Application.onBeforeRender -= EnforceClippingPlanes;
+ if (!Application.isEditor)
+ {
+ Application.onBeforeRender -= EnforceClippingPlanes;
+ }
}
protected override void OnSessionDestroy(ulong xrSession)
@@ -174,37 +185,26 @@ protected override void OnSessionDestroy(ulong xrSession)
protected override void OnSessionStateChange(int oldState, int newState)
{
- NativeBindings.MLOpenXRUpdateDepthRangeValues();
+ NativeBindings.MLHandleSessionStateChange(oldState, newState);
}
protected override void OnSubsystemCreate()
{
base.OnSubsystemCreate();
- CreateSubsystem<XRMeshSubsystemDescriptor, XRMeshSubsystem>(meshSubsysDesc, MagicLeapXrProvider.MeshingSubsystemId);
CreateSubsystem<XRSessionSubsystemDescriptor, XRSessionSubsystem>(sessionSubsysDesc, MagicLeapXrProvider.SessionSubsystemId);
}
protected override void OnSubsystemDestroy()
{
base.OnSubsystemDestroy();
- DestroySubsystem<XRMeshSubsystem>();
- DestroySubsystem<XRSessionSubsystem>();
- }
-
- public void StartMeshSubsystem()
- {
- StartSubsystem<XRMeshSubsystem>();
- }
- public void StopMeshSubsystem()
- {
- StopSubsystem<XRMeshSubsystem>();
+ DestroySubsystem<XRSessionSubsystem>();
}
private void PerformMLPerceptionSnapshot()
{
- if (!perceptionSnapshots)
+ if (!perceptionSnapshots || Application.isEditor)
return;
var result = MLResult.Code.Ok;
@@ -223,7 +223,13 @@ private void PerformMLPerceptionSnapshot()
private void EnforceClippingPlanes() => ApplyToCamera(Camera.main);
- public void ApplyFarClip(ref float zFar)
+ public void SetNearClipPolicy(NearClipMode mode)
+ {
+ nearClipPolicy = mode;
+ EnforceClippingPlanes();
+ }
+
+ private void ApplyFarClip(ref float zFar)
{
switch (farClipPolicy)
{
@@ -244,7 +250,7 @@ public void ApplyNearClip(ref float zNear)
switch (nearClipPolicy)
{
// Whatever is set in the system settings menu is our new minimum, even if it is
- // above the system recommendation
+ // above the system recommendation.
case NearClipMode.Minimum:
zNear = Mathf.Max(zNear, MinNearZ);
break;
@@ -253,9 +259,11 @@ public void ApplyNearClip(ref float zNear)
break;
#if DISABLE_MAGICLEAP_CLIP_ENFORCEMENT
case NearClipMode.None:
- default:
+ zNear = minimumNearClip;
break;
#endif
+ default:
+ break;
}
}
@@ -269,7 +277,6 @@ public void ApplyToCamera(Camera camera, bool warnIfNearClipChanged = true)
ApplyFarClip(ref zFar);
ApplyNearClip(ref zNear);
-
if (warnIfNearClipChanged && zNear > camera.nearClipPlane)
Debug.LogWarning($"Main Camera's nearClipPlane value is less than the minimum value for this device. Increasing to {zNear}");
diff --git a/Runtime/OpenXR/MagicLeapFeatureNativeBindings.cs b/Runtime/OpenXR/MagicLeapFeatureNativeBindings.cs
index d174d4a..d0d8dec 100644
--- a/Runtime/OpenXR/MagicLeapFeatureNativeBindings.cs
+++ b/Runtime/OpenXR/MagicLeapFeatureNativeBindings.cs
@@ -25,7 +25,7 @@ internal class NativeBindings : MagicLeapNativeBindings
[DllImport(MagicLeapXrProviderNativeBindings.MagicLeapXrProviderDll, CallingConvention = CallingConvention.Cdecl)]
public static extern bool MLOpenXROnInstanceCreate(IntPtr loaderFunc, ulong instance);
-
+
[DllImport(MagicLeapXrProviderNativeBindings.MagicLeapXrProviderDll, CallingConvention = CallingConvention.Cdecl)]
public static extern void MLOpenXROnInstanceDestroy(ulong instance);
@@ -39,7 +39,7 @@ internal class NativeBindings : MagicLeapNativeBindings
public static extern void MLOpenXROnSessionDestroy(ulong session);
[DllImport(MagicLeapXrProviderNativeBindings.MagicLeapXrProviderDll, CallingConvention = CallingConvention.Cdecl)]
- public static extern void MLOpenXRUpdateDepthRangeValues();
+ public static extern void MLHandleSessionStateChange(int oldState, int newState);
[DllImport(MagicLeapXrProviderNativeBindings.MagicLeapXrProviderDll, CallingConvention = CallingConvention.Cdecl)]
public static extern float MLOpenXRGetMinNearClippingPlane();
@@ -58,6 +58,25 @@ internal class NativeBindings : MagicLeapNativeBindings
[DllImport(MagicLeapXrProviderNativeBindings.MagicLeapXrProviderDll, CallingConvention = CallingConvention.Cdecl)]
public static extern MLResult.Code MLOpenXRConvertXrTimeToTimespecTime(long mlXrTime, out TimeSpec timeSpec);
+
+
+ [DllImport(MagicLeapXrProviderNativeBindings.MagicLeapXrProviderDll, CallingConvention = CallingConvention.Cdecl)]
+ public static extern void MLOpenXROnFeatureInstanceCreate([MarshalAs(UnmanagedType.LPStr)] string featureId, ulong instance, IntPtr xrGetInstanceProcAddr);
+
+ [DllImport(MagicLeapXrProviderNativeBindings.MagicLeapXrProviderDll, CallingConvention = CallingConvention.Cdecl)]
+ public static extern void MLOpenXROnFeatureSessionCreate([MarshalAs(UnmanagedType.LPStr)] string featureId, ulong session);
+
+ [DllImport(MagicLeapXrProviderNativeBindings.MagicLeapXrProviderDll, CallingConvention = CallingConvention.Cdecl)]
+ public static extern void MLOpenXROnFeatureAppSpaceChange([MarshalAs(UnmanagedType.LPStr)] string featureId, ulong space);
+
+ [DllImport(MagicLeapXrProviderNativeBindings.MagicLeapXrProviderDll, CallingConvention = CallingConvention.Cdecl)]
+ public static extern void MLOpenXRFeatureOnInstanceDestroy([MarshalAs(UnmanagedType.LPStr)] string featureId, ulong instance);
+
+ [DllImport(MagicLeapXrProviderNativeBindings.MagicLeapXrProviderDll, CallingConvention = CallingConvention.Cdecl)]
+ public static extern IntPtr MLOpenXRInterceptFunctionsForFeature([MarshalAs(UnmanagedType.LPStr)] string featureId, IntPtr original);
+
+ [DllImport(MagicLeapXrProviderNativeBindings.MagicLeapXrProviderDll, CallingConvention = CallingConvention.Cdecl)]
+ public static extern bool MLOpenXRGetUnityPoseForFeature([MarshalAs(UnmanagedType.LPStr)] string featureId, ulong space, out Pose pose);
}
}
}
diff --git a/Runtime/OpenXR/MagicLeapOpenXRFeatureBase.cs b/Runtime/OpenXR/MagicLeapOpenXRFeatureBase.cs
index 6113747..61f7d9f 100644
--- a/Runtime/OpenXR/MagicLeapOpenXRFeatureBase.cs
+++ b/Runtime/OpenXR/MagicLeapOpenXRFeatureBase.cs
@@ -14,14 +14,76 @@
#if UNITY_EDITOR
using UnityEditor;
-using UnityEditor.XR.OpenXR.Features;
#endif
namespace UnityEngine.XR.OpenXR.Features.MagicLeapSupport
{
public abstract class MagicLeapOpenXRFeatureBase : OpenXRFeature
{
- protected virtual IEnumerable<Type> dependsOn => Enumerable.Empty<Type>();
+ protected virtual string GetFeatureId()
+ {
+ return "";
+ }
+
+ protected override IntPtr HookGetInstanceProcAddr(IntPtr func)
+ {
+ var featureId = GetFeatureId();
+ if (string.IsNullOrEmpty(featureId))
+ {
+ return base.HookGetInstanceProcAddr(func);
+ }
+ return MagicLeapFeature.NativeBindings.MLOpenXRInterceptFunctionsForFeature(featureId, func);
+ }
+
+ protected override bool OnInstanceCreate(ulong xrInstance)
+ {
+ var result = base.OnInstanceCreate(xrInstance);
+ if (!result)
+ {
+ return false;
+ }
+
+ var featureId = GetFeatureId();
+ if (string.IsNullOrEmpty(featureId))
+ {
+ return true;
+ }
+ MagicLeapFeature.NativeBindings.MLOpenXROnFeatureInstanceCreate(featureId, xrInstance, xrGetInstanceProcAddr);
+ return true;
+ }
+
+ protected override void OnInstanceDestroy(ulong xrInstance)
+ {
+ base.OnInstanceDestroy(xrInstance);
+ var featureId = GetFeatureId();
+ if (string.IsNullOrEmpty(featureId))
+ {
+ return;
+ }
+ MagicLeapFeature.NativeBindings.MLOpenXRFeatureOnInstanceDestroy(featureId, xrInstance);
+ }
+
+ protected override void OnSessionCreate(ulong xrSession)
+ {
+ base.OnSessionCreate(xrSession);
+ var featureId = GetFeatureId();
+ if (string.IsNullOrEmpty(featureId))
+ {
+ return;
+ }
+ MagicLeapFeature.NativeBindings.MLOpenXROnFeatureSessionCreate(featureId, xrSession);
+ }
+
+ protected override void OnAppSpaceChange(ulong xrSpace)
+ {
+ base.OnAppSpaceChange(xrSpace);
+ var featureId = GetFeatureId();
+ if (string.IsNullOrEmpty(featureId))
+ {
+ return;
+ }
+ MagicLeapFeature.NativeBindings.MLOpenXROnFeatureAppSpaceChange(featureId, xrSpace);
+ }
protected void CheckEnabledExtension(string extensionName, bool required = false)
{
@@ -33,6 +95,15 @@ protected void CheckEnabledExtension(string extensionName, bool required = false
Debug.LogWarning($"OpenXR extension '{extensionName}' was not enabled!");
}
+
+ protected virtual IEnumerable<Type> dependsOn => Enumerable.Empty<Type>();
+
+ public bool GetUnityPose(ulong space, out Pose pose)
+ {
+ pose = default;
+ var featureId = GetFeatureId();
+ return !string.IsNullOrEmpty(featureId) && MagicLeapFeature.NativeBindings.MLOpenXRGetUnityPoseForFeature(featureId, space, out pose);
+ }
#if UNITY_EDITOR
protected override void GetValidationChecks(List<ValidationRule> rules, BuildTargetGroup targetGroup)
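
For context, a concrete feature opts into this native lifecycle forwarding simply by overriding GetFeatureId() with a non-empty id. A minimal sketch (the class name and id string below are hypothetical, not part of the SDK):

```csharp
// Hypothetical feature deriving from MagicLeapOpenXRFeatureBase.
// Returning a non-empty id causes the base class to forward instance,
// session, and app-space events to the native provider for this feature.
public class ExampleFeature : MagicLeapOpenXRFeatureBase
{
    public const string FeatureId = "com.example.openxr.feature.sample"; // hypothetical id

    protected override string GetFeatureId() => FeatureId;
}
```

In practice a concrete feature would also carry an [OpenXRFeature(...)] attribute for the editor UI, as the features further down in this diff do.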
diff --git a/Runtime/OpenXR/MagicLeapProjectValidation.cs b/Runtime/OpenXR/MagicLeapProjectValidation.cs
new file mode 100644
index 0000000..f1dffd4
--- /dev/null
+++ b/Runtime/OpenXR/MagicLeapProjectValidation.cs
@@ -0,0 +1,301 @@
+#if UNITY_EDITOR
+using System;
+using UnityEditor;
+using System.Linq;
+using UnityEngine.Rendering;
+using System.Reflection;
+using System.Collections.Generic;
+
+namespace UnityEngine.XR.OpenXR.Features.MagicLeapSupport
+{
+ public partial class MagicLeapFeature
+ {
+ protected override void GetValidationChecks(List<ValidationRule> rules, BuildTargetGroup targetGroup)
+ {
+ base.GetValidationChecks(rules, targetGroup);
+
+#if !UNITY_2023_1_OR_NEWER
+ getDefaultTextureCompressionFormat ??= TryGetPlayerSettingsMethod("GetDefaultTextureCompressionFormat");
+ isTextureCompressionAPIOk = TryGetDXTCEnum();
+ isTextureCompressionAPIOk &= IsDefaultTextureCompressionAPIValid(getDefaultTextureCompressionFormat);
+#endif
+
+ if (targetGroup == BuildTargetGroup.Android)
+ {
+ foreach (var rule in MagicLeapProjectRules)
+ {
+ rules.Add(rule);
+ }
+ }
+ else if (targetGroup == BuildTargetGroup.Standalone)
+ {
+ foreach(var rule in MagicLeapProjectRulesEditor)
+ {
+ rules.Add(rule);
+ }
+ }
+ }
+
+ private ValidationRule[] MagicLeapProjectRules => new ValidationRule[]
+ {
+ // must build for x86-64
+ new ValidationRule(this)
+ {
+ message = "Target architectures must include x86-64",
+ checkPredicate = () => PlayerSettings.Android.targetArchitectures.HasFlag(AndroidArchitecture.X86_64),
+ fixIt = () =>
+ {
+ var arch = PlayerSettings.Android.targetArchitectures | AndroidArchitecture.X86_64;
+ PlayerSettings.Android.buildApkPerCpuArchitecture = (PlayerSettings.Android.targetArchitectures != AndroidArchitecture.None);
+ PlayerSettings.Android.targetArchitectures = arch;
+ },
+ fixItMessage = "Set PlayerSettings Target Architecture to contain x86-64",
+ error = true,
+ errorEnteringPlaymode = false
+ },
+ // require Vulkan
+ new ValidationRule(this)
+ {
+ message = "Vulkan must be specified as the default Graphics API.",
+ checkPredicate = () =>
+ {
+ var currentApis = PlayerSettings.GetGraphicsAPIs(BuildTarget.Android);
+ return !PlayerSettings.GetUseDefaultGraphicsAPIs(BuildTarget.Android) &&
+ currentApis.Length > 0 &&
+ currentApis[0] == GraphicsDeviceType.Vulkan;
+ },
+ fixIt = () =>
+ {
+ var currentApis = PlayerSettings.GetGraphicsAPIs(BuildTarget.Android);
+ PlayerSettings.SetUseDefaultGraphicsAPIs(BuildTarget.Android, false);
+
+ var required = new GraphicsDeviceType[] { GraphicsDeviceType.Vulkan };
+ var graphicAPIs = required.Union(currentApis);
+ PlayerSettings.SetGraphicsAPIs(BuildTarget.Android, graphicAPIs.ToArray());
+ },
+ fixItMessage = "Set PlayerSettings 'Graphic Apis' to only contain Vulkan on Android target",
+ error = true,
+ errorEnteringPlaymode = false
+ },
+ // require DXT compression
+ new ValidationRule(this)
+ {
+ message = "Only DXT texture compression is supported.",
+#if !UNITY_2023_1_OR_NEWER
+ checkPredicate = () => IsDefaultTextureCompressionFormatDxtForTarget(BuildTargetGroup.Android),
+ // For now we must let the user fix the setting manually, there is no simple way to update
+ fixIt = () => SettingsService.OpenProjectSettings("Project/Player"),
+ fixItMessage = "Open Player Settings and manually update Texture compression format.",
+#else
+ checkPredicate = () =>
+ {
+ var first = PlayerSettings.Android.textureCompressionFormats[0];
+ return first == TextureCompressionFormat.DXTC || first == TextureCompressionFormat.DXTC_RGTC;
+ },
+ fixIt = () =>
+ {
+ var dxtc = new TextureCompressionFormat[] { TextureCompressionFormat.DXTC };
+ var formats = dxtc.Union(PlayerSettings.Android.textureCompressionFormats).ToArray();
+ PlayerSettings.Android.textureCompressionFormats = formats;
+ },
+ fixItMessage = "Set DXTC as default texture compression formats",
+#endif
+ error = true,
+ errorEnteringPlaymode = true,
+ },
+ // set target devices to 'any'
+ new ValidationRule(this)
+ {
+ message = "Must target all Android Devices.",
+ checkPredicate = () => PlayerSettings.Android.androidTargetDevices == AndroidTargetDevices.AllDevices,
+ fixIt = () => PlayerSettings.Android.androidTargetDevices = AndroidTargetDevices.AllDevices,
+ fixItMessage = "Set Target Devices to \"All Devices\".",
+ error = true
+ }
+ };
+
+ private ValidationRule[] MagicLeapProjectRulesEditor => new ValidationRule[]
+ {
+ // require Vulkan
+ new ValidationRule(this)
+ {
+ message = "Vulkan must be specified as the default Graphics API.",
+ checkPredicate = () =>
+ {
+ var currentApis = PlayerSettings.GetGraphicsAPIs(BuildTarget.StandaloneWindows);
+ return !PlayerSettings.GetUseDefaultGraphicsAPIs(BuildTarget.StandaloneWindows) &&
+ currentApis.Length > 0 &&
+ currentApis[0] == GraphicsDeviceType.Vulkan;
+ },
+ fixIt = () =>
+ {
+ var currentApis = PlayerSettings.GetGraphicsAPIs(BuildTarget.StandaloneWindows);
+ PlayerSettings.SetUseDefaultGraphicsAPIs(BuildTarget.StandaloneWindows, false);
+
+ var required = new GraphicsDeviceType[] { GraphicsDeviceType.Vulkan };
+ var graphicAPIs = required.Union(currentApis);
+ PlayerSettings.SetGraphicsAPIs(BuildTarget.StandaloneWindows, graphicAPIs.ToArray());
+ },
+ fixItMessage = "Set PlayerSettings 'Graphic Apis' to only contain Vulkan on Android target",
+ error = true,
+ errorEnteringPlaymode = false
+ },
+ // Multi-pass rendering
+ new ValidationRule(this)
+ {
+ message = "Multi-pass rendering is required in order for Play Mode to render both eyes.",
+ checkPredicate = () =>
+ {
+ var settings = OpenXRSettings.GetSettingsForBuildTargetGroup(BuildTargetGroup.Standalone);
+ return settings.renderMode == OpenXRSettings.RenderMode.MultiPass;
+ },
+ fixIt = () =>
+ {
+ var settings = OpenXRSettings.GetSettingsForBuildTargetGroup(BuildTargetGroup.Standalone);
+ settings.renderMode = OpenXRSettings.RenderMode.MultiPass;
+ },
+ fixItMessage = "Set Render Mode to Multi-pass.",
+ error = true,
+ errorEnteringPlaymode = true
+ },
+ // Use system default runtime
+ new ValidationRule(this)
+ {
+ message = "The Application Simulator needs to use your system's default OpenXR runtime.",
+ checkPredicate = () => IsPlaymodeRuntimeSetToSystemDefault(),
+ fixIt = () => SetPlaymodeRuntimeToSystemDefault(),
+ fixItMessage = "Set Play Mode OpenXR Runtime to \"System Default\"",
+ error = true,
+ errorEnteringPlaymode = false
+ }
+ };
+
+ private static bool IsPlaymodeRuntimeSetToSystemDefault()
+ {
+ if (Application.isPlaying)
+ {
+ return true;
+ }
+ try
+ {
+ var selectorClass = Type.GetType("UnityEditor.XR.OpenXR.OpenXRRuntimeSelector,Unity.XR.OpenXR.Editor.dll");
+ if (selectorClass != null && selectorClass.IsClass)
+ {
+ var selectedIndexField = selectorClass.GetField("selectedRuntimeIndex", BindingFlags.NonPublic | BindingFlags.Static);
+ if (selectedIndexField != null && selectedIndexField.FieldType == typeof(int))
+ {
+ int selectedIndex = (int)selectedIndexField.GetValue(null);
+ return selectedIndex == 0;
+ }
+ }
+ return false;
+ }
+ catch (Exception)
+ {
+ return false;
+ }
+ }
+
+ private static void SetPlaymodeRuntimeToSystemDefault()
+ {
+ if (Application.isPlaying)
+ return;
+
+ var selectorClass = Type.GetType("UnityEditor.XR.OpenXR.OpenXRRuntimeSelector,Unity.XR.OpenXR.Editor.dll");
+ if (selectorClass != null && selectorClass.IsClass)
+ {
+ var selectedIndexField = selectorClass.GetField("selectedRuntimeIndex", BindingFlags.NonPublic | BindingFlags.Static);
+ if (selectedIndexField != null && selectedIndexField.FieldType == typeof(int))
+ {
+ selectedIndexField.SetValue(null, 0);
+ Environment.SetEnvironmentVariable("XR_SELECTED_RUNTIME_JSON", "");
+ }
+ }
+ }
+
+ #region Texture Compression Format reflection
+ // Before Unity 2023.1 there is no public API in PlayerSettings for getting or setting "Texture compression format" so we need to use reflection
+ // unnecessary in 2023.1: https://docs.unity3d.com/2023.1/Documentation/ScriptReference/PlayerSettings.Android-textureCompressionFormats.html
+#if !UNITY_2023_1_OR_NEWER
+ private static bool isTextureCompressionAPIOk;
+ private static MethodInfo getDefaultTextureCompressionFormat;
+ private static int dxtcEnumValue;
+ private static int dxtcRGTCEnumValue;
+
+ private static bool TryGetDXTCEnum()
+ {
+ dxtcEnumValue = -1;
+ dxtcRGTCEnumValue = -1;
+ try
+ {
+ var textureCompressionFormatEnum = Type.GetType("UnityEditor.TextureCompressionFormat,UnityEditor.dll");
+ if (textureCompressionFormatEnum != null && textureCompressionFormatEnum.IsEnum)
+ {
+ string[] enumNames = textureCompressionFormatEnum.GetEnumNames();
+ Array enumValues = textureCompressionFormatEnum.GetEnumValues();
+ for (int i = 0; i < enumValues.Length; ++i)
+ {
+ if (enumNames[i] == "DXTC")
+ dxtcEnumValue = Convert.ToInt32(enumValues.GetValue(i));
+ if (enumNames[i] == "DXTC_RGTC")
+ dxtcRGTCEnumValue = Convert.ToInt32(enumValues.GetValue(i));
+ }
+ }
+ return dxtcEnumValue != -1 && dxtcRGTCEnumValue != -1;
+ }
+ catch (Exception)
+ {
+ return false;
+ }
+ }
+
+ private static MethodInfo TryGetPlayerSettingsMethod(string methodName)
+ {
+ MethodInfo playerSettingsMethod;
+ try
+ {
+ var playerSettingsType = Type.GetType("UnityEditor.PlayerSettings,UnityEditor.dll");
+ playerSettingsMethod = playerSettingsType?.GetMethod(methodName, BindingFlags.Static | BindingFlags.NonPublic);
+ }
+ catch (Exception)
+ {
+ return null;
+ }
+
+ return playerSettingsMethod;
+ }
+
+ private static bool ValidateEnumParameter(ParameterInfo param, string enumName, string parameterName)
+ {
+ return param.Name == parameterName && param.ParameterType.Name == enumName && param.ParameterType.IsEnum;
+ }
+
+ private static bool IsDefaultTextureCompressionAPIValid(MethodInfo s_GetDefaultTextureCompressionFormat)
+ {
+ if (s_GetDefaultTextureCompressionFormat == null || s_GetDefaultTextureCompressionFormat.MemberType != MemberTypes.Method)
+ return false;
+ var getterReturnType = s_GetDefaultTextureCompressionFormat.ReturnType;
+ if (!getterReturnType.IsEnum || getterReturnType.Name != "TextureCompressionFormat")
+ return false;
+ var getterParameters = s_GetDefaultTextureCompressionFormat.GetParameters();
+ if (getterParameters.Length != 1
+ || !ValidateEnumParameter(getterParameters[0], "BuildTargetGroup", "platform"))
+ return false;
+
+ return dxtcEnumValue != -1 || dxtcRGTCEnumValue != -1;
+ }
+
+ private static bool IsDefaultTextureCompressionFormatDxtForTarget(BuildTargetGroup buildTargetGroup)
+ {
+ if (!isTextureCompressionAPIOk || getDefaultTextureCompressionFormat == null)
+ return true;
+
+ object enabledStateResult = getDefaultTextureCompressionFormat.Invoke(null, new object[] { buildTargetGroup });
+ var textureCompression = Convert.ToInt32(enabledStateResult);
+ return textureCompression == dxtcEnumValue || textureCompression == dxtcRGTCEnumValue;
+ }
+#endif
+#endregion
+ }
+}
+#endif
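
Each of the rules above pairs a checkPredicate with an optional fixIt action that the OpenXR Project Validation window invokes. A hypothetical rule following the same pattern (a fragment that would sit in one of the rules arrays; the API-level requirement shown is an illustrative assumption, not a rule this file defines):

```csharp
// Hypothetical ValidationRule fragment: flag a low minimum Android API
// level and offer a one-click fix from the Project Validation window.
new ValidationRule(this)
{
    message = "Minimum Android API level should be at least 29.",
    checkPredicate = () => (int)PlayerSettings.Android.minSdkVersion >= 29,
    fixIt = () => PlayerSettings.Android.minSdkVersion = AndroidSdkVersions.AndroidApiLevel29,
    fixItMessage = "Set Minimum API Level to 29.",
    error = false
}
```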
diff --git a/Runtime/OpenXR/MagicLeapProjectValidation.cs.meta b/Runtime/OpenXR/MagicLeapProjectValidation.cs.meta
new file mode 100644
index 0000000..fbf141b
--- /dev/null
+++ b/Runtime/OpenXR/MagicLeapProjectValidation.cs.meta
@@ -0,0 +1,11 @@
+fileFormatVersion: 2
+guid: da922633a99ac7e40ba8b83f9173f197
+MonoImporter:
+ externalObjects: {}
+ serializedVersion: 2
+ defaultReferences: []
+ executionOrder: 0
+ icon: {instanceID: 0}
+ userData:
+ assetBundleName:
+ assetBundleVariant:
diff --git a/Runtime/OpenXR/MagicLeapRenderingExtensionsFeature.cs b/Runtime/OpenXR/MagicLeapRenderingExtensionsFeature.cs
index 1f925ad..72d1a4d 100644
--- a/Runtime/OpenXR/MagicLeapRenderingExtensionsFeature.cs
+++ b/Runtime/OpenXR/MagicLeapRenderingExtensionsFeature.cs
@@ -28,7 +28,7 @@ namespace UnityEngine.XR.OpenXR.Features.MagicLeapSupport
Desc="Support for controlling rendering features specific to Magic Leap 2.",
Company = "Magic Leap",
Version = "1.0.0",
- BuildTargetGroups = new []{ BuildTargetGroup.Android, BuildTargetGroup.Standalone },
+ BuildTargetGroups = new []{ BuildTargetGroup.Android },
FeatureId = FeatureId,
OpenxrExtensionStrings = "XR_ML_frame_end_info " +
"XR_ML_global_dimmer "
diff --git a/Runtime/OpenXR/MarkerUnderstanding.meta b/Runtime/OpenXR/MarkerUnderstanding.meta
new file mode 100644
index 0000000..d899ed1
--- /dev/null
+++ b/Runtime/OpenXR/MarkerUnderstanding.meta
@@ -0,0 +1,8 @@
+fileFormatVersion: 2
+guid: 6d02426040db8ac4ebd455968f3de0c8
+folderAsset: yes
+DefaultImporter:
+ externalObjects: {}
+ userData:
+ assetBundleName:
+ assetBundleVariant:
diff --git a/Runtime/OpenXR/MarkerUnderstanding/MagicLeapMarkerUnderstanding.cs b/Runtime/OpenXR/MarkerUnderstanding/MagicLeapMarkerUnderstanding.cs
new file mode 100644
index 0000000..5edd3a6
--- /dev/null
+++ b/Runtime/OpenXR/MarkerUnderstanding/MagicLeapMarkerUnderstanding.cs
@@ -0,0 +1,103 @@
+#if UNITY_OPENXR_1_9_0_OR_NEWER
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Runtime.InteropServices;
+using UnityEngine.XR.MagicLeap;
+
+using NativeBindings = UnityEngine.XR.OpenXR.Features.MagicLeapSupport.MagicLeapUserCalibrationFeature.NativeBindings;
+
+namespace UnityEngine.XR.OpenXR.Features.MagicLeapSupport
+{
+ public partial class MagicLeapMarkerUnderstandingFeature
+ {
+ private List<MarkerDetector> markerDetectors = new();
+
+ ///
+ /// The active marker detectors tracked by the marker understanding feature.
+ ///
+ /// A readonly list of the active marker detectors.
+ public IReadOnlyList<MarkerDetector> MarkerDetectors => markerDetectors;
+
+ ///
+ /// Creates a marker detector with predefined settings.
+ ///
+ /// The marker detector settings to be associated with the marker detector to be created.
+ /// The marker detector that has been created. Returns null if the number of active marker detectors is at the limit.
+ public MarkerDetector CreateMarkerDetector(MarkerDetectorSettings settings)
+ {
+ if (markerDetectors.Count >= MarkerDetectorsLimit)
+ {
+ Debug.LogError($"The number of active marker detectors cannot exceed {MarkerDetectorsLimit}");
+ return null;
+ }
+
+ MarkerDetector markerDetector = new MarkerDetector(settings);
+
+ markerDetectors.Add(markerDetector);
+
+ return markerDetector;
+ }
+
+ ///
+ /// Provides the ability to modify a marker detector with new settings.
+ /// Note: this method actually destroys the old marker detector and replaces it with a newly created one.
+ /// However, this approach maintains its index position in the marker detectors list.
+ ///
+ /// The marker detector settings to be associated with the marker detector to be created.
+ /// The specified marker detector to modify.
+ /// On return, the ref parameter holds the newly created marker detector that replaced the old one.
+ /// If the specified marker detector is not tracked, an error is logged and no replacement occurs.
+ public void ModifyMarkerDetector(MarkerDetectorSettings settings, ref MarkerDetector markerDetector)
+ {
+ int index = markerDetectors.IndexOf(markerDetector, 0);
+
+ if (index == -1)
+ {
+ Debug.LogError("Marker detector cannot be modified because it is not tracked by the MarkerUnderstandingFeature. Either it was already destroyed or not created properly.");
+ return;
+ }
+
+ DestroyMarkerDetector(markerDetector);
+
+ markerDetector = new MarkerDetector(settings);
+ markerDetector.UpdateData();
+
+ markerDetectors.Insert(index, markerDetector);
+ }
+
+ ///
+ /// Updates the status and data for all actively tracked marker detectors.
+ ///
+ public void UpdateMarkerDetectors()
+ {
+ foreach(MarkerDetector markerDetector in markerDetectors)
+ {
+ markerDetector.UpdateData();
+ }
+ }
+
+ ///
+ /// Destroys the specified marker detector.
+ ///
+ /// The marker detector to be destroyed.
+ public void DestroyMarkerDetector(MarkerDetector markerDetector)
+ {
+ markerDetectors.Remove(markerDetector);
+ markerDetector.Destroy();
+ }
+
+ ///
+ /// Destroys all actively tracked marker detectors.
+ ///
+ public void DestroyAllMarkerDetectors()
+ {
+ foreach(MarkerDetector markerDetector in markerDetectors)
+ {
+ markerDetector.Destroy();
+ }
+
+ markerDetectors.Clear();
+ }
+ }
+}
+#endif
\ No newline at end of file
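
A usage sketch for the detector lifecycle above, assuming the Magic Leap 2 Marker Understanding feature is enabled in the project's OpenXR settings (class and variable names are illustrative):

```csharp
using UnityEngine;
using UnityEngine.XR.OpenXR;
using MarkerFeature = UnityEngine.XR.OpenXR.Features.MagicLeapSupport.MagicLeapMarkerUnderstandingFeature;

public class MarkerExample : MonoBehaviour
{
    private MarkerFeature feature;
    private MarkerFeature.MarkerDetector detector;

    void Start()
    {
        feature = OpenXRSettings.Instance.GetFeature<MarkerFeature>();

        var settings = new MarkerFeature.MarkerDetectorSettings();
        settings.MarkerType = MarkerFeature.MarkerType.QR;
        settings.QRSettings.EstimateQRLength = true;

        detector = feature.CreateMarkerDetector(settings);
    }

    void Update()
    {
        // Poll the native detectors once per frame.
        feature.UpdateMarkerDetectors();

        if (detector.Status == MarkerFeature.MarkerDetectorStatus.Ready)
        {
            foreach (var data in detector.Data)
                Debug.Log($"QR: {data.MarkerString} at {data.MarkerPose.position}");
        }
    }

    void OnDestroy() => feature.DestroyAllMarkerDetectors();
}
```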
diff --git a/Runtime/OpenXR/MarkerUnderstanding/MagicLeapMarkerUnderstanding.cs.meta b/Runtime/OpenXR/MarkerUnderstanding/MagicLeapMarkerUnderstanding.cs.meta
new file mode 100644
index 0000000..8e261f1
--- /dev/null
+++ b/Runtime/OpenXR/MarkerUnderstanding/MagicLeapMarkerUnderstanding.cs.meta
@@ -0,0 +1,11 @@
+fileFormatVersion: 2
+guid: 1638e5f1b4300104f8fd40decebd09bf
+MonoImporter:
+ externalObjects: {}
+ serializedVersion: 2
+ defaultReferences: []
+ executionOrder: 0
+ icon: {instanceID: 0}
+ userData:
+ assetBundleName:
+ assetBundleVariant:
diff --git a/Runtime/OpenXR/MarkerUnderstanding/MagicLeapMarkerUnderstandingData.cs b/Runtime/OpenXR/MarkerUnderstanding/MagicLeapMarkerUnderstandingData.cs
new file mode 100644
index 0000000..bb5832c
--- /dev/null
+++ b/Runtime/OpenXR/MarkerUnderstanding/MagicLeapMarkerUnderstandingData.cs
@@ -0,0 +1,498 @@
+#if UNITY_OPENXR_1_9_0_OR_NEWER
+
+namespace UnityEngine.XR.OpenXR.Features.MagicLeapSupport
+{
+ public partial class MagicLeapMarkerUnderstandingFeature
+ {
+ ///
+ /// The maximum number of marker detectors allowed at once.
+ ///
+ public const int MarkerDetectorsLimit = 64;
+
+ ///
+ /// Represents the different tracker profiles used to optimize marker tracking in different use cases.
+ ///
+ public enum MarkerDetectorProfile
+ {
+ ///
+ /// Generic tracker profile.
+ /// Tracker profile that covers standard use cases. If this does not fit the
+ /// needs of the application try the other profiles.
+ ///
+ Default = 0,
+
+ ///
+ /// Use this profile to reduce the compute load and increase detection/tracking speed.
+ /// This can result in less accurate poses.
+ ///
+ Speed,
+
+ ///
+ /// Use this profile to optimize for accurate marker poses.
+ /// This can cause increased load on the compute.
+ ///
+ Accuracy,
+
+ ///
+ /// Use this profile to optimize for markers that are small or for larger
+ /// markers that need to be detected from afar.
+ ///
+ SmallTargets,
+
+ ///
+ /// Use this profile to be able to detect markers across a larger Field Of View.
+ /// Marker Tracker system will attempt to use multiple cameras to detect the markers.
+ ///
+ LargeFOV,
+
+ ///
+ /// Application can define a custom tracker profiler.
+ ///
+ Custom,
+ }
+
+ ///
+ /// Represents the different marker types supported by the API
+ ///
+ public enum MarkerType
+ {
+ Aruco = 0,
+ AprilTag,
+ QR,
+ EAN13,
+ UPCA,
+ Code128
+ }
+
+ ///
+ /// The current status of the readiness of the marker tracker.
+ ///
+ public enum MarkerDetectorStatus
+ {
+ Pending = 0,
+ Ready,
+ Error
+ }
+
+ ///
+ /// Supported pre-defined ArUco dictionaries.
+ /// Marker Understanding supports pre-defined ArUco dictionaries.
+ /// ArUco dictionaries can be looked up and markers can be generated for them here:
+ /// http://chev.me/arucogen/
+ ///
+ public enum ArucoType
+ {
+ ///
+ /// 4 by 4 pixel ArUco marker dictionary with 50 IDs.
+ ///
+ Dictionary_4x4_50 = 0,
+
+ ///
+ /// 4 by 4 pixel ArUco marker dictionary with 100 IDs.
+ ///
+ Dictionary_4x4_100,
+
+ ///
+ /// 4 by 4 pixel ArUco marker dictionary with 250 IDs.
+ ///
+ Dictionary_4x4_250,
+
+ ///
+ /// 4 by 4 pixel ArUco marker dictionary with 1000 IDs.
+ ///
+ Dictionary_4x4_1000,
+
+ ///
+ /// 5 by 5 pixel ArUco marker dictionary with 50 IDs.
+ ///
+ Dictionary_5x5_50,
+
+ ///
+ /// 5 by 5 pixel ArUco marker dictionary with 100 IDs.
+ ///
+ Dictionary_5x5_100,
+
+ ///
+ /// 5 by 5 pixel ArUco marker dictionary with 250 IDs.
+ ///
+ Dictionary_5x5_250,
+
+ ///
+ /// 5 by 5 pixel ArUco marker dictionary with 1000 IDs.
+ ///
+ Dictionary_5x5_1000,
+
+ ///
+ /// 6 by 6 pixel ArUco marker dictionary with 50 IDs.
+ ///
+ Dictionary_6x6_50,
+
+ ///
+ /// 6 by 6 pixel ArUco marker dictionary with 100 IDs.
+ ///
+ Dictionary_6x6_100,
+
+ ///
+ /// 6 by 6 pixel ArUco marker dictionary with 250 IDs.
+ ///
+ Dictionary_6x6_250,
+
+ ///
+ /// 6 by 6 pixel ArUco marker dictionary with 1000 IDs.
+ ///
+ Dictionary_6x6_1000,
+
+ ///
+ /// 7 by 7 pixel ArUco marker dictionary with 50 IDs.
+ ///
+ Dictionary_7x7_50,
+
+ ///
+ /// 7 by 7 pixel ArUco marker dictionary with 100 IDs.
+ ///
+ Dictionary_7x7_100,
+
+ ///
+ /// 7 by 7 pixel ArUco marker dictionary with 250 IDs.
+ ///
+ Dictionary_7x7_250,
+
+ ///
+ /// 7 by 7 pixel ArUco marker dictionary with 1000 IDs.
+ ///
+ Dictionary_7x7_1000,
+ }
+
+ ///
+ /// Supported pre-defined AprilTag dictionaries.
+ /// Marker Understanding supports pre-defined AprilTag dictionaries.
+ /// AprilTag dictionaries can be looked up and markers can be generated for them here:
+ /// http://chev.me/arucogen/
+ ///
+ public enum AprilTagType
+ {
+ ///
+ /// 4x4 bits, minimum hamming distance between any two codes = 5, 30 codes
+ ///
+ Dictionary_16H5 = 0,
+
+ ///
+ /// 5x5 bits, minimum hamming distance between any two codes = 9, 35 codes
+ ///
+ Dictionary_25H9,
+
+ ///
+ /// 6x6 bits, minimum hamming distance between any two codes = 10, 2320 codes
+ ///
+ Dictionary_36H10,
+
+ ///
+ /// 6x6 bits, minimum hamming distance between any two codes = 11, 587 codes
+ ///
+ Dictionary_36H11
+ }
+
+ ///
+ /// Used to hint to the back-end the max frames per second
+ /// that should be analyzed. This is set in the
+ /// CustomProfileSettings structure and this setting
+ /// applies to all enabled trackers.
+ ///
+ /// CPU load is a combination of enabled detector types,
+ /// FPSHint and ResolutionHint. Enabling more detectors with higher FPS
+ /// and resolution hints results in a higher CPU load. High CPU load can
+ /// affect the performance of your system.
+ ///
+ public enum MarkerDetectorFPS
+ {
+ Low = 0,
+ Medium,
+ High,
+ Max
+ }
+
+ ///
+ /// The MarkerDetectorResolution enum values are
+ /// used to hint to the back-end the resolution
+ /// that should be used. This is set in the
+ /// CustomProfileSettings structure and this setting
+ /// currently only applies to the QR, UPC and EAN detectors.
+ ///
+ /// CPU load is a combination of enabled detector types,
+ /// MarkerDetectorFPS and MarkerDetectorResolution. Enabling more detectors
+ /// with higher FPS and resolution hints results in a higher CPU load.
+ /// High CPU load can affect the performance of your system.
+ ///
+ public enum MarkerDetectorResolution
+ {
+ Low = 0,
+ Medium,
+ High
+ }
+
+ ///
+ /// The MarkerDetectorCamera enum values are used to hint which camera
+ /// should be used. This is set in the CustomProfileSettings structure
+ /// and currently only applies to the Aruco detectors.
+ ///
+ /// The RGB camera has a higher resolution than the world cameras and is
+ /// better suited for use cases where the target to be tracked is small
+ /// or needs to be detected from afar.
+ ///
+ /// The world camera option makes use of multiple world cameras to improve
+ /// accuracy and increase the FoV for detection.
+ ///
+ public enum MarkerDetectorCamera
+ {
+ ///
+ /// Single RGB Camera.
+ ///
+ RGB = 0,
+
+ ///
+ /// One or more world cameras.
+ ///
+ World
+ }
+
+ ///
+ /// The Aruco/April tag detector comes with several corner refinement methods.
+ /// Choosing the right corner refinement method has an impact on the accuracy and
+ /// speed trade-off that comes with each detection pipeline.
+ /// Corner refinement only applies to Aruco and April tags, not QR codes.
+ ///
+ public enum MarkerDetectorCornerRefineMethod
+ {
+ ///
+ /// No refinement, may have inaccurate corners.
+ ///
+ None = 0,
+
+ ///
+ /// Corners have subpixel coordinates.
+ /// High detection rate, very fast, reasonable accuracy.
+ ///
+ Subpix,
+
+ ///
+ /// High detection rate, fast, reasonable accuracy.
+ ///
+ Contour,
+
+ ///
+ /// Reasonable detection rate, slowest, but very accurate.
+ ///
+ AprilTag
+ }
+
+ ///
+ /// In order to improve performance, the detectors don't always run on the full
+ /// frame. Full frame analysis is however necessary to detect new markers that
+ /// weren't detected before. Use this option to control how often the detector may
+ /// detect new markers and its impact on tracking performance.
+ ///
+ public enum MarkerDetectorFullAnalysisInterval
+ {
+
+ ///
+ /// Detector analyzes every frame fully.
+ ///
+ Max = 0,
+
+ ///
+ /// Detector analyzes the frame fully very often.
+ ///
+ Fast,
+
+ ///
+ /// Detector analyzes the frame fully a few times per second.
+ ///
+ Medium,
+
+ ///
+ /// Detector analyzes the frame fully about once every second.
+ ///
+ Slow
+ }
+
+ ///
+ /// The data retrieved from a marker detector.
+ ///
+ public struct MarkerData
+ {
+ ///
+ /// The reprojection error of this QR code detection in degrees.
+ ///
+ /// The reprojection error is only useful when tracking QR codes. A high
+ /// reprojection error means that the estimated pose of the QR code doesn't match
+ /// well with the 2D detection on the processed video frame and thus the pose might
+ /// be inaccurate. The error is given in degrees, signifying by how much either
+ /// camera or QR code would have to be moved or rotated to create a perfect
+ /// reprojection. The further away your QR code is, the smaller this reprojection
+ /// error will be for the same displacement error of the code.
+ ///
+ public float ReprojectionErrorMeters;
+
+ ///
+ /// The estimated length of the marker in meters.
+ ///
+ public float MarkerLength;
+
+ ///
+ /// The numeric data retrieved from the marker. Does not apply to QR, EAN13, UPCA, or Code128.
+ ///
+ public ulong MarkerNumber;
+
+ ///
+ /// The string data obtained from the marker. Only applies to QR, EAN13, UPCA, and Code128.
+ ///
+ public string MarkerString;
+
+ ///
+ /// The position and rotation data of the marker. Only applies to Aruco, AprilTag, and QR.
+ ///
+ public Pose MarkerPose;
+ }
+
+ ///
+ /// All of the settings associated with the marker tracker.
+ ///
+ public struct MarkerDetectorSettings
+ {
+ ///
+ /// The marker type to be associated with the marker tracker.
+ ///
+ public MarkerType MarkerType;
+
+ ///
+ /// The type of tracker profile to be associated with the marker tracker.
+ ///
+ public MarkerDetectorProfile MarkerDetectorProfile;
+
+ ///
+ /// The custom settings to be applied to the marker tracker. This is only applicable when a custom type is selected for the MarkerDetectorProfile.
+ ///
+ public CustomProfileSettings CustomProfileSettings;
+
+ ///
+ /// The settings associated with the Aruco marker type to be applied to the marker tracker. This only applies if the MarkerType is Aruco.
+ ///
+ public ArucoSettings ArucoSettings;
+
+ ///
+ /// The settings associated with the AprilTag marker type to be applied to the marker tracker. This only applies if the MarkerType is AprilTag.
+ ///
+ public AprilTagSettings AprilTagSettings;
+
+ ///
+ /// The settings associated with the QR marker type to be applied to the marker tracker. This only applies if the MarkerType is QR.
+ ///
+ public QRSettings QRSettings;
+ }
+
+ ///
+ /// The custom settings applied to the marker tracker when a custom profile is used.
+ ///
+ public struct CustomProfileSettings
+ {
+ ///
+ /// A hint to the back-end for the max frames per second that should be analyzed.
+ ///
+ public MarkerDetectorFPS FPSHint;
+
+ ///
+ /// A hint to the back-end for the resolution that should be used.
+ ///
+ public MarkerDetectorResolution ResolutionHint;
+
+ ///
+ /// A hint to the back-end for what cameras should be used.
+ ///
+ public MarkerDetectorCamera CameraHint;
+
+ ///
+ /// This option provides control over corner refinement methods and a way to
+ /// balance detection rate, speed and pose accuracy. Always available and
+ /// applicable for Aruco and April tags.
+ ///
+ public MarkerDetectorCornerRefineMethod CornerRefinement;
+
+ ///
+ /// Runs a refinement step that uses marker edges to generate even more accurate
+ /// corners, but slows down the overall tracking rate by consuming more compute.
+ /// Aruco/April tags only.
+ ///
+ public bool UseEdgeRefinement;
+
+ ///
+ /// In order to improve performance, the detectors don't always run on the full
+ /// frame. Full frame analysis is however necessary to detect new markers that
+ /// weren't detected before. Use this option to control how often the detector may
+ /// detect new markers and its impact on tracking performance.
+ ///
+ public MarkerDetectorFullAnalysisInterval AnalysisInterval;
+ }
+
+ ///
+ /// Settings associated with the Aruco marker type.
+ ///
+ public struct ArucoSettings
+ {
+ ///
+ /// Whether the length of the Aruco marker will be estimated automatically.
+ ///
+ public bool EstimateArucoLength;
+
+ ///
+ /// The estimated length of the Aruco marker in meters.
+ ///
+ public float ArucoLength;
+
+ ///
+ /// The type of pre-defined Aruco dictionary.
+ ///
+ public ArucoType ArucoType;
+ }
+
+ ///
+ /// Settings associated with the AprilTag marker type.
+ ///
+ public struct AprilTagSettings
+ {
+ ///
+ /// Whether the length of the AprilTag marker will be estimated automatically.
+ ///
+ public bool EstimateAprilTagLength;
+
+ ///
+ /// The estimated length of the AprilTag marker in meters.
+ ///
+ public float AprilTagLength;
+
+ ///
+ /// The type of pre-defined AprilTag dictionary.
+ ///
+ public AprilTagType AprilTagType;
+ }
+
+ ///
+ /// Settings associated with the QR marker type.
+ ///
+ public struct QRSettings
+ {
+ ///
+ /// Whether the length of the QR marker will be estimated automatically.
+ ///
+ public bool EstimateQRLength;
+
+ ///
+ /// The estimated length of the QR marker in meters.
+ ///
+ public float QRLength;
+ }
+ }
+}
+#endif
\ No newline at end of file
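
The settings structs above compose as follows; an illustrative configuration for an Aruco detector with a custom profile (all values are example choices, not recommended defaults):

```csharp
using MarkerFeature = UnityEngine.XR.OpenXR.Features.MagicLeapSupport.MagicLeapMarkerUnderstandingFeature;

// Illustrative: a custom profile tuned toward small, distant Aruco markers.
var settings = new MarkerFeature.MarkerDetectorSettings
{
    MarkerType = MarkerFeature.MarkerType.Aruco,
    MarkerDetectorProfile = MarkerFeature.MarkerDetectorProfile.Custom,
    CustomProfileSettings = new MarkerFeature.CustomProfileSettings
    {
        FPSHint = MarkerFeature.MarkerDetectorFPS.High,
        ResolutionHint = MarkerFeature.MarkerDetectorResolution.High,
        CameraHint = MarkerFeature.MarkerDetectorCamera.RGB,
        CornerRefinement = MarkerFeature.MarkerDetectorCornerRefineMethod.AprilTag,
        UseEdgeRefinement = false,
        AnalysisInterval = MarkerFeature.MarkerDetectorFullAnalysisInterval.Medium
    },
    ArucoSettings = new MarkerFeature.ArucoSettings
    {
        EstimateArucoLength = false,
        ArucoLength = 0.05f, // 5 cm printed markers (example value)
        ArucoType = MarkerFeature.ArucoType.Dictionary_5x5_50
    }
};
```

CustomProfileSettings is only consulted when MarkerDetectorProfile is Custom; with any predefined profile, the hints above are ignored.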
diff --git a/Runtime/OpenXR/MarkerUnderstanding/MagicLeapMarkerUnderstandingData.cs.meta b/Runtime/OpenXR/MarkerUnderstanding/MagicLeapMarkerUnderstandingData.cs.meta
new file mode 100644
index 0000000..e7a5542
--- /dev/null
+++ b/Runtime/OpenXR/MarkerUnderstanding/MagicLeapMarkerUnderstandingData.cs.meta
@@ -0,0 +1,11 @@
+fileFormatVersion: 2
+guid: deb853be8f300d1408e09eb5849aefa6
+MonoImporter:
+ externalObjects: {}
+ serializedVersion: 2
+ defaultReferences: []
+ executionOrder: 0
+ icon: {instanceID: 0}
+ userData:
+ assetBundleName:
+ assetBundleVariant:
diff --git a/Runtime/OpenXR/MarkerUnderstanding/MagicLeapMarkerUnderstandingFeature.cs b/Runtime/OpenXR/MarkerUnderstanding/MagicLeapMarkerUnderstandingFeature.cs
new file mode 100644
index 0000000..e948d65
--- /dev/null
+++ b/Runtime/OpenXR/MarkerUnderstanding/MagicLeapMarkerUnderstandingFeature.cs
@@ -0,0 +1,56 @@
+// %BANNER_BEGIN%
+// ---------------------------------------------------------------------
+// %COPYRIGHT_BEGIN%
+// Copyright (c) (2019-2023) Magic Leap, Inc. All Rights Reserved.
+// Use of this file is governed by the Software License Agreement, located here: https://www.magicleap.com/software-license-agreement-ml2
+// Terms and conditions applicable to third-party materials accompanying this distribution may also be found in the top-level NOTICE file appearing herein.
+// %COPYRIGHT_END%
+// ---------------------------------------------------------------------
+// %BANNER_END%
+#if UNITY_OPENXR_1_9_0_OR_NEWER
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using UnityEngine.XR.MagicLeap;
+#if UNITY_EDITOR
+using UnityEditor;
+using UnityEditor.XR.OpenXR.Features;
+#endif
+
+namespace UnityEngine.XR.OpenXR.Features.MagicLeapSupport
+{
+#if UNITY_EDITOR
+ [OpenXRFeature(UiName = "Magic Leap 2 Marker Understanding",
+ Desc = "Necessary to deploy a Magic Leap 2 compatible application with Marker Understanding events.",
+ Company = "Magic Leap",
+ Version = "1.0.0",
+ Priority = -1,
+ BuildTargetGroups = new[] { BuildTargetGroup.Android, BuildTargetGroup.Standalone },
+ FeatureId = FeatureId,
+ OpenxrExtensionStrings = "XR_ML_marker_understanding"
+ )]
+#endif
+ public partial class MagicLeapMarkerUnderstandingFeature : MagicLeapOpenXRFeatureBase
+ {
+ public const string FeatureId = "com.magicleap.openxr.feature.ml2_markerunderstanding";
+
+ protected override bool OnInstanceCreate(ulong xrInstance)
+ {
+ if (!OpenXRRuntime.IsExtensionEnabled("XR_ML_marker_understanding"))
+ {
+ Debug.LogWarning($"XR_ML_marker_understanding is not enabled, disabling {nameof(MagicLeapMarkerUnderstandingFeature)}.");
+ return false;
+ }
+
+ return base.OnInstanceCreate(xrInstance);
+ }
+
+ protected override string GetFeatureId() => FeatureId;
+
+ protected override void OnSessionCreate(ulong xrSession)
+ {
+ base.OnSessionCreate(xrSession);
+ }
+ }
+}
+#endif
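
Since OnInstanceCreate disables the feature when the extension is missing, callers should verify the feature is present and enabled before creating detectors. A guard sketch:

```csharp
// Sketch: bail out early if the feature is unavailable or was disabled
// at instance creation because XR_ML_marker_understanding was missing.
var feature = OpenXRSettings.Instance.GetFeature<MagicLeapMarkerUnderstandingFeature>();
if (feature == null || !feature.enabled)
{
    Debug.LogWarning("Magic Leap 2 Marker Understanding feature is not available.");
    return;
}
```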
diff --git a/Runtime/OpenXR/MarkerUnderstanding/MagicLeapMarkerUnderstandingFeature.cs.meta b/Runtime/OpenXR/MarkerUnderstanding/MagicLeapMarkerUnderstandingFeature.cs.meta
new file mode 100644
index 0000000..0a252a6
--- /dev/null
+++ b/Runtime/OpenXR/MarkerUnderstanding/MagicLeapMarkerUnderstandingFeature.cs.meta
@@ -0,0 +1,11 @@
+fileFormatVersion: 2
+guid: 839b445bcefe86e4c850a6223d8e213f
+MonoImporter:
+ externalObjects: {}
+ serializedVersion: 2
+ defaultReferences: []
+ executionOrder: 0
+ icon: {instanceID: 0}
+ userData:
+ assetBundleName:
+ assetBundleVariant:
diff --git a/Runtime/OpenXR/MarkerUnderstanding/MagicLeapMarkerUnderstandingMarkerDetector.cs b/Runtime/OpenXR/MarkerUnderstanding/MagicLeapMarkerUnderstandingMarkerDetector.cs
new file mode 100644
index 0000000..27b9f77
--- /dev/null
+++ b/Runtime/OpenXR/MarkerUnderstanding/MagicLeapMarkerUnderstandingMarkerDetector.cs
@@ -0,0 +1,213 @@
+#if UNITY_OPENXR_1_9_0_OR_NEWER
+using System;
+using System.Linq;
+using System.Collections.Generic;
+
+namespace UnityEngine.XR.OpenXR.Features.MagicLeapSupport
+{
+ public partial class MagicLeapMarkerUnderstandingFeature
+ {
+ ///
+ /// Used to detect data from a specified type of marker tracker based on specific settings.
+ ///
+ public class MarkerDetector
+ {
+ private ulong handle;
+ private MarkerData[] data;
+ private ulong[] markers;
+ private Dictionary<ulong, ulong> markerSpaces;
+
+ ///
+ /// The current settings associated with the marker detector.
+ ///
+ public MarkerDetectorSettings Settings { get; private set; }
+
+ ///
+ /// The current status of the readiness of the marker detector.
+ ///
+ public MarkerDetectorStatus Status { get; private set; }
+
+ ///
+ /// The data retrieved from the marker detector.
+ ///
+ /// A readonly collection of data retrieved from the marker detector.
+ public IReadOnlyList<MarkerData> Data => Array.AsReadOnly(data);
+
+ private bool activeSnapshot;
+
+ ///
+ /// Creates a marker detector based on specific settings and initializes the data values.
+ ///
+ /// The marker detector settings to be associated with the marker detector to be created.
+ internal MarkerDetector(MarkerDetectorSettings settings)
+ {
+ var resultCode = NativeBindings.MLCreateMarkerDetector(settings, out handle);
+ Utils.DidXrCallSucceed(resultCode, nameof(NativeBindings.MLCreateMarkerDetector));
+
+ Settings = settings;
+ Status = MarkerDetectorStatus.Pending;
+
+ data = Array.Empty<MarkerData>();
+ markers = Array.Empty<ulong>();
+ markerSpaces = new();
+ }
+
+ ///
+ /// Updates the status readiness of the marker detector and collects the current data values if it is in a ready state.
+ ///
+ internal void UpdateData()
+ {
+ Status = GetMarkerDetectorState();
+
+ if (Status == MarkerDetectorStatus.Ready)
+ {
+ activeSnapshot = false;
+ data = GetMarkersData();
+ }
+ }
+
+ ///
+ /// Destroys this marker detector and clears the associated data.
+ ///
+ internal void Destroy()
+ {
+ var resultCode = NativeBindings.MLDestroyMarkerDetector(handle);
+ Utils.DidXrCallSucceed(resultCode, nameof(NativeBindings.MLDestroyMarkerDetector));
+
+ data = Array.Empty<MarkerData>();
+ markers = Array.Empty<ulong>();
+ markerSpaces.Clear();
+ }
+
+ ///
+ /// Takes a snapshot of the active marker detector and gets the current status of it.
+ ///
+ /// The status of the marker detector, as either Pending, Ready, or Error.
+ private MarkerDetectorStatus GetMarkerDetectorState()
+ {
+ if (!activeSnapshot)
+ {
+ SnapshotMarkerDetector();
+ activeSnapshot = true;
+ }
+
+ var resultCode = NativeBindings.MLGetMarkerDetectorState(handle, out MarkerDetectorStatus status);
+ Utils.DidXrCallSucceed(resultCode, nameof(NativeBindings.MLGetMarkerDetectorState));
+ return status;
+ }
+
+ ///
+ /// Gets the relevant marker data of all markers associated with the active marker detector.
+ ///
+ /// An array representing all data retrieved from the active marker detector for all markers.
+ private MarkerData[] GetMarkersData()
+ {
+ GetMarkers();
+
+ MarkerData[] markersData = markers?.Select(GetMarkerData).ToArray();
+
+ return markersData ?? Array.Empty<MarkerData>();
+ }
+
+ private void SnapshotMarkerDetector()
+ {
+ var resultCode = NativeBindings.MLSnapshotMarkerDetector(handle);
+ Utils.DidXrCallSucceed(resultCode, nameof(NativeBindings.MLSnapshotMarkerDetector));
+ }
+
+ private void GetMarkers()
+ {
+ // call first time to get marker count
+ var resultCode = NativeBindings.MLGetMarkers(handle, out uint markerCount, null);
+ Utils.DidXrCallSucceed(resultCode, nameof(NativeBindings.MLGetMarkers));
+
+ markers = new ulong[(int)markerCount];
+
+ // call second time to get markers
+ resultCode = NativeBindings.MLGetMarkers(handle, out markerCount, markers);
+ Utils.DidXrCallSucceed(resultCode, nameof(NativeBindings.MLGetMarkers));
+ }
+
+ private MarkerData GetMarkerData(ulong marker)
+ {
+ MarkerData markerData;
+
+ markerData.MarkerLength = GetMarkerLength(marker);
+
+ if (Settings.MarkerType == MarkerType.QR || Settings.MarkerType == MarkerType.Code128 || Settings.MarkerType == MarkerType.EAN13 || Settings.MarkerType == MarkerType.UPCA)
+ {
+ markerData.ReprojectionErrorMeters = 0;
+ markerData.MarkerNumber = 0;
+ markerData.MarkerString = GetMarkerString(marker);
+ }
+ else
+ {
+ markerData.ReprojectionErrorMeters = GetMarkerReprojectionError(marker);
+ markerData.MarkerNumber = GetMarkerNumber(marker);
+ markerData.MarkerString = null;
+ }
+
+ if (Settings.MarkerType == MarkerType.Aruco || Settings.MarkerType == MarkerType.QR || Settings.MarkerType == MarkerType.AprilTag)
+ {
+ markerData.MarkerPose = CreateMarkerSpace(marker);
+ }
+ else
+ {
+ markerData.MarkerPose = default;
+ }
+
+ return markerData;
+ }
+
+ private float GetMarkerReprojectionError(ulong marker)
+ {
+ var resultCode = NativeBindings.MLGetMarkerReprojectionError(handle, marker, out float reprojectionErrorMeters);
+ Utils.DidXrCallSucceed(resultCode, nameof(NativeBindings.MLGetMarkerReprojectionError));
+ return reprojectionErrorMeters;
+ }
+
+ private float GetMarkerLength(ulong marker)
+ {
+ var resultCode = NativeBindings.MLGetMarkerLength(handle, marker, out float meters);
+ Utils.DidXrCallSucceed(resultCode, nameof(NativeBindings.MLGetMarkerLength));
+ return meters;
+ }
+
+ private ulong GetMarkerNumber(ulong marker)
+ {
+ var resultCode = NativeBindings.MLGetMarkerNumber(handle, marker, out ulong number);
+ Utils.DidXrCallSucceed(resultCode, nameof(NativeBindings.MLGetMarkerNumber));
+ return number;
+ }
+
+ private string GetMarkerString(ulong marker)
+ {
+ uint bufferSize = 100;
+ char[] buffer = new char[bufferSize];
+
+ var resultCode = NativeBindings.MLGetMarkerString(handle, marker, bufferSize, buffer);
+ Utils.DidXrCallSucceed(resultCode, nameof(NativeBindings.MLGetMarkerString));
+
+ return new string(buffer);
+ }
+
+ private Pose CreateMarkerSpace(ulong marker)
+ {
+ Pose pose = default;
+
+ // a marker space is not created if one already exists for that marker
+ if (markerSpaces.TryGetValue(marker, out ulong space))
+ {
+ MagicLeapFeature.NativeBindings.MLOpenXRGetUnityPoseForFeature(FeatureId, space, out pose);
+ return pose;
+ }
+ var resultCode = NativeBindings.MLCreateMarkerSpace(handle, marker, out var xrSpace);
+ Utils.DidXrCallSucceed(resultCode, nameof(NativeBindings.MLCreateMarkerSpace));
+
+ markerSpaces.Add(marker, xrSpace);
+ return pose;
+ }
+ }
+ }
+}
+#endif
\ No newline at end of file
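
The branching in GetMarkerData implies a consumption rule: read MarkerString for QR, EAN13, UPCA, and Code128, and MarkerNumber for Aruco and AprilTag. A consumer-side sketch (assumes a detector created as in the earlier example):

```csharp
using MarkerFeature = UnityEngine.XR.OpenXR.Features.MagicLeapSupport.MagicLeapMarkerUnderstandingFeature;

// Sketch: pick the payload field that matches the detector's marker type.
foreach (var marker in detector.Data)
{
    string payload = detector.Settings.MarkerType switch
    {
        MarkerFeature.MarkerType.QR or
        MarkerFeature.MarkerType.Code128 or
        MarkerFeature.MarkerType.EAN13 or
        MarkerFeature.MarkerType.UPCA => marker.MarkerString,
        _ => marker.MarkerNumber.ToString()
    };
    Debug.Log($"{detector.Settings.MarkerType}: {payload} at {marker.MarkerPose.position}");
}
```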
diff --git a/Runtime/OpenXR/MarkerUnderstanding/MagicLeapMarkerUnderstandingMarkerDetector.cs.meta b/Runtime/OpenXR/MarkerUnderstanding/MagicLeapMarkerUnderstandingMarkerDetector.cs.meta
new file mode 100644
index 0000000..03f66b8
--- /dev/null
+++ b/Runtime/OpenXR/MarkerUnderstanding/MagicLeapMarkerUnderstandingMarkerDetector.cs.meta
@@ -0,0 +1,11 @@
+fileFormatVersion: 2
+guid: e32badda6cc93244da9efa5fb91e22de
+MonoImporter:
+ externalObjects: {}
+ serializedVersion: 2
+ defaultReferences: []
+ executionOrder: 0
+ icon: {instanceID: 0}
+ userData:
+ assetBundleName:
+ assetBundleVariant:
diff --git a/Runtime/OpenXR/MarkerUnderstanding/MagicLeapMarkerUnderstandingNativeBindings.cs b/Runtime/OpenXR/MarkerUnderstanding/MagicLeapMarkerUnderstandingNativeBindings.cs
new file mode 100644
index 0000000..920648e
--- /dev/null
+++ b/Runtime/OpenXR/MarkerUnderstanding/MagicLeapMarkerUnderstandingNativeBindings.cs
@@ -0,0 +1,59 @@
+// %BANNER_BEGIN%
+// ---------------------------------------------------------------------
+// %COPYRIGHT_BEGIN%
+// Copyright (c) (2019-2023) Magic Leap, Inc. All Rights Reserved.
+// Use of this file is governed by the Software License Agreement, located here: https://www.magicleap.com/software-license-agreement-ml2
+// Terms and conditions applicable to third-party materials accompanying this distribution may also be found in the top-level NOTICE file appearing herein.
+// %COPYRIGHT_END%
+// ---------------------------------------------------------------------
+// %BANNER_END%
+
+#if UNITY_OPENXR_1_9_0_OR_NEWER
+using System;
+using System.Collections;
+using System.Collections.Generic;
+using System.Runtime.InteropServices;
+using UnityEngine;
+using UnityEngine.XR.MagicLeap;
+using UnityEngine.XR.MagicLeap.Native;
+using UnityEngine.XR.OpenXR.NativeTypes;
+
+namespace UnityEngine.XR.OpenXR.Features.MagicLeapSupport
+{
+ public partial class MagicLeapMarkerUnderstandingFeature
+ {
+ internal partial class NativeBindings : MagicLeapNativeBindings
+ {
+ [DllImport(MagicLeapXrProviderNativeBindings.MagicLeapXrProviderDll, CallingConvention = CallingConvention.Cdecl)]
+ public static extern XrResult MLCreateMarkerDetector(in MarkerDetectorSettings settings, out ulong markerDetector);
+
+ [DllImport(MagicLeapXrProviderNativeBindings.MagicLeapXrProviderDll, CallingConvention = CallingConvention.Cdecl)]
+ public static extern XrResult MLDestroyMarkerDetector(ulong markerDetector);
+
+ [DllImport(MagicLeapXrProviderNativeBindings.MagicLeapXrProviderDll, CallingConvention = CallingConvention.Cdecl)]
+ public static extern XrResult MLSnapshotMarkerDetector(ulong markerDetector);
+
+ [DllImport(MagicLeapXrProviderNativeBindings.MagicLeapXrProviderDll, CallingConvention = CallingConvention.Cdecl)]
+ public static extern XrResult MLGetMarkerDetectorState(ulong markerDetector, out MarkerDetectorStatus status);
+
+ [DllImport(MagicLeapXrProviderNativeBindings.MagicLeapXrProviderDll, CallingConvention = CallingConvention.Cdecl)]
+ public static extern XrResult MLGetMarkers(ulong markerDetector, out uint markerCountOutput, [In, Out] ulong[] markers);
+
+ [DllImport(MagicLeapXrProviderNativeBindings.MagicLeapXrProviderDll, CallingConvention = CallingConvention.Cdecl)]
+ public static extern XrResult MLGetMarkerReprojectionError(ulong markerDetector, ulong marker, out float reprojectionErrorMeters);
+
+ [DllImport(MagicLeapXrProviderNativeBindings.MagicLeapXrProviderDll, CallingConvention = CallingConvention.Cdecl)]
+ public static extern XrResult MLGetMarkerLength(ulong markerDetector, ulong marker, out float meters);
+
+ [DllImport(MagicLeapXrProviderNativeBindings.MagicLeapXrProviderDll, CallingConvention = CallingConvention.Cdecl)]
+ public static extern XrResult MLGetMarkerNumber(ulong markerDetector, ulong marker, out ulong number);
+
+ [DllImport(MagicLeapXrProviderNativeBindings.MagicLeapXrProviderDll, CallingConvention = CallingConvention.Cdecl)]
+ public static extern XrResult MLGetMarkerString(ulong markerDetector, ulong marker, uint bufferSize, [In, Out] char[] buffer);
+
+ [DllImport(MagicLeapXrProviderNativeBindings.MagicLeapXrProviderDll, CallingConvention = CallingConvention.Cdecl)]
+ public static extern XrResult MLCreateMarkerSpace(ulong markerDetector, ulong marker, out ulong xrSpace);
+ }
+ }
+}
+#endif
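
MLGetMarkers follows the common two-call buffer idiom: the first call with a null array reports the required count, the second fills a caller-allocated buffer, which is how GetMarkers in the detector class consumes it. The caller-side shape, in isolation (markerDetector is an existing native handle):

```csharp
// Two-call idiom (sketch): size the buffer, then fill it.
NativeBindings.MLGetMarkers(markerDetector, out uint markerCount, null);
var markers = new ulong[markerCount];
NativeBindings.MLGetMarkers(markerDetector, out markerCount, markers);
```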
diff --git a/Runtime/OpenXR/MarkerUnderstanding/MagicLeapMarkerUnderstandingNativeBindings.cs.meta b/Runtime/OpenXR/MarkerUnderstanding/MagicLeapMarkerUnderstandingNativeBindings.cs.meta
new file mode 100644
index 0000000..3a828f4
--- /dev/null
+++ b/Runtime/OpenXR/MarkerUnderstanding/MagicLeapMarkerUnderstandingNativeBindings.cs.meta
@@ -0,0 +1,11 @@
+fileFormatVersion: 2
+guid: 324ec53ca46de3f48b29463a0ada81df
+MonoImporter:
+ externalObjects: {}
+ serializedVersion: 2
+ defaultReferences: []
+ executionOrder: 0
+ icon: {instanceID: 0}
+ userData:
+ assetBundleName:
+ assetBundleVariant:
diff --git a/Runtime/OpenXR/Meshing.meta b/Runtime/OpenXR/Meshing.meta
new file mode 100644
index 0000000..1ba0d2b
--- /dev/null
+++ b/Runtime/OpenXR/Meshing.meta
@@ -0,0 +1,3 @@
+fileFormatVersion: 2
+guid: baee575c683e43e5bac32859beedf90b
+timeCreated: 1701469860
\ No newline at end of file
diff --git a/Runtime/OpenXR/Meshing/MagicLeapMeshingFeature.cs b/Runtime/OpenXR/Meshing/MagicLeapMeshingFeature.cs
new file mode 100644
index 0000000..49c9689
--- /dev/null
+++ b/Runtime/OpenXR/Meshing/MagicLeapMeshingFeature.cs
@@ -0,0 +1,241 @@
+#if UNITY_OPENXR_1_9_0_OR_NEWER
+
+using System;
+using System.Collections.Generic;
+using OpenXR.PointCloud;
+using Unity.Collections;
+using Unity.Collections.LowLevel.Unsafe;
+using UnityEngine.XR.ARSubsystems;
+using UnityEngine.XR.MagicLeap;
+#if UNITY_EDITOR
+using UnityEditor;
+using UnityEditor.XR.OpenXR.Features;
+#endif
+
+namespace UnityEngine.XR.OpenXR.Features.MagicLeapSupport
+{
+ ///
+ /// Enables world meshing support on Magic Leap 2 via the XR_ML_world_mesh_detection OpenXR extension.
+ ///
+#if UNITY_EDITOR
+ [OpenXRFeature(UiName = "Magic Leap 2 Meshing Subsystem",
+ Desc = "Necessary to deploy a Magic Leap 2 compatible application with world meshing",
+ Company = "Magic Leap",
+ Version = "1.0.0",
+ Priority = -2,
+ FeatureId = FeatureId,
+ BuildTargetGroups = new[] { BuildTargetGroup.Android, BuildTargetGroup.Standalone },
+ OpenxrExtensionStrings = MeshingExtensionName
+ )]
+#endif
+ public partial class MagicLeapMeshingFeature : MagicLeapOpenXRFeatureBase
+ {
+ public const string FeatureId = "com.magicleap.openxr.feature.ml2_mesh_detection";
+ private const string MeshingExtensionName = "XR_ML_world_mesh_detection XR_EXT_future";
+
+ private MeshingMode currentMode = MeshingMode.Triangles;
+ private Vector3 meshBoundsOrigin;
+ private Vector3 meshBoundsScale;
+ private Quaternion meshBoundsRotation;
+
+ ///
+ /// The origin of the meshing volume bounds
+ ///
+ public Vector3 MeshBoundsOrigin
+ {
+ get => meshBoundsOrigin;
+ set
+ {
+ meshBoundsOrigin = value;
+ MagicLeapXrMeshingNativeBindings.MLOpenXRSetMeshBoundsOrigin(value);
+ }
+ }
+
+ ///
+ /// The scale of the meshing bounds volume
+ ///
+ public Vector3 MeshBoundsScale
+ {
+ get => meshBoundsScale;
+ set
+ {
+ meshBoundsScale = value;
+ MagicLeapXrMeshingNativeBindings.MLOpenXRSetMeshBoundsScale(value);
+ }
+ }
+
+ ///
+ /// The rotation of the meshing bounds volume
+ ///
+ public Quaternion MeshBoundsRotation
+ {
+ get => meshBoundsRotation;
+ set
+ {
+ meshBoundsRotation = value;
+ MagicLeapXrMeshingNativeBindings.MLOpenXRSetMeshBoundsOrientation(value);
+ }
+ }
+
+ ///
+ /// The render mode of the generated mesh.
+ ///
+ public MeshingMode MeshRenderMode
+ {
+ get => currentMode;
+ set => SetRenderMode(value);
+ }
+
+ ///
+ /// The density of the meshes generated
+ ///
+ public float MeshDensity
+ {
+ set
+ {
+ MagicLeapXrMeshingNativeBindings.MLOpenXRSetMeshDensity(value);
+ }
+ }
+
+ protected override string GetFeatureId()
+ {
+ return FeatureId;
+ }
+
+ protected override bool OnInstanceCreate(ulong xrInstance)
+ {
+ var exts = MeshingExtensionName.Split(' ');
+ foreach (var ext in exts)
+ {
+ if (!OpenXRRuntime.IsExtensionEnabled(ext))
+ {
+ Debug.LogError($"{ext} is not enabled. Disabling {nameof(MagicLeapMeshingFeature)}");
+ return false;
+ }
+ }
+
+ return base.OnInstanceCreate(xrInstance);
+ }
+
+ protected override void OnSubsystemCreate()
+ {
+ base.OnSubsystemCreate();
+ CreateSubsystem<XRMeshSubsystemDescriptor, XRMeshSubsystem>(new List<XRMeshSubsystemDescriptor>(), MagicLeapXrProvider.MeshingSubsystemId);
+ CreateSubsystem<XRPointCloudSubsystemDescriptor, XRPointCloudSubsystem>(new List<XRPointCloudSubsystemDescriptor>(), MagicLeapXrProvider.PointCloudSubsystemId);
+ }
+
+ protected override void OnSubsystemStart()
+ {
+ base.OnSubsystemStart();
+ StartSubsystem<XRMeshSubsystem>();
+ }
+
+ protected override void OnSubsystemStop()
+ {
+ base.OnSubsystemStop();
+ StopSubsystem<XRMeshSubsystem>();
+ StopSubsystem<XRPointCloudSubsystem>();
+ }
+
+ protected override void OnSubsystemDestroy()
+ {
+ base.OnSubsystemDestroy();
+ DestroySubsystem<XRMeshSubsystem>();
+ DestroySubsystem<XRPointCloudSubsystem>();
+ }
+
+ internal void StartSubsystemForMLMeshing()
+ {
+ StartSubsystem<XRMeshSubsystem>();
+ }
+
+ internal void StopSubsystemForMLMeshing()
+ {
+ StopSubsystem<XRMeshSubsystem>();
+ }
+
+ ///
+ /// Update the query settings for the mesh generation
+ ///
+ ///
+ public void UpdateMeshQuerySettings(in MeshingQuerySettings settings)
+ {
+ MagicLeapXrMeshingNativeBindings.MLOpenXRMeshingUpdateSettings(in settings);
+ }
+
+ private void SetRenderMode(MeshingMode mode)
+ {
+ if (mode == currentMode)
+ {
+ return;
+ }
+ if (mode == MeshingMode.PointCloud && currentMode == MeshingMode.Triangles)
+ {
+ StopSubsystem<XRMeshSubsystem>();
+ StartSubsystem<XRPointCloudSubsystem>();
+ }
+
+ if (mode == MeshingMode.Triangles && currentMode == MeshingMode.PointCloud)
+ {
+ StopSubsystem<XRPointCloudSubsystem>();
+ StartSubsystem<XRMeshSubsystem>();
+ }
+ currentMode = mode;
+ MagicLeapXrMeshingNativeBindings.MLOpenXRSetMeshRenderMode(currentMode);
+ }
+
+ public void SetMeshQueryBounds(Vector3 position, Vector3 scale, Quaternion rotation)
+ {
+ meshBoundsOrigin = position;
+ meshBoundsScale = scale;
+ meshBoundsRotation = rotation;
+ MagicLeapXrMeshingNativeBindings.MLOpenXRSetMeshQueryBounds(in position,in rotation,in scale);
+ }
+
+ public void GetMeshIds(out TrackableId[] trackableIds)
+ {
+ trackableIds = Array.Empty<TrackableId>();
+ unsafe
+ {
+ var buffer = (TrackableId*)IntPtr.Zero;
+ MagicLeapXrMeshingNativeBindings.MLOpenXRAcquireMeshIds(ref buffer, out var trackableCount);
+ if (trackableCount == 0)
+ {
+ return;
+ }
+
+ trackableIds = NativeArrayUnsafeUtility.ConvertExistingDataToNativeArray<TrackableId>(buffer, trackableCount, Allocator.None).ToArray();
+ }
+ }
+
+ public bool GetMeshData(in TrackableId meshId, out Vector3[] positions, out Vector3[] normals, out float[] confidence)
+ {
+ positions = Array.Empty<Vector3>();
+ normals = Array.Empty<Vector3>();
+ confidence = Array.Empty<float>();
+ unsafe
+ {
+ var positionsBuffer = (Vector3*)IntPtr.Zero;
+ var normalBuffer = (Vector3*)IntPtr.Zero;
+ var confidenceBuffer = (float*)IntPtr.Zero;
+
+ var result = MagicLeapXrMeshingNativeBindings.MLOpenXRAcquireMeshData(in meshId, ref positionsBuffer, out var positionCount, ref normalBuffer, out var normalCount, ref confidenceBuffer, out var confidenceCount);
+ if (!result)
+ {
+ return false;
+ }
+
+ positions = NativeArrayUnsafeUtility.ConvertExistingDataToNativeArray<Vector3>(positionsBuffer, positionCount, Allocator.None).ToArray();
+ normals = NativeArrayUnsafeUtility.ConvertExistingDataToNativeArray<Vector3>(normalBuffer, normalCount, Allocator.None).ToArray();
+ confidence = NativeArrayUnsafeUtility.ConvertExistingDataToNativeArray<float>(confidenceBuffer, confidenceCount, Allocator.None).ToArray();
+ return true;
+ }
+ }
+
+ public void InvalidateMeshes()
+ {
+ MagicLeapXrMeshingNativeBindings.MLOpenXRInvalidateMeshes();
+ }
+ }
+}
+#endif
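
A usage sketch tying the bounds, density, and data-access members together (assumes the feature is enabled; the bounds and density value are illustrative):

```csharp
using UnityEngine;
using UnityEngine.XR.ARSubsystems;
using UnityEngine.XR.OpenXR;
using UnityEngine.XR.OpenXR.Features.MagicLeapSupport;

public class MeshingExample : MonoBehaviour
{
    void Start()
    {
        var meshing = OpenXRSettings.Instance.GetFeature<MagicLeapMeshingFeature>();

        // Query a 5 m cube of mesh data centered at the origin (example bounds).
        meshing.SetMeshQueryBounds(Vector3.zero, Vector3.one * 5f, Quaternion.identity);
        meshing.MeshDensity = 1.0f; // assumed 0..1 range; 1 = densest

        meshing.GetMeshIds(out TrackableId[] ids);
        foreach (var id in ids)
        {
            if (meshing.GetMeshData(in id, out var positions, out _, out _))
                Debug.Log($"Mesh {id}: {positions.Length} vertices");
        }
    }
}
```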
diff --git a/Runtime/OpenXR/Meshing/MagicLeapMeshingFeature.cs.meta b/Runtime/OpenXR/Meshing/MagicLeapMeshingFeature.cs.meta
new file mode 100644
index 0000000..aa365be
--- /dev/null
+++ b/Runtime/OpenXR/Meshing/MagicLeapMeshingFeature.cs.meta
@@ -0,0 +1,3 @@
+fileFormatVersion: 2
+guid: e8893c58a68b4ff98668b47e5c935e57
+timeCreated: 1701469876
\ No newline at end of file
diff --git a/Runtime/OpenXR/Meshing/MagicLeapXrMeshingNativeBindings.cs b/Runtime/OpenXR/Meshing/MagicLeapXrMeshingNativeBindings.cs
new file mode 100644
index 0000000..3808fa8
--- /dev/null
+++ b/Runtime/OpenXR/Meshing/MagicLeapXrMeshingNativeBindings.cs
@@ -0,0 +1,47 @@
+#if UNITY_OPENXR_1_9_0_OR_NEWER
+
+using System.Runtime.InteropServices;
+using UnityEngine.XR.ARSubsystems;
+using UnityEngine.XR.MagicLeap;
+using UnityEngine.XR.MagicLeap.Native;
+
+namespace UnityEngine.XR.OpenXR.Features.MagicLeapSupport
+{
+ internal abstract class MagicLeapXrMeshingNativeBindings : MagicLeapNativeBindings
+ {
+ [DllImport(MagicLeapXrProviderNativeBindings.MagicLeapXrProviderDll, CallingConvention = CallingConvention.Cdecl)]
+ internal static extern void MLOpenXRMeshingUpdateSettings(in MagicLeapMeshingFeature.MeshingQuerySettings settings);
+
+ [DllImport(MagicLeapXrProviderNativeBindings.MagicLeapXrProviderDll, CallingConvention = CallingConvention.Cdecl)]
+ internal static extern unsafe void MLOpenXRAcquireMeshIds(ref TrackableId* trackableIds, out int trackableCount);
+
+ [DllImport(MagicLeapXrProviderNativeBindings.MagicLeapXrProviderDll, CallingConvention = CallingConvention.Cdecl)]
+ internal static extern unsafe bool MLOpenXRAcquireMeshData(in TrackableId trackableId, ref Vector3* positions, out int positionCount, ref Vector3* normals, out int normalCount, ref float* confidence, out int confidenceCount);
+
+ [DllImport(MagicLeapXrProviderNativeBindings.MagicLeapXrProviderDll, CallingConvention = CallingConvention.Cdecl)]
+ internal static extern void MLOpenXRSetMeshQueryBounds(in Vector3 position,in Quaternion rotation, in Vector3 scale);
+
+ [DllImport(MagicLeapXrProviderNativeBindings.MagicLeapXrProviderDll, CallingConvention = CallingConvention.Cdecl)]
+ internal static extern void MLOpenXRSetMeshRenderMode(MagicLeapMeshingFeature.MeshingMode mode);
+
+ [DllImport(MagicLeapXrProviderNativeBindings.MagicLeapXrProviderDll, CallingConvention = CallingConvention.Cdecl)]
+ internal static extern void MLOpenXRSetMeshBoundsOrigin(in Vector3 position);
+
+ [DllImport(MagicLeapXrProviderNativeBindings.MagicLeapXrProviderDll, CallingConvention = CallingConvention.Cdecl)]
+ internal static extern void MLOpenXRSetMeshBoundsScale(in Vector3 scale);
+
+ [DllImport(MagicLeapXrProviderNativeBindings.MagicLeapXrProviderDll, CallingConvention = CallingConvention.Cdecl)]
+ internal static extern void MLOpenXRSetMeshBoundsOrientation(in Quaternion rotation);
+
+ [DllImport(MagicLeapXrProviderNativeBindings.MagicLeapXrProviderDll, CallingConvention = CallingConvention.Cdecl)]
+ internal static extern void MLOpenXRSetMeshDensity(float density);
+
+ [DllImport(MagicLeapXrProviderNativeBindings.MagicLeapXrProviderDll, CallingConvention = CallingConvention.Cdecl)]
+ internal static extern void MLOpenXRInvalidateMeshes();
+
+ [DllImport(MagicLeapXrProviderNativeBindings.MagicLeapXrProviderDll, CallingConvention = CallingConvention.Cdecl)]
+ internal static extern void MLOpenXRGetCurrentFrameMeshData(out MagicLeapMeshingFeature.FrameMeshInfo frameMeshInfo);
+
+ }
+}
+#endif
diff --git a/Runtime/OpenXR/Meshing/MagicLeapXrMeshingNativeBindings.cs.meta b/Runtime/OpenXR/Meshing/MagicLeapXrMeshingNativeBindings.cs.meta
new file mode 100644
index 0000000..c2a9fa6
--- /dev/null
+++ b/Runtime/OpenXR/Meshing/MagicLeapXrMeshingNativeBindings.cs.meta
@@ -0,0 +1,3 @@
+fileFormatVersion: 2
+guid: 2f043ff7d897430da326bc562ba25654
+timeCreated: 1701715794
\ No newline at end of file
diff --git a/Runtime/OpenXR/Meshing/MagicLeapXrMeshingTypes.cs b/Runtime/OpenXR/Meshing/MagicLeapXrMeshingTypes.cs
new file mode 100644
index 0000000..03232c4
--- /dev/null
+++ b/Runtime/OpenXR/Meshing/MagicLeapXrMeshingTypes.cs
@@ -0,0 +1,83 @@
+#if UNITY_OPENXR_1_9_0_OR_NEWER
+
+using System;
+using System.Runtime.InteropServices;
+using UnityEngine.XR.ARSubsystems;
+
+namespace UnityEngine.XR.OpenXR.Features.MagicLeapSupport
+{
+ public partial class MagicLeapMeshingFeature
+ {
+ public enum MeshingMode : byte
+ {
+ Triangles,
+ PointCloud
+ }
+
+ internal unsafe struct FrameMeshInfo
+ {
+ internal TrackableId* addedIds;
+ internal uint addedCount;
+ internal TrackableId* removedIds;
+ internal uint removedCount;
+ internal TrackableId* updatedIds;
+ internal uint updatedCount;
+ }
+
+ ///
+ /// The flags to represent the generated mesh's properties
+ ///
+ [Flags]
+ public enum MeshDetectorFlags : byte
+ {
+ ///
+ /// Whether to compute the normals of the mesh
+ ///
+ ComputeNormals = 1 << 1,
+
+ ///
+ /// Whether to compute the confidence data for the mesh
+ ///
+ ComputeConfidence = 1 << 2,
+
+ ///
+ /// Whether to planarize the generated mesh
+ ///
+ Planarize = 1 << 3,
+
+ ///
+ /// When enabled, the mesh skirt (overlapping area between two mesh blocks) will be removed. This field is only valid
+ /// when the mesh is not a point cloud.
+ ///
+ MeshSkirt = 1 << 4
+ }
+
+ ///
+ /// The mesh generation settings
+ ///
+ [Serializable]
+ public struct MeshingQuerySettings
+ {
+ public float fillHoleLength;
+ public float appliedDisconnectedComponentArea;
+ public MeshDetectorFlags meshDetectorFlags;
+ /// <summary>
+ /// Whether to use the ion allocator on the device to store the mesh data.
+ /// </summary>
+ [MarshalAs(UnmanagedType.I1)] public bool useIonAllocator;
+
+ public static MeshingQuerySettings DefaultSettings()
+ {
+ return new MeshingQuerySettings
+ {
+ fillHoleLength = 0.25f,
+ appliedDisconnectedComponentArea = 0.25f,
+ meshDetectorFlags = MeshDetectorFlags.Planarize | MeshDetectorFlags.ComputeNormals,
+ useIonAllocator = false,
+ };
+ }
+ }
+ }
+}
+
+#endif
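For reference, a minimal sketch of building custom query settings from the types above; how the settings are handed to the feature is not shown in this diff, so only construction is illustrated:

    // Start from the defaults and additionally request per-vertex confidence.
    var settings = MagicLeapMeshingFeature.MeshingQuerySettings.DefaultSettings();
    settings.meshDetectorFlags |= MagicLeapMeshingFeature.MeshDetectorFlags.ComputeConfidence;
    settings.fillHoleLength = 0.5f; // example value: fill holes up to 0.5 m across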
diff --git a/Runtime/OpenXR/Meshing/MagicLeapXrMeshingTypes.cs.meta b/Runtime/OpenXR/Meshing/MagicLeapXrMeshingTypes.cs.meta
new file mode 100644
index 0000000..1a4fa22
--- /dev/null
+++ b/Runtime/OpenXR/Meshing/MagicLeapXrMeshingTypes.cs.meta
@@ -0,0 +1,3 @@
+fileFormatVersion: 2
+guid: baea2086ab394b428183f4b33dae2e8a
+timeCreated: 1701715830
\ No newline at end of file
diff --git a/Runtime/OpenXR/Meshing/PointCloud.meta b/Runtime/OpenXR/Meshing/PointCloud.meta
new file mode 100644
index 0000000..4c83b6c
--- /dev/null
+++ b/Runtime/OpenXR/Meshing/PointCloud.meta
@@ -0,0 +1,3 @@
+fileFormatVersion: 2
+guid: 4ce0d51310a24c70b87b3285b68358c2
+timeCreated: 1706049300
\ No newline at end of file
diff --git a/Runtime/OpenXR/Meshing/PointCloud/MagicLeapOpenXRPointCloudNativeBindings.cs b/Runtime/OpenXR/Meshing/PointCloud/MagicLeapOpenXRPointCloudNativeBindings.cs
new file mode 100644
index 0000000..c52d5f0
--- /dev/null
+++ b/Runtime/OpenXR/Meshing/PointCloud/MagicLeapOpenXRPointCloudNativeBindings.cs
@@ -0,0 +1,22 @@
+#if UNITY_OPENXR_1_9_0_OR_NEWER
+using System.Runtime.InteropServices;
+using UnityEngine;
+using UnityEngine.XR.ARSubsystems;
+using UnityEngine.XR.MagicLeap;
+using UnityEngine.XR.MagicLeap.Native;
+
+namespace OpenXR.PointCloud
+{
+ internal abstract class MagicLeapOpenXRPointCloudNativeBindings : MagicLeapNativeBindings
+ {
+ [DllImport(MagicLeapXrProviderNativeBindings.MagicLeapXrProviderDll, CallingConvention = CallingConvention.Cdecl)]
+ internal static extern void MLOpenXRBeginPointCloudDetection();
+
+ [DllImport(MagicLeapXrProviderNativeBindings.MagicLeapXrProviderDll, CallingConvention = CallingConvention.Cdecl)]
+ internal static extern void MLOpenXRStopPointCloudDetection();
+
+ [DllImport(MagicLeapXrProviderNativeBindings.MagicLeapXrProviderDll, CallingConvention = CallingConvention.Cdecl)]
+ internal static extern unsafe void MLOpenXRPointCloudGetChanges(ref TrackableId* meshIds, out int count);
+ }
+}
+#endif
diff --git a/Runtime/OpenXR/Meshing/PointCloud/MagicLeapOpenXRPointCloudNativeBindings.cs.meta b/Runtime/OpenXR/Meshing/PointCloud/MagicLeapOpenXRPointCloudNativeBindings.cs.meta
new file mode 100644
index 0000000..a1dcb11
--- /dev/null
+++ b/Runtime/OpenXR/Meshing/PointCloud/MagicLeapOpenXRPointCloudNativeBindings.cs.meta
@@ -0,0 +1,11 @@
+fileFormatVersion: 2
+guid: 66ecf803352341a4bb163d7925bfd9e0
+MonoImporter:
+ externalObjects: {}
+ serializedVersion: 2
+ defaultReferences: []
+ executionOrder: 0
+ icon: {instanceID: 0}
+ userData:
+ assetBundleName:
+ assetBundleVariant:
diff --git a/Runtime/OpenXR/Meshing/PointCloud/MagicLeapOpenXRPointCloudSubsystem.cs b/Runtime/OpenXR/Meshing/PointCloud/MagicLeapOpenXRPointCloudSubsystem.cs
new file mode 100644
index 0000000..8b97d48
--- /dev/null
+++ b/Runtime/OpenXR/Meshing/PointCloud/MagicLeapOpenXRPointCloudSubsystem.cs
@@ -0,0 +1,148 @@
+#if UNITY_OPENXR_1_9_0_OR_NEWER
+
+using System;
+using Unity.Collections;
+using Unity.Collections.LowLevel.Unsafe;
+using Unity.Jobs;
+using UnityEngine;
+using UnityEngine.XR.ARSubsystems;
+using UnityEngine.XR.MagicLeap;
+using UnityEngine.XR.OpenXR.Features.MagicLeapSupport;
+
+namespace OpenXR.PointCloud
+{
+ public class MagicLeapOpenXRPointCloudSubsystem : XRPointCloudSubsystem
+ {
+#if !UNITY_XR_MAGICLEAP_PROVIDER
+
+ [RuntimeInitializeOnLoadMethod(RuntimeInitializeLoadType.SubsystemRegistration)]
+ public static void RegisterDescriptor()
+ {
+ Debug.Log("Registering Point Cloud Subsystem");
+ XRPointCloudSubsystemDescriptor.RegisterDescriptor(new XRPointCloudSubsystemDescriptor.Cinfo
+ {
+ id = MagicLeapXrProvider.PointCloudSubsystemId,
+ providerType = typeof(MagicLeapProvider),
+ subsystemTypeOverride = typeof(MagicLeapOpenXRPointCloudSubsystem),
+ supportsFeaturePoints = true,
+ supportsConfidence = true,
+ supportsUniqueIds = false
+ });
+ }
+#endif
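+ // Parallel job: promotes each native TrackableId into an XRPointCloud, reusing the default point cloud's pose and reporting it as tracked.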
+ private struct XRPointCloudCreationJob : IJobParallelFor
+ {
+ [ReadOnly] public NativeArray<TrackableId> InputIds;
+ [ReadOnly] public XRPointCloud DefaultPointCloud;
+ [WriteOnly] public NativeList<XRPointCloud>.ParallelWriter Output;
+
+ public void Execute(int index)
+ {
+ var idToProcess = InputIds[index];
+ var cloud = new XRPointCloud(idToProcess, DefaultPointCloud.pose, TrackingState.Tracking, IntPtr.Zero);
+ Output.AddNoResize(cloud);
+ }
+ }
+
+ private class MagicLeapProvider : Provider
+ {
+ private MagicLeapMeshingFeature.FrameMeshInfo currentFrameInfo;
+
+ public override void Start()
+ {
+ base.Start();
+ MagicLeapOpenXRPointCloudNativeBindings.MLOpenXRBeginPointCloudDetection();
+ }
+
+ public override void Stop()
+ {
+ base.Stop();
+ MagicLeapOpenXRPointCloudNativeBindings.MLOpenXRStopPointCloudDetection();
+ }
+
+
+ public override TrackableChanges<XRPointCloud> GetChanges(XRPointCloud defaultPointCloud, Allocator allocator)
+ {
+ unsafe
+ {
+ // Fetch frame info
+ MagicLeapXrMeshingNativeBindings.MLOpenXRGetCurrentFrameMeshData(out currentFrameInfo);
+
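+ // Wrap the native ID buffers as NativeArray views without copying (Allocator.None aliases existing memory).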
+ var addedIds = NativeArrayUnsafeUtility.ConvertExistingDataToNativeArray<TrackableId>(currentFrameInfo.addedIds, (int)currentFrameInfo.addedCount, Allocator.None);
+ var updatedIds = NativeArrayUnsafeUtility.ConvertExistingDataToNativeArray<TrackableId>(currentFrameInfo.updatedIds, (int)currentFrameInfo.updatedCount, Allocator.None);
+ var removedIds = NativeArrayUnsafeUtility.ConvertExistingDataToNativeArray<TrackableId>(currentFrameInfo.removedIds, (int)currentFrameInfo.removedCount, Allocator.None);
+
+
+ var addedPointClouds = new NativeList<XRPointCloud>((int)currentFrameInfo.addedCount, Allocator.TempJob);
+ var updatedPointClouds = new NativeList<XRPointCloud>((int)currentFrameInfo.updatedCount, Allocator.TempJob);
+
+ new XRPointCloudCreationJob
+ {
+ InputIds = addedIds,
+ DefaultPointCloud = defaultPointCloud,
+ Output = addedPointClouds.AsParallelWriter()
+ }.Schedule(addedIds.Length, 10).Complete();
+
+ new XRPointCloudCreationJob
+ {
+ InputIds = updatedIds,
+ DefaultPointCloud = defaultPointCloud,
+ Output = updatedPointClouds.AsParallelWriter()
+ }.Schedule(updatedIds.Length, 10).Complete();
+
+ var result = new TrackableChanges<XRPointCloud>(addedPointClouds.GetUnsafePtr(), addedPointClouds.Length, updatedPointClouds.GetUnsafePtr(), updatedPointClouds.Length, removedIds.GetUnsafePtr(), removedIds.Length, defaultPointCloud, sizeof(XRPointCloud), allocator);
+ // TrackableChanges copies the buffers with the caller's allocator, so the TempJob lists can be released here.
+ addedPointClouds.Dispose();
+ updatedPointClouds.Dispose();
+ return result;
+ }
+ }
+
+ public override XRPointCloudData GetPointCloudData(TrackableId trackableId, Allocator allocator)
+ {
+ unsafe
+ {
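+ // The native call below allocates and fills these buffers; the pointers start as null and are populated on success.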
+ var positions = (Vector3*)IntPtr.Zero;
+ var confidence = (float*)IntPtr.Zero;
+ var normals = (Vector3*)IntPtr.Zero;
+
+ var result = MagicLeapXrMeshingNativeBindings.MLOpenXRAcquireMeshData(in trackableId, ref positions, out var positionCount, ref normals, out _, ref confidence, out var confidenceCount);
+ if (!result)
+ {
+ return default;
+ }
+
+ var pointCloudData = new XRPointCloudData();
+
+ var positionsArray = new NativeArray<Vector3>(NativeArrayUnsafeUtility.ConvertExistingDataToNativeArray<Vector3>(positions, positionCount, Allocator.None), Allocator.Temp);
+ var confidenceArray = new NativeArray<float>(NativeArrayUnsafeUtility.ConvertExistingDataToNativeArray<float>(confidence, confidenceCount, Allocator.None), Allocator.Temp);
+
+ pointCloudData.confidenceValues = new NativeArray<float>(confidenceArray.Length, allocator);
+ pointCloudData.positions = new NativeArray<Vector3>(positionsArray.Length, allocator);
+ new XRCopyPointCloudDataJob
+ {
+ VertexInput = positionsArray,
+ ConfidenceInput = confidenceArray,
+ VertexOutput = pointCloudData.positions,
+ ConfidenceOutput = pointCloudData.confidenceValues
+ }.Schedule().Complete();
+
+ return pointCloudData;
+ }
+ }
+
+ private struct XRCopyPointCloudDataJob : IJob
+ {
+ [ReadOnly] public NativeArray<Vector3> VertexInput;
+ [ReadOnly] public NativeArray<float> ConfidenceInput;
+
+ [WriteOnly] public NativeArray<Vector3> VertexOutput;
+ [WriteOnly] public NativeArray<float> ConfidenceOutput;
+
+ public void Execute()
+ {
+ VertexOutput.CopyFrom(VertexInput);
+ ConfidenceOutput.CopyFrom(ConfidenceInput);
+ }
+ }
+ }
+ }
+}
+#endif
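Because the provider registers a standard XRPointCloudSubsystem descriptor, AR Foundation's ARPointCloudManager should be able to drive it like any other provider. A hedged consumption sketch (the manager reference is assumed to be wired to the XR Origin in the scene):

    using UnityEngine;
    using UnityEngine.XR.ARFoundation;

    public class PointCloudLogger : MonoBehaviour
    {
        [SerializeField] private ARPointCloudManager manager; // assumed to live on the XR Origin

        private void OnEnable() => manager.pointCloudsChanged += OnChanged;
        private void OnDisable() => manager.pointCloudsChanged -= OnChanged;

        private static void OnChanged(ARPointCloudChangedEventArgs args) =>
            Debug.Log($"point clouds added: {args.added.Count}, updated: {args.updated.Count}, removed: {args.removed.Count}");
    }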
diff --git a/Runtime/OpenXR/Meshing/PointCloud/MagicLeapOpenXRPointCloudSubsystem.cs.meta b/Runtime/OpenXR/Meshing/PointCloud/MagicLeapOpenXRPointCloudSubsystem.cs.meta
new file mode 100644
index 0000000..26e9dec
--- /dev/null
+++ b/Runtime/OpenXR/Meshing/PointCloud/MagicLeapOpenXRPointCloudSubsystem.cs.meta
@@ -0,0 +1,3 @@
+fileFormatVersion: 2
+guid: 790092b261d94b43b8d4640ee2d33741
+timeCreated: 1706049315
\ No newline at end of file
diff --git a/Runtime/OpenXR/NativeInteropTypes.cs b/Runtime/OpenXR/NativeInteropTypes.cs
index 7903def..08eaba4 100644
--- a/Runtime/OpenXR/NativeInteropTypes.cs
+++ b/Runtime/OpenXR/NativeInteropTypes.cs
@@ -11,6 +11,7 @@
#if UNITY_OPENXR_1_9_0_OR_NEWER
using System;
using System.Runtime.InteropServices;
+using System.Text;
namespace UnityEngine.XR.OpenXR.Features.MagicLeapSupport.NativeInterop
{
@@ -110,17 +111,17 @@ internal unsafe struct XrUUID
public override string ToString()
{
- string idString = string.Empty;
+ StringBuilder idString = new StringBuilder(20);
for (int i = 0; i < 16; i++)
{
- idString += string.Format("{0:x2}", this.Data[i]);
+ idString.AppendFormat("{0:x2}", this.Data[i]);
}
foreach (int i in hyphenIndices)
- idString = idString.Insert(i, "-");
+ idString.Insert(i, "-");
- return idString;
+ return idString.ToString();
}
internal XrUUID(string id)
diff --git a/Runtime/OpenXR/OpenXRUtils.cs b/Runtime/OpenXR/OpenXRUtils.cs
index f2ca5e8..faa097f 100644
--- a/Runtime/OpenXR/OpenXRUtils.cs
+++ b/Runtime/OpenXR/OpenXRUtils.cs
@@ -25,6 +25,69 @@ namespace UnityEngine.XR.OpenXR
{
public static class Utils
{
+ public enum MLXrResult
+ {
+ GraphicsRequirementsCallMissing = -50,
+ RuntimeUnavailable = -51,
+ ControllerModelKeyInvalidMSFT = -1000055000,
+ ReprojectionModeUnsupportedMSFT = -1000066000,
+ ComputeNewSceneNotCompletedMSFT = -1000097000,
+ SceneComponentIdInvalidMSFT = -1000097001,
+ SceneComponentTypeMismatchMSFT = -1000097002,
+ SceneMeshBufferIdInvalidMSFT = -1000097003,
+ SceneComputeFeatureIncompatibleMSFT = -1000097004,
+ SceneComputeConsistencyMismatchMSFT = -1000097005,
+ DisplayRefreshRateUnsupportedFB = -1000101000,
+ ColorSpaceUnsupportedFB = -1000108000,
+ SpaceComponentNotSupportedFB = -1000113000,
+ SpaceComponentNotEnabledFB = -1000113001,
+ SpaceComponentStatusPendingFB = -1000113002,
+ SpaceComponentStatusAlreadySetFB = -1000113003,
+ UnexpectedStatePassthroughFB = -1000118000,
+ FeatureAlreadyCreatedPassthroughFB = -1000118001,
+ FeatureRequiredPassthroughFB = -1000118002,
+ NotPermittedPassthroughFB = -1000118003,
+ InsufficientResourcesPassthroughFB = -1000118004,
+ UnknownPassthroughFB = -1000118050,
+ RenderModelKeyInvalidFB = -1000119000,
+ RenderModelUnavailableFB = -1000119020,
+ MarkerNotTrackedVarjo = -1000124000,
+ MarkerIdInvalidVarjo = -1000124001,
+ SpatialAnchorNameNotFoundMSFT = -1000142001,
+ SpatialAnchorNameInvalidMSFT = -1000142002,
+ SpaceMappingInsufficientFB = -1000169000,
+ SpaceLocalizationFailedFB = -1000169001,
+ SpaceNetworkTimeoutFB = -1000169002,
+ SpaceNetworkRequestFailedFB = -1000169003,
+ SpaceCloudStorageDisabledFB = -1000169004,
+ PassthroughColorLutBufferSizeMismatchMETA = -1000266000,
+ HintAlreadySetQCOM = -1000306000,
+ SpaceNotLocatableEXT = -1000429000,
+ PlaneDetectionPermissionDeniedEXT = -1000429001,
+ FuturePendingEXT = -1000469001,
+ FutureInvalidEXT = -1000469002,
+ FacialExpressionPermissionDeniedML = -1000482000,
+ LocalizationMapIncompatibleML = -1000139000,
+ LocalizationMapUnavailableML = -1000139001,
+ LocalizationMapFailML = -1000139002,
+ LocalizationMapImportExportPermissionDeniedML = -1000139003,
+ LocalizationMapPermissionDeniedML = -1000139004,
+ LocalizationMapAlreadyExistsML = -1000139005,
+ LocalizationMapCannotExportCloudMapML = -1000139006,
+ MarkerDetectorPermissionDeniedML = -1000138000,
+ MarkerDetectorLocateFailedML = -1000138001,
+ MarkerDetectorInvalidDataQueryML = -1000138002,
+ MarkerDetectorInvalidCreateInfoML = -1000138003,
+ MarkerInvalidML = -1000138004,
+ SpatialAnchorsPermissionDeniedML = -1000140000,
+ SpatialAnchorsNotLocalizedML = -1000140001,
+ SpatialAnchorsOutOfMapBoundsML = -1000140002,
+ SpatialAnchorsAnchorNotFoundML = -1000141000,
+ SystemNotificationPermissionDeniedML = -1000473000,
+ SystemNotificationIncompatibleSKUML = -1000473001,
+ WorldMeshDetectorPermissionDeniedML = -1000474000,
+ WorldMeshDetectorSpaceNotLocatableML = -1000474001
+ }
/// <summary>
/// Finds an appropriate Magic Leap Controller registered through OpenXR which is properly named and is valid.
/// </summary>
@@ -89,7 +152,14 @@ internal static bool DidXrCallSucceed(XrResult result, string functionName = "A
var success = successCase?.Invoke(result) ?? result == XrResult.Success;
if (!success && showError)
- MLPluginLog.ErrorFormat($"{functionName} in the Magic Leap API failed. Reason: {result} ");
+ {
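+ // Result codes outside the core XrResult enum are mapped through MLXrResult above for a readable name.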
+ string logResult = result.ToString();
+ if (!Enum.IsDefined(typeof(XrResult), result))
+ {
+ logResult = ((MLXrResult)result).ToString();
+ }
+ MLPluginLog.ErrorFormat($"{functionName} in the Magic Leap API failed. Reason: {logResult} ");
+ }
return success;
}
@@ -145,38 +215,5 @@ internal static bool TryEnableFeature<TFeature>(BuildTargetGroup group) where TF
}
#endif // UNITY_EDITOR
#endif // UNITY_OPENXR_1_9_0_OR_NEWER
-
- internal static Type[] InstallPath = {
- typeof(Initialization),
- typeof(Initialization.XREarlyUpdate)
- };
-
- internal static bool InstallIntoPlayerLoop(ref PlayerLoopSystem topLevelPlayerLoop, PlayerLoopSystem systemToInstall, params Type[] installPath)
- {
- installPath ??= Array.Empty<Type>();
-
- ref var current = ref topLevelPlayerLoop;
- foreach (var path in installPath)
- {
- var idx = Array.FindIndex(current.subSystemList, s => s.type == path);
- if (idx == -1)
- return false;
- current = ref current.subSystemList[idx];
- }
-
- InstallSystem(ref current, systemToInstall);
- return true;
- }
-
- private static void InstallSystem(ref PlayerLoopSystem parentSystem, PlayerLoopSystem targetSystem)
- {
- var subsystems = parentSystem.subSystemList ?? Array.Empty<PlayerLoopSystem>();
- var length = subsystems.Length;
- Array.Resize(ref subsystems, length + 1);
- subsystems[length] = targetSystem;
- parentSystem.subSystemList = subsystems;
- }
}
-
-
}
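To illustrate the fallback DidXrCallSucceed now performs, the same mapping can be applied to any raw result code (XrResult is assumed to come from UnityEngine.XR.OpenXR.NativeTypes):

    // -1000138004 is MarkerInvalidML in the table above.
    var raw = (XrResult)(-1000138004);
    var readable = Enum.IsDefined(typeof(XrResult), raw)
        ? raw.ToString()
        : ((Utils.MLXrResult)raw).ToString(); // yields "MarkerInvalidML"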
diff --git a/Runtime/OpenXR/Planes/MagicLeapPlanesFeature.cs b/Runtime/OpenXR/Planes/MagicLeapPlanesFeature.cs
index 3fd602e..549a0cf 100644
--- a/Runtime/OpenXR/Planes/MagicLeapPlanesFeature.cs
+++ b/Runtime/OpenXR/Planes/MagicLeapPlanesFeature.cs
@@ -15,7 +15,7 @@ namespace UnityEngine.XR.OpenXR.Features.MagicLeapSupport
/// Enables the Magic Leap OpenXR Loader for Android, and modifies the AndroidManifest to be compatible with ML2.
/// </summary>
#if UNITY_EDITOR
- [OpenXRFeature(UiName = "Magic Leap 2 Plane Detection",
+ [OpenXRFeature(UiName = "Magic Leap 2 Planes Subsystem",
Desc="Necessary to deploy a Magic Leap 2 compatible application with Planes detection",
Company = "Magic Leap",
Version = "1.0.0",
@@ -32,6 +32,8 @@ public class MagicLeapPlanesFeature : MagicLeapOpenXRFeatureBase
private readonly List<XRPlaneSubsystemDescriptor> planeSubsystemDescriptors = new();
+ protected override string GetFeatureId() => FeatureId;
+
protected override bool OnInstanceCreate(ulong xrInstance)
{
if (OpenXRRuntime.IsExtensionEnabled(PlaneExtensionName))
diff --git a/Runtime/OpenXR/Planes/Subsystem.meta b/Runtime/OpenXR/Planes/Subsystem.meta
new file mode 100644
index 0000000..abd6387
--- /dev/null
+++ b/Runtime/OpenXR/Planes/Subsystem.meta
@@ -0,0 +1,8 @@
+fileFormatVersion: 2
+guid: 5a50b2227985d5549a40fb5477236624
+folderAsset: yes
+DefaultImporter:
+ externalObjects: {}
+ userData:
+ assetBundleName:
+ assetBundleVariant:
diff --git a/Runtime/OpenXR/Planes/Subsystem/ConvexHullGenerator.cs b/Runtime/OpenXR/Planes/Subsystem/ConvexHullGenerator.cs
new file mode 100644
index 0000000..a9e938a
--- /dev/null
+++ b/Runtime/OpenXR/Planes/Subsystem/ConvexHullGenerator.cs
@@ -0,0 +1,190 @@
+// %BANNER_BEGIN%
+// ---------------------------------------------------------------------
+// %COPYRIGHT_BEGIN%
+// Copyright (c) (2021-2022) Magic Leap, Inc. All Rights Reserved.
+// Use of this file is governed by the Software License Agreement, located here: https://www.magicleap.com/software-license-agreement-ml2
+// Terms and conditions applicable to third-party materials accompanying this distribution may also be found in the top-level NOTICE file appearing herein.
+// %COPYRIGHT_END%
+// ---------------------------------------------------------------------
+// %BANNER_END%
+#if UNITY_OPENXR_1_9_0_OR_NEWER
+
+using System;
+using System.Collections.Generic;
+using Unity.Collections;
+using UnityEngine.XR.MagicLeap;
+
+namespace UnityEngine.XR.OpenXR.Features.MagicLeapSupport
+{
+ public partial class MLXrPlaneSubsystem
+ {
+ internal static class ConvexHullGenerator
+ {
+ // Get a single static reference to AngleComparer to avoid additional GC allocs
+ private static readonly Comparison<Vector2> polarAngleComparer = AngleComparer;
+
+ // Used by AngleComparer
+ private static Vector2 pivot;
+
+ // Reusable List to avoid additional GC alloc
+ private static readonly List<Vector2> points = new();
+
+ /// <summary>
+ /// Used to sort a collection of points by the polar angle
+ /// made with <see cref="pivot"/> against the +x axis.
+ /// </summary>
+ /// <param name="lhs">The first point to compare.</param>
+ /// <param name="rhs">The second point to compare.</param>
+ /// <returns>
+ /// -1 if the vector from <see cref="pivot"/>
+ /// to <paramref name="lhs"/> makes a larger
+ /// angle against the +x axis than <see cref="pivot"/> to <paramref name="rhs"/>,
+ /// +1 if the angle is smaller, and 0 if they are equal.
+ /// </returns>
+ private static int AngleComparer(Vector2 lhs, Vector2 rhs)
+ {
+ // Compute the angle against the pivot
+ var u = lhs - pivot;
+ var v = rhs - pivot;
+ var cross = u.x * v.y - u.y * v.x;
+
+ // cross > 0 => lhs is more to the right than rhs
+ return Math.Sign(cross);
+ }
+
+ /// <summary>
+ /// Returns true if a, b, c form a clockwise turn
+ /// </summary>
+ private static bool ClockwiseTurn(Vector2 a, Vector2 b, Vector2 c)
+ {
+ var u = a - b;
+ var v = c - b;
+ return u.x * v.y - u.y * v.x > 0f;
+ }
+
+ /// <summary>
+ /// Computes convex hull using the Graham Scan method.
+ /// </summary>
+ /// <param name="newPoints">An arbitrary collection of 2D points.</param>
+ /// <param name="allocator">The allocator to use for the returned array.</param>
+ /// <returns>
+ /// A new NativeArray containing the convex hull. The allocated Length of the array will always
+ /// be the same as <paramref name="newPoints"/>. <see cref="NativeFixedList{T}.Length"/> contains the true number of
+ /// points in the hull, which will always be less than <paramref name="newPoints"/>.Length.
+ /// </returns>
+ private static NativeFixedList<Vector2> GrahamScan(NativeArray<Vector2> newPoints, Allocator allocator)
+ {
+ // Step 1: Find the lowest y-coordinate and leftmost point,
+ // called the pivot
+ var pivotIndex = 0;
+ for (var i = 1; i < newPoints.Length; ++i)
+ {
+ var point = newPoints[i];
+ var pointPivot = newPoints[pivotIndex];
+ if (point.y < pointPivot.y)
+ {
+ pivotIndex = i;
+ }
+ else if (Mathf.Approximately(point.y, pointPivot.y) && point.x < pointPivot.x)
+ {
+ pivotIndex = i;
+ }
+ }
+
+ pivot = newPoints[pivotIndex];
+
+ // Step 2: Copy all points except the pivot into a List
+ points.Clear();
+ for (var i = 0; i < pivotIndex; ++i)
+ points.Add(newPoints[i]);
+ for (var i = pivotIndex + 1; i < newPoints.Length; ++i)
+ points.Add(newPoints[i]);
+
+ // Step 3: Sort points by polar angle with the pivot
+ points.Sort(polarAngleComparer);
+
+ // Step 4: Compute the hull
+ var length = 0;
+ var hull = new NativeArray<Vector2>(newPoints.Length, allocator);
+ hull[length++] = pivot;
+ foreach (var point in points)
+ {
+ while (length > 1 && !ClockwiseTurn(hull[length - 2], hull[length - 1], point))
+ {
+ --length;
+ }
+
+ hull[length++] = point;
+ }
+
+ return new NativeFixedList<Vector2>