Version: 21 Aug 2024

Depth Camera - Pixel Sensor API Example

This section includes examples of how to configure the Depth Camera Pixel Sensor and display its data.

caution

This feature requires the Depth Camera permission to be requested at runtime and enabled in your project's Manifest Settings (Edit > Project Settings > Magic Leap > Manifest Settings).
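
For reference, a minimal runtime check before initializing the sensor might look like the sketch below. It assumes the MagicLeap.Android.Permissions helper and the MLPermission.DepthCamera constant used in the example that follows; treat the exact Permissions.CheckPermission call as an assumption.

// Minimal sketch (e.g. inside a MonoBehaviour's Start method): only request the
// Depth Camera permission if it has not already been granted.
if (!Permissions.CheckPermission(MLPermission.DepthCamera))
{
    Permissions.RequestPermission(MLPermission.DepthCamera,
        granted => Debug.Log($"{granted} granted"),
        denied => Debug.LogError($"{denied} denied"),
        denied => Debug.LogError($"{denied} denied, do not ask again"));
}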

Example

This example obtains data from the pixel sensor and configures the streams to match the specified settings.

using System.Collections;
using System.Collections.Generic;
using System.Linq;
using MagicLeap.Android;
using Unity.Collections;
using UnityEngine;
using UnityEngine.XR.MagicLeap;
using UnityEngine.XR.OpenXR;
using MagicLeap.OpenXR.Features.PixelSensors;

public class DepthCameraExample : MonoBehaviour
{

[Header("General Configuration")]
//public DepthStreamVisualizer streamVisualizer;

[Tooltip("If True, a raw depth image is returned. If False, a Depth32 image is returned.")]
public bool UseRawDepth;

[Range(0.2f, 5.00f)] public float DepthRange;

[Header("Short Range <= 1m")] public ShortRangeUpdateRate SRUpdateRate;

[Header("Long Range > 1m")] public LongRangeUpdateRate LRUpdateRate;

public enum LongRangeUpdateRate
{
OneFps = 1, FiveFps = 5
}

public enum ShortRangeUpdateRate
{
FiveFps = 5, ThirtyFps = 30, SixtyFps = 60
}

private const string depthCameraSensorPath = "/pixelsensor/depth/center";

private MagicLeapPixelSensorFeature pixelSensorFeature;
private PixelSensorId? sensorId;
private List<uint> configuredStreams = new List<uint>();

public uint targetStream
{
get { return DepthRange > 1.0f ? (uint)0 : (uint)1; }
}

void Start()
{
pixelSensorFeature = OpenXRSettings.Instance.GetFeature<MagicLeapPixelSensorFeature>();
if (pixelSensorFeature == null || !pixelSensorFeature.enabled)
{
Debug.LogError("Pixel Sensor Feature not found or not enabled!");
enabled = false;
return;
}
Permissions.RequestPermission(MLPermission.DepthCamera, OnPermissionGranted, OnPermissionDenied,
OnPermissionDenied);
}

private void OnPermissionGranted(string permission)
{
if (permission.Contains(MLPermission.DepthCamera))
FindAndInitializeSensor();

}

private void OnPermissionDenied(string permission)
{
Debug.LogError($"Permission {permission} not granted. Example script will not work.");
enabled = false;
}

private void FindAndInitializeSensor()
{
var sensors = pixelSensorFeature.GetSupportedSensors();

foreach (var sensor in sensors)
{
Debug.Log("Sensor Name Found: " + sensor.XrPathString);
if (sensor.XrPathString.Contains(depthCameraSensorPath))
{
sensorId = sensor;
break;
}
}

if (!sensorId.HasValue)
{
Debug.LogError($"`{depthCameraSensorPath}` sensor not found.");
return;
}

// Subscribe to the availability changed callback so the sensor can be initialized once it becomes available.
pixelSensorFeature.OnSensorAvailabilityChanged += OnSensorAvailabilityChanged;
TryInitializeSensor();
}

private void OnSensorAvailabilityChanged(PixelSensorId id, bool available)
{
if (sensorId.HasValue && id == sensorId && available)
{
Debug.Log("Sensor became available.");
TryInitializeSensor();
}
}

private void TryInitializeSensor()
{
if (sensorId.HasValue && pixelSensorFeature.GetSensorStatus(sensorId.Value) ==
PixelSensorStatus.Undefined && pixelSensorFeature.CreatePixelSensor(sensorId.Value))
{
Debug.Log("Sensor created successfully.");
ConfigureSensorStreams();
}
else
{
Debug.LogWarning("Failed to create sensor. Will retry when it becomes available.");
}
}

// The capabilities that the script will edit
private PixelSensorCapabilityType[] targetCapabilityTypes = new[]
{
PixelSensorCapabilityType.UpdateRate,
PixelSensorCapabilityType.Format,
PixelSensorCapabilityType.Resolution,
PixelSensorCapabilityType.Depth,
};


private void ConfigureSensorStreams()
{
if (!sensorId.HasValue)
{
Debug.LogError("Sensor ID not set.");
return;
}

uint streamCount = pixelSensorFeature.GetStreamCount(sensorId.Value);
if (streamCount < 1)
{
Debug.LogError("Expected at least one stream from the sensor.");
return;
}

// Only configure the target stream
configuredStreams.Add(targetStream);

pixelSensorFeature.GetPixelSensorCapabilities(sensorId.Value, targetStream, out var capabilities);
foreach (var pixelSensorCapability in capabilities)
{
if (!targetCapabilityTypes.Contains(pixelSensorCapability.CapabilityType))
{
continue;
}

// More details about the capability
if (pixelSensorFeature.QueryPixelSensorCapability(sensorId.Value, pixelSensorCapability.CapabilityType, targetStream, out PixelSensorCapabilityRange range) && range.IsValid)
{
if (range.CapabilityType == PixelSensorCapabilityType.UpdateRate)
{
var configData = new PixelSensorConfigData(range.CapabilityType, targetStream);
configData.IntValue = DepthRange > 1 ? (uint)LRUpdateRate : (uint)SRUpdateRate;
pixelSensorFeature.ApplySensorConfig(sensorId.Value, configData);
}
else if (range.CapabilityType == PixelSensorCapabilityType.Format)
{
var configData = new PixelSensorConfigData(range.CapabilityType, targetStream);
configData.IntValue = (uint)range.FrameFormats[UseRawDepth ? 1 : 0];
pixelSensorFeature.ApplySensorConfig(sensorId.Value, configData);
}
else if (range.CapabilityType == PixelSensorCapabilityType.Resolution)
{
var configData = new PixelSensorConfigData(range.CapabilityType, targetStream);
configData.VectorValue = range.ExtentValues[0];
pixelSensorFeature.ApplySensorConfig(sensorId.Value, configData);
}
else if (range.CapabilityType == PixelSensorCapabilityType.Depth)
{
var configData = new PixelSensorConfigData(range.CapabilityType, targetStream);
configData.FloatValue = DepthRange;
pixelSensorFeature.ApplySensorConfig(sensorId.Value, configData);
}
}
}

StartCoroutine(ConfigureStreamsAndStartSensor());
}

private IEnumerator ConfigureStreamsAndStartSensor()
{

var configureOperation = pixelSensorFeature.ConfigureSensor(sensorId.Value, configuredStreams.ToArray());

yield return configureOperation;

if (configureOperation.DidOperationSucceed)
{
Debug.Log("Sensor configured with defaults successfully.");
}
else
{
Debug.LogError("Failed to configure sensor.");
yield break;
}


Dictionary<uint, PixelSensorMetaDataType[]> supportedMetadataTypes =
new Dictionary<uint, PixelSensorMetaDataType[]>();

foreach (uint stream in configuredStreams)
{
if (pixelSensorFeature.EnumeratePixelSensorMetaDataTypes(sensorId.Value, stream, out var metaDataTypes))
{
supportedMetadataTypes[stream] = metaDataTypes;
}
}

// Assuming that `configuredStreams` is correctly populated with the intended stream indices
PixelSensorAsyncOperationResult startOperation = pixelSensorFeature.StartSensor(sensorId.Value, configuredStreams, supportedMetadataTypes);

yield return startOperation;

if (startOperation.DidOperationSucceed)
{
Debug.Log("Sensor started successfully. Monitoring data...");
StartCoroutine(MonitorSensorData());
}
else
{
Debug.LogError("Failed to start sensor.");
}
}

private IEnumerator MonitorSensorData()
{
Quaternion frameRotation = pixelSensorFeature.GetSensorFrameRotation(sensorId.Value);

// Initialize Stream ...
// streamVisualizer.Initialize(targetStream, frameRotation, pixelSensorFeature, sensorId.Value);

while (pixelSensorFeature.GetSensorStatus(sensorId.Value) ==
PixelSensorStatus.Started)
{
foreach (uint stream in configuredStreams)
{
if (pixelSensorFeature.GetSensorData(sensorId.Value, stream, out var frame, out var metaData,
Allocator.Temp, shouldFlipTexture: true))
{
// Process Frames ...
// streamVisualizer.ProcessFrame(frame);

var confidenceMetadata = metaData
.OfType<PixelSensorDepthConfidenceBuffer>().FirstOrDefault();
if (confidenceMetadata != null)
{
// streamVisualizer.ProcessDepthConfidenceData(in confidenceMetadata);
}

var flagMetadata = metaData.OfType<PixelSensorDepthFlagBuffer>()
.FirstOrDefault();
if (flagMetadata != null)
{
// streamVisualizer.ProcessDepthFlagData(in flagMetadata);
}
}

yield return null;
}
}
}

public void OnDisable()
{
// We start the coroutine on another MonoBehaviour since coroutines can only run while the hosting object is enabled.
MonoBehaviour camMono = Camera.main.GetComponent<MonoBehaviour>();
camMono.StartCoroutine(StopSensorCoroutine());
}

private IEnumerator StopSensorCoroutine()
{
if (sensorId.HasValue)
{
PixelSensorAsyncOperationResult stopSensorAsyncResult =
pixelSensorFeature.StopSensor(sensorId.Value, configuredStreams);

yield return stopSensorAsyncResult;

if (stopSensorAsyncResult.DidOperationSucceed)
{
Debug.Log("Sensor stopped successfully.");
pixelSensorFeature.ClearAllAppliedConfigs(sensorId.Value);
// Free the sensor so it can be marked available and used in other scripts.
pixelSensorFeature.DestroyPixelSensor(sensorId.Value);
}
else
{
Debug.LogError("Failed to stop the sensor.");
}
}
}
}

Depth Visualizer

The following section provides an example of how to visualize the depth sensor's data. This example uses a custom shader and C# script.

How to use:

  1. Create a new shader and apply the shader code below
  2. Create a new material that uses the depth shader
  3. Create a C# script using the example code below
  4. Attach the script to a GameObject, then assign a renderer target and the custom material in the Inspector
  5. Use the script from the previous example and uncomment the sections that reference the streamVisualizer, as shown in the sketch below
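
For reference, re-enabling the commented streamVisualizer lines in DepthCameraExample looks roughly like the sketch below. The streamVisualizer field is the one commented out near the top of that script and must be assigned in the Inspector; targetStream is used in place of the stream variable named in the commented Initialize call, since the latter is not in scope at that point.

// In DepthCameraExample: expose the visualizer and assign it in the Inspector.
public DepthStreamVisualizer streamVisualizer;

// In MonitorSensorData(), before the while loop:
streamVisualizer.Initialize(targetStream, frameRotation, pixelSensorFeature, sensorId.Value);

// Inside the loop, once GetSensorData succeeds:
streamVisualizer.ProcessFrame(frame);
if (confidenceMetadata != null)
{
    streamVisualizer.ProcessDepthConfidenceData(in confidenceMetadata);
}
if (flagMetadata != null)
{
    streamVisualizer.ProcessDepthFlagData(in flagMetadata);
}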

Depth Sensor Shader Example

Shader "Unlit/DepthSensorShader"
{
Properties
{
[KeywordEnum(Depth, Confidence, Flags)] _Buffer("Buffer", Integer) = 0
_MinDepth("Min Depth", Float) = 0
_MaxDepth("Max Depth", Float) = 5
[HideInInspector] _MainTex("Texture", 2D) = "white" {}
_MapTex("Frame Data Map", 2D) = "white" {}
[HideInInspector] _FlagTex("FlagColorTexture", 2D) = "white" {}
[HideInInspector] _MetadataTex("Metadata Texture", 2D) = "white" {}
}
SubShader
{
Tags { "RenderType"="Opaque" }
LOD 100

Pass
{
CGPROGRAM
#pragma vertex vert
#pragma fragment frag
// make fog work
#pragma multi_compile_fog

#include "UnityCG.cginc"

struct appdata
{
float4 vertex : POSITION;
float2 uv : TEXCOORD0;
};

struct v2f
{
float2 uv : TEXCOORD0;
UNITY_FOG_COORDS(1)
float4 vertex : SV_POSITION;
};

sampler2D _MainTex;
sampler2D _MetadataTex;
sampler2D _MapTex;
sampler2D _FlagTex;

float4 _MainTex_ST;
float _MinDepth;
float _MaxDepth;
int _Buffer;


float InverseLerp(float v, float min, float max)
{
return clamp((v - min) / (max - min), 0.0, 1.0);
}

float Normalize(float v, float min, float end)
{
return InverseLerp(v, min, end);
}

float NormalizeDepth(float depth_meters)
{
return InverseLerp(depth_meters, _MinDepth, _MaxDepth);
}

float NormalizeConfidence(float confidence)
{
float conf = clamp(abs(confidence), 0.0, 0.5);
return Normalize(conf, 0.0, 0.5);
}

fixed3 GetColorVisualization(float x) {
return tex2D(_MapTex, fixed2(x, 0.5)).rgb;
}

fixed3 GetConfidenceVisualization(float x)
{
if(x <= 0.5f)
{
return fixed3(1,1,1);
}
return fixed3(0,0,0);
}

float3 GetFlagColor(int conf)
{
//if flag is valid, then return white
if(conf & 1)
{
return float3(1,1,1);
}

const int valid_bits = (conf & (~3));
fixed3 color = fixed3(0,0,0);
[unroll]
for(int j = 0; j < 8; j++)
{
int i = 4 << j;
if(valid_bits & i)
{
int x = log2(valid_bits);
color += tex2D(_FlagTex, fixed2(x,0)).rgb;
}
}
return color;
}

float3 GetConfidenceColor(float conf)
{
return float3(conf, 1.0 - conf, 0.0);
}

v2f vert (appdata v)
{
v2f o;
o.vertex = UnityObjectToClipPos(v.vertex);
o.uv = TRANSFORM_TEX(v.uv, _MainTex);
UNITY_TRANSFER_FOG(o,o.vertex);
return o;
}

fixed4 frag (v2f i) : SV_Target
{
float depth = tex2D(_MainTex, i.uv).r;
float normalized_depth = NormalizeDepth(depth);
if(_Buffer == 0)
{
fixed4 depth_color = fixed4(GetColorVisualization(normalized_depth), 1.0);
// Values outside of range mapped to black.
if (depth < _MinDepth || depth > _MaxDepth)
{
depth_color.rgb *= 0.0;
}

return depth_color;
}

if(_Buffer == 1)
{
//Confidence texture is applied
float confidence = tex2D(_MetadataTex, i.uv).r;
float normalized_confidence = NormalizeConfidence(confidence);
fixed4 depth_color = fixed4(GetConfidenceColor(normalized_confidence), 1.0);
depth_color *= (1.0 - depth / 2.0);
return depth_color;
}

if(_Buffer == 2)
{
int flag = tex2D(_MetadataTex, i.uv).r;
fixed4 depth_color = fixed4(GetFlagColor(flag), 1.0);
depth_color *= (1.0 - depth / 2.0);
return depth_color;
}

return fixed4(1, 1, 1, 1);
}
ENDCG
}
}
}

Depth Stream Visualizer Example

using System.Collections.Generic;
using UnityEngine;
using MagicLeap.OpenXR.Features.PixelSensors;

public class DepthStreamVisualizer : MonoBehaviour
{
public Renderer TargetRenderer;

public Material DepthMaterial;

private Texture2D depthConfidenceTexture;
private Texture2D depthFlagColorKeyTexture;
private Texture2D depthFlagTexture;

private Texture2D targetTexture;

private int metadataTextureKey;
private int depthFlagTextureKey;
private int depthBufferKey;
private int maxDepthKey;
private int minDepthKey;

private float minDepth;
private float maxDepth = 5;

public enum DepthMode
{
Depth,
DepthWithConfidence,
DepthWithFlags,
}

public DepthMode currentDepthMode = DepthMode.Depth;

// Start is called before the first frame update
void Start()
{
metadataTextureKey = Shader.PropertyToID("_MetadataTex");
maxDepthKey = Shader.PropertyToID("_MaxDepth");
minDepthKey = Shader.PropertyToID("_MinDepth");
depthBufferKey = Shader.PropertyToID("_Buffer");
depthFlagTextureKey = Shader.PropertyToID("_FlagTex");

InitializeDepthFlagColorKey();
}


private void OnDestroy()
{
Destroy(depthFlagColorKeyTexture);
Destroy(targetTexture);
Destroy(depthConfidenceTexture);
Destroy(depthFlagTexture);
}

public void Initialize(uint streamId, Quaternion frameRotation, MagicLeapPixelSensorFeature pixelSensorFeature, PixelSensorId sensorType)
{
TargetRenderer.gameObject.transform.rotation *= frameRotation;

if (pixelSensorFeature.QueryPixelSensorCapability(sensorType, PixelSensorCapabilityType.Depth, streamId, out var range))
{
if (range.IntRange.HasValue)
{
minDepth = range.IntRange.Value.Min;
maxDepth = range.IntRange.Value.Max;
}

if (range.FloatRange.HasValue)
{
minDepth = range.FloatRange.Value.Min;
maxDepth = range.FloatRange.Value.Max;
}
}
}

public void ProcessFrame(in PixelSensorFrame frame)
{
if (!frame.IsValid || TargetRenderer == null || frame.Planes.Length == 0)
{
return;
}

// You can obtain the capture time as well. Note that it is returned as a long and needs to be converted,
// e.g.: DateTimeOffset.FromUnixTimeMilliseconds(frame.CaptureTime / 1000);

if (targetTexture == null)
{
var frameType = frame.FrameType;
ref var plane = ref frame.Planes[0];
targetTexture = new Texture2D((int)plane.Width, (int)plane.Height, TextureFormat.RFloat, false);
var materialToUse = DepthMaterial;
TargetRenderer.material = materialToUse;
TargetRenderer.material.mainTexture = targetTexture;
UpdateMaterialParameters();
}

targetTexture.LoadRawTextureData(frame.Planes[0].ByteData);
targetTexture.Apply();
}

private void UpdateMaterialParameters()
{
TargetRenderer.material.SetFloat(maxDepthKey, maxDepth);
TargetRenderer.material.SetFloat(minDepthKey, minDepth);
TargetRenderer.material.SetInt(depthBufferKey, (int)currentDepthMode);
TargetRenderer.material.SetTexture(depthFlagTextureKey, depthFlagColorKeyTexture);
TargetRenderer.material.SetTexture(metadataTextureKey, Texture2D.whiteTexture);
}

public void ProcessDepthConfidenceData(in PixelSensorDepthConfidenceBuffer confidenceBuffer)
{
var frame = confidenceBuffer.Frame;
if (!frame.IsValid || TargetRenderer == null || frame.Planes.Length == 0)
{
return;
}

if (depthConfidenceTexture == null)
{
ref var plane = ref frame.Planes[0];
depthConfidenceTexture = new Texture2D((int)plane.Width, (int)plane.Height, TextureFormat.RFloat, false);
TargetRenderer.material.SetTexture(metadataTextureKey, depthConfidenceTexture);
}

depthConfidenceTexture.LoadRawTextureData(frame.Planes[0].ByteData);
depthConfidenceTexture.Apply();
}

public void ProcessDepthFlagData(in PixelSensorDepthFlagBuffer flagBuffer)
{
var frame = flagBuffer.Frame;
if (!frame.IsValid || TargetRenderer == null || frame.Planes.Length == 0)
{
return;
}

if (depthFlagTexture == null)
{
ref var plane = ref frame.Planes[0];
depthFlagTexture = new Texture2D((int)plane.Width, (int)plane.Height, TextureFormat.RFloat, false);
TargetRenderer.material.SetTexture(metadataTextureKey, depthFlagTexture);
}

depthFlagTexture.LoadRawTextureData(frame.Planes[0].ByteData);
depthFlagTexture.Apply();
}

// Create a texture that has the keys for each of the depth flags. This will be applied in the material
private void InitializeDepthFlagColorKey()
{
depthFlagColorKeyTexture = new Texture2D(16, 1, TextureFormat.RGBA32, false);

Dictionary<int, Color> depthValueTable = new Dictionary<int, Color>()
{
{(int)PixelSensorDepthFlags.Valid, Color.white},
{(int)PixelSensorDepthFlags.Invalid, Color.black},
{(int)PixelSensorDepthFlags.Saturated, Color.yellow},
{(int)PixelSensorDepthFlags.Inconsistent, Color.red},
{(int)PixelSensorDepthFlags.LowSignal, Color.magenta},
{(int)PixelSensorDepthFlags.FlyingPixel, Color.cyan},
{(int)PixelSensorDepthFlags.MaskedBit, Color.black},
{(int)PixelSensorDepthFlags.Sbi, Color.gray},
{(int)PixelSensorDepthFlags.StrayLight, Color.blue},
{(int)PixelSensorDepthFlags.ConnectedComponents, Color.green},
};

var appliedColors = new Color[16];
for (var i = 0; i < 16; i++)
{
var enumValue = (int)Mathf.Pow(2, i);
if (depthValueTable.TryGetValue(enumValue, out var color))
{
appliedColors[i] = color;
}
else
{
appliedColors[i] = Color.white;
}
}

depthFlagColorKeyTexture.SetPixels(appliedColors);
depthFlagColorKeyTexture.Apply();
}

}

Depth Flags

Developers can read Depth Flags if the flag metadata is requested when starting the Depth Sensor.
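
For example, rather than enumerating every supported metadata type as the first script does, the dictionary passed to StartSensor can be limited to the flag buffer. A minimal sketch, assuming the PixelSensorMetaDataType.DepthFlagBuffer value and reusing the variables from the example above:

// Sketch: request only the depth flag metadata for the configured stream.
var requestedMetadata = new Dictionary<uint, PixelSensorMetaDataType[]>
{
    { targetStream, new[] { PixelSensorMetaDataType.DepthFlagBuffer } }
};

PixelSensorAsyncOperationResult startOperation =
    pixelSensorFeature.StartSensor(sensorId.Value, configuredStreams, requestedMetadata);
yield return startOperation;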

The depth flags provided in the PixelSensorDepthFlagBuffer describe the state of each depth pixel.

  • PixelSensorDepthFlags.Valid — Indicates that there is no additional flag data for this pixel.
  • PixelSensorDepthFlags.Invalid — This bit is set to one to indicate that one or more of the flags below have been set. Depending on the use case, the application can correlate the flag data with the corresponding pixel data to decide how to handle it.
  • PixelSensorDepthFlags.Saturated — The pixel intensity is either below the minimum or above the maximum threshold value.
  • PixelSensorDepthFlags.Inconsistent — Inconsistent data was received when capturing frames. This can happen due to fast motion.
  • PixelSensorDepthFlags.LowSignal — The pixel has a very low signal-to-noise ratio. For example, this can happen for pixels at the far end of the range.
  • PixelSensorDepthFlags.FlyingPixel — This typically happens when there is a step jump in the distance of adjoining pixels in the scene. Example: when looking through an open door into a room, the pixels along the door’s edges can register as flying pixels.
  • PixelSensorDepthFlags.MaskedBit — If this bit is set, the corresponding pixel may not be within the illuminator’s illumination cone.
  • PixelSensorDepthFlags.Sbi — This bit will be set when there is high noise.
  • PixelSensorDepthFlags.StrayLight — This could happen when there is another light source apart from the depth camera illuminator.
  • PixelSensorDepthFlags.ConnectedComponents — If a small group of PixelSensorDepthFlags.Valid pixels is surrounded by PixelSensorDepthFlags.Invalid pixels, then this bit will be set to 1.

Simple Example

...
public void ProcessDepthFlagData(in PixelSensorDepthFlagBuffer flagBuffer)
{
var frame = flagBuffer.Frame;
if (!frame.IsValid || targetRenderer == null || frame.Planes.Length == 0)
{
return;
}

if (depthFlagTexture == null)
{
ref var plane = ref frame.Planes[0];
depthFlagTexture = new Texture2D((int)plane.Width, (int)plane.Height, GetTextureFormat(frame.FrameType), false);
targetRenderer.material.SetTexture(metadataTextureKey, depthFlagTexture);
}

depthFlagTexture.LoadRawTextureData(frame.Planes[0].ByteData);
depthFlagTexture.Apply();
}

private void ReadDepthFlag()
{
var pixels = depthFlagTexture.GetPixels();

for (int i = 0; i < pixels.Length; i++)
{
// Extract the red channel as it holds the flag data
float flagValue = pixels[i].r;
int flags = Mathf.FloorToInt(flagValue * 255);

// Process the flag value to determine what flags are set
//ProcessFlag((PixelSensorDepthFlags)flags, i);
}
}
...
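
The ProcessFlag call above is commented out because the helper is not part of the SDK. A hypothetical implementation that tests individual flag bits could look like this:

// Hypothetical helper: each PixelSensorDepthFlags value is a single bit, so
// individual conditions can be tested with a bitwise AND.
private void ProcessFlag(PixelSensorDepthFlags flags, int pixelIndex)
{
    if ((flags & PixelSensorDepthFlags.Invalid) == 0)
    {
        return; // No additional flag data for this pixel.
    }

    if ((flags & PixelSensorDepthFlags.FlyingPixel) != 0)
    {
        Debug.Log($"Pixel {pixelIndex} was flagged as a flying pixel.");
    }

    if ((flags & PixelSensorDepthFlags.LowSignal) != 0)
    {
        Debug.Log($"Pixel {pixelIndex} has a low signal-to-noise ratio.");
    }
}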