Version: 21 Aug 2024

World Camera - Pixel Sensor API Example

This section includes examples of how to configure the World Camera Pixel Sensor and display its data.

caution

This feature requires the Camera permission to be requested at runtime and enabled in your project's Manifest Settings (Edit > Project Settings > Magic Leap > Manifest Settings).

Default Configuration Example

This example shows how to poll the World Camera images using the default sensor configuration.

using System.Collections;
using System.Collections.Generic;
using MagicLeap.Android;
using Unity.Collections;
using UnityEngine;
using UnityEngine.XR.MagicLeap;
using UnityEngine.XR.OpenXR;
using MagicLeap.OpenXR.Features.PixelSensors;

public class WorldCameraPixelSensor : MonoBehaviour
{
[Header("Stream Configuration")]
public bool useStream0 = true;
public bool useStream1 = true;

[Header("Render Settings")]
[SerializeField] private Renderer[] streamRenderers = new Renderer[2];
[SerializeField]
[Tooltip("Set to either: World Center, World Left, World Right")]
private string pixelSensorName = "World Center";

private string requiredPermission = MLPermission.Camera;
// Array to hold textures for each stream
private Texture2D[] streamTextures = new Texture2D[2];
// Optional sensor ID, used to interact with the specific sensor
private PixelSensorId? sensorId;
// List to keep track of which streams have been configured
private readonly List<uint> configuredStreams = new List<uint>();
// Reference to the Magic Leap Pixel Sensor Feature
private MagicLeapPixelSensorFeature pixelSensorFeature;

private void Start()
{
InitializePixelSensorFeature();
}

private void InitializePixelSensorFeature()
{
// Get the Magic Leap Pixel Sensor Feature from the OpenXR settings
pixelSensorFeature = OpenXRSettings.Instance.GetFeature<MagicLeapPixelSensorFeature>();
if (pixelSensorFeature == null || !pixelSensorFeature.enabled)
{
Debug.LogError("Pixel Sensor Feature Not Found or Not Enabled!");
enabled = false;
return;
}
RequestPermission(MLPermission.Camera);
}

// Method to request a specific permission
private void RequestPermission(string permission)
{
Permissions.RequestPermission(permission, OnPermissionGranted, OnPermissionDenied);
}

// Callback for when permission is granted
private void OnPermissionGranted(string permission)
{
if (Permissions.CheckPermission(requiredPermission))
{
FindAndInitializeSensor();
}
}

// Callback for when permission is denied
private void OnPermissionDenied(string permission)
{
Debug.LogError($"Permission Denied: {permission}");
enabled = false;
}

// Find the sensor by name and try to initialize it
private void FindAndInitializeSensor()
{
List<PixelSensorId> sensors = pixelSensorFeature.GetSupportedSensors();
int index = sensors.FindIndex(x => x.SensorName.Contains(pixelSensorName));

if (index < 0)
{
Debug.LogError($"{pixelSensorName} sensor not found.");
return;
}

sensorId = sensors[index];

// Subscribe to sensor availability changes
pixelSensorFeature.OnSensorAvailabilityChanged += OnSensorAvailabilityChanged;
TryInitializeSensor();
}

// Handle changes in sensor availability, tries to initialize the sensor if it becomes available
private void OnSensorAvailabilityChanged(PixelSensorId id, bool available)
{
if (id == sensorId && available)
{
Debug.Log($"Sensor became available: {id.SensorName}");
TryInitializeSensor();
}
}

// Try to create and initialize the sensor
private void TryInitializeSensor()
{
if (sensorId.HasValue && pixelSensorFeature.CreatePixelSensor(sensorId.Value))
{
Debug.Log("Sensor created successfully.");
ConfigureSensorStreams();
}
else
{
Debug.LogError("Failed to create sensor. Will retry when available.");
}
}

// Configure streams based on the sensor capabilities
private void ConfigureSensorStreams()
{
if (!sensorId.HasValue)
{
Debug.LogError("Sensor Id was not set.");
return;
}

uint streamCount = pixelSensorFeature.GetStreamCount(sensorId.Value);
if ((useStream1 && streamCount < 2) || (useStream0 && streamCount < 1))
{
Debug.LogError("Target streams are not available from the sensor.");
return;
}

for (uint i = 0; i < streamCount; i++)
{
if ((useStream0 && i == 0) || (useStream1 && i == 1))
{
configuredStreams.Add(i);
}
}

StartCoroutine(StartSensorStream());
}

// Coroutine to configure stream and start sensor streams
private IEnumerator StartSensorStream()
{
// Configure the sensor with default configuration
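// ConfigureSensorWithDefaultCapabilities runs asynchronously; yielding on the returned operation below waits for it to complete before checking the result.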
PixelSensorAsyncOperationResult configureOperation =
pixelSensorFeature.ConfigureSensorWithDefaultCapabilities(sensorId.Value, configuredStreams.ToArray());

yield return configureOperation;

if (!configureOperation.DidOperationSucceed)
{
Debug.LogError("Failed to configure sensor.");
yield break;
}

Debug.Log("Sensor configured with defaults successfully.");

// Start the sensor with the default configuration. Metadata is not requested in this example.
var sensorStartAsyncResult =
pixelSensorFeature.StartSensor(sensorId.Value, configuredStreams);

yield return sensorStartAsyncResult;

if (!sensorStartAsyncResult.DidOperationSucceed)
{
Debug.LogError("Stream could not be started.");
yield break;
}

Debug.Log("Stream started successfully.");
yield return ProcessSensorData();
}

private IEnumerator ProcessSensorData()
{
while (sensorId.HasValue && pixelSensorFeature.GetSensorStatus(sensorId.Value) == PixelSensorStatus.Started)
{
foreach (var stream in configuredStreams)
{
// In this example, the frame metadata is not used.
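// GetSensorData fills the frame using Allocator.Temp, so the data is only valid short term; ProcessFrame copies it into a texture right away.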
if (pixelSensorFeature.GetSensorData(
sensorId.Value, stream,
out var frame,
out PixelSensorMetaData[] currentFrameMetaData,
Allocator.Temp,
shouldFlipTexture: true))
{
Pose sensorPose = pixelSensorFeature.GetSensorPose(sensorId.Value);
Debug.Log($"Pixel Sensor Pose: Position {sensorPose.position} Rotation: {sensorPose.rotation}");
ProcessFrame(frame, streamRenderers[stream], streamTextures[stream]);
}
}
yield return null;
}
}

public void ProcessFrame(in PixelSensorFrame frame, Renderer targetRenderer, Texture2D targetTexture)
{
if (!frame.IsValid || targetRenderer == null || frame.Planes.Length == 0)
{
return;
}

if (targetTexture == null)
{
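// The world camera delivers single-plane grayscale frames, so an 8-bit single-channel (R8) texture is created to match the raw plane data.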
var plane = frame.Planes[0];
targetTexture = new Texture2D((int)plane.Width, (int)plane.Height, TextureFormat.R8, false);
targetRenderer.material.mainTexture = targetTexture;
}

targetTexture.LoadRawTextureData(frame.Planes[0].ByteData);
targetTexture.Apply();
}

private void OnDisable()
{
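// The component is being disabled, so start the stop coroutine on another active MonoBehaviour (here the main camera) to ensure the asynchronous stop operation can run to completion.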
var camMono = Camera.main.GetComponent<MonoBehaviour>();
camMono.StartCoroutine(StopSensor());
}

private IEnumerator StopSensor()
{
if (sensorId.HasValue)
{
var stopSensorAsyncResult = pixelSensorFeature.StopSensor(sensorId.Value, configuredStreams);
yield return stopSensorAsyncResult;
if (stopSensorAsyncResult.DidOperationSucceed)
{
pixelSensorFeature.DestroyPixelSensor(sensorId.Value);
Debug.Log("Sensor stopped and destroyed successfully.");
}
else
{
Debug.LogError("Failed to stop the sensor.");
}
}
}
}

Obtain Metadata Example

This example snippet shows some of the metadata that can be obtained from the World Camera if it is requested when starting the sensor.

using System.Collections;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using MagicLeap.Android;
using Unity.Collections;
using UnityEngine;
using UnityEngine.XR.MagicLeap;
using UnityEngine.XR.OpenXR;
using MagicLeap.OpenXR.Features.PixelSensors;

public class WorldCameraPixelSensor : MonoBehaviour
{
// The name of the center world camera pixel sensor
private string pixelSensorName = "World Center";
// The world camera requires the Camera permission
private string requiredPermission = MLPermission.Camera;
// Optional sensor ID, used to interact with the specific sensor
private PixelSensorId? sensorId;
// Reference to the Magic Leap Pixel Sensor Feature
private MagicLeapPixelSensorFeature pixelSensorFeature;
// The target stream to read metadata from
private uint targetStream = 0;

private void Start()
{
InitializePixelSensorFeature();
}

private void InitializePixelSensorFeature()
{
// Get the Magic Leap Pixel Sensor Feature from the OpenXR settings
pixelSensorFeature = OpenXRSettings.Instance.GetFeature<MagicLeapPixelSensorFeature>();
if (pixelSensorFeature == null || !pixelSensorFeature.enabled)
{
Debug.LogError("Pixel Sensor Feature Not Found or Not Enabled!");
enabled = false;
return;
}
RequestPermission(MLPermission.Camera);
}

// Method to request a specific permission
private void RequestPermission(string permission)
{
Permissions.RequestPermission(permission, OnPermissionGranted, OnPermissionDenied);
}

// Callback for when permission is granted
private void OnPermissionGranted(string permission)
{
if (Permissions.CheckPermission(requiredPermission))
{
FindAndInitializeSensor();
}
}

// Callback for when permission is denied
private void OnPermissionDenied(string permission)
{
Debug.LogError($"Permission Denied: {permission}");
enabled = false;
}

// Find the sensor by name and try to initialize it
private void FindAndInitializeSensor()
{
List<PixelSensorId> sensors = pixelSensorFeature.GetSupportedSensors();
int index = sensors.FindIndex(x => x.SensorName.Contains(pixelSensorName));

if (index < 0)
{
Debug.LogError($"{pixelSensorName} sensor not found.");
return;
}

sensorId = sensors[index];

// Subscribe to sensor availability changes
pixelSensorFeature.OnSensorAvailabilityChanged += OnSensorAvailabilityChanged;
TryInitializeSensor();
}

// Handle changes in sensor availability, tries to initialize the sensor if it becomes available
private void OnSensorAvailabilityChanged(PixelSensorId id, bool available)
{
if (id == sensorId && available)
{
Debug.Log("Sensor became available.");
TryInitializeSensor();
}
}

// Try to create and initialize the sensor
private void TryInitializeSensor()
{
if (pixelSensorFeature.CreatePixelSensor(sensorId.Value))
{
Debug.Log("Sensor created successfully.");
ConfigureSensorStreams();
}
else
{
Debug.LogError("Failed to create sensor. Will retry when available.");
}
}

// Configure streams based on the sensor capabilities
private void ConfigureSensorStreams()
{
if (!sensorId.HasValue)
{
Debug.LogError("Sensor Id was not set.");
return;
}

uint streamCount = pixelSensorFeature.GetStreamCount(sensorId.Value);
if (streamCount <= targetStream)
{
Debug.LogError($"Stream at index {targetStream}. Not found");
return;
}

StartCoroutine(StartSensorStream());
}

// Coroutine to configure stream and start sensor streams
private IEnumerator StartSensorStream()
{
// Configure the sensor with default configuration
PixelSensorAsyncOperationResult configureOperation =
pixelSensorFeature.ConfigureSensorWithDefaultCapabilities(sensorId.Value, targetStream);

yield return configureOperation;

if (!configureOperation.DidOperationSucceed)
{
Debug.LogError("Failed to configure sensor.");
yield break;
}

Debug.Log("Sensor configured with defaults successfully.");

// Obtain the supported metadata types from the sensor
Dictionary<uint, PixelSensorMetaDataType[]> supportedMetadataTypes = new();

if (pixelSensorFeature.EnumeratePixelSensorMetaDataTypes(sensorId.Value, targetStream, out var metaDataTypes))
{
supportedMetadataTypes.Add(targetStream, metaDataTypes);
}

// Start the sensor with the default configuration and specify that all of the supported metadata should be requested.
var sensorStartAsyncResult =
pixelSensorFeature.StartSensor(sensorId.Value, new[] { targetStream }, supportedMetadataTypes);

yield return sensorStartAsyncResult;

if (!sensorStartAsyncResult.DidOperationSucceed)
{
Debug.LogError("Stream could not be started.");
yield break;
}

Debug.Log("Stream started successfully.");
yield return ProcessSensorData();
}

private IEnumerator ProcessSensorData()
{
while (sensorId != null && pixelSensorFeature.GetSensorStatus(sensorId.Value) ==
PixelSensorStatus.Started)
{
if (pixelSensorFeature.GetSensorData(sensorId.Value, targetStream, out var frame,
out PixelSensorMetaData[] currentFrameMetaData, Allocator.Temp,
shouldFlipTexture: true))
{
for (int i = 0; i < currentFrameMetaData.Length; i++)
{
Debug.Log(GetStringFromMetaData(currentFrameMetaData[i]));
}
}
yield return null;
}
}

private string GetStringFromMetaData(PixelSensorMetaData metaData)
{
var builder = new StringBuilder();
switch (metaData)
{
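// Only a subset of the possible metadata types is handled here; unhandled types result in an empty string.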
case PixelSensorAnalogGain analogGain:
builder.AppendLine($"{analogGain.AnalogGain}");
break;
case PixelSensorDigitalGain digitalGain:
builder.AppendLine($"{digitalGain.DigitalGain}");
break;
case PixelSensorExposureTime exposureTime:
builder.AppendLine($"{exposureTime.ExposureTime:F1}");
break;
case PixelSensorFisheyeIntrinsics fisheyeIntrinsics:
{
builder.AppendLine($"FOV: {fisheyeIntrinsics.FOV}");
builder.AppendLine($"Focal Length: {fisheyeIntrinsics.FocalLength}");
builder.AppendLine($"Principal Point: {fisheyeIntrinsics.PrincipalPoint}");
builder.AppendLine(
$"Radial Distortion: [{string.Join(',', fisheyeIntrinsics.RadialDistortion.Select(val => val.ToString("F1")))}]");
builder.AppendLine(
$"Tangential Distortion: [{string.Join(',', fisheyeIntrinsics.TangentialDistortion.Select(val => val.ToString("F1")))}]");
break;
}
}

return builder.ToString();
}

private void OnDisable()
{
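// The component is being disabled, so start the stop coroutine on another active MonoBehaviour (here the main camera) to ensure the asynchronous stop operation can run to completion.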
var camMono = Camera.main.GetComponent<MonoBehaviour>();
camMono.StartCoroutine(StopSensor());
}

private IEnumerator StopSensor()
{
if (sensorId.HasValue)
{
var stopSensorAsyncResult = pixelSensorFeature.StopSensor(sensorId.Value, new[] { targetStream });
yield return stopSensorAsyncResult;
if (stopSensorAsyncResult.DidOperationSucceed)
{
pixelSensorFeature.DestroyPixelSensor(sensorId.Value);
Debug.Log("Sensor stopped and destroyed successfully.");
}
else
{
Debug.LogError("Failed to stop the sensor.");
}
}
}
}

Custom Configuration Example

This example shows how to configure the pixel sensor with a custom configuration and then display its output on a Renderer.

using System;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using MagicLeap.Android;
using Unity.Collections;
using UnityEngine;
using UnityEngine.XR.MagicLeap;
using UnityEngine.XR.OpenXR;
using MagicLeap.OpenXR.Features.PixelSensors;

public class WorldCameraPixelSensor : MonoBehaviour
{

[Header("Sensor Configuration")]
[Tooltip("Set to either: World Center, World Left, World Right")]
[SerializeField] private string pixelSensorName = "World Center";
[SerializeField] private bool useStream0 = true;
[SerializeField] private bool useStream1 = true;


[Header("Render Settings")]
[SerializeField] private Renderer[] streamRenderers = new Renderer[2];

[Header("Stream Configuration")]
public bool UseCustomProperties = false;
public bool UseManualExposureSettings;


[Header("Auto Exposure Settings")]
[Tooltip("Exposure Mode Enviornment (0) or Near IR (1)")]
[Range(0, 1)]
public PixelSensorAutoExposureMode autoExposureMode = 0;

[Range(-5.00f, 5.00f)]
public float AutoExposureTargetBrightness;

[Header("Manual Exposure Settings")]

[Range(7, 32999f)]
public int ManualExposureTime = 50;

[Range(100, 12799)]
public int AnalogGain = 100;

private string requiredPermission = MLPermission.Camera;

private Texture2D[] streamTextures = new Texture2D[2];

private PixelSensorId? cachedSensorId;
private List<uint> configuredStreams = new List<uint>();


private MagicLeapPixelSensorFeature pixelSensorFeature;


// Sensors require that the following capabilities are specified.
private readonly PixelSensorCapabilityType[] requireCapabilities = new[]
{
PixelSensorCapabilityType.UpdateRate,
PixelSensorCapabilityType.Format,
PixelSensorCapabilityType.Resolution
};

// Capabilities on the world camera that can be changed when using manual exposure
private readonly PixelSensorCapabilityType[] manualExposureCapabilities =
new PixelSensorCapabilityType[]
{
PixelSensorCapabilityType.ManualExposureTime,
PixelSensorCapabilityType.AnalogGain,
};

// Capabilities on the world camera that can be changed when using automatic exposure
private readonly PixelSensorCapabilityType[] automaticExposureCapabilities =
new PixelSensorCapabilityType[]
{
PixelSensorCapabilityType.AutoExposureMode,
PixelSensorCapabilityType.AutoExposureTargetBrightness,
};

private void Start()
{
RequestPermission(MLPermission.Camera);
}

private void InitializePixelSensorFeature()
{
pixelSensorFeature = OpenXRSettings.Instance.GetFeature<MagicLeapPixelSensorFeature>();
if (pixelSensorFeature == null || !pixelSensorFeature.enabled)
{
enabled = false;
Debug.LogError("Pixel Sensor Feature Not Found!");
return;
}

// Find the pixel sensor with the specified name.
cachedSensorId = FindSensor(pixelSensorName);

// Subscribe to sensor availability changes
pixelSensorFeature.OnSensorAvailabilityChanged += OnSensorAvailabilityChanged;
}

private void RequestPermission(string permission)
{
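// Note: the third argument re-requests the permission if it is denied; OnPermissionDenied is then used for a 'denied, don't ask again' result.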
Permissions.RequestPermission(permission, OnPermissionGranted, RequestPermission,
OnPermissionDenied);
}

private void OnPermissionGranted(string permission)
{
if (Permissions.CheckPermission(requiredPermission))
{
InitializePixelSensorFeature();
}
}

private void OnPermissionDenied(string permission)
{
Debug.Log("Permission Denied");
enabled = false;
}

private PixelSensorId? FindSensor(string sensorName)
{
List<PixelSensorId> sensors = pixelSensorFeature.GetSupportedSensors();
int index = sensors.FindIndex(x => x.SensorName.Contains(sensorName));

if (index < 0)
{
Debug.LogError($"{sensorName} sensor not found.");
return null;
}
return sensors[index];
}

private void OnSensorAvailabilityChanged(PixelSensorId id, bool available)
{
if (cachedSensorId.HasValue && id == cachedSensorId && available)
{
Debug.Log("Sensor became available.");
TryInitializeSensor();
}
}

private void TryInitializeSensor()
{
if (cachedSensorId.HasValue && pixelSensorFeature.CreatePixelSensor(cachedSensorId.Value))
{
Debug.Log("Sensor created successfully.");
StartCoroutine(ConfigureSensorStreams());
}
else
{
Debug.LogError("Failed to create sensor. Will retry when it becomes available.");
}
}

private List<PixelSensorCapabilityType> GetTargetCapabilities()
{
List<PixelSensorCapabilityType> targetCapabilityTypes = new List<PixelSensorCapabilityType>(requireCapabilities);
if (UseCustomProperties)
{
targetCapabilityTypes.AddRange(UseManualExposureSettings
? manualExposureCapabilities
: automaticExposureCapabilities);
}

return targetCapabilityTypes;
}

private List<uint> GetTargetStreams()
{
List<uint> streams = new List<uint>();

if (cachedSensorId.HasValue)
{
uint streamCount = pixelSensorFeature.GetStreamCount(cachedSensorId.Value);

// Track which streams we will try to configure
for (uint i = 0; i < streamCount; i++)
{
// Create a list that includes the index of each targeted stream: index 0, index 1
if ((useStream0 && i == 0) || (useStream1 && i == 1))
{
streams.Add(i);
}
}
}
return streams;
}


private IEnumerator ConfigureSensorStreams()
{
if (cachedSensorId.HasValue == false)
{
Debug.LogError("Cannot configure sensor streams. Sensor Id was not set.");
yield break;
}

if (pixelSensorFeature.GetSensorStatus(cachedSensorId.Value) != PixelSensorStatus.Stopped
&& pixelSensorFeature.GetSensorStatus(cachedSensorId.Value) !=
PixelSensorStatus.NotConfigured)
{
Debug.LogError("Sensor must be stopped or not configured before a new configuration can be applied. Current Status is " + pixelSensorFeature.GetSensorStatus(cachedSensorId.Value));
yield break;
}


// Gets a list of the Required Capabilities and the custom capabilities based on the target settings (AutoExposure vs Manual Exposure)
List<PixelSensorCapabilityType> targetCapabilityTypes = GetTargetCapabilities();
List<uint> targetStreams = GetTargetStreams();

foreach (uint streamIndex in targetStreams)
{
// Iterate through each of the target capabilities, try to find it in the sensor's available capabilities, then set its value.
// When configuring a sensor, capabilities have to be configured iteratively since each applied configuration can impact other capabilities.
// Step 1: Iterate through each of the target capabilities
for (var index = 0; index < targetCapabilityTypes.Count; index++)
{
PixelSensorCapabilityType pixelSensorCapability = targetCapabilityTypes[index];
// Get the sensor's capabilities based on the previously applied settings.
pixelSensorFeature.GetPixelSensorCapabilities(cachedSensorId.Value, streamIndex, out PixelSensorCapability[] capabilities);
// Step 2: Try to find a capability of the same type in the sensor
PixelSensorCapability targetAbility = capabilities.FirstOrDefault(x => x.CapabilityType == pixelSensorCapability);
// Verify that it was found. A null check would not work because PixelSensorCapability is a struct.
if (targetAbility.CapabilityType == pixelSensorCapability)
{
// Once found, we query the valid range of the capability
if (pixelSensorFeature.QueryPixelSensorCapability(cachedSensorId.Value, targetAbility.CapabilityType, streamIndex, out PixelSensorCapabilityRange range) && range.IsValid)
{
// If the capability is required, we use the default value.
// This is because these values cannot be configured on the world cameras.
// UpdateRate = 30, Format = Grayscale, Resolution = 1016x1016
if (range.CapabilityType == PixelSensorCapabilityType.UpdateRate
|| range.CapabilityType == PixelSensorCapabilityType.Format
|| range.CapabilityType == PixelSensorCapabilityType.Resolution)
{
pixelSensorFeature.ApplySensorConfig(cachedSensorId.Value, range.GetDefaultConfig(streamIndex));
yield return null;
}


// Custom Capability settings based on the settings of the Script (AutoExposure vs Manual Exposure)
// Auto Exposure: Auto Exposure Mode (Environment / Near IR), Auto Exposure Target Brightness (-5.0 to 5.0)
if (range.CapabilityType == PixelSensorCapabilityType.AutoExposureMode)
{
var configData = new PixelSensorConfigData(range.CapabilityType, streamIndex);
if (range.IntValues.Contains((uint)autoExposureMode))
{
configData.IntValue = (uint)autoExposureMode;
pixelSensorFeature.ApplySensorConfig(cachedSensorId.Value, configData);
yield return null;
}

}
else if (range.CapabilityType == PixelSensorCapabilityType.AutoExposureTargetBrightness)
{
var configData = new PixelSensorConfigData(range.CapabilityType, streamIndex);
configData.FloatValue = Mathf.Clamp(AutoExposureTargetBrightness,
range.FloatRange.Value.Min, range.FloatRange.Value.Max);
pixelSensorFeature.ApplySensorConfig(cachedSensorId.Value, configData);
yield return null;
}
// Manual Exposure : Exposure Time , Analog Gain
else if (range.CapabilityType == PixelSensorCapabilityType.ManualExposureTime)
{
var configData = new PixelSensorConfigData(range.CapabilityType, streamIndex);
configData.IntValue = (uint)Mathf.Clamp(ManualExposureTime,
range.IntRange.Value.Min, range.IntRange.Value.Max);
pixelSensorFeature.ApplySensorConfig(cachedSensorId.Value, configData);
yield return null;
}
else if (range.CapabilityType == PixelSensorCapabilityType.AnalogGain)
{
var configData = new PixelSensorConfigData(range.CapabilityType, streamIndex);
configData.IntValue = (uint)Mathf.Clamp((uint)AnalogGain,
range.IntRange.Value.Min, range.IntRange.Value.Max);
pixelSensorFeature.ApplySensorConfig(cachedSensorId.Value, configData);
yield return null;
}

}

}
}

}


var operation = pixelSensorFeature.ConfigureSensor(cachedSensorId.Value, targetStreams.ToArray());

yield return operation;
if (!operation.DidOperationSucceed)
{
Debug.LogError($"Failed to configure streams. Try using the default settings.");
yield break;
}

Debug.LogError($"Sensor Configured Successfully.");
configuredStreams = targetStreams;

var sensorStartAsyncResult =
pixelSensorFeature.StartSensor(cachedSensorId.Value, targetStreams);

yield return sensorStartAsyncResult;


if (!sensorStartAsyncResult.DidOperationSucceed)
{
Debug.LogError("Stream could not be started.");
yield break;
}

Debug.LogError("Stream Started succesfully.");

yield return DoPollData(targetStreams);

}

private IEnumerator DoPollData(List<uint> configuredStreams)
{
while (cachedSensorId.HasValue && pixelSensorFeature.GetSensorStatus(cachedSensorId.Value) ==
PixelSensorStatus.Started)
{
foreach (var stream in configuredStreams)
{
//Metadata is not used in this example
if (pixelSensorFeature.GetSensorData(cachedSensorId.Value, stream, out var frame,
out PixelSensorMetaData[] currentFrameMetaData, Allocator.Temp,
shouldFlipTexture: true))
{
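// Convert the frame's capture timestamp to a readable time of day; FromUnixTimeMilliseconds expects milliseconds, hence the division by 1,000.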

string formattedTime = DateTimeOffset.FromUnixTimeMilliseconds(frame.CaptureTime / 1000).ToString(@"hh\:mm\:ss");
Debug.Log($"Frame Captured Time: {formattedTime}");

Pose sensorPose = pixelSensorFeature.GetSensorPose(cachedSensorId.Value);
Debug.Log($"Pixel Sensor Pose: Position {sensorPose.position} Rotation: {sensorPose.rotation}");
ProcessFrame(frame, streamRenderers[stream], streamTextures[stream]);
}
}

yield return null;
}
}

public void ProcessFrame(in PixelSensorFrame frame, Renderer targetRenderer, Texture2D targetTexture)
{
if (!frame.IsValid || targetRenderer == null || frame.Planes.Length == 0)
{
return;
}

if (targetTexture == null)
{
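// The world camera delivers single-plane grayscale frames, so an 8-bit single-channel (R8) texture is created to match the raw plane data.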
var plane = frame.Planes[0];
targetTexture = new Texture2D((int)plane.Width, (int)plane.Height, TextureFormat.R8, false);
targetRenderer.material.mainTexture = targetTexture;
}

targetTexture.LoadRawTextureData(frame.Planes[0].ByteData);
targetTexture.Apply();
}


private void OnDisable()
{
StartCoroutine(StopSensorCoroutine());
}

private IEnumerator StopSensorCoroutine()
{
if (cachedSensorId != null)
{
if (pixelSensorFeature.GetSensorStatus(cachedSensorId.Value) ==
PixelSensorStatus.Started)
{
var stopSensorAsyncResult = pixelSensorFeature.StopSensor(cachedSensorId.Value, configuredStreams);
yield return stopSensorAsyncResult;
if (stopSensorAsyncResult.DidOperationSucceed)
{
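// Clear the applied configurations before destroying the sensor so a later session starts from a clean state.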
pixelSensorFeature.ClearAllAppliedConfigs(cachedSensorId.Value);
pixelSensorFeature.DestroyPixelSensor(cachedSensorId.Value);
}
else
{
Debug.Log("Unable to stop the sensor");
}
}
else
{
yield return null;
pixelSensorFeature.ClearAllAppliedConfigs(cachedSensorId.Value);
pixelSensorFeature.DestroyPixelSensor(cachedSensorId.Value);
}
}
}
}