
Visualize Camera Output

This section describes how to render the Magic Leap camera's output on a Raw Image UI component. When receiving camera output, you can query its format using the MLCamera.CameraOutput.Format property.


public void OnCaptureDataReceived(MLCamera.CameraOutput output, MLCamera.ResultExtras extras, MLCamera.Metadata metadataHandle)
{
    if (output.Format == MLCamera.OutputFormat.JPEG)
    {
        // JPEG output
    }
    else if (output.Format == MLCamera.OutputFormat.YUV_420_888)
    {
        // YUV output
    }
    else if (output.Format == MLCamera.OutputFormat.RGBA_8888)
    {
        // RGBA output
    }
}
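
This callback is not invoked automatically; it must be subscribed to the camera's capture events. A minimal sketch, assuming _camera is an MLCamera instance that has already been created, connected, and configured for capture elsewhere:

// Assumes _camera is a connected, configured MLCamera instance.
_camera.OnRawImageAvailable += OnCaptureDataReceived;      // still image captures (e.g. JPEG)
_camera.OnRawVideoFrameAvailable += OnCaptureDataReceived; // video frames (YUV / RGBA)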

Render JPEG Output

The following example displays camera output captured in JPEG format on a Raw Image.

using UnityEngine;
using UnityEngine.UI;
using UnityEngine.XR.MagicLeap;

public class JPEGVisualizer : MonoBehaviour
{
    [SerializeField, Tooltip("The UI to show the camera capture in JPEG format")]
    private RawImage _screenRendererJPEG = null;

    // JPEG image texture
    private Texture2D _imageTexture;

    public void OnCaptureDataReceived(MLCamera.CameraOutput output, MLCamera.ResultExtras extras, MLCamera.Metadata metadataHandle)
    {
        if (output.Format == MLCamera.OutputFormat.JPEG)
        {
            UpdateJPGTexture(output.Planes[0]);
        }
    }

    private void UpdateJPGTexture(MLCamera.PlaneInfo imagePlane)
    {
        // Release the previous frame's texture before decoding a new one.
        if (_imageTexture != null)
        {
            Destroy(_imageTexture);
        }

        // LoadImage resizes the texture to the decoded JPEG's dimensions,
        // so the initial 8x8 size acts as a sentinel for a failed decode.
        _imageTexture = new Texture2D(8, 8);
        bool status = _imageTexture.LoadImage(imagePlane.Data);
        if (status && (_imageTexture.width != 8 && _imageTexture.height != 8))
        {
            _screenRendererJPEG.texture = _imageTexture;
        }
    }
}
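
JPEG frames only arrive if the capture stream was configured to produce them. The following is a hedged sketch of requesting JPEG output when building the capture configuration; streamCapability is assumed to have been selected from the stream capabilities the camera reports:

// streamCapability is assumed to come from the camera's reported capabilities.
MLCamera.CaptureConfig captureConfig = new MLCamera.CaptureConfig();
captureConfig.CaptureFrameRate = MLCamera.CaptureFrameRate._30FPS;
captureConfig.StreamConfigs = new MLCamera.CaptureStreamConfig[1];
captureConfig.StreamConfigs[0] =
    MLCamera.CaptureStreamConfig.Create(streamCapability, MLCamera.OutputFormat.JPEG);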

Render RGBA Output

The following example displays camera output captured in RGBA_8888 format on a Raw Image. Note the stride check near the end of UpdateRGBTexture below: image width and stride are not always equal because of memory layout optimizations. Depending on the frame resolution, when a row's width does not line up with the hardware's memory boundaries, the camera pipeline pads each row of the frame with dummy pixel values, and these padding bytes must be skipped when the data is operated on. For example, a row of 1,022 RGBA pixels occupies 4,088 bytes but might be padded to a 4,096-byte stride.

using System;
using UnityEngine;
using UnityEngine.UI;
using UnityEngine.XR.MagicLeap;

public class RGBVisualizer : MonoBehaviour
{
    [SerializeField, Tooltip("The UI to show the camera capture in RGBA format")]
    private RawImage _screenRendererRGBA = null;

    // RGBA image texture
    private Texture2D _rawVideoTextureRGBA;

    public void OnCaptureDataReceived(MLCamera.CameraOutput output, MLCamera.ResultExtras extras, MLCamera.Metadata metadataHandle)
    {
        if (output.Format == MLCamera.OutputFormat.RGBA_8888)
        {
            // Flip the frame vertically so it does not appear upside down.
            MLCamera.FlipFrameVertically(ref output);
            UpdateRGBTexture(output.Planes[0]);
        }
    }

    private void UpdateRGBTexture(MLCamera.PlaneInfo imagePlane)
    {
        // Number of meaningful bytes per row (width x bytes per pixel).
        int actualWidth = (int)(imagePlane.Width * imagePlane.PixelStride);

        // Recreate the texture if the incoming frame resolution changed.
        if (_rawVideoTextureRGBA != null &&
            (_rawVideoTextureRGBA.width != imagePlane.Width || _rawVideoTextureRGBA.height != imagePlane.Height))
        {
            Destroy(_rawVideoTextureRGBA);
            _rawVideoTextureRGBA = null;
        }

        if (_rawVideoTextureRGBA == null)
        {
            // Create a new texture that will display the RGB image
            _rawVideoTextureRGBA = new Texture2D((int)imagePlane.Width, (int)imagePlane.Height, TextureFormat.RGBA32, false);
            _rawVideoTextureRGBA.filterMode = FilterMode.Bilinear;

            // Assign the resulting texture to the RawImage
            _screenRendererRGBA.texture = _rawVideoTextureRGBA;
        }

        // Image width and stride may differ due to padding bytes used for
        // memory alignment. Skip over the padding when accessing pixel data.
        if (imagePlane.Stride != actualWidth)
        {
            // Create a new array to store the pixel data without padding
            var newTextureChannel = new byte[actualWidth * imagePlane.Height];
            // Copy each row from the padded buffer, skipping the padding bytes
            for (int i = 0; i < imagePlane.Height; i++)
            {
                Buffer.BlockCopy(imagePlane.Data, (int)(i * imagePlane.Stride), newTextureChannel, i * actualWidth, actualWidth);
            }
            // Load the tightly packed array as the texture data
            _rawVideoTextureRGBA.LoadRawTextureData(newTextureChannel);
        }
        else // Stride equals the row width, so no padding bytes are present
        {
            _rawVideoTextureRGBA.LoadRawTextureData(imagePlane.Data);
        }

        _rawVideoTextureRGBA.Apply();
    }
}

Render YUV Output

Visualizing YUV_420_888 output is more involved than RGBA or JPEG because the camera delivers three separate planes rather than a single contiguous buffer. In exchange, it is more efficient at larger resolutions.

Because YUV transmits luma and chroma separately, and subsamples chroma 2 × 2, each frame moves roughly 60% less data: RGBA_8888 needs 4 bytes per pixel, while YUV_420_888 needs only 1.5 (one luma byte per pixel plus two chroma bytes shared by each 2 × 2 block), a 62.5% reduction. The small shader below finishes the RGB conversion on the fly, keeping the CPU free and memory bandwidth low.

Note: This performance gain is lost if you convert the YUV frames back to RGB on the CPU.

| Plane | Contents | Sub-sampling | Typical Texture |
|-------|----------|--------------|-----------------|
| 0 | Y (luma) – brightness for every pixel | full resolution | Alpha8 |
| 1 | UV (chroma) – interleaved U & V bytes | ½ width × ½ height | RG16 |
| 2 | VU – interleaved V & U bytes (not required for NV12) | overlaps Plane 1 | – |

A row can also be padded: Stride (the number of bytes from one row start to the next) may be larger than Width × PixelStride.
The script below detects this at runtime and copies the plane data into tightly packed textures before feeding them to a lightweight shader that converts Y + UV into RGB on the GPU.

Usage

  1. Add the script and shader to your project's Assets/ folder.
  2. Create or reuse a Quad (or any mesh) in your scene.
  3. Attach YUVVisualizer to that object.
  4. Set its YUV shader field to Unlit/YUV_RG16_Shader.
  5. Register OnCaptureRawVideoFrameAvailable with the ML Camera API, passing each frame to the script (a minimal wiring sketch follows this list).
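
A minimal wiring sketch for step 5, assuming the MLCamera has already been created, connected, and configured for YUV_420_888 video capture elsewhere (permission handling and capture start/stop are omitted); the YUVCaptureWiring component name is purely illustrative:

using UnityEngine;
using UnityEngine.XR.MagicLeap;

// Illustrative wiring component: forwards each raw video frame to the visualizer.
public class YUVCaptureWiring : MonoBehaviour
{
    [SerializeField] private YUVVisualizer visualizer;

    private MLCamera _camera;

    // Call once the MLCamera has been connected and configured elsewhere.
    public void StartStreaming(MLCamera connectedCamera)
    {
        _camera = connectedCamera;
        _camera.OnRawVideoFrameAvailable += visualizer.OnCaptureRawVideoFrameAvailable;
    }

    void OnDestroy()
    {
        if (_camera != null)
        {
            _camera.OnRawVideoFrameAvailable -= visualizer.OnCaptureRawVideoFrameAvailable;
        }
    }
}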

YUVVisualizer.cs

using System;
using UnityEngine;
using UnityEngine.XR.MagicLeap;

/// <summary>
/// Visualizer for ML Camera YUV_420_888 (NV12) frames.
/// Attach to a Quad or any MeshRenderer that uses the YUV shader below.
/// </summary>
[RequireComponent(typeof(MeshRenderer))]
public class YUVVisualizer : MonoBehaviour
{
    [Tooltip("Shader that converts Y + interleaved UV to RGB")]
    [SerializeField] Shader yuvShader;

    Texture2D _texY, _texUV;
    Material _mat;

    byte[] _scratchY; // reused if row padding exists
    byte[] _scratchUV;

    void Start()
    {
        _mat = new Material(yuvShader);
        GetComponent<MeshRenderer>().material = _mat;
    }

    /* ------------------------------------------------------------------ */
    /* Main-thread callback                                                */
    /* ------------------------------------------------------------------ */
    public void OnCaptureRawVideoFrameAvailable(MLCameraBase.CameraOutput frame,
                                                MLCameraBase.ResultExtras _,
                                                MLCameraBase.Metadata __)
    {
        if (frame.Format != MLCamera.OutputFormat.YUV_420_888)
            return;

        var planeY = frame.Planes[0];  // full-res luma
        var planeUV = frame.Planes[1]; // ½×½ interleaved chroma

        if (_texY == null)
            InitTextures((int)planeY.Width, (int)planeY.Height);

        CopyPlaneManaged(planeY, _texY, ref _scratchY);
        CopyPlaneManaged(planeUV, _texUV, ref _scratchUV);

        _texY.Apply(false, false);
        _texUV.Apply(false, false);
    }

    /* ------------------------------------------------------------------ */
    /* Managed copy helper – handles padded & contiguous rows              */
    /* ------------------------------------------------------------------ */
    static void CopyPlaneManaged(MLCameraBase.PlaneInfo plane,
                                 Texture2D tex,
                                 ref byte[] scratch)
    {
        int rowBytes = (int)(plane.Width * plane.PixelStride);
        bool padded = plane.Stride != rowBytes;

        if (!padded)
        {
            // Tight rows – one call does the job
            tex.SetPixelData(plane.Data, 0);
            return;
        }

        /* Padded rows – copy row-by-row into a tight buffer */
        int tightSize = rowBytes * (int)plane.Height;
        if (scratch == null || scratch.Length != tightSize)
            scratch = new byte[tightSize];

        for (int y = 0; y < plane.Height; ++y)
        {
            int src = (int)(y * plane.Stride);
            int dst = y * rowBytes;
            Buffer.BlockCopy(plane.Data, src, scratch, dst, rowBytes);
        }

        tex.LoadRawTextureData(scratch);
    }

    /* ------------------------------------------------------------------ */
    /* One-time texture setup                                              */
    /* ------------------------------------------------------------------ */
    void InitTextures(int width, int height)
    {
        _texY = NewPlaneTexture(width, height, TextureFormat.Alpha8);
        _texUV = NewPlaneTexture(width >> 1, height >> 1, TextureFormat.RG16);

        _mat.SetTexture("_MainTex", _texY);
        _mat.SetTexture("_UVTex", _texUV);
    }

    static Texture2D NewPlaneTexture(int w, int h, TextureFormat fmt)
    {
        var tex = new Texture2D(w, h, fmt, mipChain: false, linear: true);
        tex.filterMode = FilterMode.Bilinear;
        return tex;
    }

    void OnDestroy()
    {
        Destroy(_texY);
        Destroy(_texUV);
        Destroy(_mat);
    }
}

Unlit/YUV_RG16_Shader

Shader "Unlit/YUV_RG16_Shader"
{
    Properties
    {
        _MainTex ("Y Plane (Alpha8)", 2D) = "white" {}
        _UVTex   ("UV Plane (RG16)" , 2D) = "white" {}
    }
    SubShader
    {
        Tags { "Queue"="Transparent" "RenderType"="Transparent" }
        Pass
        {
            ZWrite Off
            Cull Off
            Blend SrcAlpha OneMinusSrcAlpha

            CGPROGRAM
            #pragma vertex vert
            #pragma fragment frag
            #include "UnityCG.cginc"

            sampler2D _MainTex;
            sampler2D _UVTex;

            struct appdata { float4 vertex : POSITION; float2 uv : TEXCOORD0; };
            struct v2f     { float4 vertex : SV_POSITION; float2 uv : TEXCOORD0; };

            v2f vert(appdata v)
            {
                v2f o;
                o.vertex = UnityObjectToClipPos(v.vertex);
                o.uv = v.uv;
                return o;
            }

            float4 frag(v2f i) : SV_Target
            {
                // Alpha8 textures expose their value in the alpha channel
                float Y = tex2D(_MainTex, i.uv).a;
                float2 UV = tex2D(_UVTex, i.uv).rg - 0.5; // center chroma

                // BT.601 YUV -> RGB conversion
                float3 rgb;
                rgb.r = Y + 1.402    * UV.y;
                rgb.g = Y - 0.344136 * UV.x - 0.714136 * UV.y;
                rgb.b = Y + 1.772    * UV.x;

                return float4(rgb, 1);
            }
            ENDCG
        }
    }
}