// %BANNER_BEGIN%
// ---------------------------------------------------------------------
// %COPYRIGHT_BEGIN%
// Copyright (c) (2018-2022) Magic Leap, Inc. All Rights Reserved.
// Use of this file is governed by the Software License Agreement, located here: https://www.magicleap.com/software-license-agreement-ml2
// Terms and conditions applicable to third-party materials accompanying this distribution may also be found in the top-level NOTICE file appearing herein.
// %COPYRIGHT_END%
// ---------------------------------------------------------------------
// %BANNER_END%
namespace UnityEngine.XR.MagicLeap
{
using System;
using System.Collections.Generic;
using System.Runtime.InteropServices;
using UnityEngine.XR.MagicLeap.Native;
/// <summary>
/// MLWebRTC class contains the API to interface with the
/// WebRTC C API.
/// </summary>
public partial class MLWebRTC
{
/// <summary>
/// Class that represents a video sink used by the MLWebRTC API.
/// </summary>
public partial class VideoSink
{
/// <summary>
/// Struct representing a captured camera frame.
/// </summary>
public partial struct Frame
{
/// <summary>
/// Native bindings for the MLWebRTC.VideoSink.Frame struct.
/// </summary>
internal class NativeBindings : MagicLeapNativeBindings
{
///
/// Gets frame data.
///
/// The handle to the frame to query.
/// Pointer to the frame data.
///
/// MLResult.Result will be MLResult.Code.Ok if the dimensions were successfully obtained.
/// MLResult.Result will be MLResult.Code.PermissionDenied if necessary permission is missing.
/// MLResult.Result will be MLResult.Code.UnspecifiedFailure if failed due to other internal error.
///
[DllImport(MLWebRTCDLL, CallingConvention = CallingConvention.Cdecl)]
public static extern MLResult.Code MLWebRTCFrameGetData(ulong frameHandle, ref MLWebRTCFrame frame);
///
/// Buffer for native image plane arrays.
///
private static readonly CircularBuffer nativeImagePlanesBuffer = CircularBuffer.Create(
new ImagePlaneInfoNative[PlaneInfo.MaxImagePlanes],
new ImagePlaneInfoNative[PlaneInfo.MaxImagePlanes],
new ImagePlaneInfoNative[PlaneInfo.MaxImagePlanes]);
///
/// 4x4 matrix size for native frame transforms
///
private const int TransformMatrixLength = 16;
///
/// Representation of the native frame structure.
///
[StructLayout(LayoutKind.Sequential)]
public struct MLWebRTCFrame
{
///
/// Version of this structure.
///
public uint Version;
///
/// Frame data to be interpreted as a if Format is ,
/// otherwise interpreted as a .
///
public IntPtr FrameData;
///
/// Timestamp of the frame.
///
public ulong TimeStamp;
///
/// Output format that the image planes will be in.
///
public OutputFormat Format;
public static MLWebRTCFrame Create(OutputFormat format)
{
MLWebRTCFrame frameNative = new MLWebRTCFrame();
frameNative.Version = 2;
frameNative.Format = format;
return frameNative;
}
///
/// Caller MUST call FreeUnmanagedMemory when finished with the returned MLWebRTCFrame! Unmanaged memory
/// will be allocated for the FrameData pointer.
/// Creates and returns an initialized version of this struct from a object.
///
/// The frame object to use for initializing.
/// An initialized version of this struct.
public static MLWebRTCFrame Create(MLWebRTC.VideoSink.Frame frame)
{
MLWebRTCFrame frameNative = Create(frame.Format);
frameNative.TimeStamp = frame.TimeStampUs;
if (frame.Format == OutputFormat.NativeBuffer)
{
MLWebRTCNativeFrameInfo frameData = new MLWebRTCNativeFrameInfo()
{
Width = frame.NativeFrame.Width,
Height = frame.NativeFrame.Height,
SurfaceHandle = frame.NativeFrame.SurfaceHandle,
NativeBufferHandle = frame.NativeFrame.NativeBufferHandle,
Transform = new float[frame.NativeFrame.Transform.Length]
};
Array.Copy(frame.NativeFrame.Transform, frameData.Transform, frame.NativeFrame.Transform.Length);
frameNative.FrameData = Marshal.AllocHGlobal(Marshal.SizeOf(frameData));
Marshal.StructureToPtr(frameData, frameNative.FrameData, false);
}
else
{
MLWebRTCFramePlanes frameData = new MLWebRTCFramePlanes()
{
PlaneCount = (byte)((frame.Format == OutputFormat.YUV_420_888) ? 3 : 1),
ImagePlanes = nativeImagePlanesBuffer.Get()
};
for (int i = 0; i < frame.ImagePlanes.Length; i++)
{
frameData.ImagePlanes[i] = new ImagePlaneInfoNative(frame.ImagePlanes[i]);
}
frameNative.FrameData = Marshal.AllocHGlobal(Marshal.SizeOf(frameData));
Marshal.StructureToPtr(frameData, frameNative.FrameData, false);
}
return frameNative;
}
public void FreeUnmanagedMemory()
{
if (FrameData != IntPtr.Zero)
{
Marshal.FreeHGlobal(FrameData);
FrameData = IntPtr.Zero;
}
}
public override string ToString()
{
var str = new System.Text.StringBuilder($"[MLWebRTCFrame: Version={Version}, TimeStamp={TimeStamp}, Format={Format}\n");
str.AppendLine($"\tFrameData:\n\t\t{FrameData}\n]");
return str.ToString();
}
}
[StructLayout(LayoutKind.Sequential)]
public struct MLWebRTCFramePlanes
{
///
/// Number of output image planes. 3 for , 1 for
///
public byte PlaneCount;
///
/// The image planes making up the output image. Array length is constant, actual number of planes is specified by PlaneCount.
///
[MarshalAs(UnmanagedType.ByValArray, SizeConst = PlaneInfo.MaxImagePlanes)]
public ImagePlaneInfoNative[] ImagePlanes;
}
[StructLayout(LayoutKind.Sequential)]
public struct MLWebRTCNativeFrameInfo
{
///
/// Width of the native frame
///
public uint Width;
///
/// Height of the native frame
///
public uint Height;
///
/// The 4x4 column-major tranformation matrix for the native frame
///
[MarshalAs(UnmanagedType.ByValArray, SizeConst = TransformMatrixLength)]
public float[] Transform;
///
/// Surface handle, from which native handle is acquired
/// Surface handle can be created using MLNativeSurfaceCreate
///
public ulong SurfaceHandle;
///
/// Native buffer handle can be acquired from MLNativeSurface using MLNativeSurfaceAcquireNextAvailableFrame.
/// Application should never release the native_buffer_handle once it is acquired.
/// If MLWebRTCFrame is created by application to send the frame through webrtc, webrtc will release
/// the native_buffer_handle using MLNativeSurfaceReleaseFrame after encoding.
/// Application should not release the native_buffer_handle send by webrtc for rendering whether it is local or remote sink.
///
public ulong NativeBufferHandle;
public override string ToString()
{
return $"[MLWebRTCNativeFrameInfo: Width={Width}, Height={Height},\n\t\t\tTransform=({string.Join(',', Transform)}),\n\t\t\tSurfaceHandle={SurfaceHandle}, NativeBufferHandle={NativeBufferHandle}]";
}
}
///
/// Representation of the native image plane structure.
///
[StructLayout(LayoutKind.Sequential)]
public struct ImagePlaneInfoNative
{
///
/// Width of the image plane.
///
public uint Width;
///
/// Height of the image plane.
///
public uint Height;
///
/// The stride of the image plane, representing how many bytes one row of the image plane contains.
///
public uint Stride;
///
/// The bytes per pixel of the image plane.
///
public uint BytesPerPixel;
///
/// Data of the image plane.
///
public IntPtr ImageDataPtr;
///
/// Size of the image plane.
///
public uint Size;
///
/// Sets data from an MLWebRTC.VideoSink.Frame.ImagePlane object.
///
public ImagePlaneInfoNative(PlaneInfo planeInfo)
{
Width = planeInfo.Width;
Height = planeInfo.Height;
Stride = planeInfo.Stride;
BytesPerPixel = planeInfo.BytesPerPixel;
ImageDataPtr = planeInfo.DataPtr;
Size = planeInfo.Size;
}
public override string ToString()
{
return $"[Width={Width}, Height={Height}, Stride={Stride}, BytesPerPixel={BytesPerPixel}, DataPtr={ImageDataPtr}, Size={Size}]";
}
}
}
}
}
}
}