// %BANNER_BEGIN%
// ---------------------------------------------------------------------
// %COPYRIGHT_BEGIN%
// Copyright (c) (2018-2022) Magic Leap, Inc. All Rights Reserved.
// Use of this file is governed by the Software License Agreement, located here: https://www.magicleap.com/software-license-agreement-ml2
// Terms and conditions applicable to third-party materials accompanying this distribution may also be found in the top-level NOTICE file appearing herein.
// %COPYRIGHT_END%
// ---------------------------------------------------------------------
// %BANNER_END%

using UnityEngine.XR.MagicLeap.Native;

namespace UnityEngine.XR.MagicLeap
{
    using System;
    using System.Runtime.InteropServices;

    /// <summary>
    /// APIs for video and audio recording.
    /// Managed wrapper over the native MLMediaRecorder C API: every public method
    /// forwards to a <c>NativeBindings.MLMediaRecorder*</c> call on <see cref="Handle"/>
    /// and surfaces the native result code as an <c>MLResult</c>.
    /// </summary>
    public partial class MLMediaRecorder
    {
        /// <summary>
        /// Video source to put in the recorded media.
        /// </summary>
        public enum VideoSource
        {
            /// <summary>
            /// Camera video source.
            /// </summary>
            Camera = 1,
        };

        /// <summary>
        /// Audio source to put in the recorded media.
        /// </summary>
        public enum AudioSource
        {
            /// <summary>
            /// Recording voice.
            /// </summary>
            Voice = 0,

            /// <summary>
            /// Recording ambient sounds.
            /// </summary>
            World,

            /// <summary>
            /// Recording digital sounds.
            /// </summary>
            Virtual,

            /// <summary>
            /// Mixed reality mode: digital + ambient.
            /// </summary>
            Mixed,
        };

        /// <summary>
        /// Media recorder events.
        /// NOTE(review): values mirror the native event enum — do not renumber.
        /// </summary>
        public enum Event
        {
            /// <summary>
            /// Media recorder error.
            /// </summary>
            Error = 1,

            /// <summary>
            /// Media recorder info.
            /// </summary>
            Info = 2,

            /// <summary>
            /// Media recorder track error.
            /// </summary>
            TrackError = 100,

            /// <summary>
            /// Media recorder track info.
            /// </summary>
            TrackInfo = 101,
        };

        /// <summary>
        /// Info/warning codes delivered through <see cref="OnInfo"/> and <see cref="OnTrackInfo"/>.
        /// Values mirror the native info enum.
        /// </summary>
        public enum Info
        {
            Unknown = 1,

            /// <summary>
            /// Max duration of the clip is reached.
            /// </summary>
            MaxDurationReached = 800,

            /// <summary>
            /// Max file size is reached.
            /// </summary>
            MaxFileSizeReached = 801,

            /// <summary>
            /// Max file size is approaching.
            /// </summary>
            MaxFileSizeApproaching = 802,

            /// <summary>
            /// Next output file has started.
            /// </summary>
            TrackCompletionStatus = 1000,

            /// <summary>
            /// The info about progress in time.
            /// </summary>
            TrackProgressInTime = 1001,

            /// <summary>
            /// Track info.
            /// </summary>
            TrackType = 1002,

            /// <summary>
            /// The info about track duration.
            /// </summary>
            TrackDurationMs = 1003,

            /// <summary>
            /// The time to measure the max chunk duration.
            /// </summary>
            TrackMaxChunkDurationMs = 1004,

            /// <summary>
            /// The info about encoded frames.
            /// </summary>
            TrackEncodedFrames = 1005,

            /// <summary>
            /// The time to measure how well the audio and video track data interleaved.
            /// </summary>
            TrackInterChunkTimeUs = 1006,

            /// <summary>
            /// The time to measure system response.
            /// </summary>
            TrackInitialDelayMs = 1007,

            /// <summary>
            /// The time used to compensate for initial A/V sync.
            /// </summary>
            TrackStartOffsetMs = 1008,

            /// <summary>
            /// Total number of bytes of the media data.
            /// </summary>
            TrackDataKBytes = 1009,
        };

        /// <summary>
        /// Video recorder error types, delivered through <see cref="OnError"/> and <see cref="OnTrackError"/>.
        /// </summary>
        public enum Error
        {
            Unknown = 1,
            ServerDied = 2,
            TrackGeneral = 100,
            VideoNoSyncFrame = 200,
        };

        /// <summary>
        /// Possible output (container) formats.
        /// NOTE(review): member order defines the numeric values passed to the native API — do not reorder.
        /// </summary>
        public enum OutputFormat
        {
            Default = 0,

            /// <summary>
            /// 3gpp format.
            /// </summary>
            THREE_GPP,

            /// <summary>
            /// MP4 format.
            /// </summary>
            MPEG_4,

            /// <summary>
            /// AMR NB.
            /// </summary>
            AMR_NB,

            /// <summary>
            /// AMR WB.
            /// </summary>
            AMR_WB,

            /// <summary>
            /// AAC_ADIF.
            /// </summary>
            AAC_ADIF,

            /// <summary>
            /// AAC_ADTS.
            /// </summary>
            AAC_ADTS,

            /// <summary>
            /// Stream over a socket, limited to a single stream.
            /// </summary>
            RTP_AVP,

            /// <summary>
            /// H.264/AAC data encapsulated in MPEG2/TS.
            /// </summary>
            MPEG2TS,

            /// <summary>
            /// VP8/VORBIS data in a WEBM container.
            /// </summary>
            WEBM,

            /// <summary>
            /// HEIC data in a HEIF container.
            /// </summary>
            HEIF,

            /// <summary>
            /// Opus data in a OGG container.
            /// </summary>
            OGG,
        };

        /// <summary>
        /// Available video encoder formats.
        /// </summary>
        public enum VideoEncoder
        {
            Default = 0,

            /// <summary>
            /// H263. This format has support for software encoder.
            /// </summary>
            H263,

            /// <summary>
            /// H264. This format has support for hardware encoder.
            /// </summary>
            H264,

            /// <summary>
            /// MPEG4 SP. This format has support for software encoder.
            /// </summary>
            MPEG_4_SP,

            /// <summary>
            /// VP8. This format has support for software encoder.
            /// </summary>
            VP8,

            /// <summary>
            /// HEVC. This format has support for hardware encoder.
            /// </summary>
            HEVC,
        };

        /// <summary>
        /// Available audio encoder formats.
        /// </summary>
        public enum AudioEncoder
        {
            Default = 0,

            /// <summary>
            /// AMR NB.
            /// </summary>
            AMR_NB,

            /// <summary>
            /// AMR WB.
            /// </summary>
            AMR_WB,

            /// <summary>
            /// AAC.
            /// </summary>
            AAC,

            /// <summary>
            /// HE AAC.
            /// </summary>
            HE_AAC,

            /// <summary>
            /// AAC ELD.
            /// </summary>
            AAC_ELD,

            /// <summary>
            /// Vorbis.
            /// </summary>
            VORBIS,

            /// <summary>
            /// Opus.
            /// </summary>
            OPUS,
        };

        /// <summary>
        /// Info received when the media recorder runs into an error.
        /// </summary>
        public struct OnErrorData
        {
            /// <summary>
            /// The error of MLMediaRecorder.Error.
            /// </summary>
            public Error Error;

            /// <summary>
            /// The extra info.
            /// </summary>
            public int Extra;
        };

        /// <summary>
        /// Info received when the media recorder runs into a track error.
        /// </summary>
        public struct OnTrackErrorData
        {
            /// <summary>
            /// Track ID when the error or info type is track specific.
            /// </summary>
            public uint TrackId;

            /// <summary>
            /// The error of MLMediaRecorder.Error.
            /// </summary>
            public Error Error;

            /// <summary>
            /// The extra info.
            /// </summary>
            public int Extra;
        };

        /// <summary>
        /// Payload for a general info/warning event.
        /// </summary>
        public struct OnInfoData
        {
            /// <summary>
            /// The info of MLMediaRecorder.Info.
            /// </summary>
            public Info Info;

            /// <summary>
            /// The extra info.
            /// </summary>
            public int Extra;
        };

        /// <summary>
        /// Payload for a track-specific info/warning event.
        /// </summary>
        public struct OnTrackInfoData
        {
            /// <summary>
            /// Track ID when the error or info type is track specific.
            /// </summary>
            public uint TrackId;

            /// <summary>
            /// The info of MLMediaRecorder.Info.
            /// </summary>
            public Info Info;

            /// <summary>
            /// The extra info.
            /// </summary>
            public int Extra;
        };

        /// <summary>Delegate for <see cref="OnInfo"/>.</summary>
        public delegate void OnInfoDelegate(OnInfoData info);

        /// <summary>Delegate for <see cref="OnTrackInfo"/>.</summary>
        public delegate void OnTrackInfoDelegate(OnTrackInfoData info);

        /// <summary>Delegate for <see cref="OnError"/>.</summary>
        public delegate void OnErrorDelegate(OnErrorData trackInfo);

        /// <summary>Delegate for <see cref="OnTrackError"/>.</summary>
        public delegate void OnTrackErrorDelegate(OnTrackErrorData trackInfo);

        // Events are initialized with empty delegates so callers never need a null check
        // before invoking. NOTE(review): presumably raised from the native callbacks
        // registered in the constructor (defined in the NativeBindings partial, not
        // visible here) — confirm the raising thread before touching Unity APIs in handlers.

        /// <summary>
        /// MediaRecorder received a general info/warning message.
        /// </summary>
        public event OnInfoDelegate OnInfo = delegate { };

        /// <summary>
        /// MediaRecorder received a track-related info/warning message.
        /// </summary>
        public event OnTrackInfoDelegate OnTrackInfo = delegate { };

        /// <summary>
        /// MediaRecorder received a general error message.
        /// </summary>
        public event OnErrorDelegate OnError = delegate { };

        /// <summary>
        /// MediaRecorder received a track-related error message.
        /// </summary>
        public event OnTrackErrorDelegate OnTrackError = delegate { };

        /// <summary>
        /// Handle to the underlying media recorder object.
        /// Set to <c>MagicLeapNativeBindings.InvalidHandle</c> after <see cref="Destroy"/> succeeds.
        /// </summary>
        public ulong Handle { get; private set; } = MagicLeapNativeBindings.InvalidHandle;

        /// <summary>
        /// Native surface object which should be used to get the
        /// native buffers to render the video frames onto for recording.
        /// Populated by <see cref="GetInputSurface"/>; cleared by <see cref="ReleaseInputSurface"/>.
        /// </summary>
        public MLNativeSurface InputSurface { get; private set; }

        /// <summary>
        /// Handle for the managed media recorder object to pass to and from unmanaged code.
        /// Weak, so it does not keep this instance alive; freed in the finalizer.
        /// </summary>
        private GCHandle gcHandle;

        /// <summary>
        /// Create a media recorder object.
        /// </summary>
        /// <returns>Media recorder object if construction was successful, null otherwise.</returns>
        public static MLMediaRecorder Create()
        {
            MLResult.Code resultCode = NativeBindings.MLMediaRecorderCreate(out ulong handle);
            return MLResult.DidNativeCallSucceed(resultCode, nameof(NativeBindings.MLMediaRecorderCreate)) ? new MLMediaRecorder(handle) : null;
        }

        /// <summary>
        /// Wraps an already-created native recorder handle and registers the native
        /// event callbacks, passing a weak GCHandle to this instance as the callback context.
        /// </summary>
        /// <param name="handle">Valid native recorder handle from MLMediaRecorderCreate.</param>
        private MLMediaRecorder(ulong handle)
        {
            this.Handle = handle;
            NativeBindings.MLMediaRecorderEventCallbacks callbacks = NativeBindings.MLMediaRecorderEventCallbacks.Create();
            // Weak handle: native code can resolve back to this object without
            // preventing garbage collection.
            this.gcHandle = GCHandle.Alloc(this, GCHandleType.Weak);
            IntPtr gcHandlePtr = GCHandle.ToIntPtr(this.gcHandle);
            var resultCode = NativeBindings.MLMediaRecorderSetEventCallbacks(handle, ref callbacks, gcHandlePtr);
            // Failure is logged via DidNativeCallSucceed but not surfaced; the object
            // is still constructed with a valid Handle.
            MLResult.DidNativeCallSucceed(resultCode, nameof(NativeBindings.MLMediaRecorderSetEventCallbacks));
        }

        /// <summary>
        /// Finalizer: tears down the native recorder and frees the GC handle.
        /// NOTE(review): Destroy() dereferences the managed InputSurface object, whose
        /// finalization order relative to this object is unspecified — consider an
        /// IDisposable pattern with deterministic teardown; confirm against MLNativeSurface's
        /// own lifetime handling before changing.
        /// </summary>
        ~MLMediaRecorder()
        {
            Destroy();
            gcHandle.Free();
        }

        /// <summary>
        /// Releases the input surface (if any) and destroys the native recorder.
        /// Safe to call on an already-destroyed instance (invalid handle is a no-op).
        /// </summary>
        private void Destroy()
        {
            if (!MagicLeapNativeBindings.MLHandleIsValid(Handle))
                return;

            // The input surface must be released before the recorder itself is destroyed.
            ReleaseInputSurface();
            MLResult.Code resultCode = NativeBindings.MLMediaRecorderDestroy(Handle);
            if (MLResult.DidNativeCallSucceed(resultCode, nameof(NativeBindings.MLMediaRecorderDestroy)))
                Handle = MagicLeapNativeBindings.InvalidHandle;
        }

        /// <summary>
        /// Pass in the unmanaged file descriptor of the file to be written.
        /// Call this after MLMediaRecorder.SetOutputFormat() but before
        /// MLMediaRecorder.Prepare().
        /// </summary>
        /// <param name="fd">Unmanaged file descriptor of the output file.</param>
        /// <returns>Result of the native call.</returns>
        public MLResult SetOutputFileForFD(int fd)
        {
            var resultCode = NativeBindings.MLMediaRecorderSetOutputFileForFD(Handle, fd);
            MLResult.DidNativeCallSucceed(resultCode, nameof(NativeBindings.MLMediaRecorderSetOutputFileForFD));
            return MLResult.Create(resultCode);
        }

        /// <summary>
        /// Sets the path of the output file to be produced.
        /// Call this after MLMediaRecorder.SetOutputFormat() but before
        /// MLMediaRecorder.Prepare().
        /// </summary>
        /// <param name="path">Path to output file. Folders should exist already.</param>
        /// <returns>Result of the native call.</returns>
        public MLResult SetOutputFileForPath(string path)
        {
            var resultCode = NativeBindings.MLMediaRecorderSetOutputFileForPath(Handle, path);
            MLResult.DidNativeCallSucceed(resultCode, nameof(NativeBindings.MLMediaRecorderSetOutputFileForPath));
            return MLResult.Create(resultCode);
        }

        /// <summary>
        /// Sets MediaRecorder video source. Cannot be called twice without
        /// calling MLMediaRecorder.Reset() in between.
        /// </summary>
        /// <param name="inVideoSource">Video source to record from.</param>
        /// <returns>Result of the native call.</returns>
        public MLResult SetVideoSource(VideoSource inVideoSource)
        {
            var resultCode = NativeBindings.MLMediaRecorderSetVideoSource(Handle, inVideoSource);
            MLResult.DidNativeCallSucceed(resultCode, nameof(NativeBindings.MLMediaRecorderSetVideoSource));
            return MLResult.Create(resultCode);
        }

        /// <summary>
        /// Set MediaRecorder audio source. Cannot be called twice without
        /// calling MLMediaRecorder.Reset() in between.
        /// </summary>
        /// <param name="inAudioSource">Audio source to record from.</param>
        /// <returns>Result of the native call.</returns>
        public MLResult SetAudioSource(AudioSource inAudioSource)
        {
            var resultCode = NativeBindings.MLMediaRecorderSetAudioSource(Handle, inAudioSource);
            MLResult.DidNativeCallSucceed(resultCode, nameof(NativeBindings.MLMediaRecorderSetAudioSource));
            return MLResult.Create(resultCode);
        }

        /// <summary>
        /// Sets the format of the output file produced during recording.
        /// </summary>
        /// <param name="inFormat">Container format for the output file.</param>
        /// <returns>Result of the native call.</returns>
        public MLResult SetOutputFormat(OutputFormat inFormat)
        {
            var resultCode = NativeBindings.MLMediaRecorderSetOutputFormat(Handle, inFormat);
            MLResult.DidNativeCallSucceed(resultCode, nameof(NativeBindings.MLMediaRecorderSetOutputFormat));
            return MLResult.Create(resultCode);
        }

        /// <summary>
        /// Sets the video encoder to be used for recording.
        /// If this method is not called, the output file will not contain a
        /// video track. Call this after MLMediaRecorder.SetOutputFormat() and
        /// before MLMediaRecorder.Prepare(). The video source is
        /// always set to camera by default.
        /// </summary>
        /// <param name="inVideoEncoder">Video encoder to use.</param>
        /// <returns>Result of the native call.</returns>
        public MLResult SetVideoEncoder(VideoEncoder inVideoEncoder)
        {
            var resultCode = NativeBindings.MLMediaRecorderSetVideoEncoder(Handle, inVideoEncoder);
            MLResult.DidNativeCallSucceed(resultCode, nameof(NativeBindings.MLMediaRecorderSetVideoEncoder));
            return MLResult.Create(resultCode);
        }

        /// <summary>
        /// Sets the audio encoder to be used for recording.
        /// If this method is not called, the output file will not contain an
        /// audio track. Call this after MLMediaRecorder.SetOutputFormat() and
        /// before MLMediaRecorder.Prepare().
        /// </summary>
        /// <param name="inAudioEncoder">Audio encoder to use.</param>
        /// <returns>Result of the native call.</returns>
        public MLResult SetAudioEncoder(AudioEncoder inAudioEncoder)
        {
            var resultCode = NativeBindings.MLMediaRecorderSetAudioEncoder(Handle, inAudioEncoder);
            MLResult.DidNativeCallSucceed(resultCode, nameof(NativeBindings.MLMediaRecorderSetAudioEncoder));
            return MLResult.Create(resultCode);
        }

        /// <summary>
        /// Sets the maximum duration (in ms) of the recording session. Call this
        /// after MLMediaRecorder.SetOutputFormat() and before
        /// MLMediaRecorder.Prepare(). After recording reaches the specified
        /// duration, a notification will be sent via the callback
        /// with a MLMediaRecorder.Info code of MLMediaRecorder.Info.MaxDurationReached
        /// and recording will be stopped. Stopping happens asynchronously, there
        /// is no guarantee that the recorder will have stopped by the time the listener is notified.
        /// </summary>
        /// <param name="inMaxDurationMsec">Maximum duration in milliseconds.</param>
        /// <returns>Result of the native call.</returns>
        public MLResult SetMaxDuration(int inMaxDurationMsec)
        {
            var resultCode = NativeBindings.MLMediaRecorderSetMaxDuration(Handle, inMaxDurationMsec);
            MLResult.DidNativeCallSucceed(resultCode, nameof(NativeBindings.MLMediaRecorderSetMaxDuration));
            return MLResult.Create(resultCode);
        }

        /// <summary>
        /// Sets the maximum file size (in bytes) of the recording session.
        /// Call this after MLMediaRecorder.SetOutputFormat() and before MLMediaRecorder.Prepare().
        /// After recording reaches the specified filesize, a notification
        /// will be sent via the callback with a MLMediaRecorder.Info code of
        /// MLMediaRecorder.Info.MaxFileSizeReached and recording will be stopped. Stopping
        /// happens asynchronously, there is no guarantee that the recorder
        /// will have stopped by the time the listener is notified.
        /// </summary>
        /// <param name="inMaxFileSize">Maximum file size in bytes.</param>
        /// <returns>Result of the native call.</returns>
        public MLResult SetMaxFileSize(long inMaxFileSize)
        {
            var resultCode = NativeBindings.MLMediaRecorderSetMaxFileSize(Handle, inMaxFileSize);
            MLResult.DidNativeCallSucceed(resultCode, nameof(NativeBindings.MLMediaRecorderSetMaxFileSize));
            return MLResult.Create(resultCode);
        }

        /// <summary>
        /// Sets the GEO location for recording.
        /// </summary>
        /// <param name="inLatitude10000">The 10000 multiplies latitude of location.</param>
        /// <param name="inLongitude10000">The 10000 multiplies longitude of location.</param>
        /// <returns>Result of the native call.</returns>
        public MLResult SetGeoLocation(long inLatitude10000, long inLongitude10000)
        {
            var resultCode = NativeBindings.MLMediaRecorderSetGeoLocation(Handle, inLatitude10000, inLongitude10000);
            MLResult.DidNativeCallSucceed(resultCode, nameof(NativeBindings.MLMediaRecorderSetGeoLocation));
            return MLResult.Create(resultCode);
        }

        /// <summary>
        /// Prepares the recorder to begin capturing and encoding data for input mediaformat.
        /// Should be called after setting up the desired audio and video sources,
        /// encoders, but before MLMediaRecorder.Start().
        /// </summary>
        /// <param name="format">Media format object to configure the video and audio track.</param>
        /// <returns>Result of the native call.</returns>
        public MLResult Prepare(MLMediaFormat format)
        {
            // NOTE(review): format is dereferenced (format.Handle) without a null check —
            // a null argument throws NullReferenceException here rather than returning
            // an MLResult error; confirm whether callers rely on that.
            MLResult.Code resultCode = NativeBindings.MLMediaRecorderPrepare(Handle, format.Handle);
            MLResult.DidNativeCallSucceed(resultCode, nameof(NativeBindings.MLMediaRecorderPrepare));
            return MLResult.Create(resultCode);
        }

        /// <summary>
        /// Begins capturing and encoding data to the specified file.
        /// Call this after MLMediaRecorder.Prepare(). The apps should
        /// not start another recording session during recording.
        /// </summary>
        /// <returns>Result of the native call.</returns>
        public MLResult Start()
        {
            var resultCode = NativeBindings.MLMediaRecorderStart(Handle);
            MLResult.DidNativeCallSucceed(resultCode, nameof(NativeBindings.MLMediaRecorderStart));
            return MLResult.Create(resultCode);
        }

        /// <summary>
        /// Gets the input surface to record from when using SURFACE video source. May only be called after MLMediaRecorderPrepare and
        /// MLMediaRecorderStart. Frames rendered to the producer before MLMediaRecorderStart() is called will be discarded. When using an input
        /// surface, there are no accessible input buffers, as buffers are automatically passed from the other modules to this surface.
        /// The returned input surface can also be passed as a destination surface to a video/mixed reality video capture session
        /// when calling MLCameraPrepareCapture(). Captured raw video frames will be consumed directly as input to an encoder
        /// without copying. The surface is released via the private ReleaseInputSurface(), which Reset() and Destroy() call automatically.
        /// On success the surface is exposed through <see cref="InputSurface"/>.
        /// </summary>
        /// <returns>Result of the native call.</returns>
        public MLResult GetInputSurface()
        {
            var resultCode = NativeBindings.MLMediaRecorderGetInputSurface(Handle, out ulong inputSurface);
            if (MLResult.DidNativeCallSucceed(resultCode, nameof(NativeBindings.MLMediaRecorderGetInputSurface)))
            {
                // NOTE(review): a previously obtained InputSurface is overwritten here
                // without being released — repeated calls may leak the native surface;
                // confirm whether the native API returns the same handle each time.
                InputSurface = new MLNativeSurface(inputSurface);
            }
            return MLResult.Create(resultCode);
        }

        /// <summary>
        /// Returns the maximum absolute amplitude that was sampled since the
        /// last call to this method. Call this only after the
        /// MLMediaRecorder.SetAudioSource().
        /// </summary>
        /// <param name="maxAmp">Receives the maximum absolute amplitude.</param>
        /// <returns>Result of the native call.</returns>
        public MLResult GetMaxAmplitude(out int maxAmp)
        {
            var resultCode = NativeBindings.MLMediaRecorderGetMaxAmplitude(Handle, out maxAmp);
            MLResult.DidNativeCallSucceed(resultCode, nameof(NativeBindings.MLMediaRecorderGetMaxAmplitude));
            return MLResult.Create(resultCode);
        }

        /// <summary>
        /// Stops recording. Call this after MLMediaRecorder.Start().
        /// Once recording is stopped, you will have to configure it
        /// again as if it has just been constructed.
        /// </summary>
        /// <returns>Result of the native call.</returns>
        public MLResult Stop()
        {
            var resultCode = NativeBindings.MLMediaRecorderStop(Handle);
            MLResult.DidNativeCallSucceed(resultCode, nameof(NativeBindings.MLMediaRecorderStop));
            return MLResult.Create(resultCode);
        }

        /// <summary>
        /// Restarts the MediaRecorder to its idle state. After calling this method,
        /// you will have to configure it again as if it had just been constructed.
        /// Also releases the current input surface, if any.
        /// </summary>
        /// <returns>Result of the native reset call (the surface-release result is not surfaced).</returns>
        public MLResult Reset()
        {
            MLResult.Code resultCode = NativeBindings.MLMediaRecorderReset(Handle);
            MLResult.DidNativeCallSucceed(resultCode, nameof(NativeBindings.MLMediaRecorderReset));
            ReleaseInputSurface();
            return MLResult.Create(resultCode);
        }

        /// <summary>
        /// Releases the native input surface previously obtained via <see cref="GetInputSurface"/>
        /// and clears <see cref="InputSurface"/>. No-op (returns Ok) when there is no
        /// valid surface to release.
        /// </summary>
        /// <returns>Result code of the native release, or Ok when nothing was released.</returns>
        private MLResult.Code ReleaseInputSurface()
        {
            MLResult.Code resultCode = MLResult.Code.Ok;
            if (InputSurface != null && MagicLeapNativeBindings.MLHandleIsValid(InputSurface.Handle))
            {
                resultCode = NativeBindings.MLMediaRecorderReleaseInputSurface(Handle, InputSurface.Handle);
                MLResult.DidNativeCallSucceed(resultCode, nameof(NativeBindings.MLMediaRecorderReleaseInputSurface));
                InputSurface = null;
            }
            return resultCode;
        }
    }
}