private Task SetupCameraAsync()
        {
            lock (lockObj)
            {
#if UNITY_EDITOR
                UnityCompositorInterface.StartArUcoMarkerDetector(_markerDictionaryName, _markerSize);
                return Task.CompletedTask;
#else
                if (setupCameraTask != null)
                {
                    DebugLog("Returning existing setup camera task");
                    return setupCameraTask;
                }

                DebugLog("Setting up HoloLensCamera");
                if (_holoLensCamera == null)
                {
                    _holoLensCamera = new HoloLensCamera(CaptureMode.SingleLowLatency, PixelFormat.BGRA8);
                    _holoLensCamera.OnCameraInitialized += CameraInitialized;
                    _holoLensCamera.OnCameraStarted     += CameraStarted;
                    _holoLensCamera.OnFrameCaptured     += FrameCaptured;
                }

                return setupCameraTask = _holoLensCamera.Initialize();
#endif
            }
        }
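SetupCameraAsync caches its initialization task under a lock so that concurrent callers share a single camera setup. A minimal, self-contained sketch of the same idempotent-initialization pattern (hypothetical names):

using System.Threading.Tasks;

public class CameraSetupExample
{
    private readonly object lockObj = new object();
    private Task setupTask;

    // First caller starts the setup; later callers receive the same cached task.
    public Task EnsureSetupAsync()
    {
        lock (lockObj)
        {
            return setupTask ?? (setupTask = SetupOnceAsync());
        }
    }

    // Stand-in for the real camera initialization work.
    private Task SetupOnceAsync() => Task.Delay(100);
}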
        public bool TryStartRecording(out string fileName)
        {
            fileName = string.Empty;
            TextureManager.InitializeVideoRecordingTextures();
            StringBuilder builder           = new StringBuilder(1024);
            string        documentDirectory = System.Environment.GetFolderPath(System.Environment.SpecialFolder.MyDocuments);
            string        outputDirectory   = $"{documentDirectory}\\HologramCapture";

            if (!Directory.Exists(outputDirectory))
            {
                Directory.CreateDirectory(outputDirectory);
            }

            string desiredFileName = $"{outputDirectory}\\Video.mp4";

            int[] fileNameLength   = new int[1];
            bool  startedRecording = UnityCompositorInterface.StartRecording((int)VideoRecordingLayout, desiredFileName, desiredFileName.Length, builder.Capacity, builder, fileNameLength);

            if (!startedRecording)
            {
                Debug.LogError($"CompositionManager failed to start recording: {desiredFileName}");
                return false;
            }

            fileName = builder.ToString().Substring(0, fileNameLength[0]);
            DebugLog($"Started recording file: {fileName}");
            return true;
        }
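A caller would consume the Try-pattern above like this (a sketch; the compositionManager reference is an assumption):

if (compositionManager.TryStartRecording(out string recordingFile))
{
    Debug.Log($"Recording to: {recordingFile}");
}
else
{
    Debug.LogError("Recording did not start.");
}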
        private Task CleanUpCameraAsync()
        {
            lock (lockObj)
            {
#if UNITY_EDITOR
                UnityCompositorInterface.StopArUcoMarkerDetector();
#else
                if (setupCameraTask == null)
                {
                    DebugLog("CleanupCameraAsync was called when no start task had been created.");
                    return Task.CompletedTask;
                }

                DebugLog("Cleaning up HoloLensCamera");
                if (_holoLensCamera != null)
                {
                    // Unsubscribe before disposing so no callbacks fire mid-teardown.
                    _holoLensCamera.OnCameraInitialized -= CameraInitialized;
                    _holoLensCamera.OnCameraStarted     -= CameraStarted;
                    _holoLensCamera.OnFrameCaptured     -= FrameCaptured;
                    _holoLensCamera.Dispose();
                    _holoLensCamera = null;
                }

                setupCameraTask = null;
#endif
            }

            return Task.CompletedTask;
        }
        private void OnAudioFilterRead(float[] data, int channels)
        {
            if (!UnityCompositorInterface.IsRecording())
            {
                return;
            }

            //Create new stream
            if (audioMemoryStream == null)
            {
                audioMemoryStream    = new MemoryStream();
                audioStreamWriter    = new BinaryWriter(audioMemoryStream);
                audioStartTime       = AudioSettings.dspTime;
                numCachedAudioFrames = 0;
            }

            //Put data into stream
            for (int i = 0; i < data.Length; i++)
            {
                // Rescale float to short range for encoding.
                short audioEntry = (short)(data[i] * short.MaxValue);
                audioStreamWriter.Write(audioEntry);
            }

            numCachedAudioFrames++;

            //Send to compositor (buffer a few calls to reduce potential timing errors between packets)
            if (numCachedAudioFrames >= MAX_NUM_CACHED_AUDIO_FRAMES)
            {
                audioStreamWriter.Flush();
                byte[] outBytes = audioMemoryStream.ToArray();
                audioMemoryStream = null;
                UnityCompositorInterface.SetAudioData(outBytes, outBytes.Length, audioStartTime);
            }
        }
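The loop above rescales Unity's [-1, 1] float samples onto the full 16-bit PCM range. As a standalone helper (a sketch; the Mathf.Clamp guard is an addition not present in the original loop):

// Convert Unity audio samples (floats in [-1, 1]) to 16-bit PCM.
private static short[] ToPcm16(float[] samples)
{
    short[] pcm = new short[samples.Length];
    for (int i = 0; i < samples.Length; i++)
    {
        // Clamp so samples slightly outside [-1, 1] don't wrap after the cast.
        float clamped = Mathf.Clamp(samples[i], -1f, 1f);
        pcm[i] = (short)(clamped * short.MaxValue);
    }
    return pcm;
}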
        /// <summary>
        /// Gets the timestamp of the hologram that will be composited for the current frame of the compositor.
        /// </summary>
        /// <returns>The hologram timestamp corresponding to the current video frame, in Unity's timeline.</returns>
        public float GetHologramTime()
        {
            float time = Time.time;

#if UNITY_EDITOR
            if (isVideoFrameProviderInitialized)
            {
                if (poseCache.poses.Count > 0)
                {
                    time = timeSynchronizer.GetUnityTimeFromCameraTime(GetTimeFromFrame(CurrentCompositeFrame));
                }
                else
                {
                    //Snap time to whole video frames
                    float videoDeltaTime = GetVideoFrameDuration();
                    int   frame          = (int)(time / videoDeltaTime);
                    //Subtract the queued frames
                    frame -= UnityCompositorInterface.GetCaptureFrameIndex() - CurrentCompositeFrame;
                    time   = videoDeltaTime * frame;
                }
            }
#endif

            return time;
        }
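The fallback branch snaps Unity time to whole video frames, then rewinds by the frames still queued between capture and composite. The same arithmetic in isolation, with hypothetical values:

float videoDeltaTime = 1f / 60f; // seconds per video frame (60 fps feed)
float unityTime      = 2.5f;     // current Time.time
int   queuedFrames   = 3;        // GetCaptureFrameIndex() - CurrentCompositeFrame

int   frame        = (int)(unityTime / videoDeltaTime) - queuedFrames; // ~147
float hologramTime = frame * videoDeltaTime;                           // ~2.45 s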
Example #6
        private void Start()
        {
            frameWidth          = UnityCompositorInterface.GetFrameWidth();
            frameHeight         = UnityCompositorInterface.GetFrameHeight();
            outputYUV           = UnityCompositorInterface.OutputYUV();
            renderEvent         = UnityCompositorInterface.GetRenderEventFunc();
            hardwareEncodeVideo = UnityCompositorInterface.HardwareEncodeVideo();

            downsampleMat   = LoadMaterial("Downsample");
            YUVToRGBMat     = LoadMaterial("YUVToRGB");
            RGBToYUVMat     = LoadMaterial("RGBToYUV");
            BGRToRGBMat     = LoadMaterial("BGRToRGB");
            RGBToBGRMat     = LoadMaterial("BGRToRGB"); // swapping R and B is its own inverse, so one shader covers both directions
            NV12VideoMat    = LoadMaterial("RGBToNV12");
            BGRVideoMat     = LoadMaterial("BGRToRGB");
            holoAlphaMat    = LoadMaterial("HoloAlpha");
            extractAlphaMat = LoadMaterial("ExtractAlpha");
            ignoreAlphaMat  = LoadMaterial("IgnoreAlpha");
            quadViewMat     = LoadMaterial("QuadView");
            alphaBlendMat   = LoadMaterial("AlphaBlend");
            textureClearMat = LoadMaterial("TextureClear");

            SetHologramShaderAlpha(Compositor.DefaultAlpha);

            CreateColorTexture();
            CreateOutputTextures();

            SetupCameraAndRenderTextures();

            SetShaderValues();

            SetOutputTextures();
        }
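LoadMaterial itself is not shown in these excerpts. A plausible implementation (a sketch; the "Hidden/" shader path and the use of Shader.Find are assumptions, not the library's confirmed approach):

private Material LoadMaterial(string shaderName)
{
    // Resolve the shader by name and wrap it in a fresh material.
    Shader shader = Shader.Find($"Hidden/{shaderName}");
    if (shader == null)
    {
        Debug.LogError($"Could not find shader: {shaderName}");
        return null;
    }
    return new Material(shader);
}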
        private void Update()
        {
            if (_detecting)
            {
#if UNITY_EDITOR
                int markerCount = UnityCompositorInterface.GetLatestArUcoMarkerCount();
                if (compositorMarkers == null || compositorMarkers.Length < markerCount)
                {
                    compositorMarkers = new CompositorMarker[markerCount];
                }

                UnityCompositorInterface.GetLatestArUcoMarkers(markerCount, compositorMarkers);
                compositorMarkersToProcess.Clear();
                for (int i = 0; i < markerCount; i++)
                {
                    compositorMarkersToProcess.Add(compositorMarkers[i].id, compositorMarkers[i].AsMarker());
                }

                ProcessMarkersFromFrame(compositorMarkersToProcess);
#else
                if (_holoLensCamera.State == CameraState.Ready &&
                    !_holoLensCamera.TakeSingle())
                {
                    Debug.LogError("Failed to take photo with HoloLensCamera, Camera State: " + _holoLensCamera.State.ToString());
                }
#endif
            }

            if (_nextMarkerUpdate != null)
            {
                MarkersUpdated?.Invoke(_nextMarkerUpdate);
                _nextMarkerUpdate = null;
            }
        }
Example #8
        private void SetOutputTextures()
        {
            // Hack: calling ToString() on the color buffers forces the native texture pointer to be assigned inside the engine
            displayOutputTexture.colorBuffer.ToString();
            compositeTexture.colorBuffer.ToString();

            UnityCompositorInterface.SetOutputRenderTexture(displayOutputTexture.GetNativeTexturePtr());
            UnityCompositorInterface.SetHoloTexture(compositeTexture.GetNativeTexturePtr());
        }
Example #9
        private void Update()
        {
            // This value updates after startup and when the video source changes, so check it every frame
            bool newOutputYUV = UnityCompositorInterface.OutputYUV();

            if (outputYUV != newOutputYUV)
            {
                outputYUV = newOutputYUV;
            }
        }
        private void ResetCompositor()
        {
            Debug.Log("Stopping the video composition system.");
            UnityCompositorInterface.Reset();

            UnityCompositorInterface.StopFrameProvider();
            if (UnityCompositorInterface.IsRecording())
            {
                UnityCompositorInterface.StopRecording();
            }
        }
 /// <summary>
 /// Stops audio recording by ensuring the audio stream is fully written immediately.
 /// </summary>
 private void StopRecordingAudio()
 {
     //Send any left over stream
     if (audioMemoryStream != null)
     {
         audioStreamWriter.Flush();
         byte[] outBytes = audioMemoryStream.ToArray();
         UnityCompositorInterface.SetAudioData(outBytes, outBytes.Length, audioStartTime);
         audioMemoryStream = null;
     }
 }
        private void Start()
        {
            frameWidth          = UnityCompositorInterface.GetFrameWidth();
            frameHeight         = UnityCompositorInterface.GetFrameHeight();
            providesYUV         = UnityCompositorInterface.ProvidesYUV();
            expectsYUV          = UnityCompositorInterface.ExpectsYUV();
            renderEvent         = UnityCompositorInterface.GetRenderEventFunc();
            hardwareEncodeVideo = UnityCompositorInterface.HardwareEncodeVideo();

            downsampleMat      = LoadMaterial("Downsample");
            YUVToRGBMat        = LoadMaterial("YUVToRGB");
            RGBToYUVMat        = LoadMaterial("RGBToYUV");
            BGRToRGBMat        = LoadMaterial("BGRToRGB");
            RGBToBGRMat        = LoadMaterial("BGRToRGB"); // swapping R and B is its own inverse, so one shader covers both directions
            NV12VideoMat       = LoadMaterial("RGBToNV12");
            BGRVideoMat        = LoadMaterial("BGRToRGB");
            holoAlphaMat       = LoadMaterial("HoloAlpha");
            blurMat            = LoadMaterial("Blur");
            occlusionMaskMat   = LoadMaterial("OcclusionMask");
            extractAlphaMat    = LoadMaterial("ExtractAlpha");
            ignoreAlphaMat     = LoadMaterial("IgnoreAlpha");
            quadViewMat        = LoadMaterial("QuadView");
            alphaBlendMat      = LoadMaterial("AlphaBlend");
            textureClearMat    = LoadMaterial("TextureClear");
            colorCorrectionMat = LoadMaterial("ColorCorrection");

            videoFeedColorCorrection = ColorCorrection.GetColorCorrection(VideoFeedColorCorrectionPlayerPrefName);
            blurSize      = PlayerPrefs.GetFloat($"{nameof(TextureManager)}.{nameof(blurSize)}", 5);
            numBlurPasses = PlayerPrefs.GetInt($"{nameof(TextureManager)}.{nameof(numBlurPasses)}", 1);

            SetHologramShaderAlpha(Compositor.DefaultAlpha);

            CreateColorTexture();

            if (Compositor.OcclusionMode == OcclusionSetting.RawDepthCamera)
            {
                CreateDepthCameraTexture();
            }
            else if (Compositor.OcclusionMode == OcclusionSetting.BodyTracking)
            {
                CreateDepthCameraTexture();
                CreateBodyDepthTexture();
            }

            CreateOutputTextures();

            SetupCameraAndRenderTextures();

            SetShaderValues();

            SetOutputTextures();
        }
Example #13
 private void CreateDepthCameraTexture()
 {
     if (depthTexture == null)
     {
         IntPtr depthSRV;
         if (UnityCompositorInterface.CreateUnityDepthCameraTexture(out depthSRV))
         {
             depthTexture            = Texture2D.CreateExternalTexture(frameWidth, frameHeight, TextureFormat.R16, false, false, depthSRV);
             depthTexture.filterMode = FilterMode.Point;
             depthTexture.anisoLevel = 0;
         }
     }
 }
Example #14
 private void CreateBodyDepthTexture()
 {
     if (bodyMaskTexture == null)
     {
         IntPtr bodySRV;
         if (UnityCompositorInterface.CreateUnityBodyMaskTexture(out bodySRV))
         {
             bodyMaskTexture            = Texture2D.CreateExternalTexture(frameWidth, frameHeight, TextureFormat.R16, false, false, bodySRV);
             bodyMaskTexture.filterMode = FilterMode.Point;
             bodyMaskTexture.anisoLevel = 0;
         }
     }
 }
Example #15
 /// <summary>
 /// Creates the external texture resource that surfaces the latest color frame.
 /// </summary>
 private void CreateColorTexture()
 {
     if (colorTexture == null)
     {
         IntPtr colorSRV;
         if (UnityCompositorInterface.CreateUnityColorTexture(out colorSRV))
         {
             colorTexture            = Texture2D.CreateExternalTexture(frameWidth, frameHeight, TextureFormat.ARGB32, false, false, colorSRV);
             colorTexture.filterMode = FilterMode.Point;
             colorTexture.anisoLevel = 0;
         }
     }
 }
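CreateColorTexture, CreateDepthCameraTexture, and CreateBodyDepthTexture differ only in the native factory call, texture format, and target field. A shared helper (a sketch, not present in the source) could collapse the pattern:

private delegate bool SrvFactory(out IntPtr srv);

private Texture2D CreateExternalCameraTexture(SrvFactory createSrv, TextureFormat format)
{
    // Wrap a native shader resource view in a Unity texture, if the plugin provides one.
    if (!createSrv(out IntPtr srv))
    {
        return null;
    }

    Texture2D texture  = Texture2D.CreateExternalTexture(frameWidth, frameHeight, format, false, false, srv);
    texture.filterMode = FilterMode.Point;
    texture.anisoLevel = 0;
    return texture;
}

With that helper, CreateColorTexture would reduce to a null check plus CreateExternalCameraTexture(UnityCompositorInterface.CreateUnityColorTexture, TextureFormat.ARGB32).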
Example #16
 private void CreateQuadrantTexture()
 {
     if (quadViewOutputTexture == null)
     {
         // The output texture should always specify Linear read/write so that color space conversions are not performed when recording
         // the video when using Linear rendering in Unity.
         quadViewOutputTexture              = new RenderTexture(UnityCompositorInterface.GetVideoRecordingFrameWidth(VideoRecordingFrameLayout.Quad), UnityCompositorInterface.GetVideoRecordingFrameHeight(VideoRecordingFrameLayout.Quad), 0, RenderTextureFormat.ARGB32, RenderTextureReadWrite.Linear);
         quadViewOutputTexture.filterMode   = FilterMode.Point;
         quadViewOutputTexture.anisoLevel   = 0;
         quadViewOutputTexture.antiAliasing = 1;
         quadViewOutputTexture.depth        = 0;
         quadViewOutputTexture.useMipMap    = false;
     }
 }
Example #17
        private void OnPostRender()
        {
            displayOutputTexture.DiscardContents();

            RenderTexture sourceTexture = spectatorViewCamera.targetTexture;

            if (supersampleBuffers.Length > 0)
            {
                for (int i = supersampleBuffers.Length - 1; i >= 0; i--)
                {
                    Graphics.Blit(sourceTexture, supersampleBuffers[i], downsampleMats[i]);

                    sourceTexture = supersampleBuffers[i];
                }
            }

            // Force-set this every frame, as it sometimes gets unset when alt-tabbing
            renderTexture = sourceTexture;
            holoAlphaMat.SetTexture("_FrontTex", renderTexture);
            Graphics.Blit(sourceTexture, compositeTexture, holoAlphaMat);

            Graphics.Blit(compositeTexture, displayOutputTexture, outputYUV ? RGBToYUVMat : RGBToBGRMat);

            Graphics.Blit(renderTexture, alphaTexture, extractAlphaMat);

            // Video texture.
            if (UnityCompositorInterface.IsRecording())
            {
                videoOutputTexture.DiscardContents();
                // convert composite to the format expected by our video encoder (NV12 or BGR)
                Graphics.Blit(compositeTexture, videoOutputTexture, hardwareEncodeVideo ? NV12VideoMat : BGRVideoMat);
            }

            TextureRenderCompleted?.Invoke();

            // push the texture to the compositor plugin and pull the next real world camera texture

            // Issue a plugin event with arbitrary integer identifier.
            // The plugin can distinguish between different
            // things it needs to do based on this ID.
            // For our simple plugin, it does not matter which ID we pass here.
            GL.IssuePluginEvent(renderEvent, 1);
        }
        // Note: OnAudioFilterRead is not called on the main thread; Unity invokes it on the audio thread.
        private void OnAudioFilterRead(float[] data, int channels)
        {
            if (!UnityCompositorInterface.IsRecording())
            {
                return;
            }

            //Create new stream
            if (audioMemoryStream == null)
            {
                audioMemoryStream = new MemoryStream();
                audioStreamWriter = new BinaryWriter(audioMemoryStream);
                double audioSettingsTime = AudioSettings.dspTime;                                                                                      // Audio time in seconds, more accurate than Time.time
                double captureFrameTime  = UnityCompositorInterface.GetCaptureFrameIndex() * UnityCompositorInterface.GetColorDuration() / 10000000.0; // Capture Frame Time in seconds
                DebugLog($"Obtained Audio Sample, AudioSettingsTime:{audioSettingsTime}, CaptureFrameTime:{captureFrameTime}");
                audioStartTime       = captureFrameTime;
                numCachedAudioFrames = 0;
            }

            //Put data into stream
            for (int i = 0; i < data.Length; i++)
            {
                // Rescale float to short range for encoding.
                short audioEntry = (short)(data[i] * short.MaxValue);
                audioStreamWriter.Write(audioEntry);
            }

            numCachedAudioFrames++;

            //Send to compositor (buffer a few calls to reduce potential timing errors between packets)
            if (numCachedAudioFrames >= MAX_NUM_CACHED_AUDIO_FRAMES)
            {
                audioStreamWriter.Flush();
                byte[] outBytes = audioMemoryStream.ToArray();
                audioMemoryStream = null;

                // The Unity compositor assumes that the audioStartTime will be in capture frame sample time.
                // Above we default to capture frame time compared to AudioSettings.dspTime.
                // Any interpolation between these two time sources needs to be done in the editor before handing sample time values to the compositor.
                UnityCompositorInterface.SetAudioData(outBytes, outBytes.Length, audioStartTime);
            }
        }
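The capture-time anchor above multiplies the frame index by the frame duration, with GetColorDuration evidently reporting 100-nanosecond ticks (hence the divide by 10,000,000). Worked through with hypothetical values:

long ticksPerFrame = 166_667;  // GetColorDuration(): ~60 fps frame, in 100-ns ticks
int  captureFrame  = 900;      // GetCaptureFrameIndex()
double captureFrameTime = captureFrame * ticksPerFrame / 10_000_000.0; // ~15.0 s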
Example #19
        public void InitializeVideoRecordingTextures()
        {
            var videoRecordingFrameWidth  = Compositor.VideoRecordingFrameWidth;
            var videoRecordingFrameHeight = Compositor.VideoRecordingFrameHeight;

            NV12VideoMat.SetFloat("_Width", videoRecordingFrameWidth);
            NV12VideoMat.SetFloat("_Height", videoRecordingFrameHeight);

            // The output texture should always specify Linear read/write so that color space conversions are not performed when recording
            // the video when using Linear rendering in Unity.
            videoOutputTexture              = new RenderTexture(videoRecordingFrameWidth, videoRecordingFrameHeight, 0, RenderTextureFormat.ARGB32, RenderTextureReadWrite.Linear);
            videoOutputTexture.filterMode   = FilterMode.Point;
            videoOutputTexture.anisoLevel   = 0;
            videoOutputTexture.antiAliasing = 1;
            videoOutputTexture.depth        = 0;
            videoOutputTexture.useMipMap    = false;

            // Hack: calling ToString() on the color buffer forces the native texture pointer to be assigned inside the engine
            videoOutputTexture.colorBuffer.ToString();

            UnityCompositorInterface.SetVideoRenderTexture(videoOutputTexture.GetNativeTexturePtr());
        }
        private void SendCalibrationData()
        {
            if (UnityCompositorInterface.IsCameraCalibrationInformationAvailable())
            {
                UnityCompositorInterface.GetCameraCalibrationInformation(out CompositorCameraIntrinsics compositorIntrinsics);
                CalculatedCameraCalibration calibration = new CalculatedCameraCalibration(compositorIntrinsics.AsCalculatedCameraIntrinsics(), new CalculatedCameraExtrinsics());
                byte[] serializedCalibration            = calibration.Serialize();

                using (MemoryStream memoryStream = new MemoryStream())
                    using (BinaryWriter message = new BinaryWriter(memoryStream))
                    {
                        message.Write("CalibrationData");
                        message.Write(serializedCalibration.Length);
                        message.Write(serializedCalibration);
                        memoryStream.TryGetBuffer(out var buffer);
                        networkManager.Broadcast(buffer.Array, buffer.Offset, buffer.Count);
                    }
            }
            else
            {
                Debug.LogError($"Expected that calibration data should be available when the {nameof(StationaryCameraCalibrationDataProvider)} component is enabled, but calibration data was not available");
            }
        }
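BinaryWriter.Write(string) length-prefixes the header, so a receiver can mirror the framing with a BinaryReader. A sketch of the read side (receivedBytes is an assumed input; the source does not show the receiver):

using (var stream = new MemoryStream(receivedBytes))
using (var reader = new BinaryReader(stream))
{
    string header  = reader.ReadString();  // expected: "CalibrationData"
    int    length  = reader.ReadInt32();
    byte[] payload = reader.ReadBytes(length);

    if (header == "CalibrationData")
    {
        // Hand payload to the matching CalculatedCameraCalibration deserializer.
    }
}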
 public void StopRecording()
 {
     StopRecordingAudio();
     UnityCompositorInterface.StopRecording();
 }
 public void StartRecording()
 {
     TextureManager.InitializeVideoRecordingTextures();
     UnityCompositorInterface.StartRecording((int)VideoRecordingLayout);
 }
 public bool IsRecording()
 {
     return UnityCompositorInterface.IsRecording();
 }
 public void TakePicture()
 {
     UnityCompositorInterface.TakePicture();
 }
        private void Update()
        {
#if UNITY_EDITOR
            UpdateStatsElement(framerateStatistics, 1.0f / Time.deltaTime);

            int captureFrameIndex = UnityCompositorInterface.GetCaptureFrameIndex();

            int prevCompositeFrame = CurrentCompositeFrame;

            //Set our current frame towards the latest captured frame. Don't get too close to it, and don't fall too far behind
            int step = (captureFrameIndex - CurrentCompositeFrame);
            if (step < 8)
            {
                step = 0;
            }
            else if (step > 16)
            {
                step -= 16;
            }
            else
            {
                step = 1;
            }
            CurrentCompositeFrame += step;

            UnityCompositorInterface.SetCompositeFrameIndex(CurrentCompositeFrame);

            #region Spectator View Transform
            if (IsCalibrationDataLoaded && transform.parent != null)
            {
                //Update time synchronizer
                {
                    float captureTime = GetTimeFromFrame(captureFrameIndex);

                    SpectatorViewPoseCache.PoseData poseData = poseCache.GetLatestPose();
                    if (poseData != null)
                    {
                        timeSynchronizer.Update(UnityCompositorInterface.GetCaptureFrameIndex(), captureTime, poseData.Index, poseData.TimeStamp);
                    }
                }

                if (overrideCameraPose)
                {
                    transform.parent.localPosition = overrideCameraPosition;
                    transform.parent.localRotation = overrideCameraRotation;
                }
                else
                {
                    //Set camera transform for the currently composited frame
                    float cameraTime = GetTimeFromFrame(prevCompositeFrame);
                    float poseTime   = timeSynchronizer.GetPoseTimeFromCameraTime(cameraTime);

                    Quaternion currRot;
                    Vector3    currPos;
                    poseTime += VideoTimestampToHolographicTimestampOffset;
                    if (captureFrameIndex <= 0) //No frames captured yet; let's use the very latest camera transform
                    {
                        poseTime = float.MaxValue;
                    }
                    poseCache.GetPose(poseTime, out currPos, out currRot);

                    transform.parent.localPosition = currPos;
                    transform.parent.localRotation = currRot;
                }
            }

            #endregion

            if (!isVideoFrameProviderInitialized)
            {
                isVideoFrameProviderInitialized = UnityCompositorInterface.InitializeFrameProviderOnDevice(CaptureDevice);
                if (isVideoFrameProviderInitialized)
                {
                    CurrentCompositeFrame = 0;
                    timeSynchronizer.Reset();
                    poseCache.Reset();
                }
            }

            UnityCompositorInterface.UpdateCompositor();
#endif
        }
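The step logic keeps the composite frame trailing the capture frame by 8 to 16 frames: closer than 8 it holds, inside the window it advances one frame per update, and beyond 16 it jumps back to a 16-frame lag. The same policy as a pure function (sketch):

private static int ComputeCompositeStep(int captureFrameIndex, int currentCompositeFrame)
{
    int lag = captureFrameIndex - currentCompositeFrame;
    if (lag < 8)  return 0;        // too close to the live frame: hold
    if (lag > 16) return lag - 16; // too far behind: jump back to a 16-frame lag
    return 1;                      // inside the 8-16 window: advance normally
}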
        private void Update()
        {
#if UNITY_EDITOR
            UpdateStatsElement(framerateStatistics, 1.0f / Time.deltaTime);

            int captureFrameIndex = UnityCompositorInterface.GetCaptureFrameIndex();

            int prevCompositeFrame = CurrentCompositeFrame;

            //Set our current frame towards the latest captured frame. Don't get too close to it, and don't fall too far behind
            int step = (captureFrameIndex - CurrentCompositeFrame);
            if (step < 8)
            {
                step = 0;
            }
            else if (step > 16)
            {
                step -= 16;
            }
            else
            {
                step = 1;
            }
            CurrentCompositeFrame += step;

            UnityCompositorInterface.SetCompositeFrameIndex(CurrentCompositeFrame);

            #region Spectator View Transform
            if (IsCalibrationDataLoaded && transform.parent != null)
            {
                //Update time synchronizer
                {
                    float captureTime = GetTimeFromFrame(captureFrameIndex);

                    SpectatorViewPoseCache.PoseData poseData = poseCache.GetLatestPose();
                    if (poseData != null)
                    {
                        timeSynchronizer.Update(UnityCompositorInterface.GetCaptureFrameIndex(), captureTime, poseData.Index, poseData.TimeStamp);
                    }
                }

                if (overrideCameraPose)
                {
                    transform.parent.localPosition = overrideCameraPosition;
                    transform.parent.localRotation = overrideCameraRotation;
                }
                else
                {
                    //Set camera transform for the currently composited frame
                    float cameraTime = GetTimeFromFrame(prevCompositeFrame);
                    float poseTime   = timeSynchronizer.GetPoseTimeFromCameraTime(cameraTime);

                    Quaternion currRot;
                    Vector3    currPos;
                    poseTime += VideoTimestampToHolographicTimestampOffset;
                    if (captureFrameIndex <= 0) //No frames captured yet; let's use the very latest camera transform
                    {
                        poseTime = float.MaxValue;
                    }
                    poseCache.GetPose(poseTime, out currPos, out currRot);

                    transform.parent.localPosition = currPos;
                    transform.parent.localRotation = currRot;
                }
            }

            #endregion

            if (!isVideoFrameProviderInitialized)
            {
                if (UnityCompositorInterface.IsFrameProviderSupported(CaptureDevice) && UnityCompositorInterface.IsOutputFrameProviderSupported(OutputDevice))
                {
                    isVideoFrameProviderInitialized = UnityCompositorInterface.InitializeFrameProviderOnDevice(CaptureDevice, OutputDevice);
                    if (isVideoFrameProviderInitialized)
                    {
                        CurrentCompositeFrame = 0;
                        timeSynchronizer.Reset();
                        poseCache.Reset();

                        if (UnityCompositorInterface.IsCameraCalibrationInformationAvailable())
                        {
                            stationaryCameraBroadcaster.SetActive(true);
                            HolographicCameraObserver.Instance.ConnectTo("127.0.0.1");
                        }
                    }
                }
                else
                {
                    Debug.LogWarning($"The current device selection, Capture: {CaptureDevice}, Output: {OutputDevice}, is not supported by your build of SpectatorView.Compositor.UnityPlugin.dll.");
                }
            }

            UnityCompositorInterface.UpdateCompositor();
#endif
        }
 /// <summary>
 /// Gets the number of composited frames ready to be output by the video output buffer.
 /// </summary>
 public int GetQueuedOutputFrameCount()
 {
     return UnityCompositorInterface.GetNumQueuedOutputFrames();
 }
 /// <summary>
 /// Gets the height of the video frame.
 /// </summary>
 /// <returns>The height of the video frame, in pixels.</returns>
 public static int GetVideoFrameHeight()
 {
     return UnityCompositorInterface.GetFrameHeight();
 }
 /// <summary>
 /// Gets the time duration of a single video frame.
 /// </summary>
 /// <returns>The time duration of a single video frame, in seconds.</returns>
 private float GetVideoFrameDuration()
 {
     return 0.0001f * UnityCompositorInterface.GetColorDuration() / 1000; // 100-ns ticks to seconds
 }
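The constants in GetVideoFrameDuration multiply out to a single division by 10,000,000, i.e. 100-nanosecond ticks to seconds. An equivalent form (sketch, identical up to float rounding):

private float GetVideoFrameDurationSeconds()
{
    // GetColorDuration returns the color frame length in 100-ns ticks.
    return UnityCompositorInterface.GetColorDuration() / 10000000.0f;
}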
 public void StartRecording()
 {
     UnityCompositorInterface.StartRecording();
 }