        private void OnAudioFilterRead(float[] data, int channels)
        {
            if (!UnityCompositorInterface.IsRecording())
            {
                return;
            }

            // Create a new stream for the next batch of audio samples
            if (audioMemoryStream == null)
            {
                audioMemoryStream    = new MemoryStream();
                audioStreamWriter    = new BinaryWriter(audioMemoryStream);
                audioStartTime       = AudioSettings.dspTime;
                numCachedAudioFrames = 0;
            }

            // Write the samples into the stream as 16-bit PCM
            for (int i = 0; i < data.Length; i++)
            {
                // Rescale float to short range for encoding.
                short audioEntry = (short)(data[i] * short.MaxValue);
                audioStreamWriter.Write(audioEntry);
            }

            numCachedAudioFrames++;

            // Send to the compositor (buffer a few calls to reduce potential timing errors between packets)
            if (numCachedAudioFrames >= MAX_NUM_CACHED_AUDIO_FRAMES)
            {
                audioStreamWriter.Flush();
                byte[] outBytes = audioMemoryStream.ToArray();
                audioMemoryStream = null;
                UnityCompositorInterface.SetAudioData(outBytes, outBytes.Length, audioStartTime);
            }
        }
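
        // A minimal sketch (not part of the original class) of the float -> 16-bit PCM
        // conversion used above, with clamping added: Unity supplies samples nominally in
        // [-1, 1], but peaks can overshoot, and an unclamped cast to short would overflow.
        private static short[] FloatToPcm16(float[] samples)
        {
            short[] pcm = new short[samples.Length];
            for (int i = 0; i < samples.Length; i++)
            {
                float clamped = Mathf.Clamp(samples[i], -1f, 1f);
                pcm[i] = (short)(clamped * short.MaxValue);
            }
            return pcm;
        }
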
        private void ResetCompositor()
        {
            Debug.Log("Stopping the video composition system.");
            UnityCompositorInterface.Reset();

            UnityCompositorInterface.StopFrameProvider();
            if (UnityCompositorInterface.IsRecording())
            {
                UnityCompositorInterface.StopRecording();
            }
        }
Example #3
        private void OnPostRender()
        {
            displayOutputTexture.DiscardContents();

            RenderTexture sourceTexture = spectatorViewCamera.targetTexture;

            if (supersampleBuffers.Length > 0)
            {
                for (int i = supersampleBuffers.Length - 1; i >= 0; i--)
                {
                    Graphics.Blit(sourceTexture, supersampleBuffers[i], downsampleMats[i]);

                    sourceTexture = supersampleBuffers[i];
                }
            }

            // Force-set this every frame, as it sometimes gets unset when alt-tabbing.
            renderTexture = sourceTexture;
            holoAlphaMat.SetTexture("_FrontTex", renderTexture);
            Graphics.Blit(sourceTexture, compositeTexture, holoAlphaMat);

            Graphics.Blit(compositeTexture, displayOutputTexture, outputYUV ? RGBToYUVMat : RGBToBGRMat);

            Graphics.Blit(renderTexture, alphaTexture, extractAlphaMat);

            // Video texture.
            if (UnityCompositorInterface.IsRecording())
            {
                videoOutputTexture.DiscardContents();
                // convert composite to the format expected by our video encoder (NV12 or BGR)
                Graphics.Blit(compositeTexture, videoOutputTexture, hardwareEncodeVideo ? NV12VideoMat : BGRVideoMat);
            }

            TextureRenderCompleted?.Invoke();

            // push the texture to the compositor plugin and pull the next real world camera texture

            // Issue a plugin event with arbitrary integer identifier.
            // The plugin can distinguish between different
            // things it needs to do based on this ID.
            // For our simple plugin, it does not matter which ID we pass here.
            GL.IssuePluginEvent(renderEvent, 1);
        }
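
        // Hypothetical allocation for the supersample chain blitted above (the real setup
        // code is not shown in this example, and matching downsampleMats are assumed to be
        // configured elsewhere). The assumption: each successive buffer halves the
        // resolution, so walking the array from the last index down to 0 progressively
        // downsamples the oversized camera render to the output size.
        private void CreateSupersampleBuffers(int fullWidth, int fullHeight, int levels)
        {
            supersampleBuffers = new RenderTexture[levels];
            for (int i = 0; i < levels; i++)
            {
                int scale = 1 << (levels - i); // index 0 is the smallest target
                supersampleBuffers[i] = new RenderTexture(fullWidth / scale, fullHeight / scale, 0);
                supersampleBuffers[i].Create();
            }
        }
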
        // Note: this callback runs on Unity's audio thread, not the main thread.
        private void OnAudioFilterRead(float[] data, int channels)
        {
            if (!UnityCompositorInterface.IsRecording())
            {
                return;
            }

            // Create a new stream for the next batch of audio samples
            if (audioMemoryStream == null)
            {
                audioMemoryStream = new MemoryStream();
                audioStreamWriter = new BinaryWriter(audioMemoryStream);
                double audioSettingsTime = AudioSettings.dspTime;                                                                                      // Audio time in seconds, more accurate than Time.time
                double captureFrameTime  = UnityCompositorInterface.GetCaptureFrameIndex() * UnityCompositorInterface.GetColorDuration() / 10000000.0; // Capture Frame Time in seconds
                DebugLog($"Obtained Audio Sample, AudioSettingsTime:{audioSettingsTime}, CaptureFrameTime:{captureFrameTime}");
                audioStartTime       = captureFrameTime;
                numCachedAudioFrames = 0;
            }

            // Write the samples into the stream as 16-bit PCM
            for (int i = 0; i < data.Length; i++)
            {
                // Rescale float to short range for encoding.
                short audioEntry = (short)(data[i] * short.MaxValue);
                audioStreamWriter.Write(audioEntry);
            }

            numCachedAudioFrames++;

            // Send to the compositor (buffer a few calls to reduce potential timing errors between packets)
            if (numCachedAudioFrames >= MAX_NUM_CACHED_AUDIO_FRAMES)
            {
                audioStreamWriter.Flush();
                byte[] outBytes = audioMemoryStream.ToArray();
                audioMemoryStream = null;

                // The Unity compositor assumes that the audioStartTime will be in capture frame sample time.
                // Above we default to capture frame time compared to AudioSettings.dspTime.
                // Any interpolation between these two time sources needs to be done in the editor before handing sample time values to the compositor.
                UnityCompositorInterface.SetAudioData(outBytes, outBytes.Length, audioStartTime);
            }
        }
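
        // Sketch of the capture-time math above: GetColorDuration appears to return the
        // per-frame duration in 100 ns ticks (hence the division by 10,000,000 ticks per
        // second), so frame index * duration converts a frame index to seconds of capture time.
        private static double CaptureFrameTimeSeconds(int frameIndex, long frameDurationTicks)
        {
            const double ticksPerSecond = 10000000.0; // 100 ns units, as in the code above
            return frameIndex * frameDurationTicks / ticksPerSecond;
        }
        // Example: frame 120 of a 30 fps capture (333,333 ticks per frame) is at ~4.0 s.
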
        public bool IsRecording()
        {
            return UnityCompositorInterface.IsRecording();
        }
Example #6
        private IEnumerator OnPostRender()
        {
            RenderTexture sourceTexture = spectatorViewCamera.targetTexture;

            // Capture the depth mask before calling WaitForEndOfFrame to make sure that post processing effects
            // haven't changed the depth buffer.
            if (IsOcclusionMaskNeededForPreviewing ||
                !IsVideoRecordingQuadrantMode)
            {
                occlusionMaskMat.SetTexture("_DepthTexture", depthTexture);
                occlusionMaskMat.SetTexture("_BodyMaskTexture", bodyMaskTexture);
                Graphics.Blit(sourceTexture, occlusionMaskTexture, occlusionMaskMat);

                blurMat.SetFloat("_BlurSize", blurSize);
                // Run at least one blur pass even when numBlurPasses is zero.
                for (int i = 0; i < numBlurPasses || i < 1; i++)
                {
                    var source = i % 2 == 0 ? occlusionMaskTexture : blurOcclusionTexture;
                    var dest   = i % 2 == 0 ? blurOcclusionTexture : occlusionMaskTexture;
                    blurMat.SetTexture("_MaskTexture", source);
                    Graphics.Blit(source, dest, blurMat);
                }

                // An even pass count leaves the result in occlusionMaskTexture; copy it
                // so that blurOcclusionTexture always holds the final blurred mask.
                if (numBlurPasses % 2 == 0)
                {
                    Graphics.Blit(occlusionMaskTexture, blurOcclusionTexture);
                }
            }

            yield return new WaitForEndOfFrame();

            displayOutputTexture.DiscardContents();

            if (supersampleBuffers.Length > 0)
            {
                for (int i = supersampleBuffers.Length - 1; i >= 0; i--)
                {
                    Graphics.Blit(sourceTexture, supersampleBuffers[i], downsampleMats[i]);

                    sourceTexture = supersampleBuffers[i];
                }
            }

            // Force-set this every frame, as it sometimes gets unset when alt-tabbing.
            renderTexture = sourceTexture;

            if (IsVideoRecordingQuadrantMode)
            {
                // Composite hologram onto video for recording quadrant mode video, or for previewing
                // that quadrant-mode video on screen.
                BlitCompositeTexture(renderTexture, colorRGBTexture, compositeTexture);
            }
            else
            {
                // Render the real-world video back onto the composited frame to reduce the opacity
                // of the hologram by the appropriate amount.
                holoAlphaMat.SetTexture("_FrontTex", renderTexture);
                holoAlphaMat.SetTexture("_OcclusionTexture", blurOcclusionTexture);
                Graphics.Blit(sourceTexture, compositeTexture, holoAlphaMat);
            }

            // If an output texture override has been specified, use it instead of the composited texture
            Texture outputTexture = (overrideOutputTexture == null) ? compositeTexture : overrideOutputTexture;

            Graphics.Blit(outputTexture, displayOutputTexture, outputYUV ? RGBToYUVMat : RGBToBGRMat);

            Graphics.Blit(renderTexture, alphaTexture, extractAlphaMat);

            if (ShouldProduceQuadrantVideoFrame)
            {
                CreateQuadrantTexture();
                BlitQuadView(renderTexture, alphaTexture, colorRGBTexture, outputTexture, quadViewOutputTexture);
            }

            // Video texture.
            if (UnityCompositorInterface.IsRecording())
            {
                videoOutputTexture.DiscardContents();

                Texture videoSourceTexture;
                if (IsVideoRecordingQuadrantMode)
                {
                    videoSourceTexture = quadViewOutputTexture;
                }
                else
                {
                    videoSourceTexture = outputTexture;
                }

                // convert composite to the format expected by our video encoder (NV12 or BGR)
                Graphics.Blit(videoSourceTexture, videoOutputTexture, hardwareEncodeVideo ? NV12VideoMat : BGRVideoMat);
            }

            TextureRenderCompleted?.Invoke();

            // push the texture to the compositor plugin and pull the next real world camera texture

            // Issue a plugin event with arbitrary integer identifier.
            // The plugin can distinguish between different
            // things it needs to do based on this ID.
            // For our simple plugin, it does not matter which ID we pass here.
            GL.IssuePluginEvent(renderEvent, 1);
        }
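
        // Standalone sketch of the ping-pong blur used above: each pass swaps source and
        // destination, and a final copy runs when an even pass count leaves the result in
        // the first texture. (As in the original, passes == 0 still forces one blur pass,
        // which the final copy then overwrites with the unblurred mask.)
        private static void PingPongBlur(RenderTexture first, RenderTexture second, Material blur, int passes)
        {
            for (int i = 0; i < passes || i < 1; i++)
            {
                var source = i % 2 == 0 ? first : second;
                var dest   = i % 2 == 0 ? second : first;
                Graphics.Blit(source, dest, blur);
            }
            if (passes % 2 == 0)
            {
                Graphics.Blit(first, second); // the result is expected in 'second'
            }
        }
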
Example #7
        private void OnPostRender()
        {
            displayOutputTexture.DiscardContents();

            RenderTexture sourceTexture = spectatorViewCamera.targetTexture;

            if (supersampleBuffers.Length > 0)
            {
                for (int i = supersampleBuffers.Length - 1; i >= 0; i--)
                {
                    Graphics.Blit(sourceTexture, supersampleBuffers[i], downsampleMats[i]);

                    sourceTexture = supersampleBuffers[i];
                }
            }

            // Force-set this every frame, as it sometimes gets unset when alt-tabbing.
            renderTexture = sourceTexture;

            if (IsVideoRecordingQuadrantMode)
            {
                // Composite hologram onto video for recording quadrant mode video, or for previewing
                // that quadrant-mode video on screen.
                BlitCompositeTexture(renderTexture, colorRGBTexture, compositeTexture);
            }
            else
            {
                // Render the real-world video back onto the composited frame to reduce the opacity
                // of the hologram by the appropriate amount.
                holoAlphaMat.SetTexture("_FrontTex", renderTexture);
                Graphics.Blit(sourceTexture, compositeTexture, holoAlphaMat);
            }

            // If an output texture override has been specified, use it instead of the composited texture
            Texture outputTexture = (overrideOutputTexture == null) ? compositeTexture : overrideOutputTexture;

            Graphics.Blit(outputTexture, displayOutputTexture, outputYUV ? RGBToYUVMat : RGBToBGRMat);

            Graphics.Blit(renderTexture, alphaTexture, extractAlphaMat);

            if (ShouldProduceQuadrantVideoFrame)
            {
                CreateQuadrantTexture();
                BlitQuadView(renderTexture, alphaTexture, colorRGBTexture, outputTexture, quadViewOutputTexture);
            }

            // Video texture.
            if (UnityCompositorInterface.IsRecording())
            {
                videoOutputTexture.DiscardContents();

                Texture videoSourceTexture;
                if (IsVideoRecordingQuadrantMode)
                {
                    videoSourceTexture = quadViewOutputTexture;
                }
                else
                {
                    videoSourceTexture = outputTexture;
                }

                // convert composite to the format expected by our video encoder (NV12 or BGR)
                Graphics.Blit(videoSourceTexture, videoOutputTexture, hardwareEncodeVideo ? NV12VideoMat : BGRVideoMat);
            }

            TextureRenderCompleted?.Invoke();

            // push the texture to the compositor plugin and pull the next real world camera texture

            // Issue a plugin event with arbitrary integer identifier.
            // The plugin can distinguish between different
            // things it needs to do based on this ID.
            // For our simple plugin, it does not matter which ID we pass here.
            GL.IssuePluginEvent(renderEvent, 1);
        }
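
        // Where renderEvent likely comes from: native rendering plugins expose a
        // render-thread callback pointer that Unity invokes via GL.IssuePluginEvent.
        // The export name below is hypothetical; the actual entry point is defined
        // by the compositor's native plugin.
        [System.Runtime.InteropServices.DllImport("UnityCompositorInterface")]
        private static extern System.IntPtr GetRenderEventFunc(); // hypothetical export

        private void CacheRenderEvent()
        {
            renderEvent = GetRenderEventFunc(); // cached once, reused every frame
        }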