Ejemplo n.º 1
0
		public void Play_WhileRecording_Throws()
		{
			// Starting playback while a recording is in progress must fail fast.
			using (var session = new RecordingSession())
			{
				var recorder = session.Recorder;
				Assert.Throws<ApplicationException>(() => recorder.Play());
			}
		}
Ejemplo n.º 2
0
 public void CanRecord_WhileRecording_False()
 {
     // While a recording is active, the recorder must report that it
     // cannot start another one.
     using (var session = new RecordingSession())
     {
         var recorder = session.Recorder;
         Assert.IsFalse(recorder.CanRecord);
     }
 }
Ejemplo n.º 3
0
 public void CanPlay_WhilePlaying_False()
 {
     // Once playback has started, CanPlay must report false.
     using (var session = new RecordingSession(1000))
     {
         var recorder = session.Recorder;
         recorder.Play();
         Thread.Sleep(100); // give playback time to actually start
         Assert.IsFalse(recorder.CanPlay);
     }
 }
Ejemplo n.º 4
0
		public void StopPlaying_WhilePlaying_Ok()
		{
			// Stopping in the middle of playback must complete without error.
			using (var session = new RecordingSession(1000))
			{
				var recorder = session.Recorder;
				recorder.Play();
				Thread.Sleep(100); // let playback get under way first
				recorder.StopPlaying();
			}
		}
Ejemplo n.º 5
0
		public void Record_AfterRecordThenStop_Ok()
		{
			// A full record/stop cycle must complete without error.
			using (var session = new RecordingSession(100))
			{
				var recorder = session.Recorder;
				recorder.StartRecording();
				Thread.Sleep(100); // capture a short clip
				recorder.StopRecordingAndSaveAsWav();
			}
		}
Ejemplo n.º 6
0
        protected override void BeginRecording(RecordingSession session)
        {
            // Run the base class setup first, then allocate the render target
            // the recording will be captured into.
            base.BeginRecording(session);

            var width  = InputSettings.OutputWidth;
            var height = InputSettings.OutputHeight;
            OutputRenderTexture = new RenderTexture(width, height, 0,
                RenderTextureFormat.ARGB32, RenderTextureReadWrite.Default);
        }
Ejemplo n.º 7
0
        public override bool BeginRecording(RecordingSession session)
        {
            // The base class gets first refusal; abort if it declines.
            if (!base.BeginRecording(session))
            {
                return false;
            }

            // Make sure the destination folder exists before frames arrive.
            m_Settings.m_DestinationPath.CreateDirectory();
            return true;
        }
Ejemplo n.º 8
0
        /// <summary>
        /// Captures the current game view into this input's texture for the frame.
        /// </summary>
        protected internal override void NewFrameReady(RecordingSession session)
        {
            Profiler.BeginSample("GameViewInput.NewFrameReady");
#if UNITY_2019_1_OR_NEWER
            // 2019.1+: capture directly into a RenderTexture, then flip
            // vertically when a flipper was set up (m_VFlipper may be null).
            ScreenCapture.CaptureScreenshotIntoRenderTexture(m_CaptureTexture);
            m_VFlipper?.Flip(m_CaptureTexture);
#else
            // Older editors: synchronous capture into a Texture2D.
            ReadbackTexture = ScreenCapture.CaptureScreenshotAsTexture();
#endif
            Profiler.EndSample();
        }
Ejemplo n.º 9
0
        public override void BeginRecording(RecordingSession session)
        {
            // The source render texture is mandatory; fail loudly if missing.
            var source = cbSettings.m_SourceRTxtr;
            if (source == null)
            {
                throw new Exception("No Render Texture object provided as source");
            }

            // The recording uses the source texture directly, at its own size.
            outputHeight = source.height;
            outputWidth  = source.width;
            outputRT     = source;
        }
Ejemplo n.º 10
0
        protected internal override bool BeginRecording(RecordingSession session)
        {
            // Delegate to the base class first; abort if it declines.
            if (!base.BeginRecording(session))
            {
                return false;
            }

            // Make sure the target directory exists before frames arrive.
            Settings.fileNameGenerator.CreateDirectory(session);
            return true;
        }
Ejemplo n.º 11
0
        protected internal override bool BeginRecording(RecordingSession session)
        {
            // Abort early if the base class cannot start recording.
            if (!base.BeginRecording(session))
            {
                return false;
            }

            // Prepare the output folder, then spin up the EXR encoding context.
            Settings.fileNameGenerator.CreateDirectory(session);
            m_ctx = fcAPI.fcExrCreateContext(ref Settings.m_ExrEncoderSettings);

            // The context converts to bool: false means the encoder failed to start.
            return m_ctx;
        }
Ejemplo n.º 12
0
 protected internal override void EndRecording(RecordingSession session)
 {
     // Base teardown first, then release any render textures we created.
     base.EndRecording(session);

     if (m_ColorRT != null)
     {
         UnityHelpers.Destroy(m_ColorRT);
     }

     if (m_TempRT != null)
     {
         UnityHelpers.Destroy(m_TempRT);
     }
 }
        /// <summary>
        /// Determines the capture output size and, in the editor, resizes the
        /// Game View to match when a fixed output dimension is requested.
        /// </summary>
        /// <param name="session">The recording session being started.</param>
        public override void BeginRecording(RecordingSession session)
        {
            int screenWidth  = Screen.width;
            int screenHeight = Screen.height;

#if UNITY_EDITOR
            switch (scSettings.m_OutputSize)
            {
            case EImageDimension.Window:
            {
                // Record at whatever size the Game View currently renders at.
                GameViewSize.GetGameRenderSize(out screenWidth, out screenHeight);
                outputWidth  = screenWidth;
                outputHeight = screenHeight;

                if (scSettings.m_ForceEvenSize)
                {
                    // Round up to the next even number (codecs such as x264
                    // reject odd image dimensions).
                    outputWidth  = (outputWidth + 1) & ~1;
                    outputHeight = (outputHeight + 1) & ~1;
                }
                break;
            }

            default:
            {
                // Fixed output height; derive width from the aspect ratio.
                outputHeight = (int)scSettings.m_OutputSize;
                outputWidth  = (int)(outputHeight * AspectRatioHelper.GetRealAR(scSettings.m_AspectRatio));

                if (scSettings.m_ForceEvenSize)
                {
                    outputWidth  = (outputWidth + 1) & ~1;
                    outputHeight = (outputHeight + 1) & ~1;
                }

                // Reuse an existing custom Game View size or register a new one.
                var size = GameViewSize.SetCustomSize(outputWidth, outputHeight) ?? GameViewSize.AddSize(outputWidth, outputHeight);
                if (GameViewSize.m_ModifiedResolutionCount == 0)
                {
                    // First input to change the resolution: remember the
                    // original size so it can be restored afterwards.
                    GameViewSize.BackupCurrentSize();
                }
                else
                {
                    if (size != GameViewSize.currentSize)
                    {
                        // Fixed typo in user-facing message ("Requestion a resultion").
                        Debug.LogError("Requesting a resolution change while a recorder's input has already requested one! Undefined behaviour.");
                    }
                }
                GameViewSize.m_ModifiedResolutionCount++;
                m_ModifiedResolution = true;
                GameViewSize.SelectSize(size);
                break;
            }
            }
#endif
        }
Ejemplo n.º 14
0
        protected internal override void EndRecording(RecordingSession session)
        {
            // Base teardown first, then dispose of the intermediate texture.
            base.EndRecording(session);

            if (workTexture != null)
            {
                UnityHelpers.Destroy(workTexture);
                workTexture = null;
            }

            // The flip decision only makes sense while a session is active.
            NeedToFlipVertically = null;
        }
Ejemplo n.º 15
0
        public override bool BeginRecording(RecordingSession session)
        {
            // Give the base class first refusal.
            if (!base.BeginRecording(session))
            {
                return false;
            }

            // Prepare the destination folder, then start an EXR encoding context.
            m_Settings.m_DestinationPath.CreateDirectory();
            m_ctx = fcAPI.fcExrCreateContext(ref m_Settings.m_ExrEncoderSettings);

            // Implicit bool conversion: false when context creation failed.
            return m_ctx;
        }
Ejemplo n.º 16
0
        public override bool BeginRecording(RecordingSession session)
        {
            // Abort if the base class cannot start recording.
            if (!base.BeginRecording(session))
            {
                return false;
            }

            // Prepare the output folder, then start a PNG encoding context.
            m_Settings.fileNameGenerator.CreateDirectory(session);
            m_ctx = fcAPI.fcPngCreateContext(ref m_Settings.m_PngEncoderSettings);

            // Implicit bool conversion: false when context creation failed.
            return m_ctx;
        }
Ejemplo n.º 17
0
        /// <summary>
        /// Sets up shaders, materials, render targets and supersampling state
        /// for the "beauty shot" capture pass.
        /// </summary>
        /// <param name="session">The recording session being started.</param>
        /// <exception cref="UnityException">
        /// Thrown when the final output size exceeds the render size
        /// (upscaling is not supported).
        /// </exception>
        public override void BeginRecording(RecordingSession session)
        {
            superShader      = Shader.Find("Hidden/Volund/BS4SuperShader");
            accumulateShader = Shader.Find("Hidden/BeautyShot/Accumulate");
            normalizeShader  = Shader.Find("Hidden/BeautyShot/Normalize");

            // Below here is effectively 'void Start()' logic, run directly here.
            if (adamSettings.m_FinalSize > adamSettings.m_RenderSize)
            {
                throw new UnityException("Upscaling is not supported! Output dimension must be smaller or equal to render dimension.");
            }

            // Calculate aspect and render/output sizes.
            // Clamp size to 16K, which is the min always supported size in d3d11.
            // Force output to divisible by two as x264 doesn't approve of odd image dimensions.
            var aspect = AspectRatioHelper.GetRealAR(adamSettings.m_AspectRatio);

            m_renderHeight = (int)adamSettings.m_RenderSize;
            m_renderWidth  = Mathf.Min(16 * 1024, Mathf.RoundToInt(m_renderHeight * aspect));
            m_outputHeight = (int)adamSettings.m_FinalSize;
            m_outputWidth  = Mathf.Min(16 * 1024, Mathf.RoundToInt(m_outputHeight * aspect));
            if (adamSettings.m_ForceEvenSize)
            {
                // Round each dimension up to the next even number.
                m_outputWidth  = (m_outputWidth + 1) & ~1;
                m_outputHeight = (m_outputHeight + 1) & ~1;
            }

            // Materials are transient; DontSave keeps Unity from serializing them.
            m_superMaterial           = new Material(superShader);
            m_superMaterial.hideFlags = HideFlags.DontSave;

            m_accumulateMaterial           = new Material(accumulateShader);
            m_accumulateMaterial.hideFlags = HideFlags.DontSave;

            m_normalizeMaterial           = new Material(normalizeShader);
            m_normalizeMaterial.hideFlags = HideFlags.DontSave;

            // HDR render target plus two accumulation buffers at render size.
            m_renderRT          = new RenderTexture(m_renderWidth, m_renderHeight, 24, RenderTextureFormat.DefaultHDR, RenderTextureReadWrite.Linear);
            m_renderRT.wrapMode = TextureWrapMode.Clamp;
            for (int i = 0; i < 2; ++i)
            {
                m_accumulateRTs[i]          = new RenderTexture(m_renderWidth, m_renderHeight, 0, RenderTextureFormat.DefaultHDR, RenderTextureReadWrite.Linear);
                m_accumulateRTs[i].wrapMode = TextureWrapMode.Clamp;
                m_accumulateRTs[i].Create();
            }
            // Final output target at the (possibly downscaled) output size.
            var rt = new RenderTexture(m_outputWidth, m_outputHeight, 0, RenderTextureFormat.Default, RenderTextureReadWrite.sRGB);

            rt.Create();
            outputRT  = rt;
            // One sample offset per supersample.
            m_samples = new Vector2[(int)adamSettings.m_SuperSampling];
            GenerateSamplesMSAA(m_samples, adamSettings.m_SuperSampling);

            m_hookedCameras = new List <HookedCamera>();
        }
Ejemplo n.º 18
0
        private Material _matSRGBConversion = null; // a shader for doing linear to sRGB conversion

        /// <summary>
        /// Adopts the configured RenderTexture as this input's output and works
        /// out whether a vertical flip and/or a color-space conversion is
        /// required before frames are handed to the encoder.
        /// </summary>
        protected internal override void BeginRecording(RecordingSession session)
        {
            if (cbSettings.renderTexture == null)
            {
                return; // error will have been triggered in RenderTextureInputSettings.CheckForErrors()
            }
            OutputHeight        = cbSettings.OutputHeight;
            OutputWidth         = cbSettings.OutputWidth;
            OutputRenderTexture = cbSettings.renderTexture;

            // Only flip here when the encoder won't already do it for us.
            var encoderAlreadyFlips = session.settings.EncoderAlreadyFlips();

            NeedToFlipVertically = UnityHelpers.NeedToActuallyFlip(cbSettings.FlipFinalOutput, this, encoderAlreadyFlips);

            // Output color space defaults to sRGB; image recorders may override it.
            var requiredColorSpace = ImageRecorderSettings.ColorSpaceType.sRGB_sRGB;

            if (session.settings is ImageRecorderSettings)
            {
                requiredColorSpace = ((ImageRecorderSettings)session.settings).OutputColorSpaceComputed;
            }
            else if (session.settings is MovieRecorderSettings)
            {
                requiredColorSpace = ImageRecorderSettings.ColorSpaceType.sRGB_sRGB;                               // always sRGB
            }
            var renderTextureColorSpace = UnityHelpers.GetColorSpaceType(cbSettings.renderTexture.graphicsFormat); // the color space of the RenderTexture
            var projectColorSpace       = PlayerSettings.colorSpace;

            // Log warnings in unsupported contexts
            if (projectColorSpace == ColorSpace.Gamma)
            {
                if (requiredColorSpace == ImageRecorderSettings.ColorSpaceType.Unclamped_linear_sRGB)
                {
                    Debug.LogWarning($"Gamma color space does not support linear output format. This operation is not supported.");
                }

                if (renderTextureColorSpace != ImageRecorderSettings.ColorSpaceType.Unclamped_linear_sRGB)
                {
                    Debug.LogWarning($"Gamma color space does not support non-linear textures. This operation is not supported.");
                }
            }

            // We convert from linear to sRGB if the project is linear + the source RT is linear + the output color space is sRGB
            m_needToConvertLinearToSRGB = (projectColorSpace == ColorSpace.Linear && renderTextureColorSpace == ImageRecorderSettings.ColorSpaceType.Unclamped_linear_sRGB) && requiredColorSpace == ImageRecorderSettings.ColorSpaceType.sRGB_sRGB;

            // We convert from sRGB to linear if the RT is sRGB (gamma) and the output color space is linear (e.g., linear EXR)
            m_needToConvertSRGBToLinear = renderTextureColorSpace == ImageRecorderSettings.ColorSpaceType.sRGB_sRGB && requiredColorSpace == ImageRecorderSettings.ColorSpaceType.Unclamped_linear_sRGB;

            // A scratch texture is only needed when the frame must be modified
            // (flipped or converted) before submission.
            if (NeedToFlipVertically.Value || m_needToConvertLinearToSRGB || m_needToConvertSRGBToLinear)
            {
                workTexture      = new RenderTexture(OutputRenderTexture);
                workTexture.name = "RenderTextureInput_intermediate";
            }
        }
        public void CanPlaybackFromMultipleRecordingSessions()
        {
            // ARRANGE: two recorded requests, each living in its own session.
            var recordedRequest1 = new RecordedRequest
            {
                Url          = "http://fakeSite.fake/1",
                Method       = "GET",
                ResponseBody = "Response 1"
            };
            var recordedRequest2 = new RecordedRequest
            {
                Url          = "http://fakeSite.fake/2",
                Method       = recordedRequest1.Method,
                ResponseBody = "Response 2"
            };

            var recordingSession1 = new RecordingSession
            {
                RecordedRequests = new List <RecordedRequest> { recordedRequest1 }
            };
            var recordingSession2 = new RecordingSession
            {
                RecordedRequests = new List <RecordedRequest> { recordedRequest2 }
            };

            var requestBuilder = new RecordingSessionInterceptorRequestBuilder(recordingSession1, recordingSession2);
            IWebRequestCreate creator = new HttpWebRequestWrapperInterceptorCreator(requestBuilder);

            var request1 = creator.Create(new Uri(recordedRequest1.Url));
            var request2 = creator.Create(new Uri(recordedRequest2.Url));

            // ACT: play both requests back through the interceptor.
            var response1 = request1.GetResponse();
            var response2 = request2.GetResponse();

            // ASSERT: each request is answered from its own session's recording.
            response1.ShouldNotBeNull();
            response2.ShouldNotBeNull();

            using (var reader = new StreamReader(response1.GetResponseStream()))
            {
                reader.ReadToEnd().ShouldEqual(recordedRequest1.ResponseBody.SerializedStream);
            }

            using (var reader = new StreamReader(response2.GetResponseStream()))
            {
                reader.ReadToEnd().ShouldEqual(recordedRequest2.ResponseBody.SerializedStream);
            }
        }
Ejemplo n.º 20
0
        /// <summary>
        /// Renders the frame's UI through the capture camera (temporarily
        /// re-targeting ScreenSpaceOverlay root canvases), restores the
        /// canvases, then optionally flips the output texture.
        /// </summary>
        public override void NewFrameReady(RecordingSession session)
        {
            if (cbSettings.captureUI)
            {
                // Find canvases; resize the backup array only when the count changes.
                var canvases = UnityObject.FindObjectsOfType <Canvas>();
                if (m_CanvasBackups == null || m_CanvasBackups.Length != canvases.Length)
                {
                    m_CanvasBackups = new CanvasBackup[canvases.Length];
                }

                // Hook each overlay root canvas up to the UI camera, remembering
                // its previous camera so it can be restored after the render.
                for (var i = 0; i < canvases.Length; i++)
                {
                    var canvas = canvases[i];
                    if (canvas.isRootCanvas && canvas.renderMode == RenderMode.ScreenSpaceOverlay)
                    {
                        m_CanvasBackups[i].camera = canvas.worldCamera;
                        m_CanvasBackups[i].canvas = canvas;
                        canvas.renderMode         = RenderMode.ScreenSpaceCamera;
                        canvas.worldCamera        = m_UICamera;
                    }
                    else
                    {
                        // Mark this canvas as null so we can skip it when restoring.
                        // The array might contain invalid data from a previous frame.
                        m_CanvasBackups[i].canvas = null;
                    }
                }

                m_UICamera.Render();

                // Restore the render mode and camera of every canvas we touched.
                for (var i = 0; i < m_CanvasBackups.Length; i++)
                {
                    // Skip canvases that are not root canvases or were not
                    // using ScreenSpaceOverlay as a render mode.
                    if (m_CanvasBackups[i].canvas == null)
                    {
                        continue;
                    }

                    m_CanvasBackups[i].canvas.renderMode  = RenderMode.ScreenSpaceOverlay;
                    m_CanvasBackups[i].canvas.worldCamera = m_CanvasBackups[i].camera;
                }
            }

            if (cbSettings.flipFinalOutput)
            {
                m_VFlipper.Flip(outputRT);
            }
        }
Ejemplo n.º 21
0
        // Clears any AOV requests from the target camera when the session was
        // driven by AOV recorder settings.
        void DisableAOVCapture(RecordingSession session)
        {
            if (session.settings is AOVRecorderSettings)
            {
                var additionalData = TargetCamera.GetComponent <HDAdditionalCameraData>();
                if (additionalData != null)
                {
                    additionalData.SetAOVRequests(null);
                }
            }
        }
Ejemplo n.º 22
0
        /// <summary>
        /// Gathers the audio blocks queued since the last frame into a single
        /// native buffer for the encoder, capping the amount at one second of
        /// audio to avoid stalling the Unity MediaEncoder.
        /// </summary>
        protected internal override void NewFrameReady(RecordingSession session)
        {
            try
            {
                int totalReadBlocks;
                int totalFloats = 0;
                lock (mixBlockQueue)
                {
                    totalReadBlocks = mixBlockQueueSize;
                    for (int i = 0; i < totalReadBlocks; i++)
                    {
                        if (totalFloats / channelCount > 1 * sampleRate)
                        {
                            // The Unity MediaEncoder hangs when we send hundreds of thousands of audio samples.
                            // This can happen if the game is paused (ie. the audio continues to queue, but no new
                            // Updates are happening). Instead, we just cut the audio in those cases.
                            // This is a workaround for the lag spikes we were seeing when recording.
                            // (Message fixed: the threshold above is 1 second, not 2, and
                            // totalFloats is the amount being SENT, not the amount queued.)
                            Debug.Log($"(FmodAudioInput) More than 1 second of audio samples " +
                                      $"queued up since the last submission. Only sending [{totalFloats}] floats. Dropping the rest.");
                            totalReadBlocks = i;
                            break;
                        }

                        totalFloats += mixBlockQueue[i].Length;
                    }
                }

                // Allocate a giant buffer with all of the samples, since the last frame.
                // This is necessary because the Unity audio encoder expects a single native array.
                mMainBuffer = new NativeArray <float>(totalFloats, Allocator.Temp);

                int index = 0;
                for (int i = 0; i < totalReadBlocks; i++)
                {
                    NativeArray <float> .Copy(mixBlockQueue[i], 0, mMainBuffer, index, mixBlockQueue[i].Length);

                    index += mixBlockQueue[i].Length;
                }

                // Sanity check: samples must split evenly across channels.
                Assert.AreEqual(0, totalFloats % channelCount);
                sampleFrames += totalFloats / channelCount;
            }
            finally
            {
                // Reset the list of blocks, so it can be reused.
                lock (mixBlockQueue)
                {
                    mixBlockQueueSize = 0;
                }
            }
        }
Ejemplo n.º 23
0
        RenderTexture m_TempCaptureTextureVFlip; // A temp RenderTexture for vertical flips

        protected internal override void BeginRecording(RecordingSession session)
        {
            // Decide up front whether this input must flip the image itself
            // (the selected encoder may already perform the flip).
            var encoderAlreadyFlips = session.settings.EncoderAlreadyFlips();
            NeedToFlipVertically = UnityHelpers.NeedToActuallyFlip(settings360.FlipFinalOutput, this, encoderAlreadyFlips);

            OutputWidth  = settings360.OutputWidth;
            OutputHeight = settings360.OutputHeight;

            if (NeedToFlipVertically.Value)
            {
                // Scratch texture used while performing the vertical flip.
                m_TempCaptureTextureVFlip = RenderTexture.GetTemporary(OutputWidth, OutputHeight);
            }
        }
        public override bool BeginRecording(RecordingSession session)
        {
            // Base class gets first refusal.
            if (!base.BeginRecording(session))
            {
                return false;
            }

            // Reset per-session state and prepare the output folder.
            _recordingComplete = false;
            _renderedFrames    = new List <Texture2D>();
            _currentFrame      = 0;
            m_Settings.FileNameGenerator.CreateDirectory(session);

            return true;
        }
Ejemplo n.º 25
0
        /// <summary>
        /// Resolves the capture resolution, resizes the Game View when needed,
        /// and (on 2019.1+) prepares the capture/flip render textures.
        /// </summary>
        /// <param name="session">The recording session being started.</param>
        public override void BeginRecording(RecordingSession session)
        {
            outputWidth  = scSettings.outputWidth;
            outputHeight = scSettings.outputHeight;

            int w, h;

            GameViewSize.GetGameRenderSize(out w, out h);
            if (w != outputWidth || h != outputHeight)
            {
                // Switch the Game View to a custom size matching the requested output.
                var size = GameViewSize.SetCustomSize(outputWidth, outputHeight) ?? GameViewSize.AddSize(outputWidth, outputHeight);
                if (GameViewSize.modifiedResolutionCount == 0)
                {
                    // First resolution change: remember the original size so it
                    // can be restored afterwards.
                    GameViewSize.BackupCurrentSize();
                }
                else
                {
                    if (size != GameViewSize.currentSize)
                    {
                        // Fixed typo in user-facing message ("Requestion a resultion").
                        Debug.LogError("Requesting a resolution change while a recorder's input has already requested one! Undefined behaviour.");
                    }
                }
                GameViewSize.modifiedResolutionCount++;
                m_ModifiedResolution = true;
                GameViewSize.SelectSize(size);
            }

#if !UNITY_2019_1_OR_NEWER
            // Before 2019.1, we capture synchronously into a Texture2D, so we don't need to create
            // a RenderTexture that is used for reading asynchronously.
            return;
#else
            m_CaptureTexture = new RenderTexture(outputWidth, outputHeight, 0, RenderTextureFormat.ARGB32)
            {
                wrapMode = TextureWrapMode.Repeat
            };
            m_CaptureTexture.Create();

            if (scSettings.flipFinalOutput)
            {
                // Route the output through a flipper's work texture.
                m_VFlipper = new TextureFlipper(false);
                m_VFlipper.Init(m_CaptureTexture);
                outputRT = m_VFlipper.workTexture;
            }
            else
            {
                outputRT = m_CaptureTexture;
            }
#endif
        }
        public void BuilderSetsWebExceptionWithResponse()
        {
            // ARRANGE: a recorded request whose playback should raise a
            // WebException carrying a full fake HTTP error response.
            var recordedRequest = new RecordedRequest
            {
                Url               = "http://fakeSite.fake",
                Method            = "GET",
                ResponseException = new RecordedResponseException
                {
                    Message            = "Test Exception Message",
                    Type               = typeof(WebException),
                    WebExceptionStatus = WebExceptionStatus.ConnectionClosed
                },
                ResponseBody    = "Fake Error Response",
                ResponseHeaders = new RecordedHeaders {
                    { "header1", new [] { "value1" } }
                },
                ResponseStatusCode = HttpStatusCode.InternalServerError
            };

            var recordingSession = new RecordingSession {
                RecordedRequests = new List <RecordedRequest> { recordedRequest }
            };
            var requestBuilder = new RecordingSessionInterceptorRequestBuilder(recordingSession);
            IWebRequestCreate creator = new HttpWebRequestWrapperInterceptorCreator(requestBuilder);
            var request = creator.Create(new Uri(recordedRequest.Url));

            // ACT: trigger playback and capture the resulting exception.
            var thrown               = Record.Exception(() => request.GetResponse());
            var webException         = thrown as WebException;
            var webExceptionResponse = webException.Response as HttpWebResponse;

            // ASSERT: the exception and its attached response mirror the recording.
            webException.ShouldNotBeNull();
            webException.Message.ShouldEqual(recordedRequest.ResponseException.Message);
            webException.Status.ShouldEqual(recordedRequest.ResponseException.WebExceptionStatus.Value);

            webExceptionResponse.ShouldNotBeNull();
            Assert.Equal(recordedRequest.ResponseHeaders, (RecordedHeaders)webExceptionResponse.Headers);
            webExceptionResponse.StatusCode.ShouldEqual(recordedRequest.ResponseStatusCode);
            webExceptionResponse.ContentLength.ShouldBeGreaterThan(0);

            using (var reader = new StreamReader(webExceptionResponse.GetResponseStream()))
            {
                reader.ReadToEnd().ShouldEqual(recordedRequest.ResponseBody.SerializedStream);
            }
        }
Ejemplo n.º 27
0
        /// <summary>
        /// Captures the game view for the current frame and performs any
        /// flipping / format conversion the selected encoder requires.
        /// </summary>
        protected internal override void NewFrameReady(RecordingSession session)
        {
            Profiler.BeginSample("GameViewInput.NewFrameReady");
#if UNITY_2019_1_OR_NEWER
            ScreenCapture.CaptureScreenshotIntoRenderTexture(m_CaptureTexture);
            var  movieRecorderSettings = session.settings as MovieRecorderSettings;
            bool needToFlip            = scSettings.FlipFinalOutput;
            if (movieRecorderSettings != null)
            {
                // NOTE(review): this keeps the flip only when the encoder ALSO
                // flips (&= encoderAlreadyFlips), whereas the pre-2019.1 branch
                // below flips when the encoder does NOT (!PerformsVerticalFlip).
                // One of the two polarities looks inverted — confirm against the
                // encoder API before changing.
                bool encoderAlreadyFlips = movieRecorderSettings.encodersRegistered[movieRecorderSettings.encoderSelected].PerformsVerticalFlip;
                needToFlip &= encoderAlreadyFlips;
            }

            if (needToFlip)
            {
                OutputRenderTexture = m_VFlipper?.Flip(m_CaptureTexture);
            }

            // Force opaque alpha channel
            MakeFullyOpaque(OutputRenderTexture);
#else
            // Older editors: synchronous capture, then flip/convert on the CPU.
            ReadbackTexture = ScreenCapture.CaptureScreenshotAsTexture();
            var movieRecorderSettings = session.settings as MovieRecorderSettings;
            if (movieRecorderSettings != null)
            {
                var currEncoder    = movieRecorderSettings.encodersRegistered[movieRecorderSettings.encoderSelected];
                var requiredFormat = currEncoder.GetTextureFormat(movieRecorderSettings);
                var isGameView     = movieRecorderSettings.ImageInputSettings is GameViewInputSettings;
                if (!currEncoder.PerformsVerticalFlip)
                {
                    ReadbackTexture = UnityHelpers.FlipTextureVertically(ReadbackTexture, movieRecorderSettings.CaptureAlpha);
                }
                if (requiredFormat != ReadbackTexture.format)
                {
                    // Only RGBA32 -> RGB24 is supported; anything else is a hard error.
                    if (requiredFormat == TextureFormat.RGB24 && ReadbackTexture.format == TextureFormat.RGBA32)
                    {
                        ReadbackTexture = UnityHelpers.RGBA32_to_RGB24(ReadbackTexture);
                    }
                    else
                    {
                        throw new Exception($"Unexpected conversion requested: from {ReadbackTexture.format} to {requiredFormat}.");
                    }
                }
            }

            // Force opaque alpha channel
            MakeFullyOpaque(ReadbackTexture);
#endif
            Profiler.EndSample();
        }
Ejemplo n.º 28
0
        public override void OnGraphStop(Playable playable)
        {
            // Nothing to tear down unless a recording is actually in flight.
            if (session == null || !session.recording)
            {
                return;
            }

            // Finalize and release the session before notifying listeners.
            session.EndRecording();
            session.Dispose();
            session = null;

            OnEnd?.Invoke();
        }
Ejemplo n.º 29
0
        /// <summary>
        /// Starts a recording session: defers to the base implementation,
        /// then ensures the destination directory exists.
        /// </summary>
        /// <param name="session">The recording session being started.</param>
        /// <returns>False when the base class rejects the session; true otherwise.</returns>
        public override bool BeginRecording(RecordingSession session)
        {
            if (!base.BeginRecording(session))
            {
                return false;
            }

            // Directory.CreateDirectory is a no-op when the directory already
            // exists, so the previous Directory.Exists guard was redundant.
            Directory.CreateDirectory(m_Settings.m_DestinationPath);

            return true;
        }
        /// <summary>
        /// Starts an MP4 recording session after validating the requested
        /// output resolution against the format's 4096x2160 limit.
        /// </summary>
        /// <param name="session">The recording session being started.</param>
        /// <returns>True when recording can proceed; false otherwise.</returns>
        public override bool BeginRecording(RecordingSession session)
        {
            if (!base.BeginRecording(session)) { return false; }

            m_Settings.fileNameGenerator.CreateDirectory(session);

            var input = (BaseRenderTextureInput)m_Inputs[0];
            if (input.outputWidth > 4096 || input.outputHeight > 2160)
            {
                // Fixed: previously this only logged the error and still
                // returned true, allowing an unsupported recording to proceed.
                Debug.LogError("Mp4 format does not support requested resolution.");
                return false;
            }

            return true;
        }
Ejemplo n.º 31
0
        // Hooks a custom read DSP onto the tail of FMOD's master channel group
        // so the recorder can intercept the mixed audio stream, then records
        // the channel count and sample rate the Unity audio encoder needs.
        // NOTE(review): assumes DspReadCallback and CheckError are members of
        // this type — confirm against the full class.
        protected internal override void BeginRecording(RecordingSession session)
        {
            // FMOD DSP names are fixed-size char buffers; pad/truncate to 32.
            var dspName = "RecordSessionVideo(Audio)".ToCharArray();

            Array.Resize(ref dspName, 32);
            // Keep a reference to the delegate in a field so the GC cannot
            // collect it while native FMOD still holds the callback pointer.
            dspCallback = DspReadCallback;
            var dspDescription = new DSP_DESCRIPTION
            {
                version          = 0x00010000,
                name             = dspName,
                numinputbuffers  = 1,
                numoutputbuffers = 1,
                read             = dspCallback,
                numparameters    = 0
            };

            // Query the master group's current channel format so the new DSP
            // can be configured to match it exactly.
            FMOD.System system = RuntimeManager.CoreSystem;
            CheckError(system.getMasterChannelGroup(out ChannelGroup masterGroup));
            CheckError(masterGroup.getDSP(CHANNELCONTROL_DSP_INDEX.TAIL, out DSP masterDspTail));
            CheckError(masterDspTail.getChannelFormat(out CHANNELMASK channelMask, out int numChannels,
                                                      out SPEAKERMODE sourceSpeakerMode));

            if (RecorderOptions.VerboseMode)
            {
                Debug.Log(
                    $"(UnityRecorder) Listening to FMOD Audio. Setting DSP to [{channelMask}] [{numChannels}] [{sourceSpeakerMode}]");
            }

            // Create a new DSP with the format of the existing master group.
            CheckError(system.createDSP(ref dspDescription, out dsp));
            CheckError(dsp.setChannelFormat(channelMask, numChannels, sourceSpeakerMode));
            CheckError(masterGroup.addDSP(CHANNELCONTROL_DSP_INDEX.TAIL, dsp));

            // Fill in some basic information for the Unity audio encoder.
            mChannelCount = (ushort)numChannels;
            CheckError(system.getDriver(out int driverId));
            CheckError(system.getDriverInfo(driverId, out Guid _, out int systemRate, out SPEAKERMODE _, out int _));
            mSampleRate = systemRate;

            if (RecorderOptions.VerboseMode)
            {
                Debug.Log($"FmodAudioInput.BeginRecording for capture frame rate {Time.captureFramerate}");
            }

            // Keep Unity's own audio rendering running when the user asked to
            // preserve audio in the captured output.
            if (audioSettings.PreserveAudio)
            {
                AudioRenderer.Start();
            }
        }
        public void BuilderAlwaysSetsWebExceptionResponseWhenStatusIsProtocolError()
        {
            // ARRANGE -- a recorded exception with ProtocolError status must
            // force the builder to attach an HttpWebResponse to the thrown
            // WebException, even though no response body was recorded.
            var recorded = new RecordedRequest
            {
                Url = "http://fakeSite.fake",
                Method = "GET",
                ResponseException = new RecordedResponseException
                {
                    Message = "Test Exception Message",
                    Type = typeof(WebException),
                    WebExceptionStatus = WebExceptionStatus.ProtocolError
                },
                ResponseHeaders = new RecordedHeaders
                {
                    { "header1", new[] { "value1" } }
                },
                ResponseStatusCode = HttpStatusCode.Unauthorized
                //intentionally leave ResponseBody null
            };

            var session = new RecordingSession
            {
                RecordedRequests = new List<RecordedRequest> { recorded }
            };

            var builder = new RecordingSessionInterceptorRequestBuilder(session);
            IWebRequestCreate interceptedCreator = new HttpWebRequestWrapperInterceptorCreator(builder);
            var request = interceptedCreator.Create(new Uri(recorded.Url));

            // ACT
            var thrown = Record.Exception(() => request.GetResponse()) as WebException;
            var protocolResponse = thrown.Response as HttpWebResponse;

            // ASSERT
            thrown.ShouldNotBeNull();
            thrown.Message.ShouldEqual(recorded.ResponseException.Message);
            thrown.Status.ShouldEqual(recorded.ResponseException.WebExceptionStatus.Value);

            protocolResponse.ShouldNotBeNull();
            Assert.Equal(recorded.ResponseHeaders, (RecordedHeaders)protocolResponse.Headers);
            protocolResponse.StatusCode.ShouldEqual(recorded.ResponseStatusCode);
            // no response content in recordedResponse, so content length should be 0
            protocolResponse.ContentLength.ShouldEqual(0);
        }
Ejemplo n.º 33
0
        public override void EndRecording(RecordingSession session)
        {
            // Release the shared sample buffer, if one was ever allocated.
            s_BufferManager?.Dispose();
            s_BufferManager = null;

            s_Handler = null;

            // Stop Unity's audio capture only when it was requested for this run.
            if (audioSettings.preserveAudio)
            {
                AudioRenderer.Stop();
            }
        }
Ejemplo n.º 34
0
        public override void OnBehaviourPause(Playable playable, FrameData info)
        {
            // Without an active session there is nothing to update — note the
            // play state is deliberately left untouched in that case.
            if (session is null)
            {
                return;
            }

            // A recording interrupted while actually playing is torn down here.
            if (m_PlayState == PlayState.Playing && session.isRecording)
            {
                session.Dispose();
                session = null;
            }

            m_PlayState = PlayState.Paused;
        }
Ejemplo n.º 35
0
        /// <summary>
        /// Renders the target camera and reads the result from the output
        /// render texture back into CPU-side <c>readbackTexture</c>. Only
        /// acts when the session is driven by AOV recorder settings.
        /// </summary>
        /// <param name="session">The active recording session.</param>
        void RenderAndReadbackAOVCapture(RecordingSession session)
        {
            // Fixed: the previous version fetched RenderPipelineManager.currentPipeline
            // into a local that was never used; the read has been removed.
            // The settings cast was only used as a null check, so an `is`
            // pattern expresses the intent directly.
            if (session.settings is AOVRecorderSettings)
            {
                targetCamera.Render();

                // Bind the capture target, pull pixels into CPU memory, then unbind.
                RenderTexture.active = outputRT;
                readbackTexture.ReadPixels(new Rect(0, 0, outputWidth, outputHeight), 0, 0, false);
                readbackTexture.Apply();
                RenderTexture.active = null;
            }
        }
Ejemplo n.º 36
0
		public void CanStop_WhilePlaying_True()
		{
			using (var playbackSession = new RecordingSession(1000))
			{
				var recorder = playbackSession.Recorder;

				// Start playback and give it a moment to actually begin.
				recorder.Play();
				Thread.Sleep(100);

				// While audio is playing, stopping must be available.
				Assert.IsTrue(recorder.CanStop);
			}
		}
Ejemplo n.º 37
0
		public void CanStop_WhileRecording_True()
		{
			using (var recordingSession = new RecordingSession())
			{
				// A freshly created session must already be stoppable.
				Assert.IsTrue(recordingSession.Recorder.CanStop);
			}
		}
Ejemplo n.º 38
0
        /// <summary>
        /// Sets up the recorder: captures the current synchronization
        /// context, creates the MIDI clock, recording session and sequencer,
        /// and makes sure the MIDI input device is initialised.
        /// </summary>
        public void Init()
        {
            m_context = SynchronizationContext.Current;
            IsRecording = false;

            Clock = new MidiInternalClock();
            m_Session = new RecordingSession(Clock);
            Sequencer = new Sequencer();

            // Lazily bring up the MIDI input if it has not been started yet.
            if (!_midiInput.IsInitialised)
            {
                _midiInput.Initialise();
            }
        }
Ejemplo n.º 39
0
        /// <summary>
        /// Sets up the recorder: creates the MIDI clock, recording session
        /// and sequencer, wires the sequencer callbacks, and enables
        /// recording once the MIDI input device is available.
        /// </summary>
        private void Init()
        {
            IsRecording = false;

            Clock = new MidiInternalClock();
            m_Session = new RecordingSession(Clock);
            Sequencer = new Sequencer();

            // Forward playback events to the local handlers.
            Sequencer.ChannelMessagePlayed += HandlePlayerMessage;
            Sequencer.PlayingCompleted += HandlePlayingCompleted;

            // Lazily bring up the MIDI input, then allow recording if ready.
            if (!_midiInput.IsInitialised)
            {
                _midiInput.Initialise();
            }

            if (_midiInput.IsInitialised)
            {
                CanRecord = true;
            }
        }