Example #1
        private static void SetupDebugLogCallback()
        {
            LogCallbackDelegate callbackDelegate = new LogCallbackDelegate(LogCallback);

            System.IntPtr func = Marshal.GetFunctionPointerForDelegate(callbackDelegate);
            NativePlugin.SetLogFunction(func);
        }
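        // A minimal sketch of the delegate type and log callback that SetupDebugLogCallback()
        // assumes; the exact signature is an assumption, not the plugin's real declaration.
        // The delegate instance must be kept alive (e.g. in a static field) for as long as
        // native code holds the function pointer, otherwise the GC may collect it.
        private delegate void LogCallbackDelegate(string message);
        private static LogCallbackDelegate _logCallbackKeepAlive;

        [AOT.MonoPInvokeCallback(typeof(LogCallbackDelegate))]   // required on IL2CPP/AOT platforms
        private static void LogCallback(string message)
        {
            Debug.Log("[NativePlugin] " + message);
        }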
        public bool IsFileReady()
        {
            bool result = true;

            if (_handle >= 0)
            {
                result = NativePlugin.IsFileWritingComplete(_handle);
                if (result)
                {
                    if (_videoEncodingHints != null)
                    {
                        result = StartPostProcess();
                        _videoEncodingHints = null;
                    }
                    if (_postProcessEvent != null)
                    {
                        result = _postProcessEvent.WaitOne(1);
                    }
                    if (result)
                    {
                        Dispose();
                    }
                }
            }
            return result;
        }
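        // Hypothetical usage sketch: IsFileReady() is non-blocking (note the 1ms WaitOne above),
        // so a caller is expected to poll it, e.g. from a coroutine, until writing completes.
        private System.Collections.IEnumerator WaitUntilFileReady()
        {
            while (!IsFileReady())
            {
                yield return null;    // check again next frame
            }
            // File writing and any post-processing are complete; Dispose() has already run above.
        }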
        private void CopyRenderTargetToTexture()
        {
#if false
            // RJT TODO: If using D3D12 we need to read the current 'Display.main.colorBuffer', pass it down
            // to native and extract the texture using 'IUnityGraphicsD3D12v5::TextureFromRenderBuffer()'
            // - Although, as is, this doesn't work: https://forum.unity.com/threads/direct3d12-native-plugin-render-to-screen.733025/
            if (_targetNativePointer == System.IntPtr.Zero)
            {
                _targetNativePointer = Display.main.colorBuffer.GetNativeRenderBufferPtr();
                // _targetNativePointer = Graphics.activeColorBuffer.GetNativeRenderBufferPtr();
                NativePlugin.SetColourBuffer(_handle, _targetNativePointer);
            }
#endif
#if true
            if ((_targetNativePointer == System.IntPtr.Zero) ||
                (_resolveTexture && ((_resolveTexture.width != Screen.width) || (_resolveTexture.height != Screen.height)))
                )
            {
                FreeRenderResources();

                // Create RT matching screen extents
                _resolveTexture = RenderTexture.GetTemporary(Screen.width, Screen.height, 0, RenderTextureFormat.ARGB32, RenderTextureReadWrite.sRGB, 1);
                _resolveTexture.Create();
                _targetNativePointer = _resolveTexture.GetNativeTexturePtr();
                NativePlugin.SetTexturePointer(_handle, _targetNativePointer);

                // Create command buffer
                _commandBuffer      = new CommandBuffer();
                _commandBuffer.name = "AVPro Movie Capture copy";
                _commandBuffer.Blit(BuiltinRenderTextureType.CurrentActive, _resolveTexture);
            }
#endif

            Graphics.ExecuteCommandBuffer(_commandBuffer);
        }
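        // A minimal sketch of the FreeRenderResources() counterpart assumed above: it releases
        // the temporary resolve texture and command buffer, and clears the cached native pointer
        // so that CopyRenderTargetToTexture() recreates them on the next call. Details assumed.
        private void FreeRenderResources()
        {
            _targetNativePointer = System.IntPtr.Zero;
            if (_resolveTexture != null)
            {
                RenderTexture.ReleaseTemporary(_resolveTexture);
                _resolveTexture = null;
            }
            if (_commandBuffer != null)
            {
                _commandBuffer.Release();
                _commandBuffer = null;
            }
        }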
Example #4
        private void Capture()
        {
            TickFrameTimer();

            AccumulateMotionBlur();

            if (_capturing && !_paused)
            {
                if (_cubeTarget != null && _camera != null)
                {
                    bool canGrab = true;

                    if (IsUsingMotionBlur())
                    {
                        // TODO: fix motion blur
                        //this._motionBlur.RenderImage()
                        // If the motion blur is still accumulating, don't grab this frame
                        canGrab = _motionBlur.IsFrameAccumulated;
                    }

                    if (canGrab && CanOutputFrame())
                    {
                        EncodeUnityAudio();

                        RenderTexture finalTexture = _finalTarget;
                        if (!IsUsingMotionBlur())
                        {
                            UpdateTexture();
                        }
                        else
                        {
                            finalTexture = _motionBlur.FinalTexture;
                        }

                        if (_targetNativePointer == System.IntPtr.Zero || _supportTextureRecreate)
                        {
                            // NOTE: This line is needed if captures must survive alt-tab events or window resizes, where
                            // the GPU resources are recreated. It is very expensive though, as it forces a sync with the rendering thread.
                            _targetNativePointer = finalTexture.GetNativeTexturePtr();
                        }

                        NativePlugin.SetTexturePointer(_handle, _targetNativePointer);

                        RenderThreadEvent(NativePlugin.PluginEvent.CaptureFrameBuffer);

                        // ADG NOTE: GL.InvalidateState() causes screen flickering under D3D12, even when no rendering is done at the native level.
                        // It also appears to set GL.sRGBWrite to false, which causes screen darkening in Linear colour space mode.
                        if (SystemInfo.graphicsDeviceType != GraphicsDeviceType.Direct3D12)
                        {
                            GL.InvalidateState();
                        }

                        UpdateFPS();
                    }
                }
            }

            RenormTimer();
        }
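        // Sketch of the RenderThreadEvent() helper used above, based on the event encoding
        // visible in the older OnRenderImage path (Example #5): the plugin ID, event type and
        // recorder handle are packed into a single event id issued on the render thread.
        // The exact implementation is an assumption.
        private void RenderThreadEvent(NativePlugin.PluginEvent eventType)
        {
#if AVPRO_MOVIECAPTURE_GLISSUEEVENT_52
            GL.IssuePluginEvent(NativePlugin.GetRenderEventFunc(), NativePlugin.PluginID | (int)eventType | _handle);
#else
            GL.IssuePluginEvent(NativePlugin.PluginID | (int)eventType | _handle);
#endif
        }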
Example #5
        // NOTE: This is old code based on OnRenderImage...may be revived at some point
        private void OnRenderImage(RenderTexture source, RenderTexture dest)
        {
            if (_capturing && !_paused)
            {
#if true
                while (_handle >= 0 && !NativePlugin.IsNewFrameDue(_handle))
                {
                    System.Threading.Thread.Sleep(1);
                }
                if (_handle >= 0)
                {
                    if (_audioCapture && _audioDeviceIndex < 0 && !_noAudio && _isRealTime)
                    {
                        int           audioDataLength = 0;
                        System.IntPtr audioDataPtr    = _audioCapture.ReadData(out audioDataLength);
                        if (audioDataLength > 0)
                        {
                            NativePlugin.EncodeAudio(_handle, audioDataPtr, (uint)audioDataLength);
                        }
                    }

                    // In Direct3D the RT can be flipped vertically

                    /*
                    if (source.texelSize.y < 0)
                    {
                    }
                    */

                    Graphics.Blit(source, dest);

                    _lastSource = source;
                    _lastDest   = dest;

                    if (dest != _originalTarget)
                    {
                        Graphics.Blit(dest, _originalTarget);
                    }

#if AVPRO_MOVIECAPTURE_GLISSUEEVENT_52
                    GL.IssuePluginEvent(NativePlugin.GetRenderEventFunc(), NativePlugin.PluginID | (int)NativePlugin.PluginEvent.CaptureFrameBuffer | _handle);
#else
                    GL.IssuePluginEvent(NativePlugin.PluginID | (int)NativePlugin.PluginEvent.CaptureFrameBuffer | _handle);
#endif
                    GL.InvalidateState();

                    UpdateFPS();

                    return;
                }
#endif
            }

            // Pass-through
            Graphics.Blit(source, dest);

            _lastSource = source;
            _lastDest   = dest;
        }
Example #6
 static NativePlugin()
 {
#if UNITY_EDITOR_OSX
     SetupDebugLogCallback();
#endif
#if !UNITY_EDITOR_OSX && UNITY_IOS
     NativePlugin.MCUnityRegisterPlugin();
#endif
 }
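 // Sketch of the P/Invoke declaration the iOS branch above assumes. On iOS, native plugins
 // are statically linked, so "__Internal" is used as the library name. The exact attributes
 // are an assumption, not the plugin's actual declaration:
 //
 //     [DllImport("__Internal")]
 //     public static extern void MCUnityRegisterPlugin();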
Example #7
 private static void CheckInit()
 {
     if (!_isEnumerated)
     {
         if (NativePlugin.Init())
         {
             EnumerateCodecs();
         }
     }
 }
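 // Hypothetical call site for the lazy-init guard above: public accessors can call CheckInit()
 // before touching the codec lists, e.g.
 //
 //     public static CodecList VideoCodecs { get { CheckInit(); return _videoCodecs; } }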
Example #8
        public override void UnprepareCapture()
        {
            NativePlugin.SetTexturePointer(_handle, System.IntPtr.Zero);

            if (_target != null)
            {
                _target.DiscardContents();
            }

            base.UnprepareCapture();
        }
        void UpdateCoefficients()
        {
            Ambisonic.PolarCoord p = new Ambisonic.PolarCoord();
            p.FromCart(_position);

            lock (this)
            {
                float[] normaliseWeights = Ambisonic.GetNormalisationWeights(_normalisation);
                NativePlugin.UpdateAmbisonicWeights(_sourceInstance, p.azimuth, p.elevation, _order, _channelOrder, normaliseWeights);
            }
        }
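        // Sketch of the Cartesian-to-polar conversion that Ambisonic.PolarCoord.FromCart() is
        // assumed to perform (names and conventions here are assumptions, not the real API):
        // azimuth is the horizontal angle around the up axis and elevation the angle above the
        // horizontal plane, both in radians.
        private static void CartToPolar(Vector3 v, out float azimuth, out float elevation)
        {
            azimuth = Mathf.Atan2(v.x, v.z);
            float horizontalLength = Mathf.Sqrt(v.x * v.x + v.z * v.z);
            elevation = Mathf.Atan2(v.y, horizontalLength);
        }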
Example #10
        public override void UpdateFrame()
        {
            TickFrameTimer();

            AccumulateMotionBlur();

            if (_capturing && !_paused)
            {
                if (_settings.camera != null && _handle >= 0)
                {
                    bool canGrab = true;

                    if (IsUsingMotionBlur())
                    {
                        // If the motion blur is still accumulating, don't grab this frame
                        canGrab = _motionBlur.IsFrameAccumulated;
                    }

                    if (canGrab && CanOutputFrame())
                    {
                        // Frame to encode either comes from rendering, or motion blur accumulation
                        RenderTexture finalTexture = null;
                        if (!IsUsingMotionBlur())
                        {
                            RenderFrame();
                            finalTexture = _final;
                        }
                        else
                        {
                            finalTexture = _motionBlur.FinalTexture;
                        }

                        if (_targetNativePointer == System.IntPtr.Zero || _supportTextureRecreate)
                        {
                            // NOTE: This line is needed if captures must survive alt-tab events or window resizes, where
                            // the GPU resources are recreated. It is very expensive though, as it forces a sync with the rendering thread.
                            _targetNativePointer = finalTexture.GetNativeTexturePtr();
                        }

                        NativePlugin.SetTexturePointer(_handle, _targetNativePointer);

                        RenderThreadEvent(NativePlugin.PluginEvent.CaptureFrameBuffer);
                        GL.InvalidateState();

                        UpdateFPS();
                    }
                }
            }

            base.UpdateFrame();

            RenormTimer();
        }
        void OnAudioFilterRead(float[] samples, int channelCount)
        {
            lock (this)
            {
                int coeffCount = Ambisonic.GetCoeffCount(_order);
                if (_sink != null && coeffCount > 0)
                {
                    int samplesOffset = 0;
                    // While there are samples left to process
                    while (samplesOffset < samples.Length)
                    {
                        // If the pending buffer is full, move it to the full list
                        if (_activeSamples != null && _activeSamples.Length == _activeSampleIndex)
                        {
                            _fullBuffers.Enqueue(_activeSamples);
                            _activeSamples     = null;
                            _activeSampleIndex = 0;
                        }
                        // Assign a new pending buffer from the empty pool
                        if (_activeSamples == null && _emptyBuffers.Count > 0)
                        {
                            _activeSamples = _emptyBuffers.Dequeue();
                        }
                        if (_activeSamples == null)
                        {
                            // Remaining samples are lost!
                            break;
                        }
                        int remainingFrameCount  = (samples.Length - samplesOffset) / channelCount;
                        int generatedSampleCount = remainingFrameCount * coeffCount;
                        int remainingSampleSpace = (_activeSamples.Length - _activeSampleIndex);

                        int samplesToProcess = Mathf.Min(remainingSampleSpace, generatedSampleCount);
                        // TODO: should we specify Floor/Ceil rounding behaviour?
                        int framesToProcess = samplesToProcess / coeffCount;
                        generatedSampleCount = framesToProcess * coeffCount;

                        if (framesToProcess > 0)
                        {
                            NativePlugin.EncodeMonoToAmbisonic(_sourceInstance, samples, samplesOffset, framesToProcess, channelCount, _activeSamples, _activeSampleIndex, _activeSamples.Length, _order);
                            _activeSampleIndex += generatedSampleCount;
                            samplesOffset      += framesToProcess * channelCount;
                        }
                        else
                        {
                            Debug.Log(string.Format("Ambisonic buffer stall: coeffCount={0}, framesToProcess={1}, remainingSampleSpace={2}, samplesOffset={3}/{4}", coeffCount, framesToProcess, remainingSampleSpace, samplesOffset, samples.Length));
                            break;
                        }
                    }
                }
            }
        }
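        // Hypothetical consumer of the buffer pool used above: OnAudioFilterRead() fills arrays
        // taken from _emptyBuffers and enqueues them on _fullBuffers; a sink would drain
        // _fullBuffers under the same lock and recycle the arrays to avoid allocations on the
        // audio thread. (The sink hand-off itself is elided here.)
        private void FlushFullBuffers()
        {
            lock (this)
            {
                while (_fullBuffers.Count > 0)
                {
                    float[] buffer = _fullBuffers.Dequeue();
                    // ...hand the ambisonic samples in 'buffer' to the sink/encoder here...
                    _emptyBuffers.Enqueue(buffer);
                }
            }
        }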
Example #12
        public override void UnprepareCapture()
        {
            _targetNativePointer = System.IntPtr.Zero;
            NativePlugin.SetTexturePointer(_handle, System.IntPtr.Zero);

            if (_renderTexture != null)
            {
                RenderTexture.ReleaseTemporary(_renderTexture);
                _renderTexture = null;
            }

            base.UnprepareCapture();
        }
Example #13
        private static void EnumerateDevices()
        {
            {
                Device[] audioInputDevices = new Device[NativePlugin.GetAudioInputDeviceCount()];
                for (int i = 0; i < audioInputDevices.Length; i++)
                {
                    audioInputDevices[i] = new Device(DeviceType.AudioInput, i, NativePlugin.GetAudioInputDeviceName(i), NativePlugin.GetAudioInputDeviceMediaApi(i));
                }
                _audioInputDevices = new DeviceList(audioInputDevices);
            }

            _isEnumerated = true;
        }
Example #14
 public void ShowConfigWindow()
 {
     if (_hasConfigWindow)
     {
         if (_codecType == CodecType.Video)
         {
             NativePlugin.ConfigureVideoCodec(_index);
         }
         else if (_codecType == CodecType.Audio)
         {
             NativePlugin.ConfigureAudioCodec(_index);
         }
     }
 }
        public void Dispose()
        {
            if (_handle >= 0)
            {
                NativePlugin.FreeRecorder(_handle);
                _handle = -1;

                // Issue the free resources plugin event
                NativePlugin.RenderThreadEvent(NativePlugin.PluginEvent.FreeResources, -1);
            }

            _videoEncodingHints = null;
            _postProcessEvent   = null;
        }
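        // Note (sketch): Dispose() is idempotent thanks to the _handle guard above, so it is
        // safe to call it both from IsFileReady() when writing completes and again as a safety
        // net, e.g. from a hypothetical OnDestroy():
        //
        //     private void OnDestroy() { Dispose(); }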
        private void OnRenderImage(RenderTexture source, RenderTexture dest)
        {
#if false
            if (_capturing && !_paused)
            {
                while (_handle >= 0 && !NativePlugin.IsNewFrameDue(_handle))
                {
                    System.Threading.Thread.Sleep(1);
                }
                if (_handle >= 0)
                {
                    if (_audioCapture && _audioDeviceIndex < 0 && !_noAudio)
                    {
                        uint bufferLength = (uint)_audioCapture.BufferLength;
                        if (bufferLength > 0)
                        {
                            NativePlugin.EncodeAudio(_handle, _audioCapture.BufferPtr, bufferLength);
                            _audioCapture.FlushBuffer();
                        }
                    }

                    // In Direct3D the RT can be flipped vertically

                    /*
                    if (source.texelSize.y < 0)
                    {
                    }
                    */

                    Graphics.Blit(_cubeTarget, _target, _cubemapToEquirectangularMaterial);

                    RenderThreadEvent(NativePlugin.PluginEvent.CaptureFrameBuffer);
                    GL.InvalidateState();

                    UpdateFPS();
                }
            }
#endif
            // Pass-through

            if (_cubeTarget != null)
            {
                Graphics.Blit(_cubeTarget, dest, _cubemapToEquirectangularMaterial);
            }
            else
            {
                Graphics.Blit(source, dest);
            }
        }
        void OnDisable()
        {
            lock (this)
            {
                if (_sink)
                {
                    _sink.RemoveSource(this);
                }

                if (_sourceInstance != System.IntPtr.Zero)
                {
                    NativePlugin.RemoveAmbisonicSourceInstance(_sourceInstance);
                    _sourceInstance = System.IntPtr.Zero;
                }
            }
        }
        public override void UnprepareCapture()
        {
            if (_handle != -1)
            {
#if false
                NativePlugin.SetColourBuffer(_handle, System.IntPtr.Zero);
#endif
                NativePlugin.SetTexturePointer(_handle, System.IntPtr.Zero);
            }

            FreeRenderResources();

            if (_mouseCursor != null)
            {
                _mouseCursor.enabled = false;
            }

            base.UnprepareCapture();
        }
        void OnEnable()
        {
            AudioSource audioSource = this.GetComponent <AudioSource>();

            if (audioSource && audioSource.clip)
            {
                audioSource.PlayOneShot(audioSource.clip, 0f);
            }

            Debug.Assert(_sourceInstance == System.IntPtr.Zero);
            _sourceInstance = NativePlugin.AddAmbisonicSourceInstance(Ambisonic.MaxCoeffs);

            _position = this.transform.position;
            UpdateCoefficients();
            if (_sink)
            {
                _sink.AddSource(this);
            }
        }
Example #20
        private static void EnumerateCodecs()
        {
            {
                Codec[] videoCodecs = new Codec[NativePlugin.GetVideoCodecCount()];
                for (int i = 0; i < videoCodecs.Length; i++)
                {
                    videoCodecs[i] = new Codec(CodecType.Video, i, NativePlugin.GetVideoCodecName(i), NativePlugin.GetVideoCodecMediaApi(i), NativePlugin.IsConfigureVideoCodecSupported(i));
                }
                _videoCodecs = new CodecList(videoCodecs);
            }
            {
                Codec[] audioCodecs = new Codec[NativePlugin.GetAudioCodecCount()];
                for (int i = 0; i < audioCodecs.Length; i++)
                {
                    audioCodecs[i] = new Codec(CodecType.Audio, i, NativePlugin.GetAudioCodecName(i), NativePlugin.GetAudioCodecMediaApi(i), NativePlugin.IsConfigureAudioCodecSupported(i));
                }
                _audioCodecs = new CodecList(audioCodecs);
            }

            _isEnumerated = true;
        }
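        // Hypothetical helper over the lists built above: look up a codec index by name.
        // (CodecList indexing, Count and Codec.Name are assumptions about the API.)
        private static int FindVideoCodecIndex(string name)
        {
            for (int i = 0; i < _videoCodecs.Count; i++)
            {
                if (_videoCodecs[i].Name == name)
                {
                    return i;
                }
            }
            return -1;    // not found
        }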
        private IEnumerator FinalRenderCapture()
        {
            yield return _waitForEndOfFrame;

            TickFrameTimer();

            bool canGrab = true;

            if (IsUsingMotionBlur())
            {
                // If the motion blur is still accumulating, don't grab this frame
                canGrab = _motionBlur.IsFrameAccumulated;
            }

            if (canGrab && CanOutputFrame())
            {
                // Grab final RenderTexture into texture and encode
                if (IsRecordingUnityAudio())
                {
                    int           audioDataLength = 0;
                    System.IntPtr audioDataPtr    = _audioCapture.ReadData(out audioDataLength);
                    if (audioDataLength > 0)
                    {
                        NativePlugin.EncodeAudio(_handle, audioDataPtr, (uint)audioDataLength);
                    }
                }

                RenderThreadEvent(NativePlugin.PluginEvent.CaptureFrameBuffer);
                GL.InvalidateState();

                UpdateFPS();
            }

            RenormTimer();

            //yield return null;
        }
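        // Sketch of how a coroutine like FinalRenderCapture() is typically driven: restarted
        // every frame so the yield on WaitForEndOfFrame samples the backbuffer after all
        // rendering and UI have finished. (Driving it from LateUpdate() is an assumption.)
        private void LateUpdate()
        {
            if (_capturing && !_paused)
            {
                StartCoroutine(FinalRenderCapture());
            }
        }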
Example #22
        private void CreateGUI()
        {
            _outputType    = new string[3];
            _outputType[0] = "Video File";
            _outputType[1] = "Image Sequence";
            _outputType[2] = "Named Pipe";
            _downScales    = new string[6];
            _downScales[0] = "Original";
            _downScales[1] = "Half";
            _downScales[2] = "Quarter";
            _downScales[3] = "Eighth";
            _downScales[4] = "Sixteenth";
            _downScales[5] = "Custom";
            switch (_movieCapture._downScale)
            {
            default:
            case CaptureBase.DownScale.Original:
                _downScaleIndex = 0;
                break;

            case CaptureBase.DownScale.Half:
                _downScaleIndex = 1;
                break;

            case CaptureBase.DownScale.Quarter:
                _downScaleIndex = 2;
                break;

            case CaptureBase.DownScale.Eighth:
                _downScaleIndex = 3;
                break;

            case CaptureBase.DownScale.Sixteenth:
                _downScaleIndex = 4;
                break;

            case CaptureBase.DownScale.Custom:
                _downScaleIndex = 5;
                break;
            }

            _frameRates     = new string[11];
            _frameRates[0]  = "1";
            _frameRates[1]  = "10";
            _frameRates[2]  = "15";
            _frameRates[3]  = "24";
            _frameRates[4]  = "25";
            _frameRates[5]  = "30";
            _frameRates[6]  = "50";
            _frameRates[7]  = "60";
            _frameRates[8]  = "75";
            _frameRates[9]  = "90";
            _frameRates[10] = "120";
            switch (_movieCapture._frameRate)
            {
            default:
            case CaptureBase.FrameRate.One:
                _frameRateIndex = 0;
                break;

            case CaptureBase.FrameRate.Ten:
                _frameRateIndex = 1;
                break;

            case CaptureBase.FrameRate.Fifteen:
                _frameRateIndex = 2;
                break;

            case CaptureBase.FrameRate.TwentyFour:
                _frameRateIndex = 3;
                break;

            case CaptureBase.FrameRate.TwentyFive:
                _frameRateIndex = 4;
                break;

            case CaptureBase.FrameRate.Thirty:
                _frameRateIndex = 5;
                break;

            case CaptureBase.FrameRate.Fifty:
                _frameRateIndex = 6;
                break;

            case CaptureBase.FrameRate.Sixty:
                _frameRateIndex = 7;
                break;

            case CaptureBase.FrameRate.SeventyFive:
                _frameRateIndex = 8;
                break;

            case CaptureBase.FrameRate.Ninety:
                _frameRateIndex = 9;
                break;

            case CaptureBase.FrameRate.OneTwenty:
                _frameRateIndex = 10;
                break;
            }

            int numVideoCodecs = NativePlugin.GetNumAVIVideoCodecs();

            if (numVideoCodecs > 0)
            {
                _videoCodecNames        = new string[numVideoCodecs + 2];
                _videoCodecNames[0]     = "Uncompressed";
                _videoCodecNames[1]     = "Media Foundation H.264(MP4)";
                _videoCodecConfigurable = new bool[numVideoCodecs];
                for (int i = 0; i < numVideoCodecs; i++)
                {
                    _videoCodecNames[i + 2]    = NativePlugin.GetAVIVideoCodecName(i);
                    _videoCodecConfigurable[i] = NativePlugin.IsConfigureVideoCodecSupported(i);
                }
            }

            int numAudioDevices = NativePlugin.GetNumAVIAudioInputDevices();

            if (numAudioDevices > 0)
            {
                _audioDeviceNames    = new string[numAudioDevices + 1];
                _audioDeviceNames[0] = "Unity";
                for (int i = 0; i < numAudioDevices; i++)
                {
                    _audioDeviceNames[i + 1] = NativePlugin.GetAVIAudioInputDeviceName(i);
                }
            }

            int numAudioCodecs = NativePlugin.GetNumAVIAudioCodecs();

            if (numAudioCodecs > 0)
            {
                _audioCodecNames        = new string[numAudioCodecs + 1];
                _audioCodecNames[0]     = "Uncompressed";
                _audioCodecConfigurable = new bool[numAudioCodecs];
                for (int i = 0; i < numAudioCodecs; i++)
                {
                    _audioCodecNames[i + 1]    = NativePlugin.GetAVIAudioCodecName(i);
                    _audioCodecConfigurable[i] = NativePlugin.IsConfigureAudioCodecSupported(i);
                }
            }

            _movieCapture.SelectCodec(false);
            _movieCapture.SelectAudioCodec(false);
            _movieCapture.SelectAudioDevice(false);
        }
Example #23
        private void MyWindow(int id)
        {
            if (_movieCapture == null)
            {
                GUILayout.Label("CaptureGUI - No CaptureFrom component set");
                return;
            }

            if (_movieCapture.IsCapturing())
            {
                GUI_RecordingStatus();
                return;
            }

            GUILayout.BeginVertical();

            if (_movieCapture != null)
            {
                GUILayout.Label("Resolution:");
                GUILayout.BeginHorizontal();
                _downScaleIndex = GUILayout.SelectionGrid(_downScaleIndex, _downScales, _downScales.Length);
                switch (_downScaleIndex)
                {
                case 0:
                    _movieCapture._downScale = CaptureBase.DownScale.Original;
                    break;

                case 1:
                    _movieCapture._downScale = CaptureBase.DownScale.Half;
                    break;

                case 2:
                    _movieCapture._downScale = CaptureBase.DownScale.Quarter;
                    break;

                case 3:
                    _movieCapture._downScale = CaptureBase.DownScale.Eighth;
                    break;

                case 4:
                    _movieCapture._downScale = CaptureBase.DownScale.Sixteenth;
                    break;

                case 5:
                    _movieCapture._downScale = CaptureBase.DownScale.Custom;
                    break;
                }
                GUILayout.EndHorizontal();

                GUILayout.BeginHorizontal(GUILayout.Width(256));
                if (_movieCapture._downScale == CaptureBase.DownScale.Custom)
                {
                    string maxWidthString = GUILayout.TextField(Mathf.FloorToInt(_movieCapture._maxVideoSize.x).ToString(), 4);
                    int    maxWidth       = 0;
                    if (int.TryParse(maxWidthString, out maxWidth))
                    {
                        _movieCapture._maxVideoSize.x = Mathf.Clamp(maxWidth, 0, 16384);
                    }

                    GUILayout.Label("x", GUILayout.Width(20));

                    string maxHeightString = GUILayout.TextField(Mathf.FloorToInt(_movieCapture._maxVideoSize.y).ToString(), 4);
                    int    maxHeight       = 0;
                    if (int.TryParse(maxHeightString, out maxHeight))
                    {
                        _movieCapture._maxVideoSize.y = Mathf.Clamp(maxHeight, 0, 16384);
                    }
                }
                GUILayout.EndHorizontal();

                GUILayout.BeginHorizontal();
                GUILayout.Label("Frame Rate:");
                _frameRateIndex = GUILayout.SelectionGrid(_frameRateIndex, _frameRates, _frameRates.Length);
                switch (_frameRateIndex)
                {
                case 0:
                    _movieCapture._frameRate = CaptureBase.FrameRate.One;
                    break;

                case 1:
                    _movieCapture._frameRate = CaptureBase.FrameRate.Ten;
                    break;

                case 2:
                    _movieCapture._frameRate = CaptureBase.FrameRate.Fifteen;
                    break;

                case 3:
                    _movieCapture._frameRate = CaptureBase.FrameRate.TwentyFour;
                    break;

                case 4:
                    _movieCapture._frameRate = CaptureBase.FrameRate.TwentyFive;
                    break;

                case 5:
                    _movieCapture._frameRate = CaptureBase.FrameRate.Thirty;
                    break;

                case 6:
                    _movieCapture._frameRate = CaptureBase.FrameRate.Fifty;
                    break;

                case 7:
                    _movieCapture._frameRate = CaptureBase.FrameRate.Sixty;
                    break;

                case 8:
                    _movieCapture._frameRate = CaptureBase.FrameRate.SeventyFive;
                    break;

                case 9:
                    _movieCapture._frameRate = CaptureBase.FrameRate.Ninety;
                    break;

                case 10:
                    _movieCapture._frameRate = CaptureBase.FrameRate.OneTwenty;
                    break;
                }
                GUILayout.EndHorizontal();

                GUILayout.Space(16f);

                GUILayout.BeginHorizontal();
                GUILayout.Label("Output:", GUILayout.ExpandWidth(false));
                _movieCapture._outputType = (CaptureBase.OutputType)GUILayout.SelectionGrid((int)_movieCapture._outputType, _outputType, _outputType.Length);
                GUILayout.EndHorizontal();

                GUILayout.Space(16f);

                _movieCapture._isRealTime = GUILayout.Toggle(_movieCapture._isRealTime, "RealTime");

                GUILayout.Space(16f);


                if (_movieCapture._outputType == CaptureBase.OutputType.VideoFile)
                {
                    // Video Codec
                    GUILayout.BeginHorizontal();
                    if (_shownSection != 0)
                    {
                        if (GUILayout.Button("+", GUILayout.Width(24)))
                        {
                            _shownSection = 0;
                        }
                    }
                    else
                    {
                        if (GUILayout.Button("-", GUILayout.Width(24)))
                        {
                            _shownSection = -1;
                        }
                    }
                    GUILayout.Label("Using Video Codec: " + _movieCapture._codecName);
                    if (_movieCapture._codecIndex >= 0 && _videoCodecConfigurable[_movieCapture._codecIndex])
                    {
                        GUILayout.Space(16f);
                        if (GUILayout.Button("Configure Codec"))
                        {
                            NativePlugin.ConfigureVideoCodec(_movieCapture._codecIndex);
                        }
                    }
                    GUILayout.EndHorizontal();

                    if (_videoCodecNames != null && _shownSection == 0)
                    {
                        GUILayout.Label("Select Video Codec:");
                        _videoPos = GUILayout.BeginScrollView(_videoPos, GUILayout.Height(100));
                        int newCodecIndex = GUILayout.SelectionGrid(-1, _videoCodecNames, 1) - 2;
                        GUILayout.EndScrollView();

                        if (newCodecIndex >= -2)
                        {
                            _movieCapture._codecIndex = newCodecIndex;
                            if (_movieCapture._codecIndex >= 0)
                            {
                                _movieCapture._codecName = _videoCodecNames[_movieCapture._codecIndex + 2];
                                _movieCapture._useMediaFoundationH264 = false;
                            }
                            else
                            {
                                if (_movieCapture._codecIndex == -2)
                                {
                                    _movieCapture._codecName = "Uncompressed";
                                    _movieCapture._useMediaFoundationH264 = false;
                                }
                                else
                                {
                                    if (_movieCapture._codecIndex == -1)
                                    {
                                        _movieCapture._codecName = "Media Foundation H.264(MP4)";
                                        _movieCapture._useMediaFoundationH264 = true;
                                    }
                                }
                            }

                            _shownSection = -1;
                        }

                        GUILayout.Space(16f);
                    }


                    GUI.enabled            = _movieCapture._isRealTime;
                    _movieCapture._noAudio = !GUILayout.Toggle(!_movieCapture._noAudio, "Record Audio");
                    if (GUI.enabled)
                    {
                        GUI.enabled = !_movieCapture._noAudio;
                    }

                    // Audio Device
                    GUILayout.BeginHorizontal();
                    if (_shownSection != 1)
                    {
                        if (GUILayout.Button("+", GUILayout.Width(24)))
                        {
                            _shownSection = 1;
                        }
                    }
                    else
                    {
                        if (GUILayout.Button("-", GUILayout.Width(24)))
                        {
                            _shownSection = -1;
                        }
                    }
                    GUILayout.Label("Using Audio Source: " + _movieCapture._audioDeviceName);
                    GUILayout.EndHorizontal();
                    if (_audioDeviceNames != null && _shownSection == 1)
                    {
                        GUILayout.Label("Select Audio Source:");
                        _audioPos = GUILayout.BeginScrollView(_audioPos, GUILayout.Height(100));
                        int newAudioIndex = GUILayout.SelectionGrid(-1, _audioDeviceNames, 1) - 1;
                        GUILayout.EndScrollView();

                        if (newAudioIndex >= -1)
                        {
                            _movieCapture._audioDeviceIndex = newAudioIndex;
                            if (_movieCapture._audioDeviceIndex >= 0)
                            {
                                _movieCapture._audioDeviceName = _audioDeviceNames[_movieCapture._audioDeviceIndex + 1];
                            }
                            else
                            {
                                _movieCapture._audioDeviceName = "Unity";
                            }

                            _shownSection = -1;
                        }

                        GUILayout.Space(16f);
                    }



                    // Audio Codec
                    GUILayout.BeginHorizontal();
                    if (_shownSection != 2)
                    {
                        if (GUILayout.Button("+", GUILayout.Width(24)))
                        {
                            _shownSection = 2;
                        }
                    }
                    else
                    {
                        if (GUILayout.Button("-", GUILayout.Width(24)))
                        {
                            _shownSection = -1;
                        }
                    }
                    GUILayout.Label("Using Audio Codec: " + _movieCapture._audioCodecName);
                    if (_movieCapture._audioCodecIndex >= 0 && _audioCodecConfigurable[_movieCapture._audioCodecIndex])
                    {
                        GUILayout.Space(16f);
                        if (GUILayout.Button("Configure Codec"))
                        {
                            NativePlugin.ConfigureAudioCodec(_movieCapture._audioCodecIndex);
                        }
                    }
                    GUILayout.EndHorizontal();

                    if (_audioCodecNames != null && _shownSection == 2)
                    {
                        GUILayout.Label("Select Audio Codec:");
                        _audioCodecPos = GUILayout.BeginScrollView(_audioCodecPos, GUILayout.Height(100));
                        int newCodecIndex = GUILayout.SelectionGrid(-1, _audioCodecNames, 1) - 1;
                        GUILayout.EndScrollView();

                        if (newCodecIndex >= -1)
                        {
                            _movieCapture._audioCodecIndex = newCodecIndex;
                            if (_movieCapture._audioCodecIndex >= 0)
                            {
                                _movieCapture._audioCodecName = _audioCodecNames[_movieCapture._audioCodecIndex + 1];
                            }
                            else
                            {
                                _movieCapture._audioCodecName = "Uncompressed";
                            }

                            _shownSection = -1;
                        }

                        GUILayout.Space(16f);
                    }

                    GUI.enabled = true;

                    GUILayout.Space(16f);
                }

                GUILayout.BeginHorizontal();
                GUILayout.Label("Filename Prefix & Ext: ");
                _movieCapture._autoFilenamePrefix = GUILayout.TextField(_movieCapture._autoFilenamePrefix, 64);
                if (_movieCapture._outputType == CaptureBase.OutputType.VideoFile)
                {
                    _movieCapture._autoFilenameExtension = GUILayout.TextField(_movieCapture._autoFilenameExtension, 8);
                }
                else if (_movieCapture._outputType == CaptureBase.OutputType.ImageSequence)
                {
                    GUILayout.TextField("png", 8);
                }
                else if (_movieCapture._outputType == CaptureBase.OutputType.NamedPipe)
                {
                }
                GUILayout.EndHorizontal();
                GUILayout.Space(16f);
                GUILayout.Space(16f);

                if (_whenRecordingAutoHideUI)
                {
                    GUILayout.Label("(Press CTRL-F5 to stop capture)");
                }

                GUILayout.BeginHorizontal();
                if (!_movieCapture.IsCapturing())
                {
                    GUI.color = Color.green;
                    if (GUILayout.Button(_movieCapture._isRealTime ? "Start Capture" : "Start Render"))
                    {
                        StartCapture();
                    }
                    GUI.color = Color.white;
                }
                else
                {
                    /*
                    if (!_movieCapture.IsPaused())
                    {
                        if (GUILayout.Button("Pause Capture"))
                        {
                            PauseCapture();
                        }
                    }
                    else
                    {
                        if (GUILayout.Button("Resume Capture"))
                        {
                            ResumeCapture();
                        }
                    }

                    if (GUILayout.Button("Cancel Capture"))
                    {
                        CancelCapture();
                    }
                    if (GUILayout.Button("Stop Capture"))
                    {
                        StopCapture();
                    }
                    */
                }
                GUILayout.EndHorizontal();

                if (_movieCapture.IsCapturing())
                {
                    if (!string.IsNullOrEmpty(_movieCapture.LastFilePath))
                    {
                        GUILayout.Label("Writing file: '" + System.IO.Path.GetFileName(_movieCapture.LastFilePath) + "'");
                    }
                }
                else
                {
                    if (!string.IsNullOrEmpty(CaptureBase.LastFileSaved))
                    {
                        GUILayout.Space(16f);
                        GUILayout.Label("Last file written: '" + System.IO.Path.GetFileName(CaptureBase.LastFileSaved) + "'");

                        GUILayout.BeginHorizontal();
                        if (GUILayout.Button("Browse"))
                        {
                            Utils.ShowInExplorer(CaptureBase.LastFileSaved);
                        }
                        Color prevColor = GUI.color;
                        GUI.color = Color.cyan;
                        if (GUILayout.Button("View Last Capture"))
                        {
                            Utils.OpenInDefaultApp(CaptureBase.LastFileSaved);
                        }
                        GUI.color = prevColor;

                        GUILayout.EndHorizontal();
                    }
                }
            }

            GUILayout.EndVertical();
        }
Example #24
 static NativePlugin()
 {
     SetupDebugLogCallback();
     NativePlugin.RegisterPlugin();
 }
Example #25
        private void CreateGUI()
        {
            switch (_movieCapture.ResolutionDownScale)
            {
            default:
            case CaptureBase.DownScale.Original:
                _downScaleIndex = 0;
                break;

            case CaptureBase.DownScale.Half:
                _downScaleIndex = 1;
                break;

            case CaptureBase.DownScale.Quarter:
                _downScaleIndex = 2;
                break;

            case CaptureBase.DownScale.Eighth:
                _downScaleIndex = 3;
                break;

            case CaptureBase.DownScale.Sixteenth:
                _downScaleIndex = 4;
                break;

            case CaptureBase.DownScale.Custom:
                _downScaleIndex = 5;
                break;
            }

            if (CodecManager.VideoCodecs.Count > 0)
            {
                _videoCodecNames        = new string[CodecManager.VideoCodecs.Count];
                _videoCodecConfigurable = new bool[CodecManager.VideoCodecs.Count];
                int i = 0;
                foreach (Codec codec in CodecManager.VideoCodecs)
                {
                    _videoCodecNames[i]        = codec.Name;
                    _videoCodecConfigurable[i] = codec.HasConfigwindow;
                    i++;
                }
            }
            if (CodecManager.AudioCodecs.Count > 0)
            {
                _audioCodecNames        = new string[CodecManager.AudioCodecs.Count];
                _audioCodecConfigurable = new bool[CodecManager.AudioCodecs.Count];
                int i = 0;
                foreach (Codec codec in CodecManager.AudioCodecs)
                {
                    _audioCodecNames[i]        = codec.Name;
                    _audioCodecConfigurable[i] = codec.HasConfigwindow;
                    i++;
                }
            }
            int numAudioDevices = NativePlugin.GetAudioInputDeviceCount();

            if (numAudioDevices > 0)
            {
                _audioDeviceNames = new string[numAudioDevices];
                for (int i = 0; i < numAudioDevices; i++)
                {
                    _audioDeviceNames[i] = NativePlugin.GetAudioInputDeviceName(i);
                }
            }

            _movieCapture.SelectVideoCodec();
            _movieCapture.SelectAudioCodec();
            _movieCapture.SelectAudioInputDevice();
        }
        // If we're forcing a resolution or AA change then we have to render the camera again to the new target.
        // If we just set the camera's targetTexture and grab it in OnRenderImage, we can no longer render it to the screen as before.
        public override void UpdateFrame()
        {
            TickFrameTimer();

            if (_capturing && !_paused && HasCamera())
            {
                bool canGrab = true;

                if (IsUsingMotionBlur())
                {
                    // If the motion blur is still accumulating, don't grab this frame
                    canGrab = _motionBlur.IsFrameAccumulated;
                }

                if (canGrab)
                {
                    /*
                    while (_handle >= 0 && !AVProMovieCapturePlugin.IsNewFrameDue(_handle))
                    {
                        System.Threading.Thread.Sleep(1);
                    }
                    */
                    if (_handle >= 0 && CanOutputFrame())
                    {
                        // Render the camera(s)
                        if (!IsUsingMotionBlur())
                        {
                            UpdateTexture();
                        }
                        else
                        {
                            // Just grab the last result of the motion blur
                            _target.DiscardContents();
                            Graphics.Blit(_motionBlur.FinalTexture, _target);
                        }

                        if (_supportTextureRecreate)
                        {
                            // NOTE: This line is needed if captures must survive alt-tab events or window resizes, where
                            // the GPU resources are recreated. It is very expensive though, as it forces a sync with the rendering thread.
                            _targetNativePointer = _target.GetNativeTexturePtr();
                        }

                        NativePlugin.SetTexturePointer(_handle, _targetNativePointer);

                        RenderThreadEvent(NativePlugin.PluginEvent.CaptureFrameBuffer);

                        if (IsRecordingUnityAudio())
                        {
                            int           audioDataLength = 0;
                            System.IntPtr audioDataPtr    = _audioCapture.ReadData(out audioDataLength);
                            if (audioDataLength > 0)
                            {
                                NativePlugin.EncodeAudio(_handle, audioDataPtr, (uint)audioDataLength);
                            }
                        }

                        UpdateFPS();
                    }
                }
            }
            base.UpdateFrame();

            RenormTimer();
        }
        // If we're forcing a resolution or AA change then we have to render the camera again to the new target.
        // If we just set the camera's targetTexture and grab it in OnRenderImage, we can no longer render it to the screen as before.
        public IEnumerator Capture()
        {
            TickFrameTimer();

            if (_capturing && !_paused && HasCamera())
            {
                bool canGrab = true;

                if (IsUsingMotionBlur())
                {
                    // If the motion blur is still accumulating, don't grab this frame
                    canGrab = _motionBlur.IsFrameAccumulated;
                }

                if (canGrab)
                {
                    /*
                    while (_handle >= 0 && !AVProMovieCapturePlugin.IsNewFrameDue(_handle))
                    {
                        System.Threading.Thread.Sleep(1);
                    }
                    */
                    if (_handle >= 0 && CanOutputFrame())
                    {
                        RenderTexture sourceTexture = _target;

                        // In 2018.3 and above a different method is used to render the gizmos etc, so we don't yet support capturing these
#if SUPPORT_SCENE_VIEW_GIZMOS_CAPTURE
                        // Support capturing the Scene View target texture directly so that we get all gizmo rendering
                        if (_lastCamera != null && _includeSceneViewGizmos && (_lastCamera.hideFlags & HideFlags.NotEditable) != 0 && _lastCamera.targetTexture != null)
                        {
                            sourceTexture = _lastCamera.targetTexture;
                        }
#endif
                        if (sourceTexture == _target)
                        {
                            // Render the camera(s)
                            if (!IsUsingMotionBlur())
                            {
                                UpdateTexture();
                            }
                            else
                            {
                                // Just grab the last result of the motion blur
                                _target.DiscardContents();
                                Graphics.Blit(_motionBlur.FinalTexture, _target);
                            }
                        }

                        // If the texture isn't suitable then blit it to the resolve RenderTexture so the native plugin can grab it
                        if (RequiresResolve(sourceTexture))
                        {
                            CreateResolveTexture(sourceTexture.width, sourceTexture.height);
                            _resolveTexture.DiscardContents();
                            Graphics.Blit(sourceTexture, _resolveTexture);
                            sourceTexture = _resolveTexture;
                        }

                        if (_supportTextureRecreate || _targetNativeTexture != sourceTexture)
                        {
                            // NOTE: This line is needed if captures must survive alt-tab events or window resizes, where
                            // the GPU resources are recreated. It is very expensive though, as it forces a sync with the rendering thread.
                            _targetNativePointer = sourceTexture.GetNativeTexturePtr();
                            _targetNativeTexture = sourceTexture;
                        }

                        _previewTexture = sourceTexture;

                        NativePlugin.SetTexturePointer(_handle, _targetNativePointer);

                        RenderThreadEvent(NativePlugin.PluginEvent.CaptureFrameBuffer);
                        EncodeUnityAudio();

                        UpdateFPS();
                    }
                }
            }
            base.UpdateFrame();

            RenormTimer();

            yield break;
        }
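        // A minimal sketch of the resolve test used above; the real criteria are likely more
        // involved (format, sRGB state, dimensions), so treat this as an assumption:
        private bool RequiresResolve(RenderTexture texture)
        {
            // Multi-sampled textures cannot be read directly by the native plugin,
            // so they are first resolved by blitting to a plain 2D render target.
            return texture.antiAliasing > 1;
        }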
Example #28
        // If we're forcing a resolution or AA change then we have to render the camera again to the new target.
        // If we just set the camera's targetTexture and grab it in OnRenderImage, we can no longer render it to the screen as before.
        public override void UpdateFrame()
        {
            TickFrameTimer();

            if (_capturing && !_paused && HasCamera())
            {
                bool canGrab = true;

                if (IsUsingMotionBlur())
                {
                    // If the motion blur is still accumulating, don't grab this frame
                    canGrab = _motionBlur.IsFrameAccumulated;
                }

                if (canGrab)
                {
                    /*
                    while (_handle >= 0 && !AVProMovieCapturePlugin.IsNewFrameDue(_handle))
                    {
                        System.Threading.Thread.Sleep(1);
                    }
                    */
                    if (_handle >= 0 && CanOutputFrame())
                    {
                        // Render the camera(s)
                        if (!IsUsingMotionBlur())
                        {
                            // Render a single camera
                            if (!HasContributingCameras())
                            {
                                RenderTexture prev = _lastCamera.targetTexture;
                                // Reset the viewport rect, since rendering to a texture always covers the full viewport
                                Rect             prevRect     = _lastCamera.rect;
                                CameraClearFlags prevClear    = _lastCamera.clearFlags;
                                Color            prevColor    = _lastCamera.backgroundColor;
                                bool             clearChanged = false;
                                if (_lastCamera.clearFlags == CameraClearFlags.Nothing || _lastCamera.clearFlags == CameraClearFlags.Depth)
                                {
                                    clearChanged           = true;
                                    _lastCamera.clearFlags = CameraClearFlags.SolidColor;
                                    if (!_supportAlpha)
                                    {
                                        _lastCamera.backgroundColor = Color.black;
                                    }
                                    else
                                    {
                                        _lastCamera.backgroundColor = new Color(0f, 0f, 0f, 0f);
                                    }
                                }

                                // Render
                                _lastCamera.rect          = new Rect(0f, 0f, 1f, 1f);
                                _lastCamera.targetTexture = _target;
                                _lastCamera.Render();

                                // Restore camera
                                {
                                    _lastCamera.rect = prevRect;
                                    if (clearChanged)
                                    {
                                        _lastCamera.clearFlags      = prevClear;
                                        _lastCamera.backgroundColor = prevColor;
                                    }
                                    _lastCamera.targetTexture = prev;
                                }
                            }
                            // Render the camera chain
                            else
                            {
                                // First render contributing cameras
                                for (int cameraIndex = 0; cameraIndex < _contribCameras.Length; cameraIndex++)
                                {
                                    Camera camera = _contribCameras[cameraIndex];
                                    if (camera != null)
                                    {
                                        RenderTexture prev = camera.targetTexture;
                                        camera.targetTexture = _target;
                                        camera.Render();
                                        camera.targetTexture = prev;
                                    }
                                }
                                // Finally render the last camera
                                if (_lastCamera != null)
                                {
                                    RenderTexture prev = _lastCamera.targetTexture;
                                    _lastCamera.targetTexture = _target;
                                    _lastCamera.Render();
                                    _lastCamera.targetTexture = prev;
                                }
                            }
                        }
                        else
                        {
                            // Just grab the last result of the motion blur
                            Graphics.Blit(_motionBlur.FinalTexture, _target);
                        }

                        if (_supportTextureRecreate)
                        {
                            // NOTE: This line is needed if captures must survive alt-tab events or window resizes, where
                            // the GPU resources are recreated. It is very expensive though, as it forces a sync with the rendering thread.
                            _targetNativePointer = _target.GetNativeTexturePtr();
                        }

                        NativePlugin.SetTexturePointer(_handle, _targetNativePointer);

                        RenderThreadEvent(NativePlugin.PluginEvent.CaptureFrameBuffer);

                        if (IsRecordingUnityAudio())
                        {
                            int           audioDataLength = 0;
                            System.IntPtr audioDataPtr    = _audioCapture.ReadData(out audioDataLength);
                            if (audioDataLength > 0)
                            {
                                NativePlugin.EncodeAudio(_handle, audioDataPtr, (uint)audioDataLength);
                            }
                        }

                        UpdateFPS();
                    }
                }
            }
            base.UpdateFrame();

            RenormTimer();
        }
        public override void UpdateFrame()
        {
            TickFrameTimer();

            AccumulateMotionBlur();

            if (_capturing && !_paused)
            {
                if (_cubeTarget != null && _camera != null)
                {
                    bool canGrab = true;

                    if (IsUsingMotionBlur())
                    {
                        // TODO: fix motion blur
                        //this._motionBlur.RenderImage()
                        // If the motion blur is still accumulating, don't grab this frame
                        canGrab = _motionBlur.IsFrameAccumulated;
                    }

                    if (canGrab && CanOutputFrame())
                    {
                        if (IsRecordingUnityAudio())
                        {
                            int           audioDataLength = 0;
                            System.IntPtr audioDataPtr    = _audioCapture.ReadData(out audioDataLength);
                            if (audioDataLength > 0)
                            {
                                NativePlugin.EncodeAudio(_handle, audioDataPtr, (uint)audioDataLength);
                            }
                        }

                        RenderTexture finalTexture = _finalTarget;
                        if (!IsUsingMotionBlur())
                        {
                            UpdateTexture();
                        }
                        else
                        {
                            finalTexture = _motionBlur.FinalTexture;
                        }

                        if (_targetNativePointer == System.IntPtr.Zero || _supportTextureRecreate)
                        {
                            // NOTE: This line is needed if captures must survive alt-tab events or window resizes, where
                            // the GPU resources are recreated. It is very expensive though, as it forces a sync with the rendering thread.
                            _targetNativePointer = finalTexture.GetNativeTexturePtr();
                        }

                        NativePlugin.SetTexturePointer(_handle, _targetNativePointer);

                        RenderThreadEvent(NativePlugin.PluginEvent.CaptureFrameBuffer);
                        GL.InvalidateState();

                        UpdateFPS();
                    }
                }
            }

            base.UpdateFrame();

            RenormTimer();
        }
Example #30
        private void Capture()
        {
            TickFrameTimer();

            AccumulateMotionBlur();

            if (ShouldCaptureFrame())
            {
                bool hasSourceTextureChanged = HasSourceTextureChanged();

                // If motion blur is enabled, wait until all frames are accumulated
                if (IsUsingMotionBlur())
                {
                    // If the motion blur is still accumulating, don't grab this frame
                    hasSourceTextureChanged = _motionBlur.IsFrameAccumulated;
                }

                _isSourceTextureChanged = false;
                if (hasSourceTextureChanged)
                {
                    if ((_manualUpdate /*&& NativePlugin.IsNewFrameDue(_handle)*/) || CanOutputFrame())
                    {
                        // If motion blur is enabled, use the motion blur result
                        Texture sourceTexture = _sourceTexture;
                        if (IsUsingMotionBlur())
                        {
                            sourceTexture = _motionBlur.FinalTexture;
                        }

                        // If the texture isn't a RenderTexture then blit it to the RenderTexture so the native plugin can grab it
                        if (!(sourceTexture is RenderTexture))
                        {
                            _renderTexture.DiscardContents();
                            Graphics.Blit(sourceTexture, _renderTexture);
                            sourceTexture = _renderTexture;
                        }

                        if (_targetNativePointer == System.IntPtr.Zero || _supportTextureRecreate)
                        {
                            // NOTE: This line is needed if captures must survive alt-tab events or window resizes, where
                            // the GPU resources are recreated. It is very expensive though, as it forces a sync with the rendering thread.
                            _targetNativePointer = sourceTexture.GetNativeTexturePtr();
                        }

                        NativePlugin.SetTexturePointer(_handle, _targetNativePointer);

                        RenderThreadEvent(NativePlugin.PluginEvent.CaptureFrameBuffer);

                        if (!IsUsingMotionBlur())
                        {
                            _isSourceTextureChanged = false;
                        }

                        // Handle audio from Unity
                        if (IsRecordingUnityAudio())
                        {
                            int           audioDataLength = 0;
                            System.IntPtr audioDataPtr    = _audioCapture.ReadData(out audioDataLength);
                            if (audioDataLength > 0)
                            {
                                NativePlugin.EncodeAudio(_handle, audioDataPtr, (uint)audioDataLength);
                            }
                        }

                        UpdateFPS();
                    }
                }
            }

            RenormTimer();
        }