Example 1
        // Appends one video frame -- and, when audio is preserved, one batch of
        // audio samples -- to m_Encoder. Expects exactly two inputs:
        // [0] the video source, [1] the audio source.
        public override void RecordFrame(RecordingSession session)
        {
            if (m_Inputs.Count != 2)
            {
                throw new Exception("Unsupported number of sources");
            }

            int frameWidth;
            int frameHeight;

            if (m_Inputs[0] is ScreenCaptureInput screenInput)
            {
                // Screen capture already provides a CPU-side image; hand it
                // straight to the encoder.
                frameWidth  = screenInput.outputWidth;
                frameHeight = screenInput.outputHeight;
                m_Encoder.AddFrame(screenInput.image);
            }
            else
            {
                var textureInput = (BaseRenderTextureInput)m_Inputs[0];
                frameWidth  = textureInput.outputWidth;
                frameHeight = textureInput.outputHeight;

                // Lazily allocate the CPU read-back texture on first use.
                if (!m_ReadBackTexture)
                {
                    m_ReadBackTexture = new Texture2D(frameWidth, frameHeight, TextureFormat.RGBA32, false);
                }

                // Read the GPU render target back into the texture, restoring
                // the previously active render target afterwards.
                var previousActive = RenderTexture.active;
                RenderTexture.active = textureInput.outputRT;
                m_ReadBackTexture.ReadPixels(new Rect(0, 0, frameWidth, frameHeight), 0, 0, false);
                m_Encoder.AddFrame(m_ReadBackTexture);
                RenderTexture.active = previousActive;
            }

            var audioInput = (AudioInput)m_Inputs[1];

            // Nothing more to do when audio is not being preserved.
            if (!audioInput.audioSettings.m_PreserveAudio)
            {
                return;
            }

#if RECORD_AUDIO_MIXERS
            // Feed each per-mixer-group WAV writer that was set up for this session.
            for (int i = 0; i < m_WavWriters.Length; i++)
            {
                if (m_WavWriters[i] != null)
                {
                    m_WavWriters[i].Feed(audioInput.mixerGroupAudioBuffer(i));
                }
            }
#endif

            m_Encoder.AddSamples(audioInput.mainBuffer);
        }
Example 2
        /**
         * Updates the webcam preview each frame: fits the aspect ratio onto the
         * canvas, compensates for platform mirroring/rotation, and, while
         * recording, copies the current frame into the encoder.
         */

        private void Update()
        {
            // Nothing to do until the camera has been acquired.
            if (!_camAvailable)
            {
                return;
            }

            // Fit the raw image to the camera's aspect ratio.
            fit.aspectRatio = (float)_frontCam.width / _frontCam.height;

            // Un-mirror the image when the platform delivers it vertically flipped.
            background.rectTransform.localScale =
                new Vector3(1f, _frontCam.videoVerticallyMirrored ? -1f : 1f, 1f);

            // Counter-rotate to compensate for the device's video rotation.
            background.rectTransform.localEulerAngles =
                new Vector3(0, 0, -_frontCam.videoRotationAngle);

            if (recording)
            {
                // Copy the current webcam frame and hand it to the encoder.
                _currentTexture.SetPixels(_frontCam.GetPixels());
                _mediaEncoder.AddFrame(_currentTexture);
            }
        }
        /// <summary>
        /// Adds a frame to the MediaEncoder, starting a recording if one is not
        /// already active. Nonfunctional before Unity 2017.3 (and outside the Editor).
        /// </summary>
        /// <param name="frame">A frame to append to the media file.</param>
        /// <exception cref="System.NotSupportedException">
        /// Thrown when compiled for a player build or a Unity version older than 2017.3.
        /// </exception>
        private void AddFrameToEncoder(Texture2D frame)
        {
#if UNITY_EDITOR && UNITY_2017_3_OR_NEWER
            // Lazily (re)start recording so a stray call never dereferences a null encoder.
            if (encoder == null || !isActive)
            {
                BeginRecording();
            }
            encoder.AddFrame(frame);
#else
            // BUG FIX: the message used Application.version (the product/bundle
            // version), but the guard above concerns the Unity version, so report
            // Application.unityVersion instead. Also dropped the redundant
            // .ToString() on what is already a string.
            throw new System.NotSupportedException("AddFrameToEncoder is not supported on " + UnityEngine.Application.unityVersion);
#endif
        }
Example 4
        // Appends one video frame -- and, when audio is preserved, one batch of
        // audio samples -- to m_Encoder. Expects exactly two inputs:
        // [0] the video source, [1] the audio source.
        public override void RecordFrame(RecordingSession session)
        {
            if (m_Inputs.Count != 2)
            {
                throw new Exception("Unsupported number of sources");
            }

            if (m_Inputs[0] is GameViewInput)
            {
                // Game-view capture already yields a CPU-side image.
                var input = (GameViewInput)m_Inputs[0];
                m_Encoder.AddFrame(input.image);
            }
            else
            {
                var input  = (BaseRenderTextureInput)m_Inputs[0];
                var width  = input.outputWidth;
                var height = input.outputHeight;

                // Lazily allocate the CPU read-back texture on first use.
                if (!m_ReadBackTexture)
                {
                    m_ReadBackTexture = new Texture2D(width, height, TextureFormat.RGBA32, false);
                }
                // Read the GPU render target back into the texture, restoring the
                // previously active render target afterwards.
                var backupActive = RenderTexture.active;
                RenderTexture.active = input.outputRT;
                m_ReadBackTexture.ReadPixels(new Rect(0, 0, width, height), 0, 0, false);
                m_Encoder.AddFrame(m_ReadBackTexture);
                RenderTexture.active = backupActive;
            }

            var audioInput = (AudioInput)m_Inputs[1];

            // Nothing more to do when audio is not being preserved.
            if (!audioInput.audioSettings.preserveAudio)
            {
                return;
            }

            m_Encoder.AddSamples(audioInput.mainBuffer);
        }
        // Captures the rendered frame into the encoder once rendering for this
        // frame has completed. Runs only while a recording is in progress.
        IEnumerator OnPostRender()
        {
            if (!isRecording || encoder == null)
            {
                yield break;
            }

            // Wait until the frame has fully rendered before grabbing it.
            yield return new WaitForEndOfFrame();

            VimeoRecorder.CaptureLock(renderBuffer, data => encoder.AddFrame(data));

            // Audio samples for this frame would be appended here via
            // encoder.AddSamples(audioBuffer).
        }
Example 6
        // Reads the render-texture input back to the CPU and appends it as one
        // video frame; then, when audio is preserved, appends the frame's audio
        // samples. Expects exactly two inputs: [0] video, [1] audio.
        public override void RecordFrame(RecordingSession session)
        {
            if (m_Inputs.Count != 2)
            {
                throw new Exception("Unsupported number of sources");
            }

            var textureInput = (BaseRenderTextureInput)m_Inputs[0];
            var width        = textureInput.outputWidth;
            var height       = textureInput.outputHeight;

            // Diagnostic: actual render-target size vs. requested output size.
            if (Verbose.enabled)
            {
                Debug.Log(string.Format("MovieRecorder.RecordFrame {0} x {1} (wanted: {2} x {3})",
                                        textureInput.outputRT.width, textureInput.outputRT.height,
                                        width, height));
            }

            // Lazily allocate the CPU read-back texture on first use.
            if (!m_ReadBackTexture)
            {
                m_ReadBackTexture = new Texture2D(width, height, TextureFormat.RGBA32, false);
            }
            var backupActive = RenderTexture.active;

            // Read the GPU render target back into the texture, then restore the
            // previously active render target.
            RenderTexture.active = textureInput.outputRT;
            m_ReadBackTexture.ReadPixels(new Rect(0, 0, width, height), 0, 0, false);
            m_Encoder.AddFrame(m_ReadBackTexture);
            RenderTexture.active = backupActive;

            var audioInput = (AudioInput)m_Inputs[1];

            // Nothing more to do when audio is not being preserved.
            if (!audioInput.audioSettings.m_PreserveAudio)
            {
                return;
            }

#if RECORD_AUDIO_MIXERS
            // Feed each per-mixer-group WAV writer that was set up for this session.
            for (int n = 0; n < m_WavWriters.Length; n++)
            {
                if (m_WavWriters[n] != null)
                {
                    m_WavWriters[n].Feed(audioInput.mixerGroupAudioBuffer(n));
                }
            }
#endif

            m_Encoder.AddSamples(audioInput.mainBuffer);
        }
Example 7
 // Encodes the captured images plus the recorded audio clip into a video file.
 // BUG FIX: the original called sounds.GetData(audioBuffer.ToArray(), 0), which
 // filled a temporary managed copy and left the NativeArray untouched (all
 // zeros), and it always re-read from sample offset 0 -- so every frame got the
 // same (silent) audio. The fix copies each frame's slice of the pre-fetched
 // sample array into the native buffer before AddSamples.
 public void MakeVideo()
 {
     // Pull the whole clip once; GetData fills the supplied array in place.
     float[] soundArray;
     sounds.GetData(soundArray = new float[sounds.samples], 0);

     using (var encoder = new MediaEncoder(encodedFilePath, videoAttrs, audioAttrs))
         using (var audioBuffer = new NativeArray <float>(sampleFramesPerVideoFrame, Allocator.Temp))
         {
             for (int i = 0; i < images.Count; ++i)
             {
                 encoder.AddFrame(images[i]);

                 // Copy this frame's window of samples; pad with silence past
                 // the end of the clip.
                 int offset = i * sampleFramesPerVideoFrame;
                 for (int s = 0; s < sampleFramesPerVideoFrame; ++s)
                 {
                     int src = offset + s;
                     audioBuffer[s] = src < soundArray.Length ? soundArray[src] : 0f;
                 }
                 encoder.AddSamples(audioBuffer);
             }
         }
     Microphone.End("");
 }
    // Encodes every texture in VideoInfo.TexList into an MP4 under
    // Assets/Resources/ConvertVideo. The audio track is declared but no samples
    // are written to it.
    static public void EncodeVideo()
    {
        Debug.Log("Enconde Start");

        VideoInfo info = VideoInfo.videoInfo;

        // Video track mirrors the source clip's dimensions and frame rate.
        var videoAttr = new VideoTrackAttributes
        {
            frameRate    = new MediaRational(info.FrameRate),
            width        = info.width,
            height       = info.height,
            includeAlpha = info.includeAlpha
        };

        // 48 kHz stereo track (declared only; see note above).
        var audioAttr = new AudioTrackAttributes
        {
            sampleRate   = new MediaRational(48000),
            channelCount = 2,
            language     = "fr"
        };

        // Interleaved sample count contributed by one video frame.
        int sampleFramesPerVideoFrame =
            audioAttr.channelCount * audioAttr.sampleRate.numerator / videoAttr.frameRate.numerator;

        string encodedFilePath = Path.Combine(Application.dataPath + "/Resources/ConvertVideo", "my_movie.mp4");

        Texture2D tex = new Texture2D((int)videoAttr.width, (int)videoAttr.height, TextureFormat.RGBA32, false);

        using (var encoder = new MediaEncoder(encodedFilePath, videoAttr, audioAttr))
        using (var audioBuffer = new NativeArray <float>(sampleFramesPerVideoFrame, Allocator.Temp))
        {
            // Append every pre-rendered texture as one video frame.
            foreach (Texture2D frame in info.TexList)
            {
                encoder.AddFrame(frame);
            }
        }
    }
Example 9
        // Pushes the current video frame (and, from Unity 2018.1, the matching
        // audio samples) into the encoder, then advances the frame counter.
        public void AddFrame()
        {
            // Ignore calls made before the encoder exists or after recording stopped.
            if (encoder == null || !isRecording)
            {
                return;
            }

            encoder.AddFrame(videoInput.GetFrame());
            videoInput.EndFrame();

#if UNITY_2018_1_OR_NEWER
            // Audio capture is only available from Unity 2018.1 onwards.
            if (recorder.recordAudio)
            {
                audioInput.StartFrame();
                encoder.AddSamples(audioInput.GetBuffer());
                audioInput.EndFrame();
            }
#endif

            currentFrame++;
        }
Example 10
    // Toggles recording with the R key and drains at most one queued texture
    // per frame into the encoder.
    private void Update()
    {
        if (Input.GetKeyDown(KeyCode.R))
        {
            // R flips between starting and ending a recording.
            var toggle = isRecording ? (System.Action)EndRecord : StartRecord;
            toggle();
        }

        if (textures.Count > 0)
        {
            mediaEncoder.AddFrame(textures.Dequeue());
        }
    }
Example 11
        // Coroutine: once the frame has fully rendered, appends it (and, when
        // enabled, the frame's audio) to the encoder.
        IEnumerator RecordFrame()
        {
            yield return new WaitForEndOfFrame();

            if (encoder == null || !isRecording)
            {
                yield break;
            }

            // Begin the audio frame before grabbing video so both cover the
            // same time span.
            if (recorder.recordAudio)
            {
                audioInput.StartFrame();
            }

            encoder.AddFrame(videoInput.GetFrame());
            videoInput.EndFrame();

            if (recorder.recordAudio)
            {
                encoder.AddSamples(audioInput.GetBuffer());
                audioInput.EndFrame();
            }
        }
        // Coroutine: decodes each stored image (PNG/JPG bytes) back into a
        // texture and encodes the sequence into eye_vid<chara>.mp4 under `path`,
        // updating `progressTxt` as it goes.
        // BUG FIX: the original called encoder.Dispose() explicitly inside the
        // using block, so the encoder was disposed twice (the using block
        // disposes it again on exit). The explicit call is removed.
        IEnumerator CreateVideo(string path, List <byte[]> textureList, string chara, Text progressTxt)
        {
            progressTxt.gameObject.SetActive(true);

            // 90 fps, 320x240, opaque output.
            VideoTrackAttributes videoAttributes = new VideoTrackAttributes {
                frameRate    = new MediaRational(90),
                width        = 320,
                height       = 240,
                includeAlpha = false
            };

            string filePath = Path.Combine(path, "eye_vid" + chara + ".mp4");

            using (MediaEncoder encoder = new MediaEncoder(filePath, videoAttributes)) {
                for (int i = 0; i < textureList.Count; i++)
                {
                    // Decode the stored bytes, then re-blit into a plain RGBA32
                    // texture before handing it to the encoder.
                    Texture2D tex = new Texture2D(320, 240, TextureFormat.RGBA32, false);
                    tex.LoadImage(textureList[i]);
                    Texture2D newTex = new Texture2D(320, 240, TextureFormat.RGBA32, false);
                    newTex.SetPixels(tex.GetPixels());
                    newTex.Apply();
                    encoder.AddFrame(newTex);

                    // Periodically reclaim the temporary textures created above.
                    if (i % 100 == 0)
                    {
                        Resources.UnloadUnusedAssets();
                        System.GC.Collect();
                    }
                    if (i % 10 == 0)
                    {
                        progressTxt.text = "Writing " + chara + " : " + System.Math.Round(((float)i / (float)textureList.Count) * 100, 2) + "%";
                    }
                    yield return(new WaitForEndOfFrame());
                }
            }
            Debug.Log("Finished!");
            textureList.Clear();
            Resources.UnloadUnusedAssets();
            System.GC.Collect();
            progressTxt.gameObject.SetActive(false);
        }
Example 13
    // Sample: writes a 100-frame MP4 with a (blank) video track and a (silent)
    // stereo audio track into the system temp directory.
    static public void RecordMovie()
    {
        // 50 fps, 320x200, opaque video track.
        var videoAttr = new VideoTrackAttributes
        {
            frameRate    = new MediaRational(50),
            width        = 320,
            height       = 200,
            includeAlpha = false
        };

        // 48 kHz stereo audio track.
        var audioAttr = new AudioTrackAttributes
        {
            sampleRate   = new MediaRational(48000),
            channelCount = 2,
            language     = "fr"
        };

        // Interleaved sample count contributed by one video frame.
        int sampleFramesPerVideoFrame =
            audioAttr.channelCount * audioAttr.sampleRate.numerator / videoAttr.frameRate.numerator;

        var encodedFilePath = Path.Combine(Path.GetTempPath(), "my_movie.mp4");

        // Placeholder frame; real content would be written into it per iteration.
        Texture2D tex = new Texture2D((int)videoAttr.width, (int)videoAttr.height, TextureFormat.RGBA32, false);

        using (var encoder = new MediaEncoder(encodedFilePath, videoAttr, audioAttr))
        using (var audioBuffer = new NativeArray <float>(sampleFramesPerVideoFrame, Allocator.Temp))
        {
            for (int frame = 0; frame < 100; ++frame)
            {
                encoder.AddFrame(tex);
                encoder.AddSamples(audioBuffer);
            }
        }
    }
Example 14
    // Coroutine: records RecordTime seconds of the TIC-80 screen texture plus
    // rendered audio into encodedFilePath (defined elsewhere in this class),
    // then reveals the file in the OS file browser. Editor-only (EditorUtility).
    private IEnumerator recordCoroutine()
    {
        var videoAttr = new VideoTrackAttributes {
            frameRate    = new MediaRational(FramesPerSecond),
            width        = Tic80Config.WIDTH,
            height       = Tic80Config.HEIGHT,
            includeAlpha = false
        };

        var audioAttr = new AudioTrackAttributes {
            sampleRate   = new MediaRational(SoundFreq),
            channelCount = 2,
            language     = "en"
        };

        // Interleaved sample count contributed by one video frame.
        int sampleFramesPerVideoFrame = audioAttr.channelCount * audioAttr.sampleRate.numerator / videoAttr.frameRate.numerator;

        var   tex       = View.Instance.GetScreenTexture();
        var   frames    = RecordTime * FramesPerSecond;
        float deltaTime = (float)1 / FramesPerSecond;

        // Route the game's audio output into AudioRenderer so it can be pulled
        // into audioBuffer below.
        AudioRenderer.Start();

        using (var encoder = new MediaEncoder(encodedFilePath, videoAttr, audioAttr))
            using (var audioBuffer = new NativeArray <float> (sampleFramesPerVideoFrame, Allocator.Persistent)) {
                for (int i = 0; i < frames; ++i)
                {
                    encoder.AddFrame(tex);
                    AudioRenderer.Render(audioBuffer);
                    encoder.AddSamples(audioBuffer);
                    // NOTE(review): yielding a raw float is not WaitForSeconds --
                    // Unity treats unrecognized yield values as "wait one frame".
                    // Presumably one capture per rendered frame is intended here;
                    // confirm, otherwise this should be new WaitForSeconds(deltaTime).
                    yield return(deltaTime);
                }
            }
        AudioRenderer.Stop();
        Debug.Log("Video saved to: " + encodedFilePath);
        EditorUtility.RevealInFinder(encodedFilePath);
    }
Example 15
 // Forwards one captured frame straight to the media encoder.
 protected override void WriteFrame(Texture2D t) => m_Encoder.AddFrame(t);
    // On shutdown, converts the accumulated render textures (m_Frames) into CPU
    // textures and saves them -- as an MP4 when running in the Editor with
    // MakeMP4InEditor set, otherwise as a numbered JPG sequence.
    void OnApplicationQuit()
    {
        if (!Record)
        {
            return;
        }

        if (!Directory.Exists(MoviePath))
        {
            Debug.LogError(MoviePath + " does not exist. Cannot save output");
            return;
        }

        Debug.Log("Saving recorded frames to disk...");

        string filePathMP4 = "";
        string filePathDir = "";

        // Pick the first capture_NNNN name not already used by a file or folder.
        for (int i = 0; i < 9999; ++i)
        {
            filePathMP4 = Path.Combine(MoviePath, "capture_" + i.ToString("0000") + ".mp4");
            filePathDir = Path.Combine(MoviePath, "capture_" + i.ToString("0000"));
            if (!File.Exists(filePathMP4) && !Directory.Exists(filePathDir))
            {
                break;
            }
        }

        // Read every recorded render texture back into a CPU-side Texture2D.
        var textures = new List <Texture2D>();

        foreach (var frame in m_Frames)
        {
            RenderTexture.active = frame;
            var tex = new Texture2D(Width, Height, TextureFormat.RGBA32, false);
            tex.ReadPixels(new Rect(0, 0, Width, Height), 0, 0);
            tex.Apply();
            textures.Add(tex);
        }
        RenderTexture.active = null;

        // NOTE(review): the #if/#else structure below is deliberate but subtle.
        // In Editor builds the trailing brace block binds to the `else`, so MP4
        // and JPG output are mutually exclusive. In player builds the `if` only
        // logs, and the trailing block then runs UNCONDITIONALLY, so JPGs are
        // always written -- confirm that is the intended fallback.
#if UNITY_EDITOR
        if (MakeMP4InEditor)
        {
            VideoTrackAttributes videoAttr = new VideoTrackAttributes
            {
                // Round the genlock rate to the nearest integer frame rate.
                frameRate    = new MediaRational((int)(GenlockRate + 0.5)),
                width        = (uint)Width,
                height       = (uint)Height,
                includeAlpha = false,
                bitRateMode  = UnityEditor.VideoBitrateMode.High
            };
            using (var encoder = new MediaEncoder(filePathMP4, videoAttr))
                foreach (var tex in textures)
                {
                    encoder.AddFrame(tex);
                }
            Debug.Log("Recorded " + m_Frames.Count + " frames to " + filePathMP4);
        }
        else
#else
        if (MakeMP4InEditor)
        {
            Debug.Log("Cannot encode MP4 outside of Editor");
        }
#endif
        {
            // Fallback: dump the frames as a numbered JPG sequence.
            int f = 0;
            Directory.CreateDirectory(filePathDir);
            foreach (var tex in textures)
            {
                byte[] bytes = tex.EncodeToJPG();
                File.WriteAllBytes(Path.Combine(filePathDir, "frame_" + (f++).ToString("0000") + ".jpg"), bytes);
            }
            Debug.Log("Recorded " + m_Frames.Count + " frames to " + Path.Combine(filePathDir, "frame_XXXX.jpg"));
        }
    }
Example 17
    // Coroutine: plays videoToPlay while OnNewFrame (registered below) collects
    // every decoded frame into texList, then re-encodes the collected frames
    // into Assets/Resources/ConvertVideo/my_movie.mp4.
    // FIX: the MediaEncoder was disposed manually at the end, so an exception
    // (or the coroutine being stopped) mid-encode leaked the native encoder and
    // left a truncated file. A using block now guarantees disposal.
    IEnumerator playVideo()
    {
        Debug.Log(Application.dataPath);
        rend = GetComponent <Renderer>();

        videoPlayer = gameObject.AddComponent <VideoPlayer>();
        audioSource = gameObject.AddComponent <AudioSource>();

        // Disable Play on Awake for both video and audio.
        videoPlayer.playOnAwake = false;
        audioSource.playOnAwake = false;

        videoPlayer.source          = VideoSource.VideoClip;
        videoPlayer.audioOutputMode = VideoAudioOutputMode.AudioSource;
        videoPlayer.EnableAudioTrack(0, true);
        videoPlayer.SetTargetAudioSource(0, audioSource);

        // Set the clip to play, then prepare to prevent buffering.
        videoPlayer.clip = videoToPlay;
        videoPlayer.Prepare();

        // Wait until the video is prepared.
        while (!videoPlayer.isPrepared)
        {
            yield return(null);
        }

        // Show the video on this renderer's material.
        tex = videoPlayer.texture;
        rend.material.mainTexture = tex;

        // OnNewFrame collects each decoded frame into texList.
        videoPlayer.sendFrameReadyEvents = true;
        videoPlayer.frameReady += OnNewFrame;

        videoPlayer.Play();
        audioSource.Play();

        Debug.Log("Playing Video");

        // Wait until every frame of the clip has been captured.
        while (texList.Count != (int)videoPlayer.frameCount)
        {
            yield return(null);
        }
        Debug.Log("Done Playing Video");

        // Distorted-image processing / re-encoding section.

        VideoTrackAttributes videoAttr = new VideoTrackAttributes
        {
            frameRate = new MediaRational((int)videoPlayer.frameRate),
            width     = videoPlayer.width,
            height    = videoPlayer.height,
        };

        AudioTrackAttributes audioAttr = new AudioTrackAttributes
        {
            sampleRate   = new MediaRational(48000),
            channelCount = 2,
            language     = "fr"
        };

        // Interleaved sample count per video frame (no audio is written below).
        int sampleFramesPerVideoFrame = audioAttr.channelCount *
                                        audioAttr.sampleRate.numerator / videoAttr.frameRate.numerator;

        // Video output path.
        string encodedFilePath = Path.Combine(Application.dataPath + "/Resources/ConvertVideo", "my_movie.mp4");

        using (MediaEncoder encoder = new MediaEncoder(encodedFilePath, videoAttr, audioAttr))
        {
            for (int i = 0; i < texList.Count; ++i)
            {
                Debug.Log("Encoding tex num " + (i + 1) + " / " + texList.Count);
                encoder.AddFrame(texList[i]);
                yield return(null);
            }
        }

        Debug.Log("Convert To Video Complete");
    }
    // Coroutine: plays videoToPlay while OnNewFrame (registered below) collects
    // decoded frames into texList, then re-encodes the collected frames into
    // Assets/Resources/ConvertVideo/my_movie.mp4. Near-duplicate of the other
    // playVideo variant; this one waits on isPlaying instead of frameCount.
    IEnumerator playVideo()
    {
        rend = GetComponent <MeshRenderer>();

        videoPlayer = gameObject.AddComponent <VideoPlayer>();
        audioSource = gameObject.AddComponent <AudioSource>();

        // Disable Play on Awake for both video and audio.
        videoPlayer.playOnAwake = false;
        audioSource.playOnAwake = false;

        videoPlayer.source          = VideoSource.VideoClip;
        videoPlayer.audioOutputMode = VideoAudioOutputMode.AudioSource;
        videoPlayer.EnableAudioTrack(0, true);
        videoPlayer.SetTargetAudioSource(0, audioSource);

        // Set the clip to play, then prepare to prevent buffering.
        videoPlayer.clip = videoToPlay;
        videoPlayer.Prepare();

        // Wait until the video is prepared.
        while (!videoPlayer.isPrepared)
        {
            yield return(null);
        }

        // Show the video on this renderer's material.
        tex = videoPlayer.texture;
        rend.material.mainTexture = tex;

        // OnNewFrame collects decoded frames into texList.
        videoPlayer.sendFrameReadyEvents = true;

        videoPlayer.frameReady += OnNewFrame;

        videoPlayer.Play();

        audioSource.Play();


        // NOTE(review): waiting on isPlaying (rather than texList.Count reaching
        // frameCount, as in the commented-out loop below) means encoding may
        // start before every frame has been delivered to OnNewFrame -- confirm.
        while (videoPlayer.isPlaying)
        {
            Debug.LogWarning("Video Time: " + Mathf.FloorToInt((float)videoPlayer.time));
            Debug.Log("Playing Video");
            yield return(null);
        }
        //while (texList.Count != (int)videoPlayer.frameCount)
        //{
        //yield return null;
        //}
        //int i = 0;
        //foreach(Texture2D tex in texList)
        //{
        //    i++;
        //    if(i > 447 && i < 765)
        //    {
        //        byte[] bytes;
        //        bytes = tex.EncodeToPNG();

        //        string filePath = Application.dataPath + "/Resources/ConvertImages/";
        //        string fileName = filePath + i.ToString() + ".png";

        //        System.IO.File.WriteAllBytes(fileName, bytes);
        //        AssetDatabase.ImportAsset(fileName);
        //        yield return null;
        //    }

        //}



        Debug.Log("Done Playing Video");



        ///////////////////////////////////////////
        ////
        ///

        // Distorted-image processing //

        ///////////////////////////////////////////

        VideoTrackAttributes videoAttr = new VideoTrackAttributes
        {
            frameRate = new MediaRational((int)videoPlayer.frameRate),
            width     = videoPlayer.width,
            height    = videoPlayer.height,
        };

        AudioTrackAttributes audioAttr = new AudioTrackAttributes
        {
            sampleRate   = new MediaRational(48000),
            channelCount = 2,
            language     = "fr"
        };


        // Interleaved sample count per video frame (no audio is written below).
        int sampleFramesPerVideoFrame = audioAttr.channelCount *
                                        audioAttr.sampleRate.numerator / videoAttr.frameRate.numerator;

        // Video output path.
        string encodedFilePath = Path.Combine(Application.dataPath + "/Resources/ConvertVideo", "my_movie.mp4");

        MediaEncoder encoder = new MediaEncoder(encodedFilePath, videoAttr, audioAttr);

        // Append each collected frame, yielding so the app stays responsive.
        for (int i = 0; i < texList.Count; ++i)
        {
            Debug.Log("Encoding tex num " + (i + 1) + " / " + texList.Count);
            encoder.AddFrame(texList[i]);
            yield return(null);
        }
        // Flush and close the encoder (no using block in this variant).
        encoder.Dispose();

        Debug.Log("Convert To Video Complete");
    }