Example #1
0
    /// <summary>
    /// Starts a recording session: configures video/audio track attributes,
    /// opens a MediaEncoder writing to a timestamped .mp4 in the temp folder,
    /// and kicks off the Record() coroutine.
    /// </summary>
    public void StartRecord()
    {
        // Video track matches the source RenderTexture's dimensions at 30 fps.
        var videoAttr = new VideoTrackAttributes
        {
            frameRate    = new MediaRational(30),
            width        = (uint)recordTexture.width,
            height       = (uint)recordTexture.height,
            includeAlpha = false
        };

        // Stereo 48 kHz audio track; the language tag is metadata only.
        var audioAttr = new AudioTrackAttributes
        {
            sampleRate   = new MediaRational(48000),
            channelCount = 2,
            language     = "jp"
        };

        // Zero-padded timestamp keeps file names unambiguous and sortable.
        // The previous component-by-component concatenation had no padding,
        // so e.g. month 1 / day 12 collided with month 11 / day 2.
        var encodedFilePath = Path.Combine(
            Path.GetTempPath(),
            DateTime.Now.ToString("yyyyMMddHHmmss", System.Globalization.CultureInfo.InvariantCulture) + ".mp4");

        Debug.Log(encodedFilePath);

        mediaEncoder = new MediaEncoder(encodedFilePath, videoAttr, audioAttr);
        isRecording  = true;
        StartCoroutine(Record());
    }
Example #2
0
        /// <summary>
        /// Prepares a MediaEncoder recording session: fixes the capture frame
        /// rate, builds the audio/video track attributes, and creates the
        /// encoder (with an audio track only when enabled and supported).
        /// </summary>
        private void BeginMediaEncoderRecording()
        {
            InitInputs();

            // Real-time capture paces the player; offline capture locks
            // Unity's simulated frame rate instead.
            if (recorder.realTime)
            {
                Application.targetFrameRate = recorder.frameRate;
            }
            else
            {
                Time.captureFramerate = recorder.frameRate;
            }

            // Configure encoder
            AudioSpeakerMode speakerMode = AudioSettings.speakerMode;

            audioAttrs = new AudioTrackAttributes
            {
                sampleRate = new MediaRational
                {
                    numerator   = AudioSettings.outputSampleRate,
                    denominator = 1
                },
                // The AudioSpeakerMode enum value is NOT the channel count for
                // every mode (Quad == 3, Mode5point1 == 5, Raw == 0), so map
                // it explicitly instead of casting the enum.
                channelCount = ChannelCountForSpeakerMode(speakerMode),
                language     = ""
            };

            videoInput.BeginRecording();

            videoAttrs = new VideoTrackAttributes
            {
                frameRate    = new MediaRational(recorder.frameRate),
                width        = (uint)videoInput.outputWidth,
                height       = (uint)videoInput.outputHeight,
                includeAlpha = false
            };

            encodedFilePath = Path.Combine(outputPath, GetFileName());
            Debug.Log("[VimeoRecorder] Recording to " + GetFileName());

            // Audio capture is only performed in real-time mode here.
            if (!recorder.realTime)
            {
                recorder.recordAudio = false;
            }

            if (recorder.recordAudio)
            {
#if UNITY_2018_1_OR_NEWER
                audioInput.BeginRecording();
                encoder = new UnityEditor.Media.MediaEncoder(encodedFilePath, videoAttrs, audioAttrs);
#else
                encoder = new UnityEditor.Media.MediaEncoder(encodedFilePath, videoAttrs);
#endif
            }
            else
            {
                encoder = new UnityEditor.Media.MediaEncoder(encodedFilePath, videoAttrs);
            }
        }

        /// <summary>
        /// Maps Unity's speaker mode to the actual number of audio channels.
        /// Casting the enum gives wrong counts (e.g. Mode5point1 carries six
        /// channels but the enum value is 5, and Raw would yield zero).
        /// </summary>
        private static ushort ChannelCountForSpeakerMode(AudioSpeakerMode mode)
        {
            switch (mode)
            {
                case AudioSpeakerMode.Mono:        return 1;
                case AudioSpeakerMode.Quad:        return 4;
                case AudioSpeakerMode.Surround:    return 5;
                case AudioSpeakerMode.Mode5point1: return 6;
                case AudioSpeakerMode.Mode7point1: return 8;
                default:                           return 2; // Raw, Stereo, Prologic
            }
        }
Example #3
0
        /// <summary>
        /// Starts a WAV recording session: validates the output directory,
        /// gathers the audio track attributes and opens the encoder.
        /// Returns false (and clears <c>Recording</c> where relevant) on failure.
        /// </summary>
        protected internal override bool BeginRecording(RecordingSession session)
        {
            // Give the base implementation a chance to veto the session.
            if (!base.BeginRecording(session))
            {
                return false;
            }

            // The output directory must exist before the encoder is created.
            try
            {
                Settings.fileNameGenerator.CreateDirectory(session);
            }
            catch (Exception)
            {
                ConsoleLogMessage($"Unable to create the output directory \"{Settings.fileNameGenerator.BuildAbsolutePath(session)}\".", LogType.Error);
                Recording = false;
                return false;
            }

            var input  = (AudioInput)m_Inputs[0];
            var tracks = new List<AudioTrackAttributes>();

            // Only describe an audio track when the input wants audio kept.
            if (input.audioSettings.PreserveAudio)
            {
                var track = new AudioTrackAttributes
                {
                    sampleRate   = new MediaRational { numerator = input.sampleRate, denominator = 1 },
                    channelCount = input.channelCount,
                    language     = ""
                };

                tracks.Add(track);

                if (RecorderOptions.VerboseMode)
                {
                    ConsoleLogMessage($"Audio starting to write audio {track.channelCount}ch @ {track.sampleRate.numerator}Hz", LogType.Log);
                }
            }

            // Opening the encoder may throw (locked file, bad path, ...).
            try
            {
                m_Encoder = new WavEncoder(Settings.fileNameGenerator.BuildAbsolutePath(session));
                return true;
            }
            catch (Exception ex)
            {
                if (RecorderOptions.VerboseMode)
                {
                    ConsoleLogMessage($"Unable to create encoder: '{ex.Message}'", LogType.Error);
                }
                return false;
            }
        }
Example #4
0
        /// <summary>
        /// Begins audio recording: ensures the output directory exists,
        /// collects the audio track attributes and instantiates the WAV
        /// encoder. Returns false on any failure.
        /// </summary>
        public override bool BeginRecording(RecordingSession session)
        {
            // Bail out early if the base class refuses to start.
            if (!base.BeginRecording(session))
            {
                return false;
            }

            // Create the destination directory up front.
            try
            {
                m_Settings.fileNameGenerator.CreateDirectory(session);
            }
            catch (Exception)
            {
                Debug.LogError($"Audio recorder output directory \"{m_Settings.fileNameGenerator.BuildAbsolutePath(session)}\" could not be created.");
                return false;
            }

            var input  = (AudioInput)m_Inputs[0];
            var tracks = new List<AudioTrackAttributes>();

            // Describe the audio track only when audio should be preserved.
            if (input.audioSettings.preserveAudio)
            {
                var track = new AudioTrackAttributes
                {
                    sampleRate   = new MediaRational { numerator = input.sampleRate, denominator = 1 },
                    channelCount = input.channelCount,
                    language     = ""
                };

                tracks.Add(track);

                if (Options.verboseMode)
                {
                    Debug.Log($"Audio starting to write audio {track.channelCount}ch @ {track.sampleRate.numerator}Hz");
                }
            }

            // Encoder construction can throw; report only in verbose mode,
            // matching the original behavior.
            try
            {
                m_Encoder = new WavEncoder(m_Settings.fileNameGenerator.BuildAbsolutePath(session));
                return true;
            }
            catch
            {
                if (Options.verboseMode)
                {
                    Debug.LogError("AudioRecorder unable to create MovieEncoder.");
                }
                return false;
            }
        }
Example #5
0
    /// <summary>
    /// Encodes the frames captured in VideoInfo.TexList into an .mp4 under
    /// Assets/Resources/ConvertVideo. Note: an audio track is declared but
    /// no samples are ever written to it, so it stays empty.
    /// </summary>
    static public void EncodeVideo()
    {
        Debug.Log("Encode Start");

        VideoInfo info = VideoInfo.videoInfo;

        // Video track mirrors the captured clip's dimensions and frame rate.
        VideoTrackAttributes videoAttr = new VideoTrackAttributes
        {
            frameRate    = new MediaRational(info.FrameRate),
            width        = info.width,
            height       = info.height,
            includeAlpha = info.includeAlpha
        };

        // Stereo 48 kHz audio track (metadata only — no samples are added).
        AudioTrackAttributes audioAttr = new AudioTrackAttributes
        {
            sampleRate   = new MediaRational(48000),
            channelCount = 2,
            language     = "fr"
        };

        string encodedFilePath = Path.Combine(Application.dataPath + "/Resources/ConvertVideo", "my_movie.mp4");

        // 'using' finalizes and closes the file even if AddFrame throws.
        // (The previous version also allocated an unused Texture2D and an
        // unused audio NativeArray; both have been removed.)
        using (MediaEncoder encoder = new MediaEncoder(encodedFilePath, videoAttr, audioAttr))
        {
            foreach (Texture2D frame in info.TexList)
            {
                encoder.AddFrame(frame);
            }
        }
    }
Example #6
0
    /// <summary>
    /// Writes a small demo movie (320x200 @ 50 fps with a stereo 48 kHz
    /// audio track) into the system temp folder. The frame texture and
    /// audio buffer are left unfilled, as in the original sample.
    /// </summary>
    static public void RecordMovie()
    {
        var video = new VideoTrackAttributes
        {
            frameRate    = new MediaRational(50),
            width        = 320,
            height       = 200,
            includeAlpha = false
        };

        var audio = new AudioTrackAttributes
        {
            sampleRate   = new MediaRational(48000),
            channelCount = 2,
            language     = "fr"
        };

        // Audio sample frames needed to cover exactly one video frame.
        int samplesPerFrame = audio.channelCount * audio.sampleRate.numerator / video.frameRate.numerator;

        var outputPath = Path.Combine(Path.GetTempPath(), "my_movie.mp4");
        var frame      = new Texture2D((int)video.width, (int)video.height, TextureFormat.RGBA32, false);

        using (var encoder = new MediaEncoder(outputPath, video, audio))
        using (var sampleBuffer = new NativeArray<float>(samplesPerFrame, Allocator.Temp))
        {
            for (int frameIndex = 0; frameIndex < 100; ++frameIndex)
            {
                // Fill 'frame' with the video content to be encoded into the
                // file for this frame, then push the matching audio samples.
                encoder.AddFrame(frame);
                encoder.AddSamples(sampleBuffer);
            }
        }
    }
Example #7
0
    // Coroutine that records RecordTime seconds of the TIC-80 screen plus
    // rendered audio into the .mp4 at encodedFilePath, then reveals the file.
    private IEnumerator recordCoroutine()
    {
        // Video track: fixed TIC-80 screen dimensions at FramesPerSecond.
        var videoAttr = new VideoTrackAttributes {
            frameRate    = new MediaRational(FramesPerSecond),
            width        = Tic80Config.WIDTH,
            height       = Tic80Config.HEIGHT,
            includeAlpha = false
        };

        // Stereo audio at the game's sound frequency.
        var audioAttr = new AudioTrackAttributes {
            sampleRate   = new MediaRational(SoundFreq),
            channelCount = 2,
            language     = "en"
        };

        // Interleaved audio samples needed to cover one video frame.
        int sampleFramesPerVideoFrame = audioAttr.channelCount * audioAttr.sampleRate.numerator / videoAttr.frameRate.numerator;

        var   tex       = View.Instance.GetScreenTexture();
        var   frames    = RecordTime * FramesPerSecond;
        float deltaTime = (float)1 / FramesPerSecond;

        // Route Unity's audio output into AudioRenderer.Render below.
        AudioRenderer.Start();

        using (var encoder = new MediaEncoder(encodedFilePath, videoAttr, audioAttr))
            using (var audioBuffer = new NativeArray <float> (sampleFramesPerVideoFrame, Allocator.Persistent)) {
                for (int i = 0; i < frames; ++i)
                {
                    // One video frame plus its worth of rendered audio.
                    encoder.AddFrame(tex);
                    AudioRenderer.Render(audioBuffer);
                    encoder.AddSamples(audioBuffer);
                    // NOTE(review): yielding a boxed float is treated by Unity
                    // like yield return null (wait one frame), NOT a timed
                    // wait of deltaTime seconds — presumably WaitForSeconds
                    // was intended; confirm before changing.
                    yield return(deltaTime);
                }
            }
        AudioRenderer.Stop();
        Debug.Log("Video saved to: " + encodedFilePath);
        EditorUtility.RevealInFinder(encodedFilePath);
    }
Example #8
0
 /// <summary>
 /// Creates a named encoder attribute wrapping a set of audio-track
 /// attributes. Stores <paramref name="pname"/> as the attribute name and
 /// <paramref name="aAttr"/> as its value.
 /// </summary>
 public AudioTrackMediaEncoderAttribute(string pname, AudioTrackAttributes aAttr)
 {
     Value = aAttr;
     name  = pname;
 }
Example #9
0
 /// <summary>
 /// Constructs the encoder identified by <paramref name="handle"/> after
 /// verifying the handle has not been disposed.
 /// </summary>
 public void Construct(MediaEncoderHandle handle, string path, VideoTrackAttributes vAttr, AudioTrackAttributes aAttr)
 {
     DisposeCheck(handle);
     var encoderInterface = m_Encoders[handle.m_VersionHandle.Index].m_encoderInterface;
     encoderInterface.Construct(path, vAttr, aAttr);
 }
Example #10
0
        /// <summary>
        /// Starts a movie recording session: validates the output directory,
        /// input, resolution and MP4 constraints, builds the video/audio
        /// track attributes, and creates the MediaEncoder.
        /// </summary>
        public override bool BeginRecording(RecordingSession session)
        {
            if (!base.BeginRecording(session))
            {
                return(false);
            }

            // The destination directory must exist before encoding starts.
            try
            {
                m_Settings.fileNameGenerator.CreateDirectory(session);
            }
            catch (Exception)
            {
                Debug.LogError(string.Format("Movie recorder output directory \"{0}\" could not be created.", m_Settings.fileNameGenerator.BuildAbsolutePath(session)));
                return(false);
            }

            var input = m_Inputs[0] as BaseRenderTextureInput;

            if (input == null)
            {
                Debug.LogError("MediaRecorder could not find input.");
                return(false);
            }
            int width  = input.outputWidth;
            int height = input.outputHeight;

            if (width <= 0 || height <= 0)
            {
                Debug.LogError(string.Format("MovieRecorder got invalid input resolution {0} x {1}.", width, height));
                return(false);
            }

            // MP4/H.264 constraints: dimensions above 4096 may be rejected,
            // and odd dimensions are not supported at all.
            if (m_Settings.outputFormat == VideoRecorderOutputFormat.MP4)
            {
                if (width > 4096 || height > 4096)
                {
                    Debug.LogWarning(string.Format("Mp4 format might not support resolutions bigger than 4096. Current resolution: {0} x {1}.", width, height));
                }

                if (width % 2 != 0 || height % 2 != 0)
                {
                    Debug.LogError(string.Format("Mp4 format does not support odd values in resolution. Current resolution: {0} x {1}.", width, height));
                    return(false);
                }
            }

            var imageInputSettings = m_Inputs[0].settings as ImageInputSettings;

            // Alpha is only recorded when the input both supports and allows
            // transparency — and never for MP4, which has no alpha channel.
            var includeAlphaFromTexture = imageInputSettings != null && imageInputSettings.supportsTransparent && imageInputSettings.allowTransparency;

            if (includeAlphaFromTexture && m_Settings.outputFormat == VideoRecorderOutputFormat.MP4)
            {
                Debug.LogWarning("Mp4 format does not support alpha.");
                includeAlphaFromTexture = false;
            }

            var videoAttrs = new VideoTrackAttributes
            {
                frameRate    = RationalFromDouble(session.settings.frameRate),
                width        = (uint)width,
                height       = (uint)height,
                includeAlpha = includeAlphaFromTexture,
                bitRateMode  = m_Settings.videoBitRateMode
            };

            if (Options.verboseMode)
            {
                Debug.Log(
                    string.Format(
                        "MovieRecorder starting to write video {0}x{1}@[{2}/{3}] fps into {4}",
                        width, height, videoAttrs.frameRate.numerator,
                        videoAttrs.frameRate.denominator, m_Settings.fileNameGenerator.BuildAbsolutePath(session)));
            }

            var audioInput     = (AudioInput)m_Inputs[1];
            var audioAttrsList = new List <AudioTrackAttributes>();

            // Add an audio track only when the input preserves audio.
            if (audioInput.audioSettings.preserveAudio)
            {
                var audioAttrs = new AudioTrackAttributes
                {
                    sampleRate = new MediaRational
                    {
                        numerator   = audioInput.sampleRate,
                        denominator = 1
                    },
                    channelCount = audioInput.channelCount,
                    language     = ""
                };

                audioAttrsList.Add(audioAttrs);

                if (Options.verboseMode)
                {
                    Debug.Log(string.Format("MovieRecorder starting to write audio {0}ch @ {1}Hz", audioAttrs.channelCount, audioAttrs.sampleRate.numerator));
                }
            }
            else
            {
                if (Options.verboseMode)
                {
                    Debug.Log("MovieRecorder starting with no audio.");
                }
            }

            try
            {
                var path = m_Settings.fileNameGenerator.BuildAbsolutePath(session);

                m_Encoder = new MediaEncoder(path, videoAttrs, audioAttrsList.ToArray());
                return(true);
            }
            catch (Exception ex)
            {
                // Always surface encoder-creation failures with the exception
                // message; the previous code swallowed them silently unless
                // verbose mode was enabled, making failures hard to diagnose.
                Debug.LogError("MovieRecorder unable to create MovieEncoder. " + ex.Message);
            }

            return(false);
        }
 /// <summary>
 /// Convenience overload: constructs the encoder with a single audio track
 /// by delegating to the multi-track overload.
 /// </summary>
 public void Construct(string path, VideoTrackAttributes vAttr, AudioTrackAttributes aAttr)
 {
     var audioTracks = new[] { aAttr };
     Construct(path, vAttr, audioTracks);
 }
Example #12
0
        // Sets up everything needed to record this camera's output: the copy
        // shader/material/quad, an intermediate RenderTexture, the
        // MediaEncoder, and a command buffer that copies each rendered frame
        // into the intermediate texture after the camera finishes rendering.
        public void BeginRecording()
        {
            Debug.Log("VimeoRecorder: BeginRecording()");
            isRecording = true;

            _camera         = GetComponent <Camera>();
            encodedFilePath = Path.Combine(outputPath, "test-recording.mp4");

            Debug.Log(encodedFilePath);

            // Setup shader/material/quad (lazily created, reused on repeat runs)
            if (shaderCopy == null)
            {
                shaderCopy = Shader.Find("Hidden/FrameRecorder/CopyFrameBuffer");
            }

            if (matCopy == null)
            {
                matCopy = new Material(shaderCopy);
            }

            if (fullscreenQuad == null)
            {
                fullscreenQuad = VimeoRecorder.CreateFullscreenQuad();
            }

            // Get Camera data and prepare to send to buffer.
            // (x + 1) & ~1 rounds the dimension up to the next even value.
            int captureWidth  = (_camera.pixelWidth + 1) & ~1;
            int captureHeight = (_camera.pixelHeight + 1) & ~1;

            renderBuffer          = new RenderTexture(captureWidth, captureHeight, 0);
            renderBuffer.wrapMode = TextureWrapMode.Repeat;
            renderBuffer.Create();

            Debug.Log("WxH: " + captureWidth + "x" + captureHeight);

            // Configure encoder: 40 fps video at the (evened) camera
            // resolution plus a stereo 48 kHz audio track.
            videoAttrs = new VideoTrackAttributes
            {
                frameRate    = new MediaRational(40),
                width        = (uint)captureWidth,
                height       = (uint)captureHeight,
                includeAlpha = false
            };

            audioAttrs = new AudioTrackAttributes
            {
                sampleRate   = new MediaRational(48000),
                channelCount = 2,
                language     = "en"
            };

            encoder = new MediaEncoder(encodedFilePath, videoAttrs, audioAttrs);

            //sampleFramesPerVideoFrame = audioAttrs.channelCount * audioAttrs.sampleRate.numerator / videoAttrs.frameRate.numerator;
            //audioBuffer = new NativeArray<float>(sampleFramesPerVideoFrame, Allocator.Temp);

            // Setup the command buffer: blit the active frame buffer into a
            // temporary RT, then draw it into renderBuffer through the copy
            // material once the camera has rendered everything.
            // TODO: Support RenderTexture
            int tid = Shader.PropertyToID("_TmpFrameBuffer");

            commandBuffer      = new CommandBuffer();
            commandBuffer.name = "VimeoRecorder: copy frame buffer";

            commandBuffer.GetTemporaryRT(tid, -1, -1, 0, FilterMode.Bilinear);
            commandBuffer.Blit(BuiltinRenderTextureType.CurrentActive, tid);
            commandBuffer.SetRenderTarget(renderBuffer);
            commandBuffer.DrawMesh(fullscreenQuad, Matrix4x4.identity, matCopy, 0, 0);
            commandBuffer.ReleaseTemporaryRT(tid);

            _camera.AddCommandBuffer(CameraEvent.AfterEverything, commandBuffer);
        }
    /// <summary>
    /// Plays the assigned VideoClip on this object's renderer while frames
    /// are captured via the frameReady callback (OnNewFrame), then
    /// re-encodes the captured textures (texList) into an .mp4 under
    /// Assets/Resources/ConvertVideo.
    /// </summary>
    IEnumerator playVideo()
    {
        rend = GetComponent<MeshRenderer>();

        videoPlayer = gameObject.AddComponent<VideoPlayer>();
        audioSource = gameObject.AddComponent<AudioSource>();

        // Disable Play on Awake for both Video and Audio.
        videoPlayer.playOnAwake = false;
        audioSource.playOnAwake = false;

        videoPlayer.source          = VideoSource.VideoClip;
        videoPlayer.audioOutputMode = VideoAudioOutputMode.AudioSource;
        videoPlayer.EnableAudioTrack(0, true);
        videoPlayer.SetTargetAudioSource(0, audioSource);

        // Set the clip and prepare it up front to prevent buffering.
        videoPlayer.clip = videoToPlay;
        videoPlayer.Prepare();

        // Wait until the video is prepared.
        while (!videoPlayer.isPrepared)
        {
            yield return null;
        }

        // Show the decoded video on this renderer's material.
        tex = videoPlayer.texture;
        rend.material.mainTexture = tex;

        // Capture each decoded frame through OnNewFrame.
        videoPlayer.sendFrameReadyEvents = true;
        videoPlayer.frameReady += OnNewFrame;

        videoPlayer.Play();
        audioSource.Play();

        while (videoPlayer.isPlaying)
        {
            Debug.LogWarning("Video Time: " + Mathf.FloorToInt((float)videoPlayer.time));
            Debug.Log("Playing Video");
            yield return null;
        }

        Debug.Log("Done Playing Video");

        // Re-encode the captured (distortion-processed) frames into a movie.
        VideoTrackAttributes videoAttr = new VideoTrackAttributes
        {
            frameRate = new MediaRational((int)videoPlayer.frameRate),
            width     = videoPlayer.width,
            height    = videoPlayer.height,
        };

        // A stereo 48 kHz audio track is declared but no samples are
        // written, so the output file carries an empty audio track.
        AudioTrackAttributes audioAttr = new AudioTrackAttributes
        {
            sampleRate   = new MediaRational(48000),
            channelCount = 2,
            language     = "fr"
        };

        // Output path for the converted movie.
        string encodedFilePath = Path.Combine(Application.dataPath + "/Resources/ConvertVideo", "my_movie.mp4");

        // 'using' guarantees the encoder is disposed (and the file finalized)
        // even if encoding is interrupted; the previous code only called
        // Dispose on the success path.
        using (MediaEncoder encoder = new MediaEncoder(encodedFilePath, videoAttr, audioAttr))
        {
            for (int i = 0; i < texList.Count; ++i)
            {
                Debug.Log("Encoding tex num " + (i + 1) + " / " + texList.Count);
                encoder.AddFrame(texList[i]);
                yield return null;
            }
        }

        Debug.Log("Convert To Video Complete");
    }
        // Starts a movie recording session: validates the output directory,
        // input, resolution and transparency support for the currently
        // selected encoder, then registers and constructs that encoder from
        // a list of IMediaEncoderAttribute entries. Returns false on failure.
        protected internal override bool BeginRecording(RecordingSession session)
        {
            // Assume failure until the encoder is fully constructed.
            m_RecordingStartedProperly = false;
            if (!base.BeginRecording(session))
            {
                return(false);
            }

            // The destination directory must exist before encoding starts.
            try
            {
                Settings.fileNameGenerator.CreateDirectory(session);
            }
            catch (Exception)
            {
                Debug.LogError(string.Format("Movie recorder output directory \"{0}\" could not be created.", Settings.fileNameGenerator.BuildAbsolutePath(session)));
                return(false);
            }

            var input = m_Inputs[0] as BaseRenderTextureInput;

            if (input == null)
            {
                Debug.LogError("MediaRecorder could not find input.");
                return(false);
            }
            int width  = input.OutputWidth;
            int height = input.OutputHeight;

            if (width <= 0 || height <= 0)
            {
                Debug.LogError(string.Format("MovieRecorder got invalid input resolution {0} x {1}.", width, height));
                return(false);
            }

            // Ask the selected encoder whether it supports this resolution.
            var    currentEncoderReg = Settings.GetCurrentEncoder();
            string erroMessage; // NOTE(review): typo for "errorMessage" (kept as-is)

            if (!currentEncoderReg.SupportsResolution(Settings, width, height, out erroMessage))
            {
                Debug.LogError(erroMessage);
                return(false);
            }

            var imageInputSettings = m_Inputs[0].settings as ImageInputSettings;

            // Alpha is recorded only when the input supports and requests
            // transparency AND the encoder can handle it.
            var alphaWillBeInImage = imageInputSettings != null && imageInputSettings.SupportsTransparent && imageInputSettings.RecordTransparency;

            if (alphaWillBeInImage && !currentEncoderReg.SupportsTransparency(Settings, out erroMessage))
            {
                Debug.LogError(erroMessage);
                return(false);
            }

            // In variable frame rate mode, we set the encoder to the frame rate of the current display.
            m_FrameRate = RationalFromDouble(
                session.settings.FrameRatePlayback == FrameRatePlayback.Variable
                    ? GameHarness.DisplayFPSTarget
                    : session.settings.FrameRate);

            var videoAttrs = new VideoTrackAttributes
            {
                width        = (uint)width,
                height       = (uint)height,
                frameRate    = m_FrameRate,
                includeAlpha = alphaWillBeInImage,
                bitRateMode  = Settings.VideoBitRateMode
            };

            Debug.Log($"(UnityRecorder/MovieRecorder) Encoding video " +
                      $"{width}x{height}@[{videoAttrs.frameRate.numerator}/{videoAttrs.frameRate.denominator}] fps into " +
                      $"{Settings.fileNameGenerator.BuildAbsolutePath(session)}");

            var audioInput     = (AudioInputBase)m_Inputs[1];
            var audioAttrsList = new List <AudioTrackAttributes>();

            // Add an audio track only when the input preserves audio.
            if (audioInput.audioSettings.PreserveAudio)
            {
#if UNITY_EDITOR_OSX
                // Special case with WebM and audio on older Apple computers: deactivate async GPU readback because there
                // is a risk of not respecting the WebM standard and receiving audio frames out of sync (see "monotonically
                // increasing timestamps"). This happens only with Target Cameras.
                if (m_Inputs[0].settings is CameraInputSettings && Settings.OutputFormat == VideoRecorderOutputFormat.WebM)
                {
                    UseAsyncGPUReadback = false;
                }
#endif
                var audioAttrs = new AudioTrackAttributes
                {
                    sampleRate = new MediaRational
                    {
                        numerator   = audioInput.sampleRate,
                        denominator = 1
                    },
                    channelCount = audioInput.channelCount,
                    language     = ""
                };

                audioAttrsList.Add(audioAttrs);

                if (RecorderOptions.VerboseMode)
                {
                    Debug.Log(string.Format("MovieRecorder starting to write audio {0}ch @ {1}Hz", audioAttrs.channelCount, audioAttrs.sampleRate.numerator));
                }
            }
            else
            {
                if (RecorderOptions.VerboseMode)
                {
                    Debug.Log("MovieRecorder starting with no audio.");
                }
            }

            try
            {
                var path = Settings.fileNameGenerator.BuildAbsolutePath(session);

                // If an encoder already exist destroy it
                Settings.DestroyIfExists(m_EncoderHandle);

                // Get the currently selected encoder register and create an encoder
                m_EncoderHandle = currentEncoderReg.Register(Settings.m_EncoderManager);

                // Create the list of attributes for the encoder, Video, Audio and preset
                // TODO: Query the list of attributes from the encoder attributes
                var attr = new List <IMediaEncoderAttribute>();
                attr.Add(new VideoTrackMediaEncoderAttribute("VideoAttributes", videoAttrs));

                if (audioInput.audioSettings.PreserveAudio)
                {
                    if (audioAttrsList.Count > 0)
                    {
                        attr.Add(new AudioTrackMediaEncoderAttribute("AudioAttributes", audioAttrsList.ToArray()[0]));
                    }
                }

                attr.Add(new IntAttribute(AttributeLabels[MovieRecorderSettingsAttributes.CodecFormat], Settings.encoderPresetSelected));
                attr.Add(new IntAttribute(AttributeLabels[MovieRecorderSettingsAttributes.ColorDefinition], Settings.encoderColorDefinitionSelected));

                if (Settings.encoderPresetSelectedName == "Custom")
                {
                    // For custom
                    attr.Add(new StringAttribute(AttributeLabels[MovieRecorderSettingsAttributes.CustomOptions], Settings.encoderCustomOptions));
                }
                // Construct the encoder given the list of attributes
                Settings.m_EncoderManager.Construct(m_EncoderHandle, path, attr);

                s_ConcurrentCount++;

                m_RecordingStartedProperly = true;
                return(true);
            }
            catch (Exception ex)
            {
                Debug.LogError("MovieRecorder unable to create MovieEncoder. " + ex.Message);
                return(false);
            }
        }
Example #15
0
        /// <summary>
        /// Starts a movie recording session: validates the output directory,
        /// input, resolution and MP4 constraints, builds the video/audio
        /// track attributes, and creates the MediaEncoder.
        /// </summary>
        protected internal override bool BeginRecording(RecordingSession session)
        {
            if (!base.BeginRecording(session))
            {
                return(false);
            }

            // The destination directory must exist before encoding starts.
            try
            {
                Settings.fileNameGenerator.CreateDirectory(session);
            }
            catch (Exception)
            {
                Debug.LogError(string.Format("Movie recorder output directory \"{0}\" could not be created.", Settings.fileNameGenerator.BuildAbsolutePath(session)));
                return(false);
            }

            var input = m_Inputs[0] as BaseRenderTextureInput;

            if (input == null)
            {
                Debug.LogError("MediaRecorder could not find input.");
                return(false);
            }
            int width  = input.OutputWidth;
            int height = input.OutputHeight;

            if (width <= 0 || height <= 0)
            {
                Debug.LogError(string.Format("MovieRecorder got invalid input resolution {0} x {1}.", width, height));
                return(false);
            }

            // MP4/H.264 constraints: dimensions above 4096 may be rejected,
            // and odd dimensions are not supported at all.
            if (Settings.OutputFormat == MovieRecorderSettings.VideoRecorderOutputFormat.MP4)
            {
                if (width > 4096 || height > 4096)
                {
                    Debug.LogWarning(string.Format("Mp4 format might not support resolutions bigger than 4096. Current resolution: {0} x {1}.", width, height));
                }

                if (width % 2 != 0 || height % 2 != 0)
                {
                    Debug.LogError(string.Format("Mp4 format does not support odd values in resolution. Current resolution: {0} x {1}.", width, height));
                    return(false);
                }
            }

            var imageInputSettings = m_Inputs[0].settings as ImageInputSettings;

            // Alpha is only recorded when the input both supports and allows
            // transparency — and never for MP4, which has no alpha channel.
            var includeAlphaFromTexture = imageInputSettings != null && imageInputSettings.SupportsTransparent && imageInputSettings.AllowTransparency;

            if (includeAlphaFromTexture && Settings.OutputFormat == MovieRecorderSettings.VideoRecorderOutputFormat.MP4)
            {
                Debug.LogWarning("Mp4 format does not support alpha.");
                includeAlphaFromTexture = false;
            }

            var videoAttrs = new VideoTrackAttributes
            {
                frameRate    = RationalFromDouble(session.settings.FrameRate),
                width        = (uint)width,
                height       = (uint)height,
                includeAlpha = includeAlphaFromTexture,
                bitRateMode  = Settings.VideoBitRateMode
            };

            if (RecorderOptions.VerboseMode)
            {
                Debug.Log(
                    string.Format(
                        "MovieRecorder starting to write video {0}x{1}@[{2}/{3}] fps into {4}",
                        width, height, videoAttrs.frameRate.numerator,
                        videoAttrs.frameRate.denominator, Settings.fileNameGenerator.BuildAbsolutePath(session)));
            }

            var audioInput     = (AudioInput)m_Inputs[1];
            var audioAttrsList = new List <AudioTrackAttributes>();

            // Add an audio track only when the input preserves audio.
            if (audioInput.audioSettings.PreserveAudio)
            {
#if UNITY_EDITOR_OSX
                // Special case with WebM and audio on older Apple computers: deactivate async GPU readback because there
                // is a risk of not respecting the WebM standard and receiving audio frames out of sync (see "monotonically
                // increasing timestamps"). This happens only with Target Cameras.
                if (m_Inputs[0].settings is CameraInputSettings && Settings.OutputFormat == MovieRecorderSettings.VideoRecorderOutputFormat.WebM)
                {
                    UseAsyncGPUReadback = false;
                }
#endif
                var audioAttrs = new AudioTrackAttributes
                {
                    sampleRate = new MediaRational
                    {
                        numerator   = audioInput.sampleRate,
                        denominator = 1
                    },
                    channelCount = audioInput.channelCount,
                    language     = ""
                };

                audioAttrsList.Add(audioAttrs);

                if (RecorderOptions.VerboseMode)
                {
                    Debug.Log(string.Format("MovieRecorder starting to write audio {0}ch @ {1}Hz", audioAttrs.channelCount, audioAttrs.sampleRate.numerator));
                }
            }
            else
            {
                if (RecorderOptions.VerboseMode)
                {
                    Debug.Log("MovieRecorder starting with no audio.");
                }
            }

            try
            {
                var path = Settings.fileNameGenerator.BuildAbsolutePath(session);

                m_Encoder = new MediaEncoder(path, videoAttrs, audioAttrsList.ToArray());
                return(true);
            }
            catch (Exception ex)
            {
                // Always surface encoder-creation failures with the exception
                // message; the previous code swallowed them silently unless
                // verbose mode was enabled, making failures hard to diagnose.
                Debug.LogError("MovieRecorder unable to create MovieEncoder. " + ex.Message);
            }

            return(false);
        }
Пример #16
0
    /// <summary>
    /// Plays <c>videoToPlay</c> on this object's renderer, captures every decoded
    /// frame through the <c>frameReady</c> callback (OnNewFrame is expected to fill
    /// <c>texList</c>), then re-encodes the captured frames into an MP4 file under
    /// Assets/Resources/ConvertVideo.
    /// </summary>
    IEnumerator playVideo()
    {
        Debug.Log(Application.dataPath);
        rend = GetComponent <Renderer>();

        videoPlayer = gameObject.AddComponent <VideoPlayer>();
        audioSource = gameObject.AddComponent <AudioSource>();

        // Disable Play On Awake for both video and audio.
        videoPlayer.playOnAwake = false;
        audioSource.playOnAwake = false;

        videoPlayer.source          = VideoSource.VideoClip;
        videoPlayer.audioOutputMode = VideoAudioOutputMode.AudioSource;
        videoPlayer.EnableAudioTrack(0, true);
        videoPlayer.SetTargetAudioSource(0, audioSource);

        // Set the clip to play, then prepare it to prevent buffering.
        videoPlayer.clip = videoToPlay;
        videoPlayer.Prepare();

        // Wait until the video is prepared.
        while (!videoPlayer.isPrepared)
        {
            yield return(null);
        }

        // Assign the texture from the video to the material texture.
        tex = videoPlayer.texture;
        rend.material.mainTexture = tex;

        videoPlayer.sendFrameReadyEvents = true;

        videoPlayer.frameReady += OnNewFrame;

        videoPlayer.Play();

        audioSource.Play();

        Debug.Log("Playing Video");

        // Wait until OnNewFrame has captured every frame of the clip.
        while (texList.Count != (int)videoPlayer.frameCount)
        {
            yield return(null);
        }
        Debug.Log("Done Playing Video");

        // FIX: unsubscribe the handler and stop frame-ready events so the
        // callback is not invoked again if the player is replayed, and the
        // delegate does not keep this component alive.
        videoPlayer.frameReady -= OnNewFrame;
        videoPlayer.sendFrameReadyEvents = false;

        ///////////////////////////////////////////
        // Re-encode the captured (processed) frames to a video file.
        ///////////////////////////////////////////

        // FIX: the original cast truncated fractional frame rates
        // (e.g. 29.97 fps became 29), drifting audio/video sync in the output.
        // Represent the rate as a rational with a /1000 denominator instead.
        VideoTrackAttributes videoAttr = new VideoTrackAttributes
        {
            frameRate = new MediaRational
            {
                numerator   = Mathf.RoundToInt(videoPlayer.frameRate * 1000f),
                denominator = 1000
            },
            width  = videoPlayer.width,
            height = videoPlayer.height,
        };

        AudioTrackAttributes audioAttr = new AudioTrackAttributes
        {
            sampleRate   = new MediaRational(48000),
            channelCount = 2,
            language     = "fr"
        };

        // Output path for the generated movie.
        string encodedFilePath = Path.Combine(Application.dataPath + "/Resources/ConvertVideo", "my_movie.mp4");

        // FIX: ensure the target folder exists — MediaEncoder fails if the
        // directory is missing. CreateDirectory is a no-op when it exists.
        Directory.CreateDirectory(Path.GetDirectoryName(encodedFilePath));

        MediaEncoder encoder = new MediaEncoder(encodedFilePath, videoAttr, audioAttr);
        try
        {
            for (int i = 0; i < texList.Count; ++i)
            {
                Debug.Log("Encoding tex num " + (i + 1) + " / " + texList.Count);
                encoder.AddFrame(texList[i]);
                yield return(null);
            }
        }
        finally
        {
            // FIX: always dispose, even if AddFrame throws, so the file is
            // finalized and the native encoder handle is released.
            encoder.Dispose();
        }

        Debug.Log("Convert To Video Complete");
    }