/// <summary>
/// Smoke-tests a full video transcode: probes <paramref name="filename"/> for its
/// video/audio attributes, logs them, then re-encodes the entire clip while applying
/// a fade-in to every sample, and finally logs where the result was written.
/// </summary>
/// <param name="filename">Path of the source video file to transcode.</param>
void TranscodeVideoTest(string filename)
{
    // MFSystem.Start() initialises the media subsystem; disposing shuts it down again.
    using (MFSystem.Start())
    {
        var details = VideoAttributes.TestFor(filename);
        TraceInfo.WriteLine(
            "Frame Rate: {0}, Frame Size: {1}x{2}, Video: {3} @ {4}Mbs, Audio: {5}, {6}Khz @ {7}Kbs, ".F(
                details.FrameRate,
                details.FrameSize.Width,
                details.FrameSize.Height,
                details.VideoEncoding,
                // 0 is used as "unknown bit rate" — display a placeholder instead.
                details.BitRate == 0 ? "-- " : details.BitRate.ToString(),
                details.AudioEncoding,
                details.AudioSamplesPerSecond / 1000,
                details.AudioAverageBytesPerSecond / 1000));

        // Fix: corrected "Begining" -> "Beginning" typo in the progress message.
        TraceInfo.WriteLine("Beginning video re-encoding.");

        // Pump every sample from the first reader through a fade-in and into the sink.
        details.Transcoder.ProcessVideo((readers, saveToSink) =>
        {
            readers.First().SourceReader.Samples(AVOperations.FadeIn(saveToSink));
        });

        TraceInfo.WriteLine(
            "Video converted. Review the video file {0} to confirm it looks OK.",
            details.Transcoder.DestinationFile);
        TraceInfo.WriteLine("Success!");
    }
}
/// <summary>
/// Smoke-tests a partial video transcode: probes the file's attributes, logs them,
/// then re-encodes with a fade-in, reporting progress once per second of video and
/// truncating the conversion after the first 10 seconds.
/// </summary>
/// <param name="filename">Path of the source video file to transcode.</param>
void TranscodeVideoTest(string filename)
{
    // Fix: removed the unused local `List<int> supportedAudioBitRates` — it was
    // declared and never read or written anywhere in the method.
    using (MFSystem.Start())
    {
        var details = VideoAttributes.TestFor(filename);
        TraceInfo.WriteLine(
            "Frame Rate: {0}, Frame Size: {1}x{2}, Video: {3} @ {4}Mbs, Audio: {5}, {6}Khz @ {7}Kbs, ".F(
                details.FrameRate,
                details.FrameSize.Width,
                details.FrameSize.Height,
                details.VideoEncoding,
                // 0 is used as "unknown bit rate" — display a placeholder instead.
                details.BitRate == 0 ? "-- " : details.BitRate.ToString(),
                details.AudioEncoding,
                details.AudioSamplesPerSecond / 1000,
                details.AudioAverageBytesPerSecond / 1000));

        // Fix: corrected "Begining" -> "Beginning" typo in the progress message.
        TraceInfo.WriteLine("Beginning video re-encoding.");

        details.Transcoder.ProcessVideo((readers, saveToSink) =>
        {
            int lastSecond = 0;
            var fn = AVOperations.FadeIn(saveToSink);
            readers.First().SourceReader.Samples(sample =>
            {
                if (sample.Stream.CurrentMediaType.IsVideo && sample.Sample != null)
                {
                    var s = (int)sample.Sample.SampleTime.FromNanoToSeconds();
                    // Log progress only when the whole-second mark changes.
                    if (s != lastSecond)
                    {
                        TraceInfo.WriteLine("Converted: {0} seconds", s);
                    }
                    lastSecond = s;
                    // Truncate the test run: returning false stops the sample pump
                    // once more than 10 seconds of video have been converted.
                    if (s > 10)
                    {
                        return (false);
                    }
                }
                return (fn(sample));
            });
        });

        TraceInfo.WriteLine(
            "Video converted. Review the video file {0} to confirm it looks OK.",
            details.Transcoder.DestinationFile);
        TraceInfo.WriteLine("Success!");
    }
}
// Handles edits to the source-video path textbox: persists the path, validates the
// file, probes its media attributes, and shows the results/errors in the UI.
// On a read failure it starts a 5-second retry timer that re-invokes this handler
// (the file may still be being written by the capture process — TODO confirm).
// NOTE(review): also wired as the Tick handler of lookForAudioBitRates, so `sender`
// may be a Timer rather than the textbox.
void sourceVideoTextBox_TextChanged(object sender, EventArgs e) {
    // Remember the last-used video file across sessions.
    Settings.Default.lastVideoFile = sourceVideoTextBox.Text;
    Settings.Default.Save();
    // Cancel any pending retry timer before re-validating from scratch.
    if (lookForAudioBitRates != null) {
        lookForAudioBitRates.Stop();
        lookForAudioBitRates.Dispose();
        lookForAudioBitRates = null;
    }
    OnGameDataFileChanged();
    // Clear all three message areas (format details, error, warning) before validating.
    SetTanscodeMessage("", "", "");
    if (sourceVideoTextBox.Text.Trim() == "") {
        return;
    }
    if (!File.Exists(sourceVideoTextBox.Text)) {
        SetTanscodeMessage(sourceVideoFileErrorMessage: "*File does not exist");
        return;
    }
    try {
        // The textbox points at an overlay-data file which in turn references the
        // captured video file(s); validate the most recent one.
        var data = OverlayData.FromFile(sourceVideoTextBox.Text);
        var fileName = data.VideoFiles.Last().FileName;
        if (!File.Exists(fileName)) {
            SetTanscodeMessage(sourceVideoFileErrorMessage: "*Captured video does not exist: " + fileName);
            return;
        }
        // Warn (but do not block) when the capture was made by a different app version.
        var currentVersion = Assembly.GetExecutingAssembly().GetName().Version.ToString();
        if (data.CapturedVersion != null && data.CapturedVersion != currentVersion) {
            SetTanscodeMessage(warningDetails: "*Video was captured with version {0}.\nIt is recommended to transcode and capture using the same version.\nTranscoding may not work.".F(data.CapturedVersion));
        }
        // Probe the media attributes and surface them (plus any probe error) in the UI.
        var details = VideoAttributes.TestFor(data);
        SetTanscodeMessage(formatDetails: "Frame Rate: {0}, Frame Size: {1}x{2}, Video: {3} @ {4}Mbs, Audio: {5}, {6}Khz @ {7}Kbs, ".F (details.FrameRate, details.FrameSize.Width, details.FrameSize.Height, details.VideoEncoding, details.BitRate == 0 ? "-- " : details.BitRate.ToString(), details.AudioEncoding, details.AudioSamplesPerSecond / 1000, details.AudioAverageBytesPerSecond / 1000), sourceVideoFileErrorMessage: details.ErrorMessage);
    } catch (Exception ex) {
        // Reading failed — show the error and retry this whole handler every 5 seconds.
        SetTanscodeMessage(sourceVideoFileErrorMessage: "*Error reading the video file. {0}".F(ex.Message));
        lookForAudioBitRates = new System.Windows.Forms.Timer();
        lookForAudioBitRates.Tick += sourceVideoTextBox_TextChanged;
        lookForAudioBitRates.Interval = 5000;
        lookForAudioBitRates.Start();
    }
}
/// <summary>
/// Applies an attribute operation to a rectangular video region by forwarding to the
/// underlying control object, converting each enum to its raw integer code.
/// Presumably <c>VerifyResult</c> throws on a non-success code — confirm.
/// </summary>
public override void UpdateVideoRegionAttribute(DeviceUnits units, VideoAttributeCommand attributeFunction, int row, int column, int height, int width, VideoAttributes attribute)
{
    var result = _cco.UpdateVideoRegionAttribute(
        (int)units, (int)attributeFunction, row, column, height, width, (int)attribute);
    VerifyResult(result);
}
/// <summary>
/// Draws a box on the display by forwarding to the underlying control object,
/// converting each enum to its raw integer code.
/// Presumably <c>VerifyResult</c> throws on a non-success code — confirm.
/// </summary>
public override void DrawBox(DeviceUnits units, int row, int column, int height, int width, VideoAttributes attribute, BorderType borderType)
{
    var result = _cco.DrawBox(
        (int)units, row, column, height, width, (int)attribute, (int)borderType);
    VerifyResult(result);
}
/// <summary>
/// Displays text at the given position by forwarding to the underlying control
/// object, converting each enum to its raw integer code.
/// Presumably <c>VerifyResult</c> throws on a non-success code — confirm.
/// </summary>
public override void DisplayData(DeviceUnits units, int row, int column, VideoAttributes attribute, string data)
{
    var result = _cco.DisplayData((int)units, row, column, (int)attribute, data);
    VerifyResult(result);
}
/// <summary>
/// Issues a clock-control command (start/stop/set, etc., per
/// <paramref name="clockFunction"/>) by forwarding to the underlying control
/// object, converting each enum to its raw integer code.
/// Presumably <c>VerifyResult</c> throws on a non-success code — confirm.
/// </summary>
public override void ControlClock(DeviceUnits units, ClockFunction clockFunction, int clockId, int hours, int minutes, int seconds, int row, int column, VideoAttributes attribute, ClockMode mode)
{
    var result = _cco.ControlClock(
        (int)units, (int)clockFunction, clockId,
        hours, minutes, seconds,
        row, column, (int)attribute, (int)mode);
    VerifyResult(result);
}
/// <summary>
/// Clears a rectangular video region by forwarding to the underlying control
/// object, converting each enum to its raw integer code.
/// Presumably <c>VerifyResult</c> throws on a non-success code — confirm.
/// </summary>
public override void ClearVideoRegion(DeviceUnits units, int row, int column, int height, int width, VideoAttributes attribute)
{
    var result = _cco.ClearVideoRegion(
        (int)units, row, column, height, width, (int)attribute);
    VerifyResult(result);
}
/// <summary>
/// Clears the entire video display by forwarding to the underlying control
/// object, converting each enum to its raw integer code.
/// Presumably <c>VerifyResult</c> throws on a non-success code — confirm.
/// </summary>
public override void ClearVideo(DeviceUnits units, VideoAttributes attribute)
{
    var result = _cco.ClearVideo((int)units, (int)attribute);
    VerifyResult(result);
}
/// <summary>
/// Coroutine: configures <paramref name="videoContainer"/> to play the video described
/// by <paramref name="videoAttributes"/> — a RawImage fed by a RenderTexture that a
/// VideoPlayer renders into — then yields until playback stops.
/// </summary>
IEnumerator LoadAndPlayVideo(VideoAttributes videoAttributes, GameObject videoContainer)
{
    // AddComponent<RawImage> also adds the RectTransform we position below.
    var rawImage = videoContainer.AddComponent<RawImage>();
    var rectTransform = videoContainer.GetComponent<RectTransform>();
    rectTransform.localScale = videoAttributes.scale;
    rectTransform.rotation = videoAttributes.rotation;
    rectTransform.position = videoAttributes.position;
    rectTransform.sizeDelta = videoAttributes.sizeDelta;
    rawImage.color = videoAttributes.color;

    var videoPlayer = videoContainer.AddComponent<UnityEngine.Video.VideoPlayer>();
    var audioSource = videoContainer.AddComponent<AudioSource>();

    // Fix: build the full URL in a local instead of writing it back into
    // videoAttributes.path — the original mutated the caller's object, so playing
    // the same VideoAttributes twice double-prefixed the streaming-assets path.
    // Plain concatenation kept on purpose: the original noted Path.Combine misbehaved here.
    var videoPath = Application.streamingAssetsPath + videoAttributes.path;
    videoPlayer.url = videoPath;

    // Render target sized to the on-screen rect; depth buffer of 24 bits.
    var renderTexture = new RenderTexture(
        (int)videoAttributes.sizeDelta.x,
        (int)videoAttributes.sizeDelta.y,
        24);
    videoPlayer.targetTexture = renderTexture;
    rawImage.texture = renderTexture;

    videoPlayer.playOnAwake = true;
    videoPlayer.isLooping = videoAttributes.loop;
    videoPlayer.waitForFirstFrame = true;

    // Map the integer `fit` code onto Unity's aspect-ratio modes; anything
    // out of range falls back to NoScaling (same as code 0).
    switch (videoAttributes.fit)
    {
        case 0: videoPlayer.aspectRatio = VideoAspectRatio.NoScaling; break;
        case 1: videoPlayer.aspectRatio = VideoAspectRatio.Stretch; break;
        case 2: videoPlayer.aspectRatio = VideoAspectRatio.FitHorizontally; break;
        case 3: videoPlayer.aspectRatio = VideoAspectRatio.FitVertically; break;
        case 4: videoPlayer.aspectRatio = VideoAspectRatio.FitInside; break;
        case 5: videoPlayer.aspectRatio = VideoAspectRatio.FitOutside; break;
        default: videoPlayer.aspectRatio = VideoAspectRatio.NoScaling; break;
    }

    videoPlayer.renderMode = UnityEngine.Video.VideoRenderMode.RenderTexture;
    videoPlayer.audioOutputMode = UnityEngine.Video.VideoAudioOutputMode.AudioSource;
    videoPlayer.SetTargetAudioSource(0, audioSource);

    // NOTE(review): this window object is populated but never consumed or registered
    // within this method — confirm whether it is needed at all.
    var videoWindow = new SM4VideoWindow();
    videoWindow.window = videoContainer;
    videoWindow.rawImage = rawImage;
    videoWindow.videoPlayer = videoPlayer;
    videoWindow.audioSource = audioSource;
    videoWindow.renderTexture = renderTexture;

    // NOTE(review): with playOnAwake, isPlaying may still be false on the first
    // check (the player may not have started yet), which would end this loop
    // immediately — consider Prepare()/Play() with an explicit wait; confirm.
    while (videoPlayer.isPlaying)
    {
        yield return (null);
    }

    // NOTE(review): renderTexture (and the container's components) are never
    // released here — potential leak if many videos are played; confirm intended.
}
/// <summary>
/// Creates an empty child container under this transform and starts the
/// <see cref="LoadAndPlayVideo"/> coroutine to play the described video in it.
/// </summary>
public void InstantiateVideo(VideoAttributes videoAttributes)
{
    // Fix: Instantiate(new GameObject(), transform) created TWO objects — the
    // template from `new GameObject()` was left orphaned at the scene root and
    // only its clone was parented. Create a single child container directly.
    var videoContainer = new GameObject("VideoContainer");
    // worldPositionStays:false matches Instantiate(original, parent)'s default
    // of keeping the local (not world) position.
    videoContainer.transform.SetParent(transform, false);
    StartCoroutine(LoadAndPlayVideo(videoAttributes, videoContainer));
}