/// <summary>
/// Integration test: re-encodes <paramref name="filename"/> applying a fade-in to every
/// sample from the first source reader, then prompts the reviewer to inspect the output file.
/// </summary>
/// <param name="filename">Path of the source video to transcode.</param>
void TranscodeVideoTest(string filename)
{
    // MFSystem.Start() presumably initialises Media Foundation; disposed on scope exit.
    using (MFSystem.Start())
    {
        var details = VideoAttributes.TestFor(filename);

        TraceInfo.WriteLine(
            "Frame Rate: {0}, Frame Size: {1}x{2}, Video: {3} @ {4}Mbs, Audio: {5}, {6}Khz @ {7}Kbs, ".F(
                details.FrameRate,
                details.FrameSize.Width,
                details.FrameSize.Height,
                details.VideoEncoding,
                details.BitRate == 0 ? "-- " : details.BitRate.ToString(),
                details.AudioEncoding,
                details.AudioSamplesPerSecond / 1000,
                details.AudioAverageBytesPerSecond / 1000));

        // Fixed typo in trace message: "Begining" -> "Beginning".
        TraceInfo.WriteLine("Beginning video re-encoding.");

        details.Transcoder.ProcessVideo((readers, saveToSink) =>
        {
            // Route all samples from the first reader through a fade-in into the sink.
            readers.First().SourceReader.Samples(AVOperations.FadeIn(saveToSink));
        });

        TraceInfo.WriteLine("Video converted. Review the video file {0} to confirm it looks OK.", details.Transcoder.DestinationFile);
        TraceInfo.WriteLine("Success!");
    }
}
/// <summary>
/// Integration test: re-encodes the first 10 seconds of <paramref name="filename"/> with a
/// fade-in, tracing per-second progress, then prompts the reviewer to inspect the output.
/// </summary>
/// <param name="filename">Path of the source video to transcode.</param>
void TranscodeVideoTest(string filename)
{
    // Removed unused local: List<int> supportedAudioBitRates was declared but never read.
    using (MFSystem.Start())
    {
        var details = VideoAttributes.TestFor(filename);

        TraceInfo.WriteLine(
            "Frame Rate: {0}, Frame Size: {1}x{2}, Video: {3} @ {4}Mbs, Audio: {5}, {6}Khz @ {7}Kbs, ".F(
                details.FrameRate,
                details.FrameSize.Width,
                details.FrameSize.Height,
                details.VideoEncoding,
                details.BitRate == 0 ? "-- " : details.BitRate.ToString(),
                details.AudioEncoding,
                details.AudioSamplesPerSecond / 1000,
                details.AudioAverageBytesPerSecond / 1000));

        // Fixed typo in trace message: "Begining" -> "Beginning".
        TraceInfo.WriteLine("Beginning video re-encoding.");

        details.Transcoder.ProcessVideo((readers, saveToSink) =>
        {
            int lastSecond = 0;
            var fn = AVOperations.FadeIn(saveToSink);

            readers.First().SourceReader.Samples(sample =>
            {
                if (sample.Stream.CurrentMediaType.IsVideo && sample.Sample != null)
                {
                    var s = (int)sample.Sample.SampleTime.FromNanoToSeconds();

                    // Trace once each time the integral second advances.
                    if (s != lastSecond)
                    {
                        TraceInfo.WriteLine("Converted: {0} seconds", s);
                    }
                    lastSecond = s;

                    // Stop after 10 seconds of video — this is a quick smoke test.
                    if (s > 10)
                    {
                        return(false);
                    }
                }
                return(fn(sample));
            });
        });

        TraceInfo.WriteLine("Video converted. Review the video file {0} to confirm it looks OK.", details.Transcoder.DestinationFile);
        TraceInfo.WriteLine("Success!");
    }
}
/// <summary>
/// Runs the transcode pipeline: optional intro video, overlays, fades and (for highlights)
/// race-event edits, writing the result to <paramref name="transcoder"/>'s destination file.
/// </summary>
/// <param name="transcoder">Owns source readers and the destination sink.</param>
/// <param name="highlights">True to cut a highlights reel; false for the full replay.</param>
/// <param name="monitorProgress">Optional progress callback; null disables monitoring.</param>
/// <param name="isAborted">Polled by the concat operations to allow cancellation.</param>
void Process(Transcoder transcoder, bool highlights, Action <long, long> monitorProgress, Func <bool> isAborted)
{
    try
    {
        TraceInfo.WriteLineIf(highlights, "Transcoding highlights to {0}", transcoder.DestinationFile);
        TraceInfo.WriteLineIf(!highlights, "Transcoding full replay to {0}", transcoder.DestinationFile);

        transcoder.ProcessVideo((readers, saveToSink) =>
        {
            var writeToSink = monitorProgress == null ? saveToSink : MonitorProgress(saveToSink);

            // Intro segments get fade in/out; the main body gets edits (highlights only).
            var fadeSegments = AVOperations.FadeIn(AVOperations.FadeOut(writeToSink));
            var edits = highlights ? ApplyEdits(writeToSink) : writeToSink;
            var mainBodyOverlays = AVOperations.Overlay(applyRaceDataOverlay, edits);
            var introOverlay = AVOperations.Overlay(applyIntroOverlay, fadeSegments);

            var sourceReaderExtra = readers.FirstOrDefault(r => ((CapturedVideoFile)r.State).isIntroVideo);
            if (sourceReaderExtra != null)
            {
                var introSourceReader = sourceReaderExtra.SourceReader;

                // NOTE(review): Skip(1) assumes the intro video is always the FIRST reader,
                // while FirstOrDefault above searches the whole list — confirm orderings agree.
                var mainReaders = AVOperations.Combine(readers.Skip(1).Select(r => r.SourceReader).ToArray(), Settings.Default.VideoSplitGap);
                totalDuration += introSourceReader.Duration + mainReaders.Duration;

                AVOperations.StartConcat(introSourceReader, introOverlay, AVOperations.Concat(mainReaders, mainBodyOverlays, isAborted), isAborted);
            }
            else
            {
                var mainReaders = AVOperations.Combine(readers.Select(r => r.SourceReader).ToArray(), Settings.Default.VideoSplitGap);
                totalDuration += mainReaders.Duration;

                AVOperations.Concat(mainReaders, mainBodyOverlays, isAborted)(0, 0);
            }
        });

        TraceInfo.WriteLineIf(highlights, "Done Transcoding highlights to {0}", transcoder.DestinationFile);
        TraceInfo.WriteLineIf(!highlights, "Done Transcoding full replay to {0}", transcoder.DestinationFile);
    }
    catch (Exception e)
    {
        TraceError.WriteLine(e.Message);
        TraceError.WriteLine(e.StackTrace);
        // Fixed: "throw e;" resets the stack trace — bare "throw;" preserves it.
        throw;
    }
}
/// <summary>
/// Configures audio/video media types on every reader, registers matching audio and video
/// streams on the sink writer, and returns a sample processor that routes audio and video
/// samples to their respective sink streams.
/// </summary>
private ProcessSample ConnectStreams(IEnumerable <SourceReaderExtra> readers, SinkWriter sinkWriter)
{
    // Ensure every source reader has its audio and video media types configured.
    foreach (var reader in readers)
    {
        SetAudioMediaType(reader.SourceReader);
        SetVideoMediaType(reader.SourceReader);
    }

    // The first reader's streams define the media types used for the sink.
    var audioSource = SetAudioMediaType(readers.First().SourceReader);
    var videoSource = SetVideoMediaType(readers.First().SourceReader);

    var audioSink = AddStream(sinkWriter, audioSource.CurrentMediaType, CreateTargetAudioMediaType(audioSource.NativeMediaType));
    var videoSink = AddStream(sinkWriter, videoSource.CurrentMediaType, CreateTargetVideoMediaType(videoSource.NativeMediaType));

    // Each pipeline handles mid-stream media-type changes before persisting to its sink stream.
    var audioPipeline = AVOperations.MediaTypeChange(audioSink, AVOperations.SaveTo(audioSink));
    var videoPipeline = AVOperations.MediaTypeChange(videoSink, AVOperations.SaveTo(videoSink));

    return(AVOperations.SeperateAudioVideo(audioPipeline, videoPipeline));
}
/// <summary>
/// Wraps <paramref name="next"/> in a chain of cut-with-fade operations, one per race edit,
/// reducing <c>totalDuration</c> by the length of each retained segment's window.
/// </summary>
/// <param name="next">Downstream sample processor that each cut feeds into.</param>
/// <returns>The head of the composed cut chain.</returns>
/// <exception cref="Exception">Thrown when no race edits are available.</exception>
ProcessSample ApplyEdits(ProcessSample next)
{
    // Materialise once: the original enumerated the sequence repeatedly
    // (Count(), First(), Last(), foreach) — a lazy source would be re-evaluated each time.
    // Also removed unused locals firstEdit/lastEdit.
    var raceEdits = leaderBoard.OverlayData.RaceEvents.GetRaceEdits().ToList();
    if (raceEdits.Count == 0)
    {
        throw new Exception("Unable to create highlight - try reducing time for highlight duration");
    }

    var cut = next;
    foreach (var editCut in raceEdits)
    {
        cut = AVOperations.Cut(editCut.StartTime.FromSecondsToNano(), editCut.EndTime.FromSecondsToNano(), AVOperations.FadeInOut(cut));
        totalDuration -= editCut.EndTime.FromSecondsToNano() - editCut.StartTime.FromSecondsToNano();
    }
    return(cut);
}