/// <summary>
/// Builds a fresh <c>EncoderController</c> from the stored configuration,
/// optionally wiring in the plug-in chain when the plug-ins checkbox is ticked.
/// Plug-in instances are created lazily once and reused across encoders.
/// </summary>
/// <returns>A new, not-yet-started encoder controller.</returns>
private EncoderController PrepareEncoder()
{
    var freshEncoder = new EncoderController(config);
    freshEncoder.UseAbsoluteTime = true;

    if (usePlugsCheck.Checked)
    {
        // Plug-ins for UK Parliament encoding
        if (_toneDetector == null)
        {
            _toneDetector = new ToneDetector(980) { PowerGate = 15, TemporalSmoothing = 350 };
        }
        //freshEncoder.RegisterPlugin(_toneDetector); // deliberately not registered directly; fed to the video overlay below

        _timeOverlay = _timeOverlay ?? new TimecodeOverlay();
        freshEncoder.RegisterPlugin(_timeOverlay);

        _videoOverlay = _videoOverlay ?? new VideoOverlay(_toneDetector);
        freshEncoder.RegisterPlugin(_videoOverlay);
    }

    return freshEncoder;
}
/// <summary>
/// Test harness entry point: instead of running the WinForms UI (commented out below),
/// it decodes a fixed transport-stream file and spoon-feeds the decoded frames into an
/// encoder started in "trick mode" (no capture devices). Paths are hard-coded to C:\temp.
/// </summary>
static void Main() {
    /*Application.EnableVisualStyles(); Application.SetCompatibleTextRenderingDefault(false); Application.Run(new Form1());*/
    // Read the sample TS into memory; the demuxer consumes the stream, the decoder re-opens the file by path.
    byte[] data = File.ReadAllBytes(@"C:\temp\sample.ts");
    MemoryStream ms = new MemoryStream(data);
    EncoderConfiguration.Configuration config = EncoderConfiguration.Configuration.LoadFromFile(@"C:\temp\dummy_only.xml");
    EncoderController encoder = new EncoderController(config);

    #region Trick mode: encoder with no capture devices (so we can spoon-feed it content)
    // Start under DryRun so nothing real is captured, pause + clear, then switch DryRun
    // off — the encoder is now live but only sees frames we ForceInsert.
    encoder.DryRun = true;
    encoder.Start();
    encoder.PauseCapture();
    encoder.ClearBuffers();
    encoder.DryRun = false;
    encoder.MinimumBufferPopulation = 15; // to allow re-ordering of B-frames
    #endregion

    plug_in = new TranscodeTimeOverlay();
    encoder.RegisterPlugin(plug_in); // show captured time over recorded time.

    // Demux the in-memory stream to recover per-frame presentation times / durations.
    MpegTS_Demux demux = new MpegTS_Demux();
    demux.FeedTransportStream(ms, 0L);

    // Native decoder job over the same file (by path, not the MemoryStream).
    DecoderJob decode = new DecoderJob();
    EncoderBridge.InitialiseDecoderJob(ref decode, @"C:\temp\sample.ts");
    Console.WriteLine(decode.videoWidth + "x" + decode.videoHeight);

    double a_time = -1, v_time = -1; // running audio/video clocks; -1 = "not started yet"
    MediaFrame mf = new MediaFrame();
    // Scratch buffers handed to the native decoder. The *16 video size is a generous
    // over-allocation — presumably headroom beyond 24bpp; TODO confirm against EncoderBridge.
    byte[] IMAGE = new byte[decode.videoWidth * decode.videoHeight * 16];
    short[] AUDIO = new short[decode.MinimumAudioBufferSize * 2];

    // Demuxed frame lists, sorted by presentation time so index i/j tracks decode order.
    List<GenericMediaFrame> AudioFrames = demux.GetAvailableAudio();
    List<GenericMediaFrame> VideoFrames = demux.GetAvailableVideo();
    VideoFrames.Sort((a, b) => a.FramePresentationTime.CompareTo(b.FramePresentationTime));
    AudioFrames.Sort((a, b) => a.FramePresentationTime.CompareTo(b.FramePresentationTime));

    // Average frame durations (converted from PTS units via p2d).
    double dv_time = p2d((long)VideoFrames.Average(a => a.FrameDuration));
    double da_time = p2d((long)AudioFrames.Average(a => a.FrameDuration)); // NOTE(review): computed but never used below
    // Pin the managed buffers so the native decoder can write into them directly.
    // NOTE(review): pins are only freed after the loop — an exception here leaks both handles; consider try/finally.
    GCHandle pinX = GCHandle.Alloc(IMAGE, GCHandleType.Pinned);
    mf.Yplane = pinX.AddrOfPinnedObject();
    GCHandle pinY = GCHandle.Alloc(AUDIO, GCHandleType.Pinned);
    mf.AudioBuffer = pinY.AddrOfPinnedObject();

    int i = 0, j=0; // i = video frames consumed, j = audio frames consumed
    while (EncoderBridge.DecodeFrame(ref decode, ref mf) >= 0) {
        if (mf.VideoSize > 0) {
            // Wrap the pinned plane as a 24bpp bitmap (stride = width*3; assumes width*3 is
            // an acceptable stride — TODO confirm 4-byte alignment for odd widths).
            // NOTE(review): img is never disposed; unclear whether ForceInsertFrame takes ownership.
            Bitmap img = new Bitmap(decode.videoWidth, decode.videoHeight, decode.videoWidth * 3, System.Drawing.Imaging.PixelFormat.Format24bppRgb, mf.Yplane);
            img.RotateFlip(RotateFlipType.RotateNoneFlipY); // because decode put things the TL->BR, where video capture is BL->TR.
            // First frame anchors the clock at its real PTS; afterwards advance by the average duration.
            if (v_time < 0) v_time = p2d(VideoFrames[i].FramePresentationTime);
            else v_time += dv_time; // p2d(VideoFrames[i].FrameDuration); // using dv_time smooths things
            encoder.ForceInsertFrame(img, v_time);
            Console.Write("v");
            i++;
        }
        if (mf.AudioSize > 0) {
            if (mf.AudioSize > 441000) {
                Console.Write("@"); // protect ourselves from over-size packets!
            } else {
                short[] samples = new short[mf.AudioSize];
                Marshal.Copy(mf.AudioBuffer, samples, 0, samples.Length);
                // Audio clock: anchor on first frame's PTS, then advance by each frame's own duration.
                if (a_time < 0) a_time = p2d(AudioFrames[j].FramePresentationTime);
                else a_time += p2d(AudioFrames[j].FrameDuration);
                encoder.ForceInsertFrame(new TimedSample(samples, a_time));
                Console.Write("a");
            }
            j++;
        }
        Application.DoEvents(); // keep any message pump alive while we hammer the encoder
        // Reset sizes so a frame with only one stream isn't double-processed next iteration.
        mf.VideoSize = 0;
        mf.AudioSize = 0;
    }

    pinX.Free();
    pinY.Free();

    encoder.MinimumBufferPopulation = 1; // let the buffers empty out
    Console.WriteLine("\r\nEND\r\n");
    Thread.Sleep(2000); // give the encoder thread time to drain before stopping
    encoder.Stop();
    EncoderBridge.CloseDecoderJob(ref decode);
}
/// <summary>
/// Toggles the encoder on/off. When running: stops, disposes and releases it.
/// When stopped: disposes any stale instance, builds a fresh encoder via
/// <see cref="PrepareEncoder"/> and starts it. The plug-ins checkbox is only
/// editable while the encoder is stopped, and the button text tracks state.
/// </summary>
private void ToggleEncoder_Click(object sender, EventArgs e)
{
    if (encoder != null && encoder.EncoderRunning)
    {
        // Stop path: shut down, then release the controller and its resources.
        encoder.Stop();
        ToggleEncoder.Text = "Start Encoding";
        usePlugsCheck.Enabled = true;
        encoder.Dispose();
        encoder = null;
        // FIX: removed explicit GC.Collect() — forcing a collection in production
        // code is an anti-pattern; Dispose() above already releases the encoder's
        // resources deterministically, and the GC reclaims the rest on its own.
    }
    else
    {
        // Start path: drop any stale (stopped but not yet disposed) instance first.
        if (encoder != null)
        {
            encoder.Dispose();
            encoder = null;
        }
        encoder = PrepareEncoder();
        usePlugsCheck.Enabled = false;
        encoder.Start(); // actual encode will happen on a different thread.
        ToggleEncoder.Text = "Stop Encoding";
    }
}
/// <summary>
/// Transcodes <c>SourceFile</c> into the output described by the <c>DestFile</c>
/// configuration: opens a native decoder job, adapts the encoder configuration to
/// the input's audio/video properties, starts the encoder in trick mode (no capture
/// devices) and spoon-feeds it every decoded frame. Runs until the input is
/// exhausted or <c>running</c> is cleared.
/// </summary>
/// <exception cref="Exception">Thrown if the encoder stops unexpectedly mid-stream.</exception>
private void DoTranscode()
{
    // Start decode job (gets some basic information)
    DecoderJob decode = new DecoderJob();
    EncoderBridge.InitialiseDecoderJob(ref decode, SourceFile);
    if (decode.IsValid == 0)
    {
        MessageBox.Show("Sorry, the source file doesn't appear to be valid");
        return;
    }

    try
    {
        // Load config, then tweak to match input
        EncoderConfiguration.Configuration config = EncoderConfiguration.Configuration.LoadFromFile(DestFile);
        config.Audio.Channels = decode.AudioChannels;
        if (config.Audio.Channels > 0)
        {
            config.Audio.CaptureDeviceNumber = -2; // dummy
        }
        else
        {
            config.Audio.CaptureDeviceNumber = -1; // no audio
        }
        if (decode.videoWidth * decode.videoHeight > 0)
        {
            config.Video.CaptureDeviceNumber = -2; // dummy device
            config.Video.InputFrameRate = (int)decode.Framerate;
            if (config.Video.InputFrameRate < 1)
            {
                config.Video.InputFrameRate = 25; // don't know frame rate, so adapt.
            }
            config.EncoderSettings.OutputHeight = decode.videoHeight;
            config.EncoderSettings.OutputWidth = decode.videoWidth;
        }
        else
        {
            config.Video.CaptureDeviceNumber = -1; // no video
        }

        #region Start up encoder in a trick mode
        // DryRun start/pause/clear then DryRun off: encoder is live but only
        // sees frames we ForceInsert (no real capture devices).
        EncoderController encoder = new EncoderController(config);
        encoder.DryRun = true;
        encoder.Start();
        encoder.PauseCapture();
        encoder.ClearBuffers();
        encoder.DryRun = false;
        encoder.MinimumBufferPopulation = 5; // to allow re-ordering of weird frame timings
        #endregion

        Console.WriteLine(decode.videoWidth + "x" + decode.videoHeight);
        double a_time = -1, v_time = -1;
        MediaFrame mf = new MediaFrame();
        // Scratch buffers handed to the native decoder (video size generously over-allocated).
        byte[] IMAGE = new byte[decode.videoWidth * decode.videoHeight * 16];
        short[] AUDIO = new short[decode.MinimumAudioBufferSize * 2];

        // Pin the managed buffers so native code can write into them directly.
        GCHandle pinX = GCHandle.Alloc(IMAGE, GCHandleType.Pinned);
        mf.Yplane = pinX.AddrOfPinnedObject();
        GCHandle pinY = GCHandle.Alloc(AUDIO, GCHandleType.Pinned);
        mf.AudioBuffer = pinY.AddrOfPinnedObject();
        try
        {
            int i = 0, j = 0; // video / audio frame counters (for the title bar)
            while (EncoderBridge.DecodeFrame(ref decode, ref mf) >= 0)
            {
                if (mf.VideoSize > 0)
                {
                    // Wrap the pinned plane as a 24bpp bitmap (stride = width*3; assumes
                    // the stride is acceptably aligned — TODO confirm for odd widths).
                    Bitmap img = new Bitmap(decode.videoWidth, decode.videoHeight, decode.videoWidth * 3, System.Drawing.Imaging.PixelFormat.Format24bppRgb, mf.Yplane);
                    img.RotateFlip(RotateFlipType.RotateNoneFlipY); // because decode put things the TL->BR, where video capture is BL->TR.
                    v_time = mf.VideoSampleTime;
                    //v_time += 1.0 / config.Video.InputFrameRate;
                    // Best-effort insert: a single bad frame shouldn't kill the whole transcode.
                    try { encoder.ForceInsertFrame(img, v_time); } catch { }
                    Console.Write("v");
                    i++;
                }
                if (mf.AudioSize > 0)
                {
                    if (mf.AudioSize > 441000)
                    {
                        Console.Write("@"); // protect ourselves from over-size packets!
                    }
                    else
                    {
                        short[] samples = new short[mf.AudioSize];
                        Marshal.Copy(mf.AudioBuffer, samples, 0, samples.Length);
                        a_time = mf.AudioSampleTime;
                        encoder.ForceInsertFrame(new TimedSample(samples, a_time));
                        Console.Write("a");
                    }
                    j++;
                }

                //while (encoder.AudioQueueLength > 50 || encoder.VideoQueueLength > 50) {
                if (!encoder.EncoderRunning) throw new Exception("Encoder broken!");
                // Throttle feeding so the encoder queues don't balloon.
                Thread.Sleep((int)(250 / config.Video.InputFrameRate));
                //}

                this.Text = "V (" + i + "/" + v_time + ") | A (" + j + "/" + a_time + ")";
                Application.DoEvents(); // keep the UI responsive while we hammer the encoder
                if (!running) break;   // user requested cancellation

                // Reset sizes so a frame carrying only one stream isn't re-processed.
                mf.VideoSize = 0;
                mf.AudioSize = 0;
            }
        }
        finally
        {
            // FIX: always unpin the buffers, even if the loop throws ("Encoder broken!")
            // — previously an exception leaked both pinned GCHandles.
            pinX.Free();
            pinY.Free();
        }

        encoder.MinimumBufferPopulation = 1; // let the buffers empty out
        Console.WriteLine("\r\nEND\r\n");
        Thread.Sleep(5000); // give the encoder thread time to drain before stopping
        encoder.Stop();
    }
    finally
    {
        // FIX: always release the native decoder job — previously it stayed open
        // if anything between initialisation and the normal exit path threw.
        EncoderBridge.CloseDecoderJob(ref decode);
    }
}