        /// <summary>
        /// Create a new EncoderPackage
        /// </summary>
        public EncoderPackage(PackageSpec SrcSpec, int Index, EncoderJob Job, MediaFrame Frame)
        {
            Buffers = new List<IEncoderBuffer>();
            this.JobIndex = Index;
            this.Job = Job;
            this.Frame = Frame;
            Specification = SrcSpec;
        }
        /// <summary>
        /// Load the closest matching frame by offset time.
        /// Fills the encoder-ready frame, with given time-code.
        /// WARNING: use this *OR* 'LoadToFrame', but not both!
        /// </summary>
        public void SelectiveLoadFrame(ref MediaFrame Frame, double OffsetSeconds)
        {
            // This is meant to be used for big frame skips on static bars & tones.
            // It needs to be called from a clocked reference, and it stamps frames
            // with a time based on that clock.

            // Call 'SelectiveDequeue' before updating the reference clock.
            int idx = FirstFrameMatchingTime(OffsetSeconds);

            if (idx < 0)               // no frame available
            {
                Frame.Yplane    = IntPtr.Zero;
                Frame.Uplane    = IntPtr.Zero;
                Frame.Vplane    = IntPtr.Zero;
                Frame.VideoSize = 0;
                return;
            }

            try {
                TimedImage img = null;
                lock (WaitingFrames) {
                    img = WaitingFrames[idx];
                }
                if (img == null)
                {
                    return;                              // frame disappeared from the queue; nothing to load
                }
                if (img.Luma == null || img.Cr == null || img.Cb == null)
                {
                    return;                              // incomplete frame: one or more planes are missing
                }
                Y = img.Luma;
                u = img.Cr;
                v = img.Cb;

                Frame.VideoSize       = (ulong)Y.Length;
                Frame.VideoSampleTime = OffsetSeconds;

                pinY         = GCHandle.Alloc(Y, GCHandleType.Pinned);
                Frame.Yplane = pinY.AddrOfPinnedObject();

                pinU         = GCHandle.Alloc(u, GCHandleType.Pinned);
                Frame.Uplane = pinU.AddrOfPinnedObject();

                pinV         = GCHandle.Alloc(v, GCHandleType.Pinned);
                Frame.Vplane = pinV.AddrOfPinnedObject();
            } catch {
                // Drop the bad frame data:
                UnloadFrame(ref Frame);                 // the frame can still be sent to the encoder; it will just come through as a dropped frame
                Console.WriteLine("Lost a frame (no image)");
            }
        }
        /// <summary>
        /// Load the buffer into a MediaFrame for the encoder.
        /// IMPORTANT: You must call UnloadFrame after this method is called.
        /// For efficiency, unload as soon as possible.
        /// </summary>
        public void LoadToFrame(ref MediaFrame Frame)
        {
            try {
                if (WaitingFrames.Count > 0)
                {
                    TimedImage img = null;
                    lock (WaitingFrames) {
                        WaitingFrames.RemoveAll(a => a == null);
                        WaitingFrames.Sort((a, b) => a.Seconds.CompareTo(b.Seconds));

                        img = WaitingFrames[0];
                        WaitingFrames.RemoveAt(0);
                    }

                    if (img.Luma == null || img.Cr == null || img.Cb == null)
                    {
                        return;                              // incomplete frame: one or more planes are missing
                    }
                    Y = img.Luma;
                    u = img.Cr;
                    v = img.Cb;

                    Frame.VideoSize       = (ulong)Y.Length;
                    Frame.VideoSampleTime = img.Seconds;

                    pinY         = GCHandle.Alloc(Y, GCHandleType.Pinned);
                    Frame.Yplane = pinY.AddrOfPinnedObject();

                    pinU         = GCHandle.Alloc(u, GCHandleType.Pinned);
                    Frame.Uplane = pinU.AddrOfPinnedObject();

                    pinV         = GCHandle.Alloc(v, GCHandleType.Pinned);
                    Frame.Vplane = pinV.AddrOfPinnedObject();
                }
                else
                {
                    Frame.Yplane    = IntPtr.Zero;
                    Frame.Uplane    = IntPtr.Zero;
                    Frame.Vplane    = IntPtr.Zero;
                    Frame.VideoSize = 0;
                    Console.WriteLine("Frame buffer was empty (in ImageToYUV_Buffer.LoadToFrame())");
                }
            } catch {
                // Drop the bad frame data:
                    UnloadFrame(ref Frame);                 // the frame can still be sent to the encoder; it will just come through as a dropped frame
                Console.WriteLine("Lost a frame (no image)");
            }
        }
        /// <summary>
        /// Release memory previously locked by LoadToFrame()
        /// </summary>
        public void UnloadFrame(ref MediaFrame Frame)
        {
            if (pinY.IsAllocated)
            {
                pinY.Free();
            }
            if (pinU.IsAllocated)
            {
                pinU.Free();
            }
            if (pinV.IsAllocated)
            {
                pinV.Free();
            }
            Frame.VideoSize = 0;

            Y = null;
            u = null;
            v = null;
        }
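The three methods above carry a strict usage contract: LoadToFrame pins the Y/U/V arrays with GCHandles, so every call must be paired with an UnloadFrame as soon as the encoder has consumed the frame, and SelectiveLoadFrame (the clock-driven alternative) must not be mixed with LoadToFrame on the same buffer. A minimal sketch of that pairing, assuming a buffer field named yuvBuffer and an existing EncoderJob called job (both names are placeholders, not taken from the original class):

        // Sketch only: 'yuvBuffer' and 'job' are illustrative names.
        MediaFrame frame = new MediaFrame();
        yuvBuffer.LoadToFrame(ref frame);                // pins the Y/U/V planes
        try
        {
            if (frame.VideoSize > 0)
            {
                EncoderBridge.EncodeFrame(ref job, ref frame);   // hand the pinned planes to the native encoder
            }
        }
        finally
        {
            yuvBuffer.UnloadFrame(ref frame);            // release the GCHandles as soon as possible
        }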
Example #5
 public static extern int DecodeFrame(ref DecoderJob jobSpec, ref MediaFrame frame);
Example #6
 [System.Security.SuppressUnmanagedCodeSecurity]         // performance: skips the runtime's unmanaged-code security check; only makes a difference in tight loops.
 public static extern void EncodeFrame(ref EncoderJob JobSpec, ref MediaFrame Frame);
 public static extern int DecodeFrame(ref DecoderJob jobSpec, ref MediaFrame frame);
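The extern declarations in Examples #5 and #6 are P/Invoke stubs; the [DllImport] attribute that normally accompanies extern has been stripped by the snippet extraction. A hedged sketch of the fuller form they would usually take; the native library name "EncoderBridge.dll" and the Cdecl calling convention are assumptions, not stated anywhere on this page:

 // Assumed form only: the DLL name and calling convention are guesses.
 // Requires: using System.Runtime.InteropServices;
 [System.Security.SuppressUnmanagedCodeSecurity]         // skip the unmanaged-code security check in tight loops
 [DllImport("EncoderBridge.dll", CallingConvention = CallingConvention.Cdecl)]
 public static extern void EncodeFrame(ref EncoderJob JobSpec, ref MediaFrame Frame);

 [DllImport("EncoderBridge.dll", CallingConvention = CallingConvention.Cdecl)]
 public static extern int DecodeFrame(ref DecoderJob jobSpec, ref MediaFrame frame);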
Example #12
        static void Main()
        {
            /*Application.EnableVisualStyles();
            Application.SetCompatibleTextRenderingDefault(false);
            Application.Run(new Form1());*/

            byte[] data = File.ReadAllBytes(@"C:\temp\sample.ts");
            MemoryStream ms = new MemoryStream(data);

            EncoderConfiguration.Configuration config = EncoderConfiguration.Configuration.LoadFromFile(@"C:\temp\dummy_only.xml");
            EncoderController encoder = new EncoderController(config);
            #region Trick mode: encoder with no capture devices (so we can spoon-feed it content)
            encoder.DryRun = true;
            encoder.Start();
            encoder.PauseCapture();
            encoder.ClearBuffers();
            encoder.DryRun = false;
            encoder.MinimumBufferPopulation = 15; // to allow re-ordering of B-frames
            #endregion

            plug_in = new TranscodeTimeOverlay();
            encoder.RegisterPlugin(plug_in); // show captured time over recorded time.

            MpegTS_Demux demux = new MpegTS_Demux();
            demux.FeedTransportStream(ms, 0L);

            DecoderJob decode = new DecoderJob();
            EncoderBridge.InitialiseDecoderJob(ref decode, @"C:\temp\sample.ts");

            Console.WriteLine(decode.videoWidth + "x" + decode.videoHeight);
            double a_time = -1, v_time = -1;
            MediaFrame mf = new MediaFrame();

            byte[] IMAGE = new byte[decode.videoWidth * decode.videoHeight * 16];
            short[] AUDIO = new short[decode.MinimumAudioBufferSize * 2];

            List<GenericMediaFrame> AudioFrames = demux.GetAvailableAudio();
            List<GenericMediaFrame> VideoFrames = demux.GetAvailableVideo();
            VideoFrames.Sort((a, b) => a.FramePresentationTime.CompareTo(b.FramePresentationTime));
            AudioFrames.Sort((a, b) => a.FramePresentationTime.CompareTo(b.FramePresentationTime));

            double dv_time = p2d((long)VideoFrames.Average(a => a.FrameDuration));
            double da_time = p2d((long)AudioFrames.Average(a => a.FrameDuration));

            GCHandle pinX = GCHandle.Alloc(IMAGE, GCHandleType.Pinned);
            mf.Yplane = pinX.AddrOfPinnedObject();

            GCHandle pinY = GCHandle.Alloc(AUDIO, GCHandleType.Pinned);
            mf.AudioBuffer = pinY.AddrOfPinnedObject();

            int i = 0, j=0;
            while (EncoderBridge.DecodeFrame(ref decode, ref mf) >= 0) {
                if (mf.VideoSize > 0) {
                    Bitmap img = new Bitmap(decode.videoWidth, decode.videoHeight, decode.videoWidth * 3, System.Drawing.Imaging.PixelFormat.Format24bppRgb, mf.Yplane);
                    img.RotateFlip(RotateFlipType.RotateNoneFlipY); // flip vertically: the decoder writes rows top-down (TL->BR), whereas the capture path expects bottom-up (BL->TR).

                    if (v_time < 0) v_time = p2d(VideoFrames[i].FramePresentationTime);
                    else v_time += dv_time; // rather than p2d(VideoFrames[i].FrameDuration); using the average dv_time smooths out per-frame jitter

                    encoder.ForceInsertFrame(img, v_time);
                    Console.Write("v");
                    i++;
                }

                if (mf.AudioSize > 0) {
                    if (mf.AudioSize > 441000) {
                        Console.Write("@"); // protect ourselves from over-size packets!
                    } else {
                        short[] samples = new short[mf.AudioSize];
                        Marshal.Copy(mf.AudioBuffer, samples, 0, samples.Length);

                        if (a_time < 0) a_time = p2d(AudioFrames[j].FramePresentationTime);
                        else a_time += p2d(AudioFrames[j].FrameDuration);

                        encoder.ForceInsertFrame(new TimedSample(samples, a_time));
                        Console.Write("a");
                    }
                    j++;
                }

                Application.DoEvents();
                mf.VideoSize = 0;
                mf.AudioSize = 0;
            }

            pinX.Free();
            pinY.Free();

            encoder.MinimumBufferPopulation = 1; // let the buffers empty out

            Console.WriteLine("\r\nEND\r\n");

            Thread.Sleep(2000);
            encoder.Stop();
            EncoderBridge.CloseDecoderJob(ref decode);
        }
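Example #12 depends on a p2d helper (used on FramePresentationTime and FrameDuration values) whose body is not included on this page. Assuming MpegTS_Demux reports timestamps in standard MPEG-TS 90 kHz PTS ticks, a plausible sketch is:

        // Assumption: timestamps from MpegTS_Demux are 90 kHz MPEG-TS PTS ticks.
        static double p2d(long pts)
        {
            return pts / 90000.0;   // PTS ticks -> seconds
        }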
Example #13
        private void DoTranscode()
        {
            // Start decode job (gets some basic information)
            DecoderJob decode = new DecoderJob();
            EncoderBridge.InitialiseDecoderJob(ref decode, SourceFile);

            if (decode.IsValid == 0) {
                MessageBox.Show("Sorry, the source file doesn't appear to be valid");
                return;
            }

            // Load config, then tweak to match input
            EncoderConfiguration.Configuration config = EncoderConfiguration.Configuration.LoadFromFile(DestFile);
            config.Audio.Channels = decode.AudioChannels;
            if (config.Audio.Channels > 0) {
                config.Audio.CaptureDeviceNumber = -2; // dummy
            } else {
                config.Audio.CaptureDeviceNumber = -1; // no audio
            }

            if (decode.videoWidth * decode.videoHeight > 0) {
                config.Video.CaptureDeviceNumber = -2; // dummy device
                config.Video.InputFrameRate = (int)decode.Framerate;
                if (config.Video.InputFrameRate < 1) {
                    config.Video.InputFrameRate = 25; // frame rate unknown, so fall back to a 25 fps default.
                }

                config.EncoderSettings.OutputHeight = decode.videoHeight;
                config.EncoderSettings.OutputWidth = decode.videoWidth;
            } else {
                config.Video.CaptureDeviceNumber = -1; // no video
            }

            #region Start up encoder in a trick mode
            EncoderController encoder = new EncoderController(config);
            encoder.DryRun = true;
            encoder.Start();
            encoder.PauseCapture();
            encoder.ClearBuffers();
            encoder.DryRun = false;
            encoder.MinimumBufferPopulation = 5; // to allow re-ordering of weird frame timings
            #endregion

            Console.WriteLine(decode.videoWidth + "x" + decode.videoHeight);
            double a_time = -1, v_time = -1;
            MediaFrame mf = new MediaFrame();

            byte[] IMAGE = new byte[decode.videoWidth * decode.videoHeight * 16];
            short[] AUDIO = new short[decode.MinimumAudioBufferSize * 2];

            GCHandle pinX = GCHandle.Alloc(IMAGE, GCHandleType.Pinned);
            mf.Yplane = pinX.AddrOfPinnedObject();

            GCHandle pinY = GCHandle.Alloc(AUDIO, GCHandleType.Pinned);
            mf.AudioBuffer = pinY.AddrOfPinnedObject();

            int i = 0, j = 0;
            while (EncoderBridge.DecodeFrame(ref decode, ref mf) >= 0) {
                if (mf.VideoSize > 0) {
                    Bitmap img = new Bitmap(decode.videoWidth, decode.videoHeight, decode.videoWidth * 3, System.Drawing.Imaging.PixelFormat.Format24bppRgb, mf.Yplane);
                    img.RotateFlip(RotateFlipType.RotateNoneFlipY); // flip vertically: the decoder writes rows top-down (TL->BR), whereas the capture path expects bottom-up (BL->TR).

                    v_time = mf.VideoSampleTime;
                    //v_time += 1.0 / config.Video.InputFrameRate;
                    try { encoder.ForceInsertFrame(img, v_time); } catch { }
                    Console.Write("v");
                    i++;
                }

                if (mf.AudioSize > 0) {
                    if (mf.AudioSize > 441000) {
                        Console.Write("@"); // protect ourselves from over-size packets!
                    } else {
                        short[] samples = new short[mf.AudioSize];
                        Marshal.Copy(mf.AudioBuffer, samples, 0, samples.Length);

                        a_time = mf.AudioSampleTime;

                        encoder.ForceInsertFrame(new TimedSample(samples, a_time));
                        Console.Write("a");
                    }
                    j++;
                }

                //while (encoder.AudioQueueLength > 50 || encoder.VideoQueueLength > 50) {
                    if (!encoder.EncoderRunning) throw new Exception("Encoder broken!");
                    Thread.Sleep((int)(250 / config.Video.InputFrameRate));
                //}

                this.Text = "V (" + i + "/" + v_time + ") | A (" + j + "/" + a_time + ")";

                Application.DoEvents();

                if (!running) break;

                mf.VideoSize = 0;
                mf.AudioSize = 0;
            }

            pinX.Free();
            pinY.Free();

            encoder.MinimumBufferPopulation = 1; // let the buffers empty out

            Console.WriteLine("\r\nEND\r\n");

            Thread.Sleep(5000);
            encoder.Stop();
            EncoderBridge.CloseDecoderJob(ref decode);
        }
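Both transcoding examples (#12 and #13) pin their IMAGE and AUDIO buffers with GCHandle.Alloc(..., GCHandleType.Pinned) and only free the handles after the decode loop, so an exception inside the loop would leave the buffers pinned. A defensive variant of the same pattern (a sketch, not how the original methods are written) wraps the loop in try/finally:

            // Sketch: identical pinning to the examples above, but the handles are freed on every exit path.
            GCHandle pinImage = GCHandle.Alloc(IMAGE, GCHandleType.Pinned);
            GCHandle pinAudio = GCHandle.Alloc(AUDIO, GCHandleType.Pinned);
            try
            {
                mf.Yplane = pinImage.AddrOfPinnedObject();
                mf.AudioBuffer = pinAudio.AddrOfPinnedObject();

                while (EncoderBridge.DecodeFrame(ref decode, ref mf) >= 0)
                {
                    // ... feed decoded video/audio to the encoder as in Examples #12 and #13 ...
                    mf.VideoSize = 0;
                    mf.AudioSize = 0;
                }
            }
            finally
            {
                if (pinImage.IsAllocated) pinImage.Free();
                if (pinAudio.IsAllocated) pinAudio.Free();
            }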