/// <summary>
        /// Prepare a new output router for the given job configuration.
        /// Selects and starts an upload handler based on
        /// Configuration.Upload.UploadHandlerName ("iis smooth", "http live" or
        /// "test"; a missing name defaults to "empty", which is not recognised).
        /// </summary>
        /// <param name="Configuration">Job configuration used to pick and prepare the handler.</param>
        /// <exception cref="ArgumentException">Thrown when the handler name is not recognised.</exception>
        /// <exception cref="InvalidOperationException">Thrown when the selected handler failed to start.</exception>
        public OutputRouter(EncoderConfiguration.Configuration Configuration)
        {
            SyncLock = new object();
            Config   = Configuration;
            string handler = Configuration.Upload.UploadHandlerName ?? "empty";

            ChunksCompleted = new Queue <ChunkDetail>();
            FilesCompleted  = new Queue <FileInfo>();
            ShouldCleanup   = true;
            EnableOutput    = true;

            // Invariant lower-casing: handler names are machine identifiers, so the
            // match must not vary with the user's culture (e.g. Turkish dotless 'I').
            switch (handler.ToLowerInvariant())
            {
            case "iis smooth":
                Prepare_IIS();
                break;

            case "http live":
                Prepare_HCS();
                break;

            case "test":
                Prepare_Test();
                break;

            default:
                throw new ArgumentException("Upload handler name not recognised", "Configuration");
            }
            if (SelectedHandler == null)
            {
                // A recognised name should always produce a handler; use a specific
                // exception type rather than the bare System.Exception.
                throw new InvalidOperationException("Failed to start output handler");
            }

            SelectedHandler.FileConsumed += new EventHandler <FileEventArgs>(SelectedHandler_FileConsumed);
        }
        /// <summary>
        /// Create a new upload manager to transport HCS fragments and write a playlist in the configured location.
        /// The playlist will be re-written every time a fragment is successfully uploaded.
        /// </summary>
        /// <param name="Config">Job configuration.</param>
        /// <exception cref="InvalidOperationException">Thrown when required paths are missing from the configuration.</exception>
        public UploadManager(EncoderConfiguration.Configuration Config)
        {
            // Set up some common strings and the upload queue.
            pre  = Config.EncoderSettings.LocalSystemFilePrefix;
            src  = "file://" + Path.Combine(Config.EncoderSettings.LocalSystemOutputFolder, Config.EncoderSettings.LocalSystemFilePrefix);
            dest = Config.Upload.VideoDestinationRoot;

            // Use a specific exception type for invalid configuration rather than the
            // bare System.Exception; callers catching Exception are unaffected.
            if (String.IsNullOrEmpty(pre))
            {
                throw new InvalidOperationException("Configuration is invalid. Check 'LocalSystemPrefix'");
            }
            if (String.IsNullOrEmpty(src))
            {
                throw new InvalidOperationException("Configuration is invalid. Check output paths");
            }
            if (String.IsNullOrEmpty(dest))
            {
                throw new InvalidOperationException("Configuration is invalid. Check 'VideoDestinationRoot'");
            }

            waitingChunks = new Queue <ChunkDetail>();

            // Setup and connect the playlist writer
            plw                     = new PlaylistWriter();
            plw.ServerRoot          = Config.Upload.ServerLookupRoot;
            plw.PlaylistDestination = Config.Upload.IndexFtpRoot + Config.Upload.IndexName;
            plw.PlaylistFilename    = Config.Upload.IndexName;
            plw.BaseDirectory       = Config.EncoderSettings.LocalSystemOutputFolder;
            plw.IsClosed            = false;


            SyncRoot = new object();
        }
// Example #3
 /// <summary>
 /// Build an image buffer list sized from the configured capture dimensions.
 /// </summary>
 public ImageBufferList(EncoderConfiguration.Configuration config)
     : base()
 {
     // Record the incoming frame geometry, then prepare an empty capture queue.
     SourceHeight = config.Video.InputHeight;
     SourceWidth  = config.Video.InputWidth;

     WaitingCaptures = new SortedSubscriberQueue<TimedImage>();
 }
 /// <summary>
 /// Create a new chunk transformer.
 /// Attempts to create an IIS Live Smooth Streaming publishing point matching the
 /// configuration settings, trying both WebDAV and FTP. If neither is correctly
 /// enabled on the destination server, the point must already exist.
 /// </summary>
 public ChunkTransformer(EncoderConfiguration.Configuration Configuration, bool pubPointCreationIsOptional)
 {
     SyncRoot = new object();
     WaitingChunks = new Queue<ChunkDetail>();
     Config = Configuration;
     try
     {
         PreparePublishingPoint(Config);
     }
     catch
     {
         // Point creation is best-effort unless the caller demands it succeed.
         if (!pubPointCreationIsOptional)
         {
             throw;
         }
     }
 }
// Example #5
        /// <summary>
        /// Handler for the save-config dialog: collect every UI control value into a
        /// fresh configuration object and write it to the chosen file.
        /// </summary>
        private void SaveConfigDialog_FileOk(object sender, CancelEventArgs e)
        {
            // save config
            EncoderConfiguration.Configuration config = new EncoderConfiguration.Configuration();

            // A capture device number of -1 means "no device". Treat an empty selection
            // the same as the explicit "(none)" entry instead of crashing on a null
            // SelectedItem.
            object audioSelection = AudioDeviceMenu.SelectedItem;
            if (audioSelection == null || audioSelection.ToString() == "(none)")
            {
                config.Audio.CaptureDeviceNumber = -1;
            }
            else
            {
                config.Audio.CaptureDeviceNumber = Math.Max(0, AudioDeviceMenu.SelectedIndex);
            }
            config.Audio.Channels   = (int)AudioChannelCount.Value;
            config.Audio.SampleRate = (int)AudioCaptureRate.Value;

            config.EncoderSettings.FragmentSeconds         = (int)FragmentSize.Value;
            config.EncoderSettings.LocalSystemFilePrefix   = FilePrefix.Text;
            config.EncoderSettings.LocalSystemOutputFolder = LocalFilesystemOutputFolder.Text;
            config.EncoderSettings.OutputHeight            = (int)OutputHeight.Value;
            config.EncoderSettings.OutputWidth             = (int)OutputWidth.Value;
            config.EncoderSettings.VideoBitrate            = (int)VideoBitrate.Value;

            // MBR bits affect this: factor 1.0 (full size) is always present, and each
            // checked percentage adds an extra reduction factor.
            config.EncoderSettings.ReductionFactors = new List <double>();
            config.EncoderSettings.ReductionFactors.Add(1.0);
            foreach (var item in MBRChecklist.CheckedItems)
            {
                double val = double.Parse(item.ToString().Replace("%", "")) / 100.0;
                config.EncoderSettings.ReductionFactors.Add(val);
            }

            config.Upload.IndexFtpRoot         = IndexFTPRoot.Text;
            config.Upload.IndexName            = IndexName.Text;
            config.Upload.ServerLookupRoot     = ServerLookupRoot.Text;
            config.Upload.VideoDestinationRoot = VideoFTPRoot.Text;

            // Same null-safe "(none)" handling as audio, for the video device.
            object videoSelection = VideoDeviceMenu.SelectedItem;
            if (videoSelection == null || videoSelection.ToString() == "(none)")
            {
                config.Video.CaptureDeviceNumber = -1;
            }
            else
            {
                config.Video.CaptureDeviceNumber = Math.Max(0, VideoDeviceMenu.SelectedIndex);
            }
            config.Video.InputFrameRate = (int)FrameRate.Value;
            config.Video.InputHeight    = (int)CaptureHeight.Value;
            config.Video.InputWidth     = (int)CaptureWidth.Value;


            config.Upload.UploadHandlerName = OutputHandlerMenu.Text;

            config.SaveToFile(SaveConfigDialog.FileName);
        }
// Example #6
        /// <summary>
        /// Read the supplied configuration and prepare the transformer for work.
        /// Builds one MediaStream per encoder package (audio or video, never pre-muxed),
        /// connects the IIS smooth push server, and creates one TS demuxer per package.
        /// </summary>
        /// <param name="Configuration">Job configuration; Upload.VideoDestinationRoot is used as the publishing point.</param>
        /// <param name="Packages">Encoder packages to map onto smooth-streaming tracks.</param>
        /// <exception cref="ArgumentException">Thrown when no publishing point is configured.</exception>
        /// <exception cref="NotSupportedException">Thrown when a package carries both audio and video.</exception>
        private void PrepareTransformer(EncoderConfiguration.Configuration Configuration, List <EncoderPackage> Packages)
        {
            Config = Configuration;
            TimeSpan TargetDuration = TimeSpan.FromSeconds(Config.EncoderSettings.FragmentSeconds);

            PublishPoint = Config.Upload.VideoDestinationRoot;
            if (String.IsNullOrEmpty(PublishPoint))
            {
                // Report the real parameter name ("PublishUrl" was a stale copy-paste).
                throw new ArgumentException("Publishing point must not be empty", "Configuration");
            }

            PushServer     = new IisSmoothPush(new Uri(PublishPoint));
            TrackDurations = new Dictionary <int, long>();
            TrackOffsets   = new Dictionary <int, long>();

            targetDuration = (ulong)TargetDuration.Ticks;
            Streams        = new MediaStream[Packages.Count];

            foreach (var pkg in Packages)
            {
                if (pkg.Specification.HasVideo && pkg.Specification.HasAudio)
                {
                    throw new NotSupportedException("IIS Smooth output doesn't support pre-muxed streams");
                }

                if (pkg.Specification.HasAudio)
                {
                    // For now, stream 0 is audio, and all others are video.
                    Streams[pkg.JobIndex] = new MediaStream {
                        TrackId = pkg.JobIndex + 1,
                        FourCC  = "mp3a", // MP3 ("mp4a" would be AAC)
                        Height  = 0,
                        Width   = 0,
                        Bitrate = 96000   // pkg.Job.Bitrate; // later!
                    };
                }
                else if (pkg.Specification.HasVideo)
                {
                    Streams[pkg.JobIndex] = new MediaStream {
                        TrackId = pkg.JobIndex + 1,
                        FourCC  = "H264", // this is the M$ format, not iso (which is 'avc1')
                        Height  = Config.EncoderSettings.OutputHeight, // the actual size may be different due to scaling factor.
                        Width   = Config.EncoderSettings.OutputWidth,
                        Bitrate = pkg.Job.Bitrate
                    };
                }
            }

            Mp4fFile = new FileRoot(Streams);
            Demuxer  = new MpegTS_Demux[Packages.Count];
            for (int di = 0; di < Demuxer.Length; di++)
            {
                Demuxer[di] = new MpegTS_Demux();
            }
        }
// Example #7
 /// <summary>
 /// Create a new chunk transformer.
 /// Attempts to set up an IIS Live Smooth Streaming publishing point that matches
 /// the configuration (via WebDAV or FTP); if neither is correctly enabled on the
 /// destination server, the point must already exist.
 /// </summary>
 public ChunkTransformer(EncoderConfiguration.Configuration Configuration, bool pubPointCreationIsOptional)
 {
     Config = Configuration;
     SyncRoot = new object();
     WaitingChunks = new Queue<ChunkDetail>();

     try {
         PreparePublishingPoint(Config);
     } catch {
         // Swallow the failure when point creation is merely optional.
         if (!pubPointCreationIsOptional) throw;
     }
 }
// Example #8
        /// <summary>
        /// Try to create an IIS publishing point from the configuration.
        /// Prefers the index FTP root and falls back to the video destination root
        /// when no index root is configured.
        /// </summary>
        /// <param name="Configuration">Job configuration providing the candidate roots.</param>
        /// <exception cref="InvalidOperationException">Thrown (wrapping the original failure) when the point cannot be established.</exception>
        private static void PreparePublishingPoint(EncoderConfiguration.Configuration Configuration)
        {
            string dest_root = null;

            try {
                // Pick the destination root once, then create the point — this
                // collapses the duplicated CreatePoint() call of the old if/else.
                dest_root = Configuration.Upload.IndexFtpRoot;
                if (String.IsNullOrEmpty(dest_root))
                {
                    dest_root = Configuration.Upload.VideoDestinationRoot;
                }
                IisPointCreator pc = PreparePointCreator(dest_root);
                pc.CreatePoint();
            } catch (Exception ex) {
                // Keep the chosen root in the message and the original failure as InnerException.
                throw new InvalidOperationException("Could not establish a publishing point at " + dest_root, ex);
            }
        }
        /// <summary>
        /// Read the supplied configuration and prepare the transformer for work.
        /// Builds one MediaStream per encoder package (audio or video, never pre-muxed),
        /// connects the IIS smooth push server, and creates one TS demuxer per package.
        /// </summary>
        /// <param name="Configuration">Job configuration; Upload.VideoDestinationRoot is used as the publishing point.</param>
        /// <param name="Packages">Encoder packages to map onto smooth-streaming tracks.</param>
        /// <exception cref="ArgumentException">Thrown when no publishing point is configured.</exception>
        /// <exception cref="NotSupportedException">Thrown when a package carries both audio and video.</exception>
        private void PrepareTransformer(EncoderConfiguration.Configuration Configuration, List<EncoderPackage> Packages)
        {
            Config = Configuration;
            TimeSpan TargetDuration = TimeSpan.FromSeconds(Config.EncoderSettings.FragmentSeconds);

            PublishPoint = Config.Upload.VideoDestinationRoot;
            // Report the real parameter name ("PublishUrl" was a stale copy-paste).
            if (String.IsNullOrEmpty(PublishPoint)) throw new ArgumentException("Publishing point must not be empty", "Configuration");

            PushServer = new IisSmoothPush(new Uri(PublishPoint));
            TrackDurations = new Dictionary<int, long>();
            TrackOffsets = new Dictionary<int, long>();

            targetDuration = (ulong)TargetDuration.Ticks;
            Streams = new MediaStream[Packages.Count];

            foreach (var pkg in Packages) {
                if (pkg.Specification.HasVideo && pkg.Specification.HasAudio) {
                    throw new NotSupportedException("IIS Smooth output doesn't support pre-muxed streams");
                }

                if (pkg.Specification.HasAudio) {
                    // for now, stream 0 is audio, and all others are video.
                    Streams[pkg.JobIndex] = new MediaStream {
                        TrackId = pkg.JobIndex + 1,
                        FourCC = "mp3a", // MP3 ("mp4a" would be AAC)
                        Height = 0,
                        Width = 0,
                        Bitrate = 96000 //pkg.Job.Bitrate; // later!
                    };
                } else if (pkg.Specification.HasVideo) {
                    Streams[pkg.JobIndex] = new MediaStream {
                        TrackId = pkg.JobIndex + 1,
                        FourCC = "H264", // this is the M$ format, not iso (which is 'avc1')
                        Height = Config.EncoderSettings.OutputHeight, // the actual size may be different due to scaling factor.
                        Width = Config.EncoderSettings.OutputWidth,
                        Bitrate = pkg.Job.Bitrate
                    };
                }
            }

            Mp4fFile = new FileRoot(Streams);
            Demuxer = new MpegTS_Demux[Packages.Count];
            for (int di = 0; di < Demuxer.Length; di++) {
                Demuxer[di] = new MpegTS_Demux();
            }
        }
		/// <summary>
		/// Create a new audio buffer list.
		/// NOTE(review): the config parameter is currently unused — confirm whether
		/// sample-rate/channel setup was intended here.
		/// </summary>
		public AudioBufferList (EncoderConfiguration.Configuration config) {
		}
// Example #11
        /// <summary>
        /// Handler for the load-config dialog: reads a configuration file and pushes
        /// every value into the matching UI control. Failures are reported via a
        /// message box rather than thrown to the caller.
        /// </summary>
        private void LoadConfigDialog_FileOk(object sender, CancelEventArgs e)
        {
            try {
                // load a config file
                EncoderConfiguration.Configuration config = null;
                string msg = "";
                try {
                    config = EncoderConfiguration.Configuration.LoadFromFile(LoadConfigDialog.FileName);
                } catch (Exception ex) {
                    msg = ex.Message;                     // remember the reason so the user sees it below
                }
                if (config == null)
                {
                    MessageBox.Show("Not a valid configuration file\r\n" + msg, "HCS Config");
                    return;
                }

                LoadedFileLabel.Text = (LoadConfigDialog.SafeFileName);

                // Negative device number means "no device".
                if (config.Audio.CaptureDeviceNumber < 0)
                {
                    AudioDeviceMenu.SelectedIndex = AudioDeviceMenu.Items.Count - 1;                     // "(none)" at end of list
                }
                else
                {
                    // Clamp to the list length in case the config names a device that no longer exists.
                    AudioDeviceMenu.SelectedIndex = Math.Min(config.Audio.CaptureDeviceNumber, AudioDeviceMenu.Items.Count - 1);
                }
                AudioChannelCount.Value = config.Audio.Channels;
                AudioCaptureRate.Value  = config.Audio.SampleRate;

                FragmentSize.Value = config.EncoderSettings.FragmentSeconds;
                FilePrefix.Text    = config.EncoderSettings.LocalSystemFilePrefix;
                LocalFilesystemOutputFolder.Text = config.EncoderSettings.LocalSystemOutputFolder;
                OutputHeight.Value = config.EncoderSettings.OutputHeight;
                OutputWidth.Value  = config.EncoderSettings.OutputWidth;
                VideoBitrate.Value = config.EncoderSettings.VideoBitrate;

                // MBR bits affect this:
                var factors = config.EncoderSettings.ReductionFactors;
                if (factors == null)
                {
                    factors = new List <double>();
                }
                // The implicit 1.0 (full size) factor is never shown in the checklist.
                // NOTE(review): relies on exact double equality with the stored 1.0 —
                // the matching save handler writes the literal 1.0, but confirm for
                // hand-edited config files.
                factors.Remove(1.0);
                // Check each standard checklist entry that appears in the config,
                // removing matches so only non-standard factors remain afterwards.
                for (int i = 0; i < MBRChecklist.Items.Count; i++)
                {
                    double val = double.Parse(MBRChecklist.Items[i].ToString().Replace("%", "")) / 100.0;
                    if (factors.Contains(val))
                    {
                        MBRChecklist.SetItemChecked(i, true);
                        factors.Remove(val);
                    }
                    else
                    {
                        MBRChecklist.SetItemChecked(i, false);
                    }
                }
                foreach (var factor in factors)                   // any extra non-standard factors:
                {
                    string wrd = (factor * 100.0).ToString("0") + "%";
                    MBRChecklist.Items.Add(wrd, true);
                }

                IndexFTPRoot.Text     = config.Upload.IndexFtpRoot;
                IndexName.Text        = config.Upload.IndexName;
                ServerLookupRoot.Text = config.Upload.ServerLookupRoot;
                VideoFTPRoot.Text     = config.Upload.VideoDestinationRoot;

                if (config.Video.CaptureDeviceNumber < 0)
                {
                    VideoDeviceMenu.SelectedIndex = VideoDeviceMenu.Items.Count - 1;                     // "(none)" at end of list
                }
                else
                {
                    VideoDeviceMenu.SelectedIndex = Math.Min(config.Video.CaptureDeviceNumber, VideoDeviceMenu.Items.Count - 1);
                }
                FrameRate.Value     = config.Video.InputFrameRate;
                CaptureHeight.Value = config.Video.InputHeight;
                CaptureWidth.Value  = config.Video.InputWidth;

                OutputHandlerMenu.Text = config.Upload.UploadHandlerName;
            } catch (Exception ex) {
                MessageBox.Show("Couldn't open: " + ex.Message);
            }
            SaveConfigDialog.FileName = LoadConfigDialog.SafeFileName;             // next save is assumed to overwrite.
        }
// Example #12
        /// <summary>
        /// Handler for the save-config dialog: collects every UI control value into a
        /// fresh configuration object and writes it to the chosen file.
        /// </summary>
        private void SaveConfigDialog_FileOk(object sender, CancelEventArgs e)
        {
            // save config
            EncoderConfiguration.Configuration config = new EncoderConfiguration.Configuration();

            // A capture device number of -1 means "no device". Treat an empty selection
            // the same as the explicit "(none)" entry instead of crashing on null.
            object audioSelection = AudioDeviceMenu.SelectedItem;
            if (audioSelection == null || audioSelection.ToString() == "(none)") {
                config.Audio.CaptureDeviceNumber = -1;
            } else {
                config.Audio.CaptureDeviceNumber = Math.Max(0, AudioDeviceMenu.SelectedIndex);
            }
            config.Audio.Channels = (int)AudioChannelCount.Value;
            config.Audio.SampleRate = (int)AudioCaptureRate.Value;

            config.EncoderSettings.FragmentSeconds = (int)FragmentSize.Value;
            config.EncoderSettings.LocalSystemFilePrefix = FilePrefix.Text;
            config.EncoderSettings.LocalSystemOutputFolder = LocalFilesystemOutputFolder.Text;
            config.EncoderSettings.OutputHeight = (int)OutputHeight.Value;
            config.EncoderSettings.OutputWidth = (int)OutputWidth.Value;
            config.EncoderSettings.VideoBitrate = (int)VideoBitrate.Value;

            // MBR bits affect this: factor 1.0 (full size) is always present, and each
            // checked percentage adds an extra reduction factor.
            config.EncoderSettings.ReductionFactors = new List<double>();
            config.EncoderSettings.ReductionFactors.Add(1.0);
            foreach (var item in MBRChecklist.CheckedItems) {
                double val = double.Parse(item.ToString().Replace("%", "")) / 100.0;
                config.EncoderSettings.ReductionFactors.Add(val);
            }

            config.Upload.IndexFtpRoot = IndexFTPRoot.Text;
            config.Upload.IndexName = IndexName.Text;
            config.Upload.ServerLookupRoot = ServerLookupRoot.Text;
            config.Upload.VideoDestinationRoot = VideoFTPRoot.Text;

            // Same null-safe "(none)" handling as audio, for the video device.
            object videoSelection = VideoDeviceMenu.SelectedItem;
            if (videoSelection == null || videoSelection.ToString() == "(none)") {
                config.Video.CaptureDeviceNumber = -1;
            } else {
                config.Video.CaptureDeviceNumber = Math.Max(0, VideoDeviceMenu.SelectedIndex);
            }
            config.Video.InputFrameRate = (int)FrameRate.Value;
            config.Video.InputHeight = (int)CaptureHeight.Value;
            config.Video.InputWidth = (int)CaptureWidth.Value;

            config.Upload.UploadHandlerName = OutputHandlerMenu.Text;

            config.SaveToFile(SaveConfigDialog.FileName);
        }
// Example #13
        /// <summary>
        /// Decode SourceFile frame-by-frame and force-feed every decoded video/audio
        /// frame into an encoder configured from DestFile.
        /// The encoder is brought up in a "dry run" trick mode (no capture devices),
        /// then frames are inserted manually in the loop below.
        /// NOTE(review): the encoder config is loaded from DestFile — confirm the
        /// "destination" really is a configuration file path.
        /// </summary>
        private void DoTranscode()
        {
            // Start decode job (gets some basic information)
            DecoderJob decode = new DecoderJob();

            EncoderBridge.InitialiseDecoderJob(ref decode, SourceFile);

            if (decode.IsValid == 0)
            {
                MessageBox.Show("Sorry, the source file doesn't appear to be valid");
                return;
            }

            // Load config, then tweak to match input
            EncoderConfiguration.Configuration config = EncoderConfiguration.Configuration.LoadFromFile(DestFile);
            config.Audio.Channels = decode.AudioChannels;
            if (config.Audio.Channels > 0)
            {
                config.Audio.CaptureDeviceNumber = -2;                 // dummy
            }
            else
            {
                config.Audio.CaptureDeviceNumber = -1;                 // no audio
            }

            if (decode.videoWidth * decode.videoHeight > 0)
            {
                config.Video.CaptureDeviceNumber = -2;                 // dummy device
                config.Video.InputFrameRate      = (int)decode.Framerate;
                if (config.Video.InputFrameRate < 1)
                {
                    config.Video.InputFrameRate = 25;                     // don't know frame rate, so adapt.
                }

                config.EncoderSettings.OutputHeight = decode.videoHeight;
                config.EncoderSettings.OutputWidth  = decode.videoWidth;
            }
            else
            {
                config.Video.CaptureDeviceNumber = -1;                 // no video
            }

            #region Start up encoder in a trick mode
            // Dry-run start brings the encoder up without capture devices; buffers are
            // then cleared so only the frames inserted below are encoded.
            EncoderController encoder = new EncoderController(config);
            encoder.DryRun = true;
            encoder.Start();
            encoder.PauseCapture();
            encoder.ClearBuffers();
            encoder.DryRun = false;
            encoder.MinimumBufferPopulation = 5;             // to allow re-ordering of weird frame timings
            #endregion

            Console.WriteLine(decode.videoWidth + "x" + decode.videoHeight);
            double     a_time = -1, v_time = -1;
            MediaFrame mf = new MediaFrame();

            // Pinned managed buffers handed to the native decoder via MediaFrame.
            // NOTE(review): pinX/pinY are only freed on the normal path — an exception
            // thrown inside the loop leaks the pins (no try/finally).
            byte[]  IMAGE = new byte[decode.videoWidth * decode.videoHeight * 16];
            short[] AUDIO = new short[decode.MinimumAudioBufferSize * 2];

            GCHandle pinX = GCHandle.Alloc(IMAGE, GCHandleType.Pinned);
            mf.Yplane = pinX.AddrOfPinnedObject();

            GCHandle pinY = GCHandle.Alloc(AUDIO, GCHandleType.Pinned);
            mf.AudioBuffer = pinY.AddrOfPinnedObject();

            int i = 0, j = 0;             // i = video frames seen, j = audio frames seen
            while (EncoderBridge.DecodeFrame(ref decode, ref mf) >= 0)
            {
                if (mf.VideoSize > 0)
                {
                    Bitmap img = new Bitmap(decode.videoWidth, decode.videoHeight, decode.videoWidth * 3, System.Drawing.Imaging.PixelFormat.Format24bppRgb, mf.Yplane);
                    img.RotateFlip(RotateFlipType.RotateNoneFlipY);                     // because decode put things the TL->BR, where video capture is BL->TR.

                    v_time = mf.VideoSampleTime;
                    //v_time += 1.0 / config.Video.InputFrameRate;
                    // Insertion failures are deliberately ignored (best-effort).
                    try { encoder.ForceInsertFrame(img, v_time); } catch { }
                    Console.Write("v");
                    i++;
                }

                if (mf.AudioSize > 0)
                {
                    if (mf.AudioSize > 441000)
                    {
                        Console.Write("@");                         // protect ourselves from over-size packets!
                    }
                    else
                    {
                        short[] samples = new short[mf.AudioSize];
                        Marshal.Copy(mf.AudioBuffer, samples, 0, samples.Length);

                        a_time = mf.AudioSampleTime;

                        encoder.ForceInsertFrame(new TimedSample(samples, a_time));
                        Console.Write("a");
                    }
                    j++;
                }

                //while (encoder.AudioQueueLength > 50 || encoder.VideoQueueLength > 50) {
                if (!encoder.EncoderRunning)
                {
                    throw new Exception("Encoder broken!");
                }
                // Crude pacing so the encoder queues don't over-fill.
                Thread.Sleep((int)(250 / config.Video.InputFrameRate));
                //}

                // Progress readout in the window title.
                this.Text = "V (" + i + "/" + v_time + ") | A (" + j + "/" + a_time + ")";

                Application.DoEvents();

                if (!running)
                {
                    break;
                }

                // Reset sizes so stale buffer data isn't mistaken for a fresh frame next pass.
                mf.VideoSize = 0;
                mf.AudioSize = 0;
            }

            pinX.Free();
            pinY.Free();

            encoder.MinimumBufferPopulation = 1;             // let the buffers empty out

            Console.WriteLine("\r\nEND\r\n");

            // Give the encoder time to drain before stopping.
            Thread.Sleep(5000);
            encoder.Stop();
            EncoderBridge.CloseDecoderJob(ref decode);
        }
// Example #14
        /// <summary>
        /// Test harness entry point: decodes a sample transport stream from fixed
        /// C:\temp paths and force-feeds the frames into a dry-run encoder, using
        /// demuxed frame timings to reconstruct presentation times.
        /// </summary>
        static void Main()
        {
            /*Application.EnableVisualStyles();
             * Application.SetCompatibleTextRenderingDefault(false);
             * Application.Run(new Form1());*/


            byte[]       data = File.ReadAllBytes(@"C:\temp\sample.ts");
            MemoryStream ms   = new MemoryStream(data);


            EncoderConfiguration.Configuration config = EncoderConfiguration.Configuration.LoadFromFile(@"C:\temp\dummy_only.xml");
            EncoderController encoder = new EncoderController(config);

            #region Trick mode: encoder with no capture devices (so we can spoon-feed it content)
            encoder.DryRun = true;
            encoder.Start();
            encoder.PauseCapture();
            encoder.ClearBuffers();
            encoder.DryRun = false;
            encoder.MinimumBufferPopulation = 15;             // to allow re-ordering of B-frames
            #endregion

            plug_in = new TranscodeTimeOverlay();
            encoder.RegisterPlugin(plug_in);             // show captured time over recorded time.

            // Demux the same TS data separately to recover per-frame timing information.
            MpegTS_Demux demux = new MpegTS_Demux();
            demux.FeedTransportStream(ms, 0L);

            DecoderJob decode = new DecoderJob();
            EncoderBridge.InitialiseDecoderJob(ref decode, @"C:\temp\sample.ts");

            Console.WriteLine(decode.videoWidth + "x" + decode.videoHeight);
            double     a_time = -1, v_time = -1;
            MediaFrame mf = new MediaFrame();

            // Pinned managed buffers handed to the native decoder via MediaFrame.
            // NOTE(review): pinX/pinY are only freed on the normal path — an exception
            // thrown inside the loop leaks the pins (no try/finally).
            byte[]  IMAGE = new byte[decode.videoWidth * decode.videoHeight * 16];
            short[] AUDIO = new short[decode.MinimumAudioBufferSize * 2];

            // Sort demuxed frames by presentation time so they can be consumed in order.
            List <GenericMediaFrame> AudioFrames = demux.GetAvailableAudio();
            List <GenericMediaFrame> VideoFrames = demux.GetAvailableVideo();
            VideoFrames.Sort((a, b) => a.FramePresentationTime.CompareTo(b.FramePresentationTime));
            AudioFrames.Sort((a, b) => a.FramePresentationTime.CompareTo(b.FramePresentationTime));

            // Average frame durations (converted via p2d) used to advance the clocks smoothly.
            double dv_time = p2d((long)VideoFrames.Average(a => a.FrameDuration));
            double da_time = p2d((long)AudioFrames.Average(a => a.FrameDuration));

            GCHandle pinX = GCHandle.Alloc(IMAGE, GCHandleType.Pinned);
            mf.Yplane = pinX.AddrOfPinnedObject();

            GCHandle pinY = GCHandle.Alloc(AUDIO, GCHandleType.Pinned);
            mf.AudioBuffer = pinY.AddrOfPinnedObject();

            int i = 0, j = 0;             // i = video frames seen, j = audio frames seen
            while (EncoderBridge.DecodeFrame(ref decode, ref mf) >= 0)
            {
                if (mf.VideoSize > 0)
                {
                    Bitmap img = new Bitmap(decode.videoWidth, decode.videoHeight, decode.videoWidth * 3, System.Drawing.Imaging.PixelFormat.Format24bppRgb, mf.Yplane);
                    img.RotateFlip(RotateFlipType.RotateNoneFlipY);                     // because decode put things the TL->BR, where video capture is BL->TR.

                    // First frame takes the demuxed presentation time; later frames
                    // advance by the average duration to smooth jittery timestamps.
                    if (v_time < 0)
                    {
                        v_time = p2d(VideoFrames[i].FramePresentationTime);
                    }
                    else
                    {
                        v_time += dv_time;                      // p2d(VideoFrames[i].FrameDuration); // using dv_time smooths things
                    }
                    encoder.ForceInsertFrame(img, v_time);
                    Console.Write("v");
                    i++;
                }

                if (mf.AudioSize > 0)
                {
                    if (mf.AudioSize > 441000)
                    {
                        Console.Write("@");                         // protect ourselves from over-size packets!
                    }
                    else
                    {
                        short[] samples = new short[mf.AudioSize];
                        Marshal.Copy(mf.AudioBuffer, samples, 0, samples.Length);

                        // Audio clock: start at the demuxed time, then advance by each
                        // frame's own duration (unlike video, no averaging).
                        if (a_time < 0)
                        {
                            a_time = p2d(AudioFrames[j].FramePresentationTime);
                        }
                        else
                        {
                            a_time += p2d(AudioFrames[j].FrameDuration);
                        }

                        encoder.ForceInsertFrame(new TimedSample(samples, a_time));
                        Console.Write("a");
                    }
                    j++;
                }

                Application.DoEvents();
                // Reset sizes so stale buffer data isn't mistaken for a fresh frame next pass.
                mf.VideoSize = 0;
                mf.AudioSize = 0;
            }

            pinX.Free();
            pinY.Free();

            encoder.MinimumBufferPopulation = 1;             // let the buffers empty out

            Console.WriteLine("\r\nEND\r\n");

            // Give the encoder time to drain before stopping.
            Thread.Sleep(2000);
            encoder.Stop();
            EncoderBridge.CloseDecoderJob(ref decode);
        }