/// <summary>
        /// Stop the encoding process: pause capture devices, signal the core
        /// loops to stop, wait for them to exit, then release all resources.
        /// Safe to call even if setup only partially completed.
        /// </summary>
        public void Stop()
        {
            // Pause capture first so no new frames arrive during shutdown.
            if (cam != null)
            {
                cam.Pause();
            }
            if (mic != null)
            {
                mic.Pause();
            }
            EncoderRunning = false;

            if (coreloops != null)
            {
                foreach (var loop in coreloops)
                {
                    loop.Join();                     // wait for the encoder to get the message and actually stop. Easier than screwing it's state data.
                }
            }

            // Dispose devices defensively. Previously these were called without null
            // checks (relying on a swallowed NullReferenceException); now we check
            // first, consistent with the Pause() calls above.
            if (cam != null)
            {
                try { cam.Dispose(); } catch { }
            }
            if (mic != null)
            {
                try { mic.Dispose(); } catch { }
            }

            // Close every encoder job. Best-effort: one failed close must not
            // prevent the remaining jobs from being closed.
            if (Packages != null)
            {
                foreach (var pkg in Packages)
                {
                    try {
                        EncoderBridge.CloseEncoderJob(ref pkg.Job);
                    } catch { }
                }
            }
            if (outputRouter != null)
            {
                outputRouter.Close();
            }
        }
        /// <summary>
        /// Initialise an encoder job based on previously setup capture devices.
        /// Need to have one job per 'ReductionFactor' in the config.
        /// </summary>
        /// <exception cref="Exception">Thrown when the native encoder rejects or fails a job.</exception>
        private void EncoderSetup()
        {
            var factors = config.EncoderSettings.ReductionFactors;             // NOTE(review): read but unused below; kept in case the getter has side effects

            Packages = new List <EncoderPackage>();

            // Frame rate comes from the capture device when present, otherwise from config.
            int frameRate = (cam == null) ? config.Video.InputFrameRate : cam.m_frameRate;

            var specs = ListRequiredPackages();
            int index = 0;

            foreach (var spec in specs)
            {
                var job = new EncoderJob();
                job.OldSegmentNumber = 1;

                // Output target: <folder>/<prefix>_<package index>
                string outputPrefix =
                    Path.Combine(config.EncoderSettings.LocalSystemOutputFolder,
                                 config.EncoderSettings.LocalSystemFilePrefix)
                    + "_" + index;

                // Bit rate scales linearly with the package's quality factor.
                int bitrate = (int)(config.EncoderSettings.VideoBitrate * spec.Quality);

                int error = EncoderBridge.InitialiseEncoderJob(
                    ref job,                                  // job to complete
                    spec.VideoSize.Width,                     // OUTPUT video width
                    spec.VideoSize.Height,                    // OUTPUT video height
                    outputPrefix,                             // OUTPUT folder + filename prefix
                    frameRate,                                // INPUT frame rate (output will match)
                    bitrate,                                  // OUTPUT video bit rate
                    config.EncoderSettings.FragmentSeconds);  // Fragment length (seconds)

                if (error != 0)
                {
                    throw new Exception("Encoder setup error #" + error);
                }
                if (!job.IsValid)
                {
                    throw new Exception("Job rejected");
                }

                var frame = new MediaFrame();
                // Don't force audio/video sync when the package carries no video.
                frame.ForceAudioConsumption = spec.HasVideo ? (byte)0 : (byte)1;

                var package = new EncoderPackage(spec, index, job, frame);
                ConnectPackageToBuffers(package, spec);

                Packages.Add(package);
                index++;
            }
        }
// ---- (scraper artifact: "Exemplo n.º 3") ----
        /// <summary>
        /// Convert a GDI image into an equal-sized YUV planar image.
        /// Reads the bitmap's pixels (normalised to 24bpp RGB) and writes into the
        /// TimedImage's Luma/Cb/Cr planes via the native converter.
        /// </summary>
        /// <param name="img">Source image; null is a no-op.</param>
        /// <param name="ti">Destination planar image; assumed to match 'img' dimensions — TODO confirm with callers.</param>
        private void ResampleBuffer(Bitmap img, TimedImage ti)
        {
            if (img == null)
            {
                return;
            }
            Rectangle  r   = new Rectangle(0, 0, img.Width, img.Height);
            // ReadOnly: the bitmap is only read here, so skip the write-back copy
            // that ReadWrite would force on UnlockBits. Pixels are normalised to
            // 24bpp RGB so the native converter sees a fixed layout.
            BitmapData bmp = img.LockBits(r, ImageLockMode.ReadOnly, PixelFormat.Format24bppRgb);

            // Do the RGB -> YUV conversion
            try {
                GCHandle _Lx = default(GCHandle), _Crx = default(GCHandle), _Cbx = default(GCHandle);
                try {
                    // Pin the three destination planes so the native code sees stable addresses.
                    _Lx = GCHandle.Alloc(ti.Luma, GCHandleType.Pinned);
                    IntPtr _L = _Lx.AddrOfPinnedObject();
                    _Crx = GCHandle.Alloc(ti.Cr, GCHandleType.Pinned);
                    IntPtr _Cr = _Crx.AddrOfPinnedObject();
                    _Cbx = GCHandle.Alloc(ti.Cb, GCHandleType.Pinned);
                    IntPtr _Cb = _Cbx.AddrOfPinnedObject();

                    // Convert, but don't scale. No sub-sampling here.
                    EncoderBridge.Rgb2YuvIS(ti.Width, ti.Height,
                                            bmp.Scan0, _L, _Cb, _Cr);
                } finally {
                    if (_Lx.IsAllocated)
                    {
                        _Lx.Free();
                    }
                    if (_Crx.IsAllocated)
                    {
                        _Crx.Free();
                    }
                    if (_Cbx.IsAllocated)
                    {
                        _Cbx.Free();
                    }
                }
            } finally {
                img.UnlockBits(bmp);
            }
        }
        /// <summary>
        /// Scale a single image plane from a source buffer to a destination buffer
        /// using the native planar scaler.
        /// </summary>
        /// <param name="Src">Source plane bytes (presumably src_w * src_h — confirm with callers).</param>
        /// <param name="Dst">Destination plane bytes (presumably dst_w * dst_h — confirm with callers).</param>
        /// <param name="HQ">Request the high-quality scaling path in the native code.</param>
        private void ScalePlane(byte[] Src, byte[] Dst, int src_w, int src_h, int dst_w, int dst_h, bool HQ)
        {
            var srcPin = default(GCHandle);
            var dstPin = default(GCHandle);

            try {
                // Pin both buffers so the native scaler sees stable addresses.
                srcPin = GCHandle.Alloc(Src, GCHandleType.Pinned);
                dstPin = GCHandle.Alloc(Dst, GCHandleType.Pinned);

                EncoderBridge.PlanarScale(
                    srcPin.AddrOfPinnedObject(), dstPin.AddrOfPinnedObject(),
                    src_w, src_h, dst_w, dst_h, HQ);
            } finally {
                // Free whichever pins were actually taken (Alloc may have thrown).
                if (srcPin.IsAllocated)
                {
                    srcPin.Free();
                }
                if (dstPin.IsAllocated)
                {
                    dstPin.Free();
                }
            }
        }
        /// <summary>
        /// This loop does the actual work of reading caches into the encoder and firing actions.
        /// This loop controls Multiple-bit-rate encoding.
        /// Runs on its own thread, one instance per EncoderPackage; started via a
        /// thread entry point, hence the 'object' parameter.
        /// </summary>
        /// <param name="Package">Must be an EncoderPackage; anything else throws.</param>
        private void EncoderCoreLoop(object Package)
        {
            EncoderPackage pkg = Package as EncoderPackage;

            try {
                #region Start up
                if (pkg == null)
                {
                    throw new Exception("Encoder core loop package was lost");
                }
                double lastVideoTime = 0.0;                 // used for Adaptive frame rate

                if (cam != null)
                {
                    lastVideoTime = cam.TimecodeStart;
                }

                // Only packages carrying video contribute to the global FrameCount.
                int loop_frame_incr = 0;
                if (pkg.Specification.HasVideo)
                {
                    loop_frame_incr = 1;
                    videoJobs++;
                }
                #endregion

                // Poll until Start() flips EncoderRunning on.
                while (!EncoderRunning)                   // wait for the signal!
                {
                    System.Threading.Thread.Sleep(1000);
                }

                WaitForSyncFlag();

                start = DateTime.Now;
                while (EncoderRunning)                   // Encode frames until stopped
                {
                    #region Frame availability checks
                    // Wait for buffers to be populated
                    // Keep pulling captured frames into the package's buffers until
                    // enough are queued, or until the encoder is told to stop.
                    while (pkg.BuffersEmpty(MinimumBufferPopulation) && EncoderRunning)
                    {
                        foreach (var buf in pkg.Buffers)
                        {
                            buf.RebufferCapturedFrames();
                        }
                        if (pkg.BuffersEmpty(MinimumBufferPopulation))
                        {
                            System.Threading.Thread.Sleep(frameSleep);
                        }
                    }

                    // Dry-run mode spins without encoding (used for warm-up/trick modes).
                    if (DryRun)
                    {
                        System.Threading.Thread.Sleep(frameSleep);
                        continue;                         // don't encode
                    }
                    #endregion

                    // Load buffered data into the frame, push through the native encoder,
                    // then release the buffers for reuse.
                    pkg.LoadAllBuffers();
                    EncoderBridge.EncodeFrame(ref pkg.Job, ref pkg.Frame);
                    pkg.UnloadAllBuffers();

                    if (!pkg.Job.IsValid)
                    {
                        throw new Exception("Job became invalid. Possible memory or filesystem error");
                    }

                    #region Segment switching
                    // The native encoder bumps SegmentNumber when it starts a new fragment;
                    // announce the finished chunk to the output router exactly once.
                    if (pkg.Job.SegmentNumber != pkg.Job.OldSegmentNumber)
                    {
                        double real_chunk_duration = pkg.Frame.VideoSampleTime - lastVideoTime;
                        // outputRouter is shared between all core loops; serialise access.
                        lock (outputRouter) {
                            outputRouter.NewChunkAvailable(pkg.Job.OldSegmentNumber, pkg.JobIndex, real_chunk_duration);
                        }
                        lastVideoTime            = pkg.Frame.VideoSampleTime;
                        pkg.Job.OldSegmentNumber = pkg.Job.SegmentNumber;
                    }

                    FrameCount += loop_frame_incr;
                    #endregion
                }
            } catch (Exception ex) {
                System.Diagnostics.Debug.Fail("EncoderController.cs: Core loop fault.", ex.Message + "\r\n" + ex.StackTrace);

                // Best-effort crash breadcrumb next to the encoder output.
                File.WriteAllText(config.EncoderSettings.LocalSystemOutputFolder + @"/error.txt", "Main loop: " + ex.Message + "\r\n" + ex.StackTrace);
            } finally {
                if (pkg != null)
                {
                    EncoderBridge.CloseEncoderJob(ref pkg.Job);                    // NEVER FORGET THIS!!
                }
                if (EncoderRunning)
                {
                    Halt();                                 // Don't use 'Stop()' in the core loop, or the system will freeze!
                }
                // NOTE(review): Thread.Abort is obsolete and throws
                // PlatformNotSupportedException on .NET Core/5+; the thread exits
                // naturally after this finally block anyway — consider removing.
                System.Threading.Thread.CurrentThread.Abort();
            }
        }
// ---- (scraper artifact: "Exemplo n.º 6") ----
        /// <summary>
        /// Transcode SourceFile into the segmented output described by DestFile's
        /// configuration. Decodes frames with the native decoder and spoon-feeds
        /// them into an EncoderController started in dry-run/trick mode.
        /// </summary>
        private void DoTranscode()
        {
            // Start decode job (gets some basic information)
            DecoderJob decode = new DecoderJob();

            EncoderBridge.InitialiseDecoderJob(ref decode, SourceFile);

            if (decode.IsValid == 0)
            {
                MessageBox.Show("Sorry, the source file doesn't appear to be valid");
                return;
            }

            try
            {
                // Load config, then tweak to match input
                EncoderConfiguration.Configuration config = EncoderConfiguration.Configuration.LoadFromFile(DestFile);
                config.Audio.Channels = decode.AudioChannels;
                if (config.Audio.Channels > 0)
                {
                    config.Audio.CaptureDeviceNumber = -2;             // dummy
                }
                else
                {
                    config.Audio.CaptureDeviceNumber = -1;             // no audio
                }

                if (decode.videoWidth * decode.videoHeight > 0)
                {
                    config.Video.CaptureDeviceNumber = -2;             // dummy device
                    config.Video.InputFrameRate      = (int)decode.Framerate;
                    if (config.Video.InputFrameRate < 1)
                    {
                        config.Video.InputFrameRate = 25;              // don't know frame rate, so adapt.
                    }

                    config.EncoderSettings.OutputHeight = decode.videoHeight;
                    config.EncoderSettings.OutputWidth  = decode.videoWidth;
                }
                else
                {
                    config.Video.CaptureDeviceNumber = -1;             // no video
                }

                #region Start up encoder in a trick mode
                EncoderController encoder = new EncoderController(config);
                encoder.DryRun = true;
                encoder.Start();
                encoder.PauseCapture();
                encoder.ClearBuffers();
                encoder.DryRun = false;
                encoder.MinimumBufferPopulation = 5;             // to allow re-ordering of weird frame timings
                #endregion

                Console.WriteLine(decode.videoWidth + "x" + decode.videoHeight);
                double     a_time = -1, v_time = -1;
                MediaFrame mf = new MediaFrame();

                byte[]  IMAGE = new byte[decode.videoWidth * decode.videoHeight * 16];
                short[] AUDIO = new short[decode.MinimumAudioBufferSize * 2];

                // Pin scratch buffers so the native decoder can write into them.
                GCHandle pinX = GCHandle.Alloc(IMAGE, GCHandleType.Pinned);
                GCHandle pinY = GCHandle.Alloc(AUDIO, GCHandleType.Pinned);
                try
                {
                    mf.Yplane      = pinX.AddrOfPinnedObject();
                    mf.AudioBuffer = pinY.AddrOfPinnedObject();

                    int i = 0, j = 0;
                    while (EncoderBridge.DecodeFrame(ref decode, ref mf) >= 0)
                    {
                        if (mf.VideoSize > 0)
                        {
                            Bitmap img = new Bitmap(decode.videoWidth, decode.videoHeight, decode.videoWidth * 3, System.Drawing.Imaging.PixelFormat.Format24bppRgb, mf.Yplane);
                            img.RotateFlip(RotateFlipType.RotateNoneFlipY);                     // because decode put things the TL->BR, where video capture is BL->TR.

                            v_time = mf.VideoSampleTime;
                            try { encoder.ForceInsertFrame(img, v_time); } catch { }
                            Console.Write("v");
                            i++;
                        }

                        if (mf.AudioSize > 0)
                        {
                            if (mf.AudioSize > 441000)
                            {
                                Console.Write("@");                         // protect ourselves from over-size packets!
                            }
                            else
                            {
                                short[] samples = new short[mf.AudioSize];
                                Marshal.Copy(mf.AudioBuffer, samples, 0, samples.Length);

                                a_time = mf.AudioSampleTime;

                                encoder.ForceInsertFrame(new TimedSample(samples, a_time));
                                Console.Write("a");
                            }
                            j++;
                        }

                        if (!encoder.EncoderRunning)
                        {
                            throw new Exception("Encoder broken!");
                        }
                        // Throttle feeding so the encoder buffers don't overflow.
                        Thread.Sleep((int)(250 / config.Video.InputFrameRate));

                        this.Text = "V (" + i + "/" + v_time + ") | A (" + j + "/" + a_time + ")";

                        Application.DoEvents();

                        if (!running)
                        {
                            break;
                        }

                        // Reset sizes so stale data isn't re-processed next iteration.
                        mf.VideoSize = 0;
                        mf.AudioSize = 0;
                    }
                }
                finally
                {
                    // BUG FIX: previously these pins leaked if the loop threw
                    // (e.g. "Encoder broken!"), leaving IMAGE/AUDIO pinned forever.
                    pinX.Free();
                    pinY.Free();
                }

                encoder.MinimumBufferPopulation = 1;             // let the buffers empty out

                Console.WriteLine("\r\nEND\r\n");

                Thread.Sleep(5000);
                encoder.Stop();
            }
            finally
            {
                // BUG FIX: the native decoder job is now closed on all paths,
                // not just on clean completion.
                EncoderBridge.CloseDecoderJob(ref decode);
            }
        }
// ---- (scraper artifact: "Exemplo n.º 7") ----
        /// <summary>
        /// Test harness: demuxes a sample transport stream for timing data, decodes
        /// it with the native decoder, and spoon-feeds frames into an
        /// EncoderController started in dry-run/trick mode.
        /// </summary>
        static void Main()
        {
            /*Application.EnableVisualStyles();
             * Application.SetCompatibleTextRenderingDefault(false);
             * Application.Run(new Form1());*/


            byte[]       data = File.ReadAllBytes(@"C:\temp\sample.ts");
            MemoryStream ms   = new MemoryStream(data);


            EncoderConfiguration.Configuration config = EncoderConfiguration.Configuration.LoadFromFile(@"C:\temp\dummy_only.xml");
            EncoderController encoder = new EncoderController(config);

            #region Trick mode: encoder with no capture devices (so we can spoon-feed it content)
            encoder.DryRun = true;
            encoder.Start();
            encoder.PauseCapture();
            encoder.ClearBuffers();
            encoder.DryRun = false;
            encoder.MinimumBufferPopulation = 15;             // to allow re-ordering of B-frames
            #endregion

            plug_in = new TranscodeTimeOverlay();
            encoder.RegisterPlugin(plug_in);             // show captured time over recorded time.

            MpegTS_Demux demux = new MpegTS_Demux();
            demux.FeedTransportStream(ms, 0L);

            DecoderJob decode = new DecoderJob();
            EncoderBridge.InitialiseDecoderJob(ref decode, @"C:\temp\sample.ts");

            // BUG FIX: validity was never checked here (unlike the transcode path);
            // continuing with an invalid job would decode garbage.
            if (decode.IsValid == 0)
            {
                Console.WriteLine("Source file doesn't appear to be valid; aborting.");
                return;
            }

            try
            {
                Console.WriteLine(decode.videoWidth + "x" + decode.videoHeight);
                double     a_time = -1, v_time = -1;
                MediaFrame mf = new MediaFrame();

                byte[]  IMAGE = new byte[decode.videoWidth * decode.videoHeight * 16];
                short[] AUDIO = new short[decode.MinimumAudioBufferSize * 2];

                // Sort demuxed frames by presentation time so durations line up with decode order.
                List <GenericMediaFrame> AudioFrames = demux.GetAvailableAudio();
                List <GenericMediaFrame> VideoFrames = demux.GetAvailableVideo();
                VideoFrames.Sort((a, b) => a.FramePresentationTime.CompareTo(b.FramePresentationTime));
                AudioFrames.Sort((a, b) => a.FramePresentationTime.CompareTo(b.FramePresentationTime));

                // BUG FIX: Average() throws InvalidOperationException on an empty list;
                // fall back to zero when a stream is absent from the sample.
                double dv_time = (VideoFrames.Count > 0) ? p2d((long)VideoFrames.Average(a => a.FrameDuration)) : 0.0;
                double da_time = (AudioFrames.Count > 0) ? p2d((long)AudioFrames.Average(a => a.FrameDuration)) : 0.0;

                // Pin scratch buffers so the native decoder can write into them.
                GCHandle pinX = GCHandle.Alloc(IMAGE, GCHandleType.Pinned);
                GCHandle pinY = GCHandle.Alloc(AUDIO, GCHandleType.Pinned);
                try
                {
                    mf.Yplane      = pinX.AddrOfPinnedObject();
                    mf.AudioBuffer = pinY.AddrOfPinnedObject();

                    int i = 0, j = 0;
                    while (EncoderBridge.DecodeFrame(ref decode, ref mf) >= 0)
                    {
                        if (mf.VideoSize > 0)
                        {
                            Bitmap img = new Bitmap(decode.videoWidth, decode.videoHeight, decode.videoWidth * 3, System.Drawing.Imaging.PixelFormat.Format24bppRgb, mf.Yplane);
                            img.RotateFlip(RotateFlipType.RotateNoneFlipY);                     // because decode put things the TL->BR, where video capture is BL->TR.

                            // First frame takes the demuxed presentation time; after that,
                            // the average duration is used (smooths out jittery timings).
                            // NOTE(review): assumes decode order matches VideoFrames order — confirm.
                            if (v_time < 0)
                            {
                                v_time = p2d(VideoFrames[i].FramePresentationTime);
                            }
                            else
                            {
                                v_time += dv_time;                      // using dv_time smooths things
                            }
                            encoder.ForceInsertFrame(img, v_time);
                            Console.Write("v");
                            i++;
                        }

                        if (mf.AudioSize > 0)
                        {
                            if (mf.AudioSize > 441000)
                            {
                                Console.Write("@");                         // protect ourselves from over-size packets!
                            }
                            else
                            {
                                short[] samples = new short[mf.AudioSize];
                                Marshal.Copy(mf.AudioBuffer, samples, 0, samples.Length);

                                if (a_time < 0)
                                {
                                    a_time = p2d(AudioFrames[j].FramePresentationTime);
                                }
                                else
                                {
                                    a_time += p2d(AudioFrames[j].FrameDuration);
                                }

                                encoder.ForceInsertFrame(new TimedSample(samples, a_time));
                                Console.Write("a");
                            }
                            j++;
                        }

                        Application.DoEvents();
                        // Reset sizes so stale data isn't re-processed next iteration.
                        mf.VideoSize = 0;
                        mf.AudioSize = 0;
                    }
                }
                finally
                {
                    // BUG FIX: previously the pins leaked if the decode loop threw.
                    pinX.Free();
                    pinY.Free();
                }

                encoder.MinimumBufferPopulation = 1;             // let the buffers empty out

                Console.WriteLine("\r\nEND\r\n");

                Thread.Sleep(2000);
                encoder.Stop();
            }
            finally
            {
                // BUG FIX: the native decoder job is now closed on all paths.
                EncoderBridge.CloseDecoderJob(ref decode);
            }
        }