Example 1
        /// <summary>
        /// Read the supplied configuration and prepare the transformer for work.
        /// </summary>
        private void PrepareTransformer(EncoderConfiguration.Configuration Configuration, List<EncoderPackage> Packages)
        {
            Config = Configuration;
            TimeSpan TargetDuration = TimeSpan.FromSeconds(Config.EncoderSettings.FragmentSeconds);

            PublishPoint = Config.Upload.VideoDestinationRoot;
            if (String.IsNullOrEmpty(PublishPoint))
            {
                throw new ArgumentException("Publishing point must not be empty", nameof(Configuration));
            }

            PushServer     = new IisSmoothPush(new Uri(PublishPoint));
            TrackDurations = new Dictionary<int, long>();
            TrackOffsets   = new Dictionary<int, long>();

            targetDuration = (ulong)TargetDuration.Ticks;
            Streams        = new MediaStream[Packages.Count];

            foreach (var pkg in Packages)
            {
                if (pkg.Specification.HasVideo && pkg.Specification.HasAudio)
                {
                    throw new NotSupportedException("IIS Smooth output doesn't support pre-muxed streams");
                }

                if (pkg.Specification.HasAudio)
                {
                    // For now, stream 0 is audio and all others are video.
                    Streams[pkg.JobIndex] = new MediaStream
                    {
                        TrackId = pkg.JobIndex + 1,
                        FourCC  = "mp3a", // MP3 ("mp4a" would be AAC)
                        Height  = 0,
                        Width   = 0,
                        Bitrate = 96000   // TODO: use pkg.Job.Bitrate later
                    };
                }
                else if (pkg.Specification.HasVideo)
                {
                    Streams[pkg.JobIndex] = new MediaStream
                    {
                        TrackId = pkg.JobIndex + 1,
                        FourCC  = "H264", // Microsoft's FourCC, not the ISO one ('avc1')
                        Height  = Config.EncoderSettings.OutputHeight, // actual size may differ due to scaling
                        Width   = Config.EncoderSettings.OutputWidth,
                        Bitrate = pkg.Job.Bitrate
                    };
                }
            }

            Mp4fFile = new FileRoot(Streams);
            Demuxer  = new MpegTS_Demux[Packages.Count];
            for (int di = 0; di < Demuxer.Length; di++)
            {
                Demuxer[di] = new MpegTS_Demux();
            }
        }
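
For orientation, a minimal sketch of how this method might be driven, assuming a hypothetical BuildPackages helper (not in the original sample) that yields one EncoderPackage per elementary stream:

        private void StartSmoothStreaming(string configPath)
        {
            // BuildPackages is an assumed helper, not part of the original code.
            EncoderConfiguration.Configuration config = EncoderConfiguration.Configuration.LoadFromFile(configPath);
            List<EncoderPackage> packages = BuildPackages(config);
            PrepareTransformer(config, packages);
        }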
Example 2
        /// <summary>
        /// Transform an MPEG-TS file into an MP4f file.
        /// This all needs to be stripped out, and each stream needs its own IisSmoothPush connection.
        /// </summary>
        private void Transform(string SourceFile, int StreamIndex, int ChunkIndex)
        {
            // Check that the chunk file exists before parsing:
            string src = SourceFile;

            if (!File.Exists(src))
            {
                throw new FileNotFoundException("Source file missing", src);
            }

            // Get demuxer for THIS set of chunks (they can't be shared across different feeds)
            MpegTS_Demux demux = Demuxer[StreamIndex];

            double min_time = Math.Max(0, ChunkIndex - 10) * Config.EncoderSettings.FragmentSeconds;

            // Parse TS file into frames:
            using (var fs = new FileStream(src, FileMode.Open)) {
                lock (demux) {
                    demux.FeedTransportStream(fs, min_time);
                }
            }

            List<GenericMediaFrame> aud_frames = null;
            List<GenericMediaFrame> vid_frames = null;

            // Read frames that are ready
            int idx = StreamIndex;

            lock (demux) {
                aud_frames = demux.GetAvailableAudio();
                vid_frames = demux.GetAvailableVideo();

                if (aud_frames != null && aud_frames.Count > 0)
                {
                    Streams[idx].Frames = aud_frames;                     // for now, all audio is stream 0.
                    PushStream(Streams[idx], Mp4fFile);
                }

                if (vid_frames != null && vid_frames.Count > 0)
                {
                    Streams[idx].Frames = vid_frames;
                    PushStream(Streams[idx], Mp4fFile);
                }
            }

            // File is no longer needed
            OnFileConsumed(this, new FileEventArgs {
                ReferencedFile = new FileInfo(SourceFile)
            });
        }
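
A rough sketch of the chunk-feeding loop this method expects; the chunk file-naming pattern below is purely illustrative, since the real encoder supplies the paths:

        private void ConsumeChunks(int streamIndex, int firstChunk, int lastChunk)
        {
            for (int chunk = firstChunk; chunk <= lastChunk; chunk++)
            {
                // The path pattern is an assumption for illustration only.
                string path = String.Format(@"C:\temp\stream{0}_chunk{1}.ts", streamIndex, chunk);
                Transform(path, streamIndex, chunk);
            }
        }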
        // Requires using directives for System, System.Collections.Generic, System.Drawing,
        // System.IO, System.Linq, System.Runtime.InteropServices, System.Threading and System.Windows.Forms.
        static void Main()
        {
            /*Application.EnableVisualStyles();
             * Application.SetCompatibleTextRenderingDefault(false);
             * Application.Run(new Form1());*/

            byte[]       data = File.ReadAllBytes(@"C:\temp\sample.ts");
            MemoryStream ms   = new MemoryStream(data);

            EncoderConfiguration.Configuration config = EncoderConfiguration.Configuration.LoadFromFile(@"C:\temp\dummy_only.xml");
            EncoderController encoder = new EncoderController(config);

            #region Trick mode: encoder with no capture devices (so we can spoon-feed it content)
            encoder.DryRun = true;
            encoder.Start();
            encoder.PauseCapture();
            encoder.ClearBuffers();
            encoder.DryRun = false;
            encoder.MinimumBufferPopulation = 15;             // to allow re-ordering of B-frames
            #endregion

            plug_in = new TranscodeTimeOverlay();
            encoder.RegisterPlugin(plug_in);             // show captured time over recorded time.

            MpegTS_Demux demux = new MpegTS_Demux();
            demux.FeedTransportStream(ms, 0L);

            DecoderJob decode = new DecoderJob();
            EncoderBridge.InitialiseDecoderJob(ref decode, @"C:\temp\sample.ts");

            Console.WriteLine(decode.videoWidth + "x" + decode.videoHeight);
            double     a_time = -1, v_time = -1;
            MediaFrame mf = new MediaFrame();

            byte[]  IMAGE = new byte[decode.videoWidth * decode.videoHeight * 16]; // generously over-sized frame buffer
            short[] AUDIO = new short[decode.MinimumAudioBufferSize * 2];          // over-sized audio buffer

            List<GenericMediaFrame> AudioFrames = demux.GetAvailableAudio();
            List<GenericMediaFrame> VideoFrames = demux.GetAvailableVideo();
            VideoFrames.Sort((a, b) => a.FramePresentationTime.CompareTo(b.FramePresentationTime));
            AudioFrames.Sort((a, b) => a.FramePresentationTime.CompareTo(b.FramePresentationTime));

            double dv_time = p2d((long)VideoFrames.Average(a => a.FrameDuration));
            double da_time = p2d((long)AudioFrames.Average(a => a.FrameDuration));

            // Pin the managed image and audio buffers so the native decoder can
            // write into them directly; both handles are freed after the decode loop.
            GCHandle pinX = GCHandle.Alloc(IMAGE, GCHandleType.Pinned);
            mf.Yplane = pinX.AddrOfPinnedObject();

            GCHandle pinY = GCHandle.Alloc(AUDIO, GCHandleType.Pinned);
            mf.AudioBuffer = pinY.AddrOfPinnedObject();

            int i = 0, j = 0;
            while (EncoderBridge.DecodeFrame(ref decode, ref mf) >= 0)
            {
                if (mf.VideoSize > 0)
                {
                    Bitmap img = new Bitmap(decode.videoWidth, decode.videoHeight, decode.videoWidth * 3, System.Drawing.Imaging.PixelFormat.Format24bppRgb, mf.Yplane);
                    img.RotateFlip(RotateFlipType.RotateNoneFlipY);                     // the decoder writes rows top-down (TL->BR); video capture expects bottom-up (BL->TR).

                    if (v_time < 0)
                    {
                        v_time = p2d(VideoFrames[i].FramePresentationTime);
                    }
                    else
                    {
                        v_time += dv_time;                      // using the average duration instead of p2d(VideoFrames[i].FrameDuration) smooths timing
                    }
                    encoder.ForceInsertFrame(img, v_time);
                    Console.Write("v");
                    i++;
                }

                if (mf.AudioSize > 0)
                {
                    if (mf.AudioSize > 441000)
                    {
                        Console.Write("@");                         // protect ourselves from over-size packets!
                    }
                    else
                    {
                        short[] samples = new short[mf.AudioSize];
                        Marshal.Copy(mf.AudioBuffer, samples, 0, samples.Length);

                        if (a_time < 0)
                        {
                            a_time = p2d(AudioFrames[j].FramePresentationTime);
                        }
                        else
                        {
                            a_time += p2d(AudioFrames[j].FrameDuration);
                        }

                        encoder.ForceInsertFrame(new TimedSample(samples, a_time));
                        Console.Write("a");
                    }
                    j++;
                }

                Application.DoEvents();
                mf.VideoSize = 0;
                mf.AudioSize = 0;
            }

            pinX.Free();
            pinY.Free();

            encoder.MinimumBufferPopulation = 1;             // let the buffers empty out

            Console.WriteLine("\r\nEND\r\n");

            Thread.Sleep(2000);
            encoder.Stop();
            EncoderBridge.CloseDecoderJob(ref decode);
        }
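
The p2d helper is referenced but never shown. Given that it is applied to FramePresentationTime and FrameDuration values coming out of the MPEG-TS demuxer, and that MPEG-TS timestamps tick at 90 kHz, a plausible shape (an assumption to verify against the real demuxer) is:

        // Assumed implementation of the missing helper: convert 90 kHz
        // MPEG-TS PTS ticks to seconds. A guess, not the original code.
        static double p2d(long pts)
        {
            return pts / 90000.0;
        }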