Example #1
0
        /// <summary>
        /// Builds a playbin3-based playback pipeline that renders audio through
        /// an autoaudiosink and delivers raw video frames through an AppSink.
        /// </summary>
        /// <param name="format">Requested raw video format; Unknown falls back to Bgrx.</param>
        /// <exception cref="Exception">Thrown when any pipeline element cannot be created.</exception>
        public Player(VideoFormat format = VideoFormat.Bgra)
        {
            // Create the empty pipeline and the sinks.
            playbin   = ElementFactory.Make("playbin3") as Pipeline;
            audiosink = ElementFactory.Make("autoaudiosink", "audiosink");
            videosink = new AppSink("videosink");

            // Validate creation BEFORE touching any element: the original code
            // dereferenced playbin.Bus ahead of this check, so a failed
            // ElementFactory.Make produced a NullReferenceException instead of
            // the intended diagnostic.
            if (playbin == null || videosink == null || audiosink == null)
            {
                throw new Exception("Not all elements could be created");
            }

            bus = playbin.Bus;
            bus.AddSignalWatch();
            bus.EnableSyncMessageEmission();
            bus.Message += Bus_Message;

            // Synchronized rendering; no QoS, no frame dropping — deliver every frame.
            videosink.Sync = true;
            videosink.Qos  = false;
            videosink.Drop = false;

            // Fall back to Bgrx when the caller passed Unknown.
            var formatString = (format != VideoFormat.Unknown ? format : VideoFormat.Bgrx).ToFormatString();

            videosink.Caps        = Caps.FromString($"video/x-raw, format={formatString}");
            videosink.MaxBuffers  = 1;    // keep only the newest buffer
            videosink.EmitSignals = true; // raise NewPreroll/NewSample events
            videosink.NewPreroll += Videosink_NewPreroll;
            videosink.NewSample  += Videosink_NewSample;

            playbin["video-sink"]         = videosink;
            playbin["audio-sink"]         = audiosink;
            playbin.SetState(FTargetState = State.Ready);
        }
Example #2
0
        /// <summary>
        /// Creates the playback pipeline (playbin + AppSink) on first call.
        /// On subsequent calls it only resets the existing pipeline to NULL.
        /// </summary>
        private void CreatePipeline()
        {
            if (Pipeline != null)
            {
                // Pipeline already exists: just reset it instead of rebuilding.
                Pipeline.SetState(State.Null);
                IsLive = false;
                return;
            }

            Pipeline = new Pipeline("playback");
            Playbin  = ElementFactory.Make("playbin", "playbin");
            Pipeline.Add(Playbin);

            VideoSink         = new AppSink("videoSink");
            VideoSink["caps"] = Caps.FromString("video/x-raw,format=RGBA");

            VideoSink.Drop             = true;                  // drop frames if cannot keep up
            VideoSink.Sync             = true;                  // synchronized playback
            VideoSink.MaxLateness      = (1000 / 30) * 1000000; // maximum latency to achieve at least 30 fps
            VideoSink.MaxBuffers       = 1;                     // no buffering for video sink
            VideoSink.Qos              = true;                  // QoS for video sink
            VideoSink.EnableLastSample = false;                 // no need for last sample as we are pulling samples

            Playbin["video-sink"] = VideoSink;

            // Report a failed state change instead of silently discarding the
            // result (the original stored it in an unused local).
            var ret = Pipeline.SetState(State.Ready);
            if (ret == StateChangeReturn.Failure)
            {
                Console.WriteLine("Unable to set the pipeline to the ready state.");
            }
        }
        /// <summary>
        /// The appsink has received a buffer: pull the sample and print a
        /// progress marker. The sample is released by the using block.
        /// </summary>
        /// <param name="o">The AppSink that raised the signal.</param>
        /// <param name="args">Signal arguments (unused).</param>
        static void NewSample(object o, NewSampleArgs args)
        {
            // Guard the cast: bail out if the sender is not an AppSink
            // (the original dereferenced the result of `as` unchecked).
            if (!(o is AppSink sink))
            {
                return;
            }

            // Retrieve the buffer. The using block disposes the sample, so the
            // original's explicit sample.Dispose() inside it was a redundant
            // double-dispose and has been removed.
            using (var sample = sink.PullSample())
            {
                if (sample == null)
                {
                    return;
                }
                // The only thing we do in this example is print a * to indicate a received buffer
                Console.Write("* ");
            }
        }
Example #4
0
        /// <summary>
        /// Plays video and audio from <paramref name="source"/> through a playbin
        /// pipeline, receiving raw RGBA video frames via an AppSink, and blocks
        /// on a GLib main loop until playback ends.
        /// </summary>
        /// <param name="args">Command-line arguments, forwarded to Application.Init.</param>
        /// <param name="source">Media URI to play.</param>
        /// <param name="sourceOptions">Unused in this example.</param>
        public static void Run(ref string[] args, string source, string sourceOptions = "")
        {
            Console.WriteLine($"Playing video and audio from {source}");
            Application.Init(ref args);

            Playbin   = ElementFactory.Make("playbin", "playbin");
            VideoSink = new AppSink("videoSink");
            Pipeline  = new Gst.Pipeline("simplepipeline");

            // Raw RGBA frames, handed to us through the NewSample signal.
            VideoSink["caps"]     = Caps.FromString("video/x-raw,format=RGBA");
            VideoSink.EmitSignals = true;
            VideoSink.NewSample  += NewVideoSample;
            VideoSink.Drop        = true;
            VideoSink.Sync        = true;
            VideoSink.Qos         = true;

            Playbin["uri"]        = source;
            Playbin["video-sink"] = VideoSink;
            Pipeline.Add(Playbin);

            MainLoop = new GLib.MainLoop();

            var bus = Pipeline.Bus;
            bus.AddSignalWatch();
            bus.Message += OnMessage;

            switch (Pipeline.SetState(State.Playing))
            {
            case StateChangeReturn.Failure:
                Console.WriteLine("Unable to set the pipeline to the playing state.");
                return;

            case StateChangeReturn.NoPreroll:
                // NoPreroll means a live source: no buffering/prerolling happens.
                IsLive = true;
                Console.WriteLine("Playing a live stream.");
                break;
            }

            MainLoop.Run();

            Pipeline.SetState(State.Null);
        }
        /// <summary>
        /// GST polling thread function. Pulls decoded buffers from the appsink,
        /// copies the planar YUV data into the frame buffers (full-size Y plane
        /// followed by quarter-size U and V planes) and drives the player state
        /// machine until <c>isrunning</c> becomes false, then releases the
        /// GStreamer elements.
        /// </summary>
        void KeepPolling()
        {
            while (isrunning)
            {
                switch (playerState)
                {
                case VideoPlayerState.STOPPED:
                    break;

                case VideoPlayerState.LOADING:
                    // Query video width/height from the first buffer's caps.
                    int w = 0, h = 0;
                    Gst.Buffer buf = appSink.PullBuffer();
                    if (buf != null)
                    {
                        Console.WriteLine(buf.Caps.ToString());

                        int.TryParse(buf.Caps[0].GetValue("width").Val.ToString(), out w);
                        int.TryParse(buf.Caps[0].GetValue("height").Val.ToString(), out h);
                        if (w * h != 0)
                        {
                            // Create decoded frame buffers and copy the first frame.
                            lock (lockFrameBuf)
                            {
                                width   = w;
                                height  = h;
                                bufferY = new byte[width * height];
                                bufferU = new byte[width * height / 4];
                                bufferV = new byte[width * height / 4];
                                IntPtr src = buf.Data;
                                Marshal.Copy(src, bufferY, 0, width * height);
                                src = new IntPtr(src.ToInt64() + width * height);
                                Marshal.Copy(src, bufferU, 0, width * height / 4);
                                src = new IntPtr(src.ToInt64() + width * height / 4);
                                Marshal.Copy(src, bufferV, 0, width * height / 4);
                                isFrameNew = true;
                                // Dispose handle to avoid memory leak.
                                buf.Dispose();
                            }

                            Console.WriteLine("PLAYING");
                            playerState = VideoPlayerState.PLAYING;

                            // Skip the sleep below and poll again immediately.
                            continue;
                        }
                        // FIX: the original leaked the buffer whenever the caps
                        // did not yield a usable width/height.
                        buf.Dispose();
                    }
                    break;

                case VideoPlayerState.PLAYING:
                    Gst.Buffer buf2 = appSink.PullBuffer();
                    if (buf2 != null)
                    {
                        lock (lockFrameBuf)
                        {
                            // Update the frame buffers with the new frame.
                            IntPtr src = buf2.Data;
                            Marshal.Copy(src, bufferY, 0, width * height);
                            src = new IntPtr(src.ToInt64() + width * height);
                            Marshal.Copy(src, bufferU, 0, width * height / 4);
                            src = new IntPtr(src.ToInt64() + width * height / 4);
                            Marshal.Copy(src, bufferV, 0, width * height / 4);
                            isFrameNew = true;
                        }
                        buf2.Dispose();
                    }
                    else
                    {
                        // No more buffers: stream ended. Clear the frame buffers.
                        lock (lockFrameBuf)
                        {
                            bufferY = new byte[width * height];
                            bufferU = new byte[width * height / 4];
                            bufferV = new byte[width * height / 4];
                        }
                        playerState = VideoPlayerState.STOPPED;
                        Console.WriteLine("STOPPED");
                    }

                    break;

                case VideoPlayerState.PAUSED:
                default:
                    // Do nothing.
                    break;
                }
                Thread.Sleep(10);
            }

            // Clean up: release the GStreamer elements when the thread exits.
            this.PlayerState = VideoPlayerState.STOPPED;
            playBin.SetState(State.Null);
            playBin.Dispose();
            appSink.SetState(State.Null);
            appSink.Dispose();
            playBin = null;
            appSink = null;
        }
        /// <summary>
        /// (Re)initializes the playbin/appsink pair and starts playing the media
        /// at <paramref name="uri"/> on the background polling thread.
        /// </summary>
        /// <param name="uri">A file path or file:// URI to play.</param>
        public void LoadVideo(string uri)
        {
            // Stop a previous polling thread before reconfiguring the pipeline.
            if (gstThread != null)
            {
                isrunning = false;
                gstThread.Join();
                gstThread = new Thread(new ThreadStart(KeepPolling));
            }

            // Dispose any existing playbin2 and appsink.
            if (playBin != null)
            {
                playerState = VideoPlayerState.STOPPED;
                Console.WriteLine("STOPPED");

                playBin.SetState(State.Null);
                playBin.Dispose();
                appSink.SetState(State.Null);
                appSink.Dispose();
            }

            // Create playbin2 and appsink. Single code path: the original
            // duplicated this block verbatim in both branches of the if/else.
            playBin      = new PlayBin2();
            appSink      = ElementFactory.Make("appsink", "sink") as AppSink;
            appSink.Caps = new Caps("video/x-raw-yuv", new object[] {});
            appSink.Drop       = true; // drop old buffers when the queue fills
            appSink.MaxBuffers = 8;
            playBin.VideoSink  = appSink;

            // Reset frame state.
            texturesOK = false;
            width      = 0;
            height     = 0;

            // Prepend the file:// scheme for plain paths (ordinal comparison:
            // URI schemes are not culture-sensitive text).
            string validUri = uri;
            if (!validUri.StartsWith("file://", StringComparison.Ordinal))
            {
                validUri = "file://" + uri;
            }
            playBin.Uri = validUri;

            StateChangeReturn sr = playBin.SetState(State.Playing);
            Console.WriteLine(sr.ToString());

            playerState = VideoPlayerState.LOADING;
            Console.WriteLine("LOADING:" + validUri);

            if (gstThread == null)
            {
                gstThread = new Thread(new ThreadStart(KeepPolling));
            }

            isrunning = true;
            // Start polling thread... future thought: use an async queue?
            gstThread.Start();
        }
Example #7
0
        /// <summary>
        /// Plays <paramref name="source"/> while tapping raw audio and video
        /// samples through two appsinks; blocks on a GLib main loop until
        /// playback finishes.
        /// </summary>
        /// <param name="args">Command-line arguments, forwarded to Application.Init.</param>
        /// <param name="source">A URI, or a plain file path (converted to file://).</param>
        /// <param name="sourceOptions">Extra properties appended to the uridecodebin element.</param>
        /// <param name="withVideoPlayback">Also render video to an autovideosink.</param>
        /// <param name="withAudioPlayback">Also render audio to an autoaudiosink.</param>
        public static void Run(ref string[] args, string source, string sourceOptions = "",
                               bool withVideoPlayback = true, bool withAudioPlayback = true)
        {
            Console.WriteLine($"Getting raw video and audio samples and playing {source}");
            Application.Init(ref args);
            GtkSharp.GstreamerSharp.ObjectManager.Initialize(); // for AppSink, including finding existing app sinks

            bool validUri = false;

            if (Gst.Uri.IsValid(source))
            {
                var protocol = Gst.Uri.GetProtocol(source);
                if (Gst.Uri.ProtocolIsValid(protocol) && Gst.Uri.ProtocolIsSupported(URIType.Src, protocol))
                {
                    validUri = true;
                }
            }
            if (!validUri)
            {
                // still trying as a file path
                source = "file://" + source.Replace('\\', '/');
            }

            // if needed to force TCP with rtps source and uridecodebin,
            // use rtspt:// as source scheme. Also see http://gstreamer-devel.966125.n4.nabble.com/setting-protocols-for-rtsp-when-using-uridecodebin-tp4669327p4669328.html

            // We create a pipeline with 4 target sinks:
            //                                                                             ==> autoaudiosink (audio playback)
            //                                                                           //
            //                                               ==> audio (tee 'audioTee')=||
            //                                             //                            \\ 
            //                                            //                               ==> appsink 'audioSink' (raw audio samples)
            // =>>> source stream =>>> urlcodebin demux =||
            //                                            \\                               ==> autovideosink (video playback)
            //                                             \\                            //
            //                                               ==> video (tee 'videoTee')=||
            //                                                                           \\
            //                                                                             ==> appsink 'videoSink' (raw video samples)
            //
            // We can initialize a pipeline declaratively using Gst.Parse.Launch as if we were doing that in command line.
            // Then access pipeline elements by their names (audioSink and videoSink)

            Pipeline = Gst.Parse.Launch(
                $"uridecodebin uri=\"{source}\" {sourceOptions} name=dmux " +           // using uridecodebin as a demuxer
                                                                                        // you can also use replace it with:
                                                                                        // for HTTP(S) - "souphttpsrc location=\"{source}\" {sourceOptions} ! decodebin name=dmux"
                                                                                        // for RTSP - "rtspsrc location=\"{source}\" {sourceOptions} ! decodebin name=dmux"

                "dmux. ! queue ! audioconvert ! audio/x-raw,format=F32LE " +            // audio flow: raw PCM, 32bit float, little-endian
                "! tee name=audioTee " +                                                // create a tee for audio split
                (!withAudioPlayback ? " " :
                 "audioTee. ! queue ! autoaudiosink ") +                                // first [optional] audio branch - an automatic sink for playback
                "audioTee. ! queue ! appsink name=audioSink " +                         // second audio branch - an appsink 'audioSink' for raw audio samples

                "dmux. ! queue ! videoconvert " +                                       // video flow: raw RGBA, 32bpp
                                                                                        // color conversion to RGBA on GPU
                "! glupload ! glcolorconvert ! video/x-raw(memory:GLMemory),texture-target=2D,format=(string)RGBA ! gldownload " +

                "! tee name=videoTee " +                                                        // create second tee - for video split
                (!withVideoPlayback ? " " :
                 "videoTee. ! queue! autovideosink ") +                                         // first [optional] video branch - an automatic sink for playback
                "videoTee. ! queue ! appsink name=videoSink");                                  // second video branch - an appsink 'videoSink' for raw video samples

            MainLoop = new GLib.MainLoop();

            Pipeline.Bus.AddSignalWatch();
            Pipeline.Bus.EnableSyncMessageEmission();
            Pipeline.Bus.Message     += OnMessage;
            Pipeline.Bus.SyncMessage += OnSync;

            // Locate the appsinks by name and switch them to signal-driven
            // sample delivery. Pattern matching guards both the Gst.Pipeline
            // cast and the AppSink casts — the original dereferenced the result
            // of `as Gst.Pipeline` unchecked, an NRE risk.
            if (Pipeline is Gst.Pipeline bin)
            {
                if (bin.GetChildByName("videoSink") is AppSink videoSink)
                {
                    videoSink.EmitSignals = true;
                    videoSink.NewSample  += NewVideoSample;

                    videoSink.Drop = true;
                    videoSink.Sync = true;
                    videoSink.Qos  = true;
                }

                if (bin.GetChildByName("audioSink") is AppSink audioSink)
                {
                    audioSink.EmitSignals = true;
                    audioSink.NewSample  += NewAudioSample;

                    audioSink.Drop = true;
                    audioSink.Sync = true;
                    audioSink.Qos  = true;
                }
            }

            var ret = Pipeline.SetState(State.Playing);

            if (ret == StateChangeReturn.Failure)
            {
                Console.WriteLine("Unable to set the pipeline to the playing state.");
                return;
            }
            else if (ret == StateChangeReturn.NoPreroll)
            {
                // NoPreroll means a live source: no buffering/prerolling happens.
                IsLive = true;
                Console.WriteLine("Playing a live stream.");
            }

            MainLoop.Run();

            Pipeline.SetState(State.Null);
        }