Code example #1
		// This function is called when playbin has created the appsrc element, so we have a chance to configure it.
		static void SourceSetup (object sender, GLib.SignalArgs args) {
			var info = new Gst.Audio.AudioInfo ();
			// Wrap the native appsrc handle that playbin passes in args.Args[0]
			var source = new Gst.App.AppSrc (((Element)args.Args [0]).Handle);
			Console.WriteLine ("Source has been created. Configuring.");
			AppSource = source;

			// Configure appsrc
			// Mono S16 audio; the empty array leaves channel positions at the default
			Gst.Audio.AudioChannelPosition[] position = {};
			info.SetFormat (Gst.Audio.AudioFormat.S16, SampleRate, 1, position);
			var audioCaps = info.ToCaps ();
			source ["caps"] = audioCaps;
			source ["format"] = Format.Time;
			source.NeedData += StartFeed;
			source.EnoughData += StopFeed;
		}
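
The StartFeed and StopFeed handlers wired up above are not part of this excerpt. The sketch below shows what they typically do, assuming the static AppSource field from the snippet; ChunkSize, the sourceId field, and the byte[] convenience constructor of Gst.Buffer are illustrative assumptions, not code from the original.

		const int ChunkSize = 1024;   // hypothetical chunk size, not in the original
		static uint sourceId;         // GLib source id of the feeding idle handler

		// Called when appsrc needs data: start feeding buffers from an idle
		// handler on the GLib main loop.
		static void StartFeed (object sender, GLib.SignalArgs args) {
			if (sourceId == 0) {
				Console.WriteLine ("Start feeding.");
				sourceId = GLib.Idle.Add (PushData);
			}
		}

		// Called when appsrc has buffered enough: stop feeding.
		static void StopFeed (object sender, GLib.SignalArgs args) {
			if (sourceId != 0) {
				Console.WriteLine ("Stop feeding.");
				GLib.Source.Remove (sourceId);
				sourceId = 0;
			}
		}

		// Push one buffer into appsrc. This sketch pushes silence; a real
		// generator would synthesize samples into the array first.
		static bool PushData () {
			var buffer = new Gst.Buffer (new byte[ChunkSize]); // assumed byte[] ctor
			// Returning false would remove the idle handler and stop the feed.
			return AppSource.PushBuffer (buffer) == FlowReturn.Ok;
		}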
Code example #2
        /// <summary>
        /// This function is called when playbin has created the appsrc element, so we have
        /// a chance to configure it.
        /// </summary>
        static void HandleSourceSetup(object o, GLib.SignalArgs args)
        {
            // The sender "o" is the playbin pipeline that emitted source-setup.

            Console.WriteLine("Source has been created. Configuring.");

            // Wrap the native appsrc handle passed in args.Args[0]
            _data.AppSource = new Gst.App.AppSrc(((Element)args.Args[0]).Handle);
            // Configure appsrc
            var info = new Gst.Audio.AudioInfo();

            Gst.Audio.AudioChannelPosition[] position = { };
            info.SetFormat(Gst.Audio.AudioFormat.S16, SAMPLE_RATE, 1, position);
            var audioCaps = info.ToCaps();

            _data.AppSource["caps"]     = audioCaps;
            _data.AppSource["format"]   = Format.Time;
            _data.AppSource.NeedData   += HandleStartFeed;
            _data.AppSource.EnoughData += HandleStopFeed;
            audioCaps.Dispose();
        }
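
HandleSourceSetup only runs if it is connected to playbin's source-setup signal before playback starts. Below is a minimal sketch of that wiring, assuming the appsrc:// URI scheme that makes playbin instantiate an appsrc; the WirePlaybin name is hypothetical.

        // Hypothetical wiring for the handler above: playbin creates an appsrc
        // for the appsrc:// URI and emits source-setup, at which point
        // HandleSourceSetup configures it.
        static void WirePlaybin()
        {
            var pipeline = (Pipeline)Gst.Parse.Launch("playbin uri=appsrc://");
            pipeline.Connect("source-setup", HandleSourceSetup);
            pipeline.SetState(State.Playing);
        }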
Code example #3
        public static void Main(string[] args)
        {
            // Initialize the waveform generator's state
            b = 1;
            d = 1;
            Gst.Audio.AudioInfo info = new Gst.Audio.AudioInfo();

            // Initialize Gstreamer
            Gst.Application.Init(ref args);

            // Create the elements
            AppSource     = new Gst.App.AppSrc("app_src");
            Tee           = ElementFactory.Make("tee", "tee");
            AudioQueue    = ElementFactory.Make("queue", "audio_queue");
            AudioConvert1 = ElementFactory.Make("audioconvert", "audio_convert1");
            AudioResample = ElementFactory.Make("audioresample", "audio_resample");
            AudioSink     = ElementFactory.Make("autoaudiosink", "audio_sink");
            VideoQueue    = ElementFactory.Make("queue", "video_queue");
            AudioConvert2 = ElementFactory.Make("audioconvert", "audio_convert2");
            Visual        = ElementFactory.Make("wavescope", "visual");
            VideoConvert  = ElementFactory.Make("videoconvert", "video_convert");
            VideoSink     = ElementFactory.Make("autovideosink", "video_sink");
            AppQueue      = ElementFactory.Make("queue", "app_queue");
            AppSink       = new Gst.App.AppSink("app_sink");

            // Create the empty pipeline
            var pipeline = new Pipeline("test-pipeline");

            if (AppSource == null || Tee == null || AudioQueue == null || AudioConvert1 == null || AudioResample == null ||
                AudioSink == null || VideoQueue == null || AudioConvert2 == null || Visual == null || VideoConvert == null ||
                VideoSink == null || AppQueue == null || AppSink == null || pipeline == null)
            {
                Console.WriteLine("Not all elements could be created.");
                return;
            }

            // Configure wavescope
            Visual ["shader"] = 0;
            Visual ["style"]  = 0;

            // Configure appsrc
            Gst.Audio.AudioChannelPosition[] position = { };
            info.SetFormat(Gst.Audio.AudioFormat.S16, SampleRate, 1, position);
            var audioCaps = info.ToCaps();

            AppSource ["caps"]   = audioCaps;
            AppSource ["format"] = Format.Time;

            AppSource.NeedData   += StartFeed;
            AppSource.EnoughData += StopFeed;

            // Configure appsink
            AppSink ["emit-signals"] = true;
            AppSink ["caps"]         = audioCaps;
            AppSink.NewSample       += NewSample;

            // Link all elements that can be automatically linked because they have "Always" pads
            pipeline.Add(AppSource, Tee, AudioQueue, AudioConvert1, AudioResample,
                         AudioSink, VideoQueue, AudioConvert2, Visual, VideoConvert, VideoSink, AppQueue, AppSink);
            if (!Element.Link(AppSource, Tee) ||
                !Element.Link(AudioQueue, AudioConvert1, AudioResample, AudioSink) ||
                !Element.Link(VideoQueue, AudioConvert2, Visual, VideoConvert, VideoSink) ||
                !Element.Link(AppQueue, AppSink))
            {
                Console.WriteLine("Elements could not be linked.");
                return;
            }

            // Manually link the Tee, which has "Request" pads
            var teeSrcPadTemplate = Tee.GetPadTemplate("src_%u");
            var teeAudioPad       = Tee.RequestPad(teeSrcPadTemplate);

            Console.WriteLine("Obtained request pad {0} for audio branch.", teeAudioPad.Name);
            var queueAudioPad = AudioQueue.GetStaticPad("sink");
            var teeVideoPad   = Tee.RequestPad(teeSrcPadTemplate);

            Console.WriteLine("Obtained request pad {0} for video branch.", teeVideoPad.Name);
            var queueVideoPad = VideoQueue.GetStaticPad("sink");
            var teeAppPad     = Tee.RequestPad(teeSrcPadTemplate);

            Console.WriteLine("Obtained request pad {0} for app branch.", teeAppPad.Name);
            var queueAppPad = AppQueue.GetStaticPad("sink");

            if (teeAudioPad.Link(queueAudioPad) != PadLinkReturn.Ok ||
                teeVideoPad.Link(queueVideoPad) != PadLinkReturn.Ok ||
                teeAppPad.Link(queueAppPad) != PadLinkReturn.Ok)
            {
                Console.WriteLine("Tee could not be linked");
                return;
            }

            // Instruct the bus to emit signals for each received message, and connect to the interesting signals
            var bus = pipeline.Bus;

            bus.AddSignalWatch();
            bus.Connect("message::error", HandleError);

            // Start playing the pipeline
            pipeline.SetState(State.Playing);

            // Create a GLib Main Loop and set it to run
            MainLoop = new GLib.MainLoop();
            MainLoop.Run();

            // Release the request pads from the Tee, and unref them
            Tee.ReleaseRequestPad(teeAudioPad);
            Tee.ReleaseRequestPad(teeVideoPad);
            Tee.ReleaseRequestPad(teeAppPad);

            // Free resources
            pipeline.SetState(State.Null);

            Gst.Global.Deinit();
        }
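
The NewSample callback registered on the appsink above is not shown in this excerpt. A minimal sketch, assuming the static AppSink field from the snippet; the handler has to pull each sample so the appsink's internal queue does not fill up:

        // Hypothetical new-sample callback: pull the sample out of the appsink
        // (taking ownership of it), note its arrival, and release it.
        static void NewSample(object sender, GLib.SignalArgs args)
        {
            var sample = AppSink.PullSample();
            if (sample != null)
            {
                Console.Write("*");
                sample.Dispose();
            }
        }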
Code example #4
        public static void Run(string[] args)
        {
            var info = new Gst.Audio.AudioInfo();

            // Initialize the waveform generator's state
            Data.b = 1;
            Data.d = 1;
            Gst.Application.Init(ref args);

            Data.AppSource     = new Gst.App.AppSrc("audio_source");
            Data.Tee           = ElementFactory.Make("tee", "tee");
            Data.AudioQueue    = ElementFactory.Make("queue", "audio_queue");
            Data.AudioConvert1 = ElementFactory.Make("audioconvert", "audio_convert1");
            Data.AudioResample = ElementFactory.Make("audioresample", "audio_resample");
            Data.AudioSink     = ElementFactory.Make("autoaudiosink", "audio_sink");
            Data.VideoQueue    = ElementFactory.Make("queue", "video_queue");
            Data.AudioConvert2 = ElementFactory.Make("audioconvert", "audio_convert2");
            Data.Visual        = ElementFactory.Make("wavescope", "visual");
            Data.VideoConvert  = ElementFactory.Make("videoconvert", "csp");
            Data.VideoSink     = ElementFactory.Make("autovideosink", "video_sink");
            Data.AppQueue      = ElementFactory.Make("queue", "app_queue");
            Data.AppSink       = new Gst.App.AppSink("app_sink");
            Data.Pipeline      = new Pipeline("test-pipeline");

            if (new[] { Data.Pipeline, Data.AppSource, Data.Tee, Data.AudioQueue,
                        Data.AudioConvert1, Data.AudioResample, Data.AudioSink, Data.VideoQueue,
                        Data.AudioConvert2, Data.Visual, Data.VideoConvert, Data.VideoSink, Data.AppQueue, Data.AppSink }.Any(el => el == null))
            {
                "Not all elements could be created".PrintErr();
                return;
            }

            // Configure wavescope
            Data.Visual["shader"] = 0;
            Data.Visual["style"]  = 0;

            // Configure appsrc
            Gst.Audio.AudioChannelPosition[] position = { };
            info.SetFormat(Gst.Audio.AudioFormat.S16, SAMPLE_RATE, 1, position);
            var audioCaps = info.ToCaps();

            Data.AppSource["caps"]   = audioCaps;
            Data.AppSource["format"] = Format.Time;
            // need-data is connected by signal name here; the generated NeedData
            // event is equivalent when the binding provides it.
            Data.AppSource.Connect("need-data", StartFeed);
            Data.AppSource.EnoughData += StopFeed;

            // Configure appsink
            Data.AppSink.EmitSignals = true;
            Data.AppSink.Caps        = audioCaps;
            Data.AppSink.NewSample  += NewSample;

            // Link all elements that can be automatically linked because they have "Always" pads
            Data.Pipeline.Add(Data.AppSource, Data.Tee, Data.AudioQueue, Data.AudioConvert1, Data.AudioResample,
                              Data.AudioSink, Data.VideoQueue, Data.AudioConvert2, Data.Visual, Data.VideoConvert, Data.VideoSink, Data.AppQueue, Data.AppSink);
            if (!Element.Link(Data.AppSource, Data.Tee) ||
                !Element.Link(Data.AudioQueue, Data.AudioConvert1, Data.AudioResample, Data.AudioSink) ||
                !Element.Link(Data.VideoQueue, Data.AudioConvert2, Data.Visual, Data.VideoConvert, Data.VideoSink) ||
                !Element.Link(Data.AppQueue, Data.AppSink))
            {
                "Elements could not be linked.".PrintErr();
                return;
            }

            // Manually link the Tee, which has "Request" pads
            var teeAudioPad = Data.Tee.GetRequestPad("src_%u");

            Console.WriteLine($"Obtained request pad {teeAudioPad.Name} for audio branch.");
            var queueAudioPad = Data.AudioQueue.GetStaticPad("sink");
            var teeVideoPad   = Data.Tee.GetRequestPad("src_%u");

            Console.WriteLine($"Obtained request pad {teeVideoPad.Name} for video branch.");
            var queueVideoPad = Data.VideoQueue.GetStaticPad("sink");
            var teeAppPad     = Data.Tee.GetRequestPad("src_%u");

            Console.WriteLine($"Obtained request pad {teeAppPad.Name} for app branch.");
            var queueAppPad = Data.AppQueue.GetStaticPad("sink");

            if ((teeAudioPad.Link(queueAudioPad) != PadLinkReturn.Ok) ||
                (teeVideoPad.Link(queueVideoPad) != PadLinkReturn.Ok) ||
                (teeAppPad.Link(queueAppPad) != PadLinkReturn.Ok))
            {
                "tee could not be linked".PrintErr();
                return;
            }

            // Instruct the bus to emit signals for each received message, and connect to the interesting signals

            var bus = Data.Pipeline.Bus;

            bus.AddSignalWatch();
            bus.Connect("message::error", HandleError);

            // Start playing the pipeline
            Data.Pipeline.SetState(State.Playing);

            Data.MainLoop = new GLib.MainLoop();
            Data.MainLoop.Run();

            // Release the request pads from the Tee, and unref them
            Data.Tee.ReleaseRequestPad(teeAudioPad);
            Data.Tee.ReleaseRequestPad(teeVideoPad);
            Data.Tee.ReleaseRequestPad(teeAppPad);

            // Free resources
            Data.Pipeline.SetState(State.Null);

            Gst.Global.Deinit();
        }
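
Neither variant defines the HandleError callback attached to the bus. A sketch of a typical one, assuming the Data.MainLoop field from the snippet; for a message::error signal, args.Args[0] carries the Gst.Message:

        // Hypothetical bus error callback: report the error and quit the main
        // loop so Run() can tear the pipeline down.
        static void HandleError(object o, GLib.SignalArgs args)
        {
            var msg = (Message)args.Args[0];

            msg.ParseError(out GLib.GException err, out string debug);
            $"Error received from element {msg.Src.Name}: {err.Message}".PrintErr();
            $"Debugging information: {debug ?? "none"}".PrintErr();
            Data.MainLoop.Quit();
        }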