// This function is called when playbin has created the appsrc element, so we have a chance to configure it.
		static void SourceSetup (object sender, GLib.SignalArgs args) {
			var info = new Gst.Audio.AudioInfo ();
			var source = new Gst.App.AppSrc(((Element)args.Args [0]).Handle);
			Console.WriteLine ("Source has been created. Configuring.");
			AppSource = source;

			// Configure appsrc
			Gst.Audio.AudioChannelPosition[] position = {};
			info.SetFormat (Gst.Audio.AudioFormat.S16, SampleRate, 1, position);
			var audioCaps = info.ToCaps ();
			source ["caps"] = audioCaps;
			source ["format"] = Format.Time;
			source.NeedData += StartFeed;
			source.EnoughData += StopFeed;
		}
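StartFeed and StopFeed are the handlers for appsrc's need-data and enough-data signals; they are defined elsewhere in the tutorial and are not part of this snippet. A minimal sketch of their usual shape is shown below, assuming a Sourceid field that holds the id of a GLib idle source and a PushData callback that pushes generated audio into AppSource (both names are illustrative, not taken from this snippet).

		// Sketch only. Assumes the enclosing class also declares:
		//     static uint Sourceid;         // id of the GLib idle source that feeds appsrc
		//     static bool PushData () {...} // pushes one buffer into AppSource, returns true to keep running

		// Called when appsrc needs data: install an idle handler that keeps calling PushData.
		static void StartFeed (object sender, GLib.SignalArgs args) {
			if (Sourceid == 0) {
				Console.WriteLine ("Start feeding");
				Sourceid = GLib.Idle.Add (PushData);
			}
		}

		// Called when appsrc has queued enough data: remove the idle handler.
		static void StopFeed (object sender, System.EventArgs args) {
			if (Sourceid != 0) {
				Console.WriteLine ("Stop feeding");
				GLib.Source.Remove (Sourceid);
				Sourceid = 0;
			}
		}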
Example #2
        public static void Main(string[] args)
        {
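            // 'b' and 'd' hold waveform-generation state used by the callback that synthesizes the audio samples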
            b = 1;
            d = 1;
            Gst.Audio.AudioInfo info = new Gst.Audio.AudioInfo();

            // Initialize GStreamer
            Gst.Application.Init(ref args);

            // Create the elements
            AppSource     = new Gst.App.AppSrc("app_src");
            Tee           = ElementFactory.Make("tee", "tee");
            AudioQueue    = ElementFactory.Make("queue", "audio_queue");
            AudioConvert1 = ElementFactory.Make("audioconvert", "audio_convert1");
            AudioResample = ElementFactory.Make("audioresample", "audio_resample");
            AudioSink     = ElementFactory.Make("autoaudiosink", "audio_sink");
            VideoQueue    = ElementFactory.Make("queue", "video_queue");
            AudioConvert2 = ElementFactory.Make("audioconvert", "audio_convert2");
            Visual        = ElementFactory.Make("wavescope", "visual");
            VideoConvert  = ElementFactory.Make("videoconvert", "video_convert");
            VideoSink     = ElementFactory.Make("autovideosink", "video_sink");
            AppQueue      = ElementFactory.Make("queue", "app_queue");
            AppSink       = new Gst.App.AppSink("app_sink");

            // Create the empty pipeline
            var pipeline = new Pipeline("test-pipeline");

            if (AppSource == null || Tee == null || AudioQueue == null || AudioConvert1 == null || AudioResample == null ||
                AudioSink == null || VideoQueue == null || AudioConvert2 == null || Visual == null || VideoConvert == null ||
                VideoSink == null || AppQueue == null || AppSink == null || pipeline == null)
            {
                Console.WriteLine("Not all elements could be created.");
                return;
            }

            // Configure wavescope
            Visual ["shader"] = 0;
            Visual ["style"]  = 0;

            // Configure appsrc
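            // The generated audio is signed 16-bit, mono, at SampleRate Hz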
            Gst.Audio.AudioChannelPosition[] position = {};
            info.SetFormat(Gst.Audio.AudioFormat.S16, SampleRate, 1, position);
            var audioCaps = info.ToCaps();

            AppSource ["caps"]   = audioCaps;
            AppSource ["format"] = Format.Time;

            AppSource.NeedData   += StartFeed;
            AppSource.EnoughData += StopFeed;

            // Configure appsink
            AppSink ["emit-signals"] = true;
            AppSink ["caps"]         = audioCaps;
            AppSink.NewSample       += NewSample;

            // Link all elements that can be automatically linked because they have "Always" pads
            pipeline.Add(AppSource, Tee, AudioQueue, AudioConvert1, AudioResample,
                         AudioSink, VideoQueue, AudioConvert2, Visual, VideoConvert, VideoSink, AppQueue, AppSink);
            if (!Element.Link(AppSource, Tee) ||
                !Element.Link(AudioQueue, AudioConvert1, AudioResample, AudioSink) ||
                !Element.Link(VideoQueue, AudioConvert2, Visual, VideoConvert, VideoSink) ||
                !Element.Link(AppQueue, AppSink))
            {
                Console.WriteLine("Elements could not be linked.");
                return;
            }

            // Manually link the Tee, which has "Request" pads
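            // tee creates its source pads on demand from the "src_%u" template; one request pad is needed per branch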
            var teeSrcPadTemplate = Tee.GetPadTemplate("src_%u");
            var teeAudioPad       = Tee.RequestPad(teeSrcPadTemplate);

            Console.WriteLine("Obtained request pad {0} for audio branch.", teeAudioPad.Name);
            var queueAudioPad = AudioQueue.GetStaticPad("sink");
            var teeVideoPad   = Tee.RequestPad(teeSrcPadTemplate);

            Console.WriteLine("Obtained request pad {0} for video branch.", teeVideoPad.Name);
            var queueVideoPad = VideoQueue.GetStaticPad("sink");
            var teeAppPad     = Tee.RequestPad(teeSrcPadTemplate);

            Console.WriteLine("Obtained request pad {0} for app branch.", teeAppPad.Name);
            var queueAppPad = AppQueue.GetStaticPad("sink");

            if (teeAudioPad.Link(queueAudioPad) != PadLinkReturn.Ok ||
                teeVideoPad.Link(queueVideoPad) != PadLinkReturn.Ok ||
                teeAppPad.Link(queueAppPad) != PadLinkReturn.Ok)
            {
                Console.WriteLine("Tee could not be linked");
                return;
            }

            // Instruct the bus to emit signals for each received message, and connect to the interesting signals
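            // The signal watch makes the bus emit a GLib signal for every message it posts; the signals are dispatched once the main loop below is running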
            var bus = pipeline.Bus;

            bus.AddSignalWatch();
            bus.Connect("message::error", HandleError);

            // Start playing the pipeline
            pipeline.SetState(State.Playing);

            // Create a GLib Main Loop and set it to run
            MainLoop = new GLib.MainLoop();
            MainLoop.Run();

            // Release the request pads from the Tee, and unref them
            Tee.ReleaseRequestPad(teeAudioPad);
            Tee.ReleaseRequestPad(teeVideoPad);
            Tee.ReleaseRequestPad(teeAppPad);

            // Free resources
            pipeline.SetState(State.Null);

            Gst.Global.Deinit();
        }
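
NewSample and HandleError are connected above but are not included in this listing. The sketches below show one plausible shape for them: the appsink sample is simply pulled and released, and the error handler quits the main loop so the cleanup code after MainLoop.Run() executes. Treat the bodies as illustrative rather than the tutorial's exact code.

        // Illustrative sketch: pull the sample the appsink has just received.
        // Nothing is done with the data here beyond printing a marker and releasing the sample.
        static void NewSample (object sender, GLib.SignalArgs args)
        {
            var sink = (Gst.App.AppSink) sender;
            var sample = sink.PullSample();

            if (sample != null)
            {
                Console.Write("*");
                sample.Dispose();
            }
        }

        // Illustrative sketch: print the error posted on the bus and quit the main loop.
        static void HandleError (object o, GLib.SignalArgs args)
        {
            GLib.GException err;
            string debug;
            var msg = (Message) args.Args[0];

            msg.ParseError(out err, out debug);
            Console.WriteLine("Error received from element {0}: {1}", msg.Src.Name, err.Message);
            Console.WriteLine("Debugging information: {0}", debug ?? "none");
            MainLoop.Quit();
        }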