Example #1
        public void TestPipelineDispose()
        {
            Gst.Pipeline pipeline = new Gst.Pipeline();
            var          src      = Gst.ElementFactory.Make("videotestsrc");

            src["num-buffers"] = 10;

            var vsink = ElementFactory.Make("fakesink");

            pipeline.Add(src, vsink);
            src.Link(vsink);

            var srcWeakRef      = new WeakReference(src);
            var vsinkWeakRef    = new WeakReference(vsink);
            var pipelineWeakRef = new WeakReference(pipeline);
            var busWeakRef      = new WeakReference(pipeline.Bus);

            pipeline.SetState(State.Playing);
            bool terminated = false;

            do
            {
                using (Message msg = pipeline.Bus.PopFiltered(MessageType.StateChanged))
                {
                    if (msg == null || msg.Src != pipeline)
                    {
                        continue;
                    }

                    msg.ParseStateChanged(out State oldstate, out State newstate, out State pendingstate);

                    if (newstate == State.Playing)
                    {
                        terminated = true;
                    }
                }
            } while (!terminated);

            pipeline.SetState(State.Null);

            pipeline.Dispose();
            pipeline = null;

            GC.Collect();
            GC.WaitForPendingFinalizers();
            GC.Collect();

            Assert.IsFalse(srcWeakRef.IsAlive);
            Assert.IsFalse(vsinkWeakRef.IsAlive);
            Assert.IsFalse(busWeakRef.IsAlive);
            Assert.IsFalse(pipelineWeakRef.IsAlive);
        }
Example #2
        private void InitGStreamerPipeline()
        {
            //#region BuildPipeline
            switch (mCfgVideosinkType)
            {
            case videosinktype.glimagesink:
                mGlImageSink = (Gst.Video.VideoSink)Gst.ElementFactory.Make("glimagesink", "glimagesink");
                break;

            case videosinktype.d3dvideosink:
                mGlImageSink = (Gst.Video.VideoSink)Gst.ElementFactory.Make("d3dvideosink", "d3dvideosink");
                //mGlImageSink = (Gst.Video.VideoSink)Gst.ElementFactory.Make("dshowvideosink", "dshowvideosink");
                break;

            case videosinktype.dshowvideosink:
                mGlImageSink = (Gst.Video.VideoSink)Gst.ElementFactory.Make("dshowvideosink", "dshowvideosink");
                break;

            case videosinktype.directdrawsink:
                mGlImageSink = (Gst.Video.VideoSink)Gst.ElementFactory.Make("directdrawsink", "directdrawsink");
                break;

            default:
                break;
            }

            Gst.Element pipeline;
            //string command = "videotestsrc pattern=ball ! queue ! x264enc ! rtph264pay ! queue ! decodebin ! autovideosink";
            //string command = "rtspsrc location = rtsp://140.130.20.168:8554/RTSP0001 latency=0 ! application/x-rtp,encoding-name=H264,payload=96 ! rtph264depay ! decodebin ! autovideosink";
            string command = "rtspsrc location = rtsp://140.130.20.168:8554/RTSP0001 latency=0 ! application/x-rtp,encoding-name=H264,payload=96 ! rtph264depay ! decodebin ! autovideosink";

            pipeline = Gst.Parse.Launch(command);
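            // Note: Parse.Launch already returns a complete pipeline; below it is wrapped inside another Gst.Pipeline as a child bin.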

            mCurrentPipeline = new Gst.Pipeline("pipeline");
            mCurrentPipeline.Add(pipeline);


            //subscribe to bus & bussync msgs

            SubscribeBusMessage();
            SubscribeBusSyncMessage();

            // Drive the pipeline through Null -> Ready -> Playing
            var setStateRet = mCurrentPipeline.SetState(State.Null);

            System.Diagnostics.Debug.WriteLine("SetStateNULL returned: " + setStateRet.ToString());
            setStateRet = mCurrentPipeline.SetState(State.Ready);
            System.Diagnostics.Debug.WriteLine("SetStateReady returned: " + setStateRet.ToString());
            setStateRet = mCurrentPipeline.SetState(Gst.State.Playing);
        }
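The SubscribeBusMessage and SubscribeBusSyncMessage helpers called above are not shown. A minimal sketch of what they might look like with gstreamer-sharp; the handler bodies are illustrative assumptions, not the original code:

        private void SubscribeBusMessage()
        {
            Gst.Bus bus = mCurrentPipeline.Bus;
            bus.AddSignalWatch();
            bus.Message += (o, args) =>
            {
                if (args.Message.Type == Gst.MessageType.Error)
                {
                    args.Message.ParseError(out GLib.GException err, out string debug);
                    System.Diagnostics.Debug.WriteLine("Bus error: " + err.Message + " (" + debug + ")");
                }
            };
        }

        private void SubscribeBusSyncMessage()
        {
            // Sync messages are typically used to catch "prepare-window-handle"
            // and hand the video sink a native window handle before rendering starts.
            Gst.Bus bus = mCurrentPipeline.Bus;
            bus.EnableSyncMessageEmission();
            bus.SyncMessage += (o, args) =>
            {
                System.Diagnostics.Debug.WriteLine("Sync message: " + args.Message.Type);
            };
        }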
Example #3
        public void Finish()
        {
            timer.Stop();

            if (pipeline != null)
            {
                pipeline.SetState(State.Null);
                pipeline = null;
            }

            output_uri = null;
        }
 public void StartRecording()
 {
     if (_isRecording)
     {
         StopRecordingAndSaveAsWav();
         throw new ApplicationException("recording while already recording");
     }
     if (!_recordInit)
     {
         SetupRecordingPipeline();
     }
     _pipeline.SetState(Gst.State.Playing);
     _startRecordingTime = DateTime.Now;
     _isRecording        = true;
     _pipeline.Bus.AddWatch(new BusFunc(BusCb));
 }
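StartRecording installs BusCb as a bus watch, but the callback itself is not listed. A minimal sketch, assuming the Gst.BusFunc signature (bus, message) -> bool; the body is illustrative:

 private bool BusCb(Gst.Bus bus, Gst.Message message)
 {
     switch (message.Type)
     {
     case Gst.MessageType.Error:
         message.ParseError(out GLib.GException err, out string debug);
         Console.WriteLine("Recording error: {0} ({1})", err.Message, debug);
         break;
     case Gst.MessageType.Eos:
         Console.WriteLine("Recording reached end-of-stream.");
         break;
     }
     return true; // returning true keeps the watch installed
 }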
Example #5
        public void Finish()
        {
            if (output_path != null)
            {
                Banshee.IO.File.Delete(new SafeUri(output_path));
            }

            TrackReset();

            encoder_pipeline = null;
            output_extension = null;

            if (timer != null)
            {
                timer.Stop();
            }

            if (pipeline != null && pipeline is Element)
            {
                pipeline.SetState(State.Null);
                pipeline = null;
            }
        }
Example #6
	public static void StopRecording()
	{
		if (gstRecording != null)
		{
			gstRecording.SetState(Gst.State.Null);

			Gst.State s;
			gstRecording.GetState(out s, timeout);

			gstRecording.Dispose();
			gstRecording = null;
		}

		isRecording = false;
	}
        private void SetupRecordingPipeline()
        {
            // set up pipeline and elements
            // gst-launch-0.10 autoaudiosrc ! audioconvert ! vorbisenc ! oggmux ! filesink location=dump.ogg
            // Create new pipeline.

            _pipeline = new Gst.Pipeline();

            //Pipeline pipeline = new Pipeline("pipeline");

            // Construct pipeline: pulsesrc -> audioconvert -> (vorbisenc -> oggmux | wavpackenc) -> filesink

            //_eleAudioSource = ElementFactory.Make("autoaudiosrc", "autoaudiosrc");
            _eleAudioSource  = ElementFactory.Make("pulsesrc");
            _eleAudioConvert = ElementFactory.Make("audioconvert");
            if (_recordOgg)
            {
                _eleVorbisEnc = ElementFactory.Make("vorbisenc");
                _eleOggMux    = ElementFactory.Make("oggmux");
            }
            else
            {
                _eleWavPack = ElementFactory.Make("wavpackenc");
            }
            _eleFileSink = new Gst.CorePlugins.FileSink();
            //_eleFileSink = ElementFactory.Make("filesink", "filesink");
            _eleFileSink ["location"] = _path;

            // Add and link pipeline.
            if (_recordOgg)
            {
                _pipeline.Add(_eleAudioSource, _eleAudioConvert, _eleVorbisEnc, _eleOggMux, _eleFileSink);
            }
            else
            {
                _pipeline.Add(_eleAudioSource, _eleAudioConvert, _eleWavPack, _eleFileSink);
            }

            // Pre-roll: bring the pipeline to READY, then PAUSED (the elements are linked below).
            _pipeline.SetState(Gst.State.Ready);
            _pipeline.SetState(Gst.State.Paused);

            if (!_eleAudioSource.Link(_eleAudioConvert))
            {
                Console.WriteLine("link failed between source and converter");
            }
            if (_recordOgg)
            {
                if (!_eleAudioConvert.Link(_eleVorbisEnc))
                {
                    Console.WriteLine("link failed between converter and encoder");
                }

                if (!_eleVorbisEnc.Link(_eleOggMux))
                {
                    Console.WriteLine("link failed between e and parser");
                }

                if (!_eleOggMux.Link(_eleFileSink))
                {
                    Console.Error.WriteLine("link failed between parser and sink");
                }
            }
            else
            {
                if (!_eleAudioConvert.Link(_eleWavPack))
                {
                    Console.WriteLine("link failed between converter and encoder");
                }

                if (!_eleWavPack.Link(_eleFileSink))
                {
                    Console.Error.WriteLine("link failed between encoder and sink");
                }
            }

            _recordInit = true;
        }
        static void Main(string[] args)
        {
            Application.Init(ref args);

            Element src = ElementFactory.Make("audiotestsrc");
            Element convert = ElementFactory.Make("audioconvert");
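            // ExampleVolume is a custom managed element defined elsewhere in this sample (not shown here).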
            Element volume = new ExampleVolume();
            Element sink = ElementFactory.Make("autoaudiosink");

            Pipeline pipeline = new Pipeline();
            pipeline.Add(src, convert, volume, sink);
            Element.Link(src, convert, volume, sink);

            pipeline.SetState(State.Playing);

            MainLoop loop = new MainLoop();
            loop.Run();

            pipeline.SetState(State.Null);

            Console.ReadLine();
        }
Example #9
        private void ReInitPipeline()
        {
            if (pipeline != null) {
                pipeline.SetState (Gst.State.Null);
                pipeline = null;
            }

            pipeline = new Gst.Pipeline ();
            drawSink = Gst.ElementFactory.Make ("xvimagesink");
            camerabin = Gst.ElementFactory.Make ("camerabin");
            camerabin.Connect ("image-done", new Gst.SignalHandler (OnImageDone));
            pipeline.SetState (Gst.State.Null);

            overlayAdapter = new Gst.Interfaces.XOverlayAdapter (drawSink.Handle);
            overlayAdapter.XwindowId = gdk_x11_drawable_get_xid (drawingArea.GdkWindow.Handle);
            pipeline.Add (camerabin);

            if (camerabin.HasProperty ("viewfinder-sink")) {
                camerabin ["viewfinder-sink"] = drawSink;
            }

            if (camerabin.HasProperty ("filename")) {
                camerabin ["filename"] = "snapshot.png";
            }

            pipeline.SetState (Gst.State.Playing);
            needReInit = false;
        }
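ReInitPipeline above calls gdk_x11_drawable_get_xid, a native GDK function rather than a managed one. A plausible P/Invoke declaration for it (library name and return width are assumptions and may differ per platform):

        [System.Runtime.InteropServices.DllImport("libgdk-x11-2.0.so.0")]
        private static extern ulong gdk_x11_drawable_get_xid(IntPtr drawable);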
        public static void Main(string[] args)
        {
            // Initialize Gstreamer
            Gst.Application.Init(ref args);

            // Create the element factories
            var sourceFactory = ElementFactory.Find ("audiotestsrc");
            var sinkFactory = ElementFactory.Find ("autoaudiosink");

            if (sourceFactory == null || sinkFactory == null) {
                Console.WriteLine ("Not all element factories could be created.");
                return;
            }

            // Print information about the pad templates of these factories
            PrintPadTemplateInformation (sourceFactory);
            PrintPadTemplateInformation (sinkFactory);

            // Ask the factories to instantiate actual elements
            var source = sourceFactory.Create ("source");
            var sink = sinkFactory.Create ("sink");

            // Create the empty pipeline
            var pipeline = new Pipeline ("test-pipeline");

            if (pipeline == null || source == null || sink == null) {
                Console.WriteLine ("Not all elements could be created.");
                return;
            }

            // Build the pipeline
            pipeline.Add (source, sink);
            if (!source.Link (sink)) {
                Console.WriteLine ("Elements could not be linked.");
                return;
            }

            // Print initial negotiated caps (in NULL state)
            Console.WriteLine ("In NULL state:");
            PrintPadCapabilities (sink, "sink");

            // Start playing
            var ret = pipeline.SetState (State.Playing);
            if (ret == StateChangeReturn.Failure) {
                Console.WriteLine ("Unable to set the pipeline to the playing state (check the bus for error messages).");
            }

            // Wait until error, EOS or State Change
            var bus = pipeline.Bus;
            var terminate = false;

            do {
                var msg = bus.TimedPopFiltered (Constants.CLOCK_TIME_NONE, MessageType.Error | MessageType.Eos | MessageType.StateChanged);

                // Parse message
                if (msg != null) {
                    switch (msg.Type) {
                    case MessageType.Error:
                        string debug;
                        GLib.GException exc;
                        msg.ParseError (out exc, out debug);
                        Console.WriteLine ("Error received from element {0}: {1}", msg.Src.Name, exc.Message);
                        Console.WriteLine ("Debugging information: {0}", debug != null ? debug : "none");
                        terminate = true;
                        break;
                    case MessageType.Eos:
                        Console.WriteLine ("End-Of-Stream reached.\n");
                        terminate = true;
                        break;
                    case MessageType.StateChanged:
                        // We are only interested in state-changed messages from the pipeline
                        if (msg.Src == pipeline) {
                            State oldState, newState, pendingState;
                            msg.ParseStateChanged (out oldState, out newState, out pendingState);
                            Console.WriteLine ("Pipeline state changed from {0} to {1}:",
                                Element.StateGetName (oldState), Element.StateGetName (newState));
                            // Print the current capabilities of the sink element
                            PrintPadCapabilities (sink, "sink");
                        }
                        break;
                    default:
                        // We should not reach here because we only asked for ERRORs, EOS and STATE_CHANGED
                        Console.WriteLine ("Unexpected message received.");
                        break;
                    }
                }
            } while (!terminate);

            // Free resources
            pipeline.SetState (State.Null);
        }
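PrintPadTemplateInformation and PrintPadCapabilities are referenced above but not listed. A minimal sketch of PrintPadCapabilities, assuming the usual pad/caps API of gstreamer-sharp (the real helper would typically pretty-print every caps field):

        static void PrintPadCapabilities (Element element, string padName)
        {
            // Retrieve the requested pad from the element
            var pad = element.GetStaticPad (padName);
            if (pad == null) {
                Console.WriteLine ("Could not retrieve pad '{0}'", padName);
                return;
            }

            // Use the negotiated caps, or the acceptable caps if negotiation has not finished yet
            var caps = pad.CurrentCaps;
            if (caps == null)
                caps = pad.QueryCaps (null);

            Console.WriteLine ("Caps for the {0} pad:", padName);
            Console.WriteLine ("  {0}", caps);
        }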
        public static void Main(string[] args)
        {
            // Initialize Gstreamer
            Gst.Application.Init(ref args);

            // Create the elements
            var audioSource = ElementFactory.Make ("audiotestsrc", "audio_source");
            var tee = ElementFactory.Make ("tee", "tee");
            var audioQueue = ElementFactory.Make ("queue", "audio_queue");
            var audioConvert = ElementFactory.Make ("audioconvert", "audio_convert");
            var audioResample = ElementFactory.Make ("audioresample", "audio_resample");
            var audioSink = ElementFactory.Make ("autoaudiosink", "audio_sink");
            var videoQueue = ElementFactory.Make ("queue", "video_queue");
            var visual = ElementFactory.Make ("wavescope", "visual");
            var videoConvert = ElementFactory.Make ("videoconvert", "csp");
            var videoSink = ElementFactory.Make ("autovideosink", "video_sink");

            // Create the empty pipeline
            var pipeline = new Pipeline ("test-pipeline");

            if (audioSource == null || tee == null || audioQueue == null || audioConvert == null || audioResample == null ||
                audioSink == null || videoQueue == null || visual == null || videoConvert == null || videoSink == null || pipeline == null) {
                Console.WriteLine ("Not all elements could be created.");
                return;
            }

            // Link all elements that can be automatically linked because they have "Always" pads
            pipeline.Add (audioSource, tee, audioQueue, audioConvert, audioResample, audioSink,
                videoQueue, visual, videoConvert, videoSink);
            if (!audioSource.Link (tee) ||
                !Element.Link (audioQueue, audioConvert, audioResample, audioSink) ||
                !Element.Link (videoQueue, visual, videoConvert, videoSink)) {
                Console.WriteLine ("Elements could not be linked.");
                return;
            }

            // Manually link the Tee, which has "Request" pads
            var teeSrcPadTemplate = tee.GetPadTemplate ("src_%u");
            var teeAudioPad = tee.RequestPad (teeSrcPadTemplate, null, null);
            Console.WriteLine ("Obtained request pad {0} for audio branch.", teeAudioPad.Name);
            var queueAudioPad = audioQueue.GetStaticPad ("sink");
            var teeVideoPad = tee.RequestPad (teeSrcPadTemplate, null, null);
            Console.WriteLine ("Obtained request pad {0} for video branch.", teeVideoPad.Name);
            var queueVideoPad = videoQueue.GetStaticPad ("sink");
            if (teeAudioPad.Link (queueAudioPad) != PadLinkReturn.Ok ||
                teeVideoPad.Link(queueVideoPad) != PadLinkReturn.Ok) {
                Console.WriteLine ("Tee could not be linked.");
                return;
            }

            // Start playing
            var ret = pipeline.SetState (State.Playing);
            if (ret == StateChangeReturn.Failure) {
                Console.WriteLine ("Unable to set the pipeline to the playing state (check the bus for error messages).");
            }

            // Wait until error or EOS
            pipeline.Bus.TimedPopFiltered (Constants.CLOCK_TIME_NONE, MessageType.Error | MessageType.Eos);

            // Release the request pads from the Tee, and unref them
            tee.ReleaseRequestPad (teeAudioPad);
            tee.ReleaseRequestPad (teeVideoPad);

            // Free resources
            pipeline.SetState (State.Null);
        }
Example #13
	public void StartRecordingUnix()
	{
		if (txtFolderOut.Text != String.Empty && Directory.Exists(txtFolderOut.Text.Trim()))
		{
			GstCapture.cameraDevice cDev = cameras[cbxCamera.ActiveText];
			//String sDev = "yes";
			String aDev = cbxMic.ActiveText;
			String _path = txtFolderOut.Text.Trim();

			DateTime dt = DateTime.Now;

			//Encoding w = Encoding.GetEncoding("windows-1251"); // HACK

			String aargs = null,
			cargs = null,
			sargs = null;


			if ((aDev != null) && ckbxMic.Active)
			{


				//aargs = String.Format(" dshowaudiosrc device-name=\"{0}\"", w.GetString(Encoding.UTF8.GetBytes(aDev)));
				aargs = String.Format(" pulsesrc");
				//aargs += " ! audio/x-raw-int, rate = 44100, channels = 1, depth = 16 ! queue ! faac ! tee name = audio";
				aargs += " ! audio/x-raw-int, rate = 44100, channels = 1, depth = 16 ! queue ! ffenc_adpcm_swf ! tee name = audio";

			}

			dir = String.Format("{0:yyyy-MM-dd_HH-mm-ss}", dt);
			path = Directory.CreateDirectory(
				System.IO.Path.Combine(_path, dir)
				).FullName;

			Directory.SetCurrentDirectory(path);
			Environment.SetEnvironmentVariable("GST_DEBUG", "3");

			if (cDev != null && ckbxCamera.Active)
			{
				int gop = 450;

				cargs = " flvmux name=camera ! filesink location=\"camera.flv\"";

				cargs += String.Format(" v4l2src device =\"{0}\"", cDev.device.ToString());
				cargs += String.Format(" ! video/x-raw-yuv, framerate = {0}/{1}, width={2}, height={3}" +
				                       " ! videorate ! video/x-raw-yuv, framerate = {4}/1" +
				                       " ! queue ! ffenc_flv name = cv gop-size = {5} ! camera.",
				                       cDev.framerate.Numerator, cDev.framerate.Denominator, cDev.width, cDev.height,
				                       30, gop);
				if (aargs != null)
				{
					//cargs += " audio. ! queue ! audio/mpeg ! camera.";
					cargs += " audio. ! queue ! audio/x-adpcm ! camera.";
				}

			}

			if (ckbxDisplay.Active) // sDev != null
			{
				int gop = 150;
				sargs = " flvmux name=\"screen\" ! filesink location=\"screen.flv\"";

				sargs += " ximagesrc use-damage=false";
				sargs += String.Format(" ! video/x-raw-rgb, framerate = {0}/1" +
				                       " ! ffmpegcolorspace ! queue " +
				                       " ! ffenc_flashsv name=sv gop-size = {1} ! screen.",
				                       5, gop);


				if (aargs != null)
				{
					// sargs += " audio. ! queue ! audio/mpeg ! screen.";
					sargs += " audio. ! queue ! audio/x-adpcm ! screen.";
				}

			}

			try
			{
				String final = cargs + sargs + aargs;
				gstRecording = (Gst.Pipeline)Gst.Parse.Launch(final);

				lblStatus.Text = "Recording";

			}
			catch (Exception error)
			{
				GstCapture.MessageBox.Show(String.Format("{0}: {1}", error.Message, cargs + sargs + aargs));
			}

			if (gstRecording != null)
			{
				gstRecording.SetState(Gst.State.Playing);

				Gst.State s;
				gstRecording.GetState(out s, timeout);

				isRecording = true;

			}

			Directory.SetCurrentDirectory(_path);
		}
	}
Example #14
        //udpsrc port=9000  buffer-size=60000 ! application/x-rtp,encoding-name=H264,payload=96 ! rtph264depay ! h264parse ! queue ! avdec_h264

        public MainForm()
        {
            // These environment variables are necessary to locate the GStreamer libraries,
            // and to stop it from loading wrong libraries installed elsewhere on the system.
            string apppath = System.IO.Path.GetDirectoryName(System.Windows.Forms.Application.ExecutablePath);

            System.Environment.SetEnvironmentVariable("GST_PLUGIN_PATH", "");
            System.Environment.SetEnvironmentVariable("GST_PLUGIN_SYSTEM_PATH", apppath + @"\gstreamer\bin\plugins");
            System.Environment.SetEnvironmentVariable("PATH", @"C:\Windows;"
                                                        + apppath + @"\gstreamer\lib;"
                                                        + apppath + @"\gstreamer\bin");
            System.Environment.SetEnvironmentVariable("GST_REGISTRY", apppath + @"\gstreamer\bin\registry.bin");

            // These are for saving debug information.
            System.Environment.SetEnvironmentVariable("GST_DEBUG", "*:3");
            System.Environment.SetEnvironmentVariable("GST_DEBUG_FILE", "GstreamerLog.txt");
            System.Environment.SetEnvironmentVariable("GST_DEBUG_DUMP_DOT_DIR", apppath);

            // Initialize Gstreamer
            Gst.Application.Init();

            // Create the elements
            var source = ElementFactory.Make("videotestsrc", "source");
            var sink = ElementFactory.Make("autovideosink", "sink");

            // Create the empty pipeline
            var pipeline = new Pipeline("test-pipeline");

            if (pipeline == null || source == null || sink == null)
            {
                Console.WriteLine("Not all elements could be created");
                return;
            }

            // Build the pipeline
            pipeline.Add(source, sink);
            if (!source.Link(sink))
            {
                Console.WriteLine("Elements could not be linked");
                return;
            }

            // Modify the source's properties
            source["pattern"] = 0;

            // Start playing
            var ret = pipeline.SetState(State.Playing);
            if (ret == StateChangeReturn.Failure)
            {
                Console.WriteLine("Unable to set the pipeline to the playing state");
                return;
            }

            // Wait until error or EOS
            var bus = pipeline.Bus;
            //var msg = bus.TimedPopFiltered(Constants.CLOCK_TIME_NONE, MessageType.Eos | MessageType.Error);

            // Free resources
          /*  if (msg != null)
            {
                switch (msg.Type)
                {
                    case MessageType.Error:
                        GLib.GException exc;
                        string debug;
                        msg.ParseError(out exc, out debug);
                        Console.WriteLine(String.Format("Error received from element {0}: {1}", msg.Src.Name, exc.Message));
                        Console.WriteLine(String.Format("Debugging information {0}", debug));
                        break;
                    case MessageType.Eos:
                        Console.WriteLine("End-Of-Stream reached");
                        break;
                    default:
                        // We should not reach here because we only asked for ERRORs and EOS
                        Console.WriteLine("Unexpected messag received");
                        break;
                }
            }*/

            pipeline.SetState(State.Null);

            //  return;

            Gst.Application.Init();

            InitializeComponent();

            // Create a main loop for GLib, run it in a separate thread
            m_GLibMainLoop = new Gst.GLib.MainLoop();
            m_GLibThread = new System.Threading.Thread(m_GLibMainLoop.Run);
            m_GLibThread.IsBackground = true;
            m_GLibThread.Name = "GLibMainLoop";
            m_GLibThread.Start();

            // System.Threading.Thread.CurrentThread.Name = "WinForms";

            CreatePipeline();
        }
        public static void Main(string[] args)
        {
            // Initialize Gstreamer
            Application.Init(ref args);

            // Create the elements
            source = ElementFactory.Make ("uridecodebin", "source");
            convert = ElementFactory.Make ("audioconvert", "convert");
            sink = ElementFactory.Make ("autoaudiosink", "sink");

            // Create the empty pipeline
            pipeline = new Pipeline ("test-pipeline");

            if (source == null || convert == null || sink == null || pipeline == null) {
                Console.WriteLine ("Not all elements could be created");
                return;
            }

            // Build the pipeline. Note that we are NOT linking the source at this point.
            // We will do it later.
            pipeline.Add (source, convert, sink);
            if (!convert.Link (sink)) {
                Console.WriteLine ("Elements could not be linked");
                return;
            }

            // Set the URI to play
            source ["uri"] = "http://download.blender.org/durian/trailer/sintel_trailer-1080p.mp4";

            // Connect to the pad-added signal
            source.PadAdded += HandlePadAdded;

            // Start playing
            var ret = pipeline.SetState (State.Playing);
            if (ret == StateChangeReturn.Failure) {
                Console.WriteLine ("Unable to set the pipeline to the playing state.");
                return;
            }

            // Listen to the bus
            var bus = pipeline.Bus;
            bool terminated = false;
            do {
                var msg = bus.TimedPopFiltered (Constants.CLOCK_TIME_NONE, MessageType.StateChanged | MessageType.Error | MessageType.Eos);

                if (msg != null) {
                    switch (msg.Type) {
                    case MessageType.Error:
                        string debug;
                        GLib.GException exc;
                        msg.ParseError (out exc, out debug);
                        Console.WriteLine (string.Format ("Error received from element {0}: {1}", msg.Src.Name, exc.Message));
                        Console.WriteLine ("Debugging information: {0}", debug);
                        terminated = true;
                        break;
                    case MessageType.Eos:
                        Console.WriteLine("End-Of-Stream reached.");
                        terminated = true;
                        break;
                    case MessageType.StateChanged:
                        // We are only interested in state-changed messages from the pipeline
                        if (msg.Src == pipeline) {
                            State oldState, newState, pendingState;
                            msg.ParseStateChanged(out oldState, out newState, out pendingState);
                            Console.WriteLine ("Pipeline state changed from {0} to {1}:", Element.StateGetName(oldState), Element.StateGetName(newState));
                        }
                        break;
                    default:
                        // We should not reach here
                        Console.WriteLine ("Unexpected message received.");
                        break;
                    }
                }
            } while (!terminated);

            pipeline.SetState (State.Null);
        }
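HandlePadAdded is connected to source.PadAdded above but not listed. A minimal sketch of such a handler following this tutorial's pattern; the raw-audio check and the message strings are assumptions:

        static void HandlePadAdded (object sender, PadAddedArgs args)
        {
            var src = (Element)sender;
            var newPad = args.NewPad;
            var sinkPad = convert.GetStaticPad ("sink");
            Console.WriteLine ("Received new pad '{0}' from '{1}':", newPad.Name, src.Name);

            // If the converter is already linked, there is nothing to do
            if (sinkPad.IsLinked) {
                Console.WriteLine ("We are already linked. Ignoring.");
                return;
            }

            // Only link pads that carry raw audio
            var newPadCaps = newPad.CurrentCaps;
            var newPadType = newPadCaps.GetStructure (0).Name;
            if (!newPadType.StartsWith ("audio/x-raw")) {
                Console.WriteLine ("It has type '{0}' which is not raw audio. Ignoring.", newPadType);
                return;
            }

            // Attempt the link
            if (newPad.Link (sinkPad) != PadLinkReturn.Ok)
                Console.WriteLine ("Type is '{0}' but link failed.", newPadType);
            else
                Console.WriteLine ("Link succeeded (type '{0}').", newPadType);
        }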