public Player(VideoFormat format = VideoFormat.Bgra)
{
    // Create the empty pipeline
    playbin = ElementFactory.Make("playbin3") as Pipeline;
    bus = playbin.Bus;
    bus.AddSignalWatch();
    bus.EnableSyncMessageEmission();
    bus.Message += Bus_Message;

    audiosink = ElementFactory.Make("autoaudiosink", "audiosink");
    videosink = new AppSink("videosink");

    if (playbin == null || videosink == null || audiosink == null)
    {
        throw new Exception("Not all elements could be created");
    }

    videosink.Sync = true;
    videosink.Qos = false;
    videosink.Drop = false;

    var formatString = (format != VideoFormat.Unknown ? format : VideoFormat.Bgrx).ToFormatString();
    videosink.Caps = Caps.FromString($"video/x-raw, format={formatString}");
    videosink.MaxBuffers = 1;
    videosink.EmitSignals = true;
    videosink.NewPreroll += Videosink_NewPreroll;
    videosink.NewSample += Videosink_NewSample;

    playbin["video-sink"] = videosink;
    playbin["audio-sink"] = audiosink;

    playbin.SetState(FTargetState = State.Ready);
}
public void TestGetAllowedCaps()
{
    Caps caps;

    Pad sink = new Pad("sink", PadDirection.Sink);
    caps = sink.AllowedCaps;
    Assert.IsNull(caps);

    Pad src = new Pad("src", PadDirection.Src);
    caps = src.AllowedCaps;
    Assert.IsNull(caps);

    caps = Caps.FromString("foo/bar");
    src.SetCaps(caps);
    sink.SetCaps(caps);

    PadLinkReturn plr = src.Link(sink);
    Assert.AreEqual(plr, PadLinkReturn.Ok);

    Caps gotcaps = src.AllowedCaps;
    Assert.IsNotNull(gotcaps);
    Assert.IsTrue(gotcaps.IsEqual(caps));
}
private void CreatePipeline()
{
    if (Pipeline != null)
    {
        Pipeline.SetState(State.Null);
        IsLive = false;
        return;
    }

    Pipeline = new Pipeline("playback");
    Playbin = ElementFactory.Make("playbin", "playbin");
    Pipeline.Add(Playbin);

    VideoSink = new AppSink("videoSink");
    VideoSink["caps"] = Caps.FromString("video/x-raw,format=RGBA");
    VideoSink.Drop = true;                          // drop frames if we cannot keep up
    VideoSink.Sync = true;                          // synchronized playback
    VideoSink.MaxLateness = (1000 / 30) * 1000000;  // maximum lateness in ns, targeting at least 30 fps
    VideoSink.MaxBuffers = 1;                       // no buffering in the video sink
    VideoSink.Qos = true;                           // enable QoS for the video sink
    VideoSink.EnableLastSample = false;             // no need for last-sample as we are pulling samples

    Playbin["video-sink"] = VideoSink;

    var ret = Pipeline.SetState(State.Ready);
}
static ExampleVolume()
{
    Caps audioCaps = Caps.FromString(
        "audio/x-raw, format=(string) S16LE, rate=(int) [1, MAX], channels=(int) 2, layout=(string) interleaved");

    __srcTemplate = new PadTemplate("src", PadDirection.Src, PadPresence.Always, audioCaps);
    __sinkTemplate = new PadTemplate("sink", PadDirection.Sink, PadPresence.Always, audioCaps);
}
public void TestIntersecting()
{
    Caps caps1 = Caps.FromString("video/x-raw-yuv, " +
        "format=(fourcc)I420, " +
        "width=(int)[ 1, 1000 ], " +
        "height=(int)[ 1, 1000 ], " +
        "framerate=(fraction)[ 0/1, 100/1 ]");
    Caps caps2 = Caps.FromString("video/x-raw-yuv, " +
        "format=(fourcc)I420, " +
        "width=(int)640, " +
        "height=(int)480");

    Assert.IsNotNull(caps1);
    Assert.IsNotNull(caps2);
    Assert.IsFalse(caps1.Handle == IntPtr.Zero, "Ooops, null handle in caps1");
    Assert.IsFalse(caps2.Handle == IntPtr.Zero, "Ooops, null handle in caps2");

    Caps caps3 = caps1.Intersect(caps2);
    Assert.IsFalse(caps3.IsFixed, "How come caps are FIXED?!");
    Assert.IsFalse(caps3.IsEmpty, "How come caps are EMPTY?!");
    Assert.AreEqual(caps2.ToString() + ", framerate=(fraction)[ 0/1, 100/1 ]", caps3.ToString());
}
public void TestUnion()
{
    Caps caps1 = Caps.FromString("video/x-raw-yuv, " +
        "format=(fourcc)I420, " +
        "width=(int)640");
    Caps caps2 = Caps.FromString("video/x-raw-yuv, " +
        "format=(fourcc)I420, " +
        "height=(int)480");

    Assert.IsNotNull(caps1);
    Assert.IsNotNull(caps2);
    Assert.IsFalse(caps1.Handle == IntPtr.Zero, "Ooops, null handle in caps1");
    Assert.IsFalse(caps2.Handle == IntPtr.Zero, "Ooops, null handle in caps2");

    Caps caps3 = caps1.Union(caps2);
    Assert.IsFalse(caps3.IsEmpty, "How come caps are EMPTY?!");

    Caps caps4 = Caps.FromString("video/x-raw-yuv, " +
        "format=(fourcc)I420, " +
        "width=(int)640; " +
        "video/x-raw-yuv, " +
        "format=(fourcc)I420, " +
        "height=(int)480");
    Assert.IsTrue(caps3.IsEqual(caps4));
}
public static bool Register()
{
    GLib.GType gtype = (GLib.GType)typeof(MyTransformNIp);

    SetDetails(gtype, "My Transform", "Filter/Transform", "Do nothing useful", "Nobody");

    Caps caps = Caps.FromString("foo/bar");
    AddPadTemplate(gtype, new PadTemplate("src", PadDirection.Src, PadPresence.Always, caps));
    AddPadTemplate(gtype, new PadTemplate("sink", PadDirection.Sink, PadPresence.Always, caps));

    return ElementFactory.Register(null, "mytransform-nip", (uint)Gst.Rank.None, gtype);
}
public void StartVideoRx(int recvPort, string name)
{
    _videoRxPipeline = new Pipeline("videorx-pipeline");
    Bin bin = (Bin)_videoRxPipeline;

    //bin.Bus.AddWatch(new BusFunc(BusCall));
    _videoRxPipeline.Bus.AddSignalWatch();
    _videoRxPipeline.Bus.Message += (o, args) => BusCall(args.Message);

    Element udpSrc = ElementFactory.Make("udpsrc", "udp_src");
    Element rtpH263Depayloader = ElementFactory.Make("rtph263pdepay", "h263_depayloader");
    Element h263Decoder = ElementFactory.Make("ffdec_h263", "h263_decoder");
    Element cspFilter = ElementFactory.Make("ffmpegcolorspace", "csp_filter_rx");
    Element screenSink = ElementFactory.Make("dshowvideosink", "video-sink_rx");

    if ((_videoRxPipeline == null) || (udpSrc == null) || (rtpH263Depayloader == null) ||
        (h263Decoder == null) || (cspFilter == null) || (screenSink == null))
    {
        MessageBox.Show("Error Creating Gstreamer Elements for Receiving Video!");
    }
    else
    {
        udpSrc["port"] = recvPort;

        bin.Add(udpSrc, rtpH263Depayloader, h263Decoder, cspFilter, screenSink);

        Caps caps = Caps.FromString("application/x-rtp,clock-rate=90000,payload=96,encoding-name=H263-1998");
        if (!udpSrc.LinkFiltered(rtpH263Depayloader, caps))
        {
            Console.WriteLine("link failed between udp_src and rtp_h263_depayloader");
        }
        if (!Element.Link(rtpH263Depayloader, h263Decoder, cspFilter, screenSink))
        {
            Console.WriteLine("link failed between rtp_h263_depayloader and screen_sink");
        }

        Gst.Interfaces.XOverlayAdapter xadapter = new Gst.Interfaces.XOverlayAdapter(screenSink.Handle);
        VideoWindow.wfhost2.Dispatcher.Invoke(
            System.Windows.Threading.DispatcherPriority.Normal,
            new Action(
                delegate
                {
                    VideoWindow.remote_video_label.Content = "Remote Video:" + name;
                    xadapter.XwindowId = (ulong)VideoWindow.video_rx_canvas.Handle;
                }));

        _videoRxPipeline.SetState(State.Playing);
    }
}
public void TestFromString()
{
    Caps caps = Caps.FromString("video/x-raw-yuv, " +
        "format=(fourcc)I420, " +
        "width=(int)384, " +
        "height=(int)288, " +
        "framerate=(fraction)25/1");

    Assert.IsNotNull(caps);
    Assert.IsFalse(caps.Handle == IntPtr.Zero, "Ooops, null handle");
    Assert.IsTrue(caps.IsFixed, "Caps should be FIXED!");
    Assert.IsFalse(caps.IsEmpty, "Caps shouldn't be EMPTY!");
    Assert.IsFalse(caps.IsAny, "Caps shouldn't be ANY!");
}
public void TestCaps()
{
    Gst.Buffer buffer = new Gst.Buffer(4);
    Caps caps = Caps.FromString("audio/x-raw-int");

    Assert.IsNull(buffer.Caps, "buffer.Caps should be null");
    buffer.Caps = caps;
    Assert.IsNotNull(buffer.Caps, "buffer.Caps is null");

    Caps caps2 = Caps.FromString("audio/x-raw-float");
    buffer.Caps = caps2;
    Assert.AreNotEqual(buffer.Caps, caps);
    Assert.AreEqual(buffer.Caps, caps2);

    buffer.Caps = null;
    Assert.IsNull(buffer.Caps, "buffer.Caps should be null");
}
public void TestBufferOwnershipNIp()
{
    MyTransformNIp.Register();

    Pipeline pipeline = new Pipeline();
    Element src = ElementFactory.Make("fakesrc");
    src["sizetype"] = 2;
    Element capsfilter = ElementFactory.Make("capsfilter");
    capsfilter["caps"] = Caps.FromString("foo/bar");
    src["num-buffers"] = 10;
    MyTransformNIp transform = new MyTransformNIp();
    Element sink = ElementFactory.Make("fakesink");

    pipeline.Add(src, capsfilter, transform, sink);
    Element.Link(src, capsfilter, transform, sink);

    GLib.MainLoop loop = new GLib.MainLoop();

    pipeline.Bus.AddWatch(delegate(Bus bus, Message message)
    {
        switch (message.Type)
        {
            case MessageType.Error:
                Enum err;
                string msg;
                message.ParseError(out err, out msg);
                Assert.Fail(String.Format("Error message: {0}", msg));
                loop.Quit();
                break;
            case MessageType.Eos:
                loop.Quit();
                break;
        }
        return true;
    });

    pipeline.SetState(State.Playing);
    loop.Run();

    Assert.IsTrue(transform.transformed);
    pipeline.SetState(State.Null);
}
public static void Run(ref string[] args, string source, string sourceOptions = "")
{
    Console.WriteLine($"Playing video and audio from {source}");
    Application.Init(ref args);

    Pipeline = new Gst.Pipeline("simplepipeline");
    VideoSink = new AppSink("videoSink");
    Playbin = ElementFactory.Make("playbin", "playbin");

    Playbin["uri"] = source;
    Playbin["video-sink"] = VideoSink;

    VideoSink["caps"] = Caps.FromString("video/x-raw,format=RGBA");
    VideoSink.EmitSignals = true;
    VideoSink.NewSample += NewVideoSample;
    VideoSink.Drop = true;
    VideoSink.Sync = true;
    VideoSink.Qos = true;

    Pipeline.Add(Playbin);

    MainLoop = new GLib.MainLoop();

    Pipeline.Bus.AddSignalWatch();
    Pipeline.Bus.Message += OnMessage;

    var ret = Pipeline.SetState(State.Playing);
    if (ret == StateChangeReturn.Failure)
    {
        Console.WriteLine("Unable to set the pipeline to the playing state.");
        return;
    }
    else if (ret == StateChangeReturn.NoPreroll)
    {
        IsLive = true;
        Console.WriteLine("Playing a live stream.");
    }

    MainLoop.Run();

    Pipeline.SetState(State.Null);
}
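The NewVideoSample handler subscribed above is not part of this snippet. The following is only a minimal sketch of what such a handler might look like, assuming the Gst.App.AppSink bindings (PullSample, Buffer.Map/Unmap) and a NewSampleArgs event-args type; the per-frame processing shown is a placeholder, not the original implementation.

static void NewVideoSample(object sender, NewSampleArgs args)
{
    var sink = (AppSink)sender;

    // Pull the decoded RGBA frame that triggered the new-sample signal.
    var sample = sink.PullSample();
    if (sample == null)
        return;

    var buffer = sample.Buffer;
    if (buffer.Map(out MapInfo map, MapFlags.Read))
    {
        // The mapped region holds the raw RGBA bytes of one frame;
        // copy or process them here before unmapping.
        Console.WriteLine($"Received frame of {map.Size} bytes");
        buffer.Unmap(map);
    }

    sample.Dispose();

    // The new-sample signal expects a FlowReturn (assumed to be reported via RetVal here).
    args.RetVal = FlowReturn.Ok;
}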
public void TestPushUnlinked()
{
    Pad src = new Pad("src", PadDirection.Src);
    Assert.IsNotNull(src, "Could not create src");

    Caps caps = src.AllowedCaps;
    Assert.IsNull(caps);

    caps = Caps.FromString("foo/bar");
    src.SetCaps(caps);

    Gst.Buffer buffer = new Gst.Buffer();
    Assert.AreEqual(src.Push(buffer), FlowReturn.NotLinked);

    ulong handler_id = src.AddBufferProbe(new PadBufferProbeCallback(ProbeHandler));
    buffer = new Gst.Buffer(new byte[] { 0 });
    FlowReturn flowreturn = src.Push(buffer);
    Assert.AreEqual(flowreturn, FlowReturn.Ok);
}
public static Caps PadGetCapsStub(Pad pad)
{
    return Caps.FromString("video/x-raw-yuv");
}
public void StartAudioRx(int recvPort, int codec)
{
    Caps caps;
    string depayloaderName, decoderName;

    switch (codec)
    {
        case 0:
            depayloaderName = "rtppcmudepay";
            decoderName = "mulawdec";
            caps = Caps.FromString("application/x-rtp,clock-rate=8000,payload=0");
            break;
        case 3:
            depayloaderName = "rtpgsmdepay";
            decoderName = "gsmdec";
            caps = Caps.FromString("application/x-rtp,clock-rate=8000,payload=3");
            break;
        case 8:
            depayloaderName = "rtppcmadepay";
            decoderName = "alawdec";
            caps = Caps.FromString("application/x-rtp,clock-rate=8000,payload=8");
            break;
        case 14:
            depayloaderName = "rtpmpadepay";
            decoderName = "mad";
            caps = Caps.FromString("application/x-rtp,media=(string)audio,clock-rate=(int)90000,encoding-name=(string)MPA,payload=(int)96");
            break;
        default:
            depayloaderName = "rtppcmudepay";
            decoderName = "mulawdec";
            caps = Caps.FromString("application/x-rtp,clock-rate=8000,payload=0");
            break;
    }

    _audioRxPipeline = new Pipeline("audiorx-pipeline");
    Bin bin = (Bin)_audioRxPipeline;

    //bin.Bus.AddWatch(new BusFunc(BusCall));
    _audioRxPipeline.Bus.AddSignalWatch();
    _audioRxPipeline.Bus.Message += (o, args) => BusCall(args.Message);

    Element udpSrc = ElementFactory.Make("udpsrc", "udp_src");
    Element depayloader = ElementFactory.Make(depayloaderName, depayloaderName);
    Element decoder = ElementFactory.Make(decoderName, decoderName);
    Element audioconvert = ElementFactory.Make("audioconvert", "audioconvert");
    Element audioresample = ElementFactory.Make("audioresample", "audio_resample");
    Element directsoundsink = ElementFactory.Make("directsoundsink", "directsoundsink");

    if ((udpSrc == null) || (depayloader == null) || (decoder == null) ||
        (audioconvert == null) || (audioresample == null) || (directsoundsink == null))
    {
        MessageBox.Show("Error Creating Gstreamer Elements for Audio Rx pipeline!");
    }
    else
    {
        udpSrc["port"] = recvPort;

        bin.Add(udpSrc, depayloader, decoder, audioconvert, audioresample, directsoundsink);

        if (!udpSrc.LinkFiltered(depayloader, caps))
        {
            Console.WriteLine("link failed between udp_src and depayloader");
        }
        if (!Element.Link(depayloader, decoder, audioconvert, audioresample, directsoundsink))
        {
            Console.WriteLine("link failed between depayloader and directsoundsink");
        }

        _audioRxPipeline.SetState(State.Playing);
    }
}
public void StartVideoTx(string destIP, int destPort)
{
    VideoWindow.Dispatcher.Invoke(
        System.Windows.Threading.DispatcherPriority.Normal,
        new Action(() => VideoWindow.Show()));

    Element cameraSrc;

    _videoTxPipeline = new Pipeline("videotx-pipeline");
    Bin bin = (Bin)_videoTxPipeline;

    //bin.Bus.AddWatch(new BusFunc(BusCall));
    _videoTxPipeline.Bus.AddSignalWatch();
    _videoTxPipeline.Bus.Message += (o, args) => BusCall(args.Message);

    if ((cameraSrc = ElementFactory.Make("videotestsrc", "video_src_tx")) == null)
    {
        Console.WriteLine("Could not create webcam-source");
    }

    Element cspFilter = ElementFactory.Make("ffmpegcolorspace", "filter_tx");
    Element cspFilter2 = ElementFactory.Make("ffmpegcolorspace", "filter2_tx");
    Element screenSink = ElementFactory.Make("dshowvideosink", "video-sink_tx");
    Element tee = ElementFactory.Make("tee", "tee_tx");
    Element screenQueue = ElementFactory.Make("queue", "screen-queue_tx");
    Element udpQueue = ElementFactory.Make("queue", "udp-queue_tx");
    Element h263Encoder = ElementFactory.Make("ffenc_h263p", "ffenc_h263p_tx");
    Element rtpH263Payloader = ElementFactory.Make("rtph263ppay", "rtp_payloader_tx");
    Element udpSink = ElementFactory.Make("udpsink", "udp_sink_tx");

    if ((_videoTxPipeline == null) || (cameraSrc == null) || (screenSink == null) ||
        (cspFilter == null) || (cspFilter2 == null) || (h263Encoder == null) ||
        (rtpH263Payloader == null) || (udpSink == null) || (udpQueue == null) ||
        (screenQueue == null))
    {
        MessageBox.Show("Error Creating Gstreamer Elements for sending video!");
    }
    else
    {
        udpSink["host"] = destIP;
        udpSink["port"] = destPort;

        bin.Add(cameraSrc, screenSink, cspFilter, cspFilter2, tee, h263Encoder,
                rtpH263Payloader, udpSink, udpQueue, screenQueue);

        Caps caps = Caps.FromString("video/x-raw-rgb,width=" + _settings.videocall_width +
                                    ",height=" + _settings.videocall_height);
        if (!cameraSrc.LinkFiltered(tee, caps))
        {
            Console.WriteLine("link failed between camera_src and tee");
        }
        if (!Element.Link(tee, cspFilter, screenQueue, screenSink))
        {
            Console.WriteLine("link failed between tee and screen_sink");
        }
        if (!Element.Link(tee, cspFilter2, udpQueue, h263Encoder, rtpH263Payloader, udpSink))
        {
            Console.WriteLine("link failed between tee and udp_sink");
        }

        Gst.Interfaces.XOverlayAdapter xadapter = new Gst.Interfaces.XOverlayAdapter(screenSink.Handle);
        VideoWindow.wfhost1.Dispatcher.Invoke(
            System.Windows.Threading.DispatcherPriority.Normal,
            new Action(
                delegate
                {
                    VideoWindow.local_video_label.Content = "Local Video: " + _settings.ims_public_user_identity;
                    xadapter.XwindowId = (ulong)VideoWindow.video_tx_canvas.Handle;
                }));

        _videoTxPipeline.SetState(State.Playing);
    }
}