/// <summary>
/// Requests the RTP-unicast/TCP stream URI for the given profile and, once the
/// URI arrives on the dispatcher, sizes the video surface from the profile's
/// encoder resolution and starts playback.
/// </summary>
private void GetStreamUri(INvtSession session, Profile profile) {
    var setup = new StreamSetup() {
        stream = StreamType.rtpUnicast,
        transport = new Transport() { protocol = TransportProtocol.tcp }
    };
    var subscription = session
        .GetStreamUri(setup, profile.token)
        .ObserveOnCurrentDispatcher()
        .Subscribe(
            mediaUri => {
                var resolution = profile.videoEncoderConfiguration.resolution;
                videoSize = new Size(resolution.width, resolution.height);
                InitializePlayer(mediaUri.uri.ToString(), credentials, videoSize);
            },
            err => { } // errors intentionally ignored; playback simply does not start
        );
    disposables.Add(subscription);
}
/// <summary>
/// Starts playback of the stream belonging to the profile selected in the
/// combo box: fetches its ONVIF media URI, rewrites the scheme to rtsp, and
/// hands the URI plus player options to the player control.
/// </summary>
private void listBox_SelectionChanged(object sender, SelectionChangedEventArgs e) {
    bool hasSelection = profiles != null && combobox.SelectedIndex >= 0;
    if (hasSelection) {
        StreamSetup streamSetup = new StreamSetup();
        streamSetup.Stream = StreamType.RTPUnicast;
        streamSetup.Transport = new Transport();
        streamSetup.Transport.Protocol = TransportProtocol.RTSP;
        // FIX: removed a dead `new MediaUri()` allocation that was immediately
        // overwritten by the GetStreamUri result.
        MediaUri mediaUri = mediaClient.GetStreamUri(streamSetup, profiles[combobox.SelectedIndex].token);
        UriBuilder uriBuilder = new UriBuilder(mediaUri.Uri);
        uriBuilder.Scheme = "rtsp"; // force rtsp regardless of what the device reported
        string[] options = new string[] {
            ":rtsp-http",
            ":rtsp-http-port=" + uriBuilder.Port.ToString(),
            // FIX: original source was redacted here (`":rtsp-user="******`),
            // which is a syntax error. NOTE(review): confirm the actual
            // user-name field; `camusername` mirrors `campassword` below.
            ":rtsp-user=" + camusername,
            ":rtsp-pwd=" + campassword,
            ":network-caching=300"
        };
        control.Play(uriBuilder.Uri, options);
        stop_play.IsEnabled = true;
        playingrui = uriBuilder.Uri;
    }
    recordcheckbox.IsEnabled = true;
}
/// <summary>
/// Logs into the ONVIF media service at <paramref name="url"/> with basic
/// authentication and logs the RTSP stream URI of one of the device's media
/// profiles. All failures are caught and logged via AddInfo.
/// </summary>
private void onvifVideo(string url, string user, string password) {
    try {
        AddInfo("Efetuando login em " + url);
        // http://stackoverflow.com/questions/31007828/onvif-getstreamurl-c-sharp
        // Add the service reference: http://www.onvif.org/onvif/ver10/media/wsdl/media.wsdl
        // http://stackoverflow.com/questions/32779467/onvif-api-capture-image-in-c-sharp
        var messageElement = new TextMessageEncodingBindingElement();
        messageElement.MessageVersion = MessageVersion.CreateVersion(EnvelopeVersion.Soap12, AddressingVersion.None);
        HttpTransportBindingElement httpBinding = new HttpTransportBindingElement();
        httpBinding.AuthenticationScheme = AuthenticationSchemes.Basic;
        CustomBinding bind = new CustomBinding(messageElement, httpBinding);
        EndpointAddress mediaAddress = new EndpointAddress(url + "/onvif/Media");
        MediaClient mediaClient = new MediaClient(bind, mediaAddress);
        mediaClient.ClientCredentials.UserName.UserName = user;
        mediaClient.ClientCredentials.UserName.Password = password;
        Profile[] profiles = mediaClient.GetProfiles();
        StreamSetup streamSetup = new StreamSetup();
        streamSetup.Stream = StreamType.RTPUnicast;
        streamSetup.Transport = new Transport();
        streamSetup.Transport.Protocol = TransportProtocol.RTSP;
        // FIX: the original hard-coded profiles[1], which throws
        // IndexOutOfRangeException on devices exposing a single profile.
        // Prefer the second profile when present, else fall back to the first.
        Profile profile = profiles.Length > 1 ? profiles[1] : profiles[0];
        var uri = mediaClient.GetStreamUri(streamSetup, profile.token);
        AddInfo(uri.Uri);
    } catch (Exception ex) {
        AddInfo(ex);
    }
}
/// <summary>
/// Probes the device for a multicast-capable stream URI on the given media
/// profile by trying every transport protocol in turn. Returns the first URI
/// any protocol yields, or null when the profile has no multicast
/// configuration or no protocol succeeds.
/// </summary>
private Uri GetMulticastUri(Camera cam, MediaClient mediaClient, Profile mediaProfile) {
    if (mediaProfile?.VideoEncoderConfiguration?.Multicast != null &&
        mediaProfile?.VideoEncoderConfiguration?.Multicast.Port != 0) {
        // Check for any URI supporting multicast
        foreach (TransportProtocol protocol in Enum.GetValues(typeof(TransportProtocol))) {
            // Get stream URI for the requested transport/protocol and insert the User/Password if present
            Transport transport = new Transport() { Protocol = protocol };
            StreamSetup ss = new StreamSetup() { Stream = StreamType.RTPMulticast };
            ss.Transport = transport;
            try {
                // FIX: query the mediaProfile *parameter*, not the class-level
                // MediaProfile field, so the URI matches the profile being probed.
                MediaUri mu = mediaClient.GetStreamUri(ss, mediaProfile.token);
                log.Debug(string.Format("Camera #{0} [{1}] Onvif media profile ({2}) capable of multicast [multicast URI: {3}]", cam.Number, cam.IP, mediaProfile.Name, mu.Uri));
                return(new Uri(mu.Uri));
            }
            catch { } // Ignore exception and continue checking for a multicast URI
        }
    } else {
        log.Debug(string.Format("Camera #{0} [{1}] Onvif media profile ({2}) does not support multicast", cam.Number, cam.IP, mediaProfile.Name));
    }
    return(null);
}
/// <summary>
/// Asynchronously configures the given profile (creating a test profile when
/// none is supplied), requests its RTP-unicast stream URI for the chosen
/// transport, and raises OnMediaUriReceived with the result.
/// </summary>
public void GetMediaUri(
    Profile profile,
    VideoSourceConfiguration videoSourceConfig,
    VideoEncoderConfiguration videoEncoderConfig,
    AudioSourceConfiguration audioSourceConfig,
    AudioEncoderConfiguration audioEncoderConfig,
    TransportProtocol protocol)
{
    RunInBackground(new Action(() =>
    {
        // Fall back to a freshly created test profile when none was passed in.
        Profile target = profile ?? CreateProfile(TestMediaProfileName);
        ConfigureProfile(target, videoSourceConfig, videoEncoderConfig, audioSourceConfig, audioEncoderConfig);

        var setup = new StreamSetup
        {
            Stream = StreamType.RTPUnicast,
            Transport = new Transport { Protocol = protocol }
        };

        MediaUri streamUri = Client.GetStreamUri(setup, target.token);
        if (OnMediaUriReceived != null)
        {
            OnMediaUriReceived(streamUri, videoEncoderConfig, audioEncoderConfig);
        }
    }));
}
// Re-initializes video playback for the current session using the transport
// type from the application's visual settings.
void Reload(INvtSession session) {
    var vs = AppDefaults.visualSettings;
    //vidBuff = new VideoBuffer(resolution.Width, resolution.Height);
    // NOTE(review): streamSetup is built but never used — the subscription that
    // consumed it is commented out below; presumably kept for when the
    // GetStreamUri path is re-enabled. Confirm before removing.
    var streamSetup = new StreamSetup() {
        transport = new Transport() {
            protocol = AppDefaults.visualSettings.Transport_Type
        }
    };
    //TODO: provide a way of cancelation
    //VideoInfo.MediaUri = model.uri;
    //VideoStartup(VideoInfo);
    VideoStartup();
    //subscription.Add(session.GetStreamUri(strSetup, profile.token)
    //    .ObserveOnCurrentDispatcher()
    //    .Subscribe(uri => {
    //        VideoInfo.MediaUri = uri.Uri;
    //        VideoStartup(VideoInfo);
    //    }, err => {
    //    }));
}
/// <summary>
/// Runs a backchannel streaming sequence for the given codec over the
/// requested stream type and transport, then restores the device's media
/// configuration and any profile changes afterwards.
/// </summary>
private void TestByCodecAndTransport(string codec, string filename, StreamType streamType, TransportProtocol protocol)
{
    Profile deletedProfile = null;
    Profile createdProfile = null;
    Profile modifiedProfile = null;
    Profile profile = null;
    var changeLog = new MediaConfigurationChangeLog();

    RunTest(
        () =>
        {
            // Reuse an existing suitable profile when possible; otherwise create one.
            profile = FindSuitableProfile(changeLog, codec);
            bool needFreshProfile = (profile == null) || !TuneBackchannelProfile(profile, changeLog, codec);
            if (needFreshProfile)
            {
                profile = CreateProfileByAnnex3("testprofileX", null, out deletedProfile, out createdProfile, out modifiedProfile);
                PrepareBackchannelProfile(profile, changeLog, codec);
            }

            var setup = new StreamSetup
            {
                Stream = streamType,
                Transport = new Transport { Protocol = protocol }
            };

            MediaUri uri = GetStreamUri(setup, profile.token);
            DoSequence(codec, filename, uri, streamType, protocol);
        },
        () =>
        {
            RestoreMediaConfiguration(changeLog);
            RestoreProfileByAnnex3(deletedProfile, createdProfile, modifiedProfile);
        }
    );
}
/// <summary>
/// Negative test: requests a stream URI with a profile token constructed to
/// differ from every existing token and expects a
/// Sender/InvalidArgVal/NoProfile fault.
/// </summary>
public void FaultMessageForInvalidProfileTest()
{
    RunTest(() =>
    {
        Profile[] profiles = GetProfiles();
        Assert(profiles != null, "No profiles returned", "Check if DUT returned profiles");

        // Build a token that cannot match any profile: for position i, take the
        // i-th character of the i-th profile's lower-cased token and shift it
        // (wrapping 'z'->'a' and '9'->'0'), or use 'X' when the token is too short.
        var invalidProfileToken = new StringBuilder();
        int position = 0;
        foreach (Profile profile in profiles)
        {
            char replacement;
            if (profile.token.Length > position)
            {
                replacement = profile.token.ToLower()[position];
                switch (replacement)
                {
                    case 'z': replacement = 'a'; break;
                    case '9': replacement = '0'; break;
                    default: replacement++; break;
                }
            }
            else
            {
                replacement = 'X';
            }
            invalidProfileToken.Append(replacement);
            position++;
        }
        invalidProfileToken.Append('X');

        // get stream uri
        var setup = new StreamSetup
        {
            Stream = StreamType.RTPUnicast,
            Transport = new Transport { Protocol = TransportProtocol.UDP }
        };

        RunStep(
            () => { Client.GetStreamUri(setup, invalidProfileToken.ToString()); },
            "Get Stream URI - negative test",
            "Sender/InvalidArgVal/NoProfile");

        DoRequestDelay();
    });
}
/// <summary>
/// Negative test: spoils an otherwise valid GetStreamUri request by replacing
/// the Transport/Protocol element in the outgoing SOAP body with the invalid
/// value "RTP", then verifies the DUT answers with a schema-valid ONVIF
/// SOAP 1.2 fault.
/// </summary>
public void FaultMessageForInvalidRequestTest() {
    RunTest(() => {
        Profile[] profiles = GetProfiles();
        Assert((profiles != null && profiles.Length > 0), "No profile available", "Check if DUT returned at least one profile");

        // A well-formed request; only the on-the-wire XML is spoiled below.
        StreamSetup setup = new StreamSetup();
        setup.Transport = new Transport();
        setup.Transport.Protocol = TransportProtocol.UDP;
        setup.Stream = StreamType.RTPUnicast;

        // Configure the spoiler to replace the Protocol element's value with
        // "RTP", which is not a member of the Protocol enumeration.
        MessageSpoiler spoiler = new MessageSpoiler();
        Dictionary <string, string> namespaces = new Dictionary <string, string>();
        namespaces.Add("s", "http://www.w3.org/2003/05/soap-envelope");
        namespaces.Add("media", "http://www.onvif.org/ver10/media/wsdl");
        namespaces.Add("onvif", "http://www.onvif.org/ver10/schema");
        Dictionary <string, string> replacements = new Dictionary <string, string>();
        replacements.Add("/s:Envelope/s:Body/media:GetStreamUri/media:StreamSetup/onvif:Transport/onvif:Protocol", "RTP");
        spoiler.Namespaces = namespaces;
        spoiler.NodesToReplace = replacements;
        SetBreakingBehaviour(spoiler);

        try {
            GetStreamUri(setup, profiles[0].token);
        }
        catch (FaultException exception) {
            // The DUT must reject the spoiled request with a valid SOAP 1.2 fault.
            bool fault = exception.IsValidOnvifFault();
            string reason = "A SOAP 1.2 fault message is invalid";
            SaveStepFault(exception);
            if (!fault) {
                AssertException ex = new AssertException(reason);
                StepFailed(ex);
                throw ex;
            } else {
                StepPassed();
            }
        }

        //RunStep(
        //    () =>
        //    {
        //        Client.GetStreamUri(setup, profiles[0].token);
        //    },
        //    "Get Stream URI - negative test",
        //    "Sender/InvalidArgVal/InvalidStreamSetup");

        // Restore normal (unspoiled) message behaviour for subsequent steps.
        ResetBreakingBehaviour();
    });
}
/// <summary>
/// Creates a player model bound to a full media profile; the profile's token is
/// cached so playback code can use it without re-reading the profile. No
/// metadata receiver is attached in this overload.
/// </summary>
public VideoPlayerActivityModel(StreamSetup streamSetup, Profile profile, bool showStreamUrl)
{
    this.streamSetup = streamSetup;
    this.showStreamUrl = showStreamUrl;
    this.profile = profile;
    this.profileToken = profile.token;
    this.metadataReceiver = null; // no metadata consumer in this overload
}
/// <summary>
/// Creates a player model from a bare profile token (no Profile object is
/// available), optionally attaching a metadata receiver.
/// </summary>
public VideoPlayerActivityModel(StreamSetup streamSetup, string profileToken, bool showStreamUrl, IMetadataReceiver metadataReceiver)
{
    this.streamSetup = streamSetup;
    this.showStreamUrl = showStreamUrl;
    this.profile = null; // only the token is known in this overload
    this.profileToken = profileToken;
    this.metadataReceiver = metadataReceiver;
}
/// <summary>
/// Convenience wrapper over the ReplayPort GetReplayUri operation: builds the
/// request message for the given recording and returns just the replay URI.
/// </summary>
public string GetReplayUri(StreamSetup StreamSetup, string RecordingToken)
{
    var request = new GetReplayUriRequest
    {
        StreamSetup = StreamSetup,
        RecordingToken = RecordingToken
    };
    GetReplayUriResponse response = ((ReplayPort)(this)).GetReplayUri(request);
    return response.Uri;
}
/// <summary>
/// Builds a player model from its constituent parts; metadataReceiver may be
/// null when no metadata stream is consumed.
/// </summary>
public Model(
    StreamSetup streamSetup,
    MediaUri mediaUri,
    VideoResolution encoderResolution,
    bool isUriEnabled,
    IMetadataReceiver metadataReceiver
)
{
    this.metadataReceiver = metadataReceiver;
    this.isUriEnabled = isUriEnabled;
    this.encoderResolution = encoderResolution;
    this.mediaUri = mediaUri;
    this.streamSetup = streamSetup;
}
/// <summary>
/// Starts video playback for the given model: optionally wires up metadata
/// processing for graphic annotation, allocates the video buffer, and embeds a
/// VideoPlayerView configured with the stream setup from the app settings.
/// </summary>
void VideoStartup(Model model, VideoResolution resolution) {
    //subscribe to metadata
    IMetadataReceiver metadataReceiver = null;
    if (AppDefaults.visualSettings.EnableGraphicAnnotation) {
        string vaConfToken = model.engineConfToken;
        // Each processor maps one alarm/event type from the analytics stream
        // onto the shared holder callbacks (initialized/changed/deleted).
        var eventMetadataProcessor = new EventMetadataProcessor();
        //eventMetadataProcessor.Processors.Add(new ObjectMotionMetadataProcessor(null, vaConfToken, movingObjectsHolder.EntityInitialized, movingObjectsHolder.EntityChanged, movingObjectsHolder.EntityDeleted));
        eventMetadataProcessor.Processors.Add(new MotionAlarmMetadataProcessor(null, vaConfToken, alarmsHolder.EntityInitialized, alarmsHolder.EntityChanged, alarmsHolder.EntityDeleted));
        eventMetadataProcessor.Processors.Add(new RegionMotionAlarmMetadataProcessor(null, vaConfToken, alarmsHolder.EntityInitialized, alarmsHolder.EntityChanged, alarmsHolder.EntityDeleted));
        eventMetadataProcessor.Processors.Add(new LoiteringAlarmMetadataProcessor(null, vaConfToken, alarmsHolder.EntityInitialized, alarmsHolder.EntityChanged, alarmsHolder.EntityDeleted));
        eventMetadataProcessor.Processors.Add(new AbandonedItemAlarmMetadataProcessor(null, vaConfToken, alarmsHolder.EntityInitialized, alarmsHolder.EntityChanged, alarmsHolder.EntityDeleted));
        eventMetadataProcessor.Processors.Add(new TripwireAlarmMetadataProcessor(null, vaConfToken, alarmsHolder.EntityInitialized, alarmsHolder.EntityChanged, alarmsHolder.EntityDeleted));
        eventMetadataProcessor.Processors.Add(new TamperingDetectorAlarmMetadataProcessor(null, vaConfToken, alarmsHolder.EntityInitialized, alarmsHolder.EntityChanged, alarmsHolder.EntityDeleted));
        // Scene metadata (moving objects) goes to its own holder.
        var sceneMetadataProcessor = new SceneMetadataProcessor(movingObjectsHolder.EntityInitialized, movingObjectsHolder.EntityChanged, movingObjectsHolder.EntityDeleted);
        var metadataProcessor = new MetadataProcessor(eventMetadataProcessor, sceneMetadataProcessor);
        metadataReceiver = new MetadataFramer(metadataProcessor.Process);
    }
    vidBuff = new VideoBuffer(resolution.width, resolution.height);
    // Transport comes from the application-wide visual settings.
    var streamSetup = new StreamSetup() {
        transport = new Transport() {
            protocol = AppDefaults.visualSettings.Transport_Type
        }
    };
    VideoPlayerView playview = new VideoPlayerView();
    disposables.Add(playview);
    player.Child = playview;
    playview.Init(new VideoPlayerView.Model(
        streamSetup: streamSetup,
        mediaUri: new MediaUri() { uri = model.uri },
        encoderResolution: new VideoResolution() { height = resolution.height, width = resolution.width },
        isUriEnabled: false, //TODO if true then annotation is not positioned correctly
        metadataReceiver: metadataReceiver
    ));
    uriString.Visibility = System.Windows.Visibility.Visible;
    uriString.Text = model.uri;
}
/// <summary>
/// Sets up the connection to the camera, enquires to get metadata from the Onvif service:
/// creates device/media/PTZ clients, resolves the first media profile's stream URI,
/// and records the device's HTTP and RTSP ports.
/// </summary>
/// <param name="camera">Camera whose clients and stream metadata are populated.</param>
/// <returns>A task that completes when the camera has been fully interrogated.</returns>
static async Task AsyncHelper(BaseOnvifPtzCamera camera)
{
    Globals.Log.Debug(string.Format("Connecting to camera at {0}", camera.HostAddress));
    camera.Camera = await OnvifClientFactory.CreateDeviceClientAsync(camera.HostAddress, camera.UserName, camera.Password);
    // FIX: removed a stray double semicolon after this call.
    camera.MediaClient = await OnvifClientFactory.CreateMediaClientAsync(camera.HostAddress, camera.UserName, camera.Password);
    camera.PtzController = await OnvifClientFactory.CreatePTZClientAsync(camera.HostAddress, camera.UserName, camera.Password);

    Mictlanix.DotNet.Onvif.Media.GetProfilesResponse profiles = await camera.MediaClient.GetProfilesAsync();
    camera.MediaProfile = profiles.Profiles.FirstOrDefault();
    if (camera.MediaProfile != null)
    {
        StreamSetup streamSetup = new StreamSetup
        {
            Stream = StreamType.RTPUnicast,
            Transport = new Transport()
        };
        streamSetup.Transport.Protocol = TransportProtocol.TCP;
        // FIX: GetStreamUri takes the profile *token*, not its display name;
        // passing Name fails on devices where the two differ.
        MediaUri videoStreamUriObject = await camera.MediaClient.GetStreamUriAsync(streamSetup, camera.MediaProfile.token);
        camera.VideoStreamUri = videoStreamUriObject.Uri;
    }

    Mictlanix.DotNet.Onvif.Device.GetNetworkProtocolsRequest request = new Mictlanix.DotNet.Onvif.Device.GetNetworkProtocolsRequest();
    Mictlanix.DotNet.Onvif.Device.GetNetworkProtocolsResponse response = await camera.Camera.GetNetworkProtocolsAsync(request);

    // store http and rtsp ports
    foreach (NetworkProtocol protocol in response.NetworkProtocols)
    {
        string protocolName = protocol.Name.ToString();
        switch (protocolName)
        {
            case "HTTP":
                camera.HttpPort = protocol.Port[0];
                break;

            case "RTSP":
                camera.RtspPort = protocol.Port[0];
                break;
        }
    }

    Mictlanix.DotNet.Onvif.Media.GetVideoSourcesResponse video_sources = await camera.MediaClient.GetVideoSourcesAsync();
    Globals.Log.Debug("Camera connected");
}
/// <summary>
/// Sends a GetStreamUri request for the given profile and returns the raw response.
/// </summary>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="profileToken"/> is null.</exception>
public async Task <GetStreamUriResponse> GetStreamUriAsync(StreamSetup streamSetup, string profileToken)
{
    if (profileToken is null)
    {
        throw new ArgumentNullException(nameof(profileToken));
    }

    var message = new GetStreamUri
    {
        StreamSetup = streamSetup,
        ProfileToken = profileToken
    };
    return await ExecuteAsync <GetStreamUri, GetStreamUriResponse>(message, MediaActions.GetStreamUri);
}
/// <summary>
/// Negative test: calls GetReplayUri with a random 8-character token that
/// cannot match any recording and expects an InvalidToken fault.
/// </summary>
public void ReplayServiceInvalidTokenTest()
{
    RunTest(() =>
    {
        var setup = new StreamSetup
        {
            Stream = StreamType.RTPUnicast,
            Transport = new Transport { Protocol = TransportProtocol.UDP }
        };
        RunStep(
            () =>
            {
                // First 8 chars of a fresh GUID — effectively guaranteed bogus.
                Client.GetReplayUri(setup, Guid.NewGuid().ToString().Substring(0, 8));
            },
            "GetReplayURI - invalid token",
            OnvifFaults.InvalidToken,
            true);
    });
}
/// <summary>
/// Sends a GetReplayUri request for the given recording and returns the raw response.
/// NOTE(review): despite its name this method wraps GetReplayUri, not GetStreamUri;
/// renaming would break existing callers, so the mismatch is documented instead.
/// </summary>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="recordingToken"/> is null.</exception>
public async Task <GetReplayUriResponse> GetStreamUriAsync(StreamSetup streamSetup, string recordingToken)
{
    if (recordingToken is null)
    {
        throw new ArgumentNullException(nameof(recordingToken));
    }

    var message = new GetReplayUri
    {
        StreamSetup = streamSetup,
        RecordingToken = recordingToken
    };
    return await ExecuteAsync <GetReplayUri, GetReplayUriResponse>(message, ReplayActions.GetReplayUri);
}
/// <summary>
/// Returns the device's stream URI for the current profile using RTP-unicast
/// over HTTP, or null when the camera call fails.
/// </summary>
public string GetVideoURL()
{
    var setup = new StreamSetup
    {
        Stream = StreamType.RTPUnicast,
        Transport = new Transport { Protocol = TransportProtocol.HTTP }
    };
    var mediaUri = CallCamera(() => mediaClient.GetStreamUri(setup, CurrentProfile.Token));
    if (mediaUri == null)
    {
        return null; // CallCamera reported a failure
    }
    return mediaUri.Uri;
}
/// <summary>
/// Factory for Model instances carrying everything the player needs: stream
/// setup, media URI, encoder resolution, URI-visibility flag, and an optional
/// metadata receiver (may be null).
/// </summary>
public static Model Create(
    StreamSetup streamSetup,
    MediaUri mediaUri,
    VideoResolution encoderResolution,
    bool isUriEnabled,
    IMetadataReceiver metadataReceiver
)
{
    var model = new Model();
    model.metadataReceiver = metadataReceiver;
    model.isUriEnabled = isUriEnabled;
    model.encoderResolution = encoderResolution;
    model.mediaUri = mediaUri;
    model.streamSetup = streamSetup;
    return model;
}
/// <summary>
/// Asynchronously requests the RTP-unicast stream URI for the given profile and
/// transport, then raises OnMediaUriReceived with the profile's video and audio
/// encoder configurations.
/// </summary>
public void GetMediaUri(Profile profile, TransportProtocol protocol)
{
    RunInBackground(new Action(() =>
    {
        var setup = new StreamSetup
        {
            Stream = StreamType.RTPUnicast,
            Transport = new Transport { Protocol = protocol }
        };
        MediaUri streamUri = Client.GetStreamUri(setup, profile.token);
        if (OnMediaUriReceived != null)
        {
            OnMediaUriReceived(streamUri, profile.VideoEncoderConfiguration, profile.AudioEncoderConfiguration);
        }
    }));
}
/// <summary>
/// Console entry point: connects to the ONVIF media service given on the
/// command line and prints the snapshot URI and RTSP stream URI of the
/// device's first media profile. Returns 0 on success, 1 on bad usage.
/// </summary>
static int Main(string[] args)
{
    if (args.Length < 3)
    {
        Console.WriteLine("Usage: OnvifCamReader.exe <url> <login> <password>");
        return 1;
    }

    string url = args[0];
    string login = args[1];
    string password = args[2];

    // SOAP 1.2 text encoding over HTTP with basic authentication.
    var encoding = new TextMessageEncodingBindingElement
    {
        MessageVersion = MessageVersion.CreateVersion(EnvelopeVersion.Soap12, AddressingVersion.None)
    };
    var httpTransport = new HttpTransportBindingElement
    {
        AuthenticationScheme = AuthenticationSchemes.Basic
    };
    var binding = new CustomBinding(encoding, httpTransport);

    var mediaClient = new MediaClient(binding, new EndpointAddress(url));
    mediaClient.ClientCredentials.UserName.UserName = login;
    mediaClient.ClientCredentials.UserName.Password = password;

    Profile[] profiles = mediaClient.GetProfiles();
    string profileToken = profiles[0].token;

    MediaUri snapshotUri = mediaClient.GetSnapshotUri(profileToken);

    var streamSetup = new StreamSetup
    {
        Stream = StreamType.RTPUnicast,
        Transport = new Transport { Protocol = TransportProtocol.RTSP }
    };
    MediaUri streamUri = mediaClient.GetStreamUri(streamSetup, profileToken);

    Console.WriteLine(snapshotUri.Uri.ToString());
    Console.WriteLine(streamUri.Uri.ToString());
    return 0;
}
/// <summary>
/// Connects to the camera described by <paramref name="cameraInfo"/> and
/// returns the RTP-unicast/UDP stream URI of its first media profile, or null
/// when the device reports no profiles or no URI.
/// </summary>
public static string GetStreamUri(CameraInfo cameraInfo)
{
    var cameraUrl = BuildCameraUrl(cameraInfo.Url);
    NvtSessionFactory factory = new NvtSessionFactory(new NetworkCredential(cameraInfo.User, cameraInfo.Password));
    var session = factory.CreateSession(new Uri(cameraUrl));

    var profiles = session.GetProfiles().RunSynchronously();
    if (profiles == null || profiles.Length == 0)
    {
        return null;
    }

    var streamSetup = new StreamSetup()
    {
        stream = StreamType.rtpUnicast,
        transport = new Transport() { protocol = TransportProtocol.udp }
    };
    var profile = profiles.First();
    var streamUri = session.GetStreamUri(streamSetup, profile.token).RunSynchronously();
    if (streamUri == null)
    {
        return null;
    }
    // FIX: return the URI string itself; ToString() on the MediaUri object
    // yields the type name, not the stream address (cf. `muri.uri` usage in
    // this codebase's other odm-based helpers).
    return streamUri.uri;
}
/// <summary>
/// Connects to the ONVIF media service at <paramref name="address"/> and prints
/// the RTSP RTP-unicast stream URI of every media profile the device reports.
/// </summary>
public static async Task GetMediaInfoAsync(string address)
{
    // SOAP 1.2 text encoding over plain HTTP with a generous message size cap.
    var encoding = new TextMessageEncodingBindingElement
    {
        MessageVersion = MessageVersion.CreateVersion(EnvelopeVersion.Soap12, AddressingVersion.None)
    };
    HttpTransportBindingElement httpTransport = new HttpTransportBindingElement();
    httpTransport.MaxReceivedMessageSize = 1024 * 1024 * 1024;
    //httpTransport.AuthenticationScheme = AuthenticationSchemes.Digest;
    CustomBinding binding = new CustomBinding(encoding, httpTransport);

    // Bind the service address.
    MediaClient mediaClient = new MediaClient(binding, new EndpointAddress(address));
    // Attach authentication information to every request.
    mediaClient.Endpoint.EndpointBehaviors.Add(new CustomEndpointBehavior());
    // Do not remove: creating the channel up front avoids a runtime exception.
    var channel = mediaClient.ChannelFactory.CreateChannel();

    var profilesResponse = await mediaClient.GetProfilesAsync();
    foreach (var profile in profilesResponse.Profiles)
    {
        var setup = new StreamSetup
        {
            Stream = StreamType.RTPUnicast,
            Transport = new Transport() { Protocol = TransportProtocol.RTSP }
        };
        var streamUri = await mediaClient.GetStreamUriAsync(setup, profile.token);
        Console.WriteLine(streamUri.Uri);
    }
}
/// <summary>
/// Reduced test of the guaranteed number of video encoder instances using
/// RTP-unicast streaming; restores the media configuration afterwards.
/// NOTE(review): the method name says TCP but the transport is set to RTSP —
/// presumably RTP/RTSP/TCP interleaved streaming; confirm against the test plan.
/// </summary>
public void GuarenteedNumberOfVideoEncoderInstancesTCPReduced()
{
    var changeLog = new MediaConfigurationChangeLog();
    RunTest(
        () =>
        {
            MultipleStreamTestReduced(changeLog, (ref Profile profile) =>
            {
                var setup = new StreamSetup
                {
                    Stream = StreamType.RTPUnicast,
                    Transport = new Transport { Protocol = TransportProtocol.RTSP }
                };
                return setup;
            });
        },
        () =>
        {
            RestoreMediaConfiguration(changeLog);
        }
    );
}
/// <summary>
/// Negative test: requests a stream URI with a token generated to not match any
/// existing profile token and expects a Sender/InvalidArgVal/NoProfile fault.
/// </summary>
public void FaultMessageForInvalidProfileTest()
{
    RunTest(() =>
    {
        Profile[] profiles = GetProfiles();
        Assert(profiles != null, "No profiles returned", "Check if DUT returned profiles");

        // Derive a token guaranteed to differ from every returned token.
        var bogusToken = profiles.Select(e => e.token).GetNonMatchingString();

        // get stream uri
        var setup = new StreamSetup
        {
            Stream = StreamType.RTPUnicast,
            Transport = new Transport { Protocol = TransportProtocol.UDP }
        };

        RunStep(() => Client.GetStreamUri(setup, bogusToken),
                "Get Stream URI - negative test",
                "Sender/InvalidArgVal/NoProfile"); // strict check

        DoRequestDelay();
    });
}
/// <summary>
/// Obtains a replay URI for the given recording over the requested transport
/// and hands it to the video form for playback adjustment.
/// </summary>
protected void AdjustVideo(string recordingToken, TransportProtocol protocol)
{
    var streamSetup = new StreamSetup
    {
        Stream = StreamType.RTPUnicast,
        Transport = new Transport { Protocol = protocol }
    };

    MediaUri replayUri = new MediaUri();
    CreateReplayPortClient();
    RunStep(() =>
    {
        replayUri.Uri = ReplayClient.GetReplayUri(streamSetup, recordingToken);
        LogStepEvent("Replay URI = " + replayUri.Uri);
    }, "Get Replay Uri");

    VideoUtils.AdjustVideo(_videoForm, _username, _password, _messageTimeout, streamSetup.Transport.Protocol, streamSetup.Stream, replayUri, null);
}
/// <summary>
/// Asynchronously finds a profile with a video encoder configuration, requests
/// its RTP-unicast/UDP stream URI, and raises OnMediaUriReceived (audio config
/// is passed as null).
/// </summary>
public void GetMediaUri()
{
    RunInBackground(new Action(() =>
    {
        Profile profile = GetVideoProfile();
        if (profile == null)
        {
            throw new Exception("Profile with video source encoder configuration not found");
        }

        var setup = new StreamSetup
        {
            Stream = StreamType.RTPUnicast,
            Transport = new Transport { Protocol = TransportProtocol.UDP }
        };
        MediaUri streamUri = Client.GetStreamUri(setup, profile.token);
        if (OnMediaUriReceived != null)
        {
            OnMediaUriReceived(streamUri, profile.VideoEncoderConfiguration, null);
        }
    }));
}
/// <summary>
/// Connects to the ONVIF media service with the given credentials and returns
/// the RTSP RTP-unicast stream URI of the device's first media profile.
/// </summary>
public async Task <string> GetMediaInfoAsync(string mediaAddress, string userName, string password)
{
    var messageElement = new TextMessageEncodingBindingElement
    {
        MessageVersion = MessageVersion.CreateVersion(EnvelopeVersion.Soap12, AddressingVersion.None)
    };
    HttpTransportBindingElement httpBinding = new HttpTransportBindingElement
    {
        MaxReceivedMessageSize = 1024 * 1024 * 1024
    };
    CustomBinding bind = new CustomBinding(messageElement, httpBinding);
    // Bind the service address.
    EndpointAddress serviceAddress = new EndpointAddress(mediaAddress);
    MediaClient mediaClient = new MediaClient(bind, serviceAddress);
    // Attach authentication information to every request.
    mediaClient.Endpoint.EndpointBehaviors.Add(new CustomEndpointBehavior(userName, password));
    // Do not remove: creating the channel up front avoids a runtime exception.
    var channel = mediaClient.ChannelFactory.CreateChannel();

    var profilesResponse = await mediaClient.GetProfilesAsync();
    // FIX: removed an empty foreach over the profiles (dead code).

    var step = new StreamSetup
    {
        Transport = new Transport() { Protocol = TransportProtocol.RTSP },
        Stream = StreamType.RTPUnicast
    };
    var streamUri = await mediaClient.GetStreamUriAsync(step, profilesResponse.Profiles[0].token);
    return streamUri.Uri;
}
/// <summary>
/// Retrieves Onvif video stream URIs from the device and stores them in the StreamUris list
/// </summary>
/// <param name="onvifPort">Port to connect on (normally HTTP - 80)</param>
private void GetStreamUris(Camera cam, int onvifPort, StreamType sType, TransportProtocol tProtocol, int StreamIndex)
{
    //StreamUris.Clear();
    MediaClient mc = OnvifServices.GetOnvifMediaClient(ServiceUris[OnvifNamespace.MEDIA], DeviceTimeOffset, cam.User, cam.Password);
    Profile[] mediaProfiles = mc.GetProfiles();
    StreamSetup ss = new StreamSetup();
    Transport transport = new Transport() { Protocol = tProtocol };
    // FIX: removed the unused local `string uri = string.Empty;`.

    // Only store the Profile related to the StreamIndex from the XML file (1-based)
    MediaProfile = mediaProfiles[StreamIndex - 1];

    // Get stream URI for the requested transport/protocol and insert the User/Password if present
    ss.Stream = sType;
    ss.Transport = transport;
    Uri mu = new Uri(mc.GetStreamUri(ss, MediaProfile.token).Uri);
    StreamUri = RTSP_Viewer.Classes.Utilities.InsertUriCredentials(mu, cam.User, cam.Password);

    // Get multicast uri (if available) along with requested protocol/stream type
    MulticastUri = GetMulticastUri(cam, mc, MediaProfile); // Not being used currently
    MulticastUri = RTSP_Viewer.Classes.Utilities.InsertUriCredentials(MulticastUri, cam.User, cam.Password);

    // A PTZ may not have a PTZ configuration for a particular media profile
    // Disable PTZ access in that case
    IsPtzEnabled = IsPtz;
    if (MediaProfile.PTZConfiguration == null && IsPtz)
    {
        // FIX: the format string reused {0} for the stream profile and never
        // consumed the third argument (StreamUri); it is now placeholder {2}.
        log.Warn(string.Format("Camera #{0} [{1}] Disabling PTZ control based on the PTZConfiguration being null for stream profile {2}", cam.Number, cam.IP, StreamUri));
        IsPtzEnabled = false;
    }
}