/// <summary>
/// Closes audio/video.
/// </summary>
public static void Close_AV(ConversationWindow conversationWindow)
{
    try
    {
        AVModality avModality = (AVModality)conversationWindow.Conversation.SelfParticipant.Modalities[ModalityTypes.AudioVideo];
        if (avModality != null)
        {
            AudioChannel audioChannel = avModality.AudioChannel;
            if (audioChannel != null && audioChannel.CanInvoke(ChannelAction.Stop))
            {
                audioChannel.BeginStop(null, null);
            }

            VideoChannel videoChannel = avModality.VideoChannel;
            if (videoChannel != null && videoChannel.CanInvoke(ChannelAction.Stop))
            {
                videoChannel.BeginStop(null, null);
            }
        }
    }
    catch (Exception ex)
    {
        LogManage.WriteLog(typeof(LyncHelper), ex);
    }
}
internal override void HandleAddedInternal()
{
    //saves the AVModality, AudioChannel and VideoChannel, just for the sake of readability
    _avModality = (AVModality)Conversation.Modalities[ModalityTypes.AudioVideo];
    _audioChannel = _avModality.AudioChannel;
    _videoChannel = _avModality.VideoChannel;

    //subscribes to modality action availability events (all audio buttons except DTMF)
    _avModality.ActionAvailabilityChanged += OnAvModalityActionAvailabilityChanged;

    //subscribes to the modality state changes so that the status bar gets updated with the new state
    _avModality.ModalityStateChanged += OnAvModalityModalityStateChanged;

    //subscribes to the audio channel action availability events (DTMF only)
    _audioChannel.ActionAvailabilityChanged += OnAudioChannelActionAvailabilityChanged;

    //subscribes to the audio channel state changes so that the status bar gets updated with the new state
    _audioChannel.StateChanged += OnAudioChannelStateChanged;

    //subscribes to the video channel action availability events
    _videoChannel.ActionAvailabilityChanged += OnVideoChannelActionAvailabilityChanged;

    //subscribes to the video channel state changes so that the video feed can be presented
    _videoChannel.StateChanged += OnVideoChannelStateChanged;

    //foreach (var item in Conversation.Participants)
    //{
    //    InitParticipant(item);
    //}
}
public void UpdateVideoWindow(VideoChannel videoChannel, VideoWindow videoWindow, bool isCapture)
{
    var item = ParticipantItems.Where(p => p.IsMatch(videoChannel)).SingleOrDefault();
    if (item != null)
    {
        if (isCapture)
        {
            if (item.CaptureVideoWindow == null)
            {
                item.CaptureVideoWindow = videoWindow;
                item.CaptureVideoWindowOriginHeight = videoWindow.Height;
                item.CaptureVideoWindowOriginWidth = videoWindow.Width;
            }
        }
        else
        {
            if (item.RenderVideoWindow == null)
            {
                item.RenderVideoWindow = videoWindow;
                item.RenderVideoWindowOriginHeight = videoWindow.Height;
                item.RenderVideoWindowOriginWidth = videoWindow.Width;
            }
        }
    }
}
public ClassicPlayer(
    GenieNanoCamera genieNanoCamera,
    MockCamera mockCamera,
    MotorController motorController,
    IWriteable<ClassicPlayerSettings> settings,
    IWriteable<ClassicImageProcessorSettings> imgProcSettings,
    ILogger<ClassicPlayer> logger,
    ILoggerFactory loggerFactory)
{
    _settings = settings;
    _logger = logger;
    _imgProcessor = new ClassicImageProcessor(imgProcSettings);
    _motorController = motorController;

    var channels = new VideoChannel[]
    {
        new VideoChannel("Image", "Raw Image"),
        new VideoChannel("EdgeDetection", "Image after EdgeDetection"),
    };
    _videoInterface = new VideoInterface(channels, loggerFactory, nameof(ClassicPlayer));

    if (genieNanoCamera.PeripheralState == PeripheralState.Ready)
    {
        _camera = genieNanoCamera;
    }
    else
    {
        _camera = mockCamera;
    }
}
public void Dispose()
{
    videoChannel.StateChanged -= videoChannel_StateChanged;
    this.client = null;
    this.conversation = null;
    this.videoChannel = null;
}
private void SetParticipantVideoWindow(VideoChannel channel, VideoWindow window)
{
    var model = ParticipantCollection.GetItem(channel);
    if (model != null && model != LocalParticipantVideoModel)
    {
        model.View = window;
        RemoteConnectParticipantVideoModel = model;
    }
}
internal ParticipantItem GetItem(VideoChannel channel)
{
    return ParticipantItems.Where(p => p.IsMatch(channel)).SingleOrDefault();
}
protected override void OnModelCreating(DbModelBuilder modelBuilder)
{
    base.OnModelCreating(modelBuilder);
    modelBuilder.Configurations.Add(ServerMetadata.GetConfiguration());
    modelBuilder.Configurations.Add(ServerIpV4.GetIpV4Configuration());
    modelBuilder.Configurations.Add(ServerIpV6.GetIpV6Configuration());
    modelBuilder.Configurations.Add(Server.GetConfiguration());
    modelBuilder.Configurations.Add(VideoChannel.GetConfiguration());
    modelBuilder.Configurations.Add(RecognitionLog.GetConfiguration());
}
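The GetConfiguration() calls above suggest each entity exposes a static factory for its Entity Framework 6 mapping. A minimal hypothetical sketch for VideoChannel, assuming EF6's EntityTypeConfiguration&lt;T&gt; and illustrative property names (Id, ChannelName) that are not shown in the source:

// Hypothetical sketch only: assumes EF6's EntityTypeConfiguration<T>
// (System.Data.Entity.ModelConfiguration); the mapped properties are illustrative.
public static EntityTypeConfiguration<VideoChannel> GetConfiguration()
{
    var config = new EntityTypeConfiguration<VideoChannel>();
    config.HasKey(c => c.Id);                         // assumed primary key
    config.Property(c => c.ChannelName).IsRequired(); // assumed required column
    return config;
}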
/// <summary>
/// Checks the parameters related to livevideo and stopvideo.
/// </summary>
/// <param name="deviceId"></param>
/// <param name="channelId"></param>
/// <param name="rs"></param>
/// <param name="mediaServer"></param>
/// <param name="videoChannel"></param>
/// <param name="sipChannel"></param>
/// <param name="sipDevice"></param>
/// <returns></returns>
private static bool CheckIt(string deviceId, string channelId, out ResponseStruct rs,
    out ServerInstance mediaServer, out VideoChannel videoChannel, out SipChannel sipChannel,
    out SipDevice sipDevice)
{
    mediaServer = null;
    videoChannel = null;
    sipChannel = null;
    sipDevice = null;
    rs = new ResponseStruct()
    {
        Code = ErrorNumber.None,
        Message = ErrorMessage.ErrorDic![ErrorNumber.None],
/// <summary>
/// Initiates the window for the specified conversation.
/// </summary>
public ConversationWindow(Conversation conversation, LyncClient client)
{
    InitializeComponent();

    //saves the client reference
    this.client = client;

    //saves the conversation reference
    this.conversation = conversation;

    //saves the AVModality, AudioChannel and VideoChannel, just for the sake of readability
    avModality = (AVModality)conversation.Modalities[ModalityTypes.AudioVideo];
    audioChannel = avModality.AudioChannel;
    videoChannel = avModality.VideoChannel;

    //show the current conversation and modality states in the UI
    toolStripStatusLabelConvesation.Text = conversation.State.ToString();
    toolStripStatusLabelModality.Text = avModality.State.ToString();

    //enables and disables the checkbox associated with the ConversationProperty.AutoTerminateOnIdle property
    //based on whether the Lync client is running in InSuppressedMode
    //see more details in the checkBoxAutoTerminateOnIdle_CheckStateChanged() method
    checkBoxAutoTerminateOnIdle.Enabled = client.InSuppressedMode;

    //registers for conversation state updates
    conversation.StateChanged += conversation_StateChanged;

    //registers for participant events
    conversation.ParticipantAdded += conversation_ParticipantAdded;
    conversation.ParticipantRemoved += conversation_ParticipantRemoved;

    //subscribes to the conversation action availability events (for the ability to add/remove participants)
    conversation.ActionAvailabilityChanged += conversation_ActionAvailabilityChanged;

    //subscribes to modality action availability events (all audio buttons except DTMF)
    avModality.ActionAvailabilityChanged += avModality_ActionAvailabilityChanged;

    //subscribes to the modality state changes so that the status bar gets updated with the new state
    avModality.ModalityStateChanged += avModality_ModalityStateChanged;

    //subscribes to the audio channel action availability events (DTMF only)
    audioChannel.ActionAvailabilityChanged += audioChannel_ActionAvailabilityChanged;

    //subscribes to the audio channel state changes so that the status bar gets updated with the new state
    audioChannel.StateChanged += audioChannel_StateChanged;

    //subscribes to the video channel action availability events
    videoChannel.ActionAvailabilityChanged += videoChannel_ActionAvailabilityChanged;

    //subscribes to the video channel state changes so that the video feed can be presented
    videoChannel.StateChanged += videoChannel_StateChanged;
}
private static void videoChannelEndStart(IAsyncResult result)
{
    try
    {
        VideoChannel channel = (VideoChannel)result.AsyncState;
        channel.EndStart(result);
        RaiseVideoAvailable(channel.CaptureVideoWindow, VideoDirection.Outgoing);
    }
    catch (Exception ex)
    {
        Console.WriteLine(ex);
    }
}
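For context, a minimal sketch of how a callback like videoChannelEndStart is typically handed to BeginStart, passing the channel as the async state it reads back; the StartOutgoingVideo wrapper name is illustrative, and the CanInvoke guard follows the pattern used in the other snippets here.

// Hypothetical caller: starts outgoing video and lets videoChannelEndStart
// complete the operation; the channel itself is passed as the async state.
private static void StartOutgoingVideo(VideoChannel videoChannel)
{
    if (videoChannel != null && videoChannel.CanInvoke(ChannelAction.Start))
    {
        // EndStart is called inside videoChannelEndStart via result.AsyncState
        videoChannel.BeginStart(videoChannelEndStart, videoChannel);
    }
}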
internal ConversationInfo(Conversation conversation, LyncClient client, IntPtr parentHandle, Rectangle bounds,
    AsyncCallback conversationEndedCallback, AsyncCallback videoCallStartedCallback)
{
    this.conversation = conversation;
    this.client = client;
    this.parentHandle = parentHandle;
    this.bounds = bounds;
    this.conversationEndedCallback = conversationEndedCallback;
    this.videoCallStartedCallback = videoCallStartedCallback;
    this.videoChannel = ((AVModality)conversation.Modalities[ModalityTypes.AudioVideo]).VideoChannel;

    //subscribes to the video channel state changes so that the video feed can be presented
    videoChannel.StateChanged += videoChannel_StateChanged;
}
/// <summary>
/// Waits for the video, audio, chat audio and control channels to open, then wraps them in channel objects.
/// </summary>
/// <returns></returns>
private async Task WaitForChannelsAsync()
{
    Task<ChannelOpen> video = WaitForChannelOpenAsync(NanoChannel.Video);
    Task<ChannelOpen> audio = WaitForChannelOpenAsync(NanoChannel.Audio);
    Task<ChannelOpen> chatAudio = WaitForChannelOpenAsync(NanoChannel.ChatAudio);
    Task<ChannelOpen> control = WaitForChannelOpenAsync(NanoChannel.Control);

    await Task.WhenAll(video, audio, chatAudio, control);

    Video = new VideoChannel(_transport, video.Result, FireVideoFrameAvailable);
    Audio = new AudioChannel(_transport, audio.Result, FireAudioFrameAvailable);
    ChatAudio = new ChatAudioChannel(_transport, chatAudio.Result);
    Control = new ControlChannel(_transport, control.Result);
}
protected virtual void initializeProject(string dataFolderPrefix)
{
    m_dataFolderPrefix = dataFolderPrefix;

    m_Project = new Project();
    m_Project.PrettyFormat = m_XukPrettyFormat;

    Presentation presentation = m_Project.AddNewPresentation(new Uri(m_outDirectory), dataFolderPrefix);

    PCMFormatInfo pcmFormat = presentation.MediaDataManager.DefaultPCMFormat; //.Copy();
    pcmFormat.Data.SampleRate = (ushort)m_audioProjectSampleRate;
    pcmFormat.Data.NumberOfChannels = m_audioStereo ? (ushort)2 : (ushort)1;
    presentation.MediaDataManager.DefaultPCMFormat = pcmFormat;

    //presentation.MediaDataFactory.DefaultAudioMediaDataType = typeof(WavAudioMediaData);
    //presentation.MediaDataManager.EnforceSinglePCMFormat = true;

    TextChannel textChannel = presentation.ChannelFactory.CreateTextChannel();
    textChannel.Name = "The Text Channel";
    DebugFix.Assert(textChannel == presentation.ChannelsManager.GetOrCreateTextChannel());

    AudioChannel audioChannel = presentation.ChannelFactory.CreateAudioChannel();
    audioChannel.Name = "The Audio Channel";
    DebugFix.Assert(audioChannel == presentation.ChannelsManager.GetOrCreateAudioChannel());

    ImageChannel imageChannel = presentation.ChannelFactory.CreateImageChannel();
    imageChannel.Name = "The Image Channel";
    DebugFix.Assert(imageChannel == presentation.ChannelsManager.GetOrCreateImageChannel());

    VideoChannel videoChannel = presentation.ChannelFactory.CreateVideoChannel();
    videoChannel.Name = "The Video Channel";
    DebugFix.Assert(videoChannel == presentation.ChannelsManager.GetOrCreateVideoChannel());

    /*string dataPath = presentation.DataProviderManager.DataFileDirectoryFullPath;
    if (Directory.Exists(dataPath))
    {
        Directory.Delete(dataPath, true);
    }*/
}
/// <summary>
/// Starts video.
/// </summary>
/// <param name="conversationWindow"></param>
public static void StartVideo(ConversationWindow conversationWindow)
{
    try
    {
        ThreadPool.QueueUserWorkItem((o) =>
        {
            AVModality avModality = (AVModality)conversationWindow.Conversation.SelfParticipant.Modalities[ModalityTypes.AudioVideo];
            if (avModality != null)
            {
                VideoChannel videoChannel = avModality.VideoChannel;
                //object obV = videoChannel.InnerObject;
                if (videoChannel != null)
                {
                    if (videoChannel.CanInvoke(ChannelAction.Start))
                    {
                        videoChannel.BeginStart(null, null);
                    }
                    TimerJob.StartRun(new Action(() =>
                    {
                        if (videoChannel.State != ChannelState.Connecting)
                        {
                            if (videoChannel.CanInvoke(ChannelAction.Start))
                            {
                                videoChannel.BeginStart(null, null);
                            }
                        }
                    }), 1500);
                    LyncHelper.ExitFullScreen();
                    LyncHelper.FullScreen();
                }
            }
        });
    }
    catch (Exception ex)
    {
        LogManage.WriteLog(typeof(LyncHelper), ex);
    }
}
/// <summary>
/// Whiteboard / PPT sharing (audio-video and application sharing).
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
protected static void ConversationCard_ActionAvailabilityChanged(object sender, ModalityActionAvailabilityChangedEventArgs e)
{
    try
    {
        if (MainConversationInCallBack != null)
        {
            MainConversationInCallBack(new Action<ConversationWindow>((conversationWindow) =>
            {
                if (conversationWindow != null)
                {
                    switch (e.Action)
                    {
                        case ModalityAction.Accept:
                            var modalities = conversationWindow.Conversation.Modalities;
                            //video channel
                            VideoChannel videoChannel = ((AVModality)modalities[ModalityTypes.AudioVideo]).VideoChannel;
                            //audio channel
                            AudioChannel audioChannel = ((AVModality)modalities[ModalityTypes.AudioVideo]).AudioChannel;
                            //content sharing
                            ContentSharingModality shareContent = (ContentSharingModality)modalities[ModalityTypes.ContentSharing];
                            //application sharing
                            ApplicationSharingModality applicationSharing = (ApplicationSharingModality)modalities[ModalityTypes.ApplicationSharing];

                            //video
                            if (videoChannel != null && videoChannel.State == ChannelState.Notified)
                            {
                                App.Current.Dispatcher.BeginInvoke(new Action(() =>
                                {
                                    try
                                    {
                                        //accept
                                        ((AVModality)modalities[ModalityTypes.AudioVideo]).Accept();
                                    }
                                    catch (Exception ex)
                                    {
                                        LogManage.WriteLog(typeof(LyncHelper), ex);
                                    }
                                }));
                            }
                            //audio
                            else if (audioChannel != null && audioChannel.State == ChannelState.Notified)
                            {
                                ((AVModality)modalities[ModalityTypes.AudioVideo]).Accept();
                            }
                            //shared PPT / whiteboard
                            else if (shareContent != null && shareContent.State == ModalityState.Notified)
                            {
                                shareContent.Accept();
                            }
                            //shared application
                            else if (applicationSharing != null && applicationSharing.State == ModalityState.Notified)
                            {
                                applicationSharing.Accept();
                            }
                            break;

                        default:
                            break;
                    }
                }
            }));
        }
    }
    catch (Exception ex)
    {
        LogManage.WriteLog(typeof(LyncHelper), ex);
    }
}
public static Task StartAsync(this VideoChannel videoChannel)
{
    return Task.Factory.FromAsync(videoChannel.BeginStart, videoChannel.EndStart, null);
}
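A short usage sketch for the extension method above; StartVideoIfPossibleAsync is an illustrative name, and the CanInvoke check mirrors the guards used elsewhere in these examples.

// Hypothetical caller: awaits the BeginStart/EndStart pair wrapped by StartAsync.
private static async Task StartVideoIfPossibleAsync(VideoChannel videoChannel)
{
    if (videoChannel != null && videoChannel.CanInvoke(ChannelAction.Start))
    {
        await videoChannel.StartAsync();
    }
}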
protected virtual void parseContentDocuments(List <string> spineOfContentDocuments, Dictionary <string, string> spineAttributes, List <Dictionary <string, string> > spineItemsAttributes, string coverImagePath, string navDocPath) { if (spineOfContentDocuments == null || spineOfContentDocuments.Count <= 0) { return; } Presentation spinePresentation = m_Project.Presentations.Get(0); spinePresentation.RootNode.GetOrCreateXmlProperty().SetQName("spine", ""); if (!string.IsNullOrEmpty(m_OPF_ContainerRelativePath)) { spinePresentation.RootNode.GetOrCreateXmlProperty().SetAttribute(OPF_ContainerRelativePath, "", m_OPF_ContainerRelativePath); } foreach (KeyValuePair <string, string> spineAttribute in spineAttributes) { spinePresentation.RootNode.GetOrCreateXmlProperty().SetAttribute(spineAttribute.Key, "", spineAttribute.Value); } if (m_PackagePrefixAttr != null) { spinePresentation.RootNode.GetOrCreateXmlProperty().SetAttribute("prefix", "", m_PackagePrefixAttr.Value); } // Audio files may be shared between chapters of a book! m_OriginalAudioFile_FileDataProviderMap.Clear(); Presentation spineItemPresentation = null; int index = -1; foreach (string docPath in spineOfContentDocuments) { index++; reportProgress(-1, String.Format(UrakawaSDK_daisy_Lang.ReadXMLDoc, docPath)); //DirectoryInfo opfParentDir = Directory.GetParent(m_Book_FilePath); //string dirPath = opfParentDir.ToString(); string fullDocPath = Path.Combine(Path.GetDirectoryName(m_Book_FilePath), docPath); fullDocPath = FileDataProvider.NormaliseFullFilePath(fullDocPath).Replace('/', '\\'); if (!File.Exists(fullDocPath)) { #if DEBUG Debugger.Break(); #endif //DEBUG continue; } addOPF_GlobalAssetPath(fullDocPath); TreeNode spineChild = spinePresentation.TreeNodeFactory.Create(); TextMedia txt = spinePresentation.MediaFactory.CreateTextMedia(); txt.Text = docPath; // Path.GetFileName(fullDocPath); spineChild.GetOrCreateChannelsProperty().SetMedia(spinePresentation.ChannelsManager.GetOrCreateTextChannel(), txt); spinePresentation.RootNode.AppendChild(spineChild); spineChild.GetOrCreateXmlProperty().SetQName("metadata", ""); foreach (KeyValuePair <string, string> spineItemAttribute in spineItemsAttributes[index]) { spineChild.GetOrCreateXmlProperty().SetAttribute(spineItemAttribute.Key, "", spineItemAttribute.Value); } string ext = Path.GetExtension(fullDocPath); if (docPath == coverImagePath) { DebugFix.Assert(ext.Equals(DataProviderFactory.IMAGE_SVG_EXTENSION, StringComparison.OrdinalIgnoreCase)); spineChild.GetOrCreateXmlProperty().SetAttribute("cover-image", "", "true"); } if (docPath == navDocPath) { DebugFix.Assert( ext.Equals(DataProviderFactory.XHTML_EXTENSION, StringComparison.OrdinalIgnoreCase) || ext.Equals(DataProviderFactory.HTML_EXTENSION, StringComparison.OrdinalIgnoreCase)); spineChild.GetOrCreateXmlProperty().SetAttribute("nav", "", "true"); } if ( !ext.Equals(DataProviderFactory.XHTML_EXTENSION, StringComparison.OrdinalIgnoreCase) && !ext.Equals(DataProviderFactory.HTML_EXTENSION, StringComparison.OrdinalIgnoreCase) && !ext.Equals(DataProviderFactory.DTBOOK_EXTENSION, StringComparison.OrdinalIgnoreCase) && !ext.Equals(DataProviderFactory.XML_EXTENSION, StringComparison.OrdinalIgnoreCase) ) { DebugFix.Assert(ext.Equals(DataProviderFactory.IMAGE_SVG_EXTENSION, StringComparison.OrdinalIgnoreCase)); bool notExistYet = true; foreach (ExternalFiles.ExternalFileData externalFileData in m_Project.Presentations.Get(0).ExternalFilesDataManager.ManagedObjects.ContentsAs_Enumerable) { if 
(!string.IsNullOrEmpty(externalFileData.OriginalRelativePath)) { bool notExist = docPath != externalFileData.OriginalRelativePath; notExistYet = notExistYet && notExist; if (!notExist) { break; } } } DebugFix.Assert(notExistYet); if (notExistYet) { ExternalFiles.ExternalFileData externalData = null; if (docPath == coverImagePath) { externalData = m_Project.Presentations.Get(0).ExternalFilesDataFactory.Create <ExternalFiles.CoverImageExternalFileData>(); } else { externalData = m_Project.Presentations.Get(0).ExternalFilesDataFactory.Create <ExternalFiles.GenericExternalFileData>(); } if (externalData != null) { externalData.InitializeWithData(fullDocPath, docPath, true, null); addOPF_GlobalAssetPath(fullDocPath); } } continue; } spineChild.GetOrCreateXmlProperty().SetAttribute("xuk", "", "true"); XmlDocument xmlDoc = XmlReaderWriterHelper.ParseXmlDocument(fullDocPath, true, true); if (RequestCancellation) { return; } m_PublicationUniqueIdentifier = null; m_PublicationUniqueIdentifierNode = null; Project spineItemProject = new Project(); spineItemProject.PrettyFormat = m_XukPrettyFormat; string dataFolderPrefix = FileDataProvider.EliminateForbiddenFileNameCharacters(docPath); spineItemPresentation = spineItemProject.AddNewPresentation(new Uri(m_outDirectory), //Path.GetFileName(fullDocPath) dataFolderPrefix ); PCMFormatInfo pcmFormat = spineItemPresentation.MediaDataManager.DefaultPCMFormat; //.Copy(); pcmFormat.Data.SampleRate = (ushort)m_audioProjectSampleRate; pcmFormat.Data.NumberOfChannels = m_audioStereo ? (ushort)2 : (ushort)1; spineItemPresentation.MediaDataManager.DefaultPCMFormat = pcmFormat; //presentation.MediaDataManager.EnforceSinglePCMFormat = true; //presentation.MediaDataFactory.DefaultAudioMediaDataType = typeof(WavAudioMediaData); TextChannel textChannel = spineItemPresentation.ChannelFactory.CreateTextChannel(); textChannel.Name = "The Text Channel"; DebugFix.Assert(textChannel == spineItemPresentation.ChannelsManager.GetOrCreateTextChannel()); AudioChannel audioChannel = spineItemPresentation.ChannelFactory.CreateAudioChannel(); audioChannel.Name = "The Audio Channel"; DebugFix.Assert(audioChannel == spineItemPresentation.ChannelsManager.GetOrCreateAudioChannel()); ImageChannel imageChannel = spineItemPresentation.ChannelFactory.CreateImageChannel(); imageChannel.Name = "The Image Channel"; DebugFix.Assert(imageChannel == spineItemPresentation.ChannelsManager.GetOrCreateImageChannel()); VideoChannel videoChannel = spineItemPresentation.ChannelFactory.CreateVideoChannel(); videoChannel.Name = "The Video Channel"; DebugFix.Assert(videoChannel == spineItemPresentation.ChannelsManager.GetOrCreateVideoChannel()); /*string dataPath = presentation.DataProviderManager.DataFileDirectoryFullPath; * if (Directory.Exists(dataPath)) * { * Directory.Delete(dataPath, true); * }*/ //AudioLibPCMFormat previousPcm = null; if (m_AudioConversionSession != null) { //previousPcm = m_AudioConversionSession.FirstDiscoveredPCMFormat; RemoveSubCancellable(m_AudioConversionSession); m_AudioConversionSession = null; } m_AudioConversionSession = new AudioFormatConvertorSession( //AudioFormatConvertorSession.TEMP_AUDIO_DIRECTORY, spineItemPresentation.DataProviderManager.DataFileDirectoryFullPath, spineItemPresentation.MediaDataManager.DefaultPCMFormat, m_autoDetectPcmFormat, m_SkipACM); //if (previousPcm != null) //{ // m_AudioConversionSession.FirstDiscoveredPCMFormat = previousPcm; //} AddSubCancellable(m_AudioConversionSession); TreenodesWithoutManagedAudioMediaData = new List <TreeNode>(); 
//foreach (var key in m_OriginalAudioFile_FileDataProviderMap.Keys) //{ // FileDataProvider dataProv = (FileDataProvider)presentation.DataProviderFactory.Create(DataProviderFactory.AUDIO_WAV_MIME_TYPE); //VERSUS// // FileDataProvider dataProv = new FileDataProvider(); // dataProv.MimeType = DataProviderFactory.AUDIO_WAV_MIME_TYPE; //} //m_Project.Presentations.Get(0).ExternalFilesDataManager.ManagedObjects.ContentsAs_Enumerable if (RequestCancellation) { return; } if (parseContentDocParts(fullDocPath, spineItemProject, xmlDoc, docPath, DocumentMarkupType.NA)) { return; // user cancel } //if (RequestCancellation) return; //reportProgress(-1, String.Format(UrakawaSDK_daisy_Lang.ParsingMetadata, docPath)); //parseMetadata(fullDocPath, project, xmlDoc); //if (RequestCancellation) return; //ParseHeadLinks(fullDocPath, project, xmlDoc); //reportProgress(-1, String.Format(UrakawaSDK_daisy_Lang.ParsingContent, docPath)); //parseContentDocument(fullDocPath, project, xmlDoc, null, fullDocPath, null, DocumentMarkupType.NA); string title = GetTitle(spineItemPresentation); if (!string.IsNullOrEmpty(title)) { spineChild.GetOrCreateXmlProperty().SetAttribute("title", "", title); } if (false) // do not copy metadata from project to individual chapter { foreach (Metadata metadata in m_Project.Presentations.Get(0).Metadatas.ContentsAs_Enumerable) { Metadata md = spineItemPresentation.MetadataFactory.CreateMetadata(); md.NameContentAttribute = metadata.NameContentAttribute.Copy(); foreach (MetadataAttribute metadataAttribute in metadata.OtherAttributes.ContentsAs_Enumerable) { MetadataAttribute mdAttr = metadataAttribute.Copy(); md.OtherAttributes.Insert(md.OtherAttributes.Count, mdAttr); } spineItemPresentation.Metadatas.Insert(spineItemPresentation.Metadatas.Count, md); } } //XmlNodeList listOfBodies = xmlDoc.GetElementsByTagName("body"); //if (listOfBodies.Count == 0) //{ // listOfBodies = xmlDoc.GetElementsByTagName("book"); //} //XmlNode bodyElement = XmlDocumentHelper.GetFirstChildElementOrSelfWithName(xmlDoc, true, "body", null); //if (bodyElement == null) //{ // bodyElement = XmlDocumentHelper.GetFirstChildElementOrSelfWithName(xmlDoc, true, "book", null); //} //if (bodyElement == null) //{ // continue; //} // TODO: return hierarchical outline where each node points to a XUK relative path, + XukAble.Uid (TreeNode are not corrupted during XukAbleManager.RegenerateUids(); foreach (KeyValuePair <string, string> spineItemAttribute in spineItemsAttributes[index]) { if (spineItemAttribute.Key == "media-overlay") { string opfDirPath = Path.GetDirectoryName(m_Book_FilePath); string overlayPath = spineItemAttribute.Value; reportProgress(-1, String.Format(UrakawaSDK_daisy_Lang.ParsingMediaOverlay, overlayPath)); string fullOverlayPath = Path.Combine(opfDirPath, overlayPath); if (!File.Exists(fullOverlayPath)) { continue; } XmlDocument overlayXmlDoc = XmlReaderWriterHelper.ParseXmlDocument(fullOverlayPath, false, false); IEnumerable <XmlNode> audioElements = XmlDocumentHelper.GetChildrenElementsOrSelfWithName(overlayXmlDoc, true, "audio", null, false); if (audioElements == null) { continue; } foreach (XmlNode audioNode in audioElements) { XmlAttributeCollection attrs = audioNode.Attributes; if (attrs == null) { continue; } XmlNode attrSrc = attrs.GetNamedItem("src"); if (attrSrc == null) { continue; } //XmlNode attrBegin = attrs.GetNamedItem("clipBegin"); //XmlNode attrEnd = attrs.GetNamedItem("clipEnd"); //string overlayDirPath = Path.GetDirectoryName(fullOverlayPath); //string fullAudioPath = 
Path.Combine(overlayDirPath, attrSrc.Value); //if (!File.Exists(fullAudioPath)) //{ // continue; //} //if (RequestCancellation) return; //reportProgress(-1, String.Format(UrakawaSDK_daisy_Lang.DecodingAudio, Path.GetFileName(fullAudioPath))); TreeNode textTreeNode = null; XmlNodeList children = audioNode.ParentNode.ChildNodes; foreach (XmlNode child in children) { if (child == audioNode) { continue; } if (child.LocalName != "text") { continue; } XmlAttributeCollection textAttrs = child.Attributes; if (textAttrs == null) { continue; } XmlNode textSrc = textAttrs.GetNamedItem("src"); if (textSrc == null) { continue; } string urlDecoded = FileDataProvider.UriDecode(textSrc.Value); if (urlDecoded.IndexOf('#') > 0) { string[] srcParts = urlDecoded.Split('#'); if (srcParts.Length != 2) { continue; } string fullTextRefPath = Path.Combine(Path.GetDirectoryName(fullOverlayPath), srcParts[0]); fullTextRefPath = FileDataProvider.NormaliseFullFilePath(fullTextRefPath).Replace('/', '\\'); if (!fullTextRefPath.Equals(fullDocPath, StringComparison.OrdinalIgnoreCase)) { //#if DEBUG // Debugger.Break(); //#endif //DEBUG continue; } string txtId = srcParts[1]; textTreeNode = spineItemPresentation.RootNode.GetFirstDescendantWithXmlID(txtId); } else { string fullTextRefPath = Path.Combine(Path.GetDirectoryName(fullOverlayPath), urlDecoded); fullTextRefPath = FileDataProvider.NormaliseFullFilePath(fullTextRefPath).Replace('/', '\\'); if (!fullTextRefPath.Equals(fullDocPath, StringComparison.OrdinalIgnoreCase)) { //#if DEBUG // Debugger.Break(); //#endif //DEBUG continue; } textTreeNode = spineItemPresentation.RootNode; } } if (textTreeNode != null) { addAudio(textTreeNode, audioNode, false, fullOverlayPath); } } } } spinePresentation.MediaDataManager.DefaultPCMFormat = spineItemPresentation.MediaDataManager.DefaultPCMFormat; //copied! 
string xuk_FilePath = GetXukFilePath_SpineItem(m_outDirectory, docPath, title, index); string xukFileName = Path.GetFileName(xuk_FilePath); spineChild.GetOrCreateXmlProperty().SetAttribute("xukFileName", "", xukFileName); //deleteDataDirectoryIfEmpty(); string dataFolderPath = spineItemPresentation.DataProviderManager.DataFileDirectoryFullPath; spineItemPresentation.DataProviderManager.SetCustomDataFileDirectory(Path.GetFileNameWithoutExtension(xuk_FilePath)); string newDataFolderPath = spineItemPresentation.DataProviderManager.DataFileDirectoryFullPath; DebugFix.Assert(Directory.Exists(newDataFolderPath)); if (newDataFolderPath != dataFolderPath) { try { if (Directory.Exists(newDataFolderPath)) { FileDataProvider.TryDeleteDirectory(newDataFolderPath, false); } Directory.Move(dataFolderPath, newDataFolderPath); } catch (Exception ex) { #if DEBUG Debugger.Break(); #endif // DEBUG Console.WriteLine(ex.Message); Console.WriteLine(ex.StackTrace); spineItemPresentation.DataProviderManager.SetCustomDataFileDirectory(dataFolderPrefix); } } spineItemProject.PrettyFormat = m_XukPrettyFormat; SaveXukAction action = new SaveXukAction(spineItemProject, spineItemProject, new Uri(xuk_FilePath), true); action.ShortDescription = UrakawaSDK_daisy_Lang.SavingXUKFile; action.LongDescription = UrakawaSDK_daisy_Lang.SerializeDOMIntoXUKFile; action.Progress += new EventHandler <ProgressEventArgs>( delegate(object sender, ProgressEventArgs e) { double val = e.Current; double max = e.Total; int percent = -1; if (val != max) { percent = (int)((val / max) * 100); } reportProgress_Throttle(percent, val + "/" + max); //reportProgress(-1, action.LongDescription); if (RequestCancellation) { e.Cancel(); } } ); action.Finished += new EventHandler <FinishedEventArgs>( delegate(object sender, FinishedEventArgs e) { reportProgress(100, UrakawaSDK_daisy_Lang.XUKSaved); } ); action.Cancelled += new EventHandler <CancelledEventArgs>( delegate(object sender, CancelledEventArgs e) { reportProgress(0, UrakawaSDK_daisy_Lang.CancelledXUKSaving); } ); action.DoWork(); //if (first) //{ // Presentation presentation = m_Project.Presentations.Get(0); // XmlProperty xmlProp = presentation.PropertyFactory.CreateXmlProperty(); // xmlProp.LocalName = "book"; // presentation.PropertyFactory.DefaultXmlNamespaceUri = bodyElement.NamespaceURI; // xmlProp.NamespaceUri = presentation.PropertyFactory.DefaultXmlNamespaceUri; // TreeNode treeNode = presentation.TreeNodeFactory.Create(); // treeNode.AddProperty(xmlProp); // presentation.RootNode = treeNode; // first = false; //} //foreach (XmlNode childOfBody in bodyElement.ChildNodes) //{ // parseContentDocument(childOfBody, m_Project.Presentations.Get(0).RootNode, fullDocPath); //} } }
/// <summary>
/// Channel response (accept).
/// </summary>
/// <param name="conversationWindow">conversation window</param>
private static void Modality_Response_Accept(ConversationWindow conversationWindow)
{
    try
    {
        var modalities = conversationWindow.Conversation.Modalities;
        //video channel
        VideoChannel videoChannel = null;
        //audio channel
        AudioChannel audioChannel = null;
        AVModality avModality = ((AVModality)modalities[ModalityTypes.AudioVideo]);
        if (avModality != null)
        {
            //video channel
            videoChannel = avModality.VideoChannel;
            //audio channel
            audioChannel = avModality.AudioChannel;
        }
        //content sharing
        ContentSharingModality shareContent = (ContentSharingModality)modalities[ModalityTypes.ContentSharing];
        //application sharing
        ApplicationSharingModality applicationSharing = (ApplicationSharingModality)modalities[ModalityTypes.ApplicationSharing];

        //video
        if (videoChannel != null && videoChannel.State == ChannelState.Receive)
        {
            Application.Current.Dispatcher.BeginInvoke(new Action(() =>
            {
                try
                {
                    if (avModality.CanInvoke(ModalityAction.Accept))
                    {
                        //accept
                        avModality.Accept();
                        if (videoChannel.CanInvoke(ChannelAction.Start))
                        {
                            videoChannel.BeginStart(null, null);
                        }
                    }
                }
                catch (Exception ex)
                {
                    LogManage.WriteLog(typeof(LyncHelper), ex);
                }
            }));
        }
        //audio
        else if (audioChannel != null && audioChannel.State == ChannelState.Receive)
        {
            if (avModality.CanInvoke(ModalityAction.Accept))
            {
                //accept
                avModality.Accept();
            }
        }
        //shared PPT / whiteboard
        else if (shareContent != null && shareContent.State == ModalityState.Notified)
        {
            shareContent.Accept();
        }
        //shared application
        else if (applicationSharing != null && applicationSharing.State == ModalityState.Notified)
        {
            applicationSharing.Accept();
            if (AddContent_Type_CallBack != null)
            {
                AddContent_Type_CallBack(SharingType.Application);
            }
        }
    }
    catch (Exception ex)
    {
        LogManage.WriteLog(typeof(LyncHelper), ex);
    }
}
private void InitializeConversation()
{
    //saves the AVModality, AudioChannel and VideoChannel, just for the sake of readability
    avModality = (AVModality)conversation.Modalities[ModalityTypes.AudioVideo];
    audioChannel = avModality.AudioChannel;
    videoChannel = avModality.VideoChannel;

    // TODO: fix the UI
    //show the current conversation and modality states in the UI
    this.SetConversationStatus(conversation.State.ToString());
    this.SetModalityStatus(avModality.State.ToString());
    this.SetAudioStatus("Disconnected");
    this.SetVideoStatus("Disconnected");

    //registers for conversation state updates
    conversation.StateChanged += this.ConversationStateChanged;

    //subscribes to modality action availability events (all audio buttons except DTMF)
    avModality.ActionAvailabilityChanged += this.AvModalityActionAvailabilityChanged;

    //subscribes to the modality state changes so that the status bar gets updated with the new state
    avModality.ModalityStateChanged += this.AvModalityModalityStateChanged;

    //subscribes to the audio channel state changes so that the status bar gets updated with the new state
    audioChannel.StateChanged += this.AudioChannelStateChanged;

    //subscribes to the video channel state changes so that the video feed can be presented
    videoChannel.StateChanged += this.VideoChannelStateChanged;
}
public void SetVideoChannel(VideoChannel channel)
{
    _commandQueue.Enqueue(new ConfigCommand("video:video_channel", channel));
}
private static void Modality_Response_Connect(ConversationWindow conversationWindow)
{
    try
    {
        var modalities = conversationWindow.Conversation.Modalities;
        //video channel
        VideoChannel videoChannel = null;
        //audio channel
        AudioChannel audioChannel = null;
        AVModality avModality = ((AVModality)modalities[ModalityTypes.AudioVideo]);
        if (avModality != null)
        {
            //video channel
            videoChannel = avModality.VideoChannel;
            //audio channel
            audioChannel = avModality.AudioChannel;
        }

        //video
        if (videoChannel != null && videoChannel.State == ChannelState.Connecting)
        {
            Application.Current.Dispatcher.BeginInvoke(new Action(() =>
            {
                try
                {
                    if (avModality.CanInvoke(ModalityAction.Accept))
                    {
                        //accept
                        avModality.Accept();
                        if (videoChannel.CanInvoke(ChannelAction.Start))
                        {
                            videoChannel.BeginStart(null, null);
                        }
                    }
                }
                catch (Exception ex)
                {
                    LogManage.WriteLog(typeof(LyncHelper), ex);
                }
            }));
        }
        //audio
        else if (audioChannel != null && audioChannel.State == ChannelState.Connecting)
        {
            if (avModality.CanInvoke(ModalityAction.Accept))
            {
                //accept
                avModality.Accept();
            }
        }
    }
    catch (Exception ex)
    {
        LogManage.WriteLog(typeof(LyncHelper), ex);
    }
}
/// <summary>
/// Conversation added.
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
public static void ConversationManager_ConversationAdded(object sender, Microsoft.Lync.Model.Conversation.ConversationManagerEventArgs e)
{
    //check whether a conference has already been joined (if not, the conversation is rejected outright; this logic still needs review)
    bool canOpenTheConversation = true;
    if (HasConferenceCallBack != null)
    {
        HasConferenceCallBack(new Action<bool>((hasConference) =>
        {
            //if (hasConference)
            //{
            //    canOpenTheConversation = false;
            //}
        }));
    }

    //a worker thread must not touch the main thread's UI directly (it has to go through an async delegate)
    Application.Current.Dispatcher.BeginInvoke(new Action(() =>
    {
        try
        {
            #region Respond to the conversation

            //get the initiator's conversation modalities
            IDictionary<ModalityTypes, Modality> modalities = e.Conversation.Modalities;
            //notification type
            //NotifyType notifyType = NotifyType.InstantMessage;
            AVModality avModality = (AVModality)modalities[ModalityTypes.AudioVideo];
            //video channel
            VideoChannel videoChannel = ((AVModality)modalities[ModalityTypes.AudioVideo]).VideoChannel;
            //audio channel
            AudioChannel audioChannel = ((AVModality)modalities[ModalityTypes.AudioVideo]).AudioChannel;
            //IM modality
            var instantMessage = modalities[ModalityTypes.InstantMessage];

            //check how the current video conversation should be handled
            if (videoChannel.State == ChannelState.Notified)
            {
                if (canOpenTheConversation)
                {
                    ((AVModality)modalities[ModalityTypes.AudioVideo]).Accept();
                    //notifyType = NotifyType.Video;
                    TimerJob.StartRun(new Action(() =>
                    {
                        //check whether the action can be invoked
                        if (videoChannel.CanInvoke(ChannelAction.Start))
                        {
                            //accept the request and start the camera
                            videoChannel.BeginStart(null, null);
                            //stop the timer
                            timerAcept.Stop();
                        }
                    }), 500, out timerAcept);
                }
                else
                {
                    //reject the audio/video session
                    ((AVModality)modalities[ModalityTypes.AudioVideo]).Reject(ModalityDisconnectReason.NotAcceptableHere);
                    return;
                }
            }
            //check how the current audio conversation should be handled
            else if (audioChannel.State == ChannelState.Notified)
            {
                if (canOpenTheConversation)
                {
                    //accept the audio session
                    ((AVModality)modalities[ModalityTypes.AudioVideo]).Accept();
                    //notifyType = NotifyType.Autio;
                }
                else
                {
                    //reject the audio session
                    ((AVModality)modalities[ModalityTypes.AudioVideo]).Reject(ModalityDisconnectReason.NotAcceptableHere);
                }
            }

            //IM text conversation: check whether it is in the Notified state (to avoid conflicts with audio/video calls)
            if (instantMessage.State == ModalityState.Notified || instantMessage.State == ModalityState.Connected)
            {
                if (canOpenTheConversation)
                {
                    ((InstantMessageModality)modalities[ModalityTypes.InstantMessage]).Accept();

                    #region old solution
                    //if (e.Conversation.Participants.Count <= 2)
                    //{
                    //    //simulate a mouse click
                    //    Win32API.SetCursorPos(System.Windows.Forms.Screen.PrimaryScreen.WorkingArea.Width - 100, System.Windows.Forms.Screen.PrimaryScreen.WorkingArea.Height - 160);
                    //    Win32API.mouse_event(Win32API.MouseEventFlag.LeftDown, 0, 0, 0, UIntPtr.Zero);
                    //    Win32API.mouse_event(Win32API.MouseEventFlag.LeftUp, 0, 0, 0, UIntPtr.Zero);
                    //}
                    #endregion
                }
                else
                {
                    ((InstantMessageModality)modalities[ModalityTypes.InstantMessage]).Reject(ModalityDisconnectReason.NotAcceptableHere);
                }
            }

            #endregion

            #region Set window position and size

            //get the conversation window
            ConversationWindow window = null;
            //get the conversation window
            window = ConversationCodeEnterEntity.lyncAutomation.GetConversationWindow(e.Conversation);
            window.NeedsSizeChange -= window_NeedsSizeChange;
            window.NeedsSizeChange += window_NeedsSizeChange;
            if (MainConversationOutCallBack != null)
            {
                MainConversationOutCallBack(window);
            }
            //wire up the conversation window's events
            SettingConversationWindowEvent(window);

            #endregion

            #region Sharing setup

            window.StateChanged -= MainConversation_StateChanged;
            //state changed
            window.StateChanged += MainConversation_StateChanged;
            //prepare for sharing
            TimerJob.StartRun(new Action(() =>
            {
                //connect sharing
                var modaly = ((ContentSharingModality)e.Conversation.SelfParticipant.Conversation.Modalities[ModalityTypes.ContentSharing]);
                if (modaly.CanInvoke(ModalityAction.Accept))
                {
                    modaly.Accept();
                }
                if (modaly.CanInvoke(ModalityAction.Connect))
                {
                    modaly.BeginConnect(null, null);
                }
            }));

            #endregion

            #region Fire the conversation-added-completed callback

            if (ConversationAddCompleateCallBack != null)
            {
                ConversationAddCompleateCallBack();
            }

            #endregion
        }
        catch (Exception ex)
        {
            LogManage.WriteLog(typeof(LyncHelper), ex);
        }
    }));
}
public bool IsMatch(VideoChannel channel)
{
    return channel.GetHashCode() == VideoChannel.GetHashCode();
}
/// <summary>
/// Switches the video channel. The ARDrone has two cameras, a horizontal and a vertical one.
/// Calling this method with the appropriate parameter results in images from either one of the cameras
/// or a composite image from both cameras at the same time.
/// </summary>
/// <param name="videoChannel">An enumerated value indicating what kind of image to produce.</param>
public void SwitchVideoChannel(VideoChannel videoChannel)
{
    CommandCenter.SwitchVideoChannel(videoChannel);
}
private void SelectedVideoChannelChanged(object sender, VideoChannel channel)
{
    _SynchronizationContext.Post((o) =>
    {
        ActiveVideoChannel = channel;
    }, null);
}
/// <summary>
/// Called when a device catalog is received.
/// </summary>
/// <param name="sipChannel"></param>
public static void OnCatalogReceived(SipChannel sipChannel)
{
    Logger.Debug(
        $"[{Common.LoggerHead}]->Received a device catalog notification->{sipChannel.RemoteEndPoint.Address.MapToIPv4().ToString()}-{sipChannel.ParentId}:{sipChannel.DeviceId}");

    if (sipChannel.SipChannelType.Equals(SipChannelType.VideoChannel) &&
        sipChannel.SipChannelStatus != DevStatus.OFF) //only video devices in a usable state are written to the database
    {
        var obj = ORMHelper.Db.Select<VideoChannel>().Where(x =>
            x.ChannelId.Equals(sipChannel.DeviceId) &&
            x.DeviceId.Equals(sipChannel.ParentId) &&
            x.DeviceStreamType.Equals(DeviceStreamType.GB28181)).First();
        if (obj != null)
        {
            return;
        }

        var videoChannel = new VideoChannel();
        videoChannel.Enabled = false;
        videoChannel.AutoRecord = false;
        videoChannel.AutoVideo = true;
        videoChannel.ChannelId = sipChannel.DeviceId;
        if (sipChannel.SipChannelDesc != null && !string.IsNullOrEmpty(sipChannel.SipChannelDesc.Name))
        {
            videoChannel.ChannelName = sipChannel.SipChannelDesc.Name.Trim();
        }
        else
        {
            videoChannel.ChannelName = sipChannel.DeviceId;
        }
        videoChannel.CreateTime = DateTime.Now;
        videoChannel.App = "rtp";
        videoChannel.Vhost = "__defaultVhost__";
        videoChannel.DepartmentId = "";
        videoChannel.DepartmentName = "";
        videoChannel.DeviceId = sipChannel.ParentId;
        videoChannel.HasPtz = false;
        videoChannel.UpdateTime = DateTime.Now;
        videoChannel.DeviceNetworkType = DeviceNetworkType.Fixed;
        videoChannel.DeviceStreamType = DeviceStreamType.GB28181;
        videoChannel.DefaultRtpPort = false;
        videoChannel.IpV4Address = sipChannel.RemoteEndPoint.Address.MapToIPv4().ToString();
        videoChannel.IpV6Address = sipChannel.RemoteEndPoint.Address.MapToIPv6().ToString();
        videoChannel.MediaServerId = $"unknown_server_{DateTime.Now.Ticks}";
        videoChannel.NoPlayerBreak = false;
        videoChannel.PDepartmentId = "";
        videoChannel.PDepartmentName = "";
        videoChannel.RtpWithTcp = false;
        videoChannel.VideoSrcUrl = null;
        videoChannel.MethodByGetStream = MethodByGetStream.None;
        videoChannel.MainId = sipChannel.Stream;
        videoChannel.VideoDeviceType = VideoDeviceType.UNKNOW;

        try
        {
            var ret = ORMHelper.Db.Insert(videoChannel).ExecuteAffrows();
            if (ret > 0)
            {
                Logger.Debug(
                    $"[{Common.LoggerHead}]->Wrote a new device catalog entry to the database; it must be activated before use->{sipChannel.RemoteEndPoint.Address.MapToIPv4().ToString()}-{sipChannel.ParentId}:{sipChannel.DeviceId}");
            }
        }
        catch (Exception ex)
        {
            Logger.Error($"[{Common.LoggerHead}]->Database write exception->{ex.Message}\r\n{ex.StackTrace}");
        }
    }
}
internal static void SwitchVideoChannel(VideoChannel videoChannel)
{
    EnqueueCommand(ATCommands.SwitchVideoChannel, (int)videoChannel);
}