private Bitmap bitmap; // cached bitmap of the waveform

/// <summary>
/// Create a waveform with no data to display yet.
/// </summary>
public WaveformCanvas()
{
    InitializeComponent();
    DoubleBuffered = true;
    this.audio = null;
    this.bitmap = null;
}
// Access a channel which we know exists and is the only channel by this name.
//sdk2
//internal Channel GetSingleChannelByName(string name)
//{
//    List<Channel> channels = getChannelsManager().getListOfChannels(name);
//    if (channels.Count == 0) throw new Exception(String.Format("No channel named \"{0}\"", name));
//    if (channels.Count > 1) throw new Exception(String.Format("Expected 1 channel for {0}, got {1}.",
//        name, channels.Count));
//    return channels[0];
//}

// Create a media object from a sound file.
private ManagedAudioMedia ImportAudioFromFile(string path)
{
    string dataProviderDirectory = DataProviderManager.DataFileDirectoryFullPath;

    // EnforceSinglePCMFormat is always true
    //if (!MediaDataManager.EnforceSinglePCMFormat)
    //{
    //    Stream input = File.OpenRead(path);
    //    PCMDataInfo info = PCMDataInfo.parseRiffWaveHeader(input);
    //    input.Close();
    //    DataManager.setDefaultBitDepth(info.getBitDepth());
    //    DataManager.setDefaultNumberOfChannels(info.getNumberOfChannels());
    //    DataManager.setDefaultSampleRate(info.getSampleRate());
    //    DataManager.setEnforceSinglePCMFormat(true);
    //}

    AudioMediaData data = MediaDataFactory.CreateAudioMediaData();

    // If the file already lives inside the project's data directory, move it in place;
    // otherwise parse its RIFF header and copy the PCM data in.
    if (Path.GetFullPath(path).StartsWith(Path.GetFullPath(dataProviderDirectory)))
    {
        FileDataProvider dataProv = (FileDataProvider)DataProviderFactory.Create(
            urakawa.data.DataProviderFactory.AUDIO_WAV_MIME_TYPE);
        dataProv.InitByMovingExistingFile(path);
        data.AppendPcmData(dataProv);
    }
    else
    {
        data.AppendPcmData_RiffHeader(path);
    }

    ManagedAudioMedia media = MediaFactory.CreateManagedAudioMedia();
    media.AudioMediaData = data;
    return media;
}
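// Usage sketch (an assumption, not part of the original source): import a WAV file and attach
// the resulting media to a node's audio channel. "node", "audioChannel" and the file path are
// hypothetical; GetChannelsProperty()/SetMedia() mirror the other snippets in this section.
private void ImportAndAttach(TreeNode node, Channel audioChannel)
{
    ManagedAudioMedia media = ImportAudioFromFile(@"C:\audio\phrase001.wav"); // hypothetical path
    ChannelsProperty chProp = node.GetChannelsProperty();
    chProp.SetMedia(audioChannel, media);
}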
/// <summary>
/// Import an audio file into the project by creating a new node with audio from the file.
/// The node is created but not actually added; instead, the command that adds it is returned.
/// </summary>
/// <param name="path">Full path to the audio file to import.</param>
/// <param name="contextNode">The context node before which to import the audio file.
/// If null, add at the end.</param>
/// <returns>The command for adding the node.</returns>
public Commands.AddTreeNode ImportAudioFileCommand(string path, TreeNode contextNode)
{
    // Read the RIFF header to configure the presentation's default PCM format.
    Stream input = File.OpenRead(path);
    PCMDataInfo info = PCMDataInfo.parseRiffWaveHeader(input);
    input.Close();
    getPresentation().getMediaDataManager().getDefaultPCMFormat().setBitDepth(info.getBitDepth());
    getPresentation().getMediaDataManager().getDefaultPCMFormat().setNumberOfChannels(info.getNumberOfChannels());
    getPresentation().getMediaDataManager().getDefaultPCMFormat().setSampleRate(info.getSampleRate());

    AudioMediaData data = (AudioMediaData)
        getPresentation().getMediaDataFactory().createMediaData(typeof(AudioMediaData));
    data.appendAudioDataFromRiffWave(path);
    ManagedAudioMedia media = (ManagedAudioMedia)getPresentation().getMediaFactory().createAudioMedia();
    media.setMediaData(data);

    Channel audio = GetSingleChannelByName(AUDIO_CHANNEL_NAME);
    ChannelsProperty prop = getPresentation().getPropertyFactory().createChannelsProperty();
    prop.setMedia(audio, media);
    TreeNode node = getPresentation().getTreeNodeFactory().createNode();
    node.setProperty(prop);

    TreeNode root = getPresentation().getRootNode();
    Commands.AddTreeNode command = new Commands.AddTreeNode(node, root,
        contextNode == null ? root.getChildCount() : contextNode.getParent().indexOf(contextNode));
    return command;
}
/// <summary>
/// Merges <c>this</c> with a given other <see cref="AudioMediaData"/>,
/// appending the audio data of the other <see cref="AudioMediaData"/> to <c>this</c>,
/// leaving the other <see cref="AudioMediaData"/> without audio data.
/// </summary>
/// <param name="other">The given other AudioMediaData</param>
/// <exception cref="exception.MethodParameterIsNullException">
/// Thrown when <paramref name="other"/> is <c>null</c>
/// </exception>
/// <exception cref="exception.InvalidDataFormatException">
/// Thrown when the PCM format of <c>this</c> is not compatible with that of <paramref name="other"/>
/// </exception>
public override void MergeWith(AudioMediaData other)
{
    if (other == this)
    {
        throw new exception.OperationNotValidException("Cannot merge an AudioMediaData with itself");
    }
    if (other is WavAudioMediaData)
    {
        if (!PCMFormat.Data.IsCompatibleWith(other.PCMFormat.Data))
        {
            throw new exception.InvalidDataFormatException(
                "Cannot merge this with a WavAudioMediaData with incompatible audio data");
        }
        Time thisInsertPoint = AudioDuration;
        WavAudioMediaData otherWav = (WavAudioMediaData)other;
        // Re-use the other media's WavClips instead of copying PCM streams, then empty the other media.
        mWavClips.AddRange(otherWav.mWavClips);
        Time dur = otherWav.AudioDuration;
        NotifyAudioDataInserted(this, thisInsertPoint, dur);
        otherWav.RemovePcmData(Time.Zero);
    }
    else
    {
        base.MergeWith(other);
    }
}
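// Usage sketch (an assumption, not part of the original source): append one phrase's audio to
// another. "first" and "second" are hypothetical WavAudioMediaData instances; note that the
// merged-in media is emptied, as documented above.
private void MergeExample(WavAudioMediaData first, WavAudioMediaData second)
{
    if (first.PCMFormat.Data.IsCompatibleWith(second.PCMFormat.Data))
    {
        first.MergeWith(second); // clips of "second" are transferred, not copied
        // "second" now has zero audio duration.
    }
}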
/// <summary>
/// Start recording. Stop monitoring before starting recording.
/// </summary>
public void Record()
{
    if (mRecorder.CurrentState == AudioLib.AudioRecorder.State.Stopped)
    {
        mSessionOffset = mAudioList.Count;
        mPhraseMarks = new List<double>();
        mSectionMarks = new List<int>();
        mDeletedTime.Clear();
        m_PhraseIndexesToDelete.Clear();
        m_PhDetectorBytesReceivedFromRecorder = 0;
        m_PhDetectorEstimatedBytesRecorded = 0;
        m_MemStreamArray = null;

        AudioMediaData asset = (AudioMediaData)mPresentation.MediaDataFactory.Create<WavAudioMediaData>();
        mSessionMedia = (ManagedAudioMedia)mPresentation.MediaFactory.CreateManagedAudioMedia();
        //mSessionMedia.setMediaData(asset);
        mSessionMedia.MediaData = asset;

        mRecorder.AudioRecordingFinished += OnAudioRecordingFinished;
        mRecorder.StartRecording(asset.PCMFormat.Data);

        if (StartingPhrase != null)
        {
            StartingPhrase(this, new PhraseEventArgs(mSessionMedia, mSessionOffset, 0.0));
        }
        mRecordingUpdateTimer.Enabled = true;
    }
}
public override void Execute()
{
    ManagedAudioMedia audioMedia = m_SelectionData.m_TreeNode.GetManagedAudioMedia();
    AudioMediaData mediaData = audioMedia.AudioMediaData;

    // Convert the selection byte marks into times (-1 means "unset").
    Time timeBegin = SelectionData.m_LocalStreamLeftMark == -1
        ? Time.Zero
        : new Time(mediaData.PCMFormat.Data.ConvertBytesToTime(SelectionData.m_LocalStreamLeftMark));
    Time timeEnd = SelectionData.m_LocalStreamRightMark == -1
        ? Time.Zero
        : new Time(mediaData.PCMFormat.Data.ConvertBytesToTime(SelectionData.m_LocalStreamRightMark));

    if (SelectionData.TimeBeginEndEqualClipDuration(timeBegin, timeEnd, mediaData))
    {
        // The whole clip is selected: remove the media from the channel entirely.
        ChannelsProperty chProp = m_SelectionData.m_TreeNode.GetChannelsProperty();
        chProp.SetMedia(ChannelOfOriginalMedia, null);
    }
    else if (SelectionData.TimeBeginEndEqualClipDuration(new Time(), timeEnd, mediaData))
    {
        // The selection runs to the end of the clip: truncate from timeBegin onwards.
        mediaData.RemovePcmData(timeBegin);
    }
    else
    {
        mediaData.RemovePcmData(timeBegin, timeEnd);
    }
}
public UrakawaManagedWaveFormDataProvider(AudioMediaData source)
{
    if (source == null)
    {
        throw new MethodParameterIsNullException("The source AudioMediaData cannot be null");
    }
    mSourceAudioMediaData = source;
}
/// <summary>
/// Play an audio media data object from beginning to end.
/// </summary>
public void Play(AudioMediaData newAudioMediaData)
{
    if (CanPlay)
    {
        Stop();
        SetAudioMediaData(newAudioMediaData);
        PlayDataStream(0, 0);
    }
}
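// Usage sketch (an assumption, not part of the original source): play the audio attached to a
// tree node with the player shown in these snippets. "player" and "node" are hypothetical.
private void PlayNodeAudio(Player player, TreeNode node)
{
    ManagedAudioMedia media = node.GetManagedAudioMedia();
    if (media != null && media.AudioMediaData != null)
    {
        player.Play(media.AudioMediaData); // plays from beginning to end
    }
}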
/// <summary>
/// Start monitoring the audio input.
/// This may happen at the beginning of the session, or when recording is paused.
/// Create a new asset to "record" in (it gets discarded anyway.)
/// </summary>
public void StartMonitoring()
{
    if (mRecorder.CurrentState == AudioLib.AudioRecorder.State.Stopped)
    {
        AudioMediaData asset = (AudioMediaData)mPresentation.MediaDataFactory.Create<WavAudioMediaData>();
        mRecorder.StartMonitoring(asset.PCMFormat.Data);
    }
}
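// Usage sketch (an assumption, not part of the original source): typical flow for the recording
// session class that owns StartMonitoring()/Record(). "RecordingSession", "session" and
// "StopMonitoring()" are hypothetical names; per the Record() summary, monitoring must be
// stopped before recording starts.
private void RecordOnePhrase(RecordingSession session)
{
    session.StartMonitoring(); // level check; the monitoring asset is discarded
    // ...
    session.StopMonitoring();  // hypothetical: stop monitoring before recording
    session.Record();          // creates a fresh WavAudioMediaData asset and starts recording
}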
/// <summary>
/// Set a new audio media data object.
/// </summary>
private void SetAudioMediaData(AudioMediaData newAudioMediaData)
{
    if (mState != PlayerState.Stopped)
    {
        throw new Exception("Player is not stopped.");
    }
    mCurrentAudioMediaData = newAudioMediaData;
    CreateBuffersForCurrentAudioMediaData();
}
public override bool CanAddManagedObject(MediaData data)
{
    if (data is WavAudioMediaData)
    {
        AudioMediaData amdata = (AudioMediaData)data;
        if (EnforceSinglePCMFormat && !amdata.PCMFormat.Data.IsCompatibleWith(DefaultPCMFormat.Data))
        {
            return false;
        }
    }
    return true;
}
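// Usage sketch (an assumption, not part of the original source): check compatibility before
// attaching imported audio, so an incompatible PCM format is rejected early. "manager" is a
// hypothetical MediaDataManager-like instance exposing CanAddManagedObject().
private bool TryUseAudioData(MediaDataManager manager, WavAudioMediaData data)
{
    if (!manager.CanAddManagedObject(data))
    {
        // e.g. sample rate or channel count differs from the default PCM format
        return false;
    }
    // ... safe to attach the data to a ManagedAudioMedia
    return true;
}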
public void play(AudioMediaData data)
{
    setDevicePCMFormat(data.PCMFormat);
    mPlaybackAudioDevice.PlayEnded +=
        new EventHandler<global::AudioEngine.EndedEventArgs>(PlaybackAudioDevice_PlayEnded);
    try
    {
        mPlaybackAudioDevice.play(data.GetAudioData());
    }
    catch (Exception)
    {
        // Unsubscribe before propagating; rethrow with "throw" so the original stack trace is preserved.
        mPlaybackAudioDevice.PlayEnded -=
            new EventHandler<global::AudioEngine.EndedEventArgs>(PlaybackAudioDevice_PlayEnded);
        throw;
    }
}
public void SetDescriptionAudio(AlternateContent altContent, ManagedAudioMedia manMedia, TreeNode node)
{
    if (manMedia == null
        || (manMedia.HasActualAudioMediaData && !manMedia.Duration.IsGreaterThan(Time.Zero)))
    {
        // No usable audio was given: clear any existing description audio.
        if (altContent.Audio != null)
        {
            AlternateContentRemoveManagedMediaCommand cmd1 =
                node.Presentation.CommandFactory.CreateAlternateContentRemoveManagedMediaCommand(
                    node, altContent, altContent.Audio);
            node.Presentation.UndoRedoManager.Execute(cmd1);
        }
    }
    else
    {
        ManagedAudioMedia audio1 = node.Presentation.MediaFactory.CreateManagedAudioMedia();
        AudioMediaData audioData1 = node.Presentation.MediaDataFactory.CreateAudioMediaData();
        audio1.AudioMediaData = audioData1;

        // WARNING: the WavAudioMediaData implementation of MergeWith differs from the AudioMediaData
        // base class: the base class is naive and performs a stream binary copy, whereas
        // WavAudioMediaData is optimized and re-uses existing WavClips.
        // WARNING 2: MergeWith empties the audio data of the given parameter!
        //audio1.AudioMediaData.MergeWith(manMedia.AudioMediaData);

        if (!audio1.AudioMediaData.PCMFormat.Data.IsCompatibleWith(manMedia.AudioMediaData.PCMFormat.Data))
        {
            throw new InvalidDataFormatException(
                "Cannot merge description audio with an AudioMediaData with incompatible audio data");
        }

        // Copy the PCM data instead of merging, so the source media keeps its audio.
        Stream stream = manMedia.AudioMediaData.OpenPcmInputStream();
        try
        {
            audio1.AudioMediaData.AppendPcmData(stream, null); //manMedia.AudioMediaData.AudioDuration
        }
        finally
        {
            stream.Close();
        }

        AlternateContentSetManagedMediaCommand cmd22 =
            node.Presentation.CommandFactory.CreateAlternateContentSetManagedMediaCommand(
                node, altContent, audio1);
        node.Presentation.UndoRedoManager.Execute(cmd22);
    }
    RaisePropertyChanged(() => Descriptions);
}
/// <summary>
/// Create a new audio player.
/// </summary>
public Player()
{
    mCurrentAudioMediaData = null;
    mDevice = null;
    mEnableEvents = true;
    mFastPlayFactor = 1.0f;
    mFwdRwdRate = 1;
    mMonitoringTimer = new System.Windows.Forms.Timer();
    mMonitoringTimer.Tick += new System.EventHandler(this.MonitoringTimer_Tick);
    mMonitoringTimer.Interval = MONITORING_TIMER_INTERVAL;
    mPlaybackMode = PlaybackModes.Normal;
    mPreviewTimer = new System.Windows.Forms.Timer();
    mPreviewTimer.Tick += new System.EventHandler(this.mPreviewTimer_Tick);
    mPreviewTimer.Interval = PREVIEW_TIMER_INTERVAL;
    mState = PlayerState.NotReady;
    mPausePosition = 0;
    mStartPosition = 0;
}
public bool TimeBeginEndEqualClipDuration(Time timeBegin, Time timeEnd, AudioMediaData mediaData)
{
    bool equal =
        (
            timeBegin.IsEqualTo(Time.Zero)
            //mediaData.PCMFormat.Data.TimesAreEqualWithMillisecondsTolerance(timeBegin.AsLocalUnits, Time.Zero.AsLocalUnits)
        )
        &&
        (
            timeEnd.IsEqualTo(Time.Zero)
            || timeEnd.GetDifference(timeBegin).IsEqualTo(mediaData.AudioDuration)
            //mediaData.PCMFormat.Data.TimesAreEqualWithMillisecondsTolerance(timeEnd.AsLocalUnits, Time.Zero.AsLocalUnits)
            //|| mediaData.PCMFormat.Data.TimesAreEqualWithMillisecondsTolerance(timeEnd.GetDifference(timeBegin).AsLocalUnits, mediaData.AudioDuration.AsLocalUnits)
        );
    if (equal)
    {
        return true;
    }

    bool rightOk = false;
    if (m_LocalStreamRightMark == -1)
    {
        rightOk = true;
    }
    else
    {
        long timeBytes = mediaData.PCMFormat.Data.ConvertTimeToBytes(mediaData.AudioDuration.AsLocalUnits);
        rightOk =
            //m_LocalStreamRightMark == timeBytes
            mediaData.PCMFormat.Data.BytesAreEqualWithMillisecondsTolerance(m_LocalStreamRightMark, timeBytes)
            ;
    }

    bool leftOk =
        m_LocalStreamLeftMark == -1
        //|| m_LocalStreamLeftMark == 0
        || mediaData.PCMFormat.Data.BytesAreEqualWithMillisecondsTolerance(0, m_LocalStreamLeftMark)
        ;

    return leftOk && rightOk;
}
/// <summary>
/// Constructor setting the source <see cref="AudioMediaData"/> of the event,
/// the point from which the audio data was removed and the duration of the audio data that was removed.
/// </summary>
/// <param name="source">The source <see cref="AudioMediaData"/> of the event</param>
/// <param name="fromPoint">The point from which the audio data was removed</param>
/// <param name="dur">The duration of the audio data that was removed</param>
public AudioDataRemovedEventArgs(AudioMediaData source, Time fromPoint, Time dur)
    : base(source)
{
    RemovedFromPoint = fromPoint.Copy();
    Duration = dur.Copy();
}
private void diagramXmlParseBody_(XmlNode diagramElementNode, string xmlFilePath, TreeNode treeNode, int objectIndex) { string diagramElementName = diagramElementNode.Name; AlternateContent altContent = treeNode.Presentation.AlternateContentFactory.CreateAlternateContent(); AlternateContentAddCommand cmd_AltContent = treeNode.Presentation.CommandFactory.CreateAlternateContentAddCommand(treeNode, altContent); treeNode.Presentation.UndoRedoManager.Execute(cmd_AltContent); Metadata diagramElementName_Metadata = treeNode.Presentation.MetadataFactory.CreateMetadata(); diagramElementName_Metadata.NameContentAttribute = new MetadataAttribute(); diagramElementName_Metadata.NameContentAttribute.Name = DiagramContentModelHelper.DiagramElementName; diagramElementName_Metadata.NameContentAttribute.NamespaceUri = null; diagramElementName_Metadata.NameContentAttribute.Value = diagramElementName; AlternateContentMetadataAddCommand cmd_AltContent_diagramElementName_Metadata = treeNode.Presentation.CommandFactory.CreateAlternateContentMetadataAddCommand( treeNode, null, altContent, diagramElementName_Metadata, null ); treeNode.Presentation.UndoRedoManager.Execute(cmd_AltContent_diagramElementName_Metadata); if (diagramElementNode.Attributes != null) { for (int i = 0; i < diagramElementNode.Attributes.Count; i++) { XmlAttribute attribute = diagramElementNode.Attributes[i]; if (attribute.Name.StartsWith(XmlReaderWriterHelper.NS_PREFIX_XMLNS + ":")) { // } else if (attribute.Name == XmlReaderWriterHelper.NS_PREFIX_XMLNS) { // } else if (attribute.Name == DiagramContentModelHelper.TOBI_Audio) { string fullPath = null; if (FileDataProvider.isHTTPFile(attribute.Value)) { fullPath = FileDataProvider.EnsureLocalFilePathDownloadTempDirectory(attribute.Value); } else { fullPath = Path.Combine(Path.GetDirectoryName(xmlFilePath), attribute.Value); } if (fullPath != null && File.Exists(fullPath)) { string extension = Path.GetExtension(fullPath); bool isWav = extension.Equals(DataProviderFactory.AUDIO_WAV_EXTENSION, StringComparison.OrdinalIgnoreCase); AudioLibPCMFormat wavFormat = null; if (isWav) { Stream fileStream = File.Open(fullPath, FileMode.Open, FileAccess.Read, FileShare.Read); try { uint dataLength; wavFormat = AudioLibPCMFormat.RiffHeaderParse(fileStream, out dataLength); } finally { fileStream.Close(); } } string originalFilePath = null; DebugFix.Assert(treeNode.Presentation.MediaDataManager.EnforceSinglePCMFormat); bool wavNeedsConversion = false; if (wavFormat != null) { wavNeedsConversion = !wavFormat.IsCompatibleWith(treeNode.Presentation.MediaDataManager.DefaultPCMFormat.Data); } if (!isWav || wavNeedsConversion) { originalFilePath = fullPath; var audioFormatConvertorSession = new AudioFormatConvertorSession( //AudioFormatConvertorSession.TEMP_AUDIO_DIRECTORY, treeNode.Presentation.DataProviderManager.DataFileDirectoryFullPath, treeNode.Presentation.MediaDataManager.DefaultPCMFormat, false, m_UrakawaSession.IsAcmCodecsDisabled); //filePath = m_AudioFormatConvertorSession.ConvertAudioFileFormat(filePath); bool cancelled = false; var converter = new AudioClipConverter(audioFormatConvertorSession, fullPath); bool error = ShellView.RunModalCancellableProgressTask(true, "Converting audio...", converter, () => { m_Logger.Log(@"Audio conversion CANCELLED", Category.Debug, Priority.Medium); cancelled = true; }, () => { m_Logger.Log(@"Audio conversion DONE", Category.Debug, Priority.Medium); cancelled = false; }); if (cancelled) { //DebugFix.Assert(!result); break; } fullPath = converter.ConvertedFilePath; if 
(string.IsNullOrEmpty(fullPath)) { break; } m_Logger.Log(string.Format("Converted audio {0} to {1}", originalFilePath, fullPath), Category.Debug, Priority.Medium); //Stream fileStream = File.Open(fullPath, FileMode.Open, FileAccess.Read, FileShare.Read); //try //{ // uint dataLength; // wavFormat = AudioLibPCMFormat.RiffHeaderParse(fileStream, out dataLength); //} //finally //{ // fileStream.Close(); //} } ManagedAudioMedia manAudioMedia = treeNode.Presentation.MediaFactory.CreateManagedAudioMedia(); AudioMediaData audioMediaData = treeNode.Presentation.MediaDataFactory.CreateAudioMediaData(DataProviderFactory.AUDIO_WAV_EXTENSION); manAudioMedia.AudioMediaData = audioMediaData; FileDataProvider dataProv = (FileDataProvider)treeNode.Presentation.DataProviderFactory.Create(DataProviderFactory.AUDIO_WAV_MIME_TYPE); dataProv.InitByCopyingExistingFile(fullPath); audioMediaData.AppendPcmData(dataProv); // Stream wavStream = null; // try // { // wavStream = File.Open(fullPath, FileMode.Open, FileAccess.Read, FileShare.Read); // uint dataLength; // AudioLibPCMFormat pcmInfo = AudioLibPCMFormat.RiffHeaderParse(wavStream, out dataLength); // if (!treeNode.Presentation.MediaDataManager.DefaultPCMFormat.Data.IsCompatibleWith(pcmInfo)) // { //#if DEBUG // Debugger.Break(); //#endif //DEBUG // wavStream.Close(); // wavStream = null; // var audioFormatConvertorSession = // new AudioFormatConvertorSession( // //AudioFormatConvertorSession.TEMP_AUDIO_DIRECTORY, // treeNode.Presentation.DataProviderManager.DataFileDirectoryFullPath, // treeNode.Presentation.MediaDataManager.DefaultPCMFormat, m_UrakawaSession.IsAcmCodecsDisabled); // string newfullWavPath = audioFormatConvertorSession.ConvertAudioFileFormat(fullPath); // FileDataProvider dataProv = (FileDataProvider)treeNode.Presentation.DataProviderFactory.Create(DataProviderFactory.AUDIO_WAV_MIME_TYPE); // dataProv.InitByMovingExistingFile(newfullWavPath); // audioMediaData.AppendPcmData(dataProv); // } // else // use original wav file by copying it to data directory // { // FileDataProvider dataProv = (FileDataProvider)treeNode.Presentation.DataProviderFactory.Create(DataProviderFactory.AUDIO_WAV_MIME_TYPE); // dataProv.InitByCopyingExistingFile(fullPath); // audioMediaData.AppendPcmData(dataProv); // } // } // finally // { // if (wavStream != null) wavStream.Close(); // } AlternateContentSetManagedMediaCommand cmd_AltContent_diagramElement_Audio = treeNode.Presentation.CommandFactory.CreateAlternateContentSetManagedMediaCommand(treeNode, altContent, manAudioMedia); treeNode.Presentation.UndoRedoManager.Execute(cmd_AltContent_diagramElement_Audio); //SetDescriptionAudio(altContent, audio, treeNode); } } else { Metadata diagramElementAttribute_Metadata = treeNode.Presentation.MetadataFactory.CreateMetadata(); diagramElementAttribute_Metadata.NameContentAttribute = new MetadataAttribute(); diagramElementAttribute_Metadata.NameContentAttribute.Name = attribute.Name; diagramElementAttribute_Metadata.NameContentAttribute.NamespaceUri = attribute.NamespaceURI; diagramElementAttribute_Metadata.NameContentAttribute.Value = attribute.Value; AlternateContentMetadataAddCommand cmd_AltContent_diagramElementAttribute_Metadata = treeNode.Presentation.CommandFactory.CreateAlternateContentMetadataAddCommand( treeNode, null, altContent, diagramElementAttribute_Metadata, null ); treeNode.Presentation.UndoRedoManager.Execute( cmd_AltContent_diagramElementAttribute_Metadata); } } } int nObjects = -1; XmlNode textNode = diagramElementNode; if (diagramElementName == 
DiagramContentModelHelper.D_SimplifiedImage || diagramElementName == DiagramContentModelHelper.D_Tactile) { string localTourName = DiagramContentModelHelper.StripNSPrefix(DiagramContentModelHelper.D_Tour); XmlNode tour = XmlDocumentHelper.GetFirstChildElementOrSelfWithName(diagramElementNode, false, localTourName, DiagramContentModelHelper.NS_URL_DIAGRAM); textNode = tour; IEnumerable <XmlNode> objects = XmlDocumentHelper.GetChildrenElementsOrSelfWithName(diagramElementNode, false, DiagramContentModelHelper. Object, DiagramContentModelHelper. NS_URL_ZAI, false); nObjects = 0; foreach (XmlNode obj in objects) { nObjects++; } int i = -1; foreach (XmlNode obj in objects) { i++; if (i != objectIndex) { continue; } if (obj.Attributes == null || obj.Attributes.Count <= 0) { break; } for (int j = 0; j < obj.Attributes.Count; j++) { XmlAttribute attribute = obj.Attributes[j]; if (attribute.Name.StartsWith(XmlReaderWriterHelper.NS_PREFIX_XMLNS + ":")) { // } else if (attribute.Name == XmlReaderWriterHelper.NS_PREFIX_XMLNS) { // } else if (attribute.Name == DiagramContentModelHelper.Src) { // } else if (attribute.Name == DiagramContentModelHelper.SrcType) { // } else { Metadata diagramElementAttribute_Metadata = treeNode.Presentation.MetadataFactory.CreateMetadata(); diagramElementAttribute_Metadata.NameContentAttribute = new MetadataAttribute(); diagramElementAttribute_Metadata.NameContentAttribute.Name = attribute.Name; diagramElementAttribute_Metadata.NameContentAttribute.NamespaceUri = attribute.NamespaceURI; diagramElementAttribute_Metadata.NameContentAttribute.Value = attribute.Value; AlternateContentMetadataAddCommand cmd_AltContent_diagramElementAttribute_Metadata = treeNode.Presentation.CommandFactory.CreateAlternateContentMetadataAddCommand( treeNode, null, altContent, diagramElementAttribute_Metadata, null ); treeNode.Presentation.UndoRedoManager.Execute( cmd_AltContent_diagramElementAttribute_Metadata); } } XmlAttribute srcAttr = (XmlAttribute)obj.Attributes.GetNamedItem(DiagramContentModelHelper.Src); if (srcAttr != null) { XmlAttribute srcType = (XmlAttribute)obj.Attributes.GetNamedItem(DiagramContentModelHelper.SrcType); ManagedImageMedia img = treeNode.Presentation.MediaFactory.CreateManagedImageMedia(); string imgFullPath = null; if (FileDataProvider.isHTTPFile(srcAttr.Value)) { imgFullPath = FileDataProvider.EnsureLocalFilePathDownloadTempDirectory(srcAttr.Value); } else { imgFullPath = Path.Combine(Path.GetDirectoryName(xmlFilePath), srcAttr.Value); } if (imgFullPath != null && File.Exists(imgFullPath)) { string extension = Path.GetExtension(imgFullPath); ImageMediaData imgData = treeNode.Presentation.MediaDataFactory.CreateImageMediaData(extension); if (imgData != null) { imgData.InitializeImage(imgFullPath, Path.GetFileName(imgFullPath)); img.ImageMediaData = imgData; AlternateContentSetManagedMediaCommand cmd_AltContent_Image = treeNode.Presentation.CommandFactory.CreateAlternateContentSetManagedMediaCommand( treeNode, altContent, img); treeNode.Presentation.UndoRedoManager.Execute(cmd_AltContent_Image); } } } } } if (textNode != null) { string strText = textNode.InnerXml; if (!string.IsNullOrEmpty(strText)) { strText = strText.Trim(); strText = Regex.Replace(strText, @"\s+", " "); strText = strText.Replace("\r\n", "\n"); } if (!string.IsNullOrEmpty(strText)) { TextMedia txtMedia = treeNode.Presentation.MediaFactory.CreateTextMedia(); txtMedia.Text = strText; AlternateContentSetManagedMediaCommand cmd_AltContent_Text = 
treeNode.Presentation.CommandFactory.CreateAlternateContentSetManagedMediaCommand(treeNode, altContent, txtMedia); treeNode.Presentation.UndoRedoManager.Execute(cmd_AltContent_Text); } } if (nObjects > 0 && ++objectIndex <= nObjects - 1) { diagramXmlParseBody_(diagramElementNode, xmlFilePath, treeNode, objectIndex); } }
public ManagedAudioMedia ExtractManagedAudioMedia()
{
#if ENABLE_SEQ_MEDIA
    Media audioMedia = m_TreeNode.GetManagedAudioMediaOrSequenceMedia();
#else
    ManagedAudioMedia audioMedia = m_TreeNode.GetManagedAudioMedia();
#endif
    if (audioMedia == null)
    {
        Debug.Fail("This should never happen !");
        throw new Exception("TreeNode doesn't have managed audio media ?!");
    }
#if ENABLE_SEQ_MEDIA
    else if (audioMedia is SequenceMedia)
    {
        Debug.Fail("SequenceMedia is normally removed at import time...have you tried re-importing the DAISY book ?");
        throw new NotImplementedException("TODO: implement support for SequenceMedia of ManagedAudioMedia in audio delete functionality !");

        //var seqManAudioMedia = (SequenceMedia)audioMedia;
        //double timeOffset = 0;
        //long sumData = 0;
        //long sumDataPrev = 0;
        //foreach (Media media in seqManAudioMedia.ChildMedias.ContentsAs_YieldEnumerable)
        //{
        //    var manangedMediaSeqItem = (ManagedAudioMedia)media;
        //    if (!manangedMediaSeqItem.HasActualAudioMediaData)
        //    {
        //        continue;
        //    }
        //    AudioMediaData audioData = manangedMediaSeqItem.AudioMediaData;
        //    sumData += audioData.PCMFormat.Data.ConvertTimeToBytes(audioData.AudioDuration.AsMilliseconds);
        //    if (SelectionData.m_LocalStreamLeftMark < sumData)
        //    {
        //        timeOffset = audioData.PCMFormat.Data.ConvertBytesToTime(SelectionData.m_LocalStreamLeftMark - sumDataPrev);
        //        break;
        //    }
        //    sumDataPrev = sumData;
        //}
    }
    else if (audioMedia is ManagedAudioMedia)
    {
        AudioMediaData mediaData = ((ManagedAudioMedia)audioMedia).AudioMediaData;
#else
    else
    {
        AudioMediaData mediaData = audioMedia.AudioMediaData;
#endif //ENABLE_SEQ_MEDIA
        if (mediaData == null)
        {
            Debug.Fail("This should never happen !");
            throw new Exception("ManagedAudioMedia has empty MediaData ?!");
        }

        Time timeBegin = m_LocalStreamLeftMark == -1
            ? Time.Zero
            : new Time(mediaData.PCMFormat.Data.ConvertBytesToTime(m_LocalStreamLeftMark));
        Time timeEnd = m_LocalStreamRightMark == -1
            ? Time.Zero
            : new Time(mediaData.PCMFormat.Data.ConvertBytesToTime(m_LocalStreamRightMark));

        if (TimeBeginEndEqualClipDuration(timeBegin, timeEnd, mediaData))
        {
            return ((ManagedAudioMedia)audioMedia).Copy();
        }
        else
        {
            ManagedAudioMedia managedAudioMediaBackup = m_TreeNode.Presentation.MediaFactory.CreateManagedAudioMedia();

            //var mediaDataBackup = (WavAudioMediaData)m_TreeNode.Presentation.MediaDataFactory.CreateAudioMediaData();
            WavAudioMediaData mediaDataBackup = ((WavAudioMediaData)mediaData).Copy(timeBegin, timeEnd);
            managedAudioMediaBackup.AudioMediaData = mediaDataBackup;

            //Stream streamToBackup = timeEnd.IsEqualTo(Time.Zero)
            //    ? mediaData.OpenPcmInputStream(timeBegin)
            //    : mediaData.OpenPcmInputStream(timeBegin, timeEnd);
            //try
            //{
            //    //Time timeDelta = mediaData.AudioDuration.Substract(new Time(timeBegin.TimeAsMillisecondFloat));
            //    mediaDataBackup.AppendPcmData(streamToBackup, null);
            //}
            //finally
            //{
            //    streamToBackup.Close();
            //}

            return managedAudioMediaBackup;
        }
    }

    return null;
}
/// <summary>
/// Constructor setting the source <see cref="AudioMediaData"/> of the event,
/// the insertion point and the duration of the audio data that was inserted.
/// </summary>
/// <param name="source">The source <see cref="AudioMediaData"/> of the event</param>
/// <param name="insPoint">The insertion point at which the audio data was inserted</param>
/// <param name="dur">The duration of the data that was inserted</param>
public AudioDataInsertedEventArgs(AudioMediaData source, Time insPoint, Time dur)
    : base(source)
{
    InsertPoint = insPoint.Copy();
    Duration = dur.Copy();
}
/// <summary>
/// Constructor setting the source <see cref="AudioMediaData"/> of the event
/// and the previous and new PCM formats.
/// </summary>
/// <param name="source">The source <see cref="AudioMediaData"/> of the event</param>
/// <param name="newFormat">The new PCMFormat</param>
/// <param name="prevFormat">The PCMFormat prior to the change</param>
public PCMFormatChangedEventArgs(AudioMediaData source, PCMFormatInfo newFormat, PCMFormatInfo prevFormat)
    : base(source)
{
    NewPCMFormat = newFormat;
    PreviousPCMFormat = prevFormat;
}
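// Usage sketch (an assumption, not part of the original source): subscribe to the change
// notifications that carry the *EventArgs shown above. The event names (AudioDataInserted,
// AudioDataRemoved, PCMFormatChanged) are inferred from the EventArgs types and from the
// NotifyAudioDataInserted call in MergeWith; verify them against the SDK before relying on this.
private void WatchAudioData(AudioMediaData data)
{
    data.AudioDataInserted += (sender, e) =>
        Console.WriteLine("Inserted {0} at {1}", e.Duration, e.InsertPoint);
    data.AudioDataRemoved += (sender, e) =>
        Console.WriteLine("Removed {0} from {1}", e.Duration, e.RemovedFromPoint);
    data.PCMFormatChanged += (sender, e) =>
        Console.WriteLine("PCM format changed to {0}", e.NewPCMFormat);
}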
private void parseSmil(string fullSmilPath) { if (RequestCancellation) { return; } XmlDocument smilXmlDoc = XmlReaderWriterHelper.ParseXmlDocument(fullSmilPath, false, false); if (RequestCancellation) { return; } //we skip SMIL metadata parsing (we get publication metadata only from OPF and DTBOOK/XHTMLs) //parseMetadata(smilXmlDoc); //XmlNodeList allTextNodes = smilXmlDoc.GetElementsByTagName("text"); //if (allTextNodes.Count == 0) //{ // return; //} //reportProgress(-1, "Parsing SMIL: [" + Path.GetFileName(fullSmilPath) + "]"); foreach (XmlNode textNode in XmlDocumentHelper.GetChildrenElementsOrSelfWithName(smilXmlDoc, true, "text", null, false)) { XmlAttributeCollection textNodeAttrs = textNode.Attributes; if (textNodeAttrs == null || textNodeAttrs.Count == 0) { continue; } XmlNode textNodeAttrSrc = textNodeAttrs.GetNamedItem("src"); if (textNodeAttrSrc == null || String.IsNullOrEmpty(textNodeAttrSrc.Value)) { continue; } string src = FileDataProvider.UriDecode(textNodeAttrSrc.Value); int index = src.LastIndexOf('#'); if (index == src.Length - 1) { return; } string srcFragmentId = src.Substring(index + 1); TreeNode textTreeNode = m_Project.Presentations.Get(0).RootNode.GetFirstDescendantOrSelfWithXmlID(srcFragmentId); if (textTreeNode == null) { continue; } ManagedAudioMedia textTreeNodeAudio = textTreeNode.GetManagedAudioMedia(); if (textTreeNodeAudio != null) { //Ignore. continue; } XmlNode parent = textNode.ParentNode; if (parent != null && parent.LocalName == "a") { parent = parent.ParentNode; } if (parent == null) { continue; } if (parent.LocalName != "par") { //System.Diagnostics.Debug.Fail("Text node in SMIL has no parallel time container as parent ! {0}", parent.Name); continue; } XmlNodeList textPeers = parent.ChildNodes; foreach (XmlNode textPeerNode in textPeers) { if (RequestCancellation) { return; } if (textPeerNode.NodeType != XmlNodeType.Element) { continue; } if (textPeerNode.LocalName == "audio") { addAudio(textTreeNode, textPeerNode, false, fullSmilPath); break; } else if (textPeerNode.LocalName == "a") { XmlNodeList aChildren = textPeerNode.ChildNodes; foreach (XmlNode aChild in aChildren) { if (aChild.LocalName == "audio") { addAudio(textTreeNode, aChild, false, fullSmilPath); break; } } } else if (textPeerNode.LocalName == "seq") { #if DEBUG Debugger.Break(); #endif //DEBUG XmlNodeList seqChildren = textPeerNode.ChildNodes; foreach (XmlNode seqChild in seqChildren) { if (seqChild.LocalName == "audio") { addAudio(textTreeNode, seqChild, true, fullSmilPath); } } #if ENABLE_SEQ_MEDIA SequenceMedia seqManAudioMedia = textTreeNode.GetManagedAudioSequenceMedia(); if (seqManAudioMedia == null) { Debug.Fail("This should never happen !"); break; } ManagedAudioMedia managedAudioMedia = textTreeNode.Presentation.MediaFactory.CreateManagedAudioMedia(); AudioMediaData mediaData = textTreeNode.Presentation.MediaDataFactory.CreateAudioMediaData(); managedAudioMedia.AudioMediaData = mediaData; foreach (Media seqChild in seqManAudioMedia.ChildMedias.ContentsAs_Enumerable) { ManagedAudioMedia seqManMedia = (ManagedAudioMedia)seqChild; // WARNING: WavAudioMediaData implementation differs from AudioMediaData: // the latter is naive and performs a stream binary copy, the latter is optimized and re-uses existing WavClips. // WARNING 2: The audio data from the given parameter gets emptied ! 
mediaData.MergeWith(seqManMedia.AudioMediaData); //Stream stream = seqManMedia.AudioMediaData.OpenPcmInputStream(); //try //{ // mediaData.AppendPcmData(stream, null); //} //finally //{ // stream.Close(); //} //seqManMedia.AudioMediaData.Delete(); // doesn't actually removes the FileDataProviders (need to call Presentation.Cleanup()) ////textTreeNode.Presentation.DataProviderManager.RemoveDataProvider(); } ChannelsProperty chProp = textTreeNode.GetChannelsProperty(); chProp.SetMedia(m_audioChannel, null); chProp.SetMedia(m_audioChannel, managedAudioMedia); #endif //ENABLE_SEQ_MEDIA break; } } } }
public override bool PreVisit(TreeNode node)
{
    if (m_RootNode == null)
    {
        m_RootNode = node;
    }
    if (TreeNodeMustBeSkipped(node))
    {
        return false;
    }
    if (TreeNodeTriggersNewAudioFile(node))
    {
        createNextAudioFile(node);
    }
    if (node.HasProperties(typeof(ChannelsProperty)))
    {
        ChannelsProperty chProp = node.GetChannelsProperty();
        ManagedAudioMedia mam = chProp.GetMedia(SourceChannel) as ManagedAudioMedia;
        if (mam != null)
        {
            AudioMediaData amd = mam.AudioMediaData;

            if (mCurrentAudioFileStream == null)
            {
                createNextAudioFile(node);
            }
            else if (mCurrentAudioFilePCMFormat != null
                     && !mCurrentAudioFilePCMFormat.Data.IsCompatibleWith(amd.PCMFormat.Data))
            {
#if DEBUG
                Debugger.Break();
#endif
                createNextAudioFile(node);
            }
            if (mCurrentAudioFileStream != null && mCurrentAudioFilePCMFormat == null)
            {
                writeInitialHeader(amd.PCMFormat);
            }

            Time durationFromRiffHeader = amd.AudioDuration;
            Time clipBegin = new Time(mCurrentAudioFilePCMFormat.Data.ConvertBytesToTime(
                mCurrentAudioFileStream.Position - mCurrentAudioFileStreamRiffWaveHeaderLength));
            Time clipEnd = new Time(clipBegin.AsTimeSpanTicks + durationFromRiffHeader.AsTimeSpanTicks, true);

            //BinaryReader rd = new BinaryReader(stream);
            Stream stream = amd.OpenPcmInputStream();
            try
            {
                const uint BUFFER_SIZE = 1024 * 1024 * 3; // 3 MB MAX BUFFER
                StreamUtils.Copy(stream, 0, mCurrentAudioFileStream, BUFFER_SIZE);
            }
            finally
            {
                stream.Close();
            }

            ExternalAudioMedia eam = node.Presentation.MediaFactory.Create<ExternalAudioMedia>();
            if (eam == null)
            {
                throw new exception.FactoryCannotCreateTypeException(String.Format(
                    "The media factory cannot create an ExternalAudioMedia matching QName {1}:{0}",
                    XukAble.GetXukName(typeof(ExternalAudioMedia), true) ?? typeof(ExternalAudioMedia).Name,
                    node.Presentation.Project.GetXukNamespace()));
            }

            eam.Language = mam.Language;
            eam.Src = node.Presentation.RootUri.MakeRelativeUri(GetCurrentAudioFileUri()).ToString();
            eam.ClipBegin = clipBegin;
            eam.ClipEnd = clipEnd;

            if (chProp.GetMedia(DestinationChannel) != null)
            {
#if DEBUG
                Debugger.Break();
#endif
                chProp.SetMedia(DestinationChannel, null);
            }
            chProp.SetMedia(DestinationChannel, eam);
        }
    }
    return true;
}
/// <summary>
/// Constructor setting the source <see cref="AudioMediaData"/> of the event.
/// </summary>
/// <param name="source">The source <see cref="AudioMediaData"/> of the event</param>
public AudioMediaDataEventArgs(AudioMediaData source)
    : base(source)
{
    SourceAudioMediaData = source;
}
public override string ToString()
{
    StringBuilder sb = new StringBuilder();
    sb.Append('[');
    sb.AppendFormat("At: {0}", Date);
    if (IsEdited)
    {
        sb.AppendFormat(" Edited at: {0}", Edited);
    }
    if (IsForwarded)
    {
        sb.AppendFormat(" Forwarded: {0}", ForwardedFrom);
    }
    sb.AppendFormat(" By: {0}", From);
    sb.Append("]: ");
    switch (MediaType)
    {
        case MediaType.Photo:
        {
            PhotoMediaData data = MediaInfo as PhotoMediaData;
            sb.AppendFormat("Photo[{0}x{1}]", data.Width, data.Height);
            break;
        }
        case MediaType.Sticker:
        {
            StickerMediaData data = MediaInfo as StickerMediaData;
            sb.AppendFormat("Sticker[{0}. {1}x{2}]", data.Emoji, data.Width, data.Height);
            break;
        }
        case MediaType.Audio_file:
        {
            AudioMediaData data = MediaInfo as AudioMediaData;
            sb.AppendFormat("Audio[{0} - {1}. {2}s]", data.Performer, data.Title, data.Duration);
            break;
        }
        case MediaType.Video_file:
        case MediaType.Animation:
        case MediaType.Video_message:
        {
            VideoMediaData data = MediaInfo as VideoMediaData;
            sb.AppendFormat("Video[{0}x{1}. {2}s]", data.Width, data.Height, data.Duration);
            break;
        }
        case MediaType.Voice_message:
        {
            VoiceMediaData data = MediaInfo as VoiceMediaData;
            sb.AppendFormat("Voice[{0}s]", data.Duration);
            break;
        }
        case MediaType.File:
        {
            FileMediaData data = MediaInfo as FileMediaData;
            sb.AppendFormat("File[{0}]", data.MimeType);
            break;
        }
        case MediaType.Null:
        default:
            break;
    }
    if (HasText)
    {
        sb.AppendFormat("\"{0}\"", Text);
    }
    if (HasLinks)
    {
        sb.AppendFormat(". Links: {0}", Links.Count);
    }
    return sb.ToString();
}