/// <summary>
/// Replaces with audio from a RIFF Wave file of a given duration at a given replace point
/// </summary>
/// <param name="riffWaveStream">The RIFF Wave file</param>
/// <param name="replacePoint">The given replace point</param>
/// <param name="duration">The duration of the audio to replace</param>
/// <exception cref="exception.InvalidDataFormatException">
/// Thrown when the wave file's PCM format is incompatible with this media data's format</exception>
/// <exception cref="exception.MethodParameterIsOutOfBoundsException">
/// Thrown when the wave file is shorter than the requested duration</exception>
public void ReplacePcmData_RiffHeader(Stream riffWaveStream, Time replacePoint, Time duration)
{
    if (OriginalRelativePath != null && DataProvider != null)
    {
        // Editing externally-provided (pre-recorded) file data in place is not supported.
        throw new NotImplementedException();
    }

    uint dataLength;
    // Parses the RIFF header and leaves the stream positioned at the first PCM byte.
    AudioLibPCMFormat format = AudioLibPCMFormat.RiffHeaderParse(riffWaveStream, out dataLength);

    // BUGFIX: mirror AppendPcmData_RiffHeader — some RIFF headers report a zero-length
    // data chunk (e.g. streamed WAV); fall back to the remaining bytes in the stream.
    if (dataLength <= 0)
    {
        dataLength = (uint)(riffWaveStream.Length - riffWaveStream.Position);
    }

    if (!format.IsCompatibleWith(PCMFormat.Data))
    {
        // No format arguments needed; plain string literal (message unchanged).
        throw new exception.InvalidDataFormatException(
            "RIFF WAV file has incompatible PCM format");
    }

    // The file must contain at least `duration` worth of audio to replace with.
    Time fileDuration = new Time(format.ConvertBytesToTime(dataLength));
    if (fileDuration.IsLessThan(duration))
    {
        throw new exception.MethodParameterIsOutOfBoundsException(String.Format(
            "Can not insert {0} of audio from RIFF Wave file since the file's duration is only {1}",
            duration, fileDuration));
    }

    ReplacePcmData(riffWaveStream, replacePoint, duration);
}
/// <summary>
/// Appends the PCM data of the given RIFF Wave test resource to the given
/// <see cref="ManagedAudioMedia"/>.
/// </summary>
/// <param name="filename">The name of the RIFF Wave resource to append</param>
/// <param name="amd">The managed audio media to append the data to</param>
private void AppendAudioData(string filename, ManagedAudioMedia amd)
{
    uint dataLength;
    // BUGFIX: the filename parameter was previously ignored — both GetInfo and
    // GetRawStream hard-coded "audiotest1-mono-22050Hz-16bits.wav".
    AudioLibPCMFormat info = GetInfo(filename, out dataLength);
    // NOTE(review): the PCM format is set on mManagedAudioMedia1 while the data is
    // appended to amd — presumably intentional fixture setup; verify against callers.
    mManagedAudioMedia1.AudioMediaData.PCMFormat = new PCMFormatInfo(info);
    Stream fs = GetRawStream(filename);
    try
    {
        amd.AudioMediaData.AppendPcmData(fs, new TimeDelta(info.ConvertBytesToTime(dataLength)));
    }
    finally
    {
        // Always release the file handle, even if the append throws.
        fs.Close();
    }
}
/// <summary>
/// Appends audio data from a RIFF Wave file
/// </summary>
/// <param name="riffWaveStream">The RIFF Wave file</param>
public void AppendPcmData_RiffHeader(Stream riffWaveStream)
{
    if (OriginalRelativePath != null && DataProvider != null)
    {
        // Appending into externally-provided file data is not supported.
        throw new NotImplementedException();
    }

    uint dataLength;
    // Parse the RIFF header; the stream is left positioned at the first PCM byte.
    AudioLibPCMFormat format = AudioLibPCMFormat.RiffHeaderParse(riffWaveStream, out dataLength);

    // A zero-length data chunk means "use everything remaining in the stream".
    if (dataLength <= 0)
    {
        dataLength = (uint)(riffWaveStream.Length - riffWaveStream.Position);
    }

    if (!format.IsCompatibleWith(PCMFormat.Data))
    {
        throw new exception.InvalidDataFormatException(
            String.Format("RIFF WAV file has incompatible PCM format"));
    }

    // Delegate the actual append, passing the duration derived from the byte count.
    Time appendDuration = new Time(format.ConvertBytesToTime(dataLength));
    AppendPcmData(riffWaveStream, appendDuration);
}
/// <summary>
/// Gets an input <see cref="Stream"/> providing read access to the raw PCM audio data
/// between given sub-clip begin and end times
/// </summary>
/// <param name="subClipBegin">The beginning of the sub-clip</param>
/// <param name="subClipEnd">The end of the sub-clip</param>
/// <returns>The raw PCM audio data <see cref="Stream"/></returns>
/// <remarks>
/// <para>Sub-clip times must be in the interval <c>[0;this.getAudioDuration()]</c>.</para>
/// <para>
/// The sub-clip is
/// relative to clip begin of the WavClip, that if <c>this.getClipBegin()</c>
/// returns <c>00:00:10</c>, <c>this.getClipEnd()</c> returns <c>00:00:50</c>,
/// <c>x</c> and <c>y</c> is <c>00:00:05</c> and <c>00:00:30</c> respectively,
/// then <c>this.GetAudioData(x, y)</c> will get the audio in the underlying wave audio between
/// <c>00:00:15</c> and <c>00:00:40</c>
/// </para>
/// </remarks>
/// <exception cref="exception.MethodParameterIsNullException">Thrown when either time is null</exception>
/// <exception cref="exception.MethodParameterIsOutOfBoundsException">
/// Thrown when the sub-clip is not within [0;Duration]</exception>
/// <exception cref="exception.InvalidDataFormatException">
/// Thrown when this clip's bounds do not fit inside the underlying wave data</exception>
public Stream OpenPcmInputStream(Time subClipBegin, Time subClipEnd)
{
    // Guard: both sub-clip boundaries are required.
    if (subClipBegin == null)
    {
        throw new exception.MethodParameterIsNullException("subClipBegin must not be null");
    }
    if (subClipEnd == null)
    {
        throw new exception.MethodParameterIsNullException("subClipEnd must not be null");
    }
    // The requested sub-clip must lie within [0; Duration] of this clip.
    if (
        subClipBegin.IsLessThan(Time.Zero)
        || subClipEnd.IsLessThan(subClipBegin)
        || subClipEnd.IsGreaterThan(Duration)
        )
    {
        string msg = String.Format(
            "subClipBegin/subClipEnd [{0};{1}] not within ([0;{2}])",
            subClipBegin, subClipEnd, Duration);
        throw new exception.MethodParameterIsOutOfBoundsException(msg);
    }

    // Open the underlying WAV stream and parse its RIFF header; afterwards the
    // stream position is at the first byte of PCM data.
    Stream raw = DataProvider.OpenInputStream();
    uint dataLength;
    AudioLibPCMFormat format = AudioLibPCMFormat.RiffHeaderParse(raw, out dataLength);
    // Total duration of the PCM data actually present in the underlying stream.
    Time rawEndTime = new Time(format.ConvertBytesToTime(dataLength));
#if DEBUG
    DebugFix.Assert(rawEndTime.IsEqualTo(MediaDuration));
#endif
    //Time rawEndTime = Time.Zero.Add(MediaDuration);
    // We don't call this to avoid unnecessary I/O (Strem.Open() twice)

    // Sanity-check that this clip's [ClipBegin; ClipEnd] fits inside the raw stream.
    if (
        ClipBegin.IsLessThan(Time.Zero)
        || ClipBegin.IsGreaterThan(ClipEnd)
        || ClipEnd.IsGreaterThan(rawEndTime)
        )
    {
        string msg = String.Format(
            "WavClip [{0};{1}] is empty or not within the underlying wave data stream ([0;{2}])",
            ClipBegin, ClipEnd, rawEndTime);
        throw new exception.InvalidDataFormatException(msg);
    }

    /*
     * Time clipDuration = Duration;
     * if (subClipBegin.IsEqualTo(Time.Zero) && subClipEnd.IsEqualTo(Time.Zero.Add(clipDuration)))
     * {
     *     // Stream.Position is at the end of the RIFF header, we need to bring it back to the begining
     *     return new SubStream(
     *         raw,
     *         raw.Position, raw.Length - raw.Position);
     * }
     */

    //Time rawClipBegin = new Time(ClipBegin.AsTimeSpan + subClipBegin.AsTimeSpan);
    //Time rawClipEnd = new Time(ClipBegin.AsTimeSpan + subClipEnd.AsTimeSpan);

    // Convert times to absolute byte offsets in the raw stream: sub-clip times are
    // relative to ClipBegin, and both offsets are shifted past the RIFF header.
    long ClipBegin_AsLocalUnits = ClipBegin.AsLocalUnits;
    long posRiffHeader = raw.Position; //44
    long beginPos = posRiffHeader + format.ConvertTimeToBytes(ClipBegin_AsLocalUnits + subClipBegin.AsLocalUnits);
    long endPos = posRiffHeader + format.ConvertTimeToBytes(ClipBegin_AsLocalUnits + subClipEnd.AsLocalUnits);
    long rawLen = raw.Length;
#if DEBUG
    // The physical stream length should equal header size + declared data length.
    long rawLenCheck = posRiffHeader + dataLength;
    DebugFix.Assert(rawLen == rawLenCheck);
#endif
    // Clamp the end position to the physical stream length — presumably guards
    // against time-to-byte conversion overshoot; TODO confirm intent.
    if (endPos > rawLen)
    {
        //#if DEBUG
        // Debugger.Break();
        //#endif
        endPos = rawLen;
    }
    long len = endPos - beginPos;
    // The returned SubStream exposes only the [beginPos; beginPos+len) window of raw,
    // carrying the backing file path when the provider is file-based.
    return (new SubStream(
        raw,
        beginPos, len,
        DataProvider is FileDataProvider ? ((FileDataProvider)DataProvider).DataFileFullPath : null));
}