/// <summary>
/// WaveStream to resample using the DMO Resampler
/// </summary>
/// <param name="inputProvider">Input Stream</param>
/// <param name="outputFormat">Desired Output Format</param>
public ResamplerDmoStream(IWaveProvider inputProvider, WaveFormat outputFormat)
{
    this.inputProvider = inputProvider;
    inputStream = inputProvider as WaveStream;
    this.outputFormat = outputFormat;
    dmoResampler = new DmoResampler();
    if (!dmoResampler.MediaObject.SupportsInputWaveFormat(0, inputProvider.WaveFormat))
    {
        throw new ArgumentException("Unsupported Input Stream format", nameof(inputProvider));
    }

    dmoResampler.MediaObject.SetInputWaveFormat(0, inputProvider.WaveFormat);
    if (!dmoResampler.MediaObject.SupportsOutputWaveFormat(0, outputFormat))
    {
        throw new ArgumentException("Unsupported Output Stream format", nameof(outputFormat));
    }

    dmoResampler.MediaObject.SetOutputWaveFormat(0, outputFormat);
    if (inputStream != null)
    {
        position = InputToOutputPosition(inputStream.Position);
    }

    inputMediaBuffer = new MediaBuffer(inputProvider.WaveFormat.AverageBytesPerSecond);
    outputBuffer = new DmoOutputDataBuffer(outputFormat.AverageBytesPerSecond);
}
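A minimal usage sketch for this constructor (NAudio.Wave), assuming a WAV file on disk and a target format the DMO resampler supports; both file paths are placeholders.

// Sketch: resample a WAV file to 48 kHz IEEE float via ResamplerDmoStream.
// NAudio.Wave: WaveFileReader, ResamplerDmoStream, WaveFormat, WaveFileWriter.
using (var reader = new WaveFileReader("input.wav"))
{
    var targetFormat = WaveFormat.CreateIeeeFloatWaveFormat(48000, reader.WaveFormat.Channels);
    using (var resampled = new ResamplerDmoStream(reader, targetFormat))
    {
        // CreateWaveFile pulls the whole stream through the resampler and writes it out.
        WaveFileWriter.CreateWaveFile("output48k.wav", resampled);
    }
}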
private bool IsResamplerConversionSupported(WaveFormat from, WaveFormat to)
{
    DmoResampler dmoResampler = new DmoResampler();
    // need to set an input format before we can query output format support
    dmoResampler.MediaObject.SetInputWaveFormat(0, from);
    return dmoResampler.MediaObject.SupportsOutputWaveFormat(0, to);
}
private WaveFormat SetupWaveFormat(WaveFormat waveFormat, AudioClient audioClient)
{
    WaveFormatExtensible closestMatch;
    WaveFormat finalFormat = waveFormat;
    if (!audioClient.IsFormatSupported(_shareMode, waveFormat, out closestMatch))
    {
        if (closestMatch == null)
        {
            WaveFormat mixformat = audioClient.GetMixFormat();
            if (mixformat == null || !audioClient.IsFormatSupported(_shareMode, mixformat))
            {
                WaveFormatExtensible[] possibleFormats =
                {
                    new WaveFormatExtensible(waveFormat.SampleRate, 32, waveFormat.Channels, DMO.MediaTypes.MEDIATYPE_IeeeFloat),
                    new WaveFormatExtensible(waveFormat.SampleRate, 24, waveFormat.Channels, DMO.MediaTypes.MEDIATYPE_Pcm),
                    new WaveFormatExtensible(waveFormat.SampleRate, 16, waveFormat.Channels, DMO.MediaTypes.MEDIATYPE_Pcm),
                    new WaveFormatExtensible(waveFormat.SampleRate, 8, waveFormat.Channels, DMO.MediaTypes.MEDIATYPE_Pcm)
                };
                if (!CheckForSupportedFormat(audioClient, possibleFormats, out mixformat))
                {
                    //no format with the source channel count found ... fall back to stereo and mono
                    possibleFormats = new WaveFormatExtensible[]
                    {
                        new WaveFormatExtensible(waveFormat.SampleRate, 32, 2, DMO.MediaTypes.MEDIATYPE_IeeeFloat),
                        new WaveFormatExtensible(waveFormat.SampleRate, 24, 2, DMO.MediaTypes.MEDIATYPE_Pcm),
                        new WaveFormatExtensible(waveFormat.SampleRate, 16, 2, DMO.MediaTypes.MEDIATYPE_Pcm),
                        new WaveFormatExtensible(waveFormat.SampleRate, 8, 2, DMO.MediaTypes.MEDIATYPE_Pcm),
                        new WaveFormatExtensible(waveFormat.SampleRate, 32, 1, DMO.MediaTypes.MEDIATYPE_IeeeFloat),
                        new WaveFormatExtensible(waveFormat.SampleRate, 24, 1, DMO.MediaTypes.MEDIATYPE_Pcm),
                        new WaveFormatExtensible(waveFormat.SampleRate, 16, 1, DMO.MediaTypes.MEDIATYPE_Pcm),
                        new WaveFormatExtensible(waveFormat.SampleRate, 8, 1, DMO.MediaTypes.MEDIATYPE_Pcm)
                    };
                    //throw only if none of the fallback formats is supported either
                    if (!CheckForSupportedFormat(audioClient, possibleFormats, out mixformat))
                    {
                        throw new NotSupportedException("Could not find a supported format.");
                    }
                }
            }

            finalFormat = mixformat;
            //todo: implement channel matrix
            DmoResampler resampler = new DmoResampler(_source, finalFormat);
            resampler.Quality = 60;
            _source = resampler;
            _createdResampler = true;
        }
        else
        {
            finalFormat = closestMatch;
        }
    }

    return finalFormat;
}
//IIqProcessor
/// <summary>Initializes the resampler for processing data from the specified source.</summary>
/// <param name="source">The data source.</param>
public void Initialize(ISampleStream source)
{
    Format = new SignalFormat(source.Format);
    Format.SamplingRate = outputSamplingRate;
    var wrapper = new SampleSourceWrapper(source).ToWaveSource();
    dmo = new DmoResampler(wrapper, outputSamplingRate);
    dmo.Quality = Quality;
    resampler = dmo.ToSampleSource();
}
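After Initialize, samples would typically be pulled through the CSCore ISampleSource returned by ToSampleSource(). A minimal sketch under those assumptions; the buffer size is arbitrary and ProcessSamples is a hypothetical downstream stage.

// Sketch: drain resampled 32-bit float samples from the CSCore ISampleSource.
// ISampleSource.Read(float[], int, int) returns the number of samples actually read.
float[] buffer = new float[4096];
int read;
while ((read = resampler.Read(buffer, 0, buffer.Length)) > 0)
{
    // hand the first 'read' samples to the downstream processing stage (hypothetical helper)
    ProcessSamples(buffer, read);
}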
public void ResamplerCanCallProcessInput()
{
    DmoResampler dmoResampler = new DmoResampler();
    dmoResampler.MediaObject.SetInputWaveFormat(0, WaveFormat.CreateIeeeFloatWaveFormat(44100, 2));
    dmoResampler.MediaObject.SetOutputWaveFormat(0, WaveFormat.CreateIeeeFloatWaveFormat(48000, 2));
    using (MediaBuffer buffer = new MediaBuffer(44100 * 2 * 4))
    {
        buffer.Length = 8000;
        dmoResampler.MediaObject.ProcessInput(0, buffer, DmoInputDataBufferFlags.None, 0, 0);
    }
}
public void DmoResamplerTest()
{
    var source = new SineGenerator().ToWaveSource(16);
    using (DmoResampler resampler = new DmoResampler(source, 11500))
    {
        byte[] buffer = new byte[source.WaveFormat.BytesPerSecond / 2];
        if (resampler.Read(buffer, 0, buffer.Length) != buffer.Length)
        {
            throw new Exception("Could not fill the whole buffer");
        }
    }
}
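The same CSCore pipeline can also be written to disk. A sketch only, assuming CSCore's WaveWriter (CSCore.Codecs.WAV) is available; the output path is a placeholder.

// Sketch: resample roughly one second of a generated sine to 11.5 kHz and save it as WAV.
var source = new SineGenerator().ToWaveSource(16);
using (var resampler = new DmoResampler(source, 11500))
using (var writer = new WaveWriter("sine_11k5.wav", resampler.WaveFormat))
{
    byte[] buffer = new byte[resampler.WaveFormat.BytesPerSecond]; // about one second of output
    int read = resampler.Read(buffer, 0, buffer.Length);
    writer.Write(buffer, 0, read);
}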
public void CanExamineOutputTypesOnResampler()
{
    DmoResampler dmoResampler = new DmoResampler();
    Assert.AreEqual(dmoResampler.MediaObject.OutputStreamCount, 1);
    foreach (DmoMediaType mediaType in dmoResampler.MediaObject.GetOutputTypes(0))
    {
        Debug.WriteLine(String.Format("{0}:{1}:{2}",
            mediaType.MajorTypeName, mediaType.SubTypeName, mediaType.FormatTypeName));
    }
}
private bool DetectWindowsN()
{
    // Windows N editions ship without the media components the DMO resampler needs,
    // so constructing it throws and we can fall back to a managed resampler.
    try
    {
        var dmoResampler = new DmoResampler();
        dmoResampler.Dispose();
        return false;
    }
    catch (Exception)
    {
        return true;
    }
}
private bool DetectWindowsN()
{
    try
    {
        var dmoResampler = new DmoResampler();
        dmoResampler.Dispose();
        return false;
    }
    catch (Exception)
    {
        Logger.Warn("Windows N Detected - using inbuilt resampler");
        return true;
    }
}
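One way such a check is typically used (a hedged sketch, not taken from the snippets above): fall back to NAudio's managed WdlResamplingSampleProvider when the DMO resampler is unavailable. Here sourceProvider and the 48 kHz target are hypothetical inputs.

// Sketch: choose a resampling path based on DetectWindowsN().
// NAudio.Wave.SampleProviders.WdlResamplingSampleProvider; NAudio.Wave.ResamplerDmoStream.
ISampleProvider resampled;
if (DetectWindowsN())
{
    // Managed resampler, no DMO / Media Foundation dependency.
    resampled = new WdlResamplingSampleProvider(sourceProvider, 48000);
}
else
{
    // DMO-based path, e.g. via ResamplerDmoStream over a wave provider.
    var dmoStream = new ResamplerDmoStream(sourceProvider.ToWaveProvider(),
        WaveFormat.CreateIeeeFloatWaveFormat(48000, sourceProvider.WaveFormat.Channels));
    resampled = dmoStream.ToSampleProvider();
}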
protected override void Dispose(bool disposing)
{
    if (this.inputMediaBuffer != null)
    {
        this.inputMediaBuffer.Dispose();
        this.inputMediaBuffer = null;
    }

    this.outputBuffer.Dispose();
    if (this.dmoResampler != null)
    {
        this.dmoResampler = null;
    }

    base.Dispose(disposing);
}
public void ResamplerCanGetInputAndOutputBufferSizes()
{
    DmoResampler dmoResampler = new DmoResampler();
    dmoResampler.MediaObject.SetInputWaveFormat(0, WaveFormat.CreateIeeeFloatWaveFormat(44100, 2));
    dmoResampler.MediaObject.SetOutputWaveFormat(0, WaveFormat.CreateIeeeFloatWaveFormat(48000, 2));
    MediaObjectSizeInfo inputSizeInfo = dmoResampler.MediaObject.GetInputSizeInfo(0);
    Assert.IsNotNull(inputSizeInfo, "Input Size Info");
    Debug.WriteLine(inputSizeInfo.ToString());
    MediaObjectSizeInfo outputSizeInfo = dmoResampler.MediaObject.GetOutputSizeInfo(0);
    Assert.IsNotNull(outputSizeInfo, "Output Size Info");
    Debug.WriteLine(outputSizeInfo.ToString());
}
/// <summary>
/// Dispose
/// </summary>
/// <param name="disposing">True if disposing (not from finalizer)</param>
protected override void Dispose(bool disposing)
{
    if (inputMediaBuffer != null)
    {
        inputMediaBuffer.Dispose();
        inputMediaBuffer = null;
    }

    outputBuffer.Dispose();
    if (dmoResampler != null)
    {
        //resampler.Dispose();
        dmoResampler = null;
    }

    base.Dispose(disposing);
}
public void ResamplerCanCallProcessOutput()
{
    DmoResampler dmoResampler = new DmoResampler();
    WaveFormat inputFormat = WaveFormat.CreateIeeeFloatWaveFormat(44100, 2);
    WaveFormat outputFormat = WaveFormat.CreateIeeeFloatWaveFormat(48000, 2);
    dmoResampler.MediaObject.SetInputWaveFormat(0, inputFormat);
    dmoResampler.MediaObject.SetOutputWaveFormat(0, outputFormat);
    dmoResampler.MediaObject.AllocateStreamingResources();
    using (MediaBuffer inputBuffer = new MediaBuffer(inputFormat.AverageBytesPerSecond))
    {
        inputBuffer.Length = inputFormat.AverageBytesPerSecond / 10;
        Debug.WriteLine(String.Format("Input Length {0}", inputBuffer.Length));
        dmoResampler.MediaObject.ProcessInput(0, inputBuffer, DmoInputDataBufferFlags.None, 0, 0);
        Debug.WriteLine(String.Format("Input Length {0}", inputBuffer.Length));
        Debug.WriteLine(String.Format("Input Lookahead {0}", dmoResampler.MediaObject.GetInputSizeInfo(0).MaxLookahead));
        //Debug.WriteLine(String.Format("Input Max Latency {0}", resampler.MediaObject.GetInputMaxLatency(0)));
        using (DmoOutputDataBuffer outputBuffer = new DmoOutputDataBuffer(outputFormat.AverageBytesPerSecond))
        {
            // one buffer for each output stream
            dmoResampler.MediaObject.ProcessOutput(DmoProcessOutputFlags.None, 1, new DmoOutputDataBuffer[] { outputBuffer });
            Debug.WriteLine(String.Format("Converted length: {0}", outputBuffer.Length));
            Debug.WriteLine(String.Format("Converted flags: {0}", outputBuffer.StatusFlags));
            //Assert.AreEqual((int)(inputBuffer.Length * 48000.0 / inputFormat.SampleRate), outputBuffer.Length, "Converted buffer length");
        }

        using (DmoOutputDataBuffer outputBuffer = new DmoOutputDataBuffer(48000 * 2 * 4))
        {
            // one buffer for each output stream
            dmoResampler.MediaObject.ProcessOutput(DmoProcessOutputFlags.None, 1, new DmoOutputDataBuffer[] { outputBuffer });
            Debug.WriteLine(String.Format("Converted length: {0}", outputBuffer.Length));
            Debug.WriteLine(String.Format("Converted flags: {0}", outputBuffer.StatusFlags));
            //Assert.AreEqual((int)(inputBuffer.Length * 48000.0 / inputFormat.SampleRate), outputBuffer.Length, "Converted buffer length");
        }
    }

    dmoResampler.MediaObject.FreeStreamingResources();
}
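In a streaming loop, ProcessOutput is usually called repeatedly until the DMO stops flagging its output as incomplete. A hedged sketch of that pattern, assuming the formats and streaming resources have been set up as in the test above and that converted bytes can be copied out with DmoOutputDataBuffer.RetrieveData (the helper NAudio's own ResamplerDmoStream uses).

// Sketch: keep asking for converted data while the DMO reports more output pending.
byte[] converted = new byte[outputFormat.AverageBytesPerSecond];
bool moreData = true;
while (moreData)
{
    var buffers = new[] { new DmoOutputDataBuffer(converted.Length) };
    dmoResampler.MediaObject.ProcessOutput(DmoProcessOutputFlags.None, 1, buffers);
    if (buffers[0].Length > 0)
    {
        // Copies the converted bytes out of the DMO buffer (assumed to behave as in NAudio).
        buffers[0].RetrieveData(converted, 0);
    }

    // DMO_OUTPUT_DATA_BUFFERF_INCOMPLETE means another ProcessOutput call is needed.
    moreData = (buffers[0].StatusFlags & DmoOutputDataBufferFlags.Incomplete) != 0;
    buffers[0].Dispose();
}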
public void CanCreateResamplerMediaObject()
{
    DmoResampler dmoResampler = new DmoResampler();
}
private WaveFormat SetupWaveFormat(WaveFormat waveFormat, AudioClient audioClient)
{
    WaveFormatExtensible closestMatch;
    WaveFormat finalFormat = waveFormat;
    if (!audioClient.IsFormatSupported(_shareMode, waveFormat, out closestMatch))
    {
        if (closestMatch == null)
        {
            WaveFormat mixformat = audioClient.GetMixFormat();
            if (mixformat == null || !audioClient.IsFormatSupported(_shareMode, mixformat))
            {
                WaveFormatExtensible[] possibleFormats =
                {
                    new WaveFormatExtensible(waveFormat.SampleRate, 32, waveFormat.Channels, DMO.MediaTypes.MEDIATYPE_IeeeFloat),
                    new WaveFormatExtensible(waveFormat.SampleRate, 24, waveFormat.Channels, DMO.MediaTypes.MEDIATYPE_Pcm),
                    new WaveFormatExtensible(waveFormat.SampleRate, 16, waveFormat.Channels, DMO.MediaTypes.MEDIATYPE_Pcm),
                    new WaveFormatExtensible(waveFormat.SampleRate, 8, waveFormat.Channels, DMO.MediaTypes.MEDIATYPE_Pcm)
                };
                if (!CheckForSupportedFormat(audioClient, possibleFormats, out mixformat))
                {
                    //no format with the source channel count found ... fall back to stereo and mono
                    possibleFormats = new WaveFormatExtensible[]
                    {
                        new WaveFormatExtensible(waveFormat.SampleRate, 32, 2, DMO.MediaTypes.MEDIATYPE_IeeeFloat),
                        new WaveFormatExtensible(waveFormat.SampleRate, 24, 2, DMO.MediaTypes.MEDIATYPE_Pcm),
                        new WaveFormatExtensible(waveFormat.SampleRate, 16, 2, DMO.MediaTypes.MEDIATYPE_Pcm),
                        new WaveFormatExtensible(waveFormat.SampleRate, 8, 2, DMO.MediaTypes.MEDIATYPE_Pcm),
                        new WaveFormatExtensible(waveFormat.SampleRate, 32, 1, DMO.MediaTypes.MEDIATYPE_IeeeFloat),
                        new WaveFormatExtensible(waveFormat.SampleRate, 24, 1, DMO.MediaTypes.MEDIATYPE_Pcm),
                        new WaveFormatExtensible(waveFormat.SampleRate, 16, 1, DMO.MediaTypes.MEDIATYPE_Pcm),
                        new WaveFormatExtensible(waveFormat.SampleRate, 8, 1, DMO.MediaTypes.MEDIATYPE_Pcm)
                    };
                    //throw only if none of the fallback formats is supported either
                    if (!CheckForSupportedFormat(audioClient, possibleFormats, out mixformat))
                    {
                        throw new NotSupportedException("Could not find a supported format.");
                    }
                }
            }

            finalFormat = mixformat;
            //todo: implement channel matrix
            DmoResampler resampler = new DmoResampler(_source, finalFormat);
            resampler.Quality = 60;
            _source = resampler;
            _createdResampler = true;
        }
        else
        {
            finalFormat = closestMatch;
        }
    }

    return finalFormat;
}
private bool IsResamplerInputFormatSupported(WaveFormat waveFormat)
{
    DmoResampler dmoResampler = new DmoResampler();
    return dmoResampler.MediaObject.SupportsInputWaveFormat(0, waveFormat);
}
private void InitializeInternal()
{
    Debug.WriteLine("Initialize, thread id: " + Thread.CurrentThread.ManagedThreadId);
    _callbackThread = null;
    var supportedFormats = new Queue<WaveFormat>(Device.SupportedFormats
        .OrderBy(x => Math.Abs(x.SampleRate - _source.WaveFormat.SampleRate))
        .ThenBy(x => Math.Abs(x.BitsPerSample - _source.WaveFormat.BitsPerSample))
        .ThenBy(x => Math.Abs(x.Channels - _source.WaveFormat.Channels)));
    var finalFormat = _source.WaveFormat;
    do
    {
        try
        {
            _waveOutHandle = CreateWaveOutHandle(finalFormat);
        }
        catch (MmException exception)
        {
            if (exception.Result == MmResult.BadFormat && supportedFormats.Count > 0)
            {
                finalFormat = supportedFormats.Dequeue();
            }
            else if (exception.Result == MmResult.BadFormat && supportedFormats.Count == 0)
            {
                throw new Exception("No valid format could be found.", exception);
            }
            else
            {
                throw;
            }
        }
    } while (_waveOutHandle == IntPtr.Zero);

    if (finalFormat != _source.WaveFormat)
    {
        //the original format of the source is not supported
        //we have to convert the source
        //todo: test channel matrix conversion
        ChannelMatrix channelMatrix = null;
        if (UseChannelMixingMatrices)
        {
            try
            {
                channelMatrix = ChannelMatrix.GetMatrix(_source.WaveFormat, finalFormat);
            }
            catch (Exception)
            {
                Debug.WriteLine("No channelmatrix was found.");
            }
        }

        DmoResampler resampler = channelMatrix != null
            ? new DmoChannelResampler(_source, channelMatrix, finalFormat)
            : new DmoResampler(_source, finalFormat);
        resampler.Quality = 60;
        _source = resampler;
    }

    _failedBuffers.Clear();
    var bufferSize = (int)WaveSource.WaveFormat.MillisecondsToBytes(_latency);
    _buffers = new WaveOutBuffer[BufferCount];
    for (int i = 0; i < _buffers.Length; i++)
    {
        _buffers[i] = new WaveOutBuffer(_waveOutHandle, bufferSize, (IntPtr)i);
    }
}
private WaveFormat SetupWaveFormat(IWaveSource source, AudioClient audioClient)
{
    WaveFormat waveFormat = source.WaveFormat;
    WaveFormat closestMatch;
    WaveFormat finalFormat = waveFormat;
    if (!audioClient.IsFormatSupported(_shareMode, waveFormat, out closestMatch))
    {
        if (closestMatch == null)
        {
            WaveFormat mixformat = audioClient.GetMixFormat();
            if (mixformat == null || !audioClient.IsFormatSupported(_shareMode, mixformat))
            {
                WaveFormatExtensible[] possibleFormats =
                {
                    new WaveFormatExtensible(waveFormat.SampleRate, 32, waveFormat.Channels, AudioSubTypes.IeeeFloat),
                    new WaveFormatExtensible(waveFormat.SampleRate, 24, waveFormat.Channels, AudioSubTypes.Pcm),
                    new WaveFormatExtensible(waveFormat.SampleRate, 16, waveFormat.Channels, AudioSubTypes.Pcm),
                    new WaveFormatExtensible(waveFormat.SampleRate, 8, waveFormat.Channels, AudioSubTypes.Pcm),
                    new WaveFormatExtensible(waveFormat.SampleRate, 32, 2, AudioSubTypes.IeeeFloat),
                    new WaveFormatExtensible(waveFormat.SampleRate, 24, 2, AudioSubTypes.Pcm),
                    new WaveFormatExtensible(waveFormat.SampleRate, 16, 2, AudioSubTypes.Pcm),
                    new WaveFormatExtensible(waveFormat.SampleRate, 8, 2, AudioSubTypes.Pcm),
                    new WaveFormatExtensible(waveFormat.SampleRate, 32, 1, AudioSubTypes.IeeeFloat),
                    new WaveFormatExtensible(waveFormat.SampleRate, 24, 1, AudioSubTypes.Pcm),
                    new WaveFormatExtensible(waveFormat.SampleRate, 16, 1, AudioSubTypes.Pcm),
                    new WaveFormatExtensible(waveFormat.SampleRate, 8, 1, AudioSubTypes.Pcm)
                };
                if (!CheckForSupportedFormat(audioClient, possibleFormats, out mixformat))
                {
                    throw new NotSupportedException("Could not find a supported format.");
                }
            }

            finalFormat = mixformat;
        }
        else
        {
            finalFormat = closestMatch;
        }

        //todo: test channel matrix conversion
        ChannelMatrix channelMatrix = null;
        if (UseChannelMixingMatrices)
        {
            try
            {
                channelMatrix = ChannelMatrix.GetMatrix(_source.WaveFormat, finalFormat);
            }
            catch (Exception)
            {
                Debug.WriteLine("No channelmatrix was found.");
            }
        }

        DmoResampler resampler = channelMatrix != null
            ? new DmoChannelResampler(_source, channelMatrix, finalFormat)
            : new DmoResampler(_source, finalFormat);
        resampler.Quality = 60;
        _source = resampler;
        _createdResampler = true;

        return finalFormat;
    }

    return finalFormat;
}
private void output_to_file()
{
    // Read the recorded WAV file and resample it to 'sampleRate' with the DMO resampler.
    streamReader = new WaveFileReader(textOutputFile.Text);
    Console.WriteLine(streamReader.WaveFormat);
    resampler = new DmoResampler(streamReader, sampleRate);
    //resampler = new DmoResampler(streamReader, new WaveFormat(sampleRate, 16, capture.WaveFormat.Channels));

    // Dump the resampled samples as tab-separated text next to the original file
    // (assumes 32-bit float stereo data, i.e. 8 bytes per frame).
    byte[] array = new byte[bufsize];
    sw = new StreamWriter(textOutputFile.Text.Insert(textOutputFile.Text.Length - 4, "_resample"));
    while (true)
    {
        int rdcnt = resampler.Read(array, 0, bufsize);
        if (rdcnt <= 0)
        {
            break;
        }

        // only write the bytes that were actually read on this pass
        for (int i = 0; i < rdcnt; i += 8)
        {
            sw.WriteLine(BitConverter.ToSingle(array, i) + "\t" + BitConverter.ToSingle(array, i + 4));
        }
    }

    resampler.Dispose();
    streamReader.Dispose();
    sw.Dispose();

    // Re-read the original file and overwrite it with its samples as text for comparison.
    streamReader = new WaveFileReader(textOutputFile.Text);
    array = new byte[streamReader.Length];
    streamReader.Read(array, 0, (int)streamReader.Length);
    streamReader.Dispose();
    sw = new StreamWriter(textOutputFile.Text);
    for (int i = 0; i < array.Length; i += 8)
    {
        sw.WriteLine(BitConverter.ToSingle(array, i) + "\t" + BitConverter.ToSingle(array, i + 4));
    }

    sw.Dispose();
}
private WaveFormat SetupWaveFormat(IWaveSource source, AudioClient audioClient)
{
    WaveFormat waveFormat = source.WaveFormat;
    WaveFormat closestMatch;
    WaveFormat finalFormat = waveFormat;
    //check whether initial format is supported
    if (!audioClient.IsFormatSupported(_shareMode, waveFormat, out closestMatch))
    {
        //initial format is not supported -> maybe there was some kind of close match ...
        if (closestMatch == null)
        {
            //no match ... check whether the format of the windows audio mixer is supported
            //yes ... this gets executed for shared and exclusive mode streams
            WaveFormat mixformat = audioClient.GetMixFormat();
            if (mixformat == null || !audioClient.IsFormatSupported(_shareMode, mixformat))
            {
                //mixformat is not supported
                //start generating possible formats
                mixformat = null;
                WaveFormatExtensible[] possibleFormats;
                if (_shareMode == AudioClientShareMode.Exclusive)
                {
                    //for exclusive mode streams use the DeviceFormat of the initialized MMDevice
                    //as base for further possible formats
                    var deviceFormat = Device.DeviceFormat;

                    //generate some possible formats based on the samplerate of the DeviceFormat
                    possibleFormats = GetPossibleFormats(deviceFormat.SampleRate, deviceFormat.Channels);
                    if (!CheckForSupportedFormat(audioClient, possibleFormats, out mixformat))
                    {
                        //none of the tested formats were supported
                        //try some different samplerates
                        List<WaveFormatExtensible> waveFormats = new List<WaveFormatExtensible>();
                        foreach (var sampleRate in new[] { 44100, 48000, 96000, 192000 })
                        {
                            waveFormats.AddRange(GetPossibleFormats(sampleRate, deviceFormat.Channels));
                        }

                        //assign the generated formats with samplerates 44.1kHz, 48kHz, 96kHz and 192kHz to
                        //the possibleFormats array which will be used below
                        possibleFormats = waveFormats.ToArray();
                    }
                }
                else
                {
                    //for shared mode streams, generate some formats based on the initial waveFormat
                    possibleFormats = GetPossibleFormats(waveFormat.SampleRate, waveFormat.Channels);
                }

                if (mixformat == null)
                {
                    if (!CheckForSupportedFormat(audioClient, possibleFormats, out mixformat))
                    {
                        throw new NotSupportedException("Could not find a supported format.");
                    }
                }
            }

            finalFormat = mixformat;
        }
        else
        {
            finalFormat = closestMatch;
        }

        //todo: test channel matrix conversion
        ChannelMatrix channelMatrix = null;
        if (UseChannelMixingMatrices)
        {
            try
            {
                channelMatrix = ChannelMatrix.GetMatrix(_source.WaveFormat, finalFormat);
            }
            catch (Exception)
            {
                Debug.WriteLine("No channelmatrix was found.");
            }
        }

        DmoResampler resampler = channelMatrix != null
            ? new DmoChannelResampler(_source, channelMatrix, finalFormat)
            : new DmoResampler(_source, finalFormat);
        resampler.Quality = 60;
        _source = resampler;
        _createdResampler = true;

        return finalFormat;
    }

    return finalFormat;
}
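For the channel-matrix branch above, here is a stripped-down sketch of using CSCore's DmoChannelResampler outside the WASAPI setup, assuming `source` is any IWaveSource and the target is 48 kHz stereo IEEE float; quality 60 matches the value used throughout these snippets.

// Sketch: convert an arbitrary IWaveSource to 48 kHz stereo IEEE float,
// letting a ChannelMatrix handle the channel-count change when one is available.
WaveFormat targetFormat = new WaveFormatExtensible(48000, 32, 2, AudioSubTypes.IeeeFloat);

ChannelMatrix matrix = null;
try
{
    matrix = ChannelMatrix.GetMatrix(source.WaveFormat, targetFormat);
}
catch (Exception)
{
    // no predefined matrix for this channel combination; fall back to plain resampling
}

DmoResampler converted = matrix != null
    ? new DmoChannelResampler(source, matrix, targetFormat)
    : new DmoResampler(source, targetFormat);
converted.Quality = 60; // same quality setting as the snippets above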