Code Example #1
File: Audio.cs Project: tingzhushaohua/facefusion
        private void WriteSamplesToBuffer(float[] buffer, int offset, int samplesRead)
        {
            if (isBackwardsLoop)
            {
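                // Reverse playback: walk the overlap buffer two samples (one presumed stereo frame)
                // at a time from the end, keeping the left/right order within each frame, and write
                // each sample to both the output buffer and the file writer.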
                for (int i = 0; i < samplesRead - 1; i += 2)
                {
                    int j = samplesRead - 2 - i;

                    float sample = overlapBuffer[j];
                    buffer[i + offset] = sample;
                    writer.WriteSample(sample);

                    sample = overlapBuffer[j + 1];
                    buffer[i + offset + 1] = sample;
                    writer.WriteSample(sample);
                }
            }
            else
            {
                for (int i = 0; i < samplesRead; i++)
                {
                    buffer[i + offset] = overlapBuffer[i];
                }

                writer.WriteSamples(overlapBuffer, 0, samplesRead);
            }
        }
Code Example #2
        public string SaveWavFile(AudioData[,] audioDataMap)
        {
            _sampleProvider.Data = audioDataMap;
            //string path = @"D:\UNIVERSITY\400\ENSE 479 Sound Art\test.wav";
            string path = _fileName.Substring(0, _fileName.LastIndexOf(".", StringComparison.Ordinal));

            switch (_mode)
            {
            case ProcessingMode.Brightness:
                path += "_Brightness.wav";
                break;

            case ProcessingMode.Darkness:
                path += "_Darkness.wav";
                break;
            }

            //Write each sample to file
            using (WaveFileWriter writer = new WaveFileWriter(path, _sampleProvider.WaveFormat))
            {
                var samples = _sampleProvider.GetSamples();
                foreach (float sample in samples)
                {
                    writer.WriteSample(sample);
                }
            }

            return(path);
        }
Code Example #3
File: Program.cs Project: way-zer/my-public-project
 public void AddEmpty(float time)
 {
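     // Write `time` seconds of silence as zero-valued samples
     // (assumes one sample per sample-rate tick, i.e. a mono stream).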
     for (var i = 0; i < _config.SampleRate * time; i++)
     {
         _writer.WriteSample(0);
     }
 }
Code Example #4
File: MergeService.cs Project: jlleonr/mp3merger
        /// <summary>
        /// Merge two mp3 files and put the merged content in the specified path.
        /// </summary>
        /// <param name="fileName1"></param>
        /// <param name="fileName2"></param>
        /// <param name="outputDirectory"></param>
        public static void Mp3Merge(string fileName1, string fileName2, string outputDirectory)
        {
            var fileA = new AudioFileReader(fileName1);

            // Calculate our buffer size, since we're normalizing our samples to floats
            // we'll need to account for that by dividing the file's audio byte count
            // by its bit depth / 8.
            var bufferA = new float[fileA.Length / (fileA.WaveFormat.BitsPerSample / 8)];

            // Now let's populate our buffer with samples.
            fileA.Read(bufferA, 0, bufferA.Length);

            // Do it all over again for the other file.
            var fileB   = new AudioFileReader(fileName2);
            var bufferB = new float[fileB.Length / (fileB.WaveFormat.BitsPerSample / 8)];

            fileB.Read(bufferB, 0, bufferB.Length);

            // Determine the length of the longer buffer
            var maxLen = (long)Math.Max(bufferA.Length, bufferB.Length);
            var final  = new byte[maxLen];

            // For now, mix data to a wav file.
            using (MemoryStream ms = new MemoryStream())
            {
                var writer = new WaveFileWriter(ms, fileA.WaveFormat);

                for (var i = 0; i < maxLen; i++)
                {
                    float a, b;

                    if (i < bufferA.Length)
                    {
                        // Halving the sample (/ 2) to avoid clipping is disabled here,
                        // so the mixed sum written below can exceed [-1.0, 1.0].
                        a = bufferA[i];// / 2;
                    }
                    else
                    {
                        a = 0;
                    }

                    if (i < bufferB.Length)
                    {
                        b = bufferB[i];// / 2;
                    }
                    else
                    {
                        b = 0;
                    }
                    writer.WriteSample(a + b);
                }
                writer.Flush();     // flush once after the loop; flushing every sample is very slow
                ms.Seek(0, SeekOrigin.Begin);
                final = ms.ToArray();
            }
            ConvertWavStreamToMp3File(outputDirectory, final);
            MessageBox.Show("Finished");
        }
Code Example #5
        public void CanAccessSamplesIndividuallyInAMonoFile()
        {
            var ms = new MemoryStream();

            using (var writer = new WaveFileWriter(new IgnoreDisposeStream(ms), new WaveFormat(8000, 16, 1)))
            {
                writer.WriteSample(0.1f);
                writer.WriteSample(0.2f);
                writer.WriteSample(0.3f);
                writer.WriteSample(0.4f);
            }
            ms.Position = 0;
            using (var reader = new WaveFileReader(ms))
            {
                Assert.AreEqual(0.1f, reader.ReadNextSampleFrame()[0], 0.001f);
                Assert.AreEqual(0.2f, reader.ReadNextSampleFrame()[0], 0.001f);
                Assert.AreEqual(0.3f, reader.ReadNextSampleFrame()[0], 0.001f);
                Assert.AreEqual(0.4f, reader.ReadNextSampleFrame()[0], 0.001f);
                Assert.IsNull(reader.ReadNextSampleFrame());
            }
        }
Code Example #6
        /// ------------------------------------------------------------------------------------
        private void WriteAudioStreamToChannel(AnnotationChannel channel, WaveStream inputStream)
        {
            var silentBlocksForOrig = new float[_srcRecStreamProvider.Stream.WaveFormat.Channels];
            var blocksRead          = 0;
            var totalBlocks         = inputStream.Length / inputStream.WaveFormat.BlockAlign;
            var provider            = new SampleChannel(inputStream);
            var buffer = new float[provider.WaveFormat.Channels];

            while (provider.Read(buffer, 0, provider.WaveFormat.Channels) > 0 && blocksRead < totalBlocks)
            {
                blocksRead += 1;
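                // Each output frame is laid out as: the source recording's channels, then one
                // "careful" annotation sample, then one "translation" sample; silence is written
                // to whichever channels this call is not filling.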

                switch (channel)
                {
                case AnnotationChannel.Source:
                    _audioFileWriter.WriteSamples(buffer, 0, _srcRecStreamProvider.Stream.WaveFormat.Channels);
                    _audioFileWriter.WriteSample(0f);
                    _audioFileWriter.WriteSample(0f);
                    break;

                case AnnotationChannel.Careful:
                    _audioFileWriter.WriteSamples(silentBlocksForOrig, 0, silentBlocksForOrig.Length);
                    _audioFileWriter.WriteSample(buffer[0]);
                    _audioFileWriter.WriteSample(0f);
                    break;

                case AnnotationChannel.Translation:
                    _audioFileWriter.WriteSamples(silentBlocksForOrig, 0, silentBlocksForOrig.Length);
                    _audioFileWriter.WriteSample(0f);
                    _audioFileWriter.WriteSample(buffer[0]);
                    break;
                }
            }
        }
Code Example #7
File: MusicForm.cs Project: RugnirViking/RCT2Tools
        private void ChangeVolume(object sender, EventArgs e)
        {
            if (this.listViewSongs.SelectedItems.Count > 0)
            {
                if (this.listViewSongs.SelectedItems[0].Group != this.listViewSongs.Groups["customMusic"])
                {
                    using (VolumeMessageBox messageBox = new VolumeMessageBox(this.lastVolume)) {
                        if (messageBox.ShowDialog(this) == DialogResult.OK)
                        {
                            this.lastVolume = messageBox.NewVolume;
                            SaveSettings(null, null);

                            if (this.listViewSongs.SelectedItems[0].ImageIndex == 1)
                            {
                                this.listViewSongs.SelectedItems[0].ImageIndex = 0;
                                this.currentSong.Stop();
                                this.playing         = false;
                                this.playingCustom   = false;
                                this.buttonPlay.Text = "Play";
                            }
                            if (this.lastVolume == 100)
                            {
                                if (File.Exists(Path.Combine(Path.GetDirectoryName(Assembly.GetEntryAssembly().Location), "Custom Music", "Modified", this.listViewSongs.SelectedItems[0].SubItems[1].Text)))
                                {
                                    File.Delete(Path.Combine(Path.GetDirectoryName(Assembly.GetEntryAssembly().Location), "Custom Music", "Modified", this.listViewSongs.SelectedItems[0].SubItems[1].Text));
                                }

                                this.listViewSongs.SelectedItems[0].SubItems[3].Text = "";
                            }
                            else
                            {
                                using (WaveFileReader reader = new WaveFileReader(Path.Combine(Path.GetDirectoryName(Assembly.GetEntryAssembly().Location), "Custom Music", this.listViewSongs.SelectedItems[0].SubItems[1].Text))) {
                                    using (WaveFileWriter writer = new WaveFileWriter(Path.Combine(Path.GetDirectoryName(Assembly.GetEntryAssembly().Location), "Custom Music", "Modified", this.listViewSongs.SelectedItems[0].SubItems[1].Text), reader.WaveFormat)) {
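                                        // Copy the source file frame-by-frame, scaling every sample
                                        // by the chosen volume percentage before writing the modified copy.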
                                        float[] samples = reader.ReadNextSampleFrame();
                                        while (samples != null)
                                        {
                                            for (int i = 0; i < samples.Length; i++)
                                            {
                                                writer.WriteSample(samples[i] * ((float)this.lastVolume / 100.0f));
                                            }
                                            samples = reader.ReadNextSampleFrame();
                                        }
                                    }
                                }

                                this.listViewSongs.SelectedItems[0].SubItems[3].Text = "*";
                            }
                        }
                    }
                }
            }
        }
Code Example #8
File: WaveFileWriterTests.cs Project: tabjin0/audio
        public void CanUseWriteSampleToA16BitFile()
        {
            float amplitude = 0.25f;
            float frequency = 1000;

            using (var writer = new WaveFileWriter(new MemoryStream(), new WaveFormat(16000, 16, 1)))
            {
                for (int n = 0; n < 1000; n++)
                {
                    var sample = (float)(amplitude * Math.Sin((2 * Math.PI * n * frequency) / writer.WaveFormat.SampleRate));
                    writer.WriteSample(sample);
                }
            }
        }
Code Example #9
        public static int writeData(int sampleRate, string filename, double[] audioData)
        {
            var waveFormat = new WaveFormat(sampleRate, 16, 1);

            using (var writer = new WaveFileWriter(filename, waveFormat))
            {
                foreach (var sample in audioData)
                {
                    writer.WriteSample((float)sample);
                }
            }

            return(0);
        }
Code Example #10
        public void CanAccessSamplesIndividuallyInAStereo24BitFile()
        {
            var ms = new MemoryStream();

            using (var writer = new WaveFileWriter(new IgnoreDisposeStream(ms), new WaveFormat(44100, 24, 2)))
            {
                writer.WriteSample(0.1f);
                writer.WriteSample(0.2f);
                writer.WriteSample(0.3f);
                writer.WriteSample(0.4f);
            }
            ms.Position = 0;
            using (var reader = new WaveFileReader(ms))
            {
                var f1 = reader.ReadNextSampleFrame();
                Assert.AreEqual(0.1f, f1[0], 0.0001f);
                Assert.AreEqual(0.2f, f1[1], 0.0001f);
                var f2 = reader.ReadNextSampleFrame();
                Assert.AreEqual(0.3f, f2[0], 0.0001f);
                Assert.AreEqual(0.4f, f2[1], 0.0001f);
                Assert.IsNull(reader.ReadNextSampleFrame());
            }
        }
Code Example #11
        private static void SaveChannelsToWAV(List <float>[] channels, String filename)
        {
            WaveFormat waveFormat = WaveFormat.CreateIeeeFloatWaveFormat(32000, channels.Length);

            using (WaveFileWriter writer = new WaveFileWriter(filename, waveFormat))
            {
                for (int i = 0; i < channels[0].Count; i++)
                {
                    //Left channel comes before right (same as in a WAV, so no reordering needed)
                    foreach (List <float> channel in channels)
                    {
                        writer.WriteSample(channel[i]);
                    }
                }
            }
        }
Code Example #12
        public void StartRecording(string filename)
        {
            CaptureInstance     = new WasapiLoopbackCapture();
            RecordedAudioWriter = new WaveFileWriter(filename, CaptureInstance.WaveFormat);
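            // Note: WriteSample expects a float in the range [-1.0, 1.0]; writing 16000 here is
            // out of range and is almost certainly unintended.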
            RecordedAudioWriter.WriteSample(16000);

            CaptureInstance.DataAvailable += (s, a) =>
            {
                RecordedAudioWriter.Write(a.Buffer, 0, a.BytesRecorded);
            };

            CaptureInstance.RecordingStopped += (s, a) =>
            {
                RecordedAudioWriter.Dispose();
                RecordedAudioWriter = null;
                CaptureInstance.Dispose();
            };

            CaptureInstance.StartRecording();
        }
Code Example #13
File: Program.cs Project: svaza/sipsorcery_examples
        private static void RtpSession_OnRtpPacketReceived(IPEndPoint remoteEndPoint, SDPMediaTypesEnum kind, RTPPacket pkt)
        {
            //Log.LogDebug($"{kind} RTP packet received {pkt.Header.SequenceNumber}.");

            if (kind == SDPMediaTypesEnum.audio && !double.IsNaN(_ratio))    // "_ratio != double.NaN" is always true; double.IsNaN is required
            {
                var sample = pkt.Payload;
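                // Decode each µ-law byte to 16-bit PCM, scale it to [-1.0, 1.0], and write it
                // _ratio times (a simple sample-repeat upsample) to the wave file.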

                for (int index = 0; index < sample.Length; index++)
                {
                    short pcm = NAudio.Codecs.MuLawDecoder.MuLawToLinearSample(sample[index]);
                    float s16 = pcm / 32768f;

                    for (int i = 0; i < _ratio; i++)
                    {
                        _waveFile.WriteSample(s16);
                    }
                }
            }
        }
Code Example #14
        static string outputTempWaveFile(WaveData waveData, uint channel)
        {
            if (channel >= waveData.channelSamples.Length)
            {
                throw new InvalidOperationException("Channel index must be less than " + waveData.channelSamples.Length);
            }

            string filename = "temp" + channel + ".wav";

            WaveFileWriter writer = new WaveFileWriter(filename, WaveFormat.CreateCustomFormat(WaveFormatEncoding.Pcm, waveData.sampleRate, 1, waveData.sampleRate * 2, 2, 16));    // block align is 2 bytes for 16-bit mono; 0 would produce an invalid header

            float[] samples = waveData.channelSamples[channel];
            for (long i = 0; i < samples.LongLength; ++i)
            {
                writer.WriteSample(samples[i]);
            }
            writer.Close();

            return(filename);
        }
Code Example #15
        public override void Sample(ref float spl0, ref float spl1)
        {
            wf.WriteSample(spl0);
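            // Note: only the left channel (spl0) is written to the wave file; spl1 is not written here.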
            //dppos = dppos+dppossc;
            //dpback = (sin(dppos)+1)*dpbacksc;
            //dpint = delaypos-dpback-1;
            //if(dpint < 0) dpint += delaylen;

            //dpint *= 2;

            //float os1 = buffer[(int)dpint+0];
            //float os2 = buffer[(int)dpint+1];

            //dpint = delaypos*2;

            //buffer[(int)dpint+0] = spl0 + os1*wetmix;
            //buffer[(int)dpint+1] = spl1 + os2*wetmix;
            //delaypos+=1;
            //if(delaypos >= delaylen) delaypos=0;

            //spl0=spl0*drymix2 + os1*wetmix2;
            //spl1=spl1*drymix2 + os2*wetmix2;
        }
Code Example #16
        private void SaveSound(IList <double> frequencies, int soundWindowSize, bool shouldPlay)
        {
            var filename = DateTime.Now.ToString("yyyy-MM-ddTHH-mm-ss") + ".wav";

            using (WaveFileWriter writer = new WaveFileWriter(filename, new WaveFormat(44100, 1)))
            {
                float amplitude = 0.25f;
                foreach (double FREQ in frequencies)
                {
                    for (int n = 0; n < soundWindowSize; n++)
                    {
                        float sample = (float)(amplitude * Math.Sin((2 * Math.PI * n * FREQ) / writer.WaveFormat.SampleRate));
                        writer.WriteSample(sample);
                    }
                }
            }

            if (!shouldPlay)
            {
                return;
            }

            Task.Run(() =>
            {
                using (var reader = new WaveFileReader(filename))
                {
                    using (var wo = new WaveOutEvent())
                    {
                        wo.Init(reader.ToSampleProvider());
                        wo.Play();
                        while (wo.PlaybackState == PlaybackState.Playing)
                        {
                            Thread.Sleep(100);  // poll instead of spinning a core while playback finishes
                        }
                    }
                }
            });
        }
Code Example #17
File: MainWindow.xaml.cs Project: greg3d/EarTrainer
        private void button1_Click(object sender, RoutedEventArgs e)
        {
            settings.IsProcessing     = true;
            progBarMessage.Visibility = Visibility.Visible;

            settings.StartEnabled   = false;
            settings.ProcessEnabled = false;

            var calculate = Task.Factory.StartNew(() =>
            {
                prgrs.Report(0);

                audioFile = new AudioFileReader(settings.InputFile);
                audioFile.ToStereo();   // note: ToStereo() returns a new provider; the result is discarded here, so this call has no effect

                var n = audioFile.Length / (audioFile.WaveFormat.BitsPerSample / 8) / audioFile.WaveFormat.Channels;

                var outFormat = new WaveFormat(settings.SampleRate, 2);
                ISampleProvider wave;

                // The resampler must stay alive while `wave` is read below,
                // so it is not wrapped in a using block here.
                var resampler = new MediaFoundationResampler(audioFile, outFormat);
                wave = resampler.ToSampleProvider();

                //MessageBox.Show(audioFile.WaveFormat.Channels.ToString());

                //audioFile.ToMono();
                //audioFile.ToStereo();

                var bits = wave.WaveFormat.BitsPerSample;
                var Fs   = wave.WaveFormat.SampleRate;

                //MessageBox.Show(Fs.ToString());

                var channels = wave.WaveFormat.Channels;

                //string outpath = @"E:\states.dat";
                string outwav = settings.OutputFile;

                float FS = settings.SampleRate;

                prgrs.Report(10);
                Thread.Sleep(200);

                using (WaveFileWriter outWavFile = new WaveFileWriter(outwav, new WaveFormat((int)FS, 16, 2)))
                {
                    float period    = settings.Period; // ms
                    double fullPath = period;

                    int bb    = 0;
                    int i     = 0;
                    bool mode = false;
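                    // mode flips every `period` milliseconds; while it is true the left channel is
                    // muted, otherwise the right channel is muted, alternating the audio between the ears.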

                    do
                    {
                        if (i % 1000 == 0)
                        {
                            prgrs.Report((int)(i / (float)n * 100 * 0.9f + 10f));
                        }

                        float[] samples = new float[2];
                        bb           = wave.Read(samples, 0, samples.Length);
                        var leftVal  = samples[0];
                        var rightVal = samples[1];

                        var curPath = (i * 1000f / FS);

                        if (curPath >= fullPath)
                        {
                            mode     = !mode;
                            fullPath = fullPath + period;
                        }

                        if (mode == true)
                        {
                            leftVal = 0;
                        }
                        else
                        {
                            rightVal = 0;
                        }

                        //writer.Write(leftVal);
                        //writer.Write(rightVal);

                        outWavFile.WriteSample(leftVal);
                        outWavFile.WriteSample(rightVal);

                        i++;
                    } while (bb > 0);


                    prgrs.Report(95);
                    //writer.Flush();
                    //writer.Close();
                    outWavFile.Flush();
                    prgrs.Report(100);

                    outWavFile.Close();
                }

                resampler.Dispose();    // release the resampler once the output file has been written

                //outputDevice.Init(audioFile);
            });

            calculate.GetAwaiter().OnCompleted(() =>
            {
                settings.IsProcessing     = false;
                progBarMessage.Visibility = Visibility.Hidden;

                settings.StartEnabled   = true;
                settings.ProcessEnabled = true;
                MessageBox.Show("Готово!");     // "Done!"

                prgrs.Report(0);
            });
        }
Code Example #18
        private async Task <bool> MakeAudioConfigAsync(SpeechHandler handler)
        {
            Debug.Assert(handler.Device != null);

            // NAudio Setting
            var wavein        = CreateCaptureInstance(handler.Device);
            var waveoutFormat = new WaveFormat(16000, 16, 1);

            wavein.StartRecording();

            // Azure Cognitive Service Setting
            var audioformat = AudioStreamFormat.GetWaveFormatPCM((uint)waveoutFormat.SampleRate, (byte)waveoutFormat.BitsPerSample, (byte)waveoutFormat.Channels);

            handler.AudioInputStream = AudioInputStream.CreatePushStream(audioformat);
            handler.AudioConfig      = AudioConfig.FromStreamInput(handler.AudioInputStream);

            // Silence Generate
            DateTime preEvent    = DateTime.Now;
            var      silenceData = new byte[waveoutFormat.BlockAlign];

            // Application Preparation
            Hot.SetWavFormat(DisplayName, waveoutFormat);   // for file saving

            // NAudio Voice event
            wavein.DataAvailable += (s, e) =>
            {
                if (e.BytesRecorded > 0)
                {
                    var now = DateTime.Now;
                    using (var ms = new MemoryStream())
                    {
                        var memoryWriter = new WaveFileWriter(ms, waveoutFormat);
                        ms.SetLength(0);    // Delete file header.
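                        // WaveFileWriter writes the RIFF/WAVE header as soon as it is constructed;
                        // truncating the stream here discards that header so only raw PCM is pushed below.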

                        var samples = Resample(wavein.WaveFormat, e.Buffer, e.BytesRecorded, waveoutFormat);
                        foreach (var sample in samples)
                        {
                            memoryWriter.WriteSample(sample);
                        }
                        Hot.AddWavToAllQueue(DisplayName, ms.GetBuffer(), (int)ms.Length, now); // for file saving
                        handler.AudioInputStream.Write(ms.GetBuffer(), (int)ms.Length);         // for Azure Cognitive Speech to Text
                    }
                    try
                    {
                        Token.Add(TokenWavDataQueued, this);    // TODO: Needs confirmation whether this is fixed in Tono.Gui.WinForm 1.1.2 - System.InvalidOperationException: 'Collection was modified; enumeration operation may not execute.'
                                                                // It does not appear to be fixed yet, so this call is wrapped in the try-catch.
                    }
                    catch
                    {
                        // No action: the token above is a QoS 0 message, but the exception messages need to be suppressed, which is why they are caught here.
                    }
                    preEvent = DateTime.Now;
                }
                else
                {
                    if (_talkID != null)
                    {
                        var spms = (double)waveoutFormat.SampleRate / 1000; // samples per ms
                        var n    = (int)(spms * (DateTime.Now - preEvent).TotalMilliseconds);

                        for (var i = n; i >= 0; i--)
                        {
                            handler.AudioInputStream.Write(silenceData, silenceData.Length);    // send silence to Azure to keep realtime events coming (otherwise Azure would wait until the next event timing even if there is no event for a long time)
                        }
                    }
                    preEvent = DateTime.Now;
                }
            };

            handler.StopRequested += (s, e) =>
            {
                wavein.StopRecording();     // Stop NAudio recording
            };

            return(await Task.FromResult(true));
        }
Code Example #19
 public void AddSample(AudioFrame <short> frame)
 {
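     // Note: WriteSample expects floats in [-1.0, 1.0]; if frame.Left/Right are full-range
     // 16-bit values they should be divided by 32768f before being written.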
     _waveFile.WriteSample(frame.Left);
     _waveFile.WriteSample(frame.Right);
 }
Code Example #20
        /// <summary>
        /// Combine two stereo files to one quad file
        /// </summary>
        /// <param name="filePathLeft">file path to the left stereo file</param>
        /// <param name="filePathRight">file path to the right stereo file</param>
        /// <param name="combinedFileNamePath">file path to the combined quad file</param>
        /// <returns></returns>
        public static bool CombineStereoToQuad(string filePathLeft, string filePathRight, string combinedFileNamePath)
        {
            WaveFormat waveFormatLeft  = GetWaveFormat(filePathLeft);
            WaveFormat waveFormatRight = GetWaveFormat(filePathRight);

            if (!waveFormatLeft.Equals(waveFormatRight))
            {
                Console.Out.WriteLine("The two files to combine must have the same format");
                return(false);
            }
            if (waveFormatLeft.Channels != 2 || waveFormatRight.Channels != 2)
            {
                Console.Out.WriteLine("The two files to combine must be stereo");
                return(false);
            }

            int sampleRate = waveFormatLeft.SampleRate;

            float[] channel1;
            float[] channel2;
            float[] channel3;
            float[] channel4;
            SplitStereoWaveFileToMono(filePathLeft, out channel1, out channel2);
            SplitStereoWaveFileToMono(filePathRight, out channel3, out channel4);

            // find out what channel is longest
            int maxLength = Math.Max(channel1.Length, channel3.Length);

            using (WaveFileWriter wavWriter = new WaveFileWriter(combinedFileNamePath, WaveFormat.CreateIeeeFloatWaveFormat(sampleRate, 4)))
            {
                // write the channels one float at a time (interleaved), padding with silence if necessary
                for (int i = 0; i < maxLength; i++)
                {
                    if (i < channel1.Length)
                    {
                        wavWriter.WriteSample(channel1[i]);
                    }
                    else
                    {
                        wavWriter.WriteSample(0.0f);
                    }
                    if (i < channel2.Length)
                    {
                        wavWriter.WriteSample(channel2[i]);
                    }
                    else
                    {
                        wavWriter.WriteSample(0.0f);
                    }
                    if (i < channel3.Length)
                    {
                        wavWriter.WriteSample(channel3[i]);
                    }
                    else
                    {
                        wavWriter.WriteSample(0.0f);
                    }
                    if (i < channel4.Length)
                    {
                        wavWriter.WriteSample(channel4[i]);
                    }
                    else
                    {
                        wavWriter.WriteSample(0.0f);
                    }
                }
            }
            return(true);
        }
Code Example #21
        static WaveData convertWaveData(WaveData inputData, ushort outputSampleRate, bool outputStereo)
        {
            // Step 1: Mix channels
            float[][] mixed;
            if ((inputData.channelSamples.Length > 1 && !outputStereo) || (inputData.channelSamples.Length > 2 && outputStereo))
            {
                long sampleCount = inputData.channelSamples[0].LongLength;
                mixed = new float[outputStereo ? 2 : 1][];
                for (int j = 0; j < mixed.Length; ++j)
                {
                    mixed[j] = new float[sampleCount];
                }
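                // Down-mix: input channel j accumulates into output channel (j % output count),
                // then each output sample is normalized by the number of input channels mixed into it.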
                for (long i = 0; i < sampleCount; ++i)
                {
                    for (int j = 0; j < inputData.channelSamples.Length; ++j)
                    {
                        mixed[j % mixed.Length][i] += inputData.channelSamples[j][i];
                    }
                    if (mixed.Length > 1)
                    {
                        mixed[0][i] /= ((inputData.channelSamples.Length + 1) / 2);
                        mixed[1][i] /= inputData.channelSamples.Length / 2;
                    }
                    else
                    {
                        mixed[0][i] /= inputData.channelSamples.Length;
                    }
                }
            }
            else
            {
                mixed = inputData.channelSamples;
            }

            // Step 2: Adjust sample rate
            WaveData result;

            if (outputSampleRate != inputData.sampleRate)
            {
                string tempFile1 = "resample1.wav";
                using (WaveFileWriter writer = new WaveFileWriter(tempFile1, WaveFormat.CreateIeeeFloatWaveFormat(inputData.sampleRate, mixed.Length))) {
                    for (long i = 0; i < mixed[0].LongLength; ++i)
                    {
                        for (int j = 0; j < mixed.Length; ++j)
                        {
                            writer.WriteSample(mixed[j][i]);
                        }
                    }
                }

                string tempFile2 = "resample2.wav";
                using (WaveFileReader reader = new WaveFileReader(tempFile1)) {
                    WaveFormat outFormat = new WaveFormat(outputSampleRate, reader.WaveFormat.Channels);
                    using (MediaFoundationResampler resampler = new MediaFoundationResampler(reader, outFormat)) {
                        WaveFileWriter.CreateWaveFile(tempFile2, resampler);
                    }
                }

                result = readWaveData(tempFile2);
                File.Delete(tempFile1);
                File.Delete(tempFile2);
            }
            else
            {
                result                = new WaveData();
                result.sampleRate     = inputData.sampleRate;
                result.channelSamples = mixed;
            }

            return(result);
        }