Code Example #1
        /// <summary>
        /// Gets the minimum amplitude in the file (useful for choosing the right silence threshold for DetectSilenceLevel()).
        /// </summary>
        /// <returns>Minimum amplitude</returns>
        public float GetMinAmplitude()
        {
            // save the old cursor position so it can be restored afterwards

            long oldPosition = AudioReader.Position;

            // buffer

            float[] amplitudeArray = new float[AudioReader.WaveFormat.SampleRate];

            // end of file

            bool eof = false;

            float min = 0;

            while (!eof)
            {
                int ReadedSamples = AudioReader.Read(amplitudeArray, 0, amplitudeArray.Length);

                if (ReadedSamples == 0)
                {
                    eof = true;
                }
                for (int i = 0; i < ReadedSamples; i++)
                {
                    min = Math.Min(amplitudeArray[i], min);
                }
            }

            AudioReader.Position = oldPosition;
            return min;
        }
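
GetMinAmplitude() returns the most negative sample it finds, which by itself does not say what silence threshold to pass to DetectSilenceLevel(). Below is a minimal sketch of one possible heuristic that works on an already-decoded float buffer; the SilenceThreshold class, the FromSamples helper, and the 2% fraction are illustrative assumptions, not part of the original project.

using System;

static class SilenceThreshold
{
    // Hypothetical heuristic: take the peak absolute amplitude of the clip and
    // treat anything below a small fraction of it as silence.
    public static float FromSamples(float[] samples, float fractionOfPeak = 0.02f)
    {
        float min = 0, max = 0;
        for (int i = 0; i < samples.Length; i++)
        {
            min = Math.Min(samples[i], min); // most negative sample, as in GetMinAmplitude()
            max = Math.Max(samples[i], max);
        }
        float peak = Math.Max(Math.Abs(min), max);
        return peak * fractionOfPeak;
    }
}
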
Code Example #2
        /// ------------------------------------------------------------------------------------
        /// <summary>
        /// Loads the transcription data that accompanies the specified audio file, upgrading
        /// older SA audio files (and converting their data to Unicode) when necessary.
        /// </summary>
        /// ------------------------------------------------------------------------------------
        private bool Initialize(string audioFilePath)
        {
            m_doc = SaAudioDocument.Load(audioFilePath, false, true);
            if (m_doc != null)
            {
                ResetSegmentEnumerators();
                return true;
            }

            try
            {
                using (AudioReader audioReader = new AudioReader())
                {
                    AudioReader.InitResult result = audioReader.Initialize(audioFilePath);
                    if (result == AudioReader.InitResult.FileNotFound)
                    {
                        return false;
                    }

                    if (result == AudioReader.InitResult.InvalidFormat)
                    {
                        return false;
                    }

                    // Try reading data from older SA audio files, converting
                    // it to Unicode along the way.
                    if (!audioReader.Read(true))
                    {
                        return false;
                    }

                    // Now try reading the companion transcription file again.
                    m_doc = SaAudioDocument.Load(audioFilePath, false, false);
                    ResetSegmentEnumerators();
                }
            }
            catch
            {
                // any failure while reading or converting the audio file means initialization failed
                return false;
            }

            return true;
        }
Code Example #3
File: MainWindow.xaml.cs Project: VoidXH/Cavern
        void ProcessImpulse(object sender, RoutedEventArgs e)
        {
            if (browser.ShowDialog().Value)
            {
                AudioReader reader  = AudioReader.Open(browser.FileName);
                float[]     impulse = reader.Read();
                float       gain    = 1;
                if (keepGain.IsChecked.Value)
                {
                    gain = WaveformUtils.GetPeak(impulse);
                }

                if (commonEQ.IsChecked.Value)
                {
                    ProcessCommon(reader, ref impulse);
                }
                else
                {
                    ProcessPerChannel(reader, ref impulse);
                }

                if (keepGain.IsChecked.Value)
                {
                    WaveformUtils.Gain(impulse, gain / WaveformUtils.GetPeak(impulse));
                }

                BitDepth bits = reader.Bits;
                if (forceFloat.IsChecked.Value)
                {
                    bits = BitDepth.Float32;
                }

                int targetLen = QMath.Base2Ceil((int)reader.Length);
                if (separateExport.IsChecked.Value)
                {
                    ReferenceChannel[] channels = ChannelPrototype.GetStandardMatrix(reader.ChannelCount);
                    for (int ch = 0; ch < reader.ChannelCount; ++ch)
                    {
                        string exportName  = Path.GetFileName(browser.FileName);
                        int    idx         = exportName.LastIndexOf('.');
                        string channelName = ChannelPrototype.Mapping[(int)channels[ch]].Name;
                        exporter.FileName = $"{exportName[..idx]} - {channelName}{exportName[idx..]}";
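
The listing above is cut off by the source page. As a minimal standalone sketch of the per-channel export naming it performs (pure string handling; the ExportNaming class and ForChannel helper are illustrative names, not from the project):

using System.IO;

static class ExportNaming
{
    // Inserts a channel name before the extension, e.g.
    // "impulse.wav" + "Front Left" -> "impulse - Front Left.wav".
    // Assumes the file name has an extension, as the listing above does.
    public static string ForChannel(string filePath, string channelName)
    {
        string exportName = Path.GetFileName(filePath);
        int idx = exportName.LastIndexOf('.');
        return $"{exportName[..idx]} - {channelName}{exportName[idx..]}";
    }
}
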
Code Example #4
        private void LoadFiles(object sender, RoutedEventArgs e)
        {
            AudioReader responseReader = Import("Response.wav");

            if (responseReader == null)
            {
                return;
            }
            AudioReader impulseReader = Import("Impulse.wav");

            if (impulseReader == null)
            {
                return;
            }

            float[] response = responseReader.Read(),
            impulse = impulseReader.Read();
            if (responseReader.SampleRate != impulseReader.SampleRate)
            {
                Error("The sample rate of the two clips don't match.");
                return;
            }
            int responseChannels = responseReader.ChannelCount,
                impulseChannels  = impulseReader.ChannelCount;

            if (impulseChannels != 1 && impulseChannels != responseChannels)
            {
                Error("The channel count of the two clips don't match. A single-channel impulse is also acceptable.");
                return;
            }

            int fftSize = Math.Max(
                QMath.Base2Ceil((int)responseReader.Length),
                QMath.Base2Ceil((int)impulseReader.Length)
                );

            if (padding.IsChecked.Value)
            {
                Array.Resize(ref response, fftSize + response.Length);
                Array.Copy(response, 0, response, fftSize, response.Length - fftSize);
                Array.Clear(response, 0, fftSize);

                fftSize = Math.Max(fftSize, QMath.Base2Ceil(response.Length));
            }

            Complex[] impulseFFT = new Complex[fftSize],
            responseFFT = new Complex[fftSize];
            FFTCache cache = new FFTCache(fftSize);

            float[] responseChannel = new float[response.Length / responseChannels];
            for (int channel = 0; channel < responseChannels; ++channel)
            {
                if (channel < impulseChannels)   // After the channel count check this runs once or for each channel
                {
                    float[] impulseChannel = impulse;
                    if (impulseChannels != 1)
                    {
                        impulseChannel = new float[impulseReader.Length];
                        WaveformUtils.ExtractChannel(impulse, impulseChannel, channel, impulseChannels);
                        Array.Clear(impulseFFT, 0, fftSize);
                    }
                    for (int sample = 0; sample < impulseChannel.Length; ++sample)
                    {
                        impulseFFT[sample].Real = impulseChannel[sample];
                    }
                    Measurements.InPlaceFFT(impulseFFT, cache);
                }

                if (responseChannels == 1)
                {
                    responseChannel = response;
                }
                else
                {
                    WaveformUtils.ExtractChannel(response, responseChannel, channel, responseChannels);
                }
                if (channel != 0)
                {
                    // responseFFT is only guaranteed to be zero on the first pass;
                    // later channels must clear the previous channel's data
                    Array.Clear(responseFFT, 0, fftSize);
                }
                for (int sample = 0; sample < responseChannel.Length; ++sample)
                {
                    responseFFT[sample].Real = responseChannel[sample];
                }
                Measurements.InPlaceFFT(responseFFT, cache);

                for (int sample = 0; sample < fftSize; ++sample)
                {
                    responseFFT[sample].Divide(impulseFFT[sample]);
                }
                Measurements.InPlaceIFFT(responseFFT, cache);
                for (int i = 0, channels = responseChannels; i < responseChannel.Length; ++i)
                {
                    response[channels * i + channel] = responseFFT[i].Real;
                }
            }

            exporter.FileName = "Deconvolved.wav";
            if (exporter.ShowDialog().Value)
            {
                BinaryWriter handler = new BinaryWriter(File.OpenWrite(exporter.FileName));
                using RIFFWaveWriter writer = new RIFFWaveWriter(handler, responseChannels, responseReader.Length,
                                                                 responseReader.SampleRate, responseReader.Bits);
                writer.Write(response);
            }
        }
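
The core of LoadFiles() is frequency-domain deconvolution: both clips are transformed with an FFT, the response spectrum is divided bin by bin by the impulse spectrum, and the quotient is transformed back to the time domain. Below is a minimal single-channel sketch of that step, reusing only the Cavern calls that already appear in the listing (Complex, FFTCache, Measurements); the Cavern.Utilities namespace and the Deconvolution class name are assumptions.

using Cavern.Utilities; // assumed namespace for Complex, FFTCache, and Measurements

static class Deconvolution
{
    // Deconvolves a single-channel response by a single-channel impulse.
    // fftSize must be a power of two at least as long as both inputs.
    public static float[] Deconvolve(float[] response, float[] impulse, int fftSize)
    {
        Complex[] responseFFT = new Complex[fftSize],
                  impulseFFT  = new Complex[fftSize];
        for (int i = 0; i < response.Length; ++i)
        {
            responseFFT[i].Real = response[i];
        }
        for (int i = 0; i < impulse.Length; ++i)
        {
            impulseFFT[i].Real = impulse[i];
        }

        FFTCache cache = new FFTCache(fftSize);
        Measurements.InPlaceFFT(responseFFT, cache);
        Measurements.InPlaceFFT(impulseFFT, cache);

        // division in the frequency domain is deconvolution in the time domain
        for (int i = 0; i < fftSize; ++i)
        {
            responseFFT[i].Divide(impulseFFT[i]);
        }

        Measurements.InPlaceIFFT(responseFFT, cache);
        float[] result = new float[response.Length];
        for (int i = 0; i < result.Length; ++i)
        {
            result[i] = responseFFT[i].Real;
        }
        return result;
    }
}
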
Code Example #5
        /// <summary>
        /// Detects the volume level throughout the audio file.
        /// </summary>
        /// <param name="amplitudeSilenceThreshold">Amplitude threshold (between 0 and 1).</param>
        /// <param name="Millisec">The audio is split into blocks of this many milliseconds, and each block is classified as silence or sound.</param>
        /// <returns>
        /// List of time spans with their volume level (Sound or Silence).
        /// </returns>
        private List <TimeSpanVolume> DetectVolumeLevel(
            float amplitudeSilenceThreshold,
            int Millisec)
        {
            if (amplitudeSilenceThreshold > 1 || amplitudeSilenceThreshold < 0)
            {
                throw new ArgumentOutOfRangeException(
                    nameof(amplitudeSilenceThreshold),
                    $"amplitudeSilenceThreshold ({amplitudeSilenceThreshold}) must be between 0 and 1");
            }

            List <TimeSpanVolume> TimeSpanVolumes = new List <TimeSpanVolume>();

            // save the old cursor position so it can be restored afterwards

            long oldPosition = AudioReader.Position;

            // duration of one analysis block

            TimeSpan timeSpan = TimeSpan.FromMilliseconds(Millisec);

            // number of samples analyzed at a time

            int blockSamples = MillisecToSamplesBlock(Millisec);

            // read buffer sized to one analysis block
            BufferSize = blockSamples;
            float[] amplitudeArray = new float[BufferSize];

            // end of file

            bool eof = false;

            while (!eof)
            {
                int ReadedSamples = AudioReader.Read(amplitudeArray, 0, amplitudeArray.Length);

                if (ReadedSamples == 0)
                {
                    eof = true;
                }

                // trailing samples that do not fill a whole block

                int RemainSamples = ReadedSamples % blockSamples;
                ReadedSamples -= RemainSamples;

                // MAIN ANALYSIS: walk the buffer block by block

                for (int i = 0; i < ReadedSamples; i += blockSamples)
                {
                    float average = 0;

                    // the block may be incomplete (smaller than blockSamples)

                    int analyzedSamples = 0;

                    // the i + j < amplitudeArray.Length check guards against reading past the buffer

                    for (int j = 0; j < blockSamples && i + j < amplitudeArray.Length; j++)
                    {
                        // amplitudes can be negative, so take the absolute value

                        float sampleLocal = Math.Abs(amplitudeArray[i + j]);
                        average += sampleLocal;
                        analyzedSamples++;
                    }
                    average /= analyzedSamples;

                    // classify the block as silence or noise

                    bool        isSilenceRemain = average < amplitudeSilenceThreshold;
                    VolumeValue volumeRemain    = isSilenceRemain ?
                                                  VolumeValue.Silence :
                                                  VolumeValue.Noise;

                    // add timespan to list

                    TimeSpanVolume spanRemain = new TimeSpanVolume(volumeRemain, timeSpan);
                    TimeSpanVolumes.Add(spanRemain);
                }

                // REMAINDER ANALYSIS

                // if RemainSamples is not 0, the trailing samples that do not fill a whole block are analyzed separately

                float averageRemain = 0;
                for (int i = ReadedSamples; i < ReadedSamples + RemainSamples; i++)
                {
                    float sampleLocal = Math.Abs(amplitudeArray[i]);
                    averageRemain += sampleLocal;
                }
                averageRemain /= RemainSamples;

                // only add the remainder if its time span is not empty

                TimeSpan remainTimeSpan = TimeSpan.FromMilliseconds(SamplesBlockToMillisec(RemainSamples));
                if (remainTimeSpan > TimeSpan.Zero)
                {
                    // classify the remainder as silence or noise

                    bool        isSilence = averageRemain < amplitudeSilenceThreshold;
                    VolumeValue volume    = isSilence ?
                                            VolumeValue.Silence :
                                            VolumeValue.Noise;

                    // add timespan to list
                    TimeSpanVolume span = new TimeSpanVolume(volume, remainTimeSpan);
                    TimeSpanVolumes.Add(span);
                }
            }

            AudioReader.Position = oldPosition;
            TimeSpanVolumes.TrimExcess();
            //TimeSpanVolumes = SqueezeListOfTimeSpans(TimeSpanVolumes);
            return TimeSpanVolumes;
        }
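
The heart of DetectVolumeLevel() is the per-block average of absolute amplitudes compared against the threshold. Below is a standalone sketch of that classification for an in-memory buffer; the BlockClassifier name and the bool[] return type are illustrative assumptions (the original builds TimeSpanVolume entries instead).

using System;

static class BlockClassifier
{
    // Splits the buffer into blocks of blockSamples samples and marks a block
    // as silent (true) when its average absolute amplitude is below the threshold.
    // The trailing partial block, if any, is classified the same way.
    public static bool[] ClassifyBlocks(float[] samples, int blockSamples, float silenceThreshold)
    {
        int blockCount = (samples.Length + blockSamples - 1) / blockSamples;
        bool[] silent = new bool[blockCount];
        for (int block = 0; block < blockCount; block++)
        {
            int start = block * blockSamples;
            int end = Math.Min(start + blockSamples, samples.Length);
            float average = 0;
            for (int i = start; i < end; i++)
            {
                average += Math.Abs(samples[i]); // amplitudes can be negative
            }
            average /= end - start;
            silent[block] = average < silenceThreshold;
        }
        return silent;
    }
}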