/// <summary>
/// Builds a sample-data asset from a wave file, pulling tuning/loop metadata
/// from the wave's sampler ("smpl") chunk when one is present.
/// </summary>
/// <param name="name">Asset name; must not be null.</param>
/// <param name="wave">Source wave providing format, sampler metadata and raw PCM data.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="name"/> is null.</exception>
public SampleDataAsset(string name, WaveFile wave)
{
    // BUGFIX: the single-string ArgumentNullException ctor takes a *parameter name*,
    // so the old code reported the message text as the offending parameter.
    if (name == null)
        throw new ArgumentNullException(nameof(name), "An asset must be given a valid name.");
    assetName = name;
    SamplerChunk smpl = wave.FindChunk<SamplerChunk>();
    if (smpl != null)
    {
        // SamplePeriod is presumably in nanoseconds; 22675 ns ≈ one 44100 Hz period,
        // so this reconstructs the recorded sample rate (~1e9 / SamplePeriod) — TODO confirm.
        sampleRate = (int)(44100.0 * (1.0 / (smpl.SamplePeriod / 22675.0)));
        rootKey = (short)smpl.UnityNote;
        tune = (short)(smpl.PitchFraction * 100);
        if (smpl.Loops.Length > 0)
        {
            //--WARNING ASSUMES: smpl.Loops[0].Type == SamplerChunk.SampleLoop.LoopType.Forward
            loopStart = smpl.Loops[0].Start;
            loopEnd = smpl.Loops[0].End + smpl.Loops[0].Fraction + 1;
        }
    }
    else
    {
        // No sampler chunk: fall back to the format header's sample rate.
        sampleRate = wave.Format.SampleRate;
    }
    byte[] data = wave.Data.RawSampleData;
    if (wave.Format.ChannelCount != audioChannels) //reformat to supported channels
        data = WaveHelper.GetChannelPcmData(data, wave.Format.BitsPerSample, wave.Format.ChannelCount, audioChannels);
    sampleData = PcmData.Create(wave.Format.BitsPerSample, data, true);
    start = 0;
    end = sampleData.Length;
}
            /// <summary>
            /// Builds a sample asset from a wave file, preferring tuning metadata from
            /// the sampler ("smpl") chunk when one is present.
            /// </summary>
            /// <param name="name">Asset name.</param>
            /// <param name="wf">Source wave providing format, sampler metadata and raw PCM data.</param>
            public SampleAsset(string name, WaveFile wf)
            {
                Name = name;
                Channels = (byte)wf.Format.ChannelCount;
                Bits = (byte)wf.Format.BitsPerSample;

                SamplerChunk smpl = wf.FindChunk<SamplerChunk>();
                if (smpl != null)
                {
                    // SamplePeriod is presumably in nanoseconds; 22675 ns ~ one 44100 Hz
                    // period, so this reconstructs the recorded sample rate — TODO confirm.
                    SampleRate = (int)(44100.0 * (1.0 / (smpl.SamplePeriod / 22675.0)));
                    RootKey = (short)smpl.UnityNote;
                    Tune = (short)(smpl.PitchFraction * 100);
                    if (smpl.Loops.Length > 0)
                    {
                        if (smpl.Loops[0].Type != SamplerChunk.SampleLoop.LoopType.Forward)
                            Console.WriteLine("Warning: Loopmode was not supported on asset: " + Name);
                        LoopStart = smpl.Loops[0].Start;
                        LoopEnd = smpl.Loops[0].End + smpl.Loops[0].Fraction + 1;
                    }
                }
                else
                {
                    // No sampler chunk: fall back to the format header's sample rate.
                    SampleRate = wf.Format.SampleRate;
                }

                // BUGFIX: a stray unconditional "SampleRate = wf.Format.SampleRate;" here
                // overwrote the smpl-derived rate above, making that whole branch dead code.
                Data = wf.Data.RawSampleData;
            }
示例#3
0
        /// <summary>
        /// Decodes the ogg-vorbis file to 16-bit PCM.
        /// </summary>
        /// <param name="input">Stream of the ogg-vorbis file (must currently be a <see cref="FileStream"/>).</param>
        /// <returns>PCM-Wave version of the input</returns>
        public WaveFile Decode(Stream input)
        {
            MemoryStream output = new MemoryStream();
            WaveFile wf = new WaveFile();

            // NOTE(review): VorbisFile appears to require a FileStream; this cast throws
            // InvalidCastException for any other Stream type — confirm the contract.
            VorbisFile vf = new VorbisFile((FileStream)input, null, 0);
            Info inf = vf.getInfo(-1);

            wf.Channels = (short)inf.channels;
            wf.Frequency = inf.rate;
            wf.Bits = 16; // decoding to 16-bit PCM (the "2" word-size argument to read below)

            Axiom.Core.LogManager.Instance.Write("SoundSystem: File is Ogg Vorbis "+inf.version.ToString()+" "+inf.rate.ToString()+"Hz, "+inf.channels.ToString()+" channels");

            int bufferlen = 4096;
            byte[] buffer = new byte[bufferlen];
            int[] section = new int[1];

            // BUGFIX: vf.read returns 0 at end-of-stream but a *negative* code on error.
            // The old "while (result != 0)" loop passed that negative count straight to
            // output.Write (which throws) and could spin forever; stop on any non-positive value.
            int result = vf.read(buffer, bufferlen, 0, 2, 1, section);
            while (result > 0)
            {
                output.Write(buffer, 0, result);
                result = vf.read(buffer, bufferlen, 0, 2, 1, section);
            }

            output.Seek(0, SeekOrigin.Begin);
            wf.Data = output;

            return wf;
        }
示例#4
0
        /// <summary>
        /// Shows the given wave file in an MDI child window, re-activating an
        /// existing window for that file instead of opening a duplicate.
        /// </summary>
        /// <param name="file">Wave file to display.</param>
        protected void OpenExistingFile(WaveFile file)
        {
            // only open file if it's not already open.
            // OfType avoids the InvalidCastException the old lambda-cast would throw
            // if a non-MdiForm child ever existed.
            MdiForm match = MdiChildren.OfType<MdiForm>().FirstOrDefault(f => f.Wave == file);
            if (match != null)
            {
                match.Activate();
                return;
            }

            MdiForm graphForm = new MdiForm(file);
            graphForm.MdiParent = this;
            graphForm.Show();

            // start tracking the current directory
            var directory = Path.GetDirectoryName(file.filePath);
            WaveManagerBusiness.WaveManager.AddDirectory(directory);

            // redraw the file list in the left panel
            WaveManagerBusiness.WaveManager.FireRepaintFileList();
        }
示例#5
0
 /// <summary>
 /// Converts a wave file to a discrete signal by averaging its channels.
 /// </summary>
 /// <param name="waveFile">Wave file to convert.</param>
 /// <returns>The channel-averaged signal.</returns>
 public DiscreteSignal WaveToSignal(WaveFile waveFile) => waveFile[Channels.Average];
示例#6
0
 /// <summary>
 /// Loads the wave file named in textBox1 when the button is clicked.
 /// </summary>
 private void button1_Click(object sender, EventArgs e)
 {
     // The constructor is presumably invoked for its side effects (opening/parsing
     // the file); the unused local was replaced with a discard.
     // NOTE(review): the loaded file is not kept anywhere — confirm that is intentional.
     _ = new WaveFile(textBox1.Text);
 }
示例#7
0
    /// <summary>
    /// Loads the wave file at <c>filePath</c> into an AudioClip, builds (or loads a
    /// cached) FFT spectrogram texture for it, and hands clip and texture to the UI.
    /// </summary>
    void Start()
    {
        AudioClip clip;

        float[]  data;
        double[] data2;
        double   sampleRate;
        int      channels;

        using (var stream = System.IO.File.OpenRead(filePath)) {
            var waveFile = new WaveFile(stream);
            // channels is assigned inline as a side effect of this Create() call.
            clip       = AudioClip.Create("test", waveFile.Samples.Length / waveFile.Channels, channels = waveFile.Channels, (int)waveFile.SamplePerSec, false);
            songLength = (float)waveFile.Samples.Length / waveFile.SamplePerSec / waveFile.Channels;
            data       = new float[waveFile.Samples.Length];
            data2      = new double[waveFile.Samples.Length / waveFile.Channels];
            sampleRate = waveFile.SamplePerSec;
            // Copy all interleaved samples for the clip...
            for (int i = 0; i < data.Length; i++)
            {
                data[i] = (float)waveFile.Samples[i];
            }
            // ...and every Channels-th sample (first channel only) into data2.
            // NOTE(review): data2 and sampleRate are never used below — dead locals?
            for (int i = 0; i < data2.Length; i++)
            {
                data2[i] = waveFile.Samples[i * waveFile.Channels];
            }
            clip.SetData(data, 0);
        }

        const int length = 2048;        // FFT window size (samples per spectrogram row)
        const int skip   = 512;         // hop between consecutive windows
        const int width  = length >> 3; // texture width: lowest 1/8th of the FFT bins
        var       com    = new Complex[length];

        // One texture row per hop over the first channel.
        var texture = new Texture2D(width, data.Length / channels / skip, TextureFormat.RGB24, false);

        texture.filterMode = FilterMode.Point;
        Debug.Log(texture.height);
        Debug.Log(texture.width);

        if (System.IO.File.Exists(filePath + ".png"))
        {
            // Cached spectrogram exists: load it instead of recomputing the FFTs.
            texture.LoadImage(System.IO.File.ReadAllBytes(filePath + ".png"));
        }
        else
        {
            for (int i = 0; i < data.Length / channels / skip; i++)
            {
                if (i * skip * channels + length * channels >= data.Length)
                {
                    break; // not enough samples left for a full window
                }

                // Fill the FFT input with the first channel of this window.
                for (int j = 0; j < length; j++)
                {
                    com[j].real = data[i * skip * channels + j * channels];
                    com[j].img  = 0;
                }

                com = FFT.CalculateFFT(com, false);

                // Paint magnitudes (scaled x5) through the gradient, mirrored horizontally.
                for (int j = 0; j < width; j++)
                {
                    texture.SetPixel(width - j - 1, i, gradient.Evaluate(com[j].fMagnitude * 5));
                }
            }
        }

        texture.Apply();

        uiRawImage.texture = texture;
//		uiRawImage.SetNativeSize();

        beatmapEditor.Init(clip.length, 2, 0);

        // NOTE(review): the PNG cache is rewritten even when it was just loaded above —
        // consider moving this into the else-branch.
        System.IO.File.WriteAllBytes(filePath + ".png", texture.EncodeToPNG());

        source.clip = clip;
    }
示例#8
0
 /// <summary>
 /// Builds an AudioClip from raw WAV bytes.
 /// </summary>
 /// <param name="data">Raw WAV file contents.</param>
 /// <returns>The decoded audio clip.</returns>
 private static AudioClip LoadAudio(byte[] data)
 {
     //TODO handling for things that aren't WAV
     var wave = WaveFile.Load(data, false);
     return wave.ToAudioClip();
 }
示例#9
0
        // Open file: prompts for a wave file, loads the left channel, and refreshes
        // all of the analysis panels (duration, peak, pitch track, spectrogram).
        private void openToolStripMenuItem_Click(object sender, EventArgs e)
        {
            spectrumPanel.refresh();
            signalPanel.refresh();

            signalPanel.Stride = 64;

            var ofd = new OpenFileDialog();

            if (ofd.ShowDialog() != DialogResult.OK)
            {
                return;
            }

            txtFilePath.Text = ofd.FileName;
            _waveFileName    = ofd.FileName;

            try
            {
                using (var stream = new FileStream(_waveFileName, FileMode.Open))
                {
                    IAudioContainer waveFile = new WaveFile(stream);
                    _signal         = waveFile[Channels.Left];
                    _signal_storage = _signal;
                }
            }
            catch (IOException ex)
            {
                Console.WriteLine("Error reading from {0}. Message = {1}", _waveFileName, ex.Message);
            }

            // BUGFIX: if the load above failed, _signal can still be null; the old code
            // fell through and crashed with a NullReferenceException at _signal[0].
            if (_signal == null)
            {
                return;
            }

            // Get duration (BUGFIX: the reader was never disposed, leaking the file handle)
            TimeSpan totalTime;
            using (var wf = new WaveFileReader(_waveFileName))
            {
                totalTime = wf.TotalTime;
            }

            txtTotalTime.Text          = totalTime.TotalMilliseconds.ToString() + " ms";
            signalPanel.max_time_value = (float)totalTime.TotalMilliseconds;

            // Find the peak sample value (the old ">0.2 counter" was computed but unused).
            var max_value = _signal[0];

            for (int i = 1; i < _signal.Length; i++)
            {
                if (max_value < _signal[i])
                {
                    max_value = _signal[i];
                }
            }
            txtMaxValue.Text = max_value.ToString();

            signalPanel.Signal = _signal;

            _fftSize      = int.Parse(fftSizeTextBox.Text);
            _cepstrumSize = int.Parse(cepstrumSizeTextBox.Text);
            _hopSize      = int.Parse(hopSizeTextBox.Text);

            _fft = new Fft(_fftSize);
            _cepstralTransform = new CepstralTransform(_cepstrumSize, _fftSize);

            var pitchTracker = new Pitch((float)_fftSize / _signal.SamplingRate,
                                         (float)_hopSize / _signal.SamplingRate);

            _pitchTrack = pitchTracker.Track(_signal);

            // Show chart in frequency domain
            UpdateAutoCorrelation();
            UpdateSpectra();

            // obtain spectrogram
            _stft = new Stft(_fftSize, _hopSize, WindowTypes.Rectangular);
            var spectrogram = _stft.Spectrogram(_signal);

            specNoComboBox.DataSource = Enumerable.Range(1, _pitchTrack.Count).ToArray();

            _specNo = 0;

            spectrumPanelAfterFilter.refresh();
            signalPanelAfterFilter.refresh();
            lblNote.Visible = false;
            savePWMDataToolStripMenuItem.Enabled = true;
        }
示例#10
0
 /// <summary>
 /// Tears down the audio/DSP pipeline: stops the plugin frontend, disposes the
 /// wave player/recorder/duplex devices, joins the worker threads, then closes
 /// and disposes the streams and releases the buffers. The sequence matters —
 /// producers are stopped before their streams are closed, and threads are
 /// joined before the buffers they use are released.
 /// </summary>
 public void Stop()
 {
     // Stop the signal source first so nothing new is written downstream.
     if (_inputType == InputType.Plugin && _frontend != null)
     {
         _frontend.Stop();
         _frontend = null;
     }
     if (_wavePlayer != null)
     {
         _wavePlayer.Dispose();
         _wavePlayer = null;
     }
     if (_waveRecorder != null)
     {
         _waveRecorder.Dispose();
         _waveRecorder = null;
     }
     if (_waveDuplex != null)
     {
         _waveDuplex.Dispose();
         _waveDuplex = null;
     }
     _inputSampleRate = 0;
     // NOTE(review): the read thread is joined *before* the streams are closed —
     // this assumes disposing the devices above is enough to unblock it; verify.
     if (_waveReadThread != null)
     {
         _waveReadThread.Join();
         _waveReadThread = null;
     }
     // Close (but don't yet dispose) the streams so the DSP thread can drain and exit.
     if (_iqStream != null)
     {
         _iqStream.Close();
     }
     if (_audioStream != null)
     {
         _audioStream.Close();
     }
     if (_streamHookManager != null)
     {
         _streamHookManager.CloseStreams();
         _streamHookManager.Stop();
     }
     if (_dspThread != null)
     {
         _dspThread.Join();
         _dspThread = null;
     }
     if (_streamHookManager != null)
     {
         _streamHookManager.StopIQObserverThread();
     }
     if (_waveFile != null)
     {
         _waveFile.Dispose();
         _waveFile = null;
     }
     // Streams are disposed only after every thread that used them has exited.
     if (_iqStream != null)
     {
         _iqStream.Dispose();
         _iqStream = null;
     }
     if (_streamHookManager != null)
     {
         _streamHookManager.DisposeStreams();
     }
     _audioStream  = null;
     _dspOutBuffer = null;
     _iqInBuffer   = null;
 }
示例#11
0
 /// <summary>
 /// Initializes a new instance of the <see cref="Invert"/> effect over the given wave file.
 /// </summary>
 /// <param name="waveFile">Wave file the effect will operate on.</param>
 public Invert(WaveFile waveFile) => this.waveFile = waveFile;
示例#12
0
File: DTMF.cs  Project: rmc00/gsf
        /// <summary>
        /// Generates the specified dual-tone multi-frequencies <paramref name="repeatCount"/> times storing them in the specified <see cref="WaveFile"/>.
        /// </summary>
        /// <param name="destination"><see cref="WaveFile"/> used to store generated dual-tone multi-frequencies.</param>
        /// <param name="tones">Dual-tone multi-frequencies to generate.</param>
        /// <param name="volume">Volume of generated dual-tones as a percentage (0 to 1).</param>
        /// <param name="repeatCount">Number of times to repeat each tone.</param>
        /// <exception cref="ArgumentNullException"><paramref name="destination"/> or <paramref name="tones"/> is null.</exception>
        /// <exception cref="ArgumentOutOfRangeException">Value must be expressed as a fractional percentage between zero and one.</exception>
        /// <exception cref="InvalidOperationException"><see cref="DTMF"/> only generated for <see cref="WaveFile"/> with 8, 16, 24, 32 or 64 bits per sample.</exception>
        public static void Generate(WaveFile destination, DTMF[] tones, double volume, int repeatCount)
        {
            // Guard clauses: fail with clear parameter names instead of a later NullReferenceException.
            if (destination == null)
                throw new ArgumentNullException(nameof(destination));

            if (tones == null)
                throw new ArgumentNullException(nameof(tones));

            if (volume < 0.0D || volume > 1.0D)
                throw new ArgumentOutOfRangeException(nameof(volume), "Value must be expressed as a fractional percentage between zero and one");

            double amplitude = destination.AmplitudeScalar * volume;

            // Iterate through each repeat count
            for (int x = 0; x < repeatCount; x++)
            {
                // Iterate through each tone
                foreach (DTMF tone in tones)
                {
                    // Iterate through each sample for total DTMF duration
                    for (long y = 0; y < tone.Duration * destination.SampleRate; y++)
                    {
                        // Compute frequencies of DTMF at given time and add to wave file
                        destination.AddSample(DTMF.ComputeFrequencies(tone, y, destination.SampleRate) * amplitude);
                    }
                }
            }
        }
示例#13
0
        /// <summary>
        /// Extracts MFCC, time-domain and spectral features from the given wave file,
        /// windows them into roughly one-second chunks, computes per-chunk statistics,
        /// feeds each chunk through the ML model, and collects the per-chunk scores in
        /// featureTimeList. Also starts playback of the file via the media player.
        /// </summary>
        /// <param name="_filepath">Path of the wave file on disk (read with a FileStream).</param>
        /// <param name="sf">The same file as a StorageFile; used for playback and duration metadata.</param>
        // NOTE(review): async void means callers cannot await this or observe its
        // exceptions — consider returning Task unless an event-handler contract requires void.
        async public void extractFeatures(string _filepath, StorageFile sf)
        {
            op          = new float[10];
            tdVectors   = new List <float[]>();
            mfccVectors = new List <float[]>();


            featureTimeList = new List <float[]>();

            //NWaves
            FilePath       = _filepath;
            PredictedLabel = "Ready!.";
            //player.Load(GetStreamFromFile(FilePath));
            //player.Play();
            mMedia.Source = MediaSource.CreateFromStorageFile(sf);
            bool test = player.IsPlaying;

            mMedia.AutoPlay = true;
            MusicProperties properties = await sf.Properties.GetMusicPropertiesAsync();

            TimeSpan myTrackDuration = properties.Duration;

            // Track duration in whole seconds; later used to size the per-second windows.
            duration = Convert.ToInt32(myTrackDuration.TotalSeconds);
            if (FilePath != null)
            {
                DiscreteSignal signal;

                // load
                var mfcc_no      = 24;
                // NOTE(review): sampling rate is hard-coded to 44100 — assumes all input
                // files are 44.1 kHz; confirm or read it from the file header.
                var samplingRate = 44100;
                var mfccOptions  = new MfccOptions
                {
                    SamplingRate  = samplingRate,
                    FeatureCount  = mfcc_no,
                    FrameDuration = 0.025 /*sec*/,
                    HopDuration   = 0.010 /*sec*/,
                    PreEmphasis   = 0.97,
                    Window        = WindowTypes.Hamming
                };

                var opts = new MultiFeatureOptions
                {
                    SamplingRate  = samplingRate,
                    FrameDuration = 0.025,
                    HopDuration   = 0.010
                };
                var tdExtractor   = new TimeDomainFeaturesExtractor(opts);
                var mfccExtractor = new MfccExtractor(mfccOptions);

                // Read from file.
                featureString = String.Empty;
                featureString = $"green,";
                //MFCC
                // One accumulator list per MFCC coefficient and per time-domain feature.
                var mfccList = new List <List <double> >();
                var tdList   = new List <List <double> >();
                //MFCC
                //TD Features
                //Spectral features
                for (var i = 0; i < mfcc_no; i++)
                {
                    mfccList.Add(new List <double>());
                }
                for (var i = 0; i < 4; i++)
                {
                    tdList.Add(new List <double>());
                }


                string specFeatures = String.Empty;
                Console.WriteLine($"{tag} Reading from file");
                using (var stream = new FileStream(FilePath, FileMode.Open))
                {
                    var waveFile = new WaveFile(stream);
                    signal = waveFile[channel : Channels.Left];
                    ////Compute MFCC
                    // NOTE(review): mfvfuck and fft below are never used — dead locals.
                    float[] mfvfuck = new float[25];
                    var     sig_sam = signal.Samples;
                    mfccVectors = mfccExtractor.ComputeFrom(sig_sam);

                    var fftSize = 1024;
                    tdVectors = tdExtractor.ComputeFrom(signal.Samples);
                    var fft        = new Fft(fftSize);
                    var resolution = (float)samplingRate / fftSize;

                    // Center frequency of each FFT bin.
                    var frequencies = Enumerable.Range(0, fftSize / 2 + 1)
                                      .Select(f => f * resolution)
                                      .ToArray();

                    var spectrum = new Fft(fftSize).MagnitudeSpectrum(signal).Samples;

                    // Whole-file spectral descriptors (computed once, reused for every chunk).
                    var centroid  = Spectral.Centroid(spectrum, frequencies);
                    var spread    = Spectral.Spread(spectrum, frequencies);
                    var flatness  = Spectral.Flatness(spectrum, 0);
                    var noiseness = Spectral.Noiseness(spectrum, frequencies, 3000);
                    var rolloff   = Spectral.Rolloff(spectrum, frequencies, 0.85f);
                    var crest     = Spectral.Crest(spectrum);
                    var decrease  = Spectral.Decrease(spectrum);
                    var entropy   = Spectral.Entropy(spectrum);
                    specFeatures = $"{centroid},{spread},{flatness},{noiseness},{rolloff},{crest},{decrease},{entropy}";
                    //}
                    Console.WriteLine($"{tag} All features ready");

                    // Walk the frame-level vectors in ~one-second chunks.
                    for (int calibC = 0; calibC < mfccVectors.Count;)
                    {
                        featureString = String.Empty;
                        var tmp = new ModelInput();

                        // Collect one chunk's worth of frames into the accumulator lists.
                        // NOTE(review): the lists are never cleared between chunks, so each
                        // chunk's statistics include all previous frames — confirm intended.
                        for (var j = 0; j < (mfccVectors.Count / duration) - 1 && calibC < mfccVectors.Count; j++)
                        {
                            for (var i = 0; i < mfcc_no; i++)
                            {
                                mfccList[i].Add(mfccVectors[calibC][i]);
                            }
                            for (var i = 0; i < 4; i++)
                            {
                                tdList[i].Add(tdVectors[calibC][i]);
                            }
                            calibC += 1;
                        }

                        // Nine summary statistics per MFCC coefficient, in this fixed order:
                        var mfcc_statistics = new List <double>();
                        for (var i = 0; i < mfcc_no; i++)
                        {
                            //preheader += m + "_mean";
                            //preheader += m + "_min";
                            //preheader += m + "_var";
                            //preheader += m + "_sd";
                            //preheader += m + "_med";
                            //preheader += m + "_lq";
                            //preheader += m + "_uq";
                            //preheader += m + "_skew";
                            //preheader += m + "_kurt";
                            mfcc_statistics.Add(Statistics.Mean(mfccList[i]));
                            mfcc_statistics.Add(Statistics.Minimum(mfccList[i]));
                            mfcc_statistics.Add(Statistics.Variance(mfccList[i]));
                            mfcc_statistics.Add(Statistics.StandardDeviation(mfccList[i]));
                            mfcc_statistics.Add(Statistics.Median(mfccList[i]));
                            mfcc_statistics.Add(Statistics.LowerQuartile(mfccList[i]));
                            mfcc_statistics.Add(Statistics.UpperQuartile(mfccList[i]));
                            mfcc_statistics.Add(Statistics.Skewness(mfccList[i]));
                            mfcc_statistics.Add(Statistics.Kurtosis(mfccList[i]));
                        }
                        // Same nine statistics for each of the four time-domain features.
                        var td_statistics = new List <double>();

                        for (var i = 0; i < 4; i++)
                        {
                            td_statistics.Add(Statistics.Mean(tdList[i]));
                            td_statistics.Add(Statistics.Minimum(tdList[i]));
                            td_statistics.Add(Statistics.Variance(tdList[i]));
                            td_statistics.Add(Statistics.StandardDeviation(tdList[i]));
                            td_statistics.Add(Statistics.Median(tdList[i]));
                            td_statistics.Add(Statistics.LowerQuartile(tdList[i]));
                            td_statistics.Add(Statistics.UpperQuartile(tdList[i]));
                            td_statistics.Add(Statistics.Skewness(tdList[i]));
                            td_statistics.Add(Statistics.Kurtosis(tdList[i]));
                        }

                        // Write MFCCs
                        featureString += String.Join(",", mfcc_statistics);
                        featureString += ",";
                        featureString += String.Join(",", td_statistics);
                        //Write Spectral features as well
                        featureString += ",";
                        featureString += specFeatures;
                        Console.WriteLine($"{tag} Feature String ready {featureString}");
                        // Persist the chunk's feature row to a temp file so ML.NET can load it.
                        if (File.Exists(Path.Combine(Environment.GetFolderPath(Environment.SpecialFolder.LocalApplicationData), "temp")))
                        {
                            File.Delete(Path.Combine(Environment.GetFolderPath(Environment.SpecialFolder.LocalApplicationData), "temp"));
                            File.WriteAllText(Path.Combine(Environment.GetFolderPath(Environment.SpecialFolder.LocalApplicationData), "temp"), featureString);
                        }
                        else
                        {
                            File.WriteAllText(Path.Combine(Environment.GetFolderPath(Environment.SpecialFolder.LocalApplicationData), "temp"), featureString);
                        }

                        MLContext mLContext = new MLContext();

                        string fileName = Path.Combine(Environment.GetFolderPath(Environment.SpecialFolder.LocalApplicationData), "temp");

                        IDataView dataView = mLContext.Data.LoadFromTextFile <ModelInput>(
                            path: fileName,
                            hasHeader: false,
                            separatorChar: ',',
                            allowQuoting: true,
                            allowSparse: false);

                        // Use first line of dataset as model input
                        // You can replace this with new test data (hardcoded or from end-user application)
                        ModelInput sampleForPrediction = mLContext.Data.CreateEnumerable <ModelInput>(dataView, false)
                                                         .First();
                        ModelOutput opm = ConsumeModel.Predict(sampleForPrediction);
                        featureTimeList.Add(opm.Score);
                        Console.WriteLine($"{tag} Feature vs time list ready");
                    }
                    //Console.WriteLine($"{tag} MFCC: {mfccVectors.Count}");
                    //Console.WriteLine($"{tag} TD: {tdVectors.Count}");
                    //Console.WriteLine($"{tag} featureTimeArray: {featureTimeList.Count} {featureString}");
                }
            }
            playAudio();
        }
示例#14
0
        /// <summary>
        /// Renders every region in this collection through Vegas: for each region it
        /// solos the region's tracks, optionally pads the render range, clears any
        /// read-only flag on the output file, renders, and strips the padding again.
        /// All track-mute changes happen inside an UndoBlock that is cancelled at the
        /// end so the project is left unchanged.
        /// </summary>
        /// <param name="myVegas">The Vegas scripting host to render with.</param>
        public void Render(ScriptPortal.Vegas.Vegas myVegas)
        {
            SetProgressBounds(Count);
            using (UndoBlock undo = new UndoBlock("Render tracks"))
            {
                for (int i = 0; i < Count; i++)
                {
                    var ri = this[i];

                    // Solo this region's tracks: mute everything not in the region.
                    foreach (var trk in myVegas.Project.Tracks)
                    {
                        trk.Mute = !ri.Tracks.Contains(trk);
                    }

                    // padding
                    if (ri.RenderParams.GetParam <bool>(RenderTags.DoPadding))
                    {
                        // Padding is only supported for the WAV (PCM) renderer.
                        if (ri.RenderTemplate.RendererID != myVegas.Renderers.FindByName("Wave (Microsoft)").ID)
                        {
                            ErrorLog(
                                String.Format(
                                    "The region {0} could not be padded. Padded rendering can only be performed on .WAV (PCM) files.",
                                    ri.Region.Label));
                        }
                        else
                        {
                            var paddingTime = Timecode.FromSeconds(ri.PaddingSeconds);
                            if (ri.Start - paddingTime < myVegas.Project.Ruler.StartTime)
                            {
                                ErrorLog(String.Format(
                                             "The region {0} could not be padded. Move your region further into the project.", ri.Region.Label));
                            }
                            else
                            {
                                // Pad both ends: start moves back by one padding, and the
                                // length grows by two paddings (front and back).
                                // NOTE(review): the duplicated Length += looks deliberate
                                // for tail padding — confirm before "fixing".
                                ri.Start  -= paddingTime;
                                ri.Length += paddingTime;
                                ri.Length += paddingTime;
                            }
                        }
                    }

                    if (File.Exists(ri.FilePath) && ri.RenderParams.GetParam <bool>(RenderTags.DoReadonly))
                    {
                        // check readonly — clear the flag so the render can overwrite the file
                        var attr = File.GetAttributes(ri.FilePath);
                        if (attr.IsSet(FileAttributes.ReadOnly))
                        {
                            File.SetAttributes(ri.FilePath, attr & ~FileAttributes.ReadOnly);
                        }
                    }
                    SetProgress(i);
                    SetProgressStatus("Rendering " + ri.FilePath);
                    RenderStatus status = myVegas.Render(ri.FilePath, ri.RenderTemplate, ri.Start, ri.Length);
                    if (status != RenderStatus.Complete)
                    {
                        ErrorLog(String.Format("{0} raised error {1}", ri.FilePath, status.ToString()));
                    }
                    else
                    {
                        // file successfully rendered

                        // strip padding
                        if (ri.RenderParams.GetParam <bool>(RenderTags.DoPadding))
                        {
                            WaveFile.StripPadding(ri.FilePath, ri.PaddingSeconds);
                        }
                    }
                }
                // Restore all tracks to unmuted before cancelling the undo block.
                foreach (ScriptPortal.Vegas.Track trk in myVegas.Project.Tracks)
                {
                    trk.Mute = false;
                }
                undo.Cancel = true;                 // we didn't really do anything useful.
            }
        }
示例#15
0
 /// <summary>
 /// Initializes a new instance of the <see cref="T:NoobAudioLib.FX.Volume"/> class.
 /// </summary>
 /// <param name="waveFile">Wave file the effect will operate on.</param>
 public Volume(WaveFile waveFile) => this.waveFile = waveFile;
示例#16
0
        /// <summary>
        /// Coroutine that waits for a chat turn to be ready, sends the recognized
        /// speech to Watson Assistant, synthesizes the reply with Text-to-Speech,
        /// plays it, and waits for playback to finish.
        /// </summary>
        private IEnumerator ProcessChat()
        {
            // NOTE(review): "yield return null" only waits one frame and then falls
            // through — it does NOT abort the coroutine. If this was meant to bail out
            // while audio is playing, "yield break" may have been intended; confirm.
            if (source.isPlaying)
            {
                yield return(null);
            }

            // Block until the state machine signals a turn is ready to process.
            while (processStatus != "Process")
            {
                yield return(null);
            }

            // When processing the chat, ignore input speech
            // (this condition is always true after the loop above).
            if (processStatus == "Process")
            {
                speechStreamer.Active = false;
            }

            processStatus = "Processing";


            // NOTE(review): same one-frame-wait caveat as above — empty input still
            // proceeds to send a message; confirm whether "yield break" was intended.
            if (String.IsNullOrEmpty(inputSpeech))
            {
                yield return(null);
            }

            messageTested = false;
            var inputMessage = new MessageInput()
            {
                Text    = inputSpeech,
                Options = new MessageInputOptions()
                {
                    ReturnContext = true
                }
            };


            Assistant_service.Message(OnMessage, assistantId, sessionId, input: inputMessage);

            // Wait until OnMessage sets messageTested (and populates _testString).
            while (!messageTested)
            {
                messageTested = false;
                yield return(null);
            }


            //_testString = "I am Bob";
            if (!String.IsNullOrEmpty(_testString))
            {
                byte[]    synthesizeResponse = null;
                AudioClip clip = null;
                tts_service.Synthesize(
                    callback: (DetailedResponse <byte[]> response, IBMError error) =>
                {
                    synthesizeResponse = response.Result;
                    clip = WaveFile.ParseWAV("myClip" + counter.ToString(), synthesizeResponse);
                    PlayClip(clip);
                },
                    text: _testString,
                    //voice: "en-US_AllisonV3Voice",
                    voice: "en-US_MichaelV3Voice",
                    //voice: "en-US_MichaelVoice",
                    accept: "audio/wav"
                    );

                // Wait for the synthesis callback to deliver the audio.
                while (synthesizeResponse == null)
                {
                    yield return(null);
                }

                counter++;

                // Set the flag to know that speech processing has finished
                processStatus = "Finished";

                // Hold the coroutine until the synthesized clip has finished playing.
                yield return(new WaitForSeconds(clip.length));
            }
        }
示例#17
0
        /// <summary>
        /// Coroutine that authenticates and constructs the Watson Assistant and
        /// Text-to-Speech services, creates an Assistant session, sends an initial
        /// (empty) message to get the greeting, synthesizes it, and plays it back.
        /// </summary>
        /// <exception cref="IBMException">Thrown when either API key is missing.</exception>
        public IEnumerator CreateService()
        {
            if (string.IsNullOrEmpty(Assistant_apikey))
            {
                throw new IBMException("Please provide Watson Assistant IAM ApiKey for the service.");
            }

            //  Create credential and instantiate service
            //            IamAuthenticator authenticator = new IamAuthenticator(apikey: Assistant_apikey, url: serviceUrl);
            IamAuthenticator authenticator = new IamAuthenticator(apikey: Assistant_apikey);

            //  Wait for tokendata
            while (!authenticator.CanAuthenticate())
            {
                yield return(null);
            }

            Assistant_service = new AssistantService(versionDate, authenticator);
            if (!string.IsNullOrEmpty(serviceUrl))
            {
                Assistant_service.SetServiceUrl(serviceUrl);
            }

            if (string.IsNullOrEmpty(tts_apikey))
            {
                throw new IBMException("Please provide Text-to-speech IAM ApiKey for the service.");
            }


            //  Create credential and instantiate service
            tts_authenticator = new IamAuthenticator(apikey: tts_apikey);

            //  Wait for tokendata
            while (!tts_authenticator.CanAuthenticate())
            {
                yield return(null);
            }

            tts_service = new TextToSpeechService(tts_authenticator);
            if (!string.IsNullOrEmpty(tts_serviceUrl))
            {
                tts_service.SetServiceUrl(tts_serviceUrl);
            }

            // Set speech processing status to "Processing"
            processStatus = "Processing";

            // Create services
            Runnable.Run(speechStreamer.CreateService());

            // Ignore input speech while the bot is speaking.
            speechStreamer.Active = false;


            Assistant_service.CreateSession(OnCreateSession, assistantId);

            // Wait until OnCreateSession has set the session id.
            while (!createSessionTested)
            {
                yield return(null);
            }

            messageTested = false;
            // NOTE(review): inputMessage is built here but never passed to Message()
            // below (unlike ProcessChat) — for the initial greeting that may be
            // intentional, but confirm.
            var inputMessage = new MessageInput()
            {
                Text    = inputSpeech,
                Options = new MessageInputOptions()
                {
                    ReturnContext = true
                }
            };

            Assistant_service.Message(OnMessage, assistantId, sessionId);
            // Wait until OnMessage sets messageTested (and populates _testString).
            while (!messageTested)
            {
                messageTested = false;
                yield return(null);
            }


            //_testString = "I am Bob";
            //            if (!String.IsNullOrEmpty(_testString))
            //            {
            byte[]    synthesizeResponse = null;
            AudioClip clip = null;

            tts_service.Synthesize(
                callback: (DetailedResponse <byte[]> response, IBMError error) =>
            {
                synthesizeResponse = response.Result;
                clip = WaveFile.ParseWAV("myClip" + counter.ToString(), synthesizeResponse);
                PlayClip(clip);
            },
                text: _testString,
                //voice: "en-US_AllisonV3Voice",
                voice: "en-US_MichaelV3Voice",
                //voice: "en-US_MichaelVoice",
                accept: "audio/wav"
                );

            // Wait for the synthesis callback to deliver the audio.
            while (synthesizeResponse == null)
            {
                yield return(null);
            }

            counter++;

            processStatus = "Finished";

            // Hold the coroutine until the synthesized clip has finished playing.
            yield return(new WaitForSeconds(clip.length));

            //            }
        }
示例#18
0
            /// <summary>
            /// Renders the current song to the selected file. For .ogg targets the
            /// song is first rendered to a sibling .wav file and then converted.
            /// </summary>
            private void doExport()
            {
                // Nothing to do when no target file has been chosen.
                if (string.IsNullOrEmpty(FileName))
                {
                    return;
                }

                // Decide where the intermediate WAV is written.
                string renderPath = FileName;
                if (System.IO.Path.GetExtension(FileName).ToLowerInvariant() == ".ogg")
                {
                    string directory = System.IO.Path.GetDirectoryName(FileName);
                    string baseName = System.IO.Path.GetFileNameWithoutExtension(FileName);
                    renderPath = System.IO.Path.Combine(directory, string.Format("{0}.wav", baseName));
                }

                WaveFile wav = new WaveFile(2, 16, 44100, renderPath);

                // Dump the current song into the wave file, forwarding progress events.
                Player player = new Player();
                player.Stereo = ApplicationState.Instance.Stereo;
                player.NextLine += new Player.NextLineEventHandler(updateNextLine);
                player.SongFinished += new EventHandler(songFinished);
                player.CurrentSong = ApplicationState.Instance.CurrentSong;
                player.DumpToWavFile(wav, NumRepeats);

                wav.Close();

                // Only the .ogg case rendered to a temporary name; convert it now.
                if (renderPath != FileName)
                {
                    convertToOgg(renderPath);
                }

                this.Parent.NotifyCompleted();
            }
 //--Methods
 /// <summary>
 /// Converts the wave file's raw sample bytes to interleaved float samples with
 /// the requested channel count, delegating to the raw-buffer overload.
 /// </summary>
 /// <param name="wave">Source wave file.</param>
 /// <param name="expectedChannels">Channel count of the returned interleaved data.</param>
 public static float[] GetSampleDataInterleaved(WaveFile wave, int expectedChannels)
 {
     byte[] rawData = wave.Data.RawSampleData;
     int bitsPerSample = wave.Format.BitsPerSample;
     int sourceChannels = wave.Format.ChannelCount;
     return GetSampleDataInterleaved(rawData, bitsPerSample, sourceChannels, expectedChannels);
 }
示例#20
0
File: DTMF.cs Project: rmc00/gsf
 /// <summary>
 /// Generates the specified dual-tone multi-frequency <paramref name="repeatCount"/> times storing it in the specified <see cref="WaveFile"/>.
 /// </summary>
 /// <param name="destination"><see cref="WaveFile"/> used to store generated dual-tone multi-frequencies.</param>
 /// <param name="tone">Dual-tone multi-frequency to generate.</param>
 /// <param name="volume">Volume of generated dual-tones as a percentage (0 to 1).</param>
 /// <param name="repeatCount">Number of times to repeat the tone.</param>
 public static void Generate(WaveFile destination, DTMF tone, double volume, int repeatCount)
 {
     // Wrap the single tone into an array and forward to the array overload.
     var tones = new[] { tone };
     Generate(destination, tones, volume, repeatCount);
 }
示例#21
0
        /// <summary>
        /// Extracts MFCC, time-domain and spectral features from the wav file at
        /// _filePath, classifies sampled frames with the pre-trained model and
        /// appends each prediction score to featureTimeList.
        /// Fixes: removed unused locals (the unreferenced second Fft, the unused
        /// 25-element scratch array, the unused ModelInput and spectral buffer),
        /// hoisted the temp-file path and MLContext out of the loop, and dropped
        /// the redundant delete-before-write (WriteAllText overwrites).
        /// </summary>
        public void extractFeatures()
        {
            //NWaves initial setup
            if (_filePath == null)
            {
                return;
            }

            var mfcc_no      = 24;
            var samplingRate = 44100;
            var mfccOptions  = new MfccOptions
            {
                SamplingRate  = samplingRate,
                FeatureCount  = mfcc_no,
                FrameDuration = 0.025 /*sec*/,
                HopDuration   = 0.010 /*sec*/,
                PreEmphasis   = 0.97,
                Window        = WindowTypes.Hamming
            };

            var opts = new MultiFeatureOptions
            {
                SamplingRate  = samplingRate,
                FrameDuration = 0.025,
                HopDuration   = 0.010
            };
            var tdExtractor   = new TimeDomainFeaturesExtractor(opts);
            var mfccExtractor = new MfccExtractor(mfccOptions);

            featureString = $"green,";

            // Reusable per-frame buffers (24 MFCCs + 4 time-domain features).
            var avg_vec_mfcc = new List <float>(mfcc_no);
            var avg_vec_td   = new List <float>(4);
            for (var i = 0; i < mfcc_no; i++)
            {
                avg_vec_mfcc.Add(0f);
            }
            for (var i = 0; i < 4; i++)
            {
                avg_vec_td.Add(0f);
            }

            // Scratch file handed to ML.NET's text loader; File.WriteAllText
            // overwrites an existing file, so no explicit delete is required.
            string tempFile = Path.Combine(Environment.GetFolderPath(Environment.SpecialFolder.LocalApplicationData), "temp");
            MLContext mLContext = new MLContext();

            string specFeatures = String.Empty;
            Console.WriteLine($"{tag} Reading from file");
            using (var stream = new FileStream(_filePath, FileMode.Open))
            {
                var waveFile = new WaveFile(stream);
                DiscreteSignal signal = waveFile[channel : Channels.Left];

                // Frame-level features.
                mfccVectors = mfccExtractor.ComputeFrom(signal.Samples);
                tdVectors   = tdExtractor.ComputeFrom(signal.Samples);

                // Whole-signal spectral descriptors computed from a single Fft
                // instance (previously one Fft was created and never used while a
                // second identical one computed the spectrum).
                var fftSize    = 1024;
                var fft        = new Fft(fftSize);
                var resolution = (float)samplingRate / fftSize;

                var frequencies = Enumerable.Range(0, fftSize / 2 + 1)
                                  .Select(f => f * resolution)
                                  .ToArray();

                var spectrum = fft.MagnitudeSpectrum(signal).Samples;

                var centroid  = Spectral.Centroid(spectrum, frequencies);
                var spread    = Spectral.Spread(spectrum, frequencies);
                var flatness  = Spectral.Flatness(spectrum, 0);
                var noiseness = Spectral.Noiseness(spectrum, frequencies, 3000);
                var rolloff   = Spectral.Rolloff(spectrum, frequencies, 0.85f);
                var crest     = Spectral.Crest(spectrum);
                var decrease  = Spectral.Decrease(spectrum);
                var entropy   = Spectral.Entropy(spectrum);
                specFeatures = $"{centroid},{spread},{flatness},{noiseness},{rolloff},{crest},{decrease},{entropy}";

                Console.WriteLine($"{tag} All features ready");

                // NOTE(review): if mfccVectors.Count / duration equals 1 the step
                // below is zero and this loop never advances — confirm duration is
                // always much smaller than the frame count.
                for (int calibC = 0; calibC < mfccVectors.Count; calibC += (mfccVectors.Count / duration) - 1)
                {
                    for (var i = 0; i < mfcc_no; i++)
                    {
                        avg_vec_mfcc[i] = mfccVectors[calibC][i];
                    }
                    for (var i = 0; i < 4; i++)
                    {
                        avg_vec_td[i] = tdVectors[calibC][i];
                    }

                    featureString = String.Join(",", avg_vec_mfcc) + "," + String.Join(",", avg_vec_td) + "," + specFeatures;
                    Console.WriteLine($"{tag} Feature String ready {featureString}");

                    File.WriteAllText(tempFile, featureString);

                    IDataView dataView = mLContext.Data.LoadFromTextFile <ModelInput>(
                        path: tempFile,
                        hasHeader: false,
                        separatorChar: ',',
                        allowQuoting: true,
                        allowSparse: false);

                    // Use the single written line of the dataset as model input.
                    ModelInput sampleForPrediction = mLContext.Data.CreateEnumerable <ModelInput>(dataView, false)
                                                     .First();
                    ModelOutput opm = ConsumeModel.Predict(sampleForPrediction);
                    featureTimeList.Add(opm.Score);
                    Console.WriteLine($"{tag} Feature vs time list ready");
                }
            }
        }
示例#22
0
 /// <summary>
 /// Initializes a new instance of the <see cref="T:NoobAudioLib.FX.PitchShift"/> class.
 /// </summary>
 /// <param name="SampleRate">Sample rate.</param>
 /// <param name="waveFile">Wave file.</param>
 /// <param name="pitchModifier">NOTE(review): currently ignored — it is neither
 /// stored nor used by this constructor; confirm whether it should be kept.</param>
 public PitchShift(int SampleRate, WaveFile waveFile, int pitchModifier)
 {
     this.waveFile   = waveFile;
     this.SampleRate = SampleRate;
 }
示例#23
0
        /// <summary>
        /// Loads a wav file chosen by the user, computes pitch, time-domain,
        /// spectral and MPEG-7 feature vectors, fills the features list and shows
        /// the spectrogram.
        /// Fix: the OpenFileDialog is IDisposable and was previously leaked.
        /// </summary>
        private void openToolStripMenuItem_Click(object sender, EventArgs e)
        {
            string fileName;
            using (var ofd = new OpenFileDialog())
            {
                if (ofd.ShowDialog() != DialogResult.OK)
                {
                    return;
                }

                fileName = ofd.FileName;
            }

            // Left channel only.
            using (var stream = new FileStream(fileName, FileMode.Open))
            {
                var waveFile = new WaveFile(stream);
                _signal = waveFile[Channels.Left];
            }

            _stft = new Stft(_frameSize, _hopSize);

            var frameDuration = (double)_frameSize / _signal.SamplingRate;
            var hopDuration   = (double)_hopSize / _signal.SamplingRate;

            var freqs = new[] { 300f, 600, 1000, 2000, 4000, 7000 };

            var pitchOptions = new PitchOptions
            {
                SamplingRate  = _signal.SamplingRate,
                FrameDuration = frameDuration,
                HopDuration   = hopDuration,
                HighFrequency = 900/*Hz*/
            };

            var pitchExtractor = new PitchExtractor(pitchOptions);
            var pitchTrack     = pitchExtractor.ParallelComputeFrom(_signal)
                                 .Select(p => p[0])
                                 .ToArray();

            var options = new MultiFeatureOptions
            {
                SamplingRate  = _signal.SamplingRate,
                FrameDuration = frameDuration,
                HopDuration   = hopDuration
            };

            var tdExtractor = new TimeDomainFeaturesExtractor(options);

            // Extra time-domain feature: pitch estimated from zero crossings.
            tdExtractor.AddFeature("pitch_zcr", (signal, start, end) => Pitch.FromZeroCrossingsSchmitt(signal, start, end));

            var mpeg7Extractor = new Mpeg7SpectralFeaturesExtractor(options);

            mpeg7Extractor.IncludeHarmonicFeatures("all");
            mpeg7Extractor.SetPitchTrack(pitchTrack);

            options.FeatureList = "sc+sn";
            options.Frequencies = freqs;
            var spectralExtractor = new SpectralFeaturesExtractor(options);
            //spectralExtractor.AddFeature("pitch_hss", (spectrum, fs) => Pitch.FromHss(spectrum, _signal.SamplingRate));

            var tdVectors       = tdExtractor.ParallelComputeFrom(_signal);
            var spectralVectors = spectralExtractor.ParallelComputeFrom(_signal);
            var mpeg7Vectors    = mpeg7Extractor.ComputeFrom(_signal);

            _vectors = FeaturePostProcessing.Join(tdVectors, spectralVectors, mpeg7Vectors);

            //FeaturePostProcessing.NormalizeMean(_vectors);
            //FeaturePostProcessing.AddDeltas(_vectors);

            var descriptions = tdExtractor.FeatureDescriptions
                               .Concat(spectralExtractor.FeatureDescriptions)
                               .Concat(mpeg7Extractor.FeatureDescriptions)
                               .ToList();

            FillFeaturesList(_vectors, descriptions, tdExtractor.TimeMarkers(_vectors.Length));

            spectrogramPlot.ColorMapName      = "afmhot";
            spectrogramPlot.MarklineThickness = 2;
            spectrogramPlot.Spectrogram       = _stft.Spectrogram(_signal);
        }
示例#24
0
 /// <summary>
 /// Initializes a new instance of the <see cref="T:NoobAudioLib.FX.Reverb"/> class.
 /// </summary>
 /// <param name="waveFile">Wave file.</param>
 /// <param name="DelayMS">Delay in milliseconds.</param>
 /// <param name="Decay">Decay.</param>
 public Reverb(WaveFile waveFile, int DelayMS, float Decay)
 {
     this.waveFile = waveFile;
     this.DelayMS = DelayMS;
     this.Decay = Decay;
 }
示例#25
0
 /// <summary>
 /// Initializes a new instance of the Reverse effect.
 /// </summary>
 /// <param name="waveFile">Wave file.</param>
 public Reverse(WaveFile waveFile)
 {
     this.waveFile = waveFile;
 }
示例#26
0
 /// <summary>
 /// Initializes a new instance of the <see cref="T:NoobAudioLib.FX.Trimmer"/> class.
 /// </summary>
 /// <param name="SampleRate">Sample rate.</param>
 /// <param name="waveFile">Wave file.</param>
 public Trimmer(int SampleRate, WaveFile waveFile)
 {
     this.SampleRate = SampleRate;
     this.waveFile = waveFile;
 }
示例#27
0
 /// <summary>
 /// Handles a drag-and-drop of files by opening each dropped file.
 /// Fix: removed a WaveFile local that was allocated, repeatedly overwritten with
 /// OpenFile's result and never read.
 /// </summary>
 /// <param name="sender">Event source.</param>
 /// <param name="e">Drag event data carrying the dropped file paths.</param>
 private void OnDragDrop(object sender, DragEventArgs e)
 {
     string[] files = (string[])e.Data.GetData(DataFormats.FileDrop);
     // OpenFile is called for its side effect only.
     foreach (string file in files)
     {
         WaveManagerBusiness.WaveManager.OpenFile(file);
     }
 }
示例#28
0
 /// <summary>
 /// Initializes a new instance of the <see cref="T:NoobAudioLib.FX.SpeedShift"/> class.
 /// </summary>
 /// <param name="waveFile">Wave file the effect operates on.</param>
 public SpeedShift(WaveFile waveFile)
 {
     this.waveFile = waveFile;
 }
示例#29
0
 /// <summary>
 /// Refreshes toolbar options for the current window by delegating to the
 /// parameterless overload.
 /// </summary>
 /// <param name="file">Ignored — presumably accepted so this method matches a
 /// WaveFile-taking delegate signature; confirm against the caller.</param>
 private void UpdateToolbarOptionsForCurrentWindow(WaveFile file)
 {
     UpdateToolbarOptionsForCurrentWindow();
 }
示例#30
0
 /// <summary>
 /// Writes a raw audio buffer to <paramref name="destFile"/> via
 /// <c>WaveFile.WriteWaveData</c>.
 /// </summary>
 /// <param name="buff">Raw audio bytes to write.</param>
 /// <param name="wfex">Wave format describing the buffer.</param>
 /// <param name="destFile">Destination file path.</param>
 public void WriteBuffer(byte[] buff, WaveFormatEx wfex, string destFile)
 {
     WaveFile.WriteWaveData(buff, wfex, destFile);
 }
示例#31
0
 /// <summary>
 /// Generates the specified dual-tone multi-frequency storing it in the specified <see cref="WaveFile"/>.
 /// </summary>
 /// <param name="destination"><see cref="WaveFile"/> used to store generated dual-tone multi-frequencies.</param>
 /// <param name="tone">Dual-tone multi-frequency to generate.</param>
 /// <param name="volume">Volume of generated dual-tones as a percentage (0 to 1).</param>
 public static void Generate(WaveFile destination, DTMF tone, double volume)
 {
     // Single tone, generated exactly once.
     Generate(destination, new[] { tone }, volume, repeatCount: 1);
 }
示例#32
0
		/// <summary>
		/// Create an instance of WaveIn.
		/// </summary>
		public WaveIn()
		{
			// The two members are independent; the message window is handed this
			// instance so it can route messages back.
			m_file = new WaveFile();
			m_msgWindow = new SoundMessageWindow(this);
		}
示例#33
0
 /// <summary>
 /// Generates the specified dual-tone multi-frequency <paramref name="repeatCount"/> times storing it in the specified <see cref="WaveFile"/>.
 /// </summary>
 /// <param name="destination"><see cref="WaveFile"/> used to store generated dual-tone multi-frequencies.</param>
 /// <param name="tone">Dual-tone multi-frequency to generate.</param>
 /// <param name="volume">Volume of generated dual-tones as a percentage (0 to 1).</param>
 /// <param name="repeatCount">Number of times to repeat the tone.</param>
 public static void Generate(WaveFile destination, DTMF tone, double volume, int repeatCount)
 {
     // Promote the single tone to an array and forward.
     DTMF[] tones = { tone };
     Generate(destination, tones, volume, repeatCount);
 }
示例#34
0
 /// <summary>
 /// Initializes the particle vacuum effect and gives every particle an initial
 /// expulsion velocity away from the particle system's position.
 /// Fix: removed a per-particle normalized direction toward the vacuum target
 /// that was computed but never used.
 /// </summary>
 /// <param name="particleSystem">Particle system whose particles are modified.</param>
 /// <param name="vacuumTarget">Transform stored as the vacuum target.</param>
 /// <param name="audioBuffer">Audio buffer stored for the effect.</param>
 /// <param name="deletionDistance">Stored in <c>DeletionDistance</c>.</param>
 public ParticleVacuumEffect(ParticleSystem particleSystem, Transform vacuumTarget, WaveFile audioBuffer, float deletionDistance)
 {
     _particleSystem  = particleSystem;
     _vacuumTarget    = vacuumTarget;
     _waveFile        = audioBuffer;
     DeletionDistance = deletionDistance;

     // Kick each particle outward from the system origin.
     _particleSystem.ModifyAllParticles((ref ParticleState ps) =>
     {
         ps.Velocity += (ps.Offset - _particleSystem.Transform.Position) * InitialExpulsionVelocity;
     });
 }
示例#35
0
 /// <summary>
 /// Generates a single instance of each of the specified dual-tone multi-frequencies storing them in the specified <see cref="WaveFile"/>.
 /// </summary>
 /// <param name="destination"><see cref="WaveFile"/> used to store generated dual-tone multi-frequencies.</param>
 /// <param name="tones">Dual-tone multi-frequencies to generate.</param>
 /// <param name="volume">Volume of generated dual-tones as a percentage (0 to 1).</param>
 public static void Generate(WaveFile destination, DTMF[] tones, double volume)
 {
     // One pass through the supplied tones.
     Generate(destination, tones, volume, repeatCount: 1);
 }
示例#36
0
        /// <summary>
        /// Transcodes <paramref name="inputFile"/> between WAV and MP3 according to
        /// <c>InputFormat</c>/<c>OutputFormat</c>: WAV→MP3 encode, MP3→WAV decode,
        /// MP3→MP3 re-encode. Throws for unsupported combinations.
        /// Fixes: the "unsupported output format" message previously reported the
        /// input format; the intermediate temp WAV leaked when decoding or encoding
        /// threw; unchecked <c>as</c> casts were dereferenced (now direct casts).
        /// </summary>
        /// <param name="encoderSettings">Encoder settings; must be <c>Mp3EncoderSettings</c> when producing MP3.</param>
        /// <param name="inputFile">Path of the source file.</param>
        public void DoTranscoding(EncoderSettings encoderSettings, string inputFile)
        {
            _grabber = CdRipper.CreateGrabber(OutputFormat);
            if (_grabber == null)
            {
                // Report the format that actually failed to resolve: the output one.
                throw new NotSupportedException(string.Format("TXT_UNSUPPORTED_OUTPUT_FORMAT: {0}", OutputFormat));
            }

            switch (InputFormat)
            {
            case AudioMediaFormatType.WAV:
                switch (OutputFormat)
                {
                case AudioMediaFormatType.MP3:
                {
                    // Transcode WAV => MP3 i.o.w encode the wav
                    WaveFormatEx wfex    = WaveFormatEx.Cdda;
                    byte[]       buff    = WaveFile.ReadWaveData(inputFile, ref wfex);
                    // Direct casts: a mismatch surfaces as InvalidCastException
                    // instead of a null dereference.
                    GrabberToMP3 grabber = (GrabberToMP3)_grabber;
                    grabber.Options = ((Mp3EncoderSettings)encoderSettings).Options;

                    // Resample is not supported at this time.
                    // Specify the same settings as the input WAV file, otherwise we'll be failing.
                    grabber.Options.WaveFormat = wfex;

                    grabber.EncodeBuffer(buff,
                                         Path.ChangeExtension(inputFile, "MP3"),
                                         false, null);

                    return;
                }
                }
                break;

            case AudioMediaFormatType.MP3:
                switch (OutputFormat)
                {
                case AudioMediaFormatType.WAV:
                    // Transcode MP3 => WAV i.o.w decode the MP3
                    string outputFile = Path.ChangeExtension(inputFile, "WAV");
                    if (DecodeMP3ToWAV(inputFile, outputFile) == false)
                    {
                        throw new Exception("TXT_FAILED_CONVERSION_MP3_WAV");
                    }

                    return;

                case AudioMediaFormatType.MP3:
                {
                    // Transcode MP3 => MP3 i.o.w adjust MP3 encoding
                    string tempWavFile = Path.GetTempFileName();
                    try
                    {
                        if (DecodeMP3ToWAV(inputFile, tempWavFile) == false)
                        {
                            throw new Exception("TXT_FAILED_CONVERSION_MP3_TEMP_WAV");
                        }

                        WaveFormatEx wfex = WaveFormatEx.Cdda;
                        byte[]       buff = WaveFile.ReadWaveData(tempWavFile, ref wfex);

                        GrabberToMP3 grabber = (GrabberToMP3)_grabber;
                        grabber.Options = ((Mp3EncoderSettings)encoderSettings).Options;

                        ID3FileInfoSlim ifiSlim =
                            new ID3FileInfoSlim(MediaFileInfo.FromPath(inputFile, false));

                        grabber.EncodeBuffer(buff,
                                             Path.ChangeExtension(inputFile, "REENC.MP3"),
                                             ((Mp3EncoderSettings)encoderSettings).CopyInputFileMetadata,
                                             ifiSlim);
                    }
                    finally
                    {
                        // The temp WAV is always scratch data: remove it even when
                        // decode/encode fails.
                        if (File.Exists(tempWavFile))
                        {
                            File.Delete(tempWavFile);
                        }
                    }

                    return;
                }
                }
                break;
            }

            throw new NotSupportedException(string.Format("TXT_UNSUPPORTED_TRANSCODING: {0}", this));
        }
示例#37
0
        public override IEnumerator RunTest()
        {
            LogSystem.InstallDefaultReactors();

            try
            {
                VcapCredentials vcapCredentials = new VcapCredentials();
                fsData          data            = null;

                //  Get credentials from a credential file defined in environmental variables in the VCAP_SERVICES format.
                //  See https://www.ibm.com/watson/developercloud/doc/common/getting-started-variables.html.
                var environmentalVariable = Environment.GetEnvironmentVariable("VCAP_SERVICES");
                var fileContent           = File.ReadAllText(environmentalVariable);

                //  Add in a parent object because Unity does not like to deserialize root level collection types.
                fileContent = Utility.AddTopLevelObjectToJson(fileContent, "VCAP_SERVICES");

                //  Convert json to fsResult
                fsResult r = fsJsonParser.Parse(fileContent, out data);
                if (!r.Succeeded)
                {
                    throw new WatsonException(r.FormattedMessages);
                }

                //  Convert fsResult to VcapCredentials
                object obj = vcapCredentials;
                r = _serializer.TryDeserialize(data, obj.GetType(), ref obj);
                if (!r.Succeeded)
                {
                    throw new WatsonException(r.FormattedMessages);
                }

                //  Set credentials from imported credntials
                Credential credential = vcapCredentials.VCAP_SERVICES["speech_to_text"][TestCredentialIndex].Credentials;
                _username = credential.Username.ToString();
                _password = credential.Password.ToString();
                _url      = credential.Url.ToString();
            }
            catch
            {
                Log.Debug("TestSpeechToText", "Failed to get credentials from VCAP_SERVICES file. Please configure credentials to run this test. For more information, see: https://github.com/watson-developer-cloud/unity-sdk/#authentication");
            }

            //  Create credential and instantiate service
            Credentials credentials = new Credentials(_username, _password, _url);

            //  Or authenticate using token
            //Credentials credentials = new Credentials(_url)
            //{
            //    AuthenticationToken = _token
            //};

            _speechToText         = new SpeechToText(credentials);
            _customCorpusFilePath = Application.dataPath + "/Watson/Examples/ServiceExamples/TestData/test-stt-corpus.txt";
            _customWordsFilePath  = Application.dataPath + "/Watson/Examples/ServiceExamples/TestData/test-stt-words.json";
            _wavFilePath          = Application.dataPath + "/Watson/Examples/ServiceExamples/TestData/test-audio.wav";
            _audioClip            = WaveFile.ParseWAV("testClip", File.ReadAllBytes(_wavFilePath));

            Log.Debug("ExampleSpeechToText", "Attempting to recognize");
            _speechToText.Recognize(_audioClip, HandleOnRecognize);
            while (!_recognizeTested)
            {
                yield return(null);
            }

            //  Get models
            Log.Debug("ExampleSpeechToText", "Attempting to get models");
            _speechToText.GetModels(HandleGetModels);
            while (!_getModelsTested)
            {
                yield return(null);
            }

            //  Get model
            Log.Debug("ExampleSpeechToText", "Attempting to get model {0}", _modelNameToGet);
            _speechToText.GetModel(HandleGetModel, _modelNameToGet);
            while (!_getModelTested)
            {
                yield return(null);
            }

            //  Get customizations
            Log.Debug("ExampleSpeechToText", "Attempting to get customizations");
            _speechToText.GetCustomizations(HandleGetCustomizations);
            while (!_getCustomizationsTested)
            {
                yield return(null);
            }

            //  Create customization
            Log.Debug("ExampleSpeechToText", "Attempting create customization");
            _speechToText.CreateCustomization(HandleCreateCustomization, "unity-test-customization", "en-US_BroadbandModel", "Testing customization unity");
            while (!_createCustomizationsTested)
            {
                yield return(null);
            }

            //  Get customization
            Log.Debug("ExampleSpeechToText", "Attempting to get customization {0}", _createdCustomizationID);
            _speechToText.GetCustomization(HandleGetCustomization, _createdCustomizationID);
            while (!_getCustomizationTested)
            {
                yield return(null);
            }

            //  Get custom corpora
            Log.Debug("ExampleSpeechToText", "Attempting to get custom corpora for {0}", _createdCustomizationID);
            _speechToText.GetCustomCorpora(HandleGetCustomCorpora, _createdCustomizationID);
            while (!_getCustomCorporaTested)
            {
                yield return(null);
            }

            //  Add custom corpus
            Log.Debug("ExampleSpeechToText", "Attempting to add custom corpus {1} in customization {0}", _createdCustomizationID, _createdCorpusName);
            _speechToText.AddCustomCorpus(HandleAddCustomCorpus, _createdCustomizationID, _createdCorpusName, true, _customCorpusFilePath);
            while (!_addCustomCorpusTested)
            {
                yield return(null);
            }

            //  Get custom corpus
            Log.Debug("ExampleSpeechToText", "Attempting to get custom corpus {1} in customization {0}", _createdCustomizationID, _createdCorpusName);
            _speechToText.GetCustomCorpus(HandleGetCustomCorpus, _createdCustomizationID, _createdCorpusName);
            while (!_getCustomCorpusTested)
            {
                yield return(null);
            }

            //  Wait for customization
            Runnable.Run(CheckCustomizationStatus(_createdCustomizationID));
            while (!_isCustomizationReady)
            {
                yield return(null);
            }

            //  Get custom words
            Log.Debug("ExampleSpeechToText", "Attempting to get custom words.");
            _speechToText.GetCustomWords(HandleGetCustomWords, _createdCustomizationID);
            while (!_getCustomWordsTested)
            {
                yield return(null);
            }

            //  Add custom words from path
            Log.Debug("ExampleSpeechToText", "Attempting to add custom words in customization {0} using Words json path {1}", _createdCustomizationID, _customWordsFilePath);
            string customWords = File.ReadAllText(_customWordsFilePath);

            _speechToText.AddCustomWords(HandleAddCustomWordsFromPath, _createdCustomizationID, customWords);
            while (!_addCustomWordsFromPathTested)
            {
                yield return(null);
            }

            //  Wait for customization
            _isCustomizationReady = false;
            Runnable.Run(CheckCustomizationStatus(_createdCustomizationID));
            while (!_isCustomizationReady)
            {
                yield return(null);
            }

            //  Add custom words from object
            Words       words    = new Words();
            Word        w0       = new Word();
            List <Word> wordList = new List <Word>();

            w0.word           = "mikey";
            w0.sounds_like    = new string[1];
            w0.sounds_like[0] = "my key";
            w0.display_as     = "Mikey";
            wordList.Add(w0);
            Word w1 = new Word();

            w1.word           = "charlie";
            w1.sounds_like    = new string[1];
            w1.sounds_like[0] = "char lee";
            w1.display_as     = "Charlie";
            wordList.Add(w1);
            Word w2 = new Word();

            w2.word           = "bijou";
            w2.sounds_like    = new string[1];
            w2.sounds_like[0] = "be joo";
            w2.display_as     = "Bijou";
            wordList.Add(w2);
            words.words = wordList.ToArray();

            Log.Debug("ExampleSpeechToText", "Attempting to add custom words in customization {0} using Words object", _createdCustomizationID);
            _speechToText.AddCustomWords(HandleAddCustomWordsFromObject, _createdCustomizationID, words);
            while (!_addCustomWordsFromObjectTested)
            {
                yield return(null);
            }

            //  Wait for customization
            _isCustomizationReady = false;
            Runnable.Run(CheckCustomizationStatus(_createdCustomizationID));
            while (!_isCustomizationReady)
            {
                yield return(null);
            }

            //  Get custom word
            Log.Debug("ExampleSpeechToText", "Attempting to get custom word {1} in customization {0}", _createdCustomizationID, words.words[0].word);
            _speechToText.GetCustomWord(HandleGetCustomWord, _createdCustomizationID, words.words[0].word);
            while (!_getCustomWordTested)
            {
                yield return(null);
            }

            //  Train customization
            Log.Debug("ExampleSpeechToText", "Attempting to train customization {0}", _createdCustomizationID);
            _speechToText.TrainCustomization(HandleTrainCustomization, _createdCustomizationID);
            while (!_trainCustomizationTested)
            {
                yield return(null);
            }

            //  Wait for customization
            _isCustomizationReady = false;
            Runnable.Run(CheckCustomizationStatus(_createdCustomizationID));
            while (!_isCustomizationReady)
            {
                yield return(null);
            }

            //  Upgrade customization - not currently implemented in service
            //Log.Debug("ExampleSpeechToText", "Attempting to upgrade customization {0}", _createdCustomizationID);
            //_speechToText.UpgradeCustomization(HandleUpgradeCustomization, _createdCustomizationID);
            //while (!_upgradeCustomizationTested)
            //    yield return null;

            //  Delete custom word
            Log.Debug("ExampleSpeechToText", "Attempting to delete custom word {1} in customization {0}", _createdCustomizationID, words.words[2].word);
            _speechToText.DeleteCustomWord(HandleDeleteCustomWord, _createdCustomizationID, words.words[2].word);
            while (!_deleteCustomWordTested)
            {
                yield return(null);
            }

            //  Delay
            Log.Debug("ExampleDiscovery", string.Format("Delaying delete environment for {0} sec", _delayTimeInSeconds));
            Runnable.Run(Delay(_delayTimeInSeconds));
            while (!_readyToContinue)
            {
                yield return(null);
            }

            _readyToContinue = false;
            //  Delete custom corpus
            Log.Debug("ExampleSpeechToText", "Attempting to delete custom corpus {1} in customization {0}", _createdCustomizationID, _createdCorpusName);
            _speechToText.DeleteCustomCorpus(HandleDeleteCustomCorpus, _createdCustomizationID, _createdCorpusName);
            while (!_deleteCustomCorpusTested)
            {
                yield return(null);
            }

            //  Delay
            Log.Debug("ExampleDiscovery", string.Format("Delaying delete environment for {0} sec", _delayTimeInSeconds));
            Runnable.Run(Delay(_delayTimeInSeconds));
            while (!_readyToContinue)
            {
                yield return(null);
            }

            _readyToContinue = false;
            //  Reset customization
            Log.Debug("ExampleSpeechToText", "Attempting to reset customization {0}", _createdCustomizationID);
            _speechToText.ResetCustomization(HandleResetCustomization, _createdCustomizationID);
            while (!_resetCustomizationTested)
            {
                yield return(null);
            }

            //  Delay
            Log.Debug("ExampleDiscovery", string.Format("Delaying delete environment for {0} sec", _delayTimeInSeconds));
            Runnable.Run(Delay(_delayTimeInSeconds));
            while (!_readyToContinue)
            {
                yield return(null);
            }

            _readyToContinue = false;
            //  Delete customization
            Log.Debug("ExampleSpeechToText", "Attempting to delete customization {0}", _createdCustomizationID);
            _speechToText.DeleteCustomization(HandleDeleteCustomization, _createdCustomizationID);
            while (!_deleteCustomizationsTested)
            {
                yield return(null);
            }

            Log.Debug("ExampleSpeechToText", "Speech to Text examples complete.");

            yield break;
        }
示例#38
0
 /// <summary>
 /// Create an instance of WaveOut with an empty wave file and a message
 /// window bound to this player (presumably the window receives sound-driver
 /// playback notifications for this instance — see SoundMessageWindow).
 /// </summary>
 public WaveOut()
 {
     // The message window is handed a reference to this player so callbacks
     // can be routed back here.
     m_msgWindow = new SoundMessageWindow(this);
     m_file = new WaveFile();
 }
示例#39
0
        /// <summary>
        /// Handles File → Open: prompts for a WAV file, loads the channel-averaged
        /// signal, computes its STFT spectrogram, reconstructs the signal from the
        /// magnitude-phase spectrogram, and pushes everything to the display panels.
        /// </summary>
        /// <param name="sender">Menu item that raised the event (unused).</param>
        /// <param name="e">Event arguments (unused).</param>
        private void openToolStripMenuItem_Click(object sender, EventArgs e)
        {
            // OpenFileDialog is IDisposable — release its native resources.
            using (var ofd = new OpenFileDialog())
            {
                if (ofd.ShowDialog() != DialogResult.OK)
                {
                    return;
                }

                _waveFileName = ofd.FileName;
            }

            // Open read-only with read sharing so read-only or concurrently
            // opened files can still be analyzed (FileMode.Open alone requests
            // ReadWrite access and would throw on such files).
            using (var stream = new FileStream(_waveFileName, FileMode.Open, FileAccess.Read, FileShare.Read))
            {
                var waveFile = new WaveFile(stream);
                _bitDepth = waveFile.WaveFmt.BitsPerSample;
                _signal   = waveFile[Channels.Average];
            }

            _stft = new Stft(512, 64, _windowType);

            _spectrogram = _stft.Spectrogram(_signal);

            // Round-trip the signal through the magnitude-phase STFT
            // representation. (A magnitude-only alternative would be
            // GriffinLimReconstructor over _spectrogram.)
            var mp        = _stft.MagnitudePhaseSpectrogram(_signal);
            var processed = _stft.ReconstructMagnitudePhase(mp);

            _processedSignal = new DiscreteSignal(_signal.SamplingRate, processed);

            signalPanel.Gain            = 120;
            signalPanel.Signal          = _signal;
            processedSignalPanel.Gain   = 120;
            processedSignalPanel.Signal = _processedSignal;

            spectrogramPanel.Spectrogram = _spectrogram;
        }
示例#40
0
文件: DTMF.cs 项目: rmc00/gsf
 /// <summary>
 /// Generates the specified dual-tone multi-frequency storing it in the specified <see cref="WaveFile"/>.
 /// </summary>
 /// <param name="destination"><see cref="WaveFile"/> used to store generated dual-tone multi-frequencies.</param>
 /// <param name="tone">Dual-tone multi-frequency to generate.</param>
 /// <param name="volume">Volume of generated dual-tones as a percentage (0 to 1).</param>
 public static void Generate(WaveFile destination, DTMF tone, double volume) =>
     // Delegate to the array overload with a single repeat count.
     Generate(destination, new[] { tone }, volume, 1);
示例#41
0
文件: Program.cs 项目: zwinlu/gsf
        /// <summary>
        /// Console entry point: scans a music directory for WAV files and registers
        /// each one as a "WAV" protocol device — with per-channel phasor and
        /// measurement records — in the configured system database.
        /// </summary>
        /// <param name="args">Optional: args[0] overrides the configured music directory.</param>
        /// <returns>Zero on completion.</returns>
        static int Main(string[] args)
        {
            // System settings
            ConfigurationFile configFile = ConfigurationFile.Current;
            CategorizedSettingsElementCollection systemSettings = configFile.Settings["systemSettings"];

            systemSettings.Add("NodeID", Guid.NewGuid().ToString(), "Unique Node ID");
            Guid   nodeID            = systemSettings["NodeID"].ValueAs <Guid>();
            bool   useMemoryCache    = systemSettings["UseMemoryCache"].ValueAsBoolean(false);
            string connectionString  = systemSettings["ConnectionString"].Value;
            string nodeIDQueryString = null;
            string parameterizedQuery;
            int    protocolID, signalTypePMID, signalTypePAID;

            // Define guid with query string delimiters according to database needs
            Dictionary <string, string> settings = connectionString.ParseKeyValuePairs();
            string setting;

            if (settings.TryGetValue("Provider", out setting))
            {
                // Check if provider is for Access since it uses braces as Guid delimiters
                if (setting.StartsWith("Microsoft.Jet.OLEDB", StringComparison.OrdinalIgnoreCase))
                {
                    nodeIDQueryString = "{" + nodeID + "}";
                }
            }

            // Default to single-quoted guid literal for all other providers
            if (string.IsNullOrWhiteSpace(nodeIDQueryString))
            {
                nodeIDQueryString = "'" + nodeID + "'";
            }

            using (AdoDataConnection database = new AdoDataConnection("systemSettings"))
            {
                IDbConnection connection = database.Connection;

                // Ensure the WAV input protocol record exists; SQL Server and Jet
                // need "Type" bracket-escaped because it is a reserved word there.
                if (Convert.ToInt32(connection.ExecuteScalar("SELECT COUNT(*) FROM Protocol WHERE Acronym='WAV'")) == 0)
                {
                    if (database.IsSQLServer || database.IsJetEngine)
                    {
                        connection.ExecuteNonQuery("INSERT INTO Protocol(Acronym, Name, [Type], Category, AssemblyName, TypeName) VALUES('WAV', 'Wave Form Input Adapter', 'Frame', 'Audio', 'WavInputAdapter.dll', 'WavInputAdapter.WavInputAdapter')");
                    }
                    else
                    {
                        connection.ExecuteNonQuery("INSERT INTO Protocol(Acronym, Name, Type, Category, AssemblyName, TypeName) VALUES('WAV', 'Wave Form Input Adapter', 'Frame', 'Audio', 'WavInputAdapter.dll', 'WavInputAdapter.WavInputAdapter')");
                    }
                }

                protocolID = Convert.ToInt32(connection.ExecuteScalar("SELECT ID FROM Protocol WHERE Acronym='WAV'"));

                // Typically these values should be defined as analogs, however, we use a voltage magnitude signal type
                // since these types of values can be better graphed with auto-scaling in the visualization tools
                signalTypePMID = Convert.ToInt32(connection.ExecuteScalar("SELECT ID FROM SignalType WHERE Acronym='VPHM'"));
                signalTypePAID = Convert.ToInt32(connection.ExecuteScalar("SELECT ID FROM SignalType WHERE Acronym='VPHA'"));

                // Glob pattern scans WAV files one directory level below the root
                string pathRoot   = FilePath.GetDirectoryName((args.Length > 0) ? args[0] : systemSettings["MusicDirectory"].Value);
                string sourcePath = Path.Combine(pathRoot, "*" + Path.DirectorySeparatorChar + "*.wav");

                foreach (string sourceFileName in FilePath.GetFileList(sourcePath))
                {
                    WaveFile sourceWave;
                    string   fileName     = FilePath.GetFileName(sourceFileName);
                    char[]   invalidChars = { '\'', '[', ']', '(', ')', ',', '-', '.' };

                    Console.WriteLine("Loading metadata for \"{0}\"...\r\n", fileName);
                    // NOTE(review): second argument presumably suppresses loading of
                    // sample data (metadata only) — confirm against WaveFile.Load docs.
                    sourceWave = WaveFile.Load(sourceFileName, false);

                    // Build the device acronym from the sanitized file name plus the
                    // sample rate expressed in kHz.
                    fileName = FilePath.GetFileNameWithoutExtension(fileName).RemoveDuplicateWhiteSpace().RemoveCharacters(invalidChars.Contains).Trim();
                    string acronym = fileName.Replace(' ', '_').ToUpper() + "_" + (int)(sourceWave.SampleRate / SI.Kilo) + "KHZ";
                    string name    = GenerateSongName(sourceWave, fileName);

                    Console.WriteLine("   Acronym = {0}", acronym);
                    Console.WriteLine("      Name = {0}", name);
                    Console.WriteLine("");

                    // Check to see if device exists
                    if (Convert.ToInt32(connection.ExecuteScalar(database.ParameterizedQueryString("SELECT COUNT(*) FROM Device WHERE Acronym = {0}", "acronym"), acronym)) == 0)
                    {
                        // NodeID is concatenated (pre-delimited above) rather than
                        // parameterized because its literal form is provider-specific.
                        parameterizedQuery = database.ParameterizedQueryString("INSERT INTO Device(NodeID, Acronym, Name, ProtocolID, FramesPerSecond, " +
                                                                               "MeasurementReportingInterval, ConnectionString, Enabled) VALUES(" + nodeIDQueryString + ", {0}, {1}, {2}, {3}, {4}, {5}, {6})",
                                                                               "acronym", "name", "protocolID", "framesPerSecond", "measurementReportingInterval",
                                                                               "connectionString", "enabled");

                        // Insert new device record
                        connection.ExecuteNonQuery(parameterizedQuery, acronym, name, protocolID, sourceWave.SampleRate, 1000000, $"wavFileName={FilePath.GetAbsolutePath(sourceFileName)}; connectOnDemand=true; outputSourceIDs={acronym}; memoryCache={useMemoryCache}", database.Bool(true));
                        int    deviceID = Convert.ToInt32(connection.ExecuteScalar(database.ParameterizedQueryString("SELECT ID FROM Device WHERE Acronym = {0}", "acronym"), acronym));
                        string pointTag;
                        int    lastPhasorIndex = 0;
                        int    phasorIndex     = 1;

                        // Add a measurement for each defined wave channel.
                        // Channels are paired per phasor: odd 1-based index maps to
                        // magnitude (VPHM), even to angle (VPHA).
                        for (int i = 0; i < sourceWave.Channels; i++)
                        {
                            int index        = i + 1;
                            int signalTypeID = index % 2 == 0 ? signalTypePAID : signalTypePMID;

                            // Advance to a new phasor every two channels
                            if (i > 0 && i % 2 == 0)
                            {
                                phasorIndex++;
                            }

                            // Create the phasor record the first time its index is seen
                            if (lastPhasorIndex != phasorIndex)
                            {
                                lastPhasorIndex = phasorIndex;

                                parameterizedQuery = database.ParameterizedQueryString("INSERT INTO Phasor(DeviceID, Label, Type, Phase, SourceIndex) VALUES ({0}, {1}, 'V', '+', {2})", "deviceID", "label", "sourceIndex");

                                // Insert new phasor record
                                connection.ExecuteNonQuery(parameterizedQuery, (object)deviceID, acronym, phasorIndex);
                            }

                            string signalSuffix = index % 2 == 0 ? "-PA" : "-PM";
                            pointTag = acronym + ":WAVA" + index;

                            parameterizedQuery = database.ParameterizedQueryString("INSERT INTO Measurement(DeviceID, PointTag, SignalTypeID, PhasorSourceIndex, SignalReference, Description, " +
                                                                                   "Enabled) VALUES({0}, {1}, {2}, {3}, {4}, {5}, {6})", "deviceID", "pointTag", "signalTypeID", "phasorSourceIndex", "signalReference", "description", "enabled");

                            // Insert new measurement record
                            connection.ExecuteNonQuery(parameterizedQuery, (object)deviceID, pointTag, signalTypeID, phasorIndex, acronym + signalSuffix + phasorIndex, name + " - channel " + phasorIndex, database.Bool(true));
                            //Convert.ToInt32(connection.ExecuteScalar(database.ParameterizedQueryString("SELECT PointID FROM Measurement WHERE PointTag = {0}", "pointTag"), pointTag));
                        }

                        // Disable all non analog measurements that may be associated with this device
                        connection.ExecuteNonQuery(database.ParameterizedQueryString("UPDATE Measurement SET Enabled = {0} WHERE DeviceID = {1} AND NOT SignalTypeID IN ({2}, {3})", "enabled", "deviceID", "signalTypePAID", "signalTypePMID"), database.Bool(false), deviceID, signalTypePAID, signalTypePMID);
                    }
                }
            }

            return(0);
        }
示例#42
0
文件: DTMF.cs 项目: rmc00/gsf
 /// <summary>
 /// Generates a single instance of each of the specified dual-tone multi-frequencies storing them in the specified <see cref="WaveFile"/>.
 /// </summary>
 /// <param name="destination"><see cref="WaveFile"/> used to store generated dual-tone multi-frequencies.</param>
 /// <param name="tones">Dual-tone multi-frequencies to generate.</param>
 /// <param name="volume">Volume of generated dual-tones as a percentage (0 to 1).</param>
 public static void Generate(WaveFile destination, DTMF[] tones, double volume) =>
     // Delegate to the full overload, playing each tone exactly once.
     Generate(destination, tones, volume, 1);
示例#43
0
 /// <summary>
 /// Constructs a delay effect bound to the given wave file. The effect's
 /// working sample rate is taken from the file's format header.
 /// </summary>
 /// <param name="waveFile">Source wave file the delay operates on.</param>
 public Delay(WaveFile waveFile)
 {
     this.waveFile = waveFile;
     // SamplesPerSec is an unsigned format field; narrow it to int for use
     // in sample-offset arithmetic.
     sampleRate = (int)waveFile.Format.SamplesPerSec;
 }
示例#44
0
文件: WaveFile.cs 项目: avs009/gsf
        /// <summary>
        /// Combines wave files together, all starting at the same time, into a single file.
        /// This has the effect of playing two sound tracks simultaneously.
        /// </summary>
        /// <param name="waveFiles">Wave files to combine</param>
        /// <param name="volumes">Volume for each wave file (0.0 to 1.0)</param>
        /// <returns>Combined wave files.</returns>
        /// <remarks>
        /// <para>
        /// Cumulatively, volumes cannot exceed 1.0 - these volumes represent a fractional percentage
        /// of volume to be applied to each wave file.
        /// </para>
        /// <para>
        /// Resulting sounds will overlap; no truncation is performed. Final wave file length will equal length of
        /// longest source file.
        /// </para>
        /// <para>
        /// Combining sounds files with non-PCM based audio formats will have unexpected results.
        /// </para>
        /// </remarks>
        /// <exception cref="ArgumentException">Fewer than two wave files provided, or formats are incompatible.</exception>
        /// <exception cref="ArgumentOutOfRangeException">Volume count does not match file count, or volumes sum above 1.0.</exception>
        public static WaveFile Combine(WaveFile[] waveFiles, double[] volumes)
        {
            if (waveFiles.Length < 2)
                throw new ArgumentException("You must provide at least two wave files to combine.", nameof(waveFiles));

            // Validate volumes
            if (volumes.Length != waveFiles.Length)
                throw new ArgumentOutOfRangeException(nameof(volumes), "There must be one volume per each wave file");

            if (volumes.Sum() > 1.0D)
                throw new ArgumentOutOfRangeException(nameof(volumes), "Cumulatively, volumes cannot exceed 1.0");

            // Deep clone first wave file - this will become the base of the new combined wave file
            WaveFile source = waveFiles[0].Clone();
            int sourceLength = source.SampleBlocks.Count;

            // Validate compatibility of wave files to be combined
            for (int x = 1; x < waveFiles.Length; x++)
            {
                WaveFile next = waveFiles[x];

                if (source.AudioFormat != next.AudioFormat ||
                    source.SampleRate != next.SampleRate ||
                    source.BitsPerSample != next.BitsPerSample ||
                    source.Channels != next.Channels)
                    throw new ArgumentException("All wave files to be combined must have the same audio format, sample rate, bits per sample and number of channels");
            }

            // Apply volume adjustment to source file; combining with "0" scales
            // the existing sample without adding new content
            for (int x = 0; x < sourceLength; x++)
            {
                for (int y = 0; y < source.Channels; y++)
                {
                    source.SampleBlocks[x][y] = CombineBinarySamples(source.BitsPerSample, 0, source.SampleBlocks[x][y], volumes[0]);
                }
            }

            // Combine subsequent wave files
            for (int x = 1; x < waveFiles.Length; x++)
            {
                WaveFile next = waveFiles[x];
                int nextLength = next.SampleBlocks.Count;
                int maxLength = sourceLength > nextLength ? sourceLength : nextLength;

                for (int y = 0; y < maxLength; y++)
                {
                    if (y < sourceLength && y < nextLength)
                    {
                        for (int z = 0; z < source.Channels; z++)
                        {
                            // Combine each data channel from the source and current wave files
                            source.SampleBlocks[y][z] = CombineBinarySamples(source.BitsPerSample, source.SampleBlocks[y][z], next.SampleBlocks[y][z], volumes[x]);
                        }
                    }
                    else if (nextLength > sourceLength)
                    {
                        // Extend source if necessary - note that extended samples still need to be equalized
                        LittleBinaryValue[] samples = new LittleBinaryValue[source.Channels];

                        for (int z = 0; z < source.Channels; z++)
                        {
                            // Combine extended samples with "0" from source to maintain amplitude equalization
                            samples[z] = CombineBinarySamples(source.BitsPerSample, 0, next.SampleBlocks[y][z], volumes[x]);
                        }

                        source.SampleBlocks.Add(samples);
                    }
                    else
                        break;
                }

                // BUG FIX: refresh the cached source length after this file may have
                // extended the sample block list. The original kept the stale count,
                // so a third (or later) file longer than the original base file had
                // its overlap region appended to the end instead of mixed in place.
                sourceLength = source.SampleBlocks.Count;
            }

            return source;
        }
        /// <summary>
        /// Reads the main RIFF header (group ID, file length, RIFF type) from the
        /// current reader position into a fresh <c>waveFile.maindata</c> chunk,
        /// tracing the stream position after each field for debugging.
        /// </summary>
        public void ReadMainFileHeader()
        {
            waveFile = new WaveFile();
            RiffChunk header = new RiffChunk();
            waveFile.maindata = header;

            Debug.WriteLine("position reader: " + reader.BaseStream.Position);
            header.sGroupID = new string(reader.ReadChars(4));
            Debug.WriteLine("position reader: " + reader.BaseStream.Position);
            // +8 apparently restores the 8-byte RIFF preamble that the stored
            // length field excludes, yielding the total file length.
            header.dwFileLength = reader.ReadUInt32() + 8;
            Debug.WriteLine("position reader: " + reader.BaseStream.Position);
            header.sRiffType = new string(reader.ReadChars(4));
            Debug.WriteLine("position reader: " + reader.BaseStream.Position);

            Debug.WriteLine(header.dwFileLength);
            Debug.WriteLine(reader.BaseStream.Length);
        }