/// <summary>
///   Checks that <c>Signal.CopyTo</c> fills a float destination array, both
///   when the destination is exactly the right size and when it is larger
///   than the number of samples in the signal.
/// </summary>
public void CopyToFloatArray()
{
    Signal target = Signal.FromArray(data, 8000);

    // Destination that holds exactly the signal's samples.
    float[] exactFit = new float[12];
    target.CopyTo(exactFit);
    Assert.AreEqual(data.Reshape(), exactFit);

    // Oversized destination; the expected value is the data padded out to 20 entries.
    float[] oversized = new float[20];
    target.CopyTo(oversized);
    Assert.AreEqual(Vector.Create(20, data.Reshape()), oversized);
}
/// <summary>
///   Encodes the Wave stream into a Signal object.
/// </summary>
/// 
public void Encode(Signal signal)
{
    // First call: set up the encoder state and emit the initial headers.
    if (!initialized)
    {
        initialize(signal);
        firstWriteHeaders();
    }

    // Accumulate the running totals that the headers report.
    numberOfSamples += signal.NumberOfSamples;
    numberOfFrames += signal.Length;
    bytes += signal.NumberOfBytes;
    duration += (int)signal.Duration.TotalMilliseconds;

    // Remember where we are, rewrite the headers at the start of the
    // stream, then restore the position before appending sample data.
    long savedPosition = waveStream.Position;
    waveStream.Seek(0, SeekOrigin.Begin);
    updateHeaders();
    waveStream.Seek(savedPosition, SeekOrigin.Begin);

    // Grow the scratch buffer only when the incoming signal needs more room.
    if (buffer is null || buffer.Length < signal.NumberOfBytes)
    {
        buffer = new byte[signal.NumberOfBytes];
    }

    // Copy the signal's raw bytes and append them to the stream.
    signal.CopyTo(buffer);
    waveStream.Write(buffer, 0, signal.NumberOfBytes);
}
// Decodes every recorded frame and returns the samples as one double array.
private double[] getWholeSignal()
{
    Signal decoded = this.decoder.Decode(this.frames);

    // One slot per recorded frame; CopyTo fills it from the decoded signal.
    double[] samples = new double[this.frames];
    decoded.CopyTo(samples);

    return samples;
}
// Stores the latest frame, appends it to the encoded stream,
// and advances the running duration/sample/frame counters.
public void addNewFrame(Signal signal)
{
    // Keep a copy of the most recent frame.
    signal.CopyTo(current);

    // Persist the frame through the encoder.
    this.encoder.Encode(signal);

    // Bookkeeping for the recording so far.
    this.duration += signal.Duration;
    this.samples += signal.Samples;
    this.frames += signal.Length;
}
/// <summary>
///   Open the given audio file and return an "AudioSignal" with the following info:
///   1. data[]: array of audio samples
///   2. sample rate
///   3. signal length in milli sec
/// </summary>
/// <param name="filePath">audio file path</param>
/// <returns>AudioSignal containing its data, sample rate and length in ms</returns>
public static AudioSignal OpenAudioFile(string filePath)
{
    WaveDecoder decoder = new WaveDecoder(filePath);

    // Carry the decoder's metadata over to the result.
    AudioSignal result = new AudioSignal();
    result.sampleRate = decoder.SampleRate;
    result.signalLengthInMilliSec = decoder.Duration;

    // Decode every frame and copy the samples into the result's data array.
    Signal decoded = decoder.Decode(decoder.Frames);
    result.data = new double[decoder.Frames];
    decoded.CopyTo(result.data);

    return result;
}
/// <summary>
///   Processes each configured wav file: decodes it, computes the average
///   energy (RMS), zero-crossing rate, and spectral centroid, and appends
///   one row per file to the ARFF output.
/// </summary>
public void waveInput()
{
    writeHeaderForARFF();

    for (int i = 0; i < audioFileName.Length; i++)
    {
        string type;
        string name;
        setTypeAndName(audioFileName[i], out type, out name);

        Accord.Audio.Formats.WaveDecoder currentWav = new Accord.Audio.Formats.WaveDecoder(audioFileName[i]);

        // Time-domain signal, used for RMS energy and zero-crossing rate.
        Signal timeDomain = currentWav.Decode();

        // Copy the samples into a flat array for the RMS computation.
        float[] energyArray = new float[timeDomain.Samples];
        timeDomain.CopyTo(energyArray);

        // Average energy (root mean square) for the current wav file.
        double averageEnergy = Accord.Audio.Tools.RootMeanSquare(energyArray);

        // Zero-crossing rate for the current wav file.
        double zeroCrossingRate = zeroCrossingRateMethod(timeDomain);

        // Split the signal into 1024-sample Hamming windows with 512-sample hops,
        // then Fourier-transform each window to get the frequency-domain view.
        Accord.Audio.Windows.RaisedCosineWindow window = Accord.Audio.Windows.RaisedCosineWindow.Hamming(1024);
        Signal[] windows = timeDomain.Split(window, 512);
        ComplexSignal[] tempFrequency = windows.Apply(ComplexSignal.FromSignal);
        tempFrequency.ForwardFourierTransform();
        ComplexSignal curComplex = tempFrequency[0];

        // FIX: the locals below are 'out' parameters of createFrequencyArray,
        // so their previous initializers (empty arrays plus arrays sized by
        // 'curComplex.Length / (2 + 1)' — a divide-by-3 that looked like an
        // operator-precedence slip) were dead code overwritten by the callee.
        double[] power, magnitudes, freq, meanPower, meanMagnitudes;
        createFrequencyArray(tempFrequency, curComplex, out power, out magnitudes, out freq, out meanPower, out meanMagnitudes);

        // Spectral centroid: magnitude-weighted mean of the bin frequencies.
        double spectralCentroid = meanMagnitudes.Zip(freq, (m, f) => m * f).Sum() / meanMagnitudes.Sum();
        //double spectralCentroid = spectralCentroidMethod(tempFrequency);

        // Writes data to arff file
        writeARFF(name, averageEnergy, zeroCrossingRate, spectralCentroid, type);
    }
}
/// <summary>
///   Encodes the incoming signal and plays it on the audio output device,
///   creating the device lazily on first use.
/// </summary>
/// <param name="input">The audio signal to encode and play.</param>
public void AudioOutputMethod(Signal input)
{
    // Without an owner handle there is nothing to attach the device to.
    if (audioOutputDeviceOwnerHandle == null)
    {
        return;
    }

    // Lazily initialize the output device from the first signal's format.
    if (audioOutputDevice == null)
    {
        InitAudioOutputDevice(input);
    }

    encoder.Encode(input);

    // FIX: removed a dead local float[] that was filled via input.CopyTo()
    // but never read — it allocated one array per call for no effect.
    // NOTE(review): if Play() was ever meant to receive those samples,
    // confirm against the AudioOutputDevice API.
    audioOutputDevice.Play();
}
// Feeds the output device: decodes the requested frames at the current
// position, or signals a stop when the source is exhausted.
private void output_NewFrameRequested(object sender, NewFrameRequestedEventArgs e)
{
    e.FrameIndex = decoder.Position;

    Signal signal = decoder.Decode(e.Frames);
    if (signal == null)
    {
        // Nothing left to decode: tell the requester to stop.
        e.Stop = true;
        return;
    }

    // Report how many frames were actually read, then hand back the samples.
    e.Frames = signal.Length;
    signal.CopyTo(e.Buffer);
}
// Supplies the audio output device with more frames, stopping playback
// when no decoder is available or the source runs out of data.
private void AudioOutputDeviceNewFrameRequested(object sender, NewFrameRequestedEventArgs e)
{
    // No decoder attached yet: nothing to play.
    if (decoder == null)
    {
        return;
    }

    e.FrameIndex = decoder.Position;

    Signal decoded = decoder.Decode(e.Frames);
    if (decoded == null)
    {
        // Source exhausted; request playback to stop.
        e.Stop = true;
    }
    else
    {
        // Report the actual frame count and fill the playback buffer.
        e.Frames = decoded.Length;
        decoded.CopyTo(e.Buffer);
    }
}
/// <summary>
///   This event is triggered when the sound card needs more samples to be
///   played. When this happens, we have to feed it additional frames so it
///   can continue playing.
/// </summary>
/// 
private void output_NewFrameRequested(object sender, NewFrameRequestedEventArgs e)
{
    // Tell the requester which frame comes next.
    e.FrameIndex = decoder.Position;

    // Try to pull the requested number of frames from the stream.
    Signal decoded = decoder.Decode(e.Frames);

    if (decoded == null)
    {
        // The stream could not satisfy the request — signal the
        // player that it is time to stop.
        e.Stop = true;
        return;
    }

    // Report the frame count actually read and copy the samples
    // into the playback buffer.
    e.Frames = decoded.Length;
    decoded.CopyTo(e.Buffer);
}
// Fills a frame request from the decoder: records the next frame index,
// decodes the requested frames, and either copies them into the request's
// buffer or flags the request to stop when the stream is exhausted.
public void FillNewFrame(NewFrameRequestedEventArgs frameRequestArgs)
{
    frameRequestArgs.FrameIndex = this.decoder.Position;

    Signal decoded = this.decoder.Decode(frameRequestArgs.Frames);
    if (decoded == null)
    {
        // Could not decode the requested frames — stop playback.
        frameRequestArgs.Stop = true;
        return;
    }

    // Hand back the number of frames actually read, plus the samples.
    frameRequestArgs.Frames = decoded.Length;
    decoded.CopyTo(frameRequestArgs.Buffer);
}