Example #1
        private void InitializeSoundOut(IWaveSource soundSource)
        {
            // SoundOut implementation which plays the sound
            this.soundOut = new WasapiOut(this.eventSync, this.audioClientShareMode, this.latency, ThreadPriority.Highest);

            // MMNotificationClient notifies us when the default audio render device changes
            this.MMNotificationClient = new MMNotificationClient();
            this.MMNotificationClient.DefaultDeviceChanged += this.MMNotificationClient_DefaultDeviceChanged;

            // Wrap the source in a SingleBlockNotificationStream (so each sample block can be observed) and initialize the soundOut with it
            this.notificationSource = new SingleBlockNotificationStream(soundSource.ToSampleSource());
            this.soundOut.Initialize(this.notificationSource.ToWaveSource(16));

            // Create the FFT provider
            this.fftProvider = new FftProvider(this.soundOut.WaveSource.WaveFormat.Channels, FftSize.Fft2048);

            this.notificationSource.SingleBlockRead += this.InputStream_Sample;
            this.soundOut.Stopped += this.SoundOutStoppedHandler;

            this.soundOut.Volume = this.volume;
        }
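
The InputStream_Sample handler subscribed above is not shown in this example. A minimal sketch of what such a handler typically does, assuming CSCore's SingleBlockReadEventArgs exposes the left and right sample of each block (the handler name comes from the example; the body is an assumption):

        // Assumed handler body: forward every left/right sample pair to the FFT provider.
        private void InputStream_Sample(object sender, SingleBlockReadEventArgs e)
        {
            this.fftProvider.Add(e.Left, e.Right);
        }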
Example #2
        public ParticleMoveSystem(World world)
        {
            particleSet = world.GetEntities().With<Translation>().With<Velocity>().AsSet();
            capture     = new WasapiLoopbackCapture();
            capture.Initialize();
            var soundInSource = new SoundInSource(capture);
            var source        = soundInSource.ToSampleSource();

            fft = new FftProvider(source.WaveFormat.Channels, fftSize);

            var notificationSource = new SingleBlockNotificationStream(source);

            notificationSource.SingleBlockRead += SingleBlockRead;

            waveSource = notificationSource.ToWaveSource(16);
            buffer     = new byte[waveSource.WaveFormat.BytesPerSecond / 2];

            soundInSource.DataAvailable += DataAvailable;

            // capture.DataAvailable += (sender, args) => DataAvailable(sender, args);
            capture.Start();
        }
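
The DataAvailable and SingleBlockRead handlers wired up above are not shown. A sketch of the usual CSCore loopback pattern, using the handler names from the example (the bodies are assumptions): DataAvailable drains the wave source, which pulls sample blocks through the notification stream and so feeds the FftProvider.

        // Assumed handler: read everything currently available; reading is done purely
        // for its side effect of driving SingleBlockRead below.
        private void DataAvailable(object sender, DataAvailableEventArgs e)
        {
            int read;
            while ((read = waveSource.Read(buffer, 0, buffer.Length)) > 0)
            {
            }
        }

        // Assumed handler: feed each left/right sample pair to the FFT provider.
        private void SingleBlockRead(object sender, SingleBlockReadEventArgs e)
        {
            fft.Add(e.Left, e.Right);
        }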
Example #3
            public WrapperSpectrumPlayer(CSCorePlayer player, SpectrumChannel channel,
                                         ICollection<EventHandler<SingleBlockReadEventArgs>> inputStreamList)
            {
                this.player = player;
                this.player.PropertyChanged += (sender, e) => PropertyChanged(sender, e);
                this.soundOut = player.soundOut;

                fftProvider = new FftProvider(2, FftSize.Fft1024);

                if (channel != SpectrumChannel.Stereo)
                {
                    if (channel == SpectrumChannel.Left)
                    {
                        if (this.player.notificationSource != null)
                        {
                            this.player.notificationSource.SingleBlockRead += InputStream_LeftSample;
                        }
                        inputStreamList.Add(InputStream_LeftSample);
                    }
                    if (channel == SpectrumChannel.Right)
                    {
                        if (this.player.notificationSource != null)
                        {
                            this.player.notificationSource.SingleBlockRead += InputStream_RightSample;
                        }
                        inputStreamList.Add(InputStream_RightSample);
                    }
                }
                else
                {
                    if (this.player.notificationSource != null)
                    {
                        this.player.notificationSource.SingleBlockRead += InputStream_Sample;
                    }
                    inputStreamList.Add(InputStream_Sample);
                }
            }
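
The channel-specific handlers registered above are not shown. A minimal sketch, assuming each one feeds only its channel into both FFT inputs (handler names come from the example; the bodies are assumptions):

            // Assumed handler bodies for the single-channel cases.
            private void InputStream_LeftSample(object sender, SingleBlockReadEventArgs e)
            {
                fftProvider.Add(e.Left, e.Left);
            }

            private void InputStream_RightSample(object sender, SingleBlockReadEventArgs e)
            {
                fftProvider.Add(e.Right, e.Right);
            }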
Example #4
        private async Task Play()
        {
            if (IsPlaying)
            {
                Pause();
                return;
            }

            if (_audioGraph == null)
            {
                var settings = new AudioGraphSettings(AudioRenderCategory.Media)
                {
                    PrimaryRenderDevice = SelectedDevice
                };

                var createResult = await AudioGraph.CreateAsync(settings);

                if (createResult.Status != AudioGraphCreationStatus.Success)
                {
                    return;
                }

                _audioGraph = createResult.Graph;
                _audioGraph.UnrecoverableErrorOccurred += OnAudioGraphError;
            }

            if (_deviceOutputNode == null)
            {
                var deviceResult = await _audioGraph.CreateDeviceOutputNodeAsync();

                if (deviceResult.Status != AudioDeviceNodeCreationStatus.Success)
                {
                    return;
                }
                _deviceOutputNode = deviceResult.DeviceOutputNode;
            }

            if (_frameOutputNode == null)
            {
                _frameOutputNode              = _audioGraph.CreateFrameOutputNode();
                _audioGraph.QuantumProcessed += GraphOnQuantumProcessed;
            }

            if (_fileInputNode == null)
            {
                if (CurrentPlayingFile == null)
                {
                    return;
                }

                var fileResult = await _audioGraph.CreateFileInputNodeAsync(CurrentPlayingFile);

                if (fileResult.Status != AudioFileNodeCreationStatus.Success)
                {
                    return;
                }
                _fileInputNode = fileResult.FileInputNode;
                _fileInputNode.AddOutgoingConnection(_deviceOutputNode);
                _fileInputNode.AddOutgoingConnection(_frameOutputNode);
                Duration = _fileInputNode.Duration;
                _fileInputNode.PlaybackSpeedFactor = PlaybackSpeed / 100.0;
                _fileInputNode.OutgoingGain        = Volume / 100.0;
                _fileInputNode.FileCompleted      += FileInputNodeOnFileCompleted;
            }

            Debug.WriteLine($" CompletedQuantumCount: {_audioGraph.CompletedQuantumCount}");
            Debug.WriteLine($"SamplesPerQuantum: {_audioGraph.SamplesPerQuantum}");
            Debug.WriteLine($"LatencyInSamples: {_audioGraph.LatencyInSamples}");
            var channelCount = (int)_audioGraph.EncodingProperties.ChannelCount;

            _fftProvider = new FftProvider(channelCount, FftSize.Fft2048);
            _audioGraph.Start();
            IsPlaying = true;
        }
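
The GraphOnQuantumProcessed handler registered above is not shown. A sketch following the documented AudioFrameOutputNode pattern, reusing the _frameOutputNode and _fftProvider fields from the example; the handler body, the IMemoryBufferByteAccess interop declaration, and the use of unsafe code are assumptions:

        // Standard COM interop interface for accessing an AudioBuffer's raw memory
        // (requires System.Runtime.InteropServices and compiling with /unsafe).
        [ComImport]
        [Guid("5B0D3235-4DBA-4D44-865E-8F1D0E4FD04D")]
        [InterfaceType(ComInterfaceType.InterfaceIsIUnknown)]
        unsafe interface IMemoryBufferByteAccess
        {
            void GetBuffer(out byte* buffer, out uint capacity);
        }

        // Assumed handler body: grab the frame produced in the last quantum and
        // push its interleaved float samples into the FftProvider.
        private unsafe void GraphOnQuantumProcessed(AudioGraph sender, object args)
        {
            if (_fftProvider == null || _frameOutputNode == null)
            {
                return;
            }

            using (AudioFrame frame = _frameOutputNode.GetFrame())
            using (AudioBuffer buffer = frame.LockBuffer(AudioBufferAccessMode.Read))
            using (IMemoryBufferReference reference = buffer.CreateReference())
            {
                byte* dataInBytes;
                uint capacityInBytes;
                ((IMemoryBufferByteAccess)reference).GetBuffer(out dataInBytes, out capacityInBytes);

                float* dataInFloat = (float*)dataInBytes;
                int sampleCount = (int)(capacityInBytes / sizeof(float));

                // Copy the interleaved samples into a managed buffer; the channel
                // count was already passed to the FftProvider in Play().
                var samples = new float[sampleCount];
                for (int i = 0; i < sampleCount; i++)
                {
                    samples[i] = dataInFloat[i];
                }

                _fftProvider.Add(samples, sampleCount);
            }
        }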
Example #5
        public string GetLevelsFromAudioFX(string audioType, string audioFile)
        {
            string audioFilename  = Path.Combine(Executor.Current.ExpanderSharedFiles, audioType, audioFile);
            string levelsFilename = Path.Combine(Executor.Current.ExpanderSharedFiles, audioType, audioFile + ".levels");

            if (!File.Exists(levelsFilename))
            {
                using (ISampleSource source = CodecFactory.Instance.GetCodec(audioFilename).ToSampleSource())
                {
                    var fftProvider = new FftProvider(source.WaveFormat.Channels, FftSize.Fft1024);

                    int millisecondsPerFrame = 1000 / 40;

                    long maxBufferLengthInSamples = source.GetRawElements(millisecondsPerFrame);

                    long bufferLength = Math.Min(source.Length, maxBufferLengthInSamples);

                    float[] buffer = new float[bufferLength];

                    int read             = 0;
                    int totalSamplesRead = 0;

                    var fftData = new float[1024];

                    var   list    = new List<float>();
                    float highest = 0;
                    do
                    {
                        //determine how many samples to read
                        int samplesToRead = (int)Math.Min(source.Length - totalSamplesRead, buffer.Length);

                        read = source.Read(buffer, 0, samplesToRead);
                        if (read == 0)
                        {
                            break;
                        }

                        totalSamplesRead += read;

                        //add read data to the fftProvider
                        fftProvider.Add(buffer, read);

                        fftProvider.GetFftData(fftData);

                        float highestAmplitude = 0;
                        for (int i = 0; i < fftData.Length / 2; i++)
                        {
                            if (fftData[i] > highestAmplitude)
                            {
                                highestAmplitude = fftData[i];
                            }
                        }

                        list.Add(highestAmplitude);
                        if (highestAmplitude > highest)
                        {
                            highest = highestAmplitude;
                        }
                    } while (totalSamplesRead < source.Length);

                    if (highest > 0)
                    {
                        // Adjust to equalize
                        float adjustment = 1 / highest;

                        for (int i = 0; i < list.Count; i++)
                        {
                            list[i] *= adjustment;
                        }
                    }

                    using (var fs = File.Create(levelsFilename))
                    {
                        fs.Write(list.Select(x => (byte)(x * 255)).ToArray(), 0, list.Count);
                    }
                }
            }

            return levelsFilename;
        }
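
For reference, each byte written to the .levels file is the peak FFT amplitude of one roughly 25 ms frame, equalized and scaled to 0-255. A hypothetical consumer (ReadLevels is not part of the example) could map the bytes back to normalized levels like this:

        // Hypothetical consumer of the generated file: one byte per ~25 ms frame,
        // converted back to a 0..1 level.
        public float[] ReadLevels(string levelsFilename)
        {
            byte[] raw = File.ReadAllBytes(levelsFilename);
            var levels = new float[raw.Length];

            for (int i = 0; i < raw.Length; i++)
            {
                levels[i] = raw[i] / 255f;
            }

            return levels;
        }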
Example #6
        /// <summary>
        /// Entry point
        /// </summary>
        /// <param name="args"></param>
        static void Main(string[] args)
        {
            // Validate cmd line args
            if (args.Length != 1)
            {
                Console.WriteLine("Provide a valid music file location (mp3, wav, or m4a)");
                return;
            }

            string filename = args[0];

            if (!File.Exists(filename))
            {
                Console.Error.WriteLine("Could not find file: '{0}'", filename);
                return;
            }

            // Read in audio file and initialize fft
            IWaveSource   waveSource;
            ISampleSource sampleSource;

            try
            {
                waveSource = CodecFactory.Instance.GetCodec(filename);
            }
            catch (NotSupportedException ex)
            {
                Console.Error.WriteLine("No supporting decoder for given file: '{0}'\n", filename);
                Console.Error.WriteLine(ex.ToString());
                return;
            }

            sampleSource = waveSource.ToSampleSource();

            FftProvider fftProvider = new FftProvider(sampleSource.WaveFormat.Channels, FftSize.Fft1024);
            List<Tuple<int, Complex[]>> fftResults = new List<Tuple<int, Complex[]>>();
            int i = 0;

            // Scrub through the audio 1024 samples at a time and perform fft on each chunk
            while (sampleSource.Position < sampleSource.Length)
            {
                float[] samples = new float[1024];
                int samplesRead = sampleSource.Read(samples, 0, samples.Length);
                if (samplesRead == 0)
                {
                    break;
                }

                // Only add the samples that were actually read (the final block may be short)
                fftProvider.Add(samples, samplesRead);

                Complex[] result = new Complex[(int)fftProvider.FftSize];
                if (fftProvider.GetFftData(result))
                {
                    fftResults.Add(new Tuple<int, Complex[]>(i, result));
                    ++i;
                }
            }

            Console.WriteLine("FFT done");

            // Stores the fundamental frequency and amplitude at each frame (1024 samples)
            List<Tuple<double, double>> fundFreqs = new List<Tuple<double, double>>();

            i = 0;

            // For each fft output
            foreach (var pair in fftResults)
            {
                // The output of the fft has a frequency domain and an amplitude range.
                // The index of a value represents frequency: index * ((sampleRate / 2) / (vals.Length / 2)).
                // The value at an index is the amplitude as a complex number. To normalize, calculate
                // sqrt(real^2 + imaginary^2); the normalized value can then be converted to a dB SPL level with 20 * log10(normal).
                Complex[] vals = pair.Item2;

                // Frequency buckets produced by fft. Size of each bucket depends on sample rate.
                // 0 to N/2 of fft output is what we want, N/2 to N is garbage (negative frequencies)
                int nyquistLength = vals.Length / 2;

                // Nyquist rate is maximum possible reproducible sample frequency of a given sample rate
                int nyquistRate = sampleSource.WaveFormat.SampleRate / 2;


                // Normalize the amplitudes
                double[] normals = new double[nyquistLength];

                for (int j = 0; j < nyquistLength; ++j)
                {
                    normals[j] = Math.Sqrt(Math.Pow(vals[j].Real, 2) + Math.Pow(vals[j].Imaginary, 2));
                }

                // Find the fundamental frequency and amplitude of that frequency
                double fundFreq  = 0;
                double amplitude = double.NegativeInfinity; // in dB spl

                int freqBucket = MaxIndex(normals);
                if (freqBucket > 0)
                {
                    fundFreq = freqBucket * (nyquistRate / (double)nyquistLength);
                }
                if (fundFreq != 0)
                {
                    amplitude = 20 * Math.Log10(normals[freqBucket]);   // Convert to dB
                }

                fundFreqs.Add(new Tuple<double, double>(fundFreq, amplitude));
                ++i;
            }

            Console.WriteLine("Fundamental frequency analysis of each frame done");

            Console.WriteLine("Writing results to csv (timestamp,frequency,amplitude)...");

            FileStream   outFileStream = null;
            StreamWriter writer        = null;

            try
            {
                outFileStream = File.Create("out.csv");
                writer        = new StreamWriter(outFileStream);

                for (int j = 0; j < fundFreqs.Count; ++j)
                {
                    writer.WriteLine(string.Format("{0},{1},{2}", FrameIndexToTimestamp(j, sampleSource.WaveFormat.SampleRate, 1024), fundFreqs[j].Item1, fundFreqs[j].Item2));
                }

                writer.Close();
                outFileStream.Close();
            }
            catch (Exception ex)
            {
                Console.Error.WriteLine("failed to write output:");
                Console.Error.WriteLine(ex.ToString());

                if (outFileStream != null)
                {
                    outFileStream.Close();
                }
                if (writer != null)
                {
                    writer.Close();
                }
            }

            Console.WriteLine("Done");
            Console.ReadKey(true);
        }
        public VisualizerCSCoreFft(int channels, int fftSize)
        {
            FftSize size = (FftSize)fftSize;

            m_Fft = new FftProvider(channels, size);
        }
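
The Main method in Example #6 calls two helpers that are not shown, MaxIndex and FrameIndexToTimestamp. Minimal sketches consistent with the call sites (signatures inferred from Main; the bodies, and the choice of seconds as the timestamp unit, are assumptions):

        // Assumed helper: index of the largest value in the array (-1 if empty).
        private static int MaxIndex(double[] values)
        {
            int maxIndex = -1;
            double max = double.NegativeInfinity;

            for (int j = 0; j < values.Length; j++)
            {
                if (values[j] > max)
                {
                    max = values[j];
                    maxIndex = j;
                }
            }

            return maxIndex;
        }

        // Assumed helper: convert a frame index to a timestamp in seconds;
        // each frame covers samplesPerFrame / sampleRate seconds of audio.
        private static double FrameIndexToTimestamp(int frameIndex, int sampleRate, int samplesPerFrame)
        {
            return frameIndex * (double)samplesPerFrame / sampleRate;
        }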