Esempio n. 1
1
        /// <summary>
        /// Sets up recording from the default input device (device 0) at
        /// 16 kHz / 16-bit / mono and opens a uniquely-named temp WAV writer.
        /// </summary>
        public Recorder()
        {
            waveIn = new WaveIn();
            waveIn.DeviceNumber = 0; // always the default capture device
            waveIn.DataAvailable += waveIn_DataAvailable;
            waveIn.RecordingStopped += waveIn_RecordingStopped;

            // 16 kHz, 16-bit, mono — a common format for speech capture.
            int sampleRate = 16000;
            int channels = 1;
            int bits = 16;

            recordingFormat = new WaveFormat(sampleRate, bits, channels);
            waveIn.WaveFormat = recordingFormat;

            // Make sure the temp directory exists before creating the writer.
            string path = @"C:\temp";
            if (!Directory.Exists(path))
            {
                Directory.CreateDirectory(path);
            }

            // Path.Combine instead of a hand-built separator; a GUID name
            // avoids collisions between concurrent recordings.
            TempWavFileName = Path.Combine(path, Guid.NewGuid().ToString() + ".wav");

            writer = new WaveFileWriter(TempWavFileName, recordingFormat);
        }
        /// <summary>
        /// Starts in the idle (not recording) state with a 44.1 kHz mono
        /// format and pre-allocated storage for spectrum frames.
        /// </summary>
        public AudioRecorder()
        {
            const int sampleRate = 44100;
            const int channelCount = 1;

            isRecording = false;
            recordingFormat = new WaveFormat(sampleRate, channelCount);

            // Jagged array: one Complex[] per captured analysis frame.
            spectData = new Complex[1000000][];
        }
        /// <summary>
        /// Add a new input to the mixer. The first input fixes the mixer's
        /// format; later inputs must match it exactly.
        /// </summary>
        /// <param name="waveStream">The wave input to add</param>
        /// <exception cref="ArgumentException">Not 32-bit IEEE float, or format mismatch</exception>
        public void AddInputStream(WaveStream waveStream)
        {
            var incoming = waveStream.WaveFormat;

            if (incoming.Encoding != WaveFormatEncoding.IeeeFloat)
            {
                throw new ArgumentException("Must be IEEE floating point", "waveStream");
            }

            if (incoming.BitsPerSample != 32)
            {
                throw new ArgumentException("Only 32 bit audio currently supported", "waveStream");
            }

            if (inputStreams.Count == 0)
            {
                // First input defines the mixer's output format.
                this.waveFormat = WaveFormat.CreateIeeeFloatWaveFormat(incoming.SampleRate, incoming.Channels);
            }
            else if (!incoming.Equals(waveFormat))
            {
                throw new ArgumentException("All incoming channels must have the same format", "waveStream");
            }

            lock (inputsLock)
            {
                inputStreams.Add(waveStream);
                length = Math.Max(length, waveStream.Length);
                // Align the new input with the mixer's current playback position.
                waveStream.Position = Position;
            }
        }
        /// <summary>
        /// Begins loopback capture on the given device, converting the stream
        /// to 44.1 kHz / 16-bit / stereo PCM; bails out with a message box
        /// when no device is supplied.
        /// </summary>
        /// <param name="recordingDevice">Render device to capture from</param>
        public void StartRecordingSetDevice(MMDevice recordingDevice)
        {
            if (recordingDevice == null)
            {
                MessageBox.Show(Properties.Strings.MessageBox_NoRecordingDevices);
                Console.WriteLine("No devices found.");
                return;
            }

            // Loopback capture records what the selected device is playing.
            soundIn = new CSCore.SoundIn.WasapiLoopbackCapture { Device = recordingDevice };
            soundIn.Initialize();

            soundInSource = new SoundInSource(soundIn) { FillWithZeros = false };

            // Normalize the capture to 44.1 kHz / 16-bit, then to stereo.
            convertedSource = soundInSource
                .ChangeSampleRate(44100)
                .ToSampleSource()
                .ToWaveSource(16)
                .ToStereo();

            soundInSource.DataAvailable += OnDataAvailable;
            soundIn.Start();

            // Mirror the CSCore format as an equivalent NAudio WaveFormat.
            var fmt = convertedSource.WaveFormat;
            waveFormat = NAudio.Wave.WaveFormat.CreateCustomFormat(
                WaveFormatEncoding.Pcm,
                fmt.SampleRate,
                fmt.Channels,
                fmt.BytesPerSecond,
                fmt.BlockAlign,
                fmt.BitsPerSample);
        }
Esempio n. 5
0
		/// <summary>
		/// Creates (or overwrites) the target file and resets composite state:
		/// no format yet and zero accumulated duration.
		/// </summary>
		/// <param name="filename">Path of the MP3 file to write</param>
		public Mp3Composite(string filename)
		{
			this.filename = filename;
			this.writer = File.Create(filename);
			this.format = null;
			this.timeTotal = TimeSpan.Zero;
		}
 /// <summary>
 /// Plays a drum pattern by sequencing it against the stock drum kit and
 /// mixing the triggered samples.
 /// </summary>
 /// <param name="pattern">The pattern to sequence</param>
 public DrumPatternSampleProvider(DrumPattern pattern)
 {
     var drumKit = new DrumKit();
     sequencer = new PatternSequencer(pattern, drumKit);
     // The kit's format is also the mixer/output format.
     waveFormat = drumKit.WaveFormat;
     mixer = new MixingSampleProvider(waveFormat);
 }
Esempio n. 7
0
        /// <summary>
        /// Binds this output to a WaveOut driver: queries its capabilities,
        /// builds an IEEE-float output format/buffer and initialises the driver.
        /// </summary>
        /// <param name="format">Source format; only its sample rate is used</param>
        /// <param name="driver">The WaveOut device to play through</param>
        /// <exception cref="ArgumentNullException">format or driver is null</exception>
        public void Initialise(WaveFormat format, WaveOut driver)
        {
            if (driver == null)
            {
                // Fixed copy-pasted message: this overload takes a WaveOut, not a WaveIn.
                throw new ArgumentNullException("driver", "Must specify a WaveOut device instance");
            }

            if (format == null)
            {
                throw new ArgumentNullException("format", "Must specify an audio format");
            }

            var caps = WaveOut.GetCapabilities(driver.DeviceNumber);

            device = new WaveOutDeviceData
            {
                Driver = driver,
                Name = caps.ProductName,
                Channels = caps.Channels,
                Buffers = new float[caps.Channels][]
            };

            // Float output at the source sample rate, one channel per device channel.
            Format = WaveFormat.CreateIeeeFloatWaveFormat(format.SampleRate, caps.Channels);
            OutputBuffer = new BufferedWaveProvider(Format);
            OutputBuffer.DiscardOnBufferOverflow = true; // drop rather than throw when full

            driver.Init(OutputBuffer);

            mapOutputs();
        }
Esempio n. 8
0
        /// <summary>
        /// Creates a filter host with a default order of 20 and an empty,
        /// change-tracked filter collection; coefficients are computed once
        /// up front and again whenever the collection changes.
        /// </summary>
        /// <exception cref="ArgumentNullException">Any argument is null</exception>
        public ComplexFilter(
            WaveFormat format, 
            IWindowFunction windowFunction, 
            IFilterImplementation filterImplementation)
        {
            if (format == null)
                throw new ArgumentNullException("format", "Format cannot be null");

            if (windowFunction == null)
                throw new ArgumentNullException("windowFunction", "Window function cannot be null");

            if (filterImplementation == null)
                throw new ArgumentNullException("filterImplementation", "Filter implementation cannot be null");

            this.format = format;
            this.filterOrder = 20; // default order
            this.windowFunction = windowFunction;
            this.FilterImplementation = filterImplementation;

            // Recompute coefficients whenever the filter set changes.
            this.filters = new ObservableCollection<IDigitalFilter>();
            this.filters.CollectionChanged += filters_CollectionChanged;

            updateCoefficients();
        }
Esempio n. 9
0
        /// <summary>
        /// Wires an ASIO driver together with its input/output mappers;
        /// purely stores dependencies, no driver calls are made here.
        /// </summary>
        /// <exception cref="ArgumentNullException">Any argument is null</exception>
        public AsioCard(
            WaveFormat format,
            AsioOut driver, 
            AsioInputMapper inputMapper, 
            AsioOutputMapper outputMapper)
        {
            if (format == null)
                throw new ArgumentNullException("format", "Must specify an audio format");

            if (driver == null)
                throw new ArgumentNullException("driver", "Asio driver cannot be null");

            if (inputMapper == null)
                throw new ArgumentNullException("inputMapper", "Asio input mapper cannot be null");

            if (outputMapper == null)
                throw new ArgumentNullException("outputMapper", "Asio output mapper cannot be null");

            this.format = format;
            this.driver = driver;
            this.inputMapper = inputMapper;
            this.outputMapper = outputMapper;
        }
        /// <summary>
        /// Returns the node unchanged when it already matches the target
        /// format; otherwise adapts the channel count (mono/stereo only)
        /// and resamples to the target rate.
        /// </summary>
        /// <exception cref="ArgumentException">Unsupported channel conversion</exception>
        public static ISampleProvider ResampleIfNeeded(this ISampleProvider node, WaveFormat format)
        {
            if (node.WaveFormat.Equals(format))
            {
                return node;
            }

            ISampleProvider adapted = node;
            int sourceChannels = node.WaveFormat.Channels;
            int targetChannels = format.Channels;

            if (sourceChannels != targetChannels)
            {
                if (sourceChannels == 1 && targetChannels == 2)
                {
                    adapted = adapted.ToStereo();
                }
                else if (sourceChannels == 2 && targetChannels == 1)
                {
                    adapted = adapted.ToMono();
                }
                else
                {
                    // Only 1<->2 channel conversions are supported.
                    throw new ArgumentException("Cannot change channel count from " + sourceChannels + " to " + targetChannels);
                }
            }

            return new WdlResamplingSampleProvider(adapted, format.SampleRate);
        }
Esempio n. 11
0
        /// <summary>
        /// Creates a new Wave input stream
        /// </summary>
        /// <param name="deviceNumber">The device to open - 0 is default</param>
        /// <param name="desiredFormat">The PCM format to record in</param>
        /// <param name="callbackWindow">If this parameter is non-null, the Wave In Messages
        /// will be sent to the message loop of the supplied control. This is considered a
        /// safer way to use the waveIn functionality</param>
        public WaveInStream(int deviceNumber, WaveFormat desiredFormat, System.Windows.Forms.Control callbackWindow)
        {
            this.waveFormat = desiredFormat;
            // Delegate kept in a field so it is not garbage-collected while the
            // unmanaged waveIn driver still holds a pointer to it.
            callback = new WaveInterop.WaveCallback(Callback);
            if (callbackWindow == null)
            {
                // Function-callback mode: the driver invokes our delegate directly.
                MmException.Try(WaveInterop.waveInOpen(out waveInHandle, deviceNumber, desiredFormat, callback, 0, WaveInterop.CallbackFunction), "waveInOpen");
            }
            else
            {
                // Window-callback mode: the driver posts messages to the supplied
                // control's message loop; we subclass its window to intercept them.
                waveInWindow = new WaveWindowNative(callback);
                MmException.Try(WaveInterop.waveInOpenWindow(out waveInHandle, deviceNumber, desiredFormat, callbackWindow.Handle, 0, WaveInterop.CallbackWindow), "waveInOpen");
                waveInWindow.AssignHandle(callbackWindow.Handle);
            }

            // Default to three buffers of 100ms each
            // (bufferSize = bytes for 100 ms at the requested format).
            int bufferSize = desiredFormat.AverageBytesPerSecond / 10;
            numBuffers = 3;

            buffers = new WaveInBuffer[numBuffers];
            for (int n = 0; n < numBuffers; n++)
            {
                buffers[n] = new WaveInBuffer(waveInHandle, bufferSize);
            }
        }
Esempio n. 12
0
 /// <summary>
 /// Wraps a render callback as a 16-bit wave provider and pre-allocates a
 /// small zeroed buffer for silence fill.
 /// </summary>
 /// <param name="format">Audio format served by this provider</param>
 /// <param name="renderCallback">Callback that fills audio buffers on demand</param>
 /// <param name="syncLock">Shared lock object for render synchronization</param>
 public CallbackWaveProvider16(WaveFormat format, RenderAudioBufferDelegate renderCallback, object syncLock)
 {
     SyncLock = syncLock;
     m_Format = format;
     RenderCallback = renderCallback;

     // (bytes per sample) * channels * 2 — same size as the original.
     int bytesPerSample = m_Format.BitsPerSample / 8;
     SilenceBuffer = new byte[bytesPerSample * m_Format.Channels * 2];
 }
 /// <summary>
 /// Adds a new mixer input
 /// </summary>
 /// <param name="mixerInput">Mixer input</param>
 /// <exception cref="InvalidOperationException">Input limit reached</exception>
 /// <exception cref="ArgumentException">Format differs from the mixer's</exception>
 public void AddSequencingInput(RenderItemSampleProvider mixerInput)
 {
     // Fix: validate the format BEFORE mutating state. The original added the
     // input first and checked afterwards, which left a rejected (wrong-format)
     // input in the source list when the exception was thrown.
     if (this.waveFormat == null)
     {
         // First input establishes the mixer's format.
         this.waveFormat = mixerInput.WaveFormat;
     }
     else if (this.WaveFormat.SampleRate != mixerInput.WaveFormat.SampleRate ||
              this.WaveFormat.Channels != mixerInput.WaveFormat.Channels)
     {
         throw new ArgumentException("All mixer inputs must have the same WaveFormat");
     }

     // we'll just call the lock around add since we are protecting against an AddMixerInput at
     // the same time as a Read, rather than two AddMixerInput calls at the same time
     lock (sources)
     {
         if (this.sources.Count >= maxInputs)
         {
             throw new InvalidOperationException("Too many mixer inputs");
         }
         this.sources.Add(mixerInput);
         lastSample = Math.Max(lastSample, mixerInput.LastSample);
     }
 }
Esempio n. 14
0
        //TODO wrap WaveIn to allow DI
        /// <summary>
        /// Binds this input to a WaveIn driver, wires the data callback and
        /// prepares the composite and per-line float formats.
        /// </summary>
        /// <param name="format">Capture format to apply to the driver</param>
        /// <param name="driver">The WaveIn device to record from</param>
        /// <exception cref="ArgumentNullException">format or driver is null</exception>
        public void Initialise(WaveFormat format, WaveIn driver)
        {
            if (driver == null)
            {
                throw new ArgumentNullException("driver", "Must specify a WaveIn device instance");
            }

            if (format == null)
            {
                throw new ArgumentNullException("format", "Must specify an audio format");
            }

            this.driver = driver;
            driver.DataAvailable += device_DataAvailable;

            var caps = WaveIn.GetCapabilities(driver.DeviceNumber);
            driver.WaveFormat = format;

            device = new WaveInDeviceData
            {
                Driver = driver,
                Name = caps.ProductName,
                Channels = caps.Channels,
                Buffers = new float[caps.Channels][]
            };

            // Composite format spans all device channels; per-line format is mono.
            Format = WaveFormat.CreateIeeeFloatWaveFormat(format.SampleRate, device.Channels);
            formatPerLine = WaveFormat.CreateIeeeFloatWaveFormat(format.SampleRate, 1);

            mapInputs(device.Channels);
        }
Esempio n. 15
0
        /// <summary>
        /// Creates the network client, sets up loopback capture on the given
        /// device, and connects to the remote endpoint with a hail message
        /// describing the target audio format and codec.
        /// </summary>
        /// <param name="endpoint">Remote peer to connect to</param>
        /// <param name="device">Render device to loopback-capture</param>
        /// <param name="codec">Codec whose rate/channels define the target format</param>
        public static void Connect(IPEndPoint endpoint, MMDevice device, ICodec codec)
        {
            var config = new NetPeerConfiguration("airgap");

            _client = new NetClient(config);
            _client.RegisterReceivedCallback(MessageReceived);

            _client.Start();

            _waveIn = new WasapiLoopbackCapture(device);
            _codec = codec;

            _sourceFormat = _waveIn.WaveFormat;
            _targetFormat = new WaveFormat(_codec.SampleRate, _codec.Channels); // format to convert to

            // Capture callbacks are wired before connecting.
            _waveIn.DataAvailable += SendData;
            _waveIn.RecordingStopped += (sender, args) => Console.WriteLine("Stopped");
            // TODO: RecordingStopped is called when you change the audio device settings, should recover from that

            // The hail message carries channels, rate and codec name so the
            // receiver can configure its decoder.
            NetOutgoingMessage formatMsg = _client.CreateMessage();
            formatMsg.Write(_targetFormat.Channels);
            formatMsg.Write(_targetFormat.SampleRate);
            formatMsg.Write(codec.Name);

            _client.Connect(endpoint, formatMsg);
        }
 /// <summary>
 /// Creates a new 32 bit WaveMixerStream: IEEE-float stereo at 44.1 kHz,
 /// auto-stop enabled and no inputs yet.
 /// </summary>
 public WaveMixerStream32Custom()
 {
     autoStop = true;
     // 4 bytes per sample = 32-bit float.
     waveFormat = WaveFormat.CreateIeeeFloatWaveFormat(44100, 2);
     bytesPerSample = 4;
     inputStreams = new List<WaveStream>();
 }
 /// <summary>
 /// Creates a new 32 bit WaveMixerStream at the caller-supplied sample
 /// rate and channel count, with auto-stop enabled and no inputs yet.
 /// </summary>
 /// <param name="sampleRate">Output sample rate in Hz</param>
 /// <param name="channels">Output channel count</param>
 public RecordableMixerStream32(int sampleRate, int channels)
 {
     autoStop = true;
     // 4 bytes per sample = 32-bit float.
     waveFormat = WaveFormat.CreateIeeeFloatWaveFormat(sampleRate, channels);
     bytesPerSample = 4;
     inputStreams = new List<WaveStream>();
 }
Esempio n. 18
0
        /// <summary>
        /// Prepares ToxAV transmission and starts full-duplex audio: incoming
        /// audio plays through a buffered WaveOut device while the default
        /// WaveIn device records frames sized to the codec's frame duration.
        /// </summary>
        /// <exception cref="Exception">No input or no output device present</exception>
        public void Start()
        {
            if (WaveIn.DeviceCount < 1)
                throw new Exception("Insufficient input device(s)!");

            if (WaveOut.DeviceCount < 1)
                throw new Exception("Insufficient output device(s)!");

            // Samples per frame = rate (Hz) * frame duration (ms) / 1000.
            frame_size = toxav.CodecSettings.audio_sample_rate * toxav.CodecSettings.audio_frame_duration / 1000;

            toxav.PrepareTransmission(CallIndex, false);

            WaveFormat format = new WaveFormat((int)toxav.CodecSettings.audio_sample_rate, (int)toxav.CodecSettings.audio_channels);
            // Playback side: discard rather than throw when the buffer overflows.
            wave_provider = new BufferedWaveProvider(format);
            wave_provider.DiscardOnBufferOverflow = true;

            wave_out = new WaveOut();
            //wave_out.DeviceNumber = config["device_output"];
            wave_out.Init(wave_provider);

            // Capture side: one buffer per codec frame duration.
            wave_source = new WaveIn();
            //wave_source.DeviceNumber = config["device_input"];
            wave_source.WaveFormat = format;
            wave_source.DataAvailable += wave_source_DataAvailable;
            wave_source.RecordingStopped += wave_source_RecordingStopped;
            wave_source.BufferMilliseconds = (int)toxav.CodecSettings.audio_frame_duration;
            wave_source.StartRecording();

            wave_out.Play();
        }
Esempio n. 19
0
 /// <summary>
 /// Describes a slice of an existing sample buffer (no copy is taken).
 /// </summary>
 /// <param name="sampleData">Backing sample buffer</param>
 /// <param name="waveFormat">Format of the samples</param>
 /// <param name="startIndex">First sample of the slice</param>
 /// <param name="length">Number of samples in the slice</param>
 public SampleSource(float[] sampleData, WaveFormat waveFormat, int startIndex, int length)
 {
     SampleData = sampleData;
     SampleWaveFormat = waveFormat;
     StartIndex = startIndex;
     Length = length;
 }
Esempio n. 20
0
        /// <summary>
        /// Stores the target format and immediately loads the bank's media.
        /// </summary>
        /// <param name="targetFormat">Format the bank's media is served in</param>
        protected MediaBankBase(WaveFormat targetFormat)
        {
            TargetWaveFormat = targetFormat;
            Random = new Random();

            // NOTE(review): if LoadMedia is virtual, calling it from the base
            // constructor runs derived overrides before their state is
            // initialized — confirm against the class hierarchy.
            LoadMedia();
        }
Esempio n. 21
0
        /// <summary>
        /// Converts the raw recording to a 16 kHz / 16-bit / mono WAV file,
        /// then encodes that WAV to FLAC at maximum compression.
        /// </summary>
        private void convert()
        {
            using (var reader = new WaveFileReader(path + rawFile))
            {
                // Target format for speech-style processing: 16 kHz, 16-bit, mono.
                var newFormat = new NAudio.Wave.WaveFormat(16000, 16, 1);
                using (var conversionStream = new WaveFormatConversionStream(newFormat, reader))
                {
                    WaveFileWriter.CreateWaveFile(path + wavFile, conversionStream);
                }
            }

            if (!File.Exists(path + wavFile))
            {
                Console.WriteLine("wav file no!");
            }
            else
            {
                using (FileStream sourceStream = new FileStream(path + wavFile, FileMode.Open))
                {
                    WAVReader audioSource = new WAVReader(path + wavFile, sourceStream);

                    // 0x10000 = 64 Ki sample-buffer for the transcode loop.
                    AudioBuffer buff        = new AudioBuffer(audioSource, 0x10000);
                    FlakeWriter flakeWriter = new FlakeWriter(path + flacFile, audioSource.PCM);

                    // 8 is the maximum FLAC compression level.
                    flakeWriter.CompressionLevel = 8;
                    // Loop until the reader reports no more data.
                    while (audioSource.Read(buff, -1) != 0)
                    {
                        flakeWriter.Write(buff);
                    }

                    // NOTE(review): Close is skipped if Write throws — consider
                    // try/finally (or using, if these types are IDisposable).
                    flakeWriter.Close();
                    audioSource.Close();
                }
            }
        }
 /// <summary>
 /// Constructs a new MediaFoundationTransform wrapper
 /// Will read one second at a time
 /// </summary>
 /// <param name="sourceProvider">The source provider for input data to the transform</param>
 /// <param name="outputFormat">The desired output format</param>
 public MediaFoundationTransform(IWaveProvider sourceProvider, WaveFormat outputFormat)
 {
     outputWaveFormat = outputFormat;
     this.sourceProvider = sourceProvider;

     // Pre-size both buffers; the output buffer can be grown later if the
     // transform produces more data than expected.
     sourceBuffer = new byte[ComputeSourceBufferSize(sourceProvider)];
     outputBuffer = new byte[ComputeOutputBufferSize(outputFormat)];
 }
Esempio n. 23
0
        /// <summary>Constructor - Supports opening a FLAC file</summary>
        /// <param name="flacFileName">Path of the FLAC file to decode</param>
        /// <exception cref="ApplicationException">Decoder allocation or stream initialization failed</exception>
        public FLACFileReader(string flacFileName)
        {
            // Open the flac file for reading through a binary reader
            m_stream = File.OpenRead(flacFileName);
            m_reader = new BinaryReader(m_stream);
            // Create the FLAC decoder
            m_decoderContext = LibFLACSharp.FLAC__stream_decoder_new();

            if (m_decoderContext == IntPtr.Zero)
                throw new ApplicationException("FLAC: Could not initialize stream decoder!");

            // Create call back delegates; stored in fields so the GC cannot
            // collect them while native libFLAC still holds their pointers.
            m_writeCallback = new LibFLACSharp.Decoder_WriteCallback(FLAC_WriteCallback);
            m_metadataCallback = new LibFLACSharp.Decoder_MetadataCallback(FLAC_MetadataCallback);
            m_errorCallback = new LibFLACSharp.Decoder_ErrorCallback(FLAC_ErrorCallback);

            // Initialize the FLAC decoder (non-zero return indicates failure)
            if (LibFLACSharp.FLAC__stream_decoder_init_file(m_decoderContext,
                                               flacFileName, m_writeCallback, m_metadataCallback, m_errorCallback,
                                               IntPtr.Zero) != 0)
                throw new ApplicationException("FLAC: Could not open stream for reading!");

            // Process the meta-data (but not the audio frames) so we can prepare the NAudio wave format
            FLACCheck(
                LibFLACSharp.FLAC__stream_decoder_process_until_end_of_metadata(m_decoderContext),
                "Could not process until end of metadata");

            // Initialize NAudio wave format from the stream info captured by the metadata callback
            m_waveFormat = new WaveFormat(m_flacStreamInfo.SampleRate, m_flacStreamInfo.BitsPerSample, m_flacStreamInfo.Channels);
        }
Esempio n. 24
0
        /// <summary>
        /// Opens MP3 from a stream rather than a file
        /// Will not dispose of this stream itself
        /// </summary>
        /// <param name="inputStream">Stream positioned at the start of the MP3 data</param>
        public Mp3FileReader(Stream inputStream)
        {
            int sampleRate;
            int bitRate;

            mp3Stream = inputStream;
            id3v2Tag = Id3v2Tag.ReadTag(mp3Stream);

            // Audio data starts right after any ID3v2 tag.
            dataStartPosition = mp3Stream.Position;
            Mp3Frame mp3Frame = new Mp3Frame(mp3Stream);
            sampleRate = mp3Frame.SampleRate;
            frameLengthInBytes = mp3Frame.FrameLength;
            bitRate = mp3Frame.BitRate;
            xingHeader = XingHeader.LoadXingHeader(mp3Frame);

            this.length = mp3Stream.Length - dataStartPosition;

            // try for an ID3v1 tag as well (a fixed 128-byte trailer)
            mp3Stream.Position = mp3Stream.Length - 128;
            byte[] tag = new byte[128];
            // Fix: read the entire 128-byte tag, not just the 3-byte "TAG"
            // marker — the original stored a mostly-zero buffer as the tag.
            int bytesRead = mp3Stream.Read(tag, 0, tag.Length);
            if (bytesRead == tag.Length && tag[0] == 'T' && tag[1] == 'A' && tag[2] == 'G')
            {
                id3v1Tag = tag;
                this.length -= 128;
            }

            mp3Stream.Position = dataStartPosition;

            // TODO: choose more appropriately
            waveFormat = new Mp3WaveFormat(sampleRate, 2, frameLengthInBytes, bitRate);
        }
        /// <summary>
        /// Loopback capture provider that also initialises a silence-playing
        /// output on the same device (presumably to keep the loopback stream
        /// delivering data — TODO confirm).
        /// </summary>
        /// <param name="device">Render device to capture from</param>
        public WasapiLoopbackCaptureProvider(MMDevice device)
            : base(new WasapiLoopbackCapture(device))
        {
            // Shared mode, event sync, 200 ms latency.
            _wasapiOut = new WasapiOut(device, AudioClientShareMode.Shared, true, 200);
            _wasapiOut.Init(new SilenceProvider(Wf.CreateIeeeFloatWaveFormat(44100, 2)));
        }
Esempio n. 26
0
        /// <summary>
        /// Reads the RIFF/WAVE header from the stream and positions at the
        /// first sample. On a malformed header, the stream is disposed only
        /// when this reader owns it.
        /// </summary>
        /// <param name="inputStream">Stream containing a WAV file including header</param>
        /// <param name="ownInput">Whether this reader owns (and may dispose) the stream</param>
        private WaveFileReader(Stream inputStream, bool ownInput)
        {
            waveStream = inputStream;
            var chunkReader = new WaveFileChunkReader();
            try
            {
                chunkReader.ReadWaveHeader(inputStream);
                waveFormat = chunkReader.WaveFormat;
                dataPosition = chunkReader.DataChunkPosition;
                dataChunkLength = chunkReader.DataChunkLength;
                chunks = chunkReader.RiffChunks;
            }
            catch
            {
                // Only clean up a stream we own; the caller keeps theirs alive.
                if (ownInput)
                {
                    inputStream.Dispose();
                }

                throw;
            }

            Position = 0;
            this.ownInput = ownInput;
        }
Esempio n. 27
0
 /// <summary>
 /// Applies the requested format to the (lazily created) capture device
 /// and begins recording, flagging the capturing state.
 /// </summary>
 /// <param name="captureFormat">Format to capture in</param>
 private void StartCapture(WaveFormat captureFormat)
 {
     EnsureDeviceIsCreated();
     captureDevice.WaveFormat = captureFormat;
     captureDevice.StartRecording();
     IsCapturing = true;
 }
Esempio n. 28
0
 /// <summary>
 /// Creates a Wave File Reader based on an input stream
 /// </summary>
 /// <param name="inputStream">The input stream containing a WAV file including header</param>
 public GrainWaveProvider(Stream inputStream)
 {
     this.waveStream = inputStream;
     var chunkReader = new WaveFileChunkReader();
     chunkReader.ReadWaveHeader(inputStream);
     this.waveFormat = chunkReader.WaveFormat;
     this.dataPosition = chunkReader.DataChunkPosition;
     this.dataChunkLength = chunkReader.DataChunkLength;
     this.chunks = chunkReader.RiffChunks;            
 	
     // Seek to the first audio sample.
     waveStream.Position = dataPosition;
     
     // Total sample count across all channels; TryReadFloat is assumed to
     // decode one sample per call — TODO confirm its contract.
     var samples = (dataChunkLength / BlockAlign) * waveFormat.Channels;
 	FSample = new float[samples];
 	
 	// Pre-load the whole data chunk into the float buffer.
 	for(int i=0; i<samples; i++)
 	{
 		TryReadFloat(out FSample[i]);
 	}
 	
 	// After loading, report the format as IEEE float at the same rate/channels.
 	waveFormat = WaveFormat.CreateIeeeFloatWaveFormat(waveFormat.SampleRate, waveFormat.Channels);
 	
 	//grain - default granular synthesis parameters
 	FGrain = new Grain();
 	
 	FGrain.SampleRate = waveFormat.SampleRate;
 	FGrain.Start = 20000;
 	
 	FGrain.Length = 1024;
 	FGrain.Freq = 440;
 	FGrain.Index = 0;
 	
 }
 /// <summary>
 /// Sample provider that takes ownership of a set of disposables alongside
 /// the wrapped source; exposes the source's format unchanged.
 /// </summary>
 /// <param name="provider">The source sample provider to wrap</param>
 /// <param name="disposables">Resources to dispose together with this provider</param>
 public AutoDisposeSampleProvider(ISampleProvider provider,
      IEnumerable<IDisposable> disposables)
 {
     _provider = provider;
     _disposables = new CompositeDisposable(disposables);
     WaveFormat = provider.WaveFormat;
 }
Esempio n. 30
0
 /// <summary>
 /// Verifies marshalled structure sizes: the base format marshals to 18
 /// bytes; the ADPCM format adds 32 bytes on top of that.
 /// </summary>
 public void StructureSizeIsCorrect()
 {
     WaveFormat waveFormat = new WaveFormat(8000, 16, 1);
     Assert.AreEqual(18, Marshal.SizeOf(waveFormat), "WaveFormat Size");

     AdpcmWaveFormat adpcmWaveFormat = new AdpcmWaveFormat(8000,1);
     // Fixed copy-pasted assertion message (was "WaveFormat Size").
     Assert.AreEqual(18 + 32, Marshal.SizeOf(adpcmWaveFormat), "AdpcmWaveFormat Size");
 }
Esempio n. 31
0
        /// <summary>
        /// Transcodes the item's current file to the preset's format, writing
        /// to a temp file that is registered with the item for later cleanup.
        /// </summary>
        /// <param name="job">Job carrying the encoding preset</param>
        /// <param name="item">Item whose last file is transcoded</param>
        /// <exception cref="InvalidOperationException">No encoder media type available</exception>
        public override void ProcessItem(IJob job, IJobItem item)
        {
            var bitRate = job.Preset.BitRate;
            var sampleRate = job.Preset.SampleRate;
            var tempdir = Path.GetTempPath();
            var tempfile = Path.Combine(tempdir, DateTime.Now.Ticks + "." + job.Preset.Extension);

            var subType = this.GetAudioSubtypeForExtension(job.Preset.Extension);
            var waveFormat = new WaveFormat(sampleRate, 2);

            var mediaType = MediaFoundationEncoder.SelectMediaType(subType, waveFormat, bitRate);

            if (mediaType == null)
            {
                // Fixed: the original silently skipped encoding yet still
                // registered a temp file that was never written.
                throw new InvalidOperationException(
                    "No media type available for " + job.Preset.Extension + " at " + bitRate + " bps");
            }

            using (var decoder = new MediaFoundationReader(item.LastFile))
            using (var encoder = new MediaFoundationEncoder(mediaType))
            {
                encoder.Encode(tempfile, decoder);
            }

            // Register only a file that actually exists.
            item.TemporaryFiles.Add(tempfile);
        }
        /// <summary>
        /// WaveStream to resample using the DMO Resampler
        /// </summary>
        /// <param name="inputProvider">Input Stream</param>
        /// <param name="outputFormat">Desired Output Format</param>
        /// <exception cref="ArgumentException">The resampler rejects the input or output format</exception>
        public ResamplerDmoStream(IWaveProvider inputProvider, WaveFormat outputFormat)
        {
            this.inputProvider = inputProvider;
            // May be null: position tracking is only possible for WaveStreams.
            this.inputStream = inputProvider as WaveStream;
            this.outputFormat = outputFormat;
            this.resampler = new Resampler();
            // Validate both formats against the DMO before committing them.
            if (!resampler.MediaObject.SupportsInputWaveFormat(0, inputProvider.WaveFormat))
            {
                throw new ArgumentException("Unsupported Input Stream format", "inputStream");
            }

            resampler.MediaObject.SetInputWaveFormat(0, inputProvider.WaveFormat);
            if (!resampler.MediaObject.SupportsOutputWaveFormat(0, outputFormat))
            {
                throw new ArgumentException("Unsupported Output Stream format", "outputStream");
            }

            resampler.MediaObject.SetOutputWaveFormat(0, outputFormat);
            if (inputStream != null)
            {
                // Carry the source position through to output coordinates.
                position = InputToOutputPosition(inputStream.Position);
            }
            // One second of buffer on each side, sized by the respective formats.
            this.inputMediaBuffer = new MediaBuffer(inputProvider.WaveFormat.AverageBytesPerSecond);
            this.outputBuffer = new DmoOutputDataBuffer(outputFormat.AverageBytesPerSecond);
        }
Esempio n. 33
0
        /// <summary>
        /// Wraps a signal process as a stereo node: two input sinks feed the
        /// process and two output sources carry its result.
        /// </summary>
        /// <param name="format">Source format; only its sample rate is used</param>
        /// <param name="process">The signal process to host</param>
        /// <exception cref="ArgumentNullException">format or process is null</exception>
        public StereoSignalNode(WaveFormat format, ISignalProcess process)
        {
            if (process == null)
            {
                throw new ArgumentNullException("process", "Process cannot be null");
            }

            if (format == null)
            {
                throw new ArgumentNullException("format", "Must specify audio format");
            }

            // Force a two-channel IEEE float format at the source sample rate.
            this.format = WaveFormat.CreateIeeeFloatWaveFormat(format.SampleRate, 2);

            SignalProcess = process;
            SignalProcess.Format = this.format;
            Name = string.Format("{0} (Stereo)", SignalProcess.Name);

            // One sink per input channel, both routed to this node.
            LeftIn = new SignalSink(this);
            RightIn = new SignalSink(this);
            LeftIn.ReceivedData += LeftIn_ReceivedData;
            RightIn.ReceivedData += RightIn_ReceivedData;

            // One source per output channel.
            LeftOut = new SignalSource(this);
            RightOut = new SignalSource(this);
        }
Esempio n. 34
0
        /// <summary>
        /// Creates a WAV writer named after the stream's title and the
        /// current timestamp, in the stream's own format.
        /// </summary>
        private void InitializeRecorder()
        {
            NAudio.Wave.WaveFormat wf = stream.GetWaveFormat();

            // Fixed: the original formatted with culture-dependent
            // DateTime.Now.ToString() and patched separators with Replace,
            // which produced locale-dependent (and potentially invalid)
            // file names. Use an explicit, invariant pattern instead.
            string datetime = DateTime.Now.ToString("yyyy-MM-dd HH-mm-ss",
                System.Globalization.CultureInfo.InvariantCulture);
            string fileName = stream.GetTitle() + " " + datetime + ".wav";

            wfw = new WaveFileWriter(fileName, wf);
        }
Esempio n. 35
0
 // Method to create wav file from float array.
 // Writes every sample to "<name>.wav" in the supplied format; disposing
 // the writer finalises the WAV header.
 public static void createWav(float[] array, String name, NAudio.Wave.WaveFormat audio)
 {
     using (var writer = new NAudio.Wave.WaveFileWriter(name + ".wav", audio))
     {
         writer.WriteSamples(array, 0, array.Length);
     }
 }
Esempio n. 36
0
 /// <summary>
 /// Initialises the wavetailer with its default settings: 44.1 kHz,
 /// 16-bit, stereo, little-endian, no source directory and no wave
 /// format assigned yet.
 /// </summary>
 public LSWavetail()
 {
     srcdir = "";
     id = "wavetailer";
     samplerate = 44100;
     big_endian = false;
     bitness = 16;
     chans = 2;
     delete = 10;
     wf = null;
 }
        /// <summary>
        /// Start recording on the device in the parameter.
        /// </summary>
        /// <param name="recordingDevice">the device to start recording</param>
        /// <returns>true if the recording is started, or false</returns>
        public bool StartRecordingSetDevice(MMDevice recordingDevice)
        {
            if (recordingDevice == null)
            {
                logger.Log(Properties.Strings.MessageBox_NoRecordingDevices);
                return(false);
            }

            try
            {
                // Loopback capture records what the device is playing.
                soundIn = new CSCore.SoundIn.WasapiLoopbackCapture
                {
                    Device = recordingDevice
                };

                soundIn.Initialize();
                soundInSource = new SoundInSource(soundIn)
                {
                    FillWithZeros = false
                };
                // Normalize the capture to 44.1 kHz / 16-bit, then to stereo.
                convertedSource              = soundInSource.ChangeSampleRate(44100).ToSampleSource().ToWaveSource(16);
                convertedSource              = convertedSource.ToStereo();
                soundInSource.DataAvailable += OnDataAvailable;
                soundIn.Stopped             += OnRecordingStopped;
                soundIn.Start();

                // Mirror the CSCore format as an equivalent NAudio WaveFormat.
                var format = convertedSource.WaveFormat;
                waveFormat     = NAudio.Wave.WaveFormat.CreateCustomFormat(WaveFormatEncoding.Pcm, format.SampleRate, format.Channels, format.BytesPerSecond, format.BlockAlign, format.BitsPerSample);
                isRecording    = true;
                // Double-buffering: half a second of audio per buffer.
                bufferCaptured = new BufferBlock()
                {
                    Data = new byte[convertedSource.WaveFormat.BytesPerSecond / 2]
                };
                bufferSend = new BufferBlock()
                {
                    Data = new byte[convertedSource.WaveFormat.BytesPerSecond / 2]
                };

                // Background thread pumps capture events; the WeakReference
                // lets the recorder be collected even while the thread runs.
                eventThread = new Thread(EventThread)
                {
                    Name         = "Loopback Event Thread",
                    IsBackground = true
                };
                eventThread.Start(new WeakReference <LoopbackRecorder>(this));

                return(true);
            }
            catch (Exception ex)
            {
                // Initialization failed (e.g. device in exclusive use); report and fall through.
                logger.Log(ex, "Error initializing the recording device:");
            }

            return(false);
        }
Esempio n. 38
0
        // Method for creating float array from given wav file.
        // Loads the file's samples into originalWavSamples and configures the
        // algorithm for the file's sample rate.
        public void wavToFloatArray(string path)
        {
            audio = new NAudio.Wave.AudioFileReader(path);
            NAudio.Wave.WaveFormat waveFormat = audio.WaveFormat;
            // Running total of bytes across processed files.
            midLen += audio.Length;

            algo.set_fs(waveFormat.SampleRate);
            algo.defineStepAndLength();
            // Length is in bytes; /4 assumes 32-bit float samples — TODO
            // confirm this holds for every input the reader accepts.
            originalWavSamples = new float[audio.Length / 4];
            algo.setSamples(originalWavSamples.Length);
            // NOTE(review): Read's return value (samples actually read) is ignored.
            audio.Read(originalWavSamples, 0, originalWavSamples.Length);
            audio.Close();
        }
Esempio n. 39
0
        ///Take .wav and make it a mono-channel file
        /// <summary>
        /// Resamples a WAV file down to a single channel at its original rate.
        /// </summary>
        /// <param name="sourceFile">Input WAV path</param>
        /// <param name="outputFile">Optional output path; defaults to the
        /// previously hard-coded location for backward compatibility</param>
        public void StereoToMono(string sourceFile, string outputFile = null)
        {
            // Generalized: the output path used to be hard-coded to a
            // developer-machine directory; it is now overridable.
            outputFile = outputFile ?? @"C:\Users\Fazle\source\repos\Practice\Media\currentMono.wav";

            using (var waveFileReader = new WaveFileReader(sourceFile))
            {
                // Same sample rate, one channel.
                var outFormat = new NAudio.Wave.WaveFormat(waveFileReader.WaveFormat.SampleRate, 1);
                using (var resampler = new MediaFoundationResampler(waveFileReader, outFormat))
                {
                    WaveFileWriter.CreateWaveFile(outputFile, resampler);
                }
            }
        }
Esempio n. 40
0
        /// <summary>
        /// Creates an Opus codec with the given bit rate, sample rate and
        /// application mode, then builds the matching encoder/decoder pair.
        /// </summary>
        /// <param name="bitRate">Target encoder bit rate.</param>
        /// <param name="outputSampleRate">Sample rate of the PCM going in/out of the codec.</param>
        /// <param name="opusMode">Opus application mode (e.g. VoIP vs. audio).</param>
        public OpusCodec(int bitRate, int outputSampleRate, FragLabs.Audio.Codecs.Opus.Application opusMode)
        {
            this.outputSampleRate = outputSampleRate;
            this.bitRate          = bitRate;
            this.opusMode         = opusMode;

            // 960 frames per segment — presumably the Opus frame size (20 ms at
            // 48 kHz); TODO confirm against the encoder configuration.
            _segmentFrames = 960;

            // NOTE(review): WaveFormat's second argument is bits-per-sample, so
            // "16 * channels" is only correct when channels == 1 — confirm intent.
            _recordFormat = new WaveFormat(outputSampleRate, 16 * channels, channels);

            CreateEncoder();
            CreateDecoder();
        }
Esempio n. 41
0
        /// <summary>
        /// Selects the microphone endpoint and initializes its audio client in
        /// shared mode using the device's own mix format.
        /// </summary>
        /// <param name="mic">The capture endpoint to use.</param>
        public void SetMicrophone(MMDevice mic)
        {
            microphone     = mic;
            micAudioClient = mic.AudioClient;

            // Initialize the client with the format the audio engine mixes in.
            NAudio.Wave.WaveFormat mixFormat = micAudioClient.MixFormat;
            micAudioClient.Initialize(AudioClientShareMode.Shared, AudioClientStreamFlags.None, 1000, 0, mixFormat, audioSession);

            // Bytes per frame = channels * (bits per sample / 8).
            micFrameSize = mixFormat.Channels * mixFormat.BitsPerSample / 8;

            int bufferSize = micAudioClient.BufferSize;
            Console.WriteLine("INFO: Microphone Buffer size " + bufferSize.ToString() + " Frame size " + micFrameSize.ToString());
            Console.WriteLine("INFO: Microphone wave format " + mixFormat.ToString());
        }
Esempio n. 42
0
        /// <summary>
        /// Decodes an MP3 file to PCM and writes the raw data into the
        /// supplied stream, rewinding it to position 0.
        /// </summary>
        /// <param name="pathToMp3">Path of the .mp3 file to convert.</param>
        /// <param name="outputStream">Stream that receives the PCM data.</param>
        /// <param name="sampleRate">Target sample rate in Hz.</param>
        /// <param name="bitDepth">Target bits per sample.</param>
        /// <param name="numChannels">Target channel count.</param>
        /// <returns>The target <see cref="WaveFormat"/> the data was converted to.</returns>
        private static WaveFormat mp3ToWav(string pathToMp3, Stream outputStream, int sampleRate, int bitDepth, int numChannels)
        {
            using (var reader = new Mp3FileReader(pathToMp3))
            {
                var targetFormat = new NAudio.Wave.WaveFormat(sampleRate, bitDepth, numChannels);
                // 'using' (instead of Close) guarantees disposal even if the copy throws.
                using (var pcmStream = new WaveFormatConversionStream(targetFormat, reader))
                {
                    // CopyTo loops internally, so partial Read results are handled
                    // (a single Read call is not guaranteed to return all bytes).
                    pcmStream.CopyTo(outputStream);
                    outputStream.Position = 0;
                    return targetFormat;
                }
            }
        }
Esempio n. 43
0
        /// <summary>
        /// Selects the speaker endpoint and initializes its audio client in
        /// shared mode using the device's own mix format.
        /// </summary>
        /// <param name="speak">The render endpoint to use.</param>
        public void SetSpeaker(MMDevice speak)
        {
            speaker          = speak;
            speakAudioClient = speak.AudioClient;

            // Probe whether 44.1 kHz / 32-bit / stereo is supported and log the result.
            Console.WriteLine(speakAudioClient.IsFormatSupported(AudioClientShareMode.Shared, new NAudio.Wave.WaveFormat(44100, 32, 2)));

            // Initialize the client with the format the audio engine mixes in.
            NAudio.Wave.WaveFormat mixFormat = speakAudioClient.MixFormat;
            speakAudioClient.Initialize(AudioClientShareMode.Shared, AudioClientStreamFlags.None, 100000000, 0, mixFormat, audioSession);

            // Bytes per frame = channels * (bits per sample / 8).
            speakFrameSize = mixFormat.Channels * mixFormat.BitsPerSample / 8;

            int bufferSize = speakAudioClient.BufferSize;
            Console.WriteLine("INFO: Speaker Buffer size " + bufferSize.ToString() + " Frame Size " + speakFrameSize.ToString());
            Console.WriteLine("INFO: Speaker wave format " + mixFormat.ToString() + " encoding " + mixFormat.Encoding);
        }
Esempio n. 44
0
        /// <summary>
        /// Creates a dump file for raw audio. For mono or stereo input a canned
        /// RIFF/WAVE header is written first and the file gets a .wav extension;
        /// any other (or null) format produces a headerless .pcm file.
        /// </summary>
        /// <param name="wf">Format of the audio that will be dumped; may be null.</param>
        /// <param name="tag">Tag embedded in the generated file name.</param>
        public Dumper(NAudio.Wave.WaveFormat wf, string tag)
        {
            string sheader = "";

            // Pre-built hex WAV headers (the byte values show 44 AC 00 00 =
            // 44100 Hz and 10 00 = 16-bit) with placeholder chunk sizes, since
            // the final stream length is unknown when the file is created.
            if (wf != null && wf.Channels == 1)
            {
                sheader = "52 49 46 46 24 ff ff 7f 57 41 56 45 66 6D 74 20 10 00 00 00 01 00 01 00 44 AC 00 00 88 58 01 00 02 00 10 00 64 61 74 61 00 ff ff 7f";
            }
            else if (wf != null && wf.Channels == 2)
            {
                sheader = "52 49 46 46 24 ff ff 7f 57 41 56 45 66 6D 74 20 10 00 00 00 01 00 02 00 44 AC 00 00 10 B1 02 00 04 00 10 00 64 61 74 61 00 ff ff 7f";
            }

            // Decode the hex string (empty for unsupported formats) into bytes.
            sheader = sheader.Replace(" ", "");
            byte[] header = new byte[sheader.Length / 2];
            if (header.Length > 0)
            {
                for (int a = 0; a < header.Length; a++)
                {
                    header[a] = Convert.ToByte(sheader.Substring(a * 2, 2), 16);
                }
            }

            // With a header the file is a playable .wav, otherwise raw .pcm.
            string ext = "pcm";

            if (header.Length > 2)
            {
                ext = "wav";
            }

            string fn;

            // The lock guards the shared counter so concurrent Dumper instances
            // get unique file names.
            lock (lck)
            {
                string ts = System.DateTime.UtcNow.ToString("yyyy-MM-dd_HH.mm.ss");
                fn = string.Format("Loopstream-{0}-{1}-{2}.{3}", ts, tag, ++ctr, ext);
            }
            fs = new System.IO.FileStream(fn, System.IO.FileMode.Create);
            if (header.Length > 0)
            {
                fs.Write(header, 0, header.Length);
            }
        }
Esempio n. 45
0
        /// <summary>
        /// Converts a WMA file to a WAV stream
        /// </summary>
        /// <param name="pathToWma">Path to a .wma file to convert</param>
        /// <param name="outputStream">Stream to store the converted wav; rewound to position 0.</param>
        /// <param name="sampleRate">Target sample rate in Hz.</param>
        /// <param name="bitDepth">Target bits per sample.</param>
        /// <param name="numChannels">Target channel count.</param>
        /// <returns>The WaveFormat object of the converted wav</returns>
        /// <exception cref="ArgumentException">If the path does not look like a .wma file.</exception>
        private static WaveFormat wmaToWav(string pathToWma, Stream outputStream, int sampleRate, int bitDepth, int numChannels)
        {
            if (!Path.GetExtension(pathToWma).ToLowerInvariant().Contains("wma"))
            {
                throw new ArgumentException("Must be a .wma file!");
            }

            using (var reader = new WMAFileReader(pathToWma))
            {
                var targetFormat = new NAudio.Wave.WaveFormat(sampleRate, bitDepth, numChannels);
                // 'using' (instead of Close) guarantees disposal even if the copy throws.
                using (var pcmStream = new WaveFormatConversionStream(targetFormat, reader))
                {
                    // CopyTo loops internally, so partial Read results are handled
                    // (a single Read call is not guaranteed to return all bytes).
                    pcmStream.CopyTo(outputStream);
                    outputStream.Position = 0;
                    return targetFormat;
                }
            }
        }
Esempio n. 46
0
        // Sets up and plays music file that was read in
        /// <summary>
        /// Plays the audio data previously read into <c>waveIn</c> through a
        /// new WaveOut device, using the format described by the reader.
        /// </summary>
        public void playBack()
        {
            playback = new WaveOut();
            NAudio.Wave.WaveFormat waveFormat = new NAudio.Wave.WaveFormat(waveIn.SampleRate, waveIn.BitsPerSample, waveIn.NumChannels);

            // Copy the raw sample bytes in one call instead of a manual loop.
            byte[] sound = new byte[waveIn.data.Length];
            Array.Copy(waveIn.data, sound, sound.Length);

            BufferedWaveProvider bwp = new BufferedWaveProvider(waveFormat)
            {
                DiscardOnBufferOverflow = true
            };
            bwp.AddSamples(sound, 0, sound.Length);

            playback.Init(bwp);
            playback.Play();
        }
Esempio n. 47
0
        /// <summary>
        /// Sets up the Media Foundation audio renderer, driven by a 16 kHz
        /// stereo test signal generator. Errors are logged, not rethrown.
        /// </summary>
        private void buttonAudioSetup_Click(object sender, EventArgs e)
        {
            try
            {
                MMDevice device = null;

                var deviceEnum = new MMDeviceEnumerator();
                if (deviceEnum.HasDefaultAudioEndpoint(DataFlow.Render, Role.Console))
                {
                    device = deviceEnum.GetDefaultAudioEndpoint(DataFlow.Render, Role.Console);
                }

                // Guard: without a default render endpoint 'device' stays null
                // and the original code threw a NullReferenceException here.
                if (device == null)
                {
                    return;
                }

                device.Dispose();

                signalGenerator = new SignalGenerator(16000, 2);
                var signalFormat = signalGenerator.WaveFormat;

                audioRenderer = new MfAudioRenderer();
                AudioRendererArgs audioArgs = new AudioRendererArgs
                {
                    DeviceId      = "",
                    SampleRate    = signalFormat.SampleRate,
                    BitsPerSample = signalFormat.BitsPerSample,
                    Encoding      = (WaveEncodingTag)signalFormat.Encoding,
                    Channels      = signalFormat.Channels,
                };

                audioRenderer.Setup(audioArgs);
            }
            catch (Exception ex)
            {
                logger.Error(ex);
            }
        }
Esempio n. 48
0
        /// <summary>
        /// Decodes the whole file through BASS into raw bytes (decoded with the
        /// Float flag) and reports the resulting IEEE-float wave format.
        /// </summary>
        /// <param name="waveFormat">Receives the IEEE-float format of the decoded data.</param>
        /// <returns>The decoded sample data as raw bytes.</returns>
        /// <exception cref="Exception">If the file cannot be opened, the source
        /// is not stereo, or BASS fails to return data.</exception>
        public byte[] GetData(out WaveFormat waveFormat)
        {
            // Release any module loaded by a previous call on this channel.
            Bass.MusicFree(_channel);

            string fileName = _fileName;

            // Try to open as a regular stream first, then fall back to a
            // tracker module (MusicLoad).
            if ((_channel = Bass.CreateStream(fileName, 0, 0, BassFlags.Float | BassFlags.Decode)) == 0)
            {
                if ((_channel = Bass.MusicLoad(fileName, 0, 0,
                                               BassFlags.MusicSensitiveRamping | BassFlags.Float | BassFlags.Decode, 1)) == 0)
                {
                    throw new Exception("Can't open the file");
                }
            }

            Bass.ChannelGetInfo(_channel, out var info);
            waveFormat = WaveFormat.CreateIeeeFloatWaveFormat(info.Frequency, info.Channels);
            if (info.Channels != 2)
            {
                // Free with both APIs since either one may have opened the channel.
                Bass.MusicFree(_channel);
                Bass.StreamFree(_channel);

                throw new Exception("only stereo sources are supported");
            }

            // Decode the full channel length in one call.
            var len   = (int)Bass.ChannelGetLength(_channel);
            var bytes = new byte[len];
            var o     = Bass.ChannelGetData(_channel, bytes, len);

            if (o == -1)
            {
                throw new Exception(Bass.LastError.ToString());
            }

            return(bytes);
        }
Esempio n. 49
0
        /// <summary>
        /// Initialize for playing the specified wave stream
        /// </summary>
        /// <param name="waveProvider">IWaveProvider to play</param>
        /// <exception cref="NotSupportedException">If no playable format can be negotiated with the device.</exception>
        // Token: 0x060009C0 RID: 2496 RVA: 0x0001C298 File Offset: 0x0001A498
        public void Init(IWaveProvider waveProvider)
        {
            // Latency in 100-ns reference-time units.
            long num = (long)(this.latencyMilliseconds * 10000);

            this.outputFormat = waveProvider.WaveFormat;
            WaveFormatExtensible waveFormatExtensible;

            if (!this.audioClient.IsFormatSupported(this.shareMode, this.outputFormat, out waveFormatExtensible))
            {
                if (waveFormatExtensible == null)
                {
                    // The driver offered no closest match: try the mix format,
                    // then common bit depths at the source sample rate.
                    // (The original decompiled code redeclared and assigned the
                    // foreach iteration variable, which does not compile; a
                    // separate candidate variable fixes that.)
                    WaveFormat fallbackFormat = this.audioClient.MixFormat;
                    if (!this.audioClient.IsFormatSupported(this.shareMode, fallbackFormat))
                    {
                        fallbackFormat = null;
                        foreach (WaveFormatExtensible candidate in new WaveFormatExtensible[]
                        {
                            new WaveFormatExtensible(this.outputFormat.SampleRate, 32, this.outputFormat.Channels),
                            new WaveFormatExtensible(this.outputFormat.SampleRate, 24, this.outputFormat.Channels),
                            new WaveFormatExtensible(this.outputFormat.SampleRate, 16, this.outputFormat.Channels)
                        })
                        {
                            if (this.audioClient.IsFormatSupported(this.shareMode, candidate))
                            {
                                fallbackFormat = candidate;
                                break;
                            }
                        }
                        if (fallbackFormat == null)
                        {
                            // Last resort: 16-bit stereo at the source rate.
                            fallbackFormat = new WaveFormatExtensible(this.outputFormat.SampleRate, 16, 2);
                            if (!this.audioClient.IsFormatSupported(this.shareMode, fallbackFormat))
                            {
                                throw new NotSupportedException("Can't find a supported format to use");
                            }
                        }
                    }
                    this.outputFormat = fallbackFormat;
                }
                else
                {
                    this.outputFormat = waveFormatExtensible;
                }
                // Verify up-front that a DMO resampler can be built for the chosen format.
                using (new ResamplerDmoStream(waveProvider, this.outputFormat))
                {
                }
                this.dmoResamplerNeeded = true;
            }
            else
            {
                this.dmoResamplerNeeded = false;
            }
            this.sourceProvider = waveProvider;
            if (this.isUsingEventSync)
            {
                if (this.shareMode == AudioClientShareMode.Shared)
                {
                    this.audioClient.Initialize(this.shareMode, AudioClientStreamFlags.EventCallback, 0L, 0L, this.outputFormat, Guid.Empty);
                    // Read back the effective latency chosen by the engine.
                    this.latencyMilliseconds = (int)(this.audioClient.StreamLatency / 10000L);
                }
                else
                {
                    // Exclusive event-driven mode requires equal buffer and periodicity.
                    this.audioClient.Initialize(this.shareMode, AudioClientStreamFlags.EventCallback, num, num, this.outputFormat, Guid.Empty);
                }
                this.frameEventWaitHandle = new EventWaitHandle(false, EventResetMode.AutoReset);
                this.audioClient.SetEventHandle(this.frameEventWaitHandle.SafeWaitHandle.DangerousGetHandle());
            }
            else
            {
                this.audioClient.Initialize(this.shareMode, AudioClientStreamFlags.None, num, 0L, this.outputFormat, Guid.Empty);
            }
            this.renderClient = this.audioClient.AudioRenderClient;
        }
Esempio n. 50
0
 /// <summary>
 /// Creates an ACM MP3 Frame decompressor. This is the default with NAudio
 /// </summary>
 /// <param name="mp3Format">Wave format describing the MP3 data to decode.</param>
 /// <returns>A new ACM-based MP3 frame decompressor.</returns>
 public static IMp3FrameDecompressor CreateAcmFrameDecompressor(WaveFormat mp3Format)
 {
     // An alternative implementation would be new DmoMp3FrameDecompressor(mp3Format).
     return new AcmMp3FrameDecompressor(mp3Format);
 }
Esempio n. 51
0
        /// <summary>
        /// Opens MP3 from a stream rather than a file
        /// Will not dispose of this stream itself
        /// </summary>
        /// <param name="inputStream">The incoming stream containing MP3 data</param>
        /// <param name="frameDecompressorBuilder">Factory method to build a frame decompressor</param>
        public Mp3FileReader(Stream inputStream, FrameDecompressorBuilder frameDecompressorBuilder)
        {
            // Calculated as a double to minimize rounding errors

            mp3Stream = inputStream;
            // Reading the ID3v2 tag advances the stream past it, so frame
            // parsing starts at the audio data.
            id3v2Tag  = Id3v2Tag.ReadTag(mp3Stream);

            dataStartPosition = mp3Stream.Position;
            // NOTE(review): LoadFromStream can plausibly return null on a
            // truncated stream, which would throw below — confirm inputs.
            var    firstFrame = Mp3Frame.LoadFromStream(mp3Stream);
            double bitRate    = firstFrame.BitRate;

            xingHeader = XingHeader.LoadXingHeader(firstFrame);
            // If the header exists, we can skip over it when decoding the rest of the file
            if (xingHeader != null)
            {
                dataStartPosition = mp3Stream.Position;
            }

            // workaround for a longstanding issue with some files failing to load
            // because they report a spurious sample rate change
            var secondFrame = Mp3Frame.LoadFromStream(mp3Stream);

            if (secondFrame != null &&
                (secondFrame.SampleRate != firstFrame.SampleRate ||
                 secondFrame.ChannelMode != firstFrame.ChannelMode))
            {
                // assume that the first frame was some kind of VBR/LAME header that we failed to recognise properly
                dataStartPosition = secondFrame.FileOffset;
                // forget about the first frame, the second one is the first one we really care about
                firstFrame = secondFrame;
            }

            this.mp3DataLength = mp3Stream.Length - dataStartPosition;

            // try for an ID3v1 tag as well
            // ID3v1 tags are a fixed 128 bytes at the very end of the file.
            mp3Stream.Position = mp3Stream.Length - 128;
            byte[] tag = new byte[128];
            mp3Stream.Read(tag, 0, 128);
            if (tag[0] == 'T' && tag[1] == 'A' && tag[2] == 'G')
            {
                id3v1Tag            = tag;
                // The tag is not audio data, so exclude it from the data length.
                this.mp3DataLength -= 128;
            }

            mp3Stream.Position = dataStartPosition;

            // create a temporary MP3 format before we know the real bitrate
            this.Mp3WaveFormat = new Mp3WaveFormat(firstFrame.SampleRate, firstFrame.ChannelMode == ChannelMode.Mono ? 1 : 2, firstFrame.FrameLength, (int)bitRate);

            CreateTableOfContents();
            this.tocIndex = 0;

            // [Bit rate in Kilobits/sec] = [Length in kbits] / [time in seconds]
            //                            = [Length in bits ] / [time in milliseconds]

            // Note: in audio, 1 kilobit = 1000 bits.
            bitRate = (mp3DataLength * 8.0 / TotalSeconds());

            mp3Stream.Position = dataStartPosition;

            // now we know the real bitrate we can create an accurate
            this.Mp3WaveFormat  = new Mp3WaveFormat(firstFrame.SampleRate, firstFrame.ChannelMode == ChannelMode.Mono ? 1 : 2, firstFrame.FrameLength, (int)bitRate);
            decompressor        = frameDecompressorBuilder(Mp3WaveFormat);
            this.waveFormat     = decompressor.OutputFormat;
            this.bytesPerSample = (decompressor.OutputFormat.BitsPerSample) / 8 * decompressor.OutputFormat.Channels;
            // no MP3 frames have more than 1152 samples in them
            // some MP3s I seem to get double
            this.decompressBuffer = new byte[1152 * bytesPerSample * 2];
        }
Esempio n. 52
0
        /// <summary>
        /// Ensures valid AIFF header and then finds data offset.
        /// </summary>
        /// <param name="stream">The stream, positioned at the start of audio data</param>
        /// <param name="format">The format found</param>
        /// <param name="dataChunkPosition">The position of the data chunk</param>
        /// <param name="dataChunkLength">The length of the data chunk</param>
        /// <param name="chunks">Additional chunks found</param>
        /// <exception cref="FormatException">If the stream is not a valid,
        /// uncompressed AIFF/AIFC file with COMM and SSND chunks.</exception>
        public static void ReadAiffHeader(Stream stream, out WaveFormat format, out long dataChunkPosition, out int dataChunkLength, List <AiffChunk> chunks)
        {
            dataChunkPosition = -1;
            format            = null;
            BinaryReader br = new BinaryReader(stream);

            // An AIFF file is an IFF "FORM" container of type AIFF or AIFC.
            if (ReadChunkName(br) != "FORM")
            {
                throw new FormatException("Not an AIFF file - no FORM header.");
            }
            uint   fileSize = ConvertInt(br.ReadBytes(4));
            string formType = ReadChunkName(br);

            if (formType != "AIFC" && formType != "AIFF")
            {
                throw new FormatException("Not an AIFF file - no AIFF/AIFC header.");
            }

            dataChunkLength = 0;

            // Walk the chunk list until the end of the stream.
            while (br.BaseStream.Position < br.BaseStream.Length)
            {
                AiffChunk nextChunk = ReadChunkHeader(br);
                // An all-zero name signals padding/garbage; stop scanning.
                if (nextChunk.ChunkName == "\0\0\0\0")
                {
                    break;
                }

                // Truncated chunk: its declared length runs past the stream end.
                if (br.BaseStream.Position + nextChunk.ChunkLength > br.BaseStream.Length)
                {
                    break;
                }
                if (nextChunk.ChunkName == "COMM")
                {
                    // COMM layout: channels(2) + sampleFrames(4) + sampleSize(2)
                    // + sampleRate as an 80-bit IEEE extended float (10) = 18 bytes.
                    short  numChannels     = ConvertShort(br.ReadBytes(2));
                    uint   numSampleFrames = ConvertInt(br.ReadBytes(4));
                    short  sampleSize      = ConvertShort(br.ReadBytes(2));
                    double sampleRate      = IEEE.ConvertFromIeeeExtended(br.ReadBytes(10));

                    format = new WaveFormat((int)sampleRate, (int)sampleSize, (int)numChannels);

                    if (nextChunk.ChunkLength > 18 && formType == "AIFC")
                    {
                        // In an AIFC file, the compression format is tacked on to the COMM chunk
                        string compress = new string(br.ReadChars(4)).ToLower();
                        if (compress != "none")
                        {
                            throw new FormatException("Compressed AIFC is not supported.");
                        }
                        // Skip whatever remains of the chunk (18 read + 4 compression id).
                        br.ReadBytes((int)nextChunk.ChunkLength - 22);
                    }
                    else
                    {
                        // Skip whatever remains of the chunk beyond the 18 bytes read.
                        br.ReadBytes((int)nextChunk.ChunkLength - 18);
                    }
                }
                else if (nextChunk.ChunkName == "SSND")
                {
                    // SSND starts with offset(4) + blockSize(4) before the samples.
                    uint offset    = ConvertInt(br.ReadBytes(4));
                    uint blockSize = ConvertInt(br.ReadBytes(4));
                    dataChunkPosition       = nextChunk.ChunkStart + 16 + offset;
                    dataChunkLength         = (int)nextChunk.ChunkLength - 8;
                    br.BaseStream.Position += (nextChunk.ChunkLength - 8);
                }
                else
                {
                    // Unknown chunk: record it for the caller (if requested) and skip it.
                    if (chunks != null)
                    {
                        chunks.Add(nextChunk);
                    }
                    br.BaseStream.Position += nextChunk.ChunkLength;
                }
            }

            if (format == null)
            {
                throw new FormatException("Invalid AIFF file - No COMM chunk found.");
            }
            if (dataChunkPosition == -1)
            {
                throw new FormatException("Invalid AIFF file - No SSND chunk found.");
            }
        }
Esempio n. 53
0
        /// <summary>
        /// Initialize for playing the specified wave stream
        /// </summary>
        /// <param name="waveProvider">IWaveProvider to play</param>
        /// <exception cref="NotSupportedException">If no playable format can be
        /// negotiated with the device (thrown from GetFallbackFormat).</exception>
        public void Init(IWaveProvider waveProvider)
        {
            // Latency in 100-ns reference-time units.
            long latencyRefTimes = latencyMilliseconds * 10000;

            outputFormat = waveProvider.WaveFormat;
            // first attempt uses the WaveFormat from the WaveStream
            WaveFormatExtensible closestSampleRateFormat;

            if (!audioClient.IsFormatSupported(shareMode, outputFormat, out closestSampleRateFormat))
            {
                // Use closesSampleRateFormat (in sharedMode, it equals usualy to the audioClient.MixFormat)
                // See documentation : http://msdn.microsoft.com/en-us/library/ms678737(VS.85).aspx
                // They say : "In shared mode, the audio engine always supports the mix format"
                // The MixFormat is more likely to be a WaveFormatExtensible.
                if (closestSampleRateFormat == null)
                {
                    outputFormat = GetFallbackFormat();
                }
                else
                {
                    outputFormat = closestSampleRateFormat;
                }

                try
                {
                    // just check that we can make it.
                    using (new ResamplerDmoStream(waveProvider, outputFormat))
                    {
                    }
                }
                catch (Exception)
                {
                    // On Windows 10 some poorly coded drivers return a bad format in to closestSampleRateFormat
                    // In that case, try and fallback as if it provided no closest (e.g. force trying the mix format)
                    outputFormat = GetFallbackFormat();
                    // If this second attempt also throws, let it propagate to the caller.
                    using (new ResamplerDmoStream(waveProvider, outputFormat))
                    {
                    }
                }
                dmoResamplerNeeded = true;
            }
            else
            {
                dmoResamplerNeeded = false;
            }
            sourceProvider = waveProvider;

            // If using EventSync, setup is specific with shareMode
            if (isUsingEventSync)
            {
                // Init Shared or Exclusive
                if (shareMode == AudioClientShareMode.Shared)
                {
                    // With EventCallBack and Shared, both latencies must be set to 0 (update - not sure this is true anymore)
                    //
                    audioClient.Initialize(shareMode, AudioClientStreamFlags.EventCallback, latencyRefTimes, 0,
                                           outputFormat, Guid.Empty);

                    // Windows 10 returns 0 from stream latency, resulting in maxing out CPU usage later
                    var streamLatency = audioClient.StreamLatency;
                    if (streamLatency != 0)
                    {
                        // Get back the effective latency from AudioClient
                        latencyMilliseconds = (int)(streamLatency / 10000);
                    }
                }
                else
                {
                    // With EventCallBack and Exclusive, both latencies must equals
                    audioClient.Initialize(shareMode, AudioClientStreamFlags.EventCallback, latencyRefTimes, latencyRefTimes,
                                           outputFormat, Guid.Empty);
                }

                // Create the Wait Event Handle
                frameEventWaitHandle = new EventWaitHandle(false, EventResetMode.AutoReset);
                audioClient.SetEventHandle(frameEventWaitHandle.SafeWaitHandle.DangerousGetHandle());
            }
            else
            {
                // Normal setup for both sharedMode
                audioClient.Initialize(shareMode, AudioClientStreamFlags.None, latencyRefTimes, 0,
                                       outputFormat, Guid.Empty);
            }

            // Get the RenderClient
            renderClient = audioClient.AudioRenderClient;
        }
Esempio n. 54
0
 /// <summary>
 /// Allows you to specify the sample rate and channels for this WaveProvider
 /// (should be initialised before you pass it to a wave player)
 /// </summary>
 /// <param name="sampleRate">Sample rate in Hz.</param>
 /// <param name="channels">Number of channels.</param>
 public void SetWaveFormat(int sampleRate, int channels) =>
     this.waveFormat = WaveFormat.CreateIeeeFloatWaveFormat(sampleRate, channels);
Esempio n. 55
0
        // Token: 0x06000A5C RID: 2652 RVA: 0x0001E2C4 File Offset: 0x0001C4C4
        /// <summary>
        /// Determines whether the format is plain PCM or IEEE float, either
        /// directly via its encoding or via a WaveFormatExtensible sub-format.
        /// </summary>
        private static bool IsPcmOrIeeeFloat(WaveFormat waveFormat)
        {
            if (waveFormat.Encoding == WaveFormatEncoding.Pcm ||
                waveFormat.Encoding == WaveFormatEncoding.IeeeFloat)
            {
                return true;
            }

            // Extensible formats carry the real encoding in SubFormat.
            return waveFormat is WaveFormatExtensible ext &&
                   (ext.SubFormat == AudioSubtypes.MFAudioFormat_PCM ||
                    ext.SubFormat == AudioSubtypes.MFAudioFormat_Float);
        }
        /// <summary>
        /// Start recording on the device in the parameter.
        /// </summary>
        /// <param name="recordingDevice">the device to start recording</param>
        /// <returns>true if the recording is started, or false</returns>
        public bool StartRecordingSetDevice(MMDevice recordingDevice)
        {
            if (recordingDevice == null)
            {
                logger.Log(Properties.Strings.MessageBox_NoRecordingDevices);
                return false;
            }

            try
            {
                // Render endpoints are recorded via WASAPI loopback,
                // capture endpoints directly.
                if (recordingDevice.DataFlow == DataFlow.Render)
                {
                    soundIn = new CSCore.SoundIn.WasapiLoopbackCapture
                    {
                        Device = recordingDevice
                    };
                }
                else
                {
                    soundIn = new CSCore.SoundIn.WasapiCapture
                    {
                        Device = recordingDevice
                    };
                }

                soundIn.Initialize();
                soundInSource = new SoundInSource(soundIn)
                {
                    FillWithZeros = false
                };

                var selectedFormat = mainForm.GetSelectedStreamFormat();
                var convertMultiChannelToStereo = mainForm.GetConvertMultiChannelToStereo();

                // Build the conversion chain for the selected stream format,
                // then derive the NAudio format from the FINAL chain so the
                // advertised channel count matches the data. (The previous Wav
                // case captured the format before the optional stereo down-mix.)
                switch (selectedFormat)
                {
                case Classes.SupportedStreamFormat.Wav:
                    // CD-quality PCM: resample to 44.1 kHz, 16-bit.
                    convertedSource = soundInSource.ChangeSampleRate(44100).ToSampleSource().ToWaveSource(16);
                    if (convertMultiChannelToStereo)
                    {
                        convertedSource = convertedSource.ToStereo();
                    }
                    waveFormat = CreateNAudioFormat(WaveFormatEncoding.Pcm);
                    break;

                case Classes.SupportedStreamFormat.Mp3_320:
                case Classes.SupportedStreamFormat.Mp3_128:
                    // MP3 streaming always uses a stereo 16-bit source.
                    convertedSource = soundInSource.ToSampleSource().ToWaveSource(16).ToStereo();
                    waveFormat      = CreateNAudioFormat(WaveFormatEncoding.Pcm);
                    break;

                case Classes.SupportedStreamFormat.Wav_16bit:
                    convertedSource = soundInSource.ToSampleSource().ToWaveSource(16);
                    if (convertMultiChannelToStereo)
                    {
                        convertedSource = convertedSource.ToStereo();
                    }
                    waveFormat = CreateNAudioFormat(WaveFormatEncoding.Pcm);
                    break;

                case Classes.SupportedStreamFormat.Wav_24bit:
                    convertedSource = soundInSource.ToSampleSource().ToWaveSource(24);
                    if (convertMultiChannelToStereo)
                    {
                        convertedSource = convertedSource.ToStereo();
                    }
                    waveFormat = CreateNAudioFormat(WaveFormatEncoding.Pcm);
                    break;

                case Classes.SupportedStreamFormat.Wav_32bit:
                    convertedSource = soundInSource.ToSampleSource().ToWaveSource(32);
                    if (convertMultiChannelToStereo)
                    {
                        convertedSource = convertedSource.ToStereo();
                    }
                    // 32-bit audio is streamed as IEEE float rather than PCM.
                    waveFormat = CreateNAudioFormat(WaveFormatEncoding.IeeeFloat);
                    break;

                default:
                    break;
                }

                logger.Log($"Stream format set to {waveFormat.Encoding} {waveFormat.SampleRate} {waveFormat.BitsPerSample} bit");
                soundInSource.DataAvailable += OnDataAvailable;
                soundIn.Stopped             += OnRecordingStopped;
                soundIn.Start();

                isRecording = true;
                // Each buffer holds half a second of audio.
                bufferCaptured = new BufferBlock()
                {
                    Data = new byte[convertedSource.WaveFormat.BytesPerSecond / 2]
                };
                bufferSend = new BufferBlock()
                {
                    Data = new byte[convertedSource.WaveFormat.BytesPerSecond / 2]
                };

                eventThread = new Thread(EventThread)
                {
                    Name         = "Loopback Event Thread",
                    IsBackground = true
                };
                // WeakReference so the worker thread does not keep the recorder alive.
                eventThread.Start(new WeakReference <LoopbackRecorder>(this));

                return true;
            }
            catch (Exception ex)
            {
                logger.Log(ex, "Error initializing the recording device:");
            }

            return false;
        }

        /// <summary>
        /// Builds the NAudio wave format mirroring the current
        /// <c>convertedSource</c> format with the given encoding.
        /// </summary>
        private NAudio.Wave.WaveFormat CreateNAudioFormat(WaveFormatEncoding encoding)
        {
            var format = convertedSource.WaveFormat;
            return NAudio.Wave.WaveFormat.CreateCustomFormat(encoding, format.SampleRate, format.Channels, format.BytesPerSecond, format.BlockAlign, format.BitsPerSample);
        }
Esempio n. 57
0
 /// <summary>
 /// Configures the capture format as 32-bit IEEE float, using the
 /// sample rate and channel count fields already set on this instance.
 /// </summary>
 public void setFormat() =>
     wf = WaveFormat.CreateIeeeFloatWaveFormat(samplerate, chans);
Esempio n. 58
0
        /// <summary>
        /// Embeds the file at <paramref name="messagePath"/> into the WAV at
        /// <paramref name="sourcePath"/> using phase coding: for each message bit,
        /// a 4-sample window is FFT'd, two phase bins are forced to +/- pi/2, and
        /// the window is inverse-transformed and written to
        /// <paramref name="destinationPath"/>. Returns a debug trace string of the
        /// reconstructed windows.
        /// </summary>
        /// <param name="messagePath">File whose bytes are hidden (prefixed with its 4-byte length).</param>
        /// <param name="key">NOTE(review): read into keyStream but never used — confirm whether keying was ever implemented.</param>
        /// <param name="sourcePath">Carrier WAV file.</param>
        /// <param name="destinationPath">Output WAV with the embedded payload.</param>
        public static string Hide(string messagePath, string key, string sourcePath, string destinationPath)
        {
            string mes = "";


            using (var sourceStream = new FileStream(sourcePath, FileMode.Open))
                using (var keyStream = new MemoryStream(Encoding.UTF8.GetBytes(key)))
                    using (var destinationStream = new FileStream(destinationPath, FileMode.Create))
                        using (var reader = new WaveFileReader(sourceStream))
                        {
                            NAudio.Wave.WaveFormat format = reader.WaveFormat;
                            // Write a fresh 44-byte canonical WAV header sized from the carrier's sample count.
                            Encode.LSB.WriteWavHeader(destinationStream, false, (ushort)format.Channels, (ushort)format.BitsPerSample, format.SampleRate, (int)(reader.SampleCount));
                            byte[] data = new byte[4];
                            byte   message, bit;
                            int    messageBuffer; //receives the next byte of the message or -1

                            // Skip the carrier's 44-byte header; assumes a canonical header with no
                            // extra chunks before "data" — TODO confirm for arbitrary input WAVs.
                            reader.Seek(44, SeekOrigin.Begin);

                            byte[] file = File.ReadAllBytes(messagePath);

                            // Prefix the payload with its length so the decoder knows when to stop.
                            byte[] length = Encode.LSB.intToBytes(file.Length);

                            using (MemoryStream messageStream = new MemoryStream(Encode.LSB.Connect(length, file)))
                            {
                                // NOTE(review): `data != null` is always true here (assigned above, never nulled).
                                while ((messageBuffer = messageStream.ReadByte()) >= 0 && data != null)
                                {
                                    //read one byte of the message stream
                                    message = (byte)messageBuffer;
                                    //for each bit in [message]
                                    // NOTE(review): Stream.Read returns 0 (not -1) at end of stream, so the
                                    // `!= -1` guard never stops the loop at EOF and stale `data` would be
                                    // re-encoded — confirm the carrier is always long enough for the payload.
                                    for (int bitIndex = 0; bitIndex < 8 && (reader.Read(data, 0, data.Length) != -1); bitIndex++)
                                    {
                                        //mes += data[0] + " " + data[1] + " " + data[2] + " " + data[3] + " " + reader.Position + Environment.NewLine;
                                        //get the next bit from the current message byte...
                                        bit = (byte)(((message & (byte)(1 << bitIndex)) > 0) ? 1 : 0);
                                        //2 - lift the 4-sample window into the complex domain and FFT it
                                        Complex[] complexData = new Complex[data.Length];
                                        for (int i = 0; i < complexData.Length; i++)
                                        {
                                            complexData[i] = new Complex(data[i], 0);
                                        }

                                        Complex[] fftdata = FFT.FFT.fft(complexData);
                                        double[]  A       = new double[fftdata.Length];
                                        double[]  phi     = new double[fftdata.Length];

                                        // Split the spectrum into magnitude (A) and phase (phi).
                                        for (int i = 0; i < fftdata.Length; i++)
                                        {
                                            A[i]   = fftdata[i].Magnitude;
                                            phi[i] = fftdata[i].Phase;
                                        }
                                        //mes += phi[0] + Environment.NewLine;
                                        //3 - record the original phase differences between adjacent bins
                                        double[] deltaphi = new double[phi.Length - 1];
                                        for (int i = 0; i < deltaphi.Length; i++)
                                        {
                                            deltaphi[i] = phi[i + 1] - phi[i];
                                        }


                                        //4 - map the bit to a phase: bit 0 -> +pi/2, bit 1 -> -pi/2
                                        double phidata = Math.PI / 2 - Math.PI * bit;


                                        //5 - overwrite the first mirrored bin pair with the bit's phase
                                        phi[1] = phidata;
                                        phi[2] = -phidata;

                                        //6 - rebuild the remaining phases from the stored differences
                                        // NOTE(review): starting at i = 1 immediately overwrites the phi[1]/phi[2]
                                        // values just set in step 5 (phi[1] = phi[0] + deltaphi[1], etc.). Classic
                                        // phase coding would resume after the modified bins — confirm against the
                                        // matching extraction routine before changing.
                                        for (int i = 1; i < phi.Length - 1; i++)
                                        {
                                            phi[i] = phi[i - 1] + deltaphi[i];
                                        }
                                        //mes += phi[1] + Environment.NewLine;

                                        //reconstruct the signal from magnitude + modified phase via inverse FFT
                                        byte[]    reconstructed = new byte[data.Length];
                                        Complex[] ifft          = new Complex[phi.Length];
                                        for (int i = 0; i < phi.Length; i++)
                                        {
                                            ifft[i] = A[i] * (new Complex(Math.Cos(phi[i]), Math.Sin(phi[i])));
                                        }
                                        ifft = FFT.FFT.ifft(ifft);
                                        for (int i = 0; i < reconstructed.Length; i++)
                                        {
                                            // Truncate the real part back to a sample byte (imaginary part discarded).
                                            reconstructed[i] = (byte)ifft[i].Real;
                                        }
                                        mes += reconstructed[0] + " " + reconstructed[1] + " " + reconstructed[2] + " " + reconstructed[3] + " " + destinationStream.Position + Environment.NewLine;
                                        destinationStream.Write(reconstructed, 0, 4);
                                    }
                                }
                                // Copy the untouched remainder of the carrier after the embedded region.
                                reader.CopyTo(destinationStream);
                                destinationStream.Seek(0, SeekOrigin.Begin);

                                destinationStream.Flush();
                            }
                            return(mes);
                        }
        }
Esempio n. 59
0
        /// <summary>
        /// Validates an AIFF/AIFC container header and locates the audio sample data.
        /// </summary>
        /// <param name="stream">The stream, positioned at the start of the file</param>
        /// <param name="format">Receives the wave format described by the COMM chunk</param>
        /// <param name="dataChunkPosition">Receives the absolute position of the sample data</param>
        /// <param name="dataChunkLength">Receives the sample data length in bytes</param>
        /// <param name="chunks">Optional list that collects any unrecognized chunks</param>
        /// <exception cref="FormatException">Missing FORM/AIFF headers, compressed AIFC, or absent COMM/SSND chunks</exception>
        public static void ReadAiffHeader(Stream stream, out WaveFormat format, out long dataChunkPosition, out int dataChunkLength, List <AiffFileReader.AiffChunk> chunks)
        {
            format            = null;
            dataChunkPosition = -1L;
            dataChunkLength   = 0;
            var reader = new BinaryReader(stream);

            if (AiffFileReader.ReadChunkName(reader) != "FORM")
            {
                throw new FormatException("Not an AIFF file - no FORM header.");
            }
            AiffFileReader.ConvertInt(reader.ReadBytes(4)); // FORM size - not needed
            string formType = AiffFileReader.ReadChunkName(reader);

            if (formType != "AIFC" && formType != "AIFF")
            {
                throw new FormatException("Not an AIFF file - no AIFF/AIFC header.");
            }
            // Walk the chunk list until the stream is exhausted or a padding chunk appears.
            while (reader.BaseStream.Position < reader.BaseStream.Length)
            {
                AiffFileReader.AiffChunk chunk = AiffFileReader.ReadChunkHeader(reader);
                if (chunk.ChunkName == "COMM")
                {
                    short channelCount = AiffFileReader.ConvertShort(reader.ReadBytes(2));
                    AiffFileReader.ConvertInt(reader.ReadBytes(4)); // numSampleFrames - not needed
                    short bitsPerSample = AiffFileReader.ConvertShort(reader.ReadBytes(2));
                    // Sample rate is stored as an 80-bit IEEE extended float.
                    double sampleRate = IEEE.ConvertFromIeeeExtended(reader.ReadBytes(10));
                    format = new WaveFormat((int)sampleRate, (int)bitsPerSample, (int)channelCount);
                    if (chunk.ChunkLength > 18u && formType == "AIFC")
                    {
                        // AIFC appends a compression type id; only uncompressed ("NONE") is supported.
                        string compressionType = new string(reader.ReadChars(4)).ToLower();
                        if (compressionType != "none")
                        {
                            throw new FormatException("Compressed AIFC is not supported.");
                        }
                        reader.ReadBytes((int)(chunk.ChunkLength - 22u));
                    }
                    else
                    {
                        reader.ReadBytes((int)(chunk.ChunkLength - 18u));
                    }
                }
                else if (chunk.ChunkName == "SSND")
                {
                    uint ssndOffset = AiffFileReader.ConvertInt(reader.ReadBytes(4));
                    AiffFileReader.ConvertInt(reader.ReadBytes(4)); // block size - not needed
                    dataChunkPosition = (long)((ulong)(chunk.ChunkStart + 16u + ssndOffset));
                    dataChunkLength   = (int)(chunk.ChunkLength - 8u);
                    reader.ReadBytes((int)(chunk.ChunkLength - 8u));
                }
                else
                {
                    chunks?.Add(chunk);
                    reader.ReadBytes((int)chunk.ChunkLength);
                }
                // A zeroed chunk name marks trailing padding - stop scanning.
                if (chunk.ChunkName == "\0\0\0\0")
                {
                    break;
                }
            }
            if (format == null)
            {
                throw new FormatException("Invalid AIFF file - No COMM chunk found.");
            }
            if (dataChunkPosition == -1L)
            {
                throw new FormatException("Invalid AIFF file - No SSND chunk found.");
            }
        }
    /// <summary>
    /// Build all the temp files: ensures an mp3 (and, on failure, at least a wav)
    /// copy of the requested sound exists on disk, converting between formats as
    /// needed via LAME (Mp3Writer) with NAudio as a fallback format converter.
    /// Failures are reported through <c>ErrorMsg</c> rather than thrown.
    /// </summary>
    /// <remarks>
    /// One of the most hideous functions ever written.  Refactoring would be nice.
    /// </remarks>
    public void CreateTempFiles()
    {
        /* Dev links:
         *
         * http://localhost:82/player/epgcbha - clerks mp3 - id46
         *
         * http://localhost:82/player/dpccbha - beavis wav - id32
         *
         * http://localhost:82/player/dpbcbha - ace losers mp3 - id31
         *
         * http://192.168.1.117:82/player/fqavbha - borat wav - id50 - it is nice - won't convert to mp3
         *
         */


        dynamic theSound     = null;
        // NOTE(review): this MemoryStream (and the NAudio reader/converter below) are
        // never disposed; MemoryStream is harmless, but a using block would be cleaner.
        Stream  outputStream = new MemoryStream();

        if (_requestInfo.IsValid)   // Functions.IsNumeric(soundId))
        {
            int soundId = _requestInfo.SoundId;

            //
            // First, look in the cache and see if we have a converted mp3 there already
            //
            string targetMp3File = LocalMp3File;    //Functions.CombineElementsWithDelimiter("\\", _targetPath, string.Format("id{0}.mp3", soundId));
            string targetWavFile = LocalWavFile;    // Functions.CombineElementsWithDelimiter("\\", _targetPath, string.Format("id{0}.wav", soundId));

            //if (!NeedToCreateAtLeastOneFile(targetMp3File, targetWavFile))
            if (HaveAtLeastOneFile(targetMp3File, targetWavFile))
            {
                return;
            }
            else
            {
                //
                // OK, let's grab the sound data from the DB
                //
                theSound = GetSoundData(Functions.ConvertInt(soundId, -1));

                if (theSound != null && theSound.Id >= 0)
                {
                    //
                    // The DB returns HTML-ready bytes.  It would be more efficient to return the binary data.  Then we wouldn't have to do 2 conversions.
                    // Todo!
                    //
                    byte[] originalSourceByteArray = System.Convert.FromBase64String(theSound.Data);

                    if (Functions.IsWav(theSound.FileName))
                    {
                        bool successfulWavConversionToMp3 = false;

                        //
                        // It's an wav, convert to mp3
                        //
                        Mp3Writer outputMp3Writer = null;
                        // Mp3Writer outputFileMp3Writer = null;

                        //
                        // These are WaveLib.WaveStream objects that wrap the LAME thing
                        //
                        WaveStream waveDataToConvertToMp3    = null;
                        WaveStream convertedSourceWaveStream = null;
                        WaveStream originalSourceWaveStream  = new WaveStream(new MemoryStream(originalSourceByteArray));

                        // First attempt: feed the original WAV straight to LAME; the catch
                        // below is the expected path for formats LAME can't ingest.
                        try
                        {
                            outputMp3Writer        = new Mp3Writer(outputStream, originalSourceWaveStream.Format);
                            waveDataToConvertToMp3 = originalSourceWaveStream;
                        }
                        catch         // (Exception ex)
                        {
                            outputMp3Writer = null;

                            //
                            // The source WAV isn't compatible with the LAME thingy.  Let's try to convert it to something we know is usable with the NAudio thingy.
                            // Then we'll use the NAudio stuff to try to get the LAME stuff to work.
                            //
                            //MemoryStream tempMemStream = new MemoryStream();
                            int sampleRate = 16000;
                            int bitDepth   = 16;

                            //
                            // Note: there appears to be a bug in the LAME thing for files with 1 channel (mono).  The file plays at double speed.
                            //
                            int channels = 2;
                            NAudio.Wave.WaveFormat targetFormat = new NAudio.Wave.WaveFormat(sampleRate, bitDepth, channels);

                            // NOTE(review): neither this reader nor the conversion stream below is disposed.
                            NAudio.Wave.WaveStream stream = new NAudio.Wave.WaveFileReader(new MemoryStream(originalSourceByteArray));
                            NAudio.Wave.WaveFormatConversionStream str = null;

                            try
                            {
                                str = new NAudio.Wave.WaveFormatConversionStream(targetFormat, stream);
                            }
                            catch (Exception ex3)
                            {
                                //
                                // The borat "It is nice" WAV won't convert, has strange exception.  Todo: add logging and fix.
                                //
                                ErrorMsg = string.Format("Well, naudio can't convert the WAV to the target WAV format either: {0}", ex3.Message);
                            }

                            if (str != null)
                            {
                                //
                                // For lack of a better solution to get the bytes from the converted data into a "WaveStream" variable, use these
                                // available methods to write to a disk file and then open up the disk file.  The problem with directly converting
                                // with memory streams is that the required headers (like "RIFF") aren't written into the converted stream at this point.
                                //
                                NAudio.Wave.WaveFileWriter.CreateWaveFile(targetWavFile, str);
                                convertedSourceWaveStream = new WaveStream(targetWavFile);

                                //
                                // Now we have a correct WAV memory stream
                                //
                                try
                                {
                                    WaveFormat format = convertedSourceWaveStream.Format;
                                    outputMp3Writer        = new Mp3Writer(outputStream, format);
                                    waveDataToConvertToMp3 = convertedSourceWaveStream;
                                }
                                catch (Exception ex2)
                                {
                                    //
                                    // Crap, I think we're hosed
                                    //
                                    ErrorMsg = string.Format("Oops - second try - can't process this file: {0}", ex2.Message);
                                }
                            }
                        }


                        if (outputMp3Writer != null)
                        {
                            //
                            // If we're here, we've successfully created the MP3 converter from the WAV file, and our data stream
                            // is in the variable "waveDataToConvertToMp3"
                            //
                            try
                            {
                                // Pump the WAV data through LAME into the in-memory outputStream.
                                byte[] buff = new byte[outputMp3Writer.OptimalBufferSize];
                                int    read = 0;
                                while ((read = waveDataToConvertToMp3.Read(buff, 0, buff.Length)) > 0)
                                {
                                    outputMp3Writer.Write(buff, 0, read);
                                }

                                //
                                // We have mp3 bytes, write 'em
                                //
                                // FileStream outputMp3File = new FileStream(targetMp3File, FileMode.CreateNew);
                                //outputMp3File.Write(originalSourceByteArray, 0, originalSourceByteArray.Length);
                                using (Stream outputMp3File = File.OpenWrite(targetMp3File))
                                {
                                    outputStream.Position = 0;
                                    Functions.CopyStream(outputStream, outputMp3File);
                                }

                                successfulWavConversionToMp3 = true;
                            }
                            catch (Exception ex)
                            {
                                ErrorMsg = string.Format("Oops, fatal error: {0}", ex.Message);
                            }
                            finally
                            {
                                outputMp3Writer.Close();
                                waveDataToConvertToMp3.Close();
                            }
                        }

                        if (!successfulWavConversionToMp3)
                        {
                            //
                            // Well, everthing failed.  We have a WAV at least, let's go ahead and write that.
                            //
                            File.WriteAllBytes(targetWavFile, originalSourceByteArray);
                        }

                        //
                        // Let's clean this stuff up
                        //
                        originalSourceWaveStream.Close();

                        if (convertedSourceWaveStream != null)
                        {
                            convertedSourceWaveStream.Close();
                        }
                    }
                    else
                    {
                        // Source is already mp3: just persist the raw bytes to the cache file.
                        FileStream outputMp3File = null;
                        try
                        {
                            // FileMode.CreateNew throws if the file already exists (e.g. created
                            // by a concurrent request) - handled by the catch below.
                            outputMp3File = new FileStream(targetMp3File, FileMode.CreateNew);
                            //
                            // We have mp3 bytes, write 'em
                            //
                            outputMp3File.Write(originalSourceByteArray, 0, originalSourceByteArray.Length);
                        }
                        catch
                        {
                            // Maybe we have the file already by another thread?
                            ErrorMsg = "Have mp3, can't write to disk.";
                        }
                        finally
                        {
                            if (outputMp3File != null)
                            {
                                outputMp3File.Close();
                            }
                        }

                        /*
                         * Huge todo: this code works fine on Windows 7, but doesn't work on Windows 2008 Server.  The problem is two fold:
                         *
                         *  a) The two mp3 encoders installed by Win7 (l3codeca.acm and l3codecp.acm) don't exist.  I see no way to "install" them.
                         *      This site comes closes to explaining, but these reg keys do NOT exist so the solution doesn't work:
                         *      http://blog.komeil.com/2008/06/enabling-fraunhofer-mp3-codec-vista.html
                         *
                         *  b) The alternate option explained here:
                         *      http://stackoverflow.com/questions/5652388/naudio-error-nodriver-calling-acmformatsuggest/5659266#5659266
                         *      Also doesn't work since the COM object for the DMO isn't registered on Win2K8.  The Windows SDK is supposed to have it,
                         *      but it doesn't install properly on the server.  Also Win Media Player won't install with a weird "There is no update to Windows Media Player".
                         *
                         *  I've googled for a long time, but this is a tricky one.  Maybe Microsoft needs to be contacted?
                         *
                         * This is required to create WAV files for Firefox to play.  Otherwise you have to click on a link. (alt solution: embed?)
                         *
                         *                      NAudio.Wave.Mp3FileReader reader = null;
                         *                      try
                         *                      {
                         *                              //
                         *                              // Let's write the WAV bytes
                         *                              // http://hintdesk.com/c-mp3wav-converter-with-lamenaudio/
                         *                              //
                         *                              using (reader = new NAudio.Wave.Mp3FileReader(targetMp3File))
                         *                              {
                         *                                      using (NAudio.Wave.WaveStream pcmStream = NAudio.Wave.WaveFormatConversionStream.CreatePcmStream(reader))
                         *                                      {
                         *                                              NAudio.Wave.WaveFileWriter.CreateWaveFile(targetWavFile, pcmStream);
                         *                                      }
                         *                              }
                         *                      }
                         *                      catch
                         *                      {
                         *                              ErrorMsg = "Have mp3, can't convert to WAV";
                         *                      }
                         *                      finally
                         *                      {
                         *                              if (reader != null)
                         *                              {
                         *                                      reader.Close();
                         *                              }
                         *                      }
                         *
                         */
                    }
                }
                else
                {
                    ErrorMsg = "Cannot get sound id";
                }
            }
        }
        else
        {
            ErrorMsg = string.Format("Invalid request parameter: {0}", _encryptedPlayerRequest);
        }
    }