Example #1
        public void InitializeRecorder(String recordDirectory, String siteID)
        {
            this.recordDirectory = recordDirectory;
            this.siteID          = siteID;
            var fileName = siteID + "_" + DateTime.Now.ToString("yyyyMMddHHmm") + "_" + field.ToString();

            indOfNextChannelToWrite = 0;

            imageFullFileName    = Path.Combine(recordDirectory, $"{fileName}.mkv");
            audioFileWriters     = new List <WaveFileWriter>();
            audioBufferToRecord  = new List <WaveInEventArgs>();
            qAudioBufferToRecord = new Queue <WaveInEventArgs>();
            qVideoBufferToRecord = new Queue <Capture>();
            var wf_original = audioCaptureDevice.WaveFormat;
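            // Derive a single-channel format from the capture device's format:
            // same encoding, sample rate and bit depth, with the byte rate and block align divided by the channel count.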
            var wf          = WaveFormat.CreateCustomFormat(
                wf_original.Encoding,
                wf_original.SampleRate,
                1,
                wf_original.AverageBytesPerSecond / wf_original.Channels,
                wf_original.BlockAlign / wf_original.Channels,
                wf_original.BitsPerSample);

            for (int i = 0; i < wf_original.Channels; i++)
            {
                String         audioFullFileName = Path.Combine(recordDirectory, $"{fileName}_channel_{i}.wav");
                WaveFileWriter writer            = new WaveFileWriter(audioFullFileName, wf);
                audioFileWriters.Add(writer);
            }
            flushTimer           = new System.Timers.Timer(1000);
            flushTimer.Elapsed  += ShouldFlushCallBack;
            flushTimer.AutoReset = true;
            flushTimer.Enabled   = true;
        }
Example #2
        public WaveFormat GetWaveFormat(uint index)
        {
            if (index >= bankData.EntryCount)
            {
                throw new ArgumentOutOfRangeException("index");
            }

            var minFormat = GetMiniWaveFormat(index);

            switch (minFormat.FormatTag)
            {
            case MiniWaveFormat.Tag.PCM:
                return(new WaveFormat((int)minFormat.SamplesPerSecond, (int)minFormat.GetBitsPerSample(), (int)minFormat.Channels));

            case MiniWaveFormat.Tag.ADPCM:
                return(new WaveFormat((int)minFormat.SamplesPerSecond, (int)minFormat.Channels, (int)minFormat.GetBlockAlign()));

            case MiniWaveFormat.Tag.WMA:     // xWMA is supported by XAudio 2.7 and by Xbox One
                return(WaveFormat.CreateCustomFormat((minFormat.BitsPerSample & 0x1) > 0 ? WaveFormatEncoding.Wmaudio3 : WaveFormatEncoding.Wmaudio2,
                                                     (int)minFormat.SamplesPerSecond,
                                                     (int)minFormat.Channels,
                                                     (int)minFormat.GetAvgerageBytesPerSecond(),
                                                     (int)minFormat.GetBlockAlign(),
                                                     (int)minFormat.GetBitsPerSample()));

            case MiniWaveFormat.Tag.XMA:
            default:
                throw new NotSupportedException("Wave format not supported.");
            }
        }
Example #3
            public WaveProvider(Func <bool> getData, WaveSinkFilter filter)
            {
                int channels       = filter.OutFormat.Channels;
                int bytesPerSample = filter.OutFormat.BytesPerSample;
                int sampleRate     = filter.OutFormat.SampleRate;

                m_waveFormat = WaveFormat.CreateCustomFormat(
                    WaveFormatEncoding.Pcm,
                    sampleRate,
                    channels,
                    channels * bytesPerSample * sampleRate,
                    channels * bytesPerSample,
                    bytesPerSample * 8
                    );
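                // The standard PCM constructor below produces an equivalent format and overwrites the custom one above.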
                m_waveFormat = new WaveFormat(
                    sampleRate,
                    bytesPerSample * 8,
                    channels
                    );

                m_getData = getData;
                m_filter  = filter;

                m_blockSize = m_filter.OutFormat.Channels * m_filter.OutFormat.BytesPerSample;
            }
Example #4
        public Sound(IntPtr formHandle)
        {
            directSound = new DirectSound();
            directSound.SetCooperativeLevel(formHandle, CooperativeLevel.Normal);

            // WAVEFORMATEX Structure (from Microsoft Documentation on DirectSound)
            // https://docs.microsoft.com/en-us/previous-versions/windows/desktop/ee419019(v%3dvs.85)
            var wFormatTag      = WaveFormatEncoding.Pcm;
            var nSamplesPerSec  = 44100;
            var nChannels       = 1;
            var wBitsPerSample  = 16;                               // (short)
            var nBlockAlign     = (nChannels * wBitsPerSample) / 8; // nBlockAlign must be equal to the product of nChannels and wBitsPerSample divided by 8 (bits per byte)
            var nAvgBytesPerSec = nSamplesPerSec * nBlockAlign;     // nAvgBytesPerSec should be equal to the product of nSamplesPerSec and nBlockAlign

            waveFormat = WaveFormat.CreateCustomFormat(
                tag: wFormatTag,
                sampleRate: nSamplesPerSec,
                channels: nChannels,
                averageBytesPerSecond: nAvgBytesPerSec,
                blockAlign: nBlockAlign,
                bitsPerSample: wBitsPerSample
                );

            var bufferDesc = new SoundBufferDescription();

            bufferDesc.Format      = waveFormat;
            bufferDesc.BufferBytes = Convert.ToInt32(
                bufferDuration.TotalSeconds * waveFormat.AverageBytesPerSecond / waveFormat.Channels);

            buffer = new SecondarySoundBuffer(directSound, bufferDesc);

            int numSamples = buffer.Capabilities.BufferBytes / waveFormat.BlockAlign;

            samples = new short[numSamples];
        }
Example #5
        public void Start()
        {
            var location  = System.Reflection.Assembly.GetEntryAssembly().Location;
            var directory = Path.GetDirectoryName(location);
            var filename  = "Recordings/linear.wav";
            var path      = string.Format(@"{0}/{1}", directory, filename);
            var format    = WaveFormat.CreateCustomFormat(WaveFormatEncoding.Pcm, 8000, 1, 16000, 2, 16);

            client = SpeechRecognitionServiceFactory.CreateDataClient(SpeechRecognitionMode.LongDictation, "en-US", key1);

            client.OnConversationError       += Client_OnConversationError;
            client.OnIntent                  += Client_OnIntent;
            client.OnMicrophoneStatus        += Client_OnMicrophoneStatus;
            client.OnPartialResponseReceived += Client_OnPartialResponseReceived;
            client.OnResponseReceived        += Client_OnResponseReceived;

            client.AudioStart();
            client.SendAudioFormat(new SpeechAudioFormat
            {
                AverageBytesPerSecond = 16000,
                BitsPerSample         = 16,
                BlockAlign            = 2,
                ChannelCount          = 1,
                EncodingFormat        = AudioCompressionType.PCM,
                SamplesPerSecond      = 8000
            });

            IsReady = true;
        }
Example #6
        /// <summary>
        /// Configures NAudio for audio capture from either the default or specified audio device index
        /// </summary>
        /// <param name="deviceNumber">Desired Capture Device Index; Default is 0</param>
        private void Initialize_NAudio(int deviceNumber = 0)
        {
            // If EventBasedAudio is already initialized, dispose
            if (EventBasedAudio != null)
            {
                EventBasedAudio.Dispose();
            }

            EventBasedAudio = new WaveInEvent();
            EventBasedAudio.DeviceNumber = deviceNumber;

            EventBasedAudio.WaveFormat = WaveFormat.CreateCustomFormat(WaveFormatEncoding.Pcm, 8000, 1, 16000, 2, 16); // 8 kHz 16-bit mono PCM: byte rate = 8000 * 2 = 16000

            BufferedAudioProvider = new BufferedWaveProvider(EventBasedAudio.WaveFormat);
            BufferedAudioProvider.BufferDuration = TimeSpan.FromSeconds(1);

            Azure_NAudio_Callback = new NAudioCompatibleAudioCallback(ref BufferedAudioProvider, ref BufferedAudioProvider_LockObject);

            EventBasedAudio.DataAvailable += (s, e) =>
            {
                lock (BufferedAudioProvider_LockObject)
                {
                    BufferedAudioProvider.AddSamples(e.Buffer, 0, e.BytesRecorded);
                }

                if (AudioFileWriter != null)
                {
                    AudioFileWriter.Write(e.Buffer, 0, e.BytesRecorded);
                }
            };
        }
Example #7
        private void OnStartRecordingClick(object sender, EventArgs e)
        {
            // Set up the recorder
            // Parameters, in order: format, sample rate, channels, average bytes per second, block align (bytes per sample frame), bits per sample
            _recorder = new WaveIn {
                WaveFormat = WaveFormat.CreateCustomFormat(WaveFormatEncoding.Pcm, 8000, 1, 16000, 2, 16)
            };
            _recorder.DataAvailable += RecorderOnDataAvailable;

            // Set up our signal chain
            _bufferedWaveProvider = new BufferedWaveProvider(_recorder.WaveFormat);

            _fileName           = Path.Combine("temp", Guid.NewGuid() + ".pcm");
            _savingWaveProvider = new SavingWaveProvider(_bufferedWaveProvider, _fileName);



            // Set up playback
            _player = new WaveOut();
            _player.Init(_savingWaveProvider);

            // Start playback and recording
            _player.Play();
            _recorder.StartRecording();

            Log("Recording started");
        }
Example #8
        /// Starts recording.
        private void startRecording()
        {
            // Prepare for recording
            if (mCapture == null)
            {
                MessageBox.Show(this, "録音デバイスが設定されていません", "エラー", MessageBoxButtons.OK, MessageBoxIcon.Error);
                return;
            }
            if (mChannelInfo == null)
            {
                MessageBox.Show(this, "録音チャネルが設定されていません", "エラー", MessageBoxButtons.OK, MessageBoxIcon.Error);
                return;
            }
            var f = mCapture.WaveFormat;

            mRecordingFormat = WaveFormat.CreateCustomFormat(
                f.Encoding,
                f.SampleRate,
                mChannelInfo.Channels, //f.Channels,
                (int)((float)f.AverageBytesPerSecond / f.Channels * mChannelInfo.Channels),
                (int)((float)f.BlockAlign / f.Channels * mChannelInfo.Channels),
                f.BitsPerSample
                );
            mWriter = new WaveFileWriter("a.wav", mRecordingFormat);

            // Update UI
            startRecordingButton.Enabled    = false;
            stopRecordingButton.Enabled     = true;
            inputDeviceListSelector.Enabled = false;
            inputChannelSelector.Enabled    = false;

            // Start recording
            mCapture.StartRecording();
        }
Example #9
        bool InitializeDirectSound(IntPtr hwnd)
        {
            try {
                directSound = new SharpDX.DirectSound.DirectSound();
                directSound.SetCooperativeLevel(hwnd, CooperativeLevel.Priority);

                var soundBufferDescription = new SoundBufferDescription {
                    Flags          = BufferFlags.PrimaryBuffer | BufferFlags.ControlVolume,
                    BufferBytes    = 0,
                    Format         = null,
                    AlgorithmFor3D = Guid.Empty
                };

                primaryBuffer = new PrimarySoundBuffer(directSound, soundBufferDescription);

                var samplesPerSec   = 44100;
                var bitsPerSample   = 16;
                var nChannels       = 2;
                var blockAlign      = bitsPerSample / 8 * nChannels;
                var nAvgBytesPerSec = samplesPerSec * blockAlign;
                var waveFormat      = WaveFormat.CreateCustomFormat(
                    WaveFormatEncoding.Pcm,
                    samplesPerSec,
                    nChannels,
                    nAvgBytesPerSec,
                    blockAlign,
                    bitsPerSample
                    );

                primaryBuffer.Format = waveFormat;
            } catch { return(false); }
            return(true);
        }
Example #10
        private void InitWaveOut(EVLVoiceFormat format)
        {
            if (mWaveOut != null)
            {
                Stop();
            }
            WaveFormat waveFormat;

            switch (format)
            {
            case EVLVoiceFormat.MT_MSGSM:
                waveFormat = WaveFormat.CreateCustomFormat(WaveFormatEncoding.Pcm, 8000, 1, 16000, 2, 16);
                break;

            case EVLVoiceFormat.MT_MP3_32K_STEREO:
                waveFormat = WaveFormat.CreateCustomFormat(WaveFormatEncoding.Pcm, 8000, 2, 32000, 4, 16);
                break;

            default:
                Debug("Format not support");
                return;
            }
            mWaveOut = new WaveOut();
            mBufferedWaveProvider = new BufferedWaveProvider(waveFormat);
            mWaveOut.Init(mBufferedWaveProvider);
        }
Example #11
        public NAudioSinkStream(IAudioStream sourceStream)
        {
            AudioProperties sourceProperties = sourceStream.Properties;

            if (sourceProperties.Format == AudioFormat.LPCM)
            {
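                // Integer PCM source: byte rate = sample rate * channels * bytes per sample, block align = channels * bytes per sample.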
                waveFormat = WaveFormat.CreateCustomFormat(
                    WaveFormatEncoding.Pcm,
                    sourceProperties.SampleRate,
                    sourceProperties.Channels,
                    sourceProperties.SampleRate * sourceProperties.Channels * sourceProperties.SampleByteSize,
                    sourceProperties.Channels * sourceProperties.SampleByteSize, sourceProperties.BitDepth);
            }
            else if (sourceProperties.Format == AudioFormat.IEEE)
            {
                waveFormat = WaveFormat.CreateCustomFormat(
                    WaveFormatEncoding.IeeeFloat,
                    sourceProperties.SampleRate,
                    sourceProperties.Channels,
                    sourceProperties.SampleRate * sourceProperties.Channels * sourceProperties.SampleByteSize,
                    sourceProperties.Channels * sourceProperties.SampleByteSize, sourceProperties.BitDepth);
            }
            else
            {
                throw new ArgumentException("unsupported source format: " + sourceProperties.ToString());
            }

            this.sourceStream = sourceStream;
        }
Example #12
        private void PlaySound(Stream sound)
        {
            DirectSound ds = new DirectSound();

            ds.SetCooperativeLevel(this.Handle, CooperativeLevel.Priority);


            WaveFormat format = WaveFormat.CreateCustomFormat(
                WaveFormatEncoding.Pcm,
                44100,
                2,
                4 * 44100,
                4,
                16
                );

            SoundBufferDescription primaryDesc = new SoundBufferDescription();

            primaryDesc.Format      = format;
            primaryDesc.Flags       = BufferFlags.GlobalFocus;
            primaryDesc.BufferBytes = 8 * 4 * 44100;
            PrimarySoundBuffer pBuffer = new PrimarySoundBuffer(ds, primaryDesc);

            SoundBufferDescription secondDesc = new SoundBufferDescription();

            secondDesc.Format      = format;
            secondDesc.Flags       = BufferFlags.GlobalFocus | BufferFlags.ControlPositionNotify | BufferFlags.GetCurrentPosition2;
            secondDesc.BufferBytes = 8 * 4 * 44100;

            SecondarySoundBuffer secondBuffer = new SecondarySoundBuffer(ds, secondDesc);

            secondBuffer.Write(sound.ReadAll(), 0, LockFlags.None);
            secondBuffer.Play(0, PlayFlags.None);
        }
Example #13
        public void CanConvertPcmToALaw()
        {
            int channels   = 1;
            int sampleRate = 8000;

            CanCreateConversionStream(
                new WaveFormat(sampleRate, 16, channels),
                WaveFormat.CreateCustomFormat(WaveFormatEncoding.ALaw, sampleRate, channels, sampleRate * channels, 1, 8));
        }
Example #14
        public void CanConvertMuLawToPcm()
        {
            int channels   = 1;
            int sampleRate = 8000;

            CanCreateConversionStream(
                WaveFormat.CreateCustomFormat(WaveFormatEncoding.MuLaw, sampleRate, channels, sampleRate * channels, 1, 8),
                new WaveFormat(sampleRate, 16, channels));
        }
Example #15
        // note: I/O functions do not use the Offset field in Properties

        static public Sound Read(Stream source, Properties prop)
        {
            Sound result = new Sound();

            byte[] data = ReadRaw(source, prop);

            // get the true sample rate
            int sampleRate = prop.Frequency;

            sampleRate = (int)((sampleRate * 44100L) / 60216L);

            // decode the sound and quantize to 16 bits stereo
            switch (prop.SampleType & 0xC)
            {
            case 0x0:
                data = Decode8(data);
                break;

            case 0x4:
                data = Decode16(data);
                break;

            case 0x8:
                data = Decode4(data);
                break;
            }

            // 16-bit stereo format
            result.Format = WaveFormat.CreateCustomFormat(WaveFormatEncoding.Pcm, sampleRate, 2, sampleRate * 4, 4, 16);
            result.Data   = data;

            // determine volume
            result.Volume = VolumeTable[prop.Volume];

            // determine panning
            int panValue = prop.Panning & 0xF;

            if (panValue < 1)
            {
                panValue = 8;
            }
            if (panValue > 15)
            {
                panValue = 8;
            }
            result.Panning = (float)(panValue - 1) / 14f;

            // flip stereo
            result.Panning = 1.0f - result.Panning;

            // mark linear volume (since we converted it already)
            result.VolumeIsLinear = true;

            // return the final result
            return(result);
        }
Example #16
 public static WaveFormat ToWaveFormat(this AudioFormat audioFormat)
 {
     return(WaveFormat.CreateCustomFormat(
                (WaveFormatEncoding)Enum.Parse(typeof(WaveFormatEncoding), audioFormat.Encoding),
                audioFormat.SampleRate,
                audioFormat.Channels,
                audioFormat.AverageBytesPerSecond,
                audioFormat.BlockAlign,
                audioFormat.BitsPerSample));
 }
Example #17
 public AudioService(DiscordSocketClient client)
 {
     _client       = client;
     _tcs          = new TaskCompletionSource <bool>();
     _disposeToken = new CancellationTokenSource();
     WebClient     = new WebClient();
     YouTube       = new YoutubeClient();
     OutFormat     = WaveFormat.CreateCustomFormat(WaveFormatEncoding.Pcm, 48000, 2, 192000, 4, 16);
     Failed        = Skip = Exit = false;
 }
Example #18
        public void Init(EVLVoiceFormat format, float volume)
        {
            SubDebug(string.Format("Call NMonPlayer init,wave format:{0}", format));
            if (mWaveOut != null)
            {
                mWaveOut.Stop();
                mWaveOut.Dispose();
            }
            try
            {
                mWaveOut = new WaveOut();
                mFormat  = format;
                switch (mFormat)
                {
                case EVLVoiceFormat.MT_MSGSM:
                    mPcmWaveFormat = WaveFormat.CreateCustomFormat(WaveFormatEncoding.Pcm, 8000, 1, 16000, 2, 16);
                    break;

                case EVLVoiceFormat.MT_MP3_32K_STEREO:
                    mPcmWaveFormat = WaveFormat.CreateCustomFormat(WaveFormatEncoding.Pcm, 8000, 2, 32000, 4, 16);
                    break;

                default:
                    SubDebug(string.Format("Format not support,source wave format:{0}", mFormat));
                    return;
                }
                mBufferedWaveProvider = new BufferedWaveProvider(mPcmWaveFormat);
                mBufferedWaveProvider.DiscardOnBufferOverflow = true;
                mWaveOut.Init(mBufferedWaveProvider);
                if (volume >= 0 && volume <= 1)
                {
                    mWaveOut.Volume = volume;
                }
                else if (volume >= 2 && volume <= 3)
                {
                    volume = volume - 2;
                    mWaveOut.LeftVolume = volume;
                }
                else if (volume >= 4 && volume <= 5)
                {
                    volume = volume - 4;
                    mWaveOut.RightVolume = volume;
                }
                else
                {
                    SubDebug(string.Format("Volume invalid"));
                    mWaveOut.Volume = 1;
                }
                SubDebug(string.Format("WaveOut initialed,\tPlay wave format:{0}", mPcmWaveFormat));
            }
            catch (Exception ex)
            {
                SubDebug(string.Format("Wave out init fail.\t{0}", ex.Message));
            }
        }
Example #19
        public void initialize(int samplesPerSecond, int bytesPerSample, int nrChannels,
                               int bufferSizeBytes)
        {
            try
            {
                if (directSound == null)
                {
                    directSound = new DirectSound();
                    directSound.SetCooperativeLevel(owner.Handle, CooperativeLevel.Priority);
                }

                releaseResources();

                this.bufferSizeBytes  = bufferSizeBytes;
                this.bytesPerSample   = bytesPerSample;
                this.samplesPerSecond = samplesPerSecond;
                this.nrChannels       = nrChannels;

                SoundBufferDescription desc = new SoundBufferDescription();
                desc.BufferBytes = bufferSizeBytes;
                desc.Flags       = BufferFlags.Defer | BufferFlags.GlobalFocus |
                                   BufferFlags.ControlVolume | BufferFlags.ControlFrequency |
                                   BufferFlags.GetCurrentPosition2;

                //desc.AlgorithmFor3D = Guid.Empty;

                int blockAlign            = nrChannels * bytesPerSample;
                int averageBytesPerSecond = samplesPerSecond * blockAlign;

                WaveFormat format = WaveFormat.CreateCustomFormat(WaveFormatEncoding.Pcm,
                                                                  samplesPerSecond, nrChannels, averageBytesPerSecond, blockAlign, bytesPerSample * 8);

                desc.Format = format;

                silence = new char[bufferSizeBytes];
                Array.Clear(silence, 0, silence.Length);

                audioBuffer = new SecondarySoundBuffer(directSound, desc);

                Volume      = volume;
                offsetBytes = 0;
                prevPlayPos = 0;
                ptsPos      = 0;
                prevPtsPos  = 0;
                playLoops   = 0;
                ptsLoops    = 0;

                //log.Info("Direct Sound Initialized");
            }
            catch (Exception e)
            {
                throw new VideoPlayerException("Error initializing Direct Sound: " + e.Message, e);
            }
        }
Example #20
        public void StartAudioStream(Audio audio)
        {
            wave.DeviceNumber = audio.DevNumber;
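            // Block align = channels * bytes per sample; byte rate = sample rate * block align.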
            int blockAlign            = (audio.Channels * (bitsPerSample / 8));
            int averageBytesPerSecond = sampleRate * blockAlign;
            var waveFormat            = WaveFormat.CreateCustomFormat(WaveFormatEncoding.Pcm, sampleRate, audio.Channels, averageBytesPerSecond, blockAlign, bitsPerSample);

            wave.WaveFormat     = waveFormat;
            wave.DataAvailable += Wave_DataAvailable;
            wave.StartRecording();
        }
Example #21
        public AudioBuffers(int sampleRate, int channels)
        {
            mics       = new List <short[]>();
            SampleRate = sampleRate;
            mics.Add(new short[SampleRate / 10]);
            mics.Add(new short[SampleRate / 10]);
            Channels     = channels;
            firsttime    = true;
            OutputSignal = new double[mics[0].Length];

            Buffers        = new List <short[]>();
            ShiftedBuffers = new List <double[]>();
            combinedOutput = new List <double>();

            for (int j = 0; j < 5; j++)
            {
                Buffers.Add(new short[SampleRate / 10 / 2]);
                ShiftedBuffers.Add(new double[SampleRate / 10 / 2]);
            }

            WaveInDevices = WaveIn.DeviceCount;
            for (int waveInDevice = 0; waveInDevice < WaveInDevices; waveInDevice++)
            {
                WaveInCapabilities deviceInfo = WaveIn.GetCapabilities(waveInDevice);
                Console.WriteLine("Device {0}: {1}, {2} channels",
                                  waveInDevice, deviceInfo.ProductName, deviceInfo.Channels);
            }

            WaveInVar = new WaveInEvent();
            WaveInVar.DeviceNumber   = 0; //Set to default
            WaveInVar.DataAvailable += waveIn_DataAvailable;
            WaveInVar.WaveFormat     = new WaveFormat(SampleRate, 16, Channels);
            Console.WriteLine(WaveInVar.WaveFormat.AverageBytesPerSecond);
            string beamformer = "C:/Users/Nickl/Aalborg Universitet/OneDrive - Aalborg Universitet/3rdtryBeamformer/onlyBeamformer/" + Program.filenameFILE + "_beamformer.wav";
            string micro1     = "C:/Users/Nickl/Aalborg Universitet/OneDrive - Aalborg Universitet/3rdtryBeamformer/" + Program.filenameFILE + "_micro1.wav";
            string micro2     = "C:/Users/Nickl/Aalborg Universitet/OneDrive - Aalborg Universitet/3rdtryBeamformer/" + Program.filenameFILE + "_micro2.wav";
            string combined   = "C:/Users/Nickl/Aalborg Universitet/OneDrive - Aalborg Universitet/3rdtryBeamformer/" + Program.filenameFILE + "_combined.wav";



            writer = new WaveFileWriter(beamformer,
                                        WaveFormat.CreateCustomFormat(WaveInVar.WaveFormat.Encoding, SampleRate, 1,
                                                                      WaveInVar.WaveFormat.AverageBytesPerSecond, (1 * WaveInVar.WaveFormat.BitsPerSample) / 16, 16));
            writerOriginal1 = new WaveFileWriter(micro1,
                                                 WaveFormat.CreateCustomFormat(WaveInVar.WaveFormat.Encoding, SampleRate, 1,
                                                                               WaveInVar.WaveFormat.AverageBytesPerSecond, (1 * WaveInVar.WaveFormat.BitsPerSample) / 16, 16));
            writerOriginal2 = new WaveFileWriter(micro2,
                                                 WaveFormat.CreateCustomFormat(WaveInVar.WaveFormat.Encoding, SampleRate, 1,
                                                                               WaveInVar.WaveFormat.AverageBytesPerSecond, (1 * WaveInVar.WaveFormat.BitsPerSample) / 16, 16));
            combinedWriter = new WaveFileWriter(combined,
                                                WaveFormat.CreateCustomFormat(WaveInVar.WaveFormat.Encoding, SampleRate, 1,
                                                                              WaveInVar.WaveFormat.AverageBytesPerSecond, (1 * WaveInVar.WaveFormat.BitsPerSample) / 16, 16));
        }
Example #22
        private static void SetupSoundOutput(libspotify.sp_audioformat format)
        {
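            // Build a 16-bit PCM format from the sample rate and channel count reported by libspotify.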
            const int bitsPerSample         = 16;
            int       blockAlign            = (format.channels * (bitsPerSample / 8));
            int       averageBytesPerSecond = format.sample_rate * blockAlign;
            var       waveFormat            = WaveFormat.CreateCustomFormat(WaveFormatEncoding.Pcm, format.sample_rate, format.channels,
                                                                            averageBytesPerSecond, blockAlign, bitsPerSample);

            soundBuffer = new BufferedWaveProvider(waveFormat);
            soundBuffer.BufferDuration = TimeSpan.FromSeconds(10);
            waveOut.Init(soundBuffer);
        }
Example #23
        public WaveFormat DequeueWaveFormat()
        {
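            // The six WaveFormat fields are dequeued in alphabetical order and reassembled into a custom format.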
            int averageBytesPerSecond = DequeueInt();
            int bitsPerSample         = DequeueInt();
            int blockAlign            = DequeueInt();
            int channels = DequeueInt();
            WaveFormatEncoding encoding = DequeueWaveFormatEncoding();
            int sampleRate = DequeueInt();

            return(WaveFormat.CreateCustomFormat(encoding, sampleRate,
                                                 channels, averageBytesPerSecond, blockAlign, bitsPerSample));
        }
Example #24
        public WaveStreamImpl(GameRes.SoundInput input)
        {
            m_input = input;
            var format = m_input.Format;

            m_format = WaveFormat.CreateCustomFormat((WaveFormatEncoding)format.FormatTag,
                                                     (int)format.SamplesPerSecond,
                                                     format.Channels,
                                                     (int)format.AverageBytesPerSecond,
                                                     format.BlockAlign,
                                                     format.BitsPerSample);
        }
Example #25
        public BPCMWaveProvider(BitstreamReader stream, double srf = 1)
        {
            streamBPCM   = stream;
            frame0       = streamBPCM.Analysis.FrameSet[0];
            currentFrame = frame0;
            vol          = 1;
            srfactor     = srf;
            int speedSR = (int)Math.Round(frame0.SamplingRate * srf, 0);
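            // 16-bit PCM: block align = channels * 2 bytes, byte rate = speed-adjusted sample rate * block align.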

            waveFormat = WaveFormat.CreateCustomFormat(WaveFormatEncoding.Pcm, speedSR, frame0.Channels, speedSR * frame0.Channels * 2, frame0.Channels * 2, 16);
            rb         = new RingBuffer(2097152);
            tsOffset   = 0;
        }
Example #26
        public void PrepareWrite(EVLVoiceFormat format, bool isSrcWriteFile, bool isPcmWriteFile, string wavePath)
        {
            SubDebug(string.Format("Call PrepareWrite,source wave format:{0}", format));
            Stop();
            mFormat = format;
            switch (mFormat)
            {
            case EVLVoiceFormat.MT_MSGSM:
                mSrcWaveFormat = new Gsm610WaveFormat();
                break;

            case EVLVoiceFormat.MT_MP3_32K_STEREO:
                mSrcWaveFormat = new Mp3WaveFormat(8000, 2, 288, 32000);
                break;

            default:
                SubDebug(string.Format("Format not support,source wave format:{0}", format));
                return;
            }
            if (mSrcWaveFormat.Channels == 2)
            {
                mPcmWaveFormat = WaveFormat.CreateCustomFormat(WaveFormatEncoding.Pcm, 8000, 2, 32000, 4, 16);
                //mPcmWaveFormat = WaveFormat.CreateCustomFormat(WaveFormatEncoding.Pcm, 8000, 1, 16000, 2, 16);
            }
            else
            {
                mPcmWaveFormat = WaveFormat.CreateCustomFormat(WaveFormatEncoding.Pcm, 8000, 1, 16000, 2, 16);
            }
            if (!Path.IsPathRooted(wavePath))
            {
                wavePath = Path.Combine(AppDomain.CurrentDomain.BaseDirectory, wavePath);
            }
            if (!Directory.Exists(wavePath))
            {
                Directory.CreateDirectory(wavePath);
            }
            string srcPath, pcmPath;

            srcPath = Path.Combine(wavePath, "Src_" + DateTime.Now.ToString("yyyyMMddHHmmss") + ".wav");
            pcmPath = Path.Combine(wavePath, "Pcm_" + DateTime.Now.ToString("yyyyMMddHHmmss") + ".wav");
            if (isSrcWriteFile)
            {
                mSrcWriter = new WaveFileWriter(srcPath, mSrcWaveFormat);
            }
            if (isPcmWriteFile)
            {
                mPcmWriter = new WaveFileWriter(pcmPath, mPcmWaveFormat);
            }
            SubDebug(string.Format("Writer prepared,source wave format:{0},dest wave format:{1}", mSrcWaveFormat, mPcmWaveFormat));
        }
Example #27
 private static void ConvertToPcm(ref WaveStream readerStream, ref WaveFormat waveFormat)
 {
     waveFormat = WaveFormat.CreateCustomFormat
                  (
         WaveFormatEncoding.Pcm,
         waveFormat.SampleRate,
         waveFormat.Channels,
         waveFormat.AverageBytesPerSecond,
         waveFormat.BlockAlign,
         waveFormat.BitsPerSample
                  );
     readerStream = new WaveFormatConversionStream(waveFormat, readerStream);
     readerStream = new BlockAlignReductionStream(readerStream);
 }
Example #28
        static public Sound Read(Stream source, Properties properties)
        {
            BinaryReaderEx reader = new BinaryReaderEx(source);
            Sound          result = new Sound();
            bool           stereo = (properties.Flag0F & 0x80) != 0;

            byte[] data       = reader.ReadBytes(properties.SampleLength);
            int    dataLength = data.Length;

            byte[] newData = new byte[dataLength * (stereo ? 1 : 2)];

            // translate the "signed" data
            int newDataPtr = 0;

            for (int i = 0; i < dataLength; i += 2)
            {
                int sample = ((int)data[i] << 8) | data[i + 1];
                if (sample >= 0x8000)
                {
                    sample = -(sample & 0x7FFF);
                }
                newData[newDataPtr]     = (byte)(sample & 0xFF);
                newData[newDataPtr + 1] = (byte)((sample >> 8) & 0xFF);
                if (!stereo)
                {
                    newData[newDataPtr + 2] = newData[newDataPtr];
                    newData[newDataPtr + 3] = newData[newDataPtr + 1];
                    newDataPtr += 2;
                }
                newDataPtr += 2;
            }

            // 16-bit stereo format
            result.Format = WaveFormat.CreateCustomFormat(WaveFormatEncoding.Pcm, properties.Frequency, 2, properties.Frequency * 4, 4, 16);
            result.Data   = newData;

            // determine volume
            result.Volume = VolumeTable[properties.Volume];

            // determine panning
            result.Panning = (float)(properties.Panning - 1) / (float)0x7E;

            // we already precalculated volume
            result.VolumeIsLinear = true;

            // return the final result
            return(result);
        }
Example #29
        private WaveFormat CreatePcmWaveFormat(SscfWaveHeader waveHeader)
        {
            const int bitsPerSample         = 16;
            short     channels              = (short)waveHeader.NumChannels;
            short     blockAlign            = (short)(channels * 2);
            int       sampleRate            = waveHeader.SamplingRate;
            int       averageBytesPerSecond = sampleRate * blockAlign;

            return(WaveFormat.CreateCustomFormat(
                       WaveFormatEncoding.Pcm,
                       sampleRate,
                       channels,
                       averageBytesPerSecond,
                       blockAlign,
                       bitsPerSample));
        }
Example #30
 /// <summary>
 /// Creates a new mono ISampleProvider based on a stereo input
 /// </summary>
 /// <param name="sourceProvider">Stereo 16 bit PCM input</param>
 public StereoToMonoSampleProvider(ISampleProvider sourceProvider)
 {
     LeftVolume  = 0.5f;
     RightVolume = 0.5f;
     if (sourceProvider.WaveFormat.Channels != 2)
     {
         throw new ArgumentException("Source must be stereo");
     }
     this.sourceProvider = sourceProvider;
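      // Keep the source's encoding, sample rate and bit depth, but halve the byte rate and use a single-channel block align.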
     WaveFormat          = WaveFormat.CreateCustomFormat(
         sourceProvider.WaveFormat.Encoding,
         sourceProvider.WaveFormat.SampleRate,
         1,
         sourceProvider.WaveFormat.AverageBytesPerSecond / 2,
         sourceProvider.WaveFormat.BitsPerSample / 8,
         sourceProvider.WaveFormat.BitsPerSample);
 }