Example No. 1
        private void InitAudioCapture()
        {
            // Set up DirectSound
            device = new Device();
            device.SetCooperativeLevel(this, CooperativeLevel.Normal);

            capture = new Capture(record_source);

            // Create the WaveFormat
            waveFormat = new WaveFormat
            {
                BitsPerSample         = bitsPerSample,                                                          // 16 bits
                BlockAlign            = (short)(channels * (bitsPerSample / (short)8)),
                Channels              = channels,                                                               // Stereo
                AverageBytesPerSecond = (short)(channels * (bitsPerSample / (short)8)) * samplesPerSecond,      // bytes per second (BlockAlign * sample rate)
                SamplesPerSecond      = samplesPerSecond,                                                       // 22kHz
                FormatTag             = WaveFormatTag.Pcm
            };

            captureBuffDesc = new CaptureBufferDescription
            {
                BufferBytes = waveFormat.AverageBytesPerSecond / 5,
                Format      = waveFormat
            };

            bufferDesc = new BufferDescription
            {
                BufferBytes = waveFormat.AverageBytesPerSecond / 5,
                Format      = waveFormat
            };

            bufferplayback = new SecondaryBuffer(bufferDesc, device);
            buffersize     = captureBuffDesc.BufferBytes;
        }
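Example No. 1 only builds the format and the two buffer descriptions. A minimal, hypothetical continuation is sketched below: the captureBuffer field, the isRunning flag and RunLoopback are assumptions (System.Threading is assumed to be imported), and production code would normally use notification positions, as in Example No. 25, rather than this simple sleep-and-read loop.

        // Hypothetical continuation of the snippet above (captureBuffer, isRunning
        // and RunLoopback are assumed, not part of the original example).
        private CaptureBuffer captureBuffer;
        private volatile bool isRunning;

        private void RunLoopback()
        {
            captureBuffer = new CaptureBuffer(captureBuffDesc, capture);
            captureBuffer.Start(true);                       // loop over the ~200 ms capture buffer

            while (isRunning)
            {
                Thread.Sleep(200);                           // wait roughly one buffer's worth of audio

                // Read the whole capture buffer and echo it into the playback buffer.
                byte[] data = (byte[])captureBuffer.Read(0, typeof(byte), LockFlag.None, buffersize);
                bufferplayback.Write(0, data, LockFlag.None);
                bufferplayback.Play(0, BufferPlayFlags.Default);
            }

            captureBuffer.Stop();
        }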
Example No. 2
        WaveFormat TryNewFormat(int hz, short bits, short channels)
        {
            WaveFormat format = new WaveFormat();

            format.FormatTag             = WaveFormatTag.Pcm;
            format.SamplesPerSecond      = hz;
            format.BitsPerSample         = bits;
            format.Channels              = channels;
            format.BlockAlign            = (short)(format.Channels * (format.BitsPerSample / 8));
            format.AverageBytesPerSecond = format.BlockAlign * format.SamplesPerSecond;

            CaptureBufferDescription dscheckboxd      = new CaptureBufferDescription();
            CaptureBuffer            pDSCaptureBuffer = null;

            dscheckboxd.BufferBytes = format.AverageBytesPerSecond;
            dscheckboxd.Format      = format;
            try
            {
                pDSCaptureBuffer = new CaptureBuffer(dscheckboxd, device);
                pDSCaptureBuffer.Dispose();
                return(format);
            }
            catch
            {
                // Can't return null, because WaveFormat is a value type.
                throw;
            }
        }
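TryNewFormat either returns the format or rethrows, so a caller has to probe candidates itself. A small illustrative sketch of such a caller follows; the candidate table and PickFirstSupportedFormat are assumptions, not part of the example above.

        // Illustrative use of TryNewFormat: probe a few common PCM layouts and keep
        // the first one the capture device accepts (candidate list is assumed).
        WaveFormat PickFirstSupportedFormat()
        {
            int[]   rates    = { 44100, 22050, 11025, 8000 };
            short[] depths   = { 16, 8 };
            short[] channels = { 2, 1 };

            foreach (int hz in rates)
                foreach (short bits in depths)
                    foreach (short ch in channels)
                    {
                        try
                        {
                            return TryNewFormat(hz, bits, ch);
                        }
                        catch
                        {
                            // Not supported; try the next combination.
                        }
                    }

            throw new NotSupportedException("No standard PCM capture format is supported.");
        }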
        private void CreateCaptureBuffer()
        {
            // Description object for the capture buffer

            CaptureBufferDescription bufferdescription = new CaptureBufferDescription();

            if (null != mNotify)
            {
                mNotify.Dispose();
                mNotify = null;
            }
            if (null != mRecBuffer)
            {
                mRecBuffer.Dispose();
                mRecBuffer = null;
            }
            // Set the notification size (at least 1 KB; here about 1/8 second of data)
            mNotifySize  = (1024 > mWavFormat.AverageBytesPerSecond / 8) ? 1024 : (mWavFormat.AverageBytesPerSecond / 8);
            mNotifySize -= mNotifySize % mWavFormat.BlockAlign;
            // Set the buffer size
            mBufferSize = mNotifySize * cNotifyNum;
            // Fill in the buffer description
            bufferdescription.BufferBytes = mBufferSize;
            bufferdescription.Format      = mWavFormat;      // recording format
            // Create the capture buffer
            mRecBuffer         = new CaptureBuffer(bufferdescription, mCapDev);
            mNextCaptureOffset = 0;
        }
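For reference, with the 22 050 Hz, 16-bit, mono format used by several snippets on this page and an assumed cNotifyNum of 16, the notification arithmetic above works out as follows (a worked example only, not part of the original snippet).

        // Worked example of the notification-size arithmetic (assumed values).
        static void NotifySizeExample()
        {
            int   averageBytesPerSecond = 22050 * 2;                      // 44100 bytes/s (16-bit mono)
            short blockAlign            = 2;                              // 1 channel * 16 bits / 8

            int notifySize = Math.Max(1024, averageBytesPerSecond / 8);   // 5512 bytes, about 125 ms
            notifySize    -= notifySize % blockAlign;                     // still 5512, already block-aligned

            int bufferSize = notifySize * 16;                             // 88192 bytes, about 2 s of audio
            Console.WriteLine("notify = {0} bytes, buffer = {1} bytes", notifySize, bufferSize);
        }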
Example No. 4
        public void StartRecording(int deviceIndex)
        {
            if (mCaptureBuffer != null)
            {
                if (mCaptureBuffer.Capturing)
                {
                    mCaptureBuffer.Stop();
                }

                mCaptureBuffer.Dispose();
                mCaptureBuffer = null;
            }

            CaptureDevicesCollection audioDevices = new CaptureDevicesCollection();

            if (deviceIndex != -1 && deviceIndex < audioDevices.Count)
            {
                // initialize the capture buffer and start the animation thread
                Capture capture = new Capture(audioDevices[deviceIndex].DriverGuid);
                CaptureBufferDescription captureBufferDescription = new CaptureBufferDescription();
                WaveFormat waveFormat = new WaveFormat();
                waveFormat.BitsPerSample         = 16;
                waveFormat.SamplesPerSecond      = 8000;
                waveFormat.Channels              = 1;
                waveFormat.BlockAlign            = (short)(waveFormat.Channels * waveFormat.BitsPerSample / 8);
                waveFormat.AverageBytesPerSecond = waveFormat.BlockAlign * waveFormat.SamplesPerSecond;
                waveFormat.FormatTag             = WaveFormatTag.Pcm;

                captureBufferDescription.Format      = waveFormat;
                captureBufferDescription.BufferBytes = waveFormat.SamplesPerSecond * 120;

                mCaptureBuffer = new Microsoft.DirectX.DirectSound.CaptureBuffer(captureBufferDescription, capture);
                mCaptureBuffer.Start(true);
            }
        }
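The comment above mentions an animation thread; a sketch of the kind of metering read it might perform is given below. ReadPeakLevel and SAMPLE_COUNT are assumptions, and the Read call mirrors the FromWriteCursor pattern used in Example No. 7.

        // Hypothetical metering helper for the animation thread mentioned above.
        private int ReadPeakLevel()
        {
            const int SAMPLE_COUNT = 1024;

            // Grab the most recent 16-bit samples near the write cursor.
            short[] samples = (short[])mCaptureBuffer.Read(0, typeof(short),
                                                           LockFlag.FromWriteCursor, SAMPLE_COUNT);
            int peak = 0;
            foreach (short s in samples)
            {
                int magnitude = Math.Abs((int)s);
                if (magnitude > peak)
                {
                    peak = magnitude;
                }
            }
            return peak;   // 0 .. 32768
        }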
Example No. 5
        public static void SetVoiceDevices(int deviceID, short channels, short bitsPerSample, int samplesPerSecond)
        {
            device = new Device();
            device.SetCooperativeLevel(new System.Windows.Forms.Control(), CooperativeLevel.Normal);
            CaptureDevicesCollection captureDeviceCollection = new CaptureDevicesCollection();
            DeviceInformation        deviceInfo = captureDeviceCollection[deviceID];

            capture = new Capture(deviceInfo.DriverGuid);

            waveFormat                           = new WaveFormat();
            waveFormat.Channels                  = channels;
            waveFormat.FormatTag                 = WaveFormatTag.Pcm;
            waveFormat.SamplesPerSecond          = samplesPerSecond;
            waveFormat.BitsPerSample             = bitsPerSample;
            waveFormat.BlockAlign                = (short)(channels * (bitsPerSample / (short)8));
            waveFormat.AverageBytesPerSecond     = waveFormat.BlockAlign * samplesPerSecond;
            captureBufferDescription             = new CaptureBufferDescription();
            captureBufferDescription.BufferBytes = waveFormat.AverageBytesPerSecond / 5;
            captureBufferDescription.Format      = waveFormat;

            playbackBufferDescription             = new BufferDescription();
            playbackBufferDescription.BufferBytes = waveFormat.AverageBytesPerSecond / 5;
            playbackBufferDescription.Format      = waveFormat;
            playbackBufferDescription.GlobalFocus = true;
            playbackBuffer = new SecondaryBuffer(playbackBufferDescription, device);
            bufferSize     = captureBufferDescription.BufferBytes;
        }
        private void UDP_Initialize()
        {
            try {
                device = new Device();
                device.SetCooperativeLevel(this, CooperativeLevel.Normal);

                CaptureDevicesCollection captureDeviceCollection = new CaptureDevicesCollection();

                DeviceInformation deviceInfo = captureDeviceCollection[0];

                capture = new Capture(deviceInfo.DriverGuid);

                short channels         = 1;     //Mono; use 2 for stereo.
                short bitsPerSample    = 16;    //16Bit, alternatively use 8Bits.
                int   samplesPerSecond = 22050; //11KHz use 11025 , 22KHz use 22050, 44KHz use 44100 etc.

                //Set up the wave format to be captured.
                waveFormat                       = new WaveFormat();
                waveFormat.Channels              = channels;
                waveFormat.FormatTag             = WaveFormatTag.Pcm;
                waveFormat.SamplesPerSecond      = samplesPerSecond;
                waveFormat.BitsPerSample         = bitsPerSample;
                waveFormat.BlockAlign            = (short)(channels * (bitsPerSample / (short)8));
                waveFormat.AverageBytesPerSecond = waveFormat.BlockAlign * samplesPerSecond;

                captureBufferDescription             = new CaptureBufferDescription();
                captureBufferDescription.BufferBytes = waveFormat.AverageBytesPerSecond / 5;//approx 200 milliseconds of PCM data.
                captureBufferDescription.Format      = waveFormat;

                playbackBufferDescription             = new BufferDescription();
                playbackBufferDescription.BufferBytes = waveFormat.AverageBytesPerSecond / 5;
                playbackBufferDescription.Format      = waveFormat;
                playbackBuffer = new SecondaryBuffer(playbackBufferDescription, device);

                bufferSize = captureBufferDescription.BufferBytes;

                bIsCallActive  = false;
                nUdpClientFlag = 0;

                //Using UDP sockets
                clientSocket = new Socket(AddressFamily.InterNetwork, SocketType.Dgram, ProtocolType.Udp);
                EndPoint ourEP = new IPEndPoint(IPAddress.Any, 9450);
                //Listen asynchronously on port 9450 for incoming messages (Invite, Bye, etc).
                clientSocket.Bind(ourEP);

                //Receive data from any IP.
                EndPoint remoteEP = (EndPoint)(new IPEndPoint(IPAddress.Any, 0));

                byteData = new byte[1024];
                //Receive data asynchronously.
                clientSocket.BeginReceiveFrom(byteData,
                                              0, byteData.Length,
                                              SocketFlags.None,
                                              ref remoteEP,
                                              new AsyncCallback(UDP_OnReceive),
                                              null);
            } catch (Exception ex) {
                MessageBox.Show(ex.Message, "VoiceChat-Initialize ()", MessageBoxButtons.OK, MessageBoxIcon.Error);
            }
        }
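The matching receive callback is not part of the snippet. A minimal sketch of what UDP_OnReceive could look like follows; it assumes each datagram already carries raw PCM in the captured format and skips the Invite/Bye signalling the comments mention.

        // Hypothetical UDP_OnReceive for the snippet above (signalling omitted).
        private void UDP_OnReceive(IAsyncResult ar)
        {
            try
            {
                EndPoint remoteEP = new IPEndPoint(IPAddress.Any, 0);
                clientSocket.EndReceiveFrom(ar, ref remoteEP);

                // Play the received PCM block through the secondary buffer.
                playbackBuffer.Write(0, byteData, LockFlag.None);
                playbackBuffer.Play(0, BufferPlayFlags.Default);

                // Queue the next asynchronous receive.
                clientSocket.BeginReceiveFrom(byteData, 0, byteData.Length, SocketFlags.None,
                                              ref remoteEP, new AsyncCallback(UDP_OnReceive), null);
            }
            catch (Exception ex)
            {
                MessageBox.Show(ex.Message, "VoiceChat-OnReceive ()", MessageBoxButtons.OK, MessageBoxIcon.Error);
            }
        }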
Example No. 7
        public float GetSignFromDevice(out float response)
        {
            cap  = new Capture(deviceGuid);
            desc = new CaptureBufferDescription();

            WaveFormat wf = new WaveFormat();

            wf.BitsPerSample         = bitsPerSample;
            wf.SamplesPerSecond      = sampleRate;
            wf.Channels              = channels;
            wf.BlockAlign            = (short)(wf.Channels * wf.BitsPerSample / 8);
            wf.AverageBytesPerSecond = wf.BlockAlign * wf.SamplesPerSecond;
            wf.FormatTag             = WaveFormatTag.Pcm;

            desc.Format      = wf;
            desc.BufferBytes = SAMPLES * wf.BlockAlign;

            buffer = new Microsoft.DirectX.DirectSound.CaptureBuffer(desc, cap);
            buffer.Start(true);

            while (start)
            {
                Array samples = buffer.Read(0, typeof(Int16), LockFlag.FromWriteCursor, SAMPLE_FORMAT_ARRAY);
                response = ((float)samples.GetValue(0, 0, 0)) / 100;
            }
            response = 0.0f;
            return(0.0f);
        }
Example No. 8
        /// <summary>
        /// Small conceptual test using the default sound card
        /// </summary>
        public void teste()
        {
            Capture cap = new Capture(new DeviceInformation().DriverGuid);
            CaptureBufferDescription desc = new CaptureBufferDescription();
            CaptureBuffer            buffer;

            WaveFormat wf = new WaveFormat();

            wf.BitsPerSample         = 16;
            wf.SamplesPerSecond      = 44100;
            wf.Channels              = 2;
            wf.BlockAlign            = (short)(wf.Channels * wf.BitsPerSample / 8);
            wf.AverageBytesPerSecond = wf.BlockAlign * wf.SamplesPerSecond;
            wf.FormatTag             = WaveFormatTag.Pcm;

            desc.Format      = wf;
            desc.BufferBytes = SAMPLES * wf.BlockAlign;



            buffer = new Microsoft.DirectX.DirectSound.CaptureBuffer(desc, cap);
            buffer.Start(true);

zed:
            Array samples = buffer.Read(0, typeof(Int16), LockFlag.FromWriteCursor, SAMPLE_FORMAT_ARRAY);

            System.Console.WriteLine(samples.GetValue(0, 0, 0).ToString());

            goto zed;
        }
Example No. 9
        // Helper method to test a specific WaveFormat instance.
        private void VerifyFormat(WaveFormat newFormat)
        {
            if (this._captureDevice == null)
            {
                throw new InvalidOperationException("Capture device is null.");
            }

            CaptureBufferDescription capBuffDesc = new CaptureBufferDescription();

            capBuffDesc.BufferBytes = newFormat.AverageBytesPerSecond;
            capBuffDesc.Format      = newFormat;

            CaptureBuffer capBuff = null;

            try
            {
                capBuff = new CaptureBuffer(capBuffDesc, this._captureDevice);
            }
            catch (Exception ex)
            {
                string errMsg =
                    string.Format("Sound format not supported: {0} samples/sec, {1} bits/sample, {2} channels.",
                                  newFormat.SamplesPerSecond, newFormat.BitsPerSample, newFormat.Channels);
                throw new Exception(errMsg, ex);
            }

            if (capBuff != null)
            {
                capBuff.Dispose();
                capBuff = null;
            }
        }
        public void StartCapture(int sampleRate, Capture captureDevice)
        {
            StopCapture();
            EmptyRequest();

            this.sampleRate = sampleRate;
            readPos = 0;
            IsRecording = false;
            record = null;
            recordTime = 0;
            noRecordTime = 0;
            lastSample = null;
            lastSize = 0;

            capture = (captureDevice == null) ? new Capture() : captureDevice;

            WaveFormat waveFormat = new WaveFormat(); // set up the capture format
            waveFormat.BitsPerSample = 16;
            waveFormat.BlockAlign = 2;
            waveFormat.Channels = 1;
            waveFormat.AverageBytesPerSecond = sampleRate * 2;
            waveFormat.SamplesPerSecond = sampleRate;
            waveFormat.FormatTag = WaveFormatTag.Pcm;

            CaptureBufferDescription captureBuffDesc = new CaptureBufferDescription();
            captureBuffDesc.BufferBytes = bufferSize;
            captureBuffDesc.Format = waveFormat;

            captureBuffer = new CaptureBuffer(captureBuffDesc, capture);
            captureBuffer.Start(true);

            captureThread = new Thread(captureLoop);
            captureThread.Start();
            new Thread(EmptyRequest).Start();
        }
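The captureLoop body started above is not shown. A minimal sketch is given below: it reuses the captureBuffer, readPos and bufferSize members from the snippet, while ProcessSamples and the 20 ms polling interval are assumptions (the notification-based examples further down avoid polling altogether).

        // Minimal sketch of the captureLoop thread started above.
        private void captureLoop()
        {
            while (captureBuffer != null && captureBuffer.Capturing)
            {
                int capturePos, readSafePos;
                captureBuffer.GetCurrentPosition(out capturePos, out readSafePos); // capturePos unused here

                // Bytes available between our read cursor and the safe read position,
                // accounting for buffer wrap-around.
                int available = (readSafePos - readPos + bufferSize) % bufferSize;
                if (available > 0)
                {
                    byte[] data = (byte[])captureBuffer.Read(readPos, typeof(byte),
                                                             LockFlag.None, available);
                    readPos = (readPos + available) % bufferSize;
                    ProcessSamples(data);   // hypothetical consumer of the captured bytes
                }

                Thread.Sleep(20);
            }
        }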
Example No. 11
    public void SetVoiceDevices(int deviceID, short channels, short bitsPerSample, int samplesPerSecond)
    {
        // Set up the voice devices
        _device = new Device();                                                                   // sound input device
        _device.SetCooperativeLevel(new System.Windows.Forms.Control(), CooperativeLevel.Normal); // set the application form and priority
        CaptureDevicesCollection captureDeviceCollection = new CaptureDevicesCollection();        // get the available capture devices (sound card)
        DeviceInformation        deviceInfo = captureDeviceCollection[deviceID];                  // set the device number

        _capture = new Capture(deviceInfo.DriverGuid);                                            // get the selected device's driver information

        //Set up the wave format to be captured.
        _waveForm                       = new WaveFormat();                               // wave format declaration
        _waveForm.Channels              = channels;                                       // channels
        _waveForm.FormatTag             = WaveFormatTag.Pcm;                              // PCM - Pulse Code Modulation
        _waveForm.SamplesPerSecond      = samplesPerSecond;                               // number of samples per second
        _waveForm.BitsPerSample         = bitsPerSample;                                  // number of bits per sample
        _waveForm.BlockAlign            = (short)(channels * (bitsPerSample / (short)8)); // smallest data unit in bytes, 1 * (16/8) = 2 bytes
        _waveForm.AverageBytesPerSecond = _waveForm.BlockAlign * samplesPerSecond;        // bytes per second, 22050*2 = 44100
        _capBufDescr                    = new CaptureBufferDescription();
        _capBufDescr.BufferBytes        = _waveForm.AverageBytesPerSecond / 5;            // 200 milliseconds of recording = 8820 bytes
        _capBufDescr.Format             = _waveForm;                                      // Using Wave Format

        // Playback
        _playbackBufferDescription             = new BufferDescription();
        _playbackBufferDescription.BufferBytes = _waveForm.AverageBytesPerSecond / 5;          // playback - 200 milliseconds = 8820 bytes
        _playbackBufferDescription.Format      = _waveForm;
        _playbackBuffer = new SecondaryBuffer(_playbackBufferDescription, _device);
        _bufferSize     = _capBufDescr.BufferBytes;
    }
Example No. 12
        static void InicialiceCaptureBuffer()
        {
            try
            {
                CaptureDevicesCollection audioDevices = new CaptureDevicesCollection();

                // initialize the capture buffer and start the animation thread
                Capture cap = new Capture(audioDevices[1].DriverGuid);
                CaptureBufferDescription desc = new CaptureBufferDescription();
                WaveFormat wf = new WaveFormat();
                wf.BitsPerSample         = 16;
                wf.SamplesPerSecond      = 44100;
                wf.Channels              = (short)cap.Caps.Channels;
                wf.BlockAlign            = (short)(wf.Channels * wf.BitsPerSample / 8);
                wf.AverageBytesPerSecond = wf.BlockAlign * wf.SamplesPerSecond;
                wf.FormatTag             = WaveFormatTag.Pcm;

                desc.Format      = wf;
                desc.BufferBytes = SAMPLES * wf.BlockAlign;

                buffer = new CaptureBuffer(desc, cap);
                buffer.Start(true);
            }
            catch
            {
                Console.WriteLine("Error al iniciar el capturador de sonido");
            }
        }
    public void SetVoiceDevices(int deviceID, short channels, short bitsPerSample, int samplesPerSecond)
    {
        // Installization Voice Devices
        device = new Device();                                                                   // Sound Input Device
        device.SetCooperativeLevel(new System.Windows.Forms.Control(), CooperativeLevel.Normal); // Set The Application Form and Priority
        CaptureDevicesCollection captureDeviceCollection = new CaptureDevicesCollection();       // To Get Available Devices (Input Sound Card)
        DeviceInformation        deviceInfo = captureDeviceCollection[deviceID];                 // Set Device Number

        capture = new Capture(deviceInfo.DriverGuid);                                            // Get The Selected Device Driver Information

        //Set up the wave format to be captured.
        waveFormat                           = new WaveFormat();                               // Wave Format declaration
        waveFormat.Channels                  = channels;                                       // Channels  (2 if Stereo)
        waveFormat.FormatTag                 = WaveFormatTag.Pcm;                              // PCM - Pulse Code Modulation
        waveFormat.SamplesPerSecond          = samplesPerSecond;                               // The Number of Samples Per Second
        waveFormat.BitsPerSample             = bitsPerSample;                                  // The Number of bits for each sample
        waveFormat.BlockAlign                = (short)(channels * (bitsPerSample / (short)8)); // Minimum atomic unit of data in bytes, Ex: 1 * (16/8) = 2 bytes
        waveFormat.AverageBytesPerSecond     = waveFormat.BlockAlign * samplesPerSecond;       // Required Bytes-Per-Second, Ex. 22050*2 = 44100
        captureBufferDescription             = new CaptureBufferDescription();
        captureBufferDescription.BufferBytes = waveFormat.AverageBytesPerSecond / 5;           //Ex. 200 milliseconds of PCM data = 8820 Bytes (In Record)
        captureBufferDescription.Format      = waveFormat;                                     // Using Wave Format

        // Playback
        playbackBufferDescription             = new BufferDescription();
        playbackBufferDescription.BufferBytes = waveFormat.AverageBytesPerSecond / 5;          //Ex. 200 milliseconds of PCM data = 8820 Bytes (In Playback)
        playbackBufferDescription.Format      = waveFormat;
        playbackBuffer = new SecondaryBuffer(playbackBufferDescription, device);
        bufferSize     = captureBufferDescription.BufferBytes;
    }
Example No. 14
        public void CreateCaptureBuffer()
        { //To create a capture buffer you need two parameters: the buffer description (format and so on) and the capture device.
            CaptureBufferDescription bufferdescription = new CaptureBufferDescription();

            bufferdescription.Format = mWavFormat; //Sets the data format to be captured by the buffer
            iNotifySize = 1024;                    //Set the notification size
            iBufferSize = iNotifyNum * iNotifySize;
            bufferdescription.BufferBytes = iBufferSize;
            capturebuffer = new CaptureBuffer(bufferdescription, capture);//Create the device buffer object
        }
Example No. 15
        private void InitializeBuffer()
        {
            CaptureBufferDescription bd = new CaptureBufferDescription();

            bd.Format      = getWaveFormat();
            mBufferLength  = (int)getPositionEquivalent(new TimeSpan(BUFFER_MS * TimeSpan.TicksPerMillisecond));
            bd.BufferBytes = mBufferLength;
            mBuffer        = new CaptureBuffer(bd, mRecordCapture);
            mCycleCount    = 0;
        }
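getPositionEquivalent is referenced but not shown. A plausible sketch follows, assuming it converts a TimeSpan into a block-aligned byte offset for the current wave format; the long return type is an assumption.

        // Hypothetical getPositionEquivalent used above.
        private long getPositionEquivalent(TimeSpan span)
        {
            WaveFormat wf    = getWaveFormat();
            long       bytes = (long)(span.TotalSeconds * wf.AverageBytesPerSecond);

            // Round down to a whole block so the buffer size stays frame-aligned.
            return bytes - (bytes % wf.BlockAlign);
        }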
Example No. 16
        private void CreateCaptureBuffer()
        {
            //To create a capture buffer, you must have two parameters:
            //the buffer information (describing the format in this buffer, etc.), the buffer device.

            CaptureBufferDescription bufferdescription = new CaptureBufferDescription();

            bufferdescription.Format = mWavFormat; //Sets the data format to be captured by the buffer
            iNotifySize = 1024;                    //Set the notification size
            iBufferSize = iNotifyNum * iNotifySize;
            bufferdescription.BufferBytes = iBufferSize;
            capturebuffer = new CaptureBuffer(bufferdescription, capture);//Create a device buffer object
        }
Example No. 17
        /*
         * Initializes all the data members.
         */
        private void Initialize()
        {
            try
            {
                device = new Device();
                device.SetCooperativeLevel(this, CooperativeLevel.Normal);

                CaptureDevicesCollection captureDeviceCollection = new CaptureDevicesCollection();

                DeviceInformation deviceInfo = captureDeviceCollection[0];

                capture = new Capture(deviceInfo.DriverGuid);

                short channels         = 1;     //Mono; use 2 for stereo.
                short bitsPerSample    = 16;    //16Bit, alternatively use 8Bits.
                int   samplesPerSecond = 44100; //11KHz use 11025 , 22KHz use 22050, 44KHz use 44100 etc.

                //Set up the wave format to be captured.
                waveFormat                       = new WaveFormat();
                waveFormat.Channels              = channels;
                waveFormat.FormatTag             = WaveFormatTag.Pcm;
                waveFormat.SamplesPerSecond      = samplesPerSecond;
                waveFormat.BitsPerSample         = bitsPerSample;
                waveFormat.BlockAlign            = (short)(channels * (bitsPerSample / (short)8));
                waveFormat.AverageBytesPerSecond = waveFormat.BlockAlign * samplesPerSecond;

                captureBufferDescription             = new CaptureBufferDescription();
                captureBufferDescription.BufferBytes = waveFormat.AverageBytesPerSecond;//approx 1 second of PCM data.
                captureBufferDescription.Format      = waveFormat;

                playbackBufferDescription             = new BufferDescription();
                playbackBufferDescription.BufferBytes = waveFormat.AverageBytesPerSecond;
                playbackBufferDescription.Format      = waveFormat;
                playbackBuffer = new SecondaryBuffer(playbackBufferDescription, device);

                bufferSize = captureBufferDescription.BufferBytes;

                bIsCallActive  = false;
                nUdpClientFlag = 0;

                speaker      = new Speakers();
                speaker.Mono = true;
                Volume vol;
                vol = Volume.Min;
            }
            catch (Exception ex)
            {
                MessageBox.Show("Audio device(s) missing", "VCES - Audio device error", MessageBoxButtons.OK, MessageBoxIcon.Error);
                return;
            }
        }
        /// <summary>
        /// Creates the capture buffer object
        /// </summary>
        private void CreateCaptureBuffer()
        {
            //To create a capture buffer you need two parameters: the buffer description (format and so on) and the capture device.
            WaveFormat mWavFormat = SetWaveFormat();
            CaptureBufferDescription bufferdescription = new CaptureBufferDescription();

            bufferdescription.Format = mWavFormat;                       //Set the data format to be captured by the buffer
            iNotifySize = mWavFormat.AverageBytesPerSecond / iNotifyNum; //One second of data divided by the notification count gives a per-notification chunk below 0.2 s; keeping voice latency under 200 ms counts as good voice quality
            iBufferSize = iNotifyNum * iNotifySize;
            bufferdescription.BufferBytes    = iBufferSize;
            bufferdescription.ControlEffects = true;
            bufferdescription.WaveMapped     = true;
            capturebuffer = new CaptureBuffer(bufferdescription, capture);//Create the device buffer object
        }
Example No. 19
        public AudioCapture()
        {
            try
            {
                directSoundCapture = new DirectSoundCapture();
            }
            catch
            {
                throw new AudioCaptureException("Could not open recording device");
            }
            //var directSoundCaps = directSoundCapture.Capabilities;

            // Default 44.1kHz 16-bit stereo PCM
            waveFormat = new WaveFormat();

            // Set the buffer size.
            // Note that the buffer position will roll over to 0 when the buffer fills up,
            // so set the notification position's offset to one less than the buffer size.
            bufferSize      = waveFormat.ConvertLatencyToByteSize(latency);
            numberOfSamples = bufferSize / waveFormat.BlockAlign;

            // Create audio capture buffer
            captureBufferDesc             = new CaptureBufferDescription();
            captureBufferDesc.Format      = waveFormat;
            captureBufferDesc.BufferBytes = bufferSize;
            captureBuffer = new CaptureBuffer(directSoundCapture, captureBufferDesc);

            // Wait events allow the thread to wait asynchronously for the buffer to fill
            var evt = new AutoResetEvent(false);

            fullEvent = new WaitHandle[] { evt };

            // Notify the thread when the buffer is full
            var nf = new NotificationPosition();

            nf.Offset     = bufferSize - 1;
            nf.WaitHandle = fullEvent[0];
            var nfs = new NotificationPosition[] { nf };

            captureBuffer.SetNotificationPositions(nfs);

            // Start the processing thread
            thread = new Thread(new ThreadStart(Process));
            thread.IsBackground = true; // Allow application to exit
            thread.Start();
        }
Example No. 20
 public bool Initialize(Form MainForm, string Names)
 {
     checked
     {
         try
         {
             NamesPhone = Names;
             device     = new Device();
             device.SetCooperativeLevel(MainForm, CooperativeLevel.Normal);
             CaptureDevicesCollection captureDevicesCollection = new CaptureDevicesCollection();
             cap = new Capture(captureDevicesCollection[0].DriverGuid);
             short num  = 1;
             short num2 = 16;
             int   num3 = 22050;
             waveFormat                               = new WaveFormat();
             waveFormat.Channels                      = num;
             waveFormat.FormatTag                     = WaveFormatTag.Pcm;
             waveFormat.SamplesPerSecond              = num3;
             waveFormat.BitsPerSample                 = num2;
             waveFormat.BlockAlign                    = (short)Math.Round(unchecked ((double)num * ((double)num2 / 8.0)));
             waveFormat.AverageBytesPerSecond         = waveFormat.BlockAlign * num3;
             captureBufferDescription                 = new CaptureBufferDescription();
             captureBufferDescription.BufferBytes     = (int)Math.Round(unchecked ((double)waveFormat.AverageBytesPerSecond / 5.0));
             captureBufferDescription.Format          = waveFormat;
             playbackBufferDescription                = new BufferDescription();
             playbackBufferDescription.BufferBytes    = (int)Math.Round(unchecked ((double)waveFormat.AverageBytesPerSecond / 5.0));
             playbackBufferDescription.Format         = waveFormat;
             playbackBufferDescription.ControlEffects = true;
             playbackBufferDescription.GlobalFocus    = true;
             playbackBuffer                           = new SecondaryBuffer(playbackBufferDescription, device);
             bufferSize                               = captureBufferDescription.BufferBytes;
             Thread thread = new Thread(Process);
             thread.Start();
         }
         catch (Exception ex)
         {
             ProjectData.SetProjectError(ex);
             Exception ex2    = ex;
             bool      result = false;
             ProjectData.ClearProjectError();
             return(result);
         }
         return(true);
     }
 }
Example No. 21
        /// <summary>
        /// Creates a capture buffer and sets the format
        /// </summary>
        private void CreateCaptureBuffer()
        {
            CaptureBufferDescription dscheckboxd = new CaptureBufferDescription();

            if (null != applicationNotify)
            {
                applicationNotify.Dispose();
                applicationNotify = null;
            }
            if (null != applicationBuffer)
            {
                applicationBuffer.Dispose();
                applicationBuffer = null;
            }

            if (0 == InputFormat.Channels)
            {
                throw new ArgumentException("Audio Channels cannot be zero (use 1 - mono, 2 - stereo, etc.).");
            }

            // Set the notification size
            notifySize  = (1024 > InputFormat.AverageBytesPerSecond / 8) ? 1024 : (InputFormat.AverageBytesPerSecond / 8);
            notifySize -= notifySize % InputFormat.BlockAlign;

            // Set the buffer sizes
            captureBufferSize = notifySize * NumberRecordNotifications;

            // Create the capture buffer
            dscheckboxd.BufferBytes = captureBufferSize;
            dscheckboxd.Format      = InputFormat; // Set the format during creation

            try
            {
                applicationBuffer = new CaptureBuffer(dscheckboxd, applicationDevice);
            }
            catch (ApplicationException)
            {
                //Yeah, I know, Managed DirectX...
                throw new ApplicationException("The sound capturing device is not ready. Is '" + CaptureDevice + "' plugged in?");
            }

            nextCaptureOffset = 0;

            InitDirectSoundNotifications();
        }
Example No. 22
        /// <summary>
        /// Setup the buffer and notifications. Needs to be called after the
        /// format has been setup.
        /// </summary>
        public void createRecordBuffer()
        {
            // Create the recordBufferDesciption to be used
            CaptureBufferDescription recordBufferDescription = new CaptureBufferDescription();

            // Setup the pre-calculated buffer size
            recordBufferDescription.BufferBytes = _recordBufferSize;

            // Set the record buffer to the format which has been pre-defined
            recordBufferDescription.Format = _recordFormat;

            // Setup an instance of the capture buffer using the previously
            // formed description and the default capture device (as created in the
            // createCaptureDevice method)
            _recordBuffer = new CaptureBuffer(recordBufferDescription, captureDevice);

            // Now that the buffer is set up, it's time to configure the notifications
            setupNotifications();
        }
Example No. 23
        public void Init()
        {
            mCurrentDeviceName = mAudioDevices[mCurrentDeviceIndex].Description;

            Microsoft.DirectX.DirectSound.Capture audioCapture = new Capture(mAudioDevices[mCurrentDeviceIndex].DriverGuid);
            Microsoft.DirectX.DirectSound.CaptureBufferDescription audioCaptureDescription = new CaptureBufferDescription();
            Microsoft.DirectX.DirectSound.WaveFormat audioFormat = new WaveFormat();
            audioFormat.BitsPerSample         = (short)(mChannels * 8);
            audioFormat.SamplesPerSecond      = 44100;
            audioFormat.Channels              = (short)mChannels;
            audioFormat.BlockAlign            = (short)(mChannels * audioFormat.BitsPerSample / 8);
            audioFormat.AverageBytesPerSecond = audioFormat.BlockAlign * audioFormat.SamplesPerSecond;
            audioFormat.FormatTag             = WaveFormatTag.Pcm;

            audioCaptureDescription.Format      = audioFormat;
            audioCaptureDescription.BufferBytes = mSamples * audioFormat.BlockAlign;
            mAudioBuffer = new CaptureBuffer(audioCaptureDescription, audioCapture);
            mIsRunning   = false;
        }
Example No. 24
    private void ScanAvailableInputFormats()
    {
        //-----------------------------------------------------------------------------
        // Name: ScanAvailableInputFormats()
        // Desc: Tests to see if 20 different standard wave formats are supported by
        //       the capture device
        //-----------------------------------------------------------------------------
        WaveFormat format = new WaveFormat();
        CaptureBufferDescription dscheckboxd      = new CaptureBufferDescription();
        CaptureBuffer            pDSCaptureBuffer = null;

        // This might take a second or two, so throw up the hourglass
        Cursor = Cursors.WaitCursor;

        format.FormatTag = WaveFormatTag.Pcm;

        // Try 20 different standard formats to see if they are supported
        for (int iIndex = 0; iIndex < 20; iIndex++)
        {
            GetWaveFormatFromIndex(iIndex, ref format);

            // To test if a capture format is supported, try to create a
            // new capture buffer using a specific format.  If it works
            // then the format is supported, otherwise not.
            dscheckboxd.BufferBytes = format.AverageBytesPerSecond;
            dscheckboxd.Format      = format;

            try
            {
                pDSCaptureBuffer             = new CaptureBuffer(dscheckboxd, mf.applicationDevice);
                InputFormatSupported[iIndex] = true;
            }
            catch
            {
                InputFormatSupported[iIndex] = false;
            }
            if (pDSCaptureBuffer != null)
            {
                pDSCaptureBuffer.Dispose();
            }
        }
        Cursor = Cursors.Default;
    }
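GetWaveFormatFromIndex comes from the DirectX SDK capture sample and is not reproduced here. A hypothetical equivalent that maps an index onto 20 standard PCM layouts (5 sample rates x 2 bit depths x 2 channel counts) could look like this; the exact ordering in the original helper may differ.

    // Hypothetical GetWaveFormatFromIndex: index 0..19 -> one of 20 standard PCM layouts.
    private void GetWaveFormatFromIndex(int index, ref WaveFormat format)
    {
        int[]   rates        = { 8000, 11025, 22050, 44100, 48000 };
        short[] bitDepths    = { 8, 16 };
        short[] channelCount = { 1, 2 };

        format.SamplesPerSecond      = rates[index / 4];
        format.BitsPerSample         = bitDepths[(index / 2) % 2];
        format.Channels              = channelCount[index % 2];
        format.BlockAlign            = (short)(format.Channels * format.BitsPerSample / 8);
        format.AverageBytesPerSecond = format.BlockAlign * format.SamplesPerSecond;
    }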
Example No. 25
        /// <summary>
        /// Starts capture process.
        /// </summary>
        public void Start()
        {
            EnsureIdle();

            isCapturing = true;

            WaveFormat format = new WaveFormat();

            format.Channels              = ChannelCount;
            format.BitsPerSample         = BitsPerSample;
            format.SamplesPerSecond      = SampleRate;
            format.FormatTag             = WaveFormatTag.Pcm;
            format.BlockAlign            = (short)((format.Channels * format.BitsPerSample + 7) / 8);
            format.AverageBytesPerSecond = format.BlockAlign * format.SamplesPerSecond;

            bufferLength = format.AverageBytesPerSecond * BufferSeconds;
            CaptureBufferDescription desciption = new CaptureBufferDescription();

            desciption.Format      = format;
            desciption.BufferBytes = bufferLength;

            capture = new Capture(device.Id);
            buffer  = new CaptureBuffer(desciption, capture);

            int waitHandleCount = BufferSeconds * NotifyPointsInSecond;

            BufferPositionNotify[] positions = new BufferPositionNotify[waitHandleCount];
            for (int i = 0; i < waitHandleCount; i++)
            {
                BufferPositionNotify position = new BufferPositionNotify();
                position.Offset            = (i + 1) * bufferLength / positions.Length - 1;
                position.EventNotifyHandle = positionEventHandle.DangerousGetHandle();
                positions[i] = position;
            }

            notify = new Notify(buffer);
            notify.SetNotificationPositions(positions);

            terminated.Reset();
            thread      = new Thread(new ThreadStart(ThreadLoop));
            thread.Name = "Sound capture";
            thread.Start();
        }
Example No. 26
        void CreateCaptureBuffer()
        {
            //-----------------------------------------------------------------------------
            // Name: CreateCaptureBuffer()
            // Desc: Creates a capture buffer and sets the format
            //-----------------------------------------------------------------------------
            CaptureBufferDescription dscheckboxd = new CaptureBufferDescription();

            if (null != notify)
            {
                notify.Dispose();
                notify = null;
            }
            if (null != buffer)
            {
                buffer.Dispose();
                buffer = null;
            }

            if (0 == selectedFormat.Channels)
            {
                return;
            }

            // Set the notification size
            notifySize  = (1024 > selectedFormat.AverageBytesPerSecond / 8) ? 1024 : (selectedFormat.AverageBytesPerSecond / 8);
            notifySize -= notifySize % selectedFormat.BlockAlign;

            // Set the buffer sizes
            captureBufferSize = notifySize * NumberRecordNotifications;

            // Create the capture buffer
            dscheckboxd.BufferBytes  = captureBufferSize;
            selectedFormat.FormatTag = WaveFormatTag.Pcm;
            dscheckboxd.Format       = selectedFormat;       // Set the format during creation

            buffer            = new CaptureBuffer(dscheckboxd, selectedDevice);
            nextCaptureOffset = 0;

            InitNotifications();
        }
Example No. 27
    void CreateCaptureBuffer()
    {
        //-----------------------------------------------------------------------------
        // Name: CreateCaptureBuffer()
        // Desc: Creates a capture buffer and sets the format
        //-----------------------------------------------------------------------------
        CaptureBufferDescription dscheckboxd = new CaptureBufferDescription();

        if (null != applicationNotify)
        {
            applicationNotify.Dispose();
            applicationNotify = null;
        }
        if (null != applicationBuffer)
        {
            applicationBuffer.Dispose();
            applicationBuffer = null;
        }

        if (0 == InputFormat.Channels)
        {
            return;
        }

        // Set the notification size
        NotifySize  = (1024 > InputFormat.AverageBytesPerSecond / 8) ? 1024 : (InputFormat.AverageBytesPerSecond / 8);
        NotifySize -= NotifySize % InputFormat.BlockAlign;

        // Set the buffer sizes
        CaptureBufferSize = NotifySize * NumberRecordNotifications;

        // Create the capture buffer
        dscheckboxd.BufferBytes = CaptureBufferSize;
        InputFormat.FormatTag   = WaveFormatTag.Pcm;
        dscheckboxd.Format      = InputFormat;    // Set the format during creation

        applicationBuffer = new CaptureBuffer(dscheckboxd, applicationDevice);
        NextCaptureOffset = 0;

        InitNotifications();
    }
Example No. 28
        public void CreateCaptureBuffer()
        {
            // Desc: Creates a capture buffer and sets the format
            CaptureBufferDescription dsc = new CaptureBufferDescription();

            if (null != applicationNotify)
            {
                applicationNotify.Dispose();
                applicationNotify = null;
            }
            if (null != applicationBuffer)
            {
                applicationBuffer.Dispose();
                applicationBuffer = null;
            }
            if (0 == InputFormat.Channels)
            {
                return;
            }
            m_iNotifySize  = (1024 > InputFormat.AverageBytesPerSecond / 8) ? 1024 : (InputFormat.AverageBytesPerSecond / 8);
            m_iNotifySize -= m_iNotifySize % InputFormat.BlockAlign;
            // Set the buffer sizes
            m_iCaptureBufferSize = m_iNotifySize * NumberRecordNotifications;
            //calculate the size of VuMeter Update array length

            /*
             *                      m_UpdateVMArrayLength = m_iCaptureBufferSize/ 50 ;
             *                      CalculationFunctions cf = new CalculationFunctions();
             *                      m_UpdateVMArrayLength = Convert.ToInt32 (cf.AdaptToFrame ( Convert.ToInt32 ( m_UpdateVMArrayLength ),  m_FrameSize)  );
             *                      arUpdateVM = new byte [ m_UpdateVMArrayLength ] ;
             */
            // Create the capture buffer
            dsc.BufferBytes       = m_iCaptureBufferSize;
            InputFormat.FormatTag = WaveFormatTag.Pcm;
            // Set the format during creation
            dsc.Format        = InputFormat;
            applicationBuffer = new CaptureBuffer(dsc, this.m_cApplicationDevice);
            NextCaptureOffset = 0;
            InitNotifications();
        }
Example No. 29
        private void InitCaptureBuffer()
        {
            if (captureBuffer_ != null)
            {
                captureBuffer_.Dispose();
                captureBuffer_ = null;
            }

            notifySize_        = (1024 > waveFormat_.AverageBytesPerSecond / 8) ? 1024 : (waveFormat_.AverageBytesPerSecond / 8);
            notifySize_       -= notifySize_ % waveFormat_.BlockAlign;
            captureBufferSize_ = notifySize_ * NOTIFY_NUM;

            CaptureBufferDescription bufferDescription = new CaptureBufferDescription();

            bufferDescription.BufferBytes = captureBufferSize_;
            bufferDescription.Format      = waveFormat_;

            captureBuffer_ = new CaptureBuffer(bufferDescription, capture_);

            captureOffset_     = 0;
            captureDataLength_ = 0;
        }
Example No. 30
            private CaptureBuffer CreateCaptureBuffer(ISource aSource, int aBytes)
            {
                var format = new WaveFormat
                {
                    SamplesPerSecond      = 44100,
                    BitsPerSample         = 16,
                    Channels              = 2,
                    FormatTag             = WaveFormatTag.Pcm,
                    BlockAlignment        = 4,
                    AverageBytesPerSecond = 44100 * 4 // 2 channels, 2 bytes per sample
                };

                var desc = new CaptureBufferDescription
                {
                    Format      = format,
                    BufferBytes = aBytes
                };

                DirectSoundCapture capture = new DirectSoundCapture();

                return(new CaptureBuffer(capture, desc));
            }
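A short assumed usage of the helper above (SlimDX): size the buffer for a few seconds of 44.1 kHz stereo, capture in a loop, then stop and release it. The five-second figure and CaptureForAWhile are illustrative only.

            // Assumed usage of CreateCaptureBuffer above; sizes and names are illustrative.
            private void CaptureForAWhile(ISource aSource)
            {
                int fiveSeconds = 44100 * 4 * 5;                 // bytes: rate * block alignment * seconds
                CaptureBuffer buffer = CreateCaptureBuffer(aSource, fiveSeconds);

                buffer.Start(true);                              // looping capture
                System.Threading.Thread.Sleep(5000);             // let the buffer fill once
                buffer.Stop();
                buffer.Dispose();
            }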
Example No. 31
        public override void Start()
        {
            if (this.isCapturing)
            {
                throw new InvalidOperationException("Capture is in process");
            }

            this.isCapturing  = true;
            this.bufferLength = this.Format.AverageBytesPerSecond * bufferSeconds;

            CaptureBufferDescription desciption = new CaptureBufferDescription();

            desciption.Format      = this.Format;
            desciption.BufferBytes = bufferLength;

            this.captureDevice = new DirectSoundCapture();
            this.captureBuffer = new CaptureBuffer(captureDevice, desciption);

            int waitHandleCount = bufferSeconds * notifyPointsInSecond;

            NotificationPosition[] notificationPositions = new NotificationPosition[waitHandleCount];

            for (int i = 0; i < waitHandleCount; i++)
            {
                NotificationPosition position = new NotificationPosition();
                position.Offset = (i + 1) * bufferLength / notificationPositions.Length - 1;
                position.Event  = positionEvent;

                notificationPositions[i] = position;
            }

            this.captureBuffer.SetNotificationPositions(notificationPositions);

            this.terminatedEvent.Reset();
            this.captureThread      = new Thread(new ThreadStart(this.CaptureLoop));
            this.captureThread.Name = "Sound capture";
            this.captureThread.Start();
        }
Example No. 32
        // bufferSize is a SAMPLE COUNT
        // NOTE: we always capture 16 bits/sample
        public WaveCapture(Guid deviceGuid, int Fs, int bufferSize, int timerInterval)
        {
            CaptureBufferDescription desc = new CaptureBufferDescription();
            desc.BufferBytes = bufferSize * 2;
            desc.ControlEffects = false;
            desc.WaveMapped = true;
            desc.Format = new WaveFormat();
            desc.Format.FormatTag = SlimDX.WaveFormatTag.Pcm;
            desc.Format.SamplesPerSecond = Fs;
            desc.Format.Channels = 1;
            desc.Format.BitsPerSample = 16;
            desc.Format.BlockAlignment = 2;
            desc.Format.AverageBytesPerSecond = Fs * 2;

            buf = new byte[bufferSize * 2];

            capture = new DirectSoundCapture(deviceGuid);
            captureBuffer = new CaptureBuffer(capture, desc);

            timer = new Timer();
            timer.Interval = timerInterval;
            timer.Tick += new EventHandler(timer_Tick);
        }
Example No. 33
        private void InicializeCaptureSound()
        {
            device = new Device();
            //device.SetCooperativeLevel(this, CooperativeLevel.Normal);

            //CaptureDevicesCollection captureDeviceCollection = new CaptureDevicesCollection();
            //capture = new Capture(captureDeviceCollection[0].DriverGuid);
            //DeviceInformation deviceInfo = (DeviceInformation) cmbRecordDevices.SelectedItem;  //captureDeviceCollection[0];
            capture = new Capture(record_source);

            SetWaveFormat();

            captureBufferDescription = new CaptureBufferDescription();
            captureBufferDescription.BufferBytes = waveFormat.AverageBytesPerSecond / 5;//approx 200 milliseconds of PCM data.
            captureBufferDescription.Format = waveFormat;

            playbackBufferDescription = new BufferDescription();
            playbackBufferDescription.BufferBytes = waveFormat.AverageBytesPerSecond / 5;
            playbackBufferDescription.Format = waveFormat;

            playbackBuffer = new SecondaryBuffer(playbackBufferDescription, device);
            bufferSize = captureBufferDescription.BufferBytes;
        }
Example No. 34
        /// <summary>
        /// Starts capture process.
        /// </summary>
        public void Start()
        {
            EnsureIdle();

            isCapturing = true;

            WaveFormat format = new WaveFormat();
            format.Channels = ChannelCount;
            format.BitsPerSample = BitsPerSample;
            format.SamplesPerSecond = SampleRate;
            format.FormatTag = WaveFormatTag.Pcm;
            format.BlockAlign = (short)((format.Channels * format.BitsPerSample + 7) / 8);
            format.AverageBytesPerSecond = format.BlockAlign * format.SamplesPerSecond;

            bufferLength = format.AverageBytesPerSecond * BufferSeconds;
            CaptureBufferDescription desciption = new CaptureBufferDescription();
            desciption.Format = format;
            desciption.BufferBytes = bufferLength;

            capture = new Capture(device.Id);
            buffer = new CaptureBuffer(desciption, capture);

            int waitHandleCount = BufferSeconds * NotifyPointsInSecond;
            BufferPositionNotify[] positions = new BufferPositionNotify[waitHandleCount];
            for (int i = 0; i < waitHandleCount; i++)
            {
                BufferPositionNotify position = new BufferPositionNotify();
                position.Offset = (i + 1) * bufferLength / positions.Length - 1;
                position.EventNotifyHandle = positionEventHandle.DangerousGetHandle();
                positions[i] = position;
            }

            notify = new Notify(buffer);
            notify.SetNotificationPositions(positions);

            terminated.Reset();
            thread = new Thread(new ThreadStart(ThreadLoop));
            thread.Name = "Sound capture";
            thread.Start();
        }
Example No. 35
        private void CreateCaptureBuffer()
        {
            CaptureBufferDescription bufferdescription = new CaptureBufferDescription();
            if (null != mNotify)
            {
                mNotify.Dispose();
                mNotify = null;
            }

            if (null != mRecBuffer)
            {
                mRecBuffer.Dispose();
                mRecBuffer = null;
            }

            // Set the notification size (at least 1 KB; here about 1/8 second of data)
            mNotifySize = (1024 > mWavFormat.AverageBytesPerSecond / 8) ? 1024 : (mWavFormat.AverageBytesPerSecond / 8);
            mNotifySize -= mNotifySize % mWavFormat.BlockAlign;

            // Set the buffer size
            mBufferSize = mNotifySize * cNotifyNum;

            // Fill in the buffer description
            bufferdescription.BufferBytes = mBufferSize;
            bufferdescription.Format = mWavFormat;

            // Create the capture buffer
            mRecBuffer = new CaptureBuffer(bufferdescription, mCapDev);
            mNextCaptureOffset = 0;
        }
Example No. 36
        /// <summary>
        ///   Worker thread.
        /// </summary>
        /// 
        private void WorkerThread()
        {
            // Get the selected capture device
            DirectSoundCapture captureDevice = new DirectSoundCapture(device);


            // Set the capture format
            WaveFormat format = new WaveFormat();
            format.Channels = 1;
            format.SamplesPerSecond = sampleRate;
            format.FormatTag = sampleFormat.ToWaveFormat();
            format.BitsPerSample = (short)Signal.GetSampleSize(sampleFormat);
            format.BlockAlignment = (short)(format.BitsPerSample / 8);
            format.AverageBytesPerSecond = format.SamplesPerSecond * format.BlockAlignment;

            // Setup the capture buffer
            CaptureBufferDescription captureBufferDescription = new CaptureBufferDescription();
            captureBufferDescription.Format = format;
            captureBufferDescription.BufferBytes = 2 * desiredCaptureSize * format.BlockAlignment;
            captureBufferDescription.WaveMapped = true;
            captureBufferDescription.ControlEffects = false;

            CaptureBuffer captureBuffer = null;
            NotificationPosition[] notifications = new NotificationPosition[2];

            try
            {
                captureBuffer = new CaptureBuffer(captureDevice, captureBufferDescription);

                // Setup the notification positions
                int bufferPortionSize = captureBuffer.SizeInBytes / 2;
                notifications[0] = new NotificationPosition();
                notifications[0].Offset = bufferPortionSize - 1;
                notifications[0].Event = new AutoResetEvent(false);
                notifications[1] = new NotificationPosition();
                notifications[1].Offset = bufferPortionSize - 1 + bufferPortionSize;
                notifications[1].Event = new AutoResetEvent(false);
                captureBuffer.SetNotificationPositions(notifications);

                // Make a copy of the wait handles
                WaitHandle[] waitHandles = new WaitHandle[notifications.Length];
                for (int i = 0; i < notifications.Length; i++)
                    waitHandles[i] = notifications[i].Event;



                // Start capturing
                captureBuffer.Start(true);


                if (sampleFormat == SampleFormat.Format32BitIeeeFloat)
                {
                    float[] currentSample = new float[desiredCaptureSize];

                    while (!stopEvent.WaitOne(0, true))
                    {
                        int bufferPortionIndex = WaitHandle.WaitAny(waitHandles);
                        captureBuffer.Read(currentSample, 0, currentSample.Length, bufferPortionSize * bufferPortionIndex);
                        OnNewFrame(currentSample);
                    }
                }
                else if (sampleFormat == SampleFormat.Format16Bit)
                {
                    short[] currentSample = new short[desiredCaptureSize];

                    while (!stopEvent.WaitOne(0, true))
                    {
                        int bufferPortionIndex = WaitHandle.WaitAny(waitHandles);
                        captureBuffer.Read(currentSample, 0, currentSample.Length, bufferPortionSize * bufferPortionIndex);
                        OnNewFrame(currentSample);
                    }
                }
            }
            catch (Exception ex)
            {
                if (AudioSourceError != null)
                    AudioSourceError(this, new AudioSourceErrorEventArgs(ex.Message));
                else throw;
            }
            finally
            {
                if (captureBuffer != null)
                {
                    captureBuffer.Stop();
                    captureBuffer.Dispose();
                }

                if (captureDevice != null)
                    captureDevice.Dispose();

                for (int i = 0; i < notifications.Length; i++)
                    if (notifications[i].Event != null)
                        notifications[i].Event.Close();
            }
        }
Example No. 37
        /// <summary>
        /// Creates the sound recorder object
        /// </summary>
        /// <param name="owner">owner control</param>
        /// <param name="deviceIndex">device index (0 = default device)</param>
        /// <param name="channelCount">number of channels</param>
        /// <param name="frequency">sample rate (samples per second)</param>
        /// <param name="sampleSize">sample size (bits per sample)</param>
        /// <param name="bufferSectionTimeSpan">time span of one buffer section</param>
        /// <param name="bufferSectionCount">number of buffer sections</param>
        /// <param name="notificationEvent">notification event (the worker thread waits on it)</param>
        public SoundRecorder(Control owner, int deviceIndex, short channelCount, int frequency, short sampleSize, TimeSpan bufferSectionTimeSpan, int bufferSectionCount, AutoResetEvent notificationEvent)
            : base()
        {
            //Owner
            this.Owner = owner;

            #region Device

            CaptureDevicesCollection devices = new CaptureDevicesCollection();
            this.Device = new Capture(devices[deviceIndex].DriverGuid);

            #endregion

            #region Buffer description

            #region Wave format

            WaveFormat format = new WaveFormat();
            format.FormatTag = WaveFormatTag.Pcm;                                       //format type
            format.Channels = channelCount;                                             //number of channels
            format.SamplesPerSecond = frequency;                                        //sample rate (samples per second)
            format.BitsPerSample = sampleSize;                                          //sample size (bits per sample)
            format.BlockAlign = (short)((format.BitsPerSample / 8) * format.Channels);  //smallest data block unit (bytes per sample frame)
            format.AverageBytesPerSecond = format.BlockAlign * format.SamplesPerSecond; //data rate (bytes per second)

            #endregion

            // Size of one buffer section
            int bufferSectionSize = format.AverageBytesPerSecond * (int)bufferSectionTimeSpan.TotalSeconds;
            // Total buffer size
            int bufferSize = bufferSectionSize * bufferSectionCount;

            // Create the buffer description
            CaptureBufferDescription bufferDescription = new CaptureBufferDescription();
            bufferDescription.Format = format;
            bufferDescription.BufferBytes = bufferSize;
            this.BufferDescription = bufferDescription;

            #endregion

            #region Buffer position notifications

            // Notification event
            this.NotificationEvent = notificationEvent;
            // Position-notification array
            this.BufferPositionNotifys = new BufferPositionNotify[bufferSectionCount];
            for (int i = 0; i < bufferSectionCount; i++)
            {
                this.BufferPositionNotifys[i].Offset = bufferSectionSize * (i + 1) - 1; // notification point (last byte of each section)
                this.BufferPositionNotifys[i].EventNotifyHandle = this.NotificationEvent.SafeWaitHandle.DangerousGetHandle(); // the event is Set() at this point
            }

            #endregion

            #region Worker thread

            this.ThreadWorking = true;
            this.WorkingThread = new Thread(new ParameterizedThreadStart(this.WorkingThreadProcess));
            this.WorkingThread.IsBackground = true;
            this.WorkingThread.Start(this.NotificationEvent); // pass the event as the thread parameter

            #endregion
        }
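
The constructor above registers the notification points and starts WorkingThreadProcess, but the worker loop itself is not part of this example. A minimal sketch of what it might look like, assuming the capture buffer is created inside the worker from the Device and BufferDescription members shown above and that ThreadWorking is the stop flag; everything not visible in the constructor is an assumption:

        private void WorkingThreadProcess(object state)
        {
            // The AutoResetEvent handed to Thread.Start() in the constructor.
            AutoResetEvent notification = (AutoResetEvent)state;
            int sectionCount = this.BufferPositionNotifys.Length;
            int sectionSize = this.BufferDescription.BufferBytes / sectionCount;

            // Create the capture buffer and attach the notification positions prepared above.
            CaptureBuffer buffer = new CaptureBuffer(this.BufferDescription, this.Device);
            Notify notify = new Notify(buffer);
            notify.SetNotificationPositions(this.BufferPositionNotifys, sectionCount);

            buffer.Start(true); // capture with looping
            int nextOffset = 0;
            while (this.ThreadWorking)
            {
                notification.WaitOne(); // a section has just been filled
                byte[] data = (byte[])buffer.Read(nextOffset, typeof(byte), LockFlag.None, sectionSize);
                nextOffset = (nextOffset + sectionSize) % (sectionSize * sectionCount);
                // ... hand `data` to whatever consumes the recorded audio ...
            }
            buffer.Stop();
        }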
Exemplo n.º 38
0
        /*
         * Initializes all the data members.
         */
        private void Initialize()
        {
            try
            {
                device = new Device();
                device.SetCooperativeLevel(this, CooperativeLevel.Normal);

                CaptureDevicesCollection captureDeviceCollection = new CaptureDevicesCollection();

                DeviceInformation deviceInfo = captureDeviceCollection[0];

                capture = new Capture(deviceInfo.DriverGuid);

                short channels = 1; //Mono (use 2 for stereo).
                short bitsPerSample = 16; //16Bit, alternatively use 8Bits.
                int samplesPerSecond = 22050; //11KHz use 11025 , 22KHz use 22050, 44KHz use 44100 etc.

                //Set up the wave format to be captured.
                waveFormat = new WaveFormat();
                waveFormat.Channels = channels;
                waveFormat.FormatTag = WaveFormatTag.Pcm;
                waveFormat.SamplesPerSecond = samplesPerSecond;
                waveFormat.BitsPerSample = bitsPerSample;
                waveFormat.BlockAlign = (short)(channels * (bitsPerSample / (short)8));
                waveFormat.AverageBytesPerSecond = waveFormat.BlockAlign * samplesPerSecond;

                captureBufferDescription = new CaptureBufferDescription();
                captureBufferDescription.BufferBytes = waveFormat.AverageBytesPerSecond / 5;//approx 200 milliseconds of PCM data.
                captureBufferDescription.Format = waveFormat;

                playbackBufferDescription = new BufferDescription();
                playbackBufferDescription.BufferBytes = waveFormat.AverageBytesPerSecond / 5;
                playbackBufferDescription.Format = waveFormat;
                playbackBuffer = new SecondaryBuffer(playbackBufferDescription, device);

                bufferSize = captureBufferDescription.BufferBytes;

                bIsCallActive = false;
                nUdpClientFlag = 0;

                //Using UDP sockets
                clientSocket = new Socket(AddressFamily.InterNetwork, SocketType.Dgram, ProtocolType.Udp);
                EndPoint ourEP = new IPEndPoint(IPAddress.Any, 1450);
                //Listen asynchronously on port 1450 for coming messages (Invite, Bye, etc).
                clientSocket.Bind(ourEP);

                //Receive data from any IP.
                EndPoint remoteEP = (EndPoint)(new IPEndPoint(IPAddress.Any, 0));

                byteData = new byte[1024];
                //Receive data asynchronously.
                clientSocket.BeginReceiveFrom(byteData,
                                           0, byteData.Length,
                                           SocketFlags.None,
                                           ref remoteEP,
                                           new AsyncCallback(OnReceive),
                                           null);
            }
            catch (Exception ex)
            {
                MessageBox.Show(ex.Message, "VoiceChat-Initialize ()", MessageBoxButtons.OK, MessageBoxIcon.Error);
            }
        }
Exemplo n.º 39
0
 /// <summary>
 /// Creates the capture buffer
 /// </summary>
 private void CreateCaptureBuffer()
 {
     // Creating a capture buffer needs two things: a buffer description (format, size, etc.) and the capture device.
     CaptureBufferDescription cbdBufDescription = new CaptureBufferDescription();
     cbdBufDescription.Format = m_wavFormat; // data format the buffer will capture
     // One second of data divided by the notification count keeps each notification under 0.2 s of audio; voice latency below 200 ms counts as good quality
     m_nNotifySize = m_wavFormat.AverageBytesPerSecond / m_nNotifyNum;
     m_nBufSize = m_nNotifyNum * m_nNotifySize;
     cbdBufDescription.BufferBytes = m_nBufSize;
     m_objCaptureBuf = new CaptureBuffer(cbdBufDescription, m_objCapture);
 }
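
CreateCaptureBuffer above only sizes the buffer as m_nNotifyNum sections of m_nNotifySize bytes; the notification positions still have to be registered on m_objCaptureBuf. A hedged sketch of that step (the method name and the m_notificationEvent field are illustrative assumptions, not part of the original class):

 private AutoResetEvent m_notificationEvent = new AutoResetEvent(false);

 private void InitNotifications()
 {
     // One notification at the last byte of each section, so the event
     // fires every m_nNotifySize bytes of captured audio.
     BufferPositionNotify[] positions = new BufferPositionNotify[m_nNotifyNum];
     for (int i = 0; i < m_nNotifyNum; i++)
     {
         positions[i].Offset = m_nNotifySize * (i + 1) - 1;
         positions[i].EventNotifyHandle = m_notificationEvent.SafeWaitHandle.DangerousGetHandle();
     }

     Notify notify = new Notify(m_objCaptureBuf);
     notify.SetNotificationPositions(positions, m_nNotifyNum);
 }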
Exemplo n.º 40
0
        public void Initialize()
        {
            try
            {
                LogLine("Getting local IP address...");
                LocalIP.Text = NetProcedures.GetLocalIPAddress();
                LogLine("Using global IP address...");
                GlobalIP.Text = NetProcedures.globalIP;

                LogLine("Connecting to capturing device...");
                device = new Device();
                device.SetCooperativeLevel(this, CooperativeLevel.Normal);

                CaptureDevicesCollection captureDeviceCollection = new CaptureDevicesCollection();

                DeviceInformation deviceInfo = captureDeviceCollection[0];

                capture = new Capture(deviceInfo.DriverGuid);

                LogLine("Preparing audio format to be captured...");

                short channels = (short)Channels.Value;
                short bitsPerSample;
                if (radioButton7.Checked) bitsPerSample = 8;
                else bitsPerSample = 16;
                int samplesPerSecond;
                if (radioButton1.Checked) samplesPerSecond = 11025;
                else if (radioButton2.Checked) samplesPerSecond = 22050;
                else samplesPerSecond = 44100;

                //Set up the wave format to be captured.
                waveFormat = new WaveFormat();
                waveFormat.Channels = channels;
                waveFormat.FormatTag = WaveFormatTag.Pcm;
                waveFormat.SamplesPerSecond = samplesPerSecond;
                waveFormat.BitsPerSample = bitsPerSample;
                waveFormat.BlockAlign = (short)(channels * (bitsPerSample / (short)8));
                waveFormat.AverageBytesPerSecond = waveFormat.BlockAlign * samplesPerSecond;

                captureBufferDescription = new CaptureBufferDescription();
                captureBufferDescription.BufferBytes = waveFormat.AverageBytesPerSecond / 5;//approx 200 milliseconds of PCM data.
                captureBufferDescription.Format = waveFormat;

                playbackBufferDescription = new BufferDescription();
                playbackBufferDescription.BufferBytes = waveFormat.AverageBytesPerSecond / 5;
                playbackBufferDescription.Format = waveFormat;
                playbackBuffer = new SecondaryBuffer(playbackBufferDescription, device);

                bufferSize = captureBufferDescription.BufferBytes;

                bIsCallActive = false;
                nUdpClientFlag = 0;

                //Using UDP sockets
                LogLine("Initializing a socket...");
                clientSocket = new Socket(AddressFamily.InterNetwork, SocketType.Dgram, ProtocolType.Udp);
                
                EndPoint ourEP = new IPEndPoint(IPAddress.Any, 3535);
                clientSocket.Bind(ourEP);

                //Receive data from any IP.
                EndPoint remoteEP = (EndPoint)(new IPEndPoint(IPAddress.Any, 0));

                byteData = new byte[1024];
                //Receive data asynchronously.
                LogLine("Preparing to receive data...");
                clientSocket.BeginReceiveFrom(byteData,
                                           0, byteData.Length,
                                           SocketFlags.None,
                                           ref remoteEP,
                                           new AsyncCallback(OnReceive),
                                           null);
            }
            catch (Exception ex)
            {
                MessageBox.Show(ex.Message, "Ошибка инициализации | Fenazy", MessageBoxButtons.OK, MessageBoxIcon.Error);
            }
        }
Exemplo n.º 41
0
 public void ConfigureSoundDevice(short channels, Control appForm, short bitsPerSample, int samplesPerSecond)
 {
     this.cooperativeForm = appForm;
     RefreshSoundDevice();
     waveFormat = new WaveFormat();
     waveFormat.Channels = channels;
     waveFormat.FormatTag = WaveFormatTag.Pcm;
     waveFormat.SamplesPerSecond = samplesPerSecond;
     waveFormat.BitsPerSample = bitsPerSample;
     waveFormat.BlockAlign = (short)(channels * (bitsPerSample / (short)8));
     waveFormat.AverageBytesPerSecond = waveFormat.BlockAlign * samplesPerSecond;
     captureBufferDescription = new CaptureBufferDescription();
     captureBufferDescription.BufferBytes = waveFormat.AverageBytesPerSecond / 5;
     captureBufferDescription.Format = waveFormat;
     playbackBufferDescription = new BufferDescription();
     playbackBufferDescription.BufferBytes = waveFormat.AverageBytesPerSecond / 5;
     playbackBufferDescription.Format = waveFormat;
     playbackBuffer = new SecondaryBuffer(playbackBufferDescription, new Device());
     bufferSize = captureBufferDescription.BufferBytes;
 }
Exemplo n.º 42
0
        static void InicialiceCaptureBuffer()
        {
            try
            {
                CaptureDevicesCollection audioDevices = new CaptureDevicesCollection();

                // initialize the capture buffer and start the animation thread
                Capture cap = new Capture(audioDevices[1].DriverGuid);
                CaptureBufferDescription desc = new CaptureBufferDescription();
                WaveFormat wf = new WaveFormat();
                wf.BitsPerSample = 16;
                wf.SamplesPerSecond = 44100;
                wf.Channels = (short)cap.Caps.Channels;
                wf.BlockAlign = (short)(wf.Channels * wf.BitsPerSample / 8);
                wf.AverageBytesPerSecond = wf.BlockAlign * wf.SamplesPerSecond;
                wf.FormatTag = WaveFormatTag.Pcm;

                desc.Format = wf;
                desc.BufferBytes = SAMPLES * wf.BlockAlign;

                buffer = new CaptureBuffer(desc, cap);
                buffer.Start(true);
            }
            catch
            {
                Console.WriteLine("Error al iniciar el capturador de sonido");
            }
        }
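
InicialiceCaptureBuffer starts the looping capture, but the "animation thread" mentioned in its comment is not shown. A rough sketch of such a polling reader, assuming the thread is handed the buffer size (desc.BufferBytes) and that the whole buffer of SAMPLES frames is read on each pass; only `buffer` and SAMPLES come from the snippet above:

        static void AnimationLoop(int bufferBytes)
        {
            while (true)
            {
                // Read the entire capture buffer (SAMPLES frames) as raw PCM bytes.
                byte[] pcm = (byte[])buffer.Read(0, typeof(byte), LockFlag.None, bufferBytes);
                // ... convert `pcm` to 16-bit samples and redraw the visualization ...
                Thread.Sleep(20);
            }
        }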
Exemplo n.º 43
0
Arquivo: Main.cs Projeto: ptaa32/ARDOP
        public bool StartCodec(ref string strFault)
        {
            bool functionReturnValue = false;
            //Returns true if successful
            Thread.Sleep(100);
            // This delay is necessary for reliable startup following a StopCodec
            lock (objCodecLock) {
            dttLastSoundCardSample = DateTime.Now;
            bool blnSpectrumSave = MCB.DisplaySpectrum;
            bool blnWaterfallSave = MCB.DisplayWaterfall;
            System.DateTime dttStartWait = DateTime.Now;
            MCB.DisplayWaterfall = false;
            MCB.DisplaySpectrum = false;
            string[] strCaptureDevices = EnumerateCaptureDevices();
            string[] strPlaybackDevices = EnumeratePlaybackDevices();
            functionReturnValue = false;
            int intPtr = 0; // index into strPlaybackDevices
            // Playback devices
            try {
                cllPlaybackDevices = null;

                cllPlaybackDevices = new Microsoft.DirectX.DirectSound.DevicesCollection();
                if ((devSelectedPlaybackDevice != null)) {
                    devSelectedPlaybackDevice.Dispose();
                    devSelectedPlaybackDevice = null;
                }

                foreach (DeviceInformation objDI in cllPlaybackDevices) {
                    DeviceDescription objDD = new DeviceDescription(objDI);
                    if (strPlaybackDevices[intPtr] == MCB.PlaybackDevice) {
                        if (MCB.DebugLog)
                            Logs.WriteDebug("[Main.StartCodec] Setting SelectedPlaybackDevice = " + MCB.PlaybackDevice);
                        devSelectedPlaybackDevice = new Device(objDD.info.DriverGuid);
                        functionReturnValue = true;
                        break; // was "Exit For" in the original VB
                    }
                    intPtr += 1;
                }
                if (!functionReturnValue) {
                    strFault = "Playback Device setup, Device " + MCB.PlaybackDevice + " not found in Windows enumerated Playback Devices";
                }
            } catch (Exception ex) {
                strFault = ex.Message;
                Logs.Exception("[StartCodec, Playback Device setup] Err: " + ex.ToString());
                functionReturnValue = false;
            }
            if (functionReturnValue) {
                // Capture Device
                CaptureBufferDescription dscheckboxd = new CaptureBufferDescription();
                try {
                    functionReturnValue = false;
                    cllCaptureDevices = null;
                    cllCaptureDevices = new CaptureDevicesCollection();
                    intPtr = 0;
                    for (int i = 0; i <= cllCaptureDevices.Count - 1; i++) {
                        if (MCB.CaptureDevice == strCaptureDevices[i]) {
                            objCaptureDeviceGuid = cllCaptureDevices[i].DriverGuid;
                            devCaptureDevice = new Capture(objCaptureDeviceGuid);
                            stcSCFormat.SamplesPerSecond = 12000;
                            // 12000 Hz sample rate
                            stcSCFormat.Channels = 1;
                            stcSCFormat.BitsPerSample = 16;
                            stcSCFormat.BlockAlign = 2;
                            stcSCFormat.AverageBytesPerSecond = 2 * 12000;
                            stcSCFormat.FormatTag = WaveFormatTag.Pcm;
                            objApplicationNotify = null;
                            objCapture = null;
                            // Set the buffer sizes
                            intCaptureBufferSize = intNotifySize * intNumberRecordNotifications;
                            // Create the capture buffer
                            dscheckboxd.BufferBytes = intCaptureBufferSize;
                            stcSCFormat.FormatTag = WaveFormatTag.Pcm;
                            dscheckboxd.Format = stcSCFormat;
                            // Set the format during creatation
                            if ((objCapture != null)) {
                                objCapture.Dispose();
                                objCapture = null;
                            }
                            //objCapture = New CaptureBuffer(dscheckboxd, devCaptureDevice)
                            intNextCaptureOffset = 0;
                            WriteTextToSpectrum("CODEC Start OK", Brushes.LightGreen);
                            while (DateTime.Now.Subtract(dttStartWait).TotalSeconds < 3) {
                                Application.DoEvents();
                                Thread.Sleep(100);
                            }
                            objCapture = new CaptureBuffer(dscheckboxd, devCaptureDevice);
                            InititializeNotifications();
                            objCapture.Start(true);
                            // start with looping
                            InititializeSpectrum(Color.Black);

                            functionReturnValue = true;
                        }
                    }
                    if (!functionReturnValue) {
                        strFault = "Could not find DirectSound capture device " + MCB.CaptureDevice.ToUpper;
                        //Logs.Exception("[Main.StartCodec] Could not find DirectSound capture device " & MCB.CaptureDevice & " in Windows enumerated Capture Devices")
                    }
                } catch (Exception ex) {
                    strFault = ex.Message;
                    functionReturnValue = false;
                    //Logs.Exception("[Main.StartCodec] Err: " & ex.ToString)
                }
            }

            if (functionReturnValue) {
                if (MCB.DebugLog)
                    Logs.WriteDebug("[Main.StartCodec] Successful start of codec");
                objProtocol.ARDOPProtocolState = ProtocolState.DISC;
            } else {
                if (MCB.DebugLog)
                    Logs.WriteDebug("[Main.StartCodec] CODEC Start Failed");
                WriteTextToSpectrum("CODEC Start Failed", Brushes.Red);
                objProtocol.ARDOPProtocolState = ProtocolState.OFFLINE;
                while (DateTime.Now.Subtract(dttStartWait).TotalSeconds < 3) {
                    Application.DoEvents();
                    Thread.Sleep(100);
                }
                tmrStartCODEC.Interval = 5000;
                tmrStartCODEC.Start();
            }
            InititializeSpectrum(Color.Black);
            MCB.DisplayWaterfall = blnWaterfallSave;
            MCB.DisplaySpectrum = blnSpectrumSave;
            }
            return functionReturnValue;
        }
Exemplo n.º 44
0
        private void CreateCaptureBuffer()
        {
            // Creating a capture buffer needs two things: a buffer description (format, size, etc.) and the capture device.

            CaptureBufferDescription bufferdescription = new CaptureBufferDescription();
            bufferdescription.Format = mWavFormat; // data format the buffer will capture
            iNotifySize = 1024; // notification size (fixed)
            iBufferSize = iNotifyNum * iNotifySize;
            bufferdescription.BufferBytes = iBufferSize;
            capturebuffer = new CaptureBuffer(bufferdescription, capture); // create the capture buffer on the device
        }
Exemplo n.º 45
0
        // Helper method to test a specific WaveFormat instance.
        private void VerifyFormat(WaveFormat newFormat)
        {
            if (this._captureDevice == null)
            {
                throw new InvalidOperationException("Capture device is null.");
            }

            CaptureBufferDescription capBuffDesc = new CaptureBufferDescription();
            capBuffDesc.BufferBytes = newFormat.AverageBytesPerSecond;
            capBuffDesc.Format = newFormat;

            CaptureBuffer capBuff = null;

            try
            {
                capBuff = new CaptureBuffer(capBuffDesc, this._captureDevice);
            }
            catch (Exception ex)
            {
                string errMsg =
                   string.Format("Sound format not supported: {0} samples/sec, {1} bits/sample, {2} channels.",
                       newFormat.SamplesPerSecond, newFormat.BitsPerSample, newFormat.Channels);
                throw new Exception(errMsg, ex);
            }

            if (capBuff != null)
            {
                capBuff.Dispose();
                capBuff = null;
            }
        }
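
VerifyFormat throws when a format is rejected, so a caller can probe several candidate formats and keep the ones the capture device accepts. A hedged usage sketch (the helper name and the candidate list are assumptions; List&lt;T&gt; comes from System.Collections.Generic):

        private List<WaveFormat> FindSupportedFormats()
        {
            List<WaveFormat> supported = new List<WaveFormat>();
            int[] rates = { 11025, 22050, 44100 };
            short[] depths = { 8, 16 };
            short[] channelCounts = { 1, 2 };

            foreach (int rate in rates)
                foreach (short bits in depths)
                    foreach (short channels in channelCounts)
                    {
                        WaveFormat format = new WaveFormat();
                        format.FormatTag = WaveFormatTag.Pcm;
                        format.SamplesPerSecond = rate;
                        format.BitsPerSample = bits;
                        format.Channels = channels;
                        format.BlockAlign = (short)(channels * (bits / 8));
                        format.AverageBytesPerSecond = format.BlockAlign * format.SamplesPerSecond;

                        try
                        {
                            VerifyFormat(format); // throws if the capture device rejects this format
                            supported.Add(format);
                        }
                        catch (Exception)
                        {
                            // Not supported on this device; skip it.
                        }
                    }

            return supported;
        }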
Exemplo n.º 46
0
        public void CreateCaptureBuffer()
        {
     // Creating a capture buffer needs two things: a buffer description (format, size, etc.) and the capture device.
     mWavFormat = SetWaveFormat();  // set up the WaveFormat first
     CaptureBufferDescription bufferdescription = new CaptureBufferDescription();
     bufferdescription.Format = mWavFormat;  // data format the buffer will capture
     //iNotifySize = 1024;  // fixed notification size (alternative)
     iNotifySize = mWavFormat.AverageBytesPerSecond / iNotifyNum;  // one second of data divided by the notification count keeps each notification under 0.2 s; voice latency below 200 ms counts as good quality
            iBufferSize = iNotifyNum * iNotifySize;
            bufferdescription.BufferBytes = iBufferSize;

            bufferdescription.ControlEffects = true;
            bufferdescription.WaveMapped = true;

     capturebuffer = new CaptureBuffer(bufferdescription, capture);  // create the capture buffer on the device
        }