/// <summary>
/// Capture loop: records audio into a looping DirectSound buffer, reads it half a
/// buffer at a time as notification events fire, G.711 A-law encodes each half and
/// raises OnBufferFulfill with the compressed bytes. The while(true) loop never
/// exits on its own (StopLoop only suppresses delivery); the loop ends only via an
/// exception, e.g. when the capture thread is aborted.
/// </summary>
public void StartCapturing()
{
    try
    {
        // Set buffer size, voice recording format & input voice device.
        captureBuffer = new CaptureBuffer(captureBufferDescription, capture);
        SetBufferEvents(); // set the event positions signalled while recording
        int halfBuffer = bufferSize / 2; // the buffer is processed in two halves
        captureBuffer.Start(true); // start capturing (true => looping)
        bool readFirstBufferPart = true; // tracks which half has just been filled
        int offset = 0; // read position within the capture buffer
        MemoryStream memStream = new MemoryStream(halfBuffer); // scratch for one half-buffer of PCM
        while (true) // runs until the capture thread is stopped externally
        {
            // WaitOne() blocks the current thread until DirectSound signals that
            // a half-buffer has been filled.
            autoResetEvent.WaitOne();
            memStream.Seek(0, SeekOrigin.Begin); // rewind the scratch stream
            captureBuffer.Read(offset, memStream, halfBuffer, LockFlag.None); // copy captured PCM
            // Alternate between the two halves of the circular buffer.
            readFirstBufferPart = !readFirstBufferPart;
            offset = readFirstBufferPart ? 0 : halfBuffer; // 0 for first half, halfBuffer for second
            // G.711 A-law encoding compresses the PCM to less than 50% of its size.
            byte[] dataToWrite = ALawEncoder.ALawEncode(memStream.GetBuffer());
            if (!StopLoop)
            {
                OnBufferFulfill(dataToWrite, null);
            }
        }
    }
    catch {} // NOTE(review): swallows every error, including real capture failures — consider logging
}
/// <summary>
/// (Re)creates the DirectSound capture buffer: disposes any previous notify/buffer
/// objects, derives a block-aligned notification size (at least 1024 bytes), then
/// allocates a circular buffer of cNotifyNum notification slots.
/// </summary>
private void CreateCaptureBuffer()
{
    // Description object for the capture buffer.
    CaptureBufferDescription bufferdescription = new CaptureBufferDescription();
    if (null != mNotify)
    {
        mNotify.Dispose();
        mNotify = null;
    }
    if (null != mRecBuffer)
    {
        mRecBuffer.Dispose();
        mRecBuffer = null;
    }
    // Notification size: 1/8 second of audio, but never less than 1024 bytes.
    // (NOTE(review): the original comment claimed "default 1 s", but /8 is 125 ms.)
    mNotifySize = (1024 > mWavFormat.AverageBytesPerSecond / 8) ? 1024 : (mWavFormat.AverageBytesPerSecond / 8);
    mNotifySize -= mNotifySize % mWavFormat.BlockAlign; // round down to a whole sample block
    // Total buffer size: one notification-sized slot per notification.
    mBufferSize = mNotifySize * cNotifyNum;
    // Fill in the buffer description.
    bufferdescription.BufferBytes = mBufferSize;
    bufferdescription.Format = mWavFormat; // recording format
    // Create the capture buffer on the capture device.
    mRecBuffer = new CaptureBuffer(bufferdescription, mCapDev);
    mNextCaptureOffset = 0; // start reading captured data from the beginning
}
/// <summary>
/// Appends the current input character to the capture buffer, if one is available
/// (i.e. Current is not the -1 end-of-input sentinel).
/// </summary>
public void CaptureCurrent()
{
    if (Current > -1)
    {
        CaptureBuffer.Append((char)Current);
    }
}
// this is to stop the recording
// desc: this will first check the condition and stops the recording and then capture any left overs recorded data which is not saved
/// <summary>
/// Stops an active recording: raises StateChanged (old state -> Idle), wakes the
/// notification thread so it can flush any remaining captured data, tears down the
/// capture objects, and deletes the output file when it contains only a 44-byte
/// WAV header (i.e. no audio was actually recorded).
/// </summary>
public void StopRecording()
{
    // Notify listeners of the transition from the previous state to Idle.
    events.AudioRecorderEvents.StateChanged e = new events.AudioRecorderEvents.StateChanged(mState);
    mState = AudioRecorderState.Idle;
    StateChanged(this, e);
    if (null != NotificationEvent)
    {
        Capturing = false;
        // Wake the waiting notification thread so it observes Capturing == false
        // and can write out any leftover captured data.
        NotificationEvent.Set();
    }
    if (null != applicationBuffer)
    {
        if (applicationBuffer.Capturing)
        {
            InitRecording(false); // stop the DirectSound capture
        }
    }
    applicationNotify = null;
    applicationBuffer = null;
    // A 44-byte file is just the canonical WAV header with no samples — delete it.
    FileInfo fi = new FileInfo(m_sFileName);
    if (fi.Length == 44)
    {
        File.Delete(m_sFileName);
    }
}
/// <summary>
/// Stops capturing: signals the worker loop via _Running, waits for the capture
/// thread to finish, then releases the capture buffer and closes every
/// notification event handle.
/// </summary>
public void Stop()
{
    _Running = false;

    // Let the capture loop observe _Running == false and exit before teardown.
    if (_CaptureThread != null)
    {
        _CaptureThread.Join();
        _CaptureThread = null;
    }

    if (_CaptureBuffer != null)
    {
        _CaptureBuffer.Dispose();
        _CaptureBuffer = null;
    }

    if (_Notifications != null)
    {
        // Each notification owns an OS event handle that must be closed explicitly.
        for (int i = 0; i < _Notifications.Count; i++)
        {
            _Notifications[i].Event.Close();
        }
        _Notifications.Clear();
        _Notifications = null;
    }
}
// Helper method to test a specific WaveFormat instance.
/// <summary>
/// Verifies that the capture device supports <paramref name="newFormat"/> by
/// attempting to create a temporary capture buffer with it.
/// </summary>
/// <param name="newFormat">The wave format to probe.</param>
/// <exception cref="InvalidOperationException">The capture device has not been initialized.</exception>
/// <exception cref="Exception">The device rejected the format; the inner exception carries the DirectSound error.</exception>
private void VerifyFormat(WaveFormat newFormat)
{
    if (this._captureDevice == null)
    {
        throw new InvalidOperationException("Capture device is null.");
    }
    CaptureBufferDescription capBuffDesc = new CaptureBufferDescription();
    capBuffDesc.BufferBytes = newFormat.AverageBytesPerSecond; // one second of audio
    capBuffDesc.Format = newFormat;
    CaptureBuffer capBuff = null;
    try
    {
        // A successful construction proves the device accepts the format.
        capBuff = new CaptureBuffer(capBuffDesc, this._captureDevice);
    }
    catch (Exception ex)
    {
        string errMsg = string.Format("Sound format not supported: {0} samples/sec, {1} bits/sample, {2} channels.",
            newFormat.SamplesPerSecond, newFormat.BitsPerSample, newFormat.Channels);
        throw new Exception(errMsg, ex);
    }
    finally
    {
        // FIX: release the probe buffer in a finally block so it can never leak,
        // even if code between construction and disposal is extended later.
        if (capBuff != null)
        {
            capBuff.Dispose();
            capBuff = null;
        }
    }
}
/// <summary>
/// Starts capturing 8 kHz / 16-bit / mono PCM audio from the capture device at
/// <paramref name="deviceIndex"/>. Any capture already in progress is stopped and
/// disposed first.
/// </summary>
/// <param name="deviceIndex">Index into the system capture-device collection, or -1 for none.</param>
public void StartRecording(int deviceIndex)
{
    // Tear down any previous capture session before starting a new one.
    if (mCaptureBuffer != null)
    {
        if (mCaptureBuffer.Capturing)
        {
            mCaptureBuffer.Stop();
        }
        mCaptureBuffer.Dispose();
        mCaptureBuffer = null;
    }
    CaptureDevicesCollection audioDevices = new CaptureDevicesCollection();
    // BUG FIX: the original test was 'deviceIndex < audioDevices.Count - 1', which
    // made the last device in the collection unselectable. Valid indices are 0..Count-1.
    if (deviceIndex >= 0 && deviceIndex < audioDevices.Count)
    {
        // initialize the capture buffer and start the animation thread
        Capture capture = new Capture(audioDevices[deviceIndex].DriverGuid);
        CaptureBufferDescription captureBufferDescription = new CaptureBufferDescription();
        // 8 kHz, 16-bit, mono PCM (telephony-quality audio).
        WaveFormat waveFormat = new WaveFormat();
        waveFormat.BitsPerSample = 16;
        waveFormat.SamplesPerSecond = 8000;
        waveFormat.Channels = 1;
        waveFormat.BlockAlign = (short)(waveFormat.Channels * waveFormat.BitsPerSample / 8);
        waveFormat.AverageBytesPerSecond = waveFormat.BlockAlign * waveFormat.SamplesPerSecond;
        waveFormat.FormatTag = WaveFormatTag.Pcm;
        captureBufferDescription.Format = waveFormat;
        // SamplesPerSecond * 120 bytes == 60 seconds of 2-byte samples.
        captureBufferDescription.BufferBytes = waveFormat.SamplesPerSecond * 120;
        mCaptureBuffer = new Microsoft.DirectX.DirectSound.CaptureBuffer(captureBufferDescription, capture);
        mCaptureBuffer.Start(true); // true => loop the buffer continuously
    }
}
/// <summary>
/// Initializes the module-level capture buffer: opens the capture device at index 1
/// of the system collection, configures 16-bit PCM at 44.1 kHz using the device's
/// reported channel count, and starts looping capture. Failures are reported to the
/// console. NOTE(review): audioDevices[1] hard-codes the SECOND device — machines
/// with a single capture device will always land in the catch block; verify intent.
/// </summary>
static void InicialiceCaptureBuffer()
{
    try
    {
        CaptureDevicesCollection audioDevices = new CaptureDevicesCollection();
        // initialize the capture buffer and start the animation thread
        Capture cap = new Capture(audioDevices[1].DriverGuid);
        CaptureBufferDescription desc = new CaptureBufferDescription();
        // 16-bit PCM, 44.1 kHz; channel count taken from the device capabilities.
        WaveFormat wf = new WaveFormat();
        wf.BitsPerSample = 16;
        wf.SamplesPerSecond = 44100;
        wf.Channels = (short)cap.Caps.Channels;
        wf.BlockAlign = (short)(wf.Channels * wf.BitsPerSample / 8);
        wf.AverageBytesPerSecond = wf.BlockAlign * wf.SamplesPerSecond;
        wf.FormatTag = WaveFormatTag.Pcm;
        desc.Format = wf;
        desc.BufferBytes = SAMPLES * wf.BlockAlign; // room for SAMPLES sample frames
        buffer = new CaptureBuffer(desc, cap);
        buffer.Start(true); // true => capture loops continuously
    }
    catch
    {
        // Spanish: "Error starting the sound capturer".
        Console.WriteLine("Error al iniciar el capturador de sonido");
    }
}
/// <summary>
/// Releases every DirectSound object held by this instance (notify, buffer, device),
/// closes the notification event handle, and drops the listener-thread reference.
/// Safe to call when some or all members are already null.
/// </summary>
private void DisposeDirectSound()
{
    if (applicationNotify != null)
    {
        applicationNotify.Dispose();
        applicationNotify = null;
    }
    if (applicationBuffer != null)
    {
        applicationBuffer.Dispose();
        applicationBuffer = null;
    }
    if (applicationDevice != null)
    {
        applicationDevice.Dispose();
        applicationDevice = null;
    }
    if (notificationArrivalEvent != null)
    {
        notificationArrivalEvent.Close();
        notificationArrivalEvent = null;
    }
    // The thread object is not stopped here; we merely drop our reference to it.
    notificationListenerThread = null;
}
/// <summary>
/// Capture loop: fills a looping DirectSound buffer, reads one half at a time when
/// the corresponding notification event fires, A-law encodes it and forwards the
/// result via OnBufferFulfill. The loop only ends when the thread is torn down;
/// _StopLoop merely suppresses delivery of the encoded data.
/// </summary>
public void StartCapturing()
{
    try
    {
        _captureBuffer = new CaptureBuffer(_capBufDescr, _capture);
        SetBufferEvents(); // register the half-buffer notification positions
        int halfBuffer = _bufferSize / 2;
        _captureBuffer.Start(true); // looping capture
        bool readFirstBufferPart = true; // which half to read next
        int offset = 0;
        MemoryStream memStream = new MemoryStream(halfBuffer); // scratch for one half-buffer of PCM
        while (true)
        {
            _eventToReset.WaitOne(); // block until a half-buffer has been filled
            memStream.Seek(0, SeekOrigin.Begin);
            _captureBuffer.Read(offset, memStream, halfBuffer, LockFlag.None);
            // Alternate between the two halves of the circular buffer.
            readFirstBufferPart = !readFirstBufferPart;
            offset = readFirstBufferPart ? 0 : halfBuffer;
            // G.711 A-law compression: one byte per 16-bit PCM sample.
            byte[] dataToWrite = ALawEncoder.ALawEncode(memStream.GetBuffer());
            if (!_StopLoop)
            {
                OnBufferFulfill(dataToWrite, null);
            }
        }
    }
    catch {} // NOTE(review): silently swallows all capture errors — consider logging
}
/// <summary>
/// Capture loop: records from the sound device into a circular capture buffer and
/// streams each half of the buffer over UDP (via udpSend to send_Com) until
/// flagSrarting becomes false. Always stops/disposes the buffer and closes the
/// socket on exit.
/// </summary>
private void Send()
{
    try
    {
        captureBuffer = new CaptureBuffer(captureBufferDescription, cap);
        CreateNotifyPositions(); // events fire when each half of the buffer is full
        int num = checked((int)Math.Round(unchecked((double)bufferSize / 2.0)));
        captureBuffer.Start(true); // looping capture
        bool flag = true;              // true => next read starts at offset 0
        int bufferStartingLocation = 0;
        MemoryStream memoryStream = new MemoryStream(num);
        while (flagSrarting)
        {
            autoResetEvent.WaitOne(); // block until a half-buffer has been filled
            memoryStream.Seek(0L, SeekOrigin.Begin);
            captureBuffer.Read(bufferStartingLocation, memoryStream, num, LockFlag.None);
            // Alternate between the two halves of the capture buffer.
            flag = !flag;
            bufferStartingLocation = ((!flag) ? num : 0);
            byte[] buffer = memoryStream.GetBuffer();
            udpSend.Send(buffer, buffer.Length, send_Com);
        }
    }
    catch (Exception ex)
    {
        // VB.NET-compiler-style error bookkeeping; the exception is deliberately swallowed.
        ProjectData.SetProjectError(ex);
        Exception ex2 = ex;
        ProjectData.ClearProjectError();
    }
    finally
    {
        // BUG FIX: guard against nulls — if the CaptureBuffer constructor threw,
        // the original finally block itself raised a NullReferenceException.
        if (captureBuffer != null)
        {
            captureBuffer.Stop();
            captureBuffer.Dispose();
        }
        if (udpSend != null)
        {
            udpSend.Close();
        }
    }
}
/// <summary>
/// Static capture loop: records into a looping buffer, reads alternating halves as
/// notification events fire, A-law encodes each half and raises OnBufferFulfill
/// unless StopLoop is set. The loop only exits via an exception (e.g. thread abort).
/// </summary>
public static void StartCapturing()
{
    try
    {
        captureBuffer = new CaptureBuffer(captureBufferDescription, capture);
        SetBufferEvents(); // half-buffer notification events
        int halfBuffer = bufferSize / 2;
        captureBuffer.Start(true); // looping capture
        bool readFirstBufferPart = true; // which half to read next
        int offset = 0;
        MemoryStream memStream = new MemoryStream(halfBuffer); // one half-buffer of PCM
        while (true)
        {
            autoResetEvent.WaitOne(); // block until a half-buffer is full
            memStream.Seek(0, SeekOrigin.Begin);
            captureBuffer.Read(offset, memStream, halfBuffer, LockFlag.None);
            readFirstBufferPart = !readFirstBufferPart; // flip halves
            offset = readFirstBufferPart ? 0 : halfBuffer;
            byte[] dataToWrite = ALawEncoder.ALawEncode(memStream.GetBuffer()); // G.711 A-law
            if (!StopLoop)
            {
                OnBufferFulfill(dataToWrite, null);
            }
        }
    }
    catch (Exception e)
    {
        // NOTE(review): exception intentionally ignored; 'e' is unused — consider logging.
    }
}
/// <summary>
/// Stops the capture: clears the running flag, joins the capture thread, then frees
/// the DirectSound buffer and every notification event handle.
/// </summary>
public void Stop()
{
    this.running = false;

    if (this.captureThread != null)
    {
        this.captureThread.Join(); // let the capture loop exit cleanly first
        this.captureThread = null;
    }

    if (this.buffer != null)
    {
        this.buffer.Dispose();
        this.buffer = null;
    }

    if (this.notifications != null)
    {
        foreach (NotificationPosition notification in this.notifications)
        {
            notification.Event.Close(); // release the OS wait handle
        }
        this.notifications.Clear();
        this.notifications = null;
    }
}
/// <summary>
/// Opens the capture device, starts a looping SAMPLES-frame capture buffer and,
/// while the 'start' flag is set (presumably cleared from another thread — TODO
/// confirm), repeatedly reads the first captured sample and writes it, scaled by
/// 1/100, into the out parameter. Returns 0 when the loop ends.
/// NOTE(review): 'response' is overwritten with 0 after the loop, so callers only
/// see live values if they observe the variable concurrently — verify this design.
/// </summary>
public float GetSignFromDevice(out float response)
{
    cap = new Capture(deviceGuid);
    desc = new CaptureBufferDescription();
    // PCM format built from the configured bitsPerSample/sampleRate/channels fields.
    WaveFormat wf = new WaveFormat();
    wf.BitsPerSample = bitsPerSample;
    wf.SamplesPerSecond = sampleRate;
    wf.Channels = channels;
    wf.BlockAlign = (short)(wf.Channels * wf.BitsPerSample / 8);
    wf.AverageBytesPerSecond = wf.BlockAlign * wf.SamplesPerSecond;
    wf.FormatTag = WaveFormatTag.Pcm;
    desc.Format = wf;
    desc.BufferBytes = SAMPLES * wf.BlockAlign; // room for SAMPLES sample frames
    buffer = new Microsoft.DirectX.DirectSound.CaptureBuffer(desc, cap);
    buffer.Start(true); // looping capture
    while (start)
    {
        // Read Int16 samples starting at the device's write cursor.
        Array samples = buffer.Read(0, typeof(Int16), LockFlag.FromWriteCursor, SAMPLE_FORMAT_ARRAY);
        response = ((float)samples.GetValue(0, 0, 0)) / 100;
    }
    response = 0.0f;
    return(0.0f);
}
/// <summary>
/// Builds a PCM WaveFormat from the given parameters and proves the capture device
/// accepts it by creating — and immediately disposing — a capture buffer sized for
/// one second of audio. Rethrows the DirectSound exception if the format is rejected.
/// </summary>
WaveFormat TryNewFormat(int hz, short bits, short channels)
{
    // Bytes per sample frame across all channels.
    short blockAlign = (short)(channels * (bits / 8));

    WaveFormat format = new WaveFormat();
    format.FormatTag = WaveFormatTag.Pcm;
    format.SamplesPerSecond = hz;
    format.BitsPerSample = bits;
    format.Channels = channels;
    format.BlockAlign = blockAlign;
    format.AverageBytesPerSecond = blockAlign * hz;

    CaptureBufferDescription dscheckboxd = new CaptureBufferDescription();
    dscheckboxd.BufferBytes = format.AverageBytesPerSecond; // one second of audio
    dscheckboxd.Format = format;

    CaptureBuffer pDSCaptureBuffer = null;
    try
    {
        // A successful construction means the device supports this format.
        pDSCaptureBuffer = new CaptureBuffer(dscheckboxd, device);
        pDSCaptureBuffer.Dispose();
        return format;
    }
    catch
    {
        // Can't return null, because WaveFormat is a value type.
        throw;
    }
}
/// <summary>
/// Captures the current character under the cursor, if any. The two sentinel
/// positions (EndOfInput, BeforeInput) have no character and are skipped.
/// </summary>
public void Capture()
{
    _CheckDisposed();
    bool atSentinel = (_current == EndOfInput) || (_current == BeforeInput);
    if (!atSentinel)
    {
        CaptureBuffer.Append((char)_current);
    }
}
/// <summary>
/// Streams captured audio to the connected socket: sends a 200 OK and a WAV header,
/// then captures into a 4-chunk (144 KiB each) looping buffer and writes each chunk
/// to the socket when its notification fires. Runs until the socket faults.
/// </summary>
private void StreamSource(ISource aSource)
{
    SendResponse("200 OK");
    iSocket.Send(iWavFileHeader); // pre-built WAV header for the stream
    const int kAudioChunkBytes = 144 * 1024;
    const int kAudioChunks = 4;
    CaptureBuffer capture = CreateCaptureBuffer(aSource, kAudioChunks * kAudioChunkBytes);
    int offset = 0;
    // One notification (and wait handle) at the START of each chunk.
    NotificationPosition[] notifications = new NotificationPosition[kAudioChunks];
    WaitHandle[] handles = new WaitHandle[kAudioChunks];
    for (uint i = 0; i < kAudioChunks; i++)
    {
        NotificationPosition notification = new NotificationPosition();
        notification.Offset = offset;
        notification.Event = new ManualResetEvent(false);
        handles[i] = notification.Event;
        notifications[i] = notification;
        offset += kAudioChunkBytes;
    }
    capture.SetNotificationPositions(notifications);
    // Rotate notifications
    // (the swaps below rotate the handle array left by one slot — presumably so the
    //  event that fires at a chunk's start maps to reading the chunk just completed;
    //  NOTE(review): confirm this pairing against the Read() offsets below.)
    for (uint i = 0; i < kAudioChunks - 1; i++)
    {
        WaitHandle a = handles[i];
        handles[i] = handles[i + 1];
        handles[i + 1] = a;
    }
    byte[] audio = new byte[kAudioChunkBytes];
    capture.Start(true); // looping capture
    try
    {
        while (true)
        {
            int x = WaitHandle.WaitAny(handles); // wait for any chunk boundary event
            ManualResetEvent manual = handles[x] as ManualResetEvent;
            manual.Reset();
            // Copy the chunk associated with the fired event and forward it.
            capture.Read <byte>(audio, 0, kAudioChunkBytes, notifications[x].Offset, false);
            iSocket.Send(audio);
        }
    }
    catch (SocketException)
    {
        // Client disconnected — fall through to stop the capture.
    }
    capture.Stop();
}
/// <summary>
/// Gets all or a subset of the current capture buffer
/// </summary>
/// <param name="startIndex">The start index</param>
/// <param name="length">The number of characters to retrieve, or zero to retrieve the remainder of the buffer</param>
/// <returns>A string containing the specified subset of the capture buffer</returns>
public string GetCapture(int startIndex = 0, int length = 0)
{
    _CheckDisposed();
    // A length of 0 is a sentinel meaning "everything from startIndex to the end".
    int effectiveLength = (length == 0) ? CaptureBuffer.Length - startIndex : length;
    return CaptureBuffer.ToString(startIndex, effectiveLength);
}
/// <summary>
/// Gets the capture buffer at the specified start index
/// </summary>
/// <param name="startIndex">The index to begin copying</param>
/// <param name="count">The number of characters to copy</param>
/// <returns>A string representing the specified subset of the capture buffer</returns>
public string GetCapture(int startIndex, int count = 0)
{
    _CheckDisposed();
    if (count == 0)
    {
        // Zero is a sentinel: copy from startIndex through the end of the buffer.
        count = CaptureBuffer.Length - startIndex;
    }
    return CaptureBuffer.ToString(startIndex, count);
}
/// <summary>
/// Starts DirectSound capture: opens the device on demand, builds a 16-bit PCM
/// format from SampleRateKhz/_Channels, allocates a looping buffer split into two
/// notification portions, and launches the background capture thread.
/// Throws InvalidOperationException when already running.
/// </summary>
public void Start()
{
    if (_Running)
    {
        throw new InvalidOperationException();
    }
    if (_CaptureDevice == null)
    {
        _CaptureDevice = new DirectSoundCapture(new Guid(_Guid));
    }
    _WaveFormat.FormatTag = WaveFormatTag.Pcm; // Change to WaveFormatTag.IeeeFloat for float
    _WaveFormat.BitsPerSample = 16; // Set this to 32 for float
    _WaveFormat.BlockAlignment = (short)(_Channels * (_WaveFormat.BitsPerSample / 8));
    _WaveFormat.Channels = _Channels;
    _WaveFormat.SamplesPerSecond = (int)(SampleRateKhz * 1000D); // kHz -> Hz
    _WaveFormat.AverageBytesPerSecond = _WaveFormat.SamplesPerSecond * _WaveFormat.BlockAlignment;
    _BufferPortionCount = 2; // double buffering: fill one portion while reading the other
    _BufferDescription.BufferBytes = _BufferSize * sizeof(short) * _BufferPortionCount * _Channels;
    _BufferDescription.Format = _WaveFormat;
    _BufferDescription.WaveMapped = false;
    _CaptureBuffer = new CaptureBuffer(_CaptureDevice, _BufferDescription);
    _BufferPortionSize = _CaptureBuffer.SizeInBytes / _BufferPortionCount;
    _Notifications = new List <NotificationPosition>();
    for (int i = 0; i < _BufferPortionCount; i++)
    {
        // NOTE(review): the offset formula adds _BufferPortionCount - 1, where
        // _BufferPortionSize - 1 (end of each portion) may have been intended — verify.
        var notification = new NotificationPosition
        {
            Offset = _BufferPortionCount - 1 + (_BufferPortionSize * i),
            Event = new AutoResetEvent(false)
        };
        _Notifications.Add(notification);
    }
    _CaptureBuffer.SetNotificationPositions(_Notifications.ToArray());
    // Flatten the events into an array for WaitHandle.WaitAny in the capture loop.
    _WaitHandles = new WaitHandle[_Notifications.Count];
    for (int i = 0; i < _Notifications.Count; i++)
    {
        _WaitHandles[i] = _Notifications[i].Event;
    }
    _CaptureThread = new Thread(_DoCapture)
    {
        Name = "DirectSoundCapture",
        IsBackground = true
    };
    _Running = true;
    _CaptureThread.Start();
}
/// <summary>
/// Creates the DirectSound capture buffer sized to hold BUFFER_MS milliseconds of
/// audio in the recording format, and resets the cycle counter.
/// </summary>
private void InitializeBuffer()
{
    CaptureBufferDescription bd = new CaptureBufferDescription();
    bd.Format = getWaveFormat();
    // Convert the desired duration into a byte count for the current format.
    TimeSpan duration = new TimeSpan(BUFFER_MS * TimeSpan.TicksPerMillisecond);
    mBufferLength = (int)getPositionEquivalent(duration);
    bd.BufferBytes = mBufferLength;
    mBuffer = new CaptureBuffer(bd, mRecordCapture);
    mCycleCount = 0;
}
/// <summary>
/// Creates the capture buffer. Creating one requires two things: a buffer
/// description (format, size, ...) and the capture device to record from.
/// </summary>
public void CreateCaptureBuffer()
{
    CaptureBufferDescription bufferdescription = new CaptureBufferDescription();
    bufferdescription.Format = mWavFormat; // the data format the buffer will capture
    iNotifySize = 1024; // bytes between two notification events
    iBufferSize = iNotifyNum * iNotifySize; // total size: one slot per notification
    bufferdescription.BufferBytes = iBufferSize;
    capturebuffer = new CaptureBuffer(bufferdescription, capture); // create the device buffer object
}
/// <summary>
/// Closes the current instance and releases any resources being held
/// </summary>
public void Close()
{
    // Already disposed? Then this call is a no-op.
    if (_current == Disposed)
    {
        return;
    }
    _current = Disposed;
    GC.SuppressFinalize(this);
    CloseInner();
    CaptureBuffer.Clear();
}
/*
 * Send synchronously sends data captured from microphone across the network on port 1550.
 */
/// <summary>
/// Capture-and-send loop: records microphone audio into a looping buffer and sends
/// each half-buffer of raw PCM over UDP to 169.254.133.4:1550 until bStop is set.
/// Errors are shown in a message box; cleanup always stops the capture and, once the
/// receive loop has also finished, closes the shared UDP socket.
/// </summary>
private void Send()
{
    try
    {
        //The following lines get audio from microphone and then send them
        //across network.
        captureBuffer = new CaptureBuffer(captureBufferDescription, capture);
        CreateNotifyPositions(); // events fire at each half-buffer boundary
        int halfBuffer = bufferSize / 2;
        captureBuffer.Start(true); // looping capture
        bool readFirstBufferPart = true;
        int offset = 0;
        MemoryStream memStream = new MemoryStream(halfBuffer);
        bStop = false;
        while (!bStop)
        {
            autoResetEvent.WaitOne(); // wait for a half-buffer to fill
            memStream.Seek(0, SeekOrigin.Begin);
            captureBuffer.Read(offset, memStream, halfBuffer, LockFlag.None);
            // Alternate between the two halves of the circular buffer.
            readFirstBufferPart = !readFirstBufferPart;
            offset = readFirstBufferPart ? 0 : halfBuffer;
            byte[] dataToWrite = memStream.GetBuffer();
            udpClient.Send(dataToWrite, dataToWrite.Length, "169.254.133.4", 1550);
        }
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.Message, "VoiceChat-Send ()", MessageBoxButtons.OK, MessageBoxIcon.Error);
    }
    finally
    {
        // BUG FIX: if the CaptureBuffer constructor threw, captureBuffer may be null;
        // the original finally block then crashed with a NullReferenceException.
        if (captureBuffer != null)
        {
            captureBuffer.Stop();
        }
        //Increment flag by one.
        nUdpClientFlag += 1;
        //When flag is two then it means we have got out of loops in Send and Receive.
        // NOTE(review): this is a busy-wait spin; consider a wait handle to avoid burning CPU.
        while (nUdpClientFlag != 2)
        {
        }
        //Clear the flag.
        nUdpClientFlag = 0;
        //Close the socket.
        udpClient.Close();
    }
}
/*
 * Send synchronously sends data captured from microphone across the network on port 1550.
 */
/// <summary>
/// Capture-and-send loop: records microphone audio into a looping buffer, A-law
/// encodes each half-buffer and sends it over UDP to the other party on port 6068
/// until bStop is set. (NOTE(review): the banner comment says port 1550, but the
/// code sends to 6068 — confirm which is current.)
/// </summary>
private void UDP_Send()
{
    try
    {
        //The following lines get audio from microphone and then send them
        //across network.
        captureBuffer = new CaptureBuffer(captureBufferDescription, capture);
        UDP_CreateNotifyPositions(); // events fire at each half-buffer boundary
        int halfBuffer = bufferSize / 2;
        captureBuffer.Start(true); // looping capture
        bool readFirstBufferPart = true;
        int offset = 0;
        MemoryStream memStream = new MemoryStream(halfBuffer);
        bStop = false;
        while (!bStop)
        {
            autoResetEvent.WaitOne(); // wait for a half-buffer to fill
            memStream.Seek(0, SeekOrigin.Begin);
            captureBuffer.Read(offset, memStream, halfBuffer, LockFlag.None);
            readFirstBufferPart = !readFirstBufferPart; // alternate halves
            offset = readFirstBufferPart ? 0 : halfBuffer;
            //TODO: Fix this ugly way of initializing differently.
            //Choose the vocoder. And then send the data to other party at port 1550.
            byte[] dataToWrite = ALawEncoder.ALawEncode(memStream.GetBuffer());
            udpClient.Send(dataToWrite, dataToWrite.Length, otherPartyIP.Address.ToString(), 6068);
        }
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.Message, "VoiceChat-Send ()", MessageBoxButtons.OK, MessageBoxIcon.Error);
    }
    finally
    {
        // BUG FIX: guard against a null captureBuffer (the constructor may have
        // thrown); the original finally block crashed with NullReferenceException.
        if (captureBuffer != null)
        {
            captureBuffer.Stop();
        }
        //Increment flag by one.
        nUdpClientFlag += 1;
        //When flag is two then it means we have got out of loops in Send and Receive.
        // NOTE(review): busy-wait spin; consider a wait handle instead.
        while (nUdpClientFlag != 2)
        {
        }
        //Clear the flag.
        nUdpClientFlag = 0;
        //Close the socket.
        udpClient.Close();
    }
}
/// <summary>
/// Starts audio capture: opens the device on demand, configures a 16-bit PCM format,
/// allocates a looping buffer split into two notification portions, and spins up the
/// background capture thread. Throws InvalidOperationException when already running.
/// </summary>
public void Start()
{
    if (this.running)
    {
        throw new InvalidOperationException();
    }

    // Open the capture device lazily, identified by 'guid'.
    if (this.captureDevice == null)
    {
        this.captureDevice = new DirectSoundCapture(guid);
    }

    // 16-bit integer PCM; sample rate comes from SampleRateKHz (kHz -> Hz).
    this.waveFormat.FormatTag = WaveFormatTag.Pcm; // Change to WaveFormatTag.IeeeFloat for float
    this.waveFormat.BitsPerSample = 16; // Set this to 32 for float
    this.waveFormat.BlockAlignment = (short)(channels * (waveFormat.BitsPerSample / 8));
    this.waveFormat.Channels = this.channels;
    this.waveFormat.SamplesPerSecond = (int)(this.SampleRateKHz * 1000D);
    this.waveFormat.AverageBytesPerSecond = this.waveFormat.SamplesPerSecond * this.waveFormat.BlockAlignment;

    // Two portions => classic double buffering.
    this.bufferPortionCount = 2;
    this.bufferDescription.BufferBytes = this.bufferSize * sizeof(short) * bufferPortionCount * this.channels;
    this.bufferDescription.Format = this.waveFormat;
    this.bufferDescription.WaveMapped = false;

    this.buffer = new CaptureBuffer(this.captureDevice, this.bufferDescription);
    this.bufferPortionSize = this.buffer.SizeInBytes / this.bufferPortionCount;

    // One auto-reset event per portion, registered with DirectSound.
    this.notifications = new List <NotificationPosition>();
    for (int portion = 0; portion < this.bufferPortionCount; portion++)
    {
        NotificationPosition position = new NotificationPosition();
        position.Offset = this.bufferPortionCount - 1 + (bufferPortionSize * portion);
        position.Event = new AutoResetEvent(false);
        this.notifications.Add(position);
    }
    this.buffer.SetNotificationPositions(this.notifications.ToArray());

    // Mirror the events into an array usable with WaitHandle.WaitAny.
    this.waitHandles = new WaitHandle[this.notifications.Count];
    for (int i = 0; i < this.notifications.Count; i++)
    {
        this.waitHandles[i] = this.notifications[i].Event;
    }

    this.captureThread = new Thread(this.CaptureThread);
    this.captureThread.IsBackground = true;
    this.running = true;
    this.captureThread.Start();
}
/// <summary>
/// Attempts to read a floating point literal into the capture buffer while parsing it
/// </summary>
/// <param name="result">The value the literal represents</param>
/// <returns>True if the value was a valid literal, otherwise false</returns>
public bool TryParseReal(out double result)
{
    result = default(double);
    // Remember where the capture buffer ended so we can parse only what
    // TryReadReal() appends.
    int l = CaptureBuffer.Length;
    if (!TryReadReal())
    {
        return false;
    }
    // FIX: parse with the invariant culture so a literal like "1.5" is read the
    // same way regardless of the thread's regional settings (some cultures use a
    // comma as the decimal separator, which would make the default overload fail).
    return double.TryParse(
        CaptureBuffer.ToString(l, CaptureBuffer.Length - l),
        System.Globalization.NumberStyles.Float,
        System.Globalization.CultureInfo.InvariantCulture,
        out result);
}
/// <summary>
/// Captures the current character if available
/// </summary>
public void CaptureCurrent()
{
    _CheckDisposed();
    int current = Current;
    if (current == -2)
    {
        // -2 is the "not started" sentinel.
        throw new InvalidOperationException("The parse context has not been started.");
    }
    // -1 means end of input: nothing to capture.
    if (current != -1)
    {
        CaptureBuffer.Append((char)current);
    }
}
/// <summary>
/// Stops any in-progress capture and releases the shared capture buffer.
/// Safe to call when no buffer exists.
/// </summary>
static void DisposeCaptureBuffer()
{
    if (buffer == null)
    {
        return;
    }
    if (buffer.Capturing)
    {
        buffer.Stop(); // a live capture must be stopped before disposal
    }
    buffer.Dispose();
    buffer = null;
}
/// <summary>
/// Creates the DirectSound capture buffer. A capture buffer needs two inputs: a
/// description (format, size, ...) and the capture device to record from.
/// </summary>
private void CreateCaptureBuffer()
{
    // Bytes between two notification events.
    iNotifySize = 1024;
    // Total circular-buffer size: one notification-sized slot per notification.
    iBufferSize = iNotifyNum * iNotifySize;

    CaptureBufferDescription description = new CaptureBufferDescription();
    description.BufferBytes = iBufferSize;
    description.Format = mWavFormat; // record in the configured wave format

    capturebuffer = new CaptureBuffer(description, capture);
}
// bufferSize is a SAMPLE COUNT
// NOTE: we always capture 16 bits/sample
/// <summary>
/// Sets up a 16-bit mono PCM capture buffer of bufferSize samples on the given
/// device and a WinForms timer that polls it every timerInterval milliseconds.
/// </summary>
public WaveCapture(Guid deviceGuid, int Fs, int bufferSize, int timerInterval)
{
    // 16-bit mono PCM at Fs samples/sec => 2 bytes per sample frame.
    WaveFormat fmt = new WaveFormat();
    fmt.FormatTag = SlimDX.WaveFormatTag.Pcm;
    fmt.SamplesPerSecond = Fs;
    fmt.Channels = 1;
    fmt.BitsPerSample = 16;
    fmt.BlockAlignment = 2;
    fmt.AverageBytesPerSecond = Fs * 2;

    CaptureBufferDescription desc = new CaptureBufferDescription();
    desc.BufferBytes = bufferSize * 2; // 2 bytes per 16-bit sample
    desc.ControlEffects = false;
    desc.WaveMapped = true;
    desc.Format = fmt;

    buf = new byte[bufferSize * 2]; // scratch buffer for one read

    capture = new DirectSoundCapture(deviceGuid);
    captureBuffer = new CaptureBuffer(capture, desc);

    // Poll the capture buffer from the UI thread via a WinForms timer.
    timer = new Timer();
    timer.Interval = timerInterval;
    timer.Tick += new EventHandler(timer_Tick);
}
/// <summary>
/// Form load: configures a mono 16-bit 44.1 kHz PCM format, allocates a 0.1-second
/// capture buffer on the first capture device, starts looping capture and enables
/// the polling timer. Shows an error and closes the form when the device/format is
/// unavailable.
/// </summary>
private void Form1_Load(object sender, EventArgs e)
{
    waveFormat.Channels = 1; // mono
    waveFormat.FormatTag = WaveFormatTag.Pcm; // PCM
    waveFormat.BitsPerSample = 16; // 16-bit
    waveFormat.SamplesPerSecond = 44100; // 44.1 kHz
    waveFormat.BlockAlign = (short)(waveFormat.Channels * (waveFormat.BitsPerSample / (short)8)); // bytes per sample frame
    waveFormat.AverageBytesPerSecond = waveFormat.BlockAlign * waveFormat.SamplesPerSecond; // bytes per second
    // Reserve a buffer holding 0.1 seconds of audio.
    captureBufferDescription.BufferBytes = waveFormat.AverageBytesPerSecond / 10;
    captureBufferDescription.Format = waveFormat;
    // Prepare the recording device (first device in the collection).
    DeviceInformation deviceInfo = captureDevice[0];
    capture = new Capture(deviceInfo.DriverGuid);
    // Create the capture buffer from the format description and the device.
    try
    {
        captureBuffer = new CaptureBuffer(captureBufferDescription, capture);
    }
    catch
    {
        // Japanese: "No recording device, or the recording format is not supported."
        MessageBox.Show("録音デバイスが無いか、録音フォーマットをサポートしていません。");
        Close();
        return;
    }
    if (null == capture)
        throw new NullReferenceException();
    captureBuffer.Start(true); // looping capture
    timer1.Enabled = true; // start polling the buffer
}
/// <summary>
/// Creates the capture buffer.
/// </summary>
/// <remarks>
/// Creating a capture buffer requires two things: a buffer description (format,
/// size, ...) and the capture device. The notification size is chosen so each
/// notification covers less than 0.2 s of audio — keeping voice latency under
/// 200 ms, which is considered good call quality.
/// </remarks>
private void CreateCaptureBuffer()
{
    CaptureBufferDescription cbdBufDescription = new CaptureBufferDescription();
    cbdBufDescription.Format = m_wavFormat; // the data format the buffer captures
    // One second of data divided by the notification count gives each notification
    // less than 0.2 s of audio.
    m_nNotifySize = m_wavFormat.AverageBytesPerSecond / m_nNotifyNum;
    m_nBufSize = m_nNotifyNum * m_nNotifySize;
    cbdBufDescription.BufferBytes = m_nBufSize;
    m_objCaptureBuf = new CaptureBuffer(cbdBufDescription, m_objCapture);
}
/// <summary>
/// Record-and-send loop: opens the configured capture device (falling back to the
/// first/default device), captures into a looping buffer and sends each half-buffer
/// over RTP — A-law compressed when compression is enabled — until bStop is set.
/// Capture errors are surfaced through the OnCaptureError event.
/// </summary>
private void StartRecordAndSend()
{
    try
    {
        Capture capture = null;
        CaptureDevicesCollection captureDeviceCollection = new CaptureDevicesCollection();
        try
        {
            capture = new Capture(captureDeviceCollection[ConfSingleton.Instance.CaptureDeviceIndex].DriverGuid);
        }
        catch
        {
            // Fall back to the first (default) capture device.
            capture = new Capture(captureDeviceCollection[0].DriverGuid);
        }
        captureBuffer = new CaptureBuffer(captureBufferDescription, capture);
        SetBufferEvents(); // half-buffer notification events
        int halfBuffer = bufferSize / 2;
        captureBuffer.Start(true); // looping capture
        bool readFirstBufferPart = true;
        int offset = 0;
        MemoryStream memStream = new MemoryStream(halfBuffer);
        bStop = false;
        while (!bStop)
        {
            autoResetEvent.WaitOne(); // wait for a half-buffer to fill
            memStream.Seek(0, SeekOrigin.Begin);
            captureBuffer.Read(offset, memStream, halfBuffer, LockFlag.None);
            readFirstBufferPart = !readFirstBufferPart; // alternate halves
            offset = readFirstBufferPart ? 0 : halfBuffer;
            // Optionally G.711 A-law compress before handing off to the RTP sender.
            rtpSender.Send(ConfSingleton.Instance.Compression ? ALawEncoder.ALawEncode(memStream.GetBuffer()) : memStream.GetBuffer());
        }
    }
    catch (ThreadAbortException)
    {
        /* This is OK. It's raised when the record thread is stopped. */
    }
    /* Catch DirectSound's uninformative exceptions and attempt to expand on them... */
    catch (Exception ex)
    {
        if (OnCaptureError != null)
        {
            AudioCaptureException captureException = new AudioCaptureException("There was a problem in the audio capture process. This is often due to no working capture device being available.", ex);
            OnCaptureError(this, new AudioCaptureExceptionEventArgs() { Exception = captureException });
        }
    }
    finally
    {
        // Best-effort shutdown: stop the capture and make sure the loop flag is set.
        try
        {
            if (captureBuffer != null)
                captureBuffer.Stop();
            bStop = true;
        }
        catch { }
    }
}
/// <summary>
/// Begins capturing: opens the device if needed, configures 16-bit PCM, creates a
/// two-portion looping capture buffer with per-portion notification events, then
/// starts the background capture thread. Throws InvalidOperationException when a
/// capture is already running.
/// </summary>
public void Start()
{
    if (this.running)
    {
        throw new InvalidOperationException();
    }

    if (this.captureDevice == null)
    {
        this.captureDevice = new DirectSoundCapture(guid);
    }

    // 16-bit integer PCM at SampleRateKHz kilohertz.
    this.waveFormat.FormatTag = WaveFormatTag.Pcm; // Change to WaveFormatTag.IeeeFloat for float
    this.waveFormat.BitsPerSample = 16; // Set this to 32 for float
    this.waveFormat.BlockAlignment = (short)(channels * (waveFormat.BitsPerSample / 8));
    this.waveFormat.Channels = this.channels;
    this.waveFormat.SamplesPerSecond = (int)(this.SampleRateKHz * 1000D);
    this.waveFormat.AverageBytesPerSecond = this.waveFormat.SamplesPerSecond * this.waveFormat.BlockAlignment;

    // Two portions => double buffering.
    this.bufferPortionCount = 2;
    this.bufferDescription.BufferBytes = this.bufferSize * sizeof(short) * bufferPortionCount * this.channels;
    this.bufferDescription.Format = this.waveFormat;
    this.bufferDescription.WaveMapped = false;

    this.buffer = new CaptureBuffer(this.captureDevice, this.bufferDescription);
    this.bufferPortionSize = this.buffer.SizeInBytes / this.bufferPortionCount;

    // Register one auto-reset event per buffer portion.
    this.notifications = new List<NotificationPosition>();
    for (int portion = 0; portion < this.bufferPortionCount; portion++)
    {
        this.notifications.Add(new NotificationPosition
        {
            Offset = this.bufferPortionCount - 1 + (bufferPortionSize * portion),
            Event = new AutoResetEvent(false)
        });
    }
    this.buffer.SetNotificationPositions(this.notifications.ToArray());

    // Expose the events as an array for WaitHandle.WaitAny in the capture loop.
    this.waitHandles = new WaitHandle[this.notifications.Count];
    for (int i = 0; i < this.notifications.Count; i++)
    {
        this.waitHandles[i] = this.notifications[i].Event;
    }

    this.captureThread = new Thread(this.CaptureThread)
    {
        IsBackground = true
    };
    this.running = true;
    this.captureThread.Start();
}
/// <summary>
/// Records sound data from the given audio input.
/// </summary>
///
/// <remarks>
/// Note that this method will block forever. Threading will be required
/// to get the data back. (NOTE(review): the capture itself runs on a new thread and
/// this method returns after starting it — the remark appears to describe the
/// recording session rather than this call; confirm.)
/// </remarks>
///
/// <param name="capture">The input to record from.</param>
/// <returns>The audio data recorded from the input.</returns>
public void Record(Capture capture)
{
    if (Recording)
    {
        throw new Exception("Already recording.");
    }
    // 96 kHz, 16-bit, mono PCM.
    WaveFormat format = (WaveFormat) GetAmibiguousType(typeof(WaveFormat));
    format.SamplesPerSecond = 96000;
    format.BitsPerSample = 16;
    format.Channels = 1;
    format.FormatTag = WaveFormatTag.Pcm;
    format.BlockAlign = (Int16) (format.Channels * (format.BitsPerSample / 8));
    format.AverageBytesPerSecond = format.SamplesPerSecond * format.BlockAlign;
    // Notification granularity: at least 4096 bytes (~1/16 s), rounded down to a block.
    int notifySize = Math.Max(4096, format.AverageBytesPerSecond / 16);
    notifySize -= notifySize % format.BlockAlign;
    // This is a fairly arbitrary choice.
    int inputSize = notifySize * 16;
    // Output is half of input, as every two bytes is a piece of sound data.
    int outputSize = inputSize / 2;
    CaptureBufferDescription description = (CaptureBufferDescription) GetAmibiguousType(typeof(CaptureBufferDescription));
    description.Format = format;
    description.BufferBytes = inputSize;
    CaptureBuffer buffer;
    try
    {
        buffer = new CaptureBuffer(description, capture);
    }
    catch
    {
        throw new IOException(
            "An error occurred attempting to set up a read buffer.");
    }
    // One shared event, signalled at the buffer's midpoint and at its end.
    AutoResetEvent reset = new AutoResetEvent(false);
    Notify notify = new Notify(buffer);
    BufferPositionNotify bpn1 = (BufferPositionNotify) GetAmibiguousType(typeof(BufferPositionNotify));
    bpn1.Offset = buffer.Caps.BufferBytes / 2 - 1;
    bpn1.EventNotifyHandle = reset.SafeWaitHandle.DangerousGetHandle();
    BufferPositionNotify bpn2 = (BufferPositionNotify) GetAmibiguousType(typeof(BufferPositionNotify));
    bpn2.Offset = buffer.Caps.BufferBytes - 1;
    bpn2.EventNotifyHandle = reset.SafeWaitHandle.DangerousGetHandle();
    notify.SetNotificationPositions(new BufferPositionNotify[] { bpn1, bpn2 });
    int offset = 0;
    Data = new List<Int16>();
    Recording = true;
    // Background thread: read each half-buffer as it completes and repack the byte
    // pairs into Int16 samples appended to Data.
    new Thread((ThreadStart) delegate
    {
        buffer.Start(true);
        while (Recording)
        {
            // Let the buffer fill up from the last read.
            reset.WaitOne();
            byte[] read;
            try
            {
                read = (byte[]) buffer.Read(offset, typeof(byte), LockFlag.None, outputSize);
            }
            catch
            {
                throw new IOException(
                    "An error occurred attempting to read the input data.");
            }
            // Advance the read cursor, wrapping around the circular buffer.
            offset = (offset + outputSize) % inputSize;
            // Combine byte pairs: first byte is the low half, second the high half
            // (little-endian 16-bit samples).
            bool written = false;
            Int16 old = 0;
            foreach (byte b in read)
            {
                if (!written)
                {
                    old = (Int16) b;
                }
                else
                {
                    old = (Int16) (old | (((Int16) (b << 8))));
                    Data.Add(old);
                }
                written = !written;
            }
        }
        buffer.Stop();
    }).Start();
}
private void CreateCaptureBuffer() { //想要创建一个捕捉缓冲区必须要两个参数:缓冲区信息(描述这个缓冲区中的格式等),缓冲设备。 CaptureBufferDescription bufferdescription = new CaptureBufferDescription(); bufferdescription.Format = mWavFormat;//设置缓冲区要捕捉的数据格式 iNotifySize = 1024;//设置通知大小 iBufferSize = iNotifyNum * iNotifySize; bufferdescription.BufferBytes = iBufferSize; capturebuffer = new CaptureBuffer(bufferdescription, capture);//建立设备缓冲区对象 }
/// <summary>
/// Records sound data from the given audio input on a background thread,
/// appending 16-bit samples to <c>SData</c> until <c>recording</c> is cleared.
/// </summary>
/// <remarks>
/// The capture itself runs on a worker thread; this method returns as soon
/// as the thread has been started.
/// </remarks>
/// <param name="cap">The capture device to record from.</param>
/// <returns>True when capture was set up and the thread started; false when
/// already recording or no capture device was supplied.</returns>
public bool Record(Capture cap)
{
    if (recording)
    {
        return false; // a capture session is already running
    }

    // Wave format: 16-bit mono PCM at 96 kHz. BlockAlign and
    // AverageBytesPerSecond are computed for all channels.
    ConstructorInfo nom = typeof(WaveFormat).GetConstructor(Type.EmptyTypes);
    format = (WaveFormat)nom.Invoke(null);
    format.SamplesPerSecond = 96000;
    format.BitsPerSample = 16;
    format.Channels = 1;
    format.FormatTag = WaveFormatTag.Pcm;
    SData = new List<Int16>();
    // BlockAlign = bytes per sample across all channels.
    format.BlockAlign = (short)(format.Channels * (format.BitsPerSample / 8));
    format.AverageBytesPerSecond = format.SamplesPerSecond * format.BlockAlign;

    // Buffer sizing: _dwNotifySize holds roughly 1/16th second of audio
    // (minimum 4096 bytes), rounded down to a multiple of BlockAlign.
    int NUM_BUFFERS = 8;
    int _dwNotifySize = Math.Max(4096, format.AverageBytesPerSecond / (8 * 2));
    _dwNotifySize -= _dwNotifySize % format.BlockAlign;

    // The looped capture buffer is twice the output size so capture can keep
    // filling one half while the completed half is being read out.
    int _dwCaptureBufferSize = NUM_BUFFERS * _dwNotifySize * 2;
    int _dwOutputBufferSize = NUM_BUFFERS * _dwNotifySize;

    if (cap == null)
        return false; // no matching sound card/capture device

    // Make the description and create a CaptureBuffer accordingly.
    ConstructorInfo capnom = typeof(CaptureBufferDescription).GetConstructor(Type.EmptyTypes);
    var capDesc = (CaptureBufferDescription)capnom.Invoke(null);
    capDesc.Format = format;
    capDesc.BufferBytes = _dwCaptureBufferSize;
    var _dwCapBuffer = new CaptureBuffer(capDesc, cap);

    // Signal an AutoResetEvent at the halfway point and at the end of the
    // capture buffer, so each signal means one output-buffer's worth of
    // data is ready to read.
    var _resetEvent = new AutoResetEvent(false);
    var _notify = new Notify(_dwCapBuffer);
    ConstructorInfo buffnom = typeof(BufferPositionNotify).GetConstructor(Type.EmptyTypes);
    var bpn1 = (BufferPositionNotify)buffnom.Invoke(null);
    bpn1.Offset = _dwCapBuffer.Caps.BufferBytes / 2 - 1;
    bpn1.EventNotifyHandle = _resetEvent.SafeWaitHandle.DangerousGetHandle();
    var bpn2 = (BufferPositionNotify)buffnom.Invoke(null);
    bpn2.Offset = _dwCapBuffer.Caps.BufferBytes - 1;
    bpn2.EventNotifyHandle = _resetEvent.SafeWaitHandle.DangerousGetHandle();
    _notify.SetNotificationPositions(new BufferPositionNotify[] { bpn1, bpn2 });

    recording = true; // ready to capture sound

    int offset = 0;
    // Worker thread: drains the capture buffer half-by-half while recording.
    Thread _dwCaptureThread = new Thread((ThreadStart)delegate
    {
        _dwCapBuffer.Start(true); // start looped capture
        while (recording)
        {
            _resetEvent.WaitOne(); // blocks until the buffer is half/totally full

            // Read the freshly completed half of the capture buffer.
            byte[] read = (byte[])_dwCapBuffer.Read(offset, typeof(byte), LockFlag.None, _dwOutputBufferSize);

            // BUG FIX: the raw bytes are 16-bit little-endian PCM, so byte
            // pairs must be combined into one sample. The previous code did
            // SData.Add(Int16.Parse(read[i].ToString())) for every byte,
            // storing each raw byte as its own "sample".
            for (int i = 0; i + 1 < read.Length; i += 2)
            {
                SData.Add((Int16)(read[i] | (read[i + 1] << 8)));
            }

            offset = (offset + _dwOutputBufferSize) % _dwCaptureBufferSize;
        }
        _dwCapBuffer.Stop(); // stop capture
    });
    _dwCaptureThread.Start();
    return true;
}
/// <summary>
/// Worker thread: opens the selected capture device, captures PCM audio into
/// a double-buffered DirectSound capture buffer, and raises
/// <c>OnNewFrame</c> for each completed half-buffer until
/// <c>stopEvent</c> is signalled. All resources are released on exit.
/// </summary>
///
private void WorkerThread()
{
    // Get the selected capture device
    DirectSoundCapture captureDevice = new DirectSoundCapture(device);

    // Set the capture format: mono at the configured sample rate; the tag,
    // bit depth and derived fields come from the configured sampleFormat.
    WaveFormat format = new WaveFormat();
    format.Channels = 1;
    format.SamplesPerSecond = sampleRate;
    format.FormatTag = sampleFormat.ToWaveFormat();
    format.BitsPerSample = (short)Signal.GetSampleSize(sampleFormat);
    format.BlockAlignment = (short)(format.BitsPerSample / 8);
    format.AverageBytesPerSecond = format.SamplesPerSecond * format.BlockAlignment;

    // Setup the capture buffer: sized for two frames of desiredCaptureSize
    // samples so one half can fill while the other is read.
    CaptureBufferDescription captureBufferDescription = new CaptureBufferDescription();
    captureBufferDescription.Format = format;
    captureBufferDescription.BufferBytes = 2 * desiredCaptureSize * format.BlockAlignment;
    captureBufferDescription.WaveMapped = true;
    captureBufferDescription.ControlEffects = false;
    CaptureBuffer captureBuffer = null;
    NotificationPosition[] notifications = new NotificationPosition[2];
    try
    {
        captureBuffer = new CaptureBuffer(captureDevice, captureBufferDescription);

        // Setup the notification positions: one event fires when the first
        // half of the buffer is full, the other when the second half is.
        int bufferPortionSize = captureBuffer.SizeInBytes / 2;
        notifications[0] = new NotificationPosition();
        notifications[0].Offset = bufferPortionSize - 1;
        notifications[0].Event = new AutoResetEvent(false);
        notifications[1] = new NotificationPosition();
        notifications[1].Offset = bufferPortionSize - 1 + bufferPortionSize;
        notifications[1].Event = new AutoResetEvent(false);
        captureBuffer.SetNotificationPositions(notifications);

        // Make a copy of the wait handles so WaitAny can be used below.
        WaitHandle[] waitHandles = new WaitHandle[notifications.Length];
        for (int i = 0; i < notifications.Length; i++)
            waitHandles[i] = notifications[i].Event;

        // Start capturing (looping buffer).
        captureBuffer.Start(true);

        // Two identical loops, specialized on the sample type: WaitAny tells
        // us which half just completed, and its index selects the byte
        // offset of that half for the read.
        if (sampleFormat == SampleFormat.Format32BitIeeeFloat)
        {
            float[] currentSample = new float[desiredCaptureSize];
            while (!stopEvent.WaitOne(0, true)) // poll stop flag without blocking
            {
                int bufferPortionIndex = WaitHandle.WaitAny(waitHandles);
                captureBuffer.Read(currentSample, 0, currentSample.Length, bufferPortionSize * bufferPortionIndex);
                OnNewFrame(currentSample);
            }
        }
        else if (sampleFormat == SampleFormat.Format16Bit)
        {
            short[] currentSample = new short[desiredCaptureSize];
            while (!stopEvent.WaitOne(0, true))
            {
                int bufferPortionIndex = WaitHandle.WaitAny(waitHandles);
                captureBuffer.Read(currentSample, 0, currentSample.Length, bufferPortionSize * bufferPortionIndex);
                OnNewFrame(currentSample);
            }
        }
        // NOTE(review): other sampleFormat values fall through silently —
        // presumably filtered out earlier; confirm against the caller.
    }
    catch (Exception ex)
    {
        // Report through the error event when subscribed; otherwise rethrow.
        if (AudioSourceError != null)
            AudioSourceError(this, new AudioSourceErrorEventArgs(ex.Message));
        else
            throw;
    }
    finally
    {
        // Release the buffer, the device and the notification events in all
        // exit paths (normal stop, error event, or rethrow).
        if (captureBuffer != null)
        {
            captureBuffer.Stop();
            captureBuffer.Dispose();
        }
        if (captureDevice != null)
            captureDevice.Dispose();
        for (int i = 0; i < notifications.Length; i++)
            if (notifications[i].Event != null)
                notifications[i].Event.Close();
    }
}
/// <summary>
/// Stops the sound-card codec: ends the notify thread, then stops and
/// releases the capture buffer and the capture/playback devices, and sets
/// the protocol state to OFFLINE.
/// </summary>
/// <param name="strFault">Receives a fault description on failure.</param>
/// <returns>True on a clean shutdown, false when an exception occurred.</returns>
public bool StopCodec(ref string strFault)
{
    bool functionReturnValue = false;
    // Stop the capture
    lock (objCodecLock)
    {
        try
        {
            if (MCB.DebugLog)
                Logs.WriteDebug("[Main.StopCodec] Stop thrNotify with blnSCCapturing = False");
            blnSCCapturing = false; // this should end the wait thread if it is still running
            Thread.Sleep(200);
            if (thrNotify != null && thrNotify.IsAlive)
            {
                if (MCB.DebugLog)
                    Logs.WriteDebug("[Main.StopCodec] Aborting thrNotify");
                // NOTE(review): Thread.Abort is a last resort (and unsupported
                // on modern .NET); the blnSCCapturing flag above is the normal
                // exit path for the thread.
                thrNotify.Abort();
                Thread.Sleep(100);
                thrNotify.Join(3000);
            }
            thrNotify = null;

            // Stop and release the capture buffer and devices.
            if (objCapture != null)
            {
                objCapture.Stop();
                objCapture.Dispose();
            }
            objCapture = null;
            if (devCaptureDevice != null)
                devCaptureDevice.Dispose();
            devCaptureDevice = null;
            if (devSelectedPlaybackDevice != null)
            {
                devSelectedPlaybackDevice.Dispose();
            }
            devSelectedPlaybackDevice = null;
            if (MCB.DebugLog)
                Logs.WriteDebug("[Main.StopCodec] = True");
            functionReturnValue = true;
            objProtocol.ARDOPProtocolState = ProtocolState.OFFLINE;
        }
        catch (Exception ex)
        {
            // BUG FIX: the original used VB-isms that do not compile in C#
            // ("ex.ToString" without parentheses and the VB "Err" object for
            // Err.Number/Err.Description). Use the caught exception instead.
            Logs.Exception("[Main.StopCodec] Err: " + ex.ToString());
            if (MCB.DebugLog)
                Logs.WriteDebug("[Main.StopCodec] = False");
            strFault = ex.Message;
            functionReturnValue = false;
        }
    }
    return functionReturnValue;
}
/// <summary>
/// Starts the sound-card codec: selects the configured playback and capture
/// devices, builds the capture buffer, begins looped capture and moves the
/// protocol state to DISC; on failure schedules a retry via tmrStartCODEC.
/// </summary>
/// <param name="strFault">Receives a fault description on failure.</param>
/// <returns>True if the codec started successfully.</returns>
public bool StartCodec(ref string strFault)
{
    bool functionReturnValue = false; //Returns true if successful
    // This delay is necessary for reliable startup following a StopCodec.
    Thread.Sleep(100);
    lock (objCodecLock)
    {
        dttLastSoundCardSample = Now;
        // Suppress spectrum/waterfall drawing during startup; restored below.
        bool blnSpectrumSave = MCB.DisplaySpectrum;
        bool blnWaterfallSave = MCB.DisplayWaterfall;
        System.DateTime dttStartWait = Now;
        MCB.DisplayWaterfall = false;
        MCB.DisplaySpectrum = false;
        string[] strCaptureDevices = EnumerateCaptureDevices();
        string[] strPlaybackDevices = EnumeratePlaybackDevices();
        functionReturnValue = false;
        int intPtr = 0;
        // Playback device selection
        try
        {
            cllPlaybackDevices = null;
            cllPlaybackDevices = new Microsoft.DirectX.DirectSound.DevicesCollection();
            if (devSelectedPlaybackDevice != null)
            {
                devSelectedPlaybackDevice.Dispose();
                devSelectedPlaybackDevice = null;
            }
            // BUG FIX: the original declared an outer "DeviceInformation objDI"
            // that collided with this loop variable (CS0136) and used VB-style
            // parentheses for array indexing (strPlaybackDevices(intPtr)).
            foreach (DeviceInformation objDI in cllPlaybackDevices)
            {
                DeviceDescription objDD = new DeviceDescription(objDI);
                if (strPlaybackDevices[intPtr] == MCB.PlaybackDevice)
                {
                    if (MCB.DebugLog)
                        Logs.WriteDebug("[Main.StartCodec] Setting SelectedPlaybackDevice = " + MCB.PlaybackDevice);
                    devSelectedPlaybackDevice = new Device(objDD.info.DriverGuid);
                    functionReturnValue = true;
                    break;
                }
                intPtr += 1;
            }
            if (!functionReturnValue)
            {
                strFault = "Playback Device setup, Device " + MCB.PlaybackDevice + " not found in Windows enumerated Playback Devices";
            }
        }
        catch (Exception ex)
        {
            // BUG FIX: replaced VB "Err.Number/Err.Description" and the
            // parenthesis-less "ex.ToString" with valid C#.
            strFault = ex.Message;
            Logs.Exception("[StartCodec], Playback Device setup] Err: " + ex.ToString());
            functionReturnValue = false;
        }
        if (functionReturnValue)
        {
            // Capture device selection and buffer creation
            CaptureBufferDescription dscheckboxd = new CaptureBufferDescription();
            try
            {
                functionReturnValue = false;
                cllCaptureDevices = null;
                cllCaptureDevices = new CaptureDevicesCollection();
                intPtr = 0;
                for (int i = 0; i <= cllCaptureDevices.Count - 1; i++)
                {
                    if (MCB.CaptureDevice == strCaptureDevices[i])
                    {
                        objCaptureDeviceGuid = cllCaptureDevices[i].DriverGuid;
                        devCaptureDevice = new Capture(objCaptureDeviceGuid);
                        // 12000 Hz, 16-bit mono PCM capture format.
                        stcSCFormat.SamplesPerSecond = 12000;
                        stcSCFormat.Channels = 1;
                        stcSCFormat.BitsPerSample = 16;
                        stcSCFormat.BlockAlign = 2;
                        stcSCFormat.AverageBytesPerSecond = 2 * 12000;
                        stcSCFormat.FormatTag = WaveFormatTag.Pcm;
                        objApplicationNotify = null;
                        objCapture = null;
                        // Set the buffer sizes
                        intCaptureBufferSize = intNotifySize * intNumberRecordNotifications;
                        // Create the capture buffer description
                        dscheckboxd.BufferBytes = intCaptureBufferSize;
                        stcSCFormat.FormatTag = WaveFormatTag.Pcm;
                        dscheckboxd.Format = stcSCFormat; // set the format during creation
                        if (objCapture != null)
                        {
                            objCapture.Dispose();
                            objCapture = null;
                        }
                        intNextCaptureOffset = 0;
                        WriteTextToSpectrum("CODEC Start OK", Brushes.LightGreen);
                        // Show the start message for ~3 s before capture begins.
                        while (Now.Subtract(dttStartWait).TotalSeconds < 3)
                        {
                            Application.DoEvents();
                            Thread.Sleep(100);
                        }
                        objCapture = new CaptureBuffer(dscheckboxd, devCaptureDevice);
                        InititializeNotifications();
                        objCapture.Start(true); // start with looping
                        InititializeSpectrum(Color.Black);
                        functionReturnValue = true;
                    }
                }
                if (!functionReturnValue)
                {
                    strFault = "Could not find DirectSound capture device " + MCB.CaptureDevice.ToUpper();
                    //Logs.Exception("[Main.StartCodec] Could not find DirectSound capture device " & MCB.CaptureDevice & " in Windows enumerated Capture Devices")
                }
            }
            catch (Exception ex)
            {
                strFault = ex.Message;
                functionReturnValue = false;
                //Logs.Exception("[Main.StartCodec] Err: " & ex.ToString)
            }
        }
        if (functionReturnValue)
        {
            if (MCB.DebugLog)
                Logs.WriteDebug("[Main.StartCodec] Successful start of codec");
            objProtocol.ARDOPProtocolState = ProtocolState.DISC;
        }
        else
        {
            if (MCB.DebugLog)
                Logs.WriteDebug("[Main.StartCodec] CODEC Start Failed");
            WriteTextToSpectrum("CODEC Start Failed", Brushes.Red);
            objProtocol.ARDOPProtocolState = ProtocolState.OFFLINE;
            while (Now.Subtract(dttStartWait).TotalSeconds < 3)
            {
                Application.DoEvents();
                Thread.Sleep(100);
            }
            // Retry the codec start in 5 seconds.
            tmrStartCODEC.Interval = 5000;
            tmrStartCODEC.Start();
        }
        InititializeSpectrum(Color.Black);
        MCB.DisplayWaterfall = blnWaterfallSave;
        MCB.DisplaySpectrum = blnSpectrumSave;
    }
    return functionReturnValue;
}
public void StartCapture(int sampleRate, Capture captureDevice)
{
    // Tear down any session in progress and flush pending request state.
    StopCapture();
    EmptyRequest();

    this.sampleRate = sampleRate;

    // Reset all per-session recording state.
    readPos = 0;
    IsRecording = false;
    record = null;
    recordTime = 0;
    noRecordTime = 0;
    lastSample = null;
    lastSize = 0;

    // Fall back to the default capture device when none is supplied.
    capture = captureDevice ?? new Capture();

    // 16-bit mono PCM at the requested sample rate.
    WaveFormat fmt = new WaveFormat();
    fmt.FormatTag = WaveFormatTag.Pcm;
    fmt.Channels = 1;
    fmt.BitsPerSample = 16;
    fmt.BlockAlign = 2;
    fmt.SamplesPerSecond = sampleRate;
    fmt.AverageBytesPerSecond = sampleRate * 2;

    CaptureBufferDescription desc = new CaptureBufferDescription();
    desc.BufferBytes = bufferSize;
    desc.Format = fmt;

    captureBuffer = new CaptureBuffer(desc, capture);
    captureBuffer.Start(true); // looping capture

    captureThread = new Thread(captureLoop);
    captureThread.Start();
    new Thread(EmptyRequest).Start();
}
public void StopCapture()
{
    // Terminate the worker first so nothing keeps reading the buffer.
    Thread worker = captureThread;
    captureThread = null;
    if (worker != null)
        worker.Abort();

    // Release the DirectSound capture buffer and device.
    CaptureBuffer buf = captureBuffer;
    captureBuffer = null;
    if (buf != null)
        buf.Dispose();

    Capture dev = capture;
    capture = null;
    if (dev != null)
        dev.Dispose();

    IsRecording = false;
}
/*
 * Send synchronously captures audio from the microphone and streams it over
 * UDP on port 1550 to every user in the chat room (skipping our own address).
 */
private void Send()
{
    try
    {
        // Capture into a looped buffer and read it out one half at a time;
        // autoResetEvent is signalled (by the notify positions) whenever a
        // half fills up.
        captureBuffer = new CaptureBuffer(captureBufferDescription, capture);
        CreateNotifyPositions();
        int halfBuffer = bufferSize / 2;
        captureBuffer.Start(true);
        bool readFirstBufferPart = true;
        int offset = 0;
        MemoryStream memStream = new MemoryStream(halfBuffer);
        bStop = false;
        while (!bStop)
        {
            autoResetEvent.WaitOne();
            memStream.Seek(0, SeekOrigin.Begin);
            captureBuffer.Read(offset, memStream, halfBuffer, LockFlag.None);
            readFirstBufferPart = !readFirstBufferPart;
            offset = readFirstBufferPart ? 0 : halfBuffer;
            //TODO: Fix this ugly way of initializing differently.
            // Choose the vocoder, then send the data to every user in the
            // chat room on port 1550.
            List<ChatUser> chatUserlist = LoginRoler.chatUserlist;
            if (chatUserlist != null && chatUserlist.Count > 0)
            {
                for (int a = 0; a < chatUserlist.Count; a++)
                {
                    string ip = (((ChatUser)chatUserlist[a]).ChatIp).ToString();
                    if (ip.Equals(LoginRoler.ip))
                        continue; // don't echo our own audio back to ourselves
                    if (vocoder == Vocoder.ALaw)
                    {
                        // G.711 A-law: compresses 16-bit samples to 8 bits.
                        byte[] dataToWrite = ALawEncoder.ALawEncode(memStream.GetBuffer());
                        udpClient.Send(dataToWrite, dataToWrite.Length, ip, 1550);
                    }
                    else if (vocoder == Vocoder.uLaw)
                    {
                        byte[] dataToWrite = MuLawEncoder.MuLawEncode(memStream.GetBuffer());
                        udpClient.Send(dataToWrite, dataToWrite.Length, ip, 1550);
                    }
                    else
                    {
                        byte[] dataToWrite = memStream.GetBuffer(); // raw PCM
                        udpClient.Send(dataToWrite, dataToWrite.Length, ip, 1550);
                    }
                }
            }
        }
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.Message, "VoiceChat-Send ()", MessageBoxButtons.OK, MessageBoxIcon.Error);
    }
    finally
    {
        // BUG FIX: guard against captureBuffer being null — if the
        // CaptureBuffer constructor threw, the old code raised a
        // NullReferenceException here and never completed the flag handshake.
        if (captureBuffer != null)
            captureBuffer.Stop();
        //Increment flag by one.
        nUdpClientFlag += 1;
        //When flag is two then it means we have got out of loops in Send and Receive.
        while (nUdpClientFlag != 2)
        {
        }
        //Clear the flag.
        nUdpClientFlag = 0;
        //Close the socket.
        //udpClient.Close();
    }
}
/// <summary>
/// Creates the DirectSound capture buffer: builds the wave format, sizes the
/// buffer from the notification count, and opens it on the capture device.
/// </summary>
public void CreateCaptureBuffer()
{
    // A capture buffer needs two things: a description (format, size, ...)
    // and the capture device to record from.
    mWavFormat = SetWaveFormat(); // establish the wave format first

    // One second of audio split across iNotifyNum notifications; keeping
    // each chunk under ~0.2 s of data keeps voice latency below 200 ms.
    iNotifySize = mWavFormat.AverageBytesPerSecond / iNotifyNum;
    iBufferSize = iNotifyNum * iNotifySize;

    CaptureBufferDescription desc = new CaptureBufferDescription();
    desc.Format = mWavFormat;       // data format the buffer will capture
    desc.BufferBytes = iBufferSize;
    desc.ControlEffects = true;
    desc.WaveMapped = true;

    capturebuffer = new CaptureBuffer(desc, capture);
}
private void CreateCaptureBuffer()
{
    // Release any previous notify object and capture buffer before
    // creating replacements.
    if (mNotify != null)
    {
        mNotify.Dispose();
        mNotify = null;
    }
    if (mRecBuffer != null)
    {
        mRecBuffer.Dispose();
        mRecBuffer = null;
    }

    // Notification chunk: 1/8th of a second of audio, at least 1024 bytes,
    // trimmed down to a whole number of sample blocks.
    mNotifySize = Math.Max(1024, mWavFormat.AverageBytesPerSecond / 8);
    mNotifySize -= mNotifySize % mWavFormat.BlockAlign;
    mBufferSize = mNotifySize * cNotifyNum;

    // Describe and create the capture buffer on the capture device.
    CaptureBufferDescription description = new CaptureBufferDescription();
    description.BufferBytes = mBufferSize;
    description.Format = mWavFormat;

    mRecBuffer = new CaptureBuffer(description, mCapDev);
    mNextCaptureOffset = 0;
}
/*
 * Send synchronously captures audio from the microphone and sends it to the
 * other party over UDP on port 1550.
 */
private void Send()
{
    try
    {
        // Capture into a looped buffer and read it out one half at a time;
        // autoResetEvent is signalled (by the notify positions) whenever a
        // half fills up.
        captureBuffer = new CaptureBuffer(captureBufferDescription, capture);
        CreateNotifyPositions();
        int halfBuffer = bufferSize / 2;
        captureBuffer.Start(true);
        bool readFirstBufferPart = true;
        int offset = 0;
        MemoryStream memStream = new MemoryStream(halfBuffer);
        bStop = false;
        while (!bStop)
        {
            autoResetEvent.WaitOne();
            memStream.Seek(0, SeekOrigin.Begin);
            captureBuffer.Read(offset, memStream, halfBuffer, LockFlag.None);
            readFirstBufferPart = !readFirstBufferPart;
            offset = readFirstBufferPart ? 0 : halfBuffer;
            //TODO: Fix this ugly way of initializing differently.
            //Choose the vocoder. And then send the data to other party at port 1550.
            if (vocoder == Vocoder.ALaw)
            {
                // G.711 A-law: compresses 16-bit samples to 8 bits.
                byte[] dataToWrite = ALawEncoder.ALawEncode(memStream.GetBuffer());
                udpClient.Send(dataToWrite, dataToWrite.Length, otherPartyIP.Address.ToString(), 1550);
            }
            else if (vocoder == Vocoder.uLaw)
            {
                byte[] dataToWrite = MuLawEncoder.MuLawEncode(memStream.GetBuffer());
                udpClient.Send(dataToWrite, dataToWrite.Length, otherPartyIP.Address.ToString(), 1550);
            }
            else
            {
                byte[] dataToWrite = memStream.GetBuffer(); // raw PCM
                udpClient.Send(dataToWrite, dataToWrite.Length, otherPartyIP.Address.ToString(), 1550);
            }
        }
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.Message, "VoiceChat-Send ()", MessageBoxButtons.OK, MessageBoxIcon.Error);
    }
    finally
    {
        // BUG FIX: guard against captureBuffer being null — if the
        // CaptureBuffer constructor threw, the old code raised a
        // NullReferenceException here and never completed the flag handshake.
        if (captureBuffer != null)
            captureBuffer.Stop();
        //Increment flag by one.
        nUdpClientFlag += 1;
        //When flag is two then it means we have got out of loops in Send and Receive.
        while (nUdpClientFlag != 2)
        {
        }
        //Clear the flag.
        nUdpClientFlag = 0;
        //Close the socket.
        udpClient.Close();
    }
}
/// <summary>
/// Starts capture process: builds the PCM format, creates the capture
/// buffer with evenly spaced notification positions, and launches the
/// capture thread.
/// </summary>
public void Start()
{
    EnsureIdle();
    isCapturing = true;

    // PCM format derived from the configured channel count, bit depth and
    // sample rate.
    WaveFormat format = new WaveFormat();
    format.FormatTag = WaveFormatTag.Pcm;
    format.Channels = ChannelCount;
    format.BitsPerSample = BitsPerSample;
    format.SamplesPerSecond = SampleRate;
    // Bytes per frame across all channels, rounded up to whole bytes.
    format.BlockAlign = (short)((format.Channels * format.BitsPerSample + 7) / 8);
    format.AverageBytesPerSecond = format.BlockAlign * format.SamplesPerSecond;

    bufferLength = format.AverageBytesPerSecond * BufferSeconds;

    CaptureBufferDescription description = new CaptureBufferDescription();
    description.Format = format;
    description.BufferBytes = bufferLength;

    capture = new Capture(device.Id);
    buffer = new CaptureBuffer(description, capture);

    // Spread evenly spaced notification positions across the buffer, all
    // signalling the same event handle.
    int notifyCount = BufferSeconds * NotifyPointsInSecond;
    BufferPositionNotify[] positions = new BufferPositionNotify[notifyCount];
    for (int i = 0; i < positions.Length; i++)
    {
        positions[i] = new BufferPositionNotify();
        positions[i].Offset = (i + 1) * bufferLength / positions.Length - 1;
        positions[i].EventNotifyHandle = positionEventHandle.DangerousGetHandle();
    }
    notify = new Notify(buffer);
    notify.SetNotificationPositions(positions);

    terminated.Reset();
    thread = new Thread(new ThreadStart(ThreadLoop));
    thread.Name = "Sound capture";
    thread.Start();
}
/*
 * Send synchronously captures audio from the microphone and streams it over
 * UDP on port 1550 to every known party in otherPartyIPs.
 */
private void Send()
{
    try
    {
        IsThreadSendEnd = false;
        int users_count = 0;
        // Capture into a looped buffer and read it out one half at a time;
        // autoResetEvent is signalled (by the notify positions) whenever a
        // half fills up.
        captureBuffer = new CaptureBuffer(captureBufferDescription, capture);
        CreateNotifyPositions();
        int halfBuffer = bufferSize / 2;
        captureBuffer.Start(true);
        bool readFirstBufferPart = true;
        int offset = 0;
        MemoryStream memStream = new MemoryStream(halfBuffer);
        bStop = false;
        LogAppend("Sending Started");
        while (!bStop)
        {
            // NOTE(review): autoResetEvent.WaitOne() runs while holding the
            // otherPartyIPs lock, so the list stays locked for up to a
            // half-buffer period — confirm no other thread needs it sooner.
            lock (otherPartyIPs)
            {
                users_count = otherPartyIPs.Count;
                if (users_count > 0)
                {
                    autoResetEvent.WaitOne();
                    memStream.Seek(0, SeekOrigin.Begin);
                    captureBuffer.Read(offset, memStream, halfBuffer, LockFlag.None);
                    readFirstBufferPart = !readFirstBufferPart;
                    offset = readFirstBufferPart ? 0 : halfBuffer;
                    //TODO: Fix this ugly way of initializing differently.
                    // Uncompressed PCM; the vocoder variants are kept for reference:
                    //byte[] dataToWrite = MuLawEncoder.MuLawEncode(memStream.GetBuffer()); //MULAW
                    //byte[] dataToWrite = ALawEncoder.ALawEncode(memStream.GetBuffer()); //ALAW (RECOMENdADO)
                    byte[] dataToWrite = memStream.GetBuffer(); //NORMAL
                    if (bStop)
                        return; // the finally block below still runs and cleans up
                    for (int i = 0; i < users_count; i++)
                        udpClient.Send(dataToWrite, dataToWrite.Length, otherPartyIPs[i].Address.ToString(), 1550);
                }
            }
        }
        IsThreadSendEnd = true;
        LogAppend("Sending Ended");
    }
    catch (Exception ex)
    {
        // MessageBox.Show(ex.Message, "VoiceChat-Send ()", MessageBoxButtons.OK, MessageBoxIcon.Error);
        LogAppend("VoiceChat-Send >> " + ex.Message);
    }
    finally
    {
        // BUG FIX: guard against captureBuffer being null — if the
        // CaptureBuffer constructor threw, the old code raised a
        // NullReferenceException here and never completed the flag handshake.
        if (captureBuffer != null)
            captureBuffer.Stop();
        //Increment flag by one.
        nUdpClientFlag += 1;
        //When flag is two then it means we have got out of loops in Send and Receive.
        while (nUdpClientFlag != 2)
        {
        }
        //Clear the flag.
        nUdpClientFlag = 0;
        //Close the socket.
        udpClient.Close();
    }
}